diff --git a/.github/auto_assign.yml b/.github/auto_assign.yml
index 18151f7454..2a6cff517c 100644
--- a/.github/auto_assign.yml
+++ b/.github/auto_assign.yml
@@ -9,7 +9,6 @@ assignees:
- woop
- tsotnet
- achals
- - adchia
- felixwang9817
# A number of assignees to add to the pull request
diff --git a/.github/fork_workflows/fork_pr_integration_tests_aws.yml b/.github/fork_workflows/fork_pr_integration_tests_aws.yml
index ef53fc1c7d..7261833ae6 100644
--- a/.github/fork_workflows/fork_pr_integration_tests_aws.yml
+++ b/.github/fork_workflows/fork_pr_integration_tests_aws.yml
@@ -83,7 +83,7 @@ jobs:
--health-timeout 5s
--health-retries 5
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -91,7 +91,7 @@ jobs:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: ${{ matrix.python-version }}
@@ -109,9 +109,6 @@ jobs:
aws-region: us-west-2
- name: Use AWS CLI
run: aws sts get-caller-identity
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -126,6 +123,9 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- name: Install apache-arrow on ubuntu
@@ -139,7 +139,9 @@ jobs:
sudo apt install -y -V libarrow-dev
- name: Install apache-arrow on macos
if: matrix.os == 'macOS-latest'
- run: brew install apache-arrow
+ run: |
+ brew install apache-arrow
+ brew install pkg-config
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Setup Redis Cluster
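Note the recurring pattern in these workflow edits: the `Upgrade pip version` step moves below the `actions/cache` step, so the pip cache is restored (keyed on the committed `py{0}-ci-requirements.txt` files via `hashFiles`) before pip itself is upgraded. A minimal sketch of the resulting step order; the cache `path` wiring is an assumption, since the hunks above only show the `key`/`restore-keys` lines:

```yaml
- name: Get pip cache dir
  id: pip-cache
  run: |
    echo "::set-output name=dir::$(pip cache dir)"
- name: Restore pip cache
  uses: actions/cache@v2
  with:
    # path wiring is assumed; the diff only shows the key/restore-keys lines
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
    restore-keys: |
      ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
- name: Upgrade pip version  # now runs after the cache restore
  run: |
    pip install --upgrade "pip>=21.3.1,<23.2"
```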
diff --git a/.github/fork_workflows/fork_pr_integration_tests_gcp.yml b/.github/fork_workflows/fork_pr_integration_tests_gcp.yml
index 4cb22f33fb..1a05c068b5 100644
--- a/.github/fork_workflows/fork_pr_integration_tests_gcp.yml
+++ b/.github/fork_workflows/fork_pr_integration_tests_gcp.yml
@@ -25,7 +25,7 @@ jobs:
--health-timeout 5s
--health-retries 5
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -33,7 +33,7 @@ jobs:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: ${{ matrix.python-version }}
@@ -53,9 +53,6 @@ jobs:
project_id: ${{ secrets.GCP_PROJECT_ID }}
- name: Use gcloud CLI
run: gcloud info
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -70,6 +67,9 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- name: Install apache-arrow on ubuntu
@@ -83,7 +83,9 @@ jobs:
sudo apt install -y -V libarrow-dev
- name: Install apache-arrow on macos
if: matrix.os == 'macOS-latest'
- run: brew install apache-arrow
+ run: |
+ brew install apache-arrow
+ brew install pkg-config
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Setup Redis Cluster
diff --git a/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml b/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml
index 8832c75fca..9327f5c729 100644
--- a/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml
+++ b/.github/fork_workflows/fork_pr_integration_tests_snowflake.yml
@@ -25,7 +25,7 @@ jobs:
--health-timeout 5s
--health-retries 5
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -33,7 +33,7 @@ jobs:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: ${{ matrix.python-version }}
@@ -43,10 +43,6 @@ jobs:
uses: actions/setup-go@v2
with:
go-version: 1.18.0
-
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -61,6 +57,9 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- name: Install apache-arrow on ubuntu
@@ -74,7 +73,9 @@ jobs:
sudo apt install -y -V libarrow-dev
- name: Install apache-arrow on macos
if: matrix.os == 'macOS-latest'
- run: brew install apache-arrow
+ run: |
+ brew install apache-arrow
+ brew install pkg-config
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Setup Redis Cluster
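All three fork workflows rely on the same `pull_request_target` checkout pattern flagged in the comments above: because `pull_request_target` runs in the base repo's context, the PR's merge ref has to be requested explicitly. A minimal sketch of the pattern, assuming nothing beyond what the hunks show:

```yaml
on: pull_request_target  # runs in the base repo's context, with its secrets
jobs:
  integration-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          # fetch the PR's merge commit rather than the base branch
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
```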
diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml
index aef486d601..6e6539cf9e 100644
--- a/.github/workflows/build_wheels.yml
+++ b/.github/workflows/build_wheels.yml
@@ -18,7 +18,7 @@ jobs:
highest_semver_tag: ${{ steps.get_highest_semver.outputs.highest_semver_tag }}
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
persist-credentials: false
- name: Get release version
@@ -52,83 +52,44 @@ jobs:
echo $HIGHEST_SEMVER_TAG
build-python-wheel:
- name: Build wheels on ${{ matrix.os }}
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os: [ ubuntu-latest, macos-10.15 ]
+ name: Build wheels
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v3
+ with:
+ python-version: "3.8"
+ architecture: x64
- name: Setup Node
- uses: actions/setup-node@v2
+ uses: actions/setup-node@v3
with:
node-version: '17.x'
registry-url: 'https://registry.npmjs.org'
- name: Build UI
run: make build-ui
- name: Build wheels
- uses: pypa/cibuildwheel@v2.7.0
- env:
- CIBW_BUILD: "cp3*_x86_64"
- CIBW_SKIP: "cp36-* cp37-* *-musllinux_x86_64 cp310-macosx_x86_64"
- CIBW_ARCHS: "native"
- CIBW_ENVIRONMENT: >
- COMPILE_GO=True PATH=$PATH:/usr/local/go/bin
- CIBW_BEFORE_ALL_LINUX: |
- curl -o go.tar.gz https://dl.google.com/go/go1.18.2.linux-amd64.tar.gz
- tar -C /usr/local -xzf go.tar.gz
- go version
- yum -y update &&
- yum install -y epel-release || yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-$(cut -d: -f5 /etc/system-release-cpe | cut -d. -f1).noarch.rpm &&
- yum install -y https://apache.jfrog.io/artifactory/arrow/centos/$(cut -d: -f5 /etc/system-release-cpe | cut -d. -f1)/apache-arrow-release-latest.rpm &&
- yum install -y --enablerepo=epel arrow-devel # For C++
- CIBW_BEFORE_ALL_MACOS: |
- brew install apache-arrow
- curl -o python.pkg https://www.python.org/ftp/python/3.9.12/python-3.9.12-macosx10.9.pkg
- sudo installer -pkg python.pkg -target /
- # There's a `git restore` in here because `make install-go-ci-dependencies` is actually messing up go.mod & go.sum.
- CIBW_BEFORE_BUILD: |
- make install-protoc-dependencies
- make install-go-proto-dependencies
- make install-go-ci-dependencies
- git status
- git restore go.mod go.sum
- git restore sdk/python/feast/ui/yarn.lock
- CIBW_BEFORE_TEST: "cd {project} && git status"
- # py3.10 on MacOS does not work with Go so we have to install separately. Issue is tracked here: https://github.com/feast-dev/feast/issues/2881.
- - name: Build py310 specific wheels for macos
- if: matrix.os == 'macos-10.15'
- uses: pypa/cibuildwheel@v2.7.0
- env:
- CIBW_BUILD: "cp310-macosx_x86_64"
- CIBW_ARCHS: "native"
- # Need this environment variable because of this issue: https://github.com/pypa/cibuildwheel/issues/952.
- CIBW_ENVIRONMENT: >
- _PYTHON_HOST_PLATFORM=macosx-10.15-x86_64
- # There's a `git restore` in here because remnant go.mod, go.sum changes from the build mess up the wheel naming.
- CIBW_BEFORE_BUILD: |
- git status
- git restore go.mod go.sum
- git restore sdk/python/feast/ui/yarn.lock
- brew install apache-arrow
- - uses: actions/upload-artifact@v2
+ run: |
+ python -m pip install build
+ python -m build --wheel --outdir wheelhouse/
+ - uses: actions/upload-artifact@v3
with:
name: wheels
path: ./wheelhouse/*.whl
build-source-distribution:
name: Build source distribution
- runs-on: macos-10.15
+ runs-on: macos-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Setup Python
id: setup-python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
python-version: "3.10"
architecture: x64
- name: Setup Node
- uses: actions/setup-node@v2
+ uses: actions/setup-node@v3
with:
node-version: '17.x'
registry-url: 'https://registry.npmjs.org'
@@ -137,8 +98,6 @@ jobs:
run: |
pip install -U pip setuptools wheel twine
make install-protoc-dependencies
- make install-go-proto-dependencies
- make install-go-ci-dependencies
make build-ui
git status
git restore go.mod go.sum
@@ -146,7 +105,7 @@ jobs:
- name: Build
run: |
python3 setup.py sdist
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: wheels
path: dist/*
@@ -161,7 +120,7 @@ jobs:
env:
REGISTRY: feastdev
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
@@ -177,7 +136,7 @@ jobs:
needs: [build-python-wheel, build-source-distribution, get-version]
strategy:
matrix:
- os: [ubuntu-latest, macos-10.15 ]
+ os: [ubuntu-latest, macos-latest ]
python-version: [ "3.8", "3.9", "3.10"]
from-source: [ True, False ]
env:
@@ -197,17 +156,17 @@ jobs:
steps:
- name: Setup Python
id: setup-python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- - uses: actions/setup-go@v3
- with:
- go-version: '>=1.17.0'
- uses: actions/download-artifact@v2
with:
name: wheels
path: dist
+ - name: Install OS X dependencies
+ if: matrix.os == 'macos-latest'
+ run: brew install coreutils
- name: Install wheel
if: ${{ !matrix.from-source }}
# try to install all wheels; only the current platform wheel should actually be installed
@@ -215,34 +174,10 @@ jobs:
cd dist/
pip install wheel
for f in *.whl; do pip install $f || true; done
- - name: Install apache-arrow on ubuntu
- if: ${{ matrix.from-source && matrix.os == 'ubuntu-latest' }}
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- - name: Install apache-arrow on macos
- if: ${{ matrix.from-source && matrix.os == 'macos-10.15' && matrix.python-version != '3.10' }}
- run: brew install apache-arrow
- - name: Install dist with go
- if: ${{ matrix.from-source && (matrix.python-version != '3.10' || matrix.os == 'ubuntu-latest')}}
- env:
- COMPILE_GO: "True"
- run: |
- pip install 'grpcio-tools==1.47.0' 'pybindgen==0.22.0'
- go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0
- go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0
- pip install dist/*tar.gz
- # py3.10 on MacOS does not work with Go so we have to install separately. Issue is tracked here: https://github.com/feast-dev/feast/issues/2881
- - name: Install dist w/o go
- if: ${{ matrix.from-source && matrix.python-version == '3.10' && matrix.os == 'macos-10.15'}}
+ - name: Install sdist
+ # try to install the sdist
+ if: ${{ matrix.from-source }}
run: pip install dist/*tar.gz
- - name: Install OS X dependencies
- if: matrix.os == 'macos-10.15'
- run: brew install coreutils
# Validate that the feast version installed is not development and is the correct version of the tag we ran it off of.
- name: Validate Feast Version
run: |
@@ -257,21 +192,12 @@ jobs:
echo "$VERSION_OUTPUT from installed wheel is not in the correct format or doesn't have the right version $VERSION."
exit 1
fi
- - name: Smoke test
- run: |
- feast init test_repo
- cd test_repo/feature_repo
- feast apply
- echo "$TEST_SCRIPT" > run-and-wait.sh
- bash run-and-wait.sh feast serve
- bash run-and-wait.sh feast ui
- # We disable this test for the Python 3.10 binary since it does not include Go.
- - name: Smoke test with go
- if: matrix.python-version != '3.10' || matrix.os == 'ubuntu-latest'
- run: |
- cd test_repo/feature_repo
- feast apply
- echo "$TEST_SCRIPT" > run-and-wait.sh
- pip install cffi
- printf "\ngo_feature_serving: True" >> feature_store.yaml
- bash run-and-wait.sh feast serve
\ No newline at end of file
+ # This is temporarily disabled.
+ # - name: Smoke test
+ # run: |
+ # feast init test_repo
+ # cd test_repo/feature_repo
+ # feast apply
+ # echo "$TEST_SCRIPT" > run-and-wait.sh
+ # bash run-and-wait.sh feast serve
+ # bash run-and-wait.sh feast ui
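The wheel job above replaces `cibuildwheel` (and all of its per-platform Go toolchain and Arrow bootstrapping) with a single pure-Python wheel built by the PyPA `build` frontend, which is why one `ubuntu-latest` runner now suffices. A hedged sketch of a follow-up sanity check; the `Check wheel` step is illustrative and not part of this PR:

```yaml
- name: Build wheels
  run: |
    python -m pip install build
    python -m build --wheel --outdir wheelhouse/
# Illustrative follow-up step (an assumption, not in this PR): install the
# freshly built wheel and import the package to catch packaging errors early.
- name: Check wheel
  run: |
    pip install wheelhouse/*.whl
    python -c "import feast; print(feast.__version__)"
```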
diff --git a/.github/workflows/java_master_only.yml b/.github/workflows/java_master_only.yml
index 356208332f..d82f69dd3c 100644
--- a/.github/workflows/java_master_only.yml
+++ b/.github/workflows/java_master_only.yml
@@ -18,11 +18,11 @@ jobs:
MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar
REGISTRY: gcr.io/kf-feast
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: 'true'
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: "3.8"
@@ -53,7 +53,7 @@ jobs:
if: github.repository == 'feast-dev/feast'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: 'true'
- name: Lint java
@@ -63,7 +63,7 @@ jobs:
if: github.repository == 'feast-dev/feast'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: 'true'
- name: Set up JDK 11
@@ -86,7 +86,7 @@ jobs:
${{ runner.os }}-ut-maven-
- name: Test java
run: make test-java-with-coverage
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: java-coverage-report
path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/
@@ -97,7 +97,7 @@ jobs:
env:
PYTHON: 3.8
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: 'true'
- name: Set up JDK 11
@@ -107,19 +107,11 @@ jobs:
java-package: jdk
architecture: x64
- name: Setup Python (to call feast apply)
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: 3.8
architecture: x64
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -134,16 +126,11 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- name: Install Python dependencies
run: make install-python-ci-dependencies
- uses: actions/cache@v2
diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml
index ea0ec0d9a9..83c52e7dbf 100644
--- a/.github/workflows/java_pr.yml
+++ b/.github/workflows/java_pr.yml
@@ -12,7 +12,7 @@ jobs:
if: github.repository == 'feast-dev/feast'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -27,7 +27,7 @@ jobs:
runs-on: ubuntu-latest
needs: lint-java
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -54,7 +54,7 @@ jobs:
${{ runner.os }}-ut-maven-
- name: Test java
run: make test-java-with-coverage
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: java-coverage-report
path: ${{ github.workspace }}/docs/coverage/java/target/site/jacoco-aggregate/
@@ -69,11 +69,11 @@ jobs:
MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar
REGISTRY: gcr.io/kf-feast
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: 'true'
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: "3.8"
@@ -101,7 +101,7 @@ jobs:
env:
PYTHON: 3.8
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -114,7 +114,7 @@ jobs:
java-version: '11'
java-package: jdk
architecture: x64
- - uses: actions/setup-python@v2
+ - uses: actions/setup-python@v3
with:
python-version: '3.8'
architecture: 'x64'
@@ -143,19 +143,11 @@ jobs:
- name: Use AWS CLI
run: aws sts get-caller-identity
- name: Setup Python (to call feast apply)
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: 3.8
architecture: x64
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -170,22 +162,17 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- name: Install Python dependencies
run: make install-python-ci-dependencies
- name: Run integration tests
run: make test-java-integration
- name: Save report
- uses: actions/upload-artifact@v2
+ uses: actions/upload-artifact@v3
if: failure()
with:
name: it-report
diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml
index ba475e2585..a4a42a11ed 100644
--- a/.github/workflows/linter.yml
+++ b/.github/workflows/linter.yml
@@ -8,21 +8,13 @@ jobs:
env:
PYTHON: 3.8
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Setup Python
id: setup-python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
python-version: "3.8"
architecture: x64
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -37,47 +29,13 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- name: Install dependencies
run: |
- make compile-protos-go
make install-python-ci-dependencies
- name: Lint python
run: make lint-python
-
- lint-go:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- - name: Setup Python
- id: setup-python
- uses: actions/setup-python@v2
- with:
- python-version: "3.8"
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- - name: Install apache-arrow on ubuntu
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- - name: Lint go
- run: make lint-go
\ No newline at end of file
diff --git a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml
index 32c967c6eb..580ea3171b 100644
--- a/.github/workflows/master_only.yml
+++ b/.github/workflows/master_only.yml
@@ -10,7 +10,7 @@ jobs:
if: github.repository == 'feast-dev/feast'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
@@ -58,7 +58,7 @@ jobs:
docker push $ECR_REGISTRY/$ECR_REPOSITORY:${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }}
outputs:
DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }}
- integration-test-python-and-go:
+ integration-test-python:
if: github.repository == 'feast-dev/feast'
needs: build-lambda-docker-image
runs-on: ${{ matrix.os }}
@@ -66,7 +66,6 @@ jobs:
fail-fast: false
matrix:
python-version: [ "3.8", "3.9", "3.10" ]
- go-version: [ 1.17.0 ]
os: [ ubuntu-latest ]
env:
OS: ${{ matrix.os }}
@@ -82,18 +81,13 @@ jobs:
--health-timeout 5s
--health-retries 5
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Setup Python
id: setup-python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: ${{ matrix.go-version }}
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v1'
with:
@@ -112,9 +106,6 @@ jobs:
aws-region: us-west-2
- name: Use AWS CLI
run: aws sts get-caller-identity
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -129,20 +120,11 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- if: matrix.os == 'ubuntu-latest'
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- - name: Install apache-arrow on macos
- if: matrix.os == 'macOS-latest'
- run: brew install apache-arrow
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Setup Redis Cluster
@@ -184,7 +166,7 @@ jobs:
MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar
REGISTRY: gcr.io/kf-feast
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
diff --git a/.github/workflows/nightly-ci.yml b/.github/workflows/nightly-ci.yml
index 2b7eac9301..0e1df81262 100644
--- a/.github/workflows/nightly-ci.yml
+++ b/.github/workflows/nightly-ci.yml
@@ -17,7 +17,7 @@ jobs:
outputs:
WAS_EDITED: ${{ steps.check_date.outputs.WAS_EDITED }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
ref: master
- id: check_date
@@ -27,13 +27,13 @@ jobs:
cleanup_dynamo_tables:
if: github.repository == 'feast-dev/feast'
runs-on: ubuntu-latest
- name: Cleanup dynamo tables which can fail to cleanup
+ name: Cleanup Bigtable / Dynamo tables which can fail to clean up
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
ref: master
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: "3.8"
@@ -47,9 +47,20 @@ jobs:
- name: Install Python dependencies
run: |
pip install boto3
+ pip install google-cloud-bigtable
pip install tqdm
- - name: Run DynamoDB cleanup script
- run: python infra/scripts/cleanup_dynamo_ci.py
+ - name: Authenticate to Google Cloud
+ uses: 'google-github-actions/auth@v1'
+ with:
+ credentials_json: '${{ secrets.GCP_SA_KEY }}'
+ - name: Set up gcloud SDK
+ uses: google-github-actions/setup-gcloud@v1
+ with:
+ project_id: ${{ secrets.GCP_PROJECT_ID }}
+ - name: Use gcloud CLI
+ run: gcloud info
+ - name: Run DynamoDB / Bigtable cleanup script
+ run: python infra/scripts/cleanup_ci.py
build-docker-image:
if: github.repository == 'feast-dev/feast'
needs: [check_date]
@@ -129,12 +140,12 @@ jobs:
--health-timeout 5s
--health-retries 5
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
ref: master
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: ${{ matrix.python-version }}
@@ -162,9 +173,6 @@ jobs:
aws-region: us-west-2
- name: Use AWS CLI
run: aws sts get-caller-identity
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -179,6 +187,9 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- name: Install apache-arrow on ubuntu
diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml
index 9aba7e3bfd..73344ec2dd 100644
--- a/.github/workflows/pr_integration_tests.yml
+++ b/.github/workflows/pr_integration_tests.yml
@@ -102,7 +102,7 @@ jobs:
--health-timeout 5s
--health-retries 5
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -110,16 +110,11 @@ jobs:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v1'
with:
@@ -138,9 +133,6 @@ jobs:
aws-region: us-west-2
- name: Use AWS CLI
run: aws sts get-caller-identity
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -155,20 +147,11 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- if: matrix.os == 'ubuntu-latest'
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- - name: Install apache-arrow on macos
- if: matrix.os == 'macOS-latest'
- run: brew install apache-arrow
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Setup Redis Cluster
diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml
index 4705771911..111a9b51a9 100644
--- a/.github/workflows/pr_local_integration_tests.yml
+++ b/.github/workflows/pr_local_integration_tests.yml
@@ -25,7 +25,7 @@ jobs:
OS: ${{ matrix.os }}
PYTHON: ${{ matrix.python-version }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
# pull_request_target runs the workflow in the context of the base repo
# as such actions/checkout needs to be explicitly configured to retrieve
@@ -33,14 +33,11 @@ jobs:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
id: setup-python
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=21.3.1,<22.1"
- name: Get pip cache dir
id: pip-cache
run: |
@@ -55,17 +52,11 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- if: matrix.os == 'ubuntu-latest'
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Test local integration tests
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 11f08bf2e5..135d1d3a8d 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -14,7 +14,7 @@ jobs:
version_without_prefix: ${{ steps.get_release_version_without_prefix.outputs.version_without_prefix }}
highest_semver_tag: ${{ steps.get_highest_semver.outputs.highest_semver_tag }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Get release version
id: get_release_version
run: echo ::set-output name=release_version::${GITHUB_REF#refs/*/}
@@ -54,7 +54,7 @@ jobs:
MAVEN_CACHE: gs://feast-templocation-kf-feast/.m2.2020-08-19.tar
REGISTRY: feastdev
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
@@ -105,7 +105,7 @@ jobs:
HELM_VERSION: v3.8.0
VERSION_WITHOUT_PREFIX: ${{ needs.get-version.outputs.version_without_prefix }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v1'
with:
@@ -149,7 +149,7 @@ jobs:
runs-on: ubuntu-latest
needs: get-version
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
with:
submodules: 'true'
- name: Set up JDK 11
@@ -158,7 +158,7 @@ jobs:
java-version: '11'
java-package: jdk
architecture: x64
- - uses: actions/setup-python@v2
+ - uses: actions/setup-python@v3
with:
python-version: '3.7'
architecture: 'x64'
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index db62766a1c..a01bae4068 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -13,6 +13,11 @@ on:
required: true
default: ""
type: string
+ publish_ui:
+ description: 'Publish to NPM?'
+ required: true
+ default: true
+ type: boolean
jobs:
@@ -25,13 +30,14 @@ jobs:
next_version: ${{ steps.get_versions.outputs.next_version }}
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
persist-credentials: false
- name: Setup Node.js
- uses: actions/setup-node@v2
+ uses: actions/setup-node@v3
with:
- node-version: '16'
+ node-version: '18.x'
+ registry-url: 'https://registry.npmjs.org'
- name: Release (Dry Run)
id: get_versions
run: |
@@ -53,10 +59,10 @@ jobs:
CURRENT_VERSION: ${{ needs.get_dry_release_versions.outputs.current_version }}
NEXT_VERSION: ${{ needs.get_dry_release_versions.outputs.next_version }}
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-node@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-node@v3
with:
- node-version: '17.x'
+ node-version: '18.x'
registry-url: 'https://registry.npmjs.org'
- name: Bump file versions
run: python ./infra/scripts/release/bump_file_versions.py ${CURRENT_VERSION} ${NEXT_VERSION}
@@ -86,7 +92,6 @@ jobs:
publish-web-ui-npm:
- if: github.repository == 'feast-dev/feast'
needs: [validate_version_bumps, get_dry_release_versions]
runs-on: ubuntu-latest
env:
@@ -95,10 +100,10 @@ jobs:
CURRENT_VERSION: ${{ needs.get_dry_release_versions.outputs.current_version }}
NEXT_VERSION: ${{ needs.get_dry_release_versions.outputs.next_version }}
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-node@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-node@v3
with:
- node-version: '17.x'
+ node-version: '18.x'
registry-url: 'https://registry.npmjs.org'
- name: Bump file versions (temporarily for Web UI publish)
run: python ./infra/scripts/release/bump_file_versions.py ${CURRENT_VERSION} ${NEXT_VERSION}
@@ -109,8 +114,8 @@ jobs:
working-directory: ./ui
run: yarn build:lib
- name: Publish UI package
- if: github.event.inputs.dry_run == 'false'
working-directory: ./ui
+ if: github.event.inputs.dry_run == 'false' && github.event.inputs.publish_ui == 'true'
run: npm publish
env:
# This publish uses an NPM automation token to bypass 2FA
@@ -128,13 +133,14 @@ jobs:
GIT_COMMITTER_EMAIL: feast-ci-bot@willem.co
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
persist-credentials: false
- name: Setup Node.js
- uses: actions/setup-node@v2
+ uses: actions/setup-node@v3
with:
- node-version: '16'
+ node-version: '18.x'
+ registry-url: 'https://registry.npmjs.org'
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@master
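The new `publish_ui` input makes the NPM publish step opt-out on manual runs. One subtlety the gate above depends on: values read through `github.event.inputs` are strings even for `type: boolean` inputs, hence the comparison against the literal `'true'`. A minimal sketch of the pattern:

```yaml
on:
  workflow_dispatch:
    inputs:
      publish_ui:
        description: 'Publish to NPM?'
        required: true
        default: true
        type: boolean
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - name: Publish UI package
        # github.event.inputs values are strings, so compare against 'true'
        if: github.event.inputs.publish_ui == 'true'
        run: npm publish
```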
diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml
index ea8bef2e2f..f03cd33346 100644
--- a/.github/workflows/unit_tests.yml
+++ b/.github/workflows/unit_tests.yml
@@ -18,26 +18,24 @@ jobs:
OS: ${{ matrix.os }}
PYTHON: ${{ matrix.python-version }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- name: Setup Python
id: setup-python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- name: Install mysql on macOS
if: startsWith(matrix.os, 'macOS')
run: |
brew install mysql
PATH=$PATH:/usr/local/mysql/bin
- - name: Upgrade pip version
+ - name: Work around Homebrew MySQL being broken
+ # See https://github.com/Homebrew/homebrew-core/issues/130258 for more details.
+ if: startsWith(matrix.os, 'macOS')
run: |
- pip install --upgrade "pip>=22.1,<23"
+ brew install zlib
+ ln -sv $(brew --prefix zlib)/lib/libz.dylib $(brew --prefix)/lib/libzlib.dylib
- name: Get pip cache dir
id: pip-cache
run: |
@@ -52,60 +50,24 @@ jobs:
key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
restore-keys: |
${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
+ - name: Upgrade pip version
+ run: |
+ pip install --upgrade "pip>=21.3.1,<23.2"
- name: Install pip-tools
run: pip install pip-tools
- - name: Install apache-arrow on ubuntu
- if: matrix.os == 'ubuntu-latest'
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- - name: Install apache-arrow on macos
- if: matrix.os == 'macOS-latest'
- run: brew install apache-arrow
- name: Install dependencies
run: make install-python-ci-dependencies
- name: Test Python
run: pytest -n 8 --cov=./ --cov-report=xml --color=yes sdk/python/tests
- unit-test-go:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Setup Python
- id: setup-python
- uses: actions/setup-python@v2
- with:
- python-version: "3.8"
- - name: Upgrade pip version
- run: |
- pip install --upgrade "pip>=22.1,<23"
- - name: Setup Go
- id: setup-go
- uses: actions/setup-go@v2
- with:
- go-version: 1.18.0
- - name: Install apache-arrow on ubuntu
- run: |
- sudo apt update
- sudo apt install -y -V ca-certificates lsb-release wget
- wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
- sudo apt update
- sudo apt install -y -V libarrow-dev
- - name: Test
- run: make test-go
unit-test-ui:
runs-on: ubuntu-latest
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-node@v2
+ - uses: actions/checkout@v3
+ - uses: actions/setup-node@v3
with:
node-version: '17.x'
registry-url: 'https://registry.npmjs.org'
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 68bdd8db5c..7d5c493f1d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,148 @@
# Changelog
+## [0.34.1](https://github.com/feast-dev/feast/compare/v0.34.0...v0.34.1) (2023-09-07)
+
+
+### Bug Fixes
+
+* Fix warnings from deprecated paths and update default log level ([#3757](https://github.com/feast-dev/feast/issues/3757)) ([77bfe36](https://github.com/feast-dev/feast/commit/77bfe3625fe3815fb820b4bc462361861c5c165d))
+* Pin numpy version to avoid spammy deprecation messages ([904c4c7](https://github.com/feast-dev/feast/commit/904c4c705242572fd7b5bc2c0d8d607a5f88eb02))
+* Set keepalives_idle None by default ([#3756](https://github.com/feast-dev/feast/issues/3756)) ([093928d](https://github.com/feast-dev/feast/commit/093928d10342105ecff4348eb365d6703a0fdc28))
+
+# [0.34.0](https://github.com/feast-dev/feast/compare/v0.33.0...v0.34.0) (2023-09-07)
+
+
+### Bug Fixes
+
+* Add NUMERIC to bq_to_feast type map ([#3719](https://github.com/feast-dev/feast/issues/3719)) ([6474b4b](https://github.com/feast-dev/feast/commit/6474b4b0169dc9b3df8e8daecded2b1fad5ead58))
+* Fix python unit tests ([#3734](https://github.com/feast-dev/feast/issues/3734)) ([e81684d](https://github.com/feast-dev/feast/commit/e81684d4f7916c986fa8e6cf06c2918951469799))
+* Handle unknown postgres source types gracefully ([#3634](https://github.com/feast-dev/feast/issues/3634)) ([d7041f4](https://github.com/feast-dev/feast/commit/d7041f4cce813d349e9016da55d65a65c1ec2355))
+* Pin protobuf version to avoid seg fault on some machines ([028cc20](https://github.com/feast-dev/feast/commit/028cc20a28118bd31deca8965782d5ad25f74300))
+* Remove unwanted excessive splitting of gcs path, so expected gcs parquet paths are returned from BigQueryRetrievalJob.to_remote_storage() ([#3730](https://github.com/feast-dev/feast/issues/3730)) ([f2c5988](https://github.com/feast-dev/feast/commit/f2c59885e31f3f238dbd9c13cd1ba168e3233a9d))
+* Run store.plan() only when need it. ([#3708](https://github.com/feast-dev/feast/issues/3708)) ([7bc7c47](https://github.com/feast-dev/feast/commit/7bc7c47b4507310850474290131c03fb6d480834))
+* Saved datasets no longer break CLI registry-dump command ([#3717](https://github.com/feast-dev/feast/issues/3717)) ([f28ccc2](https://github.com/feast-dev/feast/commit/f28ccc2b8f42bcca943d498ad583337d4cd70383))
+* Update py3.8 ci requirements for cython 3.0 release ([#3735](https://github.com/feast-dev/feast/issues/3735)) ([1695c13](https://github.com/feast-dev/feast/commit/1695c13fa8f48fdc2b5e627837043c5eea0914a9))
+
+
+### Features
+
+* Enhance customization of Trino connections when using Trino-based Offline Stores ([#3699](https://github.com/feast-dev/feast/issues/3699)) ([ed7535e](https://github.com/feast-dev/feast/commit/ed7535e23d490249ca7d224fb88e53b98d496ec0))
+* Implement gRPC server to ingest streaming features ([#3687](https://github.com/feast-dev/feast/issues/3687)) ([a3fcd1f](https://github.com/feast-dev/feast/commit/a3fcd1f369bdf07174b5ecf2a49ca9864cf145d4))
+
+# [0.33.0](https://github.com/feast-dev/feast/compare/v0.32.0...v0.33.0) (2023-08-14)
+
+
+### Bug Fixes
+
+* Add aws-sts dependency in java sdk so that S3 client acquires IRSA role ([#3696](https://github.com/feast-dev/feast/issues/3696)) ([c75a01f](https://github.com/feast-dev/feast/commit/c75a01fce2d52cd18479ace748b8eb2e6c81c988))
+* Redshift push ignores schema ([#3671](https://github.com/feast-dev/feast/issues/3671)) ([76270f6](https://github.com/feast-dev/feast/commit/76270f66b3d98b0119b70927c06908f9834b6120))
+
+
+### Features
+
+* Add possibility to save dataset as table, when spark config has remote warehouse info ([#3645](https://github.com/feast-dev/feast/issues/3645)) ([22c109b](https://github.com/feast-dev/feast/commit/22c109bc088d093a7c81c59e11490a9a21f82309))
+
+# [0.32.0](https://github.com/feast-dev/feast/compare/v0.31.0...v0.32.0) (2023-07-17)
+
+
+### Bug Fixes
+
+* Added generic Feature store Creation for CLI ([#3618](https://github.com/feast-dev/feast/issues/3618)) ([bf740d2](https://github.com/feast-dev/feast/commit/bf740d2d0ef3b62cf27f25083b5b4d49955b56fb))
+* Broken non-root path with projects-list.json ([#3665](https://github.com/feast-dev/feast/issues/3665)) ([4861af0](https://github.com/feast-dev/feast/commit/4861af0f3206f965ad2f7a4eddf2ebec2df149f8))
+* Clean up snowflake to_spark_df() ([#3607](https://github.com/feast-dev/feast/issues/3607)) ([e8e643e](https://github.com/feast-dev/feast/commit/e8e643e3555127daf37fa4961fdadb0508a041db))
+* Entityless fv breaks with `KeyError: __dummy` applying feature_store.plan() on python ([#3640](https://github.com/feast-dev/feast/issues/3640)) ([ef4ef32](https://github.com/feast-dev/feast/commit/ef4ef32f7466b5d874d537abe8def4731b15dc85))
+* Fix scan datasize to 0 for inference schema ([#3628](https://github.com/feast-dev/feast/issues/3628)) ([c3dd74e](https://github.com/feast-dev/feast/commit/c3dd74e238b5b9e784e3dbe102941c66a63f6686))
+* Fix timestamp consistency in push api ([#3614](https://github.com/feast-dev/feast/issues/3614)) ([9b227d7](https://github.com/feast-dev/feast/commit/9b227d7d44f30d28d1faadc8015f25dc4a6f56b5))
+* For SQL registry, increase max data_source_name length to 255 ([#3630](https://github.com/feast-dev/feast/issues/3630)) ([478caec](https://github.com/feast-dev/feast/commit/478caecc8d61b6070ec03bc13688a83b8c5f5936))
+* Implements connection pool for postgres online store ([#3633](https://github.com/feast-dev/feast/issues/3633)) ([059509a](https://github.com/feast-dev/feast/commit/059509a492d180effb1786713738665e293838e7))
+* Manage redis pipe's context ([#3655](https://github.com/feast-dev/feast/issues/3655)) ([48e0971](https://github.com/feast-dev/feast/commit/48e097130e68241e751bd4be5af7427fffad47cf))
+* Missing Catalog argument in athena connector ([#3661](https://github.com/feast-dev/feast/issues/3661)) ([f6d3caf](https://github.com/feast-dev/feast/commit/f6d3caf8affc74aef7ac489d3e6816d45b30d820))
+* Optimize bytes processed when retrieving entity df schema to 0 ([#3680](https://github.com/feast-dev/feast/issues/3680)) ([1c01035](https://github.com/feast-dev/feast/commit/1c010357affd48616d39f1ad01b872fac946269d))
+
+
+### Features
+
+* Add gunicorn for serve with multiprocess ([#3636](https://github.com/feast-dev/feast/issues/3636)) ([4de7faf](https://github.com/feast-dev/feast/commit/4de7faf7b262d30a9f6795911d8fa97df775fa8d))
+* Use string as a substitute for unregistered types during schema inference ([#3646](https://github.com/feast-dev/feast/issues/3646)) ([c474ccd](https://github.com/feast-dev/feast/commit/c474ccdd23ca8161de5e2958f0a12826c020dc44))
+
+# [0.31.0](https://github.com/feast-dev/feast/compare/v0.30.0...v0.31.0) (2023-04-21)
+
+
+### Bug Fixes
+
+* Add Stream Feature Views to helper that collect Feature View names ([#3582](https://github.com/feast-dev/feast/issues/3582)) ([7854f63](https://github.com/feast-dev/feast/commit/7854f637160d4d1f4758b83e6c396fe49447e7b7))
+* Add StreamFeatureViewSpec to FeastObjectSpecProto convenience type ([#3550](https://github.com/feast-dev/feast/issues/3550)) ([3cefd6c](https://github.com/feast-dev/feast/commit/3cefd6cf806997be4ea8427bcf4aa9852d6ce038))
+* Batch Snowflake materialization queries to obey Snowpark 100 fea… ([#3406](https://github.com/feast-dev/feast/issues/3406)) ([f9862b5](https://github.com/feast-dev/feast/commit/f9862b565b6c9019ec146871d2fb45590eb31576))
+* Bytewax materializer security context ([#3573](https://github.com/feast-dev/feast/issues/3573)) ([6794338](https://github.com/feast-dev/feast/commit/6794338d0c9405a5a9ba7ef2b47de98cd905474e))
+* **cI:** Install coreutils in mac github workers for smoke test ([#3563](https://github.com/feast-dev/feast/issues/3563)) ([e7421c1](https://github.com/feast-dev/feast/commit/e7421c11172aaafff34da98fc14cf763c2d70002))
+* Fix bug with no SqlRegistryConfig class ([#3586](https://github.com/feast-dev/feast/issues/3586)) ([6dc1368](https://github.com/feast-dev/feast/commit/6dc1368afb66a4231b7513939a7cbf204ab4d46f))
+* Fix Snowflake template ([#3584](https://github.com/feast-dev/feast/issues/3584)) ([6c09c39](https://github.com/feast-dev/feast/commit/6c09c39b64e31dc6e84be566524d6126683f3013))
+* Make snowflake to remote tables temporary ([#3588](https://github.com/feast-dev/feast/issues/3588)) ([ad48146](https://github.com/feast-dev/feast/commit/ad4814643abd28d5b2e119b8ef46ddfdce77424a))
+* Remove snowflake source warehouse tech debt ([#3422](https://github.com/feast-dev/feast/issues/3422)) ([7da0580](https://github.com/feast-dev/feast/commit/7da058085cd1211fb383ff0a6c5ae8f59999c5f0))
+* Snowflake remote storage ([#3574](https://github.com/feast-dev/feast/issues/3574)) ([f8d3890](https://github.com/feast-dev/feast/commit/f8d3890f9f049c4b9190456b071e0fdb29aae69e))
+* Support param timeout when persisting ([#3593](https://github.com/feast-dev/feast/issues/3593)) ([01a98f0](https://github.com/feast-dev/feast/commit/01a98f08e9e6d0aebf41188f2644f49111ea4ca9))
+* Use pyarrow in a way that works across versions ([#3562](https://github.com/feast-dev/feast/issues/3562)) ([1289f3f](https://github.com/feast-dev/feast/commit/1289f3f7eea6bd3b08617606862a75f0224f9f18))
+* Wrap the bigquery table name with backtick. ([#3577](https://github.com/feast-dev/feast/issues/3577)) ([09f0e7e](https://github.com/feast-dev/feast/commit/09f0e7e1011fc451b3bfb94c4b7764007fc69836))
+
+
+### Features
+
+* Add AWS Redshift Serverless support ([#3595](https://github.com/feast-dev/feast/issues/3595)) ([58ce148](https://github.com/feast-dev/feast/commit/58ce148401fe578b1727bc42ee6b4b9a558660c7))
+* Add Hazelcast as an online store ([#3523](https://github.com/feast-dev/feast/issues/3523)) ([b05d50b](https://github.com/feast-dev/feast/commit/b05d50bcfeb179c2596f96f0d0a714754c516361))
+* Cache Bigtable client ([#3602](https://github.com/feast-dev/feast/issues/3602)) ([b27472f](https://github.com/feast-dev/feast/commit/b27472fc1fb42368ffe1556c848dc3b21b2fca0c))
+* Relax aws extras requirements ([#3585](https://github.com/feast-dev/feast/issues/3585)) ([7e77382](https://github.com/feast-dev/feast/commit/7e77382c6b75f514e18b683fef1495fa1fa87308))
+* Show bigquery datasource table and query on UI ([#3600](https://github.com/feast-dev/feast/issues/3600)) ([58d63f7](https://github.com/feast-dev/feast/commit/58d63f7e6b1dde3dcd8893e4448940ea34e671cf))
+* Update snowflake offline store job output formats -- added arrow ([#3589](https://github.com/feast-dev/feast/issues/3589)) ([be3e349](https://github.com/feast-dev/feast/commit/be3e3491d83e337af42e06f75226919904cb5d86))
+
+# [0.30.0](https://github.com/feast-dev/feast/compare/v0.29.0...v0.30.0) (2023-03-24)
+
+
+### Bug Fixes
+
+* Add description attribute to the Field.from_proto method ([#3469](https://github.com/feast-dev/feast/issues/3469)) ([473f8d9](https://github.com/feast-dev/feast/commit/473f8d93fa8d565e53fc59b3c444a1b8ed061c51))
+* Add filesystem kwargs when read prev_table on FileRetrievalJob (… ([#3491](https://github.com/feast-dev/feast/issues/3491)) ([dca4745](https://github.com/feast-dev/feast/commit/dca47458c81c211fee485a502feebe28426848f0)), closes [#3490](https://github.com/feast-dev/feast/issues/3490)
+* Bytewax image pull secret config ([#3547](https://github.com/feast-dev/feast/issues/3547)) ([d2d13b1](https://github.com/feast-dev/feast/commit/d2d13b1762ba67c386fcd48351f3872b92671450))
+* Clean up Rockset Online Store for use ([#3549](https://github.com/feast-dev/feast/issues/3549)) ([a76c6d0](https://github.com/feast-dev/feast/commit/a76c6d0f9e3aa28d03d430f6f85ce4e91870c844))
+* Feature view `entities` from_proto type ([#3524](https://github.com/feast-dev/feast/issues/3524)) ([57bbb61](https://github.com/feast-dev/feast/commit/57bbb61829fffe08ff3e09aceb1e82ea862b55a2))
+* Fix missing requests requirement after GCP requirement removed. Make BigQuerySource not require gcp extra ([2c85421](https://github.com/feast-dev/feast/commit/2c85421fef02dc85854960b4616f00e613934c01))
+* Fix SQL Registry cache miss ([#3482](https://github.com/feast-dev/feast/issues/3482)) ([3249b97](https://github.com/feast-dev/feast/commit/3249b97b5471322e068f81fc65d9072f2eed1ba3))
+* Fixed path inside quickstart notebook ([#3456](https://github.com/feast-dev/feast/issues/3456)) ([66edc32](https://github.com/feast-dev/feast/commit/66edc32b97f51049dd0fc97765c714c0c6e374e5))
+* Improve BQ point-in-time joining scalability ([#3429](https://github.com/feast-dev/feast/issues/3429)) ([ff66784](https://github.com/feast-dev/feast/commit/ff66784ddb1e63e68b88c47132996eccb13891ae))
+* Pin typeguard to 2.13.3 which is what we are currently using. ([#3542](https://github.com/feast-dev/feast/issues/3542)) ([61f6fb0](https://github.com/feast-dev/feast/commit/61f6fb03b27cfa92672beb054ee8aba41145645c))
+* Protobuf lower bound to 3.20 to alert that Feast is incompatible with tensorflow ([#3476](https://github.com/feast-dev/feast/issues/3476)) ([9ca59e3](https://github.com/feast-dev/feast/commit/9ca59e32d2f1c01cec3b5afaff6802e6036dcad8))
+* Spark kafka processor sorting ([#3479](https://github.com/feast-dev/feast/issues/3479)) ([f2cbf43](https://github.com/feast-dev/feast/commit/f2cbf43d4be6829ce3affb72b8a5416d8e084ba9))
+* UI working behind base url ([#3514](https://github.com/feast-dev/feast/issues/3514)) ([9a3fd98](https://github.com/feast-dev/feast/commit/9a3fd98468edc6e5fd185d05b8dd1cabac73845c))
+* Update go dependencies ([#3512](https://github.com/feast-dev/feast/issues/3512)) ([bada97c](https://github.com/feast-dev/feast/commit/bada97c9dadf05bb369e6f820290b0411bc7412d))
+
+
+### Features
+
+* Add Rockset as an OnlineStore ([#3405](https://github.com/feast-dev/feast/issues/3405)) ([fd91cda](https://github.com/feast-dev/feast/commit/fd91cda1af47bde948ef6a2f4688785358ae38ec))
+* Add Snowflake Registry ([#3363](https://github.com/feast-dev/feast/issues/3363)) ([ec1e61d](https://github.com/feast-dev/feast/commit/ec1e61d11c24247996b0f8508f44ec7f31b9145c))
+* Added SnowflakeConnection caching ([#3531](https://github.com/feast-dev/feast/issues/3531)) ([f9f8df2](https://github.com/feast-dev/feast/commit/f9f8df2802b980a3ee161222b174d397764f755a))
+* Adding query timeout to `to_df` and `to_arrow` retrieval methods ([#3505](https://github.com/feast-dev/feast/issues/3505)) ([bab6644](https://github.com/feast-dev/feast/commit/bab6644308efb1aa8bce52f2f47df9de87492fc9))
+* adds k8s config options to Bytewax materialization engine ([#3518](https://github.com/feast-dev/feast/issues/3518)) ([1883f55](https://github.com/feast-dev/feast/commit/1883f55729ffddaef8d730ba3ffb76eb50cc7201))
+
+# [0.29.0](https://github.com/feast-dev/feast/compare/v0.28.0...v0.29.0) (2023-01-31)
+
+
+### Bug Fixes
+
+* Add check for bool type in addition to sample ([#3452](https://github.com/feast-dev/feast/issues/3452)) ([1c7c491](https://github.com/feast-dev/feast/commit/1c7c491378c9a5dc892ec58f2d81d4e95b800580))
+* Buggy SQL for postgres source ([#3424](https://github.com/feast-dev/feast/issues/3424)) ([1ea100e](https://github.com/feast-dev/feast/commit/1ea100ef472a7cc5b750d4b84992a254b4582de6))
+* Ensure no duplicates in `fv.schema` ([#3460](https://github.com/feast-dev/feast/issues/3460)) ([08ffa8d](https://github.com/feast-dev/feast/commit/08ffa8dff61acd7047d205083b78efa98e2dccb8))
+* Fix delete sfv twice issue ([#3466](https://github.com/feast-dev/feast/issues/3466)) ([dfd5eae](https://github.com/feast-dev/feast/commit/dfd5eaec6bab4961a7981e4f6a70b45e4d72bce4))
+* Stream feature view UI shows transformation issue ([#3464](https://github.com/feast-dev/feast/issues/3464)) ([1ef5137](https://github.com/feast-dev/feast/commit/1ef51376a67347c31ee2e7a037be844526ecc48d))
+* Update registry.refresh to have a default arg ([#3450](https://github.com/feast-dev/feast/issues/3450)) ([2f7c4ed](https://github.com/feast-dev/feast/commit/2f7c4ede8f9e66703714261f1152f78526d4bf43))
+* Updating the batch field so that you can query create and event date. ([#3411](https://github.com/feast-dev/feast/issues/3411)) ([01ab462](https://github.com/feast-dev/feast/commit/01ab462d49442d8c7f4de418132665e48552c22d)), closes [#3401](https://github.com/feast-dev/feast/issues/3401)
+
+
+### Features
+
+* Add data source search ([#3449](https://github.com/feast-dev/feast/issues/3449)) ([fbbb293](https://github.com/feast-dev/feast/commit/fbbb2935fd7c722dbe85f19a8ddf788765116360))
+* Adding list_validation_references for default and sql registry ([#3436](https://github.com/feast-dev/feast/issues/3436)) ([21dd253](https://github.com/feast-dev/feast/commit/21dd253adda26c18366cf4338512bdc2c00882cf))
+* Make UI accessible behind proxy ([#3428](https://github.com/feast-dev/feast/issues/3428)) ([753d8db](https://github.com/feast-dev/feast/commit/753d8dbb5e34c24cf065f599a2cd370b3723de9c))
+
# [0.28.0](https://github.com/feast-dev/feast/compare/v0.27.0...v0.28.0) (2023-01-03)
diff --git a/CODEOWNERS b/CODEOWNERS
index 259c13ea3f..4eae6d3524 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -2,16 +2,16 @@
# for more info about CODEOWNERS file
# Core Interfaces
-/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @chhabrakadabra @mavysavydav @sfc-gh-madkins
+/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @sfc-gh-madkins
/sdk/python/feast/infra/online_stores/online_store.py @feast-dev/maintainers @DvirDukhan
/sdk/python/feast/infra/materialization_engine/batch_materialization_engine.py @feast-dev/maintainers @whoahbot @sfc-gh-madkins
# ==== Offline Stores ====
# Core utils
-/sdk/python/feast/infra/offline_stores/offline_utils.py @feast-dev/maintainers @chhabrakadabra @mavysavydav @sfc-gh-madkins
+/sdk/python/feast/infra/offline_stores/offline_utils.py @feast-dev/maintainers @sfc-gh-madkins
# BigQuery
-/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @chhabrakadabra @mavysavydav
+/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers
# Snowflake
/sdk/python/feast/infra/offline_stores/snowflake* @sfc-gh-madkins
@@ -47,8 +47,3 @@
# AWS Lambda
/sdk/python/feast/infra/materialization/contrib/aws_lambda/ @achals
-
-# ==== Web UI ====
-/ui/ @adchia
-/sdk/python/feast/ui/ @adchia
-/sdk/python/feast/ui_server.py @adchia
diff --git a/Makefile b/Makefile
index 8d9a1a8d3b..4b85c0e448 100644
--- a/Makefile
+++ b/Makefile
@@ -24,19 +24,19 @@ TRINO_VERSION ?= 376
# General
-format: format-python format-java format-go
+format: format-python format-java
-lint: lint-python lint-java lint-go
+lint: lint-python lint-java
-test: test-python test-java test-go
+test: test-python test-java
-protos: compile-protos-go compile-protos-python compile-protos-docs
+protos: compile-protos-python compile-protos-docs
build: protos build-java build-docker
# Python SDK
-install-python-ci-dependencies: install-go-proto-dependencies install-go-ci-dependencies
+install-python-ci-dependencies:
python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt
COMPILE_GO=true python setup.py develop
@@ -259,6 +259,27 @@ test-python-universal-cassandra:
python -m pytest -x --integration \
sdk/python/tests
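+
+# Usage sketch (assumes Docker is available locally to provision the
+# Hazelcast test container): make test-python-universal-hazelcast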
+test-python-universal-hazelcast:
+ PYTHONPATH='.' \
+ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.hazelcast_repo_configuration \
+ PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.hazelcast \
+ FEAST_USAGE=False \
+ IS_TEST=True \
+ python -m pytest -n 8 --integration \
+ -k "not test_universal_cli and \
+ not test_go_feature_server and \
+ not test_feature_logging and \
+ not test_reorder_columns and \
+ not test_logged_features_validation and \
+ not test_lambda_materialization_consistency and \
+ not test_offline_write and \
+ not test_push_features_to_offline_store and \
+ not gcs_registry and \
+ not s3_registry and \
+ not test_universal_types and \
+ not test_snowflake" \
+ sdk/python/tests
+
test-python-universal-cassandra-no-cloud-providers:
PYTHONPATH='.' \
FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.cassandra_repo_configuration \
@@ -281,15 +302,12 @@ test-python-universal-cassandra-no-cloud-providers:
test-python-universal:
FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests
-test-python-go-server: compile-go-lib
- FEAST_USAGE=False IS_TEST=True pytest --integration --goserver sdk/python/tests
-
format-python:
# Sort
cd ${ROOT_DIR}/sdk/python; python -m isort feast/ tests/
# Format
- cd ${ROOT_DIR}/sdk/python; python -m black --target-version py37 feast tests
+ cd ${ROOT_DIR}/sdk/python; python -m black --target-version py38 feast tests
lint-python:
cd ${ROOT_DIR}/sdk/python; python -m mypy
@@ -334,48 +352,15 @@ test-trino-plugin-locally:
kill-trino-locally:
cd ${ROOT_DIR}; docker stop trino
-# Go SDK & embedded
-
-install-go-proto-dependencies:
- go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0
- go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0
-
-install-go-ci-dependencies:
- # TODO: currently gopy installation doesn't work w/o explicit go get in the next line
- # TODO: there should be a better way to install gopy
- go get github.com/go-python/gopy@v0.4.4
- go install golang.org/x/tools/cmd/goimports
- # The `go get` command on the previous lines download the lib along with replacing the dep to `feast-dev/gopy`
- # but the following command is needed to install it for some reason.
- go install github.com/go-python/gopy
- python -m pip install pybindgen==0.22.0 protobuf==3.20.1
-
install-protoc-dependencies:
- pip install --ignore-installed protobuf grpcio-tools==1.47.0 mypy-protobuf==3.1.0
-
-compile-protos-go: install-go-proto-dependencies install-protoc-dependencies
- python setup.py build_go_protos
-
-compile-go-lib: install-go-proto-dependencies install-go-ci-dependencies
- CGO_LDFLAGS_ALLOW=".*" COMPILE_GO=True python setup.py build_ext --inplace
+ pip install --ignore-installed protobuf==4.23.4 "grpcio-tools>=1.56.2,<2" mypy-protobuf==3.1.0
install-feast-ci-locally:
pip install -e ".[ci]"
-# Needs feast package to setup the feature store
-# CGO flag is due to this issue: https://github.com/golang/go/wiki/InvalidFlag
-test-go: compile-protos-go compile-protos-python compile-go-lib install-feast-ci-locally
- CGO_LDFLAGS_ALLOW=".*" go test -tags cgo,ccalloc ./...
-
-format-go:
- gofmt -s -w go/
-
-lint-go: compile-protos-go compile-go-lib
- go vet -tags cgo,ccalloc ./go/internal/feast ./go/embedded
-
# Docker
-build-docker: build-feature-server-python-docker build-feature-server-python-aws-docker build-feature-transformation-server-docker build-feature-server-java-docker
+build-docker: build-feature-server-python-aws-docker build-feature-transformation-server-docker build-feature-server-java-docker
push-ci-docker:
docker push $(REGISTRY)/feast-ci:$(VERSION)
diff --git a/OWNERS b/OWNERS
index c34fd6baaf..d726837e57 100644
--- a/OWNERS
+++ b/OWNERS
@@ -6,10 +6,10 @@ approvers:
- achals
- adchia
- felixwang9817
- - mavysavydav
- MattDelac
- kevjumba
- chhabrakadabra
+ - gbmarc1
- sfc-gh-madkins
- zhilingc
- whoahbot
@@ -23,10 +23,10 @@ reviewers:
- tedhtchang
- adchia
- felixwang9817
- - mavysavydav
- MattDelac
- kevjumba
- chhabrakadabra
+ - gbmarc1
- sfc-gh-madkins
- zhilingc
- whoahbot
diff --git a/README.md b/README.md
index 625fae3ecf..41d3816b69 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ Feast allows ML platform teams to:
* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensures that future feature values do not leak to models during training.
* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another.
-Please see our [documentation](https://docs.feast.dev/) for more information about the project, or sign up for an [email newsletter](https://feast.dev/).
+Please see our [documentation](https://docs.feast.dev/) for more information about the project.
## 📐 Architecture
![](docs/assets/feast_marchitecture.png)
@@ -144,7 +144,6 @@ pprint(feature_vector)
The list below contains the functionality that contributors are planning to develop for Feast.
* We welcome contribution to all items in the roadmap!
-* Have questions about the roadmap? Go to the Slack channel to ask on #feast-development.
* **Data Sources**
* [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake)
@@ -174,6 +173,7 @@ The list below contains the functionality that contributors are planning to deve
* [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore)
* [x] [Bigtable](https://docs.feast.dev/reference/online-stores/bigtable)
* [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite)
+ * [x] [Dragonfly](https://docs.feast.dev/reference/online-stores/dragonfly)
* [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure)
* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres)
* [x] [Cassandra / AstraDB (contrib plugin)](https://docs.feast.dev/reference/online-stores/cassandra)
@@ -212,7 +212,6 @@ Please refer to the official documentation at [Documentation](https://docs.feast
* [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview)
* [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws)
* [Change Log](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md)
- * [Slack (#Feast)](https://slack.feast.dev/)
## 👋 Contributing
Feast is a community project and is still under active development. Please have a look at our contributing and development guides if you want to contribute to the project:
diff --git a/community/governance.excalidraw b/community/governance.excalidraw
deleted file mode 100644
index f4c8dad9a4..0000000000
--- a/community/governance.excalidraw
+++ /dev/null
@@ -1,913 +0,0 @@
-{
- "type": "excalidraw",
- "version": 2,
- "source": "https://excalidraw.com",
- "elements": [
- {
- "type": "rectangle",
- "version": 620,
- "versionNonce": 853777363,
- "isDeleted": false,
- "id": "pr0yIJcUDXb4nFgowH9_r",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "dashed",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 409.5,
- "y": 620.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 194,
- "height": 83,
- "seed": 1695250557,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [
- {
- "id": "YfmPferxgVKoP70zGfYDK",
- "type": "text"
- },
- {
- "id": "YfmPferxgVKoP70zGfYDK",
- "type": "text"
- },
- {
- "type": "text",
- "id": "YfmPferxgVKoP70zGfYDK"
- },
- {
- "id": "IsihlXUGDSklv2RsxX6wO",
- "type": "arrow"
- },
- {
- "id": "G5s2AUFJ730fyPsIbA8xP",
- "type": "arrow"
- },
- {
- "id": "j9ZVC3ZgHTsAGe3hJQecp",
- "type": "arrow"
- }
- ],
- "updated": 1662582134601,
- "link": null,
- "locked": false
- },
- {
- "type": "text",
- "version": 623,
- "versionNonce": 328400605,
- "isDeleted": false,
- "id": "YfmPferxgVKoP70zGfYDK",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 414.5,
- "y": 649.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 184,
- "height": 25,
- "seed": 1575229907,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582134601,
- "link": null,
- "locked": false,
- "fontSize": 20,
- "fontFamily": 1,
- "text": "CODEOWNERS",
- "baseline": 18,
- "textAlign": "center",
- "verticalAlign": "middle",
- "containerId": "pr0yIJcUDXb4nFgowH9_r",
- "originalText": "CODEOWNERS"
- },
- {
- "type": "rectangle",
- "version": 756,
- "versionNonce": 1648798067,
- "isDeleted": false,
- "id": "XDy4VWWtJ9sd6hzPJDdFe",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 409.5,
- "y": 779.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 194,
- "height": 83,
- "seed": 1925179667,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [
- {
- "id": "gUz4p_oPytb5-ejbYb81N",
- "type": "text"
- },
- {
- "id": "gUz4p_oPytb5-ejbYb81N",
- "type": "text"
- },
- {
- "id": "gUz4p_oPytb5-ejbYb81N",
- "type": "text"
- },
- {
- "type": "text",
- "id": "gUz4p_oPytb5-ejbYb81N"
- },
- {
- "id": "G5s2AUFJ730fyPsIbA8xP",
- "type": "arrow"
- }
- ],
- "updated": 1662582134601,
- "link": null,
- "locked": false
- },
- {
- "type": "text",
- "version": 781,
- "versionNonce": 1240013629,
- "isDeleted": false,
- "id": "gUz4p_oPytb5-ejbYb81N",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 414.5,
- "y": 808.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 184,
- "height": 25,
- "seed": 140322205,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582134601,
- "link": null,
- "locked": false,
- "fontSize": 20,
- "fontFamily": 1,
- "text": "Contributors",
- "baseline": 18,
- "textAlign": "center",
- "verticalAlign": "middle",
- "containerId": "XDy4VWWtJ9sd6hzPJDdFe",
- "originalText": "Contributors"
- },
- {
- "type": "text",
- "version": 463,
- "versionNonce": 2109720179,
- "isDeleted": false,
- "id": "AYJKq2RGJrSIpbfiJOf_4",
- "fillStyle": "hachure",
- "strokeWidth": 2,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 526,
- "y": 517.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 274,
- "height": 75,
- "seed": 1616513981,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "fontSize": 20,
- "fontFamily": 1,
- "text": "1. organize contributors\n2. influence roadmap\n3. own direction of an area",
- "baseline": 68,
- "textAlign": "left",
- "verticalAlign": "top",
- "containerId": null,
- "originalText": "1. organize contributors\n2. influence roadmap\n3. own direction of an area"
- },
- {
- "type": "rectangle",
- "version": 776,
- "versionNonce": 519656573,
- "isDeleted": false,
- "id": "z5LT5d710gSTA9DjwiL3O",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 70,
- "angle": 0,
- "x": 1013.7117834394903,
- "y": 187.5000000000001,
- "strokeColor": "#000000",
- "backgroundColor": "#4c6ef5",
- "width": 132,
- "height": 682,
- "seed": 1424710877,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [
- {
- "id": "RscqyQXicYOkFsE_zvran",
- "type": "text"
- },
- {
- "id": "J7IG4T5j15pB3b_K0Cpd9",
- "type": "arrow"
- },
- {
- "id": "XEohLLmfFl0L9Wi2ew5AU",
- "type": "arrow"
- },
- {
- "id": "o3Pp-94PORhEiEauRcZW_",
- "type": "arrow"
- },
- {
- "type": "text",
- "id": "RscqyQXicYOkFsE_zvran"
- },
- {
- "id": "j9ZVC3ZgHTsAGe3hJQecp",
- "type": "arrow"
- },
- {
- "id": "Klq-VJGZiolZnGuaNJ8k9",
- "type": "arrow"
- }
- ],
- "updated": 1662582138112,
- "link": null,
- "locked": false
- },
- {
- "type": "text",
- "version": 896,
- "versionNonce": 1733426643,
- "isDeleted": false,
- "id": "RscqyQXicYOkFsE_zvran",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 1018.7117834394903,
- "y": 476.0000000000001,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 122,
- "height": 105,
- "seed": 1202400115,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582138113,
- "link": null,
- "locked": false,
- "fontSize": 28,
- "fontFamily": 1,
- "text": "Feast \nGitHub \nproject",
- "baseline": 95,
- "textAlign": "center",
- "verticalAlign": "middle",
- "containerId": "z5LT5d710gSTA9DjwiL3O",
- "originalText": "Feast GitHub project"
- },
- {
- "id": "IsihlXUGDSklv2RsxX6wO",
- "type": "arrow",
- "x": 506.997671158975,
- "y": 619,
- "width": 0,
- "height": 132,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "#868e96",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "seed": 345290749,
- "version": 680,
- "versionNonce": 787007421,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "points": [
- [
- 0,
- 0
- ],
- [
- 0,
- -132
- ]
- ],
- "lastCommittedPoint": null,
- "startBinding": {
- "elementId": "pr0yIJcUDXb4nFgowH9_r",
- "focus": 0.005130630504896713,
- "gap": 1.5
- },
- "endBinding": {
- "elementId": "TBYpmrW2OsKEqbpZfEeJg",
- "focus": 0.461338833375829,
- "gap": 1
- },
- "startArrowhead": null,
- "endArrowhead": "arrow"
- },
- {
- "id": "G5s2AUFJ730fyPsIbA8xP",
- "type": "arrow",
- "x": 506.9985864097345,
- "y": 776,
- "width": 0.9914493467237548,
- "height": 71,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "#868e96",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "seed": 241364467,
- "version": 241,
- "versionNonce": 649485971,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "points": [
- [
- 0,
- 0
- ],
- [
- -0.9914493467237548,
- -71
- ]
- ],
- "lastCommittedPoint": null,
- "startBinding": {
- "elementId": "XDy4VWWtJ9sd6hzPJDdFe",
- "focus": 0.011569796958606356,
- "gap": 3.5
- },
- "endBinding": {
- "elementId": "pr0yIJcUDXb4nFgowH9_r",
- "focus": 0.011204382815075232,
- "gap": 1.5
- },
- "startArrowhead": null,
- "endArrowhead": "arrow"
- },
- {
- "id": "TBYpmrW2OsKEqbpZfEeJg",
- "type": "rectangle",
- "x": 409.5,
- "y": 188,
- "width": 361.99999999999994,
- "height": 298,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "#868e96",
- "fillStyle": "solid",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 30,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "seed": 1515491581,
- "version": 231,
- "versionNonce": 593345661,
- "isDeleted": false,
- "boundElements": [
- {
- "id": "IsihlXUGDSklv2RsxX6wO",
- "type": "arrow"
- },
- {
- "id": "Klq-VJGZiolZnGuaNJ8k9",
- "type": "arrow"
- }
- ],
- "updated": 1662582134602,
- "link": null,
- "locked": false
- },
- {
- "id": "YEEHpa4RXaR8G9YW55v25",
- "type": "rectangle",
- "x": 428.5,
- "y": 398,
- "width": 163.61445783132532,
- "height": 70.00000000000001,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "seed": 932648787,
- "version": 319,
- "versionNonce": 398988755,
- "isDeleted": false,
- "boundElements": [
- {
- "type": "text",
- "id": "8iyUZwSph5yMVrXehf6vg"
- },
- {
- "id": "o3Pp-94PORhEiEauRcZW_",
- "type": "arrow"
- },
- {
- "id": "IsihlXUGDSklv2RsxX6wO",
- "type": "arrow"
- }
- ],
- "updated": 1662582134602,
- "link": null,
- "locked": false
- },
- {
- "id": "8iyUZwSph5yMVrXehf6vg",
- "type": "text",
- "x": 433.5,
- "y": 422.5,
- "width": 153.61445783132532,
- "height": 21,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "seed": 1803538003,
- "version": 365,
- "versionNonce": 952837341,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "text": "Area maintainers",
- "fontSize": 16.697223677317968,
- "fontFamily": 1,
- "textAlign": "center",
- "verticalAlign": "middle",
- "baseline": 15,
- "containerId": "YEEHpa4RXaR8G9YW55v25",
- "originalText": "Area maintainers"
- },
- {
- "type": "rectangle",
- "version": 355,
- "versionNonce": 1753998195,
- "isDeleted": false,
- "id": "Wh-PpzmGy1bWJko0akD-a",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 429.8072289156627,
- "y": 257.1185567010309,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 161,
- "height": 68.88144329896907,
- "seed": 1844448573,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "boundElements": [
- {
- "id": "OJCS1hAx71BD6u1jesJzR",
- "type": "text"
- },
- {
- "type": "text",
- "id": "OJCS1hAx71BD6u1jesJzR"
- },
- {
- "id": "o3Pp-94PORhEiEauRcZW_",
- "type": "arrow"
- }
- ],
- "updated": 1662582134602,
- "link": null,
- "locked": false
- },
- {
- "type": "text",
- "version": 409,
- "versionNonce": 556564797,
- "isDeleted": false,
- "id": "OJCS1hAx71BD6u1jesJzR",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 434.8072289156627,
- "y": 271.55927835051546,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 151,
- "height": 40,
- "seed": 852504851,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "fontSize": 16.413043478260864,
- "fontFamily": 1,
- "text": "Project \nmaintainers",
- "baseline": 34,
- "textAlign": "center",
- "verticalAlign": "middle",
- "containerId": "Wh-PpzmGy1bWJko0akD-a",
- "originalText": "Project maintainers"
- },
- {
- "id": "o3Pp-94PORhEiEauRcZW_",
- "type": "arrow",
- "x": 510.1952932956257,
- "y": 396.60017389144207,
- "width": 0.34508644012566947,
- "height": 69.20034778288408,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "fillStyle": "hachure",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "seed": 1889236627,
- "version": 572,
- "versionNonce": 918879507,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "points": [
- [
- 0,
- 0
- ],
- [
- 0.34508644012566947,
- -69.20034778288408
- ]
- ],
- "lastCommittedPoint": null,
- "startBinding": {
- "elementId": "YEEHpa4RXaR8G9YW55v25",
- "focus": -0.0035794947090358044,
- "gap": 1.3998261085579315
- },
- "endBinding": {
- "elementId": "Wh-PpzmGy1bWJko0akD-a",
- "focus": -0.0051056226396315905,
- "gap": 1.3998261085579884
- },
- "startArrowhead": null,
- "endArrowhead": "arrow"
- },
- {
- "id": "4CHi-UfB3oI1PAfcFm2o_",
- "type": "text",
- "x": 528.5,
- "y": 354.5,
- "width": 218,
- "height": 20,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "fillStyle": "hachure",
- "strokeWidth": 2,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "seed": 2054408115,
- "version": 238,
- "versionNonce": 1105416605,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "text": "break ties by majority vote",
- "fontSize": 16,
- "fontFamily": 1,
- "textAlign": "left",
- "verticalAlign": "top",
- "baseline": 14,
- "containerId": null,
- "originalText": "break ties by majority vote"
- },
- {
- "id": "gHvMhIQl4S1SxPE8kzHLx",
- "type": "text",
- "x": 431.8072289156627,
- "y": 201.5,
- "width": 157,
- "height": 35,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "#868e96",
- "fillStyle": "solid",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [
- "mcHoJ-dlfU3T8l_C93UPa"
- ],
- "strokeSharpness": "sharp",
- "seed": 1597289651,
- "version": 154,
- "versionNonce": 1038738099,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "text": "Maintainers",
- "fontSize": 28,
- "fontFamily": 1,
- "textAlign": "left",
- "verticalAlign": "top",
- "baseline": 25,
- "containerId": null,
- "originalText": "Maintainers"
- },
- {
- "type": "text",
- "version": 545,
- "versionNonce": 1478563325,
- "isDeleted": false,
- "id": "_qJ5MtLgnvmF1-EDKX6qg",
- "fillStyle": "hachure",
- "strokeWidth": 2,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 533,
- "y": 732.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 106,
- "height": 25,
- "seed": 1870614973,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582134602,
- "link": null,
- "locked": false,
- "fontSize": 20,
- "fontFamily": 1,
- "text": "review PRs",
- "baseline": 18,
- "textAlign": "left",
- "verticalAlign": "top",
- "containerId": null,
- "originalText": "review PRs"
- },
- {
- "id": "j9ZVC3ZgHTsAGe3hJQecp",
- "type": "arrow",
- "x": 610.590909090909,
- "y": 673.6931323855418,
- "width": 394.7818338530517,
- "height": 1.1368683772161603e-13,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "#868e96",
- "fillStyle": "solid",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "seed": 1115132605,
- "version": 594,
- "versionNonce": 1612334739,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582138112,
- "link": null,
- "locked": false,
- "points": [
- [
- 0,
- 0
- ],
- [
- 394.7818338530517,
- 1.1368683772161603e-13
- ]
- ],
- "lastCommittedPoint": null,
- "startBinding": {
- "elementId": "pr0yIJcUDXb4nFgowH9_r",
- "gap": 7.0909090909090455,
- "focus": 0.2817622261576348
- },
- "endBinding": {
- "elementId": "z5LT5d710gSTA9DjwiL3O",
- "gap": 9.339040495529549,
- "focus": -0.4257863119810608
- },
- "startArrowhead": null,
- "endArrowhead": "arrow"
- },
- {
- "id": "Klq-VJGZiolZnGuaNJ8k9",
- "type": "arrow",
- "x": 775.7385321100917,
- "y": 334,
- "width": 233.2672383568049,
- "height": 0,
- "angle": 0,
- "strokeColor": "#000000",
- "backgroundColor": "#868e96",
- "fillStyle": "solid",
- "strokeWidth": 1,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "seed": 632787667,
- "version": 198,
- "versionNonce": 1893334067,
- "isDeleted": false,
- "boundElements": null,
- "updated": 1662582138112,
- "link": null,
- "locked": false,
- "points": [
- [
- 0,
- 0
- ],
- [
- 233.2672383568049,
- 0
- ]
- ],
- "lastCommittedPoint": null,
- "startBinding": {
- "elementId": "TBYpmrW2OsKEqbpZfEeJg",
- "gap": 4.238532110091741,
- "focus": -0.020134228187919462
- },
- "endBinding": {
- "elementId": "z5LT5d710gSTA9DjwiL3O",
- "gap": 5.7060129725937765,
- "focus": 0.5703812316715546
- },
- "startArrowhead": null,
- "endArrowhead": "arrow"
- },
- {
- "type": "text",
- "version": 651,
- "versionNonce": 1375877757,
- "isDeleted": false,
- "id": "diazwl57WWW_7gfm7wMea",
- "fillStyle": "hachure",
- "strokeWidth": 2,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 678,
- "y": 637.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 262,
- "height": 25,
- "seed": 2077675699,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582152851,
- "link": null,
- "locked": false,
- "fontSize": 20,
- "fontFamily": 1,
- "text": "merge PRs if no objections",
- "baseline": 18,
- "textAlign": "left",
- "verticalAlign": "top",
- "containerId": null,
- "originalText": "merge PRs if no objections"
- },
- {
- "type": "text",
- "version": 658,
- "versionNonce": 1558756051,
- "isDeleted": false,
- "id": "_T1wMHFNqfA6a8Ku2OLDl",
- "fillStyle": "hachure",
- "strokeWidth": 2,
- "strokeStyle": "solid",
- "roughness": 1,
- "opacity": 100,
- "angle": 0,
- "x": 840,
- "y": 296.5,
- "strokeColor": "#000000",
- "backgroundColor": "transparent",
- "width": 102,
- "height": 25,
- "seed": 1987233139,
- "groupIds": [],
- "strokeSharpness": "sharp",
- "boundElements": [],
- "updated": 1662582148465,
- "link": null,
- "locked": false,
- "fontSize": 20,
- "fontFamily": 1,
- "text": "merge PRs",
- "baseline": 18,
- "textAlign": "left",
- "verticalAlign": "top",
- "containerId": null,
- "originalText": "merge PRs"
- }
- ],
- "appState": {
- "gridSize": null,
- "viewBackgroundColor": "#ffffff"
- },
- "files": {}
-}
\ No newline at end of file
diff --git a/community/governance.md b/community/governance.md
index 89cf800bc8..087b3599db 100644
--- a/community/governance.md
+++ b/community/governance.md
@@ -42,11 +42,7 @@ A formal governance structure helps us to
On a high level, the key moving parts of the community are:
- **GitHub activity** (issues + pull requests)
-- **Slack community** ([slack.feast.dev](slack.feast.dev))
- - `#feast-development` is where design discussions happen amongst contributors
- - Other Slack channels exist for users to ask and answer questions.
- **RFCs** ([drive folder](https://drive.google.com/drive/u/0/folders/1msUsgmDbVBaysmhBlg9lklYLLTMk4bC3)) for detailed discussions
-- **Community calls** (biweekly) to discuss best practices, contributions, and announce changes
- **Maintainer syncs** (monthly) for [maintainers](maintainers.md) to discuss project direction and health
With this structure, users and contributors largely self-organize and contribute changes as per [lazy consensus](#lazy-consensus). If there is active opposition and unresolvable conflict, then maintainers step in to break ties or make decisions.
@@ -61,10 +57,6 @@ Anyone interested in the project can join the community to:
- contribute to the project design
- participate in the decision-making process.
-The general decision making workflow is as follows:
-
-
-
> **Note**: There may not always be a corresponding CODEOWNER for the affected code, in which case the responsibility falls on other maintainers or contributors with write access to review + merge the PR
# Roles And Responsibilities
@@ -96,7 +88,7 @@ In addition to their actions as users, contributors may also find themselves doi
* Writing, editing, translating or reviewing the documentation
* Organizing events or evangelizing the project
-Contributors engage with the project through the issue tracker and slack community, or by writing or editing documentation. They submit changes to the project itself via Pull Requests (PRs), which will be considered for inclusion in the project by existing maintainers (see next section).
+Contributors engage with the project through the issue tracker or by writing or editing documentation. They submit changes to the project itself via Pull Requests (PRs), which will be considered for inclusion in the project by existing maintainers (see next section).
Contributors should follow the following guides when creating PRs:
- [Contribution Process](https://docs.feast.dev/project/contributing)
@@ -116,27 +108,14 @@ Maintainers are community members who have shown that they are committed to Feas
> **Note**: maintainers, like other contributors, must make changes to Feast via pull requests (with code review). This applies to all changes to documentation, code, configuration, governance, etc.
-### Types of maintainers
-
-There are two kinds of maintainers
-
-1. **Project maintainers** control overall project organization and resolving disputes. They also
- - Attend a regular maintainers sync
- - Participate in strategic planning, approve changes to the governance model, and manage the copyrights within the project outputs.
- - (optional) Attend community calls
- - (optional) Planning project roadmaps and articulating vision
- - (optional) Guide design decisions to reinforce key project values (e.g. simplicity)
-2. **Area maintainers** own a specific technical area (which may span code modules), often specifically targeting a user journey or tech stack. They
- - are generally point people in GitHub or Slack on discussions in that area (e.g. tagged in `#feast-development`)
- - (optional) help drive roadmap decisions
-
-> **Note:** project maintainers may also be area maintainers, but this does not give their ideas increased weight over other area maintainers.
-
-Decisions that need tie breakers may require intervention via project maintainers majority consensus.
+Maintainers control overall project organization and resolve disputes. They also
+- Attend a regular maintainers sync
+- Participate in strategic planning, approve changes to the governance model, and manage the copyrights within the project outputs.
+- (optional) Plan project roadmaps and articulate vision
+- (optional) Guide design decisions to reinforce key project values (e.g. simplicity)
### Optional maintainer responsibilities
Other optional activities a maintainer (project or area maintainer) may participate in:
- * Monitor email aliases and our Slack (#feast-general, #feast-development, #feast-beginners).
* Perform code reviews for other maintainers and the community. The areas of specialization listed in [OWNERS.md](OWNERS.md) can be used to help with routing an issue/question to the right person.
* Triage GitHub issues, applying [labels](https://github.com/feast-dev/feast/labels) to each new item. Labels are extremely useful for future issue follow ups. Adding labels is somewhat subjective, so please use your best judgment.
* Triage build issues, filing issues for known flaky builds or bugs, fixing or finding someone to fix any master build breakages.
diff --git a/community/governance.png b/community/governance.png
deleted file mode 100644
index c2b00930e3..0000000000
Binary files a/community/governance.png and /dev/null differ
diff --git a/community/maintainers.md b/community/maintainers.md
index cdf78b150c..e66dbeb762 100644
--- a/community/maintainers.md
+++ b/community/maintainers.md
@@ -7,36 +7,25 @@ See [Governance](governance.md) for what each maintainer type is
In alphabetical order
| Name | GitHub Username | Email | Organization |
-| -------------- | ---------------- | --------------------------- | ------------------ |
-| Abhin Chhabra | `chhabrakadabra` | chhabra.abhin@gmail.com | Shopify |
+| -------------- | ---------------- |-----------------------------| ------------------ |
| Achal Shah | `achals` | achals@gmail.com | Tecton |
-| Danny Chiao | `adchia` | d.chiao@gmail.com | Tecton |
-| David Liu | `mavysavydav` | davidyliuliu@gmail.com | Twitter |
| Felix Wang | `felixwang9817` | wangfelix98@gmail.com | Tecton |
| Kevin Zhang | `kevjumba` | kevin.zhang.13499@gmail.com | Tecton |
-| Matt Delacour | `MattDelac` | mdelacour@hey.com | (formerly) Shopify |
| Miles Adkins | `sfc-gh-madkins` | miles.adkins@snowflake.com | Snowflake |
| Willem Pienaar | `woop` | will.pienaar@gmail.com | Tecton |
| Zhiling Chen | `zhilingc` | chnzhlng@gmail.com | GetGround |
-## Area maintainers
-
-Generally, with contribution questions here, default to `#feast-development` in the [slack.feast.dev](slack.feast.dev) Slack channel, but these may be folks for you to tag in messages
-
-| Area | Description | Name |
-| -------------------- | -------------------------------------------------------------------------- | --------------------------------------------- |
-| Data ingestion | ingesting batch + stream data into the online store (materialization) | Achal Shah, Felix Wang, Kevin Zhang |
-| Developer experience | tooling, testing, documentation, tutorials | Achal Shah |
-| Feature serving | optimization, caching, deployment patterns, batch retrieval, range queries | Dvir Dukhan |
-| Ops | general deployment concerns, CI/CD, versioning | Keith Adler, Danny Chiao, Felix Wang |
-| Web UI | i.e. `feast ui` output | Danny Chiao, David Liu |
-
## Emeritus Maintainers
-| Name | GitHub Username | Email | Organization |
-| ------------------- | --------------- | --------------------------- | ------------ |
-| Oleg Avdeev | oavdeev | oleg.v.avdeev@gmail.com | Tecton |
-| Oleksii Moskalenko | pyalex | moskalenko.alexey@gmail.com | Tecton |
-| Jay Parthasarthy | jparthasarthy | jparthasarthy@gmail.com | Tecton |
-| Pradithya Aria Pura | pradithya | pradithya.aria@gmail.com | Gojek |
-| Tsotne Tabidze | tsotnet | tsotnet@gmail.com | Tecton |
\ No newline at end of file
+| Name | GitHub Username | Email | Organization |
+|---------------------|-----------------|-----------------------------|-------------------|
+| Oleg Avdeev | oavdeev | oleg.v.avdeev@gmail.com | Tecton |
+| Oleksii Moskalenko | pyalex | moskalenko.alexey@gmail.com | Tecton |
+| Jay Parthasarthy | jparthasarthy | jparthasarthy@gmail.com | Tecton |
+| Danny Chiao | adchia | danny@tecton.ai | Tecton |
+| Pradithya Aria Pura | pradithya | pradithya.aria@gmail.com | Gojek |
+| Tsotne Tabidze | tsotnet | tsotnet@gmail.com | Tecton |
+| Abhin Chhabra | chhabrakadabra | chhabra.abhin@gmail.com | Shopify |
+| David Liu | mavysavydav | davidyliuliu@gmail.com | Twitter |
+| Matt Delacour | MattDelac | mdelacour@hey.com | Shopify |
diff --git a/docs/README.md b/docs/README.md
index a305c4aecd..66c7548440 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -57,7 +57,7 @@ Many companies have used Feast to power real-world ML use cases such as:
## How can I get started?
{% hint style="info" %}
-The best way to learn Feast is to use it. Join our [Slack channel](http://slack.feast.dev) and head over to our [Quickstart](getting-started/quickstart.md) and try it out!
+The best way to learn Feast is to use it. Head over to our [Quickstart](getting-started/quickstart.md) and try it out!
{% endhint %}
Explore the following resources to get started with Feast:
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md
index 1bab8a61ef..c80ded2adf 100644
--- a/docs/SUMMARY.md
+++ b/docs/SUMMARY.md
@@ -90,12 +90,15 @@
* [SQLite](reference/online-stores/sqlite.md)
* [Snowflake](reference/online-stores/snowflake.md)
* [Redis](reference/online-stores/redis.md)
+ * [Dragonfly](reference/online-stores/dragonfly.md)
* [Datastore](reference/online-stores/datastore.md)
* [DynamoDB](reference/online-stores/dynamodb.md)
* [Bigtable](reference/online-stores/bigtable.md)
* [PostgreSQL (contrib)](reference/online-stores/postgres.md)
* [Cassandra + Astra DB (contrib)](reference/online-stores/cassandra.md)
* [MySQL (contrib)](reference/online-stores/mysql.md)
+ * [Rockset (contrib)](reference/online-stores/rockset.md)
+ * [Hazelcast (contrib)](reference/online-stores/hazelcast.md)
* [Providers](reference/providers/README.md)
* [Local](reference/providers/local.md)
* [Google Cloud Platform](reference/providers/google-cloud-platform.md)
diff --git a/docs/community.md b/docs/community.md
index 098b6b3f90..21cca702bf 100644
--- a/docs/community.md
+++ b/docs/community.md
@@ -4,13 +4,6 @@
* [GitHub Repository](https://github.com/feast-dev/feast/): Find the complete Feast codebase on GitHub.
* [Community Governance Doc](https://github.com/feast-dev/feast/blob/master/community): See the governance model of Feast, including who the maintainers are and how decisions are made.
-* [Slack](https://slack.feast.dev): Feel free to ask questions or say hello! This is the main place where maintainers and contributors brainstorm and where users ask questions or discuss best practices.
- * Feast users should join `#feast-general` or `#feast-beginners` to ask questions
- * Feast developers / contributors should join `#feast-development`
-* [Mailing list](https://groups.google.com/d/forum/feast-dev): We have both a user and developer mailing list.
- * Feast users should join [feast-discuss@googlegroups.com](mailto:feast-discuss@googlegroups.com) group by clicking [here](https://groups.google.com/g/feast-discuss).
- * Feast developers / contributors should join [feast-dev@googlegroups.com](mailto:feast-dev@googlegroups.com) group by clicking [here](https://groups.google.com/d/forum/feast-dev).
-* [Community Calendar](https://calendar.google.com/calendar/u/0?cid=ZTFsZHVhdGM3MDU3YTJucTBwMzNqNW5rajBAZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ): Includes community calls and design meetings.
* [Google Folder](https://drive.google.com/drive/u/0/folders/1jgMHOPDT2DvBlJeO9LCM79DP4lm4eOrR): This folder is used as a central repository for all Feast resources. For example:
* Design proposals in the form of Request for Comments (RFC).
* User surveys and meeting minutes.
@@ -19,36 +12,4 @@
## How can I get help?
-* **Slack:** Need to speak to a human? Come ask a question in our Slack channel (link above).
* **GitHub Issues:** Found a bug or need a feature? [Create an issue on GitHub](https://github.com/feast-dev/feast/issues/new).
-* **StackOverflow:** Need to ask a question on how to use Feast? We also monitor and respond to [StackOverflow](https://stackoverflow.com/questions/tagged/feast).
-
-## Community Calls
-
-### General community call (biweekly)
-We have a user and contributor community call every two weeks (US & EU friendly).
-
-{% hint style="info" %}
-Please join the above Feast user groups in order to see calendar invites to the community calls
-{% endhint %}
-
-#### Frequency (every 2 weeks)
-
-* Tuesday 10:00 am to 10:30 am PST
-
-#### Links
-
-* Zoom: [https://zoom.us/j/6325193230](https://zoom.us/j/6325193230)
-* Meeting notes (incl recordings): [https://bit.ly/feast-notes](https://bit.ly/feast-notes)
-
-### Developers call (biweekly)
-We also have a `#feast-development` community call every two weeks, where we discuss contributions + brainstorm best practices.
-
-#### Frequency (every 2 weeks)
-
-* Tuesday 8:00 am to 8:30 am PST
-
-#### Links
-
-* Meeting notes (incl recordings): [Feast Development Biweekly](https://docs.google.com/document/d/1zUbIWFWjaBEVlToOdupnmKQwgAtFYx41sPoEEEdd2io/edit#)
-* Zoom: [https://zoom.us/j/93657748160?pwd=K3ZpdzhqejgrcXNhc3BlSjFMdzUxdz09](https://zoom.us/j/93657748160?pwd=K3ZpdzhqejgrcXNhc3BlSjFMdzUxdz09)
diff --git a/docs/getting-started/faq.md b/docs/getting-started/faq.md
index a511ddb0dc..9b7eb834bf 100644
--- a/docs/getting-started/faq.md
+++ b/docs/getting-started/faq.md
@@ -3,7 +3,7 @@
{% hint style="info" %}
**Don't see your question?**
-We encourage you to ask questions on [Slack](https://slack.feast.dev) or [GitHub](https://github.com/feast-dev/feast). Even better, once you get an answer, add the answer to this FAQ via a [pull request](../project/development-guide.md)!
+We encourage you to ask questions on [GitHub](https://github.com/feast-dev/feast). Even better, once you get an answer, add the answer to this FAQ via a [pull request](../project/development-guide.md)!
{% endhint %}
## Getting started
diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md
index b30bdb585c..d10e8a174a 100644
--- a/docs/getting-started/quickstart.md
+++ b/docs/getting-started/quickstart.md
@@ -555,9 +555,7 @@ show up in the upcoming concepts + architecture + tutorial pages as well.
## Next steps
-* Join the [email newsletter](https://feast.dev/) to get new updates on Feast / feature stores.
* Read the [Concepts](concepts/) page to understand the Feast data model.
* Read the [Architecture](architecture-and-components/) page.
* Check out our [Tutorials](../tutorials/tutorials-overview/) section for more examples on how to use Feast.
* Follow our [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/) guide for a more in-depth tutorial on using Feast.
-* Join other Feast users and contributors in [Slack](https://slack.feast.dev) and become part of the community!
diff --git a/docs/project/contributing.md b/docs/project/contributing.md
index 9a3e3e1a3e..cded378951 100644
--- a/docs/project/contributing.md
+++ b/docs/project/contributing.md
@@ -2,21 +2,15 @@
## Getting started
After familiarizing yourself with the documentation, the simplest way to get started is to:
-1. Join the `#feast-development` [Slack channel](https://tectonfeast.slack.com/archives/C01NTDB88QK), where contributors discuss ideas and PRs
-2. Join our Google Groups in order to get access to RFC folders + get invites to community calls. See [community](../community.md) for more details.
-3. Setup your developer environment by following [development guide](development-guide.md).
-4. Either create a [GitHub issue](https://github.com/feast-dev/feast/issues) or make a draft PR (following [development guide](development-guide.md)) to get the ball rolling!
+1. Set up your developer environment by following the [development guide](development-guide.md).
+2. Either create a [GitHub issue](https://github.com/feast-dev/feast/issues) or make a draft PR (following the [development guide](development-guide.md)) to get the ball rolling!
## Decision making process
*See [governance](../../community/governance.md) for more details here*
We follow a process of [lazy consensus](http://community.apache.org/committers/lazyConsensus.html). If you believe you know what the project needs then just start development. As long as there is no active opposition and the PR has been approved by maintainers or CODEOWNERS, contributions will be merged.
-We use our `#feast-development` [Slack channel](https://tectonfeast.slack.com/archives/C01NTDB88QK), [GitHub issues](https://github.com/feast-dev/feast/issues), and [GitHub pull requests](https://github.com/feast-dev/feast/pulls) to communicate development ideas.
-
-The general decision making workflow is as follows:
-
-
+We use [GitHub issues](https://github.com/feast-dev/feast/issues) and [GitHub pull requests](https://github.com/feast-dev/feast/pulls) to communicate development ideas.
> **Note**: There may not always be a corresponding CODEOWNER for the affected code, in which case the responsibility falls on other maintainers or contributors with write access to review + merge the PR
@@ -30,7 +24,7 @@ See also [Making a pull request](development-guide.md#making-a-pull-request) for
## Resources
-- [Community](../community.md) for other ways to get involved with the community (e.g. joining community calls)
+- [Community](../community.md) for other ways to get involved with the community
- [Development guide](development-guide.md) for tips on how to contribute
- [Feast GitHub issues](https://github.com/feast-dev/feast/issues) to see what others are working on
- [Feast RFCs](https://drive.google.com/drive/u/0/folders/1msUsgmDbVBaysmhBlg9lklYLLTMk4bC3) for a folder of previously written RFCs
\ No newline at end of file
diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md
index 5a0a414d1d..931d0243d2 100644
--- a/docs/project/development-guide.md
+++ b/docs/project/development-guide.md
@@ -16,7 +16,7 @@
- [Incorporating upstream changes from master](#incorporating-upstream-changes-from-master)
- [Feast Python SDK / CLI](#feast-python-sdk--cli)
- [Environment Setup](#environment-setup)
- - [Code Style & Linting](#code-style--linting)
+ - [Code Style \& Linting](#code-style--linting)
- [Unit Tests](#unit-tests)
- [Integration Tests](#integration-tests)
- [Local integration tests](#local-integration-tests)
@@ -33,12 +33,7 @@
- [Feast Java Serving](#feast-java-serving)
- [Developing the Feast Helm charts](#developing-the-feast-helm-charts)
- [Feast Java Feature Server Helm Chart](#feast-java-feature-server-helm-chart)
- - [Feast Python / Go Feature Server Helm Chart](#feast-python--go-feature-server-helm-chart)
- - [Feast Go Client](#feast-go-client)
- - [Go Environment Setup](#go-environment-setup)
- - [Building Go](#building-go)
- - [Go Code Style & Linting](#go-code-style--linting)
- - [Go Unit Tests](#go-unit-tests)
+ - [Feast Python Feature Server Helm Chart](#feast-python-feature-server-helm-chart)
- [Testing with Github Actions workflows](#testing-with-github-actions-workflows)
- [Feast Data Storage Format](#feast-data-storage-format)
## Overview
@@ -46,7 +41,6 @@ This guide is targeted at developers looking to contribute to Feast components in
the main Feast repository:
- [Feast Python SDK / CLI](#feast-python-sdk--cli)
- [Feast Java Serving](#feast-java-serving)
-- [Feast Go Client](#feast-go-client)
Please see [this page](../reference/codebase-structure.md) for more details on the structure of the entire codebase.
@@ -57,13 +51,6 @@ The compatibility policy for Feast can be found [here](compatibility.md), and sh
## Community
See [Contribution process](./contributing.md) and [Community](../community.md) for details on how to get more involved in the community.
-A quick few highlights:
-- [RFCs](https://drive.google.com/drive/u/0/folders/0AAe8j7ZK3sxSUk9PVA)
-- [Community Slack](https://slack.feast.dev/)
-- [Feast Dev Mailing List](https://groups.google.com/g/feast-dev)
-- [Community Calendar](https://calendar.google.com/calendar/u/0?cid=ZTFsZHVhdGM3MDU3YTJucTBwMzNqNW5rajBAZ3JvdXAuY2FsZW5kYXIuZ29vZ2xlLmNvbQ)
- - Includes biweekly community calls at 10AM PST
-
## Making a pull request
We use the convention that the assignee of a PR is the person with the next action.
@@ -135,7 +122,7 @@ Note that this means if you are midway through working through a PR and rebase,
### Environment Setup
Setting up your development environment for Feast Python SDK / CLI:
1. Ensure that you have Docker installed in your environment. Docker is used to provision service dependencies during testing, and build images for feature servers and other components.
- 1. Please note that we use [Docker with BuiltKit](https://docs.docker.com/develop/develop-images/build_enhancements/).
+ - Please note that we use [Docker with BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/); a short example of enabling it follows this list.
2. Ensure that you have `make` and Python (3.8 or above) with `pip` installed.
3. _Recommended:_ Create a virtual environment to isolate development dependencies to be installed
```sh
@@ -147,13 +134,20 @@ Setting up your development environment for Feast Python SDK / CLI:
```sh
pip install --upgrade pip
```
-
-5. (Optional): Install Node & Yarn. Then run the following to build Feast UI artifacts for use in `feast ui`
+5. (M1 Mac only): Follow the [dev guide](https://github.com/feast-dev/feast/issues/2105)
+6. Install pip-tools
+ ```sh
+ pip install pip-tools
+ ```
+7. (Optional): Install Node & Yarn. Then run the following to build Feast UI artifacts for use in `feast ui`
```
make build-ui
```
-
-6. Install development dependencies for Feast Python SDK / CLI
+8. Install MySQL (needed for CI dependencies)
+   ```sh
+   brew install mysql
+   ```
+9. Install development dependencies for Feast Python SDK / CLI
```sh
pip install -e ".[dev]"
```
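+
+As referenced in step 1, you can opt in to BuildKit explicitly when it is not your Docker daemon's default builder. A minimal sketch (the image tag here is illustrative):
+```sh
+# Hypothetical one-off build with BuildKit explicitly enabled.
+# DOCKER_BUILDKIT=1 is the standard opt-in on older Docker versions;
+# recent Docker releases already use BuildKit by default.
+DOCKER_BUILDKIT=1 docker build -t feast-local-dev .
+```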
@@ -361,18 +355,17 @@ You can run `test-python-universal-postgres-online` to run all tests against the
TODO
## (Experimental) Feast UI
-See [Feast contributing guide](ui/CONTRIBUTING.md)
+See [Feast contributing guide](https://github.com/feast-dev/feast/blob/master/ui/CONTRIBUTING.md)
## Feast Java Serving
-See [Java contributing guide](java/CONTRIBUTING.md)
+See [Java contributing guide](https://github.com/feast-dev/feast/blob/master/java/CONTRIBUTING.md)
See also development instructions related to the helm chart below at [Developing the Feast Helm charts](#developing-the-feast-helm-charts)
## Developing the Feast Helm charts
-There are 3 helm charts:
+There are 2 helm charts:
- Feast Java feature server
-- Feast Python / Go feature server
-- (deprecated) Feast Python feature server
+- Feast Python feature server
Generally, you can override the images in the helm charts with locally built Docker images, and install the local helm
chart.
@@ -381,58 +374,23 @@ All README's for helm charts are generated using [helm-docs](https://github.com/
(e.g. with `brew install norwoodj/tap/helm-docs`) and then run `make build-helm-docs`.
### Feast Java Feature Server Helm Chart
-See the Java demo example (it has development instructions too using minikube) [here](examples/java-demo/README.md)
+See the Java demo example (it also has development instructions using minikube) [here](https://github.com/feast-dev/feast/blob/master/examples/java-demo/README.md)
It will:
- run `make build-java-docker-dev` to build local Java feature server binaries
- configure the included `application-override.yaml` to override the image tag to use the locally built dev images.
- install the local chart with `helm install feast-release ../../../infra/charts/feast --values application-override.yaml`
-### Feast Python / Go Feature Server Helm Chart
+### Feast Python Feature Server Helm Chart
See the Python demo example (it also has development instructions using minikube) [here](examples/python-helm-demo/README.md)
It will:
- run `make build-feature-server-dev` to build a local python feature server binary
- install the local chart with `helm install feast-release ../../../infra/charts/feast-feature-server --set image.tag=dev --set feature_store_yaml_base64=$(base64 feature_store.yaml)`
-## Feast Go Client
-### Go Environment Setup
-Setting up your development environment for Feast Go SDK:
-
-- Install Golang, [`protoc` with the Golang & grpc plugins](https://developers.google.com/protocol-buffers/docs/gotutorial#compiling-your-protocol-buffers)
-
-### Building Go
-Build the Feast Go Client with the `go` toolchain:
-```sh
-make compile-go-lib
-```
-
-### Go Code Style & Linting
-Feast Go Client codebase:
-- Conforms to the code style enforced by `go fmt`.
-- Is lintable by `go vet`.
-
-Autoformat your Go code to satisfy the Code Style standard:
-```sh
-go fmt
-```
-
-Lint your Go code:
-```sh
-go vet
-```
-
-> Setup [pre-commit hooks](#pre-commit-hooks) to automatically format and lint on commit.
-
-### Go Unit Tests
-Unit tests for the Feast Go Client can be run as follows:
-```sh
-make test-go
-```
-
### Testing with Github Actions workflows
-Please refer to the maintainers [doc](./docs/project/maintainers.md) if you would like to locally test out the github actions workflow changes.
+Please refer to the maintainers [doc](maintainers.md) if you would like to locally test out the GitHub Actions workflow changes.
This document will help you set up your fork to test the CI integration tests and other workflows without needing to make a pull request against feast-dev master.
## Feast Data Storage Format
diff --git a/docs/project/release-process.md b/docs/project/release-process.md
index 0aa7d3fb5b..d3ff34bbc3 100644
--- a/docs/project/release-process.md
+++ b/docs/project/release-process.md
@@ -16,14 +16,14 @@ If you were cutting Feast 0.22.3, for example, you might do:
After this step, you will have all the changes you need in the branch.
-### 2. Pre-release verification
+### 2. Pre-release verification (currently broken)
A lot of things can go wrong. One of the most common is getting the wheels to build correctly (and not accidentally
building dev wheels from improper tagging or local code changes during the release process).
Another possible failure is that the Docker images might not build correctly.
We verify building the wheels and Docker images in **your fork** of Feast, not the main feast-dev/feast repo.
-#### For minor releases (e.g. v0.22.0)
+#### 2a. Verifying minor releases (e.g. v0.22.0)
1. Merge upstream master changes into your **fork**. Make sure you are running the workflow off of your fork!
2. Create a tag manually for the release on your fork. For example, if you are doing a release for version 0.22.0, create a tag by doing the following.
- Checkout master branch and run `git tag v0.22.0`.
@@ -37,17 +37,7 @@ We verify the building the wheels and Docker images in **your fork** of Feast, n
5. Run the workflow off of the tag you just created (`v0.22.0` in this case, **not** the master branch) and verify that
the workflow worked (i.e. ensure that all jobs are green).
-#### For patch releases (e.g. v0.22.3)
-You should already have checked out the existing minor release branch from step 1 (e.g. `v0.22-branch`).
-1. Push the minor release branch to your fork (`git push -u origin `).
-2. Add a patch release tag (e.g `v0.22.1`) by running `git tag `.
- > This is important. If you don't have a tag, then the wheels you build will be **dev wheels**, which we can't
- > push. The release process will automatically produce a tag for you via Semantic Release.
-3. Push tags to your **origin branch** (not the upstream feast-dev/feast branch) with `git push origin `.
-4. Kick off `build_wheels` workflow in your fork in the same way as is detailed in the last section, running the
- workflow from this tag you just pushed up.
-
### 3. Release for Python and Java SDK
1. Generate a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) or retrieve your saved personal access token.
* The personal access token should have all of the permissions under the `repo` checkbox.
2. Access the `Actions` tab on the main `feast-dev/feast` repo and find the `release` action.
@@ -81,7 +71,7 @@ so it helps to have a high level overview. See https://github.com/feast-dev/feas
#### 4c: Update documentation
-In the Feast Gitbook (ask [Danny Chiao](https://tectonfeast.slack.com/team/U029405HFEU) in Slack for access):
+In the Feast Gitbook:
1. Create a new space within the Feast collection
2. Go to the overflow menu on the top -> Synchronize with Git
1. Specify GitHub as the provider
diff --git a/docs/reference/batch-materialization/bytewax.md b/docs/reference/batch-materialization/bytewax.md
index bd98a4dc6e..6a97bd391d 100644
--- a/docs/reference/batch-materialization/bytewax.md
+++ b/docs/reference/batch-materialization/bytewax.md
@@ -23,6 +23,8 @@ To configure secrets, first create them using `kubectl`:
kubectl create secret generic -n bytewax aws-credentials --from-literal=aws-access-key-id='' --from-literal=aws-secret-access-key=''
```
+If your Docker registry requires authentication to store/pull containers, you can use this same approach to store your repository access credentials and use them when running the materialization engine.
+
Then configure them in the batch_engine section of `feature_store.yaml`:
``` yaml
@@ -40,6 +42,8 @@ batch_engine:
secretKeyRef:
name: aws-credentials
key: aws-secret-access-key
+ image_pull_secrets:
+ - docker-repository-access-secret
```
#### Configuration
@@ -51,9 +55,30 @@ batch_engine:
type: bytewax
namespace: bytewax
image: bytewax/bytewax-feast:latest
+ image_pull_secrets:
+ - my_container_secret
+ service_account_name: my-k8s-service-account
+ include_security_context_capabilities: false
+ annotations:
+ # example annotation you might include if running on AWS EKS
+ iam.amazonaws.com/role: arn:aws:iam:::role/MyBytewaxPlatformRole
+ resources:
+ limits:
+ cpu: 1000m
+ memory: 2048Mi
+ requests:
+ cpu: 500m
+ memory: 1024Mi
```
-The `namespace` configuration directive specifies which Kubernetes [namespace](https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/) jobs, services and configuration maps will be created in.
+**Notes:**
+
+* The `namespace` configuration directive specifies which Kubernetes [namespace](https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/) jobs, services and configuration maps will be created in.
+* The `image_pull_secrets` configuration directive specifies the pre-configured secret to use when pulling the container image from your registry.
+* The `service_account_name` specifies which Kubernetes service account to run the job under.
+* The `include_security_context_capabilities` flag indicates whether or not `"add": ["NET_BIND_SERVICE"]` and `"drop": ["ALL"]` are included in the job & pod security context capabilities.
+* `annotations` allows you to attach additional Kubernetes annotations to the job. This is particularly useful for IAM roles that grant the running pod access to cloud platform resources, for example.
+* The `resources` configuration directive sets the standard Kubernetes [resource requests](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/) for the job containers to utilise when materializing data.
#### Building a custom Bytewax Docker image
diff --git a/docs/reference/feast-cli-commands.md b/docs/reference/feast-cli-commands.md
index 38e85843d4..7bdea19e61 100644
--- a/docs/reference/feast-cli-commands.md
+++ b/docs/reference/feast-cli-commands.md
@@ -11,8 +11,6 @@ Usage: feast [OPTIONS] COMMAND [ARGS]...
For more information, see our public docs at https://docs.feast.dev/
- For any questions, you can reach us at https://slack.feast.dev/
-
Options:
-c, --chdir TEXT Switch to a different feature repository directory before
executing the given subcommand.
diff --git a/docs/reference/offline-stores/overview.md b/docs/reference/offline-stores/overview.md
index 10f99813ba..8ce9045496 100644
--- a/docs/reference/offline-stores/overview.md
+++ b/docs/reference/offline-stores/overview.md
@@ -47,10 +47,10 @@ Below is a matrix indicating which `RetrievalJob`s support what functionality.
| export to dataframe | yes | yes | yes | yes | yes | yes | yes |
| export to arrow table | yes | yes | yes | yes | yes | yes | yes |
| export to arrow batches | no | no | no | yes | no | no | no |
-| export to SQL | no | yes | no | yes | yes | no | yes |
+| export to SQL | no | yes | yes | yes | yes | no | yes |
| export to data lake (S3, GCS, etc.) | no | no | yes | no | yes | no | no |
| export to data warehouse | no | yes | yes | yes | yes | no | no |
-| export as Spark dataframe | no | no | no | no | no | yes | no |
+| export as Spark dataframe | no | no | yes | no | no | yes | no |
| local execution of Python-based on-demand transforms | yes | yes | yes | yes | yes | no | yes |
| remote execution of Python-based on-demand transforms | no | no | no | no | no | no | no |
| persist results in the offline store | yes | yes | yes | yes | yes | yes | no |
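+
+For orientation, here is a minimal sketch of exporting a `RetrievalJob` (the entity dataframe and feature reference are illustrative placeholders; which export methods exist depends on the offline store, per the matrix above):
+
+```python
+from datetime import datetime
+
+import pandas as pd
+from feast import FeatureStore
+
+store = FeatureStore(repo_path=".")  # assumes a configured feature repo
+
+entity_df = pd.DataFrame(
+    {"driver_id": [1001], "event_timestamp": [datetime.utcnow()]}
+)
+
+# get_historical_features returns a RetrievalJob for the configured store.
+job = store.get_historical_features(
+    entity_df=entity_df,
+    features=["driver_hourly_stats:conv_rate"],  # illustrative feature ref
+)
+
+df = job.to_df()        # "export to dataframe" -- supported by all stores
+table = job.to_arrow()  # "export to arrow table" -- supported by all stores
+```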
diff --git a/docs/reference/offline-stores/redshift.md b/docs/reference/offline-stores/redshift.md
index 98092c44be..e9bcbfeff1 100644
--- a/docs/reference/offline-stores/redshift.md
+++ b/docs/reference/offline-stores/redshift.md
@@ -155,3 +155,25 @@ While the following trust relationship is necessary to make sure that Redshift,
]
}
```
+
+
+## Redshift Serverless
+
+In order to use [AWS Redshift Serverless](https://aws.amazon.com/redshift/redshift-serverless/), specify a `workgroup` instead of a `cluster_id` and `user`.
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: aws
+offline_store:
+ type: redshift
+ region: us-west-2
+ workgroup: feast-workgroup
+ database: feast-database
+ s3_staging_location: s3://feast-bucket/redshift
+ iam_role: arn:aws:iam::123456789012:role/redshift_s3_access_role
+```
+{% endcode %}
+
+Please note that the IAM policies above will need the [redshift-serverless](https://aws.permissions.cloud/iam/redshift-serverless) version, rather than the standard [redshift](https://aws.permissions.cloud/iam/redshift).
\ No newline at end of file
diff --git a/docs/reference/offline-stores/snowflake.md b/docs/reference/offline-stores/snowflake.md
index 4ac7f16436..9f2dafee67 100644
--- a/docs/reference/offline-stores/snowflake.md
+++ b/docs/reference/offline-stores/snowflake.md
@@ -7,7 +7,7 @@ The [Snowflake](https://trial.snowflake.com) offline store provides support for
* Entity dataframes can be provided as a SQL query or as a Pandas dataframe. A Pandas dataframe will be uploaded to Snowflake as a temporary table in order to complete join operations.
## Getting started
-In order to use this offline store, you'll need to run `pip install 'feast[snowflake]'`.
+In order to use this offline store, you'll need to run `pip install 'feast[snowflake]'`.
If you're using a file based registry, then you'll also need to install the relevant cloud extra (`pip install 'feast[snowflake, CLOUD]'` where `CLOUD` is one of `aws`, `gcp`, `azure`)
@@ -25,9 +25,10 @@ offline_store:
account: snowflake_deployment.us-east-1
user: user_login
password: user_password
- role: sysadmin
- warehouse: demo_wh
+ role: SYSADMIN
+ warehouse: COMPUTE_WH
database: FEAST
+ schema: PUBLIC
```
{% endcode %}
@@ -52,11 +53,11 @@ Below is a matrix indicating which functionality is supported by `SnowflakeRetri
| ----------------------------------------------------- | --------- |
| export to dataframe | yes |
| export to arrow table | yes |
-| export to arrow batches | no |
+| export to arrow batches | yes |
| export to SQL | yes |
| export to data lake (S3, GCS, etc.) | yes |
| export to data warehouse | yes |
-| export as Spark dataframe | no |
+| export as Spark dataframe | yes |
| local execution of Python-based on-demand transforms | yes |
| remote execution of Python-based on-demand transforms | no |
| persist results in the offline store | yes |
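+
+To illustrate, a minimal sketch of a historical retrieval against the Snowflake offline store (the entity rows and feature reference are placeholders; `to_sql()` is assumed from the `SnowflakeRetrievalJob` API, so check the SDK reference for your version):
+
+```python
+from datetime import datetime
+
+import pandas as pd
+from feast import FeatureStore
+
+store = FeatureStore(repo_path=".")  # uses the feature_store.yaml shown above
+
+entity_df = pd.DataFrame(
+    {"driver_id": [1001], "event_timestamp": [datetime.utcnow()]}
+)
+
+job = store.get_historical_features(
+    entity_df=entity_df,
+    features=["driver_hourly_stats:conv_rate"],  # illustrative feature ref
+)
+
+df = job.to_df()    # export to a Pandas dataframe
+sql = job.to_sql()  # export the retrieval as SQL (assumed method name)
+```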
diff --git a/docs/reference/offline-stores/trino.md b/docs/reference/offline-stores/trino.md
index 446db620e3..fd437a7aa6 100644
--- a/docs/reference/offline-stores/trino.md
+++ b/docs/reference/offline-stores/trino.md
@@ -27,6 +27,47 @@ offline_store:
catalog: memory
connector:
type: memory
+ user: trino
+ source: feast-trino-offline-store
+ http-scheme: https
+ ssl-verify: false
+ x-trino-extra-credential-header: foo=bar, baz=qux
+
+ # enables authentication in Trino connections, pick the one you need
+ # if you don't need authentication, you can safely remove the whole auth block
+ auth:
+ # Basic Auth
+ type: basic
+ config:
+ username: foo
+ password: $FOO
+
+ # Certificate
+ type: certificate
+ config:
+ cert-file: /path/to/cert/file
+ key-file: /path/to/key/file
+
+ # JWT
+ type: jwt
+ config:
+ token: $JWT_TOKEN
+
+ # OAuth2 (no config required)
+ type: oauth2
+
+ # Kerberos
+ type: kerberos
+ config:
+ config-file: /path/to/kerberos/config/file
+ service-name: foo
+ mutual-authentication: true
+ force-preemptive: true
+ hostname-override: custom-hostname
+ sanitize-mutual-error-response: true
+ principal: principal-name
+ delegate: true
+ ca_bundle: /path/to/ca/bundle/file
online_store:
path: data/online_store.db
```
diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md
index e46fc28d16..f86e6f6a1d 100644
--- a/docs/reference/online-stores/README.md
+++ b/docs/reference/online-stores/README.md
@@ -18,6 +18,10 @@ Please see [Online Store](../../getting-started/architecture-and-components/onli
[redis.md](redis.md)
{% endcontent-ref %}
+{% content-ref url="dragonfly.md" %}
+[dragonfly.md](dragonfly.md)
+{% endcontent-ref %}
+
{% content-ref url="datastore.md" %}
[datastore.md](datastore.md)
{% endcontent-ref %}
@@ -42,3 +46,12 @@ Please see [Online Store](../../getting-started/architecture-and-components/onli
[mysql.md](mysql.md)
{% endcontent-ref %}
+{% content-ref url="rockset.md" %}
+[rockset.md](rockset.md)
+{% endcontent-ref %}
+
+{% content-ref url="hazelcast.md" %}
+[hazelcast.md](hazelcast.md)
+{% endcontent-ref %}
+
+
diff --git a/docs/reference/online-stores/cassandra.md b/docs/reference/online-stores/cassandra.md
index 30514305b6..61659ba7a2 100644
--- a/docs/reference/online-stores/cassandra.md
+++ b/docs/reference/online-stores/cassandra.md
@@ -2,7 +2,7 @@
## Description
-The [Cassandra / Astra DB] online store provides support for materializing feature values into an Apache Cassandra / Astra DB database for online features.
+The [[Cassandra](https://cassandra.apache.org/_/index.html) / [Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=feast)] online store provides support for materializing feature values into an Apache Cassandra / Astra DB database for online features.
* The whole project is contained within a Cassandra keyspace
* Each feature view is mapped one-to-one to a specific Cassandra table
diff --git a/docs/reference/online-stores/dragonfly.md b/docs/reference/online-stores/dragonfly.md
new file mode 100644
index 0000000000..bcd814ecc4
--- /dev/null
+++ b/docs/reference/online-stores/dragonfly.md
@@ -0,0 +1,90 @@
+# Dragonfly online store
+
+## Description
+
+[Dragonfly](https://github.com/dragonflydb/dragonfly) is a modern in-memory datastore that implements novel algorithms and data structures on top of a multi-threaded, shared-nothing architecture. Thanks to its API compatibility, Dragonfly can act as a drop-in replacement for Redis. Due to Dragonfly's hardware efficiency, you can run a single node on a small 8GB instance or scale vertically to large 768GB machines with 64 cores. This greatly reduces infrastructure costs as well as architectural complexity.
+
+Similar to Redis, Dragonfly can be used as an online feature store for Feast.
+
+## Using Dragonfly as a drop-in Feast online store instead of Redis
+
+Make sure you have Python and `pip` installed.
+
+Install the Feast SDK and CLI:
+
+`pip install feast`
+
+In order to use Dragonfly as the online store, you'll need to install the redis extra:
+
+`pip install 'feast[redis]'`
+
+### 1. Create a feature repository
+
+Bootstrap a new feature repository:
+
+```
+feast init feast_dragonfly
+cd feast_dragonfly/feature_repo
+```
+
+Update `feature_repo/feature_store.yaml` with the below contents:
+
+```
+project: feast_dragonfly
+registry: data/registry.db
+provider: local
+online_store:
+  type: redis
+  connection_string: "localhost:6379"
+```
+
+### 2. Start Dragonfly
+
+There are several options available to get Dragonfly up and running quickly. We will be using Docker for this tutorial.
+
+`docker run --network=host --ulimit memlock=-1 docker.dragonflydb.io/dragonflydb/dragonfly`
+
+### 3. Register feature definitions and deploy your feature store
+
+`feast apply`
+
+The `apply` command scans Python files in the current directory (`example_repo.py` in this case) for feature view/entity definitions, registers the objects, and deploys infrastructure.
+You should see the following output:
+
+```
+....
+Created entity driver
+Created feature view driver_hourly_stats_fresh
+Created feature view driver_hourly_stats
+Created on demand feature view transformed_conv_rate
+Created on demand feature view transformed_conv_rate_fresh
+Created feature service driver_activity_v1
+Created feature service driver_activity_v3
+Created feature service driver_activity_v2
+```
+
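+You can now materialize features into Dragonfly and read them back through the standard Feast Python SDK. A minimal sketch, assuming the feature and entity names from the default `feast init` template:
+
+```python
+from datetime import datetime
+
+from feast import FeatureStore
+
+store = FeatureStore(repo_path=".")
+
+# Load feature values into Dragonfly (reached over the Redis protocol).
+store.materialize_incremental(end_date=datetime.utcnow())
+
+# Read the freshest values back at low latency.
+features = store.get_online_features(
+    features=[
+        "driver_hourly_stats:conv_rate",
+        "driver_hourly_stats:acc_rate",
+    ],
+    entity_rows=[{"driver_id": 1001}],
+).to_dict()
+print(features)
+```
+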
+## Functionality Matrix
+
+The set of functionality supported by online stores is described in detail [here](overview.md#functionality).
+Below is a matrix indicating which functionality is supported by Dragonfly when used through the Redis online store.
+
+| | Redis |
+| :-------------------------------------------------------- | :---- |
+| write feature values to the online store | yes |
+| read feature values from the online store | yes |
+| update infrastructure (e.g. tables) in the online store | yes |
+| teardown infrastructure (e.g. tables) in the online store | yes |
+| generate a plan of infrastructure changes | no |
+| support for on-demand transforms | yes |
+| readable by Python SDK | yes |
+| readable by Java | yes |
+| readable by Go | yes |
+| support for entityless feature views | yes |
+| support for concurrent writing to the same key | yes |
+| support for ttl (time to live) at retrieval | yes |
+| support for deleting expired data | yes |
+| collocated by feature view | no |
+| collocated by feature service | no |
+| collocated by entity key | yes |
+
+To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix).
diff --git a/docs/reference/online-stores/hazelcast.md b/docs/reference/online-stores/hazelcast.md
new file mode 100644
index 0000000000..ef65f42b31
--- /dev/null
+++ b/docs/reference/online-stores/hazelcast.md
@@ -0,0 +1,59 @@
+# Hazelcast online store
+
+## Description
+
+The Hazelcast online store is in alpha development.
+
+The [Hazelcast](https://hazelcast.com) online store provides support for materializing feature values into a Hazelcast cluster for serving online features in real-time.
+In order to use Hazelcast as an online store, you need to have a running Hazelcast cluster. You can create a cluster using Hazelcast Viridian Serverless. See this [getting started](https://hazelcast.com/get-started/) page for more details.
+
+* Each feature view is mapped one-to-one to a specific Hazelcast IMap.
+* This implementation inherits all strengths of Hazelcast such as high availability, fault tolerance, and data distribution.
+* Secure TLS/SSL connections are supported by the Hazelcast online store.
+* You can set a TTL (Time-To-Live) for your features in the Hazelcast cluster.
+
+Each feature view corresponds to an IMap in the Hazelcast cluster, and the entries in that IMap correspond to the features of entities.
+Each feature value is stored separately and can be retrieved individually.
+
+## Getting started
+
+In order to use the Hazelcast online store, you'll need to run `pip install 'feast[hazelcast]'`. You can then get started with the command `feast init REPO_NAME -t hazelcast`.
+
+
+## Examples
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+registry: data/registry.db
+provider: local
+online_store:
+ type: hazelcast
+ cluster_name: dev
+ cluster_members: ["localhost:5701"]
+ key_ttl_seconds: 36000
+```
+{% endcode %}
+
+## Functionality Matrix
+
+| | Hazelcast |
+| :-------------------------------------------------------- |:----------|
+| write feature values to the online store | yes |
+| read feature values from the online store | yes |
+| update infrastructure (e.g. tables) in the online store | yes |
+| teardown infrastructure (e.g. tables) in the online store | yes |
+| generate a plan of infrastructure changes | no |
+| support for on-demand transforms | yes |
+| readable by Python SDK | yes |
+| readable by Java | no |
+| readable by Go | no |
+| support for entityless feature views | yes |
+| support for concurrent writing to the same key | yes |
+| support for ttl (time to live) at retrieval | yes |
+| support for deleting expired data | yes |
+| collocated by feature view | no |
+| collocated by feature service | no |
+| collocated by entity key | yes |
+
+To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix).
+
diff --git a/docs/reference/online-stores/overview.md b/docs/reference/online-stores/overview.md
index 981a1aeeed..7a51a9a468 100644
--- a/docs/reference/online-stores/overview.md
+++ b/docs/reference/online-stores/overview.md
@@ -34,7 +34,7 @@ Details for each specific online store, such as how to configure it in a `featur
Below is a matrix indicating which online stores support what functionality.
-| | Sqlite | Redis | DynamoDB | Snowflake | Datastore | Postgres | Hbase | Cassandra |
+| | Sqlite | Redis | DynamoDB | Snowflake | Datastore | Postgres | Hbase | [[Cassandra](https://cassandra.apache.org/_/index.html) / [Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=feast)] |
| :-------------------------------------------------------- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | :-- |
| write feature values to the online store | yes | yes | yes | yes | yes | yes | yes | yes |
| read feature values from the online store | yes | yes | yes | yes | yes | yes | yes | yes |
diff --git a/docs/reference/online-stores/rockset.md b/docs/reference/online-stores/rockset.md
new file mode 100644
index 0000000000..082bddf37b
--- /dev/null
+++ b/docs/reference/online-stores/rockset.md
@@ -0,0 +1,84 @@
+# Rockset (contrib)
+
+## Description
+
+The Rockset online store is in alpha development.
+
+The [Rockset](https://rockset.com/demo-signup/) online store provides support for materializing feature values within a Rockset collection in order to serve features in real-time.
+
+* Each document is uniquely identified by its '_id' value. Repeated inserts into the same document '_id' will result in an upsert.
+
+Rockset indexes all columns, allowing for quick per-feature lookups, and also supports a dynamically typed schema that can change based on any new requirements. API keys can be found in the Rockset console.
+You can also find host URLs on the same tab by clicking "View Region Endpoint Urls".
+
+Data model used per document:
+
+```
+{
+ "_id": (STRING) Unique Identifier for the feature document.
+ : (STRING) Feature Values Mapped by Feature Name. Feature
+ values stored as a serialized hex string.
+ ....
+ "event_ts": (STRING) ISO Stringified Timestamp.
+ "created_ts": (STRING) ISO Stringified Timestamp.
+}
+```
+
+
+## Example
+
+```yaml
+project: my_feature_app
+registry: data/registry.db
+provider: local
+online_store:
+ ## Basic Configs ##
+
+ # If apikey or host is left blank the driver will try to pull
+ # these values from environment variables ROCKSET_APIKEY and
+ # ROCKSET_APISERVER respectively.
+ type: rockset
+ api_key:
+ host:
+
+ ## Advanced Configs ##
+
+ # Batch size of records that will be returned per page when
+ # paginating a batched read.
+ #
+ # read_pagination_batch_size: 100
+
+ # The amount of time, in seconds, we will wait for the
+ # collection to become visible to the API.
+ #
+ # collection_created_timeout_secs: 60
+
+ # The amount of time, in seconds, we will wait for the
+ # collection to enter READY state.
+ #
+ # collection_ready_timeout_secs: 1800
+
+ # Whether to wait for all writes to be flushed from log
+ # and queryable before returning write as completed. If
+ # False, documents that are written may not be seen
+ # immediately in subsequent reads.
+ #
+ # fence_all_writes: True
+
+ # The amount of time we will wait, in seconds, for the
+ # write fence to be passed
+ #
+ # fence_timeout_secs: 600
+
+ # Initial backoff, in seconds, we will wait between
+ # requests when polling for a response.
+ #
+ # initial_request_backoff_secs: 2
+
+ # Maximum backoff, in seconds, we will wait between
+ # requests when polling for a response.
+ # max_request_backoff_secs: 30
+
+ # The max amount of times we will retry a failed request.
+ # max_request_attempts: 10000
+```
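+
+As the comments above note, if `api_key` or `host` are left blank the driver falls back to the `ROCKSET_APIKEY` and `ROCKSET_APISERVER` environment variables. A minimal sketch of setting them from Python (the values are placeholders):
+
+```python
+import os
+
+# Read by the Rockset online store when api_key / host are blank in
+# feature_store.yaml. Replace the placeholders with your own values.
+os.environ["ROCKSET_APIKEY"] = "<your-api-key>"
+os.environ["ROCKSET_APISERVER"] = "<your-region-endpoint-url>"
+```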
diff --git a/docs/reference/registry/snowflake.md b/docs/reference/registry/snowflake.md
new file mode 100644
index 0000000000..31b0db9582
--- /dev/null
+++ b/docs/reference/registry/snowflake.md
@@ -0,0 +1,30 @@
+# Snowflake registry
+
+## Description
+
+The [Snowflake](https://trial.snowflake.com) registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.). Because Snowflake is an ACID-compliant database, changes to individual objects are made atomically.
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: my_feature_repo
+provider: local
+registry:
+ registry_type: snowflake.registry
+ account: snowflake_deployment.us-east-1
+ user: user_login
+ password: user_password
+ role: SYSADMIN
+ warehouse: COMPUTE_WH
+ database: FEAST
+ schema: PUBLIC
+ cache_ttl_seconds: 60
+offline_store:
+ ...
+```
+{% endcode %}
+
+The full set of configuration options is available in [SnowflakeRegistryConfig](https://rtd.feast.dev/en/latest/#feast.infra.registry.snowflake.SnowflakeRegistryConfig).
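+
+Once configured, the registry is used transparently by the SDK and CLI. As a minimal sketch, listing the objects registered in Snowflake:
+
+```python
+from feast import FeatureStore
+
+# Reads feature_store.yaml, including the Snowflake registry config above.
+store = FeatureStore(repo_path=".")
+
+for fv in store.list_feature_views():
+    print(fv.name)
+```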
diff --git a/docs/roadmap.md b/docs/roadmap.md
index e75e58849b..a04ede7c99 100644
--- a/docs/roadmap.md
+++ b/docs/roadmap.md
@@ -3,7 +3,6 @@
The list below contains the functionality that contributors are planning to develop for Feast.
* We welcome contribution to all items in the roadmap!
-* Have questions about the roadmap? Go to the Slack channel to ask on #feast-development.
* **Data Sources**
* [x] [Snowflake source](https://docs.feast.dev/reference/data-sources/snowflake)
@@ -33,6 +32,7 @@ The list below contains the functionality that contributors are planning to deve
* [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore)
* [x] [Bigtable](https://docs.feast.dev/reference/online-stores/bigtable)
* [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite)
+ * [x] [Dragonfly](https://docs.feast.dev/reference/online-stores/dragonfly)
* [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure)
* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres)
* [x] [Cassandra / AstraDB (contrib plugin)](https://docs.feast.dev/reference/online-stores/cassandra)
diff --git a/docs/tutorials/azure/notebooks/src/score.py b/docs/tutorials/azure/notebooks/src/score.py
index 93b248240d..7def7d2d2a 100644
--- a/docs/tutorials/azure/notebooks/src/score.py
+++ b/docs/tutorials/azure/notebooks/src/score.py
@@ -6,9 +6,11 @@
import json
import joblib
from feast import FeatureStore, RepoConfig
-from feast.infra.registry.registry import RegistryConfig
+from feast.repo_config import RegistryConfig
-from feast.infra.offline_stores.contrib.mssql_offline_store.mssql import MsSqlServerOfflineStoreConfig
+from feast.infra.offline_stores.contrib.mssql_offline_store.mssql import (
+ MsSqlServerOfflineStoreConfig,
+)
from feast.infra.online_stores.redis import RedisOnlineStoreConfig, RedisOnlineStore
@@ -73,4 +75,4 @@ def run(raw_data):
y_hat = model.predict(data)
return y_hat.tolist()
else:
- return 0.0
\ No newline at end of file
+ return 0.0
diff --git a/docs/tutorials/validating-historical-features.md b/docs/tutorials/validating-historical-features.md
index 70be38eced..03baccfbc9 100644
--- a/docs/tutorials/validating-historical-features.md
+++ b/docs/tutorials/validating-historical-features.md
@@ -136,8 +136,8 @@ taxi_entity = Entity(name='taxi', join_keys=['taxi_id'])
```python
trips_stats_fv = BatchFeatureView(
name='trip_stats',
- entities=['taxi'],
- features=[
+ entities=[taxi_entity],
+ schema=[
Field(name="total_miles_travelled", dtype=Float64),
Field(name="total_trip_seconds", dtype=Float64),
Field(name="total_earned", dtype=Float64),
@@ -154,17 +154,17 @@ trips_stats_fv = BatchFeatureView(
```python
@on_demand_feature_view(
- schema=[
- Field("avg_fare", Float64),
- Field("avg_speed", Float64),
- Field("avg_trip_seconds", Float64),
- Field("earned_per_hour", Float64),
- ],
sources=[
trips_stats_fv,
+ ],
+ schema=[
+ Field(name="avg_fare", dtype=Float64),
+ Field(name="avg_speed", dtype=Float64),
+ Field(name="avg_trip_seconds", dtype=Float64),
+ Field(name="earned_per_hour", dtype=Float64),
]
)
-def on_demand_stats(inp):
+def on_demand_stats(inp: pd.DataFrame) -> pd.DataFrame:
out = pd.DataFrame()
out["avg_fare"] = inp["total_earned"] / inp["trip_count"]
out["avg_speed"] = 3600 * inp["total_miles_travelled"] / inp["total_trip_seconds"]
@@ -647,7 +647,7 @@ Now we can create validation reference from dataset and profiler function:
```python
-validation_reference = ds.as_reference(profiler=stats_profiler)
+validation_reference = ds.as_reference(name="validation_reference_dataset", profiler=stats_profiler)
```
and test it against our existing retrieval job
diff --git a/examples/quickstart/quickstart.ipynb b/examples/quickstart/quickstart.ipynb
index cec4df91b1..f84457ac02 100644
--- a/examples/quickstart/quickstart.ipynb
+++ b/examples/quickstart/quickstart.ipynb
@@ -155,7 +155,7 @@
}
],
"source": [
- "%cd feature_repo\n",
+ "%cd feature_repo/feature_repo\n",
"!ls -R"
]
},
@@ -1066,7 +1066,6 @@
"- Read the [Concepts](https://docs.feast.dev/getting-started/concepts/) page to understand the Feast data model and architecture.\n",
"- Check out our [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview) section for more examples on how to use Feast.\n",
"- Follow our [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws) guide for a more in-depth tutorial on using Feast.\n",
- "- Join other Feast users and contributors in [Slack](https://slack.feast.dev/) and become part of the community!"
]
}
],
@@ -1101,4 +1100,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
-}
+}
\ No newline at end of file
diff --git a/go.mod b/go.mod
index 3c05383ffc..68d47d7070 100644
--- a/go.mod
+++ b/go.mod
@@ -7,7 +7,6 @@ replace github.com/go-python/gopy v0.4.4 => github.com/feast-dev/gopy v0.4.1-0.2
require (
github.com/apache/arrow/go/v8 v8.0.0
github.com/ghodss/yaml v1.0.0
- github.com/go-python/gopy v0.4.4
github.com/go-redis/redis/v8 v8.11.4
github.com/golang/protobuf v1.5.2
github.com/google/uuid v1.3.0
@@ -15,21 +14,19 @@ require (
github.com/pkg/errors v0.9.1
github.com/spaolacci/murmur3 v1.1.0
github.com/stretchr/testify v1.7.0
- google.golang.org/grpc v1.47.0
- google.golang.org/protobuf v1.28.0
+ google.golang.org/grpc v1.53.0
+ google.golang.org/protobuf v1.28.1
)
require (
github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect
github.com/andybalholm/brotli v1.0.4 // indirect
github.com/apache/thrift v0.15.0 // indirect
- github.com/cespare/xxhash/v2 v2.1.2 // indirect
+ github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/goccy/go-json v0.9.6 // indirect
github.com/golang/snappy v0.0.4 // indirect
- github.com/gonuts/commander v0.1.0 // indirect
- github.com/gonuts/flag v0.1.0 // indirect
github.com/google/flatbuffers v2.0.6+incompatible // indirect
github.com/klauspost/asmfmt v1.3.2 // indirect
github.com/klauspost/compress v1.15.1 // indirect
@@ -41,12 +38,12 @@ require (
github.com/zeebo/xxh3 v1.0.2 // indirect
golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
- golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3 // indirect
- golang.org/x/sys v0.0.0-20220412211240-33da011f77ad // indirect
- golang.org/x/text v0.3.7 // indirect
- golang.org/x/tools v0.1.11-0.20220413170336-afc6aad76eb1 // indirect
+ golang.org/x/net v0.7.0 // indirect
+ golang.org/x/sys v0.5.0 // indirect
+ golang.org/x/text v0.7.0 // indirect
+ golang.org/x/tools v0.1.12 // indirect
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect
- google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac // indirect
+ google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
diff --git a/go.sum b/go.sum
index 11f1ba4d74..a2c7501e82 100644
--- a/go.sum
+++ b/go.sum
@@ -1,5 +1,389 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
+cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
+cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
+cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
+cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
+cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
+cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
+cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
+cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
+cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
+cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
+cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
+cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
+cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
+cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY=
+cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
+cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
+cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
+cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY=
+cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM=
+cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY=
+cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ=
+cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=
+cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4=
+cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc=
+cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA=
+cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U=
+cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A=
+cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc=
+cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU=
+cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRYtA=
+cloud.google.com/go v0.105.0/go.mod h1:PrLgOJNe5nfE9UMxKxgXj4mD3voiP+YQ6gdt6KMFOKM=
+cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I=
+cloud.google.com/go/accessapproval v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4=
+cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw=
+cloud.google.com/go/accesscontextmanager v1.3.0/go.mod h1:TgCBehyr5gNMz7ZaH9xubp+CE8dkrszb4oK9CWyvD4o=
+cloud.google.com/go/accesscontextmanager v1.4.0/go.mod h1:/Kjh7BBu/Gh83sv+K60vN9QE5NJcd80sU33vIe2IFPE=
+cloud.google.com/go/aiplatform v1.22.0/go.mod h1:ig5Nct50bZlzV6NvKaTwmplLLddFx0YReh9WfTO5jKw=
+cloud.google.com/go/aiplatform v1.24.0/go.mod h1:67UUvRBKG6GTayHKV8DBv2RtR1t93YRu5B1P3x99mYY=
+cloud.google.com/go/aiplatform v1.27.0/go.mod h1:Bvxqtl40l0WImSb04d0hXFU7gDOiq9jQmorivIiWcKg=
+cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI=
+cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4=
+cloud.google.com/go/apigateway v1.3.0/go.mod h1:89Z8Bhpmxu6AmUxuVRg/ECRGReEdiP3vQtk4Z1J9rJk=
+cloud.google.com/go/apigateway v1.4.0/go.mod h1:pHVY9MKGaH9PQ3pJ4YLzoj6U5FUDeDFBllIz7WmzJoc=
+cloud.google.com/go/apigeeconnect v1.3.0/go.mod h1:G/AwXFAKo0gIXkPTVfZDd2qA1TxBXJ3MgMRBQkIi9jc=
+cloud.google.com/go/apigeeconnect v1.4.0/go.mod h1:kV4NwOKqjvt2JYR0AoIWo2QGfoRtn/pkS3QlHp0Ni04=
+cloud.google.com/go/appengine v1.4.0/go.mod h1:CS2NhuBuDXM9f+qscZ6V86m1MIIqPj3WC/UoEuR1Sno=
+cloud.google.com/go/appengine v1.5.0/go.mod h1:TfasSozdkFI0zeoxW3PTBLiNqRmzraodCWatWI9Dmak=
+cloud.google.com/go/area120 v0.5.0/go.mod h1:DE/n4mp+iqVyvxHN41Vf1CR602GiHQjFPusMFW6bGR4=
+cloud.google.com/go/area120 v0.6.0/go.mod h1:39yFJqWVgm0UZqWTOdqkLhjoC7uFfgXRC8g/ZegeAh0=
+cloud.google.com/go/artifactregistry v1.6.0/go.mod h1:IYt0oBPSAGYj/kprzsBjZ/4LnG/zOcHyFHjWPCi6SAQ=
+cloud.google.com/go/artifactregistry v1.7.0/go.mod h1:mqTOFOnGZx8EtSqK/ZWcsm/4U8B77rbcLP6ruDU2Ixk=
+cloud.google.com/go/artifactregistry v1.8.0/go.mod h1:w3GQXkJX8hiKN0v+at4b0qotwijQbYUqF2GWkZzAhC0=
+cloud.google.com/go/artifactregistry v1.9.0/go.mod h1:2K2RqvA2CYvAeARHRkLDhMDJ3OXy26h3XW+3/Jh2uYc=
+cloud.google.com/go/asset v1.5.0/go.mod h1:5mfs8UvcM5wHhqtSv8J1CtxxaQq3AdBxxQi2jGW/K4o=
+cloud.google.com/go/asset v1.7.0/go.mod h1:YbENsRK4+xTiL+Ofoj5Ckf+O17kJtgp3Y3nn4uzZz5s=
+cloud.google.com/go/asset v1.8.0/go.mod h1:mUNGKhiqIdbr8X7KNayoYvyc4HbbFO9URsjbytpUaW0=
+cloud.google.com/go/asset v1.9.0/go.mod h1:83MOE6jEJBMqFKadM9NLRcs80Gdw76qGuHn8m3h8oHQ=
+cloud.google.com/go/asset v1.10.0/go.mod h1:pLz7uokL80qKhzKr4xXGvBQXnzHn5evJAEAtZiIb0wY=
+cloud.google.com/go/assuredworkloads v1.5.0/go.mod h1:n8HOZ6pff6re5KYfBXcFvSViQjDwxFkAkmUFffJRbbY=
+cloud.google.com/go/assuredworkloads v1.6.0/go.mod h1:yo2YOk37Yc89Rsd5QMVECvjaMKymF9OP+QXWlKXUkXw=
+cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVoYoxeLBoj4XkKYscNI=
+cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo=
+cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0=
+cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0=
+cloud.google.com/go/automl v1.6.0/go.mod h1:ugf8a6Fx+zP0D59WLhqgTDsQI9w07o64uf/Is3Nh5p8=
+cloud.google.com/go/automl v1.7.0/go.mod h1:RL9MYCCsJEOmt0Wf3z9uzG0a7adTT1fe+aObgSpkCt8=
+cloud.google.com/go/automl v1.8.0/go.mod h1:xWx7G/aPEe/NP+qzYXktoBSDfjO+vnKMGgsApGJJquM=
+cloud.google.com/go/baremetalsolution v0.3.0/go.mod h1:XOrocE+pvK1xFfleEnShBlNAXf+j5blPPxrhjKgnIFc=
+cloud.google.com/go/baremetalsolution v0.4.0/go.mod h1:BymplhAadOO/eBa7KewQ0Ppg4A4Wplbn+PsFKRLo0uI=
+cloud.google.com/go/batch v0.3.0/go.mod h1:TR18ZoAekj1GuirsUsR1ZTKN3FC/4UDnScjT8NXImFE=
+cloud.google.com/go/batch v0.4.0/go.mod h1:WZkHnP43R/QCGQsZ+0JyG4i79ranE2u8xvjq/9+STPE=
+cloud.google.com/go/beyondcorp v0.2.0/go.mod h1:TB7Bd+EEtcw9PCPQhCJtJGjk/7TC6ckmnSFS+xwTfm4=
+cloud.google.com/go/beyondcorp v0.3.0/go.mod h1:E5U5lcrcXMsCuoDNyGrpyTm/hn7ne941Jz2vmksAxW8=
+cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
+cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
+cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
+cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
+cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
+cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
+cloud.google.com/go/bigquery v1.42.0/go.mod h1:8dRTJxhtG+vwBKzE5OseQn/hiydoQN3EedCaOdYmxRA=
+cloud.google.com/go/bigquery v1.43.0/go.mod h1:ZMQcXHsl+xmU1z36G2jNGZmKp9zNY5BUua5wDgmNCfw=
+cloud.google.com/go/bigquery v1.44.0/go.mod h1:0Y33VqXTEsbamHJvJHdFmtqHvMIY28aK1+dFsvaChGc=
+cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY=
+cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s=
+cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI=
+cloud.google.com/go/billing v1.7.0/go.mod h1:q457N3Hbj9lYwwRbnlD7vUpyjq6u5U1RAOArInEiD5Y=
+cloud.google.com/go/binaryauthorization v1.1.0/go.mod h1:xwnoWu3Y84jbuHa0zd526MJYmtnVXn0syOjaJgy4+dM=
+cloud.google.com/go/binaryauthorization v1.2.0/go.mod h1:86WKkJHtRcv5ViNABtYMhhNWRrD1Vpi//uKEy7aYEfI=
+cloud.google.com/go/binaryauthorization v1.3.0/go.mod h1:lRZbKgjDIIQvzYQS1p99A7/U1JqvqeZg0wiI5tp6tg0=
+cloud.google.com/go/binaryauthorization v1.4.0/go.mod h1:tsSPQrBd77VLplV70GUhBf/Zm3FsKmgSqgm4UmiDItk=
+cloud.google.com/go/certificatemanager v1.3.0/go.mod h1:n6twGDvcUBFu9uBgt4eYvvf3sQ6My8jADcOVwHmzadg=
+cloud.google.com/go/certificatemanager v1.4.0/go.mod h1:vowpercVFyqs8ABSmrdV+GiFf2H/ch3KyudYQEMM590=
+cloud.google.com/go/channel v1.8.0/go.mod h1:W5SwCXDJsq/rg3tn3oG0LOxpAo6IMxNa09ngphpSlnk=
+cloud.google.com/go/channel v1.9.0/go.mod h1:jcu05W0my9Vx4mt3/rEHpfxc9eKi9XwsdDL8yBMbKUk=
+cloud.google.com/go/cloudbuild v1.3.0/go.mod h1:WequR4ULxlqvMsjDEEEFnOG5ZSRSgWOywXYDb1vPE6U=
+cloud.google.com/go/cloudbuild v1.4.0/go.mod h1:5Qwa40LHiOXmz3386FrjrYM93rM/hdRr7b53sySrTqA=
+cloud.google.com/go/clouddms v1.3.0/go.mod h1:oK6XsCDdW4Ib3jCCBugx+gVjevp2TMXFtgxvPSee3OM=
+cloud.google.com/go/clouddms v1.4.0/go.mod h1:Eh7sUGCC+aKry14O1NRljhjyrr0NFC0G2cjwX0cByRk=
+cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY=
+cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI=
+cloud.google.com/go/cloudtasks v1.7.0/go.mod h1:ImsfdYWwlWNJbdgPIIGJWC+gemEGTBK/SunNQQNCAb4=
+cloud.google.com/go/cloudtasks v1.8.0/go.mod h1:gQXUIwCSOI4yPVK7DgTVFiiP0ZW/eQkydWzwVMdHxrI=
+cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow=
+cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM=
+cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M=
+cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s=
+cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU=
+cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U=
+cloud.google.com/go/compute v1.10.0/go.mod h1:ER5CLbMxl90o2jtNbGSbtfOpQKR0t15FOtRsugnLrlU=
+cloud.google.com/go/compute v1.12.0/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU=
+cloud.google.com/go/compute v1.12.1/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU=
+cloud.google.com/go/compute v1.13.0/go.mod h1:5aPTS0cUNMIc1CE546K+Th6weJUNQErARyZtRXDJ8GE=
+cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo=
+cloud.google.com/go/compute v1.15.1/go.mod h1:bjjoF/NtFUrkD/urWfdHaKuOPDR5nWIs63rR+SXhcpA=
+cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU=
+cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k=
+cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM=
+cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
+cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY=
+cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck=
+cloud.google.com/go/container v1.6.0/go.mod h1:Xazp7GjJSeUYo688S+6J5V+n/t+G5sKBTFkKNudGRxg=
+cloud.google.com/go/container v1.7.0/go.mod h1:Dp5AHtmothHGX3DwwIHPgq45Y8KmNsgN3amoYfxVkLo=
+cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I=
+cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4=
+cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0=
+cloud.google.com/go/datacatalog v1.5.0/go.mod h1:M7GPLNQeLfWqeIm3iuiruhPzkt65+Bx8dAKvScX8jvs=
+cloud.google.com/go/datacatalog v1.6.0/go.mod h1:+aEyF8JKg+uXcIdAmmaMUmZ3q1b/lKLtXCmXdnc0lbc=
+cloud.google.com/go/datacatalog v1.7.0/go.mod h1:9mEl4AuDYWw81UGc41HonIHH7/sn52H0/tc8f8ZbZIE=
+cloud.google.com/go/datacatalog v1.8.0/go.mod h1:KYuoVOv9BM8EYz/4eMFxrr4DUKhGIOXxZoKYF5wdISM=
+cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM=
+cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ=
+cloud.google.com/go/dataform v0.3.0/go.mod h1:cj8uNliRlHpa6L3yVhDOBrUXH+BPAO1+KFMQQNSThKo=
+cloud.google.com/go/dataform v0.4.0/go.mod h1:fwV6Y4Ty2yIFL89huYlEkwUPtS7YZinZbzzj5S9FzCE=
+cloud.google.com/go/dataform v0.5.0/go.mod h1:GFUYRe8IBa2hcomWplodVmUx/iTL0FrsauObOM3Ipr0=
+cloud.google.com/go/datafusion v1.4.0/go.mod h1:1Zb6VN+W6ALo85cXnM1IKiPw+yQMKMhB9TsTSRDo/38=
+cloud.google.com/go/datafusion v1.5.0/go.mod h1:Kz+l1FGHB0J+4XF2fud96WMmRiq/wj8N9u007vyXZ2w=
+cloud.google.com/go/datalabeling v0.5.0/go.mod h1:TGcJ0G2NzcsXSE/97yWjIZO0bXj0KbVlINXMG9ud42I=
+cloud.google.com/go/datalabeling v0.6.0/go.mod h1:WqdISuk/+WIGeMkpw/1q7bK/tFEZxsrFJOJdY2bXvTQ=
+cloud.google.com/go/dataplex v1.3.0/go.mod h1:hQuRtDg+fCiFgC8j0zV222HvzFQdRd+SVX8gdmFcZzA=
+cloud.google.com/go/dataplex v1.4.0/go.mod h1:X51GfLXEMVJ6UN47ESVqvlsRplbLhcsAt0kZCCKsU0A=
+cloud.google.com/go/dataproc v1.7.0/go.mod h1:CKAlMjII9H90RXaMpSxQ8EU6dQx6iAYNPcYPOkSbi8s=
+cloud.google.com/go/dataproc v1.8.0/go.mod h1:5OW+zNAH0pMpw14JVrPONsxMQYMBqJuzORhIBfBn9uI=
+cloud.google.com/go/dataqna v0.5.0/go.mod h1:90Hyk596ft3zUQ8NkFfvICSIfHFh1Bc7C4cK3vbhkeo=
+cloud.google.com/go/dataqna v0.6.0/go.mod h1:1lqNpM7rqNLVgWBJyk5NF6Uen2PHym0jtVJonplVsDA=
+cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
+cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
+cloud.google.com/go/datastore v1.10.0/go.mod h1:PC5UzAmDEkAmkfaknstTYbNpgE49HAgW2J1gcgUfmdM=
+cloud.google.com/go/datastream v1.2.0/go.mod h1:i/uTP8/fZwgATHS/XFu0TcNUhuA0twZxxQ3EyCUQMwo=
+cloud.google.com/go/datastream v1.3.0/go.mod h1:cqlOX8xlyYF/uxhiKn6Hbv6WjwPPuI9W2M9SAXwaLLQ=
+cloud.google.com/go/datastream v1.4.0/go.mod h1:h9dpzScPhDTs5noEMQVWP8Wx8AFBRyS0s8KWPx/9r0g=
+cloud.google.com/go/datastream v1.5.0/go.mod h1:6TZMMNPwjUqZHBKPQ1wwXpb0d5VDVPl2/XoS5yi88q4=
+cloud.google.com/go/deploy v1.4.0/go.mod h1:5Xghikd4VrmMLNaF6FiRFDlHb59VM59YoDQnOUdsH/c=
+cloud.google.com/go/deploy v1.5.0/go.mod h1:ffgdD0B89tToyW/U/D2eL0jN2+IEV/3EMuXHA0l4r+s=
+cloud.google.com/go/dialogflow v1.15.0/go.mod h1:HbHDWs33WOGJgn6rfzBW1Kv807BE3O1+xGbn59zZWI4=
+cloud.google.com/go/dialogflow v1.16.1/go.mod h1:po6LlzGfK+smoSmTBnbkIZY2w8ffjz/RcGSS+sh1el0=
+cloud.google.com/go/dialogflow v1.17.0/go.mod h1:YNP09C/kXA1aZdBgC/VtXX74G/TKn7XVCcVumTflA+8=
+cloud.google.com/go/dialogflow v1.18.0/go.mod h1:trO7Zu5YdyEuR+BhSNOqJezyFQ3aUzz0njv7sMx/iek=
+cloud.google.com/go/dialogflow v1.19.0/go.mod h1:JVmlG1TwykZDtxtTXujec4tQ+D8SBFMoosgy+6Gn0s0=
+cloud.google.com/go/dlp v1.6.0/go.mod h1:9eyB2xIhpU0sVwUixfBubDoRwP+GjeUoxxeueZmqvmM=
+cloud.google.com/go/dlp v1.7.0/go.mod h1:68ak9vCiMBjbasxeVD17hVPxDEck+ExiHavX8kiHG+Q=
+cloud.google.com/go/documentai v1.7.0/go.mod h1:lJvftZB5NRiFSX4moiye1SMxHx0Bc3x1+p9e/RfXYiU=
+cloud.google.com/go/documentai v1.8.0/go.mod h1:xGHNEB7CtsnySCNrCFdCyyMz44RhFEEX2Q7UD0c5IhU=
+cloud.google.com/go/documentai v1.9.0/go.mod h1:FS5485S8R00U10GhgBC0aNGrJxBP8ZVpEeJ7PQDZd6k=
+cloud.google.com/go/documentai v1.10.0/go.mod h1:vod47hKQIPeCfN2QS/jULIvQTugbmdc0ZvxxfQY1bg4=
+cloud.google.com/go/domains v0.6.0/go.mod h1:T9Rz3GasrpYk6mEGHh4rymIhjlnIuB4ofT1wTxDeT4Y=
+cloud.google.com/go/domains v0.7.0/go.mod h1:PtZeqS1xjnXuRPKE/88Iru/LdfoRyEHYA9nFQf4UKpg=
+cloud.google.com/go/edgecontainer v0.1.0/go.mod h1:WgkZ9tp10bFxqO8BLPqv2LlfmQF1X8lZqwW4r1BTajk=
+cloud.google.com/go/edgecontainer v0.2.0/go.mod h1:RTmLijy+lGpQ7BXuTDa4C4ssxyXT34NIuHIgKuP4s5w=
+cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU=
+cloud.google.com/go/essentialcontacts v1.3.0/go.mod h1:r+OnHa5jfj90qIfZDO/VztSFqbQan7HV75p8sA+mdGI=
+cloud.google.com/go/essentialcontacts v1.4.0/go.mod h1:8tRldvHYsmnBCHdFpvU+GL75oWiBKl80BiqlFh9tp+8=
+cloud.google.com/go/eventarc v1.7.0/go.mod h1:6ctpF3zTnaQCxUjHUdcfgcA1A2T309+omHZth7gDfmc=
+cloud.google.com/go/eventarc v1.8.0/go.mod h1:imbzxkyAU4ubfsaKYdQg04WS1NvncblHEup4kvF+4gw=
+cloud.google.com/go/filestore v1.3.0/go.mod h1:+qbvHGvXU1HaKX2nD0WEPo92TP/8AQuCVEBXNY9z0+w=
+cloud.google.com/go/filestore v1.4.0/go.mod h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLYoINEK8XQAI=
+cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE=
+cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk=
+cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg=
+cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY=
+cloud.google.com/go/functions v1.9.0/go.mod h1:Y+Dz8yGguzO3PpIjhLTbnqV1CWmgQ5UwtlpzoyquQ08=
+cloud.google.com/go/gaming v1.5.0/go.mod h1:ol7rGcxP/qHTRQE/RO4bxkXq+Fix0j6D4LFPzYTIrDM=
+cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2sK4KPUA=
+cloud.google.com/go/gaming v1.7.0/go.mod h1:LrB8U7MHdGgFG851iHAfqUdLcKBdQ55hzXy9xBJz0+w=
+cloud.google.com/go/gaming v1.8.0/go.mod h1:xAqjS8b7jAVW0KFYeRUxngo9My3f33kFmua++Pi+ggM=
+cloud.google.com/go/gkebackup v0.2.0/go.mod h1:XKvv/4LfG829/B8B7xRkk8zRrOEbKtEam6yNfuQNH60=
+cloud.google.com/go/gkebackup v0.3.0/go.mod h1:n/E671i1aOQvUxT541aTkCwExO/bTer2HDlj4TsBRAo=
+cloud.google.com/go/gkeconnect v0.5.0/go.mod h1:c5lsNAg5EwAy7fkqX/+goqFsU1Da/jQFqArp+wGNr/o=
+cloud.google.com/go/gkeconnect v0.6.0/go.mod h1:Mln67KyU/sHJEBY8kFZ0xTeyPtzbq9StAVvEULYK16A=
+cloud.google.com/go/gkehub v0.9.0/go.mod h1:WYHN6WG8w9bXU0hqNxt8rm5uxnk8IH+lPY9J2TV7BK0=
+cloud.google.com/go/gkehub v0.10.0/go.mod h1:UIPwxI0DsrpsVoWpLB0stwKCP+WFVG9+y977wO+hBH0=
+cloud.google.com/go/gkemulticloud v0.3.0/go.mod h1:7orzy7O0S+5kq95e4Hpn7RysVA7dPs8W/GgfUtsPbrA=
+cloud.google.com/go/gkemulticloud v0.4.0/go.mod h1:E9gxVBnseLWCk24ch+P9+B2CoDFJZTyIgLKSalC7tuI=
+cloud.google.com/go/grafeas v0.2.0/go.mod h1:KhxgtF2hb0P191HlY5besjYm6MqTSTj3LSI+M+ByZHc=
+cloud.google.com/go/gsuiteaddons v1.3.0/go.mod h1:EUNK/J1lZEZO8yPtykKxLXI6JSVN2rg9bN8SXOa0bgM=
+cloud.google.com/go/gsuiteaddons v1.4.0/go.mod h1:rZK5I8hht7u7HxFQcFei0+AtfS9uSushomRlg+3ua1o=
+cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c=
+cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY=
+cloud.google.com/go/iam v0.5.0/go.mod h1:wPU9Vt0P4UmCux7mqtRu6jcpPAb74cP1fh50J3QpkUc=
+cloud.google.com/go/iam v0.6.0/go.mod h1:+1AH33ueBne5MzYccyMHtEKqLE4/kJOibtffMHDMFMc=
+cloud.google.com/go/iam v0.7.0/go.mod h1:H5Br8wRaDGNc8XP3keLc4unfUUZeyH3Sfl9XpQEYOeg=
+cloud.google.com/go/iam v0.8.0/go.mod h1:lga0/y3iH6CX7sYqypWJ33hf7kkfXJag67naqGESjkE=
+cloud.google.com/go/iap v1.4.0/go.mod h1:RGFwRJdihTINIe4wZ2iCP0zF/qu18ZwyKxrhMhygBEc=
+cloud.google.com/go/iap v1.5.0/go.mod h1:UH/CGgKd4KyohZL5Pt0jSKE4m3FR51qg6FKQ/z/Ix9A=
+cloud.google.com/go/ids v1.1.0/go.mod h1:WIuwCaYVOzHIj2OhN9HAwvW+DBdmUAdcWlFxRl+KubM=
+cloud.google.com/go/ids v1.2.0/go.mod h1:5WXvp4n25S0rA/mQWAg1YEEBBq6/s+7ml1RDCW1IrcY=
+cloud.google.com/go/iot v1.3.0/go.mod h1:r7RGh2B61+B8oz0AGE+J72AhA0G7tdXItODWsaA2oLs=
+cloud.google.com/go/iot v1.4.0/go.mod h1:dIDxPOn0UvNDUMD8Ger7FIaTuvMkj+aGk94RPP0iV+g=
+cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA=
+cloud.google.com/go/kms v1.5.0/go.mod h1:QJS2YY0eJGBg3mnDfuaCyLauWwBJiHRboYxJ++1xJNg=
+cloud.google.com/go/kms v1.6.0/go.mod h1:Jjy850yySiasBUDi6KFUwUv2n1+o7QZFyuUJg6OgjA0=
+cloud.google.com/go/language v1.4.0/go.mod h1:F9dRpNFQmJbkaop6g0JhSBXCNlO90e1KWx5iDdxbWic=
+cloud.google.com/go/language v1.6.0/go.mod h1:6dJ8t3B+lUYfStgls25GusK04NLh3eDLQnWM3mdEbhI=
+cloud.google.com/go/language v1.7.0/go.mod h1:DJ6dYN/W+SQOjF8e1hLQXMF21AkH2w9wiPzPCJa2MIE=
+cloud.google.com/go/language v1.8.0/go.mod h1:qYPVHf7SPoNNiCL2Dr0FfEFNil1qi3pQEyygwpgVKB8=
+cloud.google.com/go/lifesciences v0.5.0/go.mod h1:3oIKy8ycWGPUyZDR/8RNnTOYevhaMLqh5vLUXs9zvT8=
+cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6t/iPhY2Tyfu08=
+cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw=
+cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE=
+cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc=
+cloud.google.com/go/managedidentities v1.3.0/go.mod h1:UzlW3cBOiPrzucO5qWkNkh0w33KFtBJU281hacNvsdE=
+cloud.google.com/go/managedidentities v1.4.0/go.mod h1:NWSBYbEMgqmbZsLIyKvxrYbtqOsxY1ZrGM+9RgDqInM=
+cloud.google.com/go/maps v0.1.0/go.mod h1:BQM97WGyfw9FWEmQMpZ5T6cpovXXSd1cGmFma94eubI=
+cloud.google.com/go/mediatranslation v0.5.0/go.mod h1:jGPUhGTybqsPQn91pNXw0xVHfuJ3leR1wj37oU3y1f4=
+cloud.google.com/go/mediatranslation v0.6.0/go.mod h1:hHdBCTYNigsBxshbznuIMFNe5QXEowAuNmmC7h8pu5w=
+cloud.google.com/go/memcache v1.4.0/go.mod h1:rTOfiGZtJX1AaFUrOgsMHX5kAzaTQ8azHiuDoTPzNsE=
+cloud.google.com/go/memcache v1.5.0/go.mod h1:dk3fCK7dVo0cUU2c36jKb4VqKPS22BTkf81Xq617aWM=
+cloud.google.com/go/memcache v1.6.0/go.mod h1:XS5xB0eQZdHtTuTF9Hf8eJkKtR3pVRCcvJwtm68T3rA=
+cloud.google.com/go/memcache v1.7.0/go.mod h1:ywMKfjWhNtkQTxrWxCkCFkoPjLHPW6A7WOTVI8xy3LY=
+cloud.google.com/go/metastore v1.5.0/go.mod h1:2ZNrDcQwghfdtCwJ33nM0+GrBGlVuh8rakL3vdPY3XY=
+cloud.google.com/go/metastore v1.6.0/go.mod h1:6cyQTls8CWXzk45G55x57DVQ9gWg7RiH65+YgPsNh9s=
+cloud.google.com/go/metastore v1.7.0/go.mod h1:s45D0B4IlsINu87/AsWiEVYbLaIMeUSoxlKKDqBGFS8=
+cloud.google.com/go/metastore v1.8.0/go.mod h1:zHiMc4ZUpBiM7twCIFQmJ9JMEkDSyZS9U12uf7wHqSI=
+cloud.google.com/go/monitoring v1.7.0/go.mod h1:HpYse6kkGo//7p6sT0wsIC6IBDET0RhIsnmlA53dvEk=
+cloud.google.com/go/monitoring v1.8.0/go.mod h1:E7PtoMJ1kQXWxPjB6mv2fhC5/15jInuulFdYYtlcvT4=
+cloud.google.com/go/networkconnectivity v1.4.0/go.mod h1:nOl7YL8odKyAOtzNX73/M5/mGZgqqMeryi6UPZTk/rA=
+cloud.google.com/go/networkconnectivity v1.5.0/go.mod h1:3GzqJx7uhtlM3kln0+x5wyFvuVH1pIBJjhCpjzSt75o=
+cloud.google.com/go/networkconnectivity v1.6.0/go.mod h1:OJOoEXW+0LAxHh89nXd64uGG+FbQoeH8DtxCHVOMlaM=
+cloud.google.com/go/networkconnectivity v1.7.0/go.mod h1:RMuSbkdbPwNMQjB5HBWD5MpTBnNm39iAVpC3TmsExt8=
+cloud.google.com/go/networkmanagement v1.4.0/go.mod h1:Q9mdLLRn60AsOrPc8rs8iNV6OHXaGcDdsIQe1ohekq8=
+cloud.google.com/go/networkmanagement v1.5.0/go.mod h1:ZnOeZ/evzUdUsnvRt792H0uYEnHQEMaz+REhhzJRcf4=
+cloud.google.com/go/networksecurity v0.5.0/go.mod h1:xS6fOCoqpVC5zx15Z/MqkfDwH4+m/61A3ODiDV1xmiQ=
+cloud.google.com/go/networksecurity v0.6.0/go.mod h1:Q5fjhTr9WMI5mbpRYEbiexTzROf7ZbDzvzCrNl14nyU=
+cloud.google.com/go/notebooks v1.2.0/go.mod h1:9+wtppMfVPUeJ8fIWPOq1UnATHISkGXGqTkxeieQ6UY=
+cloud.google.com/go/notebooks v1.3.0/go.mod h1:bFR5lj07DtCPC7YAAJ//vHskFBxA5JzYlH68kXVdk34=
+cloud.google.com/go/notebooks v1.4.0/go.mod h1:4QPMngcwmgb6uw7Po99B2xv5ufVoIQ7nOGDyL4P8AgA=
+cloud.google.com/go/notebooks v1.5.0/go.mod h1:q8mwhnP9aR8Hpfnrc5iN5IBhrXUy8S2vuYs+kBJ/gu0=
+cloud.google.com/go/optimization v1.1.0/go.mod h1:5po+wfvX5AQlPznyVEZjGJTMr4+CAkJf2XSTQOOl9l4=
+cloud.google.com/go/optimization v1.2.0/go.mod h1:Lr7SOHdRDENsh+WXVmQhQTrzdu9ybg0NecjHidBq6xs=
+cloud.google.com/go/orchestration v1.3.0/go.mod h1:Sj5tq/JpWiB//X/q3Ngwdl5K7B7Y0KZ7bfv0wL6fqVA=
+cloud.google.com/go/orchestration v1.4.0/go.mod h1:6W5NLFWs2TlniBphAViZEVhrXRSMgUGDfW7vrWKvsBk=
+cloud.google.com/go/orgpolicy v1.4.0/go.mod h1:xrSLIV4RePWmP9P3tBl8S93lTmlAxjm06NSm2UTmKvE=
+cloud.google.com/go/orgpolicy v1.5.0/go.mod h1:hZEc5q3wzwXJaKrsx5+Ewg0u1LxJ51nNFlext7Tanwc=
+cloud.google.com/go/osconfig v1.7.0/go.mod h1:oVHeCeZELfJP7XLxcBGTMBvRO+1nQ5tFG9VQTmYS2Fs=
+cloud.google.com/go/osconfig v1.8.0/go.mod h1:EQqZLu5w5XA7eKizepumcvWx+m8mJUhEwiPqWiZeEdg=
+cloud.google.com/go/osconfig v1.9.0/go.mod h1:Yx+IeIZJ3bdWmzbQU4fxNl8xsZ4amB+dygAwFPlvnNo=
+cloud.google.com/go/osconfig v1.10.0/go.mod h1:uMhCzqC5I8zfD9zDEAfvgVhDS8oIjySWh+l4WK6GnWw=
+cloud.google.com/go/oslogin v1.4.0/go.mod h1:YdgMXWRaElXz/lDk1Na6Fh5orF7gvmJ0FGLIs9LId4E=
+cloud.google.com/go/oslogin v1.5.0/go.mod h1:D260Qj11W2qx/HVF29zBg+0fd6YCSjSqLUkY/qEenQU=
+cloud.google.com/go/oslogin v1.6.0/go.mod h1:zOJ1O3+dTU8WPlGEkFSh7qeHPPSoxrcMbbK1Nm2iX70=
+cloud.google.com/go/oslogin v1.7.0/go.mod h1:e04SN0xO1UNJ1M5GP0vzVBFicIe4O53FOfcixIqTyXo=
+cloud.google.com/go/phishingprotection v0.5.0/go.mod h1:Y3HZknsK9bc9dMi+oE8Bim0lczMU6hrX0UpADuMefr0=
+cloud.google.com/go/phishingprotection v0.6.0/go.mod h1:9Y3LBLgy0kDTcYET8ZH3bq/7qni15yVUoAxiFxnlSUA=
+cloud.google.com/go/policytroubleshooter v1.3.0/go.mod h1:qy0+VwANja+kKrjlQuOzmlvscn4RNsAc0e15GGqfMxg=
+cloud.google.com/go/policytroubleshooter v1.4.0/go.mod h1:DZT4BcRw3QoO8ota9xw/LKtPa8lKeCByYeKTIf/vxdE=
+cloud.google.com/go/privatecatalog v0.5.0/go.mod h1:XgosMUvvPyxDjAVNDYxJ7wBW8//hLDDYmnsNcMGq1K0=
+cloud.google.com/go/privatecatalog v0.6.0/go.mod h1:i/fbkZR0hLN29eEWiiwue8Pb+GforiEIBnV9yrRUOKI=
+cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
+cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
+cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
+cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
+cloud.google.com/go/pubsub v1.26.0/go.mod h1:QgBH3U/jdJy/ftjPhTkyXNj543Tin1pRYcdcPRnFIRI=
+cloud.google.com/go/pubsub v1.27.1/go.mod h1:hQN39ymbV9geqBnfQq6Xf63yNhUAhv9CZhzp5O6qsW0=
+cloud.google.com/go/pubsublite v1.5.0/go.mod h1:xapqNQ1CuLfGi23Yda/9l4bBCKz/wC3KIJ5gKcxveZg=
+cloud.google.com/go/recaptchaenterprise v1.3.1/go.mod h1:OdD+q+y4XGeAlxRaMn1Y7/GveP6zmq76byL6tjPE7d4=
+cloud.google.com/go/recaptchaenterprise/v2 v2.1.0/go.mod h1:w9yVqajwroDNTfGuhmOjPDN//rZGySaf6PtFVcSCa7o=
+cloud.google.com/go/recaptchaenterprise/v2 v2.2.0/go.mod h1:/Zu5jisWGeERrd5HnlS3EUGb/D335f9k51B/FVil0jk=
+cloud.google.com/go/recaptchaenterprise/v2 v2.3.0/go.mod h1:O9LwGCjrhGHBQET5CA7dd5NwwNQUErSgEDit1DLNTdo=
+cloud.google.com/go/recaptchaenterprise/v2 v2.4.0/go.mod h1:Am3LHfOuBstrLrNCBrlI5sbwx9LBg3te2N6hGvHn2mE=
+cloud.google.com/go/recaptchaenterprise/v2 v2.5.0/go.mod h1:O8LzcHXN3rz0j+LBC91jrwI3R+1ZSZEWrfL7XHgNo9U=
+cloud.google.com/go/recommendationengine v0.5.0/go.mod h1:E5756pJcVFeVgaQv3WNpImkFP8a+RptV6dDLGPILjvg=
+cloud.google.com/go/recommendationengine v0.6.0/go.mod h1:08mq2umu9oIqc7tDy8sx+MNJdLG0fUi3vaSVbztHgJ4=
+cloud.google.com/go/recommender v1.5.0/go.mod h1:jdoeiBIVrJe9gQjwd759ecLJbxCDED4A6p+mqoqDvTg=
+cloud.google.com/go/recommender v1.6.0/go.mod h1:+yETpm25mcoiECKh9DEScGzIRyDKpZ0cEhWGo+8bo+c=
+cloud.google.com/go/recommender v1.7.0/go.mod h1:XLHs/W+T8olwlGOgfQenXBTbIseGclClff6lhFVe9Bs=
+cloud.google.com/go/recommender v1.8.0/go.mod h1:PkjXrTT05BFKwxaUxQmtIlrtj0kph108r02ZZQ5FE70=
+cloud.google.com/go/redis v1.7.0/go.mod h1:V3x5Jq1jzUcg+UNsRvdmsfuFnit1cfe3Z/PGyq/lm4Y=
+cloud.google.com/go/redis v1.8.0/go.mod h1:Fm2szCDavWzBk2cDKxrkmWBqoCiL1+Ctwq7EyqBCA/A=
+cloud.google.com/go/redis v1.9.0/go.mod h1:HMYQuajvb2D0LvMgZmLDZW8V5aOC/WxstZHiy4g8OiA=
+cloud.google.com/go/redis v1.10.0/go.mod h1:ThJf3mMBQtW18JzGgh41/Wld6vnDDc/F/F35UolRZPM=
+cloud.google.com/go/resourcemanager v1.3.0/go.mod h1:bAtrTjZQFJkiWTPDb1WBjzvc6/kifjj4QBYuKCCoqKA=
+cloud.google.com/go/resourcemanager v1.4.0/go.mod h1:MwxuzkumyTX7/a3n37gmsT3py7LIXwrShilPh3P1tR0=
+cloud.google.com/go/resourcesettings v1.3.0/go.mod h1:lzew8VfESA5DQ8gdlHwMrqZs1S9V87v3oCnKCWoOuQU=
+cloud.google.com/go/resourcesettings v1.4.0/go.mod h1:ldiH9IJpcrlC3VSuCGvjR5of/ezRrOxFtpJoJo5SmXg=
+cloud.google.com/go/retail v1.8.0/go.mod h1:QblKS8waDmNUhghY2TI9O3JLlFk8jybHeV4BF19FrE4=
+cloud.google.com/go/retail v1.9.0/go.mod h1:g6jb6mKuCS1QKnH/dpu7isX253absFl6iE92nHwlBUY=
+cloud.google.com/go/retail v1.10.0/go.mod h1:2gDk9HsL4HMS4oZwz6daui2/jmKvqShXKQuB2RZ+cCc=
+cloud.google.com/go/retail v1.11.0/go.mod h1:MBLk1NaWPmh6iVFSz9MeKG/Psyd7TAgm6y/9L2B4x9Y=
+cloud.google.com/go/run v0.2.0/go.mod h1:CNtKsTA1sDcnqqIFR3Pb5Tq0usWxJJvsWOCPldRU3Do=
+cloud.google.com/go/run v0.3.0/go.mod h1:TuyY1+taHxTjrD0ZFk2iAR+xyOXEA0ztb7U3UNA0zBo=
+cloud.google.com/go/scheduler v1.4.0/go.mod h1:drcJBmxF3aqZJRhmkHQ9b3uSSpQoltBPGPxGAWROx6s=
+cloud.google.com/go/scheduler v1.5.0/go.mod h1:ri073ym49NW3AfT6DZi21vLZrG07GXr5p3H1KxN5QlI=
+cloud.google.com/go/scheduler v1.6.0/go.mod h1:SgeKVM7MIwPn3BqtcBntpLyrIJftQISRrYB5ZtT+KOk=
+cloud.google.com/go/scheduler v1.7.0/go.mod h1:jyCiBqWW956uBjjPMMuX09n3x37mtyPJegEWKxRsn44=
+cloud.google.com/go/secretmanager v1.6.0/go.mod h1:awVa/OXF6IiyaU1wQ34inzQNc4ISIDIrId8qE5QGgKA=
+cloud.google.com/go/secretmanager v1.8.0/go.mod h1:hnVgi/bN5MYHd3Gt0SPuTPPp5ENina1/LxM+2W9U9J4=
+cloud.google.com/go/secretmanager v1.9.0/go.mod h1:b71qH2l1yHmWQHt9LC80akm86mX8AL6X1MA01dW8ht4=
+cloud.google.com/go/security v1.5.0/go.mod h1:lgxGdyOKKjHL4YG3/YwIL2zLqMFCKs0UbQwgyZmfJl4=
+cloud.google.com/go/security v1.7.0/go.mod h1:mZklORHl6Bg7CNnnjLH//0UlAlaXqiG7Lb9PsPXLfD0=
+cloud.google.com/go/security v1.8.0/go.mod h1:hAQOwgmaHhztFhiQ41CjDODdWP0+AE1B3sX4OFlq+GU=
+cloud.google.com/go/security v1.9.0/go.mod h1:6Ta1bO8LXI89nZnmnsZGp9lVoVWXqsVbIq/t9dzI+2Q=
+cloud.google.com/go/security v1.10.0/go.mod h1:QtOMZByJVlibUT2h9afNDWRZ1G96gVywH8T5GUSb9IA=
+cloud.google.com/go/securitycenter v1.13.0/go.mod h1:cv5qNAqjY84FCN6Y9z28WlkKXyWsgLO832YiWwkCWcU=
+cloud.google.com/go/securitycenter v1.14.0/go.mod h1:gZLAhtyKv85n52XYWt6RmeBdydyxfPeTrpToDPw4Auc=
+cloud.google.com/go/securitycenter v1.15.0/go.mod h1:PeKJ0t8MoFmmXLXWm41JidyzI3PJjd8sXWaVqg43WWk=
+cloud.google.com/go/securitycenter v1.16.0/go.mod h1:Q9GMaLQFUD+5ZTabrbujNWLtSLZIZF7SAR0wWECrjdk=
+cloud.google.com/go/servicecontrol v1.4.0/go.mod h1:o0hUSJ1TXJAmi/7fLJAedOovnujSEvjKCAFNXPQ1RaU=
+cloud.google.com/go/servicecontrol v1.5.0/go.mod h1:qM0CnXHhyqKVuiZnGKrIurvVImCs8gmqWsDoqe9sU1s=
+cloud.google.com/go/servicedirectory v1.4.0/go.mod h1:gH1MUaZCgtP7qQiI+F+A+OpeKF/HQWgtAddhTbhL2bs=
+cloud.google.com/go/servicedirectory v1.5.0/go.mod h1:QMKFL0NUySbpZJ1UZs3oFAmdvVxhhxB6eJ/Vlp73dfg=
+cloud.google.com/go/servicedirectory v1.6.0/go.mod h1:pUlbnWsLH9c13yGkxCmfumWEPjsRs1RlmJ4pqiNjVL4=
+cloud.google.com/go/servicedirectory v1.7.0/go.mod h1:5p/U5oyvgYGYejufvxhgwjL8UVXjkuw7q5XcG10wx1U=
+cloud.google.com/go/servicemanagement v1.4.0/go.mod h1:d8t8MDbezI7Z2R1O/wu8oTggo3BI2GKYbdG4y/SJTco=
+cloud.google.com/go/servicemanagement v1.5.0/go.mod h1:XGaCRe57kfqu4+lRxaFEAuqmjzF0r+gWHjWqKqBvKFo=
+cloud.google.com/go/serviceusage v1.3.0/go.mod h1:Hya1cozXM4SeSKTAgGXgj97GlqUvF5JaoXacR1JTP/E=
+cloud.google.com/go/serviceusage v1.4.0/go.mod h1:SB4yxXSaYVuUBYUml6qklyONXNLt83U0Rb+CXyhjEeU=
+cloud.google.com/go/shell v1.3.0/go.mod h1:VZ9HmRjZBsjLGXusm7K5Q5lzzByZmJHf1d0IWHEN5X4=
+cloud.google.com/go/shell v1.4.0/go.mod h1:HDxPzZf3GkDdhExzD/gs8Grqk+dmYcEjGShZgYa9URw=
+cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos=
+cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM=
+cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ=
+cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0=
+cloud.google.com/go/speech v1.9.0/go.mod h1:xQ0jTcmnRFFM2RfX/U+rk6FQNUF6DQlydUSyoooSpco=
+cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
+cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
+cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
+cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
+cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
+cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
+cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y=
+cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc=
+cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s=
+cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w=
+cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I=
+cloud.google.com/go/talent v1.1.0/go.mod h1:Vl4pt9jiHKvOgF9KoZo6Kob9oV4lwd/ZD5Cto54zDRw=
+cloud.google.com/go/talent v1.2.0/go.mod h1:MoNF9bhFQbiJ6eFD3uSsg0uBALw4n4gaCaEjBw9zo8g=
+cloud.google.com/go/talent v1.3.0/go.mod h1:CmcxwJ/PKfRgd1pBjQgU6W3YBwiewmUzQYH5HHmSCmM=
+cloud.google.com/go/talent v1.4.0/go.mod h1:ezFtAgVuRf8jRsvyE6EwmbTK5LKciD4KVnHuDEFmOOA=
+cloud.google.com/go/texttospeech v1.4.0/go.mod h1:FX8HQHA6sEpJ7rCMSfXuzBcysDAuWusNNNvN9FELDd8=
+cloud.google.com/go/texttospeech v1.5.0/go.mod h1:oKPLhR4n4ZdQqWKURdwxMy0uiTS1xU161C8W57Wkea4=
+cloud.google.com/go/tpu v1.3.0/go.mod h1:aJIManG0o20tfDQlRIej44FcwGGl/cD0oiRyMKG19IQ=
+cloud.google.com/go/tpu v1.4.0/go.mod h1:mjZaX8p0VBgllCzF6wcU2ovUXN9TONFLd7iz227X2Xg=
+cloud.google.com/go/trace v1.3.0/go.mod h1:FFUE83d9Ca57C+K8rDl/Ih8LwOzWIV1krKgxg6N0G28=
+cloud.google.com/go/trace v1.4.0/go.mod h1:UG0v8UBqzusp+z63o7FK74SdFE+AXpCLdFb1rshXG+Y=
+cloud.google.com/go/translate v1.3.0/go.mod h1:gzMUwRjvOqj5i69y/LYLd8RrNQk+hOmIXTi9+nb3Djs=
+cloud.google.com/go/translate v1.4.0/go.mod h1:06Dn/ppvLD6WvA5Rhdp029IX2Mi3Mn7fpMRLPvXT5Wg=
+cloud.google.com/go/video v1.8.0/go.mod h1:sTzKFc0bUSByE8Yoh8X0mn8bMymItVGPfTuUBUyRgxk=
+cloud.google.com/go/video v1.9.0/go.mod h1:0RhNKFRF5v92f8dQt0yhaHrEuH95m068JYOvLZYnJSw=
+cloud.google.com/go/videointelligence v1.6.0/go.mod h1:w0DIDlVRKtwPCn/C4iwZIJdvC69yInhW0cfi+p546uU=
+cloud.google.com/go/videointelligence v1.7.0/go.mod h1:k8pI/1wAhjznARtVT9U1llUaFNPh7muw8QyOUpavru4=
+cloud.google.com/go/videointelligence v1.8.0/go.mod h1:dIcCn4gVDdS7yte/w+koiXn5dWVplOZkE+xwG9FgK+M=
+cloud.google.com/go/videointelligence v1.9.0/go.mod h1:29lVRMPDYHikk3v8EdPSaL8Ku+eMzDljjuvRs105XoU=
+cloud.google.com/go/vision v1.2.0/go.mod h1:SmNwgObm5DpFBme2xpyOyasvBc1aPdjvMk2bBk0tKD0=
+cloud.google.com/go/vision/v2 v2.2.0/go.mod h1:uCdV4PpN1S0jyCyq8sIM42v2Y6zOLkZs+4R9LrGYwFo=
+cloud.google.com/go/vision/v2 v2.3.0/go.mod h1:UO61abBx9QRMFkNBbf1D8B1LXdS2cGiiCRx0vSpZoUo=
+cloud.google.com/go/vision/v2 v2.4.0/go.mod h1:VtI579ll9RpVTrdKdkMzckdnwMyX2JILb+MhPqRbPsY=
+cloud.google.com/go/vision/v2 v2.5.0/go.mod h1:MmaezXOOE+IWa+cS7OhRRLK2cNv1ZL98zhqFFZaaH2E=
+cloud.google.com/go/vmmigration v1.2.0/go.mod h1:IRf0o7myyWFSmVR1ItrBSFLFD/rJkfDCUTO4vLlJvsE=
+cloud.google.com/go/vmmigration v1.3.0/go.mod h1:oGJ6ZgGPQOFdjHuocGcLqX4lc98YQ7Ygq8YQwHh9A7g=
+cloud.google.com/go/vmwareengine v0.1.0/go.mod h1:RsdNEf/8UDvKllXhMz5J40XxDrNJNN4sagiox+OI208=
+cloud.google.com/go/vpcaccess v1.4.0/go.mod h1:aQHVbTWDYUR1EbTApSVvMq1EnT57ppDmQzZ3imqIk4w=
+cloud.google.com/go/vpcaccess v1.5.0/go.mod h1:drmg4HLk9NkZpGfCmZ3Tz0Bwnm2+DKqViEpeEpOq0m8=
+cloud.google.com/go/webrisk v1.4.0/go.mod h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xXZmFiHmGE=
+cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg=
+cloud.google.com/go/webrisk v1.6.0/go.mod h1:65sW9V9rOosnc9ZY7A7jsy1zoHS5W9IAXv6dGqhMQMc=
+cloud.google.com/go/webrisk v1.7.0/go.mod h1:mVMHgEYH0r337nmt1JyLthzMr6YxwN1aAIEc2fTcq7A=
+cloud.google.com/go/websecurityscanner v1.3.0/go.mod h1:uImdKm2wyeXQevQJXeh8Uun/Ym1VqworNDlBXQevGMo=
+cloud.google.com/go/websecurityscanner v1.4.0/go.mod h1:ebit/Fp0a+FWu5j4JOmJEV8S8CzdTkAS77oDsiSqYWQ=
+cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0=
+cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M=
+cloud.google.com/go/workflows v1.8.0/go.mod h1:ysGhmEajwZxGn1OhGOGKsTXc5PyxOc0vfKf5Af+to4M=
+cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
@@ -42,21 +426,31 @@ github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl
github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
+github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
+github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
@@ -81,20 +475,17 @@ github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4s
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
+github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo=
+github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
-github.com/feast-dev/gopy v0.4.1-0.20220714205859-591500e3215f h1:tTjEpVu4H/ZGh4wo3WETbA9dutNM6bXMXvyZbb9GLCs=
-github.com/feast-dev/gopy v0.4.1-0.20220714205859-591500e3215f/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA=
-github.com/feast-dev/gopy v0.4.1-0.20220714211038-aa312c13fd79 h1:oFj6GDGR8E4S5GeMyLBvaKtvMZxj3hHqsB5Xndjxjz8=
-github.com/feast-dev/gopy v0.4.1-0.20220714211038-aa312c13fd79/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA=
-github.com/feast-dev/gopy v0.4.1-0.20220714211330-67b016d61ed4 h1:UfzPdqqAfrt8f+jDIY61lbzqFZYsX2BhVyNcCbdpE+U=
-github.com/feast-dev/gopy v0.4.1-0.20220714211330-67b016d61ed4/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA=
-github.com/feast-dev/gopy v0.4.1-0.20220714211711-252048177d85 h1:BKmfqWiDbxvviB6vemPbbNjF+ywRsBMCdk1QvrcGgkc=
-github.com/feast-dev/gopy v0.4.1-0.20220714211711-252048177d85/go.mod h1:tlA/KcD7rM8B+NQJR4SASwiinfKY0aiMFanHszR8BZA=
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
@@ -109,6 +500,7 @@ github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3
github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
@@ -131,14 +523,26 @@ github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7a
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4=
github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
+github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
+github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
@@ -148,15 +552,13 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/gonuts/commander v0.1.0 h1:EcDTiVw9oAVORFjQOEOuHQqcl6OXMyTgELocTq6zJ0I=
-github.com/gonuts/commander v0.1.0/go.mod h1:qkb5mSlcWodYgo7vs8ulLnXhfinhZsZcm6+H/z1JjgY=
-github.com/gonuts/flag v0.1.0 h1:fqMv/MZ+oNGu0i9gp0/IQ/ZaPIDoAZBOBaJoV7viCWM=
-github.com/gonuts/flag v0.1.0/go.mod h1:ZTmTGtrSPejTo/SRNhCqwLTmiAgyBdCkLYhHrAoBdz4=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/flatbuffers v2.0.5+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
@@ -166,17 +568,58 @@ github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5a
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
+github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
+github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
+github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8=
+github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8=
+github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg=
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
+github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
+github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0=
+github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM=
+github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM=
+github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM=
+github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c=
+github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo=
+github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY=
+github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8=
+github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4=
+github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
@@ -186,6 +629,8 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE=
github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -208,6 +653,9 @@ github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2p
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg=
+github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
+github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
@@ -215,6 +663,8 @@ github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
+github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
@@ -231,6 +681,7 @@ github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa02
github.com/klauspost/cpuid/v2 v2.0.12 h1:p9dKCg8i4gmOxtv35DvrYoWqYzQrvEVdjQ762Y0OqZE=
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
@@ -240,6 +691,8 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
+github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
+github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
@@ -314,6 +767,8 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
+github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
+github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
@@ -326,6 +781,7 @@ github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA=
@@ -355,6 +811,9 @@ github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJ
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
+github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
+github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
+github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw=
@@ -363,18 +822,31 @@ github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5J
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
+github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
github.com/zeebo/xxh3 v1.0.1/go.mod h1:8VHV24/3AZLn3b6Mlp/KuC33LWH687Wq6EnziEB+rsA=
@@ -384,13 +856,21 @@ go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
+go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
+go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
+go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
+go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/otel v0.20.0/go.mod h1:Y3ugLH2oa81t5QO+Lty+zXf8zC9L26ax4Nzoxm/dooo=
go.opentelemetry.io/otel/metric v0.20.0/go.mod h1:598I5tYlH1vzBjn+BTuhzTCSb/9debfNp6R3s7Pr1eU=
go.opentelemetry.io/otel/oteltest v0.20.0/go.mod h1:L7bgKf9ZB7qCwT9Up7i9/pn0PWIa9FqQ2IQ8LoxiGnw=
go.opentelemetry.io/otel/sdk v0.20.0/go.mod h1:g/IcepuwNsoiX5Byy2nNV0ySUF1em498m7hBWC279Yc=
go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
+go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
@@ -407,17 +887,29 @@ golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnf
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
+golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3/go.mod h1:NOZ3BPKG0ec/BKJQgnvsSFpcKLM5xXVWnvZS97DWHgE=
+golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
+golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
+golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20211216164055-b2b84827b756/go.mod h1:b9TAUYHmRtqA6klRHApnXMnj+OyLce4yF5cZCUbk2ps=
golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 h1:K3x+yU+fbot38x5bQbU2QqUAVyYLEktdNH2GxZLnM3U=
golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE=
@@ -434,22 +926,35 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
+golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mobile v0.0.0-20201217150744-e6ae53a27f4f/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
-golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
-golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
+golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -462,31 +967,99 @@ golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
+golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
-golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3 h1:EN5+DfgmRMvRUrMGERW2gQl3Vc+Z7ZMnI/xdEpPSf0c=
-golang.org/x/net v0.0.0-20220407224826-aac1ed45d8e3/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
+golang.org/x/net v0.0.0-20221012135044-0b7e1fb9d458/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
+golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
+golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
+golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
+golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
+golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
+golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE=
+golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE=
+golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg=
+golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg=
+golang.org/x/oauth2 v0.0.0-20221006150949-b44042a4b9c1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg=
+golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg=
+golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -500,38 +1073,104 @@ golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0=
+golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
+golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -543,26 +1182,74 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
+golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
+golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
+golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
-golang.org/x/tools v0.1.11-0.20220413170336-afc6aad76eb1 h1:Z3vE1sGlC7qiyFJkkDcZms8Y3+yV8+W7HmDSmuf71tM=
-golang.org/x/tools v0.1.11-0.20220413170336-afc6aad76eb1/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
+golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/tools v0.3.0 h1:SrNbZl6ECOS1qFzgTdQfWXZM9XBkiA6tkFrH9YSTPHM=
+golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U=
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
+golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
+golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
gonum.org/v1/gonum v0.9.3 h1:DnoIG+QAMaF5NvxnGe/oKsgKcAc6PcUyl8q0VetfQ8s=
@@ -571,37 +1258,224 @@ gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6d
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
+google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
+google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
+google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
+google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
+google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
+google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
+google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
+google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
+google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
+google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo=
+google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4=
+google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw=
+google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU=
+google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k=
+google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=
+google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=
+google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI=
+google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I=
+google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo=
+google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g=
+google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA=
+google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8=
+google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs=
+google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
+google.golang.org/api v0.77.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
+google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw=
+google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg=
+google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o=
+google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g=
+google.golang.org/api v0.90.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw=
+google.golang.org/api v0.93.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw=
+google.golang.org/api v0.95.0/go.mod h1:eADj+UBuxkh5zlrSntJghuNeg8HwQ1w5lTKkuqaETEI=
+google.golang.org/api v0.96.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s=
+google.golang.org/api v0.97.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s=
+google.golang.org/api v0.98.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s=
+google.golang.org/api v0.99.0/go.mod h1:1YOf74vkVndF7pG6hIHuINsM7eWwpVTAfNMNiL91A08=
+google.golang.org/api v0.100.0/go.mod h1:ZE3Z2+ZOr87Rx7dqFsdRQkRBk36kDtp/h+QpHbB7a70=
+google.golang.org/api v0.102.0/go.mod h1:3VFl6/fzoA+qNuS1N1/VfXY4LjoXN/wzeIp7TweWwGo=
+google.golang.org/api v0.103.0/go.mod h1:hGtW6nK1AC+d9si/UBhw8Xli+QMOf6xyNAyJw4qU9w0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
+google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s=
+google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
+google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
+google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
+google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
+google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
+google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
+google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=
+google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
+google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
+google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
+google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
+google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w=
+google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
-google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac h1:qSNTkEN+L2mvWcLgJOR+8bdHX9rN/IdU3A1Ghpfb1Rg=
+google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E=
+google.golang.org/genproto v0.0.0-20220329172620-7be39ac1afc7/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
+google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
+google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
+google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
+google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA=
+google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA=
+google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA=
+google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA=
+google.golang.org/genproto v0.0.0-20220628213854-d9e0b6570c03/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA=
+google.golang.org/genproto v0.0.0-20220722212130-b98a9ff5e252/go.mod h1:GkXuJDJ6aQ7lnJcRF+SJVgFdQhypqgl3LB1C9vabdRE=
+google.golang.org/genproto v0.0.0-20220801145646-83ce21fca29f/go.mod h1:iHe1svFLAZg9VWz891+QbRMwUv9O/1Ww+/mngYeThbc=
+google.golang.org/genproto v0.0.0-20220815135757-37a418bb8959/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk=
+google.golang.org/genproto v0.0.0-20220817144833-d7fd3f11b9b1/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk=
+google.golang.org/genproto v0.0.0-20220822174746-9e6da59bd2fc/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk=
+google.golang.org/genproto v0.0.0-20220829144015-23454907ede3/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk=
+google.golang.org/genproto v0.0.0-20220829175752-36a9c930ecbf/go.mod h1:dbqgFATTzChvnt+ujMdZwITVAJHFtfyN1qUhDqEiIlk=
+google.golang.org/genproto v0.0.0-20220913154956-18f8339a66a5/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo=
+google.golang.org/genproto v0.0.0-20220914142337-ca0e39ece12f/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo=
+google.golang.org/genproto v0.0.0-20220915135415-7fd63a7952de/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo=
+google.golang.org/genproto v0.0.0-20220916172020-2692e8806bfa/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo=
+google.golang.org/genproto v0.0.0-20220919141832-68c03719ef51/go.mod h1:0Nb8Qy+Sk5eDzHnzlStwW3itdNaWoZA5XeSG+R3JHSo=
+google.golang.org/genproto v0.0.0-20220920201722-2b89144ce006/go.mod h1:ht8XFiar2npT/g4vkk7O0WYS1sHOHbdujxbEp7CJWbw=
+google.golang.org/genproto v0.0.0-20220926165614-551eb538f295/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI=
+google.golang.org/genproto v0.0.0-20220926220553-6981cbe3cfce/go.mod h1:woMGP53BroOrRY3xTxlbr8Y3eB/nzAvvFM83q7kG2OI=
+google.golang.org/genproto v0.0.0-20221010155953-15ba04fc1c0e/go.mod h1:3526vdqwhZAwq4wsRUaVG555sVgsNmIjRtO7t/JH29U=
+google.golang.org/genproto v0.0.0-20221014173430-6e2ab493f96b/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM=
+google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM=
+google.golang.org/genproto v0.0.0-20221024153911-1573dae28c9c/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s=
+google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s=
+google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c/go.mod h1:CGI5F/G+E5bKwmfYo09AXuVN4dD894kIKUFmVbP2/Fo=
+google.golang.org/genproto v0.0.0-20221114212237-e4508ebdbee1/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg=
+google.golang.org/genproto v0.0.0-20221117204609-8f9c96812029/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg=
+google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg=
+google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg=
+google.golang.org/genproto v0.0.0-20221202195650-67e5cbc046fd/go.mod h1:cTsE614GARnxrLsqKREzmNYJACSWWpAWdNMwnD7c2BE=
+google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f h1:BWUVssLB0HVOSY78gIdvk1dTVYtT1y8SBWtPYuTJ/6w=
+google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
+google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
+google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
+google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
+google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
+google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
+google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
+google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
+google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
+google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
-google.golang.org/grpc v1.47.0 h1:9n77onPX5F3qfFCqjy9dhn8PbNQsIKeVU04J9G7umt8=
+google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
+google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
+google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
+google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
+google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
+google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
+google.golang.org/grpc v1.51.0/go.mod h1:wgNDFcnuBGmxLKI/qn4T+m5BtEBYXJPvibbUPsAIPww=
+google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc=
+google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw=
+google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -610,12 +1484,14 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
+google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
+google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -639,11 +1515,20 @@ gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
+honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las=
+rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
+rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
+rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU=
diff --git a/infra/charts/feast-feature-server/Chart.yaml b/infra/charts/feast-feature-server/Chart.yaml
index 73836936ee..4c148b0cf3 100644
--- a/infra/charts/feast-feature-server/Chart.yaml
+++ b/infra/charts/feast-feature-server/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
name: feast-feature-server
description: Feast Feature Server in Go or Python
type: application
-version: 0.28.0
+version: 0.34.1
keywords:
- machine learning
- big data
diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md
index f8120626c0..e4e477021d 100644
--- a/infra/charts/feast-feature-server/README.md
+++ b/infra/charts/feast-feature-server/README.md
@@ -1,6 +1,6 @@
# Feast Python / Go Feature Server Helm Charts
-Current chart version is `0.28.0`
+Current chart version is `0.34.1`
## Installation
@@ -30,7 +30,7 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/python-helm-d
| fullnameOverride | string | `""` | |
| image.pullPolicy | string | `"IfNotPresent"` | |
| image.repository | string | `"feastdev/feature-server"` | Docker image for Feature Server repository |
-| image.tag | string | `"0.28.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) |
+| image.tag | string | `"0.34.1"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) |
| imagePullSecrets | list | `[]` | |
| livenessProbe.initialDelaySeconds | int | `30` | |
| livenessProbe.periodSeconds | int | `30` | |
diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml
index 86070f9eba..190017de8f 100644
--- a/infra/charts/feast-feature-server/values.yaml
+++ b/infra/charts/feast-feature-server/values.yaml
@@ -9,7 +9,7 @@ image:
  repository: feastdev/feature-server
  pullPolicy: IfNotPresent
  # image.tag -- The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms)
-  tag: 0.28.0
+  tag: 0.34.1
imagePullSecrets: []
nameOverride: ""
diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml
index 6a79760140..994710b4ac 100644
--- a/infra/charts/feast/Chart.yaml
+++ b/infra/charts/feast/Chart.yaml
@@ -1,7 +1,7 @@
apiVersion: v1
description: Feature store for machine learning
name: feast
-version: 0.28.0
+version: 0.34.1
keywords:
- machine learning
- big data
diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md
index 516a66b2da..5efb872637 100644
--- a/infra/charts/feast/README.md
+++ b/infra/charts/feast/README.md
@@ -8,7 +8,7 @@ This repo contains Helm charts for Feast Java components that are being installe
## Chart: Feast
-Feature store for machine learning Current chart version is `0.28.0`
+Feature store for machine learning Current chart version is `0.34.1`
## Installation
@@ -65,8 +65,8 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/java-demo) fo
| Repository | Name | Version |
|------------|------|---------|
| https://charts.helm.sh/stable | redis | 10.5.6 |
-| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.28.0 |
-| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.28.0 |
+| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.34.1 |
+| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.34.1 |
## Values
diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml
index 9b3a944342..467ad0867d 100644
--- a/infra/charts/feast/charts/feature-server/Chart.yaml
+++ b/infra/charts/feast/charts/feature-server/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v1
description: "Feast Feature Server: Online feature serving service for Feast"
name: feature-server
-version: 0.28.0
-appVersion: v0.28.0
+version: 0.34.1
+appVersion: v0.34.1
keywords:
- machine learning
- big data
diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md
index 65133d1d24..2079403ad8 100644
--- a/infra/charts/feast/charts/feature-server/README.md
+++ b/infra/charts/feast/charts/feature-server/README.md
@@ -1,6 +1,6 @@
# feature-server
-![Version: 0.28.0](https://img.shields.io/badge/Version-0.28.0-informational?style=flat-square) ![AppVersion: v0.28.0](https://img.shields.io/badge/AppVersion-v0.28.0-informational?style=flat-square)
+![Version: 0.34.1](https://img.shields.io/badge/Version-0.34.1-informational?style=flat-square) ![AppVersion: v0.34.1](https://img.shields.io/badge/AppVersion-v0.34.1-informational?style=flat-square)
Feast Feature Server: Online feature serving service for Feast
@@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast
| envOverrides | object | `{}` | Extra environment variables to set |
| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy |
| image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository |
-| image.tag | string | `"0.28.0"` | Image tag |
+| image.tag | string | `"0.34.1"` | Image tag |
| ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress |
| ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth |
| ingress.grpc.class | string | `"nginx"` | Which ingress controller to use |
diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml
index 866e95ca95..a5bceca7f9 100644
--- a/infra/charts/feast/charts/feature-server/values.yaml
+++ b/infra/charts/feast/charts/feature-server/values.yaml
@@ -5,7 +5,7 @@ image:
  # image.repository -- Docker image for Feature Server repository
  repository: feastdev/feature-server-java
  # image.tag -- Image tag
-  tag: 0.28.0
+  tag: 0.34.1
  # image.pullPolicy -- Image pull policy
  pullPolicy: IfNotPresent
diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml
index 651ccb9e84..b685c44e4d 100644
--- a/infra/charts/feast/charts/transformation-service/Chart.yaml
+++ b/infra/charts/feast/charts/transformation-service/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v1
description: "Transformation service: to compute on-demand features"
name: transformation-service
-version: 0.28.0
-appVersion: v0.28.0
+version: 0.34.1
+appVersion: v0.34.1
keywords:
- machine learning
- big data
diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md
index f7f2a51522..3457f5243e 100644
--- a/infra/charts/feast/charts/transformation-service/README.md
+++ b/infra/charts/feast/charts/transformation-service/README.md
@@ -1,6 +1,6 @@
# transformation-service
-![Version: 0.28.0](https://img.shields.io/badge/Version-0.28.0-informational?style=flat-square) ![AppVersion: v0.28.0](https://img.shields.io/badge/AppVersion-v0.28.0-informational?style=flat-square)
+![Version: 0.34.1](https://img.shields.io/badge/Version-0.34.1-informational?style=flat-square) ![AppVersion: v0.34.1](https://img.shields.io/badge/AppVersion-v0.34.1-informational?style=flat-square)
Transformation service: to compute on-demand features
@@ -13,7 +13,7 @@ Transformation service: to compute on-demand features
| envOverrides | object | `{}` | Extra environment variables to set |
| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy |
| image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository |
-| image.tag | string | `"0.28.0"` | Image tag |
+| image.tag | string | `"0.34.1"` | Image tag |
| nodeSelector | object | `{}` | Node labels for pod assignment |
| podLabels | object | `{}` | Labels to be added to Feast Serving pods |
| replicaCount | int | `1` | Number of pods that will be created |
diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml
index f8c4114af5..adea17f670 100644
--- a/infra/charts/feast/charts/transformation-service/values.yaml
+++ b/infra/charts/feast/charts/transformation-service/values.yaml
@@ -5,7 +5,7 @@ image:
  # image.repository -- Docker image for Transformation Server repository
  repository: feastdev/feature-transformation-server
  # image.tag -- Image tag
-  tag: 0.28.0
+  tag: 0.34.1
  # image.pullPolicy -- Image pull policy
  pullPolicy: IfNotPresent
diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml
index b1ee0ae945..f87f808c6f 100644
--- a/infra/charts/feast/requirements.yaml
+++ b/infra/charts/feast/requirements.yaml
@@ -1,12 +1,12 @@
 dependencies:
 - name: feature-server
   alias: feature-server
-  version: 0.28.0
+  version: 0.34.1
   condition: feature-server.enabled
   repository: https://feast-helm-charts.storage.googleapis.com
 - name: transformation-service
   alias: transformation-service
-  version: 0.28.0
+  version: 0.34.1
   condition: transformation-service.enabled
   repository: https://feast-helm-charts.storage.googleapis.com
 - name: redis
diff --git a/infra/scripts/cleanup_ci.py b/infra/scripts/cleanup_ci.py
new file mode 100644
index 0000000000..262adf1e3e
--- /dev/null
+++ b/infra/scripts/cleanup_ci.py
@@ -0,0 +1,55 @@
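+"""Clean up cloud resources (DynamoDB tables and a Bigtable instance) left behind by Feast CI runs."""
+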
+import boto3
+from tqdm import tqdm
+from google.cloud import bigtable
+from google.cloud.bigtable import enums
+
+
+def cleanup_dynamo_ci():
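+    """Delete every DynamoDB table created by CI integration-test runs."""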
+    db = boto3.resource("dynamodb")
+
+    num_to_delete = 0
+    all_tables = db.tables.all()
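+    # First pass counts matching tables so the tqdm progress bar has an accurate total.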
+    for table in all_tables:
+        if "integration_test" in table.name:
+            num_to_delete += 1
+    with tqdm(total=num_to_delete) as progress:
+        for table in all_tables:
+            if "integration_test" in table.name:
+                table.delete()
+                progress.update()
+    print(f"Deleted {num_to_delete} CI DynamoDB tables")
+
+
+def cleanup_bigtable_ci():
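+    """Delete the Bigtable CI instance if it exists, then recreate it empty."""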
+    client = bigtable.Client(project="kf-feast", admin=True)
+    instance = client.instance("feast-integration-tests")
+    if instance.exists():
+        instance.delete()
+        print("Deleted Bigtable CI instance")
+
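+    # Recreate a fresh single-node SSD instance so later CI runs start from a clean slate.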
+    location_id = "us-central1-f"
+    serve_nodes = 1
+    storage_type = enums.StorageType.SSD
+    cluster = instance.cluster(
+        "feast-integration-tests-c1",
+        location_id=location_id,
+        serve_nodes=serve_nodes,
+        default_storage_type=storage_type,
+    )
+    instance.create(clusters=[cluster])
+    print("Created new Bigtable CI instance")
+
+
+def main() -> None:
+    cleanup_dynamo_ci()
+    cleanup_bigtable_ci()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/infra/scripts/cleanup_dynamo_ci.py b/infra/scripts/cleanup_dynamo_ci.py
deleted file mode 100644
index 2dda36cc5a..0000000000
--- a/infra/scripts/cleanup_dynamo_ci.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import boto3
-from tqdm import tqdm
-
-
-def main() -> None:
-    db = boto3.resource("dynamodb")
-
-    num_to_delete = 0
-    all_tables = db.tables.all()
-    for table in all_tables:
-        if "integration_test" in table.name:
-            num_to_delete += 1
-    with tqdm(total=num_to_delete) as progress:
-        for table in all_tables:
-            if "integration_test" in table.name:
-                table.delete()
-                progress.update()
-    print(f"Deleted {num_to_delete} CI DynamoDB tables")
-
-
-if __name__ == "__main__":
-    main()
diff --git a/infra/templates/README.md.jinja2 b/infra/templates/README.md.jinja2
index 47779d4eb7..1cce08ecfa 100644
--- a/infra/templates/README.md.jinja2
+++ b/infra/templates/README.md.jinja2
@@ -25,7 +25,7 @@ Feast allows ML platform teams to:
-* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training.
+* **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensures that future feature values do not leak to models during training.
* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another.
-Please see our [documentation](https://docs.feast.dev/) for more information about the project, or sign up for an [email newsletter](https://feast.dev/).
+Please see our [documentation](https://docs.feast.dev/) for more information about the project.
## 📐 Architecture
![](docs/assets/feast_marchitecture.png)
@@ -149,7 +149,6 @@ Please refer to the official documentation at [Documentation](https://docs.feast
* [Tutorials](https://docs.feast.dev/tutorials/tutorials-overview)
* [Running Feast with Snowflake/GCP/AWS](https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws)
* [Change Log](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md)
- * [Slack (#Feast)](https://slack.feast.dev/)
## 👋 Contributing
Feast is a community project and is still under active development. Please have a look at our contributing and development guides if you want to contribute to the project:
diff --git a/java/CONTRIBUTING.md b/java/CONTRIBUTING.md
index 7ccfe108c0..65d43d0de5 100644
--- a/java/CONTRIBUTING.md
+++ b/java/CONTRIBUTING.md
@@ -39,7 +39,7 @@ The primary entrypoint into the Feast Serving server is `ServingGuiceApplication
Common Environment Setup for all feast-java Feast components:
Ensure following development tools are installed:
-- Java SE Development Kit 11
+- Java SE Development Kit 11 (you may need to do `export JAVA_HOME=$(/usr/libexec/java_home -v 11)`)
- Maven 3.6
- `make`
diff --git a/java/datatypes/pom.xml b/java/datatypes/pom.xml
index a5c82d4c45..b0ba049c57 100644
--- a/java/datatypes/pom.xml
+++ b/java/datatypes/pom.xml
@@ -123,5 +123,4 @@
      <artifactId>javax.annotation-api</artifactId>
-
diff --git a/java/pom.xml b/java/pom.xml
index a7ef145e2a..f026cd06c7 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -35,7 +35,7 @@
- 0.28.0
+ 0.34.1
https://github.com/feast-dev/feast
UTF-8
@@ -142,6 +142,21 @@
+
+    <profile>
+      <id>apple-m1</id>
+      <properties>
+        <os.detected.classifier>osx-x86_64</os.detected.classifier>
+      </properties>
+      <activation>
+        <os>
+          <arch>aarch64</arch>
+          <family>mac</family>
+        </os>
+      </activation>
+    </profile>
+
+
diff --git a/java/serving/README.md b/java/serving/README.md
index dc23702d0f..5cf5ec6535 100644
--- a/java/serving/README.md
+++ b/java/serving/README.md
@@ -2,9 +2,14 @@
### Overview
This guide is targeted at developers looking to contribute to Feast Serving:
-- [Building and running Feast Serving locally](#building-and-running-feast-serving-locally)
-- [Unit / Integration Tests](#unit-/-integration-tests)
-- [Developing against Feast Helm charts](#developing-against-feast-helm-charts)
+- [Getting Started Guide for Feast Serving Developers](#getting-started-guide-for-feast-serving-developers)
+ - [Overview](#overview)
+ - [Building and running Feast Serving locally:](#building-and-running-feast-serving-locally)
+ - [Pre-requisites](#pre-requisites)
+ - [Steps](#steps)
+ - [Debugging Feast Serving](#debugging-feast-serving)
+ - [Unit / Integration Tests](#unit--integration-tests)
+ - [Developing against Feast Helm charts](#developing-against-feast-helm-charts)
### Building and running Feast Serving locally:
@@ -25,12 +30,14 @@ From the Feast GitHub root, run:
feast:
project: feast_demo
registry: /Users/[your username]/GitHub/feast-demo/feature_repo/data/registry.db
+ entityKeySerializationVersion: 2
```
2. An example of if you're using Redis with a remote registry:
```yaml
feast:
project: feast_java_demo
registry: gs://[YOUR BUCKET]/demo-repo/registry.db
+ entityKeySerializationVersion: 2
activeStore: online
stores:
- name: online
diff --git a/java/serving/pom.xml b/java/serving/pom.xml
index 8f0cf407e9..79f942d491 100644
--- a/java/serving/pom.xml
+++ b/java/serving/pom.xml
@@ -243,6 +243,12 @@
          <version>1.12.261</version>
+        <dependency>
+          <groupId>com.amazonaws</groupId>
+          <artifactId>aws-java-sdk-sts</artifactId>
+          <version>1.12.476</version>
+        </dependency>
+
          <groupId>com.adobe.testing</groupId>
          <artifactId>s3mock-testcontainers</artifactId>
diff --git a/protos/feast/core/DataSource.proto b/protos/feast/core/DataSource.proto
index 3992d2c247..d129086f45 100644
--- a/protos/feast/core/DataSource.proto
+++ b/protos/feast/core/DataSource.proto
@@ -197,6 +197,8 @@ message DataSource {
// Defines options for DataSource that sources features from a Snowflake Query
message SnowflakeOptions {
+ reserved 5; // Snowflake warehouse name
+
// Snowflake table name
string table = 1;
@@ -209,9 +211,6 @@ message DataSource {
// Snowflake schema name
string database = 4;
-
- // Snowflake warehouse name
- string warehouse = 5;
}
// Defines options for DataSource that sources features from a spark table/query
diff --git a/protos/feast/serving/GrpcServer.proto b/protos/feast/serving/GrpcServer.proto
new file mode 100644
index 0000000000..cd0274c5c7
--- /dev/null
+++ b/protos/feast/serving/GrpcServer.proto
@@ -0,0 +1,27 @@
+syntax = "proto3";
+
+message PushRequest {
+ map<string, string> features = 1;
+ string stream_feature_view = 2;
+ bool allow_registry_cache = 3;
+ string to = 4;
+}
+
+message PushResponse {
+ bool status = 1;
+}
+
+message WriteToOnlineStoreRequest {
+ map<string, string> features = 1;
+ string feature_view_name = 2;
+ bool allow_registry_cache = 3;
+}
+
+message WriteToOnlineStoreResponse {
+ bool status = 1;
+}
+
+service GrpcFeatureServer {
+ rpc Push (PushRequest) returns (PushResponse) {};
+ rpc WriteToOnlineStore (WriteToOnlineStoreRequest) returns (WriteToOnlineStoreResponse);
+}
\ No newline at end of file
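
A quick orientation for this new service: below is a minimal Python client sketch. The module paths match the generated stubs imported elsewhere in this change; the address, feature values, and push source name are illustrative assumptions, not part of this patch.

```python
import grpc

# Stubs generated from GrpcServer.proto by the Feast proto build.
from feast.protos.feast.serving.GrpcServer_pb2 import PushRequest
from feast.protos.feast.serving.GrpcServer_pb2_grpc import GrpcFeatureServerStub

channel = grpc.insecure_channel("localhost:50051")
stub = GrpcFeatureServerStub(channel)

# Push one row of string-encoded feature values to the online store.
# The stream_feature_view field carries the push source name.
response = stub.Push(
    PushRequest(
        features={"driver_id": "1001", "conv_rate": "0.85"},
        stream_feature_view="driver_stats_push_source",  # illustrative
        allow_registry_cache=True,
        to="online",
    )
)
print(response.status)
```
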
diff --git a/protos/feast/serving/ServingService.proto b/protos/feast/serving/ServingService.proto
index a940b72502..0eef3cd883 100644
--- a/protos/feast/serving/ServingService.proto
+++ b/protos/feast/serving/ServingService.proto
@@ -119,7 +119,7 @@ enum FieldStatus {
PRESENT = 1;
// Values could be found for entity key and age is within max age, but
- // this field value is assigned a value on ingestion into feast.
+ // this field value is not assigned a value on ingestion into feast.
NULL_VALUE = 2;
// Entity key did not return any values as they do not exist in Feast.
diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst
new file mode 100644
index 0000000000..bf3ed9d7d6
--- /dev/null
+++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst
@@ -0,0 +1,21 @@
+feast.infra.online\_stores.contrib.hazelcast\_online\_store package
+===================================================================
+
+Submodules
+----------
+
+feast.infra.online\_stores.contrib.hazelcast\_online\_store.hazelcast\_online\_store module
+-------------------------------------------------------------------------------------------
+
+.. automodule:: feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: feast.infra.online_stores.contrib.hazelcast_online_store
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst
new file mode 100644
index 0000000000..b3de7479a0
--- /dev/null
+++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst
@@ -0,0 +1,21 @@
+feast.infra.online\_stores.contrib.rockset\_online\_store package
+=================================================================
+
+Submodules
+----------
+
+feast.infra.online\_stores.contrib.rockset\_online\_store.rockset module
+------------------------------------------------------------------------
+
+.. automodule:: feast.infra.online_stores.contrib.rockset_online_store.rockset
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: feast.infra.online_stores.contrib.rockset_online_store
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst
index 6b175f4584..b6c8a404ee 100644
--- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst
+++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst
@@ -8,8 +8,10 @@ Subpackages
:maxdepth: 4
feast.infra.online_stores.contrib.cassandra_online_store
+ feast.infra.online_stores.contrib.hazelcast_online_store
feast.infra.online_stores.contrib.hbase_online_store
feast.infra.online_stores.contrib.mysql_online_store
+ feast.infra.online_stores.contrib.rockset_online_store
Submodules
----------
@@ -22,6 +24,14 @@ feast.infra.online\_stores.contrib.cassandra\_repo\_configuration module
:undoc-members:
:show-inheritance:
+feast.infra.online\_stores.contrib.hazelcast\_repo\_configuration module
+------------------------------------------------------------------------
+
+.. automodule:: feast.infra.online_stores.contrib.hazelcast_repo_configuration
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
feast.infra.online\_stores.contrib.hbase\_repo\_configuration module
--------------------------------------------------------------------
@@ -54,6 +64,14 @@ feast.infra.online\_stores.contrib.postgres\_repo\_configuration module
:undoc-members:
:show-inheritance:
+feast.infra.online\_stores.contrib.rockset\_repo\_configuration module
+----------------------------------------------------------------------
+
+.. automodule:: feast.infra.online_stores.contrib.rockset_repo_configuration
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
Module contents
---------------
diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py
index 4419a425bf..5043aab651 100644
--- a/sdk/python/feast/cli.py
+++ b/sdk/python/feast/cli.py
@@ -25,6 +25,7 @@
import yaml
from colorama import Fore, Style
from dateutil import parser
+from importlib_metadata import version as importlib_version
from pygments import formatters, highlight, lexers
from feast import utils
@@ -35,11 +36,13 @@
from feast.errors import FeastObjectNotFoundException, FeastProviderLoginError
from feast.feature_store import FeatureStore
from feast.feature_view import FeatureView
+from feast.infra.contrib.grpc_server import get_grpc_server
from feast.on_demand_feature_view import OnDemandFeatureView
from feast.repo_config import load_repo_config
from feast.repo_operations import (
apply_total,
cli_check_repo,
+ create_feature_store,
generate_project_name,
init_repo,
plan,
@@ -172,10 +175,7 @@ def ui(
"""
Shows the Feast UI over the current directory
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
# Pass in the registry_dump method to get around a circular dependency
store.serve_ui(
host=host,
@@ -192,10 +192,7 @@ def endpoint(ctx: click.Context):
"""
Display feature server endpoints
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
endpoint = store.get_feature_server_endpoint()
if endpoint is not None:
_logger.info(
@@ -220,10 +217,7 @@ def data_source_describe(ctx: click.Context, name: str):
"""
Describe a data source
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
try:
data_source = store.get_data_source(name)
@@ -244,10 +238,7 @@ def data_source_list(ctx: click.Context):
"""
List all data sources
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
table = []
for datasource in store.list_data_sources():
table.append([datasource.name, datasource.__class__])
@@ -272,10 +263,7 @@ def entity_describe(ctx: click.Context, name: str):
"""
Describe an entity
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
try:
entity = store.get_entity(name)
@@ -296,10 +284,7 @@ def entity_list(ctx: click.Context):
"""
List all entities
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
table = []
for entity in store.list_entities():
table.append([entity.name, entity.description, entity.value_type])
@@ -324,10 +309,7 @@ def feature_service_describe(ctx: click.Context, name: str):
"""
Describe a feature service
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
try:
feature_service = store.get_feature_service(name)
@@ -350,10 +332,7 @@ def feature_service_list(ctx: click.Context):
"""
List all feature services
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
feature_services = []
for feature_service in store.list_feature_services():
feature_names = []
@@ -383,10 +362,7 @@ def feature_view_describe(ctx: click.Context, name: str):
"""
Describe a feature view
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
try:
feature_view = store.get_feature_view(name)
@@ -407,11 +383,7 @@ def feature_view_list(ctx: click.Context):
"""
List all feature views
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
-
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
table = []
for feature_view in [
*store.list_feature_views(),
@@ -452,10 +424,7 @@ def on_demand_feature_view_describe(ctx: click.Context, name: str):
"""
[Experimental] Describe an on demand feature view
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
try:
on_demand_feature_view = store.get_on_demand_feature_view(name)
@@ -478,10 +447,7 @@ def on_demand_feature_view_list(ctx: click.Context):
"""
[Experimental] List all on demand feature views
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
table = []
for on_demand_feature_view in store.list_on_demand_feature_views():
table.append([on_demand_feature_view.name])
@@ -583,10 +549,8 @@ def materialize_command(
START_TS and END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
+
store.materialize(
feature_views=None if not views else views,
start_date=utils.make_tzaware(parser.parse(start_ts)),
@@ -612,10 +576,7 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List
END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
store.materialize_incremental(
feature_views=None if not views else views,
end_date=utils.make_tzaware(datetime.fromisoformat(end_ts)),
@@ -631,7 +592,18 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List
"--template",
"-t",
type=click.Choice(
- ["local", "gcp", "aws", "snowflake", "spark", "postgres", "hbase", "cassandra"],
+ [
+ "local",
+ "gcp",
+ "aws",
+ "snowflake",
+ "spark",
+ "postgres",
+ "hbase",
+ "cassandra",
+ "rockset",
+ "hazelcast",
+ ],
case_sensitive=False,
),
help="Specify a template for the created project",
@@ -686,6 +658,21 @@ def init_command(project_directory, minimal: bool, template: str):
show_default=True,
help="Disable logging served features",
)
+@click.option(
+ "--workers",
+ "-w",
+ type=click.INT,
+ default=1,
+ show_default=True,
+ help="Number of worker",
+)
+@click.option(
+ "--keep-alive-timeout",
+ type=click.INT,
+ default=5,
+ show_default=True,
+ help="Timeout for keep alive",
+)
@click.pass_context
def serve_command(
ctx: click.Context,
@@ -694,27 +681,50 @@ def serve_command(
type_: str,
no_access_log: bool,
no_feature_log: bool,
+ workers: int,
+ keep_alive_timeout: int,
):
"""Start a feature server locally on a given port."""
- repo = ctx.obj["CHDIR"]
-
- # If we received a base64 encoded version of feature_store.yaml, use that
- config_base64 = os.getenv(FEATURE_STORE_YAML_ENV_NAME)
- if config_base64:
- print("Received base64 encoded feature_store.yaml")
- config_bytes = base64.b64decode(config_base64)
- # Create a new unique directory for writing feature_store.yaml
- repo_path = Path(tempfile.mkdtemp())
- with open(repo_path / "feature_store.yaml", "wb") as f:
- f.write(config_bytes)
- store = FeatureStore(repo_path=str(repo_path.resolve()))
- else:
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
- store.serve(host, port, type_, no_access_log, no_feature_log)
+ store.serve(
+ host=host,
+ port=port,
+ type_=type_,
+ no_access_log=no_access_log,
+ no_feature_log=no_feature_log,
+ workers=workers,
+ keep_alive_timeout=keep_alive_timeout,
+ )
+@cli.command("listen")
+@click.option(
+ "--address",
+ "-a",
+ type=click.STRING,
+ default="localhost:50051",
+ show_default=True,
+ help="Address of the gRPC server",
+)
+@click.option(
+ "--max_workers",
+ "-w",
+ type=click.INT,
+ default=10,
+ show_default=False,
+ help="The maximum number of threads that can be used to execute the gRPC calls",
+)
+@click.pass_context
+def listen_command(
+ ctx: click.Context,
+ address: str,
+ max_workers: int,
+):
+ """Start a gRPC feature server to ingest streaming features on given address"""
+ store = create_feature_store(ctx)
+ server = get_grpc_server(address, store, max_workers)
+ server.start()
+ server.wait_for_termination()
@cli.command("serve_transformations")
@click.option(
@@ -727,10 +737,7 @@ def serve_command(
@click.pass_context
def serve_transformations_command(ctx: click.Context, port: int):
"""[Experimental] Start a feature consumption server locally on a given port."""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
store.serve_transformations(port)
@@ -767,10 +774,7 @@ def validate(
START_TS and END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'
"""
- repo = ctx.obj["CHDIR"]
- fs_yaml_file = ctx.obj["FS_YAML_FILE"]
- cli_check_repo(repo, fs_yaml_file)
- store = FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+ store = create_feature_store(ctx)
feature_service = store.get_feature_service(name=feature_service)
reference = store.get_validation_reference(reference)
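
All of the commands above now delegate to a single `create_feature_store(ctx)` helper imported from `feast.repo_operations`. Below is a sketch of what that helper does, reconstructed from the CLI logic it replaces; the exact implementation may differ.

```python
import base64
import os
import tempfile
from pathlib import Path

import click

from feast.constants import FEATURE_STORE_YAML_ENV_NAME
from feast.feature_store import FeatureStore
from feast.repo_operations import cli_check_repo


def create_feature_store(ctx: click.Context) -> FeatureStore:
    repo = ctx.obj["CHDIR"]

    # If a base64-encoded feature_store.yaml was provided via the environment,
    # write it into a fresh temporary repo and load the store from there.
    config_base64 = os.getenv(FEATURE_STORE_YAML_ENV_NAME)
    if config_base64:
        config_bytes = base64.b64decode(config_base64)
        repo_path = Path(tempfile.mkdtemp())
        with open(repo_path / "feature_store.yaml", "wb") as f:
            f.write(config_bytes)
        return FeatureStore(repo_path=str(repo_path.resolve()))

    # Otherwise validate the repo on disk and load the store from it.
    fs_yaml_file = ctx.obj["FS_YAML_FILE"]
    cli_check_repo(repo, fs_yaml_file)
    return FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
```
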
diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py
index 9e10d40202..dc64bad172 100644
--- a/sdk/python/feast/errors.py
+++ b/sdk/python/feast/errors.py
@@ -68,14 +68,6 @@ def __init__(self, name, project=None):
super().__init__(f"Feature view {name} does not exist")
-class InvalidSparkSessionException(Exception):
- def __init__(self, spark_arg):
- super().__init__(
- f" Need Spark Session to convert results to spark data frame\
- recieved {type(spark_arg)} instead. "
- )
-
-
class OnDemandFeatureViewNotFoundException(FeastObjectNotFoundException):
def __init__(self, name, project=None):
if project:
@@ -156,6 +148,11 @@ def __init__(self):
super().__init__("Provider is not set, but is required")
+class FeastRegistryNotSetError(Exception):
+ def __init__(self):
+ super().__init__("Registry is not set, but is required")
+
+
class FeastFeatureServerTypeSetError(Exception):
def __init__(self, feature_server_type: str):
super().__init__(
@@ -170,6 +167,13 @@ def __init__(self, feature_server_type: str):
)
+class FeastRegistryTypeInvalidError(Exception):
+ def __init__(self, registry_type: str):
+ super().__init__(
+ f"Feature server type was set to {registry_type}, but this type is invalid"
+ )
+
+
class FeastModuleImportError(Exception):
def __init__(self, module_name: str, class_name: str):
super().__init__(
diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py
index 38109f5d8c..7cccf26455 100644
--- a/sdk/python/feast/feast_object.py
+++ b/sdk/python/feast/feast_object.py
@@ -12,6 +12,7 @@
from .protos.feast.core.FeatureView_pb2 import FeatureViewSpec
from .protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec
from .protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec
+from .protos.feast.core.StreamFeatureView_pb2 import StreamFeatureViewSpec
from .protos.feast.core.ValidationProfile_pb2 import (
ValidationReference as ValidationReferenceProto,
)
@@ -36,6 +37,7 @@
FeatureViewSpec,
OnDemandFeatureViewSpec,
RequestFeatureViewSpec,
+ StreamFeatureViewSpec,
EntitySpecV2,
FeatureServiceSpec,
DataSourceProto,
diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py
index 7b0cfc4bed..3abca1d6e8 100644
--- a/sdk/python/feast/feature_server.py
+++ b/sdk/python/feast/feature_server.py
@@ -2,8 +2,8 @@
import traceback
import warnings
+import gunicorn.app.base
import pandas as pd
-import uvicorn
from fastapi import FastAPI, HTTPException, Request, Response, status
from fastapi.logger import logger
from fastapi.params import Depends
@@ -137,8 +137,35 @@ def health():
return app
+class FeastServeApplication(gunicorn.app.base.BaseApplication):
+ def __init__(self, store: "feast.FeatureStore", **options):
+ self._app = get_app(store=store)
+ self._options = options
+ super().__init__()
+
+ def load_config(self):
+ for key, value in self._options.items():
+ if key.lower() in self.cfg.settings and value is not None:
+ self.cfg.set(key.lower(), value)
+
+ self.cfg.set("worker_class", "uvicorn.workers.UvicornWorker")
+
+ def load(self):
+ return self._app
+
+
def start_server(
- store: "feast.FeatureStore", host: str, port: int, no_access_log: bool
+ store: "feast.FeatureStore",
+ host: str,
+ port: int,
+ no_access_log: bool,
+ workers: int,
+ keep_alive_timeout: int,
):
- app = get_app(store)
- uvicorn.run(app, host=host, port=port, access_log=(not no_access_log))
+ FeastServeApplication(
+ store=store,
+ bind=f"{host}:{port}",
+ accesslog=None if no_access_log else "-",
+ workers=workers,
+ keepalive=keep_alive_timeout,
+ ).run()
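
The server now runs under gunicorn (with uvicorn workers) instead of a single-process `uvicorn.run`, which is what makes the new `--workers` and `--keep-alive-timeout` flags meaningful. Below is a standalone illustration of the same `BaseApplication` pattern; the names and toy app are illustrative, not Feast code.

```python
import gunicorn.app.base
from fastapi import FastAPI

app = FastAPI()


@app.get("/ping")
def ping():
    return {"ok": True}


class StandaloneApplication(gunicorn.app.base.BaseApplication):
    def __init__(self, application, **options):
        self._application = application
        self._options = options
        super().__init__()

    def load_config(self):
        # Copy recognized options (bind, workers, keepalive, ...) into gunicorn's config.
        for key, value in self._options.items():
            if key.lower() in self.cfg.settings and value is not None:
                self.cfg.set(key.lower(), value)
        # Serve the ASGI app through uvicorn's gunicorn worker class.
        self.cfg.set("worker_class", "uvicorn.workers.UvicornWorker")

    def load(self):
        return self._application


if __name__ == "__main__":
    StandaloneApplication(app, bind="127.0.0.1:6566", workers=2, keepalive=5).run()
```
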
diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py
index b54015c8a7..cbd322233b 100644
--- a/sdk/python/feast/feature_store.py
+++ b/sdk/python/feast/feature_store.py
@@ -20,6 +20,7 @@
from functools import lru_cache
from pathlib import Path
from typing import (
+ TYPE_CHECKING,
Any,
Callable,
Dict,
@@ -102,6 +103,8 @@
warnings.simplefilter("once", DeprecationWarning)
+if TYPE_CHECKING:
+ from feast.embedded_go.online_features_service import EmbeddedOnlineFeatureServer
# decorator needed to use lru_cache with list arguments in `FeatureStore._get_feature_views_to_use()`
def featuresListToTuple(function):
@@ -703,7 +706,7 @@ def plan(
Args:
desired_repo_contents: The desired repo state.
-x
+
Raises:
ValueError: The 'objects' parameter could not be parsed properly.
@@ -2086,7 +2089,7 @@ def _read_from_online_store_many(
)
processed_read_rows_list = [
- self._process_read_rows(read_rows, requested_features)
+ self._process_read_rows(read_rows, requested_features)
for read_rows, requested_features in zip(read_rows_list, requested_features_list)
]
return processed_read_rows_list
@@ -2273,7 +2276,6 @@ def _get_feature_views_to_use(
allow_cache=False,
hide_dummy_entity: bool = True,
) -> Tuple[List[FeatureView], List[RequestFeatureView], List[OnDemandFeatureView]]:
-
fvs = {
fv.name: fv
for fv in [
@@ -2344,6 +2346,8 @@ def serve(
type_: str,
no_access_log: bool,
no_feature_log: bool,
+ workers: int,
+ keep_alive_timeout: int,
) -> None:
"""Start the feature consumption server locally on a given port."""
type_ = type_.lower()
@@ -2352,7 +2356,14 @@ def serve(
f"Python server only supports 'http'. Got '{type_}' instead."
)
# Start the python server
- feature_server.start_server(self, host, port, no_access_log)
+ feature_server.start_server(
+ self,
+ host=host,
+ port=port,
+ no_access_log=no_access_log,
+ workers=workers,
+ keep_alive_timeout=keep_alive_timeout,
+ )
@log_exceptions_and_usage
def get_feature_server_endpoint(self) -> Optional[str]:
diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py
index fa98ea29f8..6f73b2b98a 100644
--- a/sdk/python/feast/feature_view.py
+++ b/sdk/python/feast/feature_view.py
@@ -403,7 +403,7 @@ def from_proto(cls, feature_view_proto: FeatureViewProto):
feature_view.stream_source = stream_source
# This avoids the deprecation warning.
- feature_view.entities = feature_view_proto.spec.entities
+ feature_view.entities = list(feature_view_proto.spec.entities)
# Instead of passing in a schema, we set the features and entity columns.
feature_view.features = [
diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py
index 245bb24f52..b07bddfeac 100644
--- a/sdk/python/feast/field.py
+++ b/sdk/python/feast/field.py
@@ -109,6 +109,7 @@ def from_proto(cls, field_proto: FieldProto):
name=field_proto.name,
dtype=from_value_type(value_type=value_type),
tags=dict(field_proto.tags),
+ description=field_proto.description,
)
@classmethod
diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py
index d416763bd3..28a170172c 100644
--- a/sdk/python/feast/inference.py
+++ b/sdk/python/feast/inference.py
@@ -1,5 +1,5 @@
import re
-from typing import List, Set, Union
+from typing import List, Optional, Set, Union
from feast.data_source import DataSource, PushSource, RequestSource
from feast.entity import Entity
@@ -119,7 +119,10 @@ def update_feature_views_with_inferred_features_and_entities(
for fv in fvs:
join_keys = set(
- [entity_name_to_join_key_map[entity_name] for entity_name in fv.entities]
+ [
+ entity_name_to_join_key_map.get(entity_name)
+ for entity_name in fv.entities
+ ]
)
# Fields whose names match a join key are considered to be entity columns; all
@@ -137,7 +140,10 @@ def update_feature_views_with_inferred_features_and_entities(
# Respect the `value_type` attribute of the entity, if it is specified.
for entity_name in fv.entities:
- entity = entity_name_to_entity_map[entity_name]
+ entity = entity_name_to_entity_map.get(entity_name)
+ # Skip entities that do not exist in the registry (the entityless feature view case).
+ if entity is None:
+ continue
if (
entity.join_key
not in [entity_column.name for entity_column in fv.entity_columns]
@@ -181,7 +187,7 @@ def update_feature_views_with_inferred_features_and_entities(
def _infer_features_and_entities(
fv: FeatureView,
- join_keys: Set[str],
+ join_keys: Set[Optional[str]],
run_inference_for_features,
config,
) -> None:
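
The move from indexing to `.get()` is what tolerates entityless feature views: their placeholder entity has no entry in the registry maps, so a direct lookup would raise `KeyError`. A toy illustration follows; the `__dummy` name is an assumption about Feast's internal placeholder entity.

```python
entity_name_to_join_key_map = {"driver": "driver_id"}

# An entityless feature view references only the internal placeholder entity,
# which is absent from the map; .get() yields None instead of raising.
fv_entities = ["__dummy"]
join_keys = {entity_name_to_join_key_map.get(name) for name in fv_entities}
assert join_keys == {None}
```
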
diff --git a/sdk/python/feast/infra/contrib/grpc_server.py b/sdk/python/feast/infra/contrib/grpc_server.py
new file mode 100644
index 0000000000..2017f1095b
--- /dev/null
+++ b/sdk/python/feast/infra/contrib/grpc_server.py
@@ -0,0 +1,95 @@
+import logging
+from concurrent import futures
+
+import grpc
+import pandas as pd
+from grpc_health.v1 import health, health_pb2_grpc
+
+from feast.data_source import PushMode
+from feast.errors import PushSourceNotFoundException
+from feast.feature_store import FeatureStore
+from feast.protos.feast.serving.GrpcServer_pb2 import (
+ PushResponse,
+ WriteToOnlineStoreResponse,
+)
+from feast.protos.feast.serving.GrpcServer_pb2_grpc import (
+ GrpcFeatureServerServicer,
+ add_GrpcFeatureServerServicer_to_server,
+)
+
+
+def parse(features):
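+ # Build a single-row DataFrame from the request's scalar feature map.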
+ df = {}
+ for i in features.keys():
+ df[i] = [features.get(i)]
+ return pd.DataFrame.from_dict(df)
+
+
+class GrpcFeatureServer(GrpcFeatureServerServicer):
+ fs: FeatureStore
+
+ def __init__(self, fs: FeatureStore):
+ self.fs = fs
+ super().__init__()
+
+ def Push(self, request, context):
+ try:
+ df = parse(request.features)
+ if request.to == "offline":
+ to = PushMode.OFFLINE
+ elif request.to == "online":
+ to = PushMode.ONLINE
+ elif request.to == "online_and_offline":
+ to = PushMode.ONLINE_AND_OFFLINE
+ else:
+ raise ValueError(
+ f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', "
+ f"'online_and_offline']."
+ )
+ self.fs.push(
+ push_source_name=request.stream_feature_view,  # proto field carrying the push source name
+ df=df,
+ allow_registry_cache=request.allow_registry_cache,
+ to=to,
+ )
+ except PushSourceNotFoundException as e:
+ logging.exception(str(e))
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
+ context.set_details(str(e))
+ return PushResponse(status=False)
+ except Exception as e:
+ logging.exception(str(e))
+ context.set_code(grpc.StatusCode.INTERNAL)
+ context.set_details(str(e))
+ return PushResponse(status=False)
+ return PushResponse(status=True)
+
+ def WriteToOnlineStore(self, request, context):
+ logging.warning(
+ "write_to_online_store is deprecated. Please consider using Push instead"
+ )
+ try:
+ df = parse(request.features)
+ self.fs.write_to_online_store(
+ feature_view_name=request.feature_view_name,
+ df=df,
+ allow_registry_cache=request.allow_registry_cache,
+ )
+ except Exception as e:
+ logging.exception(str(e))
+ context.set_code(grpc.StatusCode.INTERNAL)
+ context.set_details(str(e))
+ return PushResponse(status=False)
+ return WriteToOnlineStoreResponse(status=True)
+
+
+def get_grpc_server(address: str, fs: FeatureStore, max_workers: int):
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers=max_workers))
+ add_GrpcFeatureServerServicer_to_server(GrpcFeatureServer(fs), server)
+ health_servicer = health.HealthServicer(
+ experimental_non_blocking=True,
+ experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=max_workers),
+ )
+ health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
+ server.add_insecure_port(address)
+ return server
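
Wired together by the new `listen` command, server startup amounts to roughly the following; the repo path and address here are illustrative.

```python
from feast import FeatureStore
from feast.infra.contrib.grpc_server import get_grpc_server

store = FeatureStore(repo_path=".")  # illustrative repo path
server = get_grpc_server("localhost:50051", store, max_workers=10)
server.start()
server.wait_for_termination()
```
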
diff --git a/sdk/python/feast/infra/contrib/spark_kafka_processor.py b/sdk/python/feast/infra/contrib/spark_kafka_processor.py
index 32d91b2010..ea55d89988 100644
--- a/sdk/python/feast/infra/contrib/spark_kafka_processor.py
+++ b/sdk/python/feast/infra/contrib/spark_kafka_processor.py
@@ -131,7 +131,7 @@ def batch_write(row: DataFrame, batch_id: int):
# Also add a 'created' column.
rows = (
rows.sort_values(
- by=self.join_keys + [self.sfv.timestamp_field], ascending=True
+ by=[*self.join_keys, self.sfv.timestamp_field], ascending=False
)
.groupby(self.join_keys)
.nth(0)
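
The fix flips the sort to descending so that `.nth(0)` keeps the most recent row per join key rather than the oldest. The dedup idiom in isolation, on toy data:

```python
import pandas as pd

rows = pd.DataFrame(
    {
        "driver_id": [1, 1, 2],
        "event_timestamp": pd.to_datetime(["2023-01-01", "2023-01-02", "2023-01-01"]),
        "conv_rate": [0.1, 0.9, 0.5],
    }
)

# Sort newest-first, then take the first row of each group: the latest value
# per join key survives.
latest = (
    rows.sort_values(by=["driver_id", "event_timestamp"], ascending=False)
    .groupby(["driver_id"])
    .nth(0)
)
print(latest)
```
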
diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py
index 9a456376bf..b222128bbb 100644
--- a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py
+++ b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py
@@ -46,6 +46,21 @@ class BytewaxMaterializationEngineConfig(FeastConfigBaseModel):
These environment variables can be used to reference Kubernetes secrets.
"""
+ image_pull_secrets: List[dict] = []
+ """ (optional) The secrets to use when pulling the image to run for the materialization job """
+
+ resources: dict = {}
+ """ (optional) The resource requests and limits for the materialization containers """
+
+ service_account_name: StrictStr = ""
+ """ (optional) The service account name to use when running the job """
+
+ annotations: dict = {}
+ """ (optional) Annotations to apply to the job container. Useful for linking the service account to IAM roles, operational metadata, etc """
+
+ include_security_context_capabilities: bool = True
+ """ (optional) Include security context capabilities in the init and job container spec """
+
class BytewaxMaterializationEngine(BatchMaterializationEngine):
def __init__(
@@ -186,6 +201,9 @@ def _create_configuration_map(self, job_id, paths, feature_view, namespace):
"apiVersion": "v1",
"metadata": {
"name": f"feast-{job_id}",
+ "labels": {
+ "feast-bytewax-materializer": "configmap",
+ },
},
"data": {
"feature_store.yaml": feature_store_configuration,
@@ -235,12 +253,22 @@ def _create_job_definition(self, job_id, namespace, pods, env):
# Add any Feast configured environment variables
job_env.extend(env)
+ securityContextCapabilities = None
+ if self.batch_engine_config.include_security_context_capabilities:
+ securityContextCapabilities = {
+ "add": ["NET_BIND_SERVICE"],
+ "drop": ["ALL"],
+ }
+
job_definition = {
"apiVersion": "batch/v1",
"kind": "Job",
"metadata": {
"name": f"dataflow-{job_id}",
"namespace": namespace,
+ "labels": {
+ "feast-bytewax-materializer": "job",
+ },
},
"spec": {
"ttlSecondsAfterFinished": 3600,
@@ -248,9 +276,17 @@ def _create_job_definition(self, job_id, namespace, pods, env):
"parallelism": pods,
"completionMode": "Indexed",
"template": {
+ "metadata": {
+ "annotations": self.batch_engine_config.annotations,
+ "labels": {
+ "feast-bytewax-materializer": "pod",
+ },
+ },
"spec": {
"restartPolicy": "Never",
"subdomain": f"dataflow-{job_id}",
+ "imagePullSecrets": self.batch_engine_config.image_pull_secrets,
+ "serviceAccountName": self.batch_engine_config.service_account_name,
"initContainers": [
{
"env": [
@@ -265,10 +301,7 @@ def _create_job_definition(self, job_id, namespace, pods, env):
"resources": {},
"securityContext": {
"allowPrivilegeEscalation": False,
- "capabilities": {
- "add": ["NET_BIND_SERVICE"],
- "drop": ["ALL"],
- },
+ "capabilities": securityContextCapabilities,
"readOnlyRootFilesystem": True,
},
"terminationMessagePath": "/dev/termination-log",
@@ -300,13 +333,10 @@ def _create_job_definition(self, job_id, namespace, pods, env):
"protocol": "TCP",
}
],
- "resources": {},
+ "resources": self.batch_engine_config.resources,
"securityContext": {
"allowPrivilegeEscalation": False,
- "capabilities": {
- "add": ["NET_BIND_SERVICE"],
- "drop": ["ALL"],
- },
+ "capabilities": securityContextCapabilities,
"readOnlyRootFilesystem": False,
},
"terminationMessagePath": "/dev/termination-log",
@@ -334,7 +364,7 @@ def _create_job_definition(self, job_id, namespace, pods, env):
"name": f"feast-{job_id}",
},
],
- }
+ },
},
},
}
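
For reference, the new knobs might be exercised like this; a sketch assuming the engine's other fields (image, namespace, and so on) keep their defaults, with illustrative values throughout. In practice these land in the `batch_engine` section of `feature_store.yaml`.

```python
from feast.infra.materialization.contrib.bytewax.bytewax_materialization_engine import (
    BytewaxMaterializationEngineConfig,
)

config = BytewaxMaterializationEngineConfig(
    image_pull_secrets=[{"name": "regcred"}],  # illustrative secret name
    resources={"requests": {"cpu": "500m", "memory": "512Mi"}},
    service_account_name="feast-materializer",  # illustrative service account
    annotations={"eks.amazonaws.com/role-arn": "arn:aws:iam::123456789012:role/feast"},
    include_security_context_capabilities=False,
)
```
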
diff --git a/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py
index ffeea9039f..4819ee1af7 100644
--- a/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py
+++ b/sdk/python/feast/infra/materialization/contrib/spark/spark_materialization_engine.py
@@ -176,7 +176,7 @@ def _materialize_one(
created_timestamp_column=created_timestamp_column,
start_date=start_date,
end_date=end_date,
- )
+ ),
)
spark_serialized_artifacts = _SparkSerializedArtifacts.serialize(
diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py
index d8fc5f5611..36c42cd390 100644
--- a/sdk/python/feast/infra/materialization/snowflake_engine.py
+++ b/sdk/python/feast/infra/materialization/snowflake_engine.py
@@ -25,10 +25,10 @@
from feast.infra.online_stores.online_store import OnlineStore
from feast.infra.registry.base_registry import BaseRegistry
from feast.infra.utils.snowflake.snowflake_utils import (
+ GetSnowflakeConnection,
_run_snowflake_field_mapping,
assert_snowflake_feature_names,
execute_snowflake_statement,
- get_snowflake_conn,
get_snowflake_online_store_path,
package_snowpark_zip,
)
@@ -121,7 +121,7 @@ def update(
):
stage_context = f'"{self.repo_config.batch_engine.database}"."{self.repo_config.batch_engine.schema_}"'
stage_path = f'{stage_context}."feast_{project}"'
- with get_snowflake_conn(self.repo_config.batch_engine) as conn:
+ with GetSnowflakeConnection(self.repo_config.batch_engine) as conn:
query = f"SHOW STAGES IN {stage_context}"
cursor = execute_snowflake_statement(conn, query)
stage_list = pd.DataFrame(
@@ -132,6 +132,10 @@ def update(
# if the stage already exists,
# assumes that the materialization functions have been deployed
if f"feast_{project}" in stage_list["name"].tolist():
+ click.echo(
+ f"Materialization functions for {Style.BRIGHT + Fore.GREEN}{project}{Style.RESET_ALL} already detected."
+ )
+ click.echo()
return None
click.echo(
@@ -169,7 +173,7 @@ def teardown_infra(
):
stage_path = f'"{self.repo_config.batch_engine.database}"."{self.repo_config.batch_engine.schema_}"."feast_{project}"'
- with get_snowflake_conn(self.repo_config.batch_engine) as conn:
+ with GetSnowflakeConnection(self.repo_config.batch_engine) as conn:
query = f"DROP STAGE IF EXISTS {stage_path}"
execute_snowflake_statement(conn, query)
@@ -259,10 +263,11 @@ def _materialize_one(
# Let's check whether we can skip this query because the table hasn't changed
# since before the start date of this query
- with get_snowflake_conn(self.repo_config.offline_store) as conn:
+ with GetSnowflakeConnection(self.repo_config.offline_store) as conn:
query = f"""SELECT SYSTEM$LAST_CHANGE_COMMIT_TIME('{feature_view.batch_source.get_table_query_string()}') AS last_commit_change_time"""
last_commit_change_time = (
- conn.cursor().execute(query).fetchall()[0][0] / 1_000_000_000
+ execute_snowflake_statement(conn, query).fetchall()[0][0]
+ / 1_000_000_000
)
if last_commit_change_time < start_date.astimezone(tz=utc).timestamp():
return SnowflakeMaterializationJob(
@@ -271,32 +276,65 @@ def _materialize_one(
fv_latest_values_sql = offline_job.to_sql()
+ if feature_view.entity_columns:
+ join_keys = [entity.name for entity in feature_view.entity_columns]
+ unique_entities = '"' + '", "'.join(join_keys) + '"'
+
+ query = f"""
+ SELECT
+ COUNT(DISTINCT {unique_entities})
+ FROM
+ {feature_view.batch_source.get_table_query_string()}
+ """
+
+ with GetSnowflakeConnection(self.repo_config.offline_store) as conn:
+ entities_to_write = conn.cursor().execute(query).fetchall()[0][0]
+ else:
+ entities_to_write = (
+ 1 # entityless feature view has a placeholder entity
+ )
+
if feature_view.batch_source.field_mapping is not None:
fv_latest_mapped_values_sql = _run_snowflake_field_mapping(
fv_latest_values_sql, feature_view.batch_source.field_mapping
)
- fv_to_proto_sql = self.generate_snowflake_materialization_query(
- self.repo_config,
- fv_latest_mapped_values_sql,
- feature_view,
- project,
- )
+ features_full_list = feature_view.features
+ feature_batches = [
+ features_full_list[i : i + 100]
+ for i in range(0, len(features_full_list), 100)
+ ]
if self.repo_config.online_store.type == "snowflake.online":
- self.materialize_to_snowflake_online_store(
- self.repo_config,
- fv_to_proto_sql,
- feature_view,
- project,
- )
+ rows_to_write = entities_to_write * len(features_full_list)
else:
- self.materialize_to_external_online_store(
- self.repo_config,
- fv_to_proto_sql,
- feature_view,
- tqdm_builder,
- )
+ rows_to_write = entities_to_write * len(feature_batches)
+
+ with tqdm_builder(rows_to_write) as pbar:
+ for i, feature_batch in enumerate(feature_batches):
+ fv_to_proto_sql = self.generate_snowflake_materialization_query(
+ self.repo_config,
+ fv_latest_mapped_values_sql,
+ feature_view,
+ feature_batch,
+ project,
+ )
+
+ if self.repo_config.online_store.type == "snowflake.online":
+ self.materialize_to_snowflake_online_store(
+ self.repo_config,
+ fv_to_proto_sql,
+ feature_view,
+ project,
+ )
+ pbar.update(entities_to_write * len(feature_batch))
+ else:
+ self.materialize_to_external_online_store(
+ self.repo_config,
+ fv_to_proto_sql,
+ feature_view,
+ pbar,
+ )
return SnowflakeMaterializationJob(
job_id=job_id, status=MaterializationJobStatus.SUCCEEDED
@@ -311,6 +349,7 @@ def generate_snowflake_materialization_query(
repo_config: RepoConfig,
fv_latest_mapped_values_sql: str,
feature_view: Union[BatchFeatureView, FeatureView],
+ feature_batch: list,
project: str,
) -> str:
@@ -333,7 +372,7 @@ def generate_snowflake_materialization_query(
UDF serialization function.
"""
feature_sql_list = []
- for feature in feature_view.features:
+ for feature in feature_batch:
feature_value_type_name = feature.dtype.to_value_type().name
feature_sql = _convert_value_name_to_snowflake_udf(
@@ -428,12 +467,9 @@ def materialize_to_snowflake_online_store(
)
"""
- with get_snowflake_conn(repo_config.batch_engine) as conn:
- query_id = execute_snowflake_statement(conn, query).sfqid
+ with GetSnowflakeConnection(repo_config.batch_engine) as conn:
+ execute_snowflake_statement(conn, query)
- click.echo(
- f"Snowflake Query ID: {Style.BRIGHT + Fore.GREEN}{query_id}{Style.RESET_ALL}"
- )
return None
def materialize_to_external_online_store(
@@ -441,19 +477,15 @@ def materialize_to_external_online_store(
repo_config: RepoConfig,
materialization_sql: str,
feature_view: Union[StreamFeatureView, FeatureView],
- tqdm_builder: Callable[[int], tqdm],
+ pbar: tqdm,
) -> None:
feature_names = [feature.name for feature in feature_view.features]
- with get_snowflake_conn(repo_config.batch_engine) as conn:
+ with GetSnowflakeConnection(repo_config.batch_engine) as conn:
query = materialization_sql
cursor = execute_snowflake_statement(conn, query)
for i, df in enumerate(cursor.fetch_pandas_batches()):
- click.echo(
- f"Snowflake: Processing Materialization ResultSet Batch #{i+1}"
- )
-
entity_keys = (
df["entity_key"].apply(EntityKeyProto.FromString).to_numpy()
)
@@ -489,11 +521,10 @@ def materialize_to_external_online_store(
)
)
- with tqdm_builder(len(rows_to_write)) as pbar:
- self.online_store.online_write_batch(
- repo_config,
- feature_view,
- rows_to_write,
- lambda x: pbar.update(x),
- )
+ self.online_store.online_write_batch(
+ repo_config,
+ feature_view,
+ rows_to_write,
+ lambda x: pbar.update(x),
+ )
return None
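
Materialization now emits one query per batch of at most 100 features, presumably to bound the size of the generated SQL, and progress accounting is scaled accordingly. The batching idiom in isolation:

```python
features_full_list = [f"feature_{i}" for i in range(250)]  # illustrative

feature_batches = [
    features_full_list[i : i + 100]
    for i in range(0, len(features_full_list), 100)
]

assert [len(b) for b in feature_batches] == [100, 100, 50]
```
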
diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py
index b3a500665e..5913b60f62 100644
--- a/sdk/python/feast/infra/offline_stores/bigquery.py
+++ b/sdk/python/feast/infra/offline_stores/bigquery.py
@@ -363,7 +363,7 @@ def offline_write_batch(
assert isinstance(feature_view.batch_source, BigQuerySource)
pa_schema, column_names = offline_utils.get_pyarrow_schema_from_batch_source(
- config, feature_view.batch_source
+ config, feature_view.batch_source, timestamp_unit="ns"
)
if column_names != table.column_names:
raise ValueError(
@@ -441,9 +441,11 @@ def full_feature_names(self) -> bool:
def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
with self._query_generator() as query:
- df = self._execute_query(query).to_dataframe(create_bqstorage_client=True)
+ df = self._execute_query(query=query, timeout=timeout).to_dataframe(
+ create_bqstorage_client=True
+ )
return df
def to_sql(self) -> str:
@@ -454,8 +456,8 @@ def to_sql(self) -> str:
def to_bigquery(
self,
job_config: Optional[bigquery.QueryJobConfig] = None,
- timeout: int = 1800,
- retry_cadence: int = 10,
+ timeout: Optional[int] = 1800,
+ retry_cadence: Optional[int] = 10,
) -> str:
"""
Synchronously executes the underlying query and exports the result to a BigQuery table. The
@@ -488,20 +490,34 @@ def to_bigquery(
return str(job_config.destination)
with self._query_generator() as query:
- self._execute_query(query, job_config, timeout)
-
- print(f"Done writing to '{job_config.destination}'.")
- return str(job_config.destination)
-
- def _to_arrow_internal(self) -> pyarrow.Table:
+ dest = job_config.destination
+ # because setting destination for scripts is not valid
+ # remove destination attribute if provided
+ job_config.destination = None
+ bq_job = self._execute_query(query, job_config, timeout)
+
+ if not job_config.dry_run:
+ config = bq_job.to_api_repr()["configuration"]
+ # get temp table created by BQ
+ tmp_dest = config["query"]["destinationTable"]
+ temp_dest_table = f"{tmp_dest['projectId']}.{tmp_dest['datasetId']}.{tmp_dest['tableId']}"
+
+ # persist temp table
+ sql = f"CREATE TABLE `{dest}` AS SELECT * FROM {temp_dest_table}"
+ self._execute_query(sql, timeout=timeout)
+
+ print(f"Done writing to '{dest}'.")
+ return str(dest)
+
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
with self._query_generator() as query:
- q = self._execute_query(query=query)
+ q = self._execute_query(query=query, timeout=timeout)
assert q
return q.to_arrow()
@log_exceptions_and_usage
def _execute_query(
- self, query, job_config=None, timeout: int = 1800
+ self, query, job_config=None, timeout: Optional[int] = None
) -> Optional[bigquery.job.query.QueryJob]:
bq_job = self.client.query(query, job_config=job_config)
@@ -511,14 +527,20 @@ def _execute_query(
)
return None
- block_until_done(client=self.client, bq_job=bq_job, timeout=timeout)
+ block_until_done(client=self.client, bq_job=bq_job, timeout=timeout or 1800)
return bq_job
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetBigQueryStorage)
self.to_bigquery(
- bigquery.QueryJobConfig(destination=storage.bigquery_options.table)
+ bigquery.QueryJobConfig(destination=storage.bigquery_options.table),
+ timeout=timeout,
)
@property
@@ -555,7 +577,6 @@ def to_remote_storage(self) -> List[str]:
else:
storage_client = StorageClient(project=self.client.project)
bucket, prefix = self._gcs_path[len("gs://") :].split("/", 1)
- prefix = prefix.rsplit("/", 1)[0]
if prefix.startswith("/"):
prefix = prefix[1:]
@@ -647,7 +668,7 @@ def _upload_entity_df(
job: Union[bigquery.job.query.QueryJob, bigquery.job.load.LoadJob]
if isinstance(entity_df, str):
- job = client.query(f"CREATE TABLE {table_name} AS ({entity_df})")
+ job = client.query(f"CREATE TABLE `{table_name}` AS ({entity_df})")
elif isinstance(entity_df, pd.DataFrame):
# Drop the index so that we don't have unnecessary columns
@@ -671,7 +692,7 @@ def _get_entity_schema(
) -> Dict[str, np.dtype]:
if isinstance(entity_df, str):
entity_df_sample = (
- client.query(f"SELECT * FROM ({entity_df}) LIMIT 1").result().to_dataframe()
+ client.query(f"SELECT * FROM ({entity_df}) LIMIT 0").result().to_dataframe()
)
entity_schema = dict(zip(entity_df_sample.columns, entity_df_sample.dtypes))
@@ -777,7 +798,7 @@ def arrow_schema_to_bq_schema(arrow_schema: pyarrow.Schema) -> List[SchemaField]
Compute a deterministic hash for the `left_table_query_string` that will be used throughout
all the logic as the field to GROUP BY the data
*/
-WITH entity_dataframe AS (
+CREATE TEMP TABLE entity_dataframe AS (
SELECT *,
{{entity_df_event_timestamp_col}} AS entity_timestamp
{% for featureview in featureviews %}
@@ -793,95 +814,95 @@ def arrow_schema_to_bq_schema(arrow_schema: pyarrow.Schema) -> List[SchemaField]
{% endif %}
{% endfor %}
FROM `{{ left_table_query_string }}`
-),
+);
{% for featureview in featureviews %}
-
-{{ featureview.name }}__entity_dataframe AS (
- SELECT
- {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
- entity_timestamp,
- {{featureview.name}}__entity_row_unique_id
- FROM entity_dataframe
- GROUP BY
- {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
- entity_timestamp,
- {{featureview.name}}__entity_row_unique_id
-),
-
-/*
- This query template performs the point-in-time correctness join for a single feature set table
- to the provided entity table.
-
- 1. We first join the current feature_view to the entity dataframe that has been passed.
- This JOIN has the following logic:
- - For each row of the entity dataframe, only keep the rows where the `timestamp_field`
- is less than the one provided in the entity dataframe
- - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field`
- is higher the the one provided minus the TTL
- - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been
- computed previously
-
- The output of this CTE will contain all the necessary information and already filtered out most
- of the data that is not relevant.
-*/
-
-{{ featureview.name }}__subquery AS (
- SELECT
- {{ featureview.timestamp_field }} as event_timestamp,
- {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }}
- {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %}
- {% for feature in featureview.features %}
- {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %}
- {% endfor %}
- FROM {{ featureview.table_subquery }}
- WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}'
- {% if featureview.ttl == 0 %}{% else %}
- AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}'
- {% endif %}
-),
-
-{{ featureview.name }}__base AS (
- SELECT
- subquery.*,
- entity_dataframe.entity_timestamp,
- entity_dataframe.{{featureview.name}}__entity_row_unique_id
- FROM {{ featureview.name }}__subquery AS subquery
- INNER JOIN {{ featureview.name }}__entity_dataframe AS entity_dataframe
- ON TRUE
- AND subquery.event_timestamp <= entity_dataframe.entity_timestamp
-
+CREATE TEMP TABLE {{ featureview.name }}__cleaned AS (
+ WITH {{ featureview.name }}__entity_dataframe AS (
+ SELECT
+ {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
+ entity_timestamp,
+ {{featureview.name}}__entity_row_unique_id
+ FROM entity_dataframe
+ GROUP BY
+ {{ featureview.entities | join(', ')}}{% if featureview.entities %},{% else %}{% endif %}
+ entity_timestamp,
+ {{featureview.name}}__entity_row_unique_id
+ ),
+
+ /*
+ This query template performs the point-in-time correctness join for a single feature set table
+ to the provided entity table.
+
+ 1. We first join the current feature_view to the entity dataframe that has been passed.
+ This JOIN has the following logic:
+ - For each row of the entity dataframe, only keep the rows where the `timestamp_field`
+ is less than the one provided in the entity dataframe
+ - If there is a TTL for the current feature_view, also keep the rows where the `timestamp_field`
+ is higher than the one provided minus the TTL
+ - For each row, join on the entity key and retrieve the `entity_row_unique_id` that has been
+ computed previously
+
+ The output of this CTE will contain all the necessary information and already filtered out most
+ of the data that is not relevant.
+ */
+
+ {{ featureview.name }}__subquery AS (
+ SELECT
+ {{ featureview.timestamp_field }} as event_timestamp,
+ {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }}
+ {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %}
+ {% for feature in featureview.features %}
+ {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %}
+ {% endfor %}
+ FROM {{ featureview.table_subquery }}
+ WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}'
{% if featureview.ttl == 0 %}{% else %}
- AND subquery.event_timestamp >= Timestamp_sub(entity_dataframe.entity_timestamp, interval {{ featureview.ttl }} second)
+ AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}'
{% endif %}
+ ),
+
+ {{ featureview.name }}__base AS (
+ SELECT
+ subquery.*,
+ entity_dataframe.entity_timestamp,
+ entity_dataframe.{{featureview.name}}__entity_row_unique_id
+ FROM {{ featureview.name }}__subquery AS subquery
+ INNER JOIN {{ featureview.name }}__entity_dataframe AS entity_dataframe
+ ON TRUE
+ AND subquery.event_timestamp <= entity_dataframe.entity_timestamp
+
+ {% if featureview.ttl == 0 %}{% else %}
+ AND subquery.event_timestamp >= Timestamp_sub(entity_dataframe.entity_timestamp, interval {{ featureview.ttl }} second)
+ {% endif %}
- {% for entity in featureview.entities %}
- AND subquery.{{ entity }} = entity_dataframe.{{ entity }}
- {% endfor %}
-),
-
-/*
- 2. If the `created_timestamp_column` has been set, we need to
- deduplicate the data first. This is done by calculating the
- `MAX(created_at_timestamp)` for each event_timestamp.
- We then join the data on the next CTE
-*/
-{% if featureview.created_timestamp_column %}
-{{ featureview.name }}__dedup AS (
- SELECT
- {{featureview.name}}__entity_row_unique_id,
- event_timestamp,
- MAX(created_timestamp) as created_timestamp
- FROM {{ featureview.name }}__base
- GROUP BY {{featureview.name}}__entity_row_unique_id, event_timestamp
-),
-{% endif %}
+ {% for entity in featureview.entities %}
+ AND subquery.{{ entity }} = entity_dataframe.{{ entity }}
+ {% endfor %}
+ ),
+
+ /*
+ 2. If the `created_timestamp_column` has been set, we need to
+ deduplicate the data first. This is done by calculating the
+ `MAX(created_at_timestamp)` for each event_timestamp.
+ We then join the data on the next CTE
+ */
+ {% if featureview.created_timestamp_column %}
+ {{ featureview.name }}__dedup AS (
+ SELECT
+ {{featureview.name}}__entity_row_unique_id,
+ event_timestamp,
+ MAX(created_timestamp) as created_timestamp
+ FROM {{ featureview.name }}__base
+ GROUP BY {{featureview.name}}__entity_row_unique_id, event_timestamp
+ ),
+ {% endif %}
-/*
- 3. The data has been filtered during the first CTE "*__base"
- Thus we only need to compute the latest timestamp of each feature.
-*/
-{{ featureview.name }}__latest AS (
+ /*
+ 3. The data has been filtered during the first CTE "*__base"
+ Thus we only need to compute the latest timestamp of each feature.
+ */
+ {{ featureview.name }}__latest AS (
SELECT
event_timestamp,
{% if featureview.created_timestamp_column %}created_timestamp,{% endif %}
@@ -900,13 +921,13 @@ def arrow_schema_to_bq_schema(arrow_schema: pyarrow.Schema) -> List[SchemaField]
{% endif %}
)
WHERE row_number = 1
-),
+)
/*
4. Once we know the latest value of each feature for a given timestamp,
we can join again the data back to the original "base" dataset
*/
-{{ featureview.name }}__cleaned AS (
+
SELECT base.*
FROM {{ featureview.name }}__base as base
INNER JOIN {{ featureview.name }}__latest
@@ -917,7 +938,7 @@ def arrow_schema_to_bq_schema(arrow_schema: pyarrow.Schema) -> List[SchemaField]
,created_timestamp
{% endif %}
)
-){% if loop.last %}{% else %}, {% endif %}
+);
{% endfor %}
diff --git a/sdk/python/feast/infra/offline_stores/bigquery_source.py b/sdk/python/feast/infra/offline_stores/bigquery_source.py
index 28d6a3ed77..4888707c09 100644
--- a/sdk/python/feast/infra/offline_stores/bigquery_source.py
+++ b/sdk/python/feast/infra/offline_stores/bigquery_source.py
@@ -18,13 +18,6 @@
from feast.usage import get_user_agent
from feast.value_type import ValueType
-try:
- from google.api_core import client_info as http_client_info
-except ImportError as e:
- from feast.errors import FeastExtrasDependencyImportError
-
- raise FeastExtrasDependencyImportError("gcp", str(e))
-
@typechecked
class BigQuerySource(DataSource):
@@ -165,6 +158,13 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]:
def get_table_column_names_and_types(
self, config: RepoConfig
) -> Iterable[Tuple[str, str]]:
+ try:
+ from google.api_core import client_info as http_client_info
+ except ImportError as e:
+ from feast.errors import FeastExtrasDependencyImportError
+
+ raise FeastExtrasDependencyImportError("gcp", str(e))
+
from google.cloud import bigquery
project_id = (
@@ -180,9 +180,9 @@ def get_table_column_names_and_types(
if not isinstance(schema[0], bigquery.schema.SchemaField):
raise TypeError("Could not parse BigQuery table schema.")
else:
- bq_columns_query = f"SELECT * FROM ({self.query}) LIMIT 1"
- queryRes = client.query(bq_columns_query).result()
- schema = queryRes.schema
+ bq_columns_query = f"SELECT * FROM ({self.query}) LIMIT 0"
+ query_res = client.query(bq_columns_query).result()
+ schema = query_res.schema
name_type_pairs: List[Tuple[str, str]] = []
for field in schema:
diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py
index e3bb4e8cca..85a61106aa 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py
@@ -375,7 +375,7 @@ def get_temp_table_dml_header(
return temp_table_dml_header
@log_exceptions_and_usage
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
with self._query_generator() as query:
temp_table_name = "_" + str(uuid.uuid4()).replace("-", "")
temp_external_location = self.get_temp_s3_path()
@@ -392,7 +392,7 @@ def _to_df_internal(self) -> pd.DataFrame:
)
@log_exceptions_and_usage
- def _to_arrow_internal(self) -> pa.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table:
with self._query_generator() as query:
temp_table_name = "_" + str(uuid.uuid4()).replace("-", "")
temp_external_location = self.get_temp_s3_path()
@@ -412,7 +412,12 @@ def _to_arrow_internal(self) -> pa.Table:
def metadata(self) -> Optional[RetrievalMetadata]:
return self._metadata
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetAthenaStorage)
self.to_athena(table_name=storage.athena_options.table)
diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py
index 8dc5f6c654..849d5cc797 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py
@@ -327,7 +327,7 @@ def __init__(
engine: Engine,
config: MsSqlServerOfflineStoreConfig,
full_feature_names: bool,
- on_demand_feature_views: Optional[List[OnDemandFeatureView]],
+ on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None,
metadata: Optional[RetrievalMetadata] = None,
drop_columns: Optional[List[str]] = None,
):
@@ -347,16 +347,21 @@ def full_feature_names(self) -> bool:
def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
- def _to_df_internal(self) -> pandas.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pandas.DataFrame:
return pandas.read_sql(self.query, con=self.engine).fillna(value=np.nan)
- def _to_arrow_internal(self) -> pyarrow.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
result = pandas.read_sql(self.query, con=self.engine).fillna(value=np.nan)
return pyarrow.Table.from_pandas(result)
## Implements persist in Feast 0.18 - This persists to filestorage
## ToDo: Persist to Azure Storage
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetFileStorage)
filesystem, path = FileSource.create_filesystem_and_path(
diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py
index ada41c023b..c2e95a8648 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py
@@ -241,7 +241,7 @@ def __init__(
query: Union[str, Callable[[], ContextManager[str]]],
config: RepoConfig,
full_feature_names: bool,
- on_demand_feature_views: Optional[List[OnDemandFeatureView]],
+ on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None,
metadata: Optional[RetrievalMetadata] = None,
):
if not isinstance(query, str):
@@ -267,7 +267,7 @@ def full_feature_names(self) -> bool:
def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
# We use arrow format because it gives better control of the table schema
return self._to_arrow_internal().to_pandas()
@@ -275,7 +275,7 @@ def to_sql(self) -> str:
with self._query_generator() as query:
return query
- def _to_arrow_internal(self) -> pa.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table:
with self._query_generator() as query:
with _get_conn(self.config.offline_store) as conn, conn.cursor() as cur:
conn.set_session(readonly=True)
@@ -302,7 +302,12 @@ def _to_arrow_internal(self) -> pa.Table:
def metadata(self) -> Optional[RetrievalMetadata]:
return self._metadata
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetPostgreSQLStorage)
df_to_postgres_table(
diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py
index 34a7786873..decd0c2c6f 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py
@@ -337,24 +337,52 @@ def to_spark_df(self) -> pyspark.sql.DataFrame:
*_, last = map(self.spark_session.sql, statements)
return last
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
"""Return dataset as Pandas DataFrame synchronously"""
return self.to_spark_df().toPandas()
- def _to_arrow_internal(self) -> pyarrow.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
"""Return dataset as pyarrow Table synchronously"""
- return pyarrow.Table.from_pandas(self._to_df_internal())
+ return pyarrow.Table.from_pandas(self._to_df_internal(timeout=timeout))
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
"""
Run the retrieval and persist the results in the same offline store used for read.
- Please note the persisting is done only within the scope of the spark session.
+        Note that for a local warehouse directory, persisting is done only within the scope of the Spark session.
"""
assert isinstance(storage, SavedDatasetSparkStorage)
table_name = storage.spark_options.table
if not table_name:
raise ValueError("Cannot persist, table_name is not defined")
- self.to_spark_df().createOrReplaceTempView(table_name)
+ if self._has_remote_warehouse_in_config():
+ file_format = storage.spark_options.file_format
+ if not file_format:
+ self.to_spark_df().write.saveAsTable(table_name)
+ else:
+ self.to_spark_df().write.format(file_format).saveAsTable(table_name)
+ else:
+ self.to_spark_df().createOrReplaceTempView(table_name)
+
+ def _has_remote_warehouse_in_config(self) -> bool:
+ """
+        Check whether the Spark session is configured with a Hive metastore URI,
+        or with a warehouse directory that is not a local path.
+ """
+ try:
+ self.spark_session.conf.get("hive.metastore.uris")
+ return True
+ except Exception:
+ warehouse_dir = self.spark_session.conf.get("spark.sql.warehouse.dir")
+ if warehouse_dir and warehouse_dir.startswith("file:"):
+ return False
+ else:
+ return True
def supports_remote_storage_export(self) -> bool:
return self._config.offline_store.staging_location is not None
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py
index 67efa6a27f..a5aa53df7a 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py
@@ -67,6 +67,11 @@ def __init__(
catalog="memory",
host="localhost",
port=self.exposed_port,
+ source="trino-python-client",
+ http_scheme="http",
+ verify=False,
+ extra_credential=None,
+ auth=None,
)
def teardown(self):
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py
index a5a51311eb..f662cda913 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py
@@ -1,12 +1,18 @@
import uuid
from datetime import date, datetime
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Literal, Optional, Tuple, Union
import numpy as np
import pandas as pd
import pyarrow
-from pydantic import StrictStr
-from trino.auth import Authentication
+from pydantic import Field, FilePath, SecretStr, StrictBool, StrictStr, root_validator
+from trino.auth import (
+ BasicAuthentication,
+ CertificateAuthentication,
+ JWTAuthentication,
+ KerberosAuthentication,
+ OAuth2Authentication,
+)
from feast.data_source import DataSource
from feast.errors import InvalidEntityType
@@ -32,6 +38,87 @@
from feast.usage import log_exceptions_and_usage
+class BasicAuthModel(FeastConfigBaseModel):
+ username: StrictStr
+ password: SecretStr
+
+
+class KerberosAuthModel(FeastConfigBaseModel):
+ config: Optional[FilePath] = Field(default=None, alias="config-file")
+ service_name: Optional[StrictStr] = Field(default=None, alias="service-name")
+ mutual_authentication: StrictBool = Field(
+ default=False, alias="mutual-authentication"
+ )
+ force_preemptive: StrictBool = Field(default=False, alias="force-preemptive")
+ hostname_override: Optional[StrictStr] = Field(
+ default=None, alias="hostname-override"
+ )
+ sanitize_mutual_error_response: StrictBool = Field(
+ default=True, alias="sanitize-mutual-error-response"
+ )
+ principal: Optional[StrictStr]
+ delegate: StrictBool = False
+ ca_bundle: Optional[FilePath] = Field(default=None, alias="ca-bundle-file")
+
+
+class JWTAuthModel(FeastConfigBaseModel):
+ token: SecretStr
+
+
+class CertificateAuthModel(FeastConfigBaseModel):
+ cert: FilePath = Field(default=None, alias="cert-file")
+ key: FilePath = Field(default=None, alias="key-file")
+
+
+CLASSES_BY_AUTH_TYPE = {
+ "kerberos": {
+ "auth_model": KerberosAuthModel,
+ "trino_auth": KerberosAuthentication,
+ },
+ "basic": {
+ "auth_model": BasicAuthModel,
+ "trino_auth": BasicAuthentication,
+ },
+ "jwt": {
+ "auth_model": JWTAuthModel,
+ "trino_auth": JWTAuthentication,
+ },
+ "oauth2": {
+ "auth_model": None,
+ "trino_auth": OAuth2Authentication,
+ },
+ "certificate": {
+ "auth_model": CertificateAuthModel,
+ "trino_auth": CertificateAuthentication,
+ },
+}
+
+
+class AuthConfig(FeastConfigBaseModel):
+ type: Literal["kerberos", "basic", "jwt", "oauth2", "certificate"]
+ config: Optional[Dict[StrictStr, Any]]
+
+ @root_validator
+ def config_only_nullable_for_oauth2(cls, values):
+ auth_type = values["type"]
+ auth_config = values["config"]
+ if auth_type != "oauth2" and auth_config is None:
+ raise ValueError(f"config cannot be null for auth type '{auth_type}'")
+
+ return values
+
+ def to_trino_auth(self):
+ auth_type = self.type
+ trino_auth_cls = CLASSES_BY_AUTH_TYPE[auth_type]["trino_auth"]
+
+ if auth_type == "oauth2":
+ return trino_auth_cls()
+
+ model_cls = CLASSES_BY_AUTH_TYPE[auth_type]["auth_model"]
+ model = model_cls(**self.config)
+ return trino_auth_cls(**model.dict())
+
+
class TrinoOfflineStoreConfig(FeastConfigBaseModel):
"""Online store config for Trino"""
@@ -47,6 +134,23 @@ class TrinoOfflineStoreConfig(FeastConfigBaseModel):
catalog: StrictStr
""" Catalog of the Trino cluster """
+ user: StrictStr
+ """ User of the Trino cluster """
+
+ source: Optional[StrictStr] = "trino-python-client"
+    """ ID of the Feast Trino Python client, useful for debugging """
+
+ http_scheme: Literal["http", "https"] = Field(default="http", alias="http-scheme")
+ """ HTTP scheme that should be used while establishing a connection to the Trino cluster """
+
+ verify: StrictBool = Field(default=True, alias="ssl-verify")
+    """ Whether the SSL certificate emitted by the Trino cluster should be verified or not """
+
+ extra_credential: Optional[StrictStr] = Field(
+ default=None, alias="x-trino-extra-credential-header"
+ )
+ """ Specifies the HTTP header X-Trino-Extra-Credential, e.g. user1=pwd1, user2=pwd2 """
+
connector: Dict[str, str]
"""
Trino connector to use as well as potential extra parameters.
@@ -59,6 +163,16 @@ class TrinoOfflineStoreConfig(FeastConfigBaseModel):
dataset: StrictStr = "feast"
""" (optional) Trino Dataset name for temporary tables """
+ auth: Optional[AuthConfig]
+ """
+ (optional) Authentication mechanism to use when connecting to Trino. Supported options are:
+ - kerberos
+ - basic
+ - jwt
+ - oauth2
+ - certificate
+ """
+
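
Taken together, a hypothetical `feature_store.yaml` offline store block exercising the new fields and basic authentication might look like the sketch below (host, catalog, and credentials are placeholders; the aliased keys such as `http-scheme` and `ssl-verify` come from the `Field` aliases above):

```yaml
offline_store:
  type: trino
  host: localhost
  port: 8080
  catalog: memory
  user: trino
  source: trino-python-client
  http-scheme: https
  ssl-verify: true
  connector:
    type: memory
  auth:
    type: basic
    config:
      username: feast-user
      password: ${TRINO_PASSWORD}
```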
class TrinoRetrievalJob(RetrievalJob):
def __init__(
@@ -85,16 +199,16 @@ def full_feature_names(self) -> bool:
def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
"""Return dataset as Pandas DataFrame synchronously including on demand transforms"""
results = self._client.execute_query(query_text=self._query)
self.pyarrow_schema = results.pyarrow_schema
return results.to_dataframe()
- def _to_arrow_internal(self) -> pyarrow.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
"""Return payrrow dataset as synchronously including on demand transforms"""
return pyarrow.Table.from_pandas(
- self._to_df_internal(), schema=self.pyarrow_schema
+ self._to_df_internal(timeout=timeout), schema=self.pyarrow_schema
)
def to_sql(self) -> str:
@@ -126,7 +240,12 @@ def to_trino(
self._client.execute_query(query_text=query)
return destination_table
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
"""
Run the retrieval and persist the results in the same offline store used for read.
"""
@@ -157,9 +276,6 @@ def pull_latest_from_table_or_query(
created_timestamp_column: Optional[str],
start_date: datetime,
end_date: datetime,
- user: Optional[str] = None,
- auth: Optional[Authentication] = None,
- http_scheme: Optional[str] = None,
) -> TrinoRetrievalJob:
assert isinstance(config.offline_store, TrinoOfflineStoreConfig)
assert isinstance(data_source, TrinoSource)
@@ -176,9 +292,7 @@ def pull_latest_from_table_or_query(
timestamps.append(created_timestamp_column)
timestamp_desc_string = " DESC, ".join(timestamps) + " DESC"
field_string = ", ".join(join_key_columns + feature_name_columns + timestamps)
- client = _get_trino_client(
- config=config, user=user, auth=auth, http_scheme=http_scheme
- )
+ client = _get_trino_client(config=config)
query = f"""
SELECT
@@ -211,17 +325,12 @@ def get_historical_features(
registry: Registry,
project: str,
full_feature_names: bool = False,
- user: Optional[str] = None,
- auth: Optional[Authentication] = None,
- http_scheme: Optional[str] = None,
) -> TrinoRetrievalJob:
assert isinstance(config.offline_store, TrinoOfflineStoreConfig)
for fv in feature_views:
assert isinstance(fv.batch_source, TrinoSource)
- client = _get_trino_client(
- config=config, user=user, auth=auth, http_scheme=http_scheme
- )
+ client = _get_trino_client(config=config)
table_reference = _get_table_reference_for_new_entity(
catalog=config.offline_store.catalog,
@@ -302,17 +411,12 @@ def pull_all_from_table_or_query(
timestamp_field: str,
start_date: datetime,
end_date: datetime,
- user: Optional[str] = None,
- auth: Optional[Authentication] = None,
- http_scheme: Optional[str] = None,
) -> RetrievalJob:
assert isinstance(config.offline_store, TrinoOfflineStoreConfig)
assert isinstance(data_source, TrinoSource)
from_expression = data_source.get_table_query_string()
- client = _get_trino_client(
- config=config, user=user, auth=auth, http_scheme=http_scheme
- )
+ client = _get_trino_client(config=config)
field_string = ", ".join(
join_key_columns + feature_name_columns + [timestamp_field]
)
@@ -373,21 +477,22 @@ def _upload_entity_df_and_get_entity_schema(
# TODO: Ensure that the table expires after some time
-def _get_trino_client(
- config: RepoConfig,
- user: Optional[str],
- auth: Optional[Any],
- http_scheme: Optional[str],
-) -> Trino:
- client = Trino(
- user=user,
- catalog=config.offline_store.catalog,
+def _get_trino_client(config: RepoConfig) -> Trino:
+ auth = None
+ if config.offline_store.auth is not None:
+ auth = config.offline_store.auth.to_trino_auth()
+
+ return Trino(
host=config.offline_store.host,
port=config.offline_store.port,
+ user=config.offline_store.user,
+ catalog=config.offline_store.catalog,
+ source=config.offline_store.source,
+ http_scheme=config.offline_store.http_scheme,
+ verify=config.offline_store.verify,
+ extra_credential=config.offline_store.extra_credential,
auth=auth,
- http_scheme=http_scheme,
)
- return client
def _get_entity_df_event_timestamp_range(
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py
index 97c61f78a6..50472407bc 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_queries.py
@@ -1,7 +1,6 @@
from __future__ import annotations
import datetime
-import os
import signal
from dataclasses import dataclass
from enum import Enum
@@ -30,34 +29,27 @@ class QueryStatus(Enum):
class Trino:
def __init__(
self,
- host: Optional[str] = None,
- port: Optional[int] = None,
- user: Optional[str] = None,
- catalog: Optional[str] = None,
- auth: Optional[Any] = None,
- http_scheme: Optional[str] = None,
- source: Optional[str] = None,
- extra_credential: Optional[str] = None,
+ host: str,
+ port: int,
+ user: str,
+ catalog: str,
+ source: Optional[str],
+ http_scheme: str,
+ verify: bool,
+ extra_credential: Optional[str],
+ auth: Optional[trino.Authentication],
):
- self.host = host or os.getenv("TRINO_HOST")
- self.port = port or os.getenv("TRINO_PORT")
- self.user = user or os.getenv("TRINO_USER")
- self.catalog = catalog or os.getenv("TRINO_CATALOG")
- self.auth = auth or os.getenv("TRINO_AUTH")
- self.http_scheme = http_scheme or os.getenv("TRINO_HTTP_SCHEME")
- self.source = source or os.getenv("TRINO_SOURCE")
- self.extra_credential = extra_credential or os.getenv("TRINO_EXTRA_CREDENTIAL")
+ self.host = host
+ self.port = port
+ self.user = user
+ self.catalog = catalog
+ self.source = source
+ self.http_scheme = http_scheme
+ self.verify = verify
+ self.extra_credential = extra_credential
+ self.auth = auth
self._cursor: Optional[Cursor] = None
- if self.host is None:
- raise ValueError("TRINO_HOST must be set if not passed in")
- if self.port is None:
- raise ValueError("TRINO_PORT must be set if not passed in")
- if self.user is None:
- raise ValueError("TRINO_USER must be set if not passed in")
- if self.catalog is None:
- raise ValueError("TRINO_CATALOG must be set if not passed in")
-
def _get_cursor(self) -> Cursor:
if self._cursor is None:
headers = (
@@ -70,9 +62,10 @@ def _get_cursor(self) -> Cursor:
port=self.port,
user=self.user,
catalog=self.catalog,
- auth=self.auth,
- http_scheme=self.http_scheme,
source=self.source,
+ http_scheme=self.http_scheme,
+ verify=self.verify,
+ auth=self.auth,
http_headers=headers,
).cursor()
diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py
index f09b79069c..e618e8664e 100644
--- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py
+++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py
@@ -227,10 +227,20 @@ def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]:
def get_table_column_names_and_types(
self, config: RepoConfig
) -> Iterable[Tuple[str, str]]:
+ auth = None
+ if config.offline_store.auth is not None:
+ auth = config.offline_store.auth.to_trino_auth()
+
client = Trino(
catalog=config.offline_store.catalog,
host=config.offline_store.host,
port=config.offline_store.port,
+ user=config.offline_store.user,
+ source=config.offline_store.source,
+ http_scheme=config.offline_store.http_scheme,
+ verify=config.offline_store.verify,
+ extra_credential=config.offline_store.extra_credential,
+ auth=auth,
)
if self.table:
table_schema = client.execute_query(
diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py
index 15e614a5a3..5e4107545f 100644
--- a/sdk/python/feast/infra/offline_stores/file.py
+++ b/sdk/python/feast/infra/offline_stores/file.py
@@ -76,19 +76,24 @@ def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
@log_exceptions_and_usage
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
# Only execute the evaluation function to build the final historical retrieval dataframe at the last moment.
df = self.evaluation_function().compute()
df = df.reset_index(drop=True)
return df
@log_exceptions_and_usage
- def _to_arrow_internal(self):
+ def _to_arrow_internal(self, timeout: Optional[int] = None):
# Only execute the evaluation function to build the final historical retrieval dataframe at the last moment.
df = self.evaluation_function().compute()
return pyarrow.Table.from_pandas(df)
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetFileStorage)
# Check if the specified location already exists.
@@ -453,7 +458,9 @@ def offline_write_batch(
filesystem, path = FileSource.create_filesystem_and_path(
file_options.uri, file_options.s3_endpoint_override
)
- prev_table = pyarrow.parquet.read_table(path, memory_map=True)
+ prev_table = pyarrow.parquet.read_table(
+ path, filesystem=filesystem, memory_map=True
+ )
if table.schema != prev_table.schema:
table = table.cast(prev_table.schema)
new_table = pyarrow.concat_tables([table, prev_table])
diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py
index e9f3735dee..ac824b359f 100644
--- a/sdk/python/feast/infra/offline_stores/file_source.py
+++ b/sdk/python/feast/infra/offline_stores/file_source.py
@@ -158,7 +158,13 @@ def get_table_column_names_and_types(
# Adding support for different file format path
# based on S3 filesystem
if filesystem is None:
- schema = ParquetDataset(path).schema.to_arrow_schema()
+ schema = ParquetDataset(path, use_legacy_dataset=False).schema
+ if hasattr(schema, "names") and hasattr(schema, "types"):
+                # Newer versions of pyarrow don't have to_arrow_schema();
+                # the dataset schema itself already exposes names and types.
+ pass
+ else:
+ schema = schema.to_arrow_schema()
else:
schema = ParquetDataset(path, filesystem=filesystem).schema
diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py
index 96adf0091d..e5ace17bb3 100644
--- a/sdk/python/feast/infra/offline_stores/offline_store.py
+++ b/sdk/python/feast/infra/offline_stores/offline_store.py
@@ -62,7 +62,9 @@ class RetrievalJob(ABC):
"""A RetrievalJob manages the execution of a query to retrieve data from the offline store."""
def to_df(
- self, validation_reference: Optional["ValidationReference"] = None
+ self,
+ validation_reference: Optional["ValidationReference"] = None,
+ timeout: Optional[int] = None,
) -> pd.DataFrame:
"""
Synchronously executes the underlying query and returns the result as a pandas dataframe.
@@ -72,8 +74,9 @@ def to_df(
Args:
validation_reference (optional): The validation to apply against the retrieved dataframe.
+ timeout (optional): The query timeout if applicable.
"""
- features_df = self._to_df_internal()
+ features_df = self._to_df_internal(timeout=timeout)
if self.on_demand_feature_views:
# TODO(adchia): Fix requirement to specify dependent feature views in feature_refs
@@ -103,7 +106,9 @@ def to_df(
return features_df
def to_arrow(
- self, validation_reference: Optional["ValidationReference"] = None
+ self,
+ validation_reference: Optional["ValidationReference"] = None,
+ timeout: Optional[int] = None,
) -> pyarrow.Table:
"""
Synchronously executes the underlying query and returns the result as an arrow table.
@@ -113,11 +118,12 @@ def to_arrow(
Args:
validation_reference (optional): The validation to apply against the retrieved dataframe.
+ timeout (optional): The query timeout if applicable.
"""
if not self.on_demand_feature_views and not validation_reference:
- return self._to_arrow_internal()
+ return self._to_arrow_internal(timeout=timeout)
- features_df = self._to_df_internal()
+ features_df = self._to_df_internal(timeout=timeout)
if self.on_demand_feature_views:
for odfv in self.on_demand_feature_views:
if odfv.mode != 'pandas':
@@ -151,20 +157,24 @@ def to_sql(self) -> str:
pass
@abstractmethod
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
"""
Synchronously executes the underlying query and returns the result as a pandas dataframe.
+        timeout: RetrievalJob implementations may implement a timeout.
+
Does not handle on demand transformations or dataset validation. For either of those,
`to_df` should be used.
"""
pass
@abstractmethod
- def _to_arrow_internal(self) -> pyarrow.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
"""
Synchronously executes the underlying query and returns the result as an arrow table.
+        timeout: RetrievalJob implementations may implement a timeout.
+
Does not handle on demand transformations or dataset validation. For either of those,
`to_arrow` should be used.
"""
@@ -183,7 +193,12 @@ def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
pass
@abstractmethod
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: bool = False,
+ timeout: Optional[int] = None,
+ ):
"""
Synchronously executes the underlying query and persists the result in the same offline store
at the specified destination.
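
With `timeout` threaded through `to_df` and `to_arrow`, callers can bound query execution from the public API. A hedged sketch, assuming an initialized feature repo and a feature view named `driver_stats` (both hypothetical):

```python
from datetime import datetime

import pandas as pd
from feast import FeatureStore

store = FeatureStore(repo_path=".")  # assumes an initialized repo
entity_df = pd.DataFrame(
    {"driver_id": [1001], "event_timestamp": [datetime.utcnow()]}
)
job = store.get_historical_features(
    entity_df=entity_df,
    features=["driver_stats:conv_rate"],  # hypothetical feature reference
)

df = job.to_df(timeout=300)        # forwarded to _to_df_internal(timeout=...)
table = job.to_arrow(timeout=300)  # forwarded to _to_arrow_internal(timeout=...)
```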
diff --git a/sdk/python/feast/infra/offline_stores/offline_utils.py b/sdk/python/feast/infra/offline_stores/offline_utils.py
index 42b8f8497a..2d4fa268e4 100644
--- a/sdk/python/feast/infra/offline_stores/offline_utils.py
+++ b/sdk/python/feast/infra/offline_stores/offline_utils.py
@@ -232,7 +232,7 @@ def get_offline_store_from_config(offline_store_config: Any) -> OfflineStore:
def get_pyarrow_schema_from_batch_source(
- config: RepoConfig, batch_source: DataSource
+ config: RepoConfig, batch_source: DataSource, timestamp_unit: str = "us"
) -> Tuple[pa.Schema, List[str]]:
"""Returns the pyarrow schema and column names for the given batch source."""
column_names_and_types = batch_source.get_table_column_names_and_types(config)
@@ -244,7 +244,8 @@ def get_pyarrow_schema_from_batch_source(
(
column_name,
feast_value_type_to_pa(
- batch_source.source_datatype_to_feast_value_type()(column_type)
+ batch_source.source_datatype_to_feast_value_type()(column_type),
+ timestamp_unit=timestamp_unit,
),
)
)
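
The new `timestamp_unit` argument only selects the precision of the arrow timestamp type the schema is built with; plain pyarrow illustrates the two common units:

```python
import pyarrow as pa

# "us" (the default above) and "ns" change the physical precision of
# the timestamp column, not the meaning of its values.
print(pa.timestamp("us"))  # timestamp[us]
print(pa.timestamp("ns"))  # timestamp[ns]
```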
diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py
index 82b5150eaf..837cf49655 100644
--- a/sdk/python/feast/infra/offline_stores/redshift.py
+++ b/sdk/python/feast/infra/offline_stores/redshift.py
@@ -19,7 +19,7 @@
import pyarrow
import pyarrow as pa
from dateutil import parser
-from pydantic import StrictStr
+from pydantic import StrictStr, root_validator
from pydantic.typing import Literal
from pytz import utc
@@ -51,15 +51,18 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel):
type: Literal["redshift"] = "redshift"
""" Offline store type selector"""
- cluster_id: StrictStr
- """ Redshift cluster identifier """
+ cluster_id: Optional[StrictStr]
+ """ Redshift cluster identifier, for provisioned clusters """
+
+ user: Optional[StrictStr]
+ """ Redshift user name, only required for provisioned clusters """
+
+ workgroup: Optional[StrictStr]
+ """ Redshift workgroup identifier, for serverless """
region: StrictStr
""" Redshift cluster's AWS region """
- user: StrictStr
- """ Redshift user name """
-
database: StrictStr
""" Redshift database name """
@@ -69,6 +72,26 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel):
iam_role: StrictStr
""" IAM Role for Redshift, granting it access to S3 """
+ @root_validator
+ def require_cluster_and_user_or_workgroup(cls, values):
+ """
+ Provisioned Redshift clusters: Require cluster_id and user, ignore workgroup
+ Serverless Redshift: Require workgroup, ignore cluster_id and user
+ """
+ cluster_id, user, workgroup = (
+ values.get("cluster_id"),
+ values.get("user"),
+ values.get("workgroup"),
+ )
+ if not (cluster_id and user) and not workgroup:
+ raise ValueError(
+ "please specify either cluster_id & user if using provisioned clusters, or workgroup if using serverless"
+ )
+ elif cluster_id and workgroup:
+ raise ValueError("cannot specify both cluster_id and workgroup")
+
+ return values
+
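
The validator enforces two mutually exclusive config shapes. Hedged `feature_store.yaml` sketches of both, with placeholder identifiers:

```yaml
# Provisioned cluster: cluster_id and user are required.
offline_store:
  type: redshift
  cluster_id: my-cluster
  user: feast_user
  region: us-west-2
  database: feast
  s3_staging_location: s3://my-bucket/staging
  iam_role: arn:aws:iam::123456789012:role/feast-redshift
---
# Serverless: workgroup replaces cluster_id and user.
offline_store:
  type: redshift
  workgroup: my-workgroup
  region: us-west-2
  database: feast
  s3_staging_location: s3://my-bucket/staging
  iam_role: arn:aws:iam::123456789012:role/feast-redshift
```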
class RedshiftOfflineStore(OfflineStore):
@staticmethod
@@ -248,6 +271,7 @@ def query_generator() -> Iterator[str]:
aws_utils.execute_redshift_statement(
redshift_client,
config.offline_store.cluster_id,
+ config.offline_store.workgroup,
config.offline_store.database,
config.offline_store.user,
f"DROP TABLE IF EXISTS {table_name}",
@@ -294,6 +318,7 @@ def write_logged_features(
table=data,
redshift_data_client=redshift_client,
cluster_id=config.offline_store.cluster_id,
+ workgroup=config.offline_store.workgroup,
database=config.offline_store.database,
user=config.offline_store.user,
s3_resource=s3_resource,
@@ -336,13 +361,15 @@ def offline_write_batch(
table=table,
redshift_data_client=redshift_client,
cluster_id=config.offline_store.cluster_id,
+ workgroup=config.offline_store.workgroup,
database=redshift_options.database
- or config.offline_store.database, # Users can define database in the source if needed but it's not required.
+ # Users can define database in the source if needed but it's not required.
+ or config.offline_store.database,
user=config.offline_store.user,
s3_resource=s3_resource,
s3_path=f"{config.offline_store.s3_staging_location}/push/{uuid.uuid4()}.parquet",
iam_role=config.offline_store.iam_role,
- table_name=redshift_options.table,
+ table_name=redshift_options.fully_qualified_table_name,
schema=pa_schema,
fail_if_exists=False,
)
@@ -400,11 +427,12 @@ def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
@log_exceptions_and_usage
- def _to_df_internal(self) -> pd.DataFrame:
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
with self._query_generator() as query:
return aws_utils.unload_redshift_query_to_df(
self._redshift_client,
self._config.offline_store.cluster_id,
+ self._config.offline_store.workgroup,
self._config.offline_store.database,
self._config.offline_store.user,
self._s3_resource,
@@ -414,11 +442,12 @@ def _to_df_internal(self) -> pd.DataFrame:
)
@log_exceptions_and_usage
- def _to_arrow_internal(self) -> pa.Table:
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table:
with self._query_generator() as query:
return aws_utils.unload_redshift_query_to_pa(
self._redshift_client,
self._config.offline_store.cluster_id,
+ self._config.offline_store.workgroup,
self._config.offline_store.database,
self._config.offline_store.user,
self._s3_resource,
@@ -439,6 +468,7 @@ def to_s3(self) -> str:
aws_utils.execute_redshift_query_and_unload_to_s3(
self._redshift_client,
self._config.offline_store.cluster_id,
+ self._config.offline_store.workgroup,
self._config.offline_store.database,
self._config.offline_store.user,
self._s3_path,
@@ -455,6 +485,7 @@ def to_redshift(self, table_name: str) -> None:
aws_utils.upload_df_to_redshift(
self._redshift_client,
self._config.offline_store.cluster_id,
+ self._config.offline_store.workgroup,
self._config.offline_store.database,
self._config.offline_store.user,
self._s3_resource,
@@ -471,12 +502,18 @@ def to_redshift(self, table_name: str) -> None:
aws_utils.execute_redshift_statement(
self._redshift_client,
self._config.offline_store.cluster_id,
+ self._config.offline_store.workgroup,
self._config.offline_store.database,
self._config.offline_store.user,
query,
)
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: Optional[bool] = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetRedshiftStorage)
self.to_redshift(table_name=storage.redshift_options.table)
@@ -504,6 +541,7 @@ def _upload_entity_df(
aws_utils.upload_df_to_redshift(
redshift_client,
config.offline_store.cluster_id,
+ config.offline_store.workgroup,
config.offline_store.database,
config.offline_store.user,
s3_resource,
@@ -517,6 +555,7 @@ def _upload_entity_df(
aws_utils.execute_redshift_statement(
redshift_client,
config.offline_store.cluster_id,
+ config.offline_store.workgroup,
config.offline_store.database,
config.offline_store.user,
f"CREATE TABLE {table_name} AS ({entity_df})",
@@ -572,6 +611,7 @@ def _get_entity_df_event_timestamp_range(
statement_id = aws_utils.execute_redshift_statement(
redshift_client,
config.offline_store.cluster_id,
+ config.offline_store.workgroup,
config.offline_store.database,
config.offline_store.user,
f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max "
diff --git a/sdk/python/feast/infra/offline_stores/redshift_source.py b/sdk/python/feast/infra/offline_stores/redshift_source.py
index 4279e6a068..52ab50ba00 100644
--- a/sdk/python/feast/infra/offline_stores/redshift_source.py
+++ b/sdk/python/feast/infra/offline_stores/redshift_source.py
@@ -207,18 +207,30 @@ def get_table_column_names_and_types(
if self.table:
try:
paginator = client.get_paginator("describe_table")
- response_iterator = paginator.paginate(
- ClusterIdentifier=config.offline_store.cluster_id,
- Database=(
+
+ paginator_kwargs = {
+ "Database": (
self.database
if self.database
else config.offline_store.database
),
- DbUser=config.offline_store.user,
- Table=self.table,
- Schema=self.schema,
- )
+ "Table": self.table,
+ "Schema": self.schema,
+ }
+
+ if config.offline_store.cluster_id:
+ # Provisioned cluster
+ paginator_kwargs[
+ "ClusterIdentifier"
+ ] = config.offline_store.cluster_id
+ paginator_kwargs["DbUser"] = config.offline_store.user
+ elif config.offline_store.workgroup:
+ # Redshift serverless
+ paginator_kwargs["WorkgroupName"] = config.offline_store.workgroup
+
+ response_iterator = paginator.paginate(**paginator_kwargs)
table = response_iterator.build_full_result()
+
except ClientError as e:
if e.response["Error"]["Code"] == "ValidationException":
raise RedshiftCredentialsError() from e
@@ -233,6 +245,7 @@ def get_table_column_names_and_types(
statement_id = aws_utils.execute_redshift_statement(
client,
config.offline_store.cluster_id,
+ config.offline_store.workgroup,
self.database if self.database else config.offline_store.database,
config.offline_store.user,
f"SELECT * FROM ({self.query}) LIMIT 1",
@@ -281,6 +294,42 @@ def from_proto(cls, redshift_options_proto: DataSourceProto.RedshiftOptions):
return redshift_options
+ @property
+ def fully_qualified_table_name(self) -> str:
+ """
+ The fully qualified table name of this Redshift table.
+
+ Returns:
+            A string in the format of <database>.<schema>.<table>
+ May be empty or None if the table is not set
+ """
+
+ if not self.table:
+ return ""
+
+ # self.table may already contain the database and schema
+ parts = self.table.split(".")
+ if len(parts) == 3:
+ database, schema, table = parts
+ elif len(parts) == 2:
+ database = self.database
+ schema, table = parts
+ elif len(parts) == 1:
+ database = self.database
+ schema = self.schema
+ table = parts[0]
+ else:
+ raise ValueError(
+ f"Invalid table name: {self.table} - can't determine database and schema"
+ )
+
+ if database and schema:
+ return f"{database}.{schema}.{table}"
+ elif schema:
+ return f"{schema}.{table}"
+ else:
+ return table
+
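
A standalone sketch of the parsing rules above, handy for sanity-checking the three accepted table shapes (the helper name is illustrative, not part of Feast):

```python
def fully_qualified(table: str, database: str = "", schema: str = "") -> str:
    # Mirrors RedshiftOptions.fully_qualified_table_name: the table string
    # may already embed the database and/or schema.
    parts = table.split(".")
    if len(parts) == 3:
        database, schema, table = parts
    elif len(parts) == 2:
        schema, table = parts
    elif len(parts) != 1:
        raise ValueError(f"Invalid table name: {table}")
    if database and schema:
        return f"{database}.{schema}.{table}"
    return f"{schema}.{table}" if schema else table

assert fully_qualified("db.sch.tbl") == "db.sch.tbl"
assert fully_qualified("sch.tbl", database="db") == "db.sch.tbl"
assert fully_qualified("tbl", database="db", schema="sch") == "db.sch.tbl"
```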
def to_proto(self) -> DataSourceProto.RedshiftOptions:
"""
        Converts a RedshiftOptions object to its protobuf representation.
@@ -310,7 +359,6 @@ def __init__(self, table_ref: str):
@staticmethod
def from_proto(storage_proto: SavedDatasetStorageProto) -> SavedDatasetStorage:
-
return SavedDatasetRedshiftStorage(
table_ref=RedshiftOptions.from_proto(storage_proto.redshift_storage).table
)
diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py
index e126b05934..38568ce79b 100644
--- a/sdk/python/feast/infra/offline_stores/snowflake.py
+++ b/sdk/python/feast/infra/offline_stores/snowflake.py
@@ -28,11 +28,7 @@
from feast import OnDemandFeatureView
from feast.data_source import DataSource
-from feast.errors import (
- EntitySQLEmptyResults,
- InvalidEntityType,
- InvalidSparkSessionException,
-)
+from feast.errors import EntitySQLEmptyResults, InvalidEntityType
from feast.feature_logging import LoggingConfig, LoggingSource
from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView
from feast.infra.offline_stores import offline_utils
@@ -48,8 +44,8 @@
)
from feast.infra.registry.base_registry import BaseRegistry
from feast.infra.utils.snowflake.snowflake_utils import (
+ GetSnowflakeConnection,
execute_snowflake_statement,
- get_snowflake_conn,
write_pandas,
write_parquet,
)
@@ -74,13 +70,13 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel):
"""Offline store config for Snowflake"""
type: Literal["snowflake.offline"] = "snowflake.offline"
- """ Offline store type selector"""
+ """ Offline store type selector """
config_path: Optional[str] = os.path.expanduser("~/.snowsql/config")
""" Snowflake config path -- absolute path required (Cant use ~)"""
account: Optional[str] = None
- """ Snowflake deployment identifier -- drop .snowflakecomputing.com"""
+ """ Snowflake deployment identifier -- drop .snowflakecomputing.com """
user: Optional[str] = None
""" Snowflake user name """
@@ -89,7 +85,7 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel):
""" Snowflake password """
role: Optional[str] = None
- """ Snowflake role name"""
+ """ Snowflake role name """
warehouse: Optional[str] = None
""" Snowflake warehouse name """
@@ -109,6 +105,9 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel):
blob_export_location: Optional[str] = None
""" Location (in S3, Google storage or Azure storage) where data is offloaded """
+ convert_timestamp_columns: Optional[bool] = None
+ """ Convert timestamp columns on export to a Parquet-supported format """
+
class Config:
allow_population_by_field_name = True
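
A hedged `feature_store.yaml` fragment enabling the new conversion (account and credentials are placeholders):

```yaml
offline_store:
  type: snowflake.offline
  account: my_account
  user: feast_user
  password: ${SNOWFLAKE_PASSWORD}
  role: ANALYST
  warehouse: COMPUTE_WH
  database: FEAST
  convert_timestamp_columns: true
```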
@@ -152,10 +151,31 @@ def pull_latest_from_table_or_query(
+ '"'
)
- if data_source.snowflake_options.warehouse:
- config.offline_store.warehouse = data_source.snowflake_options.warehouse
+ if config.offline_store.convert_timestamp_columns:
+ select_fields = list(
+ map(
+ lambda field_name: f'"{field_name}"',
+ join_key_columns + feature_name_columns,
+ )
+ )
+ select_timestamps = list(
+ map(
+ lambda field_name: f"TO_VARCHAR({field_name}, 'YYYY-MM-DD\"T\"HH24:MI:SS.FFTZH:TZM') AS {field_name}",
+ timestamp_columns,
+ )
+ )
+ inner_field_string = ", ".join(select_fields + select_timestamps)
+ else:
+ select_fields = list(
+ map(
+ lambda field_name: f'"{field_name}"',
+ join_key_columns + feature_name_columns + timestamp_columns,
+ )
+ )
+ inner_field_string = ", ".join(select_fields)
- snowflake_conn = get_snowflake_conn(config.offline_store)
+ with GetSnowflakeConnection(config.offline_store) as conn:
+ snowflake_conn = conn
start_date = start_date.astimezone(tz=utc)
end_date = end_date.astimezone(tz=utc)
@@ -165,7 +185,7 @@ def pull_latest_from_table_or_query(
{field_string}
{f''', TRIM({repr(DUMMY_ENTITY_VAL)}::VARIANT,'"') AS "{DUMMY_ENTITY_ID}"''' if not join_key_columns else ""}
FROM (
- SELECT {field_string},
+ SELECT {inner_field_string},
ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS "_feast_row"
FROM {from_expression}
WHERE "{timestamp_field}" BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}'
@@ -205,10 +225,8 @@ def pull_all_from_table_or_query(
+ '"'
)
- if data_source.snowflake_options.warehouse:
- config.offline_store.warehouse = data_source.snowflake_options.warehouse
-
- snowflake_conn = get_snowflake_conn(config.offline_store)
+ with GetSnowflakeConnection(config.offline_store) as conn:
+ snowflake_conn = conn
start_date = start_date.astimezone(tz=utc)
end_date = end_date.astimezone(tz=utc)
@@ -241,7 +259,8 @@ def get_historical_features(
for fv in feature_views:
assert isinstance(fv.batch_source, SnowflakeSource)
- snowflake_conn = get_snowflake_conn(config.offline_store)
+ with GetSnowflakeConnection(config.offline_store) as conn:
+ snowflake_conn = conn
entity_schema = _get_entity_schema(entity_df, snowflake_conn, config)
@@ -319,7 +338,8 @@ def write_logged_features(
):
assert isinstance(logging_config.destination, SnowflakeLoggingDestination)
- snowflake_conn = get_snowflake_conn(config.offline_store)
+ with GetSnowflakeConnection(config.offline_store) as conn:
+ snowflake_conn = conn
if isinstance(data, Path):
write_parquet(
@@ -359,7 +379,8 @@ def offline_write_batch(
if table.schema != pa_schema:
table = table.cast(pa_schema)
- snowflake_conn = get_snowflake_conn(config.offline_store)
+ with GetSnowflakeConnection(config.offline_store) as conn:
+ snowflake_conn = conn
write_pandas(
snowflake_conn,
@@ -410,54 +431,86 @@ def full_feature_names(self) -> bool:
def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
return self._on_demand_feature_views
- def _to_df_internal(self) -> pd.DataFrame:
- with self._query_generator() as query:
-
- df = execute_snowflake_statement(
- self.snowflake_conn, query
- ).fetch_pandas_all()
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
+ df = execute_snowflake_statement(
+ self.snowflake_conn, self.to_sql()
+ ).fetch_pandas_all()
return df
- def _to_arrow_internal(self) -> pyarrow.Table:
- with self._query_generator() as query:
-
- pa_table = execute_snowflake_statement(
- self.snowflake_conn, query
- ).fetch_arrow_all()
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
+ pa_table = execute_snowflake_statement(
+ self.snowflake_conn, self.to_sql()
+ ).fetch_arrow_all()
- if pa_table:
+ if pa_table:
+ return pa_table
+ else:
+ empty_result = execute_snowflake_statement(
+ self.snowflake_conn, self.to_sql()
+ )
- return pa_table
- else:
- empty_result = execute_snowflake_statement(self.snowflake_conn, query)
+ return pyarrow.Table.from_pandas(
+ pd.DataFrame(columns=[md.name for md in empty_result.description])
+ )
- return pyarrow.Table.from_pandas(
- pd.DataFrame(columns=[md.name for md in empty_result.description])
- )
+ def to_sql(self) -> str:
+ """
+ Returns the SQL query that will be executed in Snowflake to build the historical feature table.
+ """
+ with self._query_generator() as query:
+ return query
- def to_snowflake(self, table_name: str, temporary=False) -> None:
+ def to_snowflake(
+ self, table_name: str, allow_overwrite: bool = False, temporary: bool = False
+ ) -> None:
"""Save dataset as a new Snowflake table"""
if self.on_demand_feature_views:
transformed_df = self.to_df()
+ if allow_overwrite:
+ query = f'DROP TABLE IF EXISTS "{table_name}"'
+ execute_snowflake_statement(self.snowflake_conn, query)
+
write_pandas(
- self.snowflake_conn, transformed_df, table_name, auto_create_table=True
+ self.snowflake_conn,
+ transformed_df,
+ table_name,
+ auto_create_table=True,
+ create_temp_table=temporary,
)
- return None
+ else:
+ query = f'CREATE {"OR REPLACE" if allow_overwrite else ""} {"TEMPORARY" if temporary else ""} TABLE {"IF NOT EXISTS" if not allow_overwrite else ""} "{table_name}" AS ({self.to_sql()});\n'
+ execute_snowflake_statement(self.snowflake_conn, query)
- with self._query_generator() as query:
- query = f'CREATE {"TEMPORARY" if temporary else ""} TABLE IF NOT EXISTS "{table_name}" AS ({query});\n'
+ return None
- execute_snowflake_statement(self.snowflake_conn, query)
+ def to_arrow_batches(self) -> Iterator[pyarrow.Table]:
- def to_sql(self) -> str:
- """
- Returns the SQL query that will be executed in Snowflake to build the historical feature table.
- """
- with self._query_generator() as query:
- return query
+ table_name = "temp_arrow_batches_" + uuid.uuid4().hex
+
+ self.to_snowflake(table_name=table_name, allow_overwrite=True, temporary=True)
+
+ query = f'SELECT * FROM "{table_name}"'
+ arrow_batches = execute_snowflake_statement(
+ self.snowflake_conn, query
+ ).fetch_arrow_batches()
+
+ return arrow_batches
+
+ def to_pandas_batches(self) -> Iterator[pd.DataFrame]:
+
+ table_name = "temp_pandas_batches_" + uuid.uuid4().hex
+
+ self.to_snowflake(table_name=table_name, allow_overwrite=True, temporary=True)
+
+ query = f'SELECT * FROM "{table_name}"'
+        pandas_batches = execute_snowflake_statement(
+            self.snowflake_conn, query
+        ).fetch_pandas_batches()
+
+        return pandas_batches
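
Both batch methods materialize the query into a temporary table and then stream it back in chunks. A hedged usage sketch, assuming `job` is a Snowflake retrieval job returned by `get_historical_features`:

```python
# Arrow batches: each element is a pyarrow.Table chunk.
for batch in job.to_arrow_batches():
    print(batch.num_rows)

# Pandas batches: each element is a pandas.DataFrame chunk.
for chunk in job.to_pandas_batches():
    print(len(chunk))
```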
def to_spark_df(self, spark_session: "SparkSession") -> "DataFrame":
"""
@@ -471,39 +524,34 @@ def to_spark_df(self, spark_session: "SparkSession") -> "DataFrame":
"""
try:
- from pyspark.sql import DataFrame, SparkSession
+ from pyspark.sql import DataFrame
except ImportError as e:
from feast.errors import FeastExtrasDependencyImportError
raise FeastExtrasDependencyImportError("spark", str(e))
- if isinstance(spark_session, SparkSession):
- with self._query_generator() as query:
-
- arrow_batches = execute_snowflake_statement(
- self.snowflake_conn, query
- ).fetch_arrow_batches()
-
- if arrow_batches:
- spark_df = reduce(
- DataFrame.unionAll,
- [
- spark_session.createDataFrame(batch.to_pandas())
- for batch in arrow_batches
- ],
- )
-
- return spark_df
+ spark_session.conf.set("spark.sql.execution.arrow.pyspark.enabled", "true")
- else:
- raise EntitySQLEmptyResults(query)
+ # This can be improved by parallelizing the read of chunks
+ pandas_batches = self.to_pandas_batches()
- else:
- raise InvalidSparkSessionException(spark_session)
+ spark_df = reduce(
+ DataFrame.unionAll,
+ [spark_session.createDataFrame(batch) for batch in pandas_batches],
+ )
+ return spark_df
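
After the rewrite, `to_spark_df` no longer type-checks the session or raises on empty results; it simply unions the pandas chunks into one Spark DataFrame. A hedged usage sketch (assumes a local Spark installation and a `job` retrieval job as above):

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[*]").getOrCreate()
spark_df = job.to_spark_df(spark)  # job: Snowflake retrieval job (assumed)
spark_df.show(5)
```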
- def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False):
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: bool = False,
+ timeout: Optional[int] = None,
+ ):
assert isinstance(storage, SavedDatasetSnowflakeStorage)
- self.to_snowflake(table_name=storage.snowflake_options.table)
+
+ self.to_snowflake(
+ table_name=storage.snowflake_options.table, allow_overwrite=allow_overwrite
+ )
@property
def metadata(self) -> Optional[RetrievalMetadata]:
@@ -526,10 +574,10 @@ def to_remote_storage(self) -> List[str]:
)
table = f"temporary_{uuid.uuid4().hex}"
- self.to_snowflake(table)
+ self.to_snowflake(table, temporary=True)
query = f"""
- COPY INTO '{self.config.offline_store.blob_export_location}/{table}' FROM "{self.config.offline_store.database}"."{self.config.offline_store.schema_}"."{table}"\n
+ COPY INTO '{self.export_path}/{table}' FROM "{self.config.offline_store.database}"."{self.config.offline_store.schema_}"."{table}"\n
STORAGE_INTEGRATION = {self.config.offline_store.storage_integration_name}\n
FILE_FORMAT = (TYPE = PARQUET)
DETAILED_OUTPUT = TRUE
diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py
index cc5208a676..95bd46f1ec 100644
--- a/sdk/python/feast/infra/offline_stores/snowflake_source.py
+++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py
@@ -1,3 +1,4 @@
+import warnings
from typing import Callable, Dict, Iterable, Optional, Tuple
from typeguard import typechecked
@@ -45,7 +46,6 @@ def __init__(
timestamp_field (optional): Event timestamp field used for point in time
joins of feature values.
database (optional): Snowflake database where the features are stored.
- warehouse (optional): Snowflake warehouse where the database is stored.
schema (optional): Snowflake schema in which the table is located.
table (optional): Snowflake table where the features are stored. Exactly one of 'table'
and 'query' must be specified.
@@ -60,6 +60,14 @@ def __init__(
owner (optional): The owner of the snowflake source, typically the email of the primary
maintainer.
"""
+
+ if warehouse:
+ warnings.warn(
+                "Specifying a warehouse within a SnowflakeSource is deprecated. "
+                "Starting in v0.32.0, the warehouse from the Snowflake store config will be used instead.",
+ RuntimeWarning,
+ )
+
if table is None and query is None:
raise ValueError('No "table" or "query" argument provided.')
if table and query:
@@ -73,7 +81,6 @@ def __init__(
schema=_schema,
table=table,
query=query,
- warehouse=warehouse,
)
# If no name, use the table as the default name.
@@ -109,7 +116,6 @@ def from_proto(data_source: DataSourceProto):
database=data_source.snowflake_options.database,
schema=data_source.snowflake_options.schema,
table=data_source.snowflake_options.table,
- warehouse=data_source.snowflake_options.warehouse,
created_timestamp_column=data_source.created_timestamp_column,
field_mapping=dict(data_source.field_mapping),
query=data_source.snowflake_options.query,
@@ -134,7 +140,6 @@ def __eq__(self, other):
and self.schema == other.schema
and self.table == other.table
and self.query == other.query
- and self.warehouse == other.warehouse
)
@property
@@ -157,11 +162,6 @@ def query(self):
"""Returns the snowflake options of this snowflake source."""
return self.snowflake_options.query
- @property
- def warehouse(self):
- """Returns the warehouse of this snowflake source."""
- return self.snowflake_options.warehouse
-
def to_proto(self) -> DataSourceProto:
"""
Converts a SnowflakeSource object to its protobuf representation.
@@ -213,13 +213,13 @@ def get_table_column_names_and_types(
"""
from feast.infra.offline_stores.snowflake import SnowflakeOfflineStoreConfig
from feast.infra.utils.snowflake.snowflake_utils import (
+ GetSnowflakeConnection,
execute_snowflake_statement,
- get_snowflake_conn,
)
assert isinstance(config.offline_store, SnowflakeOfflineStoreConfig)
- with get_snowflake_conn(config.offline_store) as conn:
+ with GetSnowflakeConnection(config.offline_store) as conn:
query = f"SELECT * FROM {self.get_table_query_string()} LIMIT 5"
cursor = execute_snowflake_statement(conn, query)
@@ -250,7 +250,7 @@ def get_table_column_names_and_types(
else:
column = row["column_name"]
- with get_snowflake_conn(config.offline_store) as conn:
+ with GetSnowflakeConnection(config.offline_store) as conn:
query = f'SELECT MAX("{column}") AS "{column}" FROM {self.get_table_query_string()}'
result = execute_snowflake_statement(
conn, query
@@ -335,13 +335,11 @@ def __init__(
schema: Optional[str],
table: Optional[str],
query: Optional[str],
- warehouse: Optional[str],
):
self.database = database or ""
self.schema = schema or ""
self.table = table or ""
self.query = query or ""
- self.warehouse = warehouse or ""
@classmethod
def from_proto(cls, snowflake_options_proto: DataSourceProto.SnowflakeOptions):
@@ -359,7 +357,6 @@ def from_proto(cls, snowflake_options_proto: DataSourceProto.SnowflakeOptions):
schema=snowflake_options_proto.schema,
table=snowflake_options_proto.table,
query=snowflake_options_proto.query,
- warehouse=snowflake_options_proto.warehouse,
)
return snowflake_options
@@ -376,7 +373,6 @@ def to_proto(self) -> DataSourceProto.SnowflakeOptions:
schema=self.schema,
table=self.table,
query=self.query,
- warehouse=self.warehouse,
)
return snowflake_options_proto
@@ -393,7 +389,6 @@ def __init__(self, table_ref: str):
schema=None,
table=table_ref,
query=None,
- warehouse=None,
)
@staticmethod
diff --git a/sdk/python/feast/infra/online_stores/bigtable.py b/sdk/python/feast/infra/online_stores/bigtable.py
index e08bc44bdb..30561d0840 100644
--- a/sdk/python/feast/infra/online_stores/bigtable.py
+++ b/sdk/python/feast/infra/online_stores/bigtable.py
@@ -335,4 +335,8 @@ def teardown(
def _get_client(
self, online_config: BigtableOnlineStoreConfig, admin: bool = False
):
- return bigtable.Client(project=online_config.project_id, admin=admin)
+ if self._client is None:
+ self._client = bigtable.Client(
+ project=online_config.project_id, admin=admin
+ )
+ return self._client
diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md
new file mode 100644
index 0000000000..8c7f102239
--- /dev/null
+++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md
@@ -0,0 +1,89 @@
+# Hazelcast Online Store
+
+This contribution makes it possible to use [Hazelcast](https://hazelcast.com/) as an online store for Feast.
+
+Once the Hazelcast client configuration is provided in the `feature_store.yaml` file, everything else
+is handled as with any other online store: schema creation, reads/writes from/to Hazelcast, and remove operations.
+
+## Quick usage
+
+The following refers to the [Feast quickstart](https://docs.feast.dev/getting-started/quickstart) page.
+Only Step 2 differs from that tutorial, since it requires you to configure your Hazelcast online store.
+
+### Creating the feature repository
+
+The easiest way to get started is to use the Feast CLI to initialize a new
+feature store. Once Feast is installed, the command
+
+```
+feast init FEATURE_STORE_NAME -t hazelcast
+```
+
+will interactively help you create the `feature_store.yaml` with the
+required configuration details to access your Hazelcast cluster.
+
+Alternatively, you can run `feast init FEATURE_STORE_NAME`, as described
+in the quickstart, and then manually edit the `online_store` section in
+the `feature_store.yaml` file as detailed below.
+
+The following steps (setup of feature definitions, deployment of the store,
+generation of training data, materialization, fetching of online/offline
+features) proceed exactly as in the general Feast quickstart instructions.
+
+#### Hazelcast setup
+
+In order to use [Hazelcast](https://hazelcast.com) as an online store, you need a running Hazelcast cluster.
+You can easily create one with Hazelcast Viridian Serverless, or deploy a cluster on a local/remote machine.
+See this [getting started](https://hazelcast.com/get-started/) page for more details.
+
+The Hazelcast online store can connect to a local/remote cluster or to a Hazelcast Viridian Serverless cluster.
+The following example connects to a local cluster named "dev", running on port 5701, with TLS/SSL enabled.
+
+```yaml
+[...]
+online_store:
+ type: hazelcast
+ cluster_name: dev
+ cluster_members: ["localhost:5701"]
+ ssl_cafile_path: /path/to/ca/file
+ ssl_certfile_path: /path/to/cert/file
+ ssl_keyfile_path: /path/to/key/file
+  ssl_password: ${SSL_PASSWORD} # The password will be read from the `SSL_PASSWORD` environment variable.
+ key_ttl_seconds: 86400 # The default is 0 and means infinite.
+```
+
+If you want to connect to a Hazelcast Viridian cluster instead of a local/remote one, specify your configuration as follows:
+
+```yaml
+[...]
+online_store:
+ type: hazelcast
+ cluster_name: YOUR_CLUSTER_ID
+ discovery_token: YOUR_DISCOVERY_TOKEN
+ ssl_cafile_path: /path/to/ca/file
+ ssl_certfile_path: /path/to/cert/file
+ ssl_keyfile_path: /path/to/key/file
+  ssl_password: ${SSL_PASSWORD} # The password will be read from the `SSL_PASSWORD` environment variable.
+ key_ttl_seconds: 86400 # The default is 0 and means infinite.
+```
+
+#### TTL configuration
+
+TTL is the maximum time, in seconds, that each feature can stay idle in the map.
+It limits the lifetime of a feature relative to the last read or write access performed on it;
+features whose idle period exceeds this limit are expired and evicted automatically.
+A feature is idle if no get or put is called on it.
+Valid values are integers between 0 and Integer.MAX_VALUE.
+The default value is 0, which means infinite.
+
+```yaml
+[...]
+online_store:
+ [...]
+ key_ttl_seconds: 86400
+```
+
+### More info
+
+You can learn more about Hazelcast from the [Hazelcast documentation](https://docs.hazelcast.com/home/).
+
diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/__init__.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py
new file mode 100644
index 0000000000..7ec803a69c
--- /dev/null
+++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py
@@ -0,0 +1,321 @@
+#
+# Copyright 2019 The Feast Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Hazelcast online store for Feast.
+"""
+import base64
+import threading
+from datetime import datetime, timezone
+from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple
+
+import pytz
+from hazelcast.client import HazelcastClient
+from hazelcast.core import HazelcastJsonValue
+from hazelcast.discovery import HazelcastCloudDiscovery
+from pydantic import StrictStr
+
+from feast import Entity, FeatureView, RepoConfig
+from feast.infra.key_encoding_utils import serialize_entity_key
+from feast.infra.online_stores.online_store import OnlineStore
+from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
+from feast.protos.feast.types.Value_pb2 import Value as ValueProto
+from feast.repo_config import FeastConfigBaseModel
+from feast.usage import log_exceptions_and_usage
+
+# Exception messages
+EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS = (
+ "Unexpected configuration object (not a HazelcastOnlineStoreConfig instance)"
+)
+
+# Hazelcast schema names for each field
+D_FEATURE_NAME = "feature_name"
+D_FEATURE_VALUE = "feature_value"
+D_ENTITY_KEY = "entity_key"
+D_EVENT_TS = "event_ts"
+D_CREATED_TS = "created_ts"
+
+
+class HazelcastInvalidConfig(Exception):
+ def __init__(self, msg: str):
+ super().__init__(msg)
+
+
+class HazelcastOnlineStoreConfig(FeastConfigBaseModel):
+ """Online store config for Hazelcast store"""
+
+ type: Literal["hazelcast"] = "hazelcast"
+ """Online store type selector"""
+
+ cluster_name: StrictStr = "dev"
+    """Name of the cluster you want to connect to. The default cluster name is `dev`."""
+
+ cluster_members: Optional[List[str]] = ["localhost:5701"]
+    """List of member addresses to connect to in your cluster"""
+
+ discovery_token: Optional[StrictStr] = ""
+ """The discovery token of your Hazelcast Viridian cluster"""
+
+ ssl_cafile_path: Optional[StrictStr] = ""
+ """Absolute path of CA certificates in PEM format."""
+
+ ssl_certfile_path: Optional[StrictStr] = ""
+ """Absolute path of the client certificate in PEM format."""
+
+ ssl_keyfile_path: Optional[StrictStr] = ""
+ """Absolute path of the private key file for the client certificate in the PEM format."""
+
+ ssl_password: Optional[StrictStr] = ""
+ """Password for decrypting the keyfile if it is encrypted."""
+
+ key_ttl_seconds: Optional[int] = 0
+    """Hazelcast key TTL (in seconds) for expiring entities"""
+
+
+class HazelcastOnlineStore(OnlineStore):
+ """
+ Hazelcast online store implementation for Feast
+
+ Attributes:
+ _client: Hazelcast client connection.
+        _lock: Lock preventing race conditions while creating the client connection.
+ """
+
+ _client: Optional[HazelcastClient] = None
+ _lock = threading.Lock()
+
+ def _get_client(self, config: HazelcastOnlineStoreConfig):
+ """
+        Establish the client connection to the Hazelcast cluster, if not yet
+        created, and return it.
+
+        Depending on the user config, the connection may target a Hazelcast
+        Viridian cluster and/or have SSL enabled.
+
+ Args:
+ config: The HazelcastOnlineStoreConfig for the online store.
+ """
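+        # Double-checked locking: test once without the lock for speed, then
+        # re-check under the lock so that only one thread creates the client.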
+ if self._client is None:
+ with self._lock:
+ if self._client is None:
+ if config.discovery_token != "":
+ HazelcastCloudDiscovery._CLOUD_URL_BASE = (
+ "api.viridian.hazelcast.com"
+ )
+ self._client = HazelcastClient(
+ cluster_name=config.cluster_name,
+ cloud_discovery_token=config.discovery_token,
+ statistics_enabled=True,
+ ssl_enabled=True,
+ ssl_cafile=config.ssl_cafile_path,
+ ssl_certfile=config.ssl_certfile_path,
+ ssl_keyfile=config.ssl_keyfile_path,
+ ssl_password=config.ssl_password,
+ )
+ elif config.ssl_cafile_path != "":
+ self._client = HazelcastClient(
+ cluster_name=config.cluster_name,
+ statistics_enabled=True,
+ ssl_enabled=True,
+ ssl_cafile=config.ssl_cafile_path,
+ ssl_certfile=config.ssl_certfile_path,
+ ssl_keyfile=config.ssl_keyfile_path,
+ ssl_password=config.ssl_password,
+ )
+ else:
+ self._client = HazelcastClient(
+ statistics_enabled=True,
+ cluster_members=config.cluster_members,
+ cluster_name=config.cluster_name,
+ )
+ return self._client
+
+ @log_exceptions_and_usage(online_store="hazelcast")
+ def online_write_batch(
+ self,
+ config: RepoConfig,
+ table: FeatureView,
+ data: List[
+ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
+ ],
+ progress: Optional[Callable[[int], Any]],
+ ) -> None:
+ online_store_config = config.online_store
+ if not isinstance(online_store_config, HazelcastOnlineStoreConfig):
+ raise HazelcastInvalidConfig(
+ EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS
+ )
+
+ client = self._get_client(online_store_config)
+ fv_map = client.get_map(_map_name(config.project, table))
+
+ for entity_key, values, event_ts, created_ts in data:
+ entity_key_str = base64.b64encode(
+ serialize_entity_key(
+ entity_key,
+ entity_key_serialization_version=2,
+ )
+ ).decode("utf-8")
+ event_ts_utc = pytz.utc.localize(event_ts, is_dst=None).timestamp()
+ created_ts_utc = 0.0
+ if created_ts is not None:
+ created_ts_utc = pytz.utc.localize(created_ts, is_dst=None).timestamp()
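+            # Each feature is written as its own map entry, keyed by the
+            # serialized entity key concatenated with the feature name.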
+ for feature_name, value in values.items():
+ feature_value = base64.b64encode(value.SerializeToString()).decode(
+ "utf-8"
+ )
+ hz_combined_key = entity_key_str + feature_name
+ fv_map.put(
+ hz_combined_key,
+ HazelcastJsonValue(
+ {
+ D_ENTITY_KEY: entity_key_str,
+ D_FEATURE_NAME: feature_name,
+ D_FEATURE_VALUE: feature_value,
+ D_EVENT_TS: event_ts_utc,
+ D_CREATED_TS: created_ts_utc,
+ }
+ ),
+ online_store_config.key_ttl_seconds,
+ )
+ if progress:
+ progress(1)
+
+ def online_read(
+ self,
+ config: RepoConfig,
+ table: FeatureView,
+ entity_keys: List[EntityKeyProto],
+ requested_features: Optional[List[str]] = None,
+ ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
+
+ online_store_config = config.online_store
+ if not isinstance(online_store_config, HazelcastOnlineStoreConfig):
+ raise HazelcastInvalidConfig(
+ EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS
+ )
+
+ client = self._get_client(online_store_config)
+ entries: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []
+ fv_map = client.get_map(_map_name(config.project, table))
+
+ hz_keys = []
+ entity_keys_str = {}
+ for entity_key in entity_keys:
+ entity_key_str = base64.b64encode(
+ serialize_entity_key(
+ entity_key,
+ entity_key_serialization_version=2,
+ )
+ ).decode("utf-8")
+ if requested_features:
+ feature_keys = [
+ entity_key_str + feature for feature in requested_features
+ ]
+ else:
+ feature_keys = [entity_key_str + f.name for f in table.features]
+ hz_keys.extend(feature_keys)
+ entity_keys_str[entity_key_str] = feature_keys
+
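+        # Fetch all requested entries in one bulk call; keys that are absent
+        # from the map are simply missing from the result dict.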
+ data = fv_map.get_all(hz_keys).result()
+ entities = []
+ for key in hz_keys:
+ try:
+ data[key] = data[key].loads()
+ entities.append(data[key][D_ENTITY_KEY])
+ except KeyError:
+ continue
+
+ for key in entity_keys_str:
+ if key in entities:
+ entry = {}
+ event_ts = None
+ for f_key in entity_keys_str[key]:
+ row = data[f_key]
+ value = ValueProto()
+ value.ParseFromString(base64.b64decode(row[D_FEATURE_VALUE]))
+ entry[row[D_FEATURE_NAME]] = value
+ event_ts = datetime.fromtimestamp(row[D_EVENT_TS], tz=timezone.utc)
+ entries.append((event_ts, entry))
+ else:
+ entries.append((None, None))
+ return entries
+
+ def update(
+ self,
+ config: RepoConfig,
+ tables_to_delete: Sequence[FeatureView],
+ tables_to_keep: Sequence[FeatureView],
+ entities_to_delete: Sequence[Entity],
+ entities_to_keep: Sequence[Entity],
+ partial: bool,
+ ):
+ online_store_config = config.online_store
+ if not isinstance(online_store_config, HazelcastOnlineStoreConfig):
+ raise HazelcastInvalidConfig(
+ EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS
+ )
+
+ client = self._get_client(online_store_config)
+ project = config.project
+
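+        # Expose each feature view map to Hazelcast SQL by (re)creating an
+        # explicit mapping with a json-flat value format.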
+ for table in tables_to_keep:
+ client.sql.execute(
+ f"""CREATE OR REPLACE MAPPING {_map_name(project, table)} (
+ __key VARCHAR,
+ {D_ENTITY_KEY} VARCHAR,
+ {D_FEATURE_NAME} VARCHAR,
+ {D_FEATURE_VALUE} VARCHAR,
+ {D_EVENT_TS} DECIMAL,
+ {D_CREATED_TS} DECIMAL
+ )
+ TYPE IMap
+ OPTIONS (
+ 'keyFormat' = 'varchar',
+ 'valueFormat' = 'json-flat'
+ )
+ """
+ ).result()
+
+ for table in tables_to_delete:
+ client.sql.execute(
+ f"DELETE FROM {_map_name(config.project, table)}"
+ ).result()
+ client.sql.execute(
+ f"DROP MAPPING IF EXISTS {_map_name(config.project, table)}"
+ ).result()
+
+ def teardown(
+ self,
+ config: RepoConfig,
+ tables: Sequence[FeatureView],
+ entities: Sequence[Entity],
+ ):
+ online_store_config = config.online_store
+ if not isinstance(online_store_config, HazelcastOnlineStoreConfig):
+ raise HazelcastInvalidConfig(
+ EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS
+ )
+
+ client = self._get_client(online_store_config)
+ project = config.project
+
+ for table in tables:
+ client.sql.execute(f"DELETE FROM {_map_name(config.project, table)}")
+ client.sql.execute(f"DROP MAPPING IF EXISTS {_map_name(project, table)}")
+
+
+def _map_name(project: str, table: FeatureView) -> str:
+ return f"{project}_{table.name}"
diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py
new file mode 100644
index 0000000000..5b3ea6e307
--- /dev/null
+++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py
@@ -0,0 +1,26 @@
+#
+# Copyright 2019 The Feast Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from tests.integration.feature_repos.integration_test_repo_config import (
+ IntegrationTestRepoConfig,
+)
+from tests.integration.feature_repos.universal.online_store.hazelcast import (
+ HazelcastOnlineStoreCreator,
+)
+
+FULL_REPO_CONFIGS = [
+ IntegrationTestRepoConfig(online_store_creator=HazelcastOnlineStoreCreator),
+]
diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py
index 144b242a1d..a12e66f109 100644
--- a/sdk/python/feast/infra/online_stores/contrib/postgres.py
+++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py
@@ -1,3 +1,4 @@
+import contextlib
import logging
from collections import defaultdict
from datetime import datetime
@@ -7,14 +8,15 @@
import pytz
from psycopg2 import sql
from psycopg2.extras import execute_values
+from psycopg2.pool import SimpleConnectionPool
from pydantic.schema import Literal
from feast import Entity
from feast.feature_view import FeatureView
from feast.infra.key_encoding_utils import serialize_entity_key
from feast.infra.online_stores.online_store import OnlineStore
-from feast.infra.utils.postgres.connection_utils import _get_conn
-from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig
+from feast.infra.utils.postgres.connection_utils import _get_conn, _get_connection_pool
+from feast.infra.utils.postgres.postgres_config import ConnectionType, PostgreSQLConfig
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.repo_config import RepoConfig
@@ -27,12 +29,21 @@ class PostgreSQLOnlineStoreConfig(PostgreSQLConfig):
class PostgreSQLOnlineStore(OnlineStore):
_conn: Optional[psycopg2._psycopg.connection] = None
+ _conn_pool: Optional[SimpleConnectionPool] = None
+ @contextlib.contextmanager
def _get_conn(self, config: RepoConfig):
- if not self._conn:
- assert config.online_store.type == "postgres"
- self._conn = _get_conn(config.online_store)
- return self._conn
+ assert config.online_store.type == "postgres"
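+        # In pooled mode, borrow a connection for the duration of the context
+        # and return it to the pool afterwards; otherwise reuse a single
+        # cached connection.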
+ if config.online_store.conn_type == ConnectionType.pool:
+ if not self._conn_pool:
+ self._conn_pool = _get_connection_pool(config.online_store)
+            connection = self._conn_pool.getconn()
+            try:
+                yield connection
+            finally:
+                self._conn_pool.putconn(connection)
+ else:
+ if not self._conn:
+ self._conn = _get_conn(config.online_store)
+ yield self._conn
@log_exceptions_and_usage(online_store="postgres")
def online_write_batch(
diff --git a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py b/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py b/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py
new file mode 100644
index 0000000000..37cfbd86af
--- /dev/null
+++ b/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py
@@ -0,0 +1,525 @@
+# Copyright 2022 The Feast Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import random
+import time
+from datetime import datetime
+from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, cast
+
+import requests
+from rockset.exceptions import BadRequestException, RocksetException
+from rockset.models import QueryRequestSql
+from rockset.query_paginator import QueryPaginator
+from rockset.rockset_client import RocksetClient
+
+from feast.entity import Entity
+from feast.feature_view import FeatureView
+from feast.infra.online_stores.helpers import compute_entity_id
+from feast.infra.online_stores.online_store import OnlineStore
+from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
+from feast.protos.feast.types.Value_pb2 import Value as ValueProto
+from feast.repo_config import FeastConfigBaseModel, RepoConfig
+from feast.usage import log_exceptions_and_usage
+
+logger = logging.getLogger(__name__)
+
+
+class RocksetOnlineStoreConfig(FeastConfigBaseModel):
+ """Online store config for Rockset store"""
+
+ type: Literal["rockset"] = "rockset"
+ """Online store type selector"""
+
+ api_key: Optional[str] = None
+    """API key to be used for the Rockset account. If not set, the env var ROCKSET_APIKEY will be used."""
+
+ host: Optional[str] = None
+    """The host URL for Rockset requests. If not set, the env var ROCKSET_APISERVER will be used."""
+
+ read_pagination_batch_size: int = 100
+    """Batch size of records that will be returned per page when paginating a batched read"""
+
+ collection_created_timeout_secs: int = 60
+ """The amount of time, in seconds, we will wait for the collection to become visible to the API"""
+
+ collection_ready_timeout_secs: int = 30 * 60
+ """The amount of time, in seconds, we will wait for the collection to enter READY state"""
+
+ fence_all_writes: bool = True
+    """Whether to wait for all writes to be flushed from the log and queryable. If False, documents that are written may not be seen immediately in subsequent reads"""
+
+ fence_timeout_secs: int = 10 * 60
+ """The amount of time we will wait, in seconds, for the write fence to be passed"""
+
+ initial_request_backoff_secs: int = 2
+ """Initial backoff, in seconds, we will wait between requests when polling for a response"""
+
+ max_request_backoff_secs: int = 30
+    """Max backoff, in seconds, we will wait between requests when polling for a response"""
+
+ max_request_attempts: int = 10 * 1000
+ """The max amount of times we will retry a failed request"""
+
+
+class RocksetOnlineStore(OnlineStore):
+ """
+ Rockset implementation of the online store interface.
+
+ Attributes:
+ _rockset_client: Rockset openapi client.
+ """
+
+ _rockset_client = None
+
+ @log_exceptions_and_usage(online_store="rockset")
+ def online_write_batch(
+ self,
+ config: RepoConfig,
+ table: FeatureView,
+ data: List[
+ Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
+ ],
+ progress: Optional[Callable[[int], Any]],
+ ) -> None:
+ """
+ Write a batch of feature rows to online Rockset store.
+
+ Args:
+ config: The RepoConfig for the current FeatureStore.
+ table: Feast FeatureView.
+ data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key,
+ a dict containing feature values, an event timestamp for the row, and
+ the created timestamp for the row if it exists.
+ progress: Optional function to be called once every mini-batch of rows is written to
+ the online store. Can be used to display progress.
+ """
+
+ online_config = config.online_store
+ assert isinstance(online_config, RocksetOnlineStoreConfig)
+
+ rs = self.get_rockset_client(online_config)
+ collection_name = self.get_collection_name(config, table)
+
+ # We need to deduplicate on entity_id and we will save the latest timestamp version.
+ dedup_dict = {}
+ for feature_vals in data:
+ entity_key, features, timestamp, created_ts = feature_vals
+ serialized_key = compute_entity_id(
+ entity_key=entity_key,
+ entity_key_serialization_version=config.entity_key_serialization_version,
+ )
+
+ if serialized_key not in dedup_dict:
+ dedup_dict[serialized_key] = feature_vals
+ continue
+
+            # If the entity already exists in the dictionary, ignore the entry if it has a lower timestamp.
+ if timestamp <= dedup_dict[serialized_key][2]:
+ continue
+
+ dedup_dict[serialized_key] = feature_vals
+
+ request_batch = []
+ for serialized_key, feature_vals in dedup_dict.items():
+ document = {}
+ entity_key, features, timestamp, created_ts = feature_vals
+ document["_id"] = serialized_key
+
+ # Rockset python client currently does not handle datetime correctly and will convert
+ # to string instead of native Rockset DATETIME. This will be fixed, but until then we
+ # use isoformat.
+ document["event_ts"] = timestamp.isoformat()
+ document["created_ts"] = (
+ "" if created_ts is None else created_ts.isoformat()
+ )
+ for k, v in features.items():
+ # Rockset client currently does not support bytes type.
+ document[k] = v.SerializeToString().hex()
+
+ # TODO: Implement async batching with retries.
+ request_batch.append(document)
+
+ if progress:
+ progress(1)
+
+ resp = rs.Documents.add_documents(
+ collection=collection_name, data=request_batch
+ )
+ if online_config.fence_all_writes:
+ self.wait_for_fence(rs, collection_name, resp["last_offset"], online_config)
+
+ return None
+
+ @log_exceptions_and_usage(online_store="rockset")
+ def online_read(
+ self,
+ config: RepoConfig,
+ table: FeatureView,
+ entity_keys: List[EntityKeyProto],
+ requested_features: Optional[List[str]] = None,
+ ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
+ """
+ Retrieve feature values from the online Rockset store.
+
+ Args:
+ config: The RepoConfig for the current FeatureStore.
+ table: Feast FeatureView.
+ entity_keys: a list of entity keys that should be read from the FeatureStore.
+ """
+ online_config = config.online_store
+ assert isinstance(online_config, RocksetOnlineStoreConfig)
+
+ rs = self.get_rockset_client(online_config)
+ collection_name = self.get_collection_name(config, table)
+
+ feature_list = ""
+ if requested_features is not None:
+ feature_list = ",".join(requested_features)
+
+ entity_serialized_key_list = [
+ compute_entity_id(
+ k,
+ entity_key_serialization_version=config.entity_key_serialization_version,
+ )
+ for k in entity_keys
+ ]
+
+ entity_query_str = ",".join(
+ "'{id}'".format(id=s) for s in entity_serialized_key_list
+ )
+
+ query_str = f"""
+ SELECT
+ "_id",
+ "event_ts",
+ {feature_list}
+ FROM
+ {collection_name}
+ WHERE
+ "_id" IN ({entity_query_str})
+ """
+
+ feature_set = set()
+ if requested_features:
+ feature_set.update(requested_features)
+
+ result_map = {}
+ for page in QueryPaginator(
+ rs,
+ rs.Queries.query(
+ sql=QueryRequestSql(
+ query=query_str,
+ paginate=True,
+ initial_paginate_response_doc_count=online_config.read_pagination_batch_size,
+ )
+ ),
+ ):
+ for doc in page:
+ result = {}
+ for k, v in doc.items():
+ if k not in feature_set:
+                        # We want to skip deserializing values that are not feature values, like bookkeeping values.
+ continue
+
+ val = ValueProto()
+
+ # TODO: Remove bytes <-> string parsing once client supports bytes.
+ val.ParseFromString(bytes.fromhex(v))
+ result[k] = val
+ result_map[doc["_id"]] = (
+ datetime.fromisoformat(doc["event_ts"]),
+ result,
+ )
+
+ results_list: List[
+ Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]
+ ] = []
+ for key in entity_serialized_key_list:
+ if key not in result_map:
+ # If not found, we add a gap to let the client know.
+ results_list.append((None, None))
+ continue
+
+ results_list.append(result_map[key])
+
+ return results_list
+
+ @log_exceptions_and_usage(online_store="rockset")
+ def update(
+ self,
+ config: RepoConfig,
+ tables_to_delete: Sequence[FeatureView],
+ tables_to_keep: Sequence[FeatureView],
+ entities_to_delete: Sequence[Entity],
+ entities_to_keep: Sequence[Entity],
+ partial: bool,
+ ):
+ """
+ Update tables from the Rockset Online Store.
+
+ Args:
+ config: The RepoConfig for the current FeatureStore.
+ tables_to_delete: Tables to delete from the Rockset Online Store.
+ tables_to_keep: Tables to keep in the Rockset Online Store.
+ """
+ online_config = config.online_store
+ assert isinstance(online_config, RocksetOnlineStoreConfig)
+ rs = self.get_rockset_client(online_config)
+
+ created_collections = []
+ for table_instance in tables_to_keep:
+ try:
+ collection_name = self.get_collection_name(config, table_instance)
+ rs.Collections.create_file_upload_collection(name=collection_name)
+ created_collections.append(collection_name)
+ except BadRequestException as e:
+ if self.parse_request_error_type(e) == "AlreadyExists":
+                    # Table already exists; nothing to do, but we should still make sure it is ready.
+ created_collections.append(collection_name)
+ continue
+ raise
+
+ for table_to_delete in tables_to_delete:
+ self.delete_collection(
+ rs, collection_name=self.get_collection_name(config, table_to_delete)
+ )
+
+ # Now wait for all collections to be READY.
+ self.wait_for_ready_collections(
+ rs, created_collections, online_config=online_config
+ )
+
+ @log_exceptions_and_usage(online_store="rockset")
+ def teardown(
+ self,
+ config: RepoConfig,
+ tables: Sequence[FeatureView],
+ entities: Sequence[Entity],
+ ):
+ """
+ Delete all collections from the Rockset Online Store.
+
+ Args:
+ config: The RepoConfig for the current FeatureStore.
+ tables: Tables to delete from the feature repo.
+ """
+ online_config = config.online_store
+ assert isinstance(online_config, RocksetOnlineStoreConfig)
+ rs = self.get_rockset_client(online_config)
+ for table in tables:
+ self.delete_collection(
+ rs, collection_name=self.get_collection_name(config, table)
+ )
+
+ def get_rockset_client(
+ self, onlineConfig: RocksetOnlineStoreConfig
+ ) -> RocksetClient:
+ """
+ Fetches the RocksetClient to be used for all requests for this online store based on the api
+ configuration in the provided config. If no configuration provided local ENV vars will be used.
+
+ Args:
+ onlineConfig: The RocksetOnlineStoreConfig associated with this online store.
+ """
+ if self._rockset_client is not None:
+ return self._rockset_client
+
+ _api_key = (
+ os.getenv("ROCKSET_APIKEY")
+            if onlineConfig.api_key is None
+ else onlineConfig.api_key
+ )
+ _host = (
+ os.getenv("ROCKSET_APISERVER")
+            if onlineConfig.host is None
+ else onlineConfig.host
+ )
+ self._rockset_client = RocksetClient(host=_host, api_key=_api_key)
+ return self._rockset_client
+
+ @staticmethod
+ def delete_collection(rs: RocksetClient, collection_name: str):
+ """
+ Deletes the collection whose name was provided
+
+ Args:
+ rs: The RocksetClient to be used for the deletion.
+ collection_name: The name of the collection to be deleted.
+ """
+
+ try:
+ rs.Collections.delete(collection=collection_name)
+ except RocksetException as e:
+ if RocksetOnlineStore.parse_request_error_type(e) == "NotFound":
+ logger.warning(
+ f"Trying to delete collection that does not exist {collection_name}"
+ )
+ return
+ raise
+
+ @staticmethod
+ def get_collection_name(config: RepoConfig, feature_view: FeatureView) -> str:
+ """
+ Returns the collection name based on the provided config and FeatureView.
+
+ Args:
+ config: RepoConfig for the online store.
+ feature_view: FeatureView that is backed by the returned collection name.
+
+ Returns:
+ The collection name as a string.
+ """
+ project_val = config.project if config.project else "feast"
+ table_name = feature_view.name if feature_view.name else "feature_store"
+ return f"{project_val}_{table_name}"
+
+ @staticmethod
+ def parse_request_error_type(e: RocksetException) -> str:
+ """
+        Parse a thrown RocksetException and return a string representing the type of error that was thrown.
+
+ Args:
+            e: The RocksetException that is being parsed.
+
+ Returns:
+ Error type parsed as a string.
+ """
+
+ body_dict = json.loads(e.body)
+ return body_dict["type"]
+
+ @staticmethod
+ def wait_for_fence(
+ rs: RocksetClient,
+ collection_name: str,
+ last_offset: str,
+ online_config: RocksetOnlineStoreConfig,
+ ):
+ """
+        Waits until 'last_offset' is flushed and values are ready to be read. If the wait lasts longer
+        than the timeout specified in the config, a TimeoutError will be thrown.
+
+ Args:
+ rs: Rockset client that will be used to make all requests.
+ collection_name: Collection associated with the offsets we are waiting for.
+ last_offset: The actual offsets we are waiting to be flushed.
+ online_config: The config that will be used to determine timeouts and backout configurations.
+ """
+
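+        # Poll the collection's commit-offsets endpoint until the offset
+        # returned by add_documents has been flushed and is queryable.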
+ resource_path = (
+ f"/v1/orgs/self/ws/commons/collections/{collection_name}/offsets/commit"
+ )
+ request = {"name": [last_offset]}
+
+ headers = {}
+ headers["Content-Type"] = "application/json"
+ headers["Authorization"] = f"ApiKey {rs.api_client.configuration.api_key}"
+
+ t_start = time.time()
+ for num_attempts in range(online_config.max_request_attempts):
+ delay = time.time() - t_start
+ resp = requests.post(
+ url=f"{rs.api_client.configuration.host}{resource_path}",
+ json=request,
+ headers=headers,
+ )
+
+ if resp.status_code == 200 and resp.json()["data"]["passed"] is True:
+ break
+
+ if delay > online_config.fence_timeout_secs:
+ raise TimeoutError(
+ f"Write to collection {collection_name} at offset {last_offset} was not available for read after {delay} secs"
+ )
+
+ if resp.status_code == 429:
+ RocksetOnlineStore.backoff_sleep(num_attempts, online_config)
+ continue
+ elif resp.status_code != 200:
+ raise Exception(f"[{resp.status_code}]: {resp.reason}")
+
+ RocksetOnlineStore.backoff_sleep(num_attempts, online_config)
+
+ @staticmethod
+ def wait_for_ready_collections(
+ rs: RocksetClient,
+ collection_names: List[str],
+ online_config: RocksetOnlineStoreConfig,
+ ):
+ """
+ Waits until all collections provided have entered READY state and can accept new documents. If wait
+        lasts longer than the timeout, a TimeoutError will be thrown.
+
+ Args:
+ rs: Rockset client that will be used to make all requests.
+ collection_names: All collections that we will wait for.
+            online_config: The config that determines the collection timeouts and backoff behavior.
+ """
+
+ t_start = time.time()
+ for cname in collection_names:
+ # We will wait until the provided timeout for all collections to become READY.
+ for num_attempts in range(online_config.max_request_attempts):
+ resp = None
+ delay = time.time() - t_start
+ try:
+ resp = rs.Collections.get(collection=cname)
+ except RocksetException as e:
+ error_type = RocksetOnlineStore.parse_request_error_type(e)
+ if error_type == "NotFound":
+ if delay > online_config.collection_created_timeout_secs:
+ raise TimeoutError(
+ f"Collection {cname} failed to become visible after {delay} seconds"
+ )
+ elif error_type == "RateLimitExceeded":
+ RocksetOnlineStore.backoff_sleep(num_attempts, online_config)
+ continue
+ else:
+ raise
+
+ if (
+ resp is not None
+ and cast(Dict[str, dict], resp)["data"]["status"] == "READY"
+ ):
+ break
+
+ if delay > online_config.collection_ready_timeout_secs:
+ raise TimeoutError(
+ f"Collection {cname} failed to become ready after {delay} seconds"
+ )
+
+ RocksetOnlineStore.backoff_sleep(num_attempts, online_config)
+
+ @staticmethod
+ def backoff_sleep(attempts: int, online_config: RocksetOnlineStoreConfig):
+ """
+ Sleep for the needed amount of time based on the number of request attempts.
+
+ Args:
+            attempts: The number of request attempts made so far.
+            online_config: The config that determines the initial and max backoff.
+ """
+
+ default_backoff = online_config.initial_request_backoff_secs
+
+ # Full jitter, exponential backoff.
+ backoff = random.uniform(
+ default_backoff,
+ min(default_backoff << attempts, online_config.max_request_backoff_secs),
+ )
+ time.sleep(backoff)
diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py
index 8af2097076..83922068ac 100644
--- a/sdk/python/feast/infra/online_stores/redis.py
+++ b/sdk/python/feast/infra/online_stores/redis.py
@@ -89,15 +89,15 @@ class RedisOnlineStore(OnlineStore):
def delete_entity_values(self, config: RepoConfig, join_keys: List[str]):
client = self._get_client(config.online_store)
deleted_count = 0
- pipeline = client.pipeline(transaction=False)
prefix = _redis_key_prefix(join_keys)
- for _k in client.scan_iter(
- b"".join([prefix, b"*", config.project.encode("utf8")])
- ):
- pipeline.delete(_k)
- deleted_count += 1
- pipeline.execute()
+ with client.pipeline(transaction=False) as pipe:
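+            # Queue all deletes on one pipeline to avoid a round trip per key;
+            # the context manager releases the pipeline even on error.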
+ for _k in client.scan_iter(
+ b"".join([prefix, b"*", config.project.encode("utf8")])
+ ):
+ pipe.delete(_k)
+ deleted_count += 1
+ pipe.execute()
logger.debug(f"Deleted {deleted_count} rows for entity {', '.join(join_keys)}")
diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py
index c4474dff38..c1a03a2862 100644
--- a/sdk/python/feast/infra/online_stores/snowflake.py
+++ b/sdk/python/feast/infra/online_stores/snowflake.py
@@ -13,8 +13,8 @@
from feast.infra.key_encoding_utils import serialize_entity_key
from feast.infra.online_stores.online_store import OnlineStore
from feast.infra.utils.snowflake.snowflake_utils import (
+ GetSnowflakeConnection,
execute_snowflake_statement,
- get_snowflake_conn,
get_snowflake_online_store_path,
write_pandas_binary,
)
@@ -29,13 +29,13 @@ class SnowflakeOnlineStoreConfig(FeastConfigBaseModel):
"""Online store config for Snowflake"""
type: Literal["snowflake.online"] = "snowflake.online"
- """ Online store type selector"""
+ """ Online store type selector """
config_path: Optional[str] = os.path.expanduser("~/.snowsql/config")
""" Snowflake config path -- absolute path required (Can't use ~)"""
account: Optional[str] = None
- """ Snowflake deployment identifier -- drop .snowflakecomputing.com"""
+ """ Snowflake deployment identifier -- drop .snowflakecomputing.com """
user: Optional[str] = None
""" Snowflake user name """
@@ -44,7 +44,7 @@ class SnowflakeOnlineStoreConfig(FeastConfigBaseModel):
""" Snowflake password """
role: Optional[str] = None
- """ Snowflake role name"""
+ """ Snowflake role name """
warehouse: Optional[str] = None
""" Snowflake warehouse name """
@@ -114,7 +114,7 @@ def online_write_batch(
# This combines both the data upload plus the overwrite in the same transaction
online_path = get_snowflake_online_store_path(config, table)
- with get_snowflake_conn(config.online_store, autocommit=False) as conn:
+ with GetSnowflakeConnection(config.online_store, autocommit=False) as conn:
write_pandas_binary(
conn,
agg_df,
@@ -178,7 +178,7 @@ def online_read(
)
online_path = get_snowflake_online_store_path(config, table)
- with get_snowflake_conn(config.online_store) as conn:
+ with GetSnowflakeConnection(config.online_store) as conn:
query = f"""
SELECT
"entity_key", "feature_name", "value", "event_ts"
@@ -220,7 +220,7 @@ def update(
):
assert isinstance(config.online_store, SnowflakeOnlineStoreConfig)
- with get_snowflake_conn(config.online_store) as conn:
+ with GetSnowflakeConnection(config.online_store) as conn:
for table in tables_to_keep:
online_path = get_snowflake_online_store_path(config, table)
query = f"""
@@ -248,7 +248,7 @@ def teardown(
):
assert isinstance(config.online_store, SnowflakeOnlineStoreConfig)
- with get_snowflake_conn(config.online_store) as conn:
+ with GetSnowflakeConnection(config.online_store) as conn:
for table in tables:
online_path = get_snowflake_online_store_path(config, table)
query = f'DROP TABLE IF EXISTS {online_path}."[online-transient] {config.project}_{table.name}"'
diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py
index 4dbc95d2a5..c7eeea0f82 100644
--- a/sdk/python/feast/infra/registry/proto_registry_utils.py
+++ b/sdk/python/feast/infra/registry/proto_registry_utils.py
@@ -1,5 +1,7 @@
-from typing import List
+import uuid
+from typing import List, Optional
+from feast import usage
from feast.data_source import DataSource
from feast.entity import Entity
from feast.errors import (
@@ -7,7 +9,6 @@
EntityNotFoundException,
FeatureServiceNotFoundException,
FeatureViewNotFoundException,
- OnDemandFeatureViewNotFoundException,
SavedDatasetNotFound,
ValidationReferenceNotFound,
)
@@ -15,12 +16,32 @@
from feast.feature_view import FeatureView
from feast.on_demand_feature_view import OnDemandFeatureView
from feast.project_metadata import ProjectMetadata
+from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto
from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto
from feast.request_feature_view import RequestFeatureView
from feast.saved_dataset import SavedDataset, ValidationReference
from feast.stream_feature_view import StreamFeatureView
+def init_project_metadata(cached_registry_proto: RegistryProto, project: str):
+ new_project_uuid = f"{uuid.uuid4()}"
+ usage.set_current_project_uuid(new_project_uuid)
+ cached_registry_proto.project_metadata.append(
+ ProjectMetadata(project_name=project, project_uuid=new_project_uuid).to_proto()
+ )
+
+
+def get_project_metadata(
+ registry_proto: Optional[RegistryProto], project: str
+) -> Optional[ProjectMetadataProto]:
+ if not registry_proto:
+ return None
+ for pm in registry_proto.project_metadata:
+ if pm.project == project:
+ return pm
+ return None
+
+
def get_feature_service(
registry_proto: RegistryProto, name: str, project: str
) -> FeatureService:
@@ -76,7 +97,7 @@ def get_on_demand_feature_view(
and on_demand_feature_view.spec.name == name
):
return OnDemandFeatureView.from_proto(on_demand_feature_view)
- raise OnDemandFeatureViewNotFoundException(name, project=project)
+ raise FeatureViewNotFoundException(name, project=project)
def get_data_source(
@@ -116,10 +137,6 @@ def get_validation_reference(
raise ValidationReferenceNotFound(name, project=project)
-def list_validation_references(registry_proto: RegistryProto):
- return registry_proto.validation_references
-
-
def list_feature_services(
registry_proto: RegistryProto, project: str, allow_cache: bool = False
) -> List[FeatureService]:
@@ -193,13 +210,25 @@ def list_data_sources(registry_proto: RegistryProto, project: str) -> List[DataS
def list_saved_datasets(
- registry_proto: RegistryProto, project: str, allow_cache: bool = False
+ registry_proto: RegistryProto, project: str
) -> List[SavedDataset]:
- return [
- SavedDataset.from_proto(saved_dataset)
- for saved_dataset in registry_proto.saved_datasets
- if saved_dataset.spec.project == project
- ]
+ saved_datasets = []
+ for saved_dataset in registry_proto.saved_datasets:
+ if saved_dataset.spec.project == project:
+ saved_datasets.append(SavedDataset.from_proto(saved_dataset))
+ return saved_datasets
+
+
+def list_validation_references(
+ registry_proto: RegistryProto, project: str
+) -> List[ValidationReference]:
+ validation_references = []
+ for validation_reference in registry_proto.validation_references:
+ if validation_reference.project == project:
+ validation_references.append(
+ ValidationReference.from_proto(validation_reference)
+ )
+ return validation_references
def list_project_metadata(
diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py
index c18ebbbb4a..f5b9f5d3e0 100644
--- a/sdk/python/feast/infra/registry/registry.py
+++ b/sdk/python/feast/infra/registry/registry.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
-import uuid
from datetime import datetime, timedelta
from enum import Enum
from pathlib import Path
@@ -44,7 +43,6 @@
from feast.infra.registry.registry_store import NoopRegistryStore
from feast.on_demand_feature_view import OnDemandFeatureView
from feast.project_metadata import ProjectMetadata
-from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto
from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto
from feast.repo_config import RegistryConfig
from feast.repo_contents import RepoContents
@@ -143,25 +141,6 @@ def get_registry_store_class_from_scheme(registry_path: str):
return get_registry_store_class_from_type(registry_store_type)
-def _get_project_metadata(
- registry_proto: Optional[RegistryProto], project: str
-) -> Optional[ProjectMetadataProto]:
- if not registry_proto:
- return None
- for pm in registry_proto.project_metadata:
- if pm.project == project:
- return pm
- return None
-
-
-def _init_project_metadata(cached_registry_proto: RegistryProto, project: str):
- new_project_uuid = f"{uuid.uuid4()}"
- usage.set_current_project_uuid(new_project_uuid)
- cached_registry_proto.project_metadata.append(
- ProjectMetadata(project_name=project, project_uuid=new_project_uuid).to_proto()
- )
-
-
class Registry(BaseRegistry):
def apply_user_metadata(
self,
@@ -190,19 +169,29 @@ def exit_apply_context(self):
cached_registry_proto_ttl: timedelta
def __new__(
- cls, registry_config: Optional[RegistryConfig], repo_path: Optional[Path]
+ cls,
+ project: str,
+ registry_config: Optional[RegistryConfig],
+ repo_path: Optional[Path],
):
# We override __new__ so that we can inspect registry_config and create a SqlRegistry without callers
# needing to make any changes.
if registry_config and registry_config.registry_type == "sql":
from feast.infra.registry.sql import SqlRegistry
- return SqlRegistry(registry_config, repo_path)
+ return SqlRegistry(registry_config, project, repo_path)
+ elif registry_config and registry_config.registry_type == "snowflake.registry":
+ from feast.infra.registry.snowflake import SnowflakeRegistry
+
+ return SnowflakeRegistry(registry_config, project, repo_path)
else:
return super(Registry, cls).__new__(cls)
def __init__(
- self, registry_config: Optional[RegistryConfig], repo_path: Optional[Path]
+ self,
+ project: str,
+ registry_config: Optional[RegistryConfig],
+ repo_path: Optional[Path],
):
"""
Create the Registry object.
@@ -231,7 +220,7 @@ def __init__(
)
def clone(self) -> "Registry":
- new_registry = Registry(None, None)
+ new_registry = Registry("project", None, None)
new_registry.cached_registry_proto_ttl = timedelta(seconds=0)
new_registry.cached_registry_proto = (
self.cached_registry_proto.__deepcopy__()
@@ -249,7 +238,7 @@ def _initialize_registry(self, project: str):
except FileNotFoundError:
registry_proto = RegistryProto()
registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION
- _init_project_metadata(registry_proto, project)
+ proto_registry_utils.init_project_metadata(registry_proto, project)
self._registry_store.update_registry_proto(registry_proto)
def update_infra(self, infra: Infra, project: str, commit: bool = True):
@@ -752,7 +741,7 @@ def list_validation_references(
registry_proto = self._get_registry_proto(
project=project, allow_cache=allow_cache
)
- return proto_registry_utils.list_validation_references(registry_proto)
+ return proto_registry_utils.list_validation_references(registry_proto, project)
def delete_validation_reference(self, name: str, project: str, commit: bool = True):
registry_proto = self._prepare_registry_for_changes(project)
@@ -797,7 +786,12 @@ def _prepare_registry_for_changes(self, project: str):
"""Prepares the Registry for changes by refreshing the cache if necessary."""
try:
self._get_registry_proto(project=project, allow_cache=True)
- if _get_project_metadata(self.cached_registry_proto, project) is None:
+ if (
+ proto_registry_utils.get_project_metadata(
+ self.cached_registry_proto, project
+ )
+ is None
+ ):
# Project metadata not initialized yet. Try pulling without cache
self._get_registry_proto(project=project, allow_cache=False)
except FileNotFoundError:
@@ -808,8 +802,15 @@ def _prepare_registry_for_changes(self, project: str):
# Initialize project metadata if needed
assert self.cached_registry_proto
- if _get_project_metadata(self.cached_registry_proto, project) is None:
- _init_project_metadata(self.cached_registry_proto, project)
+ if (
+ proto_registry_utils.get_project_metadata(
+ self.cached_registry_proto, project
+ )
+ is None
+ ):
+ proto_registry_utils.init_project_metadata(
+ self.cached_registry_proto, project
+ )
self.commit()
return self.cached_registry_proto
@@ -842,7 +843,7 @@ def _get_registry_proto(
)
if project:
- old_project_metadata = _get_project_metadata(
+ old_project_metadata = proto_registry_utils.get_project_metadata(
registry_proto=self.cached_registry_proto, project=project
)
@@ -853,6 +854,7 @@ def _get_registry_proto(
assert isinstance(self.cached_registry_proto, RegistryProto)
return self.cached_registry_proto
+ logger.info("Registry cache expired, so refreshing")
registry_proto = self._registry_store.get_registry_proto()
self.cached_registry_proto = registry_proto
self.cached_registry_proto_created = datetime.utcnow()
@@ -860,13 +862,13 @@ def _get_registry_proto(
if not project:
return registry_proto
- project_metadata = _get_project_metadata(
+ project_metadata = proto_registry_utils.get_project_metadata(
registry_proto=registry_proto, project=project
)
if project_metadata:
usage.set_current_project_uuid(project_metadata.project_uuid)
else:
- _init_project_metadata(registry_proto, project)
+ proto_registry_utils.init_project_metadata(registry_proto, project)
self.commit()
return registry_proto
@@ -889,4 +891,7 @@ def _existing_feature_view_names_to_fvs(self) -> Dict[str, Message]:
request_fvs = {
fv.spec.name: fv for fv in self.cached_registry_proto.request_feature_views
}
- return {**odfvs, **fvs, **request_fvs}
+ sfv = {
+ fv.spec.name: fv for fv in self.cached_registry_proto.stream_feature_views
+ }
+ return {**odfvs, **fvs, **request_fvs, **sfv}
diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py
new file mode 100644
index 0000000000..56c7bc1f65
--- /dev/null
+++ b/sdk/python/feast/infra/registry/snowflake.py
@@ -0,0 +1,1100 @@
+import logging
+import os
+import uuid
+from binascii import hexlify
+from datetime import datetime, timedelta
+from enum import Enum
+from threading import Lock
+from typing import Any, Callable, List, Optional, Set, Union
+
+from pydantic import Field, StrictStr
+from pydantic.schema import Literal
+
+import feast
+from feast import usage
+from feast.base_feature_view import BaseFeatureView
+from feast.data_source import DataSource
+from feast.entity import Entity
+from feast.errors import (
+ DataSourceObjectNotFoundException,
+ EntityNotFoundException,
+ FeatureServiceNotFoundException,
+ FeatureViewNotFoundException,
+ SavedDatasetNotFound,
+ ValidationReferenceNotFound,
+)
+from feast.feature_service import FeatureService
+from feast.feature_view import FeatureView
+from feast.infra.infra_object import Infra
+from feast.infra.registry import proto_registry_utils
+from feast.infra.registry.base_registry import BaseRegistry
+from feast.infra.utils.snowflake.snowflake_utils import (
+ GetSnowflakeConnection,
+ execute_snowflake_statement,
+)
+from feast.on_demand_feature_view import OnDemandFeatureView
+from feast.project_metadata import ProjectMetadata
+from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto
+from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto
+from feast.protos.feast.core.FeatureService_pb2 import (
+ FeatureService as FeatureServiceProto,
+)
+from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto
+from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto
+from feast.protos.feast.core.OnDemandFeatureView_pb2 import (
+ OnDemandFeatureView as OnDemandFeatureViewProto,
+)
+from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto
+from feast.protos.feast.core.RequestFeatureView_pb2 import (
+ RequestFeatureView as RequestFeatureViewProto,
+)
+from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto
+from feast.protos.feast.core.StreamFeatureView_pb2 import (
+ StreamFeatureView as StreamFeatureViewProto,
+)
+from feast.protos.feast.core.ValidationProfile_pb2 import (
+ ValidationReference as ValidationReferenceProto,
+)
+from feast.repo_config import RegistryConfig
+from feast.request_feature_view import RequestFeatureView
+from feast.saved_dataset import SavedDataset, ValidationReference
+from feast.stream_feature_view import StreamFeatureView
+
+logger = logging.getLogger(__name__)
+
+
+class FeastMetadataKeys(Enum):
+ LAST_UPDATED_TIMESTAMP = "last_updated_timestamp"
+ PROJECT_UUID = "project_uuid"
+
+
+class SnowflakeRegistryConfig(RegistryConfig):
+ """Registry config for Snowflake"""
+
+ registry_type: Literal["snowflake.registry"] = "snowflake.registry"
+ """ Registry type selector """
+
+ type: Literal["snowflake.registry"] = "snowflake.registry"
+ """ Registry type selector """
+
+ config_path: Optional[str] = os.path.expanduser("~/.snowsql/config")
+    """ Snowflake config path -- absolute path required (Can't use ~) """
+
+ account: Optional[str] = None
+ """ Snowflake deployment identifier -- drop .snowflakecomputing.com """
+
+ user: Optional[str] = None
+ """ Snowflake user name """
+
+ password: Optional[str] = None
+ """ Snowflake password """
+
+ role: Optional[str] = None
+ """ Snowflake role name """
+
+ warehouse: Optional[str] = None
+ """ Snowflake warehouse name """
+
+ authenticator: Optional[str] = None
+ """ Snowflake authenticator name """
+
+ database: StrictStr
+ """ Snowflake database name """
+
+ schema_: Optional[str] = Field("PUBLIC", alias="schema")
+ """ Snowflake schema name """
+
+ class Config:
+ allow_population_by_field_name = True
+
+
+class SnowflakeRegistry(BaseRegistry):
+ def __init__(
+ self,
+ registry_config,
+ project: str,
+ repo_path,
+ ):
+ assert registry_config is not None and isinstance(
+ registry_config, SnowflakeRegistryConfig
+ ), "SnowflakeRegistry needs a valid registry_config, a path does not work"
+
+ self.registry_config = registry_config
+ self.registry_path = (
+ f'"{self.registry_config.database}"."{self.registry_config.schema_}"'
+ )
+
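+        # Create the registry tables on first use by running the bundled DDL
+        # script, substituting in the configured database/schema path.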
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ sql_function_file = f"{os.path.dirname(feast.__file__)}/infra/utils/snowflake/registry/snowflake_table_creation.sql"
+ with open(sql_function_file, "r") as file:
+                sql_file = file.read()
+
+            sql_commands = sql_file.split(";")
+            for command in sql_commands:
+ query = command.replace("REGISTRY_PATH", f"{self.registry_path}")
+ execute_snowflake_statement(conn, query)
+
+ self.cached_registry_proto = self.proto()
+ proto_registry_utils.init_project_metadata(self.cached_registry_proto, project)
+ self.cached_registry_proto_created = datetime.utcnow()
+ self._refresh_lock = Lock()
+ self.cached_registry_proto_ttl = timedelta(
+ seconds=registry_config.cache_ttl_seconds
+ if registry_config.cache_ttl_seconds is not None
+ else 0
+ )
+ self.project = project
+
+ def refresh(self, project: Optional[str] = None):
+ if project:
+ project_metadata = proto_registry_utils.get_project_metadata(
+ registry_proto=self.cached_registry_proto, project=project
+ )
+ if project_metadata:
+ usage.set_current_project_uuid(project_metadata.project_uuid)
+ else:
+ proto_registry_utils.init_project_metadata(
+ self.cached_registry_proto, project
+ )
+ self.cached_registry_proto = self.proto()
+ self.cached_registry_proto_created = datetime.utcnow()
+
+ def _refresh_cached_registry_if_necessary(self):
+ with self._refresh_lock:
+ expired = (
+ self.cached_registry_proto is None
+ or self.cached_registry_proto_created is None
+ ) or (
+ self.cached_registry_proto_ttl.total_seconds()
+ > 0 # 0 ttl means infinity
+ and (
+ datetime.utcnow()
+ > (
+ self.cached_registry_proto_created
+ + self.cached_registry_proto_ttl
+ )
+ )
+ )
+
+ if expired:
+ logger.info("Registry cache expired, so refreshing")
+ self.refresh()
+
+ def teardown(self):
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ sql_function_file = f"{os.path.dirname(feast.__file__)}/infra/utils/snowflake/registry/snowflake_table_deletion.sql"
+ with open(sql_function_file, "r") as file:
+                sql_file = file.read()
+
+            sql_commands = sql_file.split(";")
+            for command in sql_commands:
+ query = command.replace("REGISTRY_PATH", f"{self.registry_path}")
+ execute_snowflake_statement(conn, query)
+
+ # apply operations
+ def apply_data_source(
+ self, data_source: DataSource, project: str, commit: bool = True
+ ):
+ return self._apply_object(
+ "DATA_SOURCES",
+ project,
+ "DATA_SOURCE_NAME",
+ data_source,
+ "DATA_SOURCE_PROTO",
+ )
+
+ def apply_entity(self, entity: Entity, project: str, commit: bool = True):
+ return self._apply_object(
+ "ENTITIES", project, "ENTITY_NAME", entity, "ENTITY_PROTO"
+ )
+
+ def apply_feature_service(
+ self, feature_service: FeatureService, project: str, commit: bool = True
+ ):
+ return self._apply_object(
+ "FEATURE_SERVICES",
+ project,
+ "FEATURE_SERVICE_NAME",
+ feature_service,
+ "FEATURE_SERVICE_PROTO",
+ )
+
+ def apply_feature_view(
+ self, feature_view: BaseFeatureView, project: str, commit: bool = True
+ ):
+ fv_table_str = self._infer_fv_table(feature_view)
+ fv_column_name = fv_table_str[:-1]
+ return self._apply_object(
+ fv_table_str,
+ project,
+ f"{fv_column_name}_NAME",
+ feature_view,
+ f"{fv_column_name}_PROTO",
+ )
+
+ def apply_saved_dataset(
+ self,
+ saved_dataset: SavedDataset,
+ project: str,
+ commit: bool = True,
+ ):
+ return self._apply_object(
+ "SAVED_DATASETS",
+ project,
+ "SAVED_DATASET_NAME",
+ saved_dataset,
+ "SAVED_DATASET_PROTO",
+ )
+
+ def apply_validation_reference(
+ self,
+ validation_reference: ValidationReference,
+ project: str,
+ commit: bool = True,
+ ):
+ return self._apply_object(
+ "VALIDATION_REFERENCES",
+ project,
+ "VALIDATION_REFERENCE_NAME",
+ validation_reference,
+ "VALIDATION_REFERENCE_PROTO",
+ )
+
+ def update_infra(self, infra: Infra, project: str, commit: bool = True):
+ self._apply_object(
+ "MANAGED_INFRA",
+ project,
+ "INFRA_NAME",
+ infra,
+ "INFRA_PROTO",
+ name="infra_obj",
+ )
+
+ def _apply_object(
+ self,
+ table: str,
+ project: str,
+ id_field_name: str,
+ obj: Any,
+ proto_field_name: str,
+ name: Optional[str] = None,
+ ):
+ self._maybe_init_project_metadata(project)
+
+ name = name or (obj.name if hasattr(obj, "name") else None)
+ assert name, f"name needs to be provided for {obj}"
+
+ update_datetime = datetime.utcnow()
+ if hasattr(obj, "last_updated_timestamp"):
+ obj.last_updated_timestamp = update_datetime
+
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ project_id
+ FROM
+ {self.registry_path}."{table}"
+ WHERE
+ project_id = '{project}'
+ AND {id_field_name.lower()} = '{name}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
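+            # Upsert: update the row in place when the object already exists,
+            # otherwise insert a new row (stamping its creation timestamp).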
+ if not df.empty:
+ proto = hexlify(obj.to_proto().SerializeToString()).__str__()[1:]
+ query = f"""
+ UPDATE {self.registry_path}."{table}"
+ SET
+ {proto_field_name} = TO_BINARY({proto}),
+ last_updated_timestamp = CURRENT_TIMESTAMP()
+ WHERE
+ {id_field_name.lower()} = '{name}'
+ """
+ execute_snowflake_statement(conn, query)
+
+ else:
+ obj_proto = obj.to_proto()
+
+ if hasattr(obj_proto, "meta") and hasattr(
+ obj_proto.meta, "created_timestamp"
+ ):
+ obj_proto.meta.created_timestamp.FromDatetime(update_datetime)
+
+ proto = hexlify(obj_proto.SerializeToString()).__str__()[1:]
+ if table == "FEATURE_VIEWS":
+ query = f"""
+ INSERT INTO {self.registry_path}."{table}"
+ VALUES
+ ('{name}', '{project}', CURRENT_TIMESTAMP(), TO_BINARY({proto}), '', '')
+ """
+ elif "_FEATURE_VIEWS" in table:
+ query = f"""
+ INSERT INTO {self.registry_path}."{table}"
+ VALUES
+ ('{name}', '{project}', CURRENT_TIMESTAMP(), TO_BINARY({proto}), '')
+ """
+ else:
+ query = f"""
+ INSERT INTO {self.registry_path}."{table}"
+ VALUES
+ ('{name}', '{project}', CURRENT_TIMESTAMP(), TO_BINARY({proto}))
+ """
+ execute_snowflake_statement(conn, query)
+
+ self._set_last_updated_metadata(update_datetime, project)
+
+ # delete operations
+ def delete_data_source(self, name: str, project: str, commit: bool = True):
+ return self._delete_object(
+ "DATA_SOURCES",
+ name,
+ project,
+ "DATA_SOURCE_NAME",
+ DataSourceObjectNotFoundException,
+ )
+
+ def delete_entity(self, name: str, project: str, commit: bool = True):
+ return self._delete_object(
+ "ENTITIES", name, project, "ENTITY_NAME", EntityNotFoundException
+ )
+
+ def delete_feature_service(self, name: str, project: str, commit: bool = True):
+ return self._delete_object(
+ "FEATURE_SERVICES",
+ name,
+ project,
+ "FEATURE_SERVICE_NAME",
+ FeatureServiceNotFoundException,
+ )
+
+    # Feature views of different types share a namespace, so attempt deletion from every feature view table.
+ def delete_feature_view(self, name: str, project: str, commit: bool = True):
+ deleted_count = 0
+ for table in {
+ "FEATURE_VIEWS",
+ "REQUEST_FEATURE_VIEWS",
+ "ON_DEMAND_FEATURE_VIEWS",
+ "STREAM_FEATURE_VIEWS",
+ }:
+ deleted_count += self._delete_object(
+ table, name, project, "FEATURE_VIEW_NAME", None
+ )
+ if deleted_count == 0:
+ raise FeatureViewNotFoundException(name, project)
+
+ def delete_saved_dataset(self, name: str, project: str, allow_cache: bool = False):
+ self._delete_object(
+ "SAVED_DATASETS",
+ name,
+ project,
+ "SAVED_DATASET_NAME",
+ SavedDatasetNotFound,
+ )
+
+ def delete_validation_reference(self, name: str, project: str, commit: bool = True):
+ self._delete_object(
+ "VALIDATION_REFERENCES",
+ name,
+ project,
+ "VALIDATION_REFERENCE_NAME",
+ ValidationReferenceNotFound,
+ )
+
+ def _delete_object(
+ self,
+ table: str,
+ name: str,
+ project: str,
+ id_field_name: str,
+ not_found_exception: Optional[Callable],
+ ):
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ DELETE FROM {self.registry_path}."{table}"
+ WHERE
+ project_id = '{project}'
+ AND {id_field_name.lower()} = '{name}'
+ """
+ cursor = execute_snowflake_statement(conn, query)
+
+ if cursor.rowcount < 1 and not_found_exception:
+ raise not_found_exception(name, project)
+ self._set_last_updated_metadata(datetime.utcnow(), project)
+
+ return cursor.rowcount
+
+ # get operations
+ def get_data_source(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> DataSource:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_data_source(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "DATA_SOURCES",
+ name,
+ project,
+ DataSourceProto,
+ DataSource,
+ "DATA_SOURCE_NAME",
+ "DATA_SOURCE_PROTO",
+ DataSourceObjectNotFoundException,
+ )
+
+ def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_entity(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "ENTITIES",
+ name,
+ project,
+ EntityProto,
+ Entity,
+ "ENTITY_NAME",
+ "ENTITY_PROTO",
+ EntityNotFoundException,
+ )
+
+ def get_feature_service(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> FeatureService:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_feature_service(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "FEATURE_SERVICES",
+ name,
+ project,
+ FeatureServiceProto,
+ FeatureService,
+ "FEATURE_SERVICE_NAME",
+ "FEATURE_SERVICE_PROTO",
+ FeatureServiceNotFoundException,
+ )
+
+ def get_feature_view(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> FeatureView:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_feature_view(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "FEATURE_VIEWS",
+ name,
+ project,
+ FeatureViewProto,
+ FeatureView,
+ "FEATURE_VIEW_NAME",
+ "FEATURE_VIEW_PROTO",
+ FeatureViewNotFoundException,
+ )
+
+ def get_infra(self, project: str, allow_cache: bool = False) -> Infra:
+ infra_object = self._get_object(
+ "MANAGED_INFRA",
+ "infra_obj",
+ project,
+ InfraProto,
+ Infra,
+ "INFRA_NAME",
+ "INFRA_PROTO",
+ None,
+ )
+        # _get_object already returns a deserialized Infra (or None when no row
+        # exists), so avoid calling Infra.from_proto on it a second time.
+        return infra_object or Infra()
+
+ def get_on_demand_feature_view(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> OnDemandFeatureView:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_on_demand_feature_view(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "ON_DEMAND_FEATURE_VIEWS",
+ name,
+ project,
+ OnDemandFeatureViewProto,
+ OnDemandFeatureView,
+ "ON_DEMAND_FEATURE_VIEW_NAME",
+ "ON_DEMAND_FEATURE_VIEW_PROTO",
+ FeatureViewNotFoundException,
+ )
+
+ def get_request_feature_view(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> RequestFeatureView:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_request_feature_view(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "REQUEST_FEATURE_VIEWS",
+ name,
+ project,
+ RequestFeatureViewProto,
+ RequestFeatureView,
+ "REQUEST_FEATURE_VIEW_NAME",
+ "REQUEST_FEATURE_VIEW_PROTO",
+ FeatureViewNotFoundException,
+ )
+
+ def get_saved_dataset(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> SavedDataset:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_saved_dataset(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "SAVED_DATASETS",
+ name,
+ project,
+ SavedDatasetProto,
+ SavedDataset,
+ "SAVED_DATASET_NAME",
+ "SAVED_DATASET_PROTO",
+ SavedDatasetNotFound,
+ )
+
+ def get_stream_feature_view(
+ self, name: str, project: str, allow_cache: bool = False
+ ):
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_stream_feature_view(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "STREAM_FEATURE_VIEWS",
+ name,
+ project,
+ StreamFeatureViewProto,
+ StreamFeatureView,
+ "STREAM_FEATURE_VIEW_NAME",
+ "STREAM_FEATURE_VIEW_PROTO",
+ FeatureViewNotFoundException,
+ )
+
+ def get_validation_reference(
+ self, name: str, project: str, allow_cache: bool = False
+ ) -> ValidationReference:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.get_validation_reference(
+ self.cached_registry_proto, name, project
+ )
+ return self._get_object(
+ "VALIDATION_REFERENCES",
+ name,
+ project,
+ ValidationReferenceProto,
+ ValidationReference,
+ "VALIDATION_REFERENCE_NAME",
+ "VALIDATION_REFERENCE_PROTO",
+ ValidationReferenceNotFound,
+ )
+
+ def _get_object(
+ self,
+ table: str,
+ name: str,
+ project: str,
+ proto_class: Any,
+ python_class: Any,
+ id_field_name: str,
+ proto_field_name: str,
+ not_found_exception: Optional[Callable],
+ ):
+ self._maybe_init_project_metadata(project)
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ {proto_field_name}
+ FROM
+ {self.registry_path}."{table}"
+ WHERE
+ project_id = '{project}'
+ AND {id_field_name.lower()} = '{name}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if not df.empty:
+ _proto = proto_class.FromString(df.squeeze())
+ return python_class.from_proto(_proto)
+ elif not_found_exception:
+ raise not_found_exception(name, project)
+ else:
+ return None
+
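+    # Note: fetch_pandas_all() returns the BINARY proto column as raw bytes, so
+    # proto_class.FromString(df.squeeze()) reverses the hexlify/TO_BINARY encoding
+    # applied when the object was written.
+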
+ # list operations
+ def list_data_sources(
+ self, project: str, allow_cache: bool = False
+ ) -> List[DataSource]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_data_sources(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "DATA_SOURCES", project, DataSourceProto, DataSource, "DATA_SOURCE_PROTO"
+ )
+
+ def list_entities(self, project: str, allow_cache: bool = False) -> List[Entity]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_entities(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "ENTITIES", project, EntityProto, Entity, "ENTITY_PROTO"
+ )
+
+ def list_feature_services(
+ self, project: str, allow_cache: bool = False
+ ) -> List[FeatureService]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_feature_services(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "FEATURE_SERVICES",
+ project,
+ FeatureServiceProto,
+ FeatureService,
+ "FEATURE_SERVICE_PROTO",
+ )
+
+ def list_feature_views(
+ self, project: str, allow_cache: bool = False
+ ) -> List[FeatureView]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_feature_views(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "FEATURE_VIEWS",
+ project,
+ FeatureViewProto,
+ FeatureView,
+ "FEATURE_VIEW_PROTO",
+ )
+
+ def list_on_demand_feature_views(
+ self, project: str, allow_cache: bool = False
+ ) -> List[OnDemandFeatureView]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_on_demand_feature_views(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "ON_DEMAND_FEATURE_VIEWS",
+ project,
+ OnDemandFeatureViewProto,
+ OnDemandFeatureView,
+ "ON_DEMAND_FEATURE_VIEW_PROTO",
+ )
+
+ def list_request_feature_views(
+ self, project: str, allow_cache: bool = False
+ ) -> List[RequestFeatureView]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_request_feature_views(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "REQUEST_FEATURE_VIEWS",
+ project,
+ RequestFeatureViewProto,
+ RequestFeatureView,
+ "REQUEST_FEATURE_VIEW_PROTO",
+ )
+
+ def list_saved_datasets(
+ self, project: str, allow_cache: bool = False
+ ) -> List[SavedDataset]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_saved_datasets(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "SAVED_DATASETS",
+ project,
+ SavedDatasetProto,
+ SavedDataset,
+ "SAVED_DATASET_PROTO",
+ )
+
+ def list_stream_feature_views(
+ self, project: str, allow_cache: bool = False
+ ) -> List[StreamFeatureView]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_stream_feature_views(
+ self.cached_registry_proto, project
+ )
+ return self._list_objects(
+ "STREAM_FEATURE_VIEWS",
+ project,
+ StreamFeatureViewProto,
+ StreamFeatureView,
+ "STREAM_FEATURE_VIEW_PROTO",
+ )
+
+ def list_validation_references(
+ self, project: str, allow_cache: bool = False
+ ) -> List[ValidationReference]:
+ return self._list_objects(
+ "VALIDATION_REFERENCES",
+ project,
+ ValidationReferenceProto,
+ ValidationReference,
+ "VALIDATION_REFERENCE_PROTO",
+ )
+
+ def _list_objects(
+ self,
+ table: str,
+ project: str,
+ proto_class: Any,
+ python_class: Any,
+ proto_field_name: str,
+ ):
+ self._maybe_init_project_metadata(project)
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ {proto_field_name}
+ FROM
+ {self.registry_path}."{table}"
+ WHERE
+ project_id = '{project}'
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if not df.empty:
+ return [
+ python_class.from_proto(
+ proto_class.FromString(row[1][proto_field_name])
+ )
+ for row in df.iterrows()
+ ]
+ return []
+
+ def apply_materialization(
+ self,
+ feature_view: FeatureView,
+ project: str,
+ start_date: datetime,
+ end_date: datetime,
+ commit: bool = True,
+ ):
+ fv_table_str = self._infer_fv_table(feature_view)
+        fv_column_name = fv_table_str[:-1]  # strip the trailing "S" for the column prefix
+ python_class, proto_class = self._infer_fv_classes(feature_view)
+
+ if python_class in {RequestFeatureView, OnDemandFeatureView}:
+ raise ValueError(
+ f"Cannot apply materialization for feature {feature_view.name} of type {python_class}"
+ )
+ fv: Union[FeatureView, StreamFeatureView] = self._get_object(
+ fv_table_str,
+ feature_view.name,
+ project,
+ proto_class,
+ python_class,
+ f"{fv_column_name}_NAME",
+ f"{fv_column_name}_PROTO",
+ FeatureViewNotFoundException,
+ )
+ fv.materialization_intervals.append((start_date, end_date))
+ self._apply_object(
+ fv_table_str,
+ project,
+ f"{fv_column_name}_NAME",
+ fv,
+ f"{fv_column_name}_PROTO",
+ )
+
+ def list_project_metadata(
+ self, project: str, allow_cache: bool = False
+ ) -> List[ProjectMetadata]:
+ if allow_cache:
+ self._refresh_cached_registry_if_necessary()
+ return proto_registry_utils.list_project_metadata(
+ self.cached_registry_proto, project
+ )
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ metadata_key,
+ metadata_value
+ FROM
+ {self.registry_path}."FEAST_METADATA"
+ WHERE
+ project_id = '{project}'
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if not df.empty:
+ project_metadata = ProjectMetadata(project_name=project)
+ for row in df.iterrows():
+ if row[1]["METADATA_KEY"] == FeastMetadataKeys.PROJECT_UUID.value:
+ project_metadata.project_uuid = row[1]["METADATA_VALUE"]
+ break
+ # TODO(adchia): Add other project metadata in a structured way
+ return [project_metadata]
+ return []
+
+ def apply_user_metadata(
+ self,
+ project: str,
+ feature_view: BaseFeatureView,
+ metadata_bytes: Optional[bytes],
+ ):
+ fv_table_str = self._infer_fv_table(feature_view)
+ fv_column_name = fv_table_str[:-1].lower()
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ project_id
+ FROM
+ {self.registry_path}."{fv_table_str}"
+ WHERE
+ project_id = '{project}'
+ AND {fv_column_name}_name = '{feature_view.name}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if not df.empty:
+ if metadata_bytes:
+ metadata_hex = hexlify(metadata_bytes).__str__()[1:]
+ else:
+ metadata_hex = "''"
+ query = f"""
+ UPDATE {self.registry_path}."{fv_table_str}"
+ SET
+ user_metadata = TO_BINARY({metadata_hex}),
+ last_updated_timestamp = CURRENT_TIMESTAMP()
+ WHERE
+ project_id = '{project}'
+ AND {fv_column_name}_name = '{feature_view.name}'
+ """
+ execute_snowflake_statement(conn, query)
+ else:
+ raise FeatureViewNotFoundException(feature_view.name, project=project)
+
+ def get_user_metadata(
+ self, project: str, feature_view: BaseFeatureView
+ ) -> Optional[bytes]:
+ fv_table_str = self._infer_fv_table(feature_view)
+ fv_column_name = fv_table_str[:-1].lower()
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ user_metadata
+ FROM
+ {self.registry_path}."{fv_table_str}"
+ WHERE
+ {fv_column_name}_name = '{feature_view.name}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if not df.empty:
+ return df.squeeze()
+ else:
+ raise FeatureViewNotFoundException(feature_view.name, project=project)
+
+ def proto(self) -> RegistryProto:
+ r = RegistryProto()
+ last_updated_timestamps = []
+ projects = self._get_all_projects()
+ for project in projects:
+ for lister, registry_proto_field in [
+ (self.list_entities, r.entities),
+ (self.list_feature_views, r.feature_views),
+ (self.list_data_sources, r.data_sources),
+ (self.list_on_demand_feature_views, r.on_demand_feature_views),
+ (self.list_request_feature_views, r.request_feature_views),
+ (self.list_stream_feature_views, r.stream_feature_views),
+ (self.list_feature_services, r.feature_services),
+ (self.list_saved_datasets, r.saved_datasets),
+ (self.list_validation_references, r.validation_references),
+ (self.list_project_metadata, r.project_metadata),
+ ]:
+ objs: List[Any] = lister(project) # type: ignore
+ if objs:
+ obj_protos = [obj.to_proto() for obj in objs]
+ for obj_proto in obj_protos:
+ if "spec" in obj_proto.DESCRIPTOR.fields_by_name:
+ obj_proto.spec.project = project
+ else:
+ obj_proto.project = project
+ registry_proto_field.extend(obj_protos)
+
+ # This is suuuper jank. Because of https://github.com/feast-dev/feast/issues/2783,
+ # the registry proto only has a single infra field, which we're currently setting as the "last" project.
+ r.infra.CopyFrom(self.get_infra(project).to_proto())
+ last_updated_timestamps.append(self._get_last_updated_metadata(project))
+
+ if last_updated_timestamps:
+ r.last_updated.FromDatetime(max(last_updated_timestamps))
+
+ return r
+
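+    # Note: proto() rebuilds the entire RegistryProto from the tables on every
+    # call, issuing one round of list queries per project.
+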
+ def _get_all_projects(self) -> Set[str]:
+ projects = set()
+
+ base_tables = [
+ "DATA_SOURCES",
+ "ENTITIES",
+ "FEATURE_VIEWS",
+ "ON_DEMAND_FEATURE_VIEWS",
+ "REQUEST_FEATURE_VIEWS",
+ "STREAM_FEATURE_VIEWS",
+ ]
+
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ for table in base_tables:
+ query = (
+ f'SELECT DISTINCT project_id FROM {self.registry_path}."{table}"'
+ )
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ for row in df.iterrows():
+ projects.add(row[1]["PROJECT_ID"])
+
+ return projects
+
+ def _get_last_updated_metadata(self, project: str):
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ metadata_value
+ FROM
+ {self.registry_path}."FEAST_METADATA"
+ WHERE
+ project_id = '{project}'
+ AND metadata_key = '{FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if df.empty:
+ return None
+
+ return datetime.utcfromtimestamp(int(df.squeeze()))
+
+ def _infer_fv_classes(self, feature_view):
+ if isinstance(feature_view, StreamFeatureView):
+ python_class, proto_class = StreamFeatureView, StreamFeatureViewProto
+ elif isinstance(feature_view, FeatureView):
+ python_class, proto_class = FeatureView, FeatureViewProto
+ elif isinstance(feature_view, OnDemandFeatureView):
+ python_class, proto_class = OnDemandFeatureView, OnDemandFeatureViewProto
+ elif isinstance(feature_view, RequestFeatureView):
+ python_class, proto_class = RequestFeatureView, RequestFeatureViewProto
+ else:
+ raise ValueError(f"Unexpected feature view type: {type(feature_view)}")
+ return python_class, proto_class
+
+ def _infer_fv_table(self, feature_view) -> str:
+ if isinstance(feature_view, StreamFeatureView):
+ table = "STREAM_FEATURE_VIEWS"
+ elif isinstance(feature_view, FeatureView):
+ table = "FEATURE_VIEWS"
+ elif isinstance(feature_view, OnDemandFeatureView):
+ table = "ON_DEMAND_FEATURE_VIEWS"
+ elif isinstance(feature_view, RequestFeatureView):
+ table = "REQUEST_FEATURE_VIEWS"
+ else:
+ raise ValueError(f"Unexpected feature view type: {type(feature_view)}")
+ return table
+
+ def _maybe_init_project_metadata(self, project):
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ metadata_value
+ FROM
+ {self.registry_path}."FEAST_METADATA"
+ WHERE
+ project_id = '{project}'
+ AND metadata_key = '{FeastMetadataKeys.PROJECT_UUID.value}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ if not df.empty:
+ usage.set_current_project_uuid(df.squeeze())
+ else:
+ new_project_uuid = f"{uuid.uuid4()}"
+ query = f"""
+ INSERT INTO {self.registry_path}."FEAST_METADATA"
+ VALUES
+ ('{project}', '{FeastMetadataKeys.PROJECT_UUID.value}', '{new_project_uuid}', CURRENT_TIMESTAMP())
+ """
+ execute_snowflake_statement(conn, query)
+
+ usage.set_current_project_uuid(new_project_uuid)
+
+ def _set_last_updated_metadata(self, last_updated: datetime, project: str):
+ with GetSnowflakeConnection(self.registry_config) as conn:
+ query = f"""
+ SELECT
+ project_id
+ FROM
+ {self.registry_path}."FEAST_METADATA"
+ WHERE
+ project_id = '{project}'
+ AND metadata_key = '{FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value}'
+ LIMIT 1
+ """
+ df = execute_snowflake_statement(conn, query).fetch_pandas_all()
+
+ update_time = int(last_updated.timestamp())
+ if not df.empty:
+ query = f"""
+ UPDATE {self.registry_path}."FEAST_METADATA"
+ SET
+ project_id = '{project}',
+ metadata_key = '{FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value}',
+ metadata_value = '{update_time}',
+ last_updated_timestamp = CURRENT_TIMESTAMP()
+ WHERE
+ project_id = '{project}'
+ AND metadata_key = '{FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value}'
+ """
+ execute_snowflake_statement(conn, query)
+
+ else:
+ query = f"""
+ INSERT INTO {self.registry_path}."FEAST_METADATA"
+ VALUES
+ ('{project}', '{FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value}', '{update_time}', CURRENT_TIMESTAMP())
+ """
+ execute_snowflake_statement(conn, query)
+
+ def commit(self):
+ pass
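+
+    # commit() is intentionally a no-op: each apply/delete method above executes
+    # its SQL immediately, so there is no buffered registry state to flush.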
diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py
index 57016e1d5e..7ac384397b 100644
--- a/sdk/python/feast/infra/registry/sql.py
+++ b/sdk/python/feast/infra/registry/sql.py
@@ -1,3 +1,4 @@
+import logging
import uuid
from datetime import datetime, timedelta
from enum import Enum
@@ -5,6 +6,7 @@
from threading import Lock
from typing import Any, Callable, List, Optional, Set, Union
+from pydantic import StrictStr
from sqlalchemy import ( # type: ignore
BigInteger,
Column,
@@ -178,6 +180,17 @@ class FeastMetadataKeys(Enum):
Column("last_updated_timestamp", BigInteger, nullable=False),
)
+logger = logging.getLogger(__name__)
+
+
+class SqlRegistryConfig(RegistryConfig):
+ registry_type: StrictStr = "sql"
+ """ str: Provider name or a class name that implements Registry."""
+
+ path: StrictStr = ""
+ """ str: Path to metadata store.
+ If registry_type is 'sql', then this is a database URL as expected by SQLAlchemy """
+
class SqlRegistry(BaseRegistry):
def __init__(
@@ -257,6 +270,7 @@ def _refresh_cached_registry_if_necessary(self):
)
if expired:
+ logger.info("Registry cache expired, so refreshing")
self.refresh()
def get_stream_feature_view(
diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py
index 7e8335ac92..f48dfbb86b 100644
--- a/sdk/python/feast/infra/utils/aws_utils.py
+++ b/sdk/python/feast/infra/utils/aws_utils.py
@@ -74,7 +74,12 @@ def get_bucket_and_key(s3_path: str) -> Tuple[str, str]:
reraise=True,
)
def execute_redshift_statement_async(
- redshift_data_client, cluster_id: str, database: str, user: str, query: str
+ redshift_data_client,
+ cluster_id: Optional[str],
+ workgroup: Optional[str],
+ database: str,
+ user: Optional[str],
+ query: str,
) -> dict:
"""Execute Redshift statement asynchronously. Does not wait for the query to finish.
@@ -83,6 +88,7 @@ def execute_redshift_statement_async(
Args:
redshift_data_client: Redshift Data API Service client
cluster_id: Redshift Cluster Identifier
+ workgroup: Redshift Serverless Workgroup
database: Redshift Database Name
user: Redshift username
query: The SQL query to execute
@@ -91,12 +97,17 @@ def execute_redshift_statement_async(
"""
try:
- return redshift_data_client.execute_statement(
- ClusterIdentifier=cluster_id,
- Database=database,
- DbUser=user,
- Sql=query,
- )
+ rs_kwargs = {"Database": database, "Sql": query}
+
+ # Standard Redshift requires a ClusterId as well as DbUser. RS Serverless instead requires a WorkgroupName.
+ if cluster_id and user:
+ rs_kwargs["ClusterIdentifier"] = cluster_id
+ rs_kwargs["DbUser"] = user
+ elif workgroup:
+ rs_kwargs["WorkgroupName"] = workgroup
+
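+    # If neither a cluster_id/user pair nor a workgroup is given, the Data API call
+    # is expected to fail validation, surfacing below as RedshiftCredentialsError.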
+ return redshift_data_client.execute_statement(**rs_kwargs)
+
except ClientError as e:
if e.response["Error"]["Code"] == "ValidationException":
raise RedshiftCredentialsError() from e
@@ -133,7 +144,12 @@ def wait_for_redshift_statement(redshift_data_client, statement: dict) -> None:
def execute_redshift_statement(
- redshift_data_client, cluster_id: str, database: str, user: str, query: str
+ redshift_data_client,
+ cluster_id: Optional[str],
+ workgroup: Optional[str],
+ database: str,
+ user: Optional[str],
+ query: str,
) -> str:
"""Execute Redshift statement synchronously. Waits for the query to finish.
@@ -144,6 +160,7 @@ def execute_redshift_statement(
Args:
redshift_data_client: Redshift Data API Service client
cluster_id: Redshift Cluster Identifier
+ workgroup: Redshift Serverless Workgroup
database: Redshift Database Name
user: Redshift username
query: The SQL query to execute
@@ -152,7 +169,7 @@ def execute_redshift_statement(
"""
statement = execute_redshift_statement_async(
- redshift_data_client, cluster_id, database, user, query
+ redshift_data_client, cluster_id, workgroup, database, user, query
)
wait_for_redshift_statement(redshift_data_client, statement)
return statement["Id"]
@@ -193,9 +210,10 @@ def upload_df_to_s3(
def upload_df_to_redshift(
redshift_data_client,
- cluster_id: str,
+ cluster_id: Optional[str],
+ workgroup: Optional[str],
database: str,
- user: str,
+ user: Optional[str],
s3_resource,
s3_path: str,
iam_role: str,
@@ -209,6 +227,7 @@ def upload_df_to_redshift(
Args:
redshift_data_client: Redshift Data API Service client
cluster_id: Redshift Cluster Identifier
+ workgroup: Redshift Serverless Workgroup
database: Redshift Database Name
user: Redshift username
s3_resource: S3 Resource object
@@ -236,6 +255,7 @@ def upload_df_to_redshift(
table,
redshift_data_client,
cluster_id=cluster_id,
+ workgroup=workgroup,
database=database,
user=user,
s3_resource=s3_resource,
@@ -248,6 +268,7 @@ def upload_df_to_redshift(
def delete_redshift_table(
redshift_data_client,
cluster_id: str,
+ workgroup: str,
database: str,
user: str,
table_name: str,
@@ -256,6 +277,7 @@ def delete_redshift_table(
execute_redshift_statement(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
drop_query,
@@ -265,9 +287,10 @@ def delete_redshift_table(
def upload_arrow_table_to_redshift(
table: Union[pyarrow.Table, Path],
redshift_data_client,
- cluster_id: str,
+ cluster_id: Optional[str],
+ workgroup: Optional[str],
database: str,
- user: str,
+ user: Optional[str],
s3_resource,
iam_role: str,
s3_path: str,
@@ -286,6 +309,7 @@ def upload_arrow_table_to_redshift(
Args:
redshift_data_client: Redshift Data API Service client
cluster_id: Redshift Cluster Identifier
+ workgroup: Redshift Serverless Workgroup
database: Redshift Database Name
user: Redshift username
s3_resource: S3 Resource object
@@ -345,6 +369,7 @@ def upload_arrow_table_to_redshift(
execute_redshift_statement(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
f"{create_query}; {copy_query};",
@@ -359,6 +384,7 @@ def upload_arrow_table_to_redshift(
def temporarily_upload_df_to_redshift(
redshift_data_client,
cluster_id: str,
+ workgroup: str,
database: str,
user: str,
s3_resource,
@@ -381,6 +407,7 @@ def temporarily_upload_df_to_redshift(
upload_df_to_redshift(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
s3_resource,
@@ -396,6 +423,7 @@ def temporarily_upload_df_to_redshift(
execute_redshift_statement(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
f"DROP TABLE {table_name}",
@@ -407,6 +435,7 @@ def temporarily_upload_arrow_table_to_redshift(
table: Union[pyarrow.Table, Path],
redshift_data_client,
cluster_id: str,
+ workgroup: str,
database: str,
user: str,
s3_resource,
@@ -431,6 +460,7 @@ def temporarily_upload_arrow_table_to_redshift(
table,
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
s3_resource,
@@ -447,6 +477,7 @@ def temporarily_upload_arrow_table_to_redshift(
execute_redshift_statement(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
f"DROP TABLE {table_name}",
@@ -476,9 +507,10 @@ def delete_s3_directory(s3_resource, bucket: str, key: str):
def execute_redshift_query_and_unload_to_s3(
redshift_data_client,
- cluster_id: str,
+ cluster_id: Optional[str],
+ workgroup: Optional[str],
database: str,
- user: str,
+ user: Optional[str],
s3_path: str,
iam_role: str,
query: str,
@@ -488,6 +520,7 @@ def execute_redshift_query_and_unload_to_s3(
Args:
redshift_data_client: Redshift Data API Service client
cluster_id: Redshift Cluster Identifier
+ workgroup: Redshift Serverless workgroup name
database: Redshift Database Name
user: Redshift username
s3_path: S3 directory where the unloaded data is written
@@ -500,12 +533,15 @@ def execute_redshift_query_and_unload_to_s3(
unique_table_name = "_" + str(uuid.uuid4()).replace("-", "")
query = f"CREATE TEMPORARY TABLE {unique_table_name} AS ({query});\n"
query += f"UNLOAD ('SELECT * FROM {unique_table_name}') TO '{s3_path}/' IAM_ROLE '{iam_role}' FORMAT AS PARQUET"
- execute_redshift_statement(redshift_data_client, cluster_id, database, user, query)
+ execute_redshift_statement(
+ redshift_data_client, cluster_id, workgroup, database, user, query
+ )
def unload_redshift_query_to_pa(
redshift_data_client,
cluster_id: str,
+ workgroup: str,
database: str,
user: str,
s3_resource,
@@ -519,6 +555,7 @@ def unload_redshift_query_to_pa(
execute_redshift_query_and_unload_to_s3(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
s3_path,
@@ -535,6 +572,7 @@ def unload_redshift_query_to_pa(
def unload_redshift_query_to_df(
redshift_data_client,
cluster_id: str,
+ workgroup: str,
database: str,
user: str,
s3_resource,
@@ -546,6 +584,7 @@ def unload_redshift_query_to_df(
table = unload_redshift_query_to_pa(
redshift_data_client,
cluster_id,
+ workgroup,
database,
user,
s3_resource,
@@ -716,7 +755,7 @@ def execute_athena_query_async(
# return athena_data_client.execute_statement(
return athena_data_client.start_query_execution(
QueryString=query,
- QueryExecutionContext={"Database": database},
+ QueryExecutionContext={"Database": database, "Catalog": data_source},
WorkGroup=workgroup,
)
diff --git a/sdk/python/feast/infra/utils/postgres/connection_utils.py b/sdk/python/feast/infra/utils/postgres/connection_utils.py
index 0e9cbf96fe..0d99c8ab99 100644
--- a/sdk/python/feast/infra/utils/postgres/connection_utils.py
+++ b/sdk/python/feast/infra/utils/postgres/connection_utils.py
@@ -5,6 +5,7 @@
import psycopg2
import psycopg2.extras
import pyarrow as pa
+from psycopg2.pool import SimpleConnectionPool
from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig
from feast.type_map import arrow_to_pg_type
@@ -22,10 +23,28 @@ def _get_conn(config: PostgreSQLConfig):
sslcert=config.sslcert_path,
sslrootcert=config.sslrootcert_path,
options="-c search_path={}".format(config.db_schema or config.user),
+ keepalives_idle=config.keepalives_idle,
)
return conn
+def _get_connection_pool(config: PostgreSQLConfig):
+ return SimpleConnectionPool(
+ config.min_conn,
+ config.max_conn,
+ dbname=config.database,
+ host=config.host,
+ port=int(config.port),
+ user=config.user,
+ password=config.password,
+ sslmode=config.sslmode,
+ sslkey=config.sslkey_path,
+ sslcert=config.sslcert_path,
+ sslrootcert=config.sslrootcert_path,
+ options="-c search_path={}".format(config.db_schema or config.user),
+ )
+
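+# Usage sketch (assuming the caller manages the pool's lifecycle):
+#   pool = _get_connection_pool(config)
+#   conn = pool.getconn()
+#   try:
+#       ...  # use conn
+#   finally:
+#       pool.putconn(conn)
+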
+
def _df_to_create_table_sql(entity_df, table_name) -> str:
pa_table = pa.Table.from_pandas(entity_df)
columns = [
diff --git a/sdk/python/feast/infra/utils/postgres/postgres_config.py b/sdk/python/feast/infra/utils/postgres/postgres_config.py
index f22cc6c204..a4ebb456ef 100644
--- a/sdk/python/feast/infra/utils/postgres/postgres_config.py
+++ b/sdk/python/feast/infra/utils/postgres/postgres_config.py
@@ -1,3 +1,4 @@
+from enum import Enum
from typing import Optional
from pydantic import StrictStr
@@ -5,7 +6,15 @@
from feast.repo_config import FeastConfigBaseModel
+class ConnectionType(Enum):
+ singleton = "singleton"
+ pool = "pool"
+
+
class PostgreSQLConfig(FeastConfigBaseModel):
+ min_conn: int = 1
+ max_conn: int = 10
+ conn_type: ConnectionType = ConnectionType.singleton
host: StrictStr
port: int = 5432
database: StrictStr
@@ -16,3 +25,4 @@ class PostgreSQLConfig(FeastConfigBaseModel):
sslkey_path: Optional[StrictStr] = None
sslcert_path: Optional[StrictStr] = None
sslrootcert_path: Optional[StrictStr] = None
+ keepalives_idle: Optional[int] = None
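+
+    # Example feature_store.yaml snippet (hypothetical values):
+    #   online_store:
+    #     type: postgres
+    #     conn_type: pool
+    #     min_conn: 1
+    #     max_conn: 10
+    #     keepalives_idle: 60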
diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql
new file mode 100644
index 0000000000..4b53d6bb3f
--- /dev/null
+++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql
@@ -0,0 +1,92 @@
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."DATA_SOURCES" (
+ data_source_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ data_source_proto BINARY NOT NULL,
+ PRIMARY KEY (data_source_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."ENTITIES" (
+ entity_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ entity_proto BINARY NOT NULL,
+ PRIMARY KEY (entity_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."FEAST_METADATA" (
+ project_id VARCHAR,
+ metadata_key VARCHAR,
+ metadata_value VARCHAR NOT NULL,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ PRIMARY KEY (project_id, metadata_key)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."FEATURE_SERVICES" (
+ feature_service_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ feature_service_proto BINARY NOT NULL,
+ PRIMARY KEY (feature_service_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."FEATURE_VIEWS" (
+ feature_view_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ feature_view_proto BINARY NOT NULL,
+ materialized_intervals BINARY,
+ user_metadata BINARY,
+ PRIMARY KEY (feature_view_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."MANAGED_INFRA" (
+ infra_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ infra_proto BINARY NOT NULL,
+ PRIMARY KEY (infra_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."ON_DEMAND_FEATURE_VIEWS" (
+ on_demand_feature_view_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ on_demand_feature_view_proto BINARY NOT NULL,
+ user_metadata BINARY,
+ PRIMARY KEY (on_demand_feature_view_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."REQUEST_FEATURE_VIEWS" (
+ request_feature_view_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ request_feature_view_proto BINARY NOT NULL,
+ user_metadata BINARY,
+ PRIMARY KEY (request_feature_view_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."SAVED_DATASETS" (
+ saved_dataset_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ saved_dataset_proto BINARY NOT NULL,
+ PRIMARY KEY (saved_dataset_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."STREAM_FEATURE_VIEWS" (
+ stream_feature_view_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ stream_feature_view_proto BINARY NOT NULL,
+ user_metadata BINARY,
+ PRIMARY KEY (stream_feature_view_name, project_id)
+);
+
+CREATE TABLE IF NOT EXISTS REGISTRY_PATH."VALIDATION_REFERENCES" (
+ validation_reference_name VARCHAR,
+ project_id VARCHAR,
+ last_updated_timestamp TIMESTAMP_LTZ NOT NULL,
+ validation_reference_proto BINARY NOT NULL,
+ PRIMARY KEY (validation_reference_name, project_id)
+)
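+
+-- REGISTRY_PATH above is a placeholder; the registry is expected to substitute the
+-- fully qualified "<database>"."<schema>" from the configured registry path before
+-- executing these statements.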
diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql
new file mode 100644
index 0000000000..7f5c1991ea
--- /dev/null
+++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS REGISTRY_PATH."DATA_SOURCES";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."ENTITIES";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."FEAST_METADATA";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."FEATURE_SERVICES";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."FEATURE_VIEWS";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."MANAGED_INFRA";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."ON_DEMAND_FEATURE_VIEWS";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."REQUEST_FEATURE_VIEWS";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."SAVED_DATASETS";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."STREAM_FEATURE_VIEWS";
+
+DROP TABLE IF EXISTS REGISTRY_PATH."VALIDATION_REFERENCES"
diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py
index a5d2b05d45..a4cda89a6f 100644
--- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py
+++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py
@@ -39,6 +39,77 @@
getLogger("snowflake.connector.network").disabled = True
logger = getLogger(__name__)
+_cache = {}
+
+
+class GetSnowflakeConnection:
+    def __init__(self, config: Any, autocommit=True):  # config is a Feast config object, not a str
+ self.config = config
+ self.autocommit = autocommit
+
+ def __enter__(self):
+
+ assert self.config.type in [
+ "snowflake.registry",
+ "snowflake.offline",
+ "snowflake.engine",
+ "snowflake.online",
+ ]
+
+ if self.config.type not in _cache:
+ if self.config.type == "snowflake.registry":
+ config_header = "connections.feast_registry"
+ elif self.config.type == "snowflake.offline":
+ config_header = "connections.feast_offline_store"
+            elif self.config.type == "snowflake.engine":
+ config_header = "connections.feast_batch_engine"
+ elif self.config.type == "snowflake.online":
+ config_header = "connections.feast_online_store"
+
+ config_dict = dict(self.config)
+
+ # read config file
+ config_reader = configparser.ConfigParser()
+ config_reader.read([config_dict["config_path"]])
+ kwargs: Dict[str, Any] = {}
+ if config_reader.has_section(config_header):
+ kwargs = dict(config_reader[config_header])
+
+ kwargs.update((k, v) for k, v in config_dict.items() if v is not None)
+
+ for k, v in kwargs.items():
+ if k in ["role", "warehouse", "database", "schema_"]:
+ kwargs[k] = f'"{v}"'
+
+ kwargs["schema"] = kwargs.pop("schema_")
+
+ # https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-key-pair-authentication-key-pair-rotation
+ # https://docs.snowflake.com/en/user-guide/key-pair-auth.html#configuring-key-pair-authentication
+ if "private_key" in kwargs:
+ kwargs["private_key"] = parse_private_key_path(
+ kwargs["private_key"], kwargs["private_key_passphrase"]
+ )
+
+ try:
+ _cache[self.config.type] = snowflake.connector.connect(
+ application="feast",
+ client_session_keep_alive=True,
+ autocommit=self.autocommit,
+ **kwargs,
+ )
+ _cache[self.config.type].cursor().execute(
+ "ALTER SESSION SET TIMEZONE = 'UTC'", _is_internal=True
+ )
+
+ except KeyError as e:
+ raise SnowflakeIncompleteConfig(e)
+
+ self.client = _cache[self.config.type]
+ return self.client
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
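+# Usage sketch: connections are cached per config type and kept alive, e.g.
+#   with GetSnowflakeConnection(registry_config) as conn:
+#       execute_snowflake_statement(conn, "SELECT CURRENT_TIMESTAMP()")
+# __exit__ is a no-op by design, so the cached connection outlives the block.
+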
def assert_snowflake_feature_names(feature_view: FeatureView) -> None:
for feature in feature_view.features:
@@ -57,54 +128,6 @@ def execute_snowflake_statement(conn: SnowflakeConnection, query) -> SnowflakeCu
return cursor
-def get_snowflake_conn(config, autocommit=True) -> SnowflakeConnection:
- assert config.type in ["snowflake.offline", "snowflake.engine", "snowflake.online"]
-
- if config.type == "snowflake.offline":
- config_header = "connections.feast_offline_store"
- if config.type == "snowflake.engine":
- config_header = "connections.feast_batch_engine"
- elif config.type == "snowflake.online":
- config_header = "connections.feast_online_store"
-
- config_dict = dict(config)
-
- # read config file
- config_reader = configparser.ConfigParser()
- config_reader.read([config_dict["config_path"]])
- kwargs: Dict[str, Any] = {}
- if config_reader.has_section(config_header):
- kwargs = dict(config_reader[config_header])
-
- kwargs.update((k, v) for k, v in config_dict.items() if v is not None)
-
- for k, v in kwargs.items():
- if k in ["role", "warehouse", "database", "schema_"]:
- kwargs[k] = f'"{v}"'
-
- kwargs["schema"] = kwargs.pop("schema_")
-
- # https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-key-pair-authentication-key-pair-rotation
- # https://docs.snowflake.com/en/user-guide/key-pair-auth.html#configuring-key-pair-authentication
- if "private_key" in kwargs:
- kwargs["private_key"] = parse_private_key_path(
- kwargs["private_key"], kwargs["private_key_passphrase"]
- )
-
- try:
- conn = snowflake.connector.connect(
- application="feast",
- autocommit=autocommit,
- **kwargs,
- )
-
- conn.cursor().execute("ALTER SESSION SET TIMEZONE = 'UTC'", _is_internal=True)
-
- return conn
- except KeyError as e:
- raise SnowflakeIncompleteConfig(e)
-
-
def get_snowflake_online_store_path(
config: RepoConfig,
feature_view: FeatureView,
diff --git a/sdk/python/feast/proto_json.py b/sdk/python/feast/proto_json.py
index a0a4dce86b..41d2afa55a 100644
--- a/sdk/python/feast/proto_json.py
+++ b/sdk/python/feast/proto_json.py
@@ -1,13 +1,13 @@
import uuid
from typing import Any, Callable, Type
-import pkg_resources
from google.protobuf.json_format import ( # type: ignore
_WKTJSONMETHODS,
ParseError,
_Parser,
_Printer,
)
+from importlib_metadata import version as importlib_version
from packaging import version
from feast.protos.feast.serving.ServingService_pb2 import FeatureList
@@ -118,7 +118,7 @@ def from_json_object_updated(
# https://github.com/feast-dev/feast/issues/2484 Certain feast users need a higher version of protobuf but the
# parameters of `from_json_object` changes in feast 3.20.1. This change gives users flexibility to use earlier versions.
- current_version = pkg_resources.get_distribution("protobuf").version
+ current_version = importlib_version("protobuf")
if version.parse(current_version) < version.parse("3.20"):
_patch_proto_json_encoding(Value, to_json_object, from_json_object)
else:
@@ -168,7 +168,7 @@ def from_json_object(
# https://github.com/feast-dev/feast/issues/2484 Certain feast users need a higher version of protobuf but the
# parameters of `from_json_object` changes in feast 3.20.1. This change gives users flexibility to use earlier versions.
- current_version = pkg_resources.get_distribution("protobuf").version
+ current_version = importlib_version("protobuf")
if version.parse(current_version) < version.parse("3.20"):
_patch_proto_json_encoding(RepeatedValue, to_json_object, from_json_object)
else:
@@ -221,7 +221,7 @@ def from_json_object_updated(
# https://github.com/feast-dev/feast/issues/2484 Certain feast users need a higher version of protobuf but the
# parameters of `from_json_object` changes in feast 3.20.1. This change gives users flexibility to use earlier versions.
- current_version = pkg_resources.get_distribution("protobuf").version
+ current_version = importlib_version("protobuf")
if version.parse(current_version) < version.parse("3.20"):
_patch_proto_json_encoding(FeatureList, to_json_object, from_json_object)
else:
diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py
index 8d56c5a995..7a1bd6c251 100644
--- a/sdk/python/feast/repo_config.py
+++ b/sdk/python/feast/repo_config.py
@@ -15,7 +15,7 @@
validator,
)
from pydantic.error_wrappers import ErrorWrapper
-from pydantic.typing import Dict, Optional, Union
+from pydantic.typing import Dict, Optional
from feast.errors import (
FeastFeatureServerTypeInvalidError,
@@ -23,6 +23,8 @@
FeastOfflineStoreInvalidName,
FeastOnlineStoreInvalidName,
FeastProviderNotSetError,
+ FeastRegistryNotSetError,
+ FeastRegistryTypeInvalidError,
)
from feast.importer import import_class
from feast.usage import log_exceptions
@@ -34,6 +36,12 @@
# These dict exists so that:
# - existing values for the online store type in featurestore.yaml files continue to work in a backwards compatible way
# - first party and third party implementations can use the same class loading code path.
+REGISTRY_CLASS_FOR_TYPE = {
+ "file": "feast.infra.registry.registry.Registry",
+ "sql": "feast.infra.registry.sql.SqlRegistry",
+ "snowflake.registry": "feast.infra.registry.snowflake.SnowflakeRegistry",
+}
+
BATCH_ENGINE_CLASS_FOR_TYPE = {
"local": "feast.infra.materialization.local_engine.LocalMaterializationEngine",
"snowflake.engine": "feast.infra.materialization.snowflake_engine.SnowflakeMaterializationEngine",
@@ -53,6 +61,8 @@
"hbase": "feast.infra.online_stores.contrib.hbase_online_store.hbase.HbaseOnlineStore",
"cassandra": "feast.infra.online_stores.contrib.cassandra_online_store.cassandra_online_store.CassandraOnlineStore",
"mysql": "feast.infra.online_stores.contrib.mysql_online_store.mysql.MySQLOnlineStore",
+ "rockset": "feast.infra.online_stores.contrib.rockset_online_store.rockset.RocksetOnlineStore",
+ "hazelcast": "feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore",
}
OFFLINE_STORE_CLASS_FOR_TYPE = {
@@ -100,14 +110,15 @@ class RegistryConfig(FeastBaseModel):
"""Metadata Store Configuration. Configuration that relates to reading from and writing to the Feast registry."""
registry_type: StrictStr = "file"
- """ str: Provider name or a class name that implements RegistryStore.
- If specified, registry_store_type should be redundant."""
+ """ str: Provider name or a class name that implements Registry."""
registry_store_type: Optional[StrictStr]
""" str: Provider name or a class name that implements RegistryStore. """
- path: StrictStr
- """ str: Path to metadata store. Can be a local path, or remote object storage path, e.g. a GCS URI """
+ path: StrictStr = ""
+ """ str: Path to metadata store.
+    If registry_type is 'file', then it can be a local path, or a remote object storage path, e.g. a GCS URI
+ If registry_type is 'sql', then this is a database URL as expected by SQLAlchemy """
cache_ttl_seconds: StrictInt = 600
"""int: The cache TTL is the amount of time registry state will be cached in memory. If this TTL is exceeded then
@@ -122,9 +133,6 @@ class RegistryConfig(FeastBaseModel):
class RepoConfig(FeastBaseModel):
"""Repo config. Typically loaded from `feature_store.yaml`"""
- registry: Union[StrictStr, RegistryConfig] = "data/registry.db"
- """ str: Path to metadata store. Can be a local path, or remote object storage path, e.g. a GCS URI """
-
project: StrictStr
""" str: Feast project id. This can be any alphanumeric string up to 16 characters.
You can have multiple independent feature repositories deployed to the same cloud
@@ -134,6 +142,14 @@ class RepoConfig(FeastBaseModel):
provider: StrictStr
""" str: local or gcp or aws """
+ _registry_config: Any = Field(alias="registry", default="data/registry.db")
+ """ Configures the registry.
+ Can be:
+ 1. str: a path to a file based registry (a local path, or remote object storage path, e.g. a GCS URI)
+ 2. RegistryConfig: A fully specified file based registry or SQL based registry
+ 3. SnowflakeRegistryConfig: Using a Snowflake table to store the registry
+ """
+
_online_config: Any = Field(alias="online_store")
""" OnlineStoreConfig: Online store configuration (optional depending on provider) """
@@ -173,6 +189,11 @@ class RepoConfig(FeastBaseModel):
def __init__(self, **data: Any):
super().__init__(**data)
+ self._registry = None
+ if "registry" not in data:
+ raise FeastRegistryNotSetError()
+ self._registry_config = data["registry"]
+
self._offline_store = None
if "offline_store" in data:
self._offline_config = data["offline_store"]
@@ -196,6 +217,8 @@ def __init__(self, **data: Any):
self._online_config = "datastore"
elif data["provider"] == "aws":
self._online_config = "dynamodb"
+ elif data["provider"] == "rockset":
+ self._online_config = "rockset"
self._batch_engine = None
if "batch_engine" in data:
@@ -221,11 +244,25 @@ def __init__(self, **data: Any):
RuntimeWarning,
)
- def get_registry_config(self):
- if isinstance(self.registry, str):
- return RegistryConfig(path=self.registry)
- else:
- return self.registry
+ @property
+ def registry(self):
+ if not self._registry:
+ if isinstance(self._registry_config, Dict):
+ if "registry_type" in self._registry_config:
+ self._registry = get_registry_config_from_type(
+ self._registry_config["registry_type"]
+ )(**self._registry_config)
+ else:
+ # This may be a custom registry store, which does not need a 'registry_type'
+ self._registry = RegistryConfig(**self._registry_config)
+ elif isinstance(self._registry_config, str):
+ # User passed in just a path to file registry
+ self._registry = get_registry_config_from_type("file")(
+ path=self._registry_config
+ )
+ elif self._registry_config:
+ self._registry = self._registry_config
+ return self._registry
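+
+    # Accepted "registry" values resolved by the property above (values illustrative):
+    #   registry: data/registry.db                      # 1. plain file path
+    #   registry:                                       # 2. explicit RegistryConfig
+    #     registry_type: sql
+    #     path: postgresql://user:pass@host:5432/feast
+    #   registry:                                       # 3. Snowflake registry config
+    #     registry_type: snowflake.registry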
@property
def offline_store(self):
@@ -455,6 +492,16 @@ def get_data_source_class_from_type(data_source_type: str):
return import_class(module_name, config_class_name, "DataSource")
+def get_registry_config_from_type(registry_type: str):
+    # We do not support custom registries right now
+ if registry_type not in REGISTRY_CLASS_FOR_TYPE:
+ raise FeastRegistryTypeInvalidError(registry_type)
+ registry_type = REGISTRY_CLASS_FOR_TYPE[registry_type]
+ module_name, registry_class_type = registry_type.rsplit(".", 1)
+ config_class_name = f"{registry_class_type}Config"
+ return import_class(module_name, config_class_name, config_class_name)
+
+
def get_batch_engine_config_from_type(batch_engine_type: str):
if batch_engine_type in BATCH_ENGINE_CLASS_FOR_TYPE:
batch_engine_type = BATCH_ENGINE_CLASS_FOR_TYPE[batch_engine_type]
diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py
index 275ae7a63d..f565f93550 100644
--- a/sdk/python/feast/repo_operations.py
+++ b/sdk/python/feast/repo_operations.py
@@ -1,9 +1,11 @@
+import base64
import importlib
import json
import os
import random
import re
import sys
+import tempfile
from importlib.abc import Loader
from importlib.machinery import ModuleSpec
from pathlib import Path
@@ -14,6 +16,7 @@
from feast import PushSource
from feast.batch_feature_view import BatchFeatureView
+from feast.constants import FEATURE_STORE_YAML_ENV_NAME
from feast.data_source import DataSource, KafkaSource, KinesisSource
from feast.diff.registry_diff import extract_objects_for_keep_delete_update_add
from feast.entity import Entity
@@ -312,6 +315,9 @@ def apply_total_with_repo_instance(
click.echo(registry_diff.to_string())
if store._should_use_plan():
+ registry_diff, infra_diff, new_infra = store.plan(repo)
+ click.echo(registry_diff.to_string())
+
store._apply_diffs(registry_diff, infra_diff, new_infra)
click.echo(infra_diff.to_string())
else:
@@ -335,6 +341,27 @@ def log_infra_changes(
)
+@log_exceptions_and_usage
+def create_feature_store(
+ ctx: click.Context,
+) -> FeatureStore:
+ repo = ctx.obj["CHDIR"]
+ # If we received a base64 encoded version of feature_store.yaml, use that
+ config_base64 = os.getenv(FEATURE_STORE_YAML_ENV_NAME)
+ if config_base64:
+ print("Received base64 encoded feature_store.yaml")
+ config_bytes = base64.b64decode(config_base64)
+ # Create a new unique directory for writing feature_store.yaml
+ repo_path = Path(tempfile.mkdtemp())
+ with open(repo_path / "feature_store.yaml", "wb") as f:
+ f.write(config_bytes)
+ return FeatureStore(repo_path=str(repo_path.resolve()))
+ else:
+ fs_yaml_file = ctx.obj["FS_YAML_FILE"]
+ cli_check_repo(repo, fs_yaml_file)
+ return FeatureStore(repo_path=str(repo), fs_yaml_file=fs_yaml_file)
+
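+# Illustrative only: setting the env var named by FEATURE_STORE_YAML_ENV_NAME to
+# base64-encoded feature_store.yaml contents makes create_feature_store() write the
+# decoded YAML to a temp directory and load the store from there.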
+
@log_exceptions_and_usage
def apply_total(
repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool, store: Optional[FeatureStore] = None
diff --git a/sdk/python/feast/templates/aws/bootstrap.py b/sdk/python/feast/templates/aws/bootstrap.py
index dcabadd358..63e5b50203 100644
--- a/sdk/python/feast/templates/aws/bootstrap.py
+++ b/sdk/python/feast/templates/aws/bootstrap.py
@@ -35,6 +35,7 @@ def bootstrap():
aws_utils.execute_redshift_statement(
client,
cluster_id,
+ None,
database,
user,
"DROP TABLE IF EXISTS feast_driver_hourly_stats",
@@ -43,6 +44,7 @@ def bootstrap():
aws_utils.upload_df_to_redshift(
client,
cluster_id,
+ None,
database,
user,
s3,
diff --git a/sdk/python/feast/templates/hazelcast/__init__.py b/sdk/python/feast/templates/hazelcast/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sdk/python/feast/templates/hazelcast/bootstrap.py b/sdk/python/feast/templates/hazelcast/bootstrap.py
new file mode 100644
index 0000000000..e5018e4fe0
--- /dev/null
+++ b/sdk/python/feast/templates/hazelcast/bootstrap.py
@@ -0,0 +1,176 @@
+#
+# Copyright 2019 The Feast Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import pathlib
+from datetime import datetime, timedelta
+
+import click
+
+from feast.file_utils import (
+ remove_lines_from_file,
+ replace_str_in_file,
+ write_setting_or_remove,
+)
+
+
+def collect_hazelcast_online_store_settings():
+ c_cluster_name = None
+ c_members = None
+ c_ca_path = None
+ c_cert_path = None
+ c_key_path = None
+ c_discovery_token = None
+ c_ttl_seconds = None
+
+ cluster_type = click.prompt(
+        "Would you like to connect to a [L]ocal cluster or a [V]iridian cluster?",
+ type=click.Choice(["L", "V"]),
+ show_choices=False,
+ default="L",
+ )
+ is_viridian = cluster_type == "V"
+
+ if is_viridian:
+ c_cluster_name = click.prompt("Cluster ID: ")
+ c_discovery_token = click.prompt("Discovery Token: ")
+ c_ca_path = click.prompt("CA file path: ")
+ c_cert_path = click.prompt("CERT file path: ")
+ c_key_path = click.prompt("Key file path: ")
+ else:
+ c_cluster_name = click.prompt(
+ "Cluster name: ",
+ default="dev",
+ )
+ c_members = click.prompt(
+ "Cluster members:",
+ default="localhost:5701",
+ )
+ needs_ssl = click.confirm("Use TLS/SSL?", default=False)
+ if needs_ssl:
+ c_ca_path = click.prompt("CA file path: ")
+ c_cert_path = click.prompt("CERT file path: ")
+ c_key_path = click.prompt("Key file path: ")
+
+ c_ttl_seconds = click.prompt(
+ "Key TTL seconds: ",
+ default=0,
+ )
+ return {
+ "c_cluster_name": c_cluster_name,
+ "c_members": c_members,
+ "c_ca_path": c_ca_path,
+ "c_cert_path": c_cert_path,
+ "c_key_path": c_key_path,
+ "c_discovery_token": c_discovery_token,
+ "c_ttl_seconds": c_ttl_seconds,
+ }
+
+
+def apply_hazelcast_store_settings(config_file, settings):
+ write_setting_or_remove(
+ config_file,
+ settings["c_cluster_name"],
+ "cluster_name",
+ "c_cluster_name",
+ )
+ #
+ write_setting_or_remove(
+ config_file,
+ settings["c_discovery_token"],
+ "discovery_token",
+ "c_discovery_token",
+ )
+ #
+ if settings["c_members"] is not None:
+ settings["c_members"] = "[" + settings["c_members"] + "]"
+ write_setting_or_remove(
+ config_file,
+ settings["c_members"],
+ "cluster_members",
+ "c_members",
+ )
+ #
+ write_setting_or_remove(
+ config_file,
+ settings["c_ca_path"],
+ "ssl_cafile_path",
+ "c_ca_path",
+ )
+ #
+ write_setting_or_remove(
+ config_file,
+ settings["c_cert_path"],
+ "ssl_certfile_path",
+ "c_cert_path",
+ )
+ #
+ write_setting_or_remove(
+ config_file,
+ settings["c_key_path"],
+ "ssl_keyfile_path",
+ "c_key_path",
+ )
+ if settings["c_ca_path"] is None:
+ remove_lines_from_file(
+ config_file,
+ "ssl_password: ${SSL_PASSWORD}",
+ True,
+ )
+ #
+ replace_str_in_file(
+ config_file,
+ "c_ttl_seconds",
+ f"{settings['c_ttl_seconds']}",
+ )
+
+
+def bootstrap():
+ """
+    Bootstrap() is called automatically by init_repo() during `feast init`.
+ """
+ from feast.driver_test_data import create_driver_hourly_stats_df
+
+ repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo"
+ config_file = repo_path / "feature_store.yaml"
+
+ data_path = repo_path / "data"
+ data_path.mkdir(exist_ok=True)
+
+ end_date = datetime.now().replace(microsecond=0, second=0, minute=0)
+ start_date = end_date - timedelta(days=15)
+ #
+ driver_entities = [1001, 1002, 1003, 1004, 1005]
+ driver_df = create_driver_hourly_stats_df(
+ driver_entities,
+ start_date,
+ end_date,
+ )
+ #
+ driver_stats_path = data_path / "driver_stats.parquet"
+ driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True)
+
+ # example_repo.py
+ example_py_file = repo_path / "example_repo.py"
+ replace_str_in_file(example_py_file, "%PARQUET_PATH%", str(driver_stats_path))
+
+ # store config yaml, interact with user and then customize file:
+ settings = collect_hazelcast_online_store_settings()
+ apply_hazelcast_store_settings(config_file, settings)
+
+
+if __name__ == "__main__":
+ bootstrap()
diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/__init__.py b/sdk/python/feast/templates/hazelcast/feature_repo/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py b/sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py
new file mode 100644
index 0000000000..131f1bcaa6
--- /dev/null
+++ b/sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py
@@ -0,0 +1,139 @@
+# This is an example feature definition file
+
+from datetime import timedelta
+
+import pandas as pd
+
+from feast import (
+ Entity,
+ FeatureService,
+ FeatureView,
+ Field,
+ FileSource,
+ PushSource,
+ RequestSource,
+)
+from feast.on_demand_feature_view import on_demand_feature_view
+from feast.types import Float32, Float64, Int64
+
+# Define an entity for the driver. You can think of an entity as a primary key used to
+# fetch features.
+driver = Entity(name="driver", join_keys=["driver_id"])
+
+# Read data from parquet files. Parquet is convenient for local development mode. For
+# production, you can use your favorite DWH, such as BigQuery. See Feast documentation
+# for more info.
+driver_stats_source = FileSource(
+ name="driver_hourly_stats_source",
+ path="%PARQUET_PATH%",
+ timestamp_field="event_timestamp",
+ created_timestamp_column="created",
+)
+
+# Our parquet files contain sample data that includes a driver_id column, timestamps and
+# three feature columns. Here we define a Feature View that will allow us to serve this
+# data to our model online.
+driver_stats_fv = FeatureView(
+ # The unique name of this feature view. Two feature views in a single
+ # project cannot have the same name
+ name="driver_hourly_stats",
+ entities=[driver],
+ ttl=timedelta(days=1),
+    # The list of features defined below acts as a schema: it defines the features
+    # that get materialized into the store and provides the references used when
+    # building a training dataset or serving features
+ schema=[
+ Field(name="conv_rate", dtype=Float32),
+ Field(name="acc_rate", dtype=Float32),
+ Field(name="avg_daily_trips", dtype=Int64),
+ ],
+ online=True,
+ source=driver_stats_source,
+ # Tags are user defined key/value pairs that are attached to each
+ # feature view
+ tags={"team": "driver_performance"},
+)
+
+# Define a request data source which encodes features / information only
+# available at request time (e.g. part of the user initiated HTTP request)
+input_request = RequestSource(
+ name="vals_to_add",
+ schema=[
+ Field(name="val_to_add", dtype=Int64),
+ Field(name="val_to_add_2", dtype=Int64),
+ ],
+)
+
+
+# Define an on demand feature view which can generate new features based on
+# existing feature views and RequestSource features
+@on_demand_feature_view(
+ sources=[driver_stats_fv, input_request],
+ schema=[
+ Field(name="conv_rate_plus_val1", dtype=Float64),
+ Field(name="conv_rate_plus_val2", dtype=Float64),
+ ],
+)
+def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame:
+ df = pd.DataFrame()
+ df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"]
+ df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"]
+ return df
+
+
+# This groups features into a model version
+driver_activity_v1 = FeatureService(
+ name="driver_activity_v1",
+ features=[
+ driver_stats_fv[["conv_rate"]], # Sub-selects a feature from a feature view
+ transformed_conv_rate, # Selects all features from the feature view
+ ],
+)
+driver_activity_v2 = FeatureService(
+ name="driver_activity_v2", features=[driver_stats_fv, transformed_conv_rate]
+)
+
+# Defines a way to push data (to be available offline, online or both) into Feast.
+driver_stats_push_source = PushSource(
+ name="driver_stats_push_source",
+ batch_source=driver_stats_source,
+)
+
+# Defines a slightly modified version of the feature view from above, where the source
+# has been changed to the push source. This allows fresh features to be directly pushed
+# to the online store for this feature view.
+driver_stats_fresh_fv = FeatureView(
+ name="driver_hourly_stats_fresh",
+ entities=[driver],
+ ttl=timedelta(days=1),
+ schema=[
+ Field(name="conv_rate", dtype=Float32),
+ Field(name="acc_rate", dtype=Float32),
+ Field(name="avg_daily_trips", dtype=Int64),
+ ],
+ online=True,
+ source=driver_stats_push_source, # Changed from above
+ tags={"team": "driver_performance"},
+)
+
+
+# Define an on demand feature view which can generate new features based on
+# existing feature views and RequestSource features
+@on_demand_feature_view(
+ sources=[driver_stats_fresh_fv, input_request], # relies on fresh version of FV
+ schema=[
+ Field(name="conv_rate_plus_val1", dtype=Float64),
+ Field(name="conv_rate_plus_val2", dtype=Float64),
+ ],
+)
+def transformed_conv_rate_fresh(inputs: pd.DataFrame) -> pd.DataFrame:
+ df = pd.DataFrame()
+ df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"]
+ df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"]
+ return df
+
+
+driver_activity_v3 = FeatureService(
+ name="driver_activity_v3",
+ features=[driver_stats_fresh_fv, transformed_conv_rate_fresh],
+)
diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml b/sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml
new file mode 100644
index 0000000000..e26d1bf750
--- /dev/null
+++ b/sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml
@@ -0,0 +1,14 @@
+project: my_project
+registry: data/registry.db
+provider: local
+online_store:
+ type: hazelcast
+ cluster_name: c_cluster_name
+ cluster_members: c_members
+ discovery_token: c_discovery_token
+ ssl_cafile_path: c_ca_path
+ ssl_certfile_path: c_cert_path
+ ssl_keyfile_path: c_key_path
+ ssl_password: ${SSL_PASSWORD} # This value will be read from the `SSL_PASSWORD` environment variable.
+ key_ttl_seconds: c_ttl_seconds
+entity_key_serialization_version: 2
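+# Note: the `c_*` values above are placeholders; during `feast init` the
+# bootstrap script prompts for the real Hazelcast settings and rewrites this
+# file (see collect_hazelcast_online_store_settings /
+# apply_hazelcast_store_settings in bootstrap.py).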
diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py b/sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py
new file mode 100644
index 0000000000..eebeb11311
--- /dev/null
+++ b/sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py
@@ -0,0 +1,130 @@
+import subprocess
+from datetime import datetime
+
+import pandas as pd
+
+from feast import FeatureStore
+from feast.data_source import PushMode
+
+
+def run_demo():
+ store = FeatureStore(repo_path=".")
+ print("\n--- Run feast apply ---")
+ subprocess.run(["feast", "apply"])
+
+ print("\n--- Historical features for training ---")
+ fetch_historical_features_entity_df(store, for_batch_scoring=False)
+
+ print("\n--- Historical features for batch scoring ---")
+ fetch_historical_features_entity_df(store, for_batch_scoring=True)
+
+ print("\n--- Load features into online store ---")
+ store.materialize_incremental(end_date=datetime.now())
+
+ print("\n--- Online features ---")
+ fetch_online_features(store)
+
+ print("\n--- Online features retrieved (instead) through a feature service---")
+ fetch_online_features(store, source="feature_service")
+
+ print(
+ "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---"
+ )
+ fetch_online_features(store, source="push")
+
+ print("\n--- Simulate a stream event ingestion of the hourly stats df ---")
+ event_df = pd.DataFrame.from_dict(
+ {
+ "driver_id": [1001],
+ "event_timestamp": [
+ datetime.now(),
+ ],
+ "created": [
+ datetime.now(),
+ ],
+ "conv_rate": [1.0],
+ "acc_rate": [1.0],
+ "avg_daily_trips": [1000],
+ }
+ )
+ print(event_df)
+ store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE)
+
+ print("\n--- Online features again with updated values from a stream push---")
+ fetch_online_features(store, source="push")
+
+ print("\n--- Run feast teardown ---")
+ subprocess.run(["feast", "teardown"])
+
+
+def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool):
+ # Note: see https://docs.feast.dev/getting-started/concepts/feature-retrieval for more details on how to
+ # retrieve features for all entities in the offline store instead.
+ entity_df = pd.DataFrame.from_dict(
+ {
+ # entity's join key -> entity values
+ "driver_id": [1001, 1002, 1003],
+ # "event_timestamp" (reserved key) -> timestamps
+ "event_timestamp": [
+ datetime(2021, 4, 12, 10, 59, 42),
+ datetime(2021, 4, 12, 8, 12, 10),
+ datetime(2021, 4, 12, 16, 40, 26),
+ ],
+ # (optional) label name -> label values. Feast does not process these
+ "label_driver_reported_satisfaction": [1, 5, 3],
+ # values we're using for an on-demand transformation
+ "val_to_add": [1, 2, 3],
+ "val_to_add_2": [10, 20, 30],
+ }
+ )
+ # For batch scoring, we want the latest timestamps
+ if for_batch_scoring:
+ entity_df["event_timestamp"] = pd.to_datetime("now", utc=True)
+
+ training_df = store.get_historical_features(
+ entity_df=entity_df,
+ features=[
+ "driver_hourly_stats:conv_rate",
+ "driver_hourly_stats:acc_rate",
+ "driver_hourly_stats:avg_daily_trips",
+ "transformed_conv_rate:conv_rate_plus_val1",
+ "transformed_conv_rate:conv_rate_plus_val2",
+ ],
+ ).to_df()
+ print(training_df.head())
+
+
+def fetch_online_features(store: FeatureStore, source: str = ""):
+ entity_rows = [
+ # {join_key: entity_value}
+ {
+ "driver_id": 1001,
+ "val_to_add": 1000,
+ "val_to_add_2": 2000,
+ },
+ {
+ "driver_id": 1002,
+ "val_to_add": 1001,
+ "val_to_add_2": 2002,
+ },
+ ]
+ if source == "feature_service":
+ features_to_fetch = store.get_feature_service("driver_activity_v1")
+ elif source == "push":
+ features_to_fetch = store.get_feature_service("driver_activity_v3")
+ else:
+ features_to_fetch = [
+ "driver_hourly_stats:acc_rate",
+ "transformed_conv_rate:conv_rate_plus_val1",
+ "transformed_conv_rate:conv_rate_plus_val2",
+ ]
+ returned_features = store.get_online_features(
+ features=features_to_fetch,
+ entity_rows=entity_rows,
+ ).to_dict()
+ for key, value in sorted(returned_features.items()):
+ print(key, " : ", value)
+
+
+if __name__ == "__main__":
+ run_demo()
diff --git a/sdk/python/feast/templates/rockset/README.md b/sdk/python/feast/templates/rockset/README.md
new file mode 100644
index 0000000000..d4f1ef6faf
--- /dev/null
+++ b/sdk/python/feast/templates/rockset/README.md
@@ -0,0 +1,21 @@
+# Feast Quickstart
+A quick view of what's in this repository:
+
+* `data/` contains raw demo parquet data
+* `feature_repo/driver_repo.py` contains demo feature definitions
+* `feature_repo/feature_store.yaml` contains a demo setup configuring where data sources are
+* `test_workflow.py` showcases how to run all key Feast commands, including defining, retrieving, and pushing features.
+
+You can run the overall workflow with `python test_workflow.py`.
+
+## To move from this into a more production-ready workflow:
+> See more details in [Running Feast in production](https://docs.feast.dev/how-to-guides/running-feast-in-production)
+
+1. `feature_store.yaml` points to a local file as a registry. You'll want to set up a remote file (e.g. in S3/GCS) or a
+ SQL registry (a sketch follows this list). See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details.
+2. Set up CI/CD and dev vs. staging vs. prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions).
+3. (optional) Regularly scheduled materialization to power low-latency feature retrieval (e.g. via Airflow). See [Batch data ingestion](https://docs.feast.dev/getting-started/concepts/data-ingestion#batch-data-ingestion)
+ for more details.
+4. (optional) Deploy feature server instances with `feast serve` to expose endpoints to retrieve online features.
+ - See [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server) for details.
+ - Use cases can also directly call the Feast client to fetch features as per [Feature retrieval](https://docs.feast.dev/getting-started/concepts/feature-retrieval)
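+
+As a minimal sketch of step 1 (the bucket name below is hypothetical, and assumes
+your deployment has S3 access), the `registry` line in `feature_store.yaml` can
+point at a remote object store path instead of a local file:
+
+```yaml
+project: my_project
+registry: s3://my-feast-bucket/registry.db
+provider: local
+online_store:
+  type: rockset
+```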
diff --git a/sdk/python/feast/templates/rockset/__init__.py b/sdk/python/feast/templates/rockset/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sdk/python/feast/templates/rockset/bootstrap.py b/sdk/python/feast/templates/rockset/bootstrap.py
new file mode 100644
index 0000000000..a3dc17f18e
--- /dev/null
+++ b/sdk/python/feast/templates/rockset/bootstrap.py
@@ -0,0 +1,30 @@
+import click
+
+from feast.file_utils import replace_str_in_file
+
+
+def bootstrap():
+ # bootstrap() is called automatically by init_repo() during `feast init`
+ import pathlib
+
+ repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo"
+ config_file = repo_path / "feature_store.yaml"
+ data_path = repo_path / "data"
+ data_path.mkdir(exist_ok=True)
+
+ rockset_apikey = click.prompt(
+ "Rockset Api Key (If blank will be read from ROCKSET_APIKEY in ENV):",
+ default="",
+ )
+
+ rockset_host = click.prompt(
+ "Rockset Host (If blank will be read from ROCKSET_APISERVER in ENV):",
+ default="",
+ )
+
+ replace_str_in_file(config_file, "ROCKSET_APIKEY", rockset_apikey)
+ replace_str_in_file(config_file, "ROCKSET_APISERVER", rockset_host)
+
+
+if __name__ == "__main__":
+ bootstrap()
diff --git a/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml b/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml
new file mode 100644
index 0000000000..57cf8e73bb
--- /dev/null
+++ b/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml
@@ -0,0 +1,8 @@
+project: my_project
+registry: registry.db
+provider: local
+online_store:
+ type: rockset
+ api_key: ROCKSET_APIKEY
+ host: ROCKSET_APISERVER # (api.usw2a1.rockset.com, api.euc1a1.rockset.com, api.use1a1.rockset.com)
+entity_key_serialization_version: 2
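+# Note: ROCKSET_APIKEY and ROCKSET_APISERVER above are placeholders that
+# bootstrap.py replaces with the values entered during `feast init`; if left
+# blank, the values are read from the same-named environment variables.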
diff --git a/sdk/python/feast/templates/snowflake/test_workflow.py b/sdk/python/feast/templates/snowflake/test_workflow.py
index b121f22980..3c44342881 100644
--- a/sdk/python/feast/templates/snowflake/test_workflow.py
+++ b/sdk/python/feast/templates/snowflake/test_workflow.py
@@ -11,11 +11,12 @@
def run_demo():
- store = FeatureStore(repo_path="./feature_repo")
print("\n--- Run feast apply to setup feature store on Snowflake ---")
command = "cd feature_repo; feast apply"
subprocess.run(command, shell=True)
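+ # Construct the store only after `feast apply`, so that the registry it
+ # reads has already been created.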
+ store = FeatureStore(repo_path="./feature_repo")
+
print("\n--- Historical features for training ---")
fetch_historical_features_entity_df(store, for_batch_scoring=False)
diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py
index 78b625aa89..3f49069066 100644
--- a/sdk/python/feast/type_map.py
+++ b/sdk/python/feast/type_map.py
@@ -300,7 +300,7 @@ def _type_err(item, dtype):
ValueType.DOUBLE: ("double_val", lambda x: x, {float, np.float64}),
ValueType.STRING: ("string_val", lambda x: str(x), None),
ValueType.BYTES: ("bytes_val", lambda x: x, {bytes}),
- ValueType.BOOL: ("bool_val", lambda x: x, {bool, np.bool_}),
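+ # int / np.int_ are accepted for BOOL so that 0/1-valued columns validate
+ # as booleans.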
+ ValueType.BOOL: ("bool_val", lambda x: x, {bool, np.bool_, int, np.int_}),
}
@@ -405,9 +405,14 @@ def _python_value_to_proto_value(
if (sample == 0 or sample == 0.0) and feast_value_type != ValueType.BOOL:
# Numpy convert 0 to int. However, in the feature view definition, the type of column may be a float.
# So, if value is 0, type validation must pass if scalar_types are either int or float.
- assert type(sample) in [np.int64, int, np.float64, float]
+ allowed_types = {np.int64, int, np.float64, float}
+ assert (
+ type(sample) in allowed_types
+ ), f"Type `{type(sample)}` not in {allowed_types}"
else:
- assert type(sample) in valid_scalar_types
+ assert (
+ type(sample) in valid_scalar_types
+ ), f"Type `{type(sample)}` not in {valid_scalar_types}"
if feast_value_type == ValueType.BOOL:
# ProtoValue does not support conversion of np.bool_ so we need to convert it to support np.bool_.
return [
@@ -523,6 +528,7 @@ def bq_to_feast_value_type(bq_type_as_str: str) -> ValueType:
"DATETIME": ValueType.UNIX_TIMESTAMP,
"TIMESTAMP": ValueType.UNIX_TIMESTAMP,
"INTEGER": ValueType.INT64,
+ "NUMERIC": ValueType.INT64,
"INT64": ValueType.INT64,
"STRING": ValueType.STRING,
"FLOAT": ValueType.DOUBLE,
@@ -533,7 +539,8 @@ def bq_to_feast_value_type(bq_type_as_str: str) -> ValueType:
"NULL": ValueType.NULL,
}
- value_type = type_map[bq_type_as_str]
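+ # Fall back to STRING for BigQuery types that are not mapped explicitly,
+ # rather than raising a KeyError.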
+ value_type = type_map.get(bq_type_as_str, ValueType.STRING)
+
if is_list:
value_type = ValueType[value_type.name + "_LIST"]
@@ -839,7 +846,9 @@ def pg_type_to_feast_value_type(type_str: str) -> ValueType:
return value
-def feast_value_type_to_pa(feast_type: ValueType) -> "pyarrow.DataType":
+def feast_value_type_to_pa(
+ feast_type: ValueType, timestamp_unit: str = "us"
+) -> "pyarrow.DataType":
import pyarrow
type_map = {
@@ -850,7 +859,7 @@ def feast_value_type_to_pa(feast_type: ValueType) -> "pyarrow.DataType":
ValueType.STRING: pyarrow.string(),
ValueType.BYTES: pyarrow.binary(),
ValueType.BOOL: pyarrow.bool_(),
- ValueType.UNIX_TIMESTAMP: pyarrow.timestamp("us"),
+ ValueType.UNIX_TIMESTAMP: pyarrow.timestamp(timestamp_unit),
ValueType.INT32_LIST: pyarrow.list_(pyarrow.int32()),
ValueType.INT64_LIST: pyarrow.list_(pyarrow.int64()),
ValueType.DOUBLE_LIST: pyarrow.list_(pyarrow.float64()),
@@ -858,20 +867,33 @@ def feast_value_type_to_pa(feast_type: ValueType) -> "pyarrow.DataType":
ValueType.STRING_LIST: pyarrow.list_(pyarrow.string()),
ValueType.BYTES_LIST: pyarrow.list_(pyarrow.binary()),
ValueType.BOOL_LIST: pyarrow.list_(pyarrow.bool_()),
- ValueType.UNIX_TIMESTAMP_LIST: pyarrow.list_(pyarrow.timestamp("us")),
+ ValueType.UNIX_TIMESTAMP_LIST: pyarrow.list_(pyarrow.timestamp(timestamp_unit)),
ValueType.NULL: pyarrow.null(),
}
return type_map[feast_type]
def pg_type_code_to_pg_type(code: int) -> str:
- return {
+ """Map the postgres type code a Feast type string
+
+ Rather than raise an exception on an unknown type, we return the
+ string representation of the type code. This way rather than raising
+ an exception on unknown types, Feast will just skip the problem columns.
+
+ Note that json and jsonb are not supported but this shows up in the
+ log as a warning. Since postgres allows custom types we return an unknown for those cases.
+
+ See: https://jdbc.postgresql.org/documentation/publicapi/index.html?constant-values.html
+ """
+ PG_TYPE_MAP = {
16: "boolean",
17: "bytea",
20: "bigint",
21: "smallint",
23: "integer",
25: "text",
+ 114: "json",
+ 199: "json[]",
700: "real",
701: "double precision",
1000: "boolean[]",
@@ -897,7 +919,11 @@ def pg_type_code_to_pg_type(code: int) -> str:
1700: "numeric",
2950: "uuid",
2951: "uuid[]",
- }[code]
+ 3802: "jsonb",
+ 3807: "jsonb[]",
+ }
+
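+ # e.g. pg_type_code_to_pg_type(16) == "boolean"; an unmapped or custom type
+ # code falls back to "unknown" rather than raising a KeyError.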
+ return PG_TYPE_MAP.get(code, "unknown")
def pg_type_code_to_arrow(code: int) -> str:
diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py
index d69c133289..9950eea070 100644
--- a/sdk/python/feast/ui_server.py
+++ b/sdk/python/feast/ui_server.py
@@ -2,7 +2,7 @@
import threading
from typing import Callable, Optional
-import pkg_resources
+import importlib_resources
import uvicorn
from fastapi import FastAPI, Response
from fastapi.middleware.cors import CORSMiddleware
@@ -13,11 +13,9 @@
def get_app(
store: "feast.FeatureStore",
- get_registry_dump: Callable,
project_id: str,
registry_ttl_secs: int,
- host: str,
- port: int,
+ root_path: str = "",
):
app = FastAPI()
@@ -53,20 +51,21 @@ def shutdown_event():
async_refresh()
- ui_dir = pkg_resources.resource_filename(__name__, "ui/build/")
- # Initialize with the projects-list.json file
- with open(ui_dir + "projects-list.json", mode="w") as f:
- projects_dict = {
- "projects": [
- {
- "name": "Project",
- "description": "Test project",
- "id": project_id,
- "registryPath": "/registry",
- }
- ]
- }
- f.write(json.dumps(projects_dict))
+ ui_dir_ref = importlib_resources.files(__name__) / "ui/build/"
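+ # as_file() yields a concrete filesystem path for the resource, extracting it
+ # to a temporary location if the package is zipped; this replaces the removed
+ # pkg_resources.resource_filename() call.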
+ with importlib_resources.as_file(ui_dir_ref) as ui_dir:
+ # Initialize with the projects-list.json file
+ with ui_dir.joinpath("projects-list.json").open(mode="w") as f:
+ projects_dict = {
+ "projects": [
+ {
+ "name": "Project",
+ "description": "Test project",
+ "id": project_id,
+ "registryPath": f"{root_path}/registry",
+ }
+ ]
+ }
+ f.write(json.dumps(projects_dict))
@app.get("/registry")
def read_registry():
@@ -101,14 +100,12 @@ def start_server(
get_registry_dump: Callable,
project_id: str,
registry_ttl_sec: int,
- root_path: Optional[str] = "",
+ root_path: str = "",
):
app = get_app(
store,
- get_registry_dump,
project_id,
registry_ttl_sec,
- host,
- port,
+ root_path,
)
- uvicorn.run(app, host=host, port=port, root_path=root_path)
+ uvicorn.run(app, host=host, port=port)
diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt
index 049a9e19bf..cf1f4e938e 100644
--- a/sdk/python/requirements/py3.10-ci-requirements.txt
+++ b/sdk/python/requirements/py3.10-ci-requirements.txt
@@ -5,103 +5,114 @@
# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt
#
adal==1.2.7
- # via
- # azure-datalake-store
- # msrestazure
+ # via msrestazure
adlfs==0.5.9
# via feast (setup.py)
-aiobotocore==2.1.2
- # via s3fs
-aiohttp==3.8.3
+aiohttp==3.8.5
# via
# adlfs
- # aiobotocore
# gcsfs
- # s3fs
-aioitertools==0.11.0
- # via aiobotocore
aiosignal==1.3.1
# via aiohttp
alabaster==0.7.13
# via sphinx
altair==4.2.0
# via great-expectations
-anyio==3.6.1
+anyio==4.0.0
# via
+ # httpcore
+ # jupyter-server
# starlette
# watchfiles
appdirs==1.4.4
# via fissix
appnope==0.1.3
- # via ipython
+ # via
+ # ipykernel
+ # ipython
+argon2-cffi==23.1.0
+ # via jupyter-server
+argon2-cffi-bindings==21.2.0
+ # via argon2-cffi
+arrow==1.2.3
+ # via isoduration
asn1crypto==1.5.1
# via
# oscrypto
# snowflake-connector-python
assertpy==1.1
# via feast (setup.py)
-asttokens==2.2.1
+asttokens==2.4.0
# via stack-data
-async-timeout==4.0.2
+async-lru==2.0.4
+ # via jupyterlab
+async-timeout==4.0.3
# via
# aiohttp
# redis
-attrs==22.1.0
+attrs==23.1.0
# via
# aiohttp
# bowler
# jsonschema
- # pytest
+ # referencing
avro==1.10.0
# via feast (setup.py)
-azure-core==1.26.2
+azure-core==1.29.4
# via
# adlfs
# azure-identity
# azure-storage-blob
# msrest
-azure-datalake-store==0.0.52
+azure-datalake-store==0.0.53
# via adlfs
-azure-identity==1.12.0
+azure-identity==1.14.0
# via
# adlfs
# feast (setup.py)
-azure-storage-blob==12.14.1
+azure-storage-blob==12.17.0
# via
# adlfs
# feast (setup.py)
-babel==2.11.0
- # via sphinx
+babel==2.12.1
+ # via
+ # jupyterlab-server
+ # sphinx
backcall==0.2.0
# via ipython
+beautifulsoup4==4.12.2
+ # via nbconvert
black==22.12.0
# via feast (setup.py)
-boto3==1.20.23
+bleach==6.0.0
+ # via nbconvert
+boto3==1.28.43
# via
# feast (setup.py)
# moto
-botocore==1.23.24
+botocore==1.31.43
# via
- # aiobotocore
# boto3
# moto
# s3transfer
bowler==0.9.0
# via feast (setup.py)
-build==0.10.0
+build==1.0.3
# via
# feast (setup.py)
# pip-tools
-bytewax==0.13.1
+bytewax==0.15.1
# via feast (setup.py)
-cachecontrol==0.12.11
+cachecontrol==0.13.1
# via firebase-admin
-cachetools==5.2.0
+cachetools==5.3.1
# via google-auth
-cassandra-driver==3.25.0
+cassandra-driver==3.28.0
# via feast (setup.py)
-certifi==2022.12.7
+certifi==2023.7.22
# via
+ # httpcore
+ # httpx
# kubernetes
# minio
# msrest
@@ -109,35 +120,41 @@ certifi==2022.12.7
# snowflake-connector-python
cffi==1.15.1
# via
+ # argon2-cffi-bindings
# azure-datalake-store
# cryptography
# snowflake-connector-python
-cfgv==3.3.1
+cfgv==3.4.0
# via pre-commit
-charset-normalizer==2.1.1
+charset-normalizer==3.2.0
# via
# aiohttp
# requests
# snowflake-connector-python
-click==8.1.3
+click==8.1.7
# via
# black
# bowler
+ # dask
# feast (setup.py)
# geomet
# great-expectations
# moreorless
# pip-tools
# uvicorn
-cloudpickle==2.2.0
+cloudpickle==2.2.1
# via dask
-colorama==0.4.5
+colorama==0.4.6
# via
# feast (setup.py)
# great-expectations
-coverage[toml]==7.0.5
+comm==0.1.4
+ # via
+ # ipykernel
+ # ipywidgets
+coverage[toml]==7.3.1
# via pytest-cov
-cryptography==35.0.0
+cryptography==41.0.3
# via
# adal
# azure-identity
@@ -149,28 +166,30 @@ cryptography==35.0.0
# pyjwt
# pyopenssl
# snowflake-connector-python
-dask==2022.1.1
+ # types-pyopenssl
+ # types-redis
+dask==2023.9.1
# via feast (setup.py)
-dataclasses==0.6
- # via great-expectations
-db-dtypes==1.0.5
+db-dtypes==1.1.1
# via google-cloud-bigquery
+debugpy==1.7.0
+ # via ipykernel
decorator==5.1.1
# via
# gcsfs
# ipython
-deprecated==1.2.13
- # via redis
+defusedxml==0.7.1
+ # via nbconvert
deprecation==2.1.0
# via testcontainers
-dill==0.3.6
+dill==0.3.7
# via
# bytewax
# feast (setup.py)
# multiprocess
-distlib==0.3.6
+distlib==0.3.7
# via virtualenv
-docker==6.0.1
+docker==6.1.3
# via
# feast (setup.py)
# testcontainers
@@ -178,21 +197,24 @@ docutils==0.19
# via sphinx
entrypoints==0.4
# via altair
-exceptiongroup==1.1.0
- # via pytest
-execnet==1.9.0
+exceptiongroup==1.1.3
+ # via
+ # anyio
+ # ipython
+ # pytest
+execnet==2.0.2
# via pytest-xdist
executing==1.2.0
# via stack-data
-fastapi==0.85.0
+fastapi==0.99.1
# via feast (setup.py)
-fastavro==1.6.1
+fastavro==1.8.3
# via
# feast (setup.py)
# pandavro
-fastjsonschema==2.16.2
+fastjsonschema==2.18.0
# via nbformat
-filelock==3.9.0
+filelock==3.12.3
# via
# snowflake-connector-python
# virtualenv
@@ -202,7 +224,9 @@ fissix==21.11.13
# via bowler
flake8==6.0.0
# via feast (setup.py)
-frozenlist==1.3.3
+fqdn==1.5.1
+ # via jsonschema
+frozenlist==1.4.0
# via
# aiohttp
# aiosignal
@@ -211,12 +235,13 @@ fsspec==2022.1.0
# adlfs
# dask
# gcsfs
- # s3fs
gcsfs==2022.1.0
# via feast (setup.py)
+geojson==2.5.0
+ # via rockset
geomet==0.2.1.post1
# via cassandra-driver
-google-api-core[grpc]==2.11.0
+google-api-core[grpc]==2.11.1
# via
# feast (setup.py)
# firebase-admin
@@ -228,9 +253,9 @@ google-api-core[grpc]==2.11.0
# google-cloud-datastore
# google-cloud-firestore
# google-cloud-storage
-google-api-python-client==2.72.0
+google-api-python-client==2.98.0
# via firebase-admin
-google-auth==2.16.0
+google-auth==2.22.0
# via
# gcsfs
# google-api-core
@@ -242,185 +267,285 @@ google-auth==2.16.0
# kubernetes
google-auth-httplib2==0.1.0
# via google-api-python-client
-google-auth-oauthlib==0.8.0
+google-auth-oauthlib==1.0.0
# via gcsfs
-google-cloud-bigquery[pandas]==3.4.1
+google-cloud-bigquery[pandas]==3.11.4
# via feast (setup.py)
-google-cloud-bigquery-storage==2.18.0
+google-cloud-bigquery-storage==2.22.0
# via feast (setup.py)
-google-cloud-bigtable==2.15.0
+google-cloud-bigtable==2.21.0
# via feast (setup.py)
-google-cloud-core==2.3.2
+google-cloud-core==2.3.3
# via
# google-cloud-bigquery
# google-cloud-bigtable
# google-cloud-datastore
# google-cloud-firestore
# google-cloud-storage
-google-cloud-datastore==2.12.0
+google-cloud-datastore==2.18.0
# via feast (setup.py)
-google-cloud-firestore==2.9.0
+google-cloud-firestore==2.11.1
# via firebase-admin
-google-cloud-storage==2.7.0
+google-cloud-storage==2.10.0
# via
# feast (setup.py)
# firebase-admin
# gcsfs
google-crc32c==1.5.0
# via google-resumable-media
-google-resumable-media==2.4.0
+google-resumable-media==2.6.0
# via
# google-cloud-bigquery
# google-cloud-storage
-googleapis-common-protos[grpc]==1.56.4
+googleapis-common-protos[grpc]==1.60.0
# via
# feast (setup.py)
# google-api-core
# grpc-google-iam-v1
# grpcio-status
-great-expectations==0.14.13
+great-expectations==0.15.50
# via feast (setup.py)
-greenlet==2.0.1
- # via sqlalchemy
grpc-google-iam-v1==0.12.6
# via google-cloud-bigtable
-grpcio==1.51.1
+grpcio==1.58.0
# via
# feast (setup.py)
# google-api-core
# google-cloud-bigquery
# googleapis-common-protos
# grpc-google-iam-v1
+ # grpcio-health-checking
# grpcio-reflection
# grpcio-status
# grpcio-testing
# grpcio-tools
-grpcio-reflection==1.49.1
+grpcio-health-checking==1.58.0
# via feast (setup.py)
-grpcio-status==1.51.1
+grpcio-reflection==1.58.0
+ # via feast (setup.py)
+grpcio-status==1.58.0
# via google-api-core
-grpcio-testing==1.51.1
+grpcio-testing==1.58.0
+ # via feast (setup.py)
+grpcio-tools==1.58.0
# via feast (setup.py)
-grpcio-tools==1.51.1
+gunicorn==21.2.0
# via feast (setup.py)
h11==0.14.0
- # via uvicorn
+ # via
+ # httpcore
+ # uvicorn
happybase==1.2.0
# via feast (setup.py)
-hiredis==2.1.1
+hazelcast-python-client==5.3.0
+ # via feast (setup.py)
+hiredis==2.2.3
# via feast (setup.py)
-httplib2==0.21.0
+httpcore==0.17.3
+ # via httpx
+httplib2==0.22.0
# via
# google-api-python-client
# google-auth-httplib2
-httptools==0.5.0
+httptools==0.6.0
# via uvicorn
-identify==2.5.13
+httpx==0.24.1
+ # via feast (setup.py)
+identify==2.5.27
# via pre-commit
idna==3.4
# via
# anyio
+ # httpx
+ # jsonschema
# requests
# snowflake-connector-python
# yarl
imagesize==1.4.1
# via sphinx
-importlib-metadata==6.0.0
- # via great-expectations
+importlib-metadata==6.8.0
+ # via
+ # dask
+ # feast (setup.py)
+ # great-expectations
+importlib-resources==6.0.1
+ # via feast (setup.py)
iniconfig==2.0.0
# via pytest
-ipython==8.8.0
+ipykernel==6.25.2
+ # via jupyterlab
+ipython==8.15.0
+ # via
+ # great-expectations
+ # ipykernel
+ # ipywidgets
+ipywidgets==8.1.0
# via great-expectations
isodate==0.6.1
- # via msrest
-isort==5.11.4
+ # via
+ # azure-storage-blob
+ # msrest
+isoduration==20.11.0
+ # via jsonschema
+isort==5.12.0
# via feast (setup.py)
-jedi==0.18.2
+jedi==0.19.0
# via ipython
-jinja2==3.0.3
+jinja2==3.1.2
# via
# altair
# feast (setup.py)
# great-expectations
+ # jupyter-server
+ # jupyterlab
+ # jupyterlab-server
# moto
+ # nbconvert
# sphinx
-jmespath==0.10.0
+jmespath==1.0.1
# via
# boto3
# botocore
-jsonpatch==1.32
+json5==0.9.14
+ # via jupyterlab-server
+jsonpatch==1.33
# via great-expectations
-jsonpointer==2.3
- # via jsonpatch
-jsonschema==4.16.0
+jsonpointer==2.4
+ # via
+ # jsonpatch
+ # jsonschema
+jsonschema[format-nongpl]==4.19.0
# via
# altair
# feast (setup.py)
# great-expectations
+ # jupyter-events
+ # jupyterlab-server
# nbformat
-jupyter-core==5.1.3
- # via nbformat
+jsonschema-specifications==2023.7.1
+ # via jsonschema
+jupyter-client==8.3.1
+ # via
+ # ipykernel
+ # jupyter-server
+ # nbclient
+jupyter-core==5.3.1
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+ # jupyterlab
+ # nbclient
+ # nbconvert
+ # nbformat
+jupyter-events==0.7.0
+ # via jupyter-server
+jupyter-lsp==2.2.0
+ # via jupyterlab
+jupyter-server==2.7.3
+ # via
+ # jupyter-lsp
+ # jupyterlab
+ # jupyterlab-server
+ # notebook
+ # notebook-shim
+jupyter-server-terminals==0.4.4
+ # via jupyter-server
+jupyterlab==4.0.5
+ # via notebook
+jupyterlab-pygments==0.2.2
+ # via nbconvert
+jupyterlab-server==2.24.0
+ # via
+ # jupyterlab
+ # notebook
+jupyterlab-widgets==3.0.8
+ # via ipywidgets
kubernetes==20.13.0
# via feast (setup.py)
locket==1.0.0
# via partd
-markupsafe==2.1.1
+makefun==1.15.1
+ # via great-expectations
+markupsafe==2.1.3
# via
# jinja2
- # moto
+ # nbconvert
+ # werkzeug
+marshmallow==3.20.1
+ # via great-expectations
matplotlib-inline==0.1.6
- # via ipython
+ # via
+ # ipykernel
+ # ipython
mccabe==0.7.0
# via flake8
minio==7.1.0
# via feast (setup.py)
-mistune==2.0.4
- # via great-expectations
-mmh3==3.0.0
+mistune==3.0.1
+ # via
+ # great-expectations
+ # nbconvert
+mmh3==4.0.1
# via feast (setup.py)
mock==2.0.0
# via feast (setup.py)
moreorless==0.4.0
# via bowler
-moto==3.1.18
+moto==4.2.2
# via feast (setup.py)
-msal==1.20.0
+msal==1.23.0
# via
+ # azure-datalake-store
# azure-identity
# msal-extensions
msal-extensions==1.0.0
# via azure-identity
-msgpack==1.0.4
+msgpack==1.0.5
# via cachecontrol
msrest==0.7.1
- # via
- # azure-storage-blob
- # msrestazure
+ # via msrestazure
msrestazure==0.6.4
# via adlfs
multidict==6.0.4
# via
# aiohttp
# yarl
-multiprocess==0.70.14
+multiprocess==0.70.15
# via bytewax
-mypy==0.981
+mypy==0.982
# via
# feast (setup.py)
# sqlalchemy
-mypy-extensions==0.4.3
+mypy-extensions==1.0.0
# via
# black
# mypy
mypy-protobuf==3.1
# via feast (setup.py)
-mysqlclient==2.1.1
+mysqlclient==2.2.0
# via feast (setup.py)
-nbformat==5.7.3
- # via great-expectations
-nodeenv==1.7.0
+nbclient==0.8.0
+ # via nbconvert
+nbconvert==7.8.0
+ # via jupyter-server
+nbformat==5.9.2
+ # via
+ # great-expectations
+ # jupyter-server
+ # nbclient
+ # nbconvert
+nest-asyncio==1.5.7
+ # via ipykernel
+nodeenv==1.8.0
# via pre-commit
-numpy==1.23.3
+notebook==7.0.3
+ # via great-expectations
+notebook-shim==0.2.3
+ # via
+ # jupyterlab
+ # notebook
+numpy==1.24.4
# via
# altair
# db-dtypes
@@ -434,7 +559,9 @@ oauthlib==3.2.2
# via requests-oauthlib
oscrypto==1.3.0
# via snowflake-connector-python
-packaging==21.3
+overrides==7.4.0
+ # via jupyter-server
+packaging==23.1
# via
# build
# dask
@@ -443,10 +570,17 @@ packaging==21.3
# docker
# google-cloud-bigquery
# great-expectations
+ # gunicorn
+ # ipykernel
+ # jupyter-server
+ # jupyterlab
+ # jupyterlab-server
+ # marshmallow
+ # nbconvert
# pytest
- # redis
+ # snowflake-connector-python
# sphinx
-pandas==1.4.4
+pandas==1.5.3
# via
# altair
# db-dtypes
@@ -457,11 +591,13 @@ pandas==1.4.4
# snowflake-connector-python
pandavro==1.5.2
# via feast (setup.py)
+pandocfilters==1.5.0
+ # via nbconvert
parso==0.8.3
# via jedi
-partd==1.3.0
+partd==1.4.0
# via dask
-pathspec==0.10.3
+pathspec==0.11.2
# via black
pbr==5.11.1
# via mock
@@ -469,24 +605,27 @@ pexpect==4.8.0
# via ipython
pickleshare==0.7.5
# via ipython
-pip-tools==6.12.1
+pip-tools==7.3.0
# via feast (setup.py)
-platformdirs==2.6.2
+platformdirs==3.8.1
# via
# black
# jupyter-core
+ # snowflake-connector-python
# virtualenv
-pluggy==1.0.0
+pluggy==1.3.0
# via pytest
ply==3.11
# via thriftpy2
-portalocker==2.6.0
+portalocker==2.7.0
# via msal-extensions
-pre-commit==2.21.0
+pre-commit==3.3.1
# via feast (setup.py)
-prompt-toolkit==3.0.36
+prometheus-client==0.17.1
+ # via jupyter-server
+prompt-toolkit==3.0.39
# via ipython
-proto-plus==1.22.1
+proto-plus==1.22.3
# via
# feast (setup.py)
# google-cloud-bigquery
@@ -494,7 +633,7 @@ proto-plus==1.22.1
# google-cloud-bigtable
# google-cloud-datastore
# google-cloud-firestore
-protobuf==4.21.7
+protobuf==4.23.3
# via
# feast (setup.py)
# google-api-core
@@ -505,6 +644,7 @@ protobuf==4.21.7
# google-cloud-firestore
# googleapis-common-protos
# grpc-google-iam-v1
+ # grpcio-health-checking
# grpcio-reflection
# grpcio-status
# grpcio-testing
@@ -512,30 +652,34 @@ protobuf==4.21.7
# mypy-protobuf
# proto-plus
psutil==5.9.0
- # via feast (setup.py)
-psycopg2-binary==2.9.5
+ # via
+ # feast (setup.py)
+ # ipykernel
+psycopg2-binary==2.9.7
# via feast (setup.py)
ptyprocess==0.7.0
- # via pexpect
+ # via
+ # pexpect
+ # terminado
pure-eval==0.2.2
# via stack-data
py==1.11.0
# via feast (setup.py)
py-cpuinfo==9.0.0
# via pytest-benchmark
-py4j==0.10.9.5
+py4j==0.10.9.7
# via pyspark
-pyarrow==8.0.0
+pyarrow==10.0.1
# via
# db-dtypes
# feast (setup.py)
# google-cloud-bigquery
# snowflake-connector-python
-pyasn1==0.4.8
+pyasn1==0.5.0
# via
# pyasn1-modules
# rsa
-pyasn1-modules==0.2.8
+pyasn1-modules==0.3.0
# via google-auth
pybindgen==0.22.1
# via feast (setup.py)
@@ -543,46 +687,43 @@ pycodestyle==2.10.0
# via flake8
pycparser==2.21
# via cffi
-pycryptodomex==3.16.0
+pycryptodomex==3.18.0
# via snowflake-connector-python
-pydantic==1.10.2
+pydantic==1.10.12
# via
# fastapi
# feast (setup.py)
+ # great-expectations
pyflakes==3.0.1
# via flake8
-pygments==2.13.0
+pygments==2.16.1
# via
# feast (setup.py)
# ipython
+ # nbconvert
# sphinx
-pyjwt[crypto]==2.6.0
+pyjwt[crypto]==2.8.0
# via
# adal
# msal
# snowflake-connector-python
-pymssql==2.2.7
+pymssql==2.2.8
# via feast (setup.py)
-pymysql==1.0.2
+pymysql==1.1.0
# via feast (setup.py)
-pyodbc==4.0.35
+pyodbc==4.0.39
# via feast (setup.py)
-pyopenssl==22.0.0
- # via
- # feast (setup.py)
- # snowflake-connector-python
-pyparsing==2.4.7
+pyopenssl==23.2.0
+ # via snowflake-connector-python
+pyparsing==3.1.1
# via
# great-expectations
# httplib2
- # packaging
pyproject-hooks==1.0.0
# via build
-pyrsistent==0.18.1
- # via jsonschema
-pyspark==3.3.1
+pyspark==3.4.1
# via feast (setup.py)
-pytest==7.2.1
+pytest==7.4.2
# via
# feast (setup.py)
# pytest-benchmark
@@ -594,7 +735,7 @@ pytest==7.2.1
# pytest-xdist
pytest-benchmark==3.4.1
# via feast (setup.py)
-pytest-cov==4.0.0
+pytest-cov==4.1.0
# via feast (setup.py)
pytest-lazy-fixture==0.6.3
# via feast (setup.py)
@@ -604,39 +745,55 @@ pytest-ordering==0.6
# via feast (setup.py)
pytest-timeout==1.4.2
# via feast (setup.py)
-pytest-xdist==3.1.0
+pytest-xdist==3.3.1
# via feast (setup.py)
python-dateutil==2.8.2
# via
# adal
+ # arrow
# botocore
# google-cloud-bigquery
# great-expectations
+ # jupyter-client
# kubernetes
# moto
# pandas
-python-dotenv==0.21.0
+ # rockset
+ # trino
+python-dotenv==1.0.0
# via uvicorn
-pytz==2022.2.1
+python-json-logger==2.0.7
+ # via jupyter-events
+pytz==2023.3.post1
# via
- # babel
# great-expectations
- # moto
# pandas
# snowflake-connector-python
# trino
-pytz-deprecation-shim==0.1.0.post0
- # via tzlocal
-pyyaml==6.0
+pyyaml==6.0.1
# via
# dask
# feast (setup.py)
+ # jupyter-events
# kubernetes
# pre-commit
+ # responses
# uvicorn
-redis==4.2.2
+pyzmq==25.1.1
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+redis==4.6.0
+ # via feast (setup.py)
+referencing==0.30.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+ # jupyter-events
+regex==2023.8.8
# via feast (setup.py)
-requests==2.28.1
+requests==2.31.0
# via
# adal
# adlfs
@@ -644,11 +801,13 @@ requests==2.28.1
# azure-datalake-store
# cachecontrol
# docker
+ # feast (setup.py)
# gcsfs
# google-api-core
# google-cloud-bigquery
# google-cloud-storage
# great-expectations
+ # jupyterlab-server
# kubernetes
# moto
# msal
@@ -663,22 +822,37 @@ requests-oauthlib==1.3.1
# google-auth-oauthlib
# kubernetes
# msrest
-responses==0.22.0
+responses==0.23.3
# via moto
+rfc3339-validator==0.1.4
+ # via
+ # jsonschema
+ # jupyter-events
+rfc3986-validator==0.1.1
+ # via
+ # jsonschema
+ # jupyter-events
+rockset==2.1.0
+ # via feast (setup.py)
+rpds-py==0.10.2
+ # via
+ # jsonschema
+ # referencing
rsa==4.9
# via google-auth
ruamel-yaml==0.17.17
# via great-expectations
-s3fs==2022.1.0
- # via feast (setup.py)
-s3transfer==0.5.2
+s3transfer==0.6.2
# via boto3
-scipy==1.10.0
+scipy==1.11.2
# via great-expectations
+send2trash==1.8.2
+ # via jupyter-server
six==1.16.0
# via
+ # asttokens
# azure-core
- # azure-identity
+ # bleach
# cassandra-driver
# geomet
# google-auth
@@ -690,161 +864,213 @@ six==1.16.0
# msrestazure
# pandavro
# python-dateutil
+ # rfc3339-validator
# thriftpy2
sniffio==1.3.0
- # via anyio
+ # via
+ # anyio
+ # httpcore
+ # httpx
snowballstemmer==2.2.0
# via sphinx
-snowflake-connector-python[pandas]==2.9.0
+snowflake-connector-python[pandas]==3.1.1
# via feast (setup.py)
-sphinx==6.1.3
- # via feast (setup.py)
-sphinxcontrib-applehelp==1.0.3
+sortedcontainers==2.4.0
+ # via snowflake-connector-python
+soupsieve==2.5
+ # via beautifulsoup4
+sphinx==6.2.1
+ # via
+ # feast (setup.py)
+ # sphinxcontrib-applehelp
+ # sphinxcontrib-devhelp
+ # sphinxcontrib-htmlhelp
+ # sphinxcontrib-qthelp
+ # sphinxcontrib-serializinghtml
+sphinxcontrib-applehelp==1.0.7
# via sphinx
-sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-devhelp==1.0.5
# via sphinx
-sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-htmlhelp==2.0.4
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
-sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-qthelp==1.0.6
# via sphinx
-sphinxcontrib-serializinghtml==1.1.5
+sphinxcontrib-serializinghtml==1.1.9
# via sphinx
-sqlalchemy[mypy]==1.4.41
+sqlalchemy[mypy]==1.4.49
# via feast (setup.py)
-sqlalchemy2-stubs==0.0.2a27
+sqlalchemy2-stubs==0.0.2a35
# via sqlalchemy
stack-data==0.6.2
# via ipython
-starlette==0.20.4
+starlette==0.27.0
# via fastapi
-tabulate==0.8.10
+tabulate==0.9.0
# via feast (setup.py)
-tenacity==8.1.0
+tenacity==8.2.3
# via feast (setup.py)
-termcolor==2.2.0
- # via great-expectations
+terminado==0.17.1
+ # via
+ # jupyter-server
+ # jupyter-server-terminals
testcontainers==3.7.1
# via feast (setup.py)
thriftpy2==0.4.16
# via happybase
+tinycss2==1.2.1
+ # via nbconvert
toml==0.10.2
- # via
- # feast (setup.py)
- # responses
+ # via feast (setup.py)
tomli==2.0.1
# via
# black
# build
# coverage
+ # jupyterlab
# mypy
+ # pip-tools
# pyproject-hooks
# pytest
+tomlkit==0.12.1
+ # via snowflake-connector-python
toolz==0.12.0
# via
# altair
# dask
# partd
-tqdm==4.64.1
+tornado==6.3.3
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+ # jupyterlab
+ # notebook
+ # terminado
+tqdm==4.66.1
# via
# feast (setup.py)
# great-expectations
-traitlets==5.8.1
+traitlets==5.9.0
# via
+ # comm
+ # ipykernel
# ipython
+ # ipywidgets
+ # jupyter-client
# jupyter-core
+ # jupyter-events
+ # jupyter-server
+ # jupyterlab
# matplotlib-inline
+ # nbclient
+ # nbconvert
# nbformat
-trino==0.321.0
+trino==0.326.0
# via feast (setup.py)
typeguard==2.13.3
# via feast (setup.py)
-types-docutils==0.19.1.1
- # via types-setuptools
-types-protobuf==4.21.0.2
+types-protobuf==3.19.22
# via
# feast (setup.py)
# mypy-protobuf
-types-pymysql==1.0.19.1
- # via feast (setup.py)
-types-python-dateutil==2.8.19.5
+types-pymysql==1.1.0.1
# via feast (setup.py)
-types-pytz==2022.7.0.0
+types-pyopenssl==23.2.0.2
+ # via types-redis
+types-python-dateutil==2.8.19.14
# via feast (setup.py)
-types-pyyaml==6.0.12.2
+types-pytz==2023.3.0.1
# via feast (setup.py)
-types-redis==4.4.0.0
+types-pyyaml==6.0.12.11
+ # via
+ # feast (setup.py)
+ # responses
+types-redis==4.6.0.5
# via feast (setup.py)
-types-requests==2.28.11.7
+types-requests==2.31.0.2
# via feast (setup.py)
-types-setuptools==65.7.0.1
+types-setuptools==68.2.0.0
# via feast (setup.py)
-types-tabulate==0.9.0.0
+types-tabulate==0.9.0.3
# via feast (setup.py)
-types-toml==0.10.8.1
- # via responses
-types-urllib3==1.26.25.4
+types-urllib3==1.26.25.14
# via types-requests
-typing-extensions==4.3.0
+typing-extensions==4.7.1
# via
+ # async-lru
# azure-core
+ # azure-storage-blob
+ # fastapi
+ # filelock
# great-expectations
# mypy
# pydantic
# snowflake-connector-python
# sqlalchemy2-stubs
-tzdata==2022.7
- # via pytz-deprecation-shim
-tzlocal==4.2
+ # uvicorn
+tzlocal==5.0.1
# via
# great-expectations
# trino
+uri-template==1.3.0
+ # via jsonschema
uritemplate==4.1.1
# via google-api-python-client
-urllib3==1.26.12
+urllib3==1.26.16
# via
# botocore
# docker
# feast (setup.py)
+ # google-auth
# great-expectations
# kubernetes
# minio
# requests
# responses
+ # rockset
# snowflake-connector-python
-uvicorn[standard]==0.18.3
+uvicorn[standard]==0.23.2
# via feast (setup.py)
uvloop==0.17.0
# via uvicorn
-virtualenv==20.17.1
- # via pre-commit
+virtualenv==20.23.0
+ # via
+ # feast (setup.py)
+ # pre-commit
volatile==2.1.0
# via bowler
-watchfiles==0.17.0
+watchfiles==0.20.0
# via uvicorn
-wcwidth==0.2.5
+wcwidth==0.2.6
# via prompt-toolkit
-websocket-client==1.4.2
+webcolors==1.13
+ # via jsonschema
+webencodings==0.5.1
+ # via
+ # bleach
+ # tinycss2
+websocket-client==1.6.2
# via
# docker
+ # jupyter-server
# kubernetes
-websockets==10.3
+websockets==11.0.3
# via uvicorn
-werkzeug==2.1.2
+werkzeug==2.3.7
# via moto
-wheel==0.38.1
+wheel==0.41.2
# via pip-tools
-wrapt==1.14.1
- # via
- # aiobotocore
- # deprecated
- # testcontainers
+widgetsnbextension==4.0.8
+ # via ipywidgets
+wrapt==1.15.0
+ # via testcontainers
xmltodict==0.13.0
# via moto
-yarl==1.8.2
+yarl==1.9.2
# via aiohttp
-zipp==3.11.0
+zipp==3.16.2
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt
index c15541fc52..9ee910bf00 100644
--- a/sdk/python/requirements/py3.10-requirements.txt
+++ b/sdk/python/requirements/py3.10-requirements.txt
@@ -4,163 +4,184 @@
#
# pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt
#
-anyio==3.6.1
+anyio==4.0.0
# via
+ # httpcore
# starlette
# watchfiles
appdirs==1.4.4
# via fissix
-attrs==22.1.0
+attrs==23.1.0
# via
# bowler
# jsonschema
+ # referencing
bowler==0.9.0
# via feast (setup.py)
-cachetools==5.2.0
- # via google-auth
-certifi==2022.12.7
- # via requests
-charset-normalizer==2.1.1
+certifi==2023.7.22
+ # via
+ # httpcore
+ # httpx
+ # requests
+charset-normalizer==3.2.0
# via requests
-click==8.1.3
+click==8.1.7
# via
# bowler
+ # dask
# feast (setup.py)
# moreorless
# uvicorn
-cloudpickle==2.2.0
+cloudpickle==2.2.1
# via dask
-colorama==0.4.5
+colorama==0.4.6
# via feast (setup.py)
-dask==2022.1.1
+dask==2023.9.1
# via feast (setup.py)
-dill==0.3.5.1
+dill==0.3.7
# via feast (setup.py)
-fastapi==0.85.0
+exceptiongroup==1.1.3
+ # via anyio
+fastapi==0.99.1
# via feast (setup.py)
-fastavro==1.6.1
+fastavro==1.8.3
# via
# feast (setup.py)
# pandavro
fissix==21.11.13
# via bowler
-fsspec==2022.8.2
+fsspec==2023.9.0
# via dask
-google-api-core==2.10.1
- # via feast (setup.py)
-google-auth==2.12.0
- # via google-api-core
-googleapis-common-protos==1.56.4
- # via
- # feast (setup.py)
- # google-api-core
-greenlet==2.0.1
- # via sqlalchemy
-grpcio==1.49.1
+grpcio==1.58.0
# via
# feast (setup.py)
+ # grpcio-health-checking
# grpcio-reflection
-grpcio-reflection==1.49.1
+ # grpcio-tools
+grpcio-health-checking==1.58.0
+ # via feast (setup.py)
+grpcio-reflection==1.58.0
+ # via feast (setup.py)
+grpcio-tools==1.58.0
+ # via feast (setup.py)
+gunicorn==21.2.0
# via feast (setup.py)
h11==0.14.0
+ # via
+ # httpcore
+ # uvicorn
+httpcore==0.17.3
+ # via httpx
+httptools==0.6.0
# via uvicorn
-httptools==0.5.0
- # via uvicorn
+httpx==0.24.1
+ # via feast (setup.py)
idna==3.4
# via
# anyio
+ # httpx
# requests
+importlib-metadata==6.8.0
+ # via
+ # dask
+ # feast (setup.py)
+importlib-resources==6.0.1
+ # via feast (setup.py)
jinja2==3.1.2
# via feast (setup.py)
-jsonschema==4.16.0
+jsonschema==4.19.0
# via feast (setup.py)
+jsonschema-specifications==2023.7.1
+ # via jsonschema
locket==1.0.0
# via partd
-markupsafe==2.1.1
+markupsafe==2.1.3
# via jinja2
-mmh3==3.0.0
+mmh3==4.0.1
# via feast (setup.py)
moreorless==0.4.0
# via bowler
-mypy==0.981
+mypy==1.5.1
# via sqlalchemy
-mypy-extensions==0.4.3
+mypy-extensions==1.0.0
# via mypy
-numpy==1.23.3
+mypy-protobuf==3.1
+ # via feast (setup.py)
+numpy==1.24.4
# via
# feast (setup.py)
# pandas
# pandavro
# pyarrow
-packaging==21.3
- # via dask
-pandas==1.5.0
+packaging==23.1
+ # via
+ # dask
+ # gunicorn
+pandas==1.5.3
# via
# feast (setup.py)
# pandavro
pandavro==1.5.2
# via feast (setup.py)
-partd==1.3.0
+partd==1.4.0
# via dask
-proto-plus==1.22.1
+proto-plus==1.22.3
# via feast (setup.py)
-protobuf==4.21.7
+protobuf==4.23.3
# via
# feast (setup.py)
- # google-api-core
- # googleapis-common-protos
+ # grpcio-health-checking
# grpcio-reflection
+ # grpcio-tools
+ # mypy-protobuf
# proto-plus
-pyarrow==8.0.0
+pyarrow==11.0.0
# via feast (setup.py)
-pyasn1==0.4.8
- # via
- # pyasn1-modules
- # rsa
-pyasn1-modules==0.2.8
- # via google-auth
-pydantic==1.10.2
+pydantic==1.10.12
# via
# fastapi
# feast (setup.py)
-pygments==2.13.0
+pygments==2.16.1
# via feast (setup.py)
-pyparsing==3.0.9
- # via packaging
-pyrsistent==0.18.1
- # via jsonschema
python-dateutil==2.8.2
# via pandas
-python-dotenv==0.21.0
+python-dotenv==1.0.0
# via uvicorn
-pytz==2022.2.1
+pytz==2023.3.post1
# via pandas
-pyyaml==6.0
+pyyaml==6.0.1
# via
# dask
# feast (setup.py)
# uvicorn
-requests==2.28.1
- # via google-api-core
-rsa==4.9
- # via google-auth
+referencing==0.30.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+requests==2.31.0
+ # via feast (setup.py)
+rpds-py==0.10.2
+ # via
+ # jsonschema
+ # referencing
six==1.16.0
# via
- # google-auth
- # grpcio
# pandavro
# python-dateutil
sniffio==1.3.0
- # via anyio
-sqlalchemy[mypy]==1.4.41
+ # via
+ # anyio
+ # httpcore
+ # httpx
+sqlalchemy[mypy]==1.4.49
# via feast (setup.py)
-sqlalchemy2-stubs==0.0.2a27
+sqlalchemy2-stubs==0.0.2a35
# via sqlalchemy
-starlette==0.20.4
+starlette==0.27.0
# via fastapi
-tabulate==0.8.10
+tabulate==0.9.0
# via feast (setup.py)
-tenacity==8.1.0
+tenacity==8.2.3
# via feast (setup.py)
toml==0.10.2
# via feast (setup.py)
@@ -170,24 +191,33 @@ toolz==0.12.0
# via
# dask
# partd
-tqdm==4.64.1
+tqdm==4.66.1
# via feast (setup.py)
typeguard==2.13.3
# via feast (setup.py)
-typing-extensions==4.3.0
+types-protobuf==4.24.0.1
+ # via mypy-protobuf
+typing-extensions==4.7.1
# via
+ # fastapi
# mypy
# pydantic
# sqlalchemy2-stubs
-urllib3==1.26.12
+ # uvicorn
+urllib3==2.0.4
# via requests
-uvicorn[standard]==0.18.3
+uvicorn[standard]==0.23.2
# via feast (setup.py)
uvloop==0.17.0
# via uvicorn
volatile==2.1.0
# via bowler
-watchfiles==0.17.0
+watchfiles==0.20.0
# via uvicorn
-websockets==10.3
+websockets==11.0.3
# via uvicorn
+zipp==3.16.2
+ # via importlib-metadata
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt
index 3f10e9b423..a5acaf55d4 100644
--- a/sdk/python/requirements/py3.8-ci-requirements.txt
+++ b/sdk/python/requirements/py3.8-ci-requirements.txt
@@ -5,107 +5,118 @@
# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.8-ci-requirements.txt
#
adal==1.2.7
- # via
- # azure-datalake-store
- # msrestazure
+ # via msrestazure
adlfs==0.5.9
# via feast (setup.py)
-aiobotocore==2.1.2
- # via s3fs
-aiohttp==3.8.3
+aiohttp==3.8.5
# via
# adlfs
- # aiobotocore
# gcsfs
- # s3fs
-aioitertools==0.11.0
- # via aiobotocore
-aiosignal==1.2.0
+aiosignal==1.3.1
# via aiohttp
alabaster==0.7.13
# via sphinx
altair==4.2.0
# via great-expectations
-anyio==3.6.1
+anyio==4.0.0
# via
+ # httpcore
+ # jupyter-server
# starlette
# watchfiles
appdirs==1.4.4
# via fissix
appnope==0.1.3
- # via ipython
+ # via
+ # ipykernel
+ # ipython
+argon2-cffi==23.1.0
+ # via jupyter-server
+argon2-cffi-bindings==21.2.0
+ # via argon2-cffi
+arrow==1.2.3
+ # via isoduration
asn1crypto==1.5.1
# via
# oscrypto
# snowflake-connector-python
assertpy==1.1
# via feast (setup.py)
-asttokens==2.0.8
+asttokens==2.4.0
# via stack-data
-async-timeout==4.0.2
+async-lru==2.0.4
+ # via jupyterlab
+async-timeout==4.0.3
# via
# aiohttp
# redis
-attrs==22.1.0
+attrs==23.1.0
# via
# aiohttp
# bowler
# jsonschema
- # pytest
+ # referencing
avro==1.10.0
# via feast (setup.py)
-azure-core==1.25.1
+azure-core==1.29.3
# via
# adlfs
# azure-identity
# azure-storage-blob
# msrest
-azure-datalake-store==0.0.52
+azure-datalake-store==0.0.53
# via adlfs
-azure-identity==1.11.0
+azure-identity==1.14.0
# via
# adlfs
# feast (setup.py)
-azure-storage-blob==12.13.1
+azure-storage-blob==12.17.0
# via
# adlfs
# feast (setup.py)
-babel==2.11.0
- # via sphinx
+babel==2.12.1
+ # via
+ # jupyterlab-server
+ # sphinx
backcall==0.2.0
# via ipython
backports-zoneinfo==0.2.1
# via
- # pytz-deprecation-shim
+ # trino
# tzlocal
-black==22.8.0
+beautifulsoup4==4.12.2
+ # via nbconvert
+black==22.12.0
# via feast (setup.py)
-boto3==1.20.23
+bleach==6.0.0
+ # via nbconvert
+boto3==1.28.42
# via
# feast (setup.py)
# moto
-botocore==1.23.24
+botocore==1.31.42
# via
- # aiobotocore
# boto3
# moto
# s3transfer
bowler==0.9.0
# via feast (setup.py)
-build==0.8.0
+build==1.0.3
# via
# feast (setup.py)
# pip-tools
-bytewax==0.13.1
+bytewax==0.15.1
# via feast (setup.py)
-cachecontrol==0.12.11
+cachecontrol==0.13.1
# via firebase-admin
-cachetools==5.2.0
+cachetools==5.3.1
# via google-auth
-cassandra-driver==3.25.0
+cassandra-driver==3.28.0
# via feast (setup.py)
-certifi==2022.12.7
+certifi==2023.7.22
# via
+ # httpcore
+ # httpx
# kubernetes
# minio
# msrest
@@ -113,35 +124,41 @@ certifi==2022.12.7
# snowflake-connector-python
cffi==1.15.1
# via
+ # argon2-cffi-bindings
# azure-datalake-store
# cryptography
# snowflake-connector-python
-cfgv==3.3.1
+cfgv==3.4.0
# via pre-commit
-charset-normalizer==2.1.1
+charset-normalizer==3.2.0
# via
# aiohttp
# requests
# snowflake-connector-python
-click==8.1.3
+click==8.1.7
# via
# black
# bowler
+ # dask
# feast (setup.py)
# geomet
# great-expectations
# moreorless
# pip-tools
# uvicorn
-cloudpickle==2.2.0
+cloudpickle==2.2.1
# via dask
-colorama==0.4.5
+colorama==0.4.6
# via
# feast (setup.py)
# great-expectations
-coverage[toml]==6.5.0
+comm==0.1.4
+ # via
+ # ipykernel
+ # ipywidgets
+coverage[toml]==7.3.1
# via pytest-cov
-cryptography==35.0.0
+cryptography==41.0.3
# via
# adal
# azure-identity
@@ -153,28 +170,30 @@ cryptography==35.0.0
# pyjwt
# pyopenssl
# snowflake-connector-python
-dask==2022.1.1
+ # types-pyopenssl
+ # types-redis
+dask==2023.5.0
# via feast (setup.py)
-dataclasses==0.6
- # via great-expectations
-db-dtypes==1.0.4
+db-dtypes==1.1.1
# via google-cloud-bigquery
+debugpy==1.6.7.post1
+ # via ipykernel
decorator==5.1.1
# via
# gcsfs
# ipython
-deprecated==1.2.13
- # via redis
+defusedxml==0.7.1
+ # via nbconvert
deprecation==2.1.0
# via testcontainers
-dill==0.3.5.1
+dill==0.3.7
# via
# bytewax
# feast (setup.py)
# multiprocess
-distlib==0.3.6
+distlib==0.3.7
# via virtualenv
-docker==6.0.0
+docker==6.1.3
# via
# feast (setup.py)
# testcontainers
@@ -182,19 +201,23 @@ docutils==0.19
# via sphinx
entrypoints==0.4
# via altair
-execnet==1.9.0
+exceptiongroup==1.1.3
+ # via
+ # anyio
+ # pytest
+execnet==2.0.2
# via pytest-xdist
-executing==1.1.0
+executing==1.2.0
# via stack-data
-fastapi==0.85.0
+fastapi==0.99.1
# via feast (setup.py)
-fastavro==1.6.1
+fastavro==1.8.2
# via
# feast (setup.py)
# pandavro
-fastjsonschema==2.16.2
+fastjsonschema==2.18.0
# via nbformat
-filelock==3.8.0
+filelock==3.12.3
# via
# snowflake-connector-python
# virtualenv
@@ -202,9 +225,11 @@ firebase-admin==5.4.0
# via feast (setup.py)
fissix==21.11.13
# via bowler
-flake8==5.0.4
+flake8==6.0.0
# via feast (setup.py)
-frozenlist==1.3.1
+fqdn==1.5.1
+ # via jsonschema
+frozenlist==1.4.0
# via
# aiohttp
# aiosignal
@@ -213,12 +238,13 @@ fsspec==2022.1.0
# adlfs
# dask
# gcsfs
- # s3fs
gcsfs==2022.1.0
# via feast (setup.py)
+geojson==2.5.0
+ # via rockset
geomet==0.2.1.post1
# via cassandra-driver
-google-api-core[grpc]==2.10.1
+google-api-core[grpc]==2.11.1
# via
# feast (setup.py)
# firebase-admin
@@ -230,9 +256,9 @@ google-api-core[grpc]==2.10.1
# google-cloud-datastore
# google-cloud-firestore
# google-cloud-storage
-google-api-python-client==2.63.0
+google-api-python-client==2.98.0
# via firebase-admin
-google-auth==2.12.0
+google-auth==2.22.0
# via
# gcsfs
# google-api-core
@@ -244,191 +270,296 @@ google-auth==2.12.0
# kubernetes
google-auth-httplib2==0.1.0
# via google-api-python-client
-google-auth-oauthlib==0.5.3
+google-auth-oauthlib==1.0.0
# via gcsfs
-google-cloud-bigquery[pandas]==3.3.3
+google-cloud-bigquery[pandas]==3.11.4
# via feast (setup.py)
-google-cloud-bigquery-storage==2.16.1
- # via
- # feast (setup.py)
- # google-cloud-bigquery
-google-cloud-bigtable==2.12.0
+google-cloud-bigquery-storage==2.22.0
# via feast (setup.py)
-google-cloud-core==2.3.2
+google-cloud-bigtable==2.21.0
+ # via feast (setup.py)
+google-cloud-core==2.3.3
# via
# google-cloud-bigquery
# google-cloud-bigtable
# google-cloud-datastore
# google-cloud-firestore
# google-cloud-storage
-google-cloud-datastore==2.8.1
+google-cloud-datastore==2.18.0
# via feast (setup.py)
-google-cloud-firestore==2.7.0
+google-cloud-firestore==2.11.1
# via firebase-admin
-google-cloud-storage==2.5.0
+google-cloud-storage==2.10.0
# via
# feast (setup.py)
# firebase-admin
# gcsfs
google-crc32c==1.5.0
# via google-resumable-media
-google-resumable-media==2.4.0
+google-resumable-media==2.6.0
# via
# google-cloud-bigquery
# google-cloud-storage
-googleapis-common-protos[grpc]==1.56.4
+googleapis-common-protos[grpc]==1.60.0
# via
# feast (setup.py)
# google-api-core
# grpc-google-iam-v1
# grpcio-status
-great-expectations==0.14.13
+great-expectations==0.15.50
# via feast (setup.py)
-greenlet==2.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.4
+grpc-google-iam-v1==0.12.6
# via google-cloud-bigtable
-grpcio==1.49.1
+grpcio==1.57.0
# via
# feast (setup.py)
# google-api-core
# google-cloud-bigquery
# googleapis-common-protos
# grpc-google-iam-v1
+ # grpcio-health-checking
# grpcio-reflection
# grpcio-status
# grpcio-testing
# grpcio-tools
-grpcio-reflection==1.49.1
+grpcio-health-checking==1.57.0
# via feast (setup.py)
-grpcio-status==1.49.1
+grpcio-reflection==1.57.0
+ # via feast (setup.py)
+grpcio-status==1.57.0
# via google-api-core
-grpcio-testing==1.49.1
+grpcio-testing==1.57.0
+ # via feast (setup.py)
+grpcio-tools==1.57.0
# via feast (setup.py)
-grpcio-tools==1.49.1
+gunicorn==21.2.0
# via feast (setup.py)
h11==0.14.0
- # via uvicorn
+ # via
+ # httpcore
+ # uvicorn
happybase==1.2.0
# via feast (setup.py)
-hiredis==2.0.0
+hazelcast-python-client==5.3.0
+ # via feast (setup.py)
+hiredis==2.2.3
# via feast (setup.py)
-httplib2==0.20.4
+httpcore==0.17.3
+ # via httpx
+httplib2==0.22.0
# via
# google-api-python-client
# google-auth-httplib2
-httptools==0.5.0
+httptools==0.6.0
# via uvicorn
-identify==2.5.5
+httpx==0.24.1
+ # via feast (setup.py)
+identify==2.5.27
# via pre-commit
idna==3.4
# via
# anyio
+ # httpx
+ # jsonschema
# requests
# snowflake-connector-python
# yarl
imagesize==1.4.1
# via sphinx
-importlib-metadata==4.12.0
+importlib-metadata==6.8.0
# via
+ # build
+ # dask
+ # feast (setup.py)
# great-expectations
+ # jupyter-client
+ # jupyter-lsp
+ # jupyterlab
+ # jupyterlab-server
+ # nbconvert
# sphinx
-importlib-resources==5.9.0
- # via jsonschema
-iniconfig==1.1.1
+importlib-resources==6.0.1
+ # via
+ # feast (setup.py)
+ # jsonschema
+ # jsonschema-specifications
+ # jupyterlab
+iniconfig==2.0.0
# via pytest
-ipython==8.5.0
+ipykernel==6.25.2
+ # via jupyterlab
+ipython==8.12.2
+ # via
+ # great-expectations
+ # ipykernel
+ # ipywidgets
+ipywidgets==8.1.0
# via great-expectations
isodate==0.6.1
- # via msrest
-isort==5.10.1
+ # via
+ # azure-storage-blob
+ # msrest
+isoduration==20.11.0
+ # via jsonschema
+isort==5.12.0
# via feast (setup.py)
-jedi==0.18.1
+jedi==0.19.0
# via ipython
-jinja2==3.0.3
+jinja2==3.1.2
# via
# altair
# feast (setup.py)
# great-expectations
+ # jupyter-server
+ # jupyterlab
+ # jupyterlab-server
# moto
+ # nbconvert
# sphinx
-jmespath==0.10.0
+jmespath==1.0.1
# via
# boto3
# botocore
-jsonpatch==1.32
+json5==0.9.14
+ # via jupyterlab-server
+jsonpatch==1.33
# via great-expectations
-jsonpointer==2.3
- # via jsonpatch
-jsonschema==4.16.0
+jsonpointer==2.4
+ # via
+ # jsonpatch
+ # jsonschema
+jsonschema[format-nongpl]==4.19.0
# via
# altair
# feast (setup.py)
# great-expectations
+ # jupyter-events
+ # jupyterlab-server
# nbformat
-jupyter-core==4.11.1
- # via nbformat
+jsonschema-specifications==2023.7.1
+ # via jsonschema
+jupyter-client==8.3.1
+ # via
+ # ipykernel
+ # jupyter-server
+ # nbclient
+jupyter-core==5.3.1
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+ # jupyterlab
+ # nbclient
+ # nbconvert
+ # nbformat
+jupyter-events==0.7.0
+ # via jupyter-server
+jupyter-lsp==2.2.0
+ # via jupyterlab
+jupyter-server==2.7.3
+ # via
+ # jupyter-lsp
+ # jupyterlab
+ # jupyterlab-server
+ # notebook
+ # notebook-shim
+jupyter-server-terminals==0.4.4
+ # via jupyter-server
+jupyterlab==4.0.5
+ # via notebook
+jupyterlab-pygments==0.2.2
+ # via nbconvert
+jupyterlab-server==2.24.0
+ # via
+ # jupyterlab
+ # notebook
+jupyterlab-widgets==3.0.8
+ # via ipywidgets
kubernetes==20.13.0
# via feast (setup.py)
locket==1.0.0
# via partd
-markupsafe==2.1.1
+makefun==1.15.1
+ # via great-expectations
+markupsafe==2.1.3
# via
# jinja2
- # moto
+ # nbconvert
+ # werkzeug
+marshmallow==3.20.1
+ # via great-expectations
matplotlib-inline==0.1.6
- # via ipython
+ # via
+ # ipykernel
+ # ipython
mccabe==0.7.0
# via flake8
minio==7.1.0
# via feast (setup.py)
-mistune==2.0.4
- # via great-expectations
-mmh3==3.0.0
+mistune==3.0.1
+ # via
+ # great-expectations
+ # nbconvert
+mmh3==4.0.1
# via feast (setup.py)
mock==2.0.0
# via feast (setup.py)
moreorless==0.4.0
# via bowler
-moto==3.1.18
+moto==4.2.2
# via feast (setup.py)
-msal==1.19.0
+msal==1.23.0
# via
+ # azure-datalake-store
# azure-identity
# msal-extensions
msal-extensions==1.0.0
# via azure-identity
-msgpack==1.0.4
+msgpack==1.0.5
# via cachecontrol
msrest==0.7.1
- # via
- # azure-storage-blob
- # msrestazure
+ # via msrestazure
msrestazure==0.6.4
# via adlfs
-multidict==6.0.2
+multidict==6.0.4
# via
# aiohttp
# yarl
-multiprocess==0.70.13
+multiprocess==0.70.15
# via bytewax
-mypy==0.981
+mypy==0.982
# via
# feast (setup.py)
# sqlalchemy
-mypy-extensions==0.4.3
+mypy-extensions==1.0.0
# via
# black
# mypy
-mypy-protobuf==3.1
+mypy-protobuf==3.1.0
# via feast (setup.py)
-mysqlclient==2.1.1
+mysqlclient==2.2.0
# via feast (setup.py)
-nbformat==5.6.1
- # via great-expectations
-nodeenv==1.7.0
+nbclient==0.8.0
+ # via nbconvert
+nbconvert==7.8.0
+ # via jupyter-server
+nbformat==5.9.2
+ # via
+ # great-expectations
+ # jupyter-server
+ # nbclient
+ # nbconvert
+nest-asyncio==1.5.7
+ # via ipykernel
+nodeenv==1.8.0
# via pre-commit
-numpy==1.23.3
+notebook==7.0.3
+ # via great-expectations
+notebook-shim==0.2.3
+ # via
+ # jupyterlab
+ # notebook
+numpy==1.24.4
# via
# altair
# db-dtypes
@@ -438,11 +569,13 @@ numpy==1.23.3
# pandavro
# pyarrow
# scipy
-oauthlib==3.2.1
+oauthlib==3.2.2
# via requests-oauthlib
oscrypto==1.3.0
# via snowflake-connector-python
-packaging==21.3
+overrides==7.4.0
+ # via jupyter-server
+packaging==23.1
# via
# build
# dask
@@ -451,10 +584,17 @@ packaging==21.3
# docker
# google-cloud-bigquery
# great-expectations
+ # gunicorn
+ # ipykernel
+ # jupyter-server
+ # jupyterlab
+ # jupyterlab-server
+ # marshmallow
+ # nbconvert
# pytest
- # redis
+ # snowflake-connector-python
# sphinx
-pandas==1.4.4
+pandas==1.5.3
# via
# altair
# db-dtypes
@@ -465,39 +605,43 @@ pandas==1.4.4
# snowflake-connector-python
pandavro==1.5.2
# via feast (setup.py)
+pandocfilters==1.5.0
+ # via nbconvert
parso==0.8.3
# via jedi
-partd==1.3.0
+partd==1.4.0
# via dask
-pathspec==0.10.1
+pathspec==0.11.2
# via black
-pbr==5.10.0
+pbr==5.11.1
# via mock
-pep517==0.13.0
- # via build
pexpect==4.8.0
# via ipython
pickleshare==0.7.5
# via ipython
-pip-tools==6.8.0
+pip-tools==7.3.0
# via feast (setup.py)
pkgutil-resolve-name==1.3.10
# via jsonschema
-platformdirs==2.5.2
+platformdirs==3.8.1
# via
# black
+ # jupyter-core
+ # snowflake-connector-python
# virtualenv
-pluggy==1.0.0
+pluggy==1.3.0
# via pytest
ply==3.11
# via thriftpy2
-portalocker==2.5.1
+portalocker==2.7.0
# via msal-extensions
-pre-commit==2.20.0
+pre-commit==3.3.1
# via feast (setup.py)
-prompt-toolkit==3.0.31
+prometheus-client==0.17.1
+ # via jupyter-server
+prompt-toolkit==3.0.39
# via ipython
-proto-plus==1.22.1
+proto-plus==1.22.3
# via
# feast (setup.py)
# google-cloud-bigquery
@@ -505,7 +649,7 @@ proto-plus==1.22.1
# google-cloud-bigtable
# google-cloud-datastore
# google-cloud-firestore
-protobuf==4.21.7
+protobuf==4.23.3
# via
# feast (setup.py)
# google-api-core
@@ -515,6 +659,8 @@ protobuf==4.21.7
# google-cloud-datastore
# google-cloud-firestore
# googleapis-common-protos
+ # grpc-google-iam-v1
+ # grpcio-health-checking
# grpcio-reflection
# grpcio-status
# grpcio-testing
@@ -522,83 +668,82 @@ protobuf==4.21.7
# mypy-protobuf
# proto-plus
psutil==5.9.0
- # via feast (setup.py)
-psycopg2-binary==2.9.3
+ # via
+ # feast (setup.py)
+ # ipykernel
+psycopg2-binary==2.9.7
# via feast (setup.py)
ptyprocess==0.7.0
- # via pexpect
+ # via
+ # pexpect
+ # terminado
pure-eval==0.2.2
# via stack-data
py==1.11.0
- # via
- # feast (setup.py)
- # pytest
- # pytest-forked
-py-cpuinfo==8.0.0
+ # via feast (setup.py)
+py-cpuinfo==9.0.0
# via pytest-benchmark
-py4j==0.10.9.5
+py4j==0.10.9.7
# via pyspark
-pyarrow==8.0.0
+pyarrow==10.0.1
# via
# db-dtypes
# feast (setup.py)
# google-cloud-bigquery
# snowflake-connector-python
-pyasn1==0.4.8
+pyasn1==0.5.0
# via
# pyasn1-modules
# rsa
-pyasn1-modules==0.2.8
+pyasn1-modules==0.3.0
# via google-auth
pybindgen==0.22.1
# via feast (setup.py)
-pycodestyle==2.9.1
+pycodestyle==2.10.0
# via flake8
pycparser==2.21
# via cffi
-pycryptodomex==3.15.0
+pycryptodomex==3.18.0
# via snowflake-connector-python
-pydantic==1.10.2
+pydantic==1.10.12
# via
# fastapi
# feast (setup.py)
-pyflakes==2.5.0
+ # great-expectations
+pyflakes==3.0.1
# via flake8
-pygments==2.13.0
+pygments==2.16.1
# via
# feast (setup.py)
# ipython
+ # nbconvert
# sphinx
-pyjwt[crypto]==2.5.0
+pyjwt[crypto]==2.8.0
# via
# adal
# msal
# snowflake-connector-python
-pymssql==2.2.5
+pymssql==2.2.8
# via feast (setup.py)
-pymysql==1.0.2
+pymysql==1.1.0
# via feast (setup.py)
-pyodbc==4.0.34
+pyodbc==4.0.39
# via feast (setup.py)
-pyopenssl==22.0.0
- # via
- # feast (setup.py)
- # snowflake-connector-python
-pyparsing==2.4.7
+pyopenssl==23.2.0
+ # via snowflake-connector-python
+pyparsing==3.1.1
# via
# great-expectations
# httplib2
- # packaging
-pyrsistent==0.18.1
- # via jsonschema
-pyspark==3.3.0
+pyproject-hooks==1.0.0
+ # via build
+pyspark==3.4.1
# via feast (setup.py)
-pytest==7.1.3
+pytest==7.4.1
# via
# feast (setup.py)
# pytest-benchmark
# pytest-cov
- # pytest-forked
# pytest-lazy-fixture
# pytest-mock
# pytest-ordering
@@ -606,10 +751,8 @@ pytest==7.1.3
# pytest-xdist
pytest-benchmark==3.4.1
# via feast (setup.py)
-pytest-cov==4.0.0
+pytest-cov==4.1.0
# via feast (setup.py)
-pytest-forked==1.4.0
- # via pytest-xdist
pytest-lazy-fixture==0.6.3
# via feast (setup.py)
pytest-mock==1.10.4
@@ -618,39 +761,56 @@ pytest-ordering==0.6
# via feast (setup.py)
pytest-timeout==1.4.2
# via feast (setup.py)
-pytest-xdist==2.5.0
+pytest-xdist==3.3.1
# via feast (setup.py)
python-dateutil==2.8.2
# via
# adal
+ # arrow
# botocore
# google-cloud-bigquery
# great-expectations
+ # jupyter-client
# kubernetes
# moto
# pandas
-python-dotenv==0.21.0
+ # rockset
+ # trino
+python-dotenv==1.0.0
# via uvicorn
-pytz==2022.2.1
+python-json-logger==2.0.7
+ # via jupyter-events
+pytz==2023.3.post1
# via
# babel
# great-expectations
- # moto
# pandas
# snowflake-connector-python
# trino
-pytz-deprecation-shim==0.1.0.post0
- # via tzlocal
-pyyaml==6.0
+pyyaml==6.0.1
# via
# dask
# feast (setup.py)
+ # jupyter-events
# kubernetes
# pre-commit
+ # responses
# uvicorn
-redis==4.2.2
+pyzmq==25.1.1
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+redis==4.6.0
# via feast (setup.py)
-requests==2.28.1
+referencing==0.30.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+ # jupyter-events
+regex==2023.8.8
+ # via feast (setup.py)
+requests==2.31.0
# via
# adal
# adlfs
@@ -658,11 +818,13 @@ requests==2.28.1
# azure-datalake-store
# cachecontrol
# docker
+ # feast (setup.py)
# gcsfs
# google-api-core
# google-cloud-bigquery
# google-cloud-storage
# great-expectations
+ # jupyterlab-server
# kubernetes
# moto
# msal
@@ -677,29 +839,43 @@ requests-oauthlib==1.3.1
# google-auth-oauthlib
# kubernetes
# msrest
-responses==0.21.0
+responses==0.23.3
# via moto
+rfc3339-validator==0.1.4
+ # via
+ # jsonschema
+ # jupyter-events
+rfc3986-validator==0.1.1
+ # via
+ # jsonschema
+ # jupyter-events
+rockset==2.1.0
+ # via feast (setup.py)
+rpds-py==0.10.2
+ # via
+ # jsonschema
+ # referencing
rsa==4.9
# via google-auth
ruamel-yaml==0.17.17
# via great-expectations
-ruamel-yaml-clib==0.2.6
+ruamel-yaml-clib==0.2.7
# via ruamel-yaml
-s3fs==2022.1.0
- # via feast (setup.py)
-s3transfer==0.5.2
+s3transfer==0.6.2
# via boto3
-scipy==1.9.1
+scipy==1.10.1
# via great-expectations
+send2trash==1.8.2
+ # via jupyter-server
six==1.16.0
# via
+ # asttokens
# azure-core
- # azure-identity
+ # bleach
# cassandra-driver
# geomet
# google-auth
# google-auth-httplib2
- # grpcio
# happybase
# isodate
# kubernetes
@@ -707,19 +883,28 @@ six==1.16.0
# msrestazure
# pandavro
# python-dateutil
+ # rfc3339-validator
+ # thriftpy2
sniffio==1.3.0
- # via anyio
+ # via
+ # anyio
+ # httpcore
+ # httpx
snowballstemmer==2.2.0
# via sphinx
-snowflake-connector-python[pandas]==2.8.0
+snowflake-connector-python[pandas]==3.1.1
# via feast (setup.py)
-sphinx==6.1.3
+sortedcontainers==2.4.0
+ # via snowflake-connector-python
+soupsieve==2.5
+ # via beautifulsoup4
+sphinx==6.2.1
# via feast (setup.py)
-sphinxcontrib-applehelp==1.0.3
+sphinxcontrib-applehelp==1.0.4
# via sphinx
sphinxcontrib-devhelp==1.0.2
# via sphinx
-sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-htmlhelp==2.0.1
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
@@ -727,139 +912,181 @@ sphinxcontrib-qthelp==1.0.3
# via sphinx
sphinxcontrib-serializinghtml==1.1.5
# via sphinx
-sqlalchemy[mypy]==1.4.41
+sqlalchemy[mypy]==1.4.49
# via feast (setup.py)
-sqlalchemy2-stubs==0.0.2a27
+sqlalchemy2-stubs==0.0.2a35
# via sqlalchemy
-stack-data==0.5.1
+stack-data==0.6.2
# via ipython
-starlette==0.20.4
+starlette==0.27.0
# via fastapi
-tabulate==0.8.10
+tabulate==0.9.0
# via feast (setup.py)
-tenacity==8.1.0
+tenacity==8.2.3
# via feast (setup.py)
-termcolor==2.0.1
- # via great-expectations
-testcontainers==3.7.0
+terminado==0.17.1
+ # via
+ # jupyter-server
+ # jupyter-server-terminals
+testcontainers==3.7.1
# via feast (setup.py)
-thriftpy2==0.4.14
+thriftpy2==0.4.16
# via happybase
+tinycss2==1.2.1
+ # via nbconvert
toml==0.10.2
- # via
- # feast (setup.py)
- # pre-commit
+ # via feast (setup.py)
tomli==2.0.1
# via
# black
# build
# coverage
+ # jupyterlab
# mypy
- # pep517
+ # pip-tools
+ # pyproject-hooks
# pytest
+tomlkit==0.12.1
+ # via snowflake-connector-python
toolz==0.12.0
# via
# altair
# dask
# partd
-tqdm==4.64.1
+tornado==6.3.3
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+ # jupyterlab
+ # notebook
+ # terminado
+tqdm==4.66.1
# via
# feast (setup.py)
# great-expectations
-traitlets==5.4.0
+traitlets==5.9.0
# via
+ # comm
+ # ipykernel
# ipython
+ # ipywidgets
+ # jupyter-client
# jupyter-core
+ # jupyter-events
+ # jupyter-server
+ # jupyterlab
# matplotlib-inline
+ # nbclient
+ # nbconvert
# nbformat
-trino==0.316.0
+trino==0.326.0
# via feast (setup.py)
typeguard==2.13.3
# via feast (setup.py)
-types-cryptography==3.3.23
- # via pyjwt
-types-protobuf==3.20.4
+types-protobuf==3.19.22
# via
# feast (setup.py)
# mypy-protobuf
-types-pymysql==1.0.19
+types-pymysql==1.1.0.1
# via feast (setup.py)
-types-python-dateutil==2.8.19
+types-pyopenssl==23.2.0.2
+ # via types-redis
+types-python-dateutil==2.8.19.14
# via feast (setup.py)
-types-pytz==2022.2.1.0
+types-pytz==2023.3.0.1
# via feast (setup.py)
-types-pyyaml==6.0.12
- # via feast (setup.py)
-types-redis==4.3.21
+types-pyyaml==6.0.12.11
+ # via
+ # feast (setup.py)
+ # responses
+types-redis==4.6.0.5
# via feast (setup.py)
-types-requests==2.28.11
+types-requests==2.31.0.2
# via feast (setup.py)
-types-setuptools==65.4.0.0
+types-setuptools==68.2.0.0
# via feast (setup.py)
-types-tabulate==0.8.11
+types-tabulate==0.9.0.3
# via feast (setup.py)
-types-urllib3==1.26.25
+types-urllib3==1.26.25.14
# via types-requests
-typing-extensions==4.3.0
+typing-extensions==4.7.1
# via
- # aioitertools
+ # async-lru
# azure-core
+ # azure-storage-blob
# black
+ # fastapi
+ # filelock
# great-expectations
+ # ipython
# mypy
# pydantic
# snowflake-connector-python
# sqlalchemy2-stubs
# starlette
-tzdata==2022.4
- # via pytz-deprecation-shim
-tzlocal==4.2
- # via great-expectations
+ # uvicorn
+tzlocal==5.0.1
+ # via
+ # great-expectations
+ # trino
+uri-template==1.3.0
+ # via jsonschema
uritemplate==4.1.1
# via google-api-python-client
-urllib3==1.26.12
+urllib3==1.26.16
# via
# botocore
# docker
# feast (setup.py)
+ # google-auth
# great-expectations
# kubernetes
# minio
# requests
# responses
+ # rockset
# snowflake-connector-python
-uvicorn[standard]==0.18.3
+uvicorn[standard]==0.23.2
# via feast (setup.py)
uvloop==0.17.0
# via uvicorn
-virtualenv==20.16.5
- # via pre-commit
+virtualenv==20.23.0
+ # via
+ # feast (setup.py)
+ # pre-commit
volatile==2.1.0
# via bowler
-watchfiles==0.17.0
+watchfiles==0.20.0
# via uvicorn
-wcwidth==0.2.5
+wcwidth==0.2.6
# via prompt-toolkit
-websocket-client==1.4.1
+webcolors==1.13
+ # via jsonschema
+webencodings==0.5.1
+ # via
+ # bleach
+ # tinycss2
+websocket-client==1.6.2
# via
# docker
+ # jupyter-server
# kubernetes
-websockets==10.3
+websockets==11.0.3
# via uvicorn
-werkzeug==2.1.2
+werkzeug==2.3.7
# via moto
-wheel==0.38.1
+wheel==0.41.2
# via pip-tools
-wrapt==1.14.1
- # via
- # aiobotocore
- # deprecated
- # testcontainers
+widgetsnbextension==4.0.8
+ # via ipywidgets
+wrapt==1.15.0
+ # via testcontainers
xmltodict==0.13.0
# via moto
-yarl==1.8.1
+yarl==1.9.2
# via aiohttp
-zipp==3.8.1
+zipp==3.16.2
# via
# importlib-metadata
# importlib-resources
diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt
index 0820644131..4011ce804f 100644
--- a/sdk/python/requirements/py3.8-requirements.txt
+++ b/sdk/python/requirements/py3.8-requirements.txt
@@ -4,167 +4,189 @@
#
# pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt
#
-anyio==3.6.1
+anyio==4.0.0
# via
+ # httpcore
# starlette
# watchfiles
appdirs==1.4.4
# via fissix
-attrs==22.1.0
+attrs==23.1.0
# via
# bowler
# jsonschema
+ # referencing
bowler==0.9.0
# via feast (setup.py)
-cachetools==5.2.0
- # via google-auth
-certifi==2022.12.7
- # via requests
-charset-normalizer==2.1.1
+certifi==2023.7.22
+ # via
+ # httpcore
+ # httpx
+ # requests
+charset-normalizer==3.2.0
# via requests
-click==8.1.3
+click==8.1.7
# via
# bowler
+ # dask
# feast (setup.py)
# moreorless
# uvicorn
-cloudpickle==2.2.0
+cloudpickle==2.2.1
# via dask
-colorama==0.4.5
+colorama==0.4.6
# via feast (setup.py)
-dask==2022.1.1
+dask==2023.5.0
# via feast (setup.py)
-dill==0.3.5.1
+dill==0.3.7
# via feast (setup.py)
-fastapi==0.85.0
+exceptiongroup==1.1.3
+ # via anyio
+fastapi==0.99.1
# via feast (setup.py)
-fastavro==1.6.1
+fastavro==1.8.3
# via
# feast (setup.py)
# pandavro
fissix==21.11.13
# via bowler
-fsspec==2022.8.2
+fsspec==2023.9.0
# via dask
-google-api-core==2.10.1
- # via feast (setup.py)
-google-auth==2.12.0
- # via google-api-core
-googleapis-common-protos==1.56.4
- # via
- # feast (setup.py)
- # google-api-core
-greenlet==2.0.1
- # via sqlalchemy
-grpcio==1.49.1
+grpcio==1.58.0
# via
# feast (setup.py)
+ # grpcio-health-checking
# grpcio-reflection
-grpcio-reflection==1.49.1
+ # grpcio-tools
+grpcio-health-checking==1.58.0
+ # via feast (setup.py)
+grpcio-reflection==1.58.0
+ # via feast (setup.py)
+grpcio-tools==1.58.0
+ # via feast (setup.py)
+gunicorn==21.2.0
# via feast (setup.py)
h11==0.14.0
+ # via
+ # httpcore
+ # uvicorn
+httpcore==0.17.3
+ # via httpx
+httptools==0.6.0
# via uvicorn
-httptools==0.5.0
- # via uvicorn
+httpx==0.24.1
+ # via feast (setup.py)
idna==3.4
# via
# anyio
+ # httpx
# requests
-importlib-resources==5.9.0
- # via jsonschema
+importlib-metadata==6.8.0
+ # via
+ # dask
+ # feast (setup.py)
+importlib-resources==6.0.1
+ # via
+ # feast (setup.py)
+ # jsonschema
+ # jsonschema-specifications
jinja2==3.1.2
# via feast (setup.py)
-jsonschema==4.16.0
+jsonschema==4.19.0
# via feast (setup.py)
+jsonschema-specifications==2023.7.1
+ # via jsonschema
locket==1.0.0
# via partd
-markupsafe==2.1.1
+markupsafe==2.1.3
# via jinja2
-mmh3==3.0.0
+mmh3==4.0.1
# via feast (setup.py)
moreorless==0.4.0
# via bowler
-mypy==0.981
+mypy==1.5.1
# via sqlalchemy
-mypy-extensions==0.4.3
+mypy-extensions==1.0.0
# via mypy
-numpy==1.23.3
+mypy-protobuf==3.1.0
+ # via feast (setup.py)
+numpy==1.24.4
# via
# feast (setup.py)
# pandas
# pandavro
# pyarrow
-packaging==21.3
- # via dask
-pandas==1.5.0
+packaging==23.1
+ # via
+ # dask
+ # gunicorn
+pandas==1.5.3
# via
# feast (setup.py)
# pandavro
pandavro==1.5.2
# via feast (setup.py)
-partd==1.3.0
+partd==1.4.0
# via dask
pkgutil-resolve-name==1.3.10
# via jsonschema
-proto-plus==1.22.1
+proto-plus==1.22.3
# via feast (setup.py)
-protobuf==4.21.7
+protobuf==4.23.3
# via
# feast (setup.py)
- # google-api-core
- # googleapis-common-protos
+ # grpcio-health-checking
# grpcio-reflection
+ # grpcio-tools
+ # mypy-protobuf
# proto-plus
-pyarrow==8.0.0
+pyarrow==11.0.0
# via feast (setup.py)
-pyasn1==0.4.8
- # via
- # pyasn1-modules
- # rsa
-pyasn1-modules==0.2.8
- # via google-auth
-pydantic==1.10.2
+pydantic==1.10.12
# via
# fastapi
# feast (setup.py)
-pygments==2.13.0
+pygments==2.16.1
# via feast (setup.py)
-pyparsing==3.0.9
- # via packaging
-pyrsistent==0.18.1
- # via jsonschema
python-dateutil==2.8.2
# via pandas
-python-dotenv==0.21.0
+python-dotenv==1.0.0
# via uvicorn
-pytz==2022.2.1
+pytz==2023.3.post1
# via pandas
-pyyaml==6.0
+pyyaml==6.0.1
# via
# dask
# feast (setup.py)
# uvicorn
-requests==2.28.1
- # via google-api-core
-rsa==4.9
- # via google-auth
+referencing==0.30.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+requests==2.31.0
+ # via feast (setup.py)
+rpds-py==0.10.2
+ # via
+ # jsonschema
+ # referencing
six==1.16.0
# via
- # google-auth
- # grpcio
# pandavro
# python-dateutil
sniffio==1.3.0
- # via anyio
-sqlalchemy[mypy]==1.4.41
+ # via
+ # anyio
+ # httpcore
+ # httpx
+sqlalchemy[mypy]==1.4.49
# via feast (setup.py)
-sqlalchemy2-stubs==0.0.2a27
+sqlalchemy2-stubs==0.0.2a35
# via sqlalchemy
-starlette==0.20.4
+starlette==0.27.0
# via fastapi
-tabulate==0.8.10
+tabulate==0.9.0
# via feast (setup.py)
-tenacity==8.1.0
+tenacity==8.2.3
# via feast (setup.py)
toml==0.10.2
# via feast (setup.py)
@@ -174,27 +196,36 @@ toolz==0.12.0
# via
# dask
# partd
-tqdm==4.64.1
+tqdm==4.66.1
# via feast (setup.py)
typeguard==2.13.3
# via feast (setup.py)
-typing-extensions==4.3.0
+types-protobuf==4.24.0.1
+ # via mypy-protobuf
+typing-extensions==4.7.1
# via
+ # fastapi
# mypy
# pydantic
# sqlalchemy2-stubs
# starlette
-urllib3==1.26.12
+ # uvicorn
+urllib3==2.0.4
# via requests
-uvicorn[standard]==0.18.3
+uvicorn[standard]==0.23.2
# via feast (setup.py)
uvloop==0.17.0
# via uvicorn
volatile==2.1.0
# via bowler
-watchfiles==0.17.0
+watchfiles==0.20.0
# via uvicorn
-websockets==10.3
+websockets==11.0.3
# via uvicorn
-zipp==3.8.1
- # via importlib-resources
+zipp==3.16.2
+ # via
+ # importlib-metadata
+ # importlib-resources
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt
index 9d6aa61cc6..3a453153e5 100644
--- a/sdk/python/requirements/py3.9-ci-requirements.txt
+++ b/sdk/python/requirements/py3.9-ci-requirements.txt
@@ -1,107 +1,118 @@
#
-# This file is autogenerated by pip-compile with python 3.9
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
#
# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.9-ci-requirements.txt
#
adal==1.2.7
- # via
- # azure-datalake-store
- # msrestazure
+ # via msrestazure
adlfs==0.5.9
# via feast (setup.py)
-aiobotocore==2.1.2
- # via s3fs
-aiohttp==3.8.3
+aiohttp==3.8.5
# via
# adlfs
- # aiobotocore
# gcsfs
- # s3fs
-aioitertools==0.11.0
- # via aiobotocore
aiosignal==1.3.1
# via aiohttp
alabaster==0.7.13
# via sphinx
altair==4.2.0
# via great-expectations
-anyio==3.6.1
+anyio==4.0.0
# via
+ # httpcore
+ # jupyter-server
# starlette
# watchfiles
appdirs==1.4.4
# via fissix
appnope==0.1.3
- # via ipython
+ # via
+ # ipykernel
+ # ipython
+argon2-cffi==23.1.0
+ # via jupyter-server
+argon2-cffi-bindings==21.2.0
+ # via argon2-cffi
+arrow==1.2.3
+ # via isoduration
asn1crypto==1.5.1
# via
# oscrypto
# snowflake-connector-python
assertpy==1.1
# via feast (setup.py)
-asttokens==2.2.1
+asttokens==2.4.0
# via stack-data
-async-timeout==4.0.2
+async-lru==2.0.4
+ # via jupyterlab
+async-timeout==4.0.3
# via
# aiohttp
# redis
-attrs==22.1.0
+attrs==23.1.0
# via
# aiohttp
# bowler
# jsonschema
- # pytest
+ # referencing
avro==1.10.0
# via feast (setup.py)
-azure-core==1.26.2
+azure-core==1.29.4
# via
# adlfs
# azure-identity
# azure-storage-blob
# msrest
-azure-datalake-store==0.0.52
+azure-datalake-store==0.0.53
# via adlfs
-azure-identity==1.12.0
+azure-identity==1.14.0
# via
# adlfs
# feast (setup.py)
-azure-storage-blob==12.14.1
+azure-storage-blob==12.17.0
# via
# adlfs
# feast (setup.py)
-babel==2.11.0
- # via sphinx
+babel==2.12.1
+ # via
+ # jupyterlab-server
+ # sphinx
backcall==0.2.0
# via ipython
+beautifulsoup4==4.12.2
+ # via nbconvert
black==22.12.0
# via feast (setup.py)
-boto3==1.20.23
+bleach==6.0.0
+ # via nbconvert
+boto3==1.28.43
# via
# feast (setup.py)
# moto
-botocore==1.23.24
+botocore==1.31.43
# via
- # aiobotocore
# boto3
# moto
# s3transfer
bowler==0.9.0
# via feast (setup.py)
-build==0.10.0
+build==1.0.3
# via
# feast (setup.py)
# pip-tools
-bytewax==0.13.1
+bytewax==0.15.1
# via feast (setup.py)
-cachecontrol==0.12.11
+cachecontrol==0.13.1
# via firebase-admin
-cachetools==5.2.0
+cachetools==5.3.1
# via google-auth
-cassandra-driver==3.25.0
+cassandra-driver==3.28.0
# via feast (setup.py)
-certifi==2022.12.7
+certifi==2023.7.22
# via
+ # httpcore
+ # httpx
# kubernetes
# minio
# msrest
@@ -109,35 +120,41 @@ certifi==2022.12.7
# snowflake-connector-python
cffi==1.15.1
# via
+ # argon2-cffi-bindings
# azure-datalake-store
# cryptography
# snowflake-connector-python
-cfgv==3.3.1
+cfgv==3.4.0
# via pre-commit
-charset-normalizer==2.1.1
+charset-normalizer==3.2.0
# via
# aiohttp
# requests
# snowflake-connector-python
-click==8.1.3
+click==8.1.7
# via
# black
# bowler
+ # dask
# feast (setup.py)
# geomet
# great-expectations
# moreorless
# pip-tools
# uvicorn
-cloudpickle==2.2.0
+cloudpickle==2.2.1
# via dask
-colorama==0.4.5
+colorama==0.4.6
# via
# feast (setup.py)
# great-expectations
-coverage[toml]==7.0.5
+comm==0.1.4
+ # via
+ # ipykernel
+ # ipywidgets
+coverage[toml]==7.3.1
# via pytest-cov
-cryptography==35.0.0
+cryptography==41.0.3
# via
# adal
# azure-identity
@@ -149,28 +166,30 @@ cryptography==35.0.0
# pyjwt
# pyopenssl
# snowflake-connector-python
-dask==2022.1.1
+ # types-pyopenssl
+ # types-redis
+dask==2023.9.1
# via feast (setup.py)
-dataclasses==0.6
- # via great-expectations
-db-dtypes==1.0.5
+db-dtypes==1.1.1
# via google-cloud-bigquery
+debugpy==1.7.0
+ # via ipykernel
decorator==5.1.1
# via
# gcsfs
# ipython
-deprecated==1.2.13
- # via redis
+defusedxml==0.7.1
+ # via nbconvert
deprecation==2.1.0
# via testcontainers
-dill==0.3.6
+dill==0.3.7
# via
# bytewax
# feast (setup.py)
# multiprocess
-distlib==0.3.6
+distlib==0.3.7
# via virtualenv
-docker==6.0.1
+docker==6.1.3
# via
# feast (setup.py)
# testcontainers
@@ -178,21 +197,24 @@ docutils==0.19
# via sphinx
entrypoints==0.4
# via altair
-exceptiongroup==1.1.0
- # via pytest
-execnet==1.9.0
+exceptiongroup==1.1.3
+ # via
+ # anyio
+ # ipython
+ # pytest
+execnet==2.0.2
# via pytest-xdist
executing==1.2.0
# via stack-data
-fastapi==0.85.0
+fastapi==0.99.1
# via feast (setup.py)
-fastavro==1.6.1
+fastavro==1.8.3
# via
# feast (setup.py)
# pandavro
-fastjsonschema==2.16.2
+fastjsonschema==2.18.0
# via nbformat
-filelock==3.9.0
+filelock==3.12.3
# via
# snowflake-connector-python
# virtualenv
@@ -202,7 +224,9 @@ fissix==21.11.13
# via bowler
flake8==6.0.0
# via feast (setup.py)
-frozenlist==1.3.3
+fqdn==1.5.1
+ # via jsonschema
+frozenlist==1.4.0
# via
# aiohttp
# aiosignal
@@ -211,12 +235,13 @@ fsspec==2022.1.0
# adlfs
# dask
# gcsfs
- # s3fs
gcsfs==2022.1.0
# via feast (setup.py)
+geojson==2.5.0
+ # via rockset
geomet==0.2.1.post1
# via cassandra-driver
-google-api-core[grpc]==2.11.0
+google-api-core[grpc]==2.11.1
# via
# feast (setup.py)
# firebase-admin
@@ -228,9 +253,9 @@ google-api-core[grpc]==2.11.0
# google-cloud-datastore
# google-cloud-firestore
# google-cloud-storage
-google-api-python-client==2.72.0
+google-api-python-client==2.98.0
# via firebase-admin
-google-auth==2.16.0
+google-auth==2.22.0
# via
# gcsfs
# google-api-core
@@ -242,187 +267,292 @@ google-auth==2.16.0
# kubernetes
google-auth-httplib2==0.1.0
# via google-api-python-client
-google-auth-oauthlib==0.8.0
+google-auth-oauthlib==1.0.0
# via gcsfs
-google-cloud-bigquery[pandas]==3.4.1
+google-cloud-bigquery[pandas]==3.11.4
# via feast (setup.py)
-google-cloud-bigquery-storage==2.18.0
+google-cloud-bigquery-storage==2.22.0
# via feast (setup.py)
-google-cloud-bigtable==2.15.0
+google-cloud-bigtable==2.21.0
# via feast (setup.py)
-google-cloud-core==2.3.2
+google-cloud-core==2.3.3
# via
# google-cloud-bigquery
# google-cloud-bigtable
# google-cloud-datastore
# google-cloud-firestore
# google-cloud-storage
-google-cloud-datastore==2.12.0
+google-cloud-datastore==2.18.0
# via feast (setup.py)
-google-cloud-firestore==2.9.0
+google-cloud-firestore==2.11.1
# via firebase-admin
-google-cloud-storage==2.7.0
+google-cloud-storage==2.10.0
# via
# feast (setup.py)
# firebase-admin
# gcsfs
google-crc32c==1.5.0
# via google-resumable-media
-google-resumable-media==2.4.0
+google-resumable-media==2.6.0
# via
# google-cloud-bigquery
# google-cloud-storage
-googleapis-common-protos[grpc]==1.56.4
+googleapis-common-protos[grpc]==1.60.0
# via
# feast (setup.py)
# google-api-core
# grpc-google-iam-v1
# grpcio-status
-great-expectations==0.14.13
+great-expectations==0.15.50
# via feast (setup.py)
-greenlet==2.0.1
- # via sqlalchemy
grpc-google-iam-v1==0.12.6
# via google-cloud-bigtable
-grpcio==1.51.1
+grpcio==1.58.0
# via
# feast (setup.py)
# google-api-core
# google-cloud-bigquery
# googleapis-common-protos
# grpc-google-iam-v1
+ # grpcio-health-checking
# grpcio-reflection
# grpcio-status
# grpcio-testing
# grpcio-tools
-grpcio-reflection==1.49.1
+grpcio-health-checking==1.58.0
# via feast (setup.py)
-grpcio-status==1.51.1
+grpcio-reflection==1.58.0
+ # via feast (setup.py)
+grpcio-status==1.58.0
# via google-api-core
-grpcio-testing==1.51.1
+grpcio-testing==1.58.0
+ # via feast (setup.py)
+grpcio-tools==1.58.0
# via feast (setup.py)
-grpcio-tools==1.51.1
+gunicorn==21.2.0
# via feast (setup.py)
h11==0.14.0
- # via uvicorn
+ # via
+ # httpcore
+ # uvicorn
happybase==1.2.0
# via feast (setup.py)
-hiredis==2.1.1
+hazelcast-python-client==5.3.0
+ # via feast (setup.py)
+hiredis==2.2.3
# via feast (setup.py)
-httplib2==0.21.0
+httpcore==0.17.3
+ # via httpx
+httplib2==0.22.0
# via
# google-api-python-client
# google-auth-httplib2
-httptools==0.5.0
+httptools==0.6.0
# via uvicorn
-identify==2.5.13
+httpx==0.24.1
+ # via feast (setup.py)
+identify==2.5.27
# via pre-commit
idna==3.4
# via
# anyio
+ # httpx
+ # jsonschema
# requests
# snowflake-connector-python
# yarl
imagesize==1.4.1
# via sphinx
-importlib-metadata==6.0.0
+importlib-metadata==6.8.0
# via
+ # build
+ # dask
+ # feast (setup.py)
# great-expectations
+ # jupyter-client
+ # jupyter-lsp
+ # jupyterlab
+ # jupyterlab-server
+ # nbconvert
# sphinx
+importlib-resources==6.0.1
+ # via feast (setup.py)
iniconfig==2.0.0
# via pytest
-ipython==8.8.0
+ipykernel==6.25.2
+ # via jupyterlab
+ipython==8.15.0
+ # via
+ # great-expectations
+ # ipykernel
+ # ipywidgets
+ipywidgets==8.1.0
# via great-expectations
isodate==0.6.1
- # via msrest
-isort==5.11.4
+ # via
+ # azure-storage-blob
+ # msrest
+isoduration==20.11.0
+ # via jsonschema
+isort==5.12.0
# via feast (setup.py)
-jedi==0.18.2
+jedi==0.19.0
# via ipython
-jinja2==3.0.3
+jinja2==3.1.2
# via
# altair
# feast (setup.py)
# great-expectations
+ # jupyter-server
+ # jupyterlab
+ # jupyterlab-server
# moto
+ # nbconvert
# sphinx
-jmespath==0.10.0
+jmespath==1.0.1
# via
# boto3
# botocore
-jsonpatch==1.32
+json5==0.9.14
+ # via jupyterlab-server
+jsonpatch==1.33
# via great-expectations
-jsonpointer==2.3
- # via jsonpatch
-jsonschema==4.16.0
+jsonpointer==2.4
+ # via
+ # jsonpatch
+ # jsonschema
+jsonschema[format-nongpl]==4.19.0
# via
# altair
# feast (setup.py)
# great-expectations
+ # jupyter-events
+ # jupyterlab-server
# nbformat
-jupyter-core==5.1.3
- # via nbformat
+jsonschema-specifications==2023.7.1
+ # via jsonschema
+jupyter-client==8.3.1
+ # via
+ # ipykernel
+ # jupyter-server
+ # nbclient
+jupyter-core==5.3.1
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+ # jupyterlab
+ # nbclient
+ # nbconvert
+ # nbformat
+jupyter-events==0.7.0
+ # via jupyter-server
+jupyter-lsp==2.2.0
+ # via jupyterlab
+jupyter-server==2.7.3
+ # via
+ # jupyter-lsp
+ # jupyterlab
+ # jupyterlab-server
+ # notebook
+ # notebook-shim
+jupyter-server-terminals==0.4.4
+ # via jupyter-server
+jupyterlab==4.0.5
+ # via notebook
+jupyterlab-pygments==0.2.2
+ # via nbconvert
+jupyterlab-server==2.24.0
+ # via
+ # jupyterlab
+ # notebook
+jupyterlab-widgets==3.0.8
+ # via ipywidgets
kubernetes==20.13.0
# via feast (setup.py)
locket==1.0.0
# via partd
-markupsafe==2.1.1
+makefun==1.15.1
+ # via great-expectations
+markupsafe==2.1.3
# via
# jinja2
- # moto
+ # nbconvert
+ # werkzeug
+marshmallow==3.20.1
+ # via great-expectations
matplotlib-inline==0.1.6
- # via ipython
+ # via
+ # ipykernel
+ # ipython
mccabe==0.7.0
# via flake8
minio==7.1.0
# via feast (setup.py)
-mistune==2.0.4
- # via great-expectations
-mmh3==3.0.0
+mistune==3.0.1
+ # via
+ # great-expectations
+ # nbconvert
+mmh3==4.0.1
# via feast (setup.py)
mock==2.0.0
# via feast (setup.py)
moreorless==0.4.0
# via bowler
-moto==3.1.18
+moto==4.2.2
# via feast (setup.py)
-msal==1.20.0
+msal==1.23.0
# via
+ # azure-datalake-store
# azure-identity
# msal-extensions
msal-extensions==1.0.0
# via azure-identity
-msgpack==1.0.4
+msgpack==1.0.5
# via cachecontrol
msrest==0.7.1
- # via
- # azure-storage-blob
- # msrestazure
+ # via msrestazure
msrestazure==0.6.4
# via adlfs
multidict==6.0.4
# via
# aiohttp
# yarl
-multiprocess==0.70.14
+multiprocess==0.70.15
# via bytewax
-mypy==0.981
+mypy==0.982
# via
# feast (setup.py)
# sqlalchemy
-mypy-extensions==0.4.3
+mypy-extensions==1.0.0
# via
# black
# mypy
-mypy-protobuf==3.1
+mypy-protobuf==3.1.0
# via feast (setup.py)
-mysqlclient==2.1.1
+mysqlclient==2.2.0
# via feast (setup.py)
-nbformat==5.7.3
- # via great-expectations
-nodeenv==1.7.0
+nbclient==0.8.0
+ # via nbconvert
+nbconvert==7.8.0
+ # via jupyter-server
+nbformat==5.9.2
+ # via
+ # great-expectations
+ # jupyter-server
+ # nbclient
+ # nbconvert
+nest-asyncio==1.5.7
+ # via ipykernel
+nodeenv==1.8.0
# via pre-commit
-numpy==1.23.3
+notebook==7.0.3
+ # via great-expectations
+notebook-shim==0.2.3
+ # via
+ # jupyterlab
+ # notebook
+numpy==1.24.4
# via
# altair
# db-dtypes
@@ -436,7 +566,9 @@ oauthlib==3.2.2
# via requests-oauthlib
oscrypto==1.3.0
# via snowflake-connector-python
-packaging==21.3
+overrides==7.4.0
+ # via jupyter-server
+packaging==23.1
# via
# build
# dask
@@ -445,10 +577,17 @@ packaging==21.3
# docker
# google-cloud-bigquery
# great-expectations
+ # gunicorn
+ # ipykernel
+ # jupyter-server
+ # jupyterlab
+ # jupyterlab-server
+ # marshmallow
+ # nbconvert
# pytest
- # redis
+ # snowflake-connector-python
# sphinx
-pandas==1.4.4
+pandas==1.5.3
# via
# altair
# db-dtypes
@@ -459,11 +598,13 @@ pandas==1.4.4
# snowflake-connector-python
pandavro==1.5.2
# via feast (setup.py)
+pandocfilters==1.5.0
+ # via nbconvert
parso==0.8.3
# via jedi
-partd==1.3.0
+partd==1.4.0
# via dask
-pathspec==0.10.3
+pathspec==0.11.2
# via black
pbr==5.11.1
# via mock
@@ -471,24 +612,27 @@ pexpect==4.8.0
# via ipython
pickleshare==0.7.5
# via ipython
-pip-tools==6.12.1
+pip-tools==7.3.0
# via feast (setup.py)
-platformdirs==2.6.2
+platformdirs==3.8.1
# via
# black
# jupyter-core
+ # snowflake-connector-python
# virtualenv
-pluggy==1.0.0
+pluggy==1.3.0
# via pytest
ply==3.11
# via thriftpy2
-portalocker==2.6.0
+portalocker==2.7.0
# via msal-extensions
-pre-commit==2.21.0
+pre-commit==3.3.1
# via feast (setup.py)
-prompt-toolkit==3.0.36
+prometheus-client==0.17.1
+ # via jupyter-server
+prompt-toolkit==3.0.39
# via ipython
-proto-plus==1.22.1
+proto-plus==1.22.3
# via
# feast (setup.py)
# google-cloud-bigquery
@@ -496,7 +640,7 @@ proto-plus==1.22.1
# google-cloud-bigtable
# google-cloud-datastore
# google-cloud-firestore
-protobuf==4.21.7
+protobuf==4.23.3
# via
# feast (setup.py)
# google-api-core
@@ -507,6 +651,7 @@ protobuf==4.21.7
# google-cloud-firestore
# googleapis-common-protos
# grpc-google-iam-v1
+ # grpcio-health-checking
# grpcio-reflection
# grpcio-status
# grpcio-testing
@@ -514,30 +659,34 @@ protobuf==4.21.7
# mypy-protobuf
# proto-plus
psutil==5.9.0
- # via feast (setup.py)
-psycopg2-binary==2.9.5
+ # via
+ # feast (setup.py)
+ # ipykernel
+psycopg2-binary==2.9.7
# via feast (setup.py)
ptyprocess==0.7.0
- # via pexpect
+ # via
+ # pexpect
+ # terminado
pure-eval==0.2.2
# via stack-data
py==1.11.0
# via feast (setup.py)
py-cpuinfo==9.0.0
# via pytest-benchmark
-py4j==0.10.9.5
+py4j==0.10.9.7
# via pyspark
-pyarrow==8.0.0
+pyarrow==10.0.1
# via
# db-dtypes
# feast (setup.py)
# google-cloud-bigquery
# snowflake-connector-python
-pyasn1==0.4.8
+pyasn1==0.5.0
# via
# pyasn1-modules
# rsa
-pyasn1-modules==0.2.8
+pyasn1-modules==0.3.0
# via google-auth
pybindgen==0.22.1
# via feast (setup.py)
@@ -545,46 +694,43 @@ pycodestyle==2.10.0
# via flake8
pycparser==2.21
# via cffi
-pycryptodomex==3.16.0
+pycryptodomex==3.18.0
# via snowflake-connector-python
-pydantic==1.10.2
+pydantic==1.10.12
# via
# fastapi
# feast (setup.py)
+ # great-expectations
pyflakes==3.0.1
# via flake8
-pygments==2.13.0
+pygments==2.16.1
# via
# feast (setup.py)
# ipython
+ # nbconvert
# sphinx
-pyjwt[crypto]==2.6.0
+pyjwt[crypto]==2.8.0
# via
# adal
# msal
# snowflake-connector-python
-pymssql==2.2.7
+pymssql==2.2.8
# via feast (setup.py)
-pymysql==1.0.2
+pymysql==1.1.0
# via feast (setup.py)
-pyodbc==4.0.35
+pyodbc==4.0.39
# via feast (setup.py)
-pyopenssl==22.0.0
- # via
- # feast (setup.py)
- # snowflake-connector-python
-pyparsing==2.4.7
+pyopenssl==23.2.0
+ # via snowflake-connector-python
+pyparsing==3.1.1
# via
# great-expectations
# httplib2
- # packaging
pyproject-hooks==1.0.0
# via build
-pyrsistent==0.18.1
- # via jsonschema
-pyspark==3.3.1
+pyspark==3.4.1
# via feast (setup.py)
-pytest==7.2.1
+pytest==7.4.2
# via
# feast (setup.py)
# pytest-benchmark
@@ -596,7 +742,7 @@ pytest==7.2.1
# pytest-xdist
pytest-benchmark==3.4.1
# via feast (setup.py)
-pytest-cov==4.0.0
+pytest-cov==4.1.0
# via feast (setup.py)
pytest-lazy-fixture==0.6.3
# via feast (setup.py)
@@ -606,39 +752,55 @@ pytest-ordering==0.6
# via feast (setup.py)
pytest-timeout==1.4.2
# via feast (setup.py)
-pytest-xdist==3.1.0
+pytest-xdist==3.3.1
# via feast (setup.py)
python-dateutil==2.8.2
# via
# adal
+ # arrow
# botocore
# google-cloud-bigquery
# great-expectations
+ # jupyter-client
# kubernetes
# moto
# pandas
-python-dotenv==0.21.0
+ # rockset
+ # trino
+python-dotenv==1.0.0
# via uvicorn
-pytz==2022.2.1
+python-json-logger==2.0.7
+ # via jupyter-events
+pytz==2023.3.post1
# via
- # babel
# great-expectations
- # moto
# pandas
# snowflake-connector-python
# trino
-pytz-deprecation-shim==0.1.0.post0
- # via tzlocal
-pyyaml==6.0
+pyyaml==6.0.1
# via
# dask
# feast (setup.py)
+ # jupyter-events
# kubernetes
# pre-commit
+ # responses
# uvicorn
-redis==4.2.2
+pyzmq==25.1.1
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+redis==4.6.0
+ # via feast (setup.py)
+referencing==0.30.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+ # jupyter-events
+regex==2023.8.8
# via feast (setup.py)
-requests==2.28.1
+requests==2.31.0
# via
# adal
# adlfs
@@ -646,11 +808,13 @@ requests==2.28.1
# azure-datalake-store
# cachecontrol
# docker
+ # feast (setup.py)
# gcsfs
# google-api-core
# google-cloud-bigquery
# google-cloud-storage
# great-expectations
+ # jupyterlab-server
# kubernetes
# moto
# msal
@@ -665,24 +829,39 @@ requests-oauthlib==1.3.1
# google-auth-oauthlib
# kubernetes
# msrest
-responses==0.22.0
+responses==0.23.3
# via moto
+rfc3339-validator==0.1.4
+ # via
+ # jsonschema
+ # jupyter-events
+rfc3986-validator==0.1.1
+ # via
+ # jsonschema
+ # jupyter-events
+rockset==2.1.0
+ # via feast (setup.py)
+rpds-py==0.10.2
+ # via
+ # jsonschema
+ # referencing
rsa==4.9
# via google-auth
ruamel-yaml==0.17.17
# via great-expectations
ruamel-yaml-clib==0.2.7
# via ruamel-yaml
-s3fs==2022.1.0
- # via feast (setup.py)
-s3transfer==0.5.2
+s3transfer==0.6.2
# via boto3
-scipy==1.10.0
+scipy==1.11.2
# via great-expectations
+send2trash==1.8.2
+ # via jupyter-server
six==1.16.0
# via
+ # asttokens
# azure-core
- # azure-identity
+ # bleach
# cassandra-driver
# geomet
# google-auth
@@ -694,165 +873,219 @@ six==1.16.0
# msrestazure
# pandavro
# python-dateutil
+ # rfc3339-validator
# thriftpy2
sniffio==1.3.0
- # via anyio
+ # via
+ # anyio
+ # httpcore
+ # httpx
snowballstemmer==2.2.0
# via sphinx
-snowflake-connector-python[pandas]==2.9.0
+snowflake-connector-python[pandas]==3.1.1
# via feast (setup.py)
-sphinx==6.1.3
- # via feast (setup.py)
-sphinxcontrib-applehelp==1.0.3
+sortedcontainers==2.4.0
+ # via snowflake-connector-python
+soupsieve==2.5
+ # via beautifulsoup4
+sphinx==6.2.1
+ # via
+ # feast (setup.py)
+ # sphinxcontrib-applehelp
+ # sphinxcontrib-devhelp
+ # sphinxcontrib-htmlhelp
+ # sphinxcontrib-qthelp
+ # sphinxcontrib-serializinghtml
+sphinxcontrib-applehelp==1.0.7
# via sphinx
-sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-devhelp==1.0.5
# via sphinx
-sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-htmlhelp==2.0.4
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
-sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-qthelp==1.0.6
# via sphinx
-sphinxcontrib-serializinghtml==1.1.5
+sphinxcontrib-serializinghtml==1.1.9
# via sphinx
-sqlalchemy[mypy]==1.4.41
+sqlalchemy[mypy]==1.4.49
# via feast (setup.py)
-sqlalchemy2-stubs==0.0.2a27
+sqlalchemy2-stubs==0.0.2a35
# via sqlalchemy
stack-data==0.6.2
# via ipython
-starlette==0.20.4
+starlette==0.27.0
# via fastapi
-tabulate==0.8.10
+tabulate==0.9.0
# via feast (setup.py)
-tenacity==8.1.0
+tenacity==8.2.3
# via feast (setup.py)
-termcolor==2.2.0
- # via great-expectations
+terminado==0.17.1
+ # via
+ # jupyter-server
+ # jupyter-server-terminals
testcontainers==3.7.1
# via feast (setup.py)
thriftpy2==0.4.16
# via happybase
+tinycss2==1.2.1
+ # via nbconvert
toml==0.10.2
- # via
- # feast (setup.py)
- # responses
+ # via feast (setup.py)
tomli==2.0.1
# via
# black
# build
# coverage
+ # jupyterlab
# mypy
+ # pip-tools
# pyproject-hooks
# pytest
+tomlkit==0.12.1
+ # via snowflake-connector-python
toolz==0.12.0
# via
# altair
# dask
# partd
-tqdm==4.64.1
+tornado==6.3.3
+ # via
+ # ipykernel
+ # jupyter-client
+ # jupyter-server
+ # jupyterlab
+ # notebook
+ # terminado
+tqdm==4.66.1
# via
# feast (setup.py)
# great-expectations
-traitlets==5.8.1
+traitlets==5.9.0
# via
+ # comm
+ # ipykernel
# ipython
+ # ipywidgets
+ # jupyter-client
# jupyter-core
+ # jupyter-events
+ # jupyter-server
+ # jupyterlab
# matplotlib-inline
+ # nbclient
+ # nbconvert
# nbformat
-trino==0.321.0
+trino==0.326.0
# via feast (setup.py)
typeguard==2.13.3
# via feast (setup.py)
-types-docutils==0.19.1.1
- # via types-setuptools
-types-protobuf==4.21.0.2
+types-protobuf==3.19.22
# via
# feast (setup.py)
# mypy-protobuf
-types-pymysql==1.0.19.1
- # via feast (setup.py)
-types-python-dateutil==2.8.19.5
+types-pymysql==1.1.0.1
# via feast (setup.py)
-types-pytz==2022.7.0.0
+types-pyopenssl==23.2.0.2
+ # via types-redis
+types-python-dateutil==2.8.19.14
# via feast (setup.py)
-types-pyyaml==6.0.12.2
+types-pytz==2023.3.0.1
# via feast (setup.py)
-types-redis==4.4.0.0
+types-pyyaml==6.0.12.11
+ # via
+ # feast (setup.py)
+ # responses
+types-redis==4.6.0.5
# via feast (setup.py)
-types-requests==2.28.11.7
+types-requests==2.31.0.2
# via feast (setup.py)
-types-setuptools==65.7.0.1
+types-setuptools==68.2.0.0
# via feast (setup.py)
-types-tabulate==0.9.0.0
+types-tabulate==0.9.0.3
# via feast (setup.py)
-types-toml==0.10.8.1
- # via responses
-types-urllib3==1.26.25.4
+types-urllib3==1.26.25.14
# via types-requests
-typing-extensions==4.3.0
+typing-extensions==4.7.1
# via
- # aioitertools
+ # async-lru
# azure-core
+ # azure-storage-blob
# black
+ # fastapi
+ # filelock
# great-expectations
+ # ipython
# mypy
# pydantic
# snowflake-connector-python
# sqlalchemy2-stubs
# starlette
-tzdata==2022.7
- # via pytz-deprecation-shim
-tzlocal==4.2
+ # uvicorn
+tzlocal==5.0.1
# via
# great-expectations
# trino
+uri-template==1.3.0
+ # via jsonschema
uritemplate==4.1.1
# via google-api-python-client
-urllib3==1.26.12
+urllib3==1.26.16
# via
# botocore
# docker
# feast (setup.py)
+ # google-auth
# great-expectations
# kubernetes
# minio
# requests
# responses
+ # rockset
# snowflake-connector-python
-uvicorn[standard]==0.18.3
+uvicorn[standard]==0.23.2
# via feast (setup.py)
uvloop==0.17.0
# via uvicorn
-virtualenv==20.17.1
- # via pre-commit
+virtualenv==20.23.0
+ # via
+ # feast (setup.py)
+ # pre-commit
volatile==2.1.0
# via bowler
-watchfiles==0.17.0
+watchfiles==0.20.0
# via uvicorn
-wcwidth==0.2.5
+wcwidth==0.2.6
# via prompt-toolkit
-websocket-client==1.4.2
+webcolors==1.13
+ # via jsonschema
+webencodings==0.5.1
+ # via
+ # bleach
+ # tinycss2
+websocket-client==1.6.2
# via
# docker
+ # jupyter-server
# kubernetes
-websockets==10.3
+websockets==11.0.3
# via uvicorn
-werkzeug==2.1.2
+werkzeug==2.3.7
# via moto
-wheel==0.38.1
+wheel==0.41.2
# via pip-tools
-wrapt==1.14.1
- # via
- # aiobotocore
- # deprecated
- # testcontainers
+widgetsnbextension==4.0.8
+ # via ipywidgets
+wrapt==1.15.0
+ # via testcontainers
xmltodict==0.13.0
# via moto
-yarl==1.8.2
+yarl==1.9.2
# via aiohttp
-zipp==3.11.0
- # via importlib-metadata
+zipp==3.16.2
+ # via
+ # importlib-metadata
+ # importlib-resources
# The following packages are considered to be unsafe in a requirements file:
# pip
diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt
index c69c5a9635..432ac4cbe6 100644
--- a/sdk/python/requirements/py3.9-requirements.txt
+++ b/sdk/python/requirements/py3.9-requirements.txt
@@ -1,165 +1,187 @@
#
-# This file is autogenerated by pip-compile with python 3.9
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
#
# pip-compile --output-file=sdk/python/requirements/py3.9-requirements.txt
#
-anyio==3.6.1
+anyio==4.0.0
# via
+ # httpcore
# starlette
# watchfiles
appdirs==1.4.4
# via fissix
-attrs==22.1.0
+attrs==23.1.0
# via
# bowler
# jsonschema
+ # referencing
bowler==0.9.0
# via feast (setup.py)
-cachetools==5.2.0
- # via google-auth
-certifi==2022.12.7
- # via requests
-charset-normalizer==2.1.1
+certifi==2023.7.22
+ # via
+ # httpcore
+ # httpx
+ # requests
+charset-normalizer==3.2.0
# via requests
-click==8.1.3
+click==8.1.7
# via
# bowler
+ # dask
# feast (setup.py)
# moreorless
# uvicorn
-cloudpickle==2.2.0
+cloudpickle==2.2.1
# via dask
-colorama==0.4.5
+colorama==0.4.6
# via feast (setup.py)
-dask==2022.1.1
+dask==2023.9.1
# via feast (setup.py)
-dill==0.3.6
+dill==0.3.7
# via feast (setup.py)
-fastapi==0.85.0
+exceptiongroup==1.1.3
+ # via anyio
+fastapi==0.99.1
# via feast (setup.py)
-fastavro==1.6.1
+fastavro==1.8.3
# via
# feast (setup.py)
# pandavro
fissix==21.11.13
# via bowler
-fsspec==2022.1.0
+fsspec==2023.9.0
# via dask
-google-api-core==2.11.0
- # via feast (setup.py)
-google-auth==2.16.0
- # via google-api-core
-googleapis-common-protos==1.56.4
- # via
- # feast (setup.py)
- # google-api-core
-greenlet==2.0.1
- # via sqlalchemy
-grpcio==1.51.1
+grpcio==1.58.0
# via
# feast (setup.py)
+ # grpcio-health-checking
# grpcio-reflection
-grpcio-reflection==1.49.1
+ # grpcio-tools
+grpcio-health-checking==1.58.0
+ # via feast (setup.py)
+grpcio-reflection==1.58.0
+ # via feast (setup.py)
+grpcio-tools==1.58.0
+ # via feast (setup.py)
+gunicorn==21.2.0
# via feast (setup.py)
h11==0.14.0
+ # via
+ # httpcore
+ # uvicorn
+httpcore==0.17.3
+ # via httpx
+httptools==0.6.0
# via uvicorn
-httptools==0.5.0
- # via uvicorn
+httpx==0.24.1
+ # via feast (setup.py)
idna==3.4
# via
# anyio
+ # httpx
# requests
-jinja2==3.0.3
+importlib-metadata==6.8.0
+ # via
+ # dask
+ # feast (setup.py)
+importlib-resources==6.0.1
# via feast (setup.py)
-jsonschema==4.16.0
+jinja2==3.1.2
# via feast (setup.py)
+jsonschema==4.19.0
+ # via feast (setup.py)
+jsonschema-specifications==2023.7.1
+ # via jsonschema
locket==1.0.0
# via partd
-markupsafe==2.1.1
+markupsafe==2.1.3
# via jinja2
-mmh3==3.0.0
+mmh3==4.0.1
# via feast (setup.py)
moreorless==0.4.0
# via bowler
-mypy==0.981
+mypy==1.5.1
# via sqlalchemy
-mypy-extensions==0.4.3
+mypy-extensions==1.0.0
# via mypy
-numpy==1.23.3
+mypy-protobuf==3.1.0
+ # via feast (setup.py)
+numpy==1.24.4
# via
# feast (setup.py)
# pandas
# pandavro
# pyarrow
-packaging==21.3
- # via dask
-pandas==1.5.0
+packaging==23.1
+ # via
+ # dask
+ # gunicorn
+pandas==1.5.3
# via
# feast (setup.py)
# pandavro
pandavro==1.5.2
# via feast (setup.py)
-partd==1.3.0
+partd==1.4.0
# via dask
-proto-plus==1.22.1
+proto-plus==1.22.3
# via feast (setup.py)
-protobuf==4.21.7
+protobuf==4.23.3
# via
# feast (setup.py)
- # google-api-core
- # googleapis-common-protos
+ # grpcio-health-checking
# grpcio-reflection
+ # grpcio-tools
+ # mypy-protobuf
# proto-plus
-pyarrow==8.0.0
+pyarrow==11.0.0
# via feast (setup.py)
-pyasn1==0.4.8
- # via
- # pyasn1-modules
- # rsa
-pyasn1-modules==0.2.8
- # via google-auth
-pydantic==1.10.2
+pydantic==1.10.12
# via
# fastapi
# feast (setup.py)
-pygments==2.13.0
+pygments==2.16.1
# via feast (setup.py)
-pyparsing==2.4.7
- # via packaging
-pyrsistent==0.18.1
- # via jsonschema
python-dateutil==2.8.2
# via pandas
-python-dotenv==0.21.0
+python-dotenv==1.0.0
# via uvicorn
-pytz==2022.2.1
+pytz==2023.3.post1
# via pandas
-pyyaml==6.0
+pyyaml==6.0.1
# via
# dask
# feast (setup.py)
# uvicorn
-requests==2.28.1
- # via google-api-core
-rsa==4.9
- # via google-auth
+referencing==0.30.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+requests==2.31.0
+ # via feast (setup.py)
+rpds-py==0.10.2
+ # via
+ # jsonschema
+ # referencing
six==1.16.0
# via
- # google-auth
# pandavro
# python-dateutil
sniffio==1.3.0
- # via anyio
-sqlalchemy[mypy]==1.4.41
+ # via
+ # anyio
+ # httpcore
+ # httpx
+sqlalchemy[mypy]==1.4.49
# via feast (setup.py)
-sqlalchemy2-stubs==0.0.2a27
+sqlalchemy2-stubs==0.0.2a35
# via sqlalchemy
-starlette==0.20.4
+starlette==0.27.0
# via fastapi
-tabulate==0.8.10
+tabulate==0.9.0
# via feast (setup.py)
-tenacity==8.1.0
+tenacity==8.2.3
# via feast (setup.py)
toml==0.10.2
# via feast (setup.py)
@@ -169,25 +191,36 @@ toolz==0.12.0
# via
# dask
# partd
-tqdm==4.64.1
+tqdm==4.66.1
# via feast (setup.py)
typeguard==2.13.3
# via feast (setup.py)
-typing-extensions==4.3.0
+types-protobuf==4.24.0.1
+ # via mypy-protobuf
+typing-extensions==4.7.1
# via
+ # fastapi
# mypy
# pydantic
# sqlalchemy2-stubs
# starlette
-urllib3==1.26.12
+ # uvicorn
+urllib3==2.0.4
# via requests
-uvicorn[standard]==0.18.3
+uvicorn[standard]==0.23.2
# via feast (setup.py)
uvloop==0.17.0
# via uvicorn
volatile==2.1.0
# via bowler
-watchfiles==0.17.0
+watchfiles==0.20.0
# via uvicorn
-websockets==10.3
+websockets==11.0.3
# via uvicorn
+zipp==3.16.2
+ # via
+ # importlib-metadata
+ # importlib-resources
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py
index 7441645027..728bd9b34f 100644
--- a/sdk/python/tests/conftest.py
+++ b/sdk/python/tests/conftest.py
@@ -80,6 +80,10 @@ def pytest_configure(config):
"markers", "integration: mark test that has external dependencies"
)
config.addinivalue_line("markers", "benchmark: mark benchmarking tests")
+ config.addinivalue_line(
+ "markers",
+ "universal_online_stores: mark tests that can be run against different online stores",
+ )
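+    # Tests opt in by declaring @pytest.mark.universal_online_stores.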
config.addinivalue_line(
"markers",
"universal_offline_stores: mark tests that can be run against different offline stores",
@@ -100,6 +104,7 @@ def pytest_addoption(parser):
help="Run benchmark tests",
)
+
def pytest_collection_modifyitems(config, items: List[Item]):
should_run_integration = config.getoption("--integration") is True
should_run_benchmark = config.getoption("--benchmark") is True
@@ -388,3 +393,17 @@ def feature_store_for_online_retrieval(
]
return fs, feature_refs, entity_rows
+
+
+@pytest.fixture
+def fake_ingest_data():
+ """Fake data to ingest into the feature store"""
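+    # One synthetic driver row; timestamps are rounded to milliseconds to avoid
+    # precision mismatches when comparing against values read back from a store.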
+ data = {
+ "driver_id": [1],
+ "conv_rate": [0.5],
+ "acc_rate": [0.6],
+ "avg_daily_trips": [4],
+ "event_timestamp": [pd.Timestamp(datetime.utcnow()).round("ms")],
+ "created": [pd.Timestamp(datetime.utcnow()).round("ms")],
+ }
+ return pd.DataFrame(data)
diff --git a/sdk/python/tests/integration/e2e/test_go_feature_server.py b/sdk/python/tests/integration/e2e/test_go_feature_server.py
deleted file mode 100644
index 0f972e45df..0000000000
--- a/sdk/python/tests/integration/e2e/test_go_feature_server.py
+++ /dev/null
@@ -1,263 +0,0 @@
-import threading
-import time
-from datetime import datetime
-from typing import List
-
-import grpc
-import pandas as pd
-import pytest
-import pytz
-import requests
-
-from feast.embedded_go.online_features_service import EmbeddedOnlineFeatureServer
-from feast.feast_object import FeastObject
-from feast.feature_logging import LoggingConfig
-from feast.feature_service import FeatureService
-from feast.infra.feature_servers.base_config import FeatureLoggingConfig
-from feast.protos.feast.serving.ServingService_pb2 import (
- FieldStatus,
- GetOnlineFeaturesRequest,
- GetOnlineFeaturesResponse,
-)
-from feast.protos.feast.serving.ServingService_pb2_grpc import ServingServiceStub
-from feast.protos.feast.types.Value_pb2 import RepeatedValue
-from feast.type_map import python_values_to_proto_values
-from feast.value_type import ValueType
-from feast.wait import wait_retry_backoff
-from tests.integration.feature_repos.repo_configuration import (
- construct_universal_feature_views,
-)
-from tests.integration.feature_repos.universal.entities import (
- customer,
- driver,
- location,
-)
-from tests.utils.http_server import check_port_open, free_port
-from tests.utils.test_log_creator import generate_expected_logs, get_latest_rows
-
-
-@pytest.mark.integration
-@pytest.mark.goserver
-def test_go_grpc_server(grpc_client):
- resp: GetOnlineFeaturesResponse = grpc_client.GetOnlineFeatures(
- GetOnlineFeaturesRequest(
- feature_service="driver_features",
- entities={
- "driver_id": RepeatedValue(
- val=python_values_to_proto_values(
- [5001, 5002], feature_type=ValueType.INT64
- )
- )
- },
- full_feature_names=True,
- )
- )
- assert list(resp.metadata.feature_names.val) == [
- "driver_id",
- "driver_stats__conv_rate",
- "driver_stats__acc_rate",
- "driver_stats__avg_daily_trips",
- ]
- for vector in resp.results:
- assert all([s == FieldStatus.PRESENT for s in vector.statuses])
-
-
-@pytest.mark.integration
-@pytest.mark.goserver
-def test_go_http_server(http_server_port):
- response = requests.post(
- f"http://localhost:{http_server_port}/get-online-features",
- json={
- "feature_service": "driver_features",
- "entities": {"driver_id": [5001, 5002]},
- "full_feature_names": True,
- },
- )
- assert response.status_code == 200, response.text
- response = response.json()
- assert set(response.keys()) == {"metadata", "results"}
- metadata = response["metadata"]
- results = response["results"]
- assert response["metadata"] == {
- "feature_names": [
- "driver_id",
- "driver_stats__conv_rate",
- "driver_stats__acc_rate",
- "driver_stats__avg_daily_trips",
- ]
- }, metadata
- assert len(results) == 4, results
- assert all(
- set(result.keys()) == {"event_timestamps", "statuses", "values"}
- for result in results
- ), results
- assert all(
- result["statuses"] == ["PRESENT", "PRESENT"] for result in results
- ), results
- assert results[0]["values"] == [5001, 5002], results
- for result in results[1:]:
- assert len(result["values"]) == 2, result
- assert all(value is not None for value in result["values"]), result
-
-
-@pytest.mark.integration
-@pytest.mark.goserver
-@pytest.mark.universal_offline_stores
-@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v))
-def test_feature_logging(
- grpc_client, environment, universal_data_sources, full_feature_names
-):
- fs = environment.feature_store
- feature_service = fs.get_feature_service("driver_features")
- log_start_date = datetime.now().astimezone(pytz.UTC)
- driver_ids = list(range(5001, 5011))
-
- for driver_id in driver_ids:
- # send each driver id in separate request
- grpc_client.GetOnlineFeatures(
- GetOnlineFeaturesRequest(
- feature_service="driver_features",
- entities={
- "driver_id": RepeatedValue(
- val=python_values_to_proto_values(
- [driver_id], feature_type=ValueType.INT64
- )
- )
- },
- full_feature_names=full_feature_names,
- )
- )
- # with some pause
- time.sleep(0.1)
-
- _, datasets, _ = universal_data_sources
- latest_rows = get_latest_rows(datasets.driver_df, "driver_id", driver_ids)
- feature_view = fs.get_feature_view("driver_stats")
- features = [
- feature.name
- for proj in feature_service.feature_view_projections
- for feature in proj.features
- ]
- expected_logs = generate_expected_logs(
- latest_rows, feature_view, features, ["driver_id"], "event_timestamp"
- )
-
- def retrieve():
- retrieval_job = fs._get_provider().retrieve_feature_service_logs(
- feature_service=feature_service,
- start_date=log_start_date,
- end_date=datetime.now().astimezone(pytz.UTC),
- config=fs.config,
- registry=fs._registry,
- )
- try:
- df = retrieval_job.to_df()
- except Exception:
- # Table or directory was not created yet
- return None, False
-
- return df, df.shape[0] == len(driver_ids)
-
- persisted_logs = wait_retry_backoff(
- retrieve, timeout_secs=60, timeout_msg="Logs retrieval failed"
- )
-
- persisted_logs = persisted_logs.sort_values(by="driver_id").reset_index(drop=True)
- persisted_logs = persisted_logs[expected_logs.columns]
- pd.testing.assert_frame_equal(expected_logs, persisted_logs, check_dtype=False)
-
-
-"""
-Start go feature server either on http or grpc based on the repo configuration for testing.
-"""
-
-
-def _server_port(environment, server_type: str):
- if not environment.test_repo_config.go_feature_serving:
- pytest.skip("Only for Go path")
-
- fs = environment.feature_store
-
- embedded = EmbeddedOnlineFeatureServer(
- repo_path=str(fs.repo_path.absolute()),
- repo_config=fs.config,
- feature_store=fs,
- )
- port = free_port()
- if server_type == "grpc":
- target = embedded.start_grpc_server
- elif server_type == "http":
- target = embedded.start_http_server
- else:
- raise ValueError("Server Type must be either 'http' or 'grpc'")
-
- t = threading.Thread(
- target=target,
- args=("127.0.0.1", port),
- kwargs=dict(
- enable_logging=True,
- logging_options=FeatureLoggingConfig(
- enabled=True,
- queue_capacity=100,
- write_to_disk_interval_secs=1,
- flush_interval_secs=1,
- emit_timeout_micro_secs=10000,
- ),
- ),
- )
- t.start()
-
- wait_retry_backoff(
- lambda: (None, check_port_open("127.0.0.1", port)), timeout_secs=15
- )
-
- yield port
- if server_type == "grpc":
- embedded.stop_grpc_server()
- else:
- embedded.stop_http_server()
-
- # wait for graceful stop
- time.sleep(5)
-
-
-# Go test fixtures
-
-
-@pytest.fixture
-def initialized_registry(environment, universal_data_sources):
- fs = environment.feature_store
-
- _, _, data_sources = universal_data_sources
- feature_views = construct_universal_feature_views(data_sources)
-
- feature_service = FeatureService(
- name="driver_features",
- features=[feature_views.driver],
- logging_config=LoggingConfig(
- destination=environment.data_source_creator.create_logged_features_destination(),
- sample_rate=1.0,
- ),
- )
- feast_objects: List[FeastObject] = [feature_service]
- feast_objects.extend(feature_views.values())
- feast_objects.extend([driver(), customer(), location()])
-
- fs.apply(feast_objects)
- fs.materialize(environment.start_date, environment.end_date)
-
-
-@pytest.fixture
-def grpc_server_port(environment, initialized_registry):
- yield from _server_port(environment, "grpc")
-
-
-@pytest.fixture
-def http_server_port(environment, initialized_registry):
- yield from _server_port(environment, "http")
-
-
-@pytest.fixture
-def grpc_client(grpc_server_port):
- ch = grpc.insecure_channel(f"localhost:{grpc_server_port}")
- yield ServingServiceStub(ch)
diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py
index fcd8e11449..fda5b3c11d 100644
--- a/sdk/python/tests/integration/feature_repos/repo_configuration.py
+++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py
@@ -93,6 +93,12 @@
"instance": os.getenv("BIGTABLE_INSTANCE_ID", "feast-integration-tests"),
}
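+
+# Connection settings for the Rockset online store; the API key and host are
+# read from the environment, with the host defaulting to the usw2 API server.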
+ROCKSET_CONFIG = {
+ "type": "rockset",
+ "api_key": os.getenv("ROCKSET_APIKEY", ""),
+ "host": os.getenv("ROCKSET_APISERVER", "api.rs2.usw2.rockset.com"),
+}
+
OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, DataSourceCreator] = {
"file": ("local", FileDataSourceCreator),
"bigquery": ("gcp", BigQueryDataSourceCreator),
@@ -126,6 +132,11 @@
AVAILABLE_ONLINE_STORES["snowflake"] = (SNOWFLAKE_CONFIG, None)
AVAILABLE_ONLINE_STORES["bigtable"] = (BIGTABLE_CONFIG, None)
+    # Uncomment to test against a private Rockset account. Currently disabled
+    # because there is no dedicated Rockset instance for CI testing and no
+    # containerized version of Rockset.
+ # AVAILABLE_ONLINE_STORES["rockset"] = (ROCKSET_CONFIG, None)
+
full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME)
if full_repo_configs_module is not None:
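The new ROCKSET_CONFIG is driven entirely by environment variables, so a private account can be wired in without further code changes once the registration line above is uncommented. A minimal sketch of a local, non-CI run (the API key below is a placeholder):

```python
# Sketch: enable the Rockset online store for a private, non-CI test run.
# Assumes the commented-out AVAILABLE_ONLINE_STORES["rockset"] line above is
# uncommented locally.
import os

os.environ["ROCKSET_APIKEY"] = "<your-private-api-key>"
os.environ["ROCKSET_APISERVER"] = "api.rs2.usw2.rockset.com"  # default host from ROCKSET_CONFIG
```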
diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py
index c92a413616..dfe8e3d33b 100644
--- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py
+++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py
@@ -59,6 +59,7 @@ def create_data_source(
aws_utils.upload_df_to_redshift(
self.client,
self.offline_store_config.cluster_id,
+ self.offline_store_config.workgroup,
self.offline_store_config.database,
self.offline_store_config.user,
self.s3,
@@ -105,6 +106,7 @@ def teardown(self):
aws_utils.execute_redshift_statement(
self.client,
self.offline_store_config.cluster_id,
+ self.offline_store_config.workgroup,
self.offline_store_config.database,
self.offline_store_config.user,
f"DROP TABLE IF EXISTS {table}",
diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py
index f0a09b4d5b..c7e5961a88 100644
--- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py
+++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py
@@ -13,8 +13,8 @@
SnowflakeLoggingDestination,
)
from feast.infra.utils.snowflake.snowflake_utils import (
+ GetSnowflakeConnection,
execute_snowflake_statement,
- get_snowflake_conn,
write_pandas,
)
from feast.repo_config import FeastConfigBaseModel
@@ -54,11 +54,10 @@ def create_data_source(
field_mapping: Dict[str, str] = None,
) -> DataSource:
- snowflake_conn = get_snowflake_conn(self.offline_store_config)
-
destination_name = self.get_prefixed_table_name(destination_name)
- write_pandas(snowflake_conn, df, destination_name, auto_create_table=True)
+ with GetSnowflakeConnection(self.offline_store_config) as conn:
+ write_pandas(conn, df, destination_name, auto_create_table=True)
self.tables.append(destination_name)
@@ -67,7 +66,6 @@ def create_data_source(
timestamp_field=timestamp_field,
created_timestamp_column=created_timestamp_column,
field_mapping=field_mapping or {"ts_1": "ts"},
- warehouse=self.offline_store_config.warehouse,
)
def create_saved_dataset_destination(self) -> SavedDatasetSnowflakeStorage:
@@ -93,7 +91,7 @@ def get_prefixed_table_name(self, suffix: str) -> str:
return f"{self.project_name}_{suffix}"
def teardown(self):
- with get_snowflake_conn(self.offline_store_config) as conn:
+ with GetSnowflakeConnection(self.offline_store_config) as conn:
for table in self.tables:
query = f'DROP TABLE IF EXISTS "{table}"'
execute_snowflake_statement(conn, query)
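GetSnowflakeConnection turns the bare get_snowflake_conn call into a context manager, so the connection is released even when a statement raises. A minimal sketch of the pattern these tests now follow, assuming offline_store_config is a valid SnowflakeOfflineStoreConfig:

```python
# Sketch of the context-managed connection pattern adopted in this diff.
with GetSnowflakeConnection(offline_store_config) as conn:
    execute_snowflake_statement(conn, 'DROP TABLE IF EXISTS "MY_TEST_TABLE"')
# The connection is released on exit, even if the statement raised.
```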
diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py b/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py
new file mode 100644
index 0000000000..65d74135ae
--- /dev/null
+++ b/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py
@@ -0,0 +1,48 @@
+import logging
+import random
+import string
+from typing import Any, Dict, Optional
+
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.waiting_utils import wait_for_logs
+
+from tests.integration.feature_repos.universal.online_store_creator import (
+ OnlineStoreCreator,
+)
+
+
+class HazelcastOnlineStoreCreator(OnlineStoreCreator):
+
+ cluster_name: str = ""
+ container: Optional[DockerContainer] = None
+
+ def __init__(self, project_name: str, **kwargs):
+ logging.getLogger("hazelcast").setLevel(logging.ERROR)
+ super().__init__(project_name)
+ self.cluster_name = "".join(
+ random.choice(string.ascii_lowercase) for _ in range(5)
+ )
+ self.container = (
+ DockerContainer("hazelcast/hazelcast")
+ .with_env("HZ_CLUSTERNAME", self.cluster_name)
+ .with_env("HZ_NETWORK_PORT_AUTOINCREMENT", "true")
+ .with_exposed_ports(5701)
+ )
+
+ def create_online_store(self) -> Dict[str, Any]:
+ self.container.start()
+ cluster_member = (
+ self.container.get_container_host_ip()
+ + ":"
+ + self.container.get_exposed_port(5701)
+ )
+ log_string_to_wait_for = r"Cluster name: " + self.cluster_name
+ wait_for_logs(self.container, predicate=log_string_to_wait_for, timeout=10)
+ return {
+ "type": "hazelcast",
+ "cluster_name": self.cluster_name,
+ "cluster_members": [cluster_member],
+ }
+
+ def teardown(self):
+ self.container.stop()
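For reference, the new creator follows the same lifecycle as the other online store creators: construct it, call create_online_store() to start the container and obtain a config fragment, then teardown(). A usage sketch, assuming a local Docker daemon is available:

```python
# Usage sketch for HazelcastOnlineStoreCreator; requires a running Docker daemon.
creator = HazelcastOnlineStoreCreator("my_project")
online_store_config = creator.create_online_store()
# -> {"type": "hazelcast", "cluster_name": "<random>", "cluster_members": ["<host>:<port>"]}
try:
    pass  # run tests against the containerized cluster here
finally:
    creator.teardown()
```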
diff --git a/sdk/python/tests/integration/materialization/test_lambda.py b/sdk/python/tests/integration/materialization/test_lambda.py
index d93508c156..07ab93e2e1 100644
--- a/sdk/python/tests/integration/materialization/test_lambda.py
+++ b/sdk/python/tests/integration/materialization/test_lambda.py
@@ -21,6 +21,7 @@
@pytest.mark.integration
+@pytest.mark.skip(reason="Very flaky test")
def test_lambda_materialization_consistency():
lambda_config = IntegrationTestRepoConfig(
provider="aws",
@@ -64,7 +65,6 @@ def test_lambda_materialization_consistency():
)
try:
-
fs.apply([driver, driver_stats_fv])
print(df)
diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py
index 5b987fea4e..2942aae6de 100644
--- a/sdk/python/tests/integration/online_store/test_universal_online.py
+++ b/sdk/python/tests/integration/online_store/test_universal_online.py
@@ -18,6 +18,7 @@
from feast.feature_service import FeatureService
from feast.feature_view import FeatureView
from feast.field import Field
+from feast.infra.utils.postgres.postgres_config import ConnectionType
from feast.online_response import TIMESTAMP_POSTFIX
from feast.types import Float32, Int32, String
from feast.wait import wait_retry_backoff
@@ -33,6 +34,42 @@
from tests.utils.data_source_test_creator import prep_file_source
+@pytest.mark.integration
+@pytest.mark.universal_online_stores(only=["postgres"])
+def test_connection_pool_online_stores(
+ environment, universal_data_sources, fake_ingest_data
+):
+ if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True":
+ return
+ fs = environment.feature_store
+ fs.config.online_store.conn_type = ConnectionType.pool
+ fs.config.online_store.min_conn = 1
+ fs.config.online_store.max_conn = 10
+
+ entities, datasets, data_sources = universal_data_sources
+ driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver)
+ driver_entity = driver()
+
+ # Register Feature View and Entity
+ fs.apply([driver_hourly_stats, driver_entity])
+
+ # directly ingest data into the Online Store
+ fs.write_to_online_store("driver_stats", fake_ingest_data)
+
+ # assert the right data is in the Online Store
+ df = fs.get_online_features(
+ features=[
+ "driver_stats:avg_daily_trips",
+ "driver_stats:acc_rate",
+ "driver_stats:conv_rate",
+ ],
+ entity_rows=[{"driver_id": 1}],
+ ).to_df()
+ assertpy.assert_that(df["avg_daily_trips"].iloc[0]).is_equal_to(4)
+ assertpy.assert_that(df["acc_rate"].iloc[0]).is_close_to(0.6, 1e-6)
+ assertpy.assert_that(df["conv_rate"].iloc[0]).is_close_to(0.5, 1e-6)
+
+
@pytest.mark.integration
@pytest.mark.universal_online_stores(only=["redis"])
def test_entity_ttl_online_store(environment, universal_data_sources):
@@ -437,6 +474,7 @@ def test_online_retrieval_with_event_timestamps(
1646263600
)
+
@pytest.mark.integration
@pytest.mark.universal_online_stores(only=["redis"])
def test_online_store_cleanup(environment, universal_data_sources):
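The new test switches the Postgres online store into pooled mode purely through configuration; the relevant knobs, as set in test_connection_pool_online_stores, are:

```python
# Pool configuration exercised by test_connection_pool_online_stores; fs is
# assumed to be an existing FeatureStore backed by the Postgres online store.
from feast.infra.utils.postgres.postgres_config import ConnectionType

fs.config.online_store.conn_type = ConnectionType.pool  # instead of a single connection
fs.config.online_store.min_conn = 1
fs.config.online_store.max_conn = 10
```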
diff --git a/sdk/python/tests/integration/registration/test_inference.py b/sdk/python/tests/integration/registration/test_inference.py
index 17bb09933e..9f490d7f4e 100644
--- a/sdk/python/tests/integration/registration/test_inference.py
+++ b/sdk/python/tests/integration/registration/test_inference.py
@@ -20,7 +20,10 @@ def test_update_file_data_source_with_inferred_event_timestamp_col(simple_datase
update_data_sources_with_inferred_event_timestamp_col(
data_sources,
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ registry="test.pb",
+ entity_key_serialization_version=2,
),
)
actual_event_timestamp_cols = [
@@ -35,7 +38,10 @@ def test_update_file_data_source_with_inferred_event_timestamp_col(simple_datase
update_data_sources_with_inferred_event_timestamp_col(
[file_source],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ registry="test.pb",
+ entity_key_serialization_version=2,
),
)
@@ -53,7 +59,10 @@ def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_so
update_data_sources_with_inferred_event_timestamp_col(
data_sources_copy.values(),
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ registry="test.pb",
+ entity_key_serialization_version=2,
),
)
actual_event_timestamp_cols = [
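RepoConfig no longer fills in a default registry path, so every inline config in this file now spells it out. The minimal local config used throughout:

```python
# Minimal local RepoConfig as these tests now require: the registry path must
# be passed explicitly.
from feast.repo_config import RepoConfig

config = RepoConfig(
    provider="local",
    project="test",
    registry="test.pb",
    entity_key_serialization_version=2,
)
```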
diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py
index 739fb9ec5c..57e625e66b 100644
--- a/sdk/python/tests/integration/registration/test_registry.py
+++ b/sdk/python/tests/integration/registration/test_registry.py
@@ -45,7 +45,7 @@ def gcs_registry() -> Registry:
registry_config = RegistryConfig(
path=f"gs://{bucket_name}/registry.db", cache_ttl_seconds=600
)
- return Registry(registry_config, None)
+ return Registry("project", registry_config, None)
@pytest.fixture
@@ -57,7 +57,7 @@ def s3_registry() -> Registry:
path=f"{aws_registry_path}/{int(time.time() * 1000)}/registry.db",
cache_ttl_seconds=600,
)
- return Registry(registry_config, None)
+ return Registry("project", registry_config, None)
@pytest.mark.integration
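Registry now takes the project name as its first positional argument, so both fixtures change mechanically:

```python
# Updated constructor shape, as used by the gcs/s3 fixtures above;
# RegistryConfig is imported by this module.
registry_config = RegistryConfig(path="registry.db", cache_ttl_seconds=600)
registry = Registry("project", registry_config, None)  # project name comes first now
```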
diff --git a/sdk/python/tests/unit/cli/test_cli.py b/sdk/python/tests/unit/cli/test_cli.py
index 25a1dfed34..d15e1d1616 100644
--- a/sdk/python/tests/unit/cli/test_cli.py
+++ b/sdk/python/tests/unit/cli/test_cli.py
@@ -122,6 +122,7 @@ def setup_third_party_provider_repo(provider_name: str):
type: sqlite
offline_store:
type: file
+ entity_key_serialization_version: 2
"""
)
)
@@ -159,6 +160,7 @@ def setup_third_party_registry_store_repo(
type: sqlite
offline_store:
type: file
+ entity_key_serialization_version: 2
"""
)
)
diff --git a/sdk/python/tests/unit/infra/offline_stores/test_bigquery.py b/sdk/python/tests/unit/infra/offline_stores/test_bigquery.py
new file mode 100644
index 0000000000..662be20b31
--- /dev/null
+++ b/sdk/python/tests/unit/infra/offline_stores/test_bigquery.py
@@ -0,0 +1,84 @@
+from unittest.mock import Mock, patch
+
+import pandas as pd
+import pyarrow
+import pytest
+
+from feast.infra.offline_stores.bigquery import (
+ BigQueryOfflineStoreConfig,
+ BigQueryRetrievalJob,
+)
+from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig
+from feast.repo_config import RepoConfig
+
+
+@pytest.fixture
+def pandas_dataframe():
+ return pd.DataFrame(
+ data={
+ "key": [1, 2, 3],
+ "value": ["a", None, "c"],
+ }
+ )
+
+
+@pytest.fixture
+def big_query_result(pandas_dataframe):
+ class BigQueryResult:
+ def to_dataframe(self, **kwargs):
+ return pandas_dataframe
+
+ def to_arrow(self, **kwargs):
+ return pyarrow.Table.from_pandas(pandas_dataframe)
+
+ def exception(self, timeout=None):
+ return None
+
+ return BigQueryResult()
+
+
+class TestBigQueryRetrievalJob:
+ query = "SELECT * FROM bigquery"
+ client = Mock()
+ retrieval_job = BigQueryRetrievalJob(
+ query=query,
+ client=client,
+ config=RepoConfig(
+ registry="gs://ml-test/repo/registry.db",
+ project="test",
+ provider="gcp",
+ online_store=SqliteOnlineStoreConfig(type="sqlite"),
+ offline_store=BigQueryOfflineStoreConfig(type="bigquery", dataset="feast"),
+ ),
+ full_feature_names=True,
+ on_demand_feature_views=[],
+ )
+
+ def test_to_sql(self):
+ assert self.retrieval_job.to_sql() == self.query
+
+ def test_to_df(self, big_query_result, pandas_dataframe):
+ self.client.query.return_value = big_query_result
+ actual = self.retrieval_job.to_df()
+ pd.testing.assert_frame_equal(actual, pandas_dataframe)
+
+ def test_to_df_timeout(self, big_query_result):
+ self.client.query.return_value = big_query_result
+ with patch.object(self.retrieval_job, "_execute_query"):
+ self.retrieval_job.to_df(timeout=30)
+ self.retrieval_job._execute_query.assert_called_once_with(
+ query=self.query, timeout=30
+ )
+
+ def test_to_arrow(self, big_query_result, pandas_dataframe):
+ self.client.query.return_value = big_query_result
+ actual = self.retrieval_job.to_arrow()
+ pd.testing.assert_frame_equal(actual.to_pandas(), pandas_dataframe)
+
+ def test_to_arrow_timeout(self, big_query_result):
+ self.client.query.return_value = big_query_result
+ with patch.object(self.retrieval_job, "_execute_query"):
+ self.retrieval_job.to_arrow(timeout=30)
+ self.retrieval_job._execute_query.assert_called_once_with(
+ query=self.query, timeout=30
+ )
diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py
new file mode 100644
index 0000000000..ef0cce0470
--- /dev/null
+++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py
@@ -0,0 +1,225 @@
+from typing import List, Optional
+from unittest.mock import MagicMock, patch
+
+import pandas as pd
+import pyarrow
+import pytest
+
+from feast.infra.offline_stores.contrib.athena_offline_store.athena import (
+ AthenaOfflineStoreConfig,
+ AthenaRetrievalJob,
+)
+from feast.infra.offline_stores.contrib.mssql_offline_store.mssql import (
+ MsSqlServerOfflineStoreConfig,
+ MsSqlServerRetrievalJob,
+)
+from feast.infra.offline_stores.contrib.postgres_offline_store.postgres import (
+ PostgreSQLOfflineStoreConfig,
+ PostgreSQLRetrievalJob,
+)
+from feast.infra.offline_stores.contrib.spark_offline_store.spark import (
+ SparkOfflineStoreConfig,
+ SparkRetrievalJob,
+)
+from feast.infra.offline_stores.contrib.trino_offline_store.trino import (
+ TrinoRetrievalJob,
+)
+from feast.infra.offline_stores.file import FileRetrievalJob
+from feast.infra.offline_stores.offline_store import RetrievalJob, RetrievalMetadata
+from feast.infra.offline_stores.redshift import (
+ RedshiftOfflineStoreConfig,
+ RedshiftRetrievalJob,
+)
+from feast.infra.offline_stores.snowflake import (
+ SnowflakeOfflineStoreConfig,
+ SnowflakeRetrievalJob,
+)
+from feast.on_demand_feature_view import OnDemandFeatureView
+from feast.saved_dataset import SavedDatasetStorage
+
+
+class MockRetrievalJob(RetrievalJob):
+ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
+ """
+ Synchronously executes the underlying query and returns the result as a pandas dataframe.
+
+ Does not handle on demand transformations or dataset validation. For either of those,
+ `to_df` should be used.
+ """
+ pass
+
+ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
+ """
+ Synchronously executes the underlying query and returns the result as an arrow table.
+
+ Does not handle on demand transformations or dataset validation. For either of those,
+ `to_arrow` should be used.
+ """
+ pass
+
+ @property
+ def full_feature_names(self) -> bool:
+ """Returns True if full feature names should be applied to the results of the query."""
+ pass
+
+ @property
+ def on_demand_feature_views(self) -> List[OnDemandFeatureView]:
+ """Returns a list containing all the on demand feature views to be handled."""
+ pass
+
+ def persist(
+ self,
+ storage: SavedDatasetStorage,
+ allow_overwrite: bool = False,
+ timeout: Optional[int] = None,
+ ):
+ """
+ Synchronously executes the underlying query and persists the result in the same offline store
+ at the specified destination.
+
+ Args:
+ storage: The saved dataset storage object specifying where the result should be persisted.
+ allow_overwrite: If True, a pre-existing location (e.g. table or file) can be overwritten.
+ Currently not all individual offline store implementations make use of this parameter.
+ """
+ pass
+
+ @property
+ def metadata(self) -> Optional[RetrievalMetadata]:
+ """Returns metadata about the retrieval job."""
+ pass
+
+
+# Since RetrievalJob subclasses are not otherwise tested, we add some shared tests here.
+@pytest.fixture(
+ params=[
+ MockRetrievalJob,
+ FileRetrievalJob,
+ RedshiftRetrievalJob,
+ SnowflakeRetrievalJob,
+ AthenaRetrievalJob,
+ MsSqlServerRetrievalJob,
+ PostgreSQLRetrievalJob,
+ SparkRetrievalJob,
+ TrinoRetrievalJob,
+ ]
+)
+def retrieval_job(request, environment):
+ if request.param is FileRetrievalJob:
+ return FileRetrievalJob(lambda: 1, full_feature_names=False)
+ elif request.param is RedshiftRetrievalJob:
+ offline_store_config = RedshiftOfflineStoreConfig(
+ cluster_id="feast-integration-tests",
+ region="us-west-2",
+ user="admin",
+ database="feast",
+ s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion",
+ iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role",
+ )
+ environment.test_repo_config.offline_store = offline_store_config
+ return RedshiftRetrievalJob(
+ query="query",
+ redshift_client="",
+ s3_resource="",
+ config=environment.test_repo_config,
+ full_feature_names=False,
+ )
+ elif request.param is SnowflakeRetrievalJob:
+ offline_store_config = SnowflakeOfflineStoreConfig(
+ type="snowflake.offline",
+ account="snow",
+ user="snow",
+ password="snow",
+ role="snow",
+ warehouse="snow",
+ database="FEAST",
+ schema="OFFLINE",
+ storage_integration_name="FEAST_S3",
+ blob_export_location="s3://feast-snowflake-offload/export",
+ )
+ environment.test_repo_config.offline_store = offline_store_config
+ environment.test_repo_config.project = "project"
+ return SnowflakeRetrievalJob(
+ query="query",
+ snowflake_conn=MagicMock(),
+ config=environment.test_repo_config,
+ full_feature_names=False,
+ )
+ elif request.param is AthenaRetrievalJob:
+ offline_store_config = AthenaOfflineStoreConfig(
+ data_source="athena",
+ region="athena",
+ database="athena",
+ workgroup="athena",
+ s3_staging_location="athena",
+ )
+
+ environment.test_repo_config.offline_store = offline_store_config
+ return AthenaRetrievalJob(
+ query="query",
+ athena_client="client",
+ s3_resource="",
+ config=environment.test_repo_config.offline_store,
+ full_feature_names=False,
+ )
+ elif request.param is MsSqlServerRetrievalJob:
+ return MsSqlServerRetrievalJob(
+ query="query",
+ engine=MagicMock(),
+ config=MsSqlServerOfflineStoreConfig(
+ connection_string="str"
+ ), # TODO: this does not match the RetrievalJob pattern; it is supposed to be a RepoConfig
+ full_feature_names=False,
+ )
+ elif request.param is PostgreSQLRetrievalJob:
+ offline_store_config = PostgreSQLOfflineStoreConfig(
+ host="str",
+ database="str",
+ user="str",
+ password="str",
+ )
+ environment.test_repo_config.offline_store = offline_store_config
+ return PostgreSQLRetrievalJob(
+ query="query",
+ config=environment.test_repo_config.offline_store,
+ full_feature_names=False,
+ )
+ elif request.param is SparkRetrievalJob:
+ offline_store_config = SparkOfflineStoreConfig()
+ environment.test_repo_config.offline_store = offline_store_config
+ return SparkRetrievalJob(
+ spark_session=MagicMock(),
+ query="str",
+ full_feature_names=False,
+ config=environment.test_repo_config,
+ )
+ elif request.param is TrinoRetrievalJob:
+ offline_store_config = SparkOfflineStoreConfig()
+ environment.test_repo_config.offline_store = offline_store_config
+ return TrinoRetrievalJob(
+ query="str",
+ client=MagicMock(),
+ config=environment.test_repo_config,
+ full_feature_names=False,
+ )
+ else:
+ return request.param()
+
+
+def test_to_sql():
+ assert MockRetrievalJob().to_sql() is None
+
+
+@pytest.mark.parametrize("timeout", (None, 30))
+def test_to_df_timeout(retrieval_job, timeout: Optional[int]):
+ with patch.object(retrieval_job, "_to_df_internal") as mock_to_df_internal:
+ retrieval_job.to_df(timeout=timeout)
+ mock_to_df_internal.assert_called_once_with(timeout=timeout)
+
+
+@pytest.mark.parametrize("timeout", (None, 30))
+def test_to_arrow_timeout(retrieval_job, timeout: Optional[int]):
+ with patch.object(retrieval_job, "_to_arrow_internal") as mock_to_arrow_internal:
+ retrieval_job.to_arrow(timeout=timeout)
+ mock_to_arrow_internal.assert_called_once_with(timeout=timeout)
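Both parametrized tests rely on the same patch-and-forward check; reduced to its core, the pattern is:

```python
# The pattern behind test_to_df_timeout/test_to_arrow_timeout: patch the
# internal method, call the public API, and verify the timeout is forwarded.
from unittest.mock import patch

job = MockRetrievalJob()
with patch.object(job, "_to_df_internal") as mock_internal:
    job.to_df(timeout=30)
mock_internal.assert_called_once_with(timeout=30)
```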
diff --git a/sdk/python/tests/unit/infra/offline_stores/test_redshift.py b/sdk/python/tests/unit/infra/offline_stores/test_redshift.py
new file mode 100644
index 0000000000..049977489b
--- /dev/null
+++ b/sdk/python/tests/unit/infra/offline_stores/test_redshift.py
@@ -0,0 +1,67 @@
+from unittest.mock import MagicMock, patch
+
+import pandas as pd
+import pyarrow as pa
+
+from feast import FeatureView
+from feast.infra.offline_stores import offline_utils
+from feast.infra.offline_stores.redshift import (
+ RedshiftOfflineStore,
+ RedshiftOfflineStoreConfig,
+)
+from feast.infra.offline_stores.redshift_source import RedshiftSource
+from feast.infra.utils import aws_utils
+from feast.repo_config import RepoConfig
+
+
+@patch.object(aws_utils, "upload_arrow_table_to_redshift")
+def test_offline_write_batch(
+ mock_upload_arrow_table_to_redshift: MagicMock,
+ simple_dataset_1: pd.DataFrame,
+):
+ repo_config = RepoConfig(
+ registry="registry",
+ project="project",
+ provider="local",
+ offline_store=RedshiftOfflineStoreConfig(
+ type="redshift",
+ region="us-west-2",
+ cluster_id="cluster_id",
+ database="database",
+ user="user",
+ iam_role="abcdef",
+ s3_staging_location="s3://bucket/path",
+ ),
+ )
+
+ batch_source = RedshiftSource(
+ name="test_source",
+ timestamp_field="ts",
+ table="table_name",
+ schema="schema_name",
+ )
+ feature_view = FeatureView(
+ name="test_view",
+ source=batch_source,
+ )
+
+ pa_dataset = pa.Table.from_pandas(simple_dataset_1)
+
+ # patch some more things so that the function can run
+ def mock_get_pyarrow_schema_from_batch_source(*args, **kwargs):
+ return pa_dataset.schema, pa_dataset.column_names
+
+ with patch.object(
+ offline_utils,
+ "get_pyarrow_schema_from_batch_source",
+ new=mock_get_pyarrow_schema_from_batch_source,
+ ):
+ RedshiftOfflineStore.offline_write_batch(
+ repo_config, feature_view, pa_dataset, progress=None
+ )
+
+ # check that we have included the fully qualified table name
+ mock_upload_arrow_table_to_redshift.assert_called_once()
+
+ call = mock_upload_arrow_table_to_redshift.call_args_list[0]
+ assert call.kwargs["table_name"] == "schema_name.table_name"
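The final assertion uses mock call inspection; the same idiom in isolation (call_args.kwargs requires Python 3.8+):

```python
# Call-inspection idiom used above: record a call on a mock, then check one
# keyword argument of the first recorded call.
from unittest.mock import MagicMock

m = MagicMock()
m(table_name="schema_name.table_name")
assert m.call_args_list[0].kwargs["table_name"] == "schema_name.table_name"
```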
diff --git a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py
index 9dca44dc09..6045dbc6ce 100644
--- a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py
+++ b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py
@@ -3,7 +3,7 @@
import boto3
import pytest
-from moto import mock_dynamodb2
+from moto import mock_dynamodb
from feast.infra.offline_stores.file import FileOfflineStoreConfig
from feast.infra.online_stores.dynamodb import (
@@ -159,7 +159,7 @@ def test_dynamodb_table_dynamodb_resource():
assert dynamodb_resource.meta.client.meta.endpoint_url == endpoint_url
-@mock_dynamodb2
+@mock_dynamodb
@pytest.mark.parametrize("n_samples", [5, 50, 100])
def test_dynamodb_online_store_online_read(
repo_config, dynamodb_online_store, n_samples
@@ -180,7 +180,7 @@ def test_dynamodb_online_store_online_read(
assert [item[1] for item in returned_items] == list(features)
-@mock_dynamodb2
+@mock_dynamodb
@pytest.mark.parametrize("n_samples", [5, 50, 100])
def test_dynamodb_online_store_online_write_batch(
repo_config, dynamodb_online_store, n_samples
@@ -207,7 +207,7 @@ def test_dynamodb_online_store_online_write_batch(
assert [item[1] for item in stored_items] == list(features)
-@mock_dynamodb2
+@mock_dynamodb
def test_dynamodb_online_store_update(repo_config, dynamodb_online_store):
"""Test DynamoDBOnlineStore update method."""
# create dummy table to keep
@@ -236,7 +236,7 @@ def test_dynamodb_online_store_update(repo_config, dynamodb_online_store):
assert existing_tables[0] == f"test_aws.{db_table_keep_name}"
-@mock_dynamodb2
+@mock_dynamodb
def test_dynamodb_online_store_teardown(repo_config, dynamodb_online_store):
"""Test DynamoDBOnlineStore teardown method."""
db_table_delete_name_one = f"{TABLE_NAME}_delete_teardown_1"
@@ -262,7 +262,7 @@ def test_dynamodb_online_store_teardown(repo_config, dynamodb_online_store):
assert len(existing_tables) == 0
-@mock_dynamodb2
+@mock_dynamodb
def test_dynamodb_online_store_online_read_unknown_entity(
repo_config, dynamodb_online_store
):
@@ -301,7 +301,7 @@ def test_dynamodb_online_store_online_read_unknown_entity(
assert returned_items[pos] == (None, None)
-@mock_dynamodb2
+@mock_dynamodb
def test_write_batch_non_duplicates(repo_config, dynamodb_online_store):
"""Test DynamoDBOnline Store deduplicate write batch request items."""
dynamodb_tbl = f"{TABLE_NAME}_batch_non_duplicates"
@@ -321,7 +321,7 @@ def test_write_batch_non_duplicates(repo_config, dynamodb_online_store):
assert len(returned_items) == len(data)
-@mock_dynamodb2
+@mock_dynamodb
def test_dynamodb_online_store_online_read_unknown_entity_end_of_batch(
repo_config, dynamodb_online_store
):
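moto 4 removed the service-versioned decorator names, so mock_dynamodb2 no longer exists and the rename throughout this file is mechanical:

```python
# moto >= 4 dropped the versioned decorators; mock_dynamodb replaces mock_dynamodb2.
from moto import mock_dynamodb  # was: from moto import mock_dynamodb2

@mock_dynamodb
def test_example():
    ...  # boto3 DynamoDB calls in here are intercepted by moto
```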
diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py
index 22fd1e696f..42229f8683 100644
--- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py
+++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py
@@ -45,8 +45,8 @@ def test_nullable_online_store_aws():
entity_key_serialization_version: 2
"""
),
- expect_error="__root__ -> offline_store -> cluster_id\n"
- " field required (type=value_error.missing)",
+ expect_error="__root__ -> offline_store -> __root__\n"
+ " please specify either cluster_id & user if using provisioned clusters, or workgroup if using serverless (type=value_error)",
)
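The new expected error reflects a root validator on RedshiftOfflineStoreConfig: either the provisioned-cluster fields or a serverless workgroup must be provided. A sketch of the two shapes the validator should now accept, with the other required fields (region, database, s3_staging_location, and so on) left out:

```python
# Sketch only: the two valid shapes implied by the validator message above;
# remaining required fields are omitted for brevity.
provisioned = {"cluster_id": "my-cluster", "user": "admin"}  # provisioned cluster
serverless = {"workgroup": "my-workgroup"}                   # Redshift Serverless
```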
diff --git a/sdk/python/tests/unit/infra/test_inference_unit_tests.py b/sdk/python/tests/unit/infra/test_inference_unit_tests.py
index 46a131e1b5..a108d397bd 100644
--- a/sdk/python/tests/unit/infra/test_inference_unit_tests.py
+++ b/sdk/python/tests/unit/infra/test_inference_unit_tests.py
@@ -194,7 +194,10 @@ def test_feature_view_inference_respects_basic_inference():
[feature_view_1],
[entity1],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
assert len(feature_view_1.schema) == 2
@@ -209,7 +212,10 @@ def test_feature_view_inference_respects_basic_inference():
[feature_view_2],
[entity1, entity2],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
assert len(feature_view_2.schema) == 3
@@ -240,7 +246,10 @@ def test_feature_view_inference_on_entity_value_types():
[feature_view_1],
[entity1],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
@@ -310,7 +319,10 @@ def test_feature_view_inference_on_entity_columns(simple_dataset_1):
[feature_view_1],
[entity1],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
@@ -345,7 +357,10 @@ def test_feature_view_inference_on_feature_columns(simple_dataset_1):
[feature_view_1],
[entity1],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
@@ -397,7 +412,10 @@ def test_update_feature_services_with_inferred_features(simple_dataset_1):
[feature_view_1, feature_view_2],
[entity1],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
feature_service.infer_features(
@@ -454,7 +472,10 @@ def test_update_feature_services_with_specified_features(simple_dataset_1):
[feature_view_1, feature_view_2],
[entity1],
RepoConfig(
- provider="local", project="test", entity_key_serialization_version=2
+ provider="local",
+ project="test",
+ entity_key_serialization_version=2,
+ registry="dummy_registry.pb",
),
)
assert len(feature_view_1.features) == 1
diff --git a/sdk/python/tests/unit/infra/test_local_registry.py b/sdk/python/tests/unit/infra/test_local_registry.py
index 1e3b2aec88..b5e7d23a97 100644
--- a/sdk/python/tests/unit/infra/test_local_registry.py
+++ b/sdk/python/tests/unit/infra/test_local_registry.py
@@ -39,7 +39,7 @@
def local_registry() -> Registry:
fd, registry_path = mkstemp()
registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600)
- return Registry(registry_config, None)
+ return Registry("project", registry_config, None)
@pytest.mark.parametrize(
@@ -443,7 +443,7 @@ def test_apply_data_source(test_registry: Registry):
def test_commit():
fd, registry_path = mkstemp()
registry_config = RegistryConfig(path=registry_path, cache_ttl_seconds=600)
- test_registry = Registry(registry_config, None)
+ test_registry = Registry("project", registry_config, None)
entity = Entity(
name="driver_car_id",
@@ -484,7 +484,7 @@ def test_commit():
validate_project_uuid(project_uuid, test_registry)
# Create new registry that points to the same store
- registry_with_same_store = Registry(registry_config, None)
+ registry_with_same_store = Registry("project", registry_config, None)
# Retrieving the entity should fail since the store is empty
entities = registry_with_same_store.list_entities(project)
@@ -495,7 +495,7 @@ def test_commit():
test_registry.commit()
# Reconstruct the new registry in order to read the newly written store
- registry_with_same_store = Registry(registry_config, None)
+ registry_with_same_store = Registry("project", registry_config, None)
# Retrieving the entity should now succeed
entities = registry_with_same_store.list_entities(project)
diff --git a/sdk/python/tests/unit/online_store/test_online_retrieval.py b/sdk/python/tests/unit/online_store/test_online_retrieval.py
index 6f96e7b5d9..926c7226fc 100644
--- a/sdk/python/tests/unit/online_store/test_online_retrieval.py
+++ b/sdk/python/tests/unit/online_store/test_online_retrieval.py
@@ -137,7 +137,7 @@ def test_online() -> None:
fs_fast_ttl = FeatureStore(
config=RepoConfig(
registry=RegistryConfig(
- path=store.config.registry, cache_ttl_seconds=cache_ttl
+ path=store.config.registry.path, cache_ttl_seconds=cache_ttl
),
online_store=store.config.online_store,
project=store.project,
@@ -161,7 +161,7 @@ def test_online() -> None:
assert result["trips"] == [7]
# Rename the registry.db so that it cant be used for refreshes
- os.rename(store.config.registry, store.config.registry + "_fake")
+ os.rename(store.config.registry.path, store.config.registry.path + "_fake")
# Wait for registry to expire
time.sleep(cache_ttl)
@@ -180,7 +180,7 @@ def test_online() -> None:
).to_dict()
# Restore registry.db so that we can see if it actually reloads registry
- os.rename(store.config.registry + "_fake", store.config.registry)
+ os.rename(store.config.registry.path + "_fake", store.config.registry.path)
# Test if registry is actually reloaded and whether results return
result = fs_fast_ttl.get_online_features(
@@ -200,7 +200,7 @@ def test_online() -> None:
fs_infinite_ttl = FeatureStore(
config=RepoConfig(
registry=RegistryConfig(
- path=store.config.registry, cache_ttl_seconds=0
+ path=store.config.registry.path, cache_ttl_seconds=0
),
online_store=store.config.online_store,
project=store.project,
@@ -227,7 +227,7 @@ def test_online() -> None:
time.sleep(2)
# Rename the registry.db so that it cant be used for refreshes
- os.rename(store.config.registry, store.config.registry + "_fake")
+ os.rename(store.config.registry.path, store.config.registry.path + "_fake")
# TTL is infinite so this method should use registry cache
result = fs_infinite_ttl.get_online_features(
@@ -248,7 +248,7 @@ def test_online() -> None:
fs_infinite_ttl.refresh_registry()
# Restore registry.db so that teardown works
- os.rename(store.config.registry + "_fake", store.config.registry)
+ os.rename(store.config.registry.path + "_fake", store.config.registry.path)
def test_online_to_df():
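store.config.registry is now a RegistryConfig object rather than a plain path string, so every filesystem manipulation in this test dereferences .path:

```python
# store.config.registry is a RegistryConfig, not a str; file operations go
# through .path (store is assumed to be a FeatureStore as in this test).
import os

registry_path = store.config.registry.path
os.rename(registry_path, registry_path + "_fake")  # hide the registry from refreshes
os.rename(registry_path + "_fake", registry_path)  # restore it for teardown
```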
diff --git a/sdk/python/tests/unit/test_data_sources.py b/sdk/python/tests/unit/test_data_sources.py
index 1e8fb75c3e..990c5d3b69 100644
--- a/sdk/python/tests/unit/test_data_sources.py
+++ b/sdk/python/tests/unit/test_data_sources.py
@@ -118,7 +118,6 @@ def test_proto_conversion():
snowflake_source = SnowflakeSource(
name="test_source",
database="test_database",
- warehouse="test_warehouse",
schema="test_schema",
table="test_table",
timestamp_field="event_timestamp",
@@ -191,3 +190,46 @@ def test_column_conflict():
timestamp_field="event_timestamp",
created_timestamp_column="event_timestamp",
)
+
+
+@pytest.mark.parametrize(
+ "source_kwargs,expected_name",
+ [
+ (
+ {
+ "database": "test_database",
+ "schema": "test_schema",
+ "table": "test_table",
+ },
+ "test_database.test_schema.test_table",
+ ),
+ (
+ {"database": "test_database", "table": "test_table"},
+ "test_database.public.test_table",
+ ),
+ ({"table": "test_table"}, "public.test_table"),
+ ({"database": "test_database", "table": "b.c"}, "test_database.b.c"),
+ ({"database": "test_database", "table": "a.b.c"}, "a.b.c"),
+ (
+ {
+ "database": "test_database",
+ "schema": "test_schema",
+ "query": "select * from abc",
+ },
+ "",
+ ),
+ ],
+)
+def test_redshift_fully_qualified_table_name(source_kwargs, expected_name):
+ redshift_source = RedshiftSource(
+ name="test_source",
+ timestamp_field="event_timestamp",
+ created_timestamp_column="created_timestamp",
+ field_mapping={"foo": "bar"},
+ description="test description",
+ tags={"test": "test"},
+ owner="test@gmail.com",
+ **source_kwargs,
+ )
+
+ assert redshift_source.redshift_options.fully_qualified_table_name == expected_name
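The parametrization encodes a simple qualification rule. A hypothetical helper (not the library API; the real logic lives behind redshift_options.fully_qualified_table_name) that reproduces every expected value above:

```python
# Hypothetical helper reproducing the expected names in the parametrization.
def fully_qualified(database: str = "", schema: str = "", table: str = "") -> str:
    if not table:
        return ""  # query-based sources have no table name
    parts = table.split(".")
    if len(parts) == 3:  # already database.schema.table
        return table
    if len(parts) == 2:  # schema.table: prepend the database
        return f"{database}.{table}"
    prefix = f"{database}." if database else ""
    return f"{prefix}{schema or 'public'}.{table}"

assert fully_qualified("test_database", "test_schema", "test_table") == "test_database.test_schema.test_table"
assert fully_qualified(table="test_table") == "public.test_table"
```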
diff --git a/sdk/python/tests/unit/test_feature.py b/sdk/python/tests/unit/test_feature.py
index a8cfeef3da..ca0dce4445 100644
--- a/sdk/python/tests/unit/test_feature.py
+++ b/sdk/python/tests/unit/test_feature.py
@@ -27,3 +27,6 @@ def test_field_serialization_with_description():
assert serialized_field.description == expected_description
assert field_from_feature.description == expected_description
+
+ field = Field.from_proto(serialized_field)
+ assert field.description == expected_description
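The added round-trip assertion can be read as a property: description must survive proto serialization in both directions. Assuming Field.to_proto is the inverse of from_proto, the property looks like:

```python
# Round-trip sketch; assumes Field.to_proto exists as the inverse of from_proto.
from feast.field import Field
from feast.types import Float32

field = Field(name="my_feature", dtype=Float32, description="a description")
assert Field.from_proto(field.to_proto()).description == "a description"
```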
diff --git a/sdk/python/tests/unit/test_sql_registry.py b/sdk/python/tests/unit/test_sql_registry.py
index 51cb430c9e..39896d3a9d 100644
--- a/sdk/python/tests/unit/test_sql_registry.py
+++ b/sdk/python/tests/unit/test_sql_registry.py
@@ -72,7 +72,7 @@ def pg_registry():
path=f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@127.0.0.1:{container_port}/{POSTGRES_DB}",
)
- yield SqlRegistry(registry_config, None)
+ yield SqlRegistry(registry_config, "project", None)
container.stop()
@@ -91,7 +91,7 @@ def mysql_registry():
container.start()
- # The log string uses '8.0.*' since the version might be changed as new Docker images are pushed.
+ # The log string matches any version, since the MySQL version may change as new Docker images are pushed.
- log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. Version: '8.0.*' socket: '/var/run/mysqld/mysqld.sock' port: 3306"
+ log_string_to_wait_for = "/usr/sbin/mysqld: ready for connections. Version: '(\d+(\.\d+){1,2})' socket: '/var/run/mysqld/mysqld.sock' port: 3306" # noqa: W605
waited = wait_for_logs(
container=container,
predicate=log_string_to_wait_for,
@@ -106,7 +106,7 @@ def mysql_registry():
path=f"mysql+mysqldb://{POSTGRES_USER}:{POSTGRES_PASSWORD}@127.0.0.1:{container_port}/{POSTGRES_DB}",
)
- yield SqlRegistry(registry_config, None)
+ yield SqlRegistry(registry_config, "project", None)
container.stop()
@@ -118,7 +118,7 @@ def sqlite_registry():
path="sqlite://",
)
- yield SqlRegistry(registry_config, None)
+ yield SqlRegistry(registry_config, "project", None)
@pytest.mark.skipif(
@@ -565,6 +565,76 @@ def test_apply_data_source(sql_registry):
sql_registry.teardown()
+@pytest.mark.skipif(
+ sys.platform == "darwin" and "GITHUB_REF" in os.environ,
+ reason="does not run on mac github actions",
+)
+@pytest.mark.parametrize(
+ "sql_registry",
+ [
+ lazy_fixture("mysql_registry"),
+ lazy_fixture("pg_registry"),
+ lazy_fixture("sqlite_registry"),
+ ],
+)
+def test_registry_cache(sql_registry):
+ # Create Feature Views
+ batch_source = FileSource(
+ name="test_source",
+ file_format=ParquetFormat(),
+ path="file://feast/*",
+ timestamp_field="ts_col",
+ created_timestamp_column="timestamp",
+ )
+
+ entity = Entity(name="fs1_my_entity_1", join_keys=["test"])
+
+ fv1 = FeatureView(
+ name="my_feature_view_1",
+ schema=[
+ Field(name="fs1_my_feature_1", dtype=Int64),
+ Field(name="fs1_my_feature_2", dtype=String),
+ Field(name="fs1_my_feature_3", dtype=Array(String)),
+ Field(name="fs1_my_feature_4", dtype=Array(Bytes)),
+ ],
+ entities=[entity],
+ tags={"team": "matchmaking"},
+ source=batch_source,
+ ttl=timedelta(minutes=5),
+ )
+
+ project = "project"
+
+ # Register data source and feature view
+ sql_registry.apply_data_source(batch_source, project)
+ sql_registry.apply_feature_view(fv1, project)
+ registry_feature_views_cached = sql_registry.list_feature_views(
+ project, allow_cache=True
+ )
+ registry_data_sources_cached = sql_registry.list_data_sources(
+ project, allow_cache=True
+ )
+ # The cache has not been refreshed yet, so these reads miss
+ assert len(registry_feature_views_cached) == 0
+ assert len(registry_data_sources_cached) == 0
+ sql_registry.refresh(project)
+ # Now objects exist
+ registry_feature_views_cached = sql_registry.list_feature_views(
+ project, allow_cache=True
+ )
+ registry_data_sources_cached = sql_registry.list_data_sources(
+ project, allow_cache=True
+ )
+ assert len(registry_feature_views_cached) == 1
+ assert len(registry_data_sources_cached) == 1
+ registry_feature_view = registry_feature_views_cached[0]
+ assert registry_feature_view.batch_source == batch_source
+ registry_data_source = registry_data_sources_cached[0]
+ assert registry_data_source == batch_source
+
+ sql_registry.teardown()
+
+
@pytest.mark.skipif(
sys.platform == "darwin" and "GITHUB_REF" in os.environ,
reason="does not run on mac github actions",
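The new test pins down the cache contract: allow_cache=True reads return nothing until refresh() populates the cache. Reduced to its core:

```python
# Cache contract exercised by test_registry_cache, using the same fixtures.
sql_registry.apply_feature_view(fv1, project)
assert sql_registry.list_feature_views(project, allow_cache=True) == []  # not refreshed yet
sql_registry.refresh(project)
assert len(sql_registry.list_feature_views(project, allow_cache=True)) == 1
```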
diff --git a/sdk/python/tests/unit/test_type_map.py b/sdk/python/tests/unit/test_type_map.py
index 0ba259ab73..78ff15fe93 100644
--- a/sdk/python/tests/unit/test_type_map.py
+++ b/sdk/python/tests/unit/test_type_map.py
@@ -1,4 +1,5 @@
import numpy as np
+import pytest
from feast.type_map import (
feast_value_type_to_python_type,
@@ -26,3 +27,24 @@ def test_null_unix_timestamp_list():
converted = feast_value_type_to_python_type(protos[0])
assert converted[0] is None
+
+
+@pytest.mark.parametrize(
+ "values",
+ (
+ np.array([True]),
+ np.array([False]),
+ np.array([0]),
+ np.array([1]),
+ [True],
+ [False],
+ [0],
+ [1],
+ ),
+)
+def test_python_values_to_proto_values_bool(values):
+ protos = python_values_to_proto_values(values, ValueType.BOOL)
+ converted = feast_value_type_to_python_type(protos[0])
+
+ assert converted is bool(values[0])
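The parametrization covers numpy and plain-Python truthy/falsy inputs; the invariant is that ValueType.BOOL coerces 0/1 to the Python bool singletons, which is why the `is` comparison is safe:

```python
# Core invariant of the test above, with names as imported in this module:
# ints 0/1 round-trip to the bool singletons through ValueType.BOOL.
protos = python_values_to_proto_values([1], ValueType.BOOL)
assert feast_value_type_to_python_type(protos[0]) is True
```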
diff --git a/setup.cfg b/setup.cfg
index e2d707e272..2781169a71 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -20,3 +20,6 @@ exclude = .git,__pycache__,docs/conf.py,dist,feast/protos,feast/embedded_go/lib
files=feast,tests
ignore_missing_imports=true
exclude=feast/embedded_go/lib
+
+[bdist_wheel]
+universal = 1
diff --git a/setup.py b/setup.py
index 1995413da2..8b5a903e0a 100644
--- a/setup.py
+++ b/setup.py
@@ -50,40 +50,51 @@
"colorama>=0.3.9,<1",
"dill==0.3.*",
"fastavro>=1.1.0,<2",
- "google-api-core>=1.23.0,<3",
- "googleapis-common-protos>=1.52.*,<2",
- "grpcio==1.51.1",
- "grpcio-reflection>=1.47.0,<2",
+ "grpcio>=1.56.2,<2",
+ "grpcio-tools>=1.56.2,<2",
+ "grpcio-reflection>=1.56.2,<2",
+ "grpcio-health-checking>=1.56.2,<2",
+ "mypy-protobuf==3.1",
"Jinja2>=2,<4",
"jsonschema",
"mmh3",
- "numpy<1.22,<3",
- "pandas<1.4.3,<2",
- "pandavro==1.5.*", # For some reason pandavro higher than 1.5.* only support pandas less than 1.3.
- "protobuf<5,>3",
+ "numpy>=1.22,<1.25",
+ "pandas>=1.4.3,<2",
+ # For some reason, pandavro higher than 1.5.* only supports pandas less than 1.3.
+ "pandavro~=1.5.0",
+ # Versions higher than 4.23.4 seem to cause a segfault
+ "protobuf<4.23.4,>3.20",
"proto-plus>=1.20.0,<2",
- "pyarrow>=4,<9",
+ "pyarrow>=4,<12",
"pydantic>=1,<2",
"pygments>=2.12.0,<3",
- "PyYAML>=5.4.*,<7",
+ "PyYAML>=5.4.0,<7",
+ "requests",
"SQLAlchemy[mypy]>1,<2",
"tabulate>=0.8.0,<1",
"tenacity>=7,<9",
"toml>=0.10.0,<1",
"tqdm>=4,<5",
- "typeguard",
- "fastapi>=0.68.0,<1",
+ "typeguard==2.13.3",
+ "fastapi>=0.68.0,<0.100",
"uvicorn[standard]>=0.14.0,<1",
- "dask>=2021.*",
+ "gunicorn",
+ "dask>=2021.1.0",
"bowler", # Needed for automatic repo upgrades
+ # FastAPI does not correctly pull in the starlette dependency on httpx; see https://github.com/tiangolo/fastapi/issues/5656.
+ "httpx>=0.23.3",
+ "importlib-resources>=6.0.0,<7",
+ "importlib_metadata>=6.8.0,<7"
]
GCP_REQUIRED = [
+ "google-api-core>=1.23.0,<3",
+ "googleapis-common-protos>=1.52.0,<2",
"google-cloud-bigquery[pandas]>=2,<4",
"google-cloud-bigquery-storage >= 2.0.0,<3",
- "google-cloud-datastore>=2.1.0,<3",
- "google-cloud-storage>=1.34.0,<3",
- "google-cloud-bigtable>=2.11.0,<3",
+ "google-cloud-datastore>=2.1.*,<3",
+ "google-cloud-storage>=1.34.*,<3",
+ "google-cloud-bigtable>=2.11.*,<3",
]
REDIS_REQUIRED = [
@@ -139,6 +150,14 @@
"pymssql",
]
+ROCKSET_REQUIRED = [
+ "rockset>=1.0.3",
+]
+
+HAZELCAST_REQUIRED = [
+ "hazelcast-python-client>=5.1",
+]
+
CI_REQUIRED = (
[
"build",
@@ -182,6 +201,7 @@
"types-requests",
"types-setuptools",
"types-tabulate",
+ "virtualenv<20.24.2"
]
+ GCP_REQUIRED
+ REDIS_REQUIRED
@@ -196,6 +216,8 @@
+ HBASE_REQUIRED
+ CASSANDRA_REQUIRED
+ AZURE_REQUIRED
+ + ROCKSET_REQUIRED
+ + HAZELCAST_REQUIRED
)
AFFIRM_REQUIRED = [
@@ -566,9 +588,10 @@ def copy_extensions_to_source(self):
"mysql": MYSQL_REQUIRED,
"ge": GE_REQUIRED,
"hbase": HBASE_REQUIRED,
- "go": GO_REQUIRED,
"docs": DOCS_REQUIRED,
"cassandra": CASSANDRA_REQUIRED,
+ "hazelcast": HAZELCAST_REQUIRED,
+ "rockset": ROCKSET_REQUIRED,
},
include_package_data=True,
license="Apache",
@@ -581,6 +604,7 @@ def copy_extensions_to_source(self):
"Programming Language :: Python :: 3.7",
],
entry_points={"console_scripts": ["feast=feast.cli:cli"]},
+ use_scm_version=use_scm_version,
setup_requires=[
# "setuptools_scm",
"grpcio==1.51.1",
diff --git a/ui/CONTRIBUTING.md b/ui/CONTRIBUTING.md
index 970bd3676c..3c13759e26 100644
--- a/ui/CONTRIBUTING.md
+++ b/ui/CONTRIBUTING.md
@@ -91,7 +91,7 @@ The Feast UI is published as a module to NPM and can be found here: https://www.
### Requirements
To publish a new version of the module, you will need:
-- to be part of the @feast-dev team in NPM. Ask `#feast-development` on http://slack.feast.dev to add you if necessary.
+- to be part of the @feast-dev team in NPM.
- to [login to your NPM account on the command line](https://docs.npmjs.com/cli/v8/commands/npm-adduser).
### Steps for Publishing
diff --git a/ui/package.json b/ui/package.json
index fd8262826c..e1c468046d 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -1,6 +1,6 @@
{
"name": "@feast-dev/feast-ui",
- "version": "0.28.0",
+ "version": "0.34.1",
"private": false,
"files": [
"dist"
@@ -43,6 +43,7 @@
"prop-types": "^15.8.1",
"protobufjs": "^7.1.1",
"query-string": "^7.1.1",
+ "react-code-blocks": "^0.0.9-0",
"react-query": "^3.34.12",
"react-router-dom": "6",
"react-scripts": "^5.0.0",
diff --git a/ui/src/FeastUISansProviders.tsx b/ui/src/FeastUISansProviders.tsx
index 8a0e0b94db..8a12abdc39 100644
--- a/ui/src/FeastUISansProviders.tsx
+++ b/ui/src/FeastUISansProviders.tsx
@@ -62,6 +62,8 @@ const FeastUISansProviders = ({
isCustom: true,
}
: { projectsListPromise: defaultProjectListPromise(), isCustom: false };
+
+ const BASE_URL = process.env.PUBLIC_URL || ""
return (
@@ -74,9 +76,9 @@ const FeastUISansProviders = ({
>
- }>
+ }>
} />
- }>
+ }>
} />
} />
{
render: (name: string) => {
return (
{name}
diff --git a/ui/src/components/FeaturesListDisplay.tsx b/ui/src/components/FeaturesListDisplay.tsx
index a40730c687..2a0628b0f5 100644
--- a/ui/src/components/FeaturesListDisplay.tsx
+++ b/ui/src/components/FeaturesListDisplay.tsx
@@ -21,8 +21,8 @@ const FeaturesList = ({
field: "name",
render: (item: string) => (
{item}
diff --git a/ui/src/components/ObjectsCountStats.tsx b/ui/src/components/ObjectsCountStats.tsx
index bf1dd2dc9d..eff3f8a2ca 100644
--- a/ui/src/components/ObjectsCountStats.tsx
+++ b/ui/src/components/ObjectsCountStats.tsx
@@ -55,7 +55,7 @@ const ObjectsCountStats = () => {
- onClick={() => navigate(`/p/${projectName}/feature-service`)}
+ onClick={() => navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-service`)}
description="Feature Services→"
title={data.featureServices}
reverse
@@ -65,7 +65,7 @@ const ObjectsCountStats = () => {
- onClick={() => navigate(`/p/${projectName}/feature-view`)}
+ onClick={() => navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-view`)}
title={data.featureViews}
reverse
/>
@@ -74,7 +74,7 @@ const ObjectsCountStats = () => {
- onClick={() => navigate(`/p/${projectName}/entity`)}
+ onClick={() => navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/entity`)}
title={data.entities}
reverse
/>
@@ -83,7 +83,7 @@ const ObjectsCountStats = () => {
- onClick={() => navigate(`/p/${projectName}/data-source`)}
+ onClick={() => navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/data-source`)}
title={data.dataSources}
reverse
/>
diff --git a/ui/src/components/ProjectSelector.tsx b/ui/src/components/ProjectSelector.tsx
index 1bb7ebf85a..edbcf9d98f 100644
--- a/ui/src/components/ProjectSelector.tsx
+++ b/ui/src/components/ProjectSelector.tsx
@@ -22,7 +22,7 @@ const ProjectSelector = () => {
const basicSelectId = useGeneratedHtmlId({ prefix: "basicSelect" });
const onChange = (e: React.ChangeEvent) => {
- navigate(`/p/${e.target.value}`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${e.target.value}`);
};
return (
diff --git a/ui/src/index.tsx b/ui/src/index.tsx
index e38570929d..7559d02ebf 100644
--- a/ui/src/index.tsx
+++ b/ui/src/index.tsx
@@ -97,6 +97,13 @@ ReactDOM.render(
reactQueryClient={queryClient}
feastUIConfigs={{
tabsRegistry: tabsRegistry,
+ projectListPromise: fetch((process.env.PUBLIC_URL || "") + "/projects-list.json", {
+ headers: {
+ "Content-Type": "application/json",
+ },
+ }).then((res) => {
+ return res.json();
+ })
}}
/>
,
diff --git a/ui/src/pages/RootProjectSelectionPage.tsx b/ui/src/pages/RootProjectSelectionPage.tsx
index 424e93c85d..d287342055 100644
--- a/ui/src/pages/RootProjectSelectionPage.tsx
+++ b/ui/src/pages/RootProjectSelectionPage.tsx
@@ -22,12 +22,12 @@ const RootProjectSelectionPage = () => {
useEffect(() => {
if (data && data.default) {
// If a default is set, redirect there.
- navigate(`/p/${data.default}`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${data.default}`);
}
if (data && data.projects.length === 1) {
// If there is only one project, redirect there.
- navigate(`/p/${data.projects[0].id}`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${data.projects[0].id}`);
}
}, [data, navigate]);
@@ -39,7 +39,7 @@ const RootProjectSelectionPage = () => {
title={`${item.name}`}
description={item?.description || ""}
onClick={() => {
- navigate(`/p/${item.id}`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${item.id}`);
}}
/>
diff --git a/ui/src/pages/Sidebar.tsx b/ui/src/pages/Sidebar.tsx
index 9fc1a532f2..2b652fc08d 100644
--- a/ui/src/pages/Sidebar.tsx
+++ b/ui/src/pages/Sidebar.tsx
@@ -60,7 +60,7 @@ const SideNav = () => {
name: "Home",
id: htmlIdGenerator("basicExample")(),
onClick: () => {
- navigate(`/p/${projectName}/`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/`);
},
items: [
{
@@ -68,7 +68,7 @@ const SideNav = () => {
id: htmlIdGenerator("dataSources")(),
icon: ,
onClick: () => {
- navigate(`/p/${projectName}/data-source`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/data-source`);
},
isSelected: useMatchSubpath("data-source"),
},
@@ -77,7 +77,7 @@ const SideNav = () => {
id: htmlIdGenerator("entities")(),
icon: ,
onClick: () => {
- navigate(`/p/${projectName}/entity`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/entity`);
},
isSelected: useMatchSubpath("entity"),
},
@@ -86,7 +86,7 @@ const SideNav = () => {
id: htmlIdGenerator("featureView")(),
icon: ,
onClick: () => {
- navigate(`/p/${projectName}/feature-view`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-view`);
},
isSelected: useMatchSubpath("feature-view"),
},
@@ -95,7 +95,7 @@ const SideNav = () => {
id: htmlIdGenerator("featureService")(),
icon: ,
onClick: () => {
- navigate(`/p/${projectName}/feature-service`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-service`);
},
isSelected: useMatchSubpath("feature-service"),
},
@@ -104,7 +104,7 @@ const SideNav = () => {
id: htmlIdGenerator("savedDatasets")(),
icon: ,
onClick: () => {
- navigate(`/p/${projectName}/data-set`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/data-set`);
},
isSelected: useMatchSubpath("data-set"),
},
diff --git a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx
index c19e4ff50f..b7cd3c90fc 100644
--- a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx
+++ b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx
@@ -6,6 +6,7 @@ import {
EuiFlexGroup,
EuiFlexItem,
} from "@elastic/eui";
+import { CopyBlock, atomOneDark } from "react-code-blocks";
import { feast } from "../../protos";
import { toDate } from "../../utils/timestamp";
@@ -61,6 +62,24 @@ const BatchSourcePropertiesView = (props: BatchSourcePropertiesViewProps) => {
)}
+ {batchSource.bigqueryOptions && (
+
+ Source {batchSource.bigqueryOptions.table ? "Table" : "Query"}
+ {batchSource.bigqueryOptions.table ? (
+
+ {batchSource.bigqueryOptions.table}
+
+ ) :
+ }
+
+
+ )}
{batchSource.meta?.latestEventTimestamp && (
Latest Event
diff --git a/ui/src/pages/data-sources/DataSourcesListingTable.tsx b/ui/src/pages/data-sources/DataSourcesListingTable.tsx
index ad549f991e..e4f06d6bd0 100644
--- a/ui/src/pages/data-sources/DataSourcesListingTable.tsx
+++ b/ui/src/pages/data-sources/DataSourcesListingTable.tsx
@@ -21,8 +21,8 @@ const DatasourcesListingTable = ({
render: (name: string) => {
return (
{name}
diff --git a/ui/src/pages/entities/EntitiesListingTable.tsx b/ui/src/pages/entities/EntitiesListingTable.tsx
index 2a017b18aa..baf4ddb8e4 100644
--- a/ui/src/pages/entities/EntitiesListingTable.tsx
+++ b/ui/src/pages/entities/EntitiesListingTable.tsx
@@ -21,8 +21,8 @@ const EntitiesListingTable = ({ entities }: EntitiesListingTableProps) => {
render: (name: string) => {
return (
{name}
diff --git a/ui/src/pages/entities/FeatureViewEdgesList.tsx b/ui/src/pages/entities/FeatureViewEdgesList.tsx
index 95bc51c56d..ab1fbfb6df 100644
--- a/ui/src/pages/entities/FeatureViewEdgesList.tsx
+++ b/ui/src/pages/entities/FeatureViewEdgesList.tsx
@@ -54,8 +54,8 @@ const FeatureViewEdgesList = ({ fvNames }: FeatureViewEdgesListInterace) => {
render: (name: string) => {
return (
{name}
diff --git a/ui/src/pages/feature-services/FeatureServiceListingTable.tsx b/ui/src/pages/feature-services/FeatureServiceListingTable.tsx
index c81edeaeb5..13ffa76409 100644
--- a/ui/src/pages/feature-services/FeatureServiceListingTable.tsx
+++ b/ui/src/pages/feature-services/FeatureServiceListingTable.tsx
@@ -31,8 +31,8 @@ const FeatureServiceListingTable = ({
render: (name: string) => {
return (
{name}
diff --git a/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx b/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx
index 387320778f..f43a0cb68f 100644
--- a/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx
+++ b/ui/src/pages/feature-services/FeatureServiceOverviewTab.tsx
@@ -109,7 +109,7 @@ const FeatureServiceOverviewTab = () => {
tags={data.spec.tags}
createLink={(key, value) => {
return (
- `/p/${projectName}/feature-service?` +
+ `${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-service?` +
encodeSearchQueryString(`${key}:${value}`)
);
}}
@@ -133,7 +133,7 @@ const FeatureServiceOverviewTab = () => {
color="primary"
onClick={() => {
navigate(
- `/p/${projectName}/entity/${entity.name}`
+ `${process.env.PUBLIC_URL || ""}/p/${projectName}/entity/${entity.name}`
);
}}
onClickAriaLabel={entity.name}
diff --git a/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx b/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx
index fc98fe8e5e..44df7b5111 100644
--- a/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx
+++ b/ui/src/pages/feature-views/ConsumingFeatureServicesList.tsx
@@ -19,8 +19,8 @@ const ConsumingFeatureServicesList = ({
render: (name: string) => {
return (
{name}
diff --git a/ui/src/pages/feature-views/FeatureViewListingTable.tsx b/ui/src/pages/feature-views/FeatureViewListingTable.tsx
index e4eccecc97..ff1a31c416 100644
--- a/ui/src/pages/feature-views/FeatureViewListingTable.tsx
+++ b/ui/src/pages/feature-views/FeatureViewListingTable.tsx
@@ -32,8 +32,8 @@ const FeatureViewListingTable = ({
render: (name: string, item: genericFVType) => {
return (
{name} {(item.type === "ondemand" && ondemand) || (item.type === "stream" && stream)}
diff --git a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx
index 3bbb906e05..cde4f46d4e 100644
--- a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx
+++ b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx
@@ -96,7 +96,7 @@ const RegularFeatureViewOverviewTab = ({
{
- navigate(`/p/${projectName}/entity/${entity}`);
+ navigate(`${process.env.PUBLIC_URL || ""}/p/${projectName}/entity/${entity}`);
}}
onClickAriaLabel={entity}
data-test-sub="testExample1"
@@ -134,7 +134,7 @@ const RegularFeatureViewOverviewTab = ({
tags={data.spec.tags}
createLink={(key, value) => {
return (
- `/p/${projectName}/feature-view?` +
+ `${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-view?` +
encodeSearchQueryString(`${key}:${value}`)
);
}}
diff --git a/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx
index 3584cccdd8..99f82d3e74 100644
--- a/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx
+++ b/ui/src/pages/feature-views/StreamFeatureViewOverviewTab.tsx
@@ -96,8 +96,8 @@ const StreamFeatureViewOverviewTab = ({
{inputGroup?.name}
diff --git a/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx b/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx
index 156f6db1ec..f6856471e0 100644
--- a/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx
+++ b/ui/src/pages/feature-views/components/FeatureViewProjectionDisplayPanel.tsx
@@ -31,8 +31,8 @@ const FeatureViewProjectionDisplayPanel = (featureViewProjection: RequestDataDis
{featureViewProjection?.featureViewName}
diff --git a/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx b/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx
index e8e6854389..f3adaa28f0 100644
--- a/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx
+++ b/ui/src/pages/feature-views/components/RequestDataDisplayPanel.tsx
@@ -38,8 +38,8 @@ const RequestDataDisplayPanel = ({
{requestDataSource?.name}
diff --git a/ui/src/pages/features/FeatureOverviewTab.tsx b/ui/src/pages/features/FeatureOverviewTab.tsx
index e339c30fc9..cc7879b038 100644
--- a/ui/src/pages/features/FeatureOverviewTab.tsx
+++ b/ui/src/pages/features/FeatureOverviewTab.tsx
@@ -63,8 +63,8 @@ const FeatureOverviewTab = () => {
FeatureView
- href={`/p/${projectName}/feature-view/${FeatureViewName}`}
- to={`/p/${projectName}/feature-view/${FeatureViewName}`}>
+ href={`${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-view/${FeatureViewName}`}
+ to={`${process.env.PUBLIC_URL || ""}/p/${projectName}/feature-view/${FeatureViewName}`}>
{FeatureViewName}
diff --git a/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx b/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx
index a1a9708417..af794a35f9 100644
--- a/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx
+++ b/ui/src/pages/saved-data-sets/DatasetsListingTable.tsx
@@ -20,8 +20,8 @@ const DatasetsListingTable = ({ datasets }: DatasetsListingTableProps) => {
render: (name: string) => {
return (
{name}