Commit
fixup! trigger action on push to branch for testing purposes
IcaroG committed Jan 28, 2025
1 parent 4613275 commit 17c9baf
Showing 3 changed files with 52 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .github/scripts/copy-s3.sh
@@ -11,7 +11,7 @@ export AWS_REQUEST_CHECKSUM_CALCULATION=when_required
export AWS_RESPONSE_CHECKSUM_VALIDATION=when_required

# Upload the file to S3
-aws s3 cp "$source_path" "$target_path" --endpoint-url="$endpoint_url" --debug
+aws s3 cp "$source_path" "$target_path" --endpoint-url="$endpoint_url"

# Check if the upload was successful
if [ $? -eq 0 ]; then
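The two AWS_*_CHECKSUM_* variables are the AWS CLI settings that restrict request and response checksums to cases where they are required, typically used as a workaround for S3-compatible stores (such as Cloudflare R2) that do not accept the newer default checksums. As a rough sketch only, not part of the commit, the copy script could be exercised locally like this with placeholder credentials, paths, and endpoint; the arguments are source path, target path, and endpoint URL:

# Hypothetical invocation with placeholder values (not from the commit)
AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_DEFAULT_REGION=auto \
  bash .github/scripts/copy-s3.sh \
    "s3://example-bucket/example.duckdb" \
    "./example.duckdb" \
    "https://<account-id>.r2.cloudflarestorage.com"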
21 changes: 21 additions & 0 deletions .github/scripts/delete-s3.sh
@@ -0,0 +1,21 @@
#!/bin/bash

set -uxo pipefail

# Variables
file_path=$1
endpoint_url=$2

export AWS_REQUEST_CHECKSUM_CALCULATION=when_required
export AWS_RESPONSE_CHECKSUM_VALIDATION=when_required

# Delete the file from S3
aws s3 rm "$file_path" --endpoint-url="$endpoint_url"

# Check if the delete was successful
if [ $? -eq 0 ]; then
echo "File $file_path deleted"
else
echo "Failed to delete file $file_path"
exit 1
fi
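For reference, a minimal sketch of how the new delete script might be invoked outside the workflow, with placeholder credentials, path, and endpoint (the real values come from the workflow step further down); the arguments are the S3 path to remove and the endpoint URL:

# Hypothetical invocation with placeholder values (not from the commit)
AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_DEFAULT_REGION=auto \
  bash .github/scripts/delete-s3.sh \
    "s3://example-bucket/example.duckdb" \
    "https://<account-id>.r2.cloudflarestorage.com"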
37 changes: 30 additions & 7 deletions .github/workflows/check-duckdb-schema.yml
@@ -46,17 +46,40 @@ jobs:
          credentials_json: '${{ secrets.GOOGLE_BQ_ADMIN_CREDENTIALS_JSON }}'
          create_credentials_file: true

      # - name: Download file from Cloudflare R2
      #   env:
      #     AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_S3_API_ACCESS_KEY }}
      #     AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_S3_API_SECRET_KEY }}
      #     AWS_DEFAULT_REGION: "auto"
      #   run: bash .github/scripts/copy-s3.sh "$R2_S3_PATH" "$SQLMESH_DUCKDB_LOCAL_PATH" "${{ secrets.CLOUDFLARE_R2_S3_API_ENDPOINT }}"
      - name: Download file from Cloudflare R2
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_S3_API_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_S3_API_SECRET_KEY }}
          AWS_DEFAULT_REGION: "auto"
        run: bash .github/scripts/copy-s3.sh "$R2_S3_PATH" "$SQLMESH_DUCKDB_LOCAL_PATH" "${{ secrets.CLOUDFLARE_R2_S3_API_ENDPOINT }}"

      - name: Save copy of the database
        run: cp $SQLMESH_DUCKDB_LOCAL_PATH $SQLMESH_DUCKDB_LOCAL_PATH.bak

      - name: Initialize local data
        run: poetry run oso metrics local initialize --max-results-per-query 50 --max-days 3

      - name: Compare database files
        id: file-diff
        run: |
          # Compare the two database files using the Unix `diff` command
          if diff -q "$SQLMESH_DUCKDB_LOCAL_PATH" "$SQLMESH_DUCKDB_LOCAL_PATH.bak" > /dev/null; then
            echo "files_different=false" >> "$GITHUB_OUTPUT"
          else
            echo "files_different=true" >> "$GITHUB_OUTPUT"
          fi

      # Because of the R2 checksum issue, we first delete the old file before uploading the new one
      - name: Delete old Cloudflare R2 file
        if: steps.file-diff.outputs.files_different == 'true'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_S3_API_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_S3_API_SECRET_KEY }}
          AWS_DEFAULT_REGION: "auto"
        run: bash .github/scripts/delete-s3.sh "$R2_S3_PATH" "${{ secrets.CLOUDFLARE_R2_S3_API_ENDPOINT }}"

      - name: Upload file to Cloudflare R2
        if: steps.file-diff.outputs.files_different == 'true'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_S3_API_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_S3_API_SECRET_KEY }}
