[BOT] Conda Lock Update #4278

Workflow file for this run

name: Automerge

concurrency:
  group: ${{ github.repository }}-${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

on:
  push:
  pull_request:
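
# Pipeline, GCS and Assets are skipped when the workflow is triggered by a push
# from dependabot[bot]; dependabot pull requests run the Pipeline and are then
# merged automatically by the Automerge job.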
jobs:

  Pipeline:
    if: ${{ !(github.event_name != 'pull_request' && github.actor == 'dependabot[bot]') }}
    uses: ./.github/workflows/Tests.yml
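
  # Upload the arch-defs packages to the Google Cloud Storage bucket and
  # generate per-package "symbiflow-<package>-latest" pointer files; the actual
  # upload only happens on pushes to main.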
  GCS:
    if: ${{ !(github.event_name != 'pull_request' && github.actor == 'dependabot[bot]') }}
    needs: Pipeline
    container: ubuntu:bionic
    runs-on:
      - self-hosted
      - Linux
      - X64
    env:
      MAX_CORES: 80
      GHA_EXTERNAL_DISK: "tools"
      GHA_SA: "gh-sa-f4pga-arch-defs-ci"
    steps:

      - name: '📤 Download artifact: arch-defs packages'
        uses: actions/download-artifact@v4
        with:
          name: packages
          path: './'
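
      # Record a timestamp (used as the destination directory in the bucket),
      # report the package hash in the step summary and list the tarballs to
      # be uploaded.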
      - name: 🕐 Timestamp, Hash and packages.list
        id: timestamp
        run: |
          TIMESTAMP="$(date +'%Y%m%d-%H%M%S')"
          echo "timestamp=$TIMESTAMP" >> "$GITHUB_OUTPUT"
          echo 'Timestamp: '"$TIMESTAMP" >> $GITHUB_STEP_SUMMARY
          echo 'Hash: '"$(echo symbiflow-arch-defs*.tar.xz | sed 's/.*-\(.*\)\.tar\.xz/\1/')" >> $GITHUB_STEP_SUMMARY
          ls *.tar.xz > packages.list
          mkdir -p install
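
      # The job runs inside a bare ubuntu:bionic container, so install Python 3
      # and the Google Cloud CLI (gsutil) before uploading.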
      - name: '🛠️ Setup Python and gsutil'
        run: |
          apt update -qqy
          apt install -qqy curl python3 gnupg2
          echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list
          curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -
          apt update -qqy
          apt install -qqy google-cloud-cli
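
      # For each tarball in packages.list: copy it to the continuous install
      # area of the bucket (only on pushes to main) and write a
      # "symbiflow-<package>-latest" pointer file with its public download URL.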
      - name: 🚀 Upload to Google Cloud Storage (GCS) bucket
        run: |
          python3 - <<EOF
          from subprocess import check_call, run
          from pathlib import Path
          from re import compile as re_compile

          TIMESTAMP = '${{ steps.timestamp.outputs.timestamp }}'
          DO_UPLOAD = '${{ github.event_name == 'push' && github.ref_name == 'main' }}' == 'true'

          print(f'> TIMESTAMP: {TIMESTAMP}')

          PACKAGE_RE = re_compile("symbiflow-arch-defs-([a-zA-Z0-9_-]+)-([a-z0-9])")
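
          # Write a "symbiflow-<package>-latest" pointer file containing the
          # public download URL of the given artifact.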
          def write_latest(package_name, timestamp, artifact):
              with (Path("install") / f"symbiflow-{package_name}-latest").open("w") as wptr:
                  wptr.write(
                      'https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/'
                      f'symbiflow-arch-defs/continuous/install/{TIMESTAMP}/{artifact}'
                  )

          with (Path(__file__).parent.parent.parent / 'packages.list').open('r') as rptr:
              for artifact in rptr.read().splitlines():
                  if DO_UPLOAD:
                      check_call([
                          'gsutil', 'cp', str(artifact),
                          f'gs://symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/symbiflow-arch-defs/continuous/install/{TIMESTAMP}/'
                      ])
                  m = PACKAGE_RE.match(artifact)
                  assert m, f"Package name not recognized! {artifact}"
                  package_name = m.group(1)
                  write_latest(package_name, TIMESTAMP, artifact)
                  if package_name == 'install-xc7':
                      write_latest('toolchain', TIMESTAMP, artifact)
                  if package_name == 'benchmarks-xc7':
                      write_latest('benchmarks', TIMESTAMP, artifact)
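
          # Finally sync the generated pointer files into the configured bucket.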
          if DO_UPLOAD:
              run('gsutil -m rsync $* -r install gs://${{ secrets.GCP_STORAGE_BUCKET }}/', shell=True, check=False)
          EOF
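
      # Also expose the pointer files as a workflow artifact so the Assets job
      # can attach them to the rolling "latest" release.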
      - name: '📤 Upload artifact: latest'
        uses: actions/upload-artifact@v4
        with:
          name: latest
          path: install
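
  # On pushes to main, attach the "*-latest" pointer files as assets of the
  # "latest" release tag.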
  Assets:
    if: ${{ !(github.event_name != 'pull_request' && github.actor == 'dependabot[bot]') && (github.event_name == 'push' && github.ref_name == 'main') }}
    needs: GCS
    runs-on: ubuntu-latest
    steps:

      - name: '📤 Download artifact: latest'
        uses: actions/download-artifact@v4
        with:
          name: latest
          path: install

      - uses: pyTooling/Actions/releaser@r0
        with:
          token: ${{ github.token }}
          tag: latest
          files: install/*
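
  # Squash-merge dependabot pull requests automatically once the Pipeline job
  # has passed.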
  Automerge:
    needs: Pipeline
    if: github.event_name == 'pull_request' && github.actor == 'dependabot[bot]'
    runs-on: ubuntu-latest
    name: Automerge dependabot PRs
    permissions:
      contents: write
    steps:

      - name: Auto-merge Dependabot PR
        run: GITHUB_TOKEN='${{ github.token }}' gh pr merge '${{ github.event.pull_request.html_url }}' --squash