Added S3 express benchmark
Signed-off-by: Ankit Saurabh <[email protected]>
Ankit Saurabh committed Feb 21, 2024
1 parent 022f915 commit b8e70df
Showing 5 changed files with 185 additions and 8 deletions.
7 changes: 6 additions & 1 deletion .github/workflows/bench_main.yml
@@ -2,7 +2,7 @@ name: Benchmarks

on:
push:
branches: [ "main" ]
branches: [ "main", "s3-express-benchmark" ]

permissions:
id-token: write
@@ -14,3 +14,8 @@ jobs:
uses: ./.github/workflows/bench.yml
with:
ref: ${{ github.event.after }}
s3express-integration:
name: Benchmarks (s3express)
uses: ./.github/workflows/bench_s3express.yml
with:
ref: ${{ github.event.after }}
9 changes: 8 additions & 1 deletion .github/workflows/bench_pr.yml
@@ -2,7 +2,7 @@ name: Benchmarks (PR)

on:
pull_request_target:
branches: [ "main" ]
branches: [ "main", "s3-express-benchmark" ]
types: [ labeled, opened, reopened, synchronize ]

permissions:
@@ -17,3 +17,10 @@ jobs:
with:
environment: PR benchmarks
ref: ${{ github.event.pull_request.head.sha }}
s3express-integration:
name: Benchmarks (s3express)
uses: ./.github/workflows/bench_s3express.yml
if: ${{ contains(github.event.pull_request.labels.*.name, 'performance') }}
with:
environment: PR benchmarks
ref: ${{ github.event.pull_request.head.sha }}
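
The new `s3express-integration` job above only runs on pull requests carrying the `performance` label, via the `contains(github.event.pull_request.labels.*.name, 'performance')` condition; because `labeled` is one of the listed `pull_request_target` types, applying the label also triggers a fresh run. A hedged sketch of driving that from the GitHub CLI, assuming `gh` is installed and authenticated and using a placeholder PR number:

    # Apply the label that gates the s3express benchmark job (1234 is a placeholder PR number).
    gh pr edit 1234 --add-label performance
    # Inspect the label names the workflow condition will evaluate.
    gh pr view 1234 --json labels --jq '.labels[].name'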
163 changes: 163 additions & 0 deletions .github/workflows/bench_s3express.yml
@@ -0,0 +1,163 @@
name: Benchmark (s3-express one zone)

# We use environments to require approval to run benchmarks on PRs, but not on pushes to `main`
# (which have been approved already since PRs are required for `main`).
on:
workflow_call:
inputs:
environment:
type: string
ref:
required: true
type: string

env:
RUST_BACKTRACE: 1
CARGO_TERM_COLOR: always
S3_BUCKET_NAME: ${{ vars.S3_EXPRESS_ONE_ZONE_BUCKET_NAME }}
S3_BUCKET_TEST_PREFIX: ${{ vars.S3_BUCKET_BENCH_PREFIX || 'mountpoint-benchmark/' }}
S3_BUCKET_BENCH_FILE: ${{ vars.BENCH_FILE_NAME || 'bench100GB.bin' }}
S3_BUCKET_SMALL_BENCH_FILE: ${{ vars.SMALL_BENCH_FILE_NAME || 'bench5MB.bin' }}
S3_REGION: ${{ vars.S3_REGION }}

jobs:
bench:
name: Benchmark (Throughput)
runs-on: [self-hosted, linux, x64, high-performance]

environment: ${{ inputs.environment }}

steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ vars.ACTIONS_IAM_ROLE }}
aws-region: ${{ vars.S3_REGION }}
role-duration-seconds: 21600
- name: Checkout code
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
submodules: true
persist-credentials: false
- name: Install operating system dependencies
uses: ./.github/actions/install-dependencies
with:
fuseVersion: 2
libunwind: true
fio: true
- name: Set up stable Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- name: Restore Cargo cache
id: restore-cargo-cache
uses: actions/cache/restore@v3
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-${{ github.job }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Build
run: cargo build --release
- name: Run Benchmark
run: mountpoint-s3/scripts/fs_bench.sh
- name: Save Cargo cache
uses: actions/cache/save@v3
if: inputs.environment != 'PR benchmarks'
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ steps.restore-cargo-cache.outputs.cache-primary-key }}
- name: Check benchmark results
uses: benchmark-action/github-action-benchmark@v1
with:
tool: 'customBiggerIsBetter'
output-file-path: results/output_s3_express.json
benchmark-data-dir-path: dev/s3-express/bench
alert-threshold: "200%"
fail-on-alert: true
# GitHub API token to make a commit comment
github-token: ${{ secrets.GITHUB_TOKEN }}
# Store the results and deploy GitHub pages automatically if the results are from main branch
auto-push: ${{ inputs.environment && 'false' || 'true' }}
comment-on-alert: true
max-items-in-chart: 20

latency-bench:
name: Benchmark (Latency)
runs-on: [self-hosted, linux, x64, high-performance]

environment: ${{ inputs.environment }}

steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ vars.ACTIONS_IAM_ROLE }}
aws-region: ${{ vars.S3_REGION }}
- name: Checkout code
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
submodules: true
persist-credentials: false
- name: Install operating system dependencies
uses: ./.github/actions/install-dependencies
with:
fuseVersion: 2
libunwind: true
fio: true
- name: Set up stable Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- name: Restore Cargo cache
id: restore-cargo-cache
uses: actions/cache/restore@v3
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-${{ github.job }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Build
run: cargo build --release
- name: Run Benchmark
run: mountpoint-s3/scripts/fs_latency_bench.sh
- name: Save Cargo cache
uses: actions/cache/save@v3
if: inputs.environment != 'PR benchmarks'
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ steps.restore-cargo-cache.outputs.cache-primary-key }}
- name: Check benchmark results
uses: benchmark-action/github-action-benchmark@v1
with:
tool: 'customSmallerIsBetter'
output-file-path: results/output_s3_express.json
benchmark-data-dir-path: dev/s3-express/latency_bench
alert-threshold: "200%"
fail-on-alert: true
# GitHub API token to make a commit comment
github-token: ${{ secrets.GITHUB_TOKEN }}
# Store the results and deploy GitHub pages automatically if the results are from main branch
auto-push: ${{ inputs.environment && 'false' || 'true' }}
comment-on-alert: true
max-items-in-chart: 20
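
The workflow above runs two jobs: throughput (`fs_bench.sh`, scored with `customBiggerIsBetter`) and latency (`fs_latency_bench.sh`, scored with `customSmallerIsBetter`), both reading their numbers from `results/output_s3_express.json`. A hedged sketch of reproducing the same steps locally, assuming an S3 Express One Zone directory bucket you own; every value below is a placeholder standing in for the repository variables the workflow pulls from `vars.*`:

    # Placeholder values for the variables the workflow takes from `vars.*`.
    export S3_BUCKET_NAME=my-bucket--usw2-az1--x-s3      # placeholder directory bucket name
    export S3_REGION=us-west-2                           # placeholder region
    export S3_BUCKET_TEST_PREFIX=mountpoint-benchmark/
    export S3_BUCKET_BENCH_FILE=bench100GB.bin
    export S3_BUCKET_SMALL_BENCH_FILE=bench5MB.bin

    cargo build --release
    ./mountpoint-s3/scripts/fs_bench.sh          # throughput benchmark
    ./mountpoint-s3/scripts/fs_latency_bench.sh  # latency benchmark
    # The "Check benchmark results" step then reads results/output_s3_express.json,
    # a JSON array of name/unit/value entries in the benchmark action's custom format.

The `auto-push: ${{ inputs.environment && 'false' || 'true' }}` expression publishes results to GitHub Pages only when no `environment` input is set, i.e. on pushes to `main` rather than on PR runs, matching the comment in the workflow.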
3 changes: 2 additions & 1 deletion mountpoint-s3/scripts/fs_bench.sh
@@ -99,7 +99,8 @@ read_benchmark () {
--debug \
--allow-delete \
--log-directory=${log_dir} \
--prefix=${S3_BUCKET_TEST_PREFIX}
--prefix=${S3_BUCKET_TEST_PREFIX} \
--part-size=16777216
mount_status=$?
set -e
if [ $mount_status -ne 0 ]; then
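The only functional change to `fs_bench.sh` is the extra `--part-size=16777216` argument, i.e. 16 MiB parts (16 * 1024 * 1024 bytes). A hedged sketch of an equivalent manual mount with the same options, assuming the `mount-s3` binary produced by `cargo build --release` and placeholder bucket, mount, and log paths:

    # Mount with the options the benchmark script now passes (placeholder paths).
    ./target/release/mount-s3 "${S3_BUCKET_NAME}" /tmp/bench-mount \
        --debug \
        --allow-delete \
        --log-directory=/tmp/mount-s3-logs \
        --prefix="${S3_BUCKET_TEST_PREFIX}" \
        --part-size=16777216   # 16 MiB per part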
11 changes: 6 additions & 5 deletions mountpoint-s3/scripts/fs_latency_bench.sh
@@ -36,10 +36,13 @@ mkdir -p ${results_dir}

# start readdir benchmarking
dir_size=100
jobs_dir=mountpoint-s3/scripts/fio/create

while [ $dir_size -le 100000 ]
do
sum=0
job_name="readdir_${dir_size}"
job_file="${jobs_dir}/create_files_${dir_size}.fio"
mount_dir=$(mktemp -d /tmp/mount-s3-XXXXXXXXXXXX)
target_dir="${mount_dir}/bench_dir_${dir_size}"
startdelay=30
@@ -61,11 +64,9 @@
exit 1
fi

# verify that the target directory exists before running the benchmark
if [ ! -d "${target_dir}" ]; then
echo "Target directory ${target_dir} does not exist."
exit 1
fi
# create the target directory if it does not exist before running the benchmark
mkdir -p ${target_dir}
fio --directory=${target_dir} ${job_file}

sleep $startdelay
# run each case for 10 iterations
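With this change, `fs_latency_bench.sh` no longer exits when the benchmark directory is missing: it creates the directory with `mkdir -p` and populates it by running fio with a file-creation job from `mountpoint-s3/scripts/fio/create/` (`create_files_${dir_size}.fio`). Those job files are not part of this diff; the following is only a hypothetical sketch of what a 100-file creation job could look like, written as bash for illustration:

    # Hypothetical example job file; the real create_files_*.fio contents may differ.
    printf '%s\n' \
      '[create_files_100]' \
      'rw=write' \
      'create_only=1' \
      'nrfiles=100' \
      'filesize=1k' > create_files_100.fio
    # create_only=1 makes fio lay the files out without a full data-write pass;
    # nrfiles controls how many entries the readdir benchmark will later list.
    fio --directory="${target_dir}" create_files_100.fio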
