From 296f30c487d339416ff2e3d656f0ae3f4dba297c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 13 Dec 2023 15:10:49 -0600
Subject: [PATCH 1/5] Bump org.slf4j:slf4j-reload4j from 2.0.7 to 2.0.9 (#419)

Bumps org.slf4j:slf4j-reload4j from 2.0.7 to 2.0.9.

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=org.slf4j:slf4j-reload4j&package-manager=maven&previous-version=2.0.7&new-version=2.0.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
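Aside: these commands are just PR comments, so they can also be posted from a terminal. A minimal sketch using the GitHub CLI (assuming `gh` is installed and authenticated; the PR number is taken from the subject line and is otherwise illustrative):

```bash
# Trigger a rebase of this Dependabot PR (#419) by posting a comment.
gh pr comment 419 --body "@dependabot rebase"

# Likewise, stop Dependabot from proposing this minor version again.
gh pr comment 419 --body "@dependabot ignore this minor version"
```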
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 930e7eaa0..185a1a937 100644 --- a/pom.xml +++ b/pom.xml @@ -235,7 +235,7 @@ org.slf4j slf4j-reload4j - 2.0.7 + 2.0.9 From c3c2972ee55f686781ff8fead6e5a37963de91cc Mon Sep 17 00:00:00 2001 From: Brian Kroth Date: Thu, 14 Dec 2023 09:33:11 -0600 Subject: [PATCH 2/5] Enforce dependency management (#417) Builds off of #413 to enable the dependency management plugin in non-report mode. --- .devcontainer/devcontainer.json | 1 + .github/workflows/maven.yml | 5 ++- .vscode/extensions.json | 1 + pom.xml | 67 ++++++++++++++++++++++++++++++++- 4 files changed, 70 insertions(+), 4 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index f38d12ce5..b5773b3b9 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -46,6 +46,7 @@ "github.vscode-github-actions", "github.vscode-pull-request-github", "huntertran.auto-markdown-toc", + "redhat.fabric8-analytics", "vscjava.vscode-java-pack" ] } diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index b04b8d4e7..46519d8c6 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -76,8 +76,9 @@ jobs: cache: 'maven' distribution: 'temurin' - - name: Package with Maven - run: mvn -B package -P ${{matrix.profile}} --file pom.xml -DskipTests -D descriptors=src/main/assembly/tgz.xml + - name: Package and verify with Maven + run: | + mvn -B package verify -P ${{matrix.profile}} --file pom.xml -DskipTests -D descriptors=src/main/assembly/tgz.xml - name: Upload TGZ artifact uses: actions/upload-artifact@v3 diff --git a/.vscode/extensions.json b/.vscode/extensions.json index f0c178cee..7c93cfabc 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -3,6 +3,7 @@ "EditorConfig.EditorConfig", "github.vscode-pull-request-github", "huntertran.auto-markdown-toc", + "redhat.fabric8-analytics", "vscjava.vscode-java-pack" ] } diff --git a/pom.xml b/pom.xml index 185a1a937..1841c1247 100644 --- a/pom.xml +++ b/pom.xml @@ -84,6 +84,7 @@ org.postgresql postgresql 42.6.0 + runtime @@ -103,6 +104,7 @@ mysql mysql-connector-java 8.0.30 + runtime @@ -144,6 +146,7 @@ org.mariadb.jdbc mariadb-java-client 3.1.4 + runtime @@ -163,6 +166,7 @@ com.google.cloud google-cloud-spanner-jdbc 2.10.0 + runtime @@ -182,6 +186,7 @@ org.postgresql postgresql 42.6.0 + runtime @@ -201,6 +206,7 @@ org.apache.phoenix phoenix-client-hbase-2.4 5.1.3 + runtime @@ -220,6 +226,7 @@ com.microsoft.sqlserver mssql-jdbc 11.2.3.jre17 + runtime @@ -233,6 +240,7 @@ + org.slf4j slf4j-reload4j 2.0.9 @@ -245,6 +253,7 @@ + commons-jxpath commons-jxpath 1.3 @@ -268,6 +277,13 @@ 2.15.1 + + org.apache.commons + commons-text + 1.10.0 + compile + + org.apache.commons commons-collections4 @@ -293,15 +309,18 @@ + org.glassfish.jaxb jaxb-runtime 4.0.4 + compile org.hsqldb hsqldb 2.7.2 + test @@ -319,6 +338,13 @@ + org.codehaus.janino + commons-compiler + 3.1.11 + + + + org.codehaus.janino janino 3.1.11 @@ -365,6 +391,21 @@ + org.apache.maven.plugins maven-jar-plugin @@ -461,7 +502,30 @@ analyze-only - false + true + + + + org.glassfish.jaxb:jaxb-runtime:jar + + org.slf4j:slf4j-reload4j:jar + + org.codehaus.janino:janino:jar + + commons-jxpath:commons-jxpath:jar + + + org.postgresql:postgresql:jar + mysql:mysql-connector-java:jar + com.oracle.database.jdbc:ojdbc11:jar + 
org.mariadb.jdbc:mariadb-java-client:jar
+ com.google.cloud:google-cloud-spanner-jdbc:jar
+ org.apache.phoenix:phoenix-client-hbase-2.4:jar
+ com.microsoft.sqlserver:mssql-jdbc:jar
+ org.xerial:sqlite-jdbc:jar
+
@@ -476,5 +540,4 @@
-

From 75b00142e5abf46682e39e5c8782fbdb2e1c20d3 Mon Sep 17 00:00:00 2001
From: Brian Kroth
Date: Thu, 14 Dec 2023 11:19:26 -0600
Subject: [PATCH 3/5] EditorConfig fixups to match Google Java Style (#425)

Fixups following #416
---
 .editorconfig | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.editorconfig b/.editorconfig
index ddf305feb..c2badd185 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -19,3 +19,9 @@ end_of_line = lf

 [*.yml]
 indent_size = 2
+
+[*.java]
+# https://google.github.io/styleguide/javaguide.html#s4.2-block-indentation
+indent_size = 2
+# https://google.github.io/styleguide/javaguide.html#s4.4-column-limit
+max_line_length = 100

From c0ef0476a54529825024603d6a08213a55dc8202 Mon Sep 17 00:00:00 2001
From: Brian Kroth
Date: Thu, 14 Dec 2023 11:34:31 -0600
Subject: [PATCH 4/5] Add .git-blame-ignore-revs file (#426)

Follow-on work to #416, this file uses a [`.git-blame-ignore-revs` file](https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view) to ignore revs that cause major reformatting changes in git blame view.
---
 .git-blame-ignore-revs | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 .git-blame-ignore-revs

diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 000000000..3edc731e0
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# Enforce and apply Google Java Style formatting (#416)
+56e3191bf8df94dcdf226ae814a9aa2c08b931b9

From 5e44cbf8bc9a982c598e5996a0c26ab6e9b0d17d Mon Sep 17 00:00:00 2001
From: Brian Kroth
Date: Thu, 14 Dec 2023 15:15:52 -0600
Subject: [PATCH 5/5] Improve test result checking (#427)

- Add checks on the result outputs to make sure that the expected run time and number of queries were actually observed.
- Make sure to exit non-zero when RuntimeExceptions are encountered.
- Temporarily remove a faulty INSERT test from the templated benchmark.

Split out from #283.
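For context, a minimal local sketch of how the new result checks are intended to be used after a run (the benchmark name, config path, and error threshold below are illustrative, not part of this patch):

```bash
# Run a benchmark and emit the histogram/summary outputs (example invocation).
java -jar benchbase.jar -b tpcc -c config/sqlite/sample_tpcc_config.xml \
    --create=true --load=true --execute=true \
    --json-histograms results/histograms.json

# New check: compare the latest results/tpcc_*.summary.json against the
# runtime/weights recorded in the matching results/tpcc_*.config.xml.
./scripts/check_latest_benchmark_results.sh tpcc

# Existing check: make sure the error rate in the histograms stays below a threshold.
./scripts/check_histogram_results.sh results/histograms.json 0.01
```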
--- .github/workflows/maven.yml | 38 +++++++-- .../cockroachdb/sample_templated_config.xml | 60 ++++++++++++++ config/mariadb/sample_templated_config.xml | 4 +- config/mysql/sample_templated_config.xml | 4 +- config/oracle/sample_templated_config.xml | 13 ++- config/postgres/sample_templated_config.xml | 4 +- config/sqlite/sample_templated_config.xml | 4 +- config/sqlserver/sample_hyadapt_config.xml | 2 +- config/sqlserver/sample_templated_config.xml | 4 +- config/sqlserver/sample_ycsb_config.xml | 2 +- data/templated/example.xml | 2 + docker/benchbase/devcontainer/Dockerfile | 2 +- docker/benchbase/run-dev-image.sh | 3 +- docker/benchbase/run-full-image.sh | 6 +- docker/build-run-benchmark-with-docker.sh | 20 ++++- .../README.md | 0 .../docker-compose.yml | 0 .../down.cmd | 0 .../down.sh | 0 .../prune.cmd | 0 .../prune.sh | 0 .../up.cmd | 0 .../up.sh | 0 scripts/check_latest_benchmark_results.sh | 79 +++++++++++++++++++ .../java/com/oltpbenchmark/DBWorkload.java | 6 ++ src/main/java/com/oltpbenchmark/Results.java | 12 ++- .../java/com/oltpbenchmark/ThreadBench.java | 15 +++- .../templated/procedures/GenericQuery.java | 8 +- .../com/oltpbenchmark/util/ResultWriter.java | 1 + .../java/com/oltpbenchmark/util/SQLUtil.java | 12 ++- 30 files changed, 270 insertions(+), 31 deletions(-) create mode 100644 config/cockroachdb/sample_templated_config.xml rename docker/{cockroach-latest => cockroachdb-latest}/README.md (100%) rename docker/{cockroach-latest => cockroachdb-latest}/docker-compose.yml (100%) rename docker/{cockroach-latest => cockroachdb-latest}/down.cmd (100%) rename docker/{cockroach-latest => cockroachdb-latest}/down.sh (100%) rename docker/{cockroach-latest => cockroachdb-latest}/prune.cmd (100%) rename docker/{cockroach-latest => cockroachdb-latest}/prune.sh (100%) rename docker/{cockroach-latest => cockroachdb-latest}/up.cmd (100%) rename docker/{cockroach-latest => cockroachdb-latest}/up.sh (100%) create mode 100755 scripts/check_latest_benchmark_results.sh diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 46519d8c6..6db9b8030 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -121,7 +121,7 @@ jobs: run: | # For templated benchmarks, we need to preload some data for the test since by design, templated benchmarks do not support the 'load' operation # In this case, we load the tpcc data. - if [[ ${{matrix.benchmark}} == templated ]]; then + if [[ ${{matrix.benchmark}} == templated ]] || [[ ${{matrix.benchmark}} == tpcc ]]; then # Disable synchronous mode for sqlite tpcc data loading to save some time. java -jar benchbase.jar -b tpcc -c config/sqlite/sample_tpcc_nosync_config.xml --create=true --load=true --execute=false --json-histograms results/histograms.json # Run the templated benchmark. @@ -129,7 +129,9 @@ jobs: else java -jar benchbase.jar -b ${{matrix.benchmark}} -c config/sqlite/sample_${{matrix.benchmark}}_config.xml --create=true --load=true --execute=true --json-histograms results/histograms.json fi + # FIXME: Reduce the error rate so we don't need these overrides. 
+ results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == auctionmark ]; then ERRORS_THRESHOLD=0.02 elif [ ${{matrix.benchmark}} == resourcestresser ]; then @@ -139,6 +141,7 @@ jobs: elif [ ${{matrix.benchmark}} == tatp ]; then ERRORS_THRESHOLD=0.05 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -150,6 +153,7 @@ jobs: strategy: fail-fast: false matrix: + # FIXME: Add tpch back in (#333). benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'templated', 'tpcc', 'twitter', 'voter', 'wikipedia', 'ycsb' ] services: mariadb: # https://hub.docker.com/_/mariadb @@ -203,11 +207,13 @@ jobs: fi # FIXME: Reduce the error rate so we don't need these overrides. + results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == auctionmark ]; then ERRORS_THRESHOLD=0.02 elif [ ${{matrix.benchmark}} == tatp ]; then ERRORS_THRESHOLD=0.05 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -219,7 +225,7 @@ jobs: strategy: fail-fast: false matrix: - benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'templated', 'tpcc', 'twitter', 'voter', 'wikipedia', 'ycsb' ] + benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'templated', 'tpcc', 'tpch', 'twitter', 'voter', 'wikipedia', 'ycsb' ] services: mysql: # https://hub.docker.com/_/mysql image: mysql:latest @@ -271,11 +277,13 @@ jobs: fi # FIXME: Reduce the error rate so we don't need these overrides. + results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == auctionmark ]; then ERRORS_THRESHOLD=0.02 elif [ ${{matrix.benchmark}} == tatp ]; then ERRORS_THRESHOLD=0.05 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -287,7 +295,7 @@ jobs: strategy: fail-fast: false matrix: - benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'tpcc', 'twitter', 'voter', 'wikipedia', 'ycsb', 'templated' ] + benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'tpcc', 'tpch', 'twitter', 'voter', 'wikipedia', 'ycsb', 'templated' ] services: oracle: image: gvenzl/oracle-xe:21.3.0-slim-faststart @@ -341,7 +349,9 @@ jobs: else java -jar benchbase.jar -b ${{matrix.benchmark}} -c config/oracle/sample_${{matrix.benchmark}}_config.xml --create=true --load=true --execute=true --json-histograms results/histograms.json fi + # FIXME: Reduce the error rate so we don't need these overrides. 
+ results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == auctionmark ]; then ERRORS_THRESHOLD=0.04 elif [ ${{matrix.benchmark}} == tatp ]; then @@ -353,6 +363,7 @@ jobs: elif [ ${{matrix.benchmark}} == wikipedia ]; then ERRORS_THRESHOLD=0.02 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -414,11 +425,13 @@ jobs: fi # FIXME: Reduce the error rate so we don't need these overrides. + results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == auctionmark ]; then ERRORS_THRESHOLD=0.02 elif [ ${{matrix.benchmark}} == tatp ]; then ERRORS_THRESHOLD=0.05 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -430,7 +443,7 @@ jobs: strategy: fail-fast: false matrix: - benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'tpcc', 'tpch', 'twitter', 'voter', 'wikipedia', 'ycsb' ] + benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'templated', 'tpcc', 'tpch', 'twitter', 'voter', 'wikipedia', 'ycsb' ] services: cockroach: # https://hub.docker.com/repository/docker/timveil/cockroachdb-single-node image: timveil/cockroachdb-single-node:latest @@ -461,14 +474,23 @@ jobs: - name: Run benchmark run: | - java -jar benchbase.jar -b ${{matrix.benchmark}} -c config/cockroachdb/sample_${{matrix.benchmark}}_config.xml --create=true --load=true --execute=true --json-histograms results/histograms.json + # For templated benchmarks, we need to preload some data for the test since by design, templated benchmarks do not support the 'load' operation + # In this case, we load the tpcc data. + if [[ ${{matrix.benchmark}} == templated ]]; then + java -jar benchbase.jar -b tpcc -c config/cockroachdb/sample_tpcc_config.xml --create=true --load=true --execute=false --json-histograms results/histograms.json + java -jar benchbase.jar -b ${{matrix.benchmark}} -c config/cockroachdb/sample_${{matrix.benchmark}}_config.xml --create=false --load=false --execute=true --json-histograms results/histograms.json + else + java -jar benchbase.jar -b ${{matrix.benchmark}} -c config/cockroachdb/sample_${{matrix.benchmark}}_config.xml --create=true --load=true --execute=true --json-histograms results/histograms.json + fi # FIXME: Reduce the error rate so we don't need these overrides. # FIXME: Reduce the error rate so we don't need these overrides. 
+ results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == auctionmark ]; then ERRORS_THRESHOLD=0.02 elif [ ${{matrix.benchmark}} == tatp ]; then ERRORS_THRESHOLD=0.05 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -480,8 +502,7 @@ jobs: strategy: fail-fast: false matrix: - # TODO: add more benchmarks - #benchmark: [ 'auctionmark', 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'seats', 'sibench', 'smallbank', 'tatp', 'tpcc', 'tpch', 'twitter', 'voter', 'wikipedia', 'ycsb' ] + # TODO: add auctionmark and seats benchmark benchmark: [ 'epinions', 'hyadapt', 'noop', 'otmetrics', 'resourcestresser', 'sibench', 'smallbank', 'tatp', 'tpcc', 'templated', 'tpch', 'twitter', 'voter', 'wikipedia', 'ycsb' ] services: sqlserver: @@ -553,9 +574,11 @@ jobs: fi # FIXME: Reduce the error rate so we don't need these overrides. + results_benchmark=${{matrix.benchmark}} if [ ${{matrix.benchmark}} == tatp ]; then ERRORS_THRESHOLD=0.05 fi + ./scripts/check_latest_benchmark_results.sh $results_benchmark ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD ## ---------------------------------------------------------------------------------- @@ -632,6 +655,7 @@ jobs: "$image" -b "$benchmark" -c /tmp/config.xml --create=true --load=true --execute=true --json-histograms results/histograms.json # Test that the results files were produced. ls results/${benchmark}_*.csv + ./scripts/check_latest_benchmark_results.sh ${benchmark} ./scripts/check_histogram_results.sh results/histograms.json $ERRORS_THRESHOLD done # Publish the docker image if the build/test was successful. 
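Aside: the two-step preload pattern used for the templated benchmark in the workflow jobs above (load a base schema via tpcc, then execute only the templated queries) can be sketched as a standalone sequence; the config paths follow the cockroachdb job and are illustrative:

```bash
# Templated benchmarks cannot load their own data, so first create and load
# the tpcc schema without executing the workload...
java -jar benchbase.jar -b tpcc -c config/cockroachdb/sample_tpcc_config.xml \
    --create=true --load=true --execute=false
# ...then execute the templated queries against that pre-loaded data.
java -jar benchbase.jar -b templated -c config/cockroachdb/sample_templated_config.xml \
    --create=false --load=false --execute=true \
    --json-histograms results/histograms.json
```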
diff --git a/config/cockroachdb/sample_templated_config.xml b/config/cockroachdb/sample_templated_config.xml new file mode 100644 index 000000000..8f0e31630 --- /dev/null +++ b/config/cockroachdb/sample_templated_config.xml @@ -0,0 +1,60 @@ + + + + + COCKROACHDB + org.postgresql.Driver + jdbc:postgresql://localhost:26257/benchbase?sslmode=disable&ApplicationName=templated&reWriteBatchedInserts=true + root + + TRANSACTION_SERIALIZABLE + 128 + + + + data/templated/example.xml + + + 1 + + + + 100 + 20,20,10,10,10,10,10 + + + + + + + GetOrder + + + GetCust + + + GetCustNull + + + GetWarehouse + + + GetItemByPrice + + + UpdateItemPrice + + + DeleteItem + + + + diff --git a/config/mariadb/sample_templated_config.xml b/config/mariadb/sample_templated_config.xml index 1198bfaeb..8d76aa606 100644 --- a/config/mariadb/sample_templated_config.xml +++ b/config/mariadb/sample_templated_config.xml @@ -24,7 +24,7 @@ 100 - 20,20,10,10,10,10,10,10 + 20,20,10,10,10,10,10 @@ -51,8 +51,10 @@ DeleteItem + diff --git a/config/mysql/sample_templated_config.xml b/config/mysql/sample_templated_config.xml index bb9c3c6f9..369cdb268 100644 --- a/config/mysql/sample_templated_config.xml +++ b/config/mysql/sample_templated_config.xml @@ -24,7 +24,7 @@ 100 - 20,20,10,10,10,10,10,10 + 20,20,10,10,10,10,10 @@ -51,8 +51,10 @@ DeleteItem + diff --git a/config/oracle/sample_templated_config.xml b/config/oracle/sample_templated_config.xml index 043cb413f..58e59c489 100644 --- a/config/oracle/sample_templated_config.xml +++ b/config/oracle/sample_templated_config.xml @@ -24,7 +24,7 @@ 100 - 30,20,10,30,10 + 20,20,10,10,10,10,10 @@ -45,5 +45,16 @@ GetItemByPrice + + UpdateItemPrice + + + DeleteItem + + diff --git a/config/postgres/sample_templated_config.xml b/config/postgres/sample_templated_config.xml index de5baa5a6..1a5a61edd 100644 --- a/config/postgres/sample_templated_config.xml +++ b/config/postgres/sample_templated_config.xml @@ -24,7 +24,7 @@ 100 - 20,20,10,10,10,10,10,10 + 20,20,10,10,10,10,10 @@ -51,8 +51,10 @@ DeleteItem + diff --git a/config/sqlite/sample_templated_config.xml b/config/sqlite/sample_templated_config.xml index a3b5b5f7f..8b2c8ecbf 100644 --- a/config/sqlite/sample_templated_config.xml +++ b/config/sqlite/sample_templated_config.xml @@ -22,7 +22,7 @@ 100 - 20,20,10,10,10,10,10,10 + 20,20,10,10,10,10,10 @@ -49,8 +49,10 @@ DeleteItem + diff --git a/config/sqlserver/sample_hyadapt_config.xml b/config/sqlserver/sample_hyadapt_config.xml index a62b48b05..ae10aba04 100644 --- a/config/sqlserver/sample_hyadapt_config.xml +++ b/config/sqlserver/sample_hyadapt_config.xml @@ -114,6 +114,6 @@ SumRecord10 - + diff --git a/config/sqlserver/sample_templated_config.xml b/config/sqlserver/sample_templated_config.xml index 8f3ea23a4..c6e301ce7 100644 --- a/config/sqlserver/sample_templated_config.xml +++ b/config/sqlserver/sample_templated_config.xml @@ -24,7 +24,7 @@ 100 - 20,20,10,10,10,10,10,10 + 20,20,10,10,10,10,10 @@ -51,8 +51,10 @@ DeleteItem + diff --git a/config/sqlserver/sample_ycsb_config.xml b/config/sqlserver/sample_ycsb_config.xml index 94e66f62a..52b3927aa 100644 --- a/config/sqlserver/sample_ycsb_config.xml +++ b/config/sqlserver/sample_ycsb_config.xml @@ -12,7 +12,7 @@ 1 - + diff --git a/data/templated/example.xml b/data/templated/example.xml index c48917d4e..cdb506782 100644 --- a/data/templated/example.xml +++ b/data/templated/example.xml @@ -79,6 +79,7 @@ 255.0 + \ No newline at end of file diff --git a/docker/benchbase/devcontainer/Dockerfile b/docker/benchbase/devcontainer/Dockerfile index 
c44afa4d7..1fcc38004 100644 --- a/docker/benchbase/devcontainer/Dockerfile +++ b/docker/benchbase/devcontainer/Dockerfile @@ -7,7 +7,7 @@ LABEL org.opencontainers.image.source = "https://github.com/cmu-db/benchbase/" # Also add a few nice cli tools. RUN apt-get update \ && apt-get -y upgrade \ - && apt-get -y install --no-install-recommends sudo vim-nox neovim less bash-completion colordiff git openssh-client jq ripgrep \ + && apt-get -y install --no-install-recommends sudo vim-nox neovim less bash-completion colordiff git openssh-client jq ripgrep libxml2-utils \ && apt-get clean && rm -rf /var/lib/apt/lists/* # Add a containeruser that allows vscode/codespaces to map the local host user diff --git a/docker/benchbase/run-dev-image.sh b/docker/benchbase/run-dev-image.sh index ce1eb4bb4..1e84d4d74 100755 --- a/docker/benchbase/run-dev-image.sh +++ b/docker/benchbase/run-dev-image.sh @@ -40,5 +40,4 @@ docker run ${INTERACTIVE_ARGS:-} --rm \ --env EXTRA_MAVEN_ARGS="${EXTRA_MAVEN_ARGS:-}" \ --user "$CONTAINERUSER_UID:$CONTAINERUSER_GID" \ -v "$MAVEN_CONFIG:/home/containeruser/.m2" \ - -v "$SRC_DIR:/benchbase" benchbase-dev:latest $* -set +x + -v "$SRC_DIR:/benchbase" benchbase-dev:latest $* \ No newline at end of file diff --git a/docker/benchbase/run-full-image.sh b/docker/benchbase/run-full-image.sh index fb27189ad..6410d3084 100755 --- a/docker/benchbase/run-full-image.sh +++ b/docker/benchbase/run-full-image.sh @@ -4,6 +4,7 @@ set -eu # When we are running the full image we don't generally want to have to rebuild it repeatedly. CLEAN_BUILD=${CLEAN_BUILD:-false} +BUILD_IMAGE=${BUILD_IMAGE:-true} scriptdir=$(dirname "$(readlink -f "$0")") rootdir=$(readlink -f "$scriptdir/../../") @@ -15,9 +16,9 @@ if ! docker image ls --quiet benchbase-$BENCHBASE_PROFILE:latest | grep -q .; th CLEAN_BUILD=true fi -#if [ "$CLEAN_BUILD" != 'false' ]; then +if [ "$BUILD_IMAGE" != 'false' ]; then ./build-full-image.sh -#fi +fi if [ "$imagename" != 'benchbase' ]; then echo "ERROR: Unexpected imagename: $imagename" >&2 @@ -37,4 +38,3 @@ docker run -it --rm \ --env BENCHBASE_PROFILE="$BENCHBASE_PROFILE" \ --user "$CONTAINERUSER_UID:$CONTAINERUSER_GID" \ -v "$SRC_DIR/results:/benchbase/results" benchbase-$BENCHBASE_PROFILE:latest $* -set +x diff --git a/docker/build-run-benchmark-with-docker.sh b/docker/build-run-benchmark-with-docker.sh index 3643da0c2..f5b583598 100755 --- a/docker/build-run-benchmark-with-docker.sh +++ b/docker/build-run-benchmark-with-docker.sh @@ -18,6 +18,11 @@ scriptdir=$(dirname "$(readlink -f "$0")") rootdir=$(readlink -f "$scriptdir/..") cd "$rootdir" +# Do the rebuild (if necessary) build first. +if [ "${BUILD_IMAGE:-true}" != "false" ]; then + SKIP_TESTS=${SKIP_TESTS:-true} ./docker/benchbase/build-full-image.sh +fi + EXTRA_DOCKER_ARGS='' if [ "$BENCHBASE_PROFILE" == 'sqlite' ]; then # Map the sqlite db back to the host. 
@@ -54,7 +59,7 @@ elif [ "$benchmark" == 'templated' ]; then else tpcc_config="config/sample_tpcc_config.xml" fi - SKIP_TESTS=${SKIP_TESTS:-true} EXTRA_DOCKER_ARGS="--network=host $EXTRA_DOCKER_ARGS" \ + BUILD_IMAGE=false EXTRA_DOCKER_ARGS="--network=host $EXTRA_DOCKER_ARGS" \ ./docker/benchbase/run-full-image.sh \ --config "$tpcc_config" --bench tpcc \ $CREATE_DB_ARGS --execute=false @@ -64,10 +69,19 @@ elif [ "$benchmark" == 'templated' ]; then SKIP_TESTS=true fi -SKIP_TESTS=${SKIP_TESTS:-true} EXTRA_DOCKER_ARGS="--network=host $EXTRA_DOCKER_ARGS" \ +rm -f results/histograms.json +BUILD_IMAGE=false EXTRA_DOCKER_ARGS="--network=host $EXTRA_DOCKER_ARGS" \ ./docker/benchbase/run-full-image.sh \ --config "config/sample_${benchmark}_config.xml" --bench "$benchmark" \ $CREATE_DB_ARGS --execute=true \ --sample 1 --interval-monitor 1000 \ --json-histograms results/histograms.json -./scripts/check_histogram_results.sh results/histograms.json +rc=$? +wait # for the interrupt script, if any +if [ $rc -ne 0 ]; then + echo "ERROR: benchmark execution failed with exit code $rc" >&2 + exit $rc +fi +# else, check that the results look ok +./scripts/check_latest_benchmark_results.sh "$benchmark" +./scripts/check_histogram_results.sh results/histograms.json \ No newline at end of file diff --git a/docker/cockroach-latest/README.md b/docker/cockroachdb-latest/README.md similarity index 100% rename from docker/cockroach-latest/README.md rename to docker/cockroachdb-latest/README.md diff --git a/docker/cockroach-latest/docker-compose.yml b/docker/cockroachdb-latest/docker-compose.yml similarity index 100% rename from docker/cockroach-latest/docker-compose.yml rename to docker/cockroachdb-latest/docker-compose.yml diff --git a/docker/cockroach-latest/down.cmd b/docker/cockroachdb-latest/down.cmd similarity index 100% rename from docker/cockroach-latest/down.cmd rename to docker/cockroachdb-latest/down.cmd diff --git a/docker/cockroach-latest/down.sh b/docker/cockroachdb-latest/down.sh similarity index 100% rename from docker/cockroach-latest/down.sh rename to docker/cockroachdb-latest/down.sh diff --git a/docker/cockroach-latest/prune.cmd b/docker/cockroachdb-latest/prune.cmd similarity index 100% rename from docker/cockroach-latest/prune.cmd rename to docker/cockroachdb-latest/prune.cmd diff --git a/docker/cockroach-latest/prune.sh b/docker/cockroachdb-latest/prune.sh similarity index 100% rename from docker/cockroach-latest/prune.sh rename to docker/cockroachdb-latest/prune.sh diff --git a/docker/cockroach-latest/up.cmd b/docker/cockroachdb-latest/up.cmd similarity index 100% rename from docker/cockroach-latest/up.cmd rename to docker/cockroachdb-latest/up.cmd diff --git a/docker/cockroach-latest/up.sh b/docker/cockroachdb-latest/up.sh similarity index 100% rename from docker/cockroach-latest/up.sh rename to docker/cockroachdb-latest/up.sh diff --git a/scripts/check_latest_benchmark_results.sh b/scripts/check_latest_benchmark_results.sh new file mode 100755 index 000000000..22f861d11 --- /dev/null +++ b/scripts/check_latest_benchmark_results.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +# Simple script to try and sanity check the results of the latest benchmark run. + +set -eu +set -x + +# Move to the root of the repository. +scriptdir=$(dirname "$(readlink -f "$0")") +rootdir=$(readlink -f "$scriptdir/..") +cd "$rootdir" + +# Check that the results directory exists. +if ! 
[ -d results ]; then + echo "ERROR: Missing results directory" >&2 + exit 1 +fi + +BENCHBASE_PROFILE="${BENCHBASE_PROFILE:-}" +benchmark="${1:-}" +if [ -z "$benchmark" ]; then + echo "ERROR: Missing benchmark argument." >&2 +fi + +config_file=$(ls -1t results/${benchmark}_*.config.xml | head -n1) +if [ -z "$config_file" ]; then + echo "ERROR: Failed to find $benchmark benchmark results files." >&2 + exit 1 +fi + +ts=$(basename "$config_file" | sed -e "s/^${benchmark}_//" -e 's/\.config\.xml//') +summary_json="results/${benchmark}_${ts}.summary.json" + +if ! type xmllint 2>/dev/null; then + # Attempt to install xmllint. + # TODO: Add support for non apt based systems. + sudo -n /bin/bash -c "apt-get update && apt-get install -y libxml2-utils" || true +fi +if ! type xmllint 2>/dev/null; then + echo "ERROR: Missing xmllint utility." >&2 + exit 1 +fi + +# TODO: include warmup? +expected_runtime=$(xmllint --xpath '//works/work/time/text()' "$config_file" | awk '{ print sum=sum+$1 }' | tail -n1) + +if xmllint --xpath '//works/work/serial/text()' "$config_file" | grep -q -x true; then + if [ -n "$expected_runtime" ]; then + echo "ERROR: Unhandled: Found expected runtime in config file for serial workloads." >&2 + exit 1 + fi + + expected_query_count=$(xmllint --xpath '//works/work/weights/text()' "$config_file" | sed 's/,/\n/g' | grep -c -x 1 || true) + if xmllint --xpath '//works/work/weights/text()' "$config_file" | sed 's/,/\n/g' | grep -q -v '^[01]$'; then + echo "ERROR: Unsupported weight specification for serial workloads. Only 0,1 are handled currently." >&2 + exit 1 + fi + + measured_requests=$(cat "$summary_json" | jq -e '.["Measured Requests"]') + if [ "$measured_requests" -ne "$expected_query_count" ]; then + echo "ERROR: Benchmark measured requests ($measured_requests) was less than expected ($expected_query_count) or failed to parse output." >&2 + exit 1 + else + echo "OK: Benchmark measured requests ($measured_requests) matched ($expected_query_count)." + fi +else + if [ -z "$expected_runtime" ]; then + echo "ERROR: Failed to find expected runtime in config file: $config_file" >&2 + exit 1 + fi + + elapsed_time=$(cat "$summary_json" | jq -e '.["Elapsed Time (nanoseconds)"] / 1000000000 | round') + if [ "$elapsed_time" -lt "$expected_runtime" ]; then + echo "ERROR: Benchmark elapsed runtime ($elapsed_time) was less than expected ($expected_runtime) or failed to parse output." >&2 + exit 1 + else + echo "OK: Benchmark elapsed runtime ($elapsed_time) matched expected ($expected_runtime)." + fi +fi \ No newline at end of file diff --git a/src/main/java/com/oltpbenchmark/DBWorkload.java b/src/main/java/com/oltpbenchmark/DBWorkload.java index 5ca701046..32b814b0b 100644 --- a/src/main/java/com/oltpbenchmark/DBWorkload.java +++ b/src/main/java/com/oltpbenchmark/DBWorkload.java @@ -22,6 +22,7 @@ import com.oltpbenchmark.api.TransactionTypes; import com.oltpbenchmark.api.Worker; import com.oltpbenchmark.types.DatabaseType; +import com.oltpbenchmark.types.State; import com.oltpbenchmark.util.*; import java.io.File; import java.io.IOException; @@ -524,6 +525,11 @@ public static void main(String[] args) throws Exception { FileUtil.writeStringToFile(new File(fileName), histogram_json); LOG.info("Histograms JSON Data: " + fileName); } + + if (r.getState() == State.ERROR) { + throw new RuntimeException( + "Errors encountered during benchmark execution. 
See output above for details."); + } } catch (Throwable ex) { LOG.error("Unexpected error when executing benchmarks.", ex); System.exit(1); diff --git a/src/main/java/com/oltpbenchmark/Results.java b/src/main/java/com/oltpbenchmark/Results.java index 9e740351f..335a49dc7 100644 --- a/src/main/java/com/oltpbenchmark/Results.java +++ b/src/main/java/com/oltpbenchmark/Results.java @@ -19,6 +19,7 @@ import com.oltpbenchmark.LatencyRecord.Sample; import com.oltpbenchmark.api.TransactionType; +import com.oltpbenchmark.types.State; import com.oltpbenchmark.util.Histogram; import java.util.HashMap; import java.util.List; @@ -26,6 +27,7 @@ public final class Results { + private final State state; private final long startTimestampMs; private final long nanoseconds; private final int measuredRequests; @@ -40,6 +42,7 @@ public final class Results { private final Map> abortMessages = new HashMap<>(); public Results( + State state, long startTimestampMs, long elapsedNanoseconds, int measuredRequests, @@ -49,6 +52,7 @@ public Results( this.nanoseconds = elapsedNanoseconds; this.measuredRequests = measuredRequests; this.distributionStatistics = distributionStatistics; + this.state = state; if (distributionStatistics == null) { this.latencySamples = null; @@ -58,6 +62,10 @@ public Results( } } + public State getState() { + return state; + } + public DistributionStatistics getDistributionStatistics() { return distributionStatistics; } @@ -117,7 +125,9 @@ public int getMeasuredRequests() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("Results(nanoSeconds="); + sb.append("Results(state="); + sb.append(state); + sb.append(", nanoSeconds="); sb.append(nanoseconds); sb.append(", measuredRequests="); sb.append(measuredRequests); diff --git a/src/main/java/com/oltpbenchmark/ThreadBench.java b/src/main/java/com/oltpbenchmark/ThreadBench.java index 450ad249e..9161ae904 100644 --- a/src/main/java/com/oltpbenchmark/ThreadBench.java +++ b/src/main/java/com/oltpbenchmark/ThreadBench.java @@ -105,6 +105,7 @@ private int finalizeWorkers(ArrayList workerThreads) throws InterruptedE } private Results runRateLimitedMultiPhase() { + boolean errorsThrown = false; List workStates = new ArrayList<>(); for (WorkloadConfiguration workState : this.workConfs) { @@ -116,7 +117,7 @@ private Results runRateLimitedMultiPhase() { // long measureStart = start; - long start_ts = System.currentTimeMillis(); + long startTs = System.currentTimeMillis(); long start = System.nanoTime(); long warmupStart = System.nanoTime(); long warmup = warmupStart; @@ -210,6 +211,7 @@ private Results runRateLimitedMultiPhase() { // Go to next phase if this one is complete or enter if error was thrown boolean errorThrown = testState.getState() == State.ERROR; + errorsThrown = errorsThrown || errorThrown; if ((phaseComplete || errorThrown) && !lastEntry) { // enters here after each phase of the test // reset the queues so that the new phase is not affected by the @@ -318,7 +320,16 @@ private Results runRateLimitedMultiPhase() { } DistributionStatistics stats = DistributionStatistics.computeStatistics(latencies); - Results results = new Results(start_ts, measureEnd - start, requests, stats, samples); + Results results = + new Results( + // If any errors were thrown during the execution, proprogate that fact to the + // final Results state so we can exit non-zero *after* we output the results. + errorsThrown ? 
State.ERROR : testState.getState(), + startTs, + measureEnd - start, + requests, + stats, + samples); // Compute transaction histogram Set txnTypes = new HashSet<>(); diff --git a/src/main/java/com/oltpbenchmark/benchmarks/templated/procedures/GenericQuery.java b/src/main/java/com/oltpbenchmark/benchmarks/templated/procedures/GenericQuery.java index dbcc89f11..e4ab9a2b1 100644 --- a/src/main/java/com/oltpbenchmark/benchmarks/templated/procedures/GenericQuery.java +++ b/src/main/java/com/oltpbenchmark/benchmarks/templated/procedures/GenericQuery.java @@ -54,7 +54,10 @@ public void run(Connection conn, List params) throws SQLException { } } catch (Exception e) { e.printStackTrace(); - throw new RuntimeException("Error when trying to execute statement"); + throw new RuntimeException( + String.format( + "Error when trying to execute statement with params:\n%s\n%s", + this.getQueryTemplateInfo(), params)); } conn.commit(); @@ -84,7 +87,8 @@ public PreparedStatement getStatement(Connection conn, List params) thro } else { try { // TODO: add support for nullable other types - // For instance, can we provide a tag in the XML file to represent a NULL value? + // For instance, can we provide a tag in the XML file to represent a + // NULL value? // Or does it need a special marker like "$null" to signify a NULL value? Object param = params.get(i); stmt.setObject( diff --git a/src/main/java/com/oltpbenchmark/util/ResultWriter.java b/src/main/java/com/oltpbenchmark/util/ResultWriter.java index 46864a0f3..d698994d1 100644 --- a/src/main/java/com/oltpbenchmark/util/ResultWriter.java +++ b/src/main/java/com/oltpbenchmark/util/ResultWriter.java @@ -93,6 +93,7 @@ public void writeSummary(PrintStream os) { summaryMap.put("DBMS Type", dbType); summaryMap.put("DBMS Version", collector.collectVersion()); summaryMap.put("Benchmark Type", benchType); + summaryMap.put("Final State", results.getState()); summaryMap.put("Measured Requests", results.getMeasuredRequests()); for (String field : BENCHMARK_KEY_FIELD) { summaryMap.put(field, expConf.getString(field)); diff --git a/src/main/java/com/oltpbenchmark/util/SQLUtil.java b/src/main/java/com/oltpbenchmark/util/SQLUtil.java index 7a157a941..88346adca 100644 --- a/src/main/java/com/oltpbenchmark/util/SQLUtil.java +++ b/src/main/java/com/oltpbenchmark/util/SQLUtil.java @@ -677,13 +677,21 @@ public static boolean isDuplicateKeyException(Exception ex) { return (true); } else if (ex instanceof SQLException) { SQLException sqlEx = (SQLException) ex; + String sqlState = sqlEx.getSQLState(); + String sqlMessage = sqlEx.getMessage(); // POSTGRES - if (sqlEx.getSQLState().contains("23505")) { + if (sqlState != null && sqlState.contains("23505")) { return (true); } // SQLSERVER - else if (sqlEx.getSQLState().equals("23000") && sqlEx.getErrorCode() == 2627) { + else if (sqlState != null && sqlState.equals("23000") && sqlEx.getErrorCode() == 2627) { + return (true); + } + // SQLITE + else if (sqlEx.getErrorCode() == 19 + && sqlMessage != null + && sqlMessage.contains("SQLITE_CONSTRAINT_UNIQUE")) { return (true); } }