Commit

review
maropu committed Mar 30, 2021
1 parent 267dbed commit 0a19585
Showing 2 changed files with 22 additions and 13 deletions.
19 changes: 14 additions & 5 deletions .github/workflows/build_and_test.yml
@@ -435,23 +435,32 @@ jobs:
     steps:
     - name: Checkout Spark repository
       uses: actions/checkout@v2
+    - name: Cache TPC-DS generated data
+      id: cache-tpcds-sf-1
+      uses: actions/cache@v2
+      with:
+        path: ./tpcds-sf-1
+        key: tpcds-${{ hashFiles('tpcds-sf-1/.spark-tpcds-sf-1.md5') }}
+        restore-keys: |
+          tpcds-
     - name: Checkout TPC-DS (SF=1) generated data repository
+      if: steps.cache-tpcds-sf-1.outputs.cache-hit != 'true'
       uses: actions/checkout@v2
       with:
         repository: maropu/spark-tpcds-sf-1
         path: ./tpcds-sf-1
-    - name: Cache TPC-DS generated data
+    - name: Cache Coursier local repository
       uses: actions/cache@v2
       with:
-        path: ./tpcds-sf-1
-        key: tpcds-${{ hashFiles('tpcds-sf-1/.spark-tpcds-sf-1.md5') }}
+        path: ~/.cache/coursier
+        key: tpcds-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
         restore-keys: |
-          tpcds-
+          tpcds-coursier-
     - name: Install Java 8
       uses: actions/setup-java@v1
       with:
         java-version: 8
-    - name: Run TPCDSQueryTestSuite
+    - name: Run TPC-DS queries
       run: |
         SPARK_TPCDS_DATA=`pwd`/tpcds-sf-1 build/sbt "sql/testOnly org.apache.spark.sql.TPCDSQueryTestSuite"
     - name: Upload test results to report
16 changes: 8 additions & 8 deletions sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala
@@ -53,8 +53,8 @@ import org.apache.spark.sql.test.TestSparkSession
  */
 class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelper {
 
-  private val tpcdsDataPath = System.getenv("SPARK_TPCDS_DATA")
-  private val regenerateGoldenFiles = System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
+  private val tpcdsDataPath = sys.env.get("SPARK_TPCDS_DATA")
+  private val regenerateGoldenFiles = sys.env.get("SPARK_GENERATE_GOLDEN_FILES").exists(_ == "1")
 
   // To make output results deterministic
   protected override def sparkConf: SparkConf = super.sparkConf
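
A side note on the hunk above (not part of the patch): `System.getenv` returns a possibly-null `String`, whereas `sys.env.get` returns an `Option[String]`, which is what allows the `nonEmpty`, `get`, and `exists` calls used later in the suite. A minimal Scala sketch, with `SOME_PATH` and `SOME_FLAG` as placeholder variable names:

// Sketch only: contrasts null-based and Option-based environment lookups.
val rawPath: String = System.getenv("SOME_PATH")        // null when the variable is unset
val optPath: Option[String] = sys.env.get("SOME_PATH")  // None when the variable is unset

// `exists(_ == "1")` is false for None and for any value other than "1",
// so the flag is enabled only when the variable is set to exactly "1".
val flagEnabled: Boolean = sys.env.get("SOME_FLAG").exists(_ == "1")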
@@ -67,12 +67,12 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
   // We use SF=1 table data here, so we cannot use SF=100 stats
   protected override val injectStats: Boolean = false
 
-  if (tpcdsDataPath != null) {
+  if (tpcdsDataPath.nonEmpty) {
     val nonExistentTables = tableNames.filterNot { tableName =>
-      Files.exists(Paths.get(s"$tpcdsDataPath/$tableName"))
+      Files.exists(Paths.get(s"${tpcdsDataPath.get}/$tableName"))
     }
     if (nonExistentTables.nonEmpty) {
-      fail(s"Non-existent TPCDS table paths found in $tpcdsDataPath: " +
+      fail(s"Non-existent TPCDS table paths found in ${tpcdsDataPath.get}: " +
         nonExistentTables.mkString(", "))
     }
   }
@@ -92,7 +92,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
       s"""
          |CREATE TABLE `$tableName` (${tableColumns(tableName)})
          |USING $format
-         |LOCATION '$tpcdsDataPath/$tableName'
+         |LOCATION '${tpcdsDataPath.get}/$tableName'
          |${options.mkString("\n")}
        """.stripMargin)
   }
@@ -134,7 +134,7 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
       assertResult(expectedOutput, s"Result did not match\n$queryString") { outputString }
     }
 
-  if (tpcdsDataPath != null) {
+  if (tpcdsDataPath.nonEmpty) {
     tpcdsQueries.foreach { name =>
       val queryString = resourceToString(s"tpcds/$name.sql",
         classLoader = Thread.currentThread().getContextClassLoader)
@@ -153,6 +153,6 @@ class TPCDSQueryTestSuite extends QueryTest with TPCDSBase with SQLQueryTestHelp
       }
     }
   } else {
-    ignore("skipped because env `SPARK_TPCDS_DATA` is not given") {}
+    ignore("skipped because env `SPARK_TPCDS_DATA` is not set") {}
   }
 }
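
For context (an illustration, not code from this commit): the `if (tpcdsDataPath.nonEmpty) ... else ignore(...)` structure above registers either the real TPC-DS query tests or a single ignored placeholder, depending on the environment. A self-contained ScalaTest sketch of that env-gated pattern, using the hypothetical names `ExampleEnvGatedSuite` and `DATA_DIR`:

import java.io.File
import org.scalatest.funsuite.AnyFunSuite

// Sketch only: an env-gated suite in the style of TPCDSQueryTestSuite.
// `ExampleEnvGatedSuite` and `DATA_DIR` are illustrative, not from the patch.
class ExampleEnvGatedSuite extends AnyFunSuite {

  private val dataDir: Option[String] = sys.env.get("DATA_DIR")

  if (dataDir.nonEmpty) {
    test("data directory exists") {
      // Runs only when DATA_DIR is set; fails if the path is not a directory.
      assert(new File(dataDir.get).isDirectory)
    }
  } else {
    // Shows up as an ignored test in the report instead of silently vanishing.
    ignore("skipped because env `DATA_DIR` is not set") {}
  }
}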
