
Use git rev-parse --show-toplevel for the root dir.
Makes it so that the test scripts do not necessarily have to be run from
the top level of the repository.
mccheah committed Jan 9, 2018
1 parent dbd5643 commit 8ff354d
Showing 1 changed file with 20 additions and 26 deletions.
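
The commit title refers to git rev-parse --show-toplevel, which prints the absolute path of the repository root from any subdirectory of a checkout. Below is a minimal sketch of the idiom on its own, not taken from the script; the example path in the comment is hypothetical.

  # Resolve the repository root from wherever the current directory happens to be;
  # the command fails if run outside a git checkout.
  TEST_ROOT=$(git rev-parse --show-toplevel)
  echo "Repository root: $TEST_ROOT"            # e.g. /home/user/spark-integration
  echo "Spark clone location: $TEST_ROOT/spark"
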
e2e/runner.sh (46 changes: 20 additions & 26 deletions)
@@ -24,12 +24,7 @@ usage () {
   echo " The deployment mode can be specified using the 'd' flag."
 }
 
-### Basic Validation ###
-if [ ! -d "integration-test" ]; then
-  echo "This script must be invoked from the top-level directory of the integration-tests repository"
-  usage
-  exit 1
-fi
+cd "$(dirname "$0")"
 
 ### Set sensible defaults ###
 REPO="https://github.com/apache/spark"
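
In the hunk above, the hard-coded check that the caller is already in the repository top level (with its usage message and exit) is replaced by a single cd "$(dirname "$0")", which moves into the directory containing the script itself. A small illustration of that idiom, separate from the diff; the echo is only there to show the effect.

  # $0 is the path the script was invoked with; dirname strips the file name,
  # so this works for ./runner.sh, e2e/runner.sh, or an absolute path alike
  # (symlinked invocations are not resolved by plain dirname).
  cd "$(dirname "$0")"
  echo "Running from: $(pwd)"
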
@@ -79,44 +74,43 @@ echo "Running tests on cluster $MASTER against $REPO."
 echo "Spark images will be created in $IMAGE_REPO"
 
 set -ex
-root=$(pwd)
 
+TEST_ROOT=$(git rev-parse --show-toplevel)
+SPARK_REPO_ROOT="$TEST_ROOT/spark"
 # clone spark distribution if needed.
-if [ -d "spark" ];
+if [ -d "$SPARK_REPO_ROOT" ];
 then
-  (cd spark && git pull origin $BRANCH);
+  (cd $SPARK_REPO_ROOT && git pull origin $BRANCH);
 else
-  git clone $REPO;
+  git clone $REPO $SPARK_REPO_ROOT
 fi
 
-cd spark
+cd $SPARK_REPO_ROOT
 git checkout -B $BRANCH origin/$BRANCH
 ./dev/make-distribution.sh --tgz -Phadoop-2.7 -Pkubernetes -DskipTests
-tag=$(git rev-parse HEAD | cut -c -6)
-echo "Spark distribution built at SHA $tag"
+TAG=$(git rev-parse HEAD | cut -c -6)
+echo "Spark distribution built at SHA $TAG"
 
 if [[ $DEPLOY_MODE == cloud ]] ;
 then
-  cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag build
+  cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG build
   if [[ $IMAGE_REPO == gcr.io* ]] ;
   then
-    gcloud docker -- push $IMAGE_REPO/spark-driver:$tag && \
-    gcloud docker -- push $IMAGE_REPO/spark-executor:$tag && \
-    gcloud docker -- push $IMAGE_REPO/spark-init:$tag
+    gcloud docker -- push $IMAGE_REPO/spark-driver:$TAG && \
+    gcloud docker -- push $IMAGE_REPO/spark-executor:$TAG && \
+    gcloud docker -- push $IMAGE_REPO/spark-init:$TAG
   else
-    ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag push
+    ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG push
   fi
 else
   # -m option for minikube.
-  cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $tag build
+  cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $TAG build
 fi
 
-cd $root/integration-test
-$root/spark/build/mvn clean -Ddownload.plugin.skip=true integration-test \
-  -Dspark-distro-tgz=$root/spark/*.tgz \
+$TEST_ROOT/integration-test/build/mvn clean -Ddownload.plugin.skip=true integration-test \
+  -Dspark-distro-tgz=$SPARK_REPO_ROOT/*.tgz \
   -DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://$MASTER \
-  -Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$tag \
-  -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$tag \
-  -Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$tag" || :
+  -Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$TAG \
+  -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$TAG \
+  -Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$TAG" || :
 
 echo "TEST SUITE FINISHED"

