diff --git a/examples/cache-warmer/cmd.sh b/examples/cache-warmer/cmd.sh
index c5cdb55..8174b61 100755
--- a/examples/cache-warmer/cmd.sh
+++ b/examples/cache-warmer/cmd.sh
@@ -3,6 +3,10 @@
 set -eou pipefail
 
 export LOCK_FILE="/tmp/scyllaridae-cache.lock"
+cleanup() {
+  rm -f "$LOCK_FILE" links.xml pc.json
+}
+trap cleanup EXIT
 
 # how many cURL commands to run in parallel for /node/\d+
 if [ ! -v NODE_PARALLEL_EXECUTIONS ] || [ "$NODE_PARALLEL_EXECUTIONS" = "" ]; then
@@ -14,13 +18,6 @@ if [ ! -v IIIF_PARALLEL_EXECUTIONS ] || [ "$IIIF_PARALLEL_EXECUTIONS" = "" ]; th
   IIIF_PARALLEL_EXECUTIONS=3
 fi
 
-
-handle_error() {
-  rm -f "$LOCK_FILE"
-  exit 1
-}
-trap 'handle_error' ERR
-
 # curl wrapper function so on 302 we can forward the cache-warmer paramater
 process_url() {
   local URL="$1"
@@ -93,8 +90,6 @@ while true; do
   fi
 done
 
-rm -f links.xml
-
 # now that the sitemap is warm, get all the IIIF paged content manifests warm
 curl -s "$DRUPAL_URL/api/v1/paged-content" > pc.json
 mapfile -t NIDS < <(jq -r '.[]' pc.json)
@@ -116,7 +111,3 @@ for NID in "${NIDS[@]}"; do
     wait "$job_id" || echo "One job failed, but continuing anyway"
   done
 done
-
-rm -f pc.json
-
-rm "$LOCK_FILE"
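
Note on the change above: a single EXIT trap replaces the old ERR trap plus the
rm calls scattered through the success path. Bash runs an EXIT trap when the
script exits for any reason, including an early exit triggered by set -e, so one
cleanup function covers both the success and failure paths and the temp files
can no longer be leaked by an early return. A minimal standalone sketch of the
same pattern, assuming Bash; the file name here is illustrative, not taken from
the diff:

    #!/usr/bin/env bash
    set -eou pipefail

    # hypothetical scratch file standing in for links.xml / pc.json
    SCRATCH_FILE="/tmp/example-scratch.json"

    cleanup() {
      # runs on normal exit and on set -e failures alike
      rm -f "$SCRATCH_FILE"
    }
    trap cleanup EXIT

    echo '{}' > "$SCRATCH_FILE"
    # ... work that may fail goes here; cleanup still runs either way ...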