forked from NationalSecurityAgency/datawave
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbootstrap.sh
386 lines (299 loc) · 16.9 KB
/
bootstrap.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
# Sourced by env.sh
# This bootstrap sources both bootstrap-ingest.sh and bootstrap-web.sh, so that
# higher level scripts can harness both of those services via the "datawave"
# service name, and so that there's a single place to define variables and code
# shared by both components
# Current script dir
DW_DATAWAVE_SERVICE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Source/repository root (five directory levels above this script)
DW_DATAWAVE_SOURCE_DIR="$( cd "${DW_DATAWAVE_SERVICE_DIR}/../../../../.." && pwd )"
# Comma-delimited list of Accumulo authorizations to grant DataWave's Accumulo user. These will be automatically
# assigned to that user with a dynamically generated accumulo-shell script during the DataWave install. Override the
# default list as needed for whatever auths your test data requires. Defaults to authorizations known to exist on our
# canned example data. Should be exhaustive for any and all known/required auths. Otherwise, you will not be able to
# view the data in Accumulo Shell
DW_DATAWAVE_ACCUMULO_AUTHS="${DW_DATAWAVE_ACCUMULO_AUTHS:-PUBLIC,PRIVATE,FOO,BAR,DEF}"
# Import DataWave Web test user configuration
source "${DW_DATAWAVE_SERVICE_DIR}/bootstrap-user.sh"
# Selected Maven profile for the DataWave build
DW_DATAWAVE_BUILD_PROFILE=${DW_DATAWAVE_BUILD_PROFILE:-dev}
# Maven command. NOTE: this is executed via 'eval' in buildDataWave, so override it
# as one single string
DW_DATAWAVE_BUILD_COMMAND="${DW_DATAWAVE_BUILD_COMMAND:-mvn -P${DW_DATAWAVE_BUILD_PROFILE} -Ddeploy -Dtar -Ddist -DskipTests -DskipITs clean install}"
# Home of any temp data and *.properties file overrides for this instance of DataWave
DW_DATAWAVE_DATA_DIR="${DW_CLOUD_DATA}/datawave"
# Temp dir for persisting our dynamically-generated ${DW_DATAWAVE_BUILD_PROFILE}.properties file
DW_DATAWAVE_BUILD_PROPERTIES_DIR="${DW_DATAWAVE_DATA_DIR}/build-properties"
# Build progress/output is captured here and scanned for "BUILD SUCCESS"
DW_DATAWAVE_BUILD_STATUS_LOG="${DW_DATAWAVE_BUILD_PROPERTIES_DIR}/build-progress.tmp"
# 'find -path'-style glob patterns used to locate the built ingest/web tarballs
DW_DATAWAVE_INGEST_TARBALL="*/datawave-${DW_DATAWAVE_BUILD_PROFILE}-*-dist.tar.gz"
DW_DATAWAVE_WEB_TARBALL="*/datawave-ws-deploy-application-*-${DW_DATAWAVE_BUILD_PROFILE}.tar.gz"
# PKI material for the web service; defaults to the test certificates shipped in-tree
DW_DATAWAVE_KEYSTORE="${DW_DATAWAVE_KEYSTORE:-${DW_DATAWAVE_SOURCE_DIR}/web-services/deploy/application/src/main/wildfly/overlay/standalone/configuration/certificates/testServer.p12}"
DW_DATAWAVE_KEYSTORE_TYPE="${DW_DATAWAVE_KEYSTORE_TYPE:-PKCS12}"
DW_DATAWAVE_TRUSTSTORE="${DW_DATAWAVE_TRUSTSTORE:-${DW_DATAWAVE_SOURCE_DIR}/web-services/deploy/application/src/main/wildfly/overlay/standalone/configuration/certificates/ca.jks}"
DW_DATAWAVE_TRUSTSTORE_TYPE="${DW_DATAWAVE_TRUSTSTORE_TYPE:-JKS}"
# Accumulo shell script for initializing whatever we may need in Accumulo for DataWave.
# NOTE: the default is a multi-line string; DW_DATAWAVE_ACCUMULO_AUTHS is resolved at
# assignment time, so override auths before this file is sourced
DW_ACCUMULO_SHELL_INIT_SCRIPT="${DW_ACCUMULO_SHELL_INIT_SCRIPT:-
config -s table.classpath.context=datawave
createtable QueryMetrics_m
setauths -s ${DW_DATAWAVE_ACCUMULO_AUTHS}
quit
}"
function createBuildPropertiesDirectory() {
    # Ensure the staging directory for generated build *.properties files exists.
    # 'mkdir -p' is a no-op when the directory is already present, so the prior
    # '[ ! -d ... ]' guard was redundant; expansions are now quoted as well.
    # Returns 0 on success, 1 if the directory could not be created
    if ! mkdir -p "${DW_DATAWAVE_BUILD_PROPERTIES_DIR}" ; then
        error "Failed to create directory ${DW_DATAWAVE_BUILD_PROPERTIES_DIR}"
        return 1
    fi
    return 0
}
function setBuildPropertyOverrides() {
    # DataWave's build configs (*.properties) can be loaded from a variety of locations based on the
    # 'read-properties' Maven plugin configuration. Typically, the source-root/properties/*.properties
    # files are loaded first to provide default values, starting with 'default.properties', followed
    # by '{selected-profile}.properties'. Finally, ~/.m2/datawave/properties/{selected-profile}.properties
    # is loaded, if it exists, allowing you to override defaults as needed.
    #
    # This function generates a new '${DW_DATAWAVE_BUILD_PROFILE}.properties' file under
    # DW_DATAWAVE_BUILD_PROPERTIES_DIR and *symlinks* it as
    # ~/.m2/datawave/properties/${DW_DATAWAVE_BUILD_PROFILE}.properties, injecting all the overrides
    # needed for deployment under source-root/contrib/datawave-quickstart/. Any pre-existing file of
    # that name under ~/.m2/datawave/properties is backed up automatically (see
    # setBuildPropertiesSymlink) before the symlink is created.
    #
    # NOTE: the BUILD_PROPERTIES_* locals are read by setBuildPropertiesSymlink (and presumably by
    # generateTestDatawaveUserServiceConfig -- defined elsewhere) via Bash dynamic scoping
    local BUILD_PROPERTIES_BASENAME="${DW_DATAWAVE_BUILD_PROFILE}.properties"
    local BUILD_PROPERTIES_FILE="${DW_DATAWAVE_BUILD_PROPERTIES_DIR}/${BUILD_PROPERTIES_BASENAME}"
    local BUILD_PROPERTIES_SYMLINK_DIR="${HOME}/.m2/datawave/properties"
    local BUILD_PROPERTIES_SYMLINK="${BUILD_PROPERTIES_SYMLINK_DIR}/${BUILD_PROPERTIES_BASENAME}"

    # Fix: the original '! cmd && error ... && return 1' chains silently returned 0
    # whenever 'error' itself returned non-zero; explicit if-blocks fail reliably
    if ! createBuildPropertiesDirectory ; then
        error "Failed to override properties!"
        return 1
    fi

    # Create symlink directory if it doesn't exist
    if [ ! -d "${BUILD_PROPERTIES_SYMLINK_DIR}" ] && ! mkdir -p "${BUILD_PROPERTIES_SYMLINK_DIR}" ; then
        error "Failed to create symlink directory ${BUILD_PROPERTIES_SYMLINK_DIR}"
        return 1
    fi

    # Seed the new file from the stock source-root/properties/${DW_DATAWAVE_BUILD_PROFILE}.properties
    if ! cp "${DW_DATAWAVE_SOURCE_DIR}/properties/${DW_DATAWAVE_BUILD_PROFILE}.properties" "${BUILD_PROPERTIES_FILE}" ; then
        error "Aborting property overrides! Failed to copy ${DW_DATAWAVE_BUILD_PROFILE}.properties"
        return 1
    fi

    # Apply overrides by appending them to the end of the file. Grouping the echoes
    # opens the file once instead of once per line
    {
        echo "#"
        echo "######## Begin overrides for datawave-quickstart ########"
        echo "#"
        echo "WAREHOUSE_ACCUMULO_HOME=${ACCUMULO_HOME}"
        echo "WAREHOUSE_INSTANCE_NAME=${DW_ACCUMULO_INSTANCE_NAME}"
        echo "WAREHOUSE_JOBTRACKER_NODE=${DW_HADOOP_RESOURCE_MANAGER_ADDRESS}"
        echo "INGEST_ACCUMULO_HOME=${ACCUMULO_HOME}"
        echo "INGEST_INSTANCE_NAME=${DW_ACCUMULO_INSTANCE_NAME}"
        echo "INGEST_JOBTRACKER_NODE=${DW_HADOOP_RESOURCE_MANAGER_ADDRESS}"
        echo "BULK_INGEST_DATA_TYPES=${DW_DATAWAVE_INGEST_BULK_DATA_TYPES}"
        echo "LIVE_INGEST_DATA_TYPES=${DW_DATAWAVE_INGEST_LIVE_DATA_TYPES}"
        echo "PASSWORD=${DW_ACCUMULO_PASSWORD}"
        echo "ZOOKEEPER_HOME=${ZOOKEEPER_HOME}"
        echo "HADOOP_HOME=${HADOOP_HOME}"
        echo "MAPRED_HOME=${HADOOP_HOME}"
        echo "WAREHOUSE_HADOOP_CONF=${HADOOP_CONF_DIR}"
        echo "INGEST_HADOOP_CONF=${HADOOP_CONF_DIR}"
        echo "HDFS_BASE_DIR=${DW_DATAWAVE_INGEST_HDFS_BASEDIR}"
        echo "MAPRED_INGEST_OPTS=${DW_DATAWAVE_MAPRED_INGEST_OPTS}"
        echo "LOG_DIR=${DW_DATAWAVE_INGEST_LOG_DIR}"
        echo "FLAG_DIR=${DW_DATAWAVE_INGEST_FLAGFILE_DIR}"
        echo "FLAG_MAKER_CONFIG=${DW_DATAWAVE_INGEST_FLAGMAKER_CONFIGS}"
        echo "BIN_DIR_FOR_FLAGS=${DW_DATAWAVE_INGEST_HOME}/bin"
        echo "KEYSTORE=${DW_DATAWAVE_KEYSTORE}"
        echo "KEYSTORE_TYPE=${DW_DATAWAVE_KEYSTORE_TYPE}"
        echo "KEYSTORE_PASSWORD=${DW_ACCUMULO_PASSWORD}"
        echo "TRUSTSTORE=${DW_DATAWAVE_TRUSTSTORE}"
        echo "FLAG_METRICS_DIR=${DW_DATAWAVE_INGEST_FLAGMETRICS_DIR}"
        echo "TRUSTSTORE_TYPE=${DW_DATAWAVE_TRUSTSTORE_TYPE}"
        echo "accumulo.instance.name=${DW_ACCUMULO_INSTANCE_NAME}"
        echo "accumulo.user.password=${DW_ACCUMULO_PASSWORD}"
        echo "cached.results.hdfs.uri=${DW_HADOOP_DFS_URI}"
        echo "lock.file.dir=${DW_DATAWAVE_INGEST_LOCKFILE_DIR}"
        echo "server.keystore.password=${DW_ACCUMULO_PASSWORD}"
        echo "mysql.user.password=${DW_ACCUMULO_PASSWORD}"
        echo "jboss.jmx.password=${DW_ACCUMULO_PASSWORD}"
        echo "hornetq.cluster.password=${DW_ACCUMULO_PASSWORD}"
        echo "hornetq.system.password=${DW_ACCUMULO_PASSWORD}"
        echo "mapReduce.job.tracker=${DW_HADOOP_RESOURCE_MANAGER_ADDRESS}"
        echo "bulkResults.job.tracker=${DW_HADOOP_RESOURCE_MANAGER_ADDRESS}"
        echo "EVENT_DISCARD_INTERVAL=0"
        echo "ingest.data.types=${DW_DATAWAVE_INGEST_LIVE_DATA_TYPES},${DW_DATAWAVE_INGEST_BULK_DATA_TYPES}"
        echo "JOB_CACHE_REPLICATION=1"
        echo "EDGE_DEFINITION_FILE=${DW_DATAWAVE_INGEST_EDGE_DEFINITIONS}"
        echo "DATAWAVE_INGEST_HOME=${DW_DATAWAVE_INGEST_HOME}"
        echo "PASSWORD_INGEST_ENV=${DW_DATAWAVE_INGEST_PASSWD_FILE}"
        echo "hdfs.site.config.urls=file://${HADOOP_CONF_DIR}/core-site.xml,file://${HADOOP_CONF_DIR}/hdfs-site.xml"
        echo "NUM_SHARDS=${DW_DATAWAVE_INGEST_NUM_SHARDS}"
    } >> "${BUILD_PROPERTIES_FILE}"

    generateTestDatawaveUserServiceConfig

    # Apply DW_JAVA_HOME_OVERRIDE, if needed...
    # We can override the JAVA_HOME location for the DataWave deployment, if necessary. E.g., if we're
    # deploying to a Docker container or other, where our current JAVA_HOME isn't applicable
    if [ -n "${DW_JAVA_HOME_OVERRIDE}" ] ; then
        echo "JAVA_HOME=${DW_JAVA_HOME_OVERRIDE}" >> "${BUILD_PROPERTIES_FILE}"
    else
        echo "JAVA_HOME=${JAVA_HOME}" >> "${BUILD_PROPERTIES_FILE}"
    fi

    # Apply DW_ROOT_DIRECTORY_OVERRIDE, if needed...
    # We can override any instances of DW_DATAWAVE_SOURCE_DIR within the build config in order to
    # relocate the deployment, if necessary. E.g., used when building the datawave-quickstart Docker
    # image to reorient the deployment under /opt/datawave/ within the container
    if [ -n "${DW_ROOT_DIRECTORY_OVERRIDE}" ] ; then
        sed -i "s~${DW_DATAWAVE_SOURCE_DIR}~${DW_ROOT_DIRECTORY_OVERRIDE}~g" "${BUILD_PROPERTIES_FILE}"
    fi

    # Create the symlink under ~/.m2/datawave/properties
    setBuildPropertiesSymlink || return 1
}
function setBuildPropertiesSymlink() {
    # Replace any existing ~/.m2/datawave/properties/${BUILD_PROPERTIES_BASENAME} file/symlink with
    # a symlink to our new ${BUILD_PROPERTIES_FILE}. A pre-existing regular file is preserved via a
    # timestamped ".saved-by-quickstart" backup; a pre-existing symlink is simply unlinked.
    # Fix: all expansions are now quoted, so paths containing spaces (e.g. under some $HOME
    # locations) no longer break the tests/unlink/mv/ln calls.
    # NOTE: relies on Bash dynamic scoping -- BUILD_PROPERTIES_* are locals declared by the caller
    # (setBuildPropertyOverrides). Returns 0 on success, 1 on failure
    if [[ -f "${BUILD_PROPERTIES_SYMLINK}" || -L "${BUILD_PROPERTIES_SYMLINK}" ]] ; then
        if [ -L "${BUILD_PROPERTIES_SYMLINK}" ] ; then
            info "Unlinking existing symbolic link: ${BUILD_PROPERTIES_SYMLINK}"
            if ! unlink "${BUILD_PROPERTIES_SYMLINK}" ; then
                warn "Failed to unlink $( readlink "${BUILD_PROPERTIES_SYMLINK}" ) from ${BUILD_PROPERTIES_SYMLINK_DIR}"
            fi
        else
            local backupFile="${BUILD_PROPERTIES_SYMLINK}.saved-by-quickstart.$(date +%Y-%m-%d-%H%M%S)"
            info "Backing up your existing ~/.m2/**/${BUILD_PROPERTIES_BASENAME} file to ~/.m2/**/$( basename "${backupFile}" )"
            if ! mv "${BUILD_PROPERTIES_SYMLINK}" "${backupFile}" ; then
                error "Failed to backup ${BUILD_PROPERTIES_SYMLINK}. Aborting properties file override. Please fix me!!"
                return 1
            fi
        fi
    fi
    if ln -s "${BUILD_PROPERTIES_FILE}" "${BUILD_PROPERTIES_SYMLINK}" ; then
        info "Override for ${BUILD_PROPERTIES_BASENAME} successful"
    else
        error "Override for ${BUILD_PROPERTIES_BASENAME} failed"
        return 1
    fi
}
function buildRequiredPlugins() {
    # Build the in-tree Maven plugins the DataWave build depends on. Each build runs
    # in a subshell so the cwd of the calling shell is left untouched
    local plugin
    for plugin in assert-properties read-properties ; do
        ( cd "${DW_DATAWAVE_SOURCE_DIR}/contrib/${plugin}" && mvn clean install )
    done
}
function datawaveBuildSucceeded() {
    # Report success (0) iff "BUILD SUCCESS" appears within the last 7 lines of the
    # Maven build log -- Maven prints its final verdict near the end of its output.
    # Idiom fix: use 'grep -q' directly instead of capturing output and testing -z
    tail -n 7 "$DW_DATAWAVE_BUILD_STATUS_LOG" | grep -q "BUILD SUCCESS"
}
function buildDataWave() {
# Runs the Maven build defined by DW_DATAWAVE_BUILD_COMMAND from the source root,
# capturing output in DW_DATAWAVE_BUILD_STATUS_LOG.
# $1 - optional '--verbose' to also stream Maven output to the console via tee
# Returns 0 iff the log's tail reports BUILD SUCCESS
if ! mavenIsInstalled ; then
! mavenInstall && error "Maven install failed. Please correct" && return 1
fi
[[ "$1" == "--verbose" ]] && local verbose=true
# Generate/refresh the profile's *.properties overrides before building
! setBuildPropertyOverrides && error "Aborting DataWave build" && return 1
# Start with a fresh status log for this build
[ -f "${DW_DATAWAVE_BUILD_STATUS_LOG}" ] && rm -f "$DW_DATAWAVE_BUILD_STATUS_LOG"
buildRequiredPlugins
info "DataWave build in progress: '${DW_DATAWAVE_BUILD_COMMAND}'"
info "Build status log: $DW_DATAWAVE_BUILD_STATUS_LOG"
# 'eval' is required because the build command is one string containing flags;
# the subshell keeps the 'cd' from affecting the caller
if [ "${verbose}" == true ] ; then
( cd "${DW_DATAWAVE_SOURCE_DIR}" && eval "${DW_DATAWAVE_BUILD_COMMAND}" 2>&1 | tee ${DW_DATAWAVE_BUILD_STATUS_LOG} )
else
( cd "${DW_DATAWAVE_SOURCE_DIR}" && eval "${DW_DATAWAVE_BUILD_COMMAND}" &> ${DW_DATAWAVE_BUILD_STATUS_LOG} )
fi
# Success is judged by scanning the log rather than by exit status, since the
# tee pipeline above can mask Maven's return code
if ! datawaveBuildSucceeded ; then
error "The build has FAILED! See $DW_DATAWAVE_BUILD_STATUS_LOG for details"
return 1
fi
info "DataWave build successful"
return 0
}
function getDataWaveTarball() {
# Looks for a DataWave tarball matching the specified pattern and, if found, sets the global 'tarball'
# variable to its basename for the caller as expected.
# If no tarball is found matching the specified pattern, then the DataWave build is kicked off
# $1 - 'find -path'-style glob (e.g. DW_DATAWAVE_INGEST_TARBALL / DW_DATAWAVE_WEB_TARBALL)
# Returns 0 with 'tarball' set on success; 1 if the build or tarball copy fails
local tarballPattern="${1}"
tarball=""
# Check if the tarball already exists in the plugin directory.
local tarballPath="$( find "${DW_DATAWAVE_SERVICE_DIR}" -path "${tarballPattern}" -type f )"
if [ -f "${tarballPath}" ]; then
tarball="$( basename "${tarballPath}" )"
return 0;
fi
# Ensure that java is installed and JAVA_HOME set before we try to clean/build
source "${DW_CLOUD_HOME}/bin/services/java/bootstrap.sh"
! javaIsInstalled && javaInstall
# Proceed even if the Java bootstrap failed, but warn the user up front
javaIsInstalled || error "Java bootstrap failed. DataWave build may not succeed"
! buildDataWave --verbose && error "Please correct this issue before continuing" && return 1
# Build succeeded. Set global 'tarball' variable for the specified pattern and copy all tarballs into place
# ('tail -1' picks the last match in case the build produced more than one)
tarballPath="$( find "${DW_DATAWAVE_SOURCE_DIR}" -path "${tarballPattern}" -type f | tail -1 )"
[ -z "${tarballPath}" ] && error "Failed to find '${tarballPattern}' tar file after build" && return 1
tarball="$( basename "${tarballPath}" )"
# Current caller (ie, either bootstrap-web.sh or bootstrap-ingest.sh) only cares about current $tarball,
# but go ahead and copy both tarballs into datawave service dir to satisfy next caller as well
! copyDataWaveTarball "${DW_DATAWAVE_INGEST_TARBALL}" && error "Failed to copy DataWave Ingest tarball" && return 1
! copyDataWaveTarball "${DW_DATAWAVE_WEB_TARBALL}" && error "Failed to copy DataWave Web tarball" && return 1
return 0
}
function copyDataWaveTarball() {
    # Copy the newest build artifact matching the given 'find -path' glob from the
    # source tree into the quickstart service directory.
    # $1 - find-style path glob identifying the tarball
    # Returns 0 on success, 1 if no match exists or the copy fails
    local pattern="${1}"
    local dwTarball
    dwTarball="$( find "${DW_DATAWAVE_SOURCE_DIR}" -path "${pattern}" -type f | tail -1 )"
    if [ -z "${dwTarball}" ] ; then
        error "No tar file found matching '${pattern}'"
        return 1
    fi
    # Fix: the original '! cp ... && error ... && return 1' chain fell through to
    # 'return 0' whenever 'error' itself returned non-zero; fail explicitly instead
    if ! cp "${dwTarball}" "${DW_DATAWAVE_SERVICE_DIR}" ; then
        error "Failed to copy '${dwTarball}'"
        return 1
    fi
    return 0
}
# Bootstrap DW ingest and webservice components as needed
source "${DW_DATAWAVE_SERVICE_DIR}/bootstrap-ingest.sh"
source "${DW_DATAWAVE_SERVICE_DIR}/bootstrap-web.sh"
function datawaveIsRunning() {
    # DataWave counts as "running" when either the ingest or the web component
    # reports itself up; returns 0 in that case, 1 when neither is running
    if datawaveIngestIsRunning || datawaveWebIsRunning ; then
        return 0
    fi
    return 1
}
function datawaveStart() {
# Start both DataWave components: ingest first, then the web service
datawaveIngestStart
datawaveWebStart
}
function datawaveStop() {
# Stop both DataWave components: ingest first, then the web service
datawaveIngestStop
datawaveWebStop
}
function datawaveStatus() {
# Report status of both DataWave components (output format is up to each helper)
datawaveIngestStatus
datawaveWebStatus
}
function datawaveIsInstalled() {
    # DataWave counts as "installed" when either component is installed;
    # returns 0 in that case, 1 when neither is
    if datawaveIngestIsInstalled || datawaveWebIsInstalled ; then
        return 0
    fi
    return 1
}
function datawaveUninstall() {
# Uninstall both components. When the remove-binaries flag (long or short form,
# values defined elsewhere) is passed as $1, also delete any DataWave tarballs
# previously copied into the service directory
datawaveIngestUninstall
datawaveWebUninstall
[[ "${1}" == "${DW_UNINSTALL_RM_BINARIES_FLAG_LONG}" || "${1}" == "${DW_UNINSTALL_RM_BINARIES_FLAG_SHORT}" ]] && rm -f "${DW_DATAWAVE_SERVICE_DIR}"/*.tar.gz
}
function datawaveInstall() {
# Install both DataWave components: ingest first, then the web service
datawaveIngestInstall
datawaveWebInstall
}
function datawavePrintenv() {
# Print DataWave-related shell variables for diagnostics
echo
echo "DataWave Environment"
echo
# 'set -o posix' limits 'set' output to variables (omits function bodies);
# the subshell keeps the posix option from leaking into the current shell
( set -o posix ; set ) | grep -E "DATAWAVE_|WILDFLY|JBOSS"
echo
}
function datawavePidList() {
    # Refresh PID state by invoking the status checks, then emit the combined
    # web + ingest PID lists on one line. Emits nothing when both lists are empty
    datawaveIngestIsRunning
    datawaveWebIsRunning
    # Guard clause: nothing to report when neither component has PIDs
    [[ -z "${DW_DATAWAVE_WEB_PID_LIST}" && -z "${DW_DATAWAVE_INGEST_PID_LIST}" ]] && return
    echo "${DW_DATAWAVE_WEB_PID_LIST} ${DW_DATAWAVE_INGEST_PID_LIST}"
}
function datawaveBuildDeploy() {
# Full stop/uninstall/reinstall cycle for DataWave
datawaveIsRunning && info "Stopping all DataWave services" && datawaveStop
datawaveIsInstalled && info "Uninstalling DataWave" && datawaveUninstall --remove-binaries
# NOTE(review): resetQuickstartEnvironment is defined elsewhere; presumably it
# re-sources/refreshes the quickstart environment before the reinstall -- confirm
resetQuickstartEnvironment
# Flag visible to the install scripts so they can tell a redeploy from a first install
export DW_REDEPLOY_IN_PROGRESS=true
datawaveInstall
export DW_REDEPLOY_IN_PROGRESS=false
}
function datawaveBuild() {
info "Building DataWave"
# Remove any previously staged tarballs so a fresh build output gets picked up
rm -f "${DW_DATAWAVE_SERVICE_DIR}"/datawave*.tar.gz
# NOTE(review): resetQuickstartEnvironment is defined elsewhere; presumably
# re-sourcing the environment is what triggers the rebuild -- confirm
resetQuickstartEnvironment
}