Artemis Staging Deployment #14434
name: Artemis Staging Deployment
on:
  workflow_dispatch:
    inputs:
      branch:
        description: 'Branch to deploy'
        required: true
concurrency: staging
env:
  build_workflow_name: build.yml
jobs:
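  # Confirms that the most recent run of the build workflow on the selected
  # branch produced an Artemis.war artifact before any deployment starts.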
  check-build-status:
    runs-on: ubuntu-latest
    outputs:
      build_workflow_run_id: ${{ steps.set_build_workflow_id.outputs.workflow_id }}
    steps:
      - name: Get latest build workflow run
        id: get_workflow_run
        uses: octokit/[email protected]
        with:
          route: GET /repos/${{ github.repository }}/actions/workflows/${{ env.build_workflow_name }}/runs?branch=${{ github.event.inputs.branch }}&per_page=1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Set build workflow ID
        id: set_build_workflow_id
        run: |
          # Extract the run ID of the newest build workflow run from the API response
          WORKFLOW_ID=$(echo '${{ steps.get_workflow_run.outputs.data }}' | jq -r '.workflow_runs[0].id')
          echo "workflow_id=$WORKFLOW_ID" >> $GITHUB_OUTPUT
      - name: Check for war artifact
        id: verify_artifact
        uses: octokit/[email protected]
        with:
          route: GET /repos/${{ github.repository }}/actions/runs/${{ steps.set_build_workflow_id.outputs.workflow_id }}/artifacts?name=Artemis.war
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Verify artifact exists
        id: check_result
        run: |
          TOTAL_COUNT=$(echo '${{ steps.verify_artifact.outputs.data }}' | jq -r '.total_count')
          if [ "$TOTAL_COUNT" -gt 0 ]; then
            echo "Found Artemis.war artifact in latest build"
          else
            echo "::error::No Artemis.war artifact found in latest build!"
            exit 1
          fi
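  # Rolling deployment: stop the secondary nodes, deploy and health-check the
  # primary node, then roll the new WAR out to the secondaries.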
  deploy:
    needs: check-build-status
    runs-on: [self-hosted, ase-large-ubuntu]
    environment:
      name: artemis-staging-localci.artemis.cit.tum.de
      url: ${{ vars.DEPLOYMENT_URL }}
    env:
      DEPLOYMENT_HOSTS_PRIMARY: ${{ vars.DEPLOYMENT_HOSTS_PRIMARY }}
      DEPLOYMENT_HOSTS_SECONDARY: ${{ vars.DEPLOYMENT_HOSTS_SECONDARY }}
      DEPLOYMENT_USER: ${{ vars.DEPLOYMENT_USER }}
      DEPLOYMENT_FOLDER: ${{ vars.DEPLOYMENT_FOLDER }}
      HEALTH_CHECK_URL: "${{ vars.DEPLOYMENT_URL }}/management/health"
      WORKFLOW_RUN_ID: ${{ needs.check-build-status.outputs.build_workflow_run_id }}
    timeout-minutes: 10
    steps:
      - name: Clean workspace
        run: |
          echo "[INFO] Cleaning workspace..."
          rm -rf artifacts/
          rm -rf ./*
          mkdir -p artifacts
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: Artemis.war
          path: artifacts
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ env.WORKFLOW_RUN_ID }}
      - name: Setup SSH and Known Hosts
        env:
          DEPLOYMENT_SSH_KEY: ${{ secrets.DEPLOYMENT_SSH_KEY }}
          SSH_AUTH_SOCK: /tmp/ssh_agent_${{ github.run_id }}.sock
          DEPLOYMENT_HOST_PUBLIC_KEYS: ${{ vars.DEPLOYMENT_HOST_PUBLIC_KEYS }}
        run: |
          mkdir -p ~/.ssh
          chmod 700 ~/.ssh
          # Write the private key, converting literal "\n" sequences into real newlines
          echo "$DEPLOYMENT_SSH_KEY" | sed 's/\\n/\n/g' > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          # Write known hosts
          echo "$DEPLOYMENT_HOST_PUBLIC_KEYS" > ~/.ssh/known_hosts
          chmod 644 ~/.ssh/known_hosts
          # Test SSH connection
          echo "Testing SSH connection..."
          ssh -v -i ~/.ssh/id_rsa $DEPLOYMENT_USER@$DEPLOYMENT_HOSTS_PRIMARY 'echo "test"'
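      # Phase 1: take the secondary nodes out of service before the primary is updated.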
      - name: Phase 1 - Stop Secondary Nodes
        run: |
          # Normalize the multi-line hosts variable into a single space-separated list
          HOSTS_SPACE_SEPARATED=$(echo "$DEPLOYMENT_HOSTS_SECONDARY" | tr -d '\r' | tr '\n' ' ' | awk '{$1=$1};1')
          echo "Debug: Hosts list: $HOSTS_SPACE_SEPARATED"
          for node in $HOSTS_SPACE_SEPARATED
          do
            SSH="ssh -i ~/.ssh/id_rsa -l $DEPLOYMENT_USER $node"
            echo "[INFO] Stop artemis.service on ${node} ..."
            $SSH sudo systemctl stop artemis
          done
      - name: Phase 1 - Deploy to Primary Node
        run: |
          echo "[INFO] Deploy on $DEPLOYMENT_HOSTS_PRIMARY ..."
          SSH="ssh -o LogLevel=ERROR -i ~/.ssh/id_rsa -l $DEPLOYMENT_USER $DEPLOYMENT_HOSTS_PRIMARY"
          # Store the WAR file name
          WAR_FILE=$(ls -1 artifacts/*.war | head -n 1)
          # Check that the artifacts directory contains the WAR file
          echo "[INFO] Checking local artifacts..."
          ls -la artifacts/
          if [ ! -f "$WAR_FILE" ]; then
            echo "Error: No WAR file found in artifacts directory"
            exit 1
          fi
          # Check that the remote directory exists and is writable
          echo "[INFO] Checking remote directory..."
          $SSH "if [ ! -d /opt/artemis ]; then echo 'Error: /opt/artemis directory does not exist'; exit 1; fi"
          $SSH "if [ ! -w /opt/artemis ]; then echo 'Error: /opt/artemis directory is not writable'; exit 1; fi"
          # Remove the old backup if it exists
          echo "[INFO] Remove old artemis.war ..."
          $SSH "rm -f /opt/artemis/artemis.war.old"
          # Copy the new artemis.war to the node
          echo "[INFO] Copy new artemis.war ..."
          if ! scp -v -i ~/.ssh/id_rsa "$WAR_FILE" $DEPLOYMENT_USER@$DEPLOYMENT_HOSTS_PRIMARY:/opt/artemis/artemis.war.new; then
            echo "Error: Failed to copy WAR file"
            exit 1
          fi
          # Verify the file was copied successfully
          echo "[INFO] Verify new WAR file..."
          $SSH "if [ ! -f /opt/artemis/artemis.war.new ]; then echo 'Error: WAR file not found after copy'; exit 1; fi"
          # Stop the Artemis service on the node
          echo "[INFO] Stop artemis.service ..."
          $SSH sudo systemctl stop artemis
          # Replace the old artemis.war
          echo "[INFO] Rename old artemis.war ..."
          $SSH mv /opt/artemis/artemis.war /opt/artemis/artemis.war.old || true
          echo "[INFO] Rename new artemis.war ..."
          $SSH mv /opt/artemis/artemis.war.new /opt/artemis/artemis.war
          # Start the Artemis service on the node
          echo "[INFO] Start artemis.service ..."
          $SSH sudo systemctl start artemis
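      # Poll the health endpoint until the application reports UP; the job-level
      # timeout-minutes (10) aborts the run if the primary never becomes healthy.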
      - name: Verify Primary Node Deployment
        id: verify_deployment
        run: |
          while true; do
            echo "Performing health check..."
            RESPONSE=$(curl -s -f $HEALTH_CHECK_URL || echo '{"status":"DOWN"}')
            STATUS=$(echo $RESPONSE | grep -o '"status":"[^"]*"' | cut -d'"' -f4)
            if [ "$STATUS" = "UP" ]; then
              echo "Health check passed! Application is UP"
              exit 0
            else
              echo "Health check failed. Status: $STATUS"
              echo "Waiting 10 seconds before next attempt..."
              sleep 10
            fi
          done
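      # Phase 2: roll the same WAR out to each secondary node in turn.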
      - name: Phase 2 - Deploy to Secondary Nodes
        run: |
          HOSTS_SPACE_SEPARATED=$(echo "$DEPLOYMENT_HOSTS_SECONDARY" | tr -d '\r' | tr '\n' ' ' | awk '{$1=$1};1')
          WAR_FILE=$(ls -1 artifacts/*.war | head -n 1)
          echo "Debug: Hosts list: $HOSTS_SPACE_SEPARATED"
          for node in $HOSTS_SPACE_SEPARATED
          do
            echo "##################################################################################################"
            echo "[INFO] Deploy on $node ..."
            echo "##################################################################################################"
            # Build the SSH command for this node
            SSH="ssh -o LogLevel=ERROR -i ~/.ssh/id_rsa -l $DEPLOYMENT_USER $node"
            # Remove the old backup
            echo "[INFO] Remove old artemis.war ..."
            $SSH "rm -f /opt/artemis/artemis.war.old"
            # Copy the new artemis.war to the node
            echo "[INFO] Copy new artemis.war ..."
            scp -i ~/.ssh/id_rsa "$WAR_FILE" "$DEPLOYMENT_USER@$node:/opt/artemis/artemis.war.new"
            # Stop the Artemis service on the node
            echo "[INFO] Stop artemis.service ..."
            $SSH "sudo systemctl stop artemis"
            # Replace the old artemis.war
            echo "[INFO] Rename old artemis.war ..."
            $SSH "mv /opt/artemis/artemis.war /opt/artemis/artemis.war.old || true"
            echo "[INFO] Rename new artemis.war ..."
            $SSH "mv /opt/artemis/artemis.war.new /opt/artemis/artemis.war"
            # Start the Artemis service on the node
            echo "[INFO] Start artemis.service ..."
            $SSH "sudo systemctl start artemis"
          done