adding detection for domain root ACL modification #17603

#This file makes use of a number of useful, external Github Actions.
#Check the links below for additional documentation on each of these:
#https://github.com/actions/setup-python
#https://github.com/actions/setup-node
#https://github.com/actions/checkout
#https://github.com/actions/upload-artifact
#The mechanism for persisting data between jobs in a workflow is the same as for persisting it
#permanently:
#https://docs.github.com/en/actions/guides/storing-workflow-data-as-artifacts
#In CircleCI, this was different (store_artifacts vs persist_to_workspace)
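#As an illustrative sketch only (not one of this workflow's jobs), handing a file from one job
#to a later job that `needs:` it looks roughly like this:
#  - uses: actions/upload-artifact@v3
#    with:
#      name: my-data            # hypothetical artifact name
#      path: build/output.txt   # hypothetical file produced by the first job
#  # ...then, in the downstream job:
#  - uses: actions/download-artifact@v3
#    with:
#      name: my-data
#      path: build/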
name: build-and-validate
on:
push:
pull_request:
types: [opened, reopened]
jobs:
validate-tag-if-present:
runs-on: ubuntu-latest
steps:
- name: TAGGED, Validate that the tag is in the correct format
run: |
echo "The GITHUB_REF: $GITHUB_REF"
#First check to see if the release is a tag
if [[ $GITHUB_REF =~ ^refs/tags/ ]]; then
#Yes, this is a tag, so we need to test to make sure that the tag
#is in the correct format (like v1.10.20)
if [[ $GITHUB_REF =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+ ]]; then
echo "PASS: Tagged release with good format"
exit 0
else
echo "FAIL: Tagged release with bad format"
exit 1
fi
else
echo "PASS: Not a tagged release"
exit 0
fi
validate-and-build:
#Note that the CircleCI job used a Container. The way to do this with Github Actions
#is to first start up a Virtual Machine, then run the job inside a container by following:
# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idcontainer
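#A minimal sketch of that container-based approach (assumed image name, not what this job actually does) would be:
#  validate-and-build:
#    runs-on: ubuntu-latest
#    container:
#      image: python:3.9   # hypothetical image; this job installs its tooling on the VM instead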
runs-on: ubuntu-latest
needs: [validate-tag-if-present]
steps:
- name: Configure Enrichment and Tag
id: vars
run: |
if echo "${GITHUB_REF}" | grep -q "^refs/tags/"; then
#Found the refs/tags/ prefix, so grab and set the tag
echo "Release is TAGGED!"
echo "::set-output name=tag::${GITHUB_REF#refs/tags/}"
echo "::set-output name=skip_enrichment_var::"
else
#Not a tagged release
echo "Release is NOT TAGGED!"
echo "::set-output name=tag::"
echo "::set-output name=skip_enrichment_var::--skip_enrichment"
fi
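#Note: this step relies on the ::set-output workflow command; on runners where that is
#deprecated, the equivalent (an assumption, not what is used here) is appending to the GITHUB_OUTPUT file:
#  echo "tag=${GITHUB_REF#refs/tags/}" >> "$GITHUB_OUTPUT"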
#The previous CircleCI config chose which branch/tag to operate on explicitly.
#actions/checkout defaults to the ref that triggered the workflow (the pushed branch/tag, or the PR merge ref), so no explicit ref is needed here.
- name: Check out the repository code
uses: actions/checkout@v3
#with:
# repository: splunk/security-content #check out https://github.com/mitre/cti.git, defaults to HEAD
# path: "security-content"
- uses: actions/setup-python@v4
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf .venv
python -m venv --clear .venv
source .venv/bin/activate
python -m pip install --upgrade pip
python -m pip install wheel
python -m pip install -q -r requirements.txt
- name: content_ctl validate
run: |
source .venv/bin/activate
python contentctl.py -p . ${{ steps.vars.outputs.skip_enrichment_var }} validate -pr ESCU
python contentctl.py -p . ${{ steps.vars.outputs.skip_enrichment_var }} validate -pr SSA
- name: contentctl generate
run: |
source .venv/bin/activate
rm -rf dist/escu/default/data/ui/panels/*.xml
python contentctl.py --path . ${{ steps.vars.outputs.skip_enrichment_var }} generate --product ESCU --output dist/escu
python contentctl.py --path . ${{ steps.vars.outputs.skip_enrichment_var }} generate --product SSA --output dist/ssa
python contentctl.py --path . ${{ steps.vars.outputs.skip_enrichment_var }} generate --product API --output dist/api
- name: Copy lookups .mlmodel files
run: |
cd lookups
count=$(ls -1 *.mlmodel 2>/dev/null | wc -l)
if [ "$count" -ne 0 ]
then
cp -rv *.mlmodel ../dist/escu/lookups
else
echo "No mlmodel files to copy"
fi
- name: Update Version and Build number
run: |
# check if tag is set, get build number from the tag if set
if [ -z "${{ steps.vars.outputs.tag }}" ]; then
CONTENT_VERSION=$(grep -oP "(\d+\.\d+\.\d+$)" dist/escu/default/content-version.conf)
echo "detected content version: $CONTENT_VERSION"
else
CONTENT_VERSION=$(echo ${{ steps.vars.outputs.tag }} | grep -oP "\d+\.\d+\.\d+")
echo "content version: $CONTENT_VERSION, set by tag: ${{ steps.vars.outputs.tag }}"
fi
# update build number and version for ESCU
sed -i "s/build = .*$/build = ${{ github.run_number }}/g" dist/escu/default/app.conf
sed -i "s/^version = .*$/version = $CONTENT_VERSION/g" dist/escu/default/app.conf
sed -i "s/\"version\": .*$/\"version\": \"$CONTENT_VERSION\"/g" dist/escu/app.manifest
sed -i "s/version = .*$/version = $CONTENT_VERSION/g" dist/escu/default/content-version.conf
mkdir build
tar -czf build/content-pack-build-escu.tar.gz dist/escu/*
# update build number and version for ssa
tar -czf build/content-pack-build-ssa.tar.gz dist/ssa/*
tar -czf build/content-pack-build-api.tar.gz dist/api/*
- name: Build ESCU
run: |
source .venv/bin/activate
cd build
tar -zxf content-pack-build-escu.tar.gz
tar -zxf content-pack-build-ssa.tar.gz
tar -zxf content-pack-build-api.tar.gz
mv dist/escu DA-ESS-ContentUpdate
mv dist/ssa SSA_Content
mv dist/api API_Content
#Build ESCU Content
#Do not use slim for speed, simplicity, and compatibility
tar -zcf DA-ESS-ContentUpdate-latest.tar.gz DA-ESS-ContentUpdate
sha256sum DA-ESS-ContentUpdate-latest.tar.gz > checksum.txt
#Build the SSA Content
#Do not use slim for speed, simplicity, and compatibility
tar -zcf SSA_Content-latest.tar.gz SSA_Content
sha256sum SSA_Content-latest.tar.gz >> checksum.txt
#Package the API Content
tar -zcf API_Content-latest.tar.gz API_Content
sha256sum API_Content-latest.tar.gz >> checksum.txt
- name: store_artifacts
uses: actions/upload-artifact@v3
with:
name: content-latest
path: |
build/DA-ESS-ContentUpdate-latest.tar.gz
build/SSA_Content-latest.tar.gz
build/API_Content-latest.tar.gz
build/checksum.txt
#Everything below this line should ONLY run on a tag and nothing else
#We still want all of the above checks to run and pass before running these
run-appinspect:
runs-on: ubuntu-latest
needs: [validate-and-build]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v3
with:
ref: 'develop'
#Download the artifacts we want to check
- uses: actions/download-artifact@v3
with:
name: content-latest
path: build/
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
- name: Submit ESCU to AppInspect API
env:
APPINSPECT_USERNAME: ${{ secrets.AppInspectUsername }}
APPINSPECT_PASSWORD: ${{ secrets.AppInspectPassword }}
run: |
cd bin
#Enclose in quotes in case there are any special characters in the username/password
#Better not to pass these arguments on the command line, if possible
./appinspect.sh ../ DA-ESS-ContentUpdate-latest.tar.gz "$APPINSPECT_USERNAME" "$APPINSPECT_PASSWORD"
- name: Create report artifact
if: always()
run: |
#Always create this, regardless of whether success or failure above
tar -cvzf report.tar.gz report/
- name: store_artifacts
uses: actions/upload-artifact@v3
with:
name: appinspect_reports
path: |
report.tar.gz
#Still store the report, even if we have failed (otherwise we don't know why/how we failed)
- name: store_artifacts_on_failure
uses: actions/upload-artifact@v3
if: failure()
with:
name: appinspect_reports_failure
path: |
report.tar.gz
create-report:
runs-on: ubuntu-latest
needs: [validate-and-build, run-appinspect]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v3
with:
ref: 'develop'
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
- uses: actions/setup-python@v4
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python -m venv --clear venv
source venv/bin/activate
python -m pip install --upgrade pip
python -m pip install wheel
python -m pip install -q -r requirements.txt
- name: run reporting
run: |
source venv/bin/activate
python contentctl.py -p . reporting
#Official, Verified Amazon-AWS Github Account Provided Action
- uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-session-token: ${{ secrets.AWS_SESSION_TOKEN }} # if you have/need it
aws-region: us-west-1 #assume we will always use this, could make this an environment variable...
- name: Upload Reporting
run: |
aws s3 cp bin/reporting s3://security-content/reporting --recursive --exclude "*" --include "*.svg"
update-sources-github:
runs-on: ubuntu-latest
permissions:
# Need the write permission because this job commits changes to some folders like dist
contents: write
needs: [validate-and-build, run-appinspect, create-report]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v3
with:
token: ${{ secrets.SECURITY_CONTENT_ADMIN_TASKS }}
ref: 'develop'
- uses: actions/setup-python@v4
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- uses: actions/download-artifact@v3
with:
name: content-latest
- name: Stage artifacts in proper directories
run: |
mkdir latest-escu
tar -zxf DA-ESS-ContentUpdate-latest.tar.gz -C latest-escu --strip-components=1
mkdir latest-ssa
tar -zxf SSA_Content-latest.tar.gz -C latest-ssa --strip-components=1
mkdir latest-api
tar -zxf API_Content-latest.tar.gz -C latest-api --strip-components=1
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python -m venv --clear venv
source venv/bin/activate
python -m pip install --upgrade pip
python -m pip install wheel
python -m pip install -q -r requirements.txt
- name: Get branch and PR required for detection testing main.py
id: vars
run: |
echo "::set-output name=branch::${GITHUB_REF#refs/heads/}"
- name: Run reporting
run: |
source venv/bin/activate
python contentctl.py -p . reporting
- name: Update github with new docs and package bits
run: |
rm -rf dist
mkdir dist
mv latest-escu dist/escu
mv latest-ssa dist/ssa
mv latest-api dist/api
# configure git to prep for commit
git config user.email "[email protected]"
git config user.name "research bot"
git config --global push.default simple
git add dist/*
git add docs/*
git add detections/*
git commit --allow-empty -m "Update dist/escu, dist/ssa, and dist/api folders with the latest content associated with this tag"
#The push authenticates with the token configured during checkout, so no credentials need to be provided here
git push
publish-github-release:
#The Github-maintained release action is archived: https://github.com/actions/create-release
#It recommends several alternatives; we use the one with the most stars: https://github.com/softprops/action-gh-release
runs-on: ubuntu-latest
needs: [validate-and-build, run-appinspect, create-report]
#Only run when tagged
# adding perms needed
permissions:
contents: write
if: startsWith(github.ref, 'refs/tags/')
steps:
#Get the artifacts that we need
- uses: actions/download-artifact@v3
with:
name: content-latest
- uses: actions/download-artifact@v3
with:
name: appinspect_reports
#Rename those artifacts appropriately
- name: Set tag
id: vars
run: echo "::set-output name=tag::${GITHUB_REF#refs/*/}"
- name: Rename the content-update appropriately
run: |
cp DA-ESS-ContentUpdate-latest.tar.gz DA-ESS-ContentUpdate-${{ steps.vars.outputs.tag }}.tar.gz
cp SSA_Content-latest.tar.gz SSA_Content-${{ steps.vars.outputs.tag }}.tar.gz
cp API_Content-latest.tar.gz API_Content-${{ steps.vars.outputs.tag }}.tar.gz
#No checksum on the reports
cp report.tar.gz report-${{ steps.vars.outputs.tag }}.tar.gz
cp checksum.txt checksum-${{ steps.vars.outputs.tag }}.txt
#Upload all of the artifacts that we have created using the third party
#action recommended by Github
- name: Upload Release Artifacts
uses: softprops/action-gh-release@v1
with:
files: |
DA-ESS-ContentUpdate-${{ steps.vars.outputs.tag }}.tar.gz
SSA_Content-${{ steps.vars.outputs.tag }}.tar.gz
API_Content-${{ steps.vars.outputs.tag }}.tar.gz
report-${{ steps.vars.outputs.tag }}.tar.gz
checksum-${{ steps.vars.outputs.tag }}.txt
attack-range-update:
runs-on: ubuntu-latest
needs: [validate-and-build, run-appinspect, create-report]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
#Get the artifacts that we need
- uses: actions/download-artifact@v3
with:
name: content-latest
#Official, Verified Amazon-AWS Github Account Provided Action
- uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-session-token: ${{ secrets.AWS_SESSION_TOKEN }} # if you have/need it
aws-region: us-west-1 #assume we will always use this, could make this an environment variable...
- name: Sync latest ESCU to the Attack Range S3 bucket for apps
run: |
aws s3 cp DA-ESS-ContentUpdate-latest.tar.gz s3://attack-range-appbinaries/
# make the file public since it is not by default
aws s3api put-object-acl --bucket attack-range-appbinaries --key DA-ESS-ContentUpdate-latest.tar.gz --acl public-read
master-api-update:
runs-on: ubuntu-latest
needs: [validate-and-build, run-appinspect, create-report]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v3
with:
ref: 'develop'
- uses: actions/setup-python@v4
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python -m venv --clear venv
source venv/bin/activate
python -m pip install --upgrade pip
python -m pip install wheel
python -m pip install -q -r requirements.txt
- name: Create YML to JSON Folder
run: |
source venv/bin/activate
python contentctl.py --path . generate --product API --output dist/api
- name: Generate content version and timestamp JSON
run: |
VERSION_VAL=$GITHUB_REF_NAME
VERSION_TS=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
echo "{\"version\":{\"name\":\"$VERSION_VAL\",\"published_at\":\"$VERSION_TS\"}}" > dist/api/version.json
echo "contents of version.json:"
cat dist/api/version.json
#Official, Verified Amazon-AWS Github Account Provided Action
- uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-session-token: ${{ secrets.AWS_SESSION_TOKEN }} # if you have/need it
aws-region: us-west-1 #assume we will always use this, could make this an environment variable...
- name: Update API sources
run: |
aws s3 rm s3://security-content --recursive --exclude "*" --include "*.yml"
aws s3 cp stories s3://security-content/stories --recursive --exclude "*" --include "*.yml"
aws s3 cp baselines s3://security-content/baselines --recursive --exclude "*" --include "*.yml"
aws s3 cp detections s3://security-content/detections --recursive --exclude "*" --include "*.yml"
aws s3 cp playbooks s3://security-content/playbooks --recursive --exclude "*" --include "*.yml"
aws s3 cp lookups s3://security-content/lookups --recursive --exclude "*" --include "*.yml"
aws s3 cp lookups s3://security-content/lookups --recursive --exclude "*" --include "*.csv"
aws s3 cp lookups s3://security-content/lookups --recursive --exclude "*" --include "*.mlmodel"
aws s3 cp macros s3://security-content/macros --recursive --exclude "*" --include "*.yml"
aws s3 cp deployments s3://security-content/deployments --recursive --exclude "*" --include "*.yml"
aws s3 cp dist/api s3://security-content/json --recursive --exclude "*" --include "*.json"
- name: Security Content API Smoke Test
run: |
API_URL='https://content.splunkresearch.com/detections'
API_STATUS=$(curl -s -o /dev/null -w "%{http_code}" $API_URL)
echo "Security Content API Status: $API_STATUS"
if [ "$API_STATUS" != "200" ]; then
echo "Error [Security Content API status: $API_STATUS]"
exit 1
fi