Showing 1 changed file with 120 additions and 95 deletions.
@@ -2,22 +2,22 @@ pipeline {
    agent any
    // In addition to manual runs, trigger somewhere at midnight to
    // give us the max time in a day to get things right.
    //triggers {
    triggers {
        // Master never runs--Feb 31st.
        //cron('0 0 31 2 *')
        cron('0 0 31 2 *')
        // Nightly @12am, for "snapshot", skip "release" night.
        //cron('0 0 2-31/2 * *')
        // First of the month @12am, for "release" (also "current").
        //cron('0 0 1 * *')
        //}
    }
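A note on the scheduling trick just above: Jenkins cron fields read MINUTE HOUR DAY-OF-MONTH MONTH DAY-OF-WEEK, so '0 0 31 2 *' asks for midnight on February 31st, a date that never exists; the branch keeps a syntactically active triggers block that never actually fires. A sketch of how the alternative, commented-out schedules would read if enabled (illustrative breakdown, not part of this commit):

    // Jenkins cron fields: MINUTE HOUR DAY-OF-MONTH MONTH DAY-OF-WEEK.
    //triggers {
    //    cron('0 0 2-31/2 * *') // 00:00 on every other day of the month ("snapshot" nights).
    //    cron('0 0 1 * *')      // 00:00 on the 1st of the month ("release", also "current").
    //}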
    environment {

        ///
        /// Internal run variables.
        ///

        // The branch of geneontology/go-site to use.
        TARGET_GO_SITE_BRANCH = 'issue-2093-metadata'
        TARGET_GO_SITE_BRANCH = 'issue-go-site-2093-metadata'
        // The branch of geneontology/go-stats to use.
        TARGET_GO_STATS_BRANCH = 'master'
        // The branch of go-ontology to use.
@@ -70,27 +70,27 @@ pipeline {

        // The Zenodo concept ID to use for releases (and occasionally
        // master testing).
        ZENODO_ARCHIVE_CONCEPT = null
        ZENODO_ARCHIVE_CONCEPT = ''
        // Distribution ID for the AWS CloudFront for this branch,
        // used solely for invalidations. Versioned release does not
        // need this as it is always a new location and the index
        // upload already has an invalidation on it. For current,
        // snapshot, and experimental.
        AWS_CLOUDFRONT_DISTRIBUTION_ID = null
        AWS_CLOUDFRONT_RELEASE_DISTRIBUTION_ID = null
        AWS_CLOUDFRONT_DISTRIBUTION_ID = ''
        AWS_CLOUDFRONT_RELEASE_DISTRIBUTION_ID = ''
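For context on the AWS_CLOUDFRONT_*_DISTRIBUTION_ID settings: the IDs exist only so the pipeline can invalidate the CDN cache after uploading into an existing location (versioned releases land in fresh paths and skip this). The invalidation step itself is not shown in this hunk; a hypothetical sketch of such a step using the standard AWS CLI, assuming the CLI is available on the agent:

        // Hypothetical example only -- not a step from this Jenkinsfile.
        sh 'aws cloudfront create-invalidation --distribution-id "$AWS_CLOUDFRONT_DISTRIBUTION_ID" --paths "/*"'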

        ///
        /// Ontobio Validation
        ///
        VALIDATION_ONTOLOGY_URL="http://skyhook.berkeleybop.org/issue-2093-metadata/ontology/go.json"
        VALIDATION_ONTOLOGY_URL="http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/ontology/go.json"

        ///
        /// Minerva input.
        ///

        // Minerva operating profile.
        MINERVA_INPUT_ONTOLOGIES = [
            "http://skyhook.berkeleybop.org/issue-2093-metadata/ontology/extensions/go-lego.owl"
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/ontology/extensions/go-lego.owl"
        ].join(" ")

        ///
@@ -101,19 +101,19 @@ pipeline {
        GOLR_SOLR_MEMORY = "128G"
        GOLR_LOADER_MEMORY = "192G"
        GOLR_INPUT_ONTOLOGIES = [
            "http://skyhook.berkeleybop.org/issue-2093-metadata/ontology/extensions/go-amigo.owl"
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/ontology/extensions/go-amigo.owl"
        ].join(" ")
        GOLR_INPUT_GAFS = [
            //"http://skyhook.berkeleybop.org/master/products/upstream_and_raw_data/paint_other.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-2093-metadata/annotations/goa_chicken.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-2093-metadata/annotations/goa_chicken_complex.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-2093-metadata/annotations/goa_uniprot_all_noiea.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-2093-metadata/annotations/mgi.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-2093-metadata/annotations/pombase.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-2093-metadata/annotations/wb.gaf.gz"
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/annotations/goa_chicken.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/annotations/goa_chicken_complex.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/annotations/goa_uniprot_all_noiea.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/annotations/mgi.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/annotations/pombase.gaf.gz",
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/annotations/wb.gaf.gz"
        ].join(" ")
        GOLR_INPUT_PANTHER_TREES = [
            "http://skyhook.berkeleybop.org/issue-2093-metadata/products/panther/arbre.tgz"
            "http://skyhook.berkeleybop.org/issue-go-site-2093-metadata/products/panther/arbre.tgz"
        ].join(" ")
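A note on the .join(" ") idiom used throughout this environment block: Jenkins environment variables can only hold strings, so each Groovy list of URLs is collapsed into one space-separated string that downstream loaders split back apart. A small illustration with a hypothetical variable and URLs:

        // Hypothetical names, just to show the shape of the result.
        EXAMPLE_INPUTS = [
            "http://example.org/a.owl",
            "http://example.org/b.owl"
        ].join(" ")   // -> "http://example.org/a.owl http://example.org/b.owl"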

        ///
@@ -135,7 +135,7 @@ pipeline {
        GORULE_TAGS_TO_SUPPRESS="silent"

        // Optional. Groups to run.
        RESOURCE_GROUPS="ecocyc goa mgi rgd paint pseudocap wb"
        RESOURCE_GROUPS="ecocyc goa mgi paint pseudocap wb"
        // Optional. Datasets to skip within the resources that we
        // will run (defined in the line above).
        DATASET_EXCLUDES=""
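These two variables feed the download stage further down in the file, where each group becomes a -g flag and each excluded dataset a -x flag for download_source_gafs.py. With the values above, the expression used later would evaluate roughly as follows:

        // RESOURCE_GROUPS.split(" ").collect { "-g ${it}" }.join(" ") gives:
        //   "-g ecocyc -g goa -g mgi -g paint -g pseudocap -g wb"
        // The empty DATASET_EXCLUDES contributes no -x flags.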
@@ -294,57 +294,32 @@ pipeline {
        // Download GAFs from datasets.yaml in go-site and then upload
        // to skyhook in their appropriate locations.
        stage("Download annotation data") {
            agent {
                docker {
                    image 'geneontology/dev-base:ea32b54c822f7a3d9bf20c78208aca452af7ee80_2023-08-28T125255'
                    args "-u root:root --tmpfs /opt:exec -w /opt"
                }
            }
            steps {
                dir("./go-site") {
                    git branch: TARGET_GO_SITE_BRANCH, url: 'https://github.com/geneontology/go-site.git'

                    script {
                        def excluded_datasets_args = ""
                        if ( env.DATASET_EXCLUDES ) {
                            excluded_datasets_args = DATASET_EXCLUDES.split(" ").collect { "-x ${it}" }.join(" ")
                        }

                        def included_resources = ""
                        if (env.RESOURCE_GROUPS) {
                            included_resources = RESOURCE_GROUPS.split(" ").collect { "-g ${it}" }.join(" ")
                        }

                        def goa_mapping_url = ""
                        if (env.GOA_UNIPROT_ALL_URL) {
                            goa_mapping_url = "-m goa_uniprot_all gaf ${GOA_UNIPROT_ALL_URL}"
                        }
                        sh "cd scripts && pip3 install -r requirements.txt"
                        sh "python3 ./scripts/download_source_gafs.py all --datasets ./metadata/datasets --target ./target/ --type gaf ${excluded_datasets_args} ${included_resources} ${goa_mapping_url}"
                    }
                dir("./go-site") {
                    git branch: TARGET_GO_SITE_BRANCH, url: 'https://github.com/geneontology/go-site.git'

                    withCredentials([file(credentialsId: 'skyhook-private-key', variable: 'SKYHOOK_IDENTITY')]) {
                        // Upload to skyhook to the expected location.
                        sh 'rsync -avz -e "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=true -o IdentityFile=$SKYHOOK_IDENTITY" ./target/* [email protected]:/home/skyhook/$BRANCH_NAME/products/upstream_and_raw_data/'
                    }
                    script {
                        def excluded_datasets_args = ""
                        if ( env.DATASET_EXCLUDES ) {
                            excluded_datasets_args = DATASET_EXCLUDES.split(" ").collect { "-x ${it}" }.join(" ")
                        }
                        def included_resources = ""
                        if (env.RESOURCE_GROUPS) {
                            included_resources = RESOURCE_GROUPS.split(" ").collect { "-g ${it}" }.join(" ")
                        }
                        def goa_mapping_url = ""
                        if (env.GOA_UNIPROT_ALL_URL) {
                            goa_mapping_url = "-m goa_uniprot_all gaf ${GOA_UNIPROT_ALL_URL}"
                        }
                        sh "python3 ./scripts/download_source_gafs.py all --datasets ./metadata/datasets --target ./target/ --type gaf ${excluded_datasets_args} ${included_resources} ${goa_mapping_url}"
                    }
                    dir("./gopreprocess") {
                        git branch: TARGET_GO_PREPROCESS_BRANCH, url: 'https://github.com/geneontology/gopreprocess.git'
                        sh "pwd"
                        sh "ls -lrt"
                        sh "poetry install"
                        sh "make download_human"
                        sh "make download_rat"
                        sh "ls -lrt ~/.data/HUMAN/"
                        sh "ls -lrt ~/.data/RGD"
                        sh "ls -lrt ~/.data/MGI"
                        withCredentials([file(credentialsId: 'skyhook-private-key', variable: 'SKYHOOK_IDENTITY')]) {
                            // Upload pystow'd files from gopreprocess downloader to skyhook upstream and raw data folder.
                            sh 'rsync -avz -e "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=true -o IdentityFile=$SKYHOOK_IDENTITY" ~/.data/* [email protected]:/home/skyhook/$BRANCH_NAME/products/upstream_and_raw_data/preprocess_raw_files/'
                        }

                        withCredentials([file(credentialsId: 'skyhook-private-key', variable: 'SKYHOOK_IDENTITY')]) {
                            // Upload to skyhook to the expected location.
                            sh 'rsync -avz -e "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=true -o IdentityFile=$SKYHOOK_IDENTITY" ./target/* [email protected]:/home/skyhook/$BRANCH_NAME/products/upstream_and_raw_data/'
                        }
                    }
                }

            }
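Putting the script block above together: with the environment defined earlier (RESOURCE_GROUPS set, DATASET_EXCLUDES empty, and assuming GOA_UNIPROT_ALL_URL is unset), the interpolated download step would expand to roughly the following. This is an illustrative expansion, not a line from the Jenkinsfile:

                // Empty ${excluded_datasets_args} and ${goa_mapping_url} simply drop out.
                sh "python3 ./scripts/download_source_gafs.py all --datasets ./metadata/datasets --target ./target/ --type gaf -g ecocyc -g goa -g mgi -g paint -g pseudocap -g wb"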
        // See https://github.com/geneontology/go-ontology for details
        // on the ontology release pipeline. This ticket runs
@@ -414,34 +389,6 @@ pipeline {
                }
            }
        }
        stage('Generate automated annotations'){
            agent {
                docker {
                    image 'geneontology/dev-base:ea32b54c822f7a3d9bf20c78208aca452af7ee80_2023-08-28T125255'
                    args "-u root:root --tmpfs /opt:exec -w /opt"
                }
            }
            steps {
                dir("./gopreprocess") {
                    git branch: TARGET_GO_PREPROCESS_BRANCH, url: 'https://github.com/geneontology/gopreprocess.git'
                    sh "pwd"
                    sh "ls -lrt"
                    sh "poetry install"
                    sh "make convert_rat"
                    sh "make convert_human"
                    sh "make get_goa_files"
                    sh "make merge_gafs"
                    sh "ls -lrt ~/.data/GAF_OUTPUT"
                    withCredentials([file(credentialsId: 'skyhook-private-key', variable: 'SKYHOOK_IDENTITY')]) {
                        // Upload pystow'd files from gopreprocess downloader to skyhook upstream and raw data folder.
                        sh 'rsync -avz -e "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=true -o IdentityFile=$SKYHOOK_IDENTITY" ~/.data/GAF_OUTPUT/* [email protected]:/home/skyhook/$BRANCH_NAME/products/upstream_and_raw_data/preprocessed_GAF_output/'
                        sh 'rsync -avz -e "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=true -o IdentityFile=$SKYHOOK_IDENTITY" ~/.data/* [email protected]:/home/skyhook/$BRANCH_NAME/products/upstream_and_raw_data/preprocess_raw_files/'
                    }

                }

            }
        }
        stage('Minerva generations') {
            steps {
                parallel(
@@ -1417,6 +1364,86 @@ pipeline {
                }
            }
        }
        // Big things to do on major branches.
        stage('Deploy') {
            // For exploration of #204, we'll hold back attempts to push out to AmiGO for master and snapshot
            // so we don't keep clobbering #204 trials out.
            //when { anyOf { branch 'release'; branch 'snapshot'; branch 'master' } }
            when { anyOf { branch 'release' } }
            steps {
                parallel(
                    "AmiGO": {

                        // Ninja in our file credentials from Jenkins.
                        withCredentials([file(credentialsId: 'skyhook-private-key', variable: 'SKYHOOK_IDENTITY'), file(credentialsId: 'go-svn-private-key', variable: 'GO_SVN_IDENTITY'), file(credentialsId: 'ansible-bbop-local-slave', variable: 'DEPLOY_LOCAL_IDENTITY'), file(credentialsId: 'go-aws-ec2-ansible-slave', variable: 'DEPLOY_REMOTE_IDENTITY')]) {

                            // Get our operations code and descend into the ansible
                            // working directory.
                            dir('./operations') {

                                git([branch: 'master',
                                     credentialsId: 'bbop-agent-github-user-pass',
                                     url: 'https://github.com/geneontology/operations.git'])
                                dir('./ansible') {
                                    ///
                                    /// Push out to an AmiGO.
                                    ///
                                    script {
                                        if( env.BRANCH_NAME == 'release' ){

                                            echo 'No current public push on release to Blazegraph.'
                                            // retry(3){
                                            // sh 'ansible-playbook update-endpoint.yaml --inventory=hosts.local-rdf-endpoint --private-key="$DEPLOY_LOCAL_IDENTITY" -e target_user=bbop --extra-vars="pipeline=current build=production endpoint=production"'
                                            // }

                                            echo 'No current public push on release to GOlr.'
                                            // retry(3){
                                            // sh 'ansible-playbook ./update-golr.yaml --inventory=hosts.amigo --private-key="$DEPLOY_LOCAL_IDENTITY" -e target_host=amigo-golr-aux -e target_user=bbop'
                                            // }
                                            // retry(3){
                                            // sh 'ansible-playbook ./update-golr.yaml --inventory=hosts.amigo --private-key="$DEPLOY_LOCAL_IDENTITY" -e target_host=amigo-golr-production -e target_user=bbop'
                                            // }

                                        }else if( env.BRANCH_NAME == 'snapshot' ){

                                            echo 'Push snapshot out internal Blazegraph'
                                            retry(3){
                                                sh 'ansible-playbook update-endpoint.yaml --inventory=hosts.local-rdf-endpoint --private-key="$DEPLOY_LOCAL_IDENTITY" -e target_user=bbop --extra-vars="pipeline=current build=internal endpoint=internal"'
                                            }

                                            echo 'Push snapshot out to experimental AmiGO'
                                            retry(3){
                                                sh 'ansible-playbook ./update-golr-w-snap.yaml --inventory=hosts.amigo --private-key="$DEPLOY_REMOTE_IDENTITY" -e target_host=amigo-golr-exp -e target_user=ubuntu'
                                            }

                                        }else if( env.BRANCH_NAME == 'master' ){

                                            echo 'Push master out to experimental AmiGO'
                                            retry(3){
                                                sh 'ansible-playbook ./update-golr-w-exp.yaml --inventory=hosts.amigo --private-key="$DEPLOY_REMOTE_IDENTITY" -e target_host=amigo-golr-exp -e target_user=ubuntu'
                                            }

                                        }
                                    }
                                }
                            }
                        }
                    }
                )
            }
            // WARNING: Extra safety as I expect this to sometimes fail.
            post {
                always {
                    // Bail on the remote filesystem.
                    sh 'fusermount -u $WORKSPACE/mnt/ || true'
                }
            }
        }
        // stage('TODO: Final status') {
        //     steps {
        //         echo 'TODO: final'
        //     }
        // }
    }
    post {
        // Let's let our people know if things go well.
@@ -1483,8 +1510,6 @@ void initialize() {
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/json || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/blazegraph || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/upstream_and_raw_data || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/upstream_and_raw_data/preprocess_raw_files || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/upstream_and_raw_data/preprocessed_GAF_output || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/pages || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/solr || true'
    sh 'mkdir -p $WORKSPACE/mnt/$BRANCH_NAME/products/panther || true'
@@ -1513,4 +1538,4 @@ void initialize() {
    // TODO: This should be wrapped in exception
    // handling. In fact, this whole thing should be.
    sh 'fusermount -u $WORKSPACE/mnt/ || true'
}
}