Skip to content

Commit

Permalink
[develop] Feature/cicd scripts (ufs-community#142)
Browse files Browse the repository at this point in the history
* create CICD init script

* add CICD build and test scripts

Signed-off-by: Bruce Kropp <[email protected]>

* fix CICD script location of test-log

Signed-off-by: Bruce Kropp <[email protected]>

* fix CICD workspace variable

Signed-off-by: Bruce Kropp <[email protected]>

* add usage help to CICD scripts

Signed-off-by: Bruce Kropp <[email protected]>

* fix BRANCH_NAME checkout for CICD scripts

* fix exit status for CICD scripts

* check for bin and lib after build in CICD scripts

* Create Jenkinsfile

* Update Jenkinsfile

* Update Jenkinsfile

* Update Jenkinsfile

* Update Jenkinsfile to debug workspace

* Update Jenkinsfile debug build sorc

* Update Jenkinsfile to use git clone before init

* check for bin and lib files prior to CICD tests

Signed-off-by: Bruce Kropp <[email protected]>

* Update Jenkinsfile boolean to run tests

* Update Jenkinsfile pass bool param to test

* Update Jenkinsfile run tests = true

* Update Jenkinsfile use checkout scm

* Update Jenkinsfile echo git clone from init

* Update Jenkinsfile run_tests from LAND_DA_RUN_TESTS

* Update Jenkinsfile clear out pre-init

* Update Jenkinsfile allow hercules

* Update Jenkinsfile quote params

* don't need to set account in CICD scripts

Signed-off-by: Bruce Kropp <[email protected]>

* Update Jenkinsfile

simplify CICD script calls

* cleanup CICD script debugging

Signed-off-by: Bruce Kropp <[email protected]>

* add CICD script disk_usage.sh

Signed-off-by: Bruce Kropp <[email protected]>

* Update Jenkinsfile save results and stats

* Update Jenkinsfile to use UFS_PLATFORM variable name

* Update Jenkinsfile to fix paths to upload files

* Update Jenkinsfile

* Adjust post stage in Jenkinsfile

Signed-off-by: Bruce Kropp <[email protected]>

* update CICD script land_test.sh to check if ctest produced output

Signed-off-by: Bruce Kropp <[email protected]>

---------

Signed-off-by: Bruce Kropp <[email protected]>
Signed-off-by: Bruce Kropp <[email protected]>
  • Loading branch information
BruceKropp-Raytheon authored Aug 16, 2024
1 parent f7769d6 commit 7731b28
Show file tree
Hide file tree
Showing 5 changed files with 547 additions and 0 deletions.
228 changes: 228 additions & 0 deletions .cicd/Jenkinsfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,228 @@
// CI/CD pipeline for the Land-DA_workflow application: builds and (optionally)
// tests the application across a matrix of platforms and compilers, and uploads
// timing, disk-usage, and log artifacts to S3 after each stage.
pipeline {
    agent none // no global agent; each matrix cell picks its own node by platform label

    options {
        disableConcurrentBuilds()       // only one build of this job runs at a time
        overrideIndexTriggers(false)    // do not trigger builds from branch indexing
        skipDefaultCheckout(true)       // checkout is done explicitly in the Initialize stage
        timestamps()
        timeout(time: 6, unit: 'HOURS') // hard cap for the whole pipeline
    }

    parameters {
        // Allow job runner to filter based on platform
        choice(name: 'UFS_PLATFORM_FILTER', choices: ['all', 'hera', 'orion', 'hercules', 'jet', 'gaea', 'derecho', 'pclusternoaav2use1', 'azclusternoaav2use1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
        // Allow job runner to filter based on compiler
        choice(name: 'UFS_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build')
        // Run tests ?
        booleanParam name: 'LAND_DA_RUN_TESTS', defaultValue: true, description: 'Whether to execute the tests'
    }

    stages {
        // Kick off a SonarQube scan of the repository. The downstream job
        // trigger is currently commented out, so this stage only logs a message.
        stage('Launch SonarQube') {
            steps {
                script {
                    echo "SonarQube: Scan land-DA_workflow code"
                    /*
                    build job: '/land-DA_workflow/land-da-sonarqube', parameters: [
                        string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'),
                        string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: '')
                    ], wait: false
                    */
                }
            }
        }

        /*
        stage('Check Nodes') {
            steps {
                script {
                    build job: '/infrastructure/is_up', parameters: [string(name: 'NODE_CHOICE', value: 'pclusternoaav2use1' ?: 'none')], wait: false
                    build job: '/infrastructure/is_up', parameters: [string(name: 'NODE_CHOICE', value: 'azclusternoaav2use1' ?: 'none')], wait: false
                    build job: '/infrastructure/is_up', parameters: [string(name: 'NODE_CHOICE', value: 'gclusternoaav2usc1' ?: 'none')], wait: false
                    sleep time: 120, unit: 'SECONDS'
                }
            }
        }
        */

        // Build and test the Land-DA application on all supported platforms using the supported compilers for each platform
        stage('Build and Test') {
            matrix {
                // Run on all platform/compiler combinations by default or build and test only on the platform(s) and
                // compiler(s) specified by UFS_PLATFORM_FILTER and UFS_COMPILER_FILTER
                // NOTE: all top-level conditions in a declarative `when` block are
                // implicitly ANDed, so a cell runs only if a labeled node exists,
                // the platform is not one of the disabled ones, and it matches
                // both filter parameters.
                when {
                    beforeAgent true
                    // Skip cells for which no Jenkins agent carries the platform label.
                    expression {
                        return nodesByLabel(env.UFS_PLATFORM).size() > 0
                    }

                    //expression { env.UFS_PLATFORM != 'hera' }
                    //expression { env.UFS_PLATFORM != 'orion' }
                    //expression { env.UFS_PLATFORM != 'hercules' }
                    // Platforms currently disabled in CI:
                    expression { env.UFS_PLATFORM != 'jet' }
                    expression { env.UFS_PLATFORM != 'gaea' }
                    expression { env.UFS_PLATFORM != 'derecho' }
                    expression { env.UFS_PLATFORM != 'pclusternoaav2use1'}
                    expression { env.UFS_PLATFORM != 'azclusternoaav2use1'}
                    expression { env.UFS_PLATFORM != 'gclusternoaav2usc1' }

                    allOf {
                        anyOf {
                            expression { params.UFS_PLATFORM_FILTER == 'all' }
                            expression { params.UFS_PLATFORM_FILTER == env.UFS_PLATFORM }
                        }

                        anyOf {
                            expression { params.UFS_COMPILER_FILTER == 'all' }
                            expression { params.UFS_COMPILER_FILTER == env.UFS_COMPILER }
                        }
                    }
                }

                axes {
                    axis {
                        name 'UFS_PLATFORM'
                        values 'hera', 'orion', 'hercules', 'jet', 'gaea', 'derecho' , 'pclusternoaav2use1', 'azclusternoaav2use1', 'gclusternoaav2usc1'
                    }

                    axis {
                        name 'UFS_COMPILER'
                        values 'gnu', 'intel'
                    }
                }

                excludes {
                    // Exclude GNU from platforms that don't support it
                    // NOTE(review): this exclude lists every platform, so the
                    // 'gnu' axis value never runs anywhere — confirm this is
                    // intentional (only intel builds are exercised).
                    exclude {
                        axis {
                            name 'UFS_PLATFORM'
                            values 'hera', 'orion', 'hercules', 'jet', 'gaea', 'derecho', 'pclusternoaav2use1', 'azclusternoaav2use1', 'gclusternoaav2usc1'
                        }

                        axis {
                            name 'UFS_COMPILER'
                            values 'gnu'
                        }
                    }
                }

                // Run each matrix cell on a node labeled with the platform name.
                agent {
                    label env.UFS_PLATFORM
                }

                environment {
                    REPO_PROJECT = "ufs-community"
                    // Branch names may contain '/', which is unsafe in file/dir names.
                    BRANCH_NAME_ESCAPED = env.BRANCH_NAME.replace('/', '_')
                    BUILD_VERSION = "${env.UFS_PLATFORM}-${env.UFS_COMPILER}-${env.BRANCH_NAME_ESCAPED}-${env.BUILD_NUMBER}"
                    BUILD_NAME = "land-DA_workflow_${env.BUILD_VERSION}"
                    INSTALL_NAME = "install_${env.UFS_COMPILER}"
                    ACCNR = "epic"
                    CMAKE_FLAGS = "-DAPP=ATM -DCCPP_SUITES=FV3_GFS_v16"
                }

                stages {
                    // Clean and create the workspace , checkout the repository into ${env.UFS_PLATFORM}
                    // In the workspace repository directory, run initialization
                    stage('Initialize') {
                        steps {
                            dir ("${env.UFS_PLATFORM}") {
                                echo "${env.STAGE_NAME} Land-DA (${env.UFS_COMPILER}) build environment on ${env.UFS_PLATFORM} (using ${env.WORKSPACE}/${env.UFS_PLATFORM})"
                                cleanWs()
                                checkout scm
                                // land_init.sh prepares the build environment;
                                // disk_usage.sh records a per-stage disk-usage CSV.
                                sh 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/land_init.sh"'
                                sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/disk_usage.sh"'
                            }
                        }

                        post {
                            always {
                                echo "#### Initialize COMPLETE."
                                // Upload the init timing JSON and disk-usage CSV to S3.
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/*-time-land_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                            }
                        }
                    }

                    // Run the build script; if successful create a tarball of the build and upload to S3
                    stage('Build') {
                        options {
                            timeout(time: 4, unit: 'HOURS')
                        }

                        steps {
                            dir ("${env.UFS_PLATFORM}") {
                                echo "${env.STAGE_NAME} Land-DA (${env.UFS_COMPILER}) on ${env.UFS_PLATFORM} (using ${env.WORKSPACE}/${env.UFS_PLATFORM})"
                                sh 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/land_build.sh"'
                                sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/disk_usage.sh"'
                            }
                        }

                        post {
                            success {
                                echo "#### Build SUCCESS."
                            }
                            always {
                                echo "#### Build COMPLETE."
                                // Upload the captured environment, build timing JSON, and disk-usage CSV.
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/*-env.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/*-time-land_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                            }
                        }
                    }

                    // Run the test script
                    stage('Test') {
                        // when { not { expression { return env.UFS_PLATFORM == jet }}}
                        options {
                            timeout(time: 4, unit: 'HOURS')
                        }

                        environment {
                            // Pass the boolean pipeline parameter down to the test script.
                            LAND_DA_RUN_TESTS = "${params.LAND_DA_RUN_TESTS}"
                        }

                        steps {
                            dir ("${env.UFS_PLATFORM}") {
                                echo "${env.STAGE_NAME} Land-DA (${env.UFS_COMPILER}) on ${env.UFS_PLATFORM} (using ${env.WORKSPACE}/${env.UFS_PLATFORM}) ${env.LAND_DA_RUN_TESTS}"
                                sh 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/land_test.sh"'
                                sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/disk_usage.sh"'
                            }
                        }

                        post {
                            success {
                                echo "#### Test SUCCESS."
                            }
                            always {
                                echo "#### Test COMPLETE."
                                // Archive the test log files
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/*-test-log.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/*-time-land_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                                s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                            }
                        }
                    }
                } // stages
            } // matrix
        } // stage
    } // stages

    post {
        always {
            script {
                // Trigger another job to collect all build statistics
                // Derive the job name without the branch suffix, and re-escape
                // '/' in the branch name for use in a Jenkins build URL.
                CI_JOB_NAME=env.JOB_NAME.replace("/${env.BRANCH_NAME}","")
                CI_BRANCH_NAME=env.BRANCH_NAME.replace("%2F","%252F")
                echo "#### post: Triggering land-DA_workflow/ufs-land-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..."
                /*
                build job: '/land-DA_workflow/ufs-land-metrics', parameters: [
                    string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"),
                    string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
                ], wait: false
                */
                echo "#### post: land-DA_workflow/ufs-land-metrics COMPLETE."
            }
        }
    }
} // pipeline
48 changes: 48 additions & 0 deletions .cicd/scripts/disk_usage.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
#!/usr/bin/env bash

# Output a CSV report of disk usage on subdirs of some path.
#
# Usage:
#   [JOB_NAME=<ci_job>] [BUILD_NUMBER=<n>] [UFS_COMPILER=<intel>] [UFS_PLATFORM=<machine>] disk_usage path depth size outfile.csv
#
# args:
#   $1  directory to scan (default: current directory)
#   $2  depth of subdirectories to report (default: 1)
#   $3  du block-size unit, e.g. k, m, g (default: k)
#   $4  output CSV file (default: <workspace>-<compiler>-disk-usage<stage>.csv)

# Default the Jenkins-provided variables so the script also works when run by hand.
[[ -n ${WORKSPACE:-} ]] || WORKSPACE=$(pwd)
[[ -n ${UFS_PLATFORM:-} ]] || UFS_PLATFORM=$(hostname -s 2>/dev/null) || UFS_PLATFORM=$(hostname 2>/dev/null)
[[ -n ${UFS_COMPILER:-} ]] || UFS_COMPILER=compiler

script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"

# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
# relative to script directory.
declare workspace
if [[ -d "${WORKSPACE}/${UFS_PLATFORM}" ]]; then
  workspace="${WORKSPACE}/${UFS_PLATFORM}"
else
  workspace="$(cd -- "${script_dir}/../.." && pwd)"
fi

echo "STAGE_NAME=${STAGE_NAME:-}" # from pipeline
outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME:-}.csv}"

# disk_usage [directory] [depth] [size-unit]
# Emit one CSV row per subdirectory of <directory> down to <depth> levels:
# platform-compiler, job/build, owner, group, inode count, size, access time,
# and filename, sorted numerically by inode count.
function disk_usage() {
  local directory=${1:-${PWD}}
  local depth=${2:-1}
  local size=${3:-k}
  echo "Disk usage: ${JOB_NAME:-ci}/${UFS_PLATFORM}/$(basename "${directory}")"
  (
    cd "${directory}" || exit 1
    echo "Platform,Build,Owner,Group,Inodes,${size}bytes,Access Time,Filename"
    # du prints "<inodes><TAB><name>"; read the two fields directly instead of
    # relying on unquoted word-splitting (read -r also keeps backslashes intact).
    du -Px -d "${depth}" --inodes --exclude='./workspace' | \
    while read -r inodes filename ; do
      echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' "${filename}"),${inodes:-0},$(du -Px -s -"${size}" --time "${filename}")" | tr '\t' ',' ;
    done | sort -t, -k5 -n #-r
  )
  echo ""
}

# Positional args may be empty; the ${n:-default} expansions inside disk_usage
# supply the defaults in that case.
disk_usage "${1:-}" "${2:-}" "${3:-}" | tee "${outfile}"
77 changes: 77 additions & 0 deletions .cicd/scripts/land_build.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
#!/usr/bin/env bash
#
# A unified build script for the Land-DA_workflow application. This script is expected to
# build Land-DA_workflow model from source for all supported platforms.
#
# Usage:
# UFS_PLATFORM=<platform> UFS_COMPILER=<compiler> .cicd/scripts/land_build.sh
# .cicd/scripts/land_build.sh <platform> <compiler>
#
pwd
set +x
#echo "UFS_PLATFORM=${UFS_PLATFORM}"
#echo "UFS_COMPILER=${UFS_COMPILER}"
# Positional args override the UFS_PLATFORM/UFS_COMPILER environment variables;
# ${1,,} lowercases the platform name for app_build.sh's -p= option.
[[ -n $1 ]] && export UFS_PLATFORM=${1} && export machine=${1,,} || export machine=${UFS_PLATFORM,,}
[[ -n $2 ]] && export UFS_COMPILER=${2} && export compiler=${2} || export compiler=${UFS_COMPILER}
[[ -n ${WORKSPACE} ]] && export workspace=${WORKSPACE} || export workspace=$(pwd)
echo "machine=${machine}"
echo "compiler=${compiler}"
echo "workspace=${workspace}"

# Fail on errors and unset variables, and trace commands from here on.
set -e -u -x

echo "UFS_PLATFORM=${UFS_PLATFORM}"
echo "UFS_COMPILER=${UFS_COMPILER}"

script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"

# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
# relative to script directory.
# NOTE(review): this re-derives (and overrides) the `workspace` value already
# exported above — confirm both code paths are intended to coexist.
declare workspace
if [[ -d "${WORKSPACE:=$(pwd)}/${UFS_PLATFORM}" ]]; then
workspace="${WORKSPACE:=$(pwd)}/${UFS_PLATFORM}"
else
workspace="$(cd -- "${script_dir}/../.." && pwd)"
fi

# Normalize Parallel Works cluster platform value.
declare platform
if [[ "${UFS_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then
platform='noaacloud'
else
platform="${UFS_PLATFORM}"
fi

echo "ACCNR=${ACCNR:=}"

# Build
cd ${workspace}
pwd
# Disable errexit: from here on, exit statuses are captured and handled manually.
set +e

git branch
git log -1 --oneline

echo "Pipeline Building Land-DA on ${UFS_PLATFORM} ${UFS_COMPILER} with Account=${ACCNR:=}."
# Run the build under /usr/bin/time, writing a JSON resource-usage report
# (cpu, memory, page faults, I/O, wall/user/sys time) alongside the build log.
/usr/bin/time -p \
-o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-land_build.json \
-f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \
sorc/app_build.sh -p=${machine} -c=${compiler} --conda=off --build 2>&1 | tee ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-build-log.txt
# PIPESTATUS[0] is the exit status of the build itself, not of tee.
status=${PIPESTATUS[0]}
cat sorc/build/log.ecbuild sorc/build/log.make >> ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-build-log.txt
echo "Pipeline Completed Land-DA build on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status"

# Verify the build produced executables and shared libraries.
# NOTE(review): this overwrites $status — the script's exit code reflects this
# artifact check, NOT the build's own exit status; a failed build with leftover
# artifacts from an earlier run would still exit 0. Confirm this is intended.
ls -l sorc/build/bin/*.exe sorc/build/lib/*.so
status=$?

git status -u

build_exit=$status
echo "STAGE_NAME=${STAGE_NAME:=manual}"
# Capture the build environment for upload as a CI artifact.
env | grep = | sort > ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-env.txt
set -e
cd -
pwd

exit $build_exit

Loading

0 comments on commit 7731b28

Please sign in to comment.