From 9916f441644462bbd6b6903325526c2acd4b5dae Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 8 Nov 2024 12:07:46 -0600 Subject: [PATCH 001/106] template files for cicd pipeline builds --- .cicd/Jenkinsfile.combined | 388 ++++++++++++++++++++++++++++++++++++ .cicd/scripts/disk_usage.sh | 48 +++++ .cicd/scripts/wm_build.sh | 24 +++ .cicd/scripts/wm_init.sh | 18 ++ .cicd/scripts/wm_test.sh | 19 ++ 5 files changed, 497 insertions(+) create mode 100644 .cicd/Jenkinsfile.combined create mode 100755 .cicd/scripts/disk_usage.sh create mode 100755 .cicd/scripts/wm_build.sh create mode 100755 .cicd/scripts/wm_init.sh create mode 100755 .cicd/scripts/wm_test.sh diff --git a/.cicd/Jenkinsfile.combined b/.cicd/Jenkinsfile.combined new file mode 100644 index 0000000000..2f62ecfe24 --- /dev/null +++ b/.cicd/Jenkinsfile.combined @@ -0,0 +1,388 @@ +matchedNode = [] +generateBaselineNode = [] +for (label in pullRequest.labels) { + listOfLabelNodeNames = jenkins.model.Jenkins.instance.nodes.collect { + node -> node.getLabelString().contains(label) ? node.name : null + + if ((label.matches(node.getLabelString()+"-(.*)"))) { + matchedNode += node.getLabelString() + } + + if ((label.matches(node.getLabelString()+"(.*)-BL"))) { + generateBaselineNode += node.getLabelString() + } + } +} + +modifiedLabels = matchedNode.collect{"'" + it + "'"} +baselineLabels = generateBaselineNode.collect{"'" + it + "'"} +def generateStage(nodeLabel) { + return { + stage("Running on ${nodeLabel}") { + node(nodeLabel) { + cleanWs() + checkout scm + script { + try { + echo "Running on ${nodeLabel}" + if (baselineLabels.contains(nodeLabel)) { + sh ''' + git submodule update --init --recursive + cd tests + pwd + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + + if [[ $machine =~ "Jet" ]] + then + echo "Creating baselines on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh hercules develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + cd .. 
+ module load git/2.28.0 + git submodule update --init --recursive + cd tests + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed -i 's|/work/noaa/stmp/${USER}|/work/noaa/epic/stmp/role-epic/|g' rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh orion develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /gpfs/f5/epic/scratch/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + chgrp ncep develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Creating baselines on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -r -l rt.conf + export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Creating baselines on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /glade/derecho/scratch/epicufsrt/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + echo "Testing concluded...removing labels for $machine from $GIT_URL" + + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + git add logs/RegressionTests_$machine_name_logs.log + git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + git pull sshorigin $FORK_BRANCH + git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + + } + else { + sh ''' + git submodule update --init --recursive + pwd + cd tests + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + + if [[ $machine =~ "Jet" ]] + then + echo "Running regression tests on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Running regression tests on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + echo "Running regression tests on $machine" + cd .. 
+ module load git/2.28.0 + git submodule update --init --recursive + cd tests + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + cd logs/ + cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Running regression tests on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Running regression tests on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + echo "Testing concluded...removing labels for $machine from $GIT_URL" + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + git add logs/RegressionTests_$machine_name_logs.log + git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + git pull sshorigin $FORK_BRANCH + git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT + + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } + } + catch(err) { + sh ''' + export machine=${NODE_NAME} + export CHANGE_ID=${CHANGE_ID} + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + cd $WORKSPACE/tests + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + echo "Testing concluded...removing labels for $machine from $GIT_URL" + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + currentBuild.result = 'FAILURE' + } + } + } + } + } +} + +def parallelStagesMap = modifiedLabels.collectEntries { + ["${it}" : generateStage(it)] +} + +pipeline { + agent none + environment { + ACCNR = 'epic' + AWS_PROD_ACCOUNT_ID = credentials('AWS_PROD_ACCOUNT_ID') + AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') + GITHUB_TOKEN = credentials('GithubJenkinsNew') + GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' + } + stages { + stage('Launch SonarQube') { + steps { + script { + echo "BRANCH_NAME=${env.CHANGE_BRANCH}" + echo "FORK_NAME=${env.CHANGE_FORK}" + echo "CHANGE_URL=${env.CHANGE_URL}" + echo "CHANGE_ID=${env.CHANGE_ID}" + build job: '/ufs-weather-model/ufs-wm-sonarqube', parameters: [ + string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'), + string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''), + string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''), + string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '') + ], wait: false + } + 
} + } + stage('Run Regression Tests in Parallel') { + steps { + script { + parallel parallelStagesMap + } + } + } + } + post { + success { + node('built-in') { + echo 'This will run only if successful.' + sh ''' + aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":sunny: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *succeeded*"}}' + ''' + } + } + failure { + node('built-in') { + echo 'This will run only if the run was marked as unstable.' + sh ''' + aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":warning: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *failed!*"}}' + ''' + } + } + } +} diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh new file mode 100755 index 0000000000..22ffbeaa54 --- /dev/null +++ b/.cicd/scripts/disk_usage.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +# Output a CSV report of disk usage on subdirs of some path +# Usage: +# [JOB_NAME=] [BUILD_NUMBER=] [UFS_COMPILER=] [UFS_PLATFORM=] disk_usage path depth size outfile.csv +# +# args: +# directory=$1 +# depth=$2 +# size=$3 +# outfile=$4 + +[[ -n ${WORKSPACE} ]] || WORKSPACE=$(pwd) +[[ -n ${UFS_PLATFORM} ]] || UFS_PLATFORM=$(hostname -s 2>/dev/null) || UFS_PLATFORM=$(hostname 2>/dev/null) +[[ -n ${UFS_COMPILER} ]] || UFS_COMPILER=compiler + +script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" + +# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set +# relative to script directory. +declare workspace +if [[ -n "${WORKSPACE}/${UFS_PLATFORM}" ]]; then + workspace="${WORKSPACE}/${UFS_PLATFORM}" +else + workspace="$(cd -- "${script_dir}/../.." 
&& pwd)" +fi + +echo "STAGE_NAME=${STAGE_NAME}" # from pipeline +outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME}.csv}" + +function disk_usage() { + local directory=${1:-${PWD}} + local depth=${2:-1} + local size=${3:-k} + echo "Disk usage: ${JOB_NAME:-ci}/${UFS_PLATFORM}/$(basename $directory)" + ( + cd $directory || exit 1 + echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" + du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ + while read line ; do + arr=($line); inode=${arr[0]}; filename=${arr[1]}; + echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' $filename),${inode:-0},$(du -Px -s -${size:-k} --time $filename)" | tr '\t' ',' ; + done | sort -t, -k5 -n #-r + ) + echo "" +} + +disk_usage $1 $2 $3 | tee ${outfile} diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh new file mode 100755 index 0000000000..ee81b04a5d --- /dev/null +++ b/.cicd/scripts/wm_build.sh @@ -0,0 +1,24 @@ +#!/bin/bash +set -eu + +SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") +UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") +readonly UFS_MODEL_DIR +echo "UFS MODEL DIR: ${UFS_MODEL_DIR}" + +export CC=${CC:-mpicc} +export CXX=${CXX:-mpicxx} +export FC=${FC:-mpif90} + +cd "${UFS_MODEL_DIR}" +pwd +ls -l ./build.sh + +BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} +TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} +mkdir -p "${BUILD_DIR}" + +cd "${BUILD_DIR}" +pwd + diff --git a/.cicd/scripts/wm_init.sh b/.cicd/scripts/wm_init.sh new file mode 100755 index 0000000000..27db702be1 --- /dev/null +++ b/.cicd/scripts/wm_init.sh @@ -0,0 +1,18 @@ +#!/bin/bash +set -eu + +SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") +UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") +readonly UFS_MODEL_DIR +echo "UFS MODEL DIR: ${UFS_MODEL_DIR}" + +export CC=${CC:-mpicc} +export CXX=${CXX:-mpicxx} +export FC=${FC:-mpif90} + +BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} +TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} + +cd "${UFS_MODEL_DIR}" +pwd diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh new file mode 100755 index 0000000000..9ac36ac6e1 --- /dev/null +++ b/.cicd/scripts/wm_test.sh @@ -0,0 +1,19 @@ +#!/bin/bash -x +set -eu + +SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") +UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") +readonly UFS_MODEL_DIR +echo "UFS MODEL DIR: ${UFS_MODEL_DIR}" + +export CC=${CC:-mpicc} +export CXX=${CXX:-mpicxx} +export FC=${FC:-mpif90} + +BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} +TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} + +cd "${TESTS_DIR}" +pwd +ls -al ./rt.sh From a8c0c621dd1cb7dc97bff275609cfc74fae83543 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 8 Nov 2024 12:44:52 -0600 Subject: [PATCH 002/106] start with a Jenkinsfile that doesnt push logs to the repo --- .cicd/Jenkinsfile.pipeline | 388 +++++++++++++++++++++++++++++++++++++ 1 file changed, 388 insertions(+) create mode 100644 .cicd/Jenkinsfile.pipeline diff --git a/.cicd/Jenkinsfile.pipeline b/.cicd/Jenkinsfile.pipeline new file mode 100644 index 0000000000..58f1d239b4 --- /dev/null +++ b/.cicd/Jenkinsfile.pipeline @@ -0,0 +1,388 @@ +matchedNode = [] +generateBaselineNode = [] +for (label in pullRequest.labels) { + listOfLabelNodeNames = jenkins.model.Jenkins.instance.nodes.collect { + node -> node.getLabelString().contains(label) ? 
node.name : null + + if ((label.matches(node.getLabelString()+"-(.*)"))) { + matchedNode += node.getLabelString() + } + + if ((label.matches(node.getLabelString()+"(.*)-BL"))) { + generateBaselineNode += node.getLabelString() + } + } +} + +modifiedLabels = matchedNode.collect{"'" + it + "'"} +baselineLabels = generateBaselineNode.collect{"'" + it + "'"} +def generateStage(nodeLabel) { + return { + stage("Running on ${nodeLabel}") { + node(nodeLabel) { + cleanWs() + checkout scm + script { + try { + echo "Running on ${nodeLabel}" + if (baselineLabels.contains(nodeLabel)) { + sh ''' + git submodule update --init --recursive + cd tests + pwd + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + + if [[ $machine =~ "Jet" ]] + then + echo "Creating baselines on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh hercules develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + cd .. + module load git/2.28.0 + git submodule update --init --recursive + cd tests + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed -i 's|/work/noaa/stmp/${USER}|/work/noaa/epic/stmp/role-epic/|g' rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh orion develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /gpfs/f5/epic/scratch/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + chgrp ncep develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Creating baselines on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -r -l rt.conf + export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Creating baselines on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /glade/derecho/scratch/epicufsrt/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + echo "Testing concluded...removing labels for $machine from $GIT_URL" + + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + + } + else { + sh ''' + git submodule update --init --recursive + pwd + cd tests + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + + if [[ $machine =~ "Jet" ]] + then + echo "Running regression tests on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Running regression tests on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + echo "Running regression tests on $machine" + cd .. + module load git/2.28.0 + git submodule update --init --recursive + cd tests + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + cd logs/ + cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Running regression tests on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Running regression tests on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + echo "Testing concluded...removing labels for $machine from $GIT_URL" + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT + + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } + } + catch(err) { + sh ''' + export machine=${NODE_NAME} + export CHANGE_ID=${CHANGE_ID} + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + cd $WORKSPACE/tests + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + echo "Testing concluded...removing labels for $machine from $GIT_URL" + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + currentBuild.result = 'FAILURE' + } + } + } + } + } +} + +def parallelStagesMap = modifiedLabels.collectEntries { + ["${it}" : generateStage(it)] +} + +pipeline { + agent none + environment { + ACCNR = 'epic' + AWS_PROD_ACCOUNT_ID = credentials('AWS_PROD_ACCOUNT_ID') + AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') + GITHUB_TOKEN = credentials('GithubJenkinsNew') + GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' + } + stages { + stage('Launch SonarQube') { + steps { + script { + echo "BRANCH_NAME=${env.CHANGE_BRANCH}" + echo "FORK_NAME=${env.CHANGE_FORK}" + echo "CHANGE_URL=${env.CHANGE_URL}" + echo "CHANGE_ID=${env.CHANGE_ID}" + build job: '/ufs-weather-model/ufs-wm-sonarqube', parameters: [ + string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'), + string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''), + string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''), + string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '') + ], wait: false + } + } + } + stage('Run Regression Tests in Parallel') { + steps { + script { + parallel parallelStagesMap + } + } + } + } + post { + success { + node('built-in') { + echo 'This will run only if successful.' + sh ''' + aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":sunny: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *succeeded*"}}' + ''' + } + } + failure { + node('built-in') { + echo 'This will run only if the run was marked as unstable.' 
+ sh ''' + aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":warning: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *failed!*"}}' + ''' + } + } + } +} From 8839312fb4e274154e6113fd9dec592c37042708 Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Wed, 13 Nov 2024 10:42:16 -0800 Subject: [PATCH 003/106] Update Jenkinsfile.pipeline list .cicd/ --- .cicd/Jenkinsfile.pipeline | 1 + 1 file changed, 1 insertion(+) diff --git a/.cicd/Jenkinsfile.pipeline b/.cicd/Jenkinsfile.pipeline index 58f1d239b4..57d9bc62fb 100644 --- a/.cicd/Jenkinsfile.pipeline +++ b/.cicd/Jenkinsfile.pipeline @@ -28,6 +28,7 @@ def generateStage(nodeLabel) { if (baselineLabels.contains(nodeLabel)) { sh ''' git submodule update --init --recursive + ls -al .cicd/* cd tests pwd export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) From d2ca0c808316860d6155793d847cd0514cbf244d Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 13 Nov 2024 13:41:57 -0600 Subject: [PATCH 004/106] template test scripts Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile.pipeline | 1 + .cicd/scripts/create_baseline.sh | 19 +++++++++++++++++++ .cicd/scripts/disk_usage.sh | 2 +- .cicd/scripts/regression_test.sh | 19 +++++++++++++++++++ 4 files changed, 40 insertions(+), 1 deletion(-) create mode 100755 .cicd/scripts/create_baseline.sh create mode 100755 .cicd/scripts/regression_test.sh diff --git a/.cicd/Jenkinsfile.pipeline b/.cicd/Jenkinsfile.pipeline index 58f1d239b4..57d9bc62fb 100644 --- a/.cicd/Jenkinsfile.pipeline +++ b/.cicd/Jenkinsfile.pipeline @@ -28,6 +28,7 @@ def generateStage(nodeLabel) { if (baselineLabels.contains(nodeLabel)) { sh ''' git submodule update --init --recursive + ls -al .cicd/* cd tests pwd export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh new file mode 100755 index 0000000000..9ac36ac6e1 --- /dev/null +++ b/.cicd/scripts/create_baseline.sh @@ -0,0 +1,19 @@ +#!/bin/bash -x +set -eu + +SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") +UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") +readonly UFS_MODEL_DIR +echo "UFS MODEL DIR: ${UFS_MODEL_DIR}" + +export CC=${CC:-mpicc} +export CXX=${CXX:-mpicxx} +export FC=${FC:-mpif90} + +BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} +TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} + +cd "${TESTS_DIR}" +pwd +ls -al ./rt.sh diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 22ffbeaa54..8c0fa08eef 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -19,7 +19,7 @@ script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd) # Get repository root from Jenkins WORKSPACE variable if set, otherwise, set # relative to script directory. declare workspace -if [[ -n "${WORKSPACE}/${UFS_PLATFORM}" ]]; then +if [[ -d "${WORKSPACE}/${UFS_PLATFORM}" ]]; then workspace="${WORKSPACE}/${UFS_PLATFORM}" else workspace="$(cd -- "${script_dir}/../.." 
&& pwd)" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh new file mode 100755 index 0000000000..9ac36ac6e1 --- /dev/null +++ b/.cicd/scripts/regression_test.sh @@ -0,0 +1,19 @@ +#!/bin/bash -x +set -eu + +SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") +UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") +readonly UFS_MODEL_DIR +echo "UFS MODEL DIR: ${UFS_MODEL_DIR}" + +export CC=${CC:-mpicc} +export CXX=${CXX:-mpicxx} +export FC=${FC:-mpif90} + +BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} +TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} + +cd "${TESTS_DIR}" +pwd +ls -al ./rt.sh From e6d3472479e1d266d6e96f9b789d6f40433a8823 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 13 Nov 2024 14:24:15 -0600 Subject: [PATCH 005/106] lint issues Signed-off-by: Bruce Kropp --- .cicd/scripts/disk_usage.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 8c0fa08eef..70b6041b91 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -10,9 +10,9 @@ # size=$3 # outfile=$4 -[[ -n ${WORKSPACE} ]] || WORKSPACE=$(pwd) -[[ -n ${UFS_PLATFORM} ]] || UFS_PLATFORM=$(hostname -s 2>/dev/null) || UFS_PLATFORM=$(hostname 2>/dev/null) -[[ -n ${UFS_COMPILER} ]] || UFS_COMPILER=compiler +[[ -n ${WORKSPACE} ]] || WORKSPACE="$(pwd)" +[[ -n ${UFS_PLATFORM} ]] || UFS_PLATFORM="$(hostname -s 2>/dev/null)" || UFS_PLATFORM="$(hostname 2>/dev/null)" +[[ -n ${UFS_COMPILER} ]] || UFS_COMPILER="compiler" script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" @@ -29,20 +29,20 @@ echo "STAGE_NAME=${STAGE_NAME}" # from pipeline outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME}.csv}" function disk_usage() { - local directory=${1:-${PWD}} - local depth=${2:-1} - local size=${3:-k} - echo "Disk usage: ${JOB_NAME:-ci}/${UFS_PLATFORM}/$(basename $directory)" + local directory="${1:-${PWD}}" + local depth="${2:-1}" + local size="${3:-k}" + echo "Disk usage: ${JOB_NAME:-ci}/${UFS_PLATFORM}/$(basename ${directory})" ( - cd $directory || exit 1 + cd ${directory} || exit 1 echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ while read line ; do - arr=($line); inode=${arr[0]}; filename=${arr[1]}; - echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' $filename),${inode:-0},$(du -Px -s -${size:-k} --time $filename)" | tr '\t' ',' ; + arr="(${line})"; inode="${arr[0]}"; filename="${arr[1]}"; + echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' ${filename}),${inode:-0},$(du -Px -s -${size:-k} --time ${filename})" | tr '\t' ',' ; done | sort -t, -k5 -n #-r ) echo "" } -disk_usage $1 $2 $3 | tee ${outfile} +disk_usage ${1} ${2} ${3} | tee ${outfile} From 042f1dca760c4f5dd44860fc4392678922e16116 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 13 Nov 2024 15:31:30 -0600 Subject: [PATCH 006/106] regression test scripts Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 173 ++++++++++++++++++++++++++++++- .cicd/scripts/regression_test.sh | 130 ++++++++++++++++++++++- 2 files changed, 299 insertions(+), 4 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 9ac36ac6e1..4e9824db0e 100755 --- a/.cicd/scripts/create_baseline.sh +++ 
b/.cicd/scripts/create_baseline.sh @@ -1,6 +1,11 @@ #!/bin/bash -x set -eu +echo "USER=${USER}" +echo "WORKSPACE=${WORKSPACE}" + export machine=${NODE_NAME} + export ACCNR=epic + SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") @@ -14,6 +19,170 @@ export FC=${FC:-mpif90} BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} -cd "${TESTS_DIR}" pwd -ls -al ./rt.sh +ls -al .cicd/* +ls -al ${TESTS_DIR}/rt.sh + +function create_baseline() { + git submodule update --init --recursive + pwd + ls -al .cicd/* + cd tests + pwd + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + + if [[ $machine =~ "Jet" ]] + then + echo "Creating baselines on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh hercules develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + cd .. + module load git/2.28.0 + git submodule update --init --recursive + cd tests + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed -i 's|/work/noaa/stmp/${USER}|/work/noaa/epic/stmp/role-epic/|g' rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh orion develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /gpfs/f5/epic/scratch/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + chgrp ncep develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Creating baselines on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -r -l rt.conf + export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Creating baselines on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /glade/derecho/scratch/epicufsrt/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + echo "Testing concluded for $machine" +} + +function post_test() { + echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + echo "GIT_URL=${GIT_URL}" + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL +} + diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 9ac36ac6e1..b58f34745a 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -1,6 +1,11 @@ #!/bin/bash -x set -eu +echo "USER=${USER}" +echo "WORKSPACE=${WORKSPACE}" +export machine=${NODE_NAME} +export ACCNR=epic + SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") @@ -14,6 +19,127 @@ export FC=${FC:-mpif90} BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} -cd "${TESTS_DIR}" pwd -ls -al ./rt.sh +ls -al .cicd/* +ls -al ${TESTS_DIR}/rt.sh + +function regression_test() { + git submodule update --init --recursive + pwd + ls -al .cicd/* + cd tests + pwd + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + + if [[ $machine =~ "Jet" ]] + then + echo "Running regression tests on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Running regression tests on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + echo "Running regression tests on $machine" + cd .. 
+ module load git/2.28.0 + git submodule update --init --recursive + cd tests + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + cd logs/ + cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Running regression tests on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Running regression tests on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + + echo "Testing concluded for $machine" +} + +function post_test() { + echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') +echo "GIT_URL=${GIT_URL}" + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT +} + +regression_test From 9c13414e16c1598e30ed4f66cafed22d75e18424 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 13 Nov 2024 18:21:40 -0600 Subject: [PATCH 007/106] optional suite Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 32 ++++++++++++++++++++------------ .cicd/scripts/regression_test.sh | 22 +++++++++++++++------- 2 files changed, 35 insertions(+), 19 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 4e9824db0e..df789c98ad 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -24,6 +24,14 @@ ls -al .cicd/* ls -al ${TESTS_DIR}/rt.sh function create_baseline() { + opt="-l" + suite="rt.conf" + [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 + [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" + [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" + [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" + [[ ${suite} = rt.conf ]] && opt="-l" + git submodule update --init --recursive pwd ls -al .cicd/* @@ -37,14 +45,14 @@ function create_baseline() { then echo "Creating baselines on $machine" export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log elif [[ $machine =~ "Hercules" ]] then echo "Creating baselines on $machine" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir develop-${BL_DATE} @@ -54,7 +62,7 @@ function create_baseline() { ./adjust_permissions.sh hercules develop-${BL_DATE} chgrp noaa-hpc develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -73,7 +81,7 @@ function create_baseline() { export dprefix=/work2/noaa/$ACCNR/$USER sed -i 's|/work/noaa/stmp/${USER}|/work/noaa/epic/stmp/role-epic/|g' rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/work/noaa/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir develop-${BL_DATE} @@ -83,7 +91,7 @@ function create_baseline() { ./adjust_permissions.sh orion develop-${BL_DATE} chgrp noaa-hpc develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -95,7 +103,7 @@ function create_baseline() { elif [[ $machine =~ "Gaea" ]] then echo "Creating 
baselines on $machine" - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -105,7 +113,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v @@ -118,14 +126,14 @@ function create_baseline() { then echo "Creating baselines on $machine" export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -r -l rt.conf + ./rt.sh -a ${ACCNR} -c -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v @@ -138,14 +146,14 @@ function create_baseline() { then echo "Creating baselines on $machine" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ cd ${DISKNM}/NEMSfv3gfs/ mkdir develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v @@ -156,7 +164,7 @@ function create_baseline() { cd $WORKSPACE/tests/ else echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log fi echo "Testing concluded for $machine" } diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index b58f34745a..9f4d9276d4 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -24,6 +24,14 @@ ls -al .cicd/* ls -al ${TESTS_DIR}/rt.sh function regression_test() { + opt="-l" + suite="rt.conf" + [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 + [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" + [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" + [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" + [[ ${suite} = rt.conf ]] && opt="-l" + git submodule update --init --recursive pwd ls -al .cicd/* @@ -36,14 +44,14 @@ function regression_test() { then echo "Running regression tests on $machine" export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee 
$WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log elif [[ $machine =~ "Hercules" ]] then echo "Running regression tests on $machine" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -61,7 +69,7 @@ function regression_test() { cd tests export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -73,7 +81,7 @@ function regression_test() { elif [[ $machine =~ "Gaea" ]] then echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log unset LD_LIBRARY_PATH cd logs/ cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace @@ -87,7 +95,7 @@ function regression_test() { then echo "Running regression tests on $machine" export ACCNR=epic - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v @@ -100,7 +108,7 @@ function regression_test() { then echo "Running regression tests on $machine" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v @@ -111,7 +119,7 @@ function regression_test() { cd $WORKSPACE/tests/ else echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log fi echo "Testing concluded for $machine" From d286efa8e7c483f8b3b73b509b8f90fa868c81a4 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 15 Nov 2024 11:36:11 -0600 Subject: [PATCH 008/106] wrap suite name and compiler in quotes for -n opt Signed-off-by: Bruce Kropp --- .cicd/scripts/regression_test.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 9f4d9276d4..a8e9686851 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -30,7 +30,7 @@ function regression_test() { [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" - [[ ${suite} = rt.conf ]] && opt="-l" + [[ "${suite}" = rt.conf ]] && opt="-l" git submodule update --init --recursive pwd @@ -44,14 +44,14 @@ function regression_test() { then echo "Running regression 
tests on $machine" export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log elif [[ $machine =~ "Hercules" ]] then echo "Running regression tests on $machine" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -69,7 +69,7 @@ function regression_test() { cd tests export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -81,7 +81,7 @@ function regression_test() { elif [[ $machine =~ "Gaea" ]] then echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log unset LD_LIBRARY_PATH cd logs/ cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace @@ -95,7 +95,7 @@ function regression_test() { then echo "Running regression tests on $machine" export ACCNR=epic - ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v @@ -108,7 +108,7 @@ function regression_test() { then echo "Running regression tests on $machine" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v @@ -119,7 +119,7 @@ function regression_test() { cd $WORKSPACE/tests/ else echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log fi echo "Testing concluded for $machine" From be0337c440855f1e4df033889c5155babd4285e1 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 15 Nov 2024 14:36:52 -0600 Subject: [PATCH 009/106] create a tests/log/ dir Signed-off-by: Bruce Kropp --- .cicd/scripts/regression_test.sh | 105 ++++++++++++++++--------------- 1 file changed, 53 insertions(+), 52 deletions(-) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index a8e9686851..afb99fde71 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -32,22 +32,23 @@ function regression_test() { [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ "${suite}" = rt.conf ]] && opt="-l" - git submodule update --init --recursive - pwd - ls -al .cicd/* - cd tests + git submodule update --init --recursive + pwd + ls -al .cicd/* + cd tests pwd - export machine=${NODE_NAME} 
- export PATH=$PATH:~/bin + export machine=${NODE_NAME} + export PATH=$PATH:~/bin - if [[ $machine =~ "Jet" ]] - then - echo "Running regression tests on $machine" + mkdir -p logs/ + if [[ $machine =~ "Jet" ]] + then + echo "Running regression tests on $machine" export dprefix=/lfs1/NAGAPE/$ACCNR/$USER ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Running regression tests on $machine" + elif [[ $machine =~ "Hercules" ]] + then + echo "Running regression tests on $machine" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic @@ -60,9 +61,9 @@ function regression_test() { cd .. && cd .. && cd .. cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - echo "Running regression tests on $machine" + elif [[ $machine =~ "Orion" ]] + then + echo "Running regression tests on $machine" cd .. module load git/2.28.0 git submodule update --init --recursive @@ -70,17 +71,17 @@ function regression_test() { export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_orion.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Running regression tests on $machine" + cd logs/ + cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Running regression tests on $machine" ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log unset LD_LIBRARY_PATH cd logs/ @@ -91,24 +92,24 @@ function regression_test() { cd .. && cd .. && cd .. cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Running regression tests on $machine" - export ACCNR=epic - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_hera.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] - then - echo "Running regression tests on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hera" ]] + then + echo "Running regression tests on $machine" + export ACCNR=epic + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
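The mkdir -p logs/ added in PATCH 009 above is what keeps the tee pipelines from failing on a fresh checkout: tee writes into an existing directory but does not create logs/ itself. A minimal illustration, following the script's own layout rather than adding anything new:

  cd "${WORKSPACE}/tests"
  mkdir -p logs/                                  # tee cannot create this directory on its own
  ./rt.sh -a "${ACCNR}" -l rt.conf | tee "logs/RT-run-${machine}.log"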
+ cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Running regression tests on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v @@ -117,20 +118,20 @@ function regression_test() { cd .. && cd .. && cd .. cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ - else - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi + else + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi echo "Testing concluded for $machine" } function post_test() { echo "Testing concluded...removing labels for $machine from $GIT_URL" - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') -echo "GIT_URL=${GIT_URL}" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') From 10ceba8503faed8cd33faf27ed1debf75fa66b42 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 15 Nov 2024 16:25:00 -0600 Subject: [PATCH 010/106] define WORKSPACE within machine Signed-off-by: Bruce Kropp --- .cicd/scripts/regression_test.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index afb99fde71..646cf8ca7e 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -24,6 +24,10 @@ ls -al .cicd/* ls -al ${TESTS_DIR}/rt.sh function regression_test() { + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + local WORKSPACE="$(pwd)/${machine}" + opt="-l" suite="rt.conf" [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 @@ -37,8 +41,6 @@ function regression_test() { ls -al .cicd/* cd tests pwd - export machine=${NODE_NAME} - export PATH=$PATH:~/bin mkdir -p logs/ if [[ $machine =~ "Jet" ]] From b5f956ae3594379f12775dcf4ccc1f49ed5527a7 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 15 Nov 2024 17:26:37 -0600 Subject: [PATCH 011/106] define WORKSPACE as PWD Signed-off-by: Bruce Kropp --- .cicd/scripts/regression_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 646cf8ca7e..e512e879ef 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -26,7 +26,7 @@ ls -al ${TESTS_DIR}/rt.sh function regression_test() { export machine=${NODE_NAME} export PATH=$PATH:~/bin - local WORKSPACE="$(pwd)/${machine}" + local WORKSPACE="$(pwd)" opt="-l" suite="rt.conf" From 588ffebf096c274be41ae7d6dedd2ad201d3cf17 Mon Sep 17 00:00:00 2001 From: Bruce 
Kropp Date: Fri, 15 Nov 2024 18:48:06 -0600 Subject: [PATCH 012/106] temp save out the test log Signed-off-by: Bruce Kropp --- .cicd/scripts/regression_test.sh | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index e512e879ef..bd86e31ee9 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -56,12 +56,13 @@ function regression_test() { export ACCNR=epic ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ - cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace + cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. - cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ + pwd + cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Orion" ]] then @@ -74,12 +75,13 @@ function regression_test() { sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ - cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace + cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. - cp RegressionTests_orion.log $WORKSPACE/tests/logs/ + pwd + cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Gaea" ]] then @@ -87,12 +89,13 @@ function regression_test() { ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log unset LD_LIBRARY_PATH cd logs/ - cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. - cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + pwd + cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Hera" ]] then @@ -100,12 +103,13 @@ function regression_test() { export ACCNR=epic ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ - cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. - cp RegressionTests_hera.log $WORKSPACE/tests/logs/ + pwd + cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Derecho" ]] then @@ -113,12 +117,13 @@ function regression_test() { export ACCNR=nral0032 ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ - cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace + cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
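The $(dirname $WORKSPACE) changes in PATCH 012 stage each RegressionTests_<machine>.log one directory above the Jenkins workspace before git fetch / git reset FETCH_HEAD --hard restores the checkout, then copy the file back under tests/logs/ where the later archive step looks for it. Reduced to its essentials, the pattern is roughly the following sketch (machine_name is a placeholder, not the patch's literal variable):

  save_dir="$(dirname "${WORKSPACE}")"            # location that survives the reset below
  cp "logs/RegressionTests_${machine_name}.log" "${save_dir}/"
  git fetch --no-recurse-submodules origin
  git reset FETCH_HEAD --hard                     # puts tracked files back to the fetched state
  cp "${save_dir}/RegressionTests_${machine_name}.log" "${WORKSPACE}/tests/logs/"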
- cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ + pwd + cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ else echo "Running regression tests on $machine" From d3982fe3a19a596689b27707f70810ea3cfce977 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 11:41:41 -0600 Subject: [PATCH 013/106] align scripts Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 308 ++++++++++++++++--------------- .cicd/scripts/regression_test.sh | 179 +++++++++--------- 2 files changed, 258 insertions(+), 229 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index df789c98ad..777c0e516f 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -3,8 +3,8 @@ set -eu echo "USER=${USER}" echo "WORKSPACE=${WORKSPACE}" - export machine=${NODE_NAME} - export ACCNR=epic +export machine=${NODE_NAME} +export ACCNR=epic SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") @@ -24,173 +24,191 @@ ls -al .cicd/* ls -al ${TESTS_DIR}/rt.sh function create_baseline() { + export machine=${NODE_NAME} + export PATH=$PATH:~/bin + local WORKSPACE="$(pwd)" + + account="-a ${ACCNR}" opt="-l" suite="rt.conf" [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" - [[ ${suite} = rt.conf ]] && opt="-l" - - git submodule update --init --recursive - pwd - ls -al .cicd/* - cd tests - pwd - export machine=${NODE_NAME} - export PATH=$PATH:~/bin - export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) - - if [[ $machine =~ "Jet" ]] - then - echo "Creating baselines on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -c -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Creating baselines on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT + [[ "${suite}" = rt.conf ]] && opt="-l" + + #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" + #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" + #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" + + git submodule update --init --recursive + pwd + ls -al .cicd/* + cd tests + pwd + + [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf + + mkdir -p logs/ + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + + if [[ $machine =~ "Jet" ]] + then + echo "Creating baselines on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh + ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir develop-${BL_DATE} cd 
/work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh hercules develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh hercules develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ - cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - cd .. - module load git/2.28.0 - git submodule update --init --recursive - cd tests - echo "Creating baselines on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed -i 's|/work/noaa/stmp/${USER}|/work/noaa/epic/stmp/role-epic/|g' rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/work/noaa/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh orion develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} + cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + cd .. + module load git/2.28.0 + git submodule update --init --recursive + cd tests + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh orion develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
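Every platform block in this create_baseline.sh hunk repeats the shape shown for Hercules here: run rt.sh -c to generate baselines, install them under ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}, fix group ownership, then rerun rt.sh without -c to verify against the freshly installed set. A stripped-down sketch of that sequence (the helper name and stage_dir argument are illustrative; DISKNM, the scratch path, and the group differ per machine):

  install_and_verify_baseline() {                 # hypothetical helper, not in the repo
    local stage_dir="$1"                          # the platform's FV3_RT scratch area
    ./rt.sh -a "${ACCNR}" -c -e ${opt} "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log"
    mkdir "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}"
    rsync -a "${stage_dir}/REGRESSION_TEST/" "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}"
    chgrp noaa-hpc "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}"    # ncep on Gaea
    cd "${WORKSPACE}/tests"
    ./rt.sh -a "${ACCNR}" -e ${opt} "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log"
  }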
- cp RegressionTests_orion.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - unset LD_LIBRARY_PATH - export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - chgrp ncep develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ - cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Creating baselines on $machine" - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT + elif [[ $machine =~ "Gaea" ]] + then + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /gpfs/f5/epic/scratch/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + chgrp ncep develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Creating baselines on $machine" + export ACCNR=epic + sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh + ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
- cp RegressionTests_hera.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] - then - echo "Creating baselines on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /glade/derecho/scratch/epicufsrt/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - else - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -r ${opt} ${suite} | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi - echo "Testing concluded for $machine" + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Creating baselines on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /glade/derecho/scratch/epicufsrt/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ pwd + cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + echo "Testing concluded for $machine" } function post_test() { - echo "Testing concluded...removing labels for $machine from $GIT_URL" - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - echo "GIT_URL=${GIT_URL}" - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + echo "GIT_URL=${GIT_URL}" + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL } diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index bd86e31ee9..4134157115 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -28,6 +28,7 @@ function regression_test() { export PATH=$PATH:~/bin local WORKSPACE="$(pwd)" + account="-a ${ACCNR}" opt="-l" suite="rt.conf" [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 @@ -36,126 +37,136 @@ function regression_test() { [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ "${suite}" = rt.conf ]] && opt="-l" + #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" + #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" + #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" + git submodule update --init --recursive pwd ls -al .cicd/* cd tests pwd + [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf + mkdir -p logs/ - if [[ $machine =~ "Jet" ]] - then - echo "Running regression tests on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Running regression tests on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + + if [[ $machine =~ "Jet" ]] + then + echo "Running regression tests on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Running regression tests on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - echo "Running regression tests on $machine" - cd .. 
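For reference, the cut pipeline in post_test() slices the owner and repository name out of an ordinary https clone URL; with GIT_URL pointing at this project the two fields resolve as the comments show (the literal URL below is an example value, GIT_URL is normally provided by Jenkins):

  GIT_URL="https://github.com/ufs-community/ufs-weather-model.git"    # example value
  GIT_OWNER=$(echo "$GIT_URL" | cut -d '/' -f4)                       # ufs-community
  GIT_REPO_NAME=$(echo "$GIT_URL" | cut -d '/' -f5 | cut -d '.' -f1)  # ufs-weather-model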
- module load git/2.28.0 - git submodule update --init --recursive - cd tests - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. + cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + echo "Running regression tests on $machine" + cd .. + module load git/2.28.0 + git submodule update --init --recursive + cd tests + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. pwd cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - unset LD_LIBRARY_PATH + elif [[ $machine =~ "Gaea" ]] + then + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH cd logs/ cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. pwd cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Running regression tests on $machine" - export ACCNR=epic - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hera" ]] + then + echo "Running regression tests on $machine" + export ACCNR=epic + sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. pwd cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Derecho" ]] - then - echo "Running regression tests on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + then + echo "Running regression tests on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
+ git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - else - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi - - echo "Testing concluded for $machine" + cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + + echo "Testing concluded for $machine" } function post_test() { - echo "Testing concluded...removing labels for $machine from $GIT_URL" - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') echo "GIT_URL=${GIT_URL}" - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT } regression_test From 1e1c3cff8455c251876ef3df626c1ed3d3e7bbec Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 15:22:46 -0600 Subject: [PATCH 014/106] add wm_test script Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 342 ++++++++++++++++--------------- .cicd/scripts/regression_test.sh | 255 +++++++++++------------ .cicd/scripts/wm_test.sh | 129 +++++++++++- 3 files changed, 429 insertions(+), 297 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 777c0e516f..ad23154975 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -29,6 +29,11 @@ function create_baseline() { local WORKSPACE="$(pwd)" account="-a ${ACCNR}" + workflow="-e" + #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" + #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" + #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" + opt="-l" suite="rt.conf" [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 @@ -37,178 +42,177 @@ function create_baseline() { [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ "${suite}" = rt.conf ]] && opt="-l" - #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" - #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" - #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" - - git submodule update --init --recursive + git submodule update --init --recursive + pwd + ls -al .cicd/* + cd tests pwd - ls -al .cicd/* - cd tests - pwd - - [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf - - mkdir -p logs/ - export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) - - if [[ $machine =~ "Jet" ]] - then - echo "Creating baselines on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh - ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Creating baselines on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh hercules develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - cd .. 
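One property of the ./rt.sh ... | tee ... lines in these scripts is worth noting: create_baseline.sh runs under set -eu (visible in the hunk context above) and none of these patches set pipefail, so the pipeline's exit status is tee's and a failing rt.sh does not by itself abort the stage. If stopping on rt.sh failure were ever wanted, the usual guard would look like the sketch below; this is a hedged suggestion, not something these patches do:

  set -o pipefail                                 # let a failing rt.sh fail the whole pipeline
  ./rt.sh -a "${ACCNR}" ${opt} "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log"
  # or, without pipefail, check the first command's status right after the pipeline:
  # rc=${PIPESTATUS[0]}; [[ $rc -eq 0 ]] || exit "$rc"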
- module load git/2.28.0 - git submodule update --init --recursive - cd tests - echo "Creating baselines on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/work/noaa/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh orion develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - unset LD_LIBRARY_PATH - export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - chgrp ncep develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Creating baselines on $machine" - export ACCNR=epic - sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh - ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
- pwd - cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] - then - echo "Creating baselines on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /glade/derecho/scratch/epicufsrt/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - else - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi - echo "Testing concluded for $machine" + + [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf + + mkdir -p logs/ + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + + if [[ $machine =~ "Jet" ]] + then + echo "Creating baselines on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh + ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh hercules develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + cd .. 
+ module load git/2.28.0 + git submodule update --init --recursive + cd tests + echo "Creating baselines on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/work/noaa/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + ./adjust_permissions.sh orion develop-${BL_DATE} + chgrp noaa-hpc develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /gpfs/f5/epic/scratch/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd ${DISKNM}/NEMSfv3gfs/ + chgrp ncep develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Creating baselines on $machine" + export ACCNR=epic + sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh + ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
+ pwd + cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Creating baselines on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ + cd ${DISKNM}/NEMSfv3gfs/ + mkdir develop-${BL_DATE} + cd /glade/derecho/scratch/epicufsrt/FV3_RT + rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + cd $WORKSPACE/tests + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Creating baselines on $machine" + ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + + cd ${WORKSPACE} + + echo "Testing concluded for $machine" } function post_test() { - echo "Testing concluded...removing labels for $machine from $GIT_URL" - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - echo "GIT_URL=${GIT_URL}" - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + echo "GIT_URL=${GIT_URL}" + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL } diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 4134157115..e7d31b8e13 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -29,6 +29,11 @@ function regression_test() { local WORKSPACE="$(pwd)" account="-a ${ACCNR}" + workflow="-e" + #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" + #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" + #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" + opt="-l" suite="rt.conf" [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 @@ -37,136 +42,134 @@ function regression_test() { [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ "${suite}" = rt.conf ]] && opt="-l" - #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" - #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" - #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" - - git submodule update --init --recursive + git submodule update --init --recursive + pwd + ls -al .cicd/* + cd tests pwd - ls -al .cicd/* - cd tests - pwd - - [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf - - mkdir -p logs/ - export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) - - if [[ $machine =~ "Jet" ]] - then - echo "Running regression tests on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Running regression tests on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace - 
git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - echo "Running regression tests on $machine" - cd .. - module load git/2.28.0 - git submodule update --init --recursive - cd tests - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - unset LD_LIBRARY_PATH - cd logs/ - cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Running regression tests on $machine" - export ACCNR=epic - sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - pwd - cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] - then - echo "Running regression tests on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
- pwd - cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - else - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi - - echo "Testing concluded for $machine" + + [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf + + mkdir -p logs/ + export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + + if [[ $machine =~ "Jet" ]] + then + echo "Running regression tests on $machine" + export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + elif [[ $machine =~ "Hercules" ]] + then + echo "Running regression tests on $machine" + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export ACCNR=epic + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Orion" ]] + then + echo "Running regression tests on $machine" + cd .. + module load git/2.28.0 + git submodule update --init --recursive + cd tests + export dprefix=/work2/noaa/$ACCNR/$USER + sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Gaea" ]] + then + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + unset LD_LIBRARY_PATH + cd logs/ + cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Hera" ]] + then + echo "Running regression tests on $machine" + export ACCNR=epic + sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. 
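
The two sed edits above use different quoting on purpose. A small self-contained demonstration (rt.sh.demo is a throwaway file invented for the example) of how single quotes keep ${ACCNR} as a literal pattern inside rt.sh while double quotes expand $ACCNR before sed runs:

    # Sketch only: contrast literal and expanded patterns in sed.
    ACCNR=epic
    printf '%s\n' 'dprefix=/lfs4/HFIP/${ACCNR}/${USER}' 'stmp=/noaa/stmp/run' > rt.sh.demo
    sed -i 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' rt.sh.demo  # literal ${ACCNR} text
    sed -i "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" rt.sh.demo                          # $ACCNR expands to epic
    cat rt.sh.demo
    # -> dprefix=/lfs4/HFIP/hfv3gfs/${USER}
    # -> stmp=/noaa/epic/stmp/run
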
+ pwd + cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + elif [[ $machine =~ "Derecho" ]] + then + echo "Running regression tests on $machine" + export ACCNR=nral0032 + ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + cd logs/ + cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. && cd .. && cd .. + pwd + cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cd $WORKSPACE/tests/ + else + echo "Running regression tests on $machine" + ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + fi + + cd ${WORKSPACE} + + echo "Testing concluded for $machine" } function post_test() { - echo "Testing concluded...removing labels for $machine from $GIT_URL" - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - echo "GIT_URL=${GIT_URL}" - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) - - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT + echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo $CHANGE_ID + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + echo "GIT_URL=${GIT_URL}" + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + + #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT } regression_test diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 9ac36ac6e1..0aaa12eed2 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -14,6 +14,131 @@ export FC=${FC:-mpif90} BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} -cd "${TESTS_DIR}" +( + cd "${TESTS_DIR}" + pwd + ls -al ./rt.sh +) + pwd -ls -al ./rt.sh +echo "NODE_NAME=${NODE_NAME}" +echo "USER=${USER}" +echo "UFS_PLATFORM=<${UFS_PLATFORM}>" +echo "UFS_COMPILER=<${UFS_COMPILER}>" +echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" +echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" +export workspace=$(pwd) +machine=${NODE_NAME} +echo "machine=<${machine}>" +machine_id=${UFS_PLATFORM} +if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then + machine_id="noaacloud" + #sed -i -e "s|EPIC/spack-stack/spack-stack-1.5.0|spack-stack/spack-stack-1.5.1|g" modulefiles/ufs_noaacloud.intel.lua +fi +echo "machine_id=<${machine_id}>" + +status=0 + +export LMOD_SH_DBG_ON=0 +echo "LMOD_VERSION=${LMOD_VERSION}" + +/usr/bin/time -p \ + -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ + -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ +ls -l build/ufs_model +status=$? + +#[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long +#[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false +#[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false + +rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt +if [[ ${WM_REGRESSION_TESTS} = true ]] ; then + + set +x + if [[ ${UFS_PLATFORM} = orion ]] ; then + git --version + #module --ignore_cache load git/2.28.0 + git submodule update --init --recursive + fi + + if [[ ${UFS_PLATFORM} = gaea ]] ; then + source /gpfs/f5/epic/scratch/role.epic/contrib/Lmod_init_C5.sh + echo "LMOD_VERSION=${LMOD_VERSION}" + fi + + module use $PWD/modulefiles >/dev/null 2>&1 + module load ufs_${machine_id}.${UFS_COMPILER} || true + [[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 + module list + set -x + + #export PATH=$PATH:~/bin + echo "CHANGE_ID=$CHANGE_ID" + + export FV3_RT_DIR= + #export JENKINS_WORKSPACE= + workflow="-e" # -e = ecflow (default) || -r = rocoto + + #export ACCNR=epic + echo "ACCNR=${ACCNR}" + + opt="-l" + suite="rt.conf" + [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" + [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" + [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" + [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" + [[ ${suite} = rt.conf ]] && opt="-l" + + [[ ${WM_CREATE_BASELINE} = true ]] && export BL_DATE=$(cat tests/bl_date.conf | cut -d '=' -f2) + [[ ! 
-f tests/logs/RegressionTests_${UFS_PLATFORM,,}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM,,}.log tests/logs/RegressionTests_${UFS_PLATFORM,,}.log.orig + + rm -f ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_*-log.txt + if [[ ${WM_CREATE_BASELINE} = true ]] ; then + echo "start Creating baseline on ${UFS_PLATFORM} ..." + ls -al .cicd/* + echo "Pipeline Creating Baseline Tests ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}: (${opt} [${suite:=rt.conf}])" + /usr/bin/time -p \ + -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ + -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ + ./.cicd/scripts/create_baseline.sh ${account} ${baseline} ${workflow} -k ${opt} "${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt + status=${PIPESTATUS[0]} + echo "Pipeline Completed Baseline Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" + else + echo "skip Creating baseline on ${UFS_PLATFORM}." + ls -al .cicd/* + echo "Pipeline Running Regression Tests ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}: (${opt} [${suite:=rt.conf}])" + /usr/bin/time -p \ + -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ + -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ + ./.cicd/scripts/regression_test.sh ${account} ${baseline} ${workflow} -k ${opt} "${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt + status=${PIPESTATUS[0]} + echo "Pipeline Completed Regression Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" + fi + + cd tests/ + cd logs/ + #cp RegressionTests_${machine_id}.log ${JENKINS_WORKSPACE} + git remote -v + git fetch --no-recurse-submodules origin + git reset FETCH_HEAD --hard + cd .. #&& cd .. && cd .. + + pwd + ls -al . + ls -al $WORKSPACE/${machine_id}/tests/logs/. + + ls -al logs/. + ## Test Results ... + echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + egrep " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + egrep " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + cd ${workspace} +else + echo "Pipeline skipping Regression Tests on ${UFS_PLATFORM} (${machine})" + echo "ExperimentName: null" > ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt +fi +find ${workspace}/tests/logs -ls +echo "Pipeline Tests on ${UFS_PLATFORM} complete. 
status=$status" +exit $status From 0cc14dd0a7913c2d72bb70eab82fee9c2fedc338 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 16:56:22 -0600 Subject: [PATCH 015/106] check git for orion Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 3 ++- .cicd/scripts/regression_test.sh | 3 ++- .cicd/scripts/wm_test.sh | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index ad23154975..b97765c018 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -88,7 +88,8 @@ function create_baseline() { elif [[ $machine =~ "Orion" ]] then cd .. - module load git/2.28.0 + #module load git/2.28.0 + git --version git submodule update --init --recursive cd tests echo "Creating baselines on $machine" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index e7d31b8e13..e50a70ff4f 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -79,7 +79,8 @@ function regression_test() { then echo "Running regression tests on $machine" cd .. - module load git/2.28.0 + #module load git/2.28.0 + git --version git submodule update --init --recursive cd tests export dprefix=/work2/noaa/$ACCNR/$USER diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 0aaa12eed2..8b3caa00bb 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -27,6 +27,7 @@ echo "UFS_PLATFORM=<${UFS_PLATFORM}>" echo "UFS_COMPILER=<${UFS_COMPILER}>" echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" +echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE}>" export workspace=$(pwd) machine=${NODE_NAME} echo "machine=<${machine}>" @@ -57,8 +58,8 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then set +x if [[ ${UFS_PLATFORM} = orion ]] ; then - git --version #module --ignore_cache load git/2.28.0 + git --version git submodule update --init --recursive fi From 84805cd94f13dc5750448c4564ff9b19b9efa3c6 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 17:06:31 -0600 Subject: [PATCH 016/106] regression test skips PW cluster hosts, for now Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 8b3caa00bb..1c50b3ea8d 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -43,15 +43,12 @@ status=0 export LMOD_SH_DBG_ON=0 echo "LMOD_VERSION=${LMOD_VERSION}" -/usr/bin/time -p \ - -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ - -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ls -l build/ufs_model status=$? 
#[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long #[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false -#[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false +[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt if [[ ${WM_REGRESSION_TESTS} = true ]] ; then From 7380a89a275d5266deb9b7ab0227030c1bf8bbaa Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 17:41:22 -0600 Subject: [PATCH 017/106] fix array parsing on disk_usage.sh Signed-off-by: Bruce Kropp --- .cicd/scripts/disk_usage.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 70b6041b91..c7b311c41b 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -38,7 +38,7 @@ function disk_usage() { echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ while read line ; do - arr="(${line})"; inode="${arr[0]}"; filename="${arr[1]}"; + arr=(${line}); inode="${arr[0]}"; filename="${arr[1]}"; echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' ${filename}),${inode:-0},$(du -Px -s -${size:-k} --time ${filename})" | tr '\t' ',' ; done | sort -t, -k5 -n #-r ) From 5a7d187d52df6dd4d69528418f81b6e941eb893a Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 18:01:33 -0600 Subject: [PATCH 018/106] migrate build stage to wm_build.sh Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_build.sh | 47 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index ee81b04a5d..c567aed1c3 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -19,6 +19,51 @@ BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} mkdir -p "${BUILD_DIR}" -cd "${BUILD_DIR}" +( + cd "${BUILD_DIR}" + pwd +) + pwd +echo "NODE_NAME=${NODE_NAME}" +echo "UFS_PLATFORM=${UFS_PLATFORM}" +echo "UFS_COMPILER=${UFS_COMPILER}" +export workspace=$(pwd) +machine=${NODE_NAME} +echo "machine=<${machine}>" +machine_id=${UFS_PLATFORM} +if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then + machine_id="noaacloud" + sed -e "s|EPIC/spack-stack/spack-stack-1.5.0|spack-stack/spack-stack-1.5.1|g" -i modulefiles/ufs_noaacloud.intel.lua +fi +echo "machine_id=<${machine_id}>" + +if [[ ${UFS_PLATFORM} = derecho ]] ; then + export ACCNR=nral0032 +else + export ACCNR=epic +fi +echo "ACCNR=${ACCNR}" + +export LMOD_SH_DBG_ON=0 +echo "LMOD_VERSION=${LMOD_VERSION}" +if [[ ${UFS_PLATFORM} = gaea ]] ; then + source /gpfs/f5/epic/scratch/role.epic/contrib/Lmod_init_C5.sh + echo "LMOD_VERSION=${LMOD_VERSION}" +fi +set +x +module use $PWD/modulefiles >/dev/null 2>&1 +module load ufs_${machine_id}.${UFS_COMPILER} || true +[[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 || true +module list + +echo "Pipeline Building WM on ${UFS_PLATFORM} ${UFS_COMPILER} with Account=${ACCNR}." 
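
The disk_usage.sh fix above hinges on array assignment versus string assignment. A tiny stand-alone illustration (the sample du record is made up):

    # Sketch only: why arr=(${line}) works where arr="(${line})" does not.
    line="123456   ./some/build/dir"       # made-up "inodes path" record from du
    bad="(${line})"                        # one string: "(123456   ./some/build/dir)"
    good=(${line})                         # word-split by whitespace into an array
    echo "bad[0]  = ${bad[0]}"             # the whole parenthesised string
    echo "good[0] = ${good[0]}"            # -> 123456
    echo "good[1] = ${good[1]}"            # -> ./some/build/dir
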
+export CMAKE_FLAGS="-DAPP=ATM -DCCPP_SUITES=FV3_GFS_v16" +/usr/bin/time -p \ + -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_build.json \ + -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ + ./build.sh | tee ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_build-log.txt +status=${PIPESTATUS[0]} +echo "Pipeline Completed WM build on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" +ls -l build/ufs_model From beed12229f220f167d92d7f7c1307503eda84e6b Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 18 Nov 2024 18:06:03 -0600 Subject: [PATCH 019/106] enable create_baseline flow Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index b97765c018..9bd63a0c74 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -217,3 +217,4 @@ function post_test() { #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL } +create_baseline From a08312c40e8eefee8280cb801232f3b12e2598fd Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 19 Nov 2024 10:16:18 -0600 Subject: [PATCH 020/106] mkdir -p BLdate-dir Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 9bd63a0c74..e58f8a6a27 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -68,7 +68,7 @@ function create_baseline() { ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} + mkdir -p develop-${BL_DATE} cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd ${DISKNM}/NEMSfv3gfs/ @@ -99,7 +99,7 @@ function create_baseline() { ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/work/noaa/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} + mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd ${DISKNM}/NEMSfv3gfs/ @@ -123,7 +123,7 @@ function create_baseline() { unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} + mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd ${DISKNM}/NEMSfv3gfs/ @@ -147,7 +147,7 @@ function create_baseline() { ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} + mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests @@ -168,7 +168,7 @@ function create_baseline() { ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee 
$WORKSPACE/tests/logs/RT-run-$machine.log export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} + mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests From 7f1bd33315241784d4540f719e295aba2aa374fb Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 19 Nov 2024 12:57:16 -0600 Subject: [PATCH 021/106] try to clear up rsync error for setting times Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index e58f8a6a27..e0a2dded65 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -70,7 +70,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh hercules develop-${BL_DATE} chgrp noaa-hpc develop-${BL_DATE} @@ -101,7 +101,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh orion develop-${BL_DATE} chgrp noaa-hpc develop-${BL_DATE} @@ -125,7 +125,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} cd $WORKSPACE/tests @@ -149,7 +149,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ @@ -170,7 +170,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ From 6961e2d97f1b0e159e550b4ed7cd0966f22872e3 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 19 Nov 2024 14:16:41 -0600 Subject: [PATCH 022/106] capture and ignore rsync errors Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index e0a2dded65..73bbfa85f4 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -70,7 +70,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT - rsync -a --no-t REGRESSION_TEST/ 
${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh hercules develop-${BL_DATE} chgrp noaa-hpc develop-${BL_DATE} @@ -101,7 +101,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh orion develop-${BL_DATE} chgrp noaa-hpc develop-${BL_DATE} @@ -125,7 +125,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} cd $WORKSPACE/tests @@ -149,7 +149,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ @@ -170,7 +170,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ From f81b23f929ad6fd791e36bbd64c5cae8b256d449 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 19 Nov 2024 19:32:02 -0600 Subject: [PATCH 023/106] Catch errors on chgrp Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 73bbfa85f4..4471149478 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -72,8 +72,8 @@ function create_baseline() { cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh hercules develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} + ./adjust_permissions.sh hercules develop-${BL_DATE} || : + chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ @@ -103,8 +103,8 @@ function create_baseline() { cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh orion develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} + ./adjust_permissions.sh orion develop-${BL_DATE} || : + chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ @@ -127,7 +127,7 @@ function create_baseline() { cd /gpfs/f5/epic/scratch/role.epic/FV3_RT rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || 
: cd ${DISKNM}/NEMSfv3gfs/ - chgrp ncep develop-${BL_DATE} + chgrp ncep develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ From 6c8870c5409c94e46176e928e50ca64f983fe849 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 11:51:59 -0600 Subject: [PATCH 024/106] warn on missing REGRESSION_TEST/ for baseline rsync Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 4471149478..82f1098f36 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -70,7 +70,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : + ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh hercules develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : @@ -101,7 +101,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : + ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh orion develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : @@ -125,7 +125,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : + ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} || : cd $WORKSPACE/tests @@ -149,7 +149,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : + ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ @@ -170,7 +170,7 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || : + ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." 
cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log cd logs/ From b338bc1541440725e97ddb4c9b47d38d7444ce71 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 13:16:40 -0600 Subject: [PATCH 025/106] handle return condition on test scripts Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 16 +++++++++++++++- .cicd/scripts/regression_test.sh | 11 ++++++++++- .cicd/scripts/wm_test.sh | 7 ++++--- 3 files changed, 29 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 82f1098f36..724d23e2cd 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -41,6 +41,7 @@ function create_baseline() { [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ "${suite}" = rt.conf ]] && opt="-l" + local status=0 git submodule update --init --recursive pwd @@ -59,6 +60,7 @@ function create_baseline() { export dprefix=/lfs1/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} elif [[ $machine =~ "Hercules" ]] then echo "Creating baselines on $machine" @@ -66,6 +68,7 @@ function create_baseline() { sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} @@ -76,6 +79,7 @@ function create_baseline() { chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -97,6 +101,7 @@ function create_baseline() { sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} @@ -107,6 +112,7 @@ function create_baseline() { chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -120,6 +126,7 @@ function create_baseline() { then echo "Creating baselines on $machine" ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -130,6 +137,7 @@ function create_baseline() { chgrp ncep develop-${BL_DATE} || : cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v @@ -145,6 +153,7 @@ function create_baseline() { export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee 
$WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} @@ -152,6 +161,7 @@ function create_baseline() { ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v @@ -166,6 +176,7 @@ function create_baseline() { echo "Creating baselines on $machine" export ACCNR=nral0032 ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} @@ -173,6 +184,7 @@ function create_baseline() { ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v @@ -185,11 +197,13 @@ function create_baseline() { else echo "Creating baselines on $machine" ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} fi cd ${WORKSPACE} - echo "Testing concluded for $machine" + echo "Testing concluded for $machine. 
status=$status" + return $status } function post_test() { diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index e50a70ff4f..1fb7a915cb 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -41,6 +41,7 @@ function regression_test() { [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ "${suite}" = rt.conf ]] && opt="-l" + local status=0 git submodule update --init --recursive pwd @@ -59,6 +60,7 @@ function regression_test() { export dprefix=/lfs1/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} elif [[ $machine =~ "Hercules" ]] then echo "Running regression tests on $machine" @@ -66,6 +68,7 @@ function regression_test() { sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -86,6 +89,7 @@ function regression_test() { export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v @@ -99,6 +103,7 @@ function regression_test() { then echo "Running regression tests on $machine" ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH cd logs/ cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace @@ -115,6 +120,7 @@ function regression_test() { export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v @@ -129,6 +135,7 @@ function regression_test() { echo "Running regression tests on $machine" export ACCNR=nral0032 ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} cd logs/ cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v @@ -141,11 +148,13 @@ function regression_test() { else echo "Running regression tests on $machine" ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + status=${PIPESTATUS[0]} fi cd ${WORKSPACE} - echo "Testing concluded for $machine" + echo "Testing concluded for $machine. status=$status" + return $status } function post_test() { diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 1c50b3ea8d..9adbff733c 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -52,6 +52,7 @@ status=$? rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt if [[ ${WM_REGRESSION_TESTS} = true ]] ; then + echo "Pipeline Reqression Tests on ${UFS_PLATFORM} starting." 
set +x if [[ ${UFS_PLATFORM} = orion ]] ; then @@ -133,10 +134,10 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then egrep " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt egrep " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt cd ${workspace} + find ${workspace}/tests/logs -ls + echo "Pipeline Reqression Tests on ${UFS_PLATFORM} complete. status=$status" else - echo "Pipeline skipping Regression Tests on ${UFS_PLATFORM} (${machine})" + echo "Pipeline Regression Tests on ${UFS_PLATFORM} (${machine}) skipped." echo "ExperimentName: null" > ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt fi -find ${workspace}/tests/logs -ls -echo "Pipeline Tests on ${UFS_PLATFORM} complete. status=$status" exit $status From 4d2bca22d5b8cb0851defd405682adce6ffc1397 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 15:24:06 -0600 Subject: [PATCH 026/106] report error for missing REGRESSION_TEST/ Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 55 ++++++++++++++++++-------------- .cicd/scripts/regression_test.sh | 40 ++++++++++++----------- 2 files changed, 52 insertions(+), 43 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 724d23e2cd..cd29d0c858 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -1,11 +1,28 @@ #!/bin/bash -x -set -eu +export PATH=$PATH:~/bin echo "USER=${USER}" echo "WORKSPACE=${WORKSPACE}" -export machine=${NODE_NAME} export ACCNR=epic +export account="-a ${ACCNR}" +export workflow="-e" + #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" + #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" + #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" + +export opt="-l" +export suite="rt.conf" + [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 + [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" + [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" + [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" + [[ "${suite}" = rt.conf ]] && opt="-l" + +set -eu + +export machine=${NODE_NAME} + SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") @@ -24,23 +41,8 @@ ls -al .cicd/* ls -al ${TESTS_DIR}/rt.sh function create_baseline() { - export machine=${NODE_NAME} - export PATH=$PATH:~/bin + local machine=${1:-${NODE_NAME}} local WORKSPACE="$(pwd)" - - account="-a ${ACCNR}" - workflow="-e" - #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" - #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" - #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" - - opt="-l" - suite="rt.conf" - [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 - [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" - [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" - [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" - [[ "${suite}" = rt.conf ]] && opt="-l" local status=0 git submodule update --init --recursive @@ -73,7 +75,8 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd 
/work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT - ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + ls -l REGRESSION_TEST/. + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh hercules develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : @@ -106,7 +109,8 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + ls -l REGRESSION_TEST/. + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh orion develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : @@ -132,7 +136,8 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + ls -l REGRESSION_TEST/. + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} || : cd $WORKSPACE/tests @@ -158,7 +163,8 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT - ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + ls -l REGRESSION_TEST/. + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} @@ -181,7 +187,8 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT - ls -ld REGRESSION_TEST/. && rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + ls -l REGRESSION_TEST/. + rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." 
cd $WORKSPACE/tests ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} @@ -231,4 +238,4 @@ function post_test() { #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL } -create_baseline +create_baseline ${machine} diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 1fb7a915cb..a6b823352e 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -1,11 +1,28 @@ #!/bin/bash -x -set -eu +export PATH=$PATH:~/bin echo "USER=${USER}" echo "WORKSPACE=${WORKSPACE}" -export machine=${NODE_NAME} export ACCNR=epic +export account="-a ${ACCNR}" +export workflow="-e" + #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" + #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" + #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" + +export opt="-l" +export suite="rt.conf" + [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 + [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" + [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" + [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" + [[ "${suite}" = rt.conf ]] && opt="-l" + +set -eu + +export machine=${NODE_NAME} + SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") @@ -24,23 +41,8 @@ ls -al .cicd/* ls -al ${TESTS_DIR}/rt.sh function regression_test() { - export machine=${NODE_NAME} - export PATH=$PATH:~/bin + local machine=${1:-${NODE_NAME}} local WORKSPACE="$(pwd)" - - account="-a ${ACCNR}" - workflow="-e" - #[[ ${UFS_PLATFORM} = jet ]] && workflow="-r" - #[[ ${UFS_PLATFORM} = hera ]] && workflow="-r" - #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && workflow="" - - opt="-l" - suite="rt.conf" - [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" || return 0 - [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" - [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" - [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" - [[ "${suite}" = rt.conf ]] && opt="-l" local status=0 git submodule update --init --recursive @@ -182,4 +184,4 @@ function post_test() { #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT } -regression_test +regression_test ${machine} From 1ad2783477a7d4a49bfce04717f45596786217db Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 17:05:01 -0600 Subject: [PATCH 027/106] define and use machine_id as lower case machine, use workflow variable Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 48 +++++++++++++++++--------------- .cicd/scripts/regression_test.sh | 38 ++++++++++++++----------- .cicd/scripts/wm_test.sh | 10 ++----- 3 files changed, 49 insertions(+), 47 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index cd29d0c858..93405d1755 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -42,6 +42,7 @@ ls -al ${TESTS_DIR}/rt.sh function create_baseline() { local 
machine=${1:-${NODE_NAME}} + local machine_id=${machine,,} # tolower local WORKSPACE="$(pwd)" local status=0 @@ -61,7 +62,8 @@ function create_baseline() { echo "Creating baselines on $machine" export dprefix=/lfs1/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh - ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + local workflow="-r" + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} elif [[ $machine =~ "Hercules" ]] then @@ -69,7 +71,7 @@ function create_baseline() { export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -81,16 +83,16 @@ function create_baseline() { ./adjust_permissions.sh hercules develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Orion" ]] then @@ -103,7 +105,7 @@ function create_baseline() { export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -115,21 +117,21 @@ function create_baseline() { ./adjust_permissions.sh orion develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
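For reference, the machine_id=${machine,,} introduced above uses bash (4+) case conversion; it is what lets a single cp RegressionTests_${machine_id}.log line replace the separate per-platform copies. A minimal sketch, assuming a node label such as Hercules:

  machine="Hercules"
  machine_id=${machine,,}                    # lowercase parameter expansion -> hercules
  echo "RegressionTests_${machine_id}.log"   # RegressionTests_hercules.log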
pwd - cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Gaea" ]] then echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT @@ -141,23 +143,24 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} || : cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Hera" ]] then echo "Creating baselines on $machine" export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh - ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + local workflow="-r" + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -166,22 +169,22 @@ function create_baseline() { ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Derecho" ]] then echo "Creating baselines on $machine" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ cd ${DISKNM}/NEMSfv3gfs/ @@ -190,20 +193,21 @@ function create_baseline() { ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." 
cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ else echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + local workflow="-r" + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} fi diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index a6b823352e..83a823895c 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -42,6 +42,7 @@ ls -al ${TESTS_DIR}/rt.sh function regression_test() { local machine=${1:-${NODE_NAME}} + local machine_id=${machine,,} # tolower local WORKSPACE="$(pwd)" local status=0 @@ -61,7 +62,8 @@ function regression_test() { echo "Running regression tests on $machine" export dprefix=/lfs1/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + local workflow="-r" + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} elif [[ $machine =~ "Hercules" ]] then @@ -69,16 +71,16 @@ function regression_test() { export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_hercules.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_hercules.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Orion" ]] then @@ -90,66 +92,68 @@ function regression_test() { cd tests export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_orion.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
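Both create_baseline and regression_test are now called with the machine passed as the first argument, and the functions fall back to the Jenkins node name when no argument is given. A short sketch of the ${1:-default} idiom, using a hypothetical function name:

  run_rt() {
    local machine=${1:-${NODE_NAME}}   # use the argument if supplied, else NODE_NAME
    echo "running on ${machine}"
  }
  run_rt          # runs on the value of NODE_NAME
  run_rt Hera     # runs on Hera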
pwd - cp $(dirname $WORKSPACE)/RegressionTests_orion.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Gaea" ]] then echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH cd logs/ - cp RegressionTests_gaea.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_gaea.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Hera" ]] then echo "Running regression tests on $machine" export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + local workflow="-r" + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_hera.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_hera.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ elif [[ $machine =~ "Derecho" ]] then echo "Running regression tests on $machine" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_derecho.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
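The status=${PIPESTATUS[0]} captured after every ./rt.sh ... | tee ... call is what preserves the exit code of rt.sh itself; plain $? would report tee, which almost always succeeds. A minimal illustration:

  false | tee run.log        # $? is 0 here, because tee succeeded
  status=${PIPESTATUS[0]}    # exit status of the first command in the pipeline -> 1
  echo "status=${status}"

set -o pipefail is the usual alternative when only overall pipeline success matters, but it would not distinguish which stage failed.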
pwd - cp $(dirname $WORKSPACE)/RegressionTests_derecho.log $WORKSPACE/tests/logs/ + cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ cd $WORKSPACE/tests/ else echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + local workflow="-r" + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log status=${PIPESTATUS[0]} fi diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 9adbff733c..5c12b2a82b 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -101,7 +101,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then /usr/bin/time -p \ -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ - ./.cicd/scripts/create_baseline.sh ${account} ${baseline} ${workflow} -k ${opt} "${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt + ./.cicd/scripts/create_baseline.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Baseline Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" else @@ -111,18 +111,12 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then /usr/bin/time -p \ -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ - ./.cicd/scripts/regression_test.sh ${account} ${baseline} ${workflow} -k ${opt} "${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt + ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Regression Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" fi cd tests/ - cd logs/ - #cp RegressionTests_${machine_id}.log ${JENKINS_WORKSPACE} - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. #&& cd .. && cd .. pwd ls -al . From 9cf9307c6fec50e6ae35875e7826adaabc82b085 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 17:59:29 -0600 Subject: [PATCH 028/106] wm_test.sh ready Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 5c12b2a82b..aac48bf948 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -94,6 +94,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then [[ ! -f tests/logs/RegressionTests_${UFS_PLATFORM,,}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM,,}.log tests/logs/RegressionTests_${UFS_PLATFORM,,}.log.orig rm -f ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_*-log.txt + umask 002 if [[ ${WM_CREATE_BASELINE} = true ]] ; then echo "start Creating baseline on ${UFS_PLATFORM} ..." 
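The umask 002 added just before the test runs makes files and directories created by the job group-writable, which is presumably what the shared baseline areas handled later with chgrp noaa-hpc / chgrp ncep expect. For reference:

  umask 002                  # new files 664, new directories 775
  mkdir develop-example && touch develop-example/RegressionTests.log   # illustrative names
  ls -ld develop-example     # drwxrwxr-x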
ls -al .cicd/* From e9153a09a46b4365d21faa7a0a328fe1664f3c9e Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 18:39:05 -0600 Subject: [PATCH 029/106] handle empty CHANGE_ID Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index aac48bf948..2ae532d6ec 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -73,7 +73,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then set -x #export PATH=$PATH:~/bin - echo "CHANGE_ID=$CHANGE_ID" + echo "CHANGE_ID=${CHANGE_ID:=null}" export FV3_RT_DIR= #export JENKINS_WORKSPACE= From 62863e9bce04174600ff0592c45c8a234ae2934c Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 20 Nov 2024 18:40:01 -0600 Subject: [PATCH 030/106] handle empty CHANGE_ID Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 2ae532d6ec..42b4e7009e 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -73,7 +73,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then set -x #export PATH=$PATH:~/bin - echo "CHANGE_ID=${CHANGE_ID:=null}" + echo "CHANGE_ID=${CHANGE_ID:-null}" export FV3_RT_DIR= #export JENKINS_WORKSPACE= From cabf94bf94aa0f75c38fe104d81587d5b8b14c07 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 26 Nov 2024 12:30:01 -0600 Subject: [PATCH 031/106] jet changed from /lfs1 to /lfs5 Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 2 +- .cicd/scripts/regression_test.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 93405d1755..5f55190820 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -60,7 +60,7 @@ function create_baseline() { if [[ $machine =~ "Jet" ]] then echo "Creating baselines on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + export dprefix=/lfs5/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh local workflow="-r" ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 83a823895c..9b24bdc52e 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -60,7 +60,7 @@ function regression_test() { if [[ $machine =~ "Jet" ]] then echo "Running regression tests on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER + export dprefix=/lfs5/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh local workflow="-r" ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log From 5874b4f9724014f2e8534b7cd01b88efa82b907a Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:05:13 -0800 Subject: [PATCH 032/106] Create Jenkinsfile.metrics --- .cicd/Jenkinsfile.metrics | 164 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 .cicd/Jenkinsfile.metrics diff --git a/.cicd/Jenkinsfile.metrics b/.cicd/Jenkinsfile.metrics new file mode 100644 index 0000000000..03b6b164d4 --- /dev/null +++ b/.cicd/Jenkinsfile.metrics @@ -0,0 +1,164 @@ +matchedNode = [] +generateBaselineNode = [] +for (label in pullRequest.labels) { + 
listOfLabelNodeNames = jenkins.model.Jenkins.instance.nodes.collect { + node -> node.getLabelString().contains(label) ? node.name : null + + if ((label.matches(node.getLabelString()+"-(.*)"))) { + matchedNode += node.getLabelString() + } + + if ((label.matches(node.getLabelString()+"(.*)-BL"))) { + generateBaselineNode += node.getLabelString() + } + } +} + +modifiedLabels = matchedNode.collect{"'" + it + "'"} +baselineLabels = generateBaselineNode.collect{"'" + it + "'"} +def generateStage(nodeLabel) { + return { + stage("Running on ${nodeLabel}") { + node(nodeLabel) { + cleanWs() + checkout scm + script { + try { + echo "Running on ${nodeLabel}" + if (baselineLabels.contains(nodeLabel)) { + sh ''' + + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + echo "Testing concluded...removing labels for $machine from $GIT_URL" + + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + git add logs/RegressionTests_$machine_name_logs.log + git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + git pull sshorigin $FORK_BRANCH + git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + + } + else { + sh ''' + + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + echo "Testing concluded...removing labels for $machine from $GIT_URL" + + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + git add logs/RegressionTests_$machine_name_logs.log + git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + git pull sshorigin $FORK_BRANCH + git push sshorigin HEAD:$FORK_BRANCH + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } + } + catch(err) { + sh ''' + export machine=${NODE_NAME} + export CHANGE_ID=${CHANGE_ID} + export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') + export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + cd $WORKSPACE/tests + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + echo "Testing concluded...removing labels for $machine from $GIT_URL" + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + + curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + currentBuild.result = 'FAILURE' + } + } + } + } + } +} + +def parallelStagesMap = modifiedLabels.collectEntries { + ["${it}" : generateStage(it)] +} + +pipeline { + agent none + environment { + ACCNR = 'epic' + AWS_PROD_ACCOUNT_ID = credentials('AWS_PROD_ACCOUNT_ID') + AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') + GITHUB_TOKEN = credentials('GithubJenkinsNew') + GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' + } + stages { + stage('Launch SonarQube') { + steps { + script { + echo "BRANCH_NAME=${env.CHANGE_BRANCH}" + echo "FORK_NAME=${env.CHANGE_FORK}" + echo "CHANGE_URL=${env.CHANGE_URL}" + echo "CHANGE_ID=${env.CHANGE_ID}" + build job: '/ufs-weather-model/ufs-wm-sonarqube', parameters: [ + string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'), + string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''), + string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''), + string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '') + ], wait: false + } + } 
+ } + stage('Run Regression Tests in Parallel') { + steps { + script { + parallel parallelStagesMap + } + } + } + } + post { + success { + node('built-in') { + echo 'This will run only if successful.' + sh ''' + aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":sunny: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *succeeded*"}}' + ''' + } + } + failure { + node('built-in') { + echo 'This will run only if the run was marked as unstable.' + sh ''' + aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":warning: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *failed!*"}}' + ''' + } + } + } +} From 2318541a50f04d3fefad8774edcf3f1f373516ff Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:12:49 -0800 Subject: [PATCH 033/106] Update Jenkinsfile.metrics to call wm_test.sh --- .cicd/Jenkinsfile.metrics | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.cicd/Jenkinsfile.metrics b/.cicd/Jenkinsfile.metrics index 03b6b164d4..de4400c566 100644 --- a/.cicd/Jenkinsfile.metrics +++ b/.cicd/Jenkinsfile.metrics @@ -26,8 +26,8 @@ def generateStage(nodeLabel) { try { echo "Running on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { - sh ''' - + sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/wm_test.sh"' + sh ''' git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" echo "Testing concluded...removing labels for $machine from $GIT_URL" @@ -51,8 +51,8 @@ def generateStage(nodeLabel) { } else { + sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/wm_test.sh"' sh ''' - git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" echo "Testing concluded...removing labels for $machine from $GIT_URL" @@ -99,6 +99,7 @@ def generateStage(nodeLabel) { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] currentBuild.result = 'FAILURE' } + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } } From ed5bb2300699c5da5ddcb79af7ee4955375ec27b Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Mon, 2 Dec 2024 09:27:47 -0800 Subject: [PATCH 034/106] Update Jenkinsfile.metrics define machine --- .cicd/Jenkinsfile.metrics | 46 ++++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/.cicd/Jenkinsfile.metrics b/.cicd/Jenkinsfile.metrics index de4400c566..0cf93a0981 100644 --- a/.cicd/Jenkinsfile.metrics +++ b/.cicd/Jenkinsfile.metrics @@ -26,8 +26,9 @@ def 
generateStage(nodeLabel) { try { echo "Running on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { - sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/wm_test.sh"' + sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' sh ''' + export machine=${NODE_NAME} git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" echo "Testing concluded...removing labels for $machine from $GIT_URL" @@ -35,44 +36,45 @@ def generateStage(nodeLabel) { export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - git add logs/RegressionTests_$machine_name_logs.log - git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - git pull sshorigin $FORK_BRANCH - git push sshorigin HEAD:$FORK_BRANCH + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) - curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } else { - sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/wm_test.sh"' - sh ''' + sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh ''' + export machine=${NODE_NAME} git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" echo "Testing concluded...removing labels for $machine from $GIT_URL" export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + echo "Testing concluded...removing 
labels for $machine from $GIT_URL" git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - git add logs/RegressionTests_$machine_name_logs.log - git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - git pull sshorigin $FORK_BRANCH - git push sshorigin HEAD:$FORK_BRANCH - + #git add logs/RegressionTests_$machine_name_logs.log + #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + #git pull sshorigin $FORK_BRANCH + #git push sshorigin HEAD:$FORK_BRANCH + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) - curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } catch(err) { @@ -94,12 +96,12 @@ def generateStage(nodeLabel) { GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} ''' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] currentBuild.result = 'FAILURE' } - sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${UFS_PLATFORM}/.cicd/scripts/disk_usage.sh"' + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } } From 5ea45a92a780ce2bfc42ed3f83dd620fae4266b8 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 14:34:04 -0600 Subject: [PATCH 035/106] fix lint warnings Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 102 ++++++++++++++++--------------- .cicd/scripts/disk_usage.sh | 2 +- .cicd/scripts/regression_test.sh | 92 +++++++++++++++------------- .cicd/scripts/wm_build.sh | 3 +- .cicd/scripts/wm_test.sh | 29 ++++----- 5 files changed, 121 insertions(+), 107 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 5f55190820..741036ea0c 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -1,6 +1,6 @@ #!/bin/bash -x -export PATH=$PATH:~/bin +export PATH=${PATH}:~/bin echo "USER=${USER}" echo "WORKSPACE=${WORKSPACE}" export ACCNR=epic @@ -43,7 +43,8 @@ ls -al ${TESTS_DIR}/rt.sh function create_baseline() { local machine=${1:-${NODE_NAME}} local machine_id=${machine,,} # tolower - local WORKSPACE="$(pwd)" + local WORKSPACE + WORKSPACE="$(pwd)" local status=0 git submodule update --init --recursive @@ -55,23 +56,24 @@ function create_baseline() { [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf mkdir -p logs/ - export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + export BL_DATE - if [[ $machine =~ "Jet" ]] + if [[ ${machine} =~ "Jet" ]] then - echo "Creating baselines on $machine" + echo "Creating baselines on ${machine}" export dprefix=/lfs5/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} - elif [[ $machine =~ "Hercules" ]] + elif [[ ${machine} =~ "Hercules" ]] then - echo "Creating baselines on $machine" + echo "Creating baselines on ${machine}" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a 
${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -83,29 +85,29 @@ function create_baseline() { ./adjust_permissions.sh hercules develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] + elif [[ ${machine} =~ "Orion" ]] then cd .. #module load git/2.28.0 git --version git submodule update --init --recursive cd tests - echo "Creating baselines on $machine" + echo "Creating baselines on ${machine}" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -117,21 +119,21 @@ function create_baseline() { ./adjust_permissions.sh orion develop-${BL_DATE} || : chgrp noaa-hpc develop-${BL_DATE} || : cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
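Several of the lint fixes above split export VAR=$(command) and local VAR=$(command) into an assignment followed by a separate export/local (ShellCheck's SC2155 pattern): export and local succeed on their own, so the one-line form masks the exit status of the command substitution. A sketch with an intentionally missing file:

  export BL_DATE=$(cut -d '=' -f2 missing.conf)   # $? is 0 even though cut failed
  BL_DATE=$(cut -d '=' -f2 missing.conf)          # $? now reflects the cut failure
  export BL_DATE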
pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] + elif [[ ${machine} =~ "Gaea" ]] then - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + echo "Creating baselines on ${machine}" + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT @@ -143,24 +145,24 @@ function create_baseline() { cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep develop-${BL_DATE} || : cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] + elif [[ ${machine} =~ "Hera" ]] then - echo "Creating baselines on $machine" + echo "Creating baselines on ${machine}" export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT cd ${DISKNM}/NEMSfv3gfs/ @@ -169,22 +171,22 @@ function create_baseline() { ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] + elif [[ ${machine} =~ "Derecho" ]] then - echo "Creating baselines on $machine" + echo "Creating baselines on ${machine}" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ cd ${DISKNM}/NEMSfv3gfs/ @@ -193,44 +195,46 @@ function create_baseline() { ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ else - echo "Creating baselines on $machine" + echo "Creating baselines on ${machine}" local workflow="-r" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} fi cd ${WORKSPACE} - echo "Testing concluded for $machine. status=$status" + echo "Testing concluded for ${machine}. 
status=$status" return $status } function post_test() { - echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo "Testing concluded...removing labels for ${machine} from $GIT_URL" echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.repo.ssh_url') + FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.ref') + export SSH_ORIGIN + export FORK_BRANCH echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + export machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_${machine_name_logs}.log + #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " #git pull sshorigin $FORK_BRANCH #git push sshorigin HEAD:$FORK_BRANCH @@ -238,8 +242,10 @@ function post_test() { GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + export GIT_OWNER + export GIT_REPO_NAME - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-BL } create_baseline ${machine} diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index c7b311c41b..d952d4b3a6 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -38,7 +38,7 @@ function disk_usage() { echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ while read line ; do - arr=(${line}); inode="${arr[0]}"; filename="${arr[1]}"; + eval arr=("${line}"); inode="${arr[0]}"; filename="${arr[1]}"; echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' ${filename}),${inode:-0},$(du -Px -s -${size:-k} --time ${filename})" | tr '\t' ',' ; done | sort -t, -k5 -n #-r ) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 9b24bdc52e..d69bbf07e9 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -1,6 +1,6 @@ #!/bin/bash -x -export PATH=$PATH:~/bin +export PATH=${PATH}:~/bin echo "USER=${USER}" echo "WORKSPACE=${WORKSPACE}" export ACCNR=epic @@ -43,7 +43,8 @@ ls -al ${TESTS_DIR}/rt.sh function regression_test() { local machine=${1:-${NODE_NAME}} local machine_id=${machine,,} # tolower - local WORKSPACE="$(pwd)" + local WORKSPACE + WORKSPACE="$(pwd)" local status=0 git submodule update --init --recursive @@ -55,36 +56,37 @@ function regression_test() { [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf mkdir -p logs/ - export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + export BL_DATE - if [[ $machine =~ "Jet" ]] + if [[ ${machine} =~ "Jet" ]] then - echo "Running regression tests on $machine" + echo "Running regression tests on ${machine}" export dprefix=/lfs5/NAGAPE/$ACCNR/$USER sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} - elif [[ $machine =~ "Hercules" ]] + elif [[ ${machine} =~ "Hercules" ]] then - echo "Running regression tests on $machine" + echo "Running regression tests on ${machine}" export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] + elif [[ ${machine} =~ "Orion" ]] then - echo "Running regression tests on $machine" + echo "Running regression tests on ${machine}" cd .. #module load git/2.28.0 git --version @@ -92,91 +94,93 @@ function regression_test() { cd tests export dprefix=/work2/noaa/$ACCNR/$USER sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] + elif [[ ${machine} =~ "Gaea" ]] then - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + echo "Running regression tests on ${machine}" + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] + elif [[ ${machine} =~ "Hera" ]] then - echo "Running regression tests on $machine" + echo "Running regression tests on ${machine}" export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
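The quotes added around the cp arguments in these hunks protect against word splitting and globbing if a workspace path ever contains spaces or special characters. An illustration with a hypothetical path:

  WORKSPACE="/scratch/jenkins/ufs weather model"
  cp RegressionTests_hera.log $WORKSPACE/tests/logs/      # destination splits at the space
  cp "RegressionTests_hera.log" "$WORKSPACE/tests/logs/"  # destination stays one argument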
pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] + elif [[ ${machine} =~ "Derecho" ]] then - echo "Running regression tests on $machine" + echo "Running regression tests on ${machine}" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} cd logs/ - cp RegressionTests_${machine_id}.log $(dirname $WORKSPACE) #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp $(dirname $WORKSPACE)/RegressionTests_${machine_id}.log $WORKSPACE/tests/logs/ + cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" cd $WORKSPACE/tests/ else - echo "Running regression tests on $machine" + echo "Running regression tests on ${machine}" local workflow="-r" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-$machine.log + ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log status=${PIPESTATUS[0]} fi cd ${WORKSPACE} - echo "Testing concluded for $machine. status=$status" + echo "Testing concluded for ${machine}. status=$status" return $status } function post_test() { - echo "Testing concluded...removing labels for $machine from $GIT_URL" + echo "Testing concluded...removing labels for ${machine} from $GIT_URL" echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.repo.ssh_url') + FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.ref') + export SSH_ORIGIN + export FORK_BRANCH echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + export machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + #git add logs/RegressionTests_${machine_name_logs}.log + #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " #git pull sshorigin $FORK_BRANCH #git push sshorigin HEAD:$FORK_BRANCH @@ -184,8 +188,10 @@ function post_test() { GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + export GIT_OWNER + export GIT_REPO_NAME - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-RT } regression_test ${machine} diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index c567aed1c3..1858d3ba85 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -28,7 +28,8 @@ pwd echo "NODE_NAME=${NODE_NAME}" echo "UFS_PLATFORM=${UFS_PLATFORM}" echo "UFS_COMPILER=${UFS_COMPILER}" -export workspace=$(pwd) +workspace=$(pwd) +export workspace machine=${NODE_NAME} echo "machine=<${machine}>" machine_id=${UFS_PLATFORM} diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 42b4e7009e..3cde2c7a0e 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -28,7 +28,8 @@ echo "UFS_COMPILER=<${UFS_COMPILER}>" echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE}>" -export workspace=$(pwd) +workspace=$(pwd) +export workflow machine=${NODE_NAME} echo "machine=<${machine}>" machine_id=${UFS_PLATFORM} @@ -43,7 +44,7 @@ status=0 export LMOD_SH_DBG_ON=0 echo "LMOD_VERSION=${LMOD_VERSION}" -ls -l build/ufs_model +ls -l build/ufs_model || : # just checking status=$? #[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long @@ -75,13 +76,13 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then #export PATH=$PATH:~/bin echo "CHANGE_ID=${CHANGE_ID:-null}" - export FV3_RT_DIR= - #export JENKINS_WORKSPACE= - workflow="-e" # -e = ecflow (default) || -r = rocoto - #export ACCNR=epic echo "ACCNR=${ACCNR}" + export FV3_RT_DIR= + #export JENKINS_WORKSPACE= + #workflow="-e" # -e = ecflow (default) || -r = rocoto + opt="-l" suite="rt.conf" [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" @@ -118,16 +119,16 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then fi cd tests/ + pwd + ls -al . + ls -al $WORKSPACE/${machine_id}/tests/logs/. + ls -al logs/. - pwd - ls -al . - ls -al $WORKSPACE/${machine_id}/tests/logs/. + ## Test Results ... + echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + egrep " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + egrep " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - ls -al logs/. - ## Test Results ... 
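# The wm_build.sh hunk above splits "export workspace=$(pwd)" into an assignment
# followed by a separate export. That is the usual answer to ShellCheck SC2155,
# which warns that combining export with a command substitution masks the command's
# exit status. A minimal sketch of the two spellings:
export workspace=$(pwd)   # flagged: $? afterwards reflects export, not pwd
workspace=$(pwd)          # assign first, so a failing command is still detectable
export workspace          # then export the already-set variable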
- echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - egrep " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - egrep " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt cd ${workspace} find ${workspace}/tests/logs -ls echo "Pipeline Reqression Tests on ${UFS_PLATFORM} complete. status=$status" From 963ab226b98b9d6efdd8c9e86bc297c7cf919b50 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 15:03:43 -0600 Subject: [PATCH 036/106] superlint fixes Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 3 ++- .cicd/scripts/disk_usage.sh | 4 ++-- .cicd/scripts/regression_test.sh | 3 ++- .cicd/scripts/wm_test.sh | 3 ++- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 741036ea0c..5f8388aaf5 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -229,7 +229,8 @@ function post_test() { echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') + machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') + export machine_name_logs git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index d952d4b3a6..4d31ebef75 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -37,8 +37,8 @@ function disk_usage() { cd ${directory} || exit 1 echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ - while read line ; do - eval arr=("${line}"); inode="${arr[0]}"; filename="${arr[1]}"; + while IFS=' ' read -a line ; do + arr=(${line}); inode="${arr[0]}"; filename="${arr[1]}"; echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' ${filename}),${inode:-0},$(du -Px -s -${size:-k} --time ${filename})" | tr '\t' ',' ; done | sort -t, -k5 -n #-r ) diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index d69bbf07e9..faf1b73ca8 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -175,7 +175,8 @@ function post_test() { echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') + machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') + export machine_name_logs git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 3cde2c7a0e..63907610a1 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -91,7 +91,8 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ ${suite} = rt.conf ]] && opt="-l" - [[ ${WM_CREATE_BASELINE} = true ]] && export BL_DATE=$(cat 
tests/bl_date.conf | cut -d '=' -f2) + [[ ${WM_CREATE_BASELINE} = true ]] && BL_DATE=$(cat tests/bl_date.conf | cut -d '=' -f2) + export BL_DATE [[ ! -f tests/logs/RegressionTests_${UFS_PLATFORM,,}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM,,}.log tests/logs/RegressionTests_${UFS_PLATFORM,,}.log.orig rm -f ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_*-log.txt From 76564b76d881f1d926688469a0fb6e68eea7fdec Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 15:22:13 -0600 Subject: [PATCH 037/106] fix superlint on array initializatoin Signed-off-by: Bruce Kropp --- .cicd/scripts/disk_usage.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 4d31ebef75..f20586be5b 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -37,8 +37,8 @@ function disk_usage() { cd ${directory} || exit 1 echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ - while IFS=' ' read -a line ; do - arr=(${line}); inode="${arr[0]}"; filename="${arr[1]}"; + while read line ; do + read -ra arr<<<"${line}"; inode="${arr[0]}"; filename="${arr[1]}"; echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' ${filename}),${inode:-0},$(du -Px -s -${size:-k} --time ${filename})" | tr '\t' ',' ; done | sort -t, -k5 -n #-r ) From db485ba1d8aff200c31301b3cdcac10a4bbf3ab4 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 18:04:17 -0600 Subject: [PATCH 038/106] fix lint for vars Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 114 +++++++++++++++---------------- .cicd/scripts/disk_usage.sh | 8 +-- .cicd/scripts/regression_test.sh | 58 ++++++++-------- .cicd/scripts/wm_build.sh | 6 +- .cicd/scripts/wm_test.sh | 8 +-- 5 files changed, 97 insertions(+), 97 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 5f8388aaf5..cf07589917 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -38,7 +38,7 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} pwd ls -al .cicd/* -ls -al ${TESTS_DIR}/rt.sh +ls -al "${TESTS_DIR}"/rt.sh function create_baseline() { local machine=${1:-${NODE_NAME}} @@ -56,46 +56,46 @@ function create_baseline() { [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf mkdir -p logs/ - BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + BL_DATE=$(cut -d '=' -f2 bl_date.conf) export BL_DATE if [[ ${machine} =~ "Jet" ]] then echo "Creating baselines on ${machine}" - export dprefix=/lfs5/NAGAPE/$ACCNR/$USER + export dprefix=/lfs5/NAGAPE/${ACCNR}/${USER} sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} elif [[ ${machine} =~ "Hercules" ]] then echo "Creating baselines on ${machine}" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export dprefix=/work2/noaa/${ACCNR}/${USER} + sed "s|/noaa/stmp/|/noaa/${ACCNR}/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" 
"${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ + cd "${DISKNM}"/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT ls -l REGRESSION_TEST/. - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh hercules develop-${BL_DATE} || : - chgrp noaa-hpc develop-${BL_DATE} || : - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + cd "${DISKNM}"/NEMSfv3gfs/ + ./adjust_permissions.sh hercules "develop-${BL_DATE}" || : + chgrp noaa-hpc "develop-${BL_DATE}" || : + cd ${WORKSPACE}/tests + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Orion" ]] then cd .. @@ -104,112 +104,112 @@ function create_baseline() { git submodule update --init --recursive cd tests echo "Creating baselines on ${machine}" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export dprefix=/work2/noaa/${ACCNR}/${USER} + sed "s|/noaa/stmp/|/noaa/${ACCNR}/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ + cd "${DISKNM}"/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ ls -l REGRESSION_TEST/. - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh orion develop-${BL_DATE} || : - chgrp noaa-hpc develop-${BL_DATE} || : - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." 
+ cd "${DISKNM}"/NEMSfv3gfs/ + ./adjust_permissions.sh orion "develop-${BL_DATE}" || : + chgrp noaa-hpc "develop-${BL_DATE}" || : + cd ${WORKSPACE}/tests + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Gaea" ]] then echo "Creating baselines on ${machine}" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ + cd "${DISKNM}"/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT ls -l REGRESSION_TEST/. - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." - cd ${DISKNM}/NEMSfv3gfs/ - chgrp ncep develop-${BL_DATE} || : - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + cd "${DISKNM}"/NEMSfv3gfs/ + chgrp ncep "develop-${BL_DATE}" || : + cd ${WORKSPACE}/tests + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Hera" ]] then echo "Creating baselines on ${machine}" export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ + cd "${DISKNM}"/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT ls -l REGRESSION_TEST/. - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." 
- cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + cd ${WORKSPACE}/tests + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Derecho" ]] then echo "Creating baselines on ${machine}" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ - cd ${DISKNM}/NEMSfv3gfs/ + cd "${DISKNM}"/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT ls -l REGRESSION_TEST/. - rsync -a --no-t REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." + cd ${WORKSPACE}/tests + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
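# Each machine block above ends with the same sequence: park the RegressionTests log
# outside the workspace, hard-reset the checkout against origin, then copy the log
# back under tests/logs/. A sketch of that sequence as a hypothetical helper (the
# function name is not part of the scripts):
preserve_log_across_reset() {
  local log=$1                                  # e.g. RegressionTests_hera.log
  cp "logs/${log}" "$(dirname "${WORKSPACE}")"  # save it outside the checkout
  git fetch --no-recurse-submodules origin
  git reset FETCH_HEAD --hard                   # drop local edits made by the test run
  cp "$(dirname "${WORKSPACE}")/${log}" "${WORKSPACE}/tests/logs/"
}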
pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ else echo "Creating baselines on ${machine}" local workflow="-r" - ./rt.sh -a ${ACCNR} -c ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} fi diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index f20586be5b..0e4ef8d985 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -34,15 +34,15 @@ function disk_usage() { local size="${3:-k}" echo "Disk usage: ${JOB_NAME:-ci}/${UFS_PLATFORM}/$(basename ${directory})" ( - cd ${directory} || exit 1 + cd "${directory}" || exit 1 echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ - while read line ; do + while read -r line ; do read -ra arr<<<"${line}"; inode="${arr[0]}"; filename="${arr[1]}"; - echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' ${filename}),${inode:-0},$(du -Px -s -${size:-k} --time ${filename})" | tr '\t' ',' ; + echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' "${filename}" || true),${inode:-0},$(du -Px -s -${size:-k} --time "${filename}" || true)" | tr '\t' ',' || true; done | sort -t, -k5 -n #-r ) echo "" } -disk_usage ${1} ${2} ${3} | tee ${outfile} +disk_usage "${1}" "${2}" "${3}" | tee "${outfile}" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index faf1b73ca8..643a5f3371 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -38,7 +38,7 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} pwd ls -al .cicd/* -ls -al ${TESTS_DIR}/rt.sh +ls -al "${TESTS_DIR}"/rt.sh function regression_test() { local machine=${1:-${NODE_NAME}} @@ -56,34 +56,34 @@ function regression_test() { [[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "export BL_DATE=20240426" > bl_date.conf || cat bl_date.conf mkdir -p logs/ - BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) + BL_DATE=$(cut -d '=' -f2 bl_date.conf) export BL_DATE if [[ ${machine} =~ "Jet" ]] then echo "Running regression tests on ${machine}" - export dprefix=/lfs5/NAGAPE/$ACCNR/$USER + export dprefix=/lfs5/NAGAPE/${ACCNR}/${USER} sed 's|/lfs4/HFIP/${ACCNR}/${USER}|/lfs4/HFIP/hfv3gfs/${USER}|g' -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} elif [[ ${machine} =~ "Hercules" ]] then echo "Running regression tests on ${machine}" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh + export dprefix=/work2/noaa/${ACCNR}/${USER} + sed "s|/noaa/stmp/|/noaa/${ACCNR}/stmp/|g" -i rt.sh export ACCNR=epic - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace + cp 
"RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Orion" ]] then echo "Running regression tests on ${machine}" @@ -92,70 +92,70 @@ function regression_test() { git --version git submodule update --init --recursive cd tests - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + export dprefix=/work2/noaa/${ACCNR}/${USER} + sed "s|/noaa/stmp/|/noaa/${ACCNR}/stmp/|g" -i rt.sh + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Gaea" ]] then echo "Running regression tests on ${machine}" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Hera" ]] then echo "Running regression tests on ${machine}" export ACCNR=epic sed "s|QUEUE=batch|QUEUE=windfall|g" -i rt.sh local workflow="-r" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Derecho" ]] then echo "Running regression tests on ${machine}" export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname $WORKSPACE)" #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname $WORKSPACE)/RegressionTests_${machine_id}.log" "$WORKSPACE/tests/logs/" - cd $WORKSPACE/tests/ + cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cd ${WORKSPACE}/tests/ else echo "Running regression tests on ${machine}" local workflow="-r" - ./rt.sh -a ${ACCNR} ${workflow} ${opt} "${suite}" | tee $WORKSPACE/tests/logs/RT-run-${machine}.log + ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} fi diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index 1858d3ba85..936de88872 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -53,7 +53,7 @@ if [[ ${UFS_PLATFORM} = gaea ]] ; then echo "LMOD_VERSION=${LMOD_VERSION}" fi set +x -module use $PWD/modulefiles >/dev/null 2>&1 +module use ${PWD}/modulefiles >/dev/null 2>&1 module load ufs_${machine_id}.${UFS_COMPILER} || true [[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 || true module list @@ -61,10 +61,10 @@ module list echo "Pipeline Building WM on ${UFS_PLATFORM} ${UFS_COMPILER} with Account=${ACCNR}." export CMAKE_FLAGS="-DAPP=ATM -DCCPP_SUITES=FV3_GFS_v16" /usr/bin/time -p \ - -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_build.json \ + -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_build.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ./build.sh | tee ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_build-log.txt status=${PIPESTATUS[0]} -echo "Pipeline Completed WM build on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" +echo "Pipeline Completed WM build on ${UFS_PLATFORM} ${UFS_COMPILER}. 
status=${status}" ls -l build/ufs_model diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 63907610a1..c0d2463b17 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -67,7 +67,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then echo "LMOD_VERSION=${LMOD_VERSION}" fi - module use $PWD/modulefiles >/dev/null 2>&1 + module use ${PWD}/modulefiles >/dev/null 2>&1 module load ufs_${machine_id}.${UFS_COMPILER} || true [[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 module list @@ -112,7 +112,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ls -al .cicd/* echo "Pipeline Running Regression Tests ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}: (${opt} [${suite:=rt.conf}])" /usr/bin/time -p \ - -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ + -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} @@ -132,9 +132,9 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then cd ${workspace} find ${workspace}/tests/logs -ls - echo "Pipeline Reqression Tests on ${UFS_PLATFORM} complete. status=$status" + echo "Pipeline Reqression Tests on ${UFS_PLATFORM} complete. status=${status}" else echo "Pipeline Regression Tests on ${UFS_PLATFORM} (${machine}) skipped." echo "ExperimentName: null" > ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt fi -exit $status +exit ${status} From 7f9f96095b4a4d1df1515c11c8eaafff028dbf76 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 18:43:59 -0600 Subject: [PATCH 039/106] ease up on .shellcheckrc lint limits Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 46 ++++++++++++++++---------------- .cicd/scripts/regression_test.sh | 46 ++++++++++++++++---------------- .shellcheckrc | 12 ++++++++- 3 files changed, 57 insertions(+), 47 deletions(-) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index cf07589917..e572d5e497 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -88,13 +88,13 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Orion" ]] then @@ -122,13 +122,13 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Gaea" ]] then @@ -148,13 +148,13 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Hera" ]] then @@ -174,13 +174,13 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Derecho" ]] then @@ -198,13 +198,13 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ else echo "Creating baselines on ${machine}" @@ -215,38 +215,38 @@ function create_baseline() { cd ${WORKSPACE} - echo "Testing concluded for ${machine}. 
status=$status" - return $status + echo "Testing concluded for ${machine}. status=${status}" + return ${status} } function post_test() { - echo "Testing concluded...removing labels for ${machine} from $GIT_URL" - echo $CHANGE_ID - SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.repo.ssh_url') - FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.ref') + echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" + echo "CHANGE_ID=${CHANGE_ID}" + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') + FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') export SSH_ORIGIN export FORK_BRANCH echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') + machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') export machine_name_logs git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 #git add logs/RegressionTests_${machine_name_logs}.log #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH + #git pull sshorigin ${FORK_BRANCH} + #git push sshorigin HEAD:${FORK_BRANCH} tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) + GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) export GIT_OWNER export GIT_REPO_NAME #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-BL } -create_baseline ${machine} +create_baseline "${machine}" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 643a5f3371..68eb13063e 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -76,13 +76,13 @@ function regression_test() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. 
pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Orion" ]] then @@ -97,13 +97,13 @@ function regression_test() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/work/noaa/epic/role-epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/work/noaa/epic/role-epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Gaea" ]] then @@ -112,13 +112,13 @@ function regression_test() { status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/gpfs/f5/epic/scratch/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Hera" ]] then @@ -129,13 +129,13 @@ function regression_test() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/scratch2/NAGAPE/epic/role.epic/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ elif [[ ${machine} =~ "Derecho" ]] then @@ -144,13 +144,13 @@ function regression_test() { ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} cd logs/ - cp "RegressionTests_${machine_id}.log" "$(dirname ${WORKSPACE})" #/glade/derecho/scratch/epicufsrt/jenkins/workspace + cp "RegressionTests_${machine_id}.log" "$(dirname "${WORKSPACE}")" #/glade/derecho/scratch/epicufsrt/jenkins/workspace git remote -v git fetch --no-recurse-submodules origin git reset FETCH_HEAD --hard cd .. && cd .. && cd .. pwd - cp "$(dirname ${WORKSPACE})/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" + cp "$(dirname "${WORKSPACE}")/RegressionTests_${machine_id}.log" "${WORKSPACE}/tests/logs/" cd ${WORKSPACE}/tests/ else echo "Running regression tests on ${machine}" @@ -161,38 +161,38 @@ function regression_test() { cd ${WORKSPACE} - echo "Testing concluded for ${machine}. status=$status" - return $status + echo "Testing concluded for ${machine}. 
status=${status}" + return ${status} } function post_test() { - echo "Testing concluded...removing labels for ${machine} from $GIT_URL" - echo $CHANGE_ID - SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.repo.ssh_url') - FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID" | jq -r '.head.ref') + echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" + echo "CHANGE_ID=${CHANGE_ID}" + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') + FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') export SSH_ORIGIN export FORK_BRANCH echo "GIT_URL=${GIT_URL}" git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - machine_name_logs=$(echo ${machine} | awk '{ print tolower($1) }') + machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') export machine_name_logs git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 + git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 #git add logs/RegressionTests_${machine_name_logs}.log #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH + #git pull sshorigin ${FORK_BRANCH} + #git push sshorigin HEAD:${FORK_BRANCH} tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) + GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' 
-f1) export GIT_OWNER export GIT_REPO_NAME #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-RT } -regression_test ${machine} +regression_test "${machine}" diff --git a/.shellcheckrc b/.shellcheckrc index 95525eb590..346c35a13f 100644 --- a/.shellcheckrc +++ b/.shellcheckrc @@ -13,4 +13,14 @@ disable=SC1090 disable=SC1091 # Disable -p -m only applies to deepest directory -disable=SC2174 \ No newline at end of file +disable=SC2174 + +# Disable info on pipe-ing commands +disable=SC2312 + +# Disable info sed with single quote string litteral +disable=SC2016 + + +# Disable info on not double-quoting variables +disable=SC2086 From 61a4e89a3b6a455072fdbce543cbe5fdbe2a2732 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 18:57:34 -0600 Subject: [PATCH 040/106] lint complains about egrep Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 14 +++++++------- .shellcheckrc | 4 +++- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index c0d2463b17..319fc3b103 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -91,7 +91,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" [[ ${suite} = rt.conf ]] && opt="-l" - [[ ${WM_CREATE_BASELINE} = true ]] && BL_DATE=$(cat tests/bl_date.conf | cut -d '=' -f2) + [[ ${WM_CREATE_BASELINE} = true ]] && BL_DATE=$(cut -d '=' -f2 tests/bl_date.conf) export BL_DATE [[ ! -f tests/logs/RegressionTests_${UFS_PLATFORM,,}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM,,}.log tests/logs/RegressionTests_${UFS_PLATFORM,,}.log.orig @@ -102,11 +102,11 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ls -al .cicd/* echo "Pipeline Creating Baseline Tests ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}: (${opt} [${suite:=rt.conf}])" /usr/bin/time -p \ - -o ${WORKSPACE}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ + -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ./.cicd/scripts/create_baseline.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} - echo "Pipeline Completed Baseline Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=$status" + echo "Pipeline Completed Baseline Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" else echo "skip Creating baseline on ${UFS_PLATFORM}." ls -al .cicd/* @@ -116,19 +116,19 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} - echo "Pipeline Completed Regression Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. 
status=$status" + echo "Pipeline Completed Regression Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" fi cd tests/ pwd ls -al . - ls -al $WORKSPACE/${machine_id}/tests/logs/. + ls -al ${workspace}/${machine_id}/tests/logs/. ls -al logs/. ## Test Results ... echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - egrep " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - egrep " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + grep -E " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + grep -E " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt cd ${workspace} find ${workspace}/tests/logs -ls diff --git a/.shellcheckrc b/.shellcheckrc index 346c35a13f..c1310edf16 100644 --- a/.shellcheckrc +++ b/.shellcheckrc @@ -21,6 +21,8 @@ disable=SC2312 # Disable info sed with single quote string litteral disable=SC2016 - # Disable info on not double-quoting variables disable=SC2086 + +# Disable info on &&, || chaining +disable=SC2015 From 9914c09b1d56cfffc09f44d428ec3315487f7a88 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 19:30:38 -0600 Subject: [PATCH 041/106] superlinter checks on our own bash scripts Signed-off-by: Bruce Kropp --- .cicd/scripts/shellcheckrc | 12 ++++++++++++ .shellcheckrc | 12 ------------ 2 files changed, 12 insertions(+), 12 deletions(-) create mode 100644 .cicd/scripts/shellcheckrc diff --git a/.cicd/scripts/shellcheckrc b/.cicd/scripts/shellcheckrc new file mode 100644 index 0000000000..f36fec4502 --- /dev/null +++ b/.cicd/scripts/shellcheckrc @@ -0,0 +1,12 @@ + +# Disable info on pipe-ing commands +disable=SC2312 + +# Disable info sed with single quote string litteral +disable=SC2016 + +# Disable info on not double-quoting variables +disable=SC2086 + +# Disable info on &&, || chaining +disable=SC2015 diff --git a/.shellcheckrc b/.shellcheckrc index c1310edf16..d64d06418d 100644 --- a/.shellcheckrc +++ b/.shellcheckrc @@ -14,15 +14,3 @@ disable=SC1091 # Disable -p -m only applies to deepest directory disable=SC2174 - -# Disable info on pipe-ing commands -disable=SC2312 - -# Disable info sed with single quote string litteral -disable=SC2016 - -# Disable info on not double-quoting variables -disable=SC2086 - -# Disable info on &&, || chaining -disable=SC2015 From 23edf74df96e8b1d15ddadd660f4b0397fface61 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 2 Dec 2024 19:38:30 -0600 Subject: [PATCH 042/106] referring to specific platform files are to be tollerated Signed-off-by: Bruce Kropp --- .cicd/scripts/shellcheckrc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.cicd/scripts/shellcheckrc b/.cicd/scripts/shellcheckrc index f36fec4502..80ea97d188 100644 --- a/.cicd/scripts/shellcheckrc +++ b/.cicd/scripts/shellcheckrc @@ -1,3 +1,5 @@ +# Ignore source-able files from the varyious platforms in our matrix +disable=SC1091 # Disable info on pipe-ing commands disable=SC2312 From 274727853d657ef01f1aa43546b4e23252e5f368 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 3 Dec 2024 13:00:09 -0600 Subject: [PATCH 
043/106] relax some shellcheck items Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile.metrics | 4 +- .cicd/scripts/{shellcheckrc => .shellcheckrc} | 5 +++ .cicd/scripts/create_baseline.sh | 41 +++++++++---------- .cicd/scripts/disk_usage.sh | 6 +-- .cicd/scripts/regression_test.sh | 25 ++++++----- .cicd/scripts/wm_test.sh | 5 ++- .shellcheckrc | 2 +- 7 files changed, 47 insertions(+), 41 deletions(-) rename .cicd/scripts/{shellcheckrc => .shellcheckrc} (58%) diff --git a/.cicd/Jenkinsfile.metrics b/.cicd/Jenkinsfile.metrics index 0cf93a0981..7e4f5d641f 100644 --- a/.cicd/Jenkinsfile.metrics +++ b/.cicd/Jenkinsfile.metrics @@ -26,7 +26,7 @@ def generateStage(nodeLabel) { try { echo "Running on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { - sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh "WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' sh ''' export machine=${NODE_NAME} git config user.email "ecc.platform@noaa.gov" @@ -51,7 +51,7 @@ def generateStage(nodeLabel) { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } else { - sh "WM_REGRESSION_TESTS=true WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh "WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' sh ''' export machine=${NODE_NAME} git config user.email "ecc.platform@noaa.gov" diff --git a/.cicd/scripts/shellcheckrc b/.cicd/scripts/.shellcheckrc similarity index 58% rename from .cicd/scripts/shellcheckrc rename to .cicd/scripts/.shellcheckrc index 80ea97d188..09c3e06ca6 100644 --- a/.cicd/scripts/shellcheckrc +++ b/.cicd/scripts/.shellcheckrc @@ -1,3 +1,8 @@ +# Shellcheck is used by Super-Linter to flag BASH shell oddities during CI pipeline. +# global settings are at top-level: ../../.shellcheckrc +# https://github.com/koalaman/shellcheck +# https://github.com/super-linter/super-linter + # Ignore source-able files from the varyious platforms in our matrix disable=SC1091 diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index e572d5e497..bd8c8c3b54 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -38,7 +38,7 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} pwd ls -al .cicd/* -ls -al "${TESTS_DIR}"/rt.sh +ls -al ${TESTS_DIR}/rt.sh function create_baseline() { local machine=${1:-${NODE_NAME}} @@ -76,12 +76,12 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." 
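# The rename above moves the CI-specific ShellCheck exceptions to
# .cicd/scripts/.shellcheckrc, keeping them next to the pipeline scripts while the
# repository-wide .shellcheckrc stays minimal. ShellCheck looks for such a file in
# the checked script's directory (and its parents), so a local run against these
# scripts might look roughly like this:
shellcheck .cicd/scripts/wm_build.sh .cicd/scripts/wm_test.sh   # picks up .cicd/scripts/.shellcheckrc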
- cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh hercules "develop-${BL_DATE}" || : chgrp noaa-hpc "develop-${BL_DATE}" || : cd ${WORKSPACE}/tests @@ -110,12 +110,12 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/work/noaa/epic/UFS-WM_RT - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ ./adjust_permissions.sh orion "develop-${BL_DATE}" || : chgrp noaa-hpc "develop-${BL_DATE}" || : cd ${WORKSPACE}/tests @@ -137,12 +137,12 @@ function create_baseline() { status=${PIPESTATUS[0]} unset LD_LIBRARY_PATH export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /gpfs/f5/epic/scratch/role.epic/FV3_RT ls -l REGRESSION_TEST/. rsync -a --no-t REGRESSION_TEST/ "${DISKNM}/NEMSfv3gfs/develop-${BL_DATE}" || echo "#### Warning! rsync $(pwd)/REGRESSION_TEST/ incomplete." - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ chgrp ncep "develop-${BL_DATE}" || : cd ${WORKSPACE}/tests ./rt.sh -a "${ACCNR}" "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" @@ -165,7 +165,7 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT ls -l REGRESSION_TEST/. @@ -189,7 +189,7 @@ function create_baseline() { ./rt.sh -a "${ACCNR}" -c "${workflow}" "${opt}" "${suite}" | tee "${WORKSPACE}/tests/logs/RT-run-${machine}.log" status=${PIPESTATUS[0]} export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ - cd "${DISKNM}"/NEMSfv3gfs/ + cd ${DISKNM}/NEMSfv3gfs/ mkdir -p develop-${BL_DATE} cd /glade/derecho/scratch/epicufsrt/FV3_RT ls -l REGRESSION_TEST/. 
@@ -220,33 +220,32 @@ function create_baseline() { } function post_test() { + local machine=${1:-${NODE_NAME}} + local machine_id=${machine,,} # tolower + local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" - echo "CHANGE_ID=${CHANGE_ID}" - SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') - FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') - export SSH_ORIGIN - export FORK_BRANCH echo "GIT_URL=${GIT_URL}" + echo "CHANGE_ID=${CHANGE_ID}" + git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') - export machine_name_logs + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 #git add logs/RegressionTests_${machine_name_logs}.log #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " + + #FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') #git pull sshorigin ${FORK_BRANCH} #git push sshorigin HEAD:${FORK_BRANCH} tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) - GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) - export GIT_OWNER - export GIT_REPO_NAME - + #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) + #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' 
-f1) #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-BL } create_baseline "${machine}" +#post_test "${machine}" diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 0e4ef8d985..d54457cd34 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -34,15 +34,15 @@ function disk_usage() { local size="${3:-k}" echo "Disk usage: ${JOB_NAME:-ci}/${UFS_PLATFORM}/$(basename ${directory})" ( - cd "${directory}" || exit 1 + cd ${directory} || exit 1 echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ while read -r line ; do read -ra arr<<<"${line}"; inode="${arr[0]}"; filename="${arr[1]}"; - echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' "${filename}" || true),${inode:-0},$(du -Px -s -${size:-k} --time "${filename}" || true)" | tr '\t' ',' || true; + echo "${UFS_PLATFORM}-${UFS_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' "${filename:-.}" || true),${inode:-0},$(du -Px -s -${size:-k} --time "${filename:-null}" 2>/dev/null || true)" | tr '\t' ',' || true; done | sort -t, -k5 -n #-r ) echo "" } -disk_usage "${1}" "${2}" "${3}" | tee "${outfile}" +disk_usage "${1}" "${2}" "${3}" | tee ${outfile} diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 68eb13063e..f8b02816eb 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -38,7 +38,7 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} pwd ls -al .cicd/* -ls -al "${TESTS_DIR}"/rt.sh +ls -al ${TESTS_DIR}/rt.sh function regression_test() { local machine=${1:-${NODE_NAME}} @@ -166,33 +166,32 @@ function regression_test() { } function post_test() { + local machine=${1:-${NODE_NAME}} + local machine_id=${machine,,} # tolower + local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" - echo "CHANGE_ID=${CHANGE_ID}" - SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') - FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') - export SSH_ORIGIN - export FORK_BRANCH echo "GIT_URL=${GIT_URL}" + echo "CHANGE_ID=${CHANGE_ID}" + git config user.email "ecc.platform@noaa.gov" git config user.name "epic-cicd-jenkins" - machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') - export machine_name_logs + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 #git add logs/RegressionTests_${machine_name_logs}.log #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " + + #FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') #git pull sshorigin ${FORK_BRANCH} #git push sshorigin HEAD:${FORK_BRANCH} tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - GIT_OWNER=$(echo ${GIT_URL} | 
cut -d '/' -f4) - GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) - export GIT_OWNER - export GIT_REPO_NAME - + #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) + #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-RT } regression_test "${machine}" +#post_test "${machine}" diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 319fc3b103..d2846e5d8f 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -47,9 +47,11 @@ echo "LMOD_VERSION=${LMOD_VERSION}" ls -l build/ufs_model || : # just checking status=$? +[[ -n "${WM_REGRESSION_TESTS}" ]] || WM_REGRESSION_TESTS=true # default #[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long #[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false [[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : +export WM_REGRESSION_TESTS rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt if [[ ${WM_REGRESSION_TESTS} = true ]] ; then @@ -122,8 +124,9 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then cd tests/ pwd ls -al . - ls -al ${workspace}/${machine_id}/tests/logs/. + ## Check for log files ... ls -al logs/. + ls -al ${WORKSPACE:-"${workspace}/.."}/${machine_id}/tests/logs/. || : ## Test Results ... echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt diff --git a/.shellcheckrc b/.shellcheckrc index d64d06418d..95525eb590 100644 --- a/.shellcheckrc +++ b/.shellcheckrc @@ -13,4 +13,4 @@ disable=SC1090 disable=SC1091 # Disable -p -m only applies to deepest directory -disable=SC2174 +disable=SC2174 \ No newline at end of file From 203080f4917e23d825bb98cac63c3b949d22f8ec Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 3 Dec 2024 13:15:14 -0600 Subject: [PATCH 044/106] allow assign with local VAR=val Signed-off-by: Bruce Kropp --- .cicd/scripts/.shellcheckrc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.cicd/scripts/.shellcheckrc b/.cicd/scripts/.shellcheckrc index 09c3e06ca6..83d6166add 100644 --- a/.cicd/scripts/.shellcheckrc +++ b/.cicd/scripts/.shellcheckrc @@ -3,6 +3,9 @@ # https://github.com/koalaman/shellcheck # https://github.com/super-linter/super-linter +# Allow to assign with local VAR=... 
+disable=SC2155 + # Ignore source-able files from the varyious platforms in our matrix disable=SC1091 From b4dab06d5e6849aa5886772439500b923e0f5fd8 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 3 Dec 2024 18:44:52 -0600 Subject: [PATCH 045/106] clean out some unused vars Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile.metrics | 75 +++++--------------------------- .cicd/scripts/create_baseline.sh | 5 ++- .cicd/scripts/regression_test.sh | 5 ++- .cicd/scripts/wm_test.sh | 39 +++++------------ 4 files changed, 30 insertions(+), 94 deletions(-) diff --git a/.cicd/Jenkinsfile.metrics b/.cicd/Jenkinsfile.metrics index 7e4f5d641f..50fbc19a30 100644 --- a/.cicd/Jenkinsfile.metrics +++ b/.cicd/Jenkinsfile.metrics @@ -26,79 +26,28 @@ def generateStage(nodeLabel) { try { echo "Running on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { - sh "WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' - sh ''' - export machine=${NODE_NAME} - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - echo "Testing concluded...removing labels for $machine from $GIT_URL" - - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + sh "WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } else { sh "WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' - sh ''' - export machine=${NODE_NAME} - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - echo "Testing concluded...removing labels for $machine from $GIT_URL" - - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - echo "Testing concluded...removing labels for $machine from $GIT_URL" - git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } catch(err) { sh ''' - export machine=${NODE_NAME} - export CHANGE_ID=${CHANGE_ID} - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - cd $WORKSPACE/tests - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - echo "Testing concluded...removing labels for $machine from $GIT_URL" - git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + export machine=${NODE_NAME} + export CHANGE_ID=${CHANGE_ID} - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) + cd ${WORKSPACE}/tests + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} + echo "Testing concluded...removing labels for $machine from $GIT_URL" + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] currentBuild.result = 'FAILURE' } sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index bd8c8c3b54..98ada4f94f 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -223,6 +223,7 @@ function post_test() { local machine=${1:-${NODE_NAME}} local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') + local label=${2:-"undef"} echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" echo "GIT_URL=${GIT_URL}" echo "CHANGE_ID=${CHANGE_ID}" @@ -244,8 +245,8 @@ function post_test() { #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-BL + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} } create_baseline "${machine}" -#post_test "${machine}" +post_test "${machine}" "BL" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index f8b02816eb..ded11fbc6c 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -169,6 +169,7 @@ function post_test() { local machine=${1:-${NODE_NAME}} local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') + local label=${2:-"undef"} echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" echo "GIT_URL=${GIT_URL}" echo "CHANGE_ID=${CHANGE_ID}" @@ -190,8 +191,8 @@ function post_test() { #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' 
-f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-RT + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} } regression_test "${machine}" -#post_test "${machine}" +post_test "${machine}" "RT" diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index d2846e5d8f..ea5ff5f2f4 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -28,8 +28,6 @@ echo "UFS_COMPILER=<${UFS_COMPILER}>" echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE}>" -workspace=$(pwd) -export workflow machine=${NODE_NAME} echo "machine=<${machine}>" machine_id=${UFS_PLATFORM} @@ -39,10 +37,10 @@ if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then fi echo "machine_id=<${machine_id}>" -status=0 +workspace=$(pwd) +export workspace -export LMOD_SH_DBG_ON=0 -echo "LMOD_VERSION=${LMOD_VERSION}" +status=0 ls -l build/ufs_model || : # just checking status=$? @@ -54,9 +52,13 @@ status=$? export WM_REGRESSION_TESTS rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + if [[ ${WM_REGRESSION_TESTS} = true ]] ; then echo "Pipeline Reqression Tests on ${UFS_PLATFORM} starting." + export LMOD_SH_DBG_ON=0 + echo "LMOD_VERSION=${LMOD_VERSION}" + set +x if [[ ${UFS_PLATFORM} = orion ]] ; then #module --ignore_cache load git/2.28.0 @@ -75,26 +77,9 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then module list set -x - #export PATH=$PATH:~/bin echo "CHANGE_ID=${CHANGE_ID:-null}" - - #export ACCNR=epic echo "ACCNR=${ACCNR}" - export FV3_RT_DIR= - #export JENKINS_WORKSPACE= - #workflow="-e" # -e = ecflow (default) || -r = rocoto - - opt="-l" - suite="rt.conf" - [[ -n ${WM_OPERATIONAL_TESTS} ]] && opt="-n" && suite="${WM_OPERATIONAL_TESTS} ${UFS_COMPILER}" - [[ ${WM_OPERATIONAL_TESTS} = default ]] && opt="-n" && suite="control_p8 ${UFS_COMPILER}" - [[ ${WM_OPERATIONAL_TESTS} = comprehensive ]] && opt="-l" && suite="rt.conf" - [[ ${WM_OPERATIONAL_TESTS} = rt.conf ]] && opt="-l" && suite="rt.conf" - [[ ${suite} = rt.conf ]] && opt="-l" - - [[ ${WM_CREATE_BASELINE} = true ]] && BL_DATE=$(cut -d '=' -f2 tests/bl_date.conf) - export BL_DATE [[ ! -f tests/logs/RegressionTests_${UFS_PLATFORM,,}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM,,}.log tests/logs/RegressionTests_${UFS_PLATFORM,,}.log.orig rm -f ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_*-log.txt @@ -102,23 +87,23 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then if [[ ${WM_CREATE_BASELINE} = true ]] ; then echo "start Creating baseline on ${UFS_PLATFORM} ..." 
ls -al .cicd/* - echo "Pipeline Creating Baseline Tests ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}: (${opt} [${suite:=rt.conf}])" + echo "Pipeline Creating Baseline Tests ${WM_OPERATIONAL_TESTS:=rt.conf} on ${UFS_PLATFORM} ${UFS_COMPILER}" /usr/bin/time -p \ -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ./.cicd/scripts/create_baseline.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} - echo "Pipeline Completed Baseline Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" + echo "Pipeline Completed Baseline Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" else echo "skip Creating baseline on ${UFS_PLATFORM}." ls -al .cicd/* - echo "Pipeline Running Regression Tests ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}: (${opt} [${suite:=rt.conf}])" + echo "Pipeline Running Regression Tests ${WM_OPERATIONAL_TESTS:=rt.conf} on ${UFS_PLATFORM} ${UFS_COMPILER}" /usr/bin/time -p \ -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} - echo "Pipeline Completed Regression Tests ${opt} ${suite} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" + echo "Pipeline Completed Regression Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" fi cd tests/ @@ -126,7 +111,6 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ls -al . ## Check for log files ... ls -al logs/. - ls -al ${WORKSPACE:-"${workspace}/.."}/${machine_id}/tests/logs/. || : ## Test Results ... echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt @@ -140,4 +124,5 @@ else echo "Pipeline Regression Tests on ${UFS_PLATFORM} (${machine}) skipped." 
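The /usr/bin/time wrapper in this hunk writes resource metrics to a side file while tee captures the step's stdout. A small self-contained sketch of that pattern, assuming GNU time is installed (file and command names here are hypothetical):

    #!/bin/bash
    # GNU time (not the bash builtin): -o/--output writes the report to a
    # file and -f/--format selects the fields (%P CPU, %M max RSS in kB,
    # %e wall-clock seconds), while the wrapped command's stdout still
    # flows through the pipe to tee.
    /usr/bin/time --output=metrics.json \
        --format='{"cpu": "%P", "maxRSS_kB": "%M", "real_s": "%e"}' \
        bash -c 'echo "doing some work"; sleep 1' | tee step-log.txt
    cat metrics.json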
echo "ExperimentName: null" > ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt fi + exit ${status} From b7e629ea367b957d7f9579467ee6697f2a77dc2d Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 3 Dec 2024 19:06:29 -0600 Subject: [PATCH 046/106] set a real Jenkinsfile Signed-off-by: Bruce Kropp --- .cicd/{Jenkinsfile.metrics => Jenkinsfile} | 0 .cicd/scripts/create_baseline.sh | 2 +- .cicd/scripts/regression_test.sh | 2 +- .cicd/scripts/wm_test.sh | 6 +++--- 4 files changed, 5 insertions(+), 5 deletions(-) rename .cicd/{Jenkinsfile.metrics => Jenkinsfile} (100%) diff --git a/.cicd/Jenkinsfile.metrics b/.cicd/Jenkinsfile similarity index 100% rename from .cicd/Jenkinsfile.metrics rename to .cicd/Jenkinsfile diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 98ada4f94f..6fcaa6b6b3 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -224,7 +224,6 @@ function post_test() { local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} - echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" echo "GIT_URL=${GIT_URL}" echo "CHANGE_ID=${CHANGE_ID}" @@ -243,6 +242,7 @@ function post_test() { tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}" #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index ded11fbc6c..bb5d0d3c49 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -170,7 +170,6 @@ function post_test() { local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} - echo "Testing concluded...removing labels for ${machine} from ${GIT_URL}" echo "GIT_URL=${GIT_URL}" echo "CHANGE_ID=${CHANGE_ID}" @@ -189,6 +188,7 @@ function post_test() { tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}" #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index ea5ff5f2f4..ac777208e1 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -87,7 +87,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then if [[ ${WM_CREATE_BASELINE} = true ]] ; then echo "start Creating baseline on ${UFS_PLATFORM} ..." 
ls -al .cicd/* - echo "Pipeline Creating Baseline Tests ${WM_OPERATIONAL_TESTS:=rt.conf} on ${UFS_PLATFORM} ${UFS_COMPILER}" + echo "Pipeline Creating Baseline Tests ${WM_OPERATIONAL_TESTS:=default} on ${UFS_PLATFORM} ${UFS_COMPILER}" /usr/bin/time -p \ -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ @@ -97,7 +97,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then else echo "skip Creating baseline on ${UFS_PLATFORM}." ls -al .cicd/* - echo "Pipeline Running Regression Tests ${WM_OPERATIONAL_TESTS:=rt.conf} on ${UFS_PLATFORM} ${UFS_COMPILER}" + echo "Pipeline Running Regression Tests ${WM_OPERATIONAL_TESTS:=default} on ${UFS_PLATFORM} ${UFS_COMPILER}" /usr/bin/time -p \ -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_test.json \ -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ @@ -113,7 +113,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ls -al logs/. ## Test Results ... - echo "ExperimentName: ${suite}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + echo "ExperimentName: ${WM_OPERATIONAL_TESTS:=default}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt grep -E " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt grep -E " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt From 6cfb3f644d5ecbc831fe39e98cc7ca45aa897a16 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 3 Dec 2024 19:45:50 -0600 Subject: [PATCH 047/106] fix unassigned CHANGE_ID and GIT_URL Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 2 ++ .cicd/scripts/regression_test.sh | 2 ++ .cicd/scripts/wm_test.sh | 5 +++++ 3 files changed, 9 insertions(+) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index 6fcaa6b6b3..a174a70faa 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -224,6 +224,8 @@ function post_test() { local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} + GIT_URL=${GIT_URL:-"ufs-weather-model"} + CHANGE_ID=${CHANGE_ID:-"develop"} echo "GIT_URL=${GIT_URL}" echo "CHANGE_ID=${CHANGE_ID}" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index bb5d0d3c49..7692c6386e 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -170,6 +170,8 @@ function post_test() { local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} + GIT_URL=${GIT_URL:-"ufs-weather-model"} + CHANGE_ID=${CHANGE_ID:-"develop"} echo "GIT_URL=${GIT_URL}" echo "CHANGE_ID=${CHANGE_ID}" diff --git a/.cicd/scripts/wm_test.sh 
b/.cicd/scripts/wm_test.sh index ac777208e1..9913e4beed 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -20,7 +20,12 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} ls -al ./rt.sh ) +export GIT_URL=${GIT_URL:-"ufs-weather-model"} +export CHANGE_ID=${CHANGE_ID:-"develop"} + pwd +echo "GIT_URL=${GIT_URL}" +echo "CHANGE_ID=${CHANGE_ID}" echo "NODE_NAME=${NODE_NAME}" echo "USER=${USER}" echo "UFS_PLATFORM=<${UFS_PLATFORM}>" From 494ce476e38d3955fd18fb61f3a3a9cfee8105ae Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 4 Dec 2024 10:41:45 -0600 Subject: [PATCH 048/106] fix tar log path Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 2 ++ .cicd/scripts/regression_test.sh | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index a174a70faa..d0cbeffa6a 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -224,6 +224,8 @@ function post_test() { local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} + local WORKSPACE + WORKSPACE="$(pwd)" GIT_URL=${GIT_URL:-"ufs-weather-model"} CHANGE_ID=${CHANGE_ID:-"develop"} echo "GIT_URL=${GIT_URL}" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index 7692c6386e..a2a2f3c311 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -170,6 +170,8 @@ function post_test() { local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} + local WORKSPACE + WORKSPACE="$(pwd)" GIT_URL=${GIT_URL:-"ufs-weather-model"} CHANGE_ID=${CHANGE_ID:-"develop"} echo "GIT_URL=${GIT_URL}" From 6481422f6f1ac5cc439185ac3e72ddd637cc7d92 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 4 Dec 2024 13:59:12 -0600 Subject: [PATCH 049/106] move posting results to a separate script Signed-off-by: Bruce Kropp --- .cicd/scripts/create_baseline.sh | 34 ---------------- .cicd/scripts/post_test_results.sh | 64 ++++++++++++++++++++++++++++++ .cicd/scripts/regression_test.sh | 34 ---------------- .cicd/scripts/wm_test.sh | 5 +++ 4 files changed, 69 insertions(+), 68 deletions(-) create mode 100755 .cicd/scripts/post_test_results.sh diff --git a/.cicd/scripts/create_baseline.sh b/.cicd/scripts/create_baseline.sh index d0cbeffa6a..7563f34a9c 100755 --- a/.cicd/scripts/create_baseline.sh +++ b/.cicd/scripts/create_baseline.sh @@ -219,38 +219,4 @@ function create_baseline() { return ${status} } -function post_test() { - local machine=${1:-${NODE_NAME}} - local machine_id=${machine,,} # tolower - local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') - local label=${2:-"undef"} - local WORKSPACE - WORKSPACE="$(pwd)" - GIT_URL=${GIT_URL:-"ufs-weather-model"} - CHANGE_ID=${CHANGE_ID:-"develop"} - echo "GIT_URL=${GIT_URL}" - echo "CHANGE_ID=${CHANGE_ID}" - - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - - SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') - git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 - #git add logs/RegressionTests_${machine_name_logs}.log - #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " - - #FORK_BRANCH=$(curl 
--silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') - #git pull sshorigin ${FORK_BRANCH} - #git push sshorigin HEAD:${FORK_BRANCH} - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}" - #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) - #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} -} - create_baseline "${machine}" -post_test "${machine}" "BL" diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh new file mode 100755 index 0000000000..b64362ece6 --- /dev/null +++ b/.cicd/scripts/post_test_results.sh @@ -0,0 +1,64 @@ +#!/bin/bash -x +# RT - RegressionTest label +# BL - Baselins label + +export machine=${1:-${NODE_NAME}} +label=$2 +[[ -n "${label}" ]] || exit 1 + +export PATH=${PATH}:~/bin +echo "USER=${USER}" +echo "WORKSPACE=${WORKSPACE}" +export ACCNR=epic + +export account="-a ${ACCNR}" + +set -eu + +SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") +UFS_MODEL_DIR=$(realpath "${SCRIPTS_DIR}/../..") +readonly UFS_MODEL_DIR +echo "UFS MODEL DIR: ${UFS_MODEL_DIR}" + +export CC=${CC:-mpicc} +export CXX=${CXX:-mpicxx} +export FC=${FC:-mpif90} + +BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} +TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} + +function post_test() { + local machine=${1:-${NODE_NAME}} + local machine_id=${machine,,} # tolower + local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') + local label=${2:-"undef"} + local WORKSPACE + WORKSPACE="$(pwd)" + GIT_URL=${GIT_URL:-"ufs-weather-model"} + CHANGE_ID=${CHANGE_ID:-"develop"} + echo "GIT_URL=${GIT_URL}" + echo "CHANGE_ID=${CHANGE_ID}" + + git config user.email "ecc.platform@noaa.gov" + git config user.name "epic-cicd-jenkins" + + SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') + git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 + git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 + #git add logs/RegressionTests_${machine_name_logs}.log + #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " + + #FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') + #git pull sshorigin ${FORK_BRANCH} + #git push sshorigin HEAD:${FORK_BRANCH} + + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + + echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}" + #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) + #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' 
-f1) + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} +} + +post_test "${machine}" "${label}" diff --git a/.cicd/scripts/regression_test.sh b/.cicd/scripts/regression_test.sh index a2a2f3c311..c56ae35ab0 100755 --- a/.cicd/scripts/regression_test.sh +++ b/.cicd/scripts/regression_test.sh @@ -165,38 +165,4 @@ function regression_test() { return ${status} } -function post_test() { - local machine=${1:-${NODE_NAME}} - local machine_id=${machine,,} # tolower - local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') - local label=${2:-"undef"} - local WORKSPACE - WORKSPACE="$(pwd)" - GIT_URL=${GIT_URL:-"ufs-weather-model"} - CHANGE_ID=${CHANGE_ID:-"develop"} - echo "GIT_URL=${GIT_URL}" - echo "CHANGE_ID=${CHANGE_ID}" - - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - - SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') - git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 - #git add logs/RegressionTests_${machine_name_logs}.log - #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " - - #FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') - #git pull sshorigin ${FORK_BRANCH} - #git push sshorigin HEAD:${FORK_BRANCH} - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}" - #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) - #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label} -} - regression_test "${machine}" -post_test "${machine}" "RT" diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 9913e4beed..fd39faaa3b 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -1,5 +1,7 @@ #!/bin/bash -x set -eu +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME}} +export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") @@ -33,6 +35,7 @@ echo "UFS_COMPILER=<${UFS_COMPILER}>" echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE}>" + machine=${NODE_NAME} echo "machine=<${machine}>" machine_id=${UFS_PLATFORM} @@ -99,6 +102,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ./.cicd/scripts/create_baseline.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Baseline Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" + ./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "BL" else echo "skip Creating baseline on ${UFS_PLATFORM}." 
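post_test_results.sh derives a lowercase log prefix from the node name, either with ${machine,,} or with awk tolower. A compact sketch of both forms (the example value is hypothetical):

    #!/bin/bash
    machine="Hercules-EPIC"
    # bash 4+ case conversion: lowercase the entire value
    echo "${machine,,}"                               # hercules-epic
    # awk tolower() on the first whitespace-separated field, as used for
    # the RegressionTests_<machine>.log prefix
    echo "${machine}" | awk '{ print tolower($1) }'   # hercules-epic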
ls -al .cicd/* @@ -109,6 +113,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Regression Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" + ./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "RT" fi cd tests/ From 21a0f299f9e9e8a62ea1ffa5d6c31e482ed2682c Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 4 Dec 2024 15:45:23 -0600 Subject: [PATCH 050/106] move post to a separate stage Signed-off-by: Bruce Kropp --- .cicd/scripts/post_test_results.sh | 2 +- .cicd/scripts/wm_test.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh index b64362ece6..236c08b8dd 100755 --- a/.cicd/scripts/post_test_results.sh +++ b/.cicd/scripts/post_test_results.sh @@ -30,7 +30,7 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} function post_test() { local machine=${1:-${NODE_NAME}} - local machine_id=${machine,,} # tolower + #local machine_id=${machine,,} # tolower local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} local WORKSPACE diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index fd39faaa3b..5f1ea7ae02 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -102,7 +102,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ./.cicd/scripts/create_baseline.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Baseline Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" - ./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "BL" + #./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "BL" else echo "skip Creating baseline on ${UFS_PLATFORM}." ls -al .cicd/* @@ -113,7 +113,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Regression Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" - ./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "RT" + #./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "RT" fi cd tests/ From c986e97fde2a7480b58558f239642e11a219b252 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Wed, 4 Dec 2024 20:25:28 -0600 Subject: [PATCH 051/106] allow PW cluster machines Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 5f1ea7ae02..13babb4e85 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -56,7 +56,7 @@ status=$? 
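These scripts locate the repository root from their own path rather than from the caller's working directory. A standalone sketch of that realpath/dirname pattern (the ../.. layout is an assumption carried over from the .cicd/scripts location above):

    #!/bin/bash
    # Resolve paths relative to this script, not the caller's CWD.
    SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}")
    SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}")
    REPO_ROOT=$(realpath "${SCRIPTS_DIR}/../..")   # assumes a .cicd/scripts/ layout
    echo "script dir: ${SCRIPTS_DIR}"
    echo "repo root:  ${REPO_ROOT}"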
[[ -n "${WM_REGRESSION_TESTS}" ]] || WM_REGRESSION_TESTS=true # default #[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long #[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false -[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : +#[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : export WM_REGRESSION_TESTS rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt From b4ee3089e398cd01fc539d1295118c9e583c7806 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Thu, 5 Dec 2024 11:44:05 -0600 Subject: [PATCH 052/106] add Post Regression Test as a stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 126 ++++++++++++++++++++++++++-------------------- 1 file changed, 72 insertions(+), 54 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 50fbc19a30..e37eee7f01 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -1,60 +1,78 @@ matchedNode = [] generateBaselineNode = [] for (label in pullRequest.labels) { - listOfLabelNodeNames = jenkins.model.Jenkins.instance.nodes.collect { - node -> node.getLabelString().contains(label) ? node.name : null + listOfLabelNodeNames = jenkins.model.Jenkins.instance.nodes.collect { + node -> node.getLabelString().contains(label) ? node.name : null - if ((label.matches(node.getLabelString()+"-(.*)"))) { - matchedNode += node.getLabelString() - } + if ((label.matches(node.getLabelString()+"-(.*)"))) { + matchedNode += node.getLabelString() + } - if ((label.matches(node.getLabelString()+"(.*)-BL"))) { - generateBaselineNode += node.getLabelString() - } - } + if ((label.matches(node.getLabelString()+"(.*)-BL"))) { + generateBaselineNode += node.getLabelString() + } + } } modifiedLabels = matchedNode.collect{"'" + it + "'"} baselineLabels = generateBaselineNode.collect{"'" + it + "'"} def generateStage(nodeLabel) { return { - stage("Running on ${nodeLabel}") { + stage("Test on ${nodeLabel}") { node(nodeLabel) { - cleanWs() - checkout scm - script { - try { - echo "Running on ${nodeLabel}" - if (baselineLabels.contains(nodeLabel)) { - sh "WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' - } - else { - sh "WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' - } - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - } - catch(err) { - sh ''' - export machine=${NODE_NAME} - export CHANGE_ID=${CHANGE_ID} + cleanWs() + checkout scm + script { + try { + echo "Running on ${nodeLabel}" + if (baselineLabels.contains(nodeLabel)) { + sh "WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + } + else { + sh "WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + } + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 
'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } + catch(err) { + sh ''' + export machine=${NODE_NAME} + export CHANGE_ID=${CHANGE_ID} - cd ${WORKSPACE}/tests - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + cd ${WORKSPACE}/tests + export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') + tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - echo "Testing concluded...removing labels for $machine from $GIT_URL" - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) - #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - currentBuild.result = 'FAILURE' - } - sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + echo "Testing concluded...removing labels for $machine from $GIT_URL" + GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) + GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) + #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} + ''' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + currentBuild.result = 'FAILURE' + } + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + } + } + } + stage("Post from ${nodeLabel}") { + node(nodeLabel) { + script { + try { + echo "Post Results from ${nodeLabel}" + if (baselineLabels.contains(nodeLabel)) { + sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} BL"' + } + else { + sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} RT"' + } + } + catch(err) { + echo "Error: Post Results from ${nodeLabel}" + } } - } - } - } + } + } + } } def parallelStagesMap = modifiedLabels.collectEntries { @@ -93,24 +111,24 @@ pipeline { parallel parallelStagesMap } } - } - } - post { - success { - node('built-in') { + } + } + post { + success { + node('built-in') { echo 'This will run only if successful.' sh ''' aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":sunny: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *succeeded*"}}' ''' - } - } - failure { - node('built-in') { + } + } + failure { + node('built-in') { echo 'This will run only if the run was marked as unstable.' 
sh ''' aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":warning: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *failed!*"}}' ''' - } - } - } + } + } + } } From 0b9e9e6bf5403d43324d7ae39f0ea8c5576c8950 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Thu, 5 Dec 2024 16:04:14 -0600 Subject: [PATCH 053/106] add flag whether to post results here or let it happen in a separate CI stage Signed-off-by: Bruce Kropp --- .cicd/scripts/post_test_results.sh | 6 +++--- .cicd/scripts/wm_test.sh | 7 +++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh index 236c08b8dd..f100820b47 100755 --- a/.cicd/scripts/post_test_results.sh +++ b/.cicd/scripts/post_test_results.sh @@ -31,7 +31,7 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} function post_test() { local machine=${1:-${NODE_NAME}} #local machine_id=${machine,,} # tolower - local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') + #local machine_name_logs=$(echo "${machine}" | awk '{ print tolower($1) }') local label=${2:-"undef"} local WORKSPACE WORKSPACE="$(pwd)" @@ -46,14 +46,14 @@ function post_test() { SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url') git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 - #git add logs/RegressionTests_${machine_name_logs}.log + #git add logs/RegressionTests_${machine,,}.log #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community " #FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref') #git pull sshorigin ${FORK_BRANCH} #git push sshorigin HEAD:${FORK_BRANCH} - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log + #tar --create --gzip --verbose --dereference --file "${machine,,}.tgz" ${WORKSPACE}/tests/logs/*.log echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}" #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 13babb4e85..4a2b3437d4 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -35,6 +35,7 @@ echo "UFS_COMPILER=<${UFS_COMPILER}>" echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE}>" +echo "WM_POST_TEST_RESULTS=<${WM_POST_TEST_RESULTS}>" machine=${NODE_NAME} echo "machine=<${machine}>" @@ -58,6 +59,8 @@ status=$? 
#[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : export WM_REGRESSION_TESTS +[[ -n "${WM_CREATE_BASELINE}" ]] || WM_CREATE_BASELINE=false # default +[[ -n "${WM_POST_TEST_RESULTS}" ]] || WM_POST_TEST_RESULTS=false # default rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt @@ -102,7 +105,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ./.cicd/scripts/create_baseline.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Baseline Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" - #./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "BL" + [[ ${WM_POST_TEST_RESULTS} = true ]] && ./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "BL" || echo "post test results seprately" else echo "skip Creating baseline on ${UFS_PLATFORM}." ls -al .cicd/* @@ -113,7 +116,7 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ./.cicd/scripts/regression_test.sh | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt status=${PIPESTATUS[0]} echo "Pipeline Completed Regression Tests ${WM_OPERATIONAL_TESTS} on ${UFS_PLATFORM} ${UFS_COMPILER}. status=${status}" - #./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "RT" + [[ ${WM_POST_TEST_RESULTS} = true ]] && ./.cicd/scripts/post_test_results.sh "${UFS_PLATFORM}" "RT" || echo "post test results seprately" fi cd tests/ From 5f9a2ae8bc852c292216d207bb1baff640870a5b Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Thu, 5 Dec 2024 18:44:54 -0600 Subject: [PATCH 054/106] fix unbound vars Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 4a2b3437d4..49c8c0a0a7 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -32,10 +32,10 @@ echo "NODE_NAME=${NODE_NAME}" echo "USER=${USER}" echo "UFS_PLATFORM=<${UFS_PLATFORM}>" echo "UFS_COMPILER=<${UFS_COMPILER}>" -echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}>" -echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS}>" -echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE}>" -echo "WM_POST_TEST_RESULTS=<${WM_POST_TEST_RESULTS}>" +echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}:-"">" +echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS:-""}>" +echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE:-""}>" +echo "WM_POST_TEST_RESULTS=<${WM_POST_TEST_RESULTS:-""}>" machine=${NODE_NAME} echo "machine=<${machine}>" @@ -60,7 +60,9 @@ status=$? #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : export WM_REGRESSION_TESTS [[ -n "${WM_CREATE_BASELINE}" ]] || WM_CREATE_BASELINE=false # default +export WM_CREATE_BASELINE [[ -n "${WM_POST_TEST_RESULTS}" ]] || WM_POST_TEST_RESULTS=false # default +export WM_POST_TEST_RESULTS rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt From c75fc53e98cdb0164f508d924de46ccd88c2fe79 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Thu, 5 Dec 2024 19:15:55 -0600 Subject: [PATCH 055/106] fix another unbound var Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 49c8c0a0a7..152150c809 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -54,14 +54,14 @@ status=0 ls -l build/ufs_model || : # just checking status=$? 
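The unbound-variable fixes in these commits expand variables with an in-brace fallback so that set -u does not abort; note that the fallback only guards the expansion when it is written inside the braces, as in ${VAR:-default}. A minimal sketch (the variable name is hypothetical):

    #!/bin/bash
    set -eu
    # Under set -u, expanding an unset variable is fatal unless a fallback
    # is supplied inside the braces.
    echo "WM_FLAG=<${WM_FLAG:-}>"            # safe: empty fallback
    [[ -n "${WM_FLAG:-}" ]] || WM_FLAG=true  # assign a default when unset or empty
    export WM_FLAG
    echo "WM_FLAG=<${WM_FLAG}>"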
-[[ -n "${WM_REGRESSION_TESTS}" ]] || WM_REGRESSION_TESTS=true # default +[[ -n "${WM_REGRESSION_TESTS:-""}" ]] || WM_REGRESSION_TESTS=true # default #[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long #[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false #[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : export WM_REGRESSION_TESTS -[[ -n "${WM_CREATE_BASELINE}" ]] || WM_CREATE_BASELINE=false # default +[[ -n "${WM_CREATE_BASELINE:-""}" ]] || WM_CREATE_BASELINE=false # default export WM_CREATE_BASELINE -[[ -n "${WM_POST_TEST_RESULTS}" ]] || WM_POST_TEST_RESULTS=false # default +[[ -n "${WM_POST_TEST_RESULTS:-""}" ]] || WM_POST_TEST_RESULTS=false # default export WM_POST_TEST_RESULTS rm -f ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt From ac925edefee9bf40b2c041628ac4ffe9eac2e261 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 11:20:31 -0600 Subject: [PATCH 056/106] exclude RT from PW cluster machines Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index e37eee7f01..6b6e173bc6 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -18,10 +18,20 @@ modifiedLabels = matchedNode.collect{"'" + it + "'"} baselineLabels = generateBaselineNode.collect{"'" + it + "'"} def generateStage(nodeLabel) { return { - stage("Test on ${nodeLabel}") { + stage("Initialize on ${nodeLabel}") { node(nodeLabel) { cleanWs() checkout scm + } + } + stage("Test on ${nodeLabel}") { + when { + beforeAgent true + allOf { + not { expression { return env.NODE_NAME.contains('clusternoaa') } } + } + } + node(nodeLabel) { script { try { echo "Running on ${nodeLabel}" @@ -55,6 +65,12 @@ def generateStage(nodeLabel) { } } stage("Post from ${nodeLabel}") { + when { + beforeAgent true + allOf { + not { expression { return env.NODE_NAME.contains('clusternoaa') } } + } + } node(nodeLabel) { script { try { From 6f1a412a3fe9194f1ec641bbf81746d9ddb213ca Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 12:06:45 -0600 Subject: [PATCH 057/106] clear Jenkinsfile when conditions Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 12 -- .cicd/Jenkinsfile.pipeline | 389 ------------------------------------- 2 files changed, 401 deletions(-) delete mode 100644 .cicd/Jenkinsfile.pipeline diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 6b6e173bc6..f900be4885 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -25,12 +25,6 @@ def generateStage(nodeLabel) { } } stage("Test on ${nodeLabel}") { - when { - beforeAgent true - allOf { - not { expression { return env.NODE_NAME.contains('clusternoaa') } } - } - } node(nodeLabel) { script { try { @@ -65,12 +59,6 @@ def generateStage(nodeLabel) { } } stage("Post from ${nodeLabel}") { - when { - beforeAgent true - allOf { - not { expression { return env.NODE_NAME.contains('clusternoaa') } } - } - } node(nodeLabel) { script { try { diff --git a/.cicd/Jenkinsfile.pipeline b/.cicd/Jenkinsfile.pipeline deleted file mode 100644 index 57d9bc62fb..0000000000 --- a/.cicd/Jenkinsfile.pipeline +++ /dev/null @@ -1,389 +0,0 @@ -matchedNode = [] -generateBaselineNode = [] -for (label in pullRequest.labels) { - listOfLabelNodeNames = jenkins.model.Jenkins.instance.nodes.collect { - node -> node.getLabelString().contains(label) ? 
node.name : null - - if ((label.matches(node.getLabelString()+"-(.*)"))) { - matchedNode += node.getLabelString() - } - - if ((label.matches(node.getLabelString()+"(.*)-BL"))) { - generateBaselineNode += node.getLabelString() - } - } -} - -modifiedLabels = matchedNode.collect{"'" + it + "'"} -baselineLabels = generateBaselineNode.collect{"'" + it + "'"} -def generateStage(nodeLabel) { - return { - stage("Running on ${nodeLabel}") { - node(nodeLabel) { - cleanWs() - checkout scm - script { - try { - echo "Running on ${nodeLabel}" - if (baselineLabels.contains(nodeLabel)) { - sh ''' - git submodule update --init --recursive - ls -al .cicd/* - cd tests - pwd - export BL_DATE=$(cat bl_date.conf | cut -d '=' -f2) - export machine=${NODE_NAME} - export PATH=$PATH:~/bin - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - - if [[ $machine =~ "Jet" ]] - then - echo "Creating baselines on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Creating baselines on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/work/noaa/epic/hercules/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /work2/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh hercules develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - cd .. - module load git/2.28.0 - git submodule update --init --recursive - cd tests - echo "Creating baselines on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed -i 's|/work/noaa/stmp/${USER}|/work/noaa/epic/stmp/role-epic/|g' rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/work/noaa/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /work/noaa/epic/stmp/role-epic/stmp/role-epic/FV3_RT/ - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - ./adjust_permissions.sh orion develop-${BL_DATE} - chgrp noaa-hpc develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
- cp RegressionTests_orion.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - unset LD_LIBRARY_PATH - export DISKNM=/gpfs/f5/epic/world-shared/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /gpfs/f5/epic/scratch/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd ${DISKNM}/NEMSfv3gfs/ - chgrp ncep develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Creating baselines on $machine" - export ACCNR=epic - ./rt.sh -a ${ACCNR} -c -r -l rt.conf - export DISKNM=/scratch2/NAGAPE/epic/UFS-WM_RT - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /scratch1/NCEPDEV/stmp4/role.epic/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_hera.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] - then - echo "Creating baselines on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -c -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - export DISKNM=/glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/ - cd ${DISKNM}/NEMSfv3gfs/ - mkdir develop-${BL_DATE} - cd /glade/derecho/scratch/epicufsrt/FV3_RT - rsync -a REGRESSION_TEST/ ${DISKNM}/NEMSfv3gfs/develop-${BL_DATE} - cd $WORKSPACE/tests - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - else - echo "Creating baselines on $machine" - ./rt.sh -a ${ACCNR} -c -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - echo "Testing concluded...removing labels for $machine from $GIT_URL" - - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - - curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-BL - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - - } - else { - sh ''' - git submodule update --init --recursive - pwd - cd tests - export machine=${NODE_NAME} - export PATH=$PATH:~/bin - echo $CHANGE_ID - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - - if [[ $machine =~ "Jet" ]] - then - echo "Running regression tests on $machine" - export dprefix=/lfs1/NAGAPE/$ACCNR/$USER - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - elif [[ $machine =~ "Hercules" ]] - then - echo "Running regression tests on $machine" - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - export ACCNR=epic - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hercules.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_hercules.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Orion" ]] - then - echo "Running regression tests on $machine" - cd .. - module load git/2.28.0 - git submodule update --init --recursive - cd tests - export dprefix=/work2/noaa/$ACCNR/$USER - sed "s|/noaa/stmp/|/noaa/$ACCNR/stmp/|g" -i rt.sh - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_orion.log /work/noaa/epic/role-epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_orion.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Gaea" ]] - then - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - unset LD_LIBRARY_PATH - cd logs/ - cp RegressionTests_gaea.log /gpfs/f5/epic/scratch/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_gaea.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Hera" ]] - then - echo "Running regression tests on $machine" - export ACCNR=epic - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_hera.log /scratch2/NAGAPE/epic/role.epic/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. 
- cp RegressionTests_hera.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - elif [[ $machine =~ "Derecho" ]] - then - echo "Running regression tests on $machine" - export ACCNR=nral0032 - ./rt.sh -a ${ACCNR} -e -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - cd logs/ - cp RegressionTests_derecho.log /glade/derecho/scratch/epicufsrt/jenkins/workspace - git remote -v - git fetch --no-recurse-submodules origin - git reset FETCH_HEAD --hard - cd .. && cd .. && cd .. - cp RegressionTests_derecho.log $WORKSPACE/tests/logs/ - cd $WORKSPACE/tests/ - else - echo "Running regression tests on $machine" - ./rt.sh -a ${ACCNR} -r -l rt.conf | tee $WORKSPACE/tests/logs/RT-run-$machine.log - fi - - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - echo "Testing concluded...removing labels for $machine from $GIT_URL" - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - #git add logs/RegressionTests_$machine_name_logs.log - #git commit -m "[AutoRT] $machine Job Completed.\n\n\n on-behalf-of @ufs-community " - #git pull sshorigin $FORK_BRANCH - #git push sshorigin HEAD:$FORK_BRANCH - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1) - - curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/$machine-RT - - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - } - } - catch(err) { - sh ''' - export machine=${NODE_NAME} - export CHANGE_ID=${CHANGE_ID} - export SSH_ORIGIN=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.repo.ssh_url') - export FORK_BRANCH=$(curl --silent https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/$CHANGE_ID | jq -r '.head.ref') - cd $WORKSPACE/tests - git config user.email "ecc.platform@noaa.gov" - git config user.name "epic-cicd-jenkins" - export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }') - echo "Testing concluded...removing labels for $machine from $GIT_URL" - #git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1 - #git remote add sshorigin $SSH_ORIGIN > /dev/null 2>&1 - - tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log - - GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4) - GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' 
-f1) - - curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL} - ''' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - currentBuild.result = 'FAILURE' - } - } - } - } - } -} - -def parallelStagesMap = modifiedLabels.collectEntries { - ["${it}" : generateStage(it)] -} - -pipeline { - agent none - environment { - ACCNR = 'epic' - AWS_PROD_ACCOUNT_ID = credentials('AWS_PROD_ACCOUNT_ID') - AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') - GITHUB_TOKEN = credentials('GithubJenkinsNew') - GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' - } - stages { - stage('Launch SonarQube') { - steps { - script { - echo "BRANCH_NAME=${env.CHANGE_BRANCH}" - echo "FORK_NAME=${env.CHANGE_FORK}" - echo "CHANGE_URL=${env.CHANGE_URL}" - echo "CHANGE_ID=${env.CHANGE_ID}" - build job: '/ufs-weather-model/ufs-wm-sonarqube', parameters: [ - string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'), - string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''), - string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''), - string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '') - ], wait: false - } - } - } - stage('Run Regression Tests in Parallel') { - steps { - script { - parallel parallelStagesMap - } - } - } - } - post { - success { - node('built-in') { - echo 'This will run only if successful.' - sh ''' - aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":sunny: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *succeeded*"}}' - ''' - } - } - failure { - node('built-in') { - echo 'This will run only if the run was marked as unstable.' 
- sh ''' - aws sns publish --topic-arn "arn:aws:sns:us-east-1:${AWS_PROD_ACCOUNT_ID}:${AWS_PROD_SNS_TOPIC}" --region us-east-1 --message '{"version":"1.0","source":"custom","content":{"description":":warning: Jenkins build *'"$JOB_NAME"' '"$BUILD_NUMBER"'* with *PR-'"$CHANGE_ID"'* *failed!*"}}' - ''' - } - } - } -} From 5422ed8596effa1ebdf6cf56d4207848c4fe2be8 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 12:56:04 -0600 Subject: [PATCH 058/106] full path to GNU bash for login exec Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index f900be4885..4c950f956a 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -30,10 +30,10 @@ def generateStage(nodeLabel) { try { echo "Running on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { - sh "WM_CREATE_BASELINE=true" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh "WM_CREATE_BASELINE=true" + '/usr/bin/bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } else { - sh "WM_CREATE_BASELINE=false" + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh "WM_CREATE_BASELINE=false" + '/usr/bin/bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } From 09404c7bd5498ffa25a9636fddd21fbb78334305 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 12:59:04 -0600 Subject: [PATCH 059/106] add space before bash for login exec Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 4c950f956a..a690c80ffa 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -30,10 +30,10 @@ def generateStage(nodeLabel) { try { echo "Running on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { - sh "WM_CREATE_BASELINE=true" + '/usr/bin/bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh "WM_CREATE_BASELINE=true " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } else { - sh "WM_CREATE_BASELINE=false" + '/usr/bin/bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' + sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } From 528d1468d3cffe42611840121435ca239ab3abb6 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 13:02:14 -0600 Subject: [PATCH 060/106] fix typo in wm_test.sh Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_test.sh | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 152150c809..1d24f52097 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -32,7 +32,7 @@ echo "NODE_NAME=${NODE_NAME}" echo "USER=${USER}" echo "UFS_PLATFORM=<${UFS_PLATFORM}>" echo "UFS_COMPILER=<${UFS_COMPILER}>" -echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS}:-"">" +echo "WM_REGRESSION_TESTS=<${WM_REGRESSION_TESTS:-""}>" echo "WM_OPERATIONAL_TESTS=<${WM_OPERATIONAL_TESTS:-""}>" echo "WM_CREATE_BASELINE=<${WM_CREATE_BASELINE:-""}>" echo "WM_POST_TEST_RESULTS=<${WM_POST_TEST_RESULTS:-""}>" From 264bed24ac4024e2de033f393439c35906327bad Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 15:50:38 -0600 Subject: [PATCH 061/106] add a template build stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index a690c80ffa..aa40b924f4 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -22,6 +22,16 @@ def generateStage(nodeLabel) { node(nodeLabel) { cleanWs() checkout scm + script { + sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + } + } + } + stage("Build on ${nodeLabel}") { + node(nodeLabel) { + script { + sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + } } } stage("Test on ${nodeLabel}") { @@ -54,7 +64,7 @@ def generateStage(nodeLabel) { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] currentBuild.result = 'FAILURE' } - sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } } From 64a282dc7da612c84a7f4f13a8a7796ab6b922d3 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 17:04:21 -0600 Subject: [PATCH 062/106] Build stage to run wm_build.sh Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index aa40b924f4..b1ae872b15 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -30,7 +30,9 @@ def generateStage(nodeLabel) { stage("Build on ${nodeLabel}") { node(nodeLabel) { script { - sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + echo "Building on ${nodeLabel}" + sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' + sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } } @@ -95,6 +97,10 @@ def parallelStagesMap = modifiedLabels.collectEntries { pipeline { agent none + parameters { + // Regression Test Suite ? 
+ choice(name: 'WM_OPERATIONAL_TESTS', choices: ['default', 'control_p8' 'cpld_control_p8' 'comprehensive' 'rt.sh', 'none'], description: 'Specify the suite of tests to run') + } environment { ACCNR = 'epic' AWS_PROD_ACCOUNT_ID = credentials('AWS_PROD_ACCOUNT_ID') From 77486beb1aa0b34f5efc4ee65cf245219434066b Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 18:12:14 -0600 Subject: [PATCH 063/106] fix param choices Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index b1ae872b15..0ec7ec5ff0 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -99,7 +99,7 @@ pipeline { agent none parameters { // Regression Test Suite ? - choice(name: 'WM_OPERATIONAL_TESTS', choices: ['default', 'control_p8' 'cpld_control_p8' 'comprehensive' 'rt.sh', 'none'], description: 'Specify the suite of tests to run') + choice(name: 'WM_OPERATIONAL_TESTS', choices: ['default', 'control_p8', 'cpld_control_p8', 'comprehensive', 'rt.sh', 'none'], description: 'Specify the suite of tests to run') } environment { ACCNR = 'epic' From 11ffcca981a1096046e948aa2e07cdd939aa9c9d Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 18:25:19 -0600 Subject: [PATCH 064/106] default vars Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_build.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index 936de88872..09808b9316 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -1,5 +1,7 @@ #!/bin/bash set -eu +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME}} +export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") From 93a42732f036aee303f1d0228f7d0cc48e9879aa Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 18:29:42 -0600 Subject: [PATCH 065/106] machine_id tolower Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index 09808b9316..73d19a394c 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -1,6 +1,6 @@ #!/bin/bash set -eu -export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME}} +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}} export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") From a04386c0322737f8b4f0236ce11188af3a23aea9 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 18:49:08 -0600 Subject: [PATCH 066/106] disable build since NetCDF is not found Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 0ec7ec5ff0..3bfa2f095f 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -31,7 +31,7 @@ def generateStage(nodeLabel) { node(nodeLabel) { script { echo "Building on ${nodeLabel}" - sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' + echo 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } From 7c2bf03b3d7984aa796cbbf65ec2b7172d4006c2 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 19:02:00 -0600 Subject: [PATCH 067/106] NODE_NAME, machine, UFS_PLATFORM, machine_id, ... 
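A minimal sketch, assuming bash 4+, of the ${var,,} case conversion these hunks rely on; the NODE_NAME value below is hypothetical:

    NODE_NAME="Hera-EPIC"
    UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}}   # ",," lower-cases the whole value -> "hera-epic"
    machine_id=${UFS_PLATFORM,,}                   # harmless once the value is already lower case
    [[ ${UFS_PLATFORM} = gaea ]] && echo "on gaea" || :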
Signed-off-by: Bruce Kropp --- .cicd/scripts/wm_build.sh | 12 ++++++------ .cicd/scripts/wm_test.sh | 16 ++++++++-------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index 73d19a394c..a3b08b8bed 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -1,6 +1,6 @@ #!/bin/bash set -eu -export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}} +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME}} export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") @@ -34,14 +34,14 @@ workspace=$(pwd) export workspace machine=${NODE_NAME} echo "machine=<${machine}>" -machine_id=${UFS_PLATFORM} -if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then +machine_id=${UFS_PLATFORM,,} +if [[ ${UFS_PLATFORM,,} =~ clusternoaa ]] ; then machine_id="noaacloud" sed -e "s|EPIC/spack-stack/spack-stack-1.5.0|spack-stack/spack-stack-1.5.1|g" -i modulefiles/ufs_noaacloud.intel.lua fi echo "machine_id=<${machine_id}>" -if [[ ${UFS_PLATFORM} = derecho ]] ; then +if [[ ${UFS_PLATFORM,,} = derecho ]] ; then export ACCNR=nral0032 else export ACCNR=epic @@ -50,14 +50,14 @@ echo "ACCNR=${ACCNR}" export LMOD_SH_DBG_ON=0 echo "LMOD_VERSION=${LMOD_VERSION}" -if [[ ${UFS_PLATFORM} = gaea ]] ; then +if [[ ${UFS_PLATFORM,,} = gaea ]] ; then source /gpfs/f5/epic/scratch/role.epic/contrib/Lmod_init_C5.sh echo "LMOD_VERSION=${LMOD_VERSION}" fi set +x module use ${PWD}/modulefiles >/dev/null 2>&1 module load ufs_${machine_id}.${UFS_COMPILER} || true -[[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 || true +[[ ${UFS_PLATFORM,,} = gaea ]] && module load cmake/3.23.1 || true module list echo "Pipeline Building WM on ${UFS_PLATFORM} ${UFS_COMPILER} with Account=${ACCNR}." diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 1d24f52097..0740242299 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -39,8 +39,8 @@ echo "WM_POST_TEST_RESULTS=<${WM_POST_TEST_RESULTS:-""}>" machine=${NODE_NAME} echo "machine=<${machine}>" -machine_id=${UFS_PLATFORM} -if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then +machine_id=${UFS_PLATFORM,,} +if [[ ${UFS_PLATFORM,,} =~ clusternoaa ]] ; then machine_id="noaacloud" #sed -i -e "s|EPIC/spack-stack/spack-stack-1.5.0|spack-stack/spack-stack-1.5.1|g" modulefiles/ufs_noaacloud.intel.lua fi @@ -55,9 +55,9 @@ ls -l build/ufs_model || : # just checking status=$? 
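# A minimal sketch (annotation, not from the original script): inside [[ ]],
# "=" (or "==") matches the right-hand side as a glob pattern, while "=~"
# treats it as a POSIX extended regex, which is why the substring checks in
# the hunks below use "=~". The platform value here is illustrative only.
UFS_PLATFORM=clusternoaa-aws
[[ ${UFS_PLATFORM} =~ clusternoaa ]] && echo "cloud cluster"   # regex substring match
[[ ${UFS_PLATFORM} = gaea ]] || echo "not gaea"                # exact/glob match
# end of annotation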
[[ -n "${WM_REGRESSION_TESTS:-""}" ]] || WM_REGRESSION_TESTS=true # default -#[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long -#[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false -#[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : +#[[ ${UFS_PLATFORM,,} == jet ]] && WM_REGRESSION_TESTS=false # takes too long +#[[ ${UFS_PLATFORM,,} == derecho ]] && WM_REGRESSION_TESTS=false +#[[ ${UFS_PLATFORM,,} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : export WM_REGRESSION_TESTS [[ -n "${WM_CREATE_BASELINE:-""}" ]] || WM_CREATE_BASELINE=false # default export WM_CREATE_BASELINE @@ -73,20 +73,20 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then echo "LMOD_VERSION=${LMOD_VERSION}" set +x - if [[ ${UFS_PLATFORM} = orion ]] ; then + if [[ ${UFS_PLATFORM,,} = orion ]] ; then #module --ignore_cache load git/2.28.0 git --version git submodule update --init --recursive fi - if [[ ${UFS_PLATFORM} = gaea ]] ; then + if [[ ${UFS_PLATFORM,,} = gaea ]] ; then source /gpfs/f5/epic/scratch/role.epic/contrib/Lmod_init_C5.sh echo "LMOD_VERSION=${LMOD_VERSION}" fi module use ${PWD}/modulefiles >/dev/null 2>&1 module load ufs_${machine_id}.${UFS_COMPILER} || true - [[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 + [[ ${UFS_PLATFORM,,} = gaea ]] && module load cmake/3.23.1 module list set -x From f7f5531dac4611b5e288669a528efc93c6fc20d5 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Fri, 6 Dec 2024 19:14:41 -0600 Subject: [PATCH 068/106] show node name Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 3bfa2f095f..bf0c68d498 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -23,7 +23,7 @@ def generateStage(nodeLabel) { cleanWs() checkout scm script { - sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } } @@ -31,6 +31,9 @@ def generateStage(nodeLabel) { node(nodeLabel) { script { echo "Building on ${nodeLabel}" + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} " + //currentBuild.description = "" + echo 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } From cce39783faa9dba6933585cf027ae43da13c3158 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 11:02:52 -0600 Subject: [PATCH 069/106] add boolean whether to run build stage before regression test Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index bf0c68d498..48f247aedd 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -28,13 +28,20 @@ def generateStage(nodeLabel) { } } stage("Build on ${nodeLabel}") { + when { + beforeAgent true + allOf { + expression { params.WM_BUILD } + } + } + node(nodeLabel) { script { echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} " //currentBuild.description = "" - echo 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' + sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } @@ -101,6 +108,7 @@ def parallelStagesMap = modifiedLabels.collectEntries { pipeline { agent none parameters { + booleanParam name: 
'WM_BUILD', defaultValue: false, description: 'Whether to attempt to compile the model code tests' // Regression Test Suite ? choice(name: 'WM_OPERATIONAL_TESTS', choices: ['default', 'control_p8', 'cpld_control_p8', 'comprehensive', 'rt.sh', 'none'], description: 'Specify the suite of tests to run') } From c057426267ad5ed4cd5434d5574397c115a3e22f Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 11:32:59 -0600 Subject: [PATCH 070/106] add step when to run build stage before regression test Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 48f247aedd..d68389d058 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -36,13 +36,15 @@ def generateStage(nodeLabel) { } node(nodeLabel) { - script { + steps { + script { echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} " //currentBuild.description = "" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + } } } } From bf7f6a7340f070fda3e612b8cc2ac7ee0155ea29 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 11:35:14 -0600 Subject: [PATCH 071/106] add step when to run build inside node before regression test Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index d68389d058..f4fed8025f 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -28,6 +28,7 @@ def generateStage(nodeLabel) { } } stage("Build on ${nodeLabel}") { + node(nodeLabel) { when { beforeAgent true allOf { @@ -35,7 +36,6 @@ def generateStage(nodeLabel) { } } - node(nodeLabel) { steps { script { echo "Building on ${nodeLabel}" From 61b888f00bb3b086460f980f73975303f58c0c51 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 11:43:13 -0600 Subject: [PATCH 072/106] add conditional if to build stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index f4fed8025f..649d0fb442 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -28,15 +28,8 @@ def generateStage(nodeLabel) { } } stage("Build on ${nodeLabel}") { - node(nodeLabel) { - when { - beforeAgent true - allOf { - expression { params.WM_BUILD } - } - } - - steps { + if (params.WM_BUILD == true ) { + node(nodeLabel) { script { echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} " From e6b399b09633a0bfedfba26cbf47a18db406b51b Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 11:50:52 -0600 Subject: [PATCH 073/106] add description to build stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 649d0fb442..d5d234f74f 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -23,6 +23,7 @@ def generateStage(nodeLabel) { cleanWs() checkout scm script { + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } @@ -39,7 +40,9 @@ def generateStage(nodeLabel) { sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } - } + } else { + echo "Building on ${nodeLabel} skipped" + } } stage("Test on ${nodeLabel}") { 
node(nodeLabel) { From 3b52ffd49f2e0a7313a349c2748aa48fab35276d Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 12:17:13 -0600 Subject: [PATCH 074/106] add call to wm_init.sh to help metrics job Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index d5d234f74f..a538e5eb6f 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -24,6 +24,7 @@ def generateStage(nodeLabel) { checkout scm script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" + sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } } From 562afcd9ddf088a2727d7deb01b1a284ee0ca50a Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 13:35:13 -0600 Subject: [PATCH 075/106] limit parsing STAGE_NAME to just the first arg Signed-off-by: Bruce Kropp --- .cicd/scripts/disk_usage.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index d54457cd34..2035f25fe8 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -26,7 +26,7 @@ else fi echo "STAGE_NAME=${STAGE_NAME}" # from pipeline -outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME}.csv}" +outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME%% *}.csv}" function disk_usage() { local directory="${1:-${PWD}}" From 46907d7dcac0230329a799efe3f9576686b1743f Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 16:23:49 -0600 Subject: [PATCH 076/106] set env default for UFS_COMPILER Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index a538e5eb6f..f5297a1c25 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -117,6 +117,7 @@ pipeline { AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') GITHUB_TOKEN = credentials('GithubJenkinsNew') GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' + UFS_COMPILER = 'intel' } stages { stage('Launch SonarQube') { From 5670e7d0dc86026b3e69643fc4af283cdb82d226 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Mon, 9 Dec 2024 17:50:36 -0600 Subject: [PATCH 077/106] UFS_PLATFORM should be all lower case Signed-off-by: Bruce Kropp --- .cicd/scripts/disk_usage.sh | 2 +- .cicd/scripts/wm_build.sh | 10 +++++----- .cicd/scripts/wm_init.sh | 4 ++++ .cicd/scripts/wm_test.sh | 22 +++++++++++----------- 4 files changed, 21 insertions(+), 17 deletions(-) diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 2035f25fe8..bf5f04b9be 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -2,7 +2,7 @@ # Output a CSV report of disk usage on subdirs of some path # Usage: -# [JOB_NAME=] [BUILD_NUMBER=] [UFS_COMPILER=] [UFS_PLATFORM=] disk_usage path depth size outfile.csv +# [JOB_NAME=] [BUILD_NUMBER=] [UFS_COMPILER=] [UFS_PLATFORM=] disk_usage path depth size outfile.csv # # args: # directory=$1 diff --git a/.cicd/scripts/wm_build.sh b/.cicd/scripts/wm_build.sh index a3b08b8bed..e4a4c0822a 100755 --- a/.cicd/scripts/wm_build.sh +++ b/.cicd/scripts/wm_build.sh @@ -1,6 +1,6 @@ #!/bin/bash set -eu -export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME}} +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}} export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") @@ -35,13 +35,13 @@ export workspace machine=${NODE_NAME} echo 
"machine=<${machine}>" machine_id=${UFS_PLATFORM,,} -if [[ ${UFS_PLATFORM,,} =~ clusternoaa ]] ; then +if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then machine_id="noaacloud" sed -e "s|EPIC/spack-stack/spack-stack-1.5.0|spack-stack/spack-stack-1.5.1|g" -i modulefiles/ufs_noaacloud.intel.lua fi echo "machine_id=<${machine_id}>" -if [[ ${UFS_PLATFORM,,} = derecho ]] ; then +if [[ ${UFS_PLATFORM} = derecho ]] ; then export ACCNR=nral0032 else export ACCNR=epic @@ -50,14 +50,14 @@ echo "ACCNR=${ACCNR}" export LMOD_SH_DBG_ON=0 echo "LMOD_VERSION=${LMOD_VERSION}" -if [[ ${UFS_PLATFORM,,} = gaea ]] ; then +if [[ ${UFS_PLATFORM} = gaea ]] ; then source /gpfs/f5/epic/scratch/role.epic/contrib/Lmod_init_C5.sh echo "LMOD_VERSION=${LMOD_VERSION}" fi set +x module use ${PWD}/modulefiles >/dev/null 2>&1 module load ufs_${machine_id}.${UFS_COMPILER} || true -[[ ${UFS_PLATFORM,,} = gaea ]] && module load cmake/3.23.1 || true +[[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 || true module list echo "Pipeline Building WM on ${UFS_PLATFORM} ${UFS_COMPILER} with Account=${ACCNR}." diff --git a/.cicd/scripts/wm_init.sh b/.cicd/scripts/wm_init.sh index 27db702be1..231b1e9b2b 100755 --- a/.cicd/scripts/wm_init.sh +++ b/.cicd/scripts/wm_init.sh @@ -1,5 +1,7 @@ #!/bin/bash set -eu +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}} +export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") SCRIPTS_DIR=$(dirname "${SCRIPT_REALPATH}") @@ -16,3 +18,5 @@ TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} cd "${UFS_MODEL_DIR}" pwd +echo "UFS_PLATFORM=<${UFS_PLATFORM}>" +echo "UFS_COMPILER=<${UFS_COMPILER}>" diff --git a/.cicd/scripts/wm_test.sh b/.cicd/scripts/wm_test.sh index 0740242299..1377696730 100755 --- a/.cicd/scripts/wm_test.sh +++ b/.cicd/scripts/wm_test.sh @@ -1,6 +1,6 @@ #!/bin/bash -x set -eu -export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME}} +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}} export UFS_COMPILER=${UFS_COMPILER:-intel} SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}") @@ -40,7 +40,7 @@ echo "WM_POST_TEST_RESULTS=<${WM_POST_TEST_RESULTS:-""}>" machine=${NODE_NAME} echo "machine=<${machine}>" machine_id=${UFS_PLATFORM,,} -if [[ ${UFS_PLATFORM,,} =~ clusternoaa ]] ; then +if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then machine_id="noaacloud" #sed -i -e "s|EPIC/spack-stack/spack-stack-1.5.0|spack-stack/spack-stack-1.5.1|g" modulefiles/ufs_noaacloud.intel.lua fi @@ -55,9 +55,9 @@ ls -l build/ufs_model || : # just checking status=$? 
[[ -n "${WM_REGRESSION_TESTS:-""}" ]] || WM_REGRESSION_TESTS=true # default -#[[ ${UFS_PLATFORM,,} == jet ]] && WM_REGRESSION_TESTS=false # takes too long -#[[ ${UFS_PLATFORM,,} == derecho ]] && WM_REGRESSION_TESTS=false -#[[ ${UFS_PLATFORM,,} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : +#[[ ${UFS_PLATFORM} == jet ]] && WM_REGRESSION_TESTS=false # takes too long +#[[ ${UFS_PLATFORM} == derecho ]] && WM_REGRESSION_TESTS=false +#[[ ${UFS_PLATFORM} =~ clusternoaa ]] && WM_REGRESSION_TESTS=false || : export WM_REGRESSION_TESTS [[ -n "${WM_CREATE_BASELINE:-""}" ]] || WM_CREATE_BASELINE=false # default export WM_CREATE_BASELINE @@ -73,27 +73,27 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then echo "LMOD_VERSION=${LMOD_VERSION}" set +x - if [[ ${UFS_PLATFORM,,} = orion ]] ; then + if [[ ${UFS_PLATFORM} = orion ]] ; then #module --ignore_cache load git/2.28.0 git --version git submodule update --init --recursive fi - if [[ ${UFS_PLATFORM,,} = gaea ]] ; then + if [[ ${UFS_PLATFORM} = gaea ]] ; then source /gpfs/f5/epic/scratch/role.epic/contrib/Lmod_init_C5.sh echo "LMOD_VERSION=${LMOD_VERSION}" fi module use ${PWD}/modulefiles >/dev/null 2>&1 module load ufs_${machine_id}.${UFS_COMPILER} || true - [[ ${UFS_PLATFORM,,} = gaea ]] && module load cmake/3.23.1 + [[ ${UFS_PLATFORM} = gaea ]] && module load cmake/3.23.1 module list set -x echo "CHANGE_ID=${CHANGE_ID:-null}" echo "ACCNR=${ACCNR}" - [[ ! -f tests/logs/RegressionTests_${UFS_PLATFORM,,}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM,,}.log tests/logs/RegressionTests_${UFS_PLATFORM,,}.log.orig + [[ ! -f tests/logs/RegressionTests_${UFS_PLATFORM}.log ]] || mv tests/logs/RegressionTests_${UFS_PLATFORM}.log tests/logs/RegressionTests_${UFS_PLATFORM}.log.orig rm -f ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_*-log.txt umask 002 @@ -129,8 +129,8 @@ if [[ ${WM_REGRESSION_TESTS} = true ]] ; then ## Test Results ... 
echo "ExperimentName: ${WM_OPERATIONAL_TESTS:=default}" | tee -a ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-wm_test-log.txt | tee ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - grep -E " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt - grep -E " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM,,}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + grep -E " DIRECTORY: |Time: | Completed: |Result: " logs/RegressionTests_${UFS_PLATFORM}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt + grep -E " -- COMPILE | -- TEST " logs/RegressionTests_${UFS_PLATFORM}.log | tee -a ${workspace}/wm_test_results-${UFS_PLATFORM}-${UFS_COMPILER}.txt cd ${workspace} find ${workspace}/tests/logs -ls From b9f7917687ccd41414ed7ae7deec811d4687578a Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 11:04:22 -0600 Subject: [PATCH 078/106] save metrics and results files to s3 Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index f5297a1c25..aa9ec28869 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -26,6 +26,9 @@ def generateStage(nodeLabel) { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } } } @@ -34,11 +37,13 @@ def generateStage(nodeLabel) { node(nodeLabel) { script { echo "Building on ${nodeLabel}" - currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} " + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" //currentBuild.description = "" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 
"${env.UFS_PLATFORM}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } else { @@ -57,6 +62,8 @@ def generateStage(nodeLabel) { sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } catch(err) { sh ''' @@ -76,6 +83,9 @@ def generateStage(nodeLabel) { currentBuild.result = 'FAILURE' } sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } } } @@ -117,6 +127,7 @@ pipeline { AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') GITHUB_TOKEN = credentials('GithubJenkinsNew') GIT_URL = 
'https://github.com/ufs-community/ufs-weather-model.git' + UFS_PLATFORM = "${NODE_NAME}" UFS_COMPILER = 'intel' } stages { From aed73bde8b37ffb67f99a6539a2e8a0fc5b71823 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 11:33:23 -0600 Subject: [PATCH 079/106] set env.UFS_PLATFORM Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index aa9ec28869..c7f4c682f5 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -38,7 +38,7 @@ def generateStage(nodeLabel) { script { echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - //currentBuild.description = "" + currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' @@ -127,7 +127,7 @@ pipeline { AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') GITHUB_TOKEN = credentials('GithubJenkinsNew') GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' - UFS_PLATFORM = "${NODE_NAME}" + UFS_PLATFORM = env.NODE_NAME UFS_COMPILER = 'intel' } stages { From 19e172bfc63cff04cf727bdd1a984bb9ee9069c2 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 11:46:02 -0600 Subject: [PATCH 080/106] set env.UFS_PLATFORM in each node stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index c7f4c682f5..1f0fd5630e 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -20,10 +20,13 @@ def generateStage(nodeLabel) { return { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { + environment { + UFS_PLATFORM = "${NODE_NAME}" + } cleanWs() checkout scm script { - currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" + currentBuild.displayName = "#${BUILD_NUMBER} ${UFS_PLATFORM}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] @@ -35,6 +38,9 @@ def generateStage(nodeLabel) { stage("Build on ${nodeLabel}") { if (params.WM_BUILD == true ) { node(nodeLabel) { + environment { + UFS_PLATFORM = "${NODE_NAME}" + } script { echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" @@ -52,6 +58,9 @@ def generateStage(nodeLabel) { } stage("Test on ${nodeLabel}") { node(nodeLabel) { + environment { + UFS_PLATFORM = "${NODE_NAME}" + } script { try { echo "Running on ${nodeLabel}" @@ -127,7 +136,6 @@ pipeline { AWS_PROD_SNS_TOPIC = credentials('AWS_PROD_SNS_TOPIC') GITHUB_TOKEN = credentials('GithubJenkinsNew') GIT_URL = 'https://github.com/ufs-community/ufs-weather-model.git' - UFS_PLATFORM = env.NODE_NAME UFS_COMPILER = 'intel' } stages { From 
da5c8f9999bf9c41a6697b799d81aa86055394ea Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 11:50:21 -0600 Subject: [PATCH 081/106] use env.UFS_PLATFORM in init stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 1f0fd5630e..367b3bbace 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -26,7 +26,7 @@ def generateStage(nodeLabel) { cleanWs() checkout scm script { - currentBuild.displayName = "#${BUILD_NUMBER} ${UFS_PLATFORM}" + currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] From fb40659479068b215adcea68e0caaff1c6e0163e Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:19:22 -0600 Subject: [PATCH 082/106] use env.UFS_PLATFORM in node Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 367b3bbace..ee72829e77 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -29,10 +29,10 @@ def generateStage(nodeLabel) { currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - } } } stage("Build on ${nodeLabel}") { @@ -44,13 +44,13 @@ def generateStage(nodeLabel) { script { echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" + currentBuild.description = "build ${env.UFS_PLATFORM}/${UFS_COMPILER}" sh 
'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' + } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - } } } else { echo "Building on ${nodeLabel} skipped" From 4f49dda9088aa88937f3a74154e2a3594ac7b086 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:30:38 -0600 Subject: [PATCH 083/106] use nodeLabel in node Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index ee72829e77..e47d561a2d 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -23,15 +23,17 @@ def generateStage(nodeLabel) { environment { UFS_PLATFORM = "${NODE_NAME}" } + script { + currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" + } cleanWs() checkout scm script { - currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', 
flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${nodeLabel}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${nodeLabel}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } From 503944b868939653ab0ca53c133963b2a044b46b Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:33:27 -0600 Subject: [PATCH 084/106] try UFS_PLATFORM Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index e47d561a2d..094d70530e 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -24,7 +24,7 @@ def generateStage(nodeLabel) { UFS_PLATFORM = "${NODE_NAME}" } script { - currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" + currentBuild.displayName = "#${BUILD_NUMBER} ${UFS_PLATFORM}" } cleanWs() checkout scm @@ -33,7 +33,7 @@ def generateStage(nodeLabel) { sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${nodeLabel}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${nodeLabel}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } @@ -46,7 +46,7 @@ def generateStage(nodeLabel) { script { echo "Building on ${nodeLabel}" 
currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - currentBuild.description = "build ${env.UFS_PLATFORM}/${UFS_COMPILER}" + currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' From 8175086409ae455047297ff263909a89591425e4 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:36:03 -0600 Subject: [PATCH 085/106] try env.NODE_NAME Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 094d70530e..80e4457763 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -21,10 +21,10 @@ def generateStage(nodeLabel) { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { environment { - UFS_PLATFORM = "${NODE_NAME}" + UFS_PLATFORM = "${env.NODE_NAME}" } script { - currentBuild.displayName = "#${BUILD_NUMBER} ${UFS_PLATFORM}" + currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" } cleanWs() checkout scm @@ -34,6 +34,7 @@ def generateStage(nodeLabel) { } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${nodeLabel}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } From 8ad81c36d88ff4c05c13c55594914fac993f13db Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:42:28 -0600 Subject: [PATCH 086/106] try env.UFS_PLATFORM Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 80e4457763..c14d4087f2 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -32,8 +32,7 @@ def generateStage(nodeLabel) { sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } - s3Upload consoleLogLevel: 
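The s3Upload calls repeated in every stage differ only in the sourceFile glob, which is why each rename (nodeLabel, UFS_PLATFORM, NODE_NAME) has to touch so many long lines. One possible way to factor that out is a small Groovy helper; the helper name s3Put below is illustrative and not part of the repository, while the argument values are copied from the calls in these patches:

    // Sketch only: wrap the repeated S3 publisher call so the changing part
    // (the sourceFile glob) is the only argument each stage passes.
    def s3Put(String sourcePattern) {
      s3Upload(consoleLogLevel: 'INFO',
               dontSetBuildResultOnFailure: false,
               dontWaitForConcurrentBuildCompletion: false,
               entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts',
                          excludedFile: '', flatten: false, gzipFiles: false,
                          keepForever: false, managedArtifacts: true,
                          noUploadOnFailure: false, selectedRegion: 'us-east-1',
                          showDirectlyInBrowser: false,
                          sourceFile: sourcePattern,
                          storageClass: 'STANDARD', uploadFromSlave: false,
                          useServerSideEncryption: false]],
               pluginFailureResultConstraint: 'FAILURE',
               profileName: 'main', userMetadata: [])
    }

    // Example use inside a stage:
    // s3Put("${nodeLabel}-*-time-wm_init.json")

With a wrapper like this, switching the naming scheme would be a one-line change per upload instead of editing each full call.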
'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${nodeLabel}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } @@ -45,10 +44,10 @@ def generateStage(nodeLabel) { UFS_PLATFORM = "${NODE_NAME}" } script { - echo "Building on ${nodeLabel}" currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" + //currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" + echo "Building on ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } From 8c8953db49ac6c4e04b57f614e2c20dc1bef0d1d Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:48:00 -0600 Subject: [PATCH 087/106] try NODE_NAME Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index c14d4087f2..2923beb93f 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -20,11 +20,8 @@ def generateStage(nodeLabel) { return { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { - environment { - UFS_PLATFORM = "${env.NODE_NAME}" - } script { - currentBuild.displayName = "#${BUILD_NUMBER} ${env.UFS_PLATFORM}" + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" } cleanWs() checkout scm @@ -32,20 
+29,16 @@ def generateStage(nodeLabel) { sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } stage("Build on ${nodeLabel}") { if (params.WM_BUILD == true ) { node(nodeLabel) { - environment { - UFS_PLATFORM = "${NODE_NAME}" - } script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - //currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" + currentBuild.description = "build ${NODE_NAME}/${UFS_COMPILER}" echo "Building on ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' From 6305c6af152bc8a9c6b93f29249b17b0cac03e03 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 12:57:19 -0600 Subject: [PATCH 088/106] try to lower NODE_NAME Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 2923beb93f..258c835cff 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -30,7 +30,7 @@ def generateStage(nodeLabel) { sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } s3Upload consoleLogLevel: 'INFO', 
dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME,,}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } stage("Build on ${nodeLabel}") { @@ -38,14 +38,14 @@ def generateStage(nodeLabel) { node(nodeLabel) { script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - currentBuild.description = "build ${NODE_NAME}/${UFS_COMPILER}" + currentBuild.description = "build ${NODE_NAME,,}/${UFS_COMPILER}" echo "Building on ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' } - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, 
keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME,,}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } else { echo "Building on ${nodeLabel} skipped" @@ -54,7 +54,7 @@ def generateStage(nodeLabel) { stage("Test on ${nodeLabel}") { node(nodeLabel) { environment { - UFS_PLATFORM = "${NODE_NAME}" + UFS_PLATFORM = "${NODE_NAME,,}" } script { try { From 4524bc753dff767a3806783b4b0aa59dff2ff8ac Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 13:07:36 -0600 Subject: [PATCH 089/106] try to lower nodeLabel Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 258c835cff..a243683b05 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -20,17 +20,21 @@ def generateStage(nodeLabel) { return { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { + environment { + UFS_PLATFORM = nodeLabel.toLowerCase() + } script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" } cleanWs() checkout scm script { + echo "UFS_PLATFORM=${env.UFS_PLAFTORM} UFS_COMPILER=${env.UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME,,}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } } } stage("Build on ${nodeLabel}") { @@ -38,14 +42,14 @@ def generateStage(nodeLabel) { node(nodeLabel) { script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - currentBuild.description = "build ${NODE_NAME,,}/${UFS_COMPILER}" + currentBuild.description = "build ${NODE_NAME}/${UFS_COMPILER}" 
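The ${NODE_NAME,,} form tried in these commits is bash (4.0+) parameter expansion, not something a Groovy string interpolates; inside the Jenkinsfile the equivalent is nodeLabel.toLowerCase(), as the next patch switches to, while ${NODE_NAME,,} only lowercases once it reaches an actual bash shell. A short bash sketch of the expansion, with a made-up sample value:

    #!/usr/bin/env bash
    # Bash >= 4 lowercases with the ,, expansion; this works inside sh steps
    # and the .cicd scripts, but a Groovy "${NODE_NAME,,}" string does not
    # perform this expansion.
    NODE_NAME="Hera-EPIC"        # illustrative value only
    echo "${NODE_NAME,,}"        # -> hera-epic
    echo "${NODE_NAME,}"         # -> hera-EPIC (first character only)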
echo "Building on ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME,,}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } } } else { echo "Building on ${nodeLabel} skipped" @@ -54,7 +58,7 @@ def generateStage(nodeLabel) { stage("Test on ${nodeLabel}") { node(nodeLabel) { environment { - UFS_PLATFORM = "${NODE_NAME,,}" + UFS_PLATFORM = nodeLabel.toLowerCase() } script { try { From 20f10e348724aa44a437335b60aeaaf89b54f652 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 13:09:05 -0600 Subject: [PATCH 090/106] try to lower nodeLabel, replaced Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index a243683b05..0062dcb29e 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -33,7 +33,7 @@ def generateStage(nodeLabel) { sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME,,}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, 
gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -48,7 +48,7 @@ def generateStage(nodeLabel) { sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME,,}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } else { From b082520c0e84908c2eefd7a8d08b1117ac430f0c Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 13:27:52 -0600 Subject: [PATCH 091/106] try to lower nodeLabel, fix typo Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 7 +++++-- .cicd/scripts/disk_usage.sh | 4 +++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 0062dcb29e..2a606c277b 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -29,17 +29,20 @@ def generateStage(nodeLabel) { cleanWs() checkout scm script { - echo "UFS_PLATFORM=${env.UFS_PLAFTORM} UFS_COMPILER=${env.UFS_COMPILER}" + echo "UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_init.json", storageClass: 
'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } stage("Build on ${nodeLabel}") { if (params.WM_BUILD == true ) { node(nodeLabel) { + environment { + UFS_PLATFORM = nodeLabel.toLowerCase() + } script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" currentBuild.description = "build ${NODE_NAME}/${UFS_COMPILER}" diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index bf5f04b9be..5a49ebd9f0 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -10,6 +10,8 @@ # size=$3 # outfile=$4 +export UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}} +export UFS_COMPILER=${UFS_COMPILER:-intel} [[ -n ${WORKSPACE} ]] || WORKSPACE="$(pwd)" [[ -n ${UFS_PLATFORM} ]] || UFS_PLATFORM="$(hostname -s 2>/dev/null)" || UFS_PLATFORM="$(hostname 2>/dev/null)" [[ -n ${UFS_COMPILER} ]] || UFS_COMPILER="compiler" @@ -25,7 +27,7 @@ else workspace="$(cd -- "${script_dir}/../.." 
&& pwd)" fi -echo "STAGE_NAME=${STAGE_NAME}" # from pipeline +echo "STAGE_NAME=${STAGE_NAME%% *}" # from pipeline outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME%% *}.csv}" function disk_usage() { From fa667b693f0815100b7b92e59f9833bc2c99c271 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 13:56:10 -0600 Subject: [PATCH 092/106] create an init stage time file Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 16 ++++++++-------- .cicd/scripts/wm_init.sh | 20 +++++++++++++++++++- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 2a606c277b..8c4aa6d0de 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -21,7 +21,7 @@ def generateStage(nodeLabel) { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { environment { - UFS_PLATFORM = nodeLabel.toLowerCase() + UFS_PLATFORM = env.nodeLabel.toLowerCase() } script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" @@ -29,11 +29,11 @@ def generateStage(nodeLabel) { cleanWs() checkout scm script { - echo "UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" + echo "nodeLabel=${nodeLabel} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, 
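For reference, the expansions these disk_usage.sh changes lean on are plain bash: ${UFS_PLATFORM:-${NODE_NAME,,}} falls back to a lowercased NODE_NAME when UFS_PLATFORM is unset, ${UFS_COMPILER:-intel} supplies a default compiler, and ${STAGE_NAME%% *} keeps only the first word of a stage name such as "Initialize on ...". A minimal bash sketch with illustrative values (the node and stage names are made up):

    #!/usr/bin/env bash
    NODE_NAME="Hera"                                  # illustrative
    STAGE_NAME="Initialize on Hera"                   # illustrative
    UFS_PLATFORM=${UFS_PLATFORM:-${NODE_NAME,,}}      # -> hera, unless already set
    UFS_COMPILER=${UFS_COMPILER:-intel}               # -> intel by default
    echo "${STAGE_NAME%% *}"                          # -> Initialize (strips from the first space)
    echo "${UFS_PLATFORM}-${UFS_COMPILER}-disk-usage${STAGE_NAME%% *}.csv"
    # -> hera-intel-disk-usageInitialize.csv, which the pipeline's
    #    "*-disk-usageInit*.csv" upload glob would match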
useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -50,8 +50,8 @@ def generateStage(nodeLabel) { echo "Building on ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${NODE_NAME}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } else { @@ -94,8 +94,8 @@ def generateStage(nodeLabel) { currentBuild.result = 'FAILURE' } sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: 
'', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } diff --git a/.cicd/scripts/wm_init.sh b/.cicd/scripts/wm_init.sh index 231b1e9b2b..2146b9e7ae 100755 --- a/.cicd/scripts/wm_init.sh +++ b/.cicd/scripts/wm_init.sh @@ -17,6 +17,24 @@ BUILD_DIR=${BUILD_DIR:-${UFS_MODEL_DIR}/build} TESTS_DIR=${TESTS_DIR:-${UFS_MODEL_DIR}/tests} cd "${UFS_MODEL_DIR}" -pwd echo "UFS_PLATFORM=<${UFS_PLATFORM}>" echo "UFS_COMPILER=<${UFS_COMPILER}>" + +pwd +echo "NODE_NAME=${NODE_NAME}" +echo "UFS_PLATFORM=${UFS_PLATFORM}" +echo "UFS_COMPILER=${UFS_COMPILER}" +workspace=$(pwd) +export workspace +machine=${NODE_NAME} +echo "machine=<${machine}>" +machine_id=${UFS_PLATFORM,,} +if [[ ${UFS_PLATFORM} =~ clusternoaa ]] ; then + machine_id="noaacloud" +fi +echo "machine_id=<${machine_id}>" + +/usr/bin/time -p \ + -o ${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-time-wm_init.json \ + -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' \ + pwd From 5c6903b5801608afe6cfd7497ada695a2644a8d1 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 15:02:30 -0600 Subject: [PATCH 093/106] disk_usage outfile Signed-off-by: Bruce Kropp --- .cicd/scripts/disk_usage.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh index 5a49ebd9f0..533897c8cb 100755 --- a/.cicd/scripts/disk_usage.sh +++ b/.cicd/scripts/disk_usage.sh @@ -15,20 +15,23 @@ export UFS_COMPILER=${UFS_COMPILER:-intel} [[ -n ${WORKSPACE} ]] || WORKSPACE="$(pwd)" [[ -n ${UFS_PLATFORM} ]] || UFS_PLATFORM="$(hostname -s 2>/dev/null)" || UFS_PLATFORM="$(hostname 2>/dev/null)" [[ -n ${UFS_COMPILER} ]] || UFS_COMPILER="compiler" +echo "STAGE_NAME=${STAGE_NAME%% *}" # from pipeline script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" +echo "script_dir=${script_dir}" # Get repository root from Jenkins WORKSPACE 
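The /usr/bin/time call added to wm_init.sh relies on an external time binary such as GNU time (not the bash builtin): -o writes the report into the <platform>-<compiler>-time-wm_init.json file the pipeline later uploads, and -f builds the report from %-specifiers. A hedged usage sketch with a simplified format string; the output file name and the field list below are illustrative, not the exact ones in the patch:

    #!/usr/bin/env bash
    # GNU time wraps a command and writes a small JSON-shaped report to -o.
    /usr/bin/time \
      -o "${PWD}/example-intel-time-wm_init.json" \
      -f '{ "cpu": "%P", "memMaxKB": "%M", "time": { "real": "%e", "user": "%U", "sys": "%S" } }' \
      pwd
    cat "${PWD}/example-intel-time-wm_init.json"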
variable if set, otherwise, set # relative to script directory. declare workspace if [[ -d "${WORKSPACE}/${UFS_PLATFORM}" ]]; then workspace="${WORKSPACE}/${UFS_PLATFORM}" + outfile="${4:-${workspace}-${UFS_PLATFORM}-${UFS_COMPILER}-disk-usage${STAGE_NAME%% *}.csv}" else workspace="$(cd -- "${script_dir}/../.." && pwd)" + outfile="${4:-${workspace}/${UFS_PLATFORM}-${UFS_COMPILER}-disk-usage${STAGE_NAME%% *}.csv}" fi - -echo "STAGE_NAME=${STAGE_NAME%% *}" # from pipeline -outfile="${4:-${workspace}-${UFS_COMPILER}-disk-usage${STAGE_NAME%% *}.csv}" +echo "workspace=${workspace}" +echo "outfile=${outfile}" function disk_usage() { local directory="${1:-${PWD}}" From da6e094cb4f670207babca15ce4676836cb8c50c Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 15:19:32 -0600 Subject: [PATCH 094/106] define a platform var in init stage Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 8c4aa6d0de..c863be9909 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -29,20 +29,21 @@ def generateStage(nodeLabel) { cleanWs() checkout scm script { - echo "nodeLabel=${nodeLabel} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" + def MY_PLATFORM = nodeLabel.toLowerCase() + echo "nodeLabel=${nodeLabel} MY_PLATFORM=${MY_PLATFORM} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${MY_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } stage("Build on ${nodeLabel}") { if (params.WM_BUILD == true ) { node(nodeLabel) { - environment { - UFS_PLATFORM = nodeLabel.toLowerCase() - } + environment { + UFS_PLATFORM = ${nodeLabel.toLowerCase()} + 
} script { currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" currentBuild.description = "build ${NODE_NAME}/${UFS_COMPILER}" @@ -59,7 +60,8 @@ def generateStage(nodeLabel) { } } stage("Test on ${nodeLabel}") { - node(nodeLabel) { + if (params.WM_OPERATIONAL_TEST != "none" ) { + node(nodeLabel) { environment { UFS_PLATFORM = nodeLabel.toLowerCase() } @@ -98,10 +100,12 @@ def generateStage(nodeLabel) { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } - } + } + } } stage("Post from ${nodeLabel}") { - node(nodeLabel) { + if (params.WM_OPERATIONAL_TEST != "none" ) { + node(nodeLabel) { script { try { echo "Post Results from ${nodeLabel}" @@ -116,6 +120,7 @@ def generateStage(nodeLabel) { echo "Error: Post Results from ${nodeLabel}" } } + } } } } From 7f75cce95583e46faaaf6242644e936b8ce3e3ce Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 15:56:46 -0600 Subject: [PATCH 095/106] remove single quotes from UFS_PLATFORM Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index c863be9909..5a81c0d525 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -20,21 +20,18 @@ def generateStage(nodeLabel) { return { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { - environment { - UFS_PLATFORM = env.nodeLabel.toLowerCase() - } script { - currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel}" + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${params.WM_OPERATIONAL_TEST}" ) { } cleanWs() checkout scm script { - def MY_PLATFORM = nodeLabel.toLowerCase() - echo "nodeLabel=${nodeLabel} MY_PLATFORM=${MY_PLATFORM} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" + def UFS_PLATFORM = nodeLabel.replaceAll("'","") + echo "nodeLabel=${nodeLabel} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: 
"${MY_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -60,7 +57,7 @@ def generateStage(nodeLabel) { } } stage("Test on ${nodeLabel}") { - if (params.WM_OPERATIONAL_TEST != "none" ) { + if (params.WM_OPERATIONAL_TEST != 'none' ) { node(nodeLabel) { environment { UFS_PLATFORM = nodeLabel.toLowerCase() @@ -104,7 +101,7 @@ def generateStage(nodeLabel) { } } stage("Post from ${nodeLabel}") { - if (params.WM_OPERATIONAL_TEST != "none" ) { + if (params.WM_OPERATIONAL_TEST != 'none' ) { node(nodeLabel) { script { try { From 1fff46b7021d8e42169464c6e27baa8c7c918c58 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 15:58:12 -0600 Subject: [PATCH 096/106] remove single quotes from UFS_PLATFORM typo Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 5a81c0d525..c591c25fb5 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -21,7 +21,7 @@ def generateStage(nodeLabel) { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { script { - currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${params.WM_OPERATIONAL_TEST}" ) { + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${params.WM_OPERATIONAL_TEST}" } cleanWs() checkout scm From 7d3ce302f6b2325fac8b88ee32306a79ff5b9d2f Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 16:01:22 -0600 Subject: [PATCH 097/106] WM_OPERATIONAL_TESTS typo Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index c591c25fb5..86a34df6c4 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -21,7 +21,7 @@ def generateStage(nodeLabel) { stage("Initialize on ${nodeLabel}") { node(nodeLabel) { script { - currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${params.WM_OPERATIONAL_TEST}" + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${params.WM_OPERATIONAL_TESTS}" } cleanWs() checkout scm @@ -57,7 +57,7 @@ def generateStage(nodeLabel) { } } stage("Test on ${nodeLabel}") { - if (params.WM_OPERATIONAL_TEST != 'none' ) { + if (params.WM_OPERATIONAL_TESTS != 'none' ) { node(nodeLabel) { environment { UFS_PLATFORM = nodeLabel.toLowerCase() @@ -101,7 +101,7 @@ def generateStage(nodeLabel) { } } stage("Post from ${nodeLabel}") { - if (params.WM_OPERATIONAL_TEST != 'none' ) { + if (params.WM_OPERATIONAL_TESTS != 'none' ) { node(nodeLabel) { script { try { From 0a0214b753549715a8f942533e6c3ddf194467d3 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 16:13:41 -0600 Subject: [PATCH 098/106] use def UFS_PLATFORM Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/.cicd/Jenkinsfile 
b/.cicd/Jenkinsfile index 86a34df6c4..eb84dee47e 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -30,7 +30,7 @@ def generateStage(nodeLabel) { echo "nodeLabel=${nodeLabel} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageInit*.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } @@ -38,18 +38,16 @@ def generateStage(nodeLabel) { stage("Build on ${nodeLabel}") { if (params.WM_BUILD == true ) { node(nodeLabel) { - environment { - UFS_PLATFORM = ${nodeLabel.toLowerCase()} - } script { - currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${UFS_COMPILER}" - currentBuild.description = "build ${NODE_NAME}/${UFS_COMPILER}" + def UFS_PLATFORM = nodeLabel.replaceAll("'","") + currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel.replaceAll("'","")} ${UFS_COMPILER}" + currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}" echo "Building on ${nodeLabel}" sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"' sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: 
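The replaceAll("'","") calls introduced in the last few patches suggest the label strings reach generateStage wrapped in literal single quotes, and the value now lands in a plain Groovy local (def UFS_PLATFORM) so string interpolation works without any environment handling. A small sketch of that normalization; the sample label is made up:

    // Sketch only: normalize a quoted, mixed-case label before using it in
    // display names and artifact globs.
    def rawLabel = "'Hera-EPIC'"                       // illustrative input
    def UFS_PLATFORM = rawLabel.replaceAll("'", "")    // -> Hera-EPIC
    def platformLower = UFS_PLATFORM.toLowerCase()     // -> hera-epic
    echo "build ${UFS_PLATFORM}/${platformLower}"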
[[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageBuild.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } else { @@ -59,12 +57,10 @@ def generateStage(nodeLabel) { stage("Test on ${nodeLabel}") { if (params.WM_OPERATIONAL_TESTS != 'none' ) { node(nodeLabel) { - environment { - UFS_PLATFORM = nodeLabel.toLowerCase() - } script { + def UFS_PLATFORM = nodeLabel.replaceAll("'","") try { - echo "Running on ${nodeLabel}" + echo "Running Tests on ${nodeLabel}" if (baselineLabels.contains(nodeLabel)) { sh "WM_CREATE_BASELINE=true " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } @@ -72,7 +68,7 @@ def generateStage(nodeLabel) { sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"' } s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.UFS_PLATFORM}/wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, 
managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}/wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } catch(err) { @@ -93,11 +89,13 @@ def generateStage(nodeLabel) { currentBuild.result = 'FAILURE' } sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "*-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } + } else { + echo "Running Tests on ${nodeLabel} skipped" } } stage("Post from ${nodeLabel}") { From 833c2ce771c040677a0c0f469bcf35703e123307 Mon Sep 17 00:00:00 2001 From: Bruce Kropp Date: Tue, 10 Dec 2024 17:19:51 -0600 Subject: [PATCH 099/106] save test_results and zip of logs Signed-off-by: Bruce Kropp --- .cicd/Jenkinsfile | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index eb84dee47e..6c8844a269 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -18,10 +18,10 @@ modifiedLabels = matchedNode.collect{"'" + it + "'"} baselineLabels = generateBaselineNode.collect{"'" + it + "'"} def generateStage(nodeLabel) { return { - stage("Initialize on ${nodeLabel}") { + stage("Initialize on 
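The change above replaces the wildcard S3 globs ("*-*-time-wm_init.json" and friends) with ${UFS_PLATFORM}-prefixed ones, where UFS_PLATFORM is the node label with the wrapping single quotes (added by modifiedLabels = matchedNode.collect{"'" + it + "'"}) stripped via nodeLabel.replaceAll("'",""). A shell-level sketch of the same string handling, useful for checking the globs by hand; the sample label and file name below are hypothetical, not values taken from the pipeline:

    #!/bin/bash
    # Strip the quotes that the Jenkinsfile wraps around each matched label.
    nodeLabel="'hera'"                              # hypothetical quoted label
    UFS_PLATFORM=$(tr -d "'" <<< "${nodeLabel}")    # -> hera

    # The upload step then looks for platform-specific timing artifacts, e.g.:
    ls "${UFS_PLATFORM}"-*-time-wm_init.json 2>/dev/null \
      || echo "no wm_init timing artifact found for ${UFS_PLATFORM}"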
From 833c2ce771c040677a0c0f469bcf35703e123307 Mon Sep 17 00:00:00 2001
From: Bruce Kropp
Date: Tue, 10 Dec 2024 17:19:51 -0600
Subject: [PATCH 099/106] save test_results and zip of logs

Signed-off-by: Bruce Kropp
---
 .cicd/Jenkinsfile | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index eb84dee47e..6c8844a269 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -18,10 +18,10 @@ modifiedLabels = matchedNode.collect{"'" + it + "'"}
 baselineLabels = generateBaselineNode.collect{"'" + it + "'"}
 def generateStage(nodeLabel) {
   return {
-    stage("Initialize on ${nodeLabel}") {
+    stage("Initialize on ${nodeLabel.replaceAll("'","")}") {
       node(nodeLabel) {
         script {
-          currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel} ${params.WM_OPERATIONAL_TESTS}"
+          currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel.replaceAll("'","")} test=${params.WM_OPERATIONAL_TESTS}"
         }
         cleanWs()
         checkout scm
@@ -35,13 +35,13 @@ def generateStage(nodeLabel) {
         }
       }
     }
-    stage("Build on ${nodeLabel}") {
+    stage("Build on ${nodeLabel.replaceAll("'","")}") {
       if (params.WM_BUILD == true ) {
         node(nodeLabel) {
           script {
             def UFS_PLATFORM = nodeLabel.replaceAll("'","")
-            currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel.replaceAll("'","")} ${UFS_COMPILER}"
-            currentBuild.description = "build ${UFS_PLATFORM}/${UFS_COMPILER}"
+            //currentBuild.displayName = "#${BUILD_NUMBER} ${nodeLabel.replaceAll("'","")} ${UFS_COMPILER}"
+            currentBuild.description = "build ${UFS_PLATFORM}-${UFS_COMPILER}"
 
             echo "Building on ${nodeLabel}"
             sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_build.sh"'
@@ -54,7 +54,7 @@ def generateStage(nodeLabel) {
         echo "Building on ${nodeLabel} skipped"
       }
     }
-    stage("Test on ${nodeLabel}") {
+    stage("Test on ${nodeLabel.replaceAll("'","")}") {
       if (params.WM_OPERATIONAL_TESTS != 'none' ) {
         node(nodeLabel) {
           script {
@@ -68,8 +68,7 @@ def generateStage(nodeLabel) {
                 sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"'
               }
               s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
-              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}/wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
-
+              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "tests/wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
             }
             catch(err) {
               sh '''
@@ -92,16 +91,19 @@ def generateStage(nodeLabel) {
             s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
             s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+            sh "[[ -d tests/logs ]] && cd tests/logs && tar --create --gzip --verbose --dereference --file ../../wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz log_${UFS_PLATFORM}/* RegressionTests_${UFS_PLATFORM}.log || cat /dev/null > ../../wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz"
+            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
           }
         }
       } else {
         echo "Running Tests on ${nodeLabel} skipped"
       }
     }
 
-    stage("Post from ${nodeLabel}") {
+    stage("Post from ${nodeLabel.replaceAll("'","")}") {
       if (params.WM_OPERATIONAL_TESTS != 'none' ) {
         node(nodeLabel) {
           script {
+            def UFS_PLATFORM = nodeLabel.replaceAll("'","")
             try {
               echo "Post Results from ${nodeLabel}"
               if (baselineLabels.contains(nodeLabel)) {

From ce090f4fb805d1ea38f0153f8aa5ec6abf4c2fac Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Tue, 10 Dec 2024 17:38:31 -0600
Subject: [PATCH 100/106] save zips

Signed-off-by: epic-cicd-jenkins
---
 .cicd/Jenkinsfile                  | 1 +
 .cicd/scripts/post_test_results.sh | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 6c8844a269..a4a9e9bab2 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -106,6 +106,7 @@ def generateStage(nodeLabel) {
             def UFS_PLATFORM = nodeLabel.replaceAll("'","")
             try {
               echo "Post Results from ${nodeLabel}"
+              sh 'bash --login "ls -l ${WORKSPACE}/.cicd/scripts"'
               if (baselineLabels.contains(nodeLabel)) {
                 sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} BL"'
               }
diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh
index f100820b47..049c745e7e 100755
--- a/.cicd/scripts/post_test_results.sh
+++ b/.cicd/scripts/post_test_results.sh
@@ -13,6 +13,8 @@ export ACCNR=epic
 export account="-a ${ACCNR}"
 
+which jq
+
 set -eu
 
 SCRIPT_REALPATH=$(realpath "${BASH_SOURCE[0]}")
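Patch 100 drops a bare `which jq` into post_test_results.sh as a visibility check, since the script pipes GitHub API responses through jq (curl ... | jq -r '.head.repo.ssh_url'). A slightly stricter variant of the same probe, shown only as a sketch and not part of the patch, would fail fast with a clear message when jq is missing:

    #!/bin/bash
    # Abort early if the JSON parser the script depends on is not installed.
    if ! command -v jq >/dev/null 2>&1; then
      echo "ERROR: jq is required to parse GitHub API responses" >&2
      exit 1
    fi
    jq --version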
From 3b45b808d3ffbc04b47f65cb9c07011e0ce0242a Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Tue, 10 Dec 2024 19:07:31 -0600
Subject: [PATCH 101/106] use environment vars in post script

Signed-off-by: epic-cicd-jenkins
---
 .cicd/Jenkinsfile                  | 7 +++++--
 .cicd/scripts/post_test_results.sh | 4 ++--
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index a4a9e9bab2..e4f238f792 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -66,9 +66,12 @@ def generateStage(nodeLabel) {
               }
               else {
                 sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"'
+                sh '''
+                ${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} RT
+                '''
               }
               s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
-              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "tests/wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "./wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
             }
             catch(err) {
               sh '''
@@ -111,7 +114,7 @@ def generateStage(nodeLabel) {
                 sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} BL"'
               }
               else {
-                sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} RT"'
+                sh "WM_TEST_LABEL=RT " + 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh"'
               }
             }
             catch(err) {
diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh
index 049c745e7e..17007dc59f 100755
--- a/.cicd/scripts/post_test_results.sh
+++ b/.cicd/scripts/post_test_results.sh
@@ -1,9 +1,9 @@
 #!/bin/bash -x
 
 # RT - RegressionTest label
-# BL - Baselins label
+# BL - Baseline label
 export machine=${1:-${NODE_NAME}}
-label=$2
+label=${2:-${WM_TEST_LABEL}}
 [[ -n "${label}" ]] || exit 1
 
 export PATH=${PATH}:~/bin
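Patch 101 lets post_test_results.sh take its inputs either as positional arguments or from the environment (machine=${1:-${NODE_NAME}}, label=${2:-${WM_TEST_LABEL}}), which is what allows the Jenkinsfile to call it as "WM_TEST_LABEL=RT bash --login .../post_test_results.sh" with no arguments. A minimal sketch of that defaulting pattern, with hypothetical values:

    #!/bin/bash
    # Positional arguments win; otherwise fall back to the environment.
    machine=${1:-${NODE_NAME}}
    label=${2:-${WM_TEST_LABEL}}
    [[ -n "${label}" ]] || { echo "no label given (argument 2 or WM_TEST_LABEL)" >&2; exit 1; }
    echo "posting ${label} results for ${machine}"

    # Equivalent invocations:
    #   ./post_test_results.sh Hera RT
    #   WM_TEST_LABEL=RT NODE_NAME=Hera ./post_test_results.sh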
From 8abcea5942c5d7fc621a163a16a040510dbf93f0 Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Wed, 11 Dec 2024 13:05:10 -0600
Subject: [PATCH 102/106] post zip and add call to ufs-wm-metrics job

Signed-off-by: epic-cicd-jenkins
---
 .cicd/Jenkinsfile | 25 +++++++++++++++++++------
 1 file changed, 19 insertions(+), 6 deletions(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index e4f238f792..2021f2be97 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -66,12 +66,10 @@ def generateStage(nodeLabel) {
               }
               else {
                 sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"'
-                sh '''
-                ${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} RT
-                '''
+                //sh ''' ${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} RT '''
               }
               s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
-              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "./wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
             }
             catch(err) {
               sh '''
@@ -109,9 +107,9 @@ def generateStage(nodeLabel) {
             def UFS_PLATFORM = nodeLabel.replaceAll("'","")
             try {
               echo "Post Results from ${nodeLabel}"
-              sh 'bash --login "ls -l ${WORKSPACE}/.cicd/scripts"'
               if (baselineLabels.contains(nodeLabel)) {
-                sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} BL"'
+                //sh 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} BL"'
+                sh "WM_TEST_LABEL=BL " + 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh"'
               }
               else {
                 sh "WM_TEST_LABEL=RT " + 'bash --login "${WORKSPACE}/.cicd/scripts/post_test_results.sh"'
@@ -188,5 +186,20 @@ pipeline {
       '''
       }
     }
+    always {
+      script {
+        // Trigger another job to collect all build statistics
+        CI_JOB_NAME=env.JOB_NAME.replace("/${env.BRANCH_NAME}","")
+        CI_BRANCH_NAME=env.BRANCH_NAME.replace("%2F","%252F")
+        echo "post: Triggering ufs-weather-model/ufs-wm-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..."
+
+        build job: '/ufs-weather-model/ufs-wm-metrics', parameters: [
+          string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"),
+          string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
+        ], wait: false
+
+        echo "#### post: ufs-weather-model/ufs-wm-metrics COMPLETE."
+      }
+    }
   }
 }
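The always{} block added in patch 102 derives the parameters for the downstream ufs-wm-metrics job from Jenkins' own variables: it strips the trailing "/<branch>" from JOB_NAME and re-escapes the "%2F" that Jenkins uses for "/" in branch names before passing them along. The same transformation, sketched in shell purely as an illustration; the job and branch names below are hypothetical:

    #!/bin/bash
    JOB_NAME="ufs-weather-model/PR-1234"     # hypothetical multibranch job name
    BRANCH_NAME="PR-1234"
    BUILD_NUMBER=42

    CI_JOB_NAME=${JOB_NAME%"/${BRANCH_NAME}"}       # -> ufs-weather-model
    CI_BRANCH_NAME=${BRANCH_NAME//%2F/%252F}        # double-escape any %2F in the branch name
    echo "CI_JOB_NAME=${CI_JOB_NAME} CI_BUILD_NUMBER=${CI_BRANCH_NAME}/${BUILD_NUMBER}"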
From 9fa5ba6d5f4633ec098103b59372644481ffb3ab Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Wed, 11 Dec 2024 14:07:13 -0600
Subject: [PATCH 103/106] create a logs tar

Signed-off-by: epic-cicd-jenkins
---
 .cicd/Jenkinsfile                  |  3 +--
 .cicd/scripts/post_test_results.sh | 20 ++++++++++++--------
 2 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 2021f2be97..e981d39fb5 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -27,7 +27,7 @@ def generateStage(nodeLabel) {
         checkout scm
         script {
           def UFS_PLATFORM = nodeLabel.replaceAll("'","")
-          echo "nodeLabel=${nodeLabel} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${env.UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}"
+          echo "nodeLabel=${nodeLabel} NODE_NAME=${NODE_NAME} UFS_PLATFORM=${UFS_PLATFORM} UFS_COMPILER=${env.UFS_COMPILER}"
           sh 'bash --login "${WORKSPACE}/.cicd/scripts/wm_init.sh"'
           sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"'
           s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
@@ -66,7 +66,6 @@ def generateStage(nodeLabel) {
               }
               else {
                 sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"'
-                //sh ''' ${WORKSPACE}/.cicd/scripts/post_test_results.sh ${NODE_NAME} RT '''
               }
               s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
               s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh
index 17007dc59f..e3766a0f74 100755
--- a/.cicd/scripts/post_test_results.sh
+++ b/.cicd/scripts/post_test_results.sh
@@ -48,19 +48,23 @@ function post_test() {
   SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url')
   git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1
   git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1
-  #git add logs/RegressionTests_${machine,,}.log
-  #git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community "
+  git add tests/logs/RegressionTests_${machine,,}.log
+  git status
+  git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community "
 
-  #FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref')
-  #git pull sshorigin ${FORK_BRANCH}
+  FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref')
+  git pull sshorigin ${FORK_BRANCH}
+  git status
   #git push sshorigin HEAD:${FORK_BRANCH}
-
-  #tar --create --gzip --verbose --dereference --file "${machine,,}.tgz" ${WORKSPACE}/tests/logs/*.log
   echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}"
-  #GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4)
-  #GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1)
+  GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4)
+  GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1)
+  echo "GIT_OWNER=${GIT_OWNER} GIT_REPO_NAME=${GIT_REPO_NAME}"
+  echo "https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label}"
   #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label}
 }
 
+pwd
 post_test "${machine}" "${label}"
+tar --create --gzip --verbose --dereference --file "${machine,,}.tgz" tests/logs/*.log
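Patch 103 builds the log archive name from the bash lowercase expansion ${machine,,}, so a machine name such as "Hera" becomes "hera.tgz", and tars tests/logs/*.log with --dereference so symlinked logs are stored as real files. A standalone sketch of that step; the machine name and the guard around missing logs are illustrative assumptions, not part of the patch:

    #!/bin/bash
    machine=${NODE_NAME:-Hera}        # hypothetical node name
    archive="${machine,,}.tgz"        # bash 4+ lowercase expansion -> hera.tgz

    # Only create the archive if there is at least one log to pack.
    if compgen -G "tests/logs/*.log" > /dev/null; then
      tar --create --gzip --verbose --dereference --file "${archive}" tests/logs/*.log
    else
      echo "no logs found under tests/logs; skipping ${archive}"
    fi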
From a7426e0b793648d181d2a90f23c2727ae011fcc3 Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Wed, 11 Dec 2024 15:04:30 -0600
Subject: [PATCH 104/106] debug GIT vars

Signed-off-by: epic-cicd-jenkins
---
 .cicd/scripts/post_test_results.sh | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh
index e3766a0f74..4389d3af4a 100755
--- a/.cicd/scripts/post_test_results.sh
+++ b/.cicd/scripts/post_test_results.sh
@@ -42,6 +42,7 @@ function post_test() {
   echo "GIT_URL=${GIT_URL}"
   echo "CHANGE_ID=${CHANGE_ID}"
 
+set -x
   git config user.email "ecc.platform@noaa.gov"
   git config user.name "epic-cicd-jenkins"
 
@@ -57,14 +58,17 @@ function post_test() {
   git status
   #git push sshorigin HEAD:${FORK_BRANCH}
 
-  echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}"
   GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4)
   GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1)
+set +x
+
+  echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}"
   echo "GIT_OWNER=${GIT_OWNER} GIT_REPO_NAME=${GIT_REPO_NAME}"
   echo "https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label}"
   #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label}
 }
 
 pwd
-post_test "${machine}" "${label}"
 tar --create --gzip --verbose --dereference --file "${machine,,}.tgz" tests/logs/*.log
+set +x
+post_test "${machine}" "${label}"

From fe9a128780d23e03c48703cae48cc0cb2545ed0c Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Wed, 11 Dec 2024 17:28:34 -0600
Subject: [PATCH 105/106] move log tar-ing to Jenkinsfile

Signed-off-by: epic-cicd-jenkins
---
 .cicd/Jenkinsfile                  | 29 ++++++++++++++++-------------
 .cicd/scripts/post_test_results.sh | 25 +++++++++++++------------
 2 files changed, 29 insertions(+), 25 deletions(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index e981d39fb5..563cad9d07 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -67,32 +67,35 @@ def generateStage(nodeLabel) {
               else {
                 sh "WM_CREATE_BASELINE=false " + 'bash --login "${WORKSPACE}/.cicd/scripts/wm_test.sh"'
               }
-              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
               s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "wm_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
-            }
-            catch(err) {
+            }
+            catch(err) {
               sh '''
               export machine=${NODE_NAME}
               export CHANGE_ID=${CHANGE_ID}
-
-              cd ${WORKSPACE}/tests
-              export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }')
-              tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log
-
              echo "Testing concluded...removing labels for $machine from $GIT_URL"
              GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4)
              GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1)
              #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL}
              '''
-              s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
               currentBuild.result = 'FAILURE'
             }
+
+            sh '''
+            export machine=${NODE_NAME}
+            export CHANGE_ID=${CHANGE_ID}
+            cd ${WORKSPACE}/tests
+            export machine_name_logs=$(echo $machine | awk '{ print tolower($1) }')
+            tar --create --gzip --verbose --dereference --file "${machine_name_logs}.tgz" ${WORKSPACE}/tests/logs/*.log
+            '''
+            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: true, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "**/*tgz*", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+
             sh "STAGE_NAME='${env.STAGE_NAME}' " + 'bash --login "${WORKSPACE}/.cicd/scripts/disk_usage.sh"'
-            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
-            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-time-wm_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${UFS_PLATFORM}-*-disk-usageTest.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
 
-            sh "[[ -d tests/logs ]] && cd tests/logs && tar --create --gzip --verbose --dereference --file ../../wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz log_${UFS_PLATFORM}/* RegressionTests_${UFS_PLATFORM}.log || cat /dev/null > ../../wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz"
-            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+            sh "[[ -d tests/logs ]] && cd tests/logs && tar --create --gzip --verbose --dereference --file ../../wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz log_${UFS_PLATFORM}/* RegressionTests_${UFS_PLATFORM}.log || cat /dev/null > ../../wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz"
+            s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "wm_test_logs-${UFS_PLATFORM}-${env.UFS_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
           }
         }
       } else {
diff --git a/.cicd/scripts/post_test_results.sh b/.cicd/scripts/post_test_results.sh
index 4389d3af4a..0283f76733 100755
--- a/.cicd/scripts/post_test_results.sh
+++ b/.cicd/scripts/post_test_results.sh
@@ -1,7 +1,8 @@
-#!/bin/bash -x
+#!/bin/bash
 # RT - RegressionTest label
 # BL - Baseline label
+set -x
 
 export machine=${1:-${NODE_NAME}}
 label=${2:-${WM_TEST_LABEL}}
 [[ -n "${label}" ]] || exit 1
@@ -43,33 +44,33 @@ function post_test() {
   echo "GIT_URL=${GIT_URL}"
   echo "CHANGE_ID=${CHANGE_ID}"
+  GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4)
+  GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1)
+  echo "GIT_OWNER=${GIT_OWNER} GIT_REPO_NAME=${GIT_REPO_NAME}"
+
 set -x
   git config user.email "ecc.platform@noaa.gov"
   git config user.name "epic-cicd-jenkins"
 
-  SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url')
-  git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1
-  git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1
   git add tests/logs/RegressionTests_${machine,,}.log
   git status
   git commit -m "[AutoRT] ${machine} Job Completed.\n\n\n on-behalf-of @ufs-community "
 
+  SSH_ORIGIN=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.repo.ssh_url')
+  git remote -v | grep -w sshorigin > /dev/null 2>&1 && git remote remove sshorigin > /dev/null 2>&1
+  git remote add sshorigin ${SSH_ORIGIN} > /dev/null 2>&1 || return 0
+
   FORK_BRANCH=$(curl --silent "https://api.github.com/repos/ufs-community/ufs-weather-model/pulls/${CHANGE_ID}" | jq -r '.head.ref')
-  git pull sshorigin ${FORK_BRANCH}
+  git pull sshorigin ${FORK_BRANCH} || return 0
   git status
-  #git push sshorigin HEAD:${FORK_BRANCH}
-
-  GIT_OWNER=$(echo ${GIT_URL} | cut -d '/' -f4)
-  GIT_REPO_NAME=$(echo ${GIT_URL} | cut -d '/' -f5 | cut -d '.' -f1)
+  git push sshorigin HEAD:${FORK_BRANCH} || return 0
 set +x
 
   echo "Testing concluded...removing label ${label} for ${machine} from ${GIT_URL}"
-  echo "GIT_OWNER=${GIT_OWNER} GIT_REPO_NAME=${GIT_REPO_NAME}"
-  echo "https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label}"
+  echo "https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels /${machine}-${label}"
   #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/${machine}-${label}
 }
 
 pwd
-tar --create --gzip --verbose --dereference --file "${machine,,}.tgz" tests/logs/*.log
 set +x
 post_test "${machine}" "${label}"
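Patch 105 makes the git hand-off best-effort: adding the fork remote, pulling, and pushing are each followed by "|| return 0" so a missing remote or a rejected push ends post_test() quietly instead of failing the stage, while the Jenkinsfile's tar step falls back to writing an empty .tgz so the later s3Upload always finds its file. A compact sketch of both guard patterns; the function name and archive name here are illustrative, and SSH_ORIGIN/FORK_BRANCH are the variables the script already defines:

    #!/bin/bash
    push_logs() {
      # Each remote operation bails out of the function without an error status.
      git remote add sshorigin "${SSH_ORIGIN}" > /dev/null 2>&1 || return 0
      git pull sshorigin "${FORK_BRANCH}"                       || return 0
      git push sshorigin "HEAD:${FORK_BRANCH}"                  || return 0
    }

    # Always leave an artifact behind, even when there are no logs to pack.
    [[ -d tests/logs ]] && tar -czf wm_test_logs.tgz tests/logs || : > wm_test_logs.tgz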
From 00e800baf8062eb6457c79533fe60f6c41cc66c7 Mon Sep 17 00:00:00 2001
From: epic-cicd-jenkins
Date: Wed, 11 Dec 2024 17:59:07 -0600
Subject: [PATCH 106/106] print debugging for posting results to repo

Signed-off-by: epic-cicd-jenkins
---
 .cicd/Jenkinsfile | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 563cad9d07..a606e96b29 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -73,9 +73,12 @@ def generateStage(nodeLabel) {
               sh '''
               export machine=${NODE_NAME}
               export CHANGE_ID=${CHANGE_ID}
-              echo "Testing concluded...removing labels for $machine from $GIT_URL"
               GIT_OWNER=$(echo $GIT_URL | cut -d '/' -f4)
               GIT_REPO_NAME=$(echo $GIT_URL | cut -d '/' -f5 | cut -d '.' -f1)
+              set +x
+
+              echo "Testing concluded...removing labels for $machine from $GIT_URL"
+              echo "https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels /{$machine-RT,$machine-BL}"
               #curl --silent -X DELETE -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/${GIT_OWNER}/${GIT_REPO_NAME}/issues/${CHANGE_ID}/labels/{$machine-RT,$machine-BL}
               '''
               currentBuild.result = 'FAILURE'