From 5a8a5aa13b0143c871dc466e0ed062c55c7cd573 Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Thu, 31 Oct 2024 12:10:07 -0400
Subject: [PATCH 1/3] Fix the name of the TC tracker filenames in archive.py (#3030)

This corrects the names of the product files created by the `tracker` job, which are used when the `arch` job attempts to rename the experiment and push the files to the `ARCDIR`.
---
 ush/python/pygfs/task/archive.py | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)

diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py
index d138474e9a..108cd2ed27 100644
--- a/ush/python/pygfs/task/archive.py
+++ b/ush/python/pygfs/task/archive.py
@@ -88,11 +88,6 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str
         if not os.path.isdir(arch_dict.ROTDIR):
             raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!")
 
-        if arch_dict.RUN in ["gdas", "gfs"]:
-
-            # Copy the cyclone track files and rename the experiments
-            Archive._rename_cyclone_expt(arch_dict)
-
         # Collect datasets that need to be archived
         # Each dataset represents one tarball
 
@@ -371,14 +366,14 @@ def _rename_cyclone_expt(arch_dict) -> None:
 
         if run == "gfs":
             in_track_file = (track_dir_in + "/avno.t" +
-                             cycle_HH + "z.cycle.trackatcfunix")
+                             cycle_HH + "z.cyclone.trackatcfunix")
             in_track_p_file = (track_dir_in + "/avnop.t" +
-                               cycle_HH + "z.cycle.trackatcfunixp")
+                               cycle_HH + "z.cyclone.trackatcfunix")
         elif run == "gdas":
             in_track_file = (track_dir_in + "/gdas.t" +
-                             cycle_HH + "z.cycle.trackatcfunix")
+                             cycle_HH + "z.cyclone.trackatcfunix")
             in_track_p_file = (track_dir_in + "/gdasp.t" +
-                               cycle_HH + "z.cycle.trackatcfunixp")
+                               cycle_HH + "z.cyclone.trackatcfunix")
 
         if not os.path.isfile(in_track_file):
             # Do not attempt to archive the outputs
@@ -416,7 +411,7 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s
             with open("/tmp/track_file", "w") as new_file:
                 new_file.writelines(out_lines)
 
-            shutil.move("tmp/track_file", filename_out)
+            shutil.move("/tmp/track_file", filename_out)
 
         replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4)
         replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4)

From ca8cd7af51daa20636a2045feb95105dc5c3510d Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Thu, 31 Oct 2024 20:37:07 +0000
Subject: [PATCH 2/3] Auto provisioning of PW clusters from GitHub CI added (#3051)

# Description

This update to the GitHub-dispatched CI pipeline that executes the self-hosted GitHub Runner on Parallel Works adds automatic start-up of the virtual compute cluster. We now have a complete end-to-end automated process for running CI tests in Parallel Works. Next steps are automated tear-down and adding more tests to see whether it scales.

It also includes the update for loading a PR when it originates from a forked repo.

# Type of change

- [ ] Bug fix (fixes something broken)
- [x] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)

# Change characteristics

- Is this a breaking change (a change in existing functionality)? NO
- Does this change require a documentation update? YES
- Does this change require an update to any of the following submodules? NO (If YES, please add a link to any PRs that are pending.)
  - [ ] EMC verif-global
  - [ ] GDAS
  - [ ] GFS-utils
  - [ ] GSI
  - [ ] GSI-monitor
  - [ ] GSI-utils
  - [ ] UFS-utils
  - [ ] UFS-weather-model
  - [ ] wxflow

# How has this been tested?

The start-up aspect has been tested from my forked repo, but the forked-repo path itself could not be tested there. Testing from forked repos will have to wait until the workflow pipeline is in the **develop** branch.

# Checklist

- [x] Any dependent changes have been merged and published
- [x] My code follows the style guidelines of this project
- [x] I have performed a self-review of my own code
- [x] I have commented my code, particularly in hard-to-understand areas
- [ ] I have documented my code, including function, input, and output descriptions
- [x] My changes generate no new warnings
- [x] New and existing tests pass with my changes
- [x] This change is covered by an existing CI test or a new one has been added
- [ ] Any new scripts have been added to the .github/CODEOWNERS file with owners
- [ ] I have made corresponding changes to the system documentation if necessary

---------

Co-authored-by: tmcguinness
Co-authored-by: tmcguinness
---
 .github/workflows/pw_aws_ci.yaml | 36 +++++++++++++++++++++++++++++++-
 1 file changed, 35 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index 245e219dd4..c59f027920 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -31,24 +31,57 @@ env:
   MACHINE_ID: noaacloud
 
 jobs:
+
+  run-start-clusters:
+    runs-on: ubuntu-latest
+    env:
+      PW_PLATFORM_HOST: noaa.parallel.works
+    steps:
+      - name: Checkout pw-cluster-automation repository
+        uses: actions/checkout@v4
+        with:
+          repository: TerrenceMcGuinness-NOAA/pw-cluster-automation
+          path: pw-cluster-automation
+          ref: pw_cluster_noaa
+
+      - name: Run startClusters
+        run: |
+          mkdir -p ~/.ssh
+          echo "${{ secrets.ID_RSA_AWS }}" > ~/.ssh/id_rsa
+          echo "${{ secrets.PW_API_KEY }}" > ~/.ssh/pw_api.key
+          chmod 700 ~/.ssh
+          chmod 600 ~/.ssh/id_rsa
+          chmod 600 ~/.ssh/pw_api.key
+          if [ "${{ github.event.inputs.os }}" == "rocky" ]; then
+            clustername="globalworkflowciplatformrocky8"
+          elif [ "${{ github.event.inputs.os }}" == "centos" ]; then
+            clustername="awsemctmcgc7i48xlargeciplatform"
+          fi
+          python3 pw-cluster-automation/startClusters.py $clustername
+
   fetch-branch:
+    needs: run-start-clusters
     runs-on: ubuntu-latest
     env:
       GH_TOKEN: ${{ secrets.GITHUBTOKEN }}
     outputs:
       branch: ${{ steps.get-branch.outputs.branch }}
+      repo: ${{ steps.get-branch.outputs.repo }}
     steps:
-      - name: Fetch branch name for PR
+      - name: Fetch branch name and repo for PR
         id: get-branch
         run: |
           pr_number=${{ github.event.inputs.pr_number }}
           repo=${{ github.repository }}
           if [ "$pr_number" -eq "0" ]; then
             branch=${{ github.event.inputs.ref }}
+            repo_url="https://github.com/${{ github.repository_owner }}/${{ github.repository }}.git"
           else
             branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
+            repo_url=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.url')
           fi
           echo "::set-output name=branch::$branch"
+          echo "::set-output name=repo::$repo_url"
 
   checkout:
     needs: fetch-branch
@@ -64,6 +97,7 @@ jobs:
       with:
         path: ${{ github.run_id }}/HOMEgfs
         submodules: 'recursive'
+        repository: ${{ needs.fetch-branch.outputs.repo }}
         ref: ${{ needs.fetch-branch.outputs.branch }}
 
   build-link:

From d95630a56bf8b1ac430b33f687259cf44cc63b76 Mon Sep 17 00:00:00 2001
From: Eric Sinsky - NOAA <48259628+EricSinsky-NOAA@users.noreply.github.com>
Date: Fri, 1 Nov 2024 02:13:02 -0400
Subject: [PATCH 3/3] Add more ocean variables for post-processing in GEFS (#2995)

This PR adds an ocean variable `tob` (Sea Water Potential Temperature at Sea Floor) for post-processing in GEFS, which is a variable that has been requested for GEFSv13 and the reforecast. Also, this PR moves the atmos variable `PEVPR` from the "b" group to the "a" group of pgrb products in GEFS. This was requested by a reforecast stakeholder.

Resolves #2993
---
 parm/post/oceanice_products_gefs.yaml       | 2 +-
 parm/product/gefs.0p25.fFFF.paramlist.a.txt | 1 +
 parm/product/gefs.0p25.fFFF.paramlist.b.txt | 1 -
 sorc/gfs_utils.fd                           | 2 +-
 4 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml
index fea88df2bb..f961fab83f 100644
--- a/parm/post/oceanice_products_gefs.yaml
+++ b/parm/post/oceanice_products_gefs.yaml
@@ -33,7 +33,7 @@ ocean:
   {% elif model_grid == 'mx500' %}
   ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
   {% endif %}
-  subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
+  subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'tob', 'so', 'uo', 'vo']
   data_in:
     copy:
       - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]

diff --git a/parm/product/gefs.0p25.fFFF.paramlist.a.txt b/parm/product/gefs.0p25.fFFF.paramlist.a.txt
index 303752ac17..4bb87c32ff 100644
--- a/parm/product/gefs.0p25.fFFF.paramlist.a.txt
+++ b/parm/product/gefs.0p25.fFFF.paramlist.a.txt
@@ -19,6 +19,7 @@
 :CIN:180-0 mb above ground:
 :CIN:surface:
 :HLCY:3000-0 m above ground:
+:PEVPR:surface:
 :TCDC:entire atmosphere (considered as a single layer):
 :WEASD:surface:
 :SNOD:surface:

diff --git a/parm/product/gefs.0p25.fFFF.paramlist.b.txt b/parm/product/gefs.0p25.fFFF.paramlist.b.txt
index ccad9da4d0..5c406ce34d 100644
--- a/parm/product/gefs.0p25.fFFF.paramlist.b.txt
+++ b/parm/product/gefs.0p25.fFFF.paramlist.b.txt
@@ -151,7 +151,6 @@
 :O3MR:5 mb:
 :O3MR:70 mb:
 :O3MR:7 mb:
-:PEVPR:surface:
 :PLI:30-0 mb above ground:
 :PLPL:255-0 mb above ground:
 :POT:0.995 sigma level:

diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd
index a00cc0949e..856a42076a 160000
--- a/sorc/gfs_utils.fd
+++ b/sorc/gfs_utils.fd
@@ -1 +1 @@
-Subproject commit a00cc0949e2f901e73b58d54834517743916c69a
+Subproject commit 856a42076a65256aaae9b29f4891532cb4a3fbca