Merge branch 'develop' into feature/marineenvar
guillaumevernieres authored Nov 1, 2024
2 parents 5c1148a + d95630a commit 2f51957
Showing 6 changed files with 43 additions and 14 deletions.
36 changes: 35 additions & 1 deletion .github/workflows/pw_aws_ci.yaml
@@ -31,24 +31,57 @@ env:
MACHINE_ID: noaacloud

jobs:

run-start-clusters:
runs-on: ubuntu-latest
env:
PW_PLATFORM_HOST: noaa.parallel.works
steps:
- name: Checkout pw-cluster-automation repository
uses: actions/checkout@v4
with:
repository: TerrenceMcGuinness-NOAA/pw-cluster-automation
path: pw-cluster-automation
ref: pw_cluster_noaa

- name: Run startClusters
run: |
mkdir -p ~/.ssh
echo "${{ secrets.ID_RSA_AWS }}" > ~/.ssh/id_rsa
echo "${{ secrets.PW_API_KEY }}" > ~/.ssh/pw_api.key
chmod 700 ~/.ssh
chmod 600 ~/.ssh/id_rsa
chmod 600 ~/.ssh/pw_api.key
if [ "${{ github.event.inputs.os }}" == "rocky" ]; then
clustername="globalworkflowciplatformrocky8"
elif [ "${{ github.event.inputs.os }}" == "centos" ]; then
clustername="awsemctmcgc7i48xlargeciplatform"
fi
python3 pw-cluster-automation/startClusters.py $clustername
fetch-branch:
needs: run-start-clusters
runs-on: ubuntu-latest
env:
GH_TOKEN: ${{ secrets.GITHUBTOKEN }}
outputs:
branch: ${{ steps.get-branch.outputs.branch }}
repo: ${{ steps.get-branch.outputs.repo }}
steps:
- name: Fetch branch name for PR
- name: Fetch branch name and repo for PR
id: get-branch
run: |
pr_number=${{ github.event.inputs.pr_number }}
repo=${{ github.repository }}
if [ "$pr_number" -eq "0" ]; then
branch=${{ github.event.inputs.ref }}
repo_url="https://github.com/${{ github.repository_owner }}/${{ github.repository }}.git"
else
branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
repo_url=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.url')
fi
echo "::set-output name=branch::$branch"
echo "::set-output name=repo::$repo_url"
checkout:
needs: fetch-branch
@@ -64,6 +97,7 @@ jobs:
with:
path: ${{ github.run_id }}/HOMEgfs
submodules: 'recursive'
repository: ${{ needs.fetch-branch.outputs.repo }}
ref: ${{ needs.fetch-branch.outputs.branch }}

build-link:
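
For reference, a minimal Python sketch of the branch/repository lookup the new fetch-branch job performs with the gh CLI. The helper name, the PR-number-zero fallback, and the example repository below are illustrative; the JSON field names (headRefName, headRepository.url) are the ones the workflow itself queries.

```python
import json
import subprocess


def resolve_pr_source(pr_number: int, repo: str, default_ref: str = "develop") -> dict:
    """Return the head branch and repository URL for a PR, mirroring the
    fetch-branch job: PR number 0 falls back to a named ref on the base repo."""
    if pr_number == 0:
        # Illustrative fallback only; the workflow builds this URL from inputs.
        return {"branch": default_ref, "repo": f"https://github.com/{repo}.git"}

    # gh pr view exposes the head branch and head repository as JSON fields.
    out = subprocess.run(
        ["gh", "pr", "view", str(pr_number), "--repo", repo,
         "--json", "headRefName,headRepository"],
        check=True, capture_output=True, text=True,
    ).stdout
    data = json.loads(out)
    return {"branch": data["headRefName"], "repo": data["headRepository"]["url"]}


if __name__ == "__main__":
    # Hypothetical usage; PR 0 means "check out a ref instead of a PR head".
    print(resolve_pr_source(0, "NOAA-EMC/global-workflow"))
```
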
2 changes: 1 addition & 1 deletion parm/post/oceanice_products_gefs.yaml
@@ -33,7 +33,7 @@ ocean:
{% elif model_grid == 'mx500' %}
ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
{% endif %}
subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'tob', 'so', 'uo', 'vo']
data_in:
copy:
- ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
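
As an aside, a minimal sketch of how a subset list like the one above (now including the bottom temperature field tob) could be applied to the staged ocean file with xarray; the consumer script and file names here are assumptions, not taken from this diff.

```python
import xarray as xr

# Variables kept for the GEFS ocean products, including the newly added
# bottom temperature field 'tob' (list copied from oceanice_products_gefs.yaml).
subset = ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible',
          'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy',
          'temp', 'tob', 'so', 'uo', 'vo']

# Hypothetical input path; the YAML stages the history file as DATA/ocean.nc.
ds = xr.open_dataset("ocean.nc")

# Keep only the requested variables that are actually present in the file.
available = [v for v in subset if v in ds.data_vars]
ds[available].to_netcdf("ocean_subset.nc")
```
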
1 change: 1 addition & 0 deletions parm/product/gefs.0p25.fFFF.paramlist.a.txt
@@ -19,6 +19,7 @@
:CIN:180-0 mb above ground:
:CIN:surface:
:HLCY:3000-0 m above ground:
:PEVPR:surface:
:TCDC:entire atmosphere (considered as a single layer):
:WEASD:surface:
:SNOD:surface:
1 change: 0 additions & 1 deletion parm/product/gefs.0p25.fFFF.paramlist.b.txt
@@ -151,7 +151,6 @@
:O3MR:5 mb:
:O3MR:70 mb:
:O3MR:7 mb:
:PEVPR:surface:
:PLI:30-0 mb above ground:
:PLPL:255-0 mb above ground:
:POT:0.995 sigma level:
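
For context, :PEVPR:surface: now lives in the 0p25 "a" paramlist instead of the "b" paramlist. A small sketch, assuming the lists are used as substring filters against a wgrib2-style GRIB2 inventory (that usage, the file path, and the inventory line below are illustrative assumptions, not taken from this diff):

```python
def load_paramlist(path: str) -> list[str]:
    """Read a paramlist file, skipping blank lines."""
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]


def matches(inventory_line: str, paramlist: list[str]) -> bool:
    """True if any paramlist entry (e.g. ':PEVPR:surface:') appears in the
    GRIB2 inventory line for a record."""
    return any(entry in inventory_line for entry in paramlist)


# Hypothetical inventory line for a PEVPR record.
inv = "123:456789:d=2024110100:PEVPR:surface:24 hour fcst:"
params_a = load_paramlist("parm/product/gefs.0p25.fFFF.paramlist.a.txt")
print(matches(inv, params_a))  # True once :PEVPR:surface: is in the 'a' list
```
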
15 changes: 5 additions & 10 deletions ush/python/pygfs/task/archive.py
@@ -88,11 +88,6 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str
if not os.path.isdir(arch_dict.ROTDIR):
raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!")

if arch_dict.RUN in ["gdas", "gfs"]:

# Copy the cyclone track files and rename the experiments
Archive._rename_cyclone_expt(arch_dict)

# Collect datasets that need to be archived
# Each dataset represents one tarball

@@ -371,14 +366,14 @@ def _rename_cyclone_expt(arch_dict) -> None:

if run == "gfs":
in_track_file = (track_dir_in + "/avno.t" +
cycle_HH + "z.cycle.trackatcfunix")
cycle_HH + "z.cyclone.trackatcfunix")
in_track_p_file = (track_dir_in + "/avnop.t" +
cycle_HH + "z.cycle.trackatcfunixp")
cycle_HH + "z.cyclone.trackatcfunix")
elif run == "gdas":
in_track_file = (track_dir_in + "/gdas.t" +
cycle_HH + "z.cycle.trackatcfunix")
cycle_HH + "z.cyclone.trackatcfunix")
in_track_p_file = (track_dir_in + "/gdasp.t" +
cycle_HH + "z.cycle.trackatcfunixp")
cycle_HH + "z.cyclone.trackatcfunix")

if not os.path.isfile(in_track_file):
# Do not attempt to archive the outputs
@@ -416,7 +411,7 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s
with open("/tmp/track_file", "w") as new_file:
new_file.writelines(out_lines)

shutil.move("tmp/track_file", filename_out)
shutil.move("/tmp/track_file", filename_out)

replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4)
replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4)
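
Below is a self-contained sketch of the string-replacement helper whose temporary-file move is corrected above; the tempfile-based intermediate is an illustration of the same pattern, not the repository's exact implementation.

```python
import shutil
import tempfile


def replace_string_from_to_file(filename_in: str, filename_out: str,
                                search_str: str, replace_str: str) -> None:
    """Copy filename_in to filename_out, replacing search_str with replace_str
    on every line (e.g. swapping the 'AVNO' experiment name for pslot)."""
    with open(filename_in) as f:
        out_lines = [line.replace(search_str, replace_str) for line in f]

    # Write to a temporary file first, then move it into place, mirroring the
    # corrected shutil.move("/tmp/track_file", filename_out) call above.
    with tempfile.NamedTemporaryFile("w", delete=False) as tmp:
        tmp.writelines(out_lines)

    shutil.move(tmp.name, filename_out)
```
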
