From bd97af961eb407234aab40beb02c5af9017ef684 Mon Sep 17 00:00:00 2001
From: Yaping Wang <49168260+ypwang19@users.noreply.github.com>
Date: Thu, 2 Jan 2025 13:11:59 -0500
Subject: [PATCH 1/7] Update jcb-base for aerosol VarBC (#1426)

# Description

This PR adds the aerosol bias file names to the jcb-base template.

# Companion PRs

https://github.com/NOAA-EMC/global-workflow/pull/3189

# Issues

Resolves https://github.com/NOAA-EMC/global-workflow/issues/3172

# Automated CI tests to run in Global Workflow

- [ ] C96C48_hybatmaerosnowDA

---------

Co-authored-by: ypwang19
Co-authored-by: Cory Martin
---
 parm/aero/jcb-base.yaml.j2 | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/parm/aero/jcb-base.yaml.j2 b/parm/aero/jcb-base.yaml.j2
index 6d1cf7b7f..3c48b74b3 100644
--- a/parm/aero/jcb-base.yaml.j2
+++ b/parm/aero/jcb-base.yaml.j2
@@ -128,12 +128,6 @@ aero_obsbiascovout_prefix: "{{APREFIX}}"
 aero_obsbiascovout_suffix: ".satbias_cov.nc"

 bias_files:
-  atms_n20: rad_varbc_params.tar
-  atms_npp: rad_varbc_params.tar
-  mtiasi_metop-a: rad_varbc_params.tar
-  mtiasi_metop-b: rad_varbc_params.tar
-  amsua_n19: rad_varbc_params.tar
-  ssmis_f17: rad_varbc_params.tar
-  ssmis_f18: rad_varbc_params.tar
-  cris-fsr_n20: rad_varbc_params.tar
-  cris-fsr_npp: rad_varbc_params.tar
+  viirs_npp_aod: aero_varbc_params.tar
+  viirs_n20_aod: aero_varbc_params.tar
+  viirs_n21_aod: aero_varbc_params.tar

From 322ce42e8c8d0126d88d2395d196222e5224a7ba Mon Sep 17 00:00:00 2001
From: Yaping Wang <49168260+ypwang19@users.noreply.github.com>
Date: Fri, 3 Jan 2025 15:33:31 -0500
Subject: [PATCH 2/7] Update jcb-gdas hash for aerosol VarBC (#1431)

# Description

# Companion PRs

https://github.com/NOAA-EMC/jcb-gdas/pull/61

# Issues

https://github.com/NOAA-EMC/global-workflow/issues/3172

Co-authored-by: ypwang19
---
 parm/jcb-gdas | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/parm/jcb-gdas b/parm/jcb-gdas
index d75038349..10a744aa2 160000
--- a/parm/jcb-gdas
+++ b/parm/jcb-gdas
@@ -1 +1 @@
-Subproject commit d7503834951a75ae12a1c38dfc884892f19905aa
+Subproject commit 10a744aa2a8103e523aba54bebc44a965c2ec466

From c665ec924ac8ca4f0f830bae4d72f56943073a08 Mon Sep 17 00:00:00 2001
From: Anna Shlyaeva
Date: Mon, 6 Jan 2025 07:16:08 -0700
Subject: [PATCH 3/7] Bugfix for ensemble update: rollback changes in gdas_ens_handler (#1430)

# Description

Bugfix for https://github.com/NOAA-EMC/GDASApp/issues/1429. I am rolling back the change in `gdas_ens_handler` from https://github.com/NOAA-EMC/GDASApp/pull/1417 that resulted in ensemble increments missing ssh, which is used (at least to check dimensions) in the MOM6 IAU. From looking at the original PR I don't think this change was intentional, and I _think_ it's OK to roll it back, although it would be good if someone with knowledge of this workflow could review.
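For readers less familiar with this code path: the failure mode being rolled back is the classic one of calling a helper that returns an augmented copy of its argument and then discarding the return value, so the augmentation is silently lost. The snippet below is a minimal, self-contained sketch of that pattern only; `Increment` and `appendLayer` here are simplified stand-ins invented for illustration, not the real soca/oops interfaces.

```cpp
// Minimal stand-in types (not the real soca/oops classes) illustrating why
// the return value of an "append layer" style helper has to be captured.
#include <iostream>
#include <string>
#include <vector>

struct Increment {
  std::vector<std::string> fields;
};

// Hypothetical helper: returns a copy of the increment with the vertical
// geometry appended; the input increment itself is left untouched.
Increment appendLayer(const Increment& incr) {
  Increment out = incr;
  out.fields.push_back("layer_thickness");
  return out;
}

int main() {
  Increment incr{{"ssh", "temperature", "salinity"}};

  appendLayer(incr);                             // bug: returned copy is discarded
  std::cout << incr.fields.size() << "\n";       // still 3, nothing was appended

  Increment mom6_incr = appendLayer(incr);       // fix: keep the appended increment
  std::cout << mom6_incr.fields.size() << "\n";  // 4, ready to hand to the IAU
  return 0;
}
```

The restored lines in the diff below have the same shape: the increment returned by `postProcIncr.appendLayer(incr)` is captured as `mom6_incr`, and that object, rather than the original `incr`, is passed to `postProcIncr.save(...)`.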

# Issues

Fixes https://github.com/NOAA-EMC/GDASApp/issues/1429

Co-authored-by: Anna Shlyaeva
---
 utils/soca/gdas_ens_handler.h | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/utils/soca/gdas_ens_handler.h b/utils/soca/gdas_ens_handler.h
index 2355c9623..89c6e2a2c 100644
--- a/utils/soca/gdas_ens_handler.h
+++ b/utils/soca/gdas_ens_handler.h
@@ -151,14 +151,14 @@ namespace gdasapp {
         oops::Log::info() << "recentered incr " << i << ":" << incr << std::endl;

         // Append the vertical geometry (for MOM6 IAU)
-        postProcIncr.appendLayer(incr);
-        oops::Log::info() << "incr " << i << ":" << incr << std::endl;
+        soca::Increment mom6_incr = postProcIncr.appendLayer(incr);
+        oops::Log::info() << "incr " << i << ":" << mom6_incr << std::endl;

         // Set variables to zero if specified in the configuration
         postProcIncr.setToZero(incr);

         // Save the increments used to initialize the ensemble forecast
-        result = postProcIncr.save(incr, i+1);
+        result = postProcIncr.save(mom6_incr, i+1);
       }
       return result;
     }

From a36255a4322122a16b9614ac107a9d087f14a58d Mon Sep 17 00:00:00 2001
From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com>
Date: Mon, 6 Jan 2025 10:32:44 -0500
Subject: [PATCH 4/7] Add observation window to Argo float obs and other BUFR-sourced obs (#1423)

# Description

Adds an observation window to Argo float obs (4 cycles back and forward) and the same facility for other BUFR-sourced obs. Since the current BUFR-to-IODA converters handle one input file at a time, this is the interface this PR assumes, and so it puts multiple Argo IODA files, one per cycle, in the `obs` directory under `COM`. Tested with Argo.

# Companion PRs

NA

# Issues

Partially addresses https://github.com/NOAA-EMC/GDASApp/issues/1132

# Automated CI tests to run in Global Workflow

- [ ] atm_jjob
- [ ] C96C48_ufs_hybatmDA
- [ ] C96C48_hybatmaerosnowDA
- [x] C48mx500_3DVarAOWCDA
- [ ] C48mx500_hybAOWCDA
- [ ] C96C48_hybatmDA
---
 parm/soca/obsprep/obsprep_config.yaml |  3 ++
 ush/soca/prep_ocean_obs.py            | 57 +++++++++++++++++----------
 ush/soca/prep_ocean_obs_utils.py      | 31 +++++++++------
 3 files changed, 59 insertions(+), 32 deletions(-)

diff --git a/parm/soca/obsprep/obsprep_config.yaml b/parm/soca/obsprep/obsprep_config.yaml
index 905f49be6..02f4edb5d 100644
--- a/parm/soca/obsprep/obsprep_config.yaml
+++ b/parm/soca/obsprep/obsprep_config.yaml
@@ -244,6 +244,9 @@ observations:
     provider: GTS
     dmpdir subdir: atmos
     type: bufr
+    window:
+      back: 4
+      forward: 4
     dmpdir regex: 'gdas.*.subpfl.*.bufr_d'

 - obs space:
diff --git a/ush/soca/prep_ocean_obs.py b/ush/soca/prep_ocean_obs.py
index da7b2da6a..e751964ab 100644
--- a/ush/soca/prep_ocean_obs.py
+++ b/ush/soca/prep_ocean_obs.py
@@ -138,39 +138,51 @@ def initialize(self):
                 interval = timedelta(hours=assim_freq * i)
                 window_cdates.append(cdate + interval)

-            input_files = prep_ocean_obs_utils.obs_fetch(self.task_config,
-                                                         self.task_config,
-                                                         obsprep_space,
-                                                         window_cdates)
+            # fetch the obs files to DATA directory and get the list of files and cycles
+            fetched_files = prep_ocean_obs_utils.obs_fetch(self.task_config,
+                                                           self.task_config,
+                                                           obsprep_space,
+                                                           window_cdates)

-            if not input_files:
+            if not fetched_files:
                 logger.warning(f"No files found for obs source {obtype}, skipping")
                 break  # go to next observer in OBS_YAML

-            obsprep_space['input files'] = input_files
             obsprep_space['window begin'] = self.window_begin
             obsprep_space['window end'] = self.window_end
-            ioda_filename = f"{RUN}.t{cyc:02d}z.{obs_space_name}.{cdatestr}.nc4"
-            obsprep_space['output file'] = ioda_filename
             ioda_config_file = obtype + '2ioda.yaml'
+            obsprep_space['conversion config file'] = ioda_config_file

             # set up the config file for conversion to IODA for bufr and
             # netcdf files respectively
             if obsprep_space['type'] == 'bufr':
+                # create a pre-filled template file for the bufr2ioda converter,
+                # which will be overwritten for each input cycle
                 bufrconv_config = {
                     'RUN': RUN,
                     'current_cycle': cdate,
                     'DMPDIR': COMIN_OBS,
                     'COM_OBS': COMIN_OBS,
                     'OCEAN_BASIN_FILE': OCEAN_BASIN_FILE}
-                obsprep_space['conversion config file'] = ioda_config_file
-                bufr2iodapy = BUFR2IODA_PY_DIR + '/bufr2ioda_' + obtype + '.py'
+                bufr2iodapy = os.path.join(BUFR2IODA_PY_DIR, f'bufr2ioda_{obtype}.py')
                 obsprep_space['bufr2ioda converter'] = bufr2iodapy
-                tmpl_filename = 'bufr2ioda_' + obtype + '.yaml'
+                tmpl_filename = f"bufr2ioda_{obtype}.yaml"
                 bufrconv_template = os.path.join(BUFR2IODA_TMPL_DIR, tmpl_filename)
+                output_files = []  # files to save to COM directory
+                bufrconv_files = []  # files needed to populate the IODA converter config
+                # for each cycle of the retrieved obs bufr files...
+                for input_file, cycle in fetched_files:
+                    cycletime = cycle[8:10]
+                    ioda_filename = f"{RUN}.t{cycletime}z.{obs_space_name}.{cycle}.nc4"
+                    output_files.append(ioda_filename)
+                    bufrconv_files.append((cycle, input_file, ioda_filename))
+
+                obsprep_space['output file'] = output_files
+                obsprep_space['bufrconv files'] = bufrconv_files

                 try:
                     bufrconv = parse_j2yaml(bufrconv_template, bufrconv_config)
+                    bufrconv.update(obsprep_space)
                     bufrconv.save(ioda_config_file)
                 except Exception as e:
                     logger.warning(f"An exeception {e} occured while trying to create BUFR2IODA config")
@@ -180,7 +192,10 @@ def initialize(self):
                 obsspaces_to_convert.append({"obs space": obsprep_space})

             elif obsprep_space['type'] == 'nc':
-                obsprep_space['conversion config file'] = ioda_config_file
+
+                obsprep_space['input files'] = [f[0] for f in fetched_files]
+                ioda_filename = f"{RUN}.t{cyc:02d}z.{obs_space_name}.{cdatestr}.nc4"
+                obsprep_space['output file'] = [ioda_filename]
                 save_as_yaml(obsprep_space, ioda_config_file)

                 obsspaces_to_convert.append({"obs space": obsprep_space})
@@ -192,7 +207,7 @@ def initialize(self):
                 logger.critical("Ill-formed OBS_YAML or OBSPREP_YAML file, exiting")
                 raise

-        # yes, there is redundancy between the yamls fed to the ioda converter and here,
+        # yes, there is redundancy between the yamls fed to the ioda converters and here,
         # this seems safer and easier than being selective about the fields
         save_as_yaml({"observations": obsspaces_to_convert}, self.task_config.conversion_list_file)
@@ -258,16 +273,18 @@ def finalize(self):

         obsspaces_to_save = YAMLFile(self.task_config.save_list_file)

-        for obsspace_to_save in obsspaces_to_save['observations']:
-
-            output_file = os.path.basename(obsspace_to_save['output file'])
-            conv_config_file = os.path.basename(obsspace_to_save['conversion config file'])
-            output_file_dest = os.path.join(COMOUT_OBS, output_file)
+        for obs_space in obsspaces_to_save['observations']:
+            files_to_save = []
+            conv_config_file = os.path.basename(obs_space['conversion config file'])
             conv_config_file_dest = os.path.join(COMOUT_OBS, conv_config_file)
+            files_to_save.append([conv_config_file, conv_config_file_dest])
+
+            for output_file in obs_space['output file']:
+                output_file_dest = os.path.join(COMOUT_OBS, output_file)
+                files_to_save.append([output_file, output_file_dest])

             try:
-                FileHandler({'copy': [[output_file, output_file_dest]]}).sync()
-                FileHandler({'copy': [[conv_config_file, conv_config_file_dest]]}).sync()
+                FileHandler({'copy': files_to_save}).sync()
             except Exception as e:
                 logger.warning(f"An exeception {e} occured while trying to run gen_bufr_json")
             except OSError:
diff --git a/ush/soca/prep_ocean_obs_utils.py b/ush/soca/prep_ocean_obs_utils.py
index 9ecb06464..c8f42a375 100755
--- a/ush/soca/prep_ocean_obs_utils.py
+++ b/ush/soca/prep_ocean_obs_utils.py
@@ -2,7 +2,7 @@
 import os
 import fnmatch
 import subprocess
-from wxflow import FileHandler, Logger
+from wxflow import FileHandler, Logger, YAMLFile

 logger = Logger()
@@ -36,10 +36,10 @@ def obs_fetch(config, task_config, obsprep_space, cycles):
         for root, _, files in os.walk(full_input_dir):
             for filename in fnmatch.filter(files, dumpdir_regex):
-                target_file = PDY + cyc + '-' + filename
-                matching_files.append((full_input_dir, filename, target_file))
+                target_file = f"{PDY}{cyc}-{filename}"
+                matching_files.append((full_input_dir, filename, target_file, f"{PDY}{cyc}"))

-    for full_input_dir, filename, target_file in matching_files:
+    for full_input_dir, filename, target_file, _ in matching_files:
         file_path = os.path.join(full_input_dir, filename)
         file_destination = os.path.join(COMIN_OBS, target_file)
         file_copy.append([file_path, file_destination])
@@ -50,7 +50,7 @@ def obs_fetch(config, task_config, obsprep_space, cycles):
     FileHandler({'copy': file_copy}).sync()

     # return the modified file names for the IODA converters
-    return [f[2] for f in matching_files]
+    return [(f[2], f[3]) for f in matching_files]


 def run_netcdf_to_ioda(obsspace_to_convert, OCNOBS2IODAEXEC):
@@ -69,11 +69,18 @@ def run_netcdf_to_ioda(obsspace_to_convert, OCNOBS2IODAEXEC):
 def run_bufr_to_ioda(obsspace_to_convert):
     logger.info(f"running run_bufr_to_ioda on {obsspace_to_convert['name']}")
     bufrconv_yaml = obsspace_to_convert['conversion config file']
+    bufrconv_config = YAMLFile(bufrconv_yaml)
     bufr2iodapy = obsspace_to_convert['bufr2ioda converter']
-    try:
-        subprocess.run(['python', bufr2iodapy, '-c', bufrconv_yaml], check=True)
-        return 0
-    except subprocess.CalledProcessError as e:
-        logger.warning(f"bufr2ioda converter failed with error >{e}<, \
-                       return code {e.returncode}")
-        return e.returncode
+    obtype = obsspace_to_convert['name']
+
+    for cycle, input_file, output_file in obsspace_to_convert['bufrconv files']:
+        bufrconv_config['input_file'] = input_file
+        bufrconv_config['output_file'] = output_file
+        bufrconv_config['cycle_datetime'] = cycle
+        config_filename = f"{cycle}.{bufrconv_yaml}"
+        bufrconv_config.save(config_filename)
+        try:
+            subprocess.run(['python', bufr2iodapy, '-c', config_filename], check=True)
+        except subprocess.CalledProcessError as e:
+            logger.warning(f"bufr2ioda converter failed with error >{e}<, \
+                           return code {e.returncode}")

From 614c7b6d849eee7ebb22a9d847ba360a54092c81 Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com>
Date: Fri, 10 Jan 2025 09:28:45 -0500
Subject: [PATCH 5/7] Update JEDI hashes (#1437)

---
 sorc/fv3-jedi | 2 +-
 sorc/ioda     | 2 +-
 sorc/iodaconv | 2 +-
 sorc/oops     | 2 +-
 sorc/saber    | 2 +-
 sorc/soca     | 2 +-
 sorc/ufo      | 2 +-
 7 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/sorc/fv3-jedi b/sorc/fv3-jedi
index d0f54bf94..e5139862b 160000
--- a/sorc/fv3-jedi
+++ b/sorc/fv3-jedi
@@ -1 +1 @@
-Subproject commit d0f54bf94322a36734a3573061f6cc62396eece4
+Subproject commit e5139862b86b23ac6c2808e112b9038ff19826af
diff --git a/sorc/ioda b/sorc/ioda
index 4764d6151..a3774b348 160000
--- a/sorc/ioda
+++ b/sorc/ioda
@@ -1 +1 @@
-Subproject commit 4764d615138bc57d6c15ee7020ab9b75199d8563
+Subproject commit a3774b348296e81e5b6f524eee33475cad0ad67e
diff --git a/sorc/iodaconv b/sorc/iodaconv
index c47953462..a2f538327 160000
--- a/sorc/iodaconv
+++ b/sorc/iodaconv
@@ -1 +1 @@
-Subproject commit c47953462af2a2777849786cb853a445cbf7b15d
+Subproject commit a2f538327ed37be6f98a21ee6a5acf3ef4dd96b8
diff --git a/sorc/oops b/sorc/oops
index 40e6ecc2e..519e05663 160000
--- a/sorc/oops
+++ b/sorc/oops
@@ -1 +1 @@
-Subproject commit 40e6ecc2e53b0744eb00d3ab3c4d4e7fcb5f0c9b
+Subproject commit 519e05663d74a8b2081a83b61a800bec9edfeaf9
diff --git a/sorc/saber b/sorc/saber
index a2c7e4551..29764edcf 160000
--- a/sorc/saber
+++ b/sorc/saber
@@ -1 +1 @@
-Subproject commit a2c7e45513b2d1280128703d6fc21ecb444cdc0e
+Subproject commit 29764edcf46fadf8e6b70a3d1095149fe79e038c
diff --git a/sorc/soca b/sorc/soca
index 945b4ab57..f60b0e24d 160000
--- a/sorc/soca
+++ b/sorc/soca
@@ -1 +1 @@
-Subproject commit 945b4ab57d0a69306e7f91da94797a79e547331c
+Subproject commit f60b0e24d517847cc14f51643a40c9cf8982887a
diff --git a/sorc/ufo b/sorc/ufo
index c9e1c5558..2dd764a63 160000
--- a/sorc/ufo
+++ b/sorc/ufo
@@ -1 +1 @@
-Subproject commit c9e1c5558e904701cc3f5a822330a855d33875ac
+Subproject commit 2dd764a63ac6b800aad455cd6307175199f33d7a

From d6277a405ba744cfa7e333a0318ad9a093d54993 Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com>
Date: Fri, 10 Jan 2025 11:35:47 -0500
Subject: [PATCH 6/7] Update wcoss2.intel.lua to spack-stack/1.6.0 (#1435)

---
 modulefiles/GDAS/wcoss2.intel.lua | 99 +++++++++++++++++++++----------
 test/gw-ci/create_exp.sh          |  3 +
 ush/module-setup.sh               |  2 +
 3 files changed, 73 insertions(+), 31 deletions(-)

diff --git a/modulefiles/GDAS/wcoss2.intel.lua b/modulefiles/GDAS/wcoss2.intel.lua
index 2c3224f33..b7d042233 100644
--- a/modulefiles/GDAS/wcoss2.intel.lua
+++ b/modulefiles/GDAS/wcoss2.intel.lua
@@ -3,49 +3,86 @@ Load environment for running the GDAS application with Intel compilers and MPI.
 ]])

 local pkgName = myModuleName()
-local pkgVersion = myModuleVersion()
+local pkgVersion = myModuleVersion() or "1.0"
 local pkgNameVer = myModuleFullName()

 prepend_path("MODULEPATH", "/apps/dev/lmodules/core")
-
-load("PrgEnv-intel/8.2.0")
-load("cmake/3.20.2")
-load("craype")
-load("cray-pals")
-load("git/2.29.0")
-load("intel/19.1.3.304")
-load("cray-mpich/8.1.12")
-load("hdf5/1.12.2")
-load("netcdf/4.7.4")
-load("udunits/2.2.28")
+prepend_path("MODULEPATH", "/apps/ops/test/spack-stack-1.6.0-nco/envs/nco-intel-19.1.3.304/install/modulefiles/Core")
+
+load("PrgEnv-intel/8.3.3")
+load("stack-intel/19.1.3.304")
+load("stack-cray-mpich/8.1.9")
+load("stack-python/3.10.13")
+load("craype/2.7.17")
+load("cray-pals/1.3.2")
+load("cmake/3.23.1")
+load("gettext/0.19.7")
+load("pcre2/10.42")
+load("curl/8.4.0")
+load("zlib/1.2.13")
+load("git/2.35.3")
+load("pkg-config/0.29.2")
+load("hdf5/1.14.0")
+load("parallel-netcdf/1.12.2")
+load("netcdf-c/4.9.2")
+load("nccmp/1.9.0.1")
+load("netcdf-fortran/4.6.1")
+load("nco/5.0.6")
+load("parallelio/2.5.10")
+load("boost/1.83.0")
+load("bufr/12.0.1")
+load("ecbuild/3.7.2")
+load("openjpeg/2.5.0")
 load("eigen/3.4.0")
-load("boost/1.79.0")
-load("gsl-lite/v0.40.0")
-load("sp/2.4.0")
-load("python/3.8.6")
-load("ecbuild/3.7.0")
-load("qhull/2020.2")
-load("eckit/1.24.4")
+load("openblas/0.3.24")
+load("eckit/1.24.5")
+load("fftw/3.3.10")
 load("fckit/0.11.0")
+load("fms/2023.04")
+load("esmf/8.5.0")
 load("atlas/0.35.0")
-load("nccmp")
-load("nco/5.0.6")
-load("gsl/2.7")
+load("sp/2.5.0")
+load("gsl-lite/0.37.0")
+load("libjpeg/2.1.0")
+load("libpng/1.6.37")
+load("libxt/1.1.5")
+load("libxmu/1.1.4")
+load("libxpm/3.5.12")
+load("libxaw/1.0.13")
+load("udunits/2.2.28")
+load("ncview/2.1.9")
+load("netcdf-cxx4/4.3.1")
+load("core/rocoto/1.3.5")
 load("prod_util/2.0.14")
-load("bufr/12.0.1")
-load("fms-C/2023.04")
-load("esmf-C/8.6.0")
--- hack for pybind11
-setenv("pybind11_ROOT", "/apps/spack/python/3.8.6/intel/19.1.3.304/pjn2nzkjvqgmjw4hmyz43v5x4jbxjzpk/lib/python3.8/site-packages/pybind11/share/cmake/pybind11")
+load("py-setuptools/63.4.3")
+load("py-jinja2/3.1.2")
+load("py-netcdf4/1.5.8")
+load("py-pybind11/2.11.1")
+load("py-pycodestyle/2.11.0")
+load("py-pyyaml/5.4.1")
+load("py-scipy/1.10.1")
+load("py-xarray/2023.7.0")
+load("py-f90nml/1.4.3")
+load("py-pip/23.1.2")
+load("py-bottleneck/1.3.7")
+load("py-numexpr/2.8.4")
+load("py-six/1.16.0")
+load("py-python-dateutil/2.8.2")
+load("py-pytz/2023.3")
+load("py-pandas/1.5.3")
+load("py-numpy/1.24.4")
+load("py-markupsafe/2.1.3")
+load("py-cftime/1.0.3.4")
+load("py-packaging/23.1")

 -- hack for git-lfs
 prepend_path("PATH", "/apps/spack/git-lfs/2.11.0/gcc/11.2.0/m6b6nl5kfqngfteqbggydc7kflxere3s/bin")

--- hack for FMS
-setenv('fms_ROOT', '/apps/prod/hpc-stack/i-19.1.3.304__m-8.1.12__h-1.14.0__n-4.9.2__p-2.5.10__e-8.6.0pnetcdf/intel-19.1.3.304/cray-mpich-8.1.12/fms/2023.04')
+setenv("CC","cc")
+setenv("CXX","CC")
+setenv("FC","ftn")

-local mpiexec = '/pe/intel/compilers_and_libraries_2020.4.304/linux/mpi/intel64/bin/mpirun'
+local mpiexec = '/opt/cray/pals/1.3.2/bin/mpirun'
 local mpinproc = '-n'
 setenv('MPIEXEC_EXEC', mpiexec)
 setenv('MPIEXEC_NPROC', mpinproc)
diff --git a/test/gw-ci/create_exp.sh b/test/gw-ci/create_exp.sh
index b4c1535db..d3f117a64 100755
--- a/test/gw-ci/create_exp.sh
+++ b/test/gw-ci/create_exp.sh
@@ -13,6 +13,9 @@ expyaml=${expyaml_ctest}
 export pslot=${pslot_ctest}
 export RUNTESTS=${exp_path}/${pslot}
 export HPC_ACCOUNT="da-cpu"
+if [[ $MACHINE_ID = wcoss2 ]]; then
+  export HPC_ACCOUNT="GFS-DEV"
+fi

 # Source the gw environement
 source ${HOMEgfs}/workflow/gw_setup.sh
diff --git a/ush/module-setup.sh b/ush/module-setup.sh
index a9b87e9e4..11536daf3 100755
--- a/ush/module-setup.sh
+++ b/ush/module-setup.sh
@@ -38,6 +38,8 @@ elif [[ $MACHINE_ID = s4* ]] ; then

 elif [[ $MACHINE_ID = wcoss2 ]]; then
     # We are on WCOSS2
+    # Ignore default modules of the same version lower in the search path (req'd by spack-stack)
+    export LMOD_TMOD_FIND_FIRST=yes
     module reset

 elif [[ $MACHINE_ID = cheyenne* ]] ; then

From 15113adfdbf2500ec2d5099fc9b62b21fbfcc9b8 Mon Sep 17 00:00:00 2001
From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com>
Date: Fri, 10 Jan 2025 13:53:16 -0500
Subject: [PATCH 7/7] clean up stable_driver.sh (#1434)

---
 ci/run_ci.sh        | 12 ++++++++++++
 ci/stable_driver.sh | 41 ++++++++++++++++++-----------------------
 2 files changed, 30 insertions(+), 23 deletions(-)

diff --git a/ci/run_ci.sh b/ci/run_ci.sh
index 9e67b1e12..cc39dcc63 100755
--- a/ci/run_ci.sh
+++ b/ci/run_ci.sh
@@ -91,6 +91,18 @@ if [[ $TEST_WORKFLOW == 1 ]]; then
 fi
 # ==============================================================================
 # run ctests
+
+# PATCH START
+# MSU role-da can not use /work/noaa/stmp at present. The logic below
+# modifies the stmp path used by g-w so that role-da can run g-w based
+# ctests. This logic will be removed after MSU role-da is added to the
+# stmp group.
+if [[ "${TARGET}" = "orion" || "${TARGET}" = "hercules" ]]; then
+  echo "***WARNING*** apply MSU stmp patch to $workflow_dir/workflow/hosts/${TARGET}.yaml"
+  sed -i "s|/noaa/stmp|/noaa/da|g" $workflow_dir/workflow/hosts/${TARGET}.yaml
+fi
+# PATCH END
+
 cd $gdasapp_dir/build
 module use $gdasapp_dir/modulefiles
 module load GDAS/$TARGET
diff --git a/ci/stable_driver.sh b/ci/stable_driver.sh
index ca51f6f7c..7bf936325 100755
--- a/ci/stable_driver.sh
+++ b/ci/stable_driver.sh
@@ -73,7 +73,7 @@ $gdasdir/ush/submodules/update_develop.sh $gdasdir

 # ==============================================================================
 # email information
-PEOPLE="Cory.R.Martin@noaa.gov Russ.Treadon@noaa.gov Guillaume.Vernieres@noaa.gov David.New@noaa.gov"
+PEOPLE="Cory.R.Martin@noaa.gov David.New@noaa.gov Russ.Treadon@noaa.gov"
 BODY=$stableroot/$datestr/stable_nightly

 # ==============================================================================
@@ -84,41 +84,36 @@ total=0
 if [ $ci_status -eq 0 ]; then
   cd $gdasdir
   # checkout feature/stable-nightly
-  git stash
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
-    echo "Unable to git stash" >> $stableroot/$datestr/output
-  fi
   git checkout feature/stable-nightly
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
+  rc=$?
+  total=$(($total+$rc))
+  if [ $rc -ne 0 ]; then
     echo "Unable to checkout feature/stable-nightly" >> $stableroot/$datestr/output
   fi
   # merge in develop
   git merge develop
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
+  rc=$?
+  total=$(($total+$rc))
+  if [ $rc -ne 0 ]; then
     echo "Unable to merge develop" >> $stableroot/$datestr/output
   fi
   # add in submodules
-  git stash pop
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
-    echo "Unable to git stash pop" >> $stableroot/$datestr/output
-  fi
-  $my_dir/../ush/submodules/add_submodules.sh $gdasdir
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
+  $gdasdir/ush/submodules/add_submodules.sh $gdasdir
+  rc=$?
+  total=$(($total+$rc))
+  if [ $rc -ne 0 ]; then
     echo "Unable to add updated submodules to commit" >> $stableroot/$datestr/output
   fi
   git diff-index --quiet HEAD || git commit -m "Update to new stable build on $datestr"
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
+  rc=$?
+  total=$(($total+$rc))
+  if [ $rc -ne 0 ]; then
     echo "Unable to commit" >> $stableroot/$datestr/output
   fi
   git push --set-upstream origin feature/stable-nightly
-  total=$(($total+$?))
-  if [ $total -ne 0 ]; then
+  rc=$?
+  total=$(($total+$rc))
+  if [ $rc -ne 0 ]; then
     echo "Unable to push" >> $stableroot/$datestr/output
   fi
   if [ $total -ne 0 ]; then
@@ -152,4 +147,4 @@ mail -r "Darth Vader - NOAA Affiliate " -s "$SUBJECT" "$PE

 # ==============================================================================
 # scrub working directory for older files
-find $stableroot/* -maxdepth 1 -mtime +3 -exec rm -rf {} \;
+find $stableroot/* -maxdepth 1 -mtime +1 -exec rm -rf {} \;