From 75c0227564fe1cef3eb1e8df03bc7332c77f9442 Mon Sep 17 00:00:00 2001 From: amazroo Date: Sun, 11 Apr 2021 08:49:54 -0600 Subject: [PATCH 01/19] add nbm options to suppPrecipMod.py --- Template/template_forcing_engine.config | 3 ++- core/suppPrecipMod.py | 24 ++++++++++++++++-------- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/Template/template_forcing_engine.config b/Template/template_forcing_engine.config index 249e719..614e8f3 100755 --- a/Template/template_forcing_engine.config +++ b/Template/template_forcing_engine.config @@ -286,6 +286,7 @@ DownscalingParamDirs = # 5 - CONUS MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) # 6 - Hawaii MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) # 7 - MRMS SBCv2 Liquid Water Fraction (netCDF only) +# 8 - NBM hourly forecast APCP (grib2) SuppPcp = [] # Specify the file type for each supplemental precipitation file (comma separated) @@ -342,4 +343,4 @@ cfsEnsNumber = 1 # input files every 15 minutes, 60 minutes, 3-hours, etc. Please specify the # length of custom input frequencies to match the number of custom NetCDF inputs # selected above in the Logistics section. 
-custom_input_fcst_freq = [60] \ No newline at end of file +custom_input_fcst_freq = [60] diff --git a/core/suppPrecipMod.py b/core/suppPrecipMod.py index 53dffc9..44edeaa 100755 --- a/core/suppPrecipMod.py +++ b/core/suppPrecipMod.py @@ -83,7 +83,8 @@ def define_product(self): 4: "WRF_ARW_PuertoRico_2p5km_PCP", 5: "CONUS_MRMS_1HR_MultiSensor", 6: "Hawaii_MRMS_1HR_MultiSensor", - 7: "MRMS_LiquidWaterFraction" + 7: "MRMS_LiquidWaterFraction", + 8: "NBM_APCP" } self.productName = product_names[self.keyValue] @@ -110,7 +111,8 @@ def define_product(self): 4: None, 5: None, 6: None, - 7: None + 7: None, + 8: None } self.grib_vars = grib_vars_in[self.keyValue] @@ -121,7 +123,8 @@ def define_product(self): 4: ['BLAH'], 5: ['BLAH'], 6: ['BLAH'], - 7: ['BLAH'] + 7: ['BLAH'], + 8: ['BLAH'] } self.grib_levels = grib_levels_in[self.keyValue] @@ -132,7 +135,8 @@ def define_product(self): 4: ['APCP_surface'], 5: ['MultiSensorQPE01H_0mabovemeansealevel'], 6: ['MultiSensorQPE01H_0mabovemeansealevel'], - 7: ['sbcv2_lwf'] + 7: ['sbcv2_lwf'], + 8: ['APCP_surface'] } self.netcdf_var_names = netcdf_variables[self.keyValue] @@ -143,7 +147,8 @@ def define_product(self): 4: None, 5: None, 6: None, - 7: None + 7: None, + 8: None } self.rqi_netcdf_var_names = netcdf_rqi_variables[self.keyValue] @@ -154,7 +159,8 @@ def define_product(self): 4: 3, 5: 3, 6: 3, - 7: 8 # LQFRAC + 7: 8, # LQFRAC + 8: 3 } self.output_var_idx = output_variables[self.keyValue] @@ -176,7 +182,8 @@ def calc_neighbor_files(self,ConfigOptions,dCurrent,MpiConfig): 4: time_handling.find_hourly_wrf_arw_neighbors, 5: time_handling.find_hourly_mrms_radar_neighbors, 6: time_handling.find_hourly_mrms_radar_neighbors, - 7: time_handling.find_sbcv2_lwf_neighbors + 7: time_handling.find_sbcv2_lwf_neighbors, + 8: time_handling.find_hourly_nbm_apcp_neighbors } find_neighbor_files[self.keyValue](self, ConfigOptions, dCurrent, MpiConfig) @@ -208,7 +215,8 @@ def regrid_inputs(self,ConfigOptions,wrfHyroGeoMeta,MpiConfig): 4: 
regrid.regrid_hourly_wrf_arw_hi_res_pcp, 5: regrid.regrid_mrms_hourly, 6: regrid.regrid_mrms_hourly, - 7: regrid.regrid_sbcv2_liquid_water_fraction + 7: regrid.regrid_sbcv2_liquid_water_fraction, + 8: regrid.regrid_hourly_nbm_apcp, } regrid_inputs[self.keyValue](self,ConfigOptions,wrfHyroGeoMeta,MpiConfig) #try: From 49f211f75fe60aa60eaabdf04641d856a5455065 Mon Sep 17 00:00:00 2001 From: amazroo Date: Sun, 11 Apr 2021 08:53:18 -0600 Subject: [PATCH 02/19] Add option 8 nbm to core/config.py --- core/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/config.py b/core/config.py index ab64b03..7a6afd1 100755 --- a/core/config.py +++ b/core/config.py @@ -1008,7 +1008,7 @@ def read_config(self): # Check to make sure supplemental precip options make sense. Also read in the RQI threshold # if any radar products where chosen. for suppOpt in self.supp_precip_forcings: - if suppOpt < 0 or suppOpt > 7: + if suppOpt < 0 or suppOpt > 8: err_handler.err_out_screen('Please specify SuppForcing values between 1 and 7.') # Read in RQI threshold to apply to radar products. 
if suppOpt == 1 or suppOpt == 2: From 058373f607782b8f54e1272f27da1c4e87733023 Mon Sep 17 00:00:00 2001 From: amazroo Date: Mon, 12 Apr 2021 06:32:45 -0600 Subject: [PATCH 03/19] Add find_hourly_nbm_apcp_neighbors to core/time_handling.py --- Template/template_forcing_engine.config | 2 +- core/time_handling.py | 112 ++++++++++++++++++++++++ 2 files changed, 113 insertions(+), 1 deletion(-) diff --git a/Template/template_forcing_engine.config b/Template/template_forcing_engine.config index 614e8f3..bce3077 100755 --- a/Template/template_forcing_engine.config +++ b/Template/template_forcing_engine.config @@ -286,7 +286,7 @@ DownscalingParamDirs = # 5 - CONUS MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) # 6 - Hawaii MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) # 7 - MRMS SBCv2 Liquid Water Fraction (netCDF only) -# 8 - NBM hourly forecast APCP (grib2) +# 8 - NBM hourly forecast QMD-CONUS-APCP (grib2) SuppPcp = [] # Specify the file type for each supplemental precipitation file (comma separated) diff --git a/core/time_handling.py b/core/time_handling.py index fd83126..0a59544 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1617,3 +1617,115 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf if not os.path.isfile(input_forcings.file_in2): if input_forcings.regridded_precip2 is not None: input_forcings.regridded_precip2[:, :] = config_options.globalNdv + + +def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_current, mpi_config): + """ + Function to calculate the previous and next NBM/QMD/CONUS files. This + will also calculate the neighboring radar quality index (RQI) files as well. + :param supplemental_precip: + :param config_options: + :param d_current: + :param mpi_config: + :return: + """ + # First we need to find the nearest previous and next hour, which is + # the previous/next NBM files we will be using. 
+ current_yr = d_current.year + current_mo = d_current.month + current_day = d_current.day + current_hr = d_current.hour + current_min = d_current.minute + + # Set the input file frequency to be hourly. + supplemental_precip.input_frequency = 60.0 + + prev_date1 = datetime.datetime(current_yr, current_mo, current_day, current_hr) + dt_tmp = d_current - prev_date1 + if dt_tmp.total_seconds() == 0: + # We are on the hour, we can set this date to the be the "next" date. + next_nbm_date = d_current + prev_nbm_date = d_current - datetime.timedelta(seconds=3600.0) + else: + # We are between two NBM hours. + prev_nbm_date = prev_date1 + next_nbm_date = prev_nbm_date + datetime.timedelta(seconds=3600.0) + + supplemental_precip.pcp_date1 = prev_nbm_date + #supplemental_precip.pcp_date2 = next_mrms_date + supplemental_precip.pcp_date2 = prev_nbm_date + + # Calculate expected file paths. + if supplemental_precip.keyValue == 8: + tmp_file1 = supplemental_precip.inDir + "blend." + \ + supplemental_precip.pcp_date1.strftime('%Y%m%d') + \ + "/" + supplemental_precip.pcp_date1.strftime('%H') + \ + "/qmd/blend.t" + supplemental_precip.pcp_date1.strftime('%H') + \ + "z.qmd.f" + format(int(supplemental_precip.pcp_date1.strftime('%H')), '03d') + ".co" \ + + supplemental_precip.file_ext + ('.gz' if supplemental_precip.fileType != NETCDF else '') + tmp_file2 = supplemental_precip.inDir + "blend." 
+ \ + supplemental_precip.pcp_date2.strftime('%Y%m%d') + \ + "/" + supplemental_precip.pcp_date2.strftime('%H') + \ + "/qmd/blend.t" + supplemental_precip.pcp_date2.strftime('%H') + \ + "z.qmd.f" + format(int(supplemental_precip.pcp_date2.strftime('%H')), '03d') + ".co" \ + + supplemental_precip.file_ext + ('.gz' if supplemental_precip.fileType != NETCDF else '') + else: + tmp_file1 = tmp_file2 = "" + + if mpi_config.rank == 0: + config_options.statusMsg = "Previous NBM supplemental file: " + tmp_file1 + err_handler.log_msg(config_options, mpi_config) + config_options.statusMsg = "Next NBM supplemental file: " + tmp_file2 + err_handler.log_msg(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Check to see if files are already set. If not, then reset, grids and + # regridding objects to communicate things need to be re-established. + if supplemental_precip.file_in1 != tmp_file1 or supplemental_precip.file_in2 != tmp_file2: + if config_options.current_output_step == 1: + supplemental_precip.regridded_precip1 = supplemental_precip.regridded_precip1 + supplemental_precip.regridded_precip2 = supplemental_precip.regridded_precip2 + else: + # The forecast window has shifted. Reset fields 2 to + # be fields 1. + supplemental_precip.regridded_precip1 = supplemental_precip.regridded_precip1 + supplemental_precip.regridded_precip2 = supplemental_precip.regridded_precip2 + supplemental_precip.file_in1 = tmp_file1 + supplemental_precip.file_in2 = tmp_file2 + supplemental_precip.regridComplete = False + + # If either file does not exist, set to None. This will instruct downstream regridding steps to + # set the regridded states to the global NDV. That ensures no supplemental precipitation will be + # added to the final output grids. + + # if not os.path.isfile(tmp_file1) or not os.path.isfile(tmp_file2): + # if MpiConfig.rank == 0: + # ConfigOptions.statusMsg = "MRMS files are missing. 
Will not process " \ + # "supplemental precipitation" + # errMod.log_warning(ConfigOptions,MpiConfig) + # supplemental_precip.file_in2 = None + # supplemental_precip.file_in1 = None + + # errMod.check_program_status(ConfigOptions, MpiConfig) + + # Ensure we have the necessary new file + if mpi_config.rank == 0: + if not os.path.isfile(supplemental_precip.file_in2) and (supplemental_precip.keyValue == 5 or supplemental_precip.keyValue == 6): + config_options.statusMsg = "NBM file {} not found, will attempt to use {} instead.".format( + supplemental_precip.file_in2, supplemental_precip.file_in1) + err_handler.log_warning(config_options, mpi_config) + supplemental_precip.file_in2 = supplemental_precip.file_in1 + if not os.path.isfile(supplemental_precip.file_in2): + if supplemental_precip.enforce == 1: + config_options.errMsg = "Expected input NBM file: " + supplemental_precip.file_in2 + " not found." + err_handler.log_critical(config_options, mpi_config) + else: + config_options.statusMsg = "Expected input NBM file: " + supplemental_precip.file_in2 + \ + " not found. " + "Will not use in final layering." + err_handler.log_warning(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If the file is missing, set the local slab of arrays to missing. 
+ if not os.path.isfile(supplemental_precip.file_in2): + if supplemental_precip.regridded_precip2 is not None: + supplemental_precip.regridded_precip2[:, :] = config_options.globalNdv From 1ddb55af2680f36b27d2a9c171b02945ab991226 Mon Sep 17 00:00:00 2001 From: amazroo Date: Thu, 15 Apr 2021 13:51:23 -0600 Subject: [PATCH 04/19] Edit the naming of NBM option in the config file --- core/suppPrecipMod.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/suppPrecipMod.py b/core/suppPrecipMod.py index 44edeaa..431d2aa 100755 --- a/core/suppPrecipMod.py +++ b/core/suppPrecipMod.py @@ -84,7 +84,7 @@ def define_product(self): 5: "CONUS_MRMS_1HR_MultiSensor", 6: "Hawaii_MRMS_1HR_MultiSensor", 7: "MRMS_LiquidWaterFraction", - 8: "NBM_APCP" + 8: "NBM_QMD_CONUS_APCP" } self.productName = product_names[self.keyValue] From 45b8a7e9765697cc8b3c23ad75a0656c3ac9ce9c Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 16 Apr 2021 09:18:18 -0600 Subject: [PATCH 05/19] Add regrid_hourly_nbm_apcp to core/regrid.py --- core/regrid.py | 104 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/core/regrid.py b/core/regrid.py index 9de4ebe..fa3ceeb 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -2297,6 +2297,110 @@ def regrid_sbcv2_liquid_water_fraction(supplemental_forcings, config_options, wr err_handler.check_program_status(config_options, mpi_config) +def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_meta, mpi_config): + """ + Function for handling regridding hourly forecasted NBM precipitation. + :param supplemental_precip: + :param config_options: + :param wrf_hydro_geo_meta: + :param mpi_config: + :return: + """ + # If the expected file is missing, this means we are allowing missing files, simply + # exit out of this routine as the regridded fields have already been set to NDV. 
+ if not os.path.exists(supplemental_precip.file_in1): + return + + # Check to see if the regrid complete flag for this + # output time step is true. This entails the necessary + # inputs have already been regridded and we can move on. + if supplemental_precip.regridComplete: + return + + id_tmp = ioMod.open_netcdf_forcing(supplemental_precip.file_in1, config_options, mpi_config) + + # Check to see if we need to calculate regridding weights. + calc_regrid_flag = check_supp_pcp_regrid_status(id_tmp, supplemental_precip, config_options, + wrf_hydro_geo_meta, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if calc_regrid_flag: + if mpi_config.rank == 0: + config_options.statusMsg = "Calculating NBM regridding weights." + err_handler.log_msg(config_options, mpi_config) + calculate_supp_pcp_weights(supplemental_precip, id_tmp, supplemental_precip.file_in1, config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the input variables. + var_tmp = None + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding NBM APCP Precipitation." 
+ err_handler.log_msg(config_options, mpi_config) + try: + var_tmp = id_tmp.variables['APCP_surface'][0, :, :] + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract precipitation from NBM file: " + \ + supplemental_precip.file_in1 + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + supplemental_precip.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place NBM precipitation into local ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in, + supplemental_precip.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid NBM supplemental precipitation: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. 
+ try: + supplemental_precip.esmf_field_out.data[np.where(supplemental_precip.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to run mask search on NBM supplemental precipitation: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + supplemental_precip.regridded_precip2[:, :] = supplemental_precip.esmf_field_out.data + err_handler.check_program_status(config_options, mpi_config) + + # Convert the hourly precipitation total from kg.m-2.hour-1 to a rate of mm.s-1 + try: + ind_valid = np.where(supplemental_precip.regridded_precip2 != config_options.globalNdv) + supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 3600.0 + del ind_valid + except (ValueError, ArithmeticError, AttributeError, KeyError) as npe: + config_options.errMsg = "Unable to run NDV search on NBM supplemental precipitation: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If we are on the first timestep, set the previous regridded field to be + # the latest as there are no states for time 0. + if config_options.current_output_step == 1: + supplemental_precip.regridded_precip1[:, :] = \ + supplemental_precip.regridded_precip2[:, :] + err_handler.check_program_status(config_options, mpi_config) + + # Close the temporary NetCDF file and remove it. 
+ if mpi_config.rank == 0: + try: + id_tmp.close() + except OSError: + config_options.errMsg = "Unable to close NetCDF file: " + supplemental_precip.file_in1 + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + def check_regrid_status(id_tmp, force_count, input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): """ Function for checking to see if regridding weights need to be From 1c69c5657bb83dbfd15abcf150d77ea842e138e2 Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 16 Apr 2021 09:22:14 -0600 Subject: [PATCH 06/19] Edit supplemental_precip.keyValue == 8 for NBM --- core/time_handling.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/time_handling.py b/core/time_handling.py index 0a59544..3f28bd6 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1710,7 +1710,7 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren # Ensure we have the necessary new file if mpi_config.rank == 0: - if not os.path.isfile(supplemental_precip.file_in2) and (supplemental_precip.keyValue == 5 or supplemental_precip.keyValue == 6): + if not os.path.isfile(supplemental_precip.file_in2) and (supplemental_precip.keyValue == 8 ): config_options.statusMsg = "NBM file {} not found, will attempt to use {} instead.".format( supplemental_precip.file_in2, supplemental_precip.file_in1) err_handler.log_warning(config_options, mpi_config) From 2ff540dae61d8642bbc9936fcbff9c40181f90fb Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 16 Apr 2021 09:23:21 -0600 Subject: [PATCH 07/19] Fix the error message on the range of acceptable Supp_precip options --- core/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/config.py b/core/config.py index 7a6afd1..d50a65b 100755 --- a/core/config.py +++ b/core/config.py @@ -1009,7 +1009,7 @@ def read_config(self): # if any radar products where chosen. 
for suppOpt in self.supp_precip_forcings: if suppOpt < 0 or suppOpt > 8: - err_handler.err_out_screen('Please specify SuppForcing values between 1 and 7.') + err_handler.err_out_screen('Please specify SuppForcing values between 1 and 8.') # Read in RQI threshold to apply to radar products. if suppOpt == 1 or suppOpt == 2: try: From 5026c9ef3b79dcabff470352fc1e6a12165515fa Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 16 Apr 2021 11:07:16 -0600 Subject: [PATCH 08/19] Remove '.gz' from nbm file extension --- core/time_handling.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/core/time_handling.py b/core/time_handling.py index 3f28bd6..d0531ce 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1662,13 +1662,13 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren "/" + supplemental_precip.pcp_date1.strftime('%H') + \ "/qmd/blend.t" + supplemental_precip.pcp_date1.strftime('%H') + \ "z.qmd.f" + format(int(supplemental_precip.pcp_date1.strftime('%H')), '03d') + ".co" \ - + supplemental_precip.file_ext + ('.gz' if supplemental_precip.fileType != NETCDF else '') + + supplemental_precip.file_ext tmp_file2 = supplemental_precip.inDir + "blend." + \ supplemental_precip.pcp_date2.strftime('%Y%m%d') + \ "/" + supplemental_precip.pcp_date2.strftime('%H') + \ "/qmd/blend.t" + supplemental_precip.pcp_date2.strftime('%H') + \ "z.qmd.f" + format(int(supplemental_precip.pcp_date2.strftime('%H')), '03d') + ".co" \ - + supplemental_precip.file_ext + ('.gz' if supplemental_precip.fileType != NETCDF else '') + + supplemental_precip.file_ext else: tmp_file1 = tmp_file2 = "" @@ -1694,19 +1694,6 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren supplemental_precip.file_in2 = tmp_file2 supplemental_precip.regridComplete = False - # If either file does not exist, set to None. 
This will instruct downstream regridding steps to - # set the regridded states to the global NDV. That ensures no supplemental precipitation will be - # added to the final output grids. - - # if not os.path.isfile(tmp_file1) or not os.path.isfile(tmp_file2): - # if MpiConfig.rank == 0: - # ConfigOptions.statusMsg = "MRMS files are missing. Will not process " \ - # "supplemental precipitation" - # errMod.log_warning(ConfigOptions,MpiConfig) - # supplemental_precip.file_in2 = None - # supplemental_precip.file_in1 = None - - # errMod.check_program_status(ConfigOptions, MpiConfig) # Ensure we have the necessary new file if mpi_config.rank == 0: From 514448687cb7b55d1402f4c32601822bf81181bc Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 16 Apr 2021 11:08:35 -0600 Subject: [PATCH 09/19] Add ioMod.open_grib2 for nbm data files --- core/regrid.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/core/regrid.py b/core/regrid.py index fa3ceeb..5290d9a 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -2317,7 +2317,23 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me if supplemental_precip.regridComplete: return - id_tmp = ioMod.open_netcdf_forcing(supplemental_precip.file_in1, config_options, mpi_config) + + nbm_tmp_nc = config_options.scratch_dir + "/NBM_PCP_TMP-{}.nc".format(mkfilename()) + if os.path.isfile(nbm_tmp_nc): + config_options.statusMsg = "Found old temporary file: " + \ + nbm_tmp_nc + " - Removing....." + err_handler.log_warning(config_options, mpi_config) + try: + os.remove(nbm_tmp_nc) + except OSError: + config_options.errMsg = "Unable to remove file: " + nbm_tmp_nc + err_handler.log_critical(config_options, mpi_config) + + # Perform a GRIB dump to NetCDF for the MRMS precip and RQI data. 
+ cmd1 = "$WGRIB2 " + supplemental_precip.file_in1 + " -netcdf " + nbm_tmp_nc + id_tmp = ioMod.open_grib2(supplemental_precip.file_in1, nbm_tmp_nc, cmd1, config_options, + mpi_config, supplemental_precip.netcdf_var_names[0]) + err_handler.check_program_status(config_options, mpi_config) # Check to see if we need to calculate regridding weights. calc_regrid_flag = check_supp_pcp_regrid_status(id_tmp, supplemental_precip, config_options, From 648a59157ebc994e6ff7a023c88d5393c2dfb314 Mon Sep 17 00:00:00 2001 From: amazroo Date: Thu, 29 Apr 2021 10:20:27 -0600 Subject: [PATCH 10/19] Fix indentation mistake in core/regrid.py --- core/regrid.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/core/regrid.py b/core/regrid.py index 5290d9a..07f39b3 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -2323,11 +2323,11 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me config_options.statusMsg = "Found old temporary file: " + \ nbm_tmp_nc + " - Removing....." err_handler.log_warning(config_options, mpi_config) - try: - os.remove(nbm_tmp_nc) - except OSError: - config_options.errMsg = "Unable to remove file: " + nbm_tmp_nc - err_handler.log_critical(config_options, mpi_config) + try: + os.remove(nbm_tmp_nc) + except OSError: + config_options.errMsg = "Unable to remove file: " + nbm_tmp_nc + err_handler.log_critical(config_options, mpi_config) # Perform a GRIB dump to NetCDF for the MRMS precip and RQI data. 
cmd1 = "$WGRIB2 " + supplemental_precip.file_in1 + " -netcdf " + nbm_tmp_nc From bcb3e9c8c29a4867de6f089e43096f22c73e693f Mon Sep 17 00:00:00 2001 From: amazroo Date: Mon, 3 May 2021 08:11:34 -0600 Subject: [PATCH 11/19] Fix NBM file searching: seperate NBM cycle hour/NBM forecast hour --- core/time_handling.py | 69 ++++++++++++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 24 deletions(-) diff --git a/core/time_handling.py b/core/time_handling.py index d0531ce..1d15a3f 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1636,46 +1636,65 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren current_day = d_current.day current_hr = d_current.hour current_min = d_current.minute + + # First find the current NBM forecast cycle that we are using. + current_nbm_cycle = config_options.current_fcst_cycle - \ + datetime.timedelta(seconds=supplemental_precip.userCycleOffset * 60.0) + + # Calculate the current forecast hour within this NBM cycle. + dt_tmp = d_current - current_nbm_cycle + current_nbm_hour = int(dt_tmp.days*24) + int(dt_tmp.seconds/3600.0) # Set the input file frequency to be hourly. supplemental_precip.input_frequency = 60.0 - prev_date1 = datetime.datetime(current_yr, current_mo, current_day, current_hr) - dt_tmp = d_current - prev_date1 - if dt_tmp.total_seconds() == 0: - # We are on the hour, we can set this date to the be the "next" date. - next_nbm_date = d_current - prev_nbm_date = d_current - datetime.timedelta(seconds=3600.0) + # Calculate the previous file to process. 
+ min_since_last_output = (current_nbm_hour*60) % supplemental_precip.input_frequency + if min_since_last_output == 0: + min_since_last_output = supplemental_precip.input_frequency + prev_nbm_date = d_current - datetime.timedelta(seconds=min_since_last_output*60) + supplemental_precip.fcst_date1 = prev_nbm_date + if min_since_last_output == supplemental_precip.input_frequency: + min_until_next_output = 0 else: - # We are between two NBM hours. - prev_nbm_date = prev_date1 - next_nbm_date = prev_nbm_date + datetime.timedelta(seconds=3600.0) + min_until_next_output = supplemental_precip.input_frequency - min_since_last_output + next_nbm_date = d_current + datetime.timedelta(seconds=min_until_next_output * 60) + supplemental_precip.fcst_date2 = next_nbm_date - supplemental_precip.pcp_date1 = prev_nbm_date - #supplemental_precip.pcp_date2 = next_mrms_date - supplemental_precip.pcp_date2 = prev_nbm_date + # Calculate the output forecast hours needed based on the prev/next dates. + dt_tmp = next_nbm_date - current_nbm_cycle + next_nbm_forecast_hour = int(dt_tmp.days*24.0) + int(dt_tmp.seconds/3600.0) + supplemental_precip.fcst_hour2 = next_nbm_forecast_hour + dt_tmp = prev_nbm_date - current_nbm_cycle + prev_nbm_forecast_hour = int(dt_tmp.days*24.0) + int(dt_tmp.seconds/3600.0) + supplemental_precip.fcst_hour1 = prev_nbm_forecast_hour + # If we are on the first NBM forecast hour (1), and we have calculated the previous forecast + # hour to be 0, simply set both hours to be 1. Hour 0 will not produce the fields we need, and + # no interpolation is required. + if prev_nbm_forecast_hour == 0: + prev_nbm_forecast_hour = 1 # Calculate expected file paths. if supplemental_precip.keyValue == 8: - tmp_file1 = supplemental_precip.inDir + "blend." 
+ \ - supplemental_precip.pcp_date1.strftime('%Y%m%d') + \ - "/" + supplemental_precip.pcp_date1.strftime('%H') + \ - "/qmd/blend.t" + supplemental_precip.pcp_date1.strftime('%H') + \ - "z.qmd.f" + format(int(supplemental_precip.pcp_date1.strftime('%H')), '03d') + ".co" \ + tmp_file1 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/qmd/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.qmd.f" + str(next_nbm_forecast_hour).zfill(3) + ".co" \ + supplemental_precip.file_ext - tmp_file2 = supplemental_precip.inDir + "blend." + \ - supplemental_precip.pcp_date2.strftime('%Y%m%d') + \ - "/" + supplemental_precip.pcp_date2.strftime('%H') + \ - "/qmd/blend.t" + supplemental_precip.pcp_date2.strftime('%H') + \ - "z.qmd.f" + format(int(supplemental_precip.pcp_date2.strftime('%H')), '03d') + ".co" \ + tmp_file2 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/qmd/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.qmd.f" + str(prev_nbm_forecast_hour).zfill(3) + ".co" \ + supplemental_precip.file_ext else: tmp_file1 = tmp_file2 = "" if mpi_config.rank == 0: - config_options.statusMsg = "Previous NBM supplemental file: " + tmp_file1 + config_options.statusMsg = "Prev NBM supplemental file: " + tmp_file2 err_handler.log_msg(config_options, mpi_config) - config_options.statusMsg = "Next NBM supplemental file: " + tmp_file2 + config_options.statusMsg = "Next NBM supplemental file: " + tmp_file1 err_handler.log_msg(config_options, mpi_config) err_handler.check_program_status(config_options, mpi_config) @@ -1710,6 +1729,8 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren config_options.statusMsg = "Expected input NBM file: " + supplemental_precip.file_in2 + \ " not found. " + "Will not use in final layering." 
err_handler.log_warning(config_options, mpi_config) + config_options.statusMsg = "You can use Util/pull_s3_grib_vars.py to Download NBM data from AWS-S3 archive." + err_handler.log_warning(config_options, mpi_config) err_handler.check_program_status(config_options, mpi_config) # If the file is missing, set the local slab of arrays to missing. From 319fc8bd095857a1dfa4a1c9ce2e235a3cec3f9f Mon Sep 17 00:00:00 2001 From: amazroo Date: Mon, 3 May 2021 08:13:58 -0600 Subject: [PATCH 12/19] Add Util/pull_s3_grib_vars.py for NBM data downloading --- Util/pull_s3_grib_vars.py | 136 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100755 Util/pull_s3_grib_vars.py diff --git a/Util/pull_s3_grib_vars.py b/Util/pull_s3_grib_vars.py new file mode 100755 index 0000000..9636a6a --- /dev/null +++ b/Util/pull_s3_grib_vars.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python + +import argparse +import sys +import re +from io import BytesIO +from io import TextIOWrapper + +import boto3 +from botocore import UNSIGNED +from botocore.client import Config + +""" +Script pulls grib2 data from s3 by variable + +Example Usage: ./pull_s3_grib_vars.py s3://noaa-nbm-grib2-pds/blend.20201001/06/qmd/blend.t06z.qmd.f012.ak.grib2 s3://noaa-nbm-grib2-pds/blend.20201001/06/qmd/blend.t06z.qmd.f012.ak.grib2.idx ./blend.t06z.qmd.f012.ak.grib2 "^.*APCP:surface:6-12 hour acc fcst:$" +""" + + +def file_exists_s3(bucket_name,key,verbose=False): + """ + Checks if a file exists in hopefully the most performant way possible. 
+ """ + #client = boto3.client('s3') + client = boto3.client('s3', config=Config(signature_version=UNSIGNED)) + response_json = client.list_objects_v2( + Bucket=bucket_name, + Prefix=key, + ) + if verbose: + print(response_json) + for o in response_json.get('Contents', []): + if o['Key'] == key: + return True + + return False + + +def parse_s3_url(url): + """ + Break a url into bucket and key + """ + from urllib.parse import urlparse + up = urlparse(url) + bucket = up.netloc + key = up.path.lstrip('/') + return bucket, key + + +def load_txt_stream_s3(bucket_name,key): + """ + Gets a buffered text stream of the data for bucket_name and key. + """ + #client = boto3.client('s3') + client = boto3.client('s3', config=Config(signature_version=UNSIGNED)) + obj = client.get_object(Bucket=bucket_name, Key=key) + bytestream = BytesIO(obj['Body'].read()) + txtstream = TextIOWrapper(bytestream) + return txtstream + + +def load_byte_stream_s3(bucket_name,key,byte_range=""): + """ + Gets a buffered byte stream of the data for bucket_name and key. 
+ """ + #client = boto3.client('s3') + client = boto3.client('s3', config=Config(signature_version=UNSIGNED)) + obj = client.get_object(Bucket=bucket_name, Key=key, Range=byte_range) + bytestream = BytesIO(obj['Body'].read()) + return bytestream + + +def pull_s3_grib_vars(grib_file,grib_index,out_file,var_regex_strs,debug): + var_regexs = [re.compile(var_regex) for var_regex in var_regex_strs] + grib_bucket,grib_key = parse_s3_url(grib_file) + grib_idx_bucket,grib_idx_key = parse_s3_url(grib_index) + + if not file_exists_s3(grib_bucket,grib_key): + print("Grib file not found") + return False + if not file_exists_s3(grib_idx_bucket,grib_idx_key): + print("Grib index file not found") + return False + + found_match = False + with load_txt_stream_s3(grib_idx_bucket,grib_idx_key) as idx_file: + line = idx_file.readline() + while line: + line = line.rstrip() + if debug: + print("Index line: %s:" % line) + for var_regex in var_regexs: + if var_regex.match(line): + found_match = True + print("Matched Index line: %s" % line) + line_num,beg_offset,ref_date,var_name,var_prime,hour_desc,level = line.split(":") + + pos = idx_file.tell() + next_line = idx_file.readline() + idx_file.seek(pos) + + if next_line: + next_line = next_line.rstrip() + _,end_offset,_,_,_,_,_ = next_line.split(":") + end_offset = str(int(end_offset)-1) + else: + end_offset = '' + + byte_range = "bytes=%s-%s" % (beg_offset,end_offset) + print("Downloading %s %s" % (grib_file, byte_range)) + file_part = load_byte_stream_s3(grib_bucket, grib_key, byte_range) + with open(out_file,"ab+") as of: + of.write(bytes(file_part.read())) + line = idx_file.readline() + + if not found_match: + print("Regex patterns not found in the index file") + + return found_match + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('grib_file',type=str,help='.grib2 file') + parser.add_argument('grib_index',type=str,help='.grib2.idx file') + parser.add_argument('out_file',type=str,help='.grib2 file') + 
parser.add_argument('var_regex_strs',nargs="+",type=str,help='var_regex1 [var_regex2 ...]') + parser.add_argument("--debug",action='store_true',help="Debug output flag") + + args = parser.parse_args() + success = pull_s3_grib_vars(args.grib_file,args.grib_index,args.out_file,args.var_regex_strs,args.debug) + sys.exit(not success) + + +if __name__ == '__main__': + main() From 37d58f75dbe296bee7580524ab9153a6a9240966 Mon Sep 17 00:00:00 2001 From: amazroo Date: Mon, 10 May 2021 07:48:06 -0600 Subject: [PATCH 13/19] Add .config file example for running NBM supplementary precip --- .../template_forcing_engine_NBM_test.config | 337 ++++++++++++++++++ 1 file changed, 337 insertions(+) create mode 100755 Template/template_forcing_engine_NBM_test.config diff --git a/Template/template_forcing_engine_NBM_test.config b/Template/template_forcing_engine_NBM_test.config new file mode 100755 index 0000000..262336b --- /dev/null +++ b/Template/template_forcing_engine_NBM_test.config @@ -0,0 +1,337 @@ +#-------------------------------------------------------------------- +# WRF-Hydro Forcing Engine Configuration File +# +# Input options to the forcing engine include: +# 1.) Choices for input forcing files to use. +# 2.) Options for specifying date ranges and forecast intervals +# for input files. +# 3.) Choices for ESMF regridding techniques. +# 4.) Choices for optional downscaling techniques. +# 5.) Choices for optional bias correction techniques. +# 6.) Choices for optional supplemental precipitation products. +# 7.) Choices for optional ensemble member variations. +# 8.) Choices for output directories to place final output files. + +[Input] +# Choose a set of value(s) of forcing variables to be processed for +# WRF-Hydro. Please be advised that the order of which the values are +# chosen below are the order that the final products will be layered +# into the final LDASIN files. See documentation for additional +# information and examples. 
+# The following is a global set of key values to map forcing files +# to variables within LDASIN files for WRF-Hydro. The forcing engine +# will map files to external variable names internally. For custom +# external native forcing files (see documenation), the code will +# expect a set of named variables to process. The following is a +# mapping of numeric values to external input native forcing files: +# 1 - NLDAS GRIB retrospective files +# 2 - NARR GRIB retrospective files +# 3 - GFS GRIB2 Global production files on the full gaussian grid +# 4 - NAM Nest GRIB2 Conus production files +# 5 - HRRR GRIB2 Conus production files +# 6 - RAP GRIB2 Conus 13km production files +# 7 - CFSv2 6-hourly GRIB2 Global production files +# 8 - WRF-ARW - GRIB2 Hawaii nest files +# 9 - GFS GRIB2 Global production files on 0.25 degree lat/lon grids. +# 10 - Custom NetCDF hourly forcing files +# 11 - Custom NetCDF hourly forcing files +# 12 - Custom NetCDF hourly forcing files +# 13 - Hawaii 3-km NAM Nest. +# 14 - Puerto Rico 3-km NAM Nest. +# 15 - Alaska 3-km Alaska Nest +# 16 - NAM_Nest_3km_Hawaii_Radiation-Only +# 17 - NAM_Nest_3km_PuertoRico_Radiation-Only +# 18 - WRF-ARW GRIB2 PuertoRico +InputForcings = [3] + +# Specify the file type for each forcing (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +InputForcingTypes = GRIB2 + +# Specify the input directories for each forcing product. +InputForcingDirectories = /glade/scratch/zhangyx/ForcingEngine/Forcing_Inputs/GFS_13km_global + +# Specify whether the input forcings listed above are mandatory, or optional. +# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +# NOTE!!! If not files are found for any products, code will error out indicating +# the final field is all missing values. +InputMandatory = [1] + +[Output] +# Specify the output frequency in minutes. 
+# Note that any frequencies at higher intervals
+# than what is provided as input will entail input
+# forcing data being temporally interpolated.
+OutputFrequency = 60
+
+# Specify a top level output directory. For re-forecasts
+# and forecasts, sub-directories for each forecast cycle
+# will be generated. For retrospective processing, final
+# output files will be placed in this directory.
+OutDir = /glade/scratch/mazrooei/FE/test_nbm/
+
+# Specify a scratch directory that will be used
+# for storage of temporary files. These files
+# will be removed automatically by the program.
+ScratchDir = /glade/scratch/mazrooei/FE/test_nbm/tmp/
+
+# Flag to activate scale_factor / add_offset byte packing in
+# the output files.
+# 0 - Deactivate compression
+# 1 - Activate compression
+compressOutput = 0
+
+# Flag to use floating point output vs scale_factor / add_offset byte packing in
+# the output files (the default)
+# 0 - Use scale/offset encoding
+# 1 - Use floating-point encoding
+floatOutput = 0
+
+[Retrospective]
+# Specify to process forcings in retrospective mode
+# 0 - No
+# 1 - Yes
+RetroFlag = 0
+
+# Choose the beginning date of processing forcing files.
+# NOTE - Dates are given in YYYYMMDDHHMM format
+# If in real-time forecasting mode, leave as -9999.
+# These dates get over-ridden in lookBackHours.
+BDateProc = 202103010000
+EDateProc = 202103030000
+
+[Forecast]
+# If this is AnA run, set AnAFlag to 1, otherwise 0.
+# Setting this flag will change the behavior of some Bias Correction routines as well
+# as the ForecastInputOffsets options (see below for more information)
+AnAFlag = 0
+
+# ONLY for realtime forecasting.
+# - Specify a lookback period in minutes to process data.
+# This overrides any BDateProc/EDateProc options passed above.
+# If no LookBack specified, please specify -9999.
+#LookBack = 1440
+LookBack = -9999
+
+# If running reforecasts, specify a window below. This will override
+# using the LookBack value to calculate a processing window. 
+RefcstBDateProc = 202103121200 +RefcstEDateProc = 202103150000 + +# Specify a forecast frequency in minutes. This value specifies how often +# to generate a set of forecast forcings. If generating hourly retrospective +# forcings, specify this value to be 60. +ForecastFrequency = 360 + +# Forecast cycles are determined by splitting up a day by equal +# ForecastFrequency interval. If there is a desire to shift the +# cycles to a different time step, ForecastShift will shift forecast +# cycles ahead by a determined set of minutes. For example, ForecastFrequency +# of 6 hours will produce forecasts cycles at 00, 06, 12, and 18 UTC. However, +# a ForecastShift of 1 hour will produce forecast cycles at 01, 07, +# 13, and 18 UTC. NOTE - This is only used by the realtime instance +# to calculate forecast cycles accordingly. Re-forecasts will use the beginning +# and ending dates specified in conjunction with the forecast frequency +# to determine forecast cycle dates. +ForecastShift = 0 + +# Specify how much (in minutes) of each input forcing is desires for each +# forecast cycle. See documentation for examples. The length of +# this array must match the input forcing choices. +ForecastInputHorizons = [14400] + +# This option is for applying an offset to input forcings to use a different +# forecasted interval. For example, a user may wish to use 4-5 hour forecasted +# fields from an NWP grid from one of their input forcings. In that instance +# the offset would be 4 hours, but 0 for other remaining forcings. +ForecastInputOffsets = [0] + +[Geospatial] +# Specify a geogrid file that defines the WRF-Hydro (or NWM) domain to which +# the forcings are being processed to. +GeogridIn = /glade/p/cisl/nwc/nwmv21_finals/CONUS/DOMAIN_FinalParams/geo_em.d01.conus_1km_NWMv2.1.nc + +# Specify the optional land spatial metadata file. If found, coordinate projection information +# and coordinate will be translated from to the final output file. 
+SpatialMetaIn = /glade/p/cisl/nwc/nwmv21_finals/CONUS/DOMAIN_FinalParams/GEOGRID_LDASOUT_Spatial_Metadata_1km_NWMv2.1.nc
+
+[Regridding]
+# Choose regridding options for each input forcing files being used. Options available are:
+# 1 - ESMF Bilinear
+# 2 - ESMF Nearest Neighbor
+# 3 - ESMF Conservative Bilinear
+RegridOpt = [1]
+
+[Interpolation]
+# Specify a temporal interpolation for the forcing variables.
+# Interpolation will be done between the two neighboring
+# input forcing states that exist. If only one nearest
+# state exists (I.E. only a state forward in time, or behind),
+# then that state will be used as a "nearest neighbor".
+# NOTE - All input options here must be of the same length
+# of the input forcing number. Also note all temporal interpolation
+# occurs BEFORE downscaling and bias correction.
+# 0 - No temporal interpolation. WARNING - Will result in states from
+# the nearest later forecast data point being used if output
+# timesteps are in-between two input forecast points.
+# 1 - Nearest temporal neighbor.
+# 2 - Weighted linear average between input points.
+forcingTemporalInterpolation = [0]
+
+[BiasCorrection]
+# Choose bias correction options for each of the input forcing files. Length of each option
+# must match the length of input forcings.
+
+# Specify a temperature bias correction method.
+# 0 - No bias correction.
+# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY
+# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION).
+# 3 - NCAR parametric GFS bias correction
+# 4 - NCAR parametric HRRR bias correction
+TemperatureBiasCorrection = [0]
+
+# Specify a surface pressure bias correction method.
+# 0 - No bias correction.
+# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY
+PressureBiasCorrection = [0]
+
+# Specify a specific humidity bias correction method.
+# 0 - No bias correction. 
+# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +HumidityBiasCorrection = [0] + +# Specify a wind bias correction. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +WindBiasCorrection = [0] + +# Specify a bias correction for incoming short wave radiation flux. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis (USE WITH CAUTION). +SwBiasCorrection = [0] + +# Specify a bias correction for incoming long wave radiation flux. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis, blanket adjustment (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +LwBiasCorrection = [0] + +# Specify a bias correction for precipitation. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PrecipBiasCorrection = [0] + +[Downscaling] +# Choose downscaling options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +# Specify a temperature downscaling method: +# 0 - No downscaling. +# 1 - Use a simple lapse rate of 6.75 degrees Celsius to get from the model elevation +# to the WRF-Hydro elevation. +# 2 - Use a pre-calculated lapse rate regridded to the WRF-Hydro domain. +TemperatureDownscaling = [2] + +# Specify a surface pressure downscaling method: +# 0 - No downscaling. +# 1 - Use input elevation and WRF-Hydro elevation to downscale +# surface pressure. +PressureDownscaling = [1] + +# Specify a shortwave radiation downscaling routine. 
+# 0 - No downscaling
+# 1 - Run a topographic adjustment using the WRF-Hydro elevation
+ShortwaveDownscaling = [1]
+
+# Specify a precipitation downscaling routine.
+# 0 - No downscaling
+# 1 - NWM mountain mapper downscaling using monthly PRISM climo.
+PrecipDownscaling = [0]
+
+# Specify a specific humidity downscaling routine.
+# 0 - No downscaling
+# 1 - Use regridded humidity, along with downscaled temperature/pressure
+# to extrapolate a downscaled surface specific humidity.
+HumidityDownscaling = [1]
+
+# Specify the input parameter directory containing necessary downscaling grids.
+DownscalingParamDirs = /glade/p/cisl/nwc/nwm_forcings/NWM_v21_Params/Medium_Range
+
+[SuppForcing]
+# Choose a set of supplemental precipitation file(s) to layer
+# into the final LDASIN forcing files processed from
+# the options above. The following is a mapping of
+# numeric values to external input native forcing files:
+# 1 - MRMS GRIB2 hourly radar-only QPE
+# 2 - MRMS GRIB2 hourly gage-corrected radar QPE
+# 3 - WRF-ARW 2.5 km 48-hr Hawaii nest precipitation.
+# 4 - WRF-ARW 2.5 km 48-hr Puerto Rico nest precipitation.
+# 5 - Hawaii MRMS GRIB2 hourly MultiSensor QPE
+SuppPcp = [8]
+
+# Specify the file type for each supplemental precipitation file (comma separated)
+# Valid types are GRIB1, GRIB2, and NETCDF
+# (GRIB files will be converted internally with WGRIB[2])
+SuppPcpForcingTypes = GRIB2
+
+# Specify the corresponding supplemental precipitation directories
+# that will be searched for input files.
+SuppPcpDirectories = /glade/scratch/mazrooei/DATA/NBM/noaa-nbm-grib2-pds_APCP
+
+# Specify regridding options for the supplemental precipitation products.
+RegridOptSuppPcp = [1]
+
+# Specify whether the Supplemental Precips listed above are mandatory, or optional.
+# This is important for layering contingencies if a product is missing,
+# but forcing files are still desired. 
+# 0 - Not mandatory +# 1 - Mandatory +SuppPcpMandatory = [1] + +# Specify the time interpretation methods for the supplemental precipitation +# products. +SuppPcpTemporalInterpolation = [0] + +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +SuppPcpInputOffsets = [0] + +# Optional RQI method for radar-based data. +# 0 - Do not use any RQI filtering. Use all radar-based estimates. +# 1 - Use hourly MRMS Radar Quality Index grids. +# 2 - Use NWM monthly climatology grids (NWM only!!!!) +RqiMethod = 0 + +# Optional RQI threshold to be used to mask out. Currently used for MRMS products. +# Please choose a value from 0.0-1.0. Associated radar quality index files will be expected +# from MRMS data. +RqiThreshold = 0.9 + +# Specify an optional directory that contains supplemental precipitation parameter fields, +# I.E monthly RQI climatology +SuppPcpParamDir = /glade/p/cisl/nwc/nwm_forcings/NWM_v21_Params/Medium_Range + +[Ensembles] +# Choose ensemble options for each input forcing file being used. Ensemble options include: +# FILL IN ENSEMBLE OPTIONS HERE..... +# Choose the CFS ensemble member number to process +cfsEnsNumber = + +[Custom] +# These are options for specifying custom input NetCDF forcing files (in minutes). +# Choose the input frequency of files that are being processed. I.E., are the +# input files every 15 minutes, 60 minutes, 3-hours, etc. Please specify the +# length of custom input frequencies to match the number of custom NetCDF inputs +# selected above in the Logistics section. 
+custom_input_fcst_freq = [] + From 27b4f83d699116ab86d8cfc83855180054b8c806 Mon Sep 17 00:00:00 2001 From: amazroo Date: Thu, 19 Aug 2021 14:35:50 -0600 Subject: [PATCH 14/19] Changing the NBM dataset from QMD to Core --- .gitignore | 1 + core/regrid.py | 5 ++++- core/suppPrecipMod.py | 2 +- core/time_handling.py | 28 ++++++++++++++++++---------- 4 files changed, 24 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index ec29afb..df0b5ee 100755 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ /.idea/ .DS_Store +core/__pycache__/ diff --git a/core/regrid.py b/core/regrid.py index 07f39b3..7e3e0a3 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -2393,7 +2393,10 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me # Convert the hourly precipitation total from kg.m-2.hour-1 to a rate of mm.s-1 try: ind_valid = np.where(supplemental_precip.regridded_precip2 != config_options.globalNdv) - supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 3600.0 + if supplemental_precip.input_frequency == 60.0: + supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 3600.0 + elif supplemental_precip.input_frequency == 360.0: + supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 21600.0 #uniform disaggregation for 6-hourly nbm data del ind_valid except (ValueError, ArithmeticError, AttributeError, KeyError) as npe: config_options.errMsg = "Unable to run NDV search on NBM supplemental precipitation: " + str(npe) diff --git a/core/suppPrecipMod.py b/core/suppPrecipMod.py index 431d2aa..e990832 100755 --- a/core/suppPrecipMod.py +++ b/core/suppPrecipMod.py @@ -84,7 +84,7 @@ def define_product(self): 5: "CONUS_MRMS_1HR_MultiSensor", 6: "Hawaii_MRMS_1HR_MultiSensor", 7: "MRMS_LiquidWaterFraction", - 8: "NBM_QMD_CONUS_APCP" + 8: "NBM_CORE_CONUS_APCP" } self.productName = 
product_names[self.keyValue] diff --git a/core/time_handling.py b/core/time_handling.py index 1d15a3f..bd9d92b 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1621,7 +1621,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_current, mpi_config): """ - Function to calculate the previous and next NBM/QMD/CONUS files. This + Function to calculate the previous and next NBM/CORE/CONUS files. This will also calculate the neighboring radar quality index (RQI) files as well. :param supplemental_precip: :param config_options: @@ -1629,6 +1629,11 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren :param mpi_config: :return: """ + nbm_out_freq = { + 36: 60, + 240: 360 + } + # First we need to find the nearest previous and next hour, which is # the previous/next NBM files we will be using. current_yr = d_current.year @@ -1645,9 +1650,12 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren dt_tmp = d_current - current_nbm_cycle current_nbm_hour = int(dt_tmp.days*24) + int(dt_tmp.seconds/3600.0) - # Set the input file frequency to be hourly. - supplemental_precip.input_frequency = 60.0 - + # Set the input file frequency to be hourly for f001-f036 and 6-hourly beyond f036. + if current_nbm_hour <= 36: + supplemental_precip.input_frequency = 60.0 + else: + supplemental_precip.input_frequency = 360.0 + # Calculate the previous file to process. min_since_last_output = (current_nbm_hour*60) % supplemental_precip.input_frequency if min_since_last_output == 0: @@ -1679,15 +1687,15 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren tmp_file1 = supplemental_precip.inDir + "/blend." 
+ \ current_nbm_cycle.strftime('%Y%m%d') + \ "/" + current_nbm_cycle.strftime('%H') + \ - "/qmd/blend.t" + current_nbm_cycle.strftime('%H') + \ - "z.qmd.f" + str(next_nbm_forecast_hour).zfill(3) + ".co" \ - + supplemental_precip.file_ext + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(next_nbm_forecast_hour).zfill(3) + ".co" \ + + supplemental_precip.file_ext + "_APCP" tmp_file2 = supplemental_precip.inDir + "/blend." + \ current_nbm_cycle.strftime('%Y%m%d') + \ "/" + current_nbm_cycle.strftime('%H') + \ - "/qmd/blend.t" + current_nbm_cycle.strftime('%H') + \ - "z.qmd.f" + str(prev_nbm_forecast_hour).zfill(3) + ".co" \ - + supplemental_precip.file_ext + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(prev_nbm_forecast_hour).zfill(3) + ".co" \ + + supplemental_precip.file_ext + "_APCP" else: tmp_file1 = tmp_file2 = "" From e38c08ef5f109766097b8de000f1a2b1324636e2 Mon Sep 17 00:00:00 2001 From: amazroo Date: Thu, 9 Sep 2021 14:33:51 -0600 Subject: [PATCH 15/19] reading the NBM raw product and extracting the correct precip field --- core/regrid.py | 11 +++++++++-- core/time_handling.py | 4 ++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/core/regrid.py b/core/regrid.py index 7e3e0a3..85a99a1 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -2329,8 +2329,15 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me config_options.errMsg = "Unable to remove file: " + nbm_tmp_nc err_handler.log_critical(config_options, mpi_config) - # Perform a GRIB dump to NetCDF for the MRMS precip and RQI data. - cmd1 = "$WGRIB2 " + supplemental_precip.file_in1 + " -netcdf " + nbm_tmp_nc + # Perform a GRIB dump to NetCDF for the precip data. 
+ fieldnbm_match1 = "\":APCP:\"" + fieldnbm_match2 = "\"" + str(supplemental_precip.fcst_hour1) + "-" + str(supplemental_precip.fcst_hour2) + "\"" + fieldnbm_notmatch1 = "\"prob\"" #We don't want the probabilistic QPF layers + cmd1 = "$WGRIB2 " + supplemental_precip.file_in1 + " -match " + fieldnbm_match1 \ + + " -match " + fieldnbm_match2 \ + + " -not " + fieldnbm_notmatch1 \ + + " -netcdf " + nbm_tmp_nc + print("HereHere:" , cmd1) id_tmp = ioMod.open_grib2(supplemental_precip.file_in1, nbm_tmp_nc, cmd1, config_options, mpi_config, supplemental_precip.netcdf_var_names[0]) err_handler.check_program_status(config_options, mpi_config) diff --git a/core/time_handling.py b/core/time_handling.py index bd9d92b..5036598 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1689,13 +1689,13 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren "/" + current_nbm_cycle.strftime('%H') + \ "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ "z.core.f" + str(next_nbm_forecast_hour).zfill(3) + ".co" \ - + supplemental_precip.file_ext + "_APCP" + + supplemental_precip.file_ext tmp_file2 = supplemental_precip.inDir + "/blend." 
+ \ current_nbm_cycle.strftime('%Y%m%d') + \ "/" + current_nbm_cycle.strftime('%H') + \ "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ "z.core.f" + str(prev_nbm_forecast_hour).zfill(3) + ".co" \ - + supplemental_precip.file_ext + "_APCP" + + supplemental_precip.file_ext else: tmp_file1 = tmp_file2 = "" From 8b73d1450d1ad9d890f7fb66e84b242395080c31 Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 24 Sep 2021 08:42:57 -0600 Subject: [PATCH 16/19] Add `SuppPcpMaxHours` to limit usage of supplemental precip --- core/config.py | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/core/config.py b/core/config.py index d50a65b..1e1288f 100755 --- a/core/config.py +++ b/core/config.py @@ -29,6 +29,7 @@ def __init__(self, config): self.supp_precip_param_dir = None self.input_force_mandatory = None self.supp_precip_mandatory = None + self.supp_pcp_max_hours = None self.number_inputs = None self.number_supp_pcp = None self.number_custom_inputs = 0 @@ -1008,8 +1009,8 @@ def read_config(self): # Check to make sure supplemental precip options make sense. Also read in the RQI threshold # if any radar products where chosen. for suppOpt in self.supp_precip_forcings: - if suppOpt < 0 or suppOpt > 8: - err_handler.err_out_screen('Please specify SuppForcing values between 1 and 8.') + if suppOpt < 0 or suppOpt > 9: + err_handler.err_out_screen('Please specify SuppForcing values between 1 and 9.') # Read in RQI threshold to apply to radar products. if suppOpt == 1 or suppOpt == 2: try: @@ -1122,6 +1123,28 @@ def read_config(self): err_handler.err_out_screen('Invalid SuppPcpTemporalInterpolation chosen in the configuration file. 
' 'Please choose a value of 0-2 for each corresponding input forcing') + + # Read in max time option + try: + self.supp_pcp_max_hours = json.loads(config['SuppForcing']['SuppPcpMaxHours']) + except (KeyError, configparser.NoOptionError): + self.supp_pcp_max_hours = None # if missing, don't care, just assume all time + + except json.decoder.JSONDecodeError: + err_handler.err_out_screen('Improper SuppPcpMaxHours options specified in the ' + 'configuration file.') + + if type(self.supp_pcp_max_hours) is list: + if len(self.supp_pcp_max_hours) != self.number_supp_pcp: + err_handler.err_out_screen('Number of SuppPcpMaxHours ({}) must match the number ' + 'of SuppPcp inputs ({}) in the configuration file, or ' + 'supply a single threshold for all inputs'.format( + len(self.supp_pcp_max_hours), self.number_supp_pcp)) + elif type(self.supp_pcp_max_hours) is float: + # Support 'classic' mode of single threshold + self.supp_pcp_max_hours = [self.supp_pcp_max_hours] * self.number_supp_pcp + + # Read in the SuppPcpInputOffsets options. 
try: self.supp_input_offsets = json.loads(config['SuppForcing']['SuppPcpInputOffsets']) @@ -1194,3 +1217,12 @@ def read_config(self): f'This number ({len(self.customFcstFreq)}) must ' f'match the frequency of custom input forcings selected ' f'({self.number_custom_inputs}).') + + + @property + def use_data_at_current_time(self): + if self.supp_pcp_max_hours is not None: + hrs_since_start = self.current_output_date - self.current_fcst_cycle + return hrs_since_start <= datetime.timedelta(hours = self.supp_pcp_max_hours) + else: + return True From b35a15948102b35499c68b5d32f7a5aecc7b3815 Mon Sep 17 00:00:00 2001 From: amazroo Date: Fri, 24 Sep 2021 08:53:59 -0600 Subject: [PATCH 17/19] Add NBM Supp Precip for Alaska --- core/regrid.py | 10 ++++++++-- core/suppPrecipMod.py | 22 +++++++++++++++------- core/time_handling.py | 22 +++++++++++++++++++++- 3 files changed, 44 insertions(+), 10 deletions(-) diff --git a/core/regrid.py b/core/regrid.py index 85a99a1..202186b 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -2306,6 +2306,13 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me :param mpi_config: :return: """ + # Do we want to use NBM data at this timestep? If not, log and continue + if not config_options.use_data_at_current_time: + if mpi_config.rank == 0: + config_options.statusMsg = "Exceeded max hours for NBM precipitation, will not use NBM in final layering." + err_handler.log_msg(config_options, mpi_config) + return + # If the expected file is missing, this means we are allowing missing files, simply # exit out of this routine as the regridded fields have already been set to NDV. 
if not os.path.exists(supplemental_precip.file_in1): @@ -2317,7 +2324,6 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me if supplemental_precip.regridComplete: return - nbm_tmp_nc = config_options.scratch_dir + "/NBM_PCP_TMP-{}.nc".format(mkfilename()) if os.path.isfile(nbm_tmp_nc): config_options.statusMsg = "Found old temporary file: " + \ @@ -2329,6 +2335,7 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me config_options.errMsg = "Unable to remove file: " + nbm_tmp_nc err_handler.log_critical(config_options, mpi_config) + # Perform a GRIB dump to NetCDF for the precip data. fieldnbm_match1 = "\":APCP:\"" fieldnbm_match2 = "\"" + str(supplemental_precip.fcst_hour1) + "-" + str(supplemental_precip.fcst_hour2) + "\"" @@ -2337,7 +2344,6 @@ def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_me + " -match " + fieldnbm_match2 \ + " -not " + fieldnbm_notmatch1 \ + " -netcdf " + nbm_tmp_nc - print("HereHere:" , cmd1) id_tmp = ioMod.open_grib2(supplemental_precip.file_in1, nbm_tmp_nc, cmd1, config_options, mpi_config, supplemental_precip.netcdf_var_names[0]) err_handler.check_program_status(config_options, mpi_config) diff --git a/core/suppPrecipMod.py b/core/suppPrecipMod.py index e990832..c1c4478 100755 --- a/core/suppPrecipMod.py +++ b/core/suppPrecipMod.py @@ -84,7 +84,8 @@ def define_product(self): 5: "CONUS_MRMS_1HR_MultiSensor", 6: "Hawaii_MRMS_1HR_MultiSensor", 7: "MRMS_LiquidWaterFraction", - 8: "NBM_CORE_CONUS_APCP" + 8: "NBM_CORE_CONUS_APCP", + 9: "NBM_CORE_ALASKA_APCP" } self.productName = product_names[self.keyValue] @@ -112,7 +113,8 @@ def define_product(self): 5: None, 6: None, 7: None, - 8: None + 8: None, + 9: None } self.grib_vars = grib_vars_in[self.keyValue] @@ -124,7 +126,8 @@ def define_product(self): 5: ['BLAH'], 6: ['BLAH'], 7: ['BLAH'], - 8: ['BLAH'] + 8: ['BLAH'], + 9: ['BLAH'] } self.grib_levels = grib_levels_in[self.keyValue] @@ -136,7 +139,8 @@ def 
define_product(self): 5: ['MultiSensorQPE01H_0mabovemeansealevel'], 6: ['MultiSensorQPE01H_0mabovemeansealevel'], 7: ['sbcv2_lwf'], - 8: ['APCP_surface'] + 8: ['APCP_surface'], + 9: ['APCP_surface'] } self.netcdf_var_names = netcdf_variables[self.keyValue] @@ -148,7 +152,8 @@ def define_product(self): 5: None, 6: None, 7: None, - 8: None + 8: None, + 9: None } self.rqi_netcdf_var_names = netcdf_rqi_variables[self.keyValue] @@ -160,7 +165,8 @@ def define_product(self): 5: 3, 6: 3, 7: 8, # LQFRAC - 8: 3 + 8: 3, + 9: 3 } self.output_var_idx = output_variables[self.keyValue] @@ -183,7 +189,8 @@ def calc_neighbor_files(self,ConfigOptions,dCurrent,MpiConfig): 5: time_handling.find_hourly_mrms_radar_neighbors, 6: time_handling.find_hourly_mrms_radar_neighbors, 7: time_handling.find_sbcv2_lwf_neighbors, - 8: time_handling.find_hourly_nbm_apcp_neighbors + 8: time_handling.find_hourly_nbm_apcp_neighbors, + 9: time_handling.find_hourly_nbm_apcp_neighbors } find_neighbor_files[self.keyValue](self, ConfigOptions, dCurrent, MpiConfig) @@ -217,6 +224,7 @@ def regrid_inputs(self,ConfigOptions,wrfHyroGeoMeta,MpiConfig): 6: regrid.regrid_mrms_hourly, 7: regrid.regrid_sbcv2_liquid_water_fraction, 8: regrid.regrid_hourly_nbm_apcp, + 9: regrid.regrid_hourly_nbm_apcp } regrid_inputs[self.keyValue](self,ConfigOptions,wrfHyroGeoMeta,MpiConfig) #try: diff --git a/core/time_handling.py b/core/time_handling.py index 5036598..fc7b514 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -1696,6 +1696,19 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ "z.core.f" + str(prev_nbm_forecast_hour).zfill(3) + ".co" \ + supplemental_precip.file_ext + elif supplemental_precip.keyValue == 9: + tmp_file1 = supplemental_precip.inDir + "/blend." 
+ \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(next_nbm_forecast_hour).zfill(3) + ".ak" \ + + supplemental_precip.file_ext + tmp_file2 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(prev_nbm_forecast_hour).zfill(3) + ".ak" \ + + supplemental_precip.file_ext else: tmp_file1 = tmp_file2 = "" @@ -1724,7 +1737,7 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren # Ensure we have the necessary new file if mpi_config.rank == 0: - if not os.path.isfile(supplemental_precip.file_in2) and (supplemental_precip.keyValue == 8 ): + if not os.path.isfile(supplemental_precip.file_in2) and ((supplemental_precip.keyValue == 8) or (supplemental_precip.keyValue == 9)): config_options.statusMsg = "NBM file {} not found, will attempt to use {} instead.".format( supplemental_precip.file_in2, supplemental_precip.file_in1) err_handler.log_warning(config_options, mpi_config) @@ -1745,3 +1758,10 @@ def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_curren if not os.path.isfile(supplemental_precip.file_in2): if supplemental_precip.regridded_precip2 is not None: supplemental_precip.regridded_precip2[:, :] = config_options.globalNdv + + # Do we want to use NBM data at this timestep? If not, set the local slab of arrays to missing. 
+ if not config_options.use_data_at_current_time: + if supplemental_precip.regridded_precip2 is not None: + supplemental_precip.regridded_precip2[:, :] = config_options.globalNdv + if supplemental_precip.regridded_precip1 is not None: + supplemental_precip.regridded_precip1[:, :] = config_options.globalNdv \ No newline at end of file From bb721dfe464bc01de5f9903c092eeec37317a4ab Mon Sep 17 00:00:00 2001 From: amazroo Date: Wed, 29 Sep 2021 10:01:02 -0600 Subject: [PATCH 18/19] --- Util/pull_s3_grib_vars.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Util/pull_s3_grib_vars.py b/Util/pull_s3_grib_vars.py index 9636a6a..06359eb 100755 --- a/Util/pull_s3_grib_vars.py +++ b/Util/pull_s3_grib_vars.py @@ -1,4 +1,6 @@ #!/usr/bin/env python +# Developed by Bill Petzke at RAL NCAR +# 04/05/2021 import argparse import sys From 97377e23bf2c5b3ab944a3fb37a75829f8895714 Mon Sep 17 00:00:00 2001 From: Ryan Cabell Date: Tue, 22 Mar 2022 14:49:38 -0600 Subject: [PATCH 19/19] NWM Alaska Short-Range support (#74) * Map Alaska Short-Range forecast cycles to HRRR forecast hours/cycles * Consolidate duplicate handling of supp_precip fields --- core/config.py | 28 +++++++ core/regrid.py | 140 ++++++++++++++++++++++++++++++++- core/suppPrecipMod.py | 37 ++++----- core/time_handling.py | 178 +++++++++++++++++++++++++++++++++++++++--- 4 files changed, 349 insertions(+), 34 deletions(-) diff --git a/core/config.py b/core/config.py index 627eb00..79179f6 100755 --- a/core/config.py +++ b/core/config.py @@ -1159,6 +1159,26 @@ def read_config(self): err_handler.err_out_screen('Invalid SuppPcpTemporalInterpolation chosen in the configuration file. 
' 'Please choose a value of 0-2 for each corresponding input forcing')

+        # Read in max time option
+        try:
+            self.supp_pcp_max_hours = json.loads(config['SuppForcing']['SuppPcpMaxHours'])
+        except (KeyError, configparser.NoOptionError):
+            self.supp_pcp_max_hours = None  # if missing, don't care, just assume all time
+
+        except json.decoder.JSONDecodeError:
+            err_handler.err_out_screen('Improper SuppPcpMaxHours options specified in the '
+                                       'configuration file.')
+
+        if type(self.supp_pcp_max_hours) is list:
+            if len(self.supp_pcp_max_hours) != self.number_supp_pcp:
+                err_handler.err_out_screen('Number of SuppPcpMaxHours ({}) must match the number '
+                                           'of SuppPcp inputs ({}) in the configuration file, or '
+                                           'supply a single threshold for all inputs'.format(
+                                               len(self.supp_pcp_max_hours), self.number_supp_pcp))
+        elif type(self.supp_pcp_max_hours) in (int, float):
+            # Support 'classic' mode of single threshold (JSON scalars may parse as int OR float)
+            self.supp_pcp_max_hours = [self.supp_pcp_max_hours] * self.number_supp_pcp
+
         # Read in the SuppPcpInputOffsets options.
try:
            self.supp_input_offsets = json.loads(config['SuppForcing']['SuppPcpInputOffsets'])
@@ -1231,3 +1251,11 @@ def read_config(self):
                                        f'This number ({len(self.customFcstFreq)}) must '
                                        f'match the frequency of custom input forcings selected '
                                        f'({self.number_custom_inputs}).')
+
+    @property
+    def use_data_at_current_time(self):
+        if self.supp_pcp_max_hours is not None:
+            hrs_since_start = self.current_output_date - self.current_fcst_cycle
+            return hrs_since_start <= datetime.timedelta(hours=max(self.supp_pcp_max_hours))  # list-valued; compare against largest threshold
+        else:
+            return True
diff --git a/core/regrid.py b/core/regrid.py
index b7d74b5..5b7e47b 100755
--- a/core/regrid.py
+++ b/core/regrid.py
@@ -332,7 +332,7 @@ def regrid_conus_hrrr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_co
         return
 
     # Create a path for a temporary NetCDF file
-    input_forcings.tmpFile = config_options.scratch_dir + "/" + "HRRR_CONUS_TMP-{}.nc".format(mkfilename())
+    input_forcings.tmpFile = config_options.scratch_dir + "/" + "HRRR_TMP-{}.nc".format(mkfilename())
 
     if input_forcings.fileType != NETCDF:
         # This file shouldn't exist....
but if it does (previously failed @@ -351,7 +351,7 @@ def regrid_conus_hrrr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_co fields = [] for force_count, grib_var in enumerate(input_forcings.grib_vars): if mpi_config.rank == 0: - config_options.statusMsg = "Converting CONUS HRRR Variable: " + grib_var + config_options.statusMsg = "Converting HRRR Variable: " + grib_var err_handler.log_msg(config_options, mpi_config) time_str = "{}-{} hour acc fcst".format(input_forcings.fcst_hour1, input_forcings.fcst_hour2) \ if grib_var == 'APCP' else str(input_forcings.fcst_hour2) + " hour fcst" @@ -372,7 +372,7 @@ def regrid_conus_hrrr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_co for force_count, grib_var in enumerate(input_forcings.grib_vars): if mpi_config.rank == 0: - config_options.statusMsg = "Processing Conus HRRR Variable: " + grib_var + config_options.statusMsg = "Processing HRRR Variable: " + grib_var err_handler.log_msg(config_options, mpi_config) calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings, @@ -2564,6 +2564,140 @@ def regrid_sbcv2_liquid_water_fraction(supplemental_forcings, config_options, wr err_handler.check_program_status(config_options, mpi_config) +def regrid_hourly_nbm_apcp(supplemental_precip, config_options, wrf_hydro_geo_meta, mpi_config): + """ + Function for handling regridding hourly forecasted NBM precipitation. + :param supplemental_precip: + :param config_options: + :param wrf_hydro_geo_meta: + :param mpi_config: + :return: + """ + # If the expected file is missing, this means we are allowing missing files, simply + # exit out of this routine as the regridded fields have already been set to NDV. + if not os.path.exists(supplemental_precip.file_in1): + return + + # Check to see if the regrid complete flag for this + # output time step is true. This entails the necessary + # inputs have already been regridded and we can move on. 
+ if supplemental_precip.regridComplete: + return + + + nbm_tmp_nc = config_options.scratch_dir + "/NBM_PCP_TMP-{}.nc".format(mkfilename()) + if os.path.isfile(nbm_tmp_nc): + config_options.statusMsg = "Found old temporary file: " + \ + nbm_tmp_nc + " - Removing....." + err_handler.log_warning(config_options, mpi_config) + try: + os.remove(nbm_tmp_nc) + except OSError: + config_options.errMsg = "Unable to remove file: " + nbm_tmp_nc + err_handler.log_critical(config_options, mpi_config) + + # Perform a GRIB dump to NetCDF for the precip data. + fieldnbm_match1 = "\":APCP:\"" + fieldnbm_match2 = "\"" + str(supplemental_precip.fcst_hour1) + "-" + str(supplemental_precip.fcst_hour2) + "\"" + fieldnbm_notmatch1 = "\"prob\"" #We don't want the probabilistic QPF layers + cmd1 = "$WGRIB2 " + supplemental_precip.file_in1 + " -match " + fieldnbm_match1 \ + + " -match " + fieldnbm_match2 \ + + " -not " + fieldnbm_notmatch1 \ + + " -netcdf " + nbm_tmp_nc + id_tmp = ioMod.open_grib2(supplemental_precip.file_in1, nbm_tmp_nc, cmd1, config_options, + mpi_config, supplemental_precip.netcdf_var_names[0]) + err_handler.check_program_status(config_options, mpi_config) + + # Check to see if we need to calculate regridding weights. + calc_regrid_flag = check_supp_pcp_regrid_status(id_tmp, supplemental_precip, config_options, + wrf_hydro_geo_meta, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if calc_regrid_flag: + if mpi_config.rank == 0: + config_options.statusMsg = "Calculating NBM regridding weights." + err_handler.log_msg(config_options, mpi_config) + calculate_supp_pcp_weights(supplemental_precip, id_tmp, supplemental_precip.file_in1, config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the input variables. + var_tmp = None + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding NBM APCP Precipitation." 
+ err_handler.log_msg(config_options, mpi_config) + try: + var_tmp = id_tmp.variables['APCP_surface'][0, :, :] + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract precipitation from NBM file: " + \ + supplemental_precip.file_in1 + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + supplemental_precip.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place NBM precipitation into local ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in, + supplemental_precip.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid NBM supplemental precipitation: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. 
+ try: + supplemental_precip.esmf_field_out.data[np.where(supplemental_precip.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to run mask search on NBM supplemental precipitation: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + supplemental_precip.regridded_precip2[:, :] = supplemental_precip.esmf_field_out.data + err_handler.check_program_status(config_options, mpi_config) + + # Convert the hourly precipitation total from kg.m-2.hour-1 to a rate of mm.s-1 + try: + ind_valid = np.where(supplemental_precip.regridded_precip2 != config_options.globalNdv) + if supplemental_precip.input_frequency == 60.0: + supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 3600.0 + elif supplemental_precip.input_frequency == 360.0: + supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 21600.0 #uniform disaggregation for 6-hourly nbm data + del ind_valid + except (ValueError, ArithmeticError, AttributeError, KeyError) as npe: + config_options.errMsg = "Unable to run NDV search on NBM supplemental precipitation: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If we are on the first timestep, set the previous regridded field to be + # the latest as there are no states for time 0. + if config_options.current_output_step == 1: + supplemental_precip.regridded_precip1[:, :] = \ + supplemental_precip.regridded_precip2[:, :] + err_handler.check_program_status(config_options, mpi_config) + + # Close the temporary NetCDF file and remove it. 
+ if mpi_config.rank == 0: + try: + id_tmp.close() + except OSError: + config_options.errMsg = "Unable to close NetCDF file: " + supplemental_precip.file_in1 + err_handler.log_critical(config_options, mpi_config) + try: + os.remove(nbm_tmp_nc) + except OSError: + config_options.errMsg = "Unable to remove temporary NBM NetCDF file: " + nbm_tmp_nc + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + def check_regrid_status(id_tmp, force_count, input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): """ Function for checking to see if regridding weights need to be diff --git a/core/suppPrecipMod.py b/core/suppPrecipMod.py index bd2cc61..40cf9e2 100755 --- a/core/suppPrecipMod.py +++ b/core/suppPrecipMod.py @@ -86,8 +86,8 @@ def define_product(self): 5: "CONUS_MRMS_1HR_MultiSensor", 6: "Hawaii_MRMS_1HR_MultiSensor", 7: "MRMS_LiquidWaterFraction", - 8: "Amir1", #Amir merge - 9: "Amir2", #Amir merge + 8: "NBM_CORE_CONUS_APCP", + 9: "NBM_CORE_ALASKA_APCP", 10: "AK_MRMS", 11: "AK_Stage_IV_Precip-MRMS" } @@ -117,8 +117,8 @@ def define_product(self): 5: None, 6: None, 7: None, - 8: None, #Amir merge - 9: None, #Amir merge + 8: None, + 9: None, 10: None, 11: None } @@ -132,8 +132,8 @@ def define_product(self): 5: ['BLAH'], 6: ['BLAH'], 7: ['BLAH'], - 8: ['BLAH'], #Amir merge - 9: ['BLAH'], #Amir merge + 8: ['BLAH'], + 9: ['BLAH'], 10: ['BLAH'], 11: ['BLAH'] } @@ -147,8 +147,8 @@ def define_product(self): 5: ['MultiSensorQPE01H_0mabovemeansealevel'], 6: ['MultiSensorQPE01H_0mabovemeansealevel'], 7: ['sbcv2_lwf'], - 8: ['AmirVar'], #Amir merge - 9: ['AmirVar'], #Amir merge + 8: ['APCP_surface'], + 9: ['APCP_surface'], 10: ['MultiSensorQPE01H_0mabovemeansealevel'], 11: [] #Set dynamically since we have have Stage IV and MRMS } @@ -162,8 +162,8 @@ def define_product(self): 5: None, 6: None, 7: None, - 8: None, #Amir merge - 9: None, #Amir merge + 8: None, + 9: None, 10: None, 11: None } @@ -177,8 +177,8 
@@ def define_product(self): 5: 3, 6: 3, 7: 8, # LQFRAC - 8: 3, #Amir Merge - 9: 3, #Amir Merge + 8: 3, + 9: 3, 10: 3, 11: 3 } @@ -203,8 +203,8 @@ def calc_neighbor_files(self,ConfigOptions,dCurrent,MpiConfig): 5: time_handling.find_hourly_mrms_radar_neighbors, 6: time_handling.find_hourly_mrms_radar_neighbors, 7: time_handling.find_sbcv2_lwf_neighbors, - 8: time_handling.find_hourly_mrms_radar_neighbors, #Amir merge - 9: time_handling.find_hourly_mrms_radar_neighbors, #Amir merge + 8: time_handling.find_hourly_nbm_apcp_neighbors, + 9: time_handling.find_hourly_nbm_apcp_neighbors, 10: time_handling.find_hourly_mrms_radar_neighbors, 11: time_handling.find_ak_ext_ana_precip_neighbors } @@ -239,8 +239,8 @@ def regrid_inputs(self,ConfigOptions,wrfHyroGeoMeta,MpiConfig): 5: regrid.regrid_mrms_hourly, 6: regrid.regrid_mrms_hourly, 7: regrid.regrid_sbcv2_liquid_water_fraction, - 8: regrid.regrid_mrms_hourly, #Amir merge - 9: regrid.regrid_mrms_hourly, #Amir merge + 8: regrid.regrid_hourly_nbm_apcp, + 9: regrid.regrid_hourly_nbm_apcp, 10: regrid.regrid_mrms_hourly, 11: regrid.regrid_ak_ext_ana_pcp } @@ -309,8 +309,9 @@ def initDict(ConfigOptions,GeoMetaWrfHydro): InputDict[supp_pcp_key].userCycleOffset = ConfigOptions.supp_input_offsets[supp_pcp_tmp] - InputDict[supp_pcp_key].rqiMethod = ConfigOptions.rqiMethod[supp_pcp_tmp] - InputDict[supp_pcp_key].rqiThresh = ConfigOptions.rqiThresh[supp_pcp_tmp] + if ConfigOptions.rqiMethod is not None: + InputDict[supp_pcp_key].rqiMethod = ConfigOptions.rqiMethod[supp_pcp_tmp] + InputDict[supp_pcp_key].rqiThresh = ConfigOptions.rqiThresh[supp_pcp_tmp] return InputDict diff --git a/core/time_handling.py b/core/time_handling.py index 56b0c86..37878c3 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -47,9 +47,9 @@ def calculate_lookback_window(config_options): dt_tmp = d_current_utc - config_options.b_date_proc n_fcst_steps = math.floor((dt_tmp.days*1440+dt_tmp.seconds/60.0) / config_options.fcst_freq) #Special case for 
HRRR AK when we don't need/want more than one forecast cycle - #if 19 in config_options.input_forcings: - # n_fcst_steps = 0 - + if 19 in config_options.input_forcings: + n_fcst_steps = 0 + config_options.nFcsts = int(n_fcst_steps) + 1 config_options.e_date_proc = config_options.b_date_proc + datetime.timedelta( seconds=n_fcst_steps * config_options.fcst_freq * 60) @@ -575,27 +575,38 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config current_hrrr_cycle -= datetime.timedelta(hours=6) else: current_hrrr_cycle -= datetime.timedelta(hours=3) + + # Calculate the current forecast hour within this HRRR cycle. + dt_tmp = d_current - current_hrrr_cycle + current_hrrr_hour = int(dt_tmp.days * 24) + int(dt_tmp.seconds / 3600.0) else: current_hrrr_cycle = config_options.current_fcst_cycle - \ datetime.timedelta(seconds=input_forcings.userCycleOffset * 60.0) + + # Map the native forecast hour to the shifted HRRR cycles + hrrr_cycle = (current_hrrr_cycle.hour // 3 * 3) - 3 + if hrrr_cycle < 0: + hrrr_cycle += 24 + + # throw out the first 3 hours of the cycle + current_hrrr_hour = (current_hrrr_cycle.hour % 3) + 3 - if current_hrrr_cycle.hour % 6 != 0: - hrrr_horizon = default_horizon + if current_hrrr_cycle.hour % 6 == 0: + hrrr_horizon = 48 else: - hrrr_horizon = six_hr_horizon + hrrr_horizon = 18 + + # print(f"HRRR cycle is {current_hrrr_cycle}, hour is {current_hrrr_hour}") # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. 
- if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > hrrr_horizon: - config_options.errMsg = "User has specified a HRRR conus forecast horizon " + \ - "that is greater than the maximum allowed hours of: " + str(hrrr_horizon) + spec_horizon = (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 + if spec_horizon > hrrr_horizon: + config_options.errMsg = f"User has specified a HRRR Alaska forecast horizon {spec_horizon} " + \ + f"that is greater than the maximum allowed hours of: {hrrr_horizon}" err_handler.log_critical(config_options, mpi_config) err_handler.check_program_status(config_options, mpi_config) - # Calculate the current forecast hour within this HRRR cycle. - dt_tmp = d_current - current_hrrr_cycle - current_hrrr_hour = int(dt_tmp.days*24) + int(dt_tmp.seconds/3600.0) - # Calculate the previous file to process. min_since_last_output = (current_hrrr_hour * 60) % 60 if min_since_last_output == 0: @@ -2156,3 +2167,144 @@ def find_ak_ext_ana_precip_neighbors(supplemental_precip, config_options, d_curr else: #print(f"Using StageIV hour {d_current}") _find_ak_ext_ana_precip_stage4(supplemental_precip, config_options, d_current, mpi_config) + + +def find_hourly_nbm_apcp_neighbors(supplemental_precip, config_options, d_current, mpi_config): + """ + Function to calculate the previous and next NBM/CORE/CONUS files. This + will also calculate the neighboring radar quality index (RQI) files as well. + :param supplemental_precip: + :param config_options: + :param d_current: + :param mpi_config: + :return: + """ + nbm_out_freq = { + 36: 60, + 240: 360 + } + + # First we need to find the nearest previous and next hour, which is + # the previous/next NBM files we will be using. + current_yr = d_current.year + current_mo = d_current.month + current_day = d_current.day + current_hr = d_current.hour + current_min = d_current.minute + + # First find the current NBM forecast cycle that we are using. 
+    current_nbm_cycle = config_options.current_fcst_cycle - \
+        datetime.timedelta(seconds=supplemental_precip.userCycleOffset * 60.0)
+
+    # Calculate the current forecast hour within this NBM cycle.
+    dt_tmp = d_current - current_nbm_cycle
+    current_nbm_hour = int(dt_tmp.days*24) + int(dt_tmp.seconds/3600.0)
+
+    # Set the input file frequency to be hourly for f001-f036 and 6-hourly beyond f036.
+    if current_nbm_hour <= 36:
+        supplemental_precip.input_frequency = 60.0
+    else:
+        supplemental_precip.input_frequency = 360.0
+
+    # Calculate the previous file to process.
+    min_since_last_output = (current_nbm_hour*60) % supplemental_precip.input_frequency
+    if min_since_last_output == 0:
+        min_since_last_output = supplemental_precip.input_frequency
+    prev_nbm_date = d_current - datetime.timedelta(seconds=min_since_last_output*60)
+    supplemental_precip.fcst_date1 = prev_nbm_date
+    if min_since_last_output == supplemental_precip.input_frequency:
+        min_until_next_output = 0
+    else:
+        min_until_next_output = supplemental_precip.input_frequency - min_since_last_output
+    next_nbm_date = d_current + datetime.timedelta(seconds=min_until_next_output * 60)
+    supplemental_precip.fcst_date2 = next_nbm_date
+
+    # Calculate the output forecast hours needed based on the prev/next dates.
+    dt_tmp = next_nbm_date - current_nbm_cycle
+    next_nbm_forecast_hour = int(dt_tmp.days*24.0) + int(dt_tmp.seconds/3600.0)
+    supplemental_precip.fcst_hour2 = next_nbm_forecast_hour
+    dt_tmp = prev_nbm_date - current_nbm_cycle
+    prev_nbm_forecast_hour = int(dt_tmp.days*24.0) + int(dt_tmp.seconds/3600.0)
+    # If we are on the first NBM forecast hour (1), and we have calculated the previous forecast
+    # hour to be 0, simply set both hours to be 1. Hour 0 will not produce the fields we need, and
+    # no interpolation is required. (Assign fcst_hour1 AFTER this bump so it matches the file used.)
+    if prev_nbm_forecast_hour == 0:
+        prev_nbm_forecast_hour = 1
+    supplemental_precip.fcst_hour1 = prev_nbm_forecast_hour
+
+    # Calculate expected file paths.
+ if supplemental_precip.keyValue == 8: + tmp_file1 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(next_nbm_forecast_hour).zfill(3) + ".co" \ + + supplemental_precip.file_ext + tmp_file2 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(prev_nbm_forecast_hour).zfill(3) + ".co" \ + + supplemental_precip.file_ext + elif supplemental_precip.keyValue == 9: + tmp_file1 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(next_nbm_forecast_hour).zfill(3) + ".ak" \ + + supplemental_precip.file_ext + tmp_file2 = supplemental_precip.inDir + "/blend." + \ + current_nbm_cycle.strftime('%Y%m%d') + \ + "/" + current_nbm_cycle.strftime('%H') + \ + "/core/blend.t" + current_nbm_cycle.strftime('%H') + \ + "z.core.f" + str(prev_nbm_forecast_hour).zfill(3) + ".ak" \ + + supplemental_precip.file_ext + else: + tmp_file1 = tmp_file2 = "" + + if mpi_config.rank == 0: + config_options.statusMsg = "Prev NBM supplemental file: " + tmp_file2 + err_handler.log_msg(config_options, mpi_config) + config_options.statusMsg = "Next NBM supplemental file: " + tmp_file1 + err_handler.log_msg(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Check to see if files are already set. If not, then reset, grids and + # regridding objects to communicate things need to be re-established. 
+    if supplemental_precip.file_in1 != tmp_file1 or supplemental_precip.file_in2 != tmp_file2:
+        supplemental_precip.regridded_precip1 = supplemental_precip.regridded_precip1  # NOTE(review): self-assignment is a no-op — confirm whether a reset (e.g. None) was intended
+        supplemental_precip.regridded_precip2 = supplemental_precip.regridded_precip2  # NOTE(review): self-assignment is a no-op — confirm whether a reset (e.g. None) was intended
+        supplemental_precip.file_in1 = tmp_file1
+        supplemental_precip.file_in2 = tmp_file2
+        supplemental_precip.regridComplete = False
+
+    # Ensure we have the necessary new file
+    if mpi_config.rank == 0:
+        if not os.path.isfile(supplemental_precip.file_in2) and ((supplemental_precip.keyValue == 8) or (supplemental_precip.keyValue == 9)):
+            config_options.statusMsg = "NBM file {} not found, will attempt to use {} instead.".format(
+                supplemental_precip.file_in2, supplemental_precip.file_in1)
+            err_handler.log_warning(config_options, mpi_config)
+            supplemental_precip.file_in2 = supplemental_precip.file_in1
+        if not os.path.isfile(supplemental_precip.file_in2):
+            if supplemental_precip.enforce == 1:
+                config_options.errMsg = "Expected input NBM file: " + supplemental_precip.file_in2 + " not found."
+                err_handler.log_critical(config_options, mpi_config)
+            else:
+                config_options.statusMsg = "Expected input NBM file: " + supplemental_precip.file_in2 + \
+                                           " not found. " + "Will not use in final layering."
+                err_handler.log_warning(config_options, mpi_config)
+                config_options.statusMsg = "You can use Util/pull_s3_grib_vars.py to Download NBM data from AWS-S3 archive."
+                err_handler.log_warning(config_options, mpi_config)
+    err_handler.check_program_status(config_options, mpi_config)
+
+    # If the file is missing, set the local slab of arrays to missing.
+    if not os.path.isfile(supplemental_precip.file_in2):
+        if supplemental_precip.regridded_precip2 is not None:
+            supplemental_precip.regridded_precip2[:, :] = config_options.globalNdv
+
+    # Do we want to use NBM data at this timestep? If not, set the local slab of arrays to missing.
+ if not config_options.use_data_at_current_time: + if supplemental_precip.regridded_precip2 is not None: + supplemental_precip.regridded_precip2[:, :] = config_options.globalNdv + if supplemental_precip.regridded_precip1 is not None: + supplemental_precip.regridded_precip1[:, :] = config_options.globalNdv