From b71b40f2148d80f786c8ae6f4ba202e20d75ef70 Mon Sep 17 00:00:00 2001
From: scout teehee <34626346+luceboxed@users.noreply.github.com>
Date: Mon, 9 Mar 2026 20:57:20 -0400
Subject: [PATCH 1/6] starting docs
---
README.md | 4 +-
doc/basic_setup.md | 190 +++++++++++++++++++++++++++++++++++++
doc/metis.md | 20 ++++
doc/regional_mesh_setup.md | 96 +++++++++++++++++++
scripts/mesh_resolution.py | 93 ++++++++++++++++++
scripts/run_init.sh | 18 ++++
6 files changed, 420 insertions(+), 1 deletion(-)
create mode 100644 doc/basic_setup.md
create mode 100644 doc/metis.md
create mode 100644 doc/regional_mesh_setup.md
create mode 100644 scripts/mesh_resolution.py
create mode 100644 scripts/run_init.sh
diff --git a/README.md b/README.md
index 1633aad..37e3ed4 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,3 @@
-# MPAS
\ No newline at end of file
+Various documentation and files on how to run an MPAS-A model on an HPC.
+
+Note: Many documents say to consult a "/files" directory, but because of Github file size constraints, not all files can be uploaded into it. If you really need them, please reach out to me!
diff --git a/doc/basic_setup.md b/doc/basic_setup.md
new file mode 100644
index 0000000..b8bfad9
--- /dev/null
+++ b/doc/basic_setup.md
@@ -0,0 +1,190 @@
+# MPAS-A Compilation and Basic Setup
+
+## Prereqs
+
+You will need access to the following modules/compilers:
+
+- mpif90
+
+- mpicc
+
+- netCDF netCDF/4.9.3-gompi-2025a
+
+- netCDF-Fortran
+
+- PnetCDF PnetCDF/1.12.3-gompi-2023b
+
+---
+## Compilation!
+
+The first step in compiling MPAS is to grab its source code, which is on Github:
+
+`git clone https://github.com/MPAS-Dev/MPAS-Model.git`
+
+Then, move into MPAS-Model: `cd MPAS-Model`. Provided we have the proper compilers, you just need to run:
+
+`make -j# gnu CORE=init_atmosphere`
+
+...where # is the number of jobs you wish to use to compile. 8 is a good number.
+
+You should see a message like this, if successful:
+
+```
+*******************************************************************************
+MPAS was built with default single-precision reals.
+Debugging is off.
+Parallel version is on.
+Using the mpi_f08 module.
+Papi libraries are off.
+TAU Hooks are off.
+MPAS was built without OpenMP support.
+MPAS was built without OpenMP-offload GPU support.
+MPAS was built without OpenACC accelerator support.
+Position-dependent code was generated.
+MPAS was built with .F files.
+The native timer interface is being used
+Using the SMIOL library.
+*******************************************************************************
+```
+
+Additionally, there should be new files in the directory: `init_atmosphere_model`, `namelist.init_atmosphere`, and `streams.init_atmosphere`! For WRF users, `init_atmosphere` is essentially MPAS's version of the WRF's WPS, bundled into one neat executable. Next, we need to compile the actual model:
+
+`make -j# gnu CORE=atmosphere`
+
+You might see some warnings while it compiles. You can ignore them if it doesn't crash the compilation. You should see a message like this, if successful:
+
+```
+*******************************************************************************
+MPAS was built with default single-precision reals.
+Debugging is off.
+Parallel version is on.
+Using the mpi_f08 module.
+Papi libraries are off.
+TAU Hooks are off.
+MPAS was built without OpenMP support.
+MPAS was built without OpenMP-offload GPU support.
+MPAS was built without OpenACC accelerator support.
+Position-dependent code was generated.
+MPAS was built with .F files.
+The native timer interface is being used
+Using the SMIOL library.
+*******************************************************************************
+```
+
+Now, you should have new files: `atmosphere_model`, `namelist.atmosphere`, `streams.atmosphere`, and a whole bunch of `stream_list.atmosphere.*` files.
+
+It's good practice not to run your simulations directly in this folder, but to instead symlink the executables to a different folder and then copy (not move!) the namelist files into that same folder. I went one folder up and made a new folder named `mpas_sim`, you can name it whatever you like. In this documentation, this is now the MPAS directory.
+
+```sh
+cd ../
+mkdir mpas_sim
+cd mpas_sim
+ln -s ../MPAS-Model/init_atmosphere_model .
+ln -s ../MPAS-Model/atmosphere_model .
+cp ../MPAS-Model/namelist.* .
+cp ../MPAS-Model/streams.* .
+```
+
+The directory is almost fully set up, but we still need a mesh! You can download your favorite from [here](https://www2.mmm.ucar.edu/projects/mpas/site/downloads/meshes.html), and unpack it. You'll need the .grid.nc and a .graph.info.part. file. The number at the end of the graph file should correspond to the number of MPI tasks you plan to run MPAS with. You'll also notice these meshes are global and (for variable resolution) centered on 0, 0. If you'd like to change either of those, see `regional_mesh_setup.md`.
+
+Bring your mesh `.grid.nc` file and your `.graph.info.part.` file into your MPAS directory. Your directory now has almost all the files it needs to start running MPAS simulations!
+
+---
+## Static fields!
+
+Let's setup static fields (aka, geography and land types) for our model. We have to download the geography dataset first... I personally don't like putting the geography dataset into my model directory, so I will go one folder up. We'll use the standard NCAR provided dataset, which extracts into `mpas_static`.
+
+```sh
+cd ..
+wget https://www2.mmm.ucar.edu/projects/mpas/mpas_static.tar.bz2
+tar -xvf mpas_static.tar.bz2
+rm mpas_static.tar.bz2
+```
+
+Remember to clean up the archive once you're done!
+
+Now, we'll need to start modifying our `namelist.init_atmosphere` and `streams.init_atmosphere`. Because this is essentially every step of the WPS packed in one, we'll have to run it multiple times with different configurations. You can go about this however you want, but I find that making a `templates` folder inside our `mpas_sim` directory and having each step's config saved in a different spot makes things easier, especially when trying to automate a model. In this setup doc, though, I'll assume we're just overwriting the same namelist and streams file each time.
+
+Open your `namelist.init_atmosphere` file and replace **everything** with:
+
+```
+&nhyd_model
+ config_init_case = 7
+/
+&data_sources
+ config_geog_data_path = '/scratch/mbc18672/mpas/mpas_static'
+ config_noahmp_static = false
+/
+&preproc_stages
+ config_static_interp = true
+ config_native_gwd_static = true
+ config_native_gwd_gsl_static = false
+ config_vertical_grid = false
+ config_met_interp = false
+ config_input_sst = false
+ config_frac_seaice = false
+/
+&decomposition
+ config_block_decomp_file_prefix = 'pr.graph.info.part.'
+/
+```
+
+- `config_geog_data_path` should be YOUR own path to your geography dataset.
+
+- `config_block_decomp_file_prefix` should be the part of YOUR own graph file all the way up to the number.
+
+Open your `streams.init_atmosphere` file and change **these lines** to the following:
+
+```
+<immutable_stream name="input" type="input" filename_template="x1.40962.grid.nc" input_interval="initial_only" />
+<immutable_stream name="output" type="output" filename_template="x1.40962.static.nc" packages="initial_conds" output_interval="initial_only" />
+
+```
+
+- `input`'s `filename_template` should be the name of your .grid.nc file.
+
+- `output`'s `filename_template` should be the name you want your static fields file to be. Good practice is simply swapping out `grid` for `static`.
+
+- Everything else not specified can remain the same.
+
+Now it's time to actually interpolate the static fields! Using your HPC's scheduler (or on the login node if you're a SICKO (just kidding, please schedule it)) run:
+
+`mpiexec -np # ./init_atmosphere_model >& log.init_atmosphere.0000.out`
+
+or, if on a SLURM HPC:
+
+`srun -n # ./init_atmosphere_model`
+
+... where # is the number of MPI jobs to run, EQUAL to the number at the end of your graph file. Ensure you have all the modules required enabled (netCDF and PnetCDF)! Once it starts, you can `tail -f` that log file to watch it.
+
+If all works just fine, you should see a success message
+
+```
+ ********************************************************
+ Finished running the init_atmosphere core
+ ********************************************************
+```
+
+and have a brand new `*.static.nc` file in your MPAS directory! Unless you change meshes or geography datasets (for whatever reason), you won't need to rerun this part again! Hooray!
+
+---
+## Forcing/initial and boundary conditions
+
+to come
+
+---
+## Sample files
+
+I've included my .sh files to schedule both init_atmosphere and atmosphere models to SLURM under /scripts.
+
+---
+## Credit where credit's due
+
+While most of this is from my own workflow, this is kind of an adaptation of part 1 [NCAR's Sept. 2025 MPAS-A Boulder Tutorial](https://www2.mmm.ucar.edu/projects/mpas/tutorial/Boulder2025/) that I attended.
diff --git a/doc/metis.md b/doc/metis.md
new file mode 100644
index 0000000..3fcdc2d
--- /dev/null
+++ b/doc/metis.md
@@ -0,0 +1,20 @@
+METIS is used to split your mesh into multiple parts in a .graph file for parallel processing, which is required for MPAS to function with your mesh.
+
+To install METIS:
+```
+export INSTALL_DIR=(dir)
+
+git clone https://github.com/KarypisLab/GKlib.git
+cd GKlib
+make config prefix=${INSTALL_DIR}/GKlib
+make
+make install
+cd ..
+
+git clone https://github.com/KarypisLab/METIS.git
+cd METIS/
+make config prefix=${INSTALL_DIR}/METIS gklib_path=${INSTALL_DIR}/GKlib
+make
+make install
+```
+... where (dir) is the directory in which you want METIS to be installed.
diff --git a/doc/regional_mesh_setup.md b/doc/regional_mesh_setup.md
new file mode 100644
index 0000000..ef06631
--- /dev/null
+++ b/doc/regional_mesh_setup.md
@@ -0,0 +1,96 @@
+# MPAS-A Regional Variable Resolution Mesh Setup
+
+## Prereqs
+
+In order to setup a regional simulation (or even a global sim!) you will need to grab a mesh from here:
+
+https://www2.mmm.ucar.edu/projects/mpas/site/downloads/meshes.html
+
+This contains both quasi-uniform meshes that are a consistent resolution across the entire mesh and variable resolution meshes, which dynamically refine in. While this tutorial should work for a quasi-uniform mesh, it will be primarily focused on running a variable resolution regional simulation.
+
+These archives will contain a .grid.nc file (your actual mesh) and a multitude of graph files (used to parallelize MPAS). Grab the mesh file and, if running a global sim, the graph file that corresponds to the exact number of MPI tasks you plan to use for your simulation (if your number is not on there, we'll make a new one later on anyways for regional sims!)
+
+Additionally, for running a regional simulation, you will need these three tools:
+
+- [MPAS-Tools](https://github.com/MPAS-Dev/MPAS-Tools)
+
+- [MPAS-Limited-Area](https://github.com/MPAS-Dev/MPAS-Limited-Area)
+
+- gpmetis. Instructions can be found in `metis.md`
+
+For some more finer control over resolution, you may also like to grab:
+
+- [scale-region](https://github.com/mgduda/scale_region)
+
+You will also need a Conda (or otherwise) environment that has:
+
+- netcdf4
+
+- netcdf-fortran
+
+- numpy
+
+- A Fortran compiler (conda install -c conda-forge fortran-compiler should suffice)
+
+(for supplemental `mesh_resolution.py`)
+
+- xarray
+
+- cartopy
+
+These instructions do not necessarily have to be run on your HPC cluster, as I ran them all on my home desktop running Arch Linux. These instructions should also work for Windows and Mac systems, but I haven't tested those out properly.
+
+## Setup!
+
+Firstly, grab a lat, lon of where you intend the *center* of your mesh to be. You can use Google Maps or OpenStreetMap to find coordinates. In this example, I roughly eyeballed the center of Puerto Rico on Google Maps to be 18.22660024342388, -66.4777993164418. I'm also deciding to use the 15-3km (Circular Refinement) Mesh.
+
+---
+
+Now, lets first move the center of this mesh to wherever you have `grid_rotate`, an MPAS-Tools utility. This should be within `MPAS-Tools/mesh_tools/grid_rotate`. Run `make` to build the utility. Once it's made, grab your `.grid.nc` file from the archive and move it into the directory.
+
+Before we start, we need to modify the `namelist.input`! These NCAR provided meshes are already centered at (0,0), so you can ignore the original settings. Modify the new lat, lon to your liking. As hinted by the utility name, you can also rotate your mesh here too, if needed!
+
+All that's left is to run the file: `./grid_rotate (input grid name) (output grid name)`. I named my grid `x5.6488066.rotated.grid.nc`, but you can do whatever you wish. It's good practice to leave it ending in `*.grid.nc`. This might take a minute or two with a high resolution mesh.
+
+If you just want to rotate where your global variable resolution grid refines into, then you're done! You should be able to use the same .graph.info.part. file in your MPAS simulations.
+
+---
+
+(Optional) If you'd like more control over the resolution of your final mesh, then we can use the `scale-region` tool! Bring your new rotated mesh into the folder and run: `python ./scale_region.py (input grid name) (output grid name) (scale factor) (lat) (lon)`, where:
+
+- (input grid name) is the name of your input grid
+
+- (output grid name) is the name of the resultant scaled grid. Following a similar structure as the rotation, I went with `x5.6488066.scaled.grid.nc`.
+
+- (scale factor) is the factor you wish to scale your resolution by (i.e. 3.0 will triple resolution, sharpening a 15-3km grid down to 5-1km.)
+
+- (lat) & (lon) are the lat, lon of the center point of your grid.
+
+This can take a good bit of time, depending on resolution and scale factor. It is quite memory intensive! Scaling my 15-3km with a scale factor of 2.0 took just around 1hr15min and ~8gb of mem! If you haven't yet already, this would be a great time to look at `basic_setup.md` and begin setting up and compiling MPAS on your HPC cluster.
+
+In case you want a view of what your mesh looks like after this process, I've provided a `mesh_resolution.py` script from NCAR's MPAS-A tutorial in this repo's scripts (/scripts) that will output an image with approx. mesh resolutions contoured. You can run it with `python ./mesh_resolution.py (grid file)`. This'll output a plot of the mesh resolution to your pwd under `mesh_resolution.png`.
+
+---
+
+Next, we're going to take this modified (either rotated OR rotated + scaled) mesh and make it so that it only contains a regional area vs the entire world. Move your new `.grid.nc` file to wherever you have MPAS-Limited-Area. Within that folder, go into /docs/points-example, and grab a points file that is appropriate for your mesh. Because I'm trying to do a regional sim around PR, the india.circle.pts should do just fine. Copy that up back into the parent `MPAS-Limited-Area` folder.
+
+Open that pts file and modify it to your liking. Because I'm using the circle, all I need to specify is the center in lat, lon. The name may be whatever you wish, and it will dictate the name of the resultant new `.grid.nc` file (i.e. if name is `pr`, then you will get `pr.grid.nc`). If you are using something other than Circle, see the documentation [here](https://github.com/MPAS-Dev/MPAS-Limited-Area?tab=readme-ov-file#points-pts-syntax) for more specific info. When you've modified to whatever degree you like, copy that points file into the parent repo folder.
+
+Once ready, run `python ./create_region (pts file) (grid file)`. This may take some time, especially depending on your mesh resolution and size of your region. You can also preview what your region would look like with `python ./create_region --plots (pts file)`, which creates a plot under `region.png`.
+
+This may take some iterating to find the perfect mesh, but in this repo's scripts (/scripts) directory I've included a `mesh_resolution.py` script provided from NCAR MPAS-A tutorials to output an image to give you a rough idea of what your regional mesh may look like with approx resolution contoured. If you followed the optional part, this is the same script. You can run it with `python ./mesh_resolution.py (grid file)`. This'll output a plot of the mesh resolution to your pwd under `mesh_resolution.png`.
+
+(?) Now, for reasons that are not really well documented, putting a variable resolution mesh in here will *not* proportionally scale down your mesh but will instead simply cut out everything outside of it. So, if you have a 5-1km mesh and only set the area below 1.5km inside your limited area, you will end up with a 1.5km-1km limited area mesh.
+
+All that's left to do is to section the new graph file into however many MPI tasks we plan to run our model with. With gpmetis (located in ./METIS/bin/), run: `./gpmetis /path/to/(.graph.info) #`, where # is the number of MPI tasks you plan to run with. This should output a file with the format of `(name).graph.info.part.#` in the same directory as the `.graph.info` file.
+
+We now have everything we'll need to do a regional sim! Simply copy the new `grid.nc` and `.graph.info.part.` files into where you're prepping your MPAS simulation and continue on the guide!
+
+---
+## Sample files
+In this repo's /files/, my test PR mesh that I created alongside this document can be found should you wish to poke at the finished products yourself.
+
+---
+## Credit where credit's due:
+
+While most of this is from my own workflow, this is essentially combining parts #4 and #5 from [NCAR's Sept. 2025 MPAS-A Boulder Tutorial](https://www2.mmm.ucar.edu/projects/mpas/tutorial/Boulder2025/)
diff --git a/scripts/mesh_resolution.py b/scripts/mesh_resolution.py
new file mode 100644
index 0000000..47d10c1
--- /dev/null
+++ b/scripts/mesh_resolution.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+
+def func(x):
+ '''
+ format numbers for terse labels by removing plus signs and unnecessary zeros
+ '''
+ s = '%.0g' % x
+ if 'e' in s:
+ tup = s.split('e')
+ significand = tup[0].rstrip('0').rstrip('.')
+ sign = tup[1][0].replace('+', '')
+ exponent = tup[1][1:].lstrip('0')
+ s = ('%se%s%s' % (significand, sign, exponent)).rstrip('e')
+ return s
+
+
+if __name__ == '__main__':
+
+ import sys
+
+ #
+ # Get the name of the file containing the static information
+ #
+ if len(sys.argv) != 2:
+ print('')
+        print('Usage: '+sys.argv[0]+' <grid file>')
+ print('')
+ exit(0)
+
+ import xarray as xr
+ import numpy as np
+ import cartopy.crs as ccrs
+ import cartopy.feature as cfeature
+ import matplotlib.tri as tri
+ import matplotlib.pyplot as plt
+ import matplotlib.ticker as ticker
+ from matplotlib.colors import ListedColormap
+ import math
+
+ ds_i = xr.open_dataset(sys.argv[1])
+
+ # Convert to degrees from radians¶
+ lonData = np.degrees(ds_i.lonCell)
+ latData = np.degrees(ds_i.latCell)
+
+ # convert lonData to range [-180, 180]
+ lonData = ((lonData + 180) % 360) - 180
+
+ triang = tri.Triangulation(lonData, latData)
+ uxda_area = ds_i.areaCell.data
+ uxda_den = ds_i.meshDensity.data
+
+ # If it appears that we are on a unit sphere (within a factor of 2.0),
+ # scale areas for a sphere with radius 6371229.0 m
+ if np.sum(uxda_area) < 4.0 * math.pi * 2.0:
+ uxda_area = uxda_area * 6371229.0 * 6371229.0
+
+ minSpacingKm = math.sqrt(min(uxda_area)*2.0/math.sqrt(3.0))*0.001 # 6371.229
+ fld = minSpacingKm /np.power(uxda_den,0.25)
+
+ # by setting the central meridian to 180°, there is a gap in polar region
+ #fig, ax = plt.subplots(figsize=(10, 5), subplot_kw={'projection': ccrs.PlateCarree(central_longitude=180)}, dpi=300)
+ fig, ax = plt.subplots(figsize=(10, 5), subplot_kw={'projection': ccrs.PlateCarree()}, dpi=300)
+
+ # ax.set_global or plot_ax.set_extent
+ #ax.set_extent([np.min(lon), np.max(lon), np.min(lat), np.max(lat)], crs=ccrs.PlateCarree())
+ #ax.set_extent([-40, 40, -30, 30], crs=ccrs.PlateCarree())
+    ax.set_global()
+
+ # Choose resolution of map features.
+ # Note that these features are downloaded when plotting for the first time, and for the entire globe,
+ # so requesting high resolution can take several minutes.
+ scale = '110m' # '50m' # '10m'
+
+ ax.add_feature(cfeature.OCEAN.with_scale(scale))
+ ax.add_feature(cfeature.STATES.with_scale(scale))
+ ax.add_feature(cfeature.LAKES.with_scale(scale), alpha=0.5)
+ ax.add_feature(cfeature.COASTLINE.with_scale(scale))
+
+ mm = ax.tricontour(triang, fld,
+ transform=ccrs.PlateCarree(),
+ colors=['0.25', '0.25', '0.25', '0.25', '0.25'],
+ linewidths=[0.5, 0.5, 0.5, 0.5, 0.5] )
+
+ ax.clabel(mm, mm.levels ) #, fmt=func)
+
+ # mask land
+ ax.add_feature(cfeature.LAND, facecolor='cornsilk', zorder=1)
+
+ plt.suptitle('Approximate mesh resolution (km)', fontweight='bold', fontsize=14)
+
+ print('Saving mesh_resolution.png')
+ plt.savefig('mesh_resolution.png', dpi=150, bbox_inches='tight')
diff --git a/scripts/run_init.sh b/scripts/run_init.sh
new file mode 100644
index 0000000..75403ea
--- /dev/null
+++ b/scripts/run_init.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+#SBATCH --job-name=mpas_static_fields # Job name (testBowtie2)
+#SBATCH --partition=batch # Partition name (batch, highmem_p, or gpu_p)
+#SBATCH --nodes=1 # Number of compute nodes for resources to be spread out over (increase only if using MPI enabled software)
+#SBATCH --ntasks=30                          # 30 MPI tasks; must match the number at the end of your .graph.info.part. file
+#SBATCH --cpus-per-task=1 # CPU core count per task, by default 1 CPU core per task
+#SBATCH --mem=250G                           # Memory per node; by default using M as unit
+#SBATCH --time=1:00:00 # Time limit hrs:min:sec or days-hours:minutes:seconds
+#SBATCH --output=%x_%j.out # Standard output log, e.g., testBowtie2_12345.out
+#SBATCH --mail-user=yourmail@uga.edu # Where to send mail
+#SBATCH --mail-type=END,FAIL # Mail events (BEGIN, END, FAIL, ALL)
+
+ml netCDF/4.9.3-gompi-2025a
+ml PnetCDF/1.12.3-gompi-2023b
+
+cd /path/to/mpas_sim/
+
+srun -n 30 ./init_atmosphere_model >& log.init_atmosphere.0000.out
From d1a8ee1658db5e41b018f56b62ea7d3b4b114d03 Mon Sep 17 00:00:00 2001
From: "Scout C." <34626346+luceboxed@users.noreply.github.com>
Date: Mon, 9 Mar 2026 21:00:54 -0400
Subject: [PATCH 2/6] Update basic_setup.md
---
doc/basic_setup.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/basic_setup.md b/doc/basic_setup.md
index b8bfad9..1aabb33 100644
--- a/doc/basic_setup.md
+++ b/doc/basic_setup.md
@@ -8,11 +8,11 @@ You will need access to the following modules/compilers:
- mpicc
-- netCDF netCDF/4.9.3-gompi-2025a
+- netCDF
- netCDF-Fortran
-- PnetCDF PnetCDF/1.12.3-gompi-2023b
+- PnetCDF
---
## Compilation!
From 654f5eb92855e6eaf324b7f86ebf7694a3a3d836 Mon Sep 17 00:00:00 2001
From: "Scout C." <34626346+luceboxed@users.noreply.github.com>
Date: Tue, 10 Mar 2026 00:17:13 -0400
Subject: [PATCH 3/6] Update regional_mesh_setup.md
---
doc/regional_mesh_setup.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/regional_mesh_setup.md b/doc/regional_mesh_setup.md
index ef06631..bcb4dde 100644
--- a/doc/regional_mesh_setup.md
+++ b/doc/regional_mesh_setup.md
@@ -52,7 +52,7 @@ Before we start, we need to modify the `namelist.input`! These NCAR provided mes
All that's left is to run the file: `./grid_rotate (input grid name) (output grid name)`. I named my grid `x5.6488066.rotated.grid.nc`, but you can do whatever you wish. It's good practice to leave it ending in `*.grid.nc`. This might take a minute or two with a high resolution mesh.
-If you just want to rotate where your global variable resolution grid refines into, then you're done! You should be able to use the same .graph.info.part. file in your MPAS simulations.
+If you just want to rotate where your variable resolution grid refines into and leave your simulation as a global one, then you're done! You should be able to use the same .graph.info.part. from the original archive in your MPAS simulations.
---
From 268caa1d454b7698ce0775aec18571e558be7741 Mon Sep 17 00:00:00 2001
From: "Scout C." <34626346+luceboxed@users.noreply.github.com>
Date: Wed, 11 Mar 2026 17:33:34 -0400
Subject: [PATCH 4/6] first half gfs ic/lbc
---
doc/basic_setup.md | 217 +++++++++++++++++++++++++++++++++-
doc/metis.md | 20 ----
doc/regional_mesh_setup.md | 26 +++-
scripts/gfs_pre01_download.sh | 51 ++++++++
4 files changed, 289 insertions(+), 25 deletions(-)
delete mode 100644 doc/metis.md
create mode 100644 scripts/gfs_pre01_download.sh
diff --git a/doc/basic_setup.md b/doc/basic_setup.md
index 1aabb33..ebd2306 100644
--- a/doc/basic_setup.md
+++ b/doc/basic_setup.md
@@ -47,7 +47,7 @@ Using the SMIOL library.
*******************************************************************************
```
-Additionally, there should be new files in the directory: `init_atmosphere_model`, `namelist.init_atmosphere`, and `streams.init_atmosphere`! For WRF users, `init_atmosphere` is essentially MPAS's version of the WRF's WPS, bundled into one neat executable. Next, we need to compile the actual model:
+Additionally, there should be new files in the directory: `init_atmosphere_model`, `namelist.init_atmosphere`, and `streams.init_atmosphere`! For WRF users, `init_atmosphere` is essentially MPAS's version of the WRF's WRF Preprocessing System (WPS), bundled into one neat executable. Next, we need to compile the actual model:
`make -j# gnu CORE=atmosphere`
@@ -175,9 +175,215 @@ If all is works just fine, you should see a success message
and have a brand new `*.static.nc` file in your MPAS directory! Unless you change meshes or geography datasets (for whatever reason), you won't need to rerun this part again! Hooray!
---
-## Forcing/initial and boundary conditions
+## Forcing/initial and boundary conditions!
-to come
+### Welcome Back ungrib!
+
+**If you plan on JUST using ERA5/ungribbed data, this part can be skipped unless you'd like to be prepared for the instance you will need to use real model data.**
+
+Now that we have our static fields set and ready to go, next we need to give our model its initial forcing data and (if running a regional sim) boundary conditions. MPAS can ingest datasets from a model like the GFS and reanalysis like ERA5, but as of now the main branch of MPAS CANNOT support datasets from models like HRRR and NAM due to issues with soil levels.[^1]
+
+Now, above, while I said `init_atmosphere_model` is essentially the WRF WPS all in one executable, there still is one part of the WPS that we require: `ungrib`. Clone WPS to a location of your choosing (good practice: the parent folder of `mpas_sim`.)
+
+```sh
+cd ..
+git clone https://github.com/wrf-model/WPS.git
+cd WPS
+```
+
+Now that we're in the `WPS` folder, we will need to configure it! We'll run this:
+
+`./configure --nowrf --build-grib2-libs`
+
+... which tells WPS not to look for an already compiled WRF model and compiles libraries for ungrib to work. You will be given a screen to select the platform you will be compiling for. Choose the one that corresponds to the compiler on your HPC with the serial option! Once you've done that, all that's left to do is to compile ungrib:
+
+`./compile ungrib`
+
+We don't need to compile the entirety of the WPS, so we just specify only to compile ungrib. This may take a minute or two to finish, and it doesn't really give a pretty "finished!" message, so you should go confirm if it actually compiled before moving on. Run this afterwards while inside the WPS folder, and if you get a similar message and file-size you should be good to go:
+
+```sh
+mbc18672@c4-16 WPS$ ls -lh ./ungrib/src/ungrib.exe
+-rwxr-xr-x 1 mbc18672 whlab 2414408 Mar 11 15:52 ./ungrib/src/ungrib.exe
+```
+
+The next part of this guide splits into two paths depending on the type of dataset you're using: gribbed model data (in this guide, GFS) or netCDF reanalysis data (in this guide, ERA5). For a quick sanity check, let's look at what your parent directory should contain. Here's the results of me running `tree -Ld 2`, which lists out the folders in the current directory and then the folders INSIDE those folders:
+
+```sh
+mbc18672@c4-16 mpas$ tree -Ld 2
+.
+├── MPAS-Model
+│ ├── cmake
+│ ├── default_inputs
+│ ├── docs
+│ ├── src
+│ └── testing_and_setup
+├── WPS
+│ ├── arch
+│ ├── cmake
+│ ├── external
+│ ├── geogrid
+│ ├── grib2
+│ ├── metgrid
+│ ├── ungrib
+│ └── util
+├── mpas_sim
+│ └── templates
+└── mpas_static
+ ├── albedo_modis
+ ├── albedo_ncep
+ ├── greenfrac
+ ├── greenfrac_fpar_modis
+ ├── landuse_30s
+ ├── maxsnowalb
+ ├── maxsnowalb_modis
+ ├── modis_landuse_20class_30s
+ ├── soilgrids
+ ├── soiltemp_1deg
+ ├── soiltype_top_30s
+ ├── topo_30s
+ └── topo_gmted2010_30s
+
+35 directories
+```
+
+It doesn't have to look exact (especially if you use different folder names or a different organization structure), but if it's similar, you're so far so good!
+
+### Using GFS Data
+
+(If you understand how to use ungrib, this part of the guide may just be review!)
+
+If you are using another dataset than GFS, remember to update configurations as needed (namelists, Vtables, directory naming, etc.) and also know the provided download script will ONLY automate GFS downloads.
+
+We'll first need to collect the GFS datasets we'll be using. Let's lay the groundwork and set up a structure to store our files in. You can do this your own way, but I'm going to go back up to the parent folder and create a new `DATA` folder which will then have subfolders of `GFS`, `ERA5`, and `METDATA`.
+
+```sh
+cd ..
+mkdir DATA
+cd DATA
+mkdir GFS
+mkdir ERA5
+mkdir METDATA
+```
+
+While you could manually go to NOAA NOMADS and manually download hourly GFS gribbed data, I've provided the script our UGA-MPAS model uses to automatically fetch these files inside `/scripts/gfs_pre01_download.sh`. Go into that script and change `DATA_DIR="/path/to/download/DATA/GFS"` to whatever path you've decided you want your GFS data to be downloaded to. Now, when we run this script we will download hours 0-##, where ## is whatever you decide (up to 384) as the script's argument. This guide will prepare our model for a simple 24-hour run, but these instructions should work for runs of any length. Let's run our script:
+
+```sh
+chmod +x gfs_pre01_download.sh
+./gfs_pre01_download.sh 24
+```
+
+`chmod +x` marks our file as executable, if not already marked. Now we must wait for all the files to finish downloading, which depends heavily on the amount of hours you chose and internet speed of your HPC. When it finishes, you should get a similar message to:
+
+`All GFS files downloaded successfully to /scratch/mbc18672/mpas/DATA/GFS`
+
+... using your own data path. Head to your data directory where your GFS data is stored and run `ls -l`.
+
+```sh
+mbc18672@c4-16 GFS$ ls -l
+total 11105773
+-rw-r--r-- 1 mbc18672 whlab 510710733 Mar 11 11:32 gfs.t12z.pgrb2.0p25.f000
+-rw-r--r-- 1 mbc18672 whlab 542346908 Mar 11 11:32 gfs.t12z.pgrb2.0p25.f001
+-rw-r--r-- 1 mbc18672 whlab 544136015 Mar 11 11:32 gfs.t12z.pgrb2.0p25.f002
+-rw-r--r-- 1 mbc18672 whlab 545561658 Mar 11 11:33 gfs.t12z.pgrb2.0p25.f003
+-rw-r--r-- 1 mbc18672 whlab 543540371 Mar 11 11:33 gfs.t12z.pgrb2.0p25.f004
+-rw-r--r-- 1 mbc18672 whlab 545450213 Mar 11 11:33 gfs.t12z.pgrb2.0p25.f005
+-rw-r--r-- 1 mbc18672 whlab 547246385 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f006
+-rw-r--r-- 1 mbc18672 whlab 545255414 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f007
+-rw-r--r-- 1 mbc18672 whlab 545966342 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f008
+-rw-r--r-- 1 mbc18672 whlab 548019963 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f009
+-rw-r--r-- 1 mbc18672 whlab 547616955 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f010
+-rw-r--r-- 1 mbc18672 whlab 547770205 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f011
+-rw-r--r-- 1 mbc18672 whlab 549861686 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f012
+-rw-r--r-- 1 mbc18672 whlab 545379320 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f013
+-rw-r--r-- 1 mbc18672 whlab 548745114 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f014
+-rw-r--r-- 1 mbc18672 whlab 547414283 Mar 11 11:36 gfs.t12z.pgrb2.0p25.f015
+-rw-r--r-- 1 mbc18672 whlab 547789611 Mar 11 11:36 gfs.t12z.pgrb2.0p25.f016
+-rw-r--r-- 1 mbc18672 whlab 549778021 Mar 11 11:36 gfs.t12z.pgrb2.0p25.f017
+-rw-r--r-- 1 mbc18672 whlab 549684424 Mar 11 11:37 gfs.t12z.pgrb2.0p25.f018
+-rw-r--r-- 1 mbc18672 whlab 548082784 Mar 11 11:37 gfs.t12z.pgrb2.0p25.f019
+-rw-r--r-- 1 mbc18672 whlab 550309999 Mar 11 11:37 gfs.t12z.pgrb2.0p25.f020
+-rw-r--r-- 1 mbc18672 whlab 550641063 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f021
+-rw-r--r-- 1 mbc18672 whlab 550672780 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f022
+-rw-r--r-- 1 mbc18672 whlab 551411968 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f023
+-rw-r--r-- 1 mbc18672 whlab 552035742 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f024
+```
+
+Take note of the model run time (in this case, the 11th @ 12z)[^2] and how many hours you downloaded (F0-F24). Head back to your WPS folder and modify the following lines in `namelist.wps`:
+
+```
+&share
+...
+ start_date = '2026-03-11_12:00:00',
+ end_date = '2026-03-12_12:00:00',
+ interval_seconds = 3600
+/
+
+&ungrib
+ out_format = 'WPS',
+ prefix = 'GFS',
+/
+```
+
+Now, we'll need to symlink a variable table to the base of the WPS directory for ungrib to understand how to unpack our GFS files. These are located in `./ungrib/Variable_Tables`.
+
+`ln -s ./ungrib/Variable_Tables/Vtable.GFS Vtable`
+
+The name of the symlinked variable table **must** just be `Vtable`! Now we'll need to symlink our GFS datasets to this folder, for which a script is given to us to automatically do it within WPS.
+
+`./link_grib.csh /scratch/mbc18672/mpas/DATA/GFS/gfs.*.f*`
+
+... making sure to swap the path to where **your** GFS gribbed files are. Now we're ready to run ungrib!
+
+`./ungrib.exe`
+
+This may take some time. When finished, you should see this:
+
+```
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+! Successful completion of ungrib. !
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+```
+
+and your WPS directory should be full of files starting with `GFS:*`:
+
+```sh
+mbc18672@c4-16 WPS$ ls -l GFS*
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:13 GFS:2026-03-11_12
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_13
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_14
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_15
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_16
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_17
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_18
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_19
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_20
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_21
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-11_22
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-11_23
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-12_00
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-12_01
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_02
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_03
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_04
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_05
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_06
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_07
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_08
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_09
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_10
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:19 GFS:2026-03-12_11
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:19 GFS:2026-03-12_12
+```
+
+These files can now be moved into our `METDATA` (or equivalent) folder to prepare them to be used with `init_atmosphere_model`.
+
+`mv GFS:* ../DATA/METDATA/`
+
+... to be finished
+
+### Using ERA5 Data
+
+coming soon :-)
---
## Sample files
@@ -188,3 +394,8 @@ I've included my .sh files to schedule both init_atmosphere and atmosphere model
## Credit where credit's due
While most of this is from my own workflow, this is kind of an adaptation of part 1 [NCAR's Sept. 2025 MPAS-A Boulder Tutorial](https://www2.mmm.ucar.edu/projects/mpas/tutorial/Boulder2025/) that I attended.
+
+[^1]: If you'd like to use these datasets, NOAA/OAR/GSL has their own fork of MPAS that should have support for them. Setup should be almost 1:1 with these instructions, but I have not tested it yet so I cannot confirm it for sure. If you're interested, check it out here with their own documentation: https://github.com/ufs-community/MPAS-Model
+
+[^2]: This HPC has its timezone set to follow EDT, which means the 'day' the model runs may not line up with what the output of `ls` tells you! If you need more clarity on what day, the download script outputs the UTC date it is pulling from right after it starts running, in format `Selected GFS run: ${run_hour}Z for date $utc_date`.
+
diff --git a/doc/metis.md b/doc/metis.md
deleted file mode 100644
index 3fcdc2d..0000000
--- a/doc/metis.md
+++ /dev/null
@@ -1,20 +0,0 @@
-METIS is used to split your mesh into multiple parts in a .graph file for parallel processing, which is required for MPAS to function with your mesh.
-
-To install METIS:
-```
-export INSTALL_DIR=(dir)
-
-git clone https://github.com/KarypisLab/GKlib.git
-cd GKlib
-make config prefix=${INSTALL_DIR}/GKlib
-make
-make install
-cd ..
-
-git clone https://github.com/KarypisLab/METIS.git
-cd METIS/
-make config prefix=${INSTALL_DIR}/METIS gklib_path=${INSTALL_DIR}/GKlib
-make
-make install
-```
-... where (dir) is the directory in which you want METIS to be installed.
diff --git a/doc/regional_mesh_setup.md b/doc/regional_mesh_setup.md
index bcb4dde..29c0cd3 100644
--- a/doc/regional_mesh_setup.md
+++ b/doc/regional_mesh_setup.md
@@ -16,7 +16,29 @@ Additionally, for running a regional simulation, you will need these three tools
- [MPAS-Limited-Area](https://github.com/MPAS-Dev/MPAS-Limited-Area)
-- gpmetis. Instructions can be found in `metis.md`
+- gpmetis. See instructions:
+
+METIS is used to split your mesh into multiple parts in a .graph file for parallel processing, which is required for MPAS to function with your mesh.
+
+To install METIS:
+```sh
+export INSTALL_DIR=(dir)
+
+git clone https://github.com/KarypisLab/GKlib.git
+cd GKlib
+make config prefix=${INSTALL_DIR}/GKlib
+make
+make install
+cd ..
+
+git clone https://github.com/KarypisLab/METIS.git
+cd METIS/
+make config prefix=${INSTALL_DIR}/METIS gklib_path=${INSTALL_DIR}/GKlib
+make
+make install
+```
+... where (dir) is the directory in which you want METIS to be installed.
+
For some more finer control over resolution, you may also like to grab:
@@ -52,7 +74,7 @@ Before we start, we need to modify the `namelist.input`! These NCAR provided mes
All that's left is to run the file: `./grid_rotate (input grid name) (output grid name)`. I named my grid `x5.6488066.rotated.grid.nc`, but you can do whatever you wish. It's good practice to leave it ending in `*.grid.nc`. This might take a minute or two with a high resolution mesh.
-If you just want to rotate where your ariable resolution grid refines into and leave your simulation as a global one, then you're done! You should be able to use the same .graph.info.part. from the original archive in your MPAS simulations.
+If you just want to rotate where your global variable resolution grid refines into, then you're done! You should be able to use the same .graph.info.part. file in your MPAS simulations.
---
diff --git a/scripts/gfs_pre01_download.sh b/scripts/gfs_pre01_download.sh
new file mode 100644
index 0000000..14796d5
--- /dev/null
+++ b/scripts/gfs_pre01_download.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+# Adapted from UGAWRF download script
+DATA_DIR="/path/to/download/DATA/GFS"
+START_FHR=0
+END_FHR=$1 # takes in a number of hours to download as argument, 27hr is good for 24hr run
+# --- End Configuration ---
+
+# --- Main Script ---
+mkdir -p "$DATA_DIR"
+cd "$DATA_DIR" || exit
+rm -f gfs.*
+
+hour_utc=$(date -u +"%H")
+utc_date=$(date -u +"%Y%m%d")
+
+echo "Current UTC date is: $utc_date"
+echo "Current UTC hour is: $hour_utc"
+
+if (( hour_utc >= 4 && hour_utc < 10 )); then
+ run_hour="00"
+elif (( hour_utc >= 10 && hour_utc < 16 )); then
+ run_hour="06"
+elif (( hour_utc >= 16 && hour_utc < 22 )); then
+ run_hour="12"
+else
+ run_hour="18"
+ if (( hour_utc < 4 )); then
+ utc_date=$(date -u -d "yesterday" +"%Y%m%d")
+ echo "Fetching previous day's 18Z run for date: $utc_date"
+ fi
+fi
+
+echo "Selected GFS run: ${run_hour}Z for date $utc_date"
+
+BASE_URL="https://nomads.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.${utc_date}/${run_hour}/atmos"
+
+echo "Downloading GFS forecast hours from F${START_FHR} to F${END_FHR}..."
+for fhr_num in $(seq $START_FHR $END_FHR); do
+ fhr=$(printf "%03d" $fhr_num)
+
+ FILENAME="gfs.t${run_hour}z.pgrb2.0p25.f${fhr}"
+ FULL_URL="${BASE_URL}/${FILENAME}"
+
+ echo "Queueing download: ${FILENAME}"
+
+ wget -nv "$FULL_URL" &
+done
+
+wait
+
+echo "All GFS files downloaded successfully to ${DATA_DIR}"
From d51ab1110122f78f36311c60b8480a95f615e9de Mon Sep 17 00:00:00 2001
From: scout teehee <34626346+luceboxed@users.noreply.github.com>
Date: Thu, 19 Mar 2026 01:26:36 -0400
Subject: [PATCH 5/6] almost done with running model steps
---
doc/basic_setup.md | 732 +++++++++++++++++++++++++++++++++-
doc/regional_mesh_setup.md | 26 +-
scripts/gfs_pre01_download.sh | 51 +++
3 files changed, 801 insertions(+), 8 deletions(-)
create mode 100644 scripts/gfs_pre01_download.sh
diff --git a/doc/basic_setup.md b/doc/basic_setup.md
index b8bfad9..07d52d4 100644
--- a/doc/basic_setup.md
+++ b/doc/basic_setup.md
@@ -17,7 +17,7 @@ You will need access to the following modules/compilers:
---
## Compilation!
-The first step in compiling MPAS is to grab it's source code, which is on Github:
+The first step in compiling MPAS is to grab its source code, which is on Github:
`git clone https://github.com/MPAS-Dev/MPAS-Model.git`
@@ -25,7 +25,7 @@ Then, move into MPAS-Model: `cd MPAS-Model`. Provided we have the proper compile
`make -j# gnu CORE=init_atmosphere`
-...where # is the number of jobs you wish to use to compile. 8 is a good number.
+...where # is the number of jobs you wish to use to compile. 8 is a good number. Also, we're using `gnu` compilers here, but if your HPC uses different compilers i.e. `intel` then you'd swap it out.
You should see a message like this, if successful:
@@ -47,7 +47,7 @@ Using the SMIOL library.
*******************************************************************************
```
-Additionally, there should be new files in the directory: `init_atmosphere_model`, `namelist.init_atmosphere`, and `streams.init_atmosphere`! For WRF users, `init_atmosphere` is essentially MPAS's version of the WRF's WPS, bundled into one neat executable. Next, we need to compile the actual model:
+Additionally, there should be new files in the directory: `init_atmosphere_model`, `namelist.init_atmosphere`, and `streams.init_atmosphere`! For WRF users, `init_atmosphere` is essentially MPAS's version of the WRF's WRF Preprocessing System (WPS), bundled into one neat executable. Next, we need to compile the actual model:
`make -j# gnu CORE=atmosphere`
@@ -105,16 +105,48 @@ Remember to clean up the archive once you're done!
Now, we'll need to start modifying our `namelist.init_atmosphere` and `streams.init_atmosphere`. Because this is essentially every step of the WPS packed in one, we'll have to run it multiple times with different configurations. You can go about this however you want, but I find that making a `templates` folder inside our `mpas_sim` directory and having each step's config saved in a different spot makes things easier, especially when trying to automate a model. In this setup doc, though, I'll assume we're just overwriting the same namelist and streams file each time.
-Open your `namelist.init_atmosphere` file and replace **everything** with:
+Open your `namelist.init_atmosphere` file and modify these lines:
```
&nhyd_model
config_init_case = 7
+ config_start_time = '2026-03-11_12:00:00'
+ config_stop_time = '2026-03-11_12:00:00'
+ config_theta_adv_order = 3
+ config_coef_3rd_order = 0.25
+/
+&dimensions
+ config_nvertlevels = 55
+ config_nsoillevels = 4
+ config_nfglevels = 38
+ config_nfgsoillevels = 4
/
&data_sources
config_geog_data_path = '/scratch/mbc18672/mpas/mpas_static'
+ config_met_prefix = 'GFS'
+ config_sfc_prefix = 'SST'
+ config_fg_interval = 86400
+ config_landuse_data = 'MODIFIED_IGBP_MODIS_NOAH'
+ config_topo_data = 'GMTED2010'
+ config_vegfrac_data = 'MODIS'
+ config_albedo_data = 'MODIS'
+ config_maxsnowalbedo_data = 'MODIS'
+ config_supersample_factor = 3
+ config_use_spechumd = false
config_noahmp_static = false
/
+&vertical_grid
+ config_ztop = 30000.0
+ config_nsmterrain = 1
+ config_smooth_surfaces = true
+ config_dzmin = 0.3
+ config_nsm = 30
+ config_tc_vertical_grid = true
+ config_blend_bdy_terrain = true
+/
+&interpolation_control
+ config_extrap_airtemp = 'lapse-rate'
+/
&preproc_stages
config_static_interp = true
config_native_gwd_static = true
@@ -124,13 +156,39 @@ Open your `namelist.init_atmosphere` file and replace **everything** with:
config_input_sst = false
config_frac_seaice = false
/
+&io
+ config_pio_num_iotasks = 0
+ config_pio_stride = 1
+/
&decomposition
config_block_decomp_file_prefix = 'pr.graph.info.part.'
/
```
+MAKE SURE these are set from the defaults:
+
+- `config_init_case = 7`
+
- `config_geog_data_path` should be YOUR own path to your geography dataset.
+- `config_noahmp_static = false` should be ADDED underneath the data path
+
+- Under `&preproc_stages`
+
+ - `config_static_interp = true`
+
+ - `config_native_gwd_static = true`
+
+ - `config_native_gwd_gsl_static = false`
+
+ - `config_vertical_grid = false`
+
+ - `config_met_interp = false`
+
+ - `config_input_sst = false`
+
+ - `config_frac_seaice = false`
+
- `config_block_decomp_file_prefix` should be the part of YOUR own graph file all the way up to the number.
Open your `streams.init_atmosphere` file and change **these lines** to the following:
@@ -154,6 +212,8 @@ Open your `streams.init_atmosphere` file and change **these lines** to the follo
- Everything else not specified can remain the same.
+By default, MPAS's 'clobber mode' is set to `never_modify`, which means that should you ever re-run the model, it will refuse to overwrite any files but it will *not* fail. If you wish to have the model automatically overwrite any file it's already made, add `clobber_mode="overwrite"`. It doesn't matter that much here, as you won't be recomputing your static fields a lot, but when you're doing repeated reprocessing and down the line, it may help!
+
+Now it's time to actually interpolate the static fields! Using your HPC's scheduler (or on the login node if you're a SICKO (just kidding, please schedule it)) run:
`mpiexec -np # ./init_atmosphere_model >& log.init_atmosphere.0000.out`
@@ -175,16 +235,676 @@ If all works just fine, you should see a success message
and have a brand new `*.static.nc` file in your MPAS directory! Unless you change meshes or geography datasets (for whatever reason), you won't need to rerun this part again! Hooray!
---
-## Forcing/initial and boundary conditions
+## Forcing/initial and boundary conditions!
+
+### Welcome Back ungrib!
+
+**If you plan on JUST using ERA5/ungribbed data, this part can be skipped unless you'd like to be prepared for the instance you will need to use real model data.**
+
+Now that we have our static fields set and ready to go, next we need to give our model its initial forcing data and (if running a regional sim) boundary conditions. MPAS can ingest datasets from a model like the GFS and reanalysis like ERA5, but as of now the main branch of MPAS CANNOT support datasets from models like HRRR and NAM due to issues with soil levels.[^1]
+
+Now, above, while I said `init_atmosphere_model` is essentially the WRF WPS all in one executable, there still is one part of the WPS that we require: `ungrib`. Clone WPS to a location of your choosing (good practice: the parent folder of `mpas_sim`.)
+
+```sh
+cd ..
+git clone https://github.com/wrf-model/WPS.git
+cd WPS
+```
+
+Now that we're in the `WPS` folder, we will need to configure it! We'll run this:
+
+`./configure --nowrf --build-grib2-libs`
+
+... which tells WPS not to look for an already compiled WRF model and compiles libraries for ungrib to work. You will be given a screen to select the platform you will be compiling for. Choose the one that corresponds to the compiler on your HPC with the serial option! Once you've done that, all that's left to do is to compile ungrib:
+
+`./compile ungrib`
+
+We don't need to compile the entirety of the WPS, so we just specify only to compile ungrib. This may take a minute or two to finish, and it doesn't really give a pretty "finished!" message, so you should go confirm if it actually compiled before moving on. Run this afterwards while inside the WPS folder, and if you get a similar message and file-size you should be good to go:
+
+```sh
+mbc18672@c4-16 WPS$ ls -lh ./ungrib/src/ungrib.exe
+-rwxr-xr-x 1 mbc18672 whlab 2414408 Mar 11 15:52 ./ungrib/src/ungrib.exe
+```
+
+The next part of this guide splits into two paths depending on the type of dataset you're using: gribbed model data (in this guide, GFS) or netCDF reanalysis data (in this guide, ERA5). For a quick sanity check, let's look at what your parent directory should contain. Here are the results of me running `tree -Ld 2`, which lists out the folders in the current directory and then the folders INSIDE those folders:
+
+```sh
+mbc18672@c4-16 mpas$ tree -Ld 2
+.
+├── MPAS-Model
+│ ├── cmake
+│ ├── default_inputs
+│ ├── docs
+│ ├── src
+│ └── testing_and_setup
+├── WPS
+│ ├── arch
+│ ├── cmake
+│ ├── external
+│ ├── geogrid
+│ ├── grib2
+│ ├── metgrid
+│ ├── ungrib
+│ └── util
+├── mpas_sim
+│ └── templates
+└── mpas_static
+ ├── albedo_modis
+ ├── albedo_ncep
+ ├── greenfrac
+ ├── greenfrac_fpar_modis
+ ├── landuse_30s
+ ├── maxsnowalb
+ ├── maxsnowalb_modis
+ ├── modis_landuse_20class_30s
+ ├── soilgrids
+ ├── soiltemp_1deg
+ ├── soiltype_top_30s
+ ├── topo_30s
+ └── topo_gmted2010_30s
+
+35 directories
+```
+
+It doesn't have to look exact (especially if you use different folder names or a different organization structure), but if it's similar, you're so far so good!
+
+### Using GFS Data
+
+(If you understand how to use ungrib, this part of the guide may just be review!)
+
+If you are using another dataset than GFS, remember to update configurations as needed (namelists, Vtables, directory naming, etc.) and also know the provided download script will ONLY automate GFS downloads.
+
+We'll first need to collect the GFS datasets we'll be using. Let's lay the groundwork and set up a structure to store our files in. You can do this your own way, but I'm going to go back up to the parent folder and create a new `DATA` folder which will then have subfolders of `GFS`, `ERA5`, and `METDATA`.
+
+```sh
+cd ..
+mkdir DATA
+cd DATA
+mkdir GFS
+mkdir ERA5
+mkdir METDATA
+```
+
+While you could manually go to NOAA NOMADS and manually download hourly GFS gribbed data, I've provided the script our UGA-MPAS model uses to automatically fetch these files inside `/scripts/gfs_pre01_download.sh`. Go into that script and change `DATA_DIR="/path/to/download/DATA/GFS"` to whatever path you've decided you want your GFS data to be downloaded to. Now, when we run this script we will download hours 0-##, where ## is whatever you decide (up to 384) as the script's argument. This guide will prepare our model for a simple 24-hour run, but these instructions should work for runs of any length. Let's run our script:
+
+```sh
+chmod +x gfs_pre01_download.sh
+./gfs_pre01_download.sh 24
+```
+
+`chmod +x` marks our file as executable, if not already marked. Now we must wait for all the files to finish downloading, which depends heavily on the number of hours you chose and the internet speed of your HPC. When it finishes, you should get a similar message to:
+
+`All GFS files downloaded successfully to /scratch/mbc18672/mpas/DATA/GFS`
+
+... using your own data path. Head to your data directory where your GFS data is stored and run `ls -l`.
+
+```sh
+mbc18672@c4-16 GFS$ ls -l
+total 11105773
+-rw-r--r-- 1 mbc18672 whlab 510710733 Mar 11 11:32 gfs.t12z.pgrb2.0p25.f000
+-rw-r--r-- 1 mbc18672 whlab 542346908 Mar 11 11:32 gfs.t12z.pgrb2.0p25.f001
+-rw-r--r-- 1 mbc18672 whlab 544136015 Mar 11 11:32 gfs.t12z.pgrb2.0p25.f002
+-rw-r--r-- 1 mbc18672 whlab 545561658 Mar 11 11:33 gfs.t12z.pgrb2.0p25.f003
+-rw-r--r-- 1 mbc18672 whlab 543540371 Mar 11 11:33 gfs.t12z.pgrb2.0p25.f004
+-rw-r--r-- 1 mbc18672 whlab 545450213 Mar 11 11:33 gfs.t12z.pgrb2.0p25.f005
+-rw-r--r-- 1 mbc18672 whlab 547246385 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f006
+-rw-r--r-- 1 mbc18672 whlab 545255414 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f007
+-rw-r--r-- 1 mbc18672 whlab 545966342 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f008
+-rw-r--r-- 1 mbc18672 whlab 548019963 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f009
+-rw-r--r-- 1 mbc18672 whlab 547616955 Mar 11 11:34 gfs.t12z.pgrb2.0p25.f010
+-rw-r--r-- 1 mbc18672 whlab 547770205 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f011
+-rw-r--r-- 1 mbc18672 whlab 549861686 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f012
+-rw-r--r-- 1 mbc18672 whlab 545379320 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f013
+-rw-r--r-- 1 mbc18672 whlab 548745114 Mar 11 11:35 gfs.t12z.pgrb2.0p25.f014
+-rw-r--r-- 1 mbc18672 whlab 547414283 Mar 11 11:36 gfs.t12z.pgrb2.0p25.f015
+-rw-r--r-- 1 mbc18672 whlab 547789611 Mar 11 11:36 gfs.t12z.pgrb2.0p25.f016
+-rw-r--r-- 1 mbc18672 whlab 549778021 Mar 11 11:36 gfs.t12z.pgrb2.0p25.f017
+-rw-r--r-- 1 mbc18672 whlab 549684424 Mar 11 11:37 gfs.t12z.pgrb2.0p25.f018
+-rw-r--r-- 1 mbc18672 whlab 548082784 Mar 11 11:37 gfs.t12z.pgrb2.0p25.f019
+-rw-r--r-- 1 mbc18672 whlab 550309999 Mar 11 11:37 gfs.t12z.pgrb2.0p25.f020
+-rw-r--r-- 1 mbc18672 whlab 550641063 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f021
+-rw-r--r-- 1 mbc18672 whlab 550672780 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f022
+-rw-r--r-- 1 mbc18672 whlab 551411968 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f023
+-rw-r--r-- 1 mbc18672 whlab 552035742 Mar 11 11:38 gfs.t12z.pgrb2.0p25.f024
+```
+
+Take note of the model run time (in this case, the 11th @ 12z)[^2] and how many hours you downloaded (F0-F24). Head back to your WPS folder and modify the following lines in `namelist.wps`:
+
+```
+&share
+...
+ start_date = '2026-03-11_12:00:00',
+ end_date = '2026-03-12_12:00:00',
+ interval_seconds = 3600
+/
+
+&ungrib
+ out_format = 'WPS',
+ prefix = 'GFS',
+/
+```
+
+Now, we'll need to symlink a variable table to the base of the WPS directory for ungrib to understand how to unpack our GFS files. These are located in `./ungrib/Variable_Tables`.
+
+`ln -s ./ungrib/Variable_Tables/Vtable.GFS Vtable`
+
+The name of the symlinked variable table **must** just be `Vtable`! Now we'll need to symlink our GFS datasets to this folder, for which a script is given to us to automatically do it within WPS.
+
+`./link_grib.csh /scratch/mbc18672/mpas/DATA/GFS/gfs.*.f*`
+
+... making sure to swap the path to where **your** GFS gribbed files are. Now we're ready to run ungrib!
+
+`./ungrib.exe`
+
+This may take some time. When finished, you should see this:
+
+```
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+! Successful completion of ungrib. !
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+```
+
+and your WPS directory should be full of files starting with `GFS:*`:
+
+```sh
+mbc18672@c4-16 WPS$ ls -l GFS*
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:13 GFS:2026-03-11_12
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_13
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_14
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_15
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:14 GFS:2026-03-11_16
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_17
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_18
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_19
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_20
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:15 GFS:2026-03-11_21
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-11_22
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-11_23
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-12_00
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:16 GFS:2026-03-12_01
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_02
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_03
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_04
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:17 GFS:2026-03-12_05
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_06
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_07
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_08
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_09
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:18 GFS:2026-03-12_10
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:19 GFS:2026-03-12_11
+-rw-r--r-- 1 mbc18672 whlab 818178824 Mar 11 17:19 GFS:2026-03-12_12
+```
+
+These files can now be moved into our `METDATA` (or equivalent) folder to prepare them to be used with `init_atmosphere_model`. We can then go ahead and symlink this to our `mpas_sim` folder (or, you can skip the `METDATA` copy and directly put it into your sim folder. Up to you!).
+
+```sh
+mv GFS:* ../DATA/METDATA/
+ln -s ../DATA/METDATA/GFS* ../mpas_sim
+```
+
+Now that our forcing data is in the folder, lets interpolate them to our grid. Check out the section after the next one!
+
+### Using ERA5 Data
+
+coming soon :-)
+
+### Interpolating ICs to grid
+
+We'll need to edit our `namelist.init_atmosphere` and `streams.init_atmosphere` again...
+
+Here's what your namelist.init_atmosphere should look like:
+
+```
+&nhyd_model
+ config_init_case = 7
+ config_start_time = '2026-03-11_12:00:00'
+ config_stop_time = '2026-03-11_12:00:00'
+ config_theta_adv_order = 3
+ config_coef_3rd_order = 0.25
+/
+&dimensions
+ config_nvertlevels = 55
+ config_nsoillevels = 4
+ config_nfglevels = 38
+ config_nfgsoillevels = 4
+/
+&data_sources
+ config_geog_data_path = '/scratch/mbc18672/mpas/mpas_static'
+ config_met_prefix = 'GFS'
+ config_sfc_prefix = 'SST'
+ config_fg_interval = 86400
+ config_landuse_data = 'MODIFIED_IGBP_MODIS_NOAH'
+ config_topo_data = 'GMTED2010'
+ config_vegfrac_data = 'MODIS'
+ config_albedo_data = 'MODIS'
+ config_maxsnowalbedo_data = 'MODIS'
+ config_supersample_factor = 3
+ config_use_spechumd = false
+ config_noahmp_static = false
+/
+&vertical_grid
+ config_ztop = 30000.0
+ config_nsmterrain = 1
+ config_smooth_surfaces = true
+ config_dzmin = 0.3
+ config_nsm = 30
+ config_tc_vertical_grid = true
+ config_blend_bdy_terrain = true
+/
+&interpolation_control
+ config_extrap_airtemp = 'lapse-rate'
+/
+&preproc_stages
+ config_static_interp = false
+ config_native_gwd_static = false
+ config_native_gwd_gsl_static = false
+ config_vertical_grid = true
+ config_met_interp = true
+ config_input_sst = false
+ config_frac_seaice = true
+/
+&io
+ config_pio_num_iotasks = 0
+ config_pio_stride = 1
+/
+&decomposition
+ config_block_decomp_file_prefix = 'pr.graph.info.part.'
+/
+```
+
+... ensuring:
+
+- `config_start_time` and `config_stop_time` are the time of the FIRST forcing file you have (`config_stop_time` can remain untouched here). Format is `YYYY-MM-DD_hh:mm:ss`
+
+- `config_met_prefix` should be `GFS` or `ERA5` (unless you use a different IC source, in which case it'll be whatever is before the : in the file name.)
+
+- Under `&preproc_stages`:
+
+ - `config_static_interp = false`
+
+ - `config_native_gwd_static = false`
+
+ - `config_native_gwd_gsl_static = false`
+
+ - `config_vertical_grid = true`
+
+ - `config_met_interp = true`
+
+ - `config_input_sst = false`
+
+ - `config_frac_seaice = true`
+
+- `config_block_decomp_file_prefix` remains the prefix of your graph file.
+
+And in the `streams.init_atmosphere`, change these fields:
+
+```
+
+
+
+```
+
+... we're simply changing the input to our new static field and the output to a new interpolated IC file!
+
+After that, simply rerun the model!
+
+`mpiexec -np # ./init_atmosphere_model >& log.init_atmosphere.0000.out`
+
+or, if on a SLURM HPC:
+
+`srun -n # ./init_atmosphere_model`
+
+Again, # is number of MPI jobs, equal to the number at the end of the graph file you will be using. You should tail your log file again to watch its status, but you can go hands off while it runs. It shouldn't take that long, and you should get a similar success message.
+
+We should now have an IC-interpolated grid under the name `*.init.nc`! We're almost ready for an actual simulation, but first we need LBCs if running a regional sim! If you're not running a regional sim, then you can proceed directly to running the `atmosphere` core.
+
+### Creating LBCs
+
+Lateral boundary conditions (LBCs) let our regional model know what's happening outside its bounds. We'll simply use the GFS forcing data we already have for this! Again, the namelist.init_atmosphere file will need to be edited:
+
+```
+&nhyd_model
+ config_init_case = 9
+ config_start_time = '2026-03-11_12:00:00'
+ config_stop_time = '2026-03-12_12:00:00'
+ config_theta_adv_order = 3
+ config_coef_3rd_order = 0.25
+/
+&dimensions
+ config_nvertlevels = 55
+ config_nsoillevels = 4
+ config_nfglevels = 38
+ config_nfgsoillevels = 4
+/
+&data_sources
+ config_geog_data_path = '/scratch/mbc18672/mpas/mpas_static'
+ config_met_prefix = 'GFS'
+ config_sfc_prefix = 'SST'
+ config_fg_interval = 10800
+ config_landuse_data = 'MODIFIED_IGBP_MODIS_NOAH'
+ config_topo_data = 'GMTED2010'
+ config_vegfrac_data = 'MODIS'
+ config_albedo_data = 'MODIS'
+ config_maxsnowalbedo_data = 'MODIS'
+ config_supersample_factor = 3
+ config_use_spechumd = false
+ config_noahmp_static = false
+/
+&vertical_grid
+ config_ztop = 30000.0
+ config_nsmterrain = 1
+ config_smooth_surfaces = true
+ config_dzmin = 0.3
+ config_nsm = 30
+ config_tc_vertical_grid = true
+ config_blend_bdy_terrain = true
+/
+&interpolation_control
+ config_extrap_airtemp = 'lapse-rate'
+/
+&preproc_stages
+ config_static_interp = false
+ config_native_gwd_static = false
+ config_native_gwd_gsl_static = false
+ config_vertical_grid = true
+ config_met_interp = true
+ config_input_sst = false
+ config_frac_seaice = true
+/
+&io
+ config_pio_num_iotasks = 0
+ config_pio_stride = 1
+/
+&decomposition
+ config_block_decomp_file_prefix = 'pr.graph.info.part.'
+/
+```
+
+Make sure:
+
+- `config_init_case = 9`, letting `init_atmosphere` know we're wanting to do LBCs.
+
+- `config_start_time` and `config_stop_time` should be equal to the dates of your first AND last forcing files. Format is `YYYY-MM-DD_hh:mm:ss`
+
+- `config_fg_interval` should be set to how often you wish to create new LBCs, in seconds. 3 hours should work fine, but you may go lower or higher (just ensure you actually have data available for those times! i.e. if you do hourly LBCs make sure you have *hourly* forcing data).
+
+- `config_met_prefix = 'GFS'`, or `ERA5`, or whatever prefix your forcing data uses.
+
+In `streams.init_atmosphere`:
+
+```
+
+
+
+
+
+
+
+```
+
+- Make sure your input `filename_template` is now your `init` grid.
+
+- Set your output `filename_template` to `null`. This output stream will not be used in LBC generation, but MPAS will fail without a proper output.
+
+- Set your lbc `output_interval` equal to whatever your `config_fg_interval` is, converted from seconds. I used `config_fg_interval = 10800` seconds, which corresponds to an `output_interval` of 3 hours.
+
+Now for the final time, lets run `init_atmosphere`!
+
+`mpiexec -np # ./init_atmosphere_model >& log.init_atmosphere.0000.out`
+
+or, if on a SLURM HPC:
+
+`srun -n # ./init_atmosphere_model`
+
+Upon completion, you should have files starting with `lbc.*` in your `mpas_sim` directory!
+
+```sh
+mbc18672@ss-sub1 mpas_sim$ ls -l lbc*
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:18 lbc.2026-03-11_12.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:18 lbc.2026-03-11_15.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:18 lbc.2026-03-11_18.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:19 lbc.2026-03-11_21.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:19 lbc.2026-03-12_00.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:20 lbc.2026-03-12_03.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:20 lbc.2026-03-12_06.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:21 lbc.2026-03-12_09.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3145692216 Mar 18 18:21 lbc.2026-03-12_12.00.00.nc
+```
+
+We are FINALLY ready to run the `atmosphere` model and do our actual simulations! Let's quickly do a sanity check of what should be in our `mpas_sim` directory:
+
+```sh
+mbc18672@ss-sub1 mpas_sim$ ls -lh
+total 43G
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_12 -> ../DATA/METDATA/GFS:2026-03-11_12
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_13 -> ../DATA/METDATA/GFS:2026-03-11_13
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_14 -> ../DATA/METDATA/GFS:2026-03-11_14
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_15 -> ../DATA/METDATA/GFS:2026-03-11_15
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_16 -> ../DATA/METDATA/GFS:2026-03-11_16
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_17 -> ../DATA/METDATA/GFS:2026-03-11_17
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_18 -> ../DATA/METDATA/GFS:2026-03-11_18
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_19 -> ../DATA/METDATA/GFS:2026-03-11_19
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_20 -> ../DATA/METDATA/GFS:2026-03-11_20
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_21 -> ../DATA/METDATA/GFS:2026-03-11_21
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_22 -> ../DATA/METDATA/GFS:2026-03-11_22
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-11_23 -> ../DATA/METDATA/GFS:2026-03-11_23
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_00 -> ../DATA/METDATA/GFS:2026-03-12_00
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_01 -> ../DATA/METDATA/GFS:2026-03-12_01
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_02 -> ../DATA/METDATA/GFS:2026-03-12_02
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_03 -> ../DATA/METDATA/GFS:2026-03-12_03
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_04 -> ../DATA/METDATA/GFS:2026-03-12_04
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_05 -> ../DATA/METDATA/GFS:2026-03-12_05
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_06 -> ../DATA/METDATA/GFS:2026-03-12_06
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_07 -> ../DATA/METDATA/GFS:2026-03-12_07
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_08 -> ../DATA/METDATA/GFS:2026-03-12_08
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_09 -> ../DATA/METDATA/GFS:2026-03-12_09
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_10 -> ../DATA/METDATA/GFS:2026-03-12_10
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_11 -> ../DATA/METDATA/GFS:2026-03-12_11
+lrwxrwxrwx 1 mbc18672 whlab 33 Mar 18 16:27 GFS:2026-03-12_12 -> ../DATA/METDATA/GFS:2026-03-12_12
+lrwxrwxrwx 1 mbc18672 whlab 30 Mar 9 18:09 atmosphere_model -> ../MPAS-Model/atmosphere_model
+lrwxrwxrwx 1 mbc18672 whlab 35 Mar 9 18:09 init_atmosphere_model -> ../MPAS-Model/init_atmosphere_model
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:18 lbc.2026-03-11_12.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:18 lbc.2026-03-11_15.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:18 lbc.2026-03-11_18.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:19 lbc.2026-03-11_21.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:19 lbc.2026-03-12_00.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:20 lbc.2026-03-12_03.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:20 lbc.2026-03-12_06.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:21 lbc.2026-03-12_09.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 3.0G Mar 18 18:21 lbc.2026-03-12_12.00.00.nc
+-rw-r--r-- 1 mbc18672 whlab 2.0K Mar 9 18:10 namelist.atmosphere
+-rw-r--r-- 1 mbc18672 whlab 1.5K Mar 18 18:16 namelist.init_atmosphere
+-rwxr-xr-x 1 mbc18672 whlab 4.1M Mar 9 19:37 pr.graph.info.part.30
+-rwxr-xr-x 1 mbc18672 whlab 1.7G Mar 18 17:59 pr.grid.nc
+-rw-r--r-- 1 mbc18672 whlab 13G Mar 18 18:14 pr.init.nc
+-rw-r--r-- 1 mbc18672 whlab 2.4G Mar 18 18:08 pr.static.nc
+-rw-r--r-- 1 mbc18672 whlab 2.5K Mar 9 18:10 streams.atmosphere
+-rw-r--r-- 1 mbc18672 whlab 1.2K Mar 18 18:16 streams.init_atmosphere
+```
+
+Like before, this doesn't need to be exactly equal, but if it looks similar after following this guide, you're on the right path!
+
+---
+## Running MPAS-A
+
+We have the `atmosphere_model` symlinked to our `mpas_sim` directory and the namelist/streams files, but we'll also need to bring over lookup tables for our physics scheme before we start! These are located in the compiled MPAS-Model directory. We'll also need the `stream_list.atmosphere.*` files.
+
+```sh
+ln -s ../MPAS-Model/src/core_atmosphere/physics/physics_wrf/files/* .
+cp ../MPAS-Model/stream_list.atmosphere.* .
+```
+
+Checking your directory, there should now be a handful of files ending in `.TBL`, `.DATA`, and `.DATA.DBL`. Now, let's open our `namelist.atmosphere` and make these changes:
+
+```
+&nhyd_model
+ config_time_integration_order = 2
+ config_dt = 6.6
+ config_start_time = '2026-03-11_12:00:00'
+ config_run_duration = '1_00:00:00'
+ config_split_dynamics_transport = true
+ config_number_of_sub_steps = 2
+ config_dynamics_split_steps = 3
+ config_horiz_mixing = '2d_smagorinsky'
+ config_visc4_2dsmag = 0.05
+ config_scalar_advection = true
+ config_monotonic = true
+ config_coef_3rd_order = 0.25
+ config_epssm = 0.1
+ config_smdiv = 0.1
+/
+&damping
+ config_zd = 22000.0
+ config_xnutr = 0.2
+/
+&limited_area
+ config_apply_lbcs = true
+/
+&io
+ config_pio_num_iotasks = 0
+ config_pio_stride = 1
+/
+&decomposition
+ config_block_decomp_file_prefix = 'pr.graph.info.part.'
+/
+&restart
+ config_do_restart = false
+/
+&printout
+ config_print_global_minmax_vel = true
+ config_print_detailed_minmax_vel = false
+/
+&IAU
+ config_IAU_option = 'off'
+ config_IAU_window_length_s = 21600.
+/
+&physics
+ config_sst_update = false
+ config_sstdiurn_update = false
+ config_deepsoiltemp_update = false
+ config_radtlw_interval = '00:30:00'
+ config_radtsw_interval = '00:30:00'
+ config_bucket_update = 'none'
+ config_physics_suite = 'convection_permitting'
+/
+&soundings
+ config_sounding_interval = 'none'
+/
+&physics_lsm_noahmp
+ config_noahmp_iopt_dveg = 4
+ config_noahmp_iopt_crs = 1
+ config_noahmp_iopt_btr = 1
+ config_noahmp_iopt_runsrf = 3
+ config_noahmp_iopt_runsub = 3
+ config_noahmp_iopt_sfc = 1
+ config_noahmp_iopt_frz = 1
+ config_noahmp_iopt_inf = 1
+ config_noahmp_iopt_rad = 3
+ config_noahmp_iopt_alb = 1
+ config_noahmp_iopt_snf = 1
+ config_noahmp_iopt_tksno = 1
+ config_noahmp_iopt_tbot = 2
+ config_noahmp_iopt_stc = 1
+ config_noahmp_iopt_gla = 1
+ config_noahmp_iopt_rsf = 4
+ config_noahmp_iopt_soil = 1
+ config_noahmp_iopt_pedo = 1
+ config_noahmp_iopt_crop = 0
+ config_noahmp_iopt_irr = 0
+ config_noahmp_iopt_irrm = 0
+ config_noahmp_iopt_infdv = 1
+ config_noahmp_iopt_tdrn = 0
+/
+```
+
+Specifically, modify these lines:
+
+- `config_dt` is your timestep in seconds; a rough rule of thumb to start with is to set this to 6x your minimum resolution in km (so, a variable mesh with 1km as its most fine area should be 6sec).
+
+- `config_start_time` should be the date you wish to start your sim; this should line up with your IC and first LBC (if applicable) time!
+
+- `config_run_duration` is how long you wish to run the model out to. If you're doing a regional sim with LBCs, you will need LBCs for the amount of time you're running out to! Format is `D_HH:MM:SS`.
+
+- `config_apply_lbcs = true` if you're running a regional sim!
+
+- `config_physics_suite` is the physics suite you wish to use. MPAS has two built in physics suites: `mesoscale_reference` and `convection_permitting`[^3]. Read more on them [here](https://www2.mmm.ucar.edu/projects/mpas/site/documentation/users_guide/phys_suites.html).
+
+Everything else can remain the same. Let's modify our streams.atmosphere, now:
+
+```
+
+
+
+
+
+
-to come
+
+
+
+
+```
+
+- Change the input `filename_template` to your `init` grid.
+
+- For the `output` and `diagnostics` fields, change your `output_interval` to however often you want files written; personally, I just want my entire model run bundled into two files, so I set it to the same run time. I'll dive more into the difference between the `diag.` and `history.` files later.
+
+Now we can run our model! Instead of queueing `init_atmosphere_model` we're just going to queue `atmosphere_model`:
+
+`mpiexec -np # ./atmosphere_model >& log.atmosphere.0000.out`
+
+or, if on a SLURM HPC:
+
+`srun -n # ./atmosphere_model`
+
+This will be the longest step, and it will be important to check the output of your log file to see how long each timestep is taking or if you encounter any errors. A segfault here usually means you're using an inappropriate timestep or you've run out of memory. Make sure to look out for this line:
+
+`Timing for integration step:`
+
+If this number is crazy high — ideally, for real-time forecasting, it should be LESS than your timestep, so that the model runs faster than real time — then you may need to go back and reconfigure your mesh to be coarser or more limited in scope.
---
## Sample files
I've included my .sh files to schedule both init_atmosphere and atmosphere models to SLURM under /scripts.
+I've also included full templates for namelists and streams files at each step under /configs.
+
---
## Credit where credit's due
While most of this is from my own workflow, this is kind of an adaptation of part 1 [NCAR's Sept. 2025 MPAS-A Boulder Tutorial](https://www2.mmm.ucar.edu/projects/mpas/tutorial/Boulder2025/) that I attended.
+
+[^1]: If you'd like to use these datasets, NOAA/OAR/GSL has their own fork of MPAS that should have support for them. Setup should be almost 1:1 with these instructions, but I have not tested it yet so I cannot confirm it for sure. If you're interested, check it out here with their own documentation: https://github.com/ufs-community/MPAS-Model
+
+[^2]: This HPC has its timezone set to follow EDT, which means the 'day' the model runs may not line up with what the output of `ls` tells you! If you need more clarity on what day, the download script outputs the UTC date it is pulling from right after it starts running, in format `Selected GFS run: ${run_hour}Z for date $utc_date`.
+
+[^3]: If you will be using `convection_permitting`, there is an additional step! In the root of your `MPAS-Model` folder you must run `./build_tables`, which provides the files for the suite to function. This may take some time to run! Afterwards, symlink all the new `.DBL` files to your `mpas_sim` directory. This should only have to be done once.
\ No newline at end of file
diff --git a/doc/regional_mesh_setup.md b/doc/regional_mesh_setup.md
index ef06631..ef440e3 100644
--- a/doc/regional_mesh_setup.md
+++ b/doc/regional_mesh_setup.md
@@ -16,7 +16,29 @@ Additionally, for running a regional simulation, you will need these three tools
- [MPAS-Limited-Area](https://github.com/MPAS-Dev/MPAS-Limited-Area)
-- gpmetis. Instructions can be found in `metis.md`
+- gpmetis. See instructions:
+
+METIS is used to split your mesh into multiple parts in a .graph file for parallel processing, which is required for MPAS to function with your mesh.
+
+To install METIS:
+```sh
+export INSTALL_DIR=(dir)
+
+git clone https://github.com/KarypisLab/GKlib.git
+cd GKlib
+make config prefix=${INSTALL_DIR}/GKlib
+make
+make install
+cd ..
+
+git clone https://github.com/KarypisLab/METIS.git
+cd METIS/
+make config prefix=${INSTALL_DIR}/METIS gklib_path=${INSTALL_DIR}/GKlib
+make
+make install
+```
+... where (dir) is the directory in which you want METIS to be installed.
+
For some more finer control over resolution, you may also like to grab:
@@ -66,7 +88,7 @@ If you just want to rotate where your global variable resolution grid refines in
- (lat) & (lon) are the lat, lon of the center point of your grid.
-This can take a good bit of time, depending on resolution and scale factor. It is quite memory intensive! Scaling my 15-3km with a scale factor of 2.0 took just around 1hr15min and ~8gb of mem! If you haven't yet already, this would be a great time to look at `compile.md` and begin setting up and compiling MPAS on your HPC cluster.
+This can take a good bit of time, depending on resolution and scale factor. It is quite memory intensive! Scaling my 15-3km with a scale factor of 2.0 took just around 1hr15min and ~8gb of mem! This is sort of an extreme example, though. Scaling a 46-12km mesh by 12.0 took a much shorter (but still somewhat lengthy) ~8min. If you haven't already, this would be a great time to look at `compile.md` and begin setting up and compiling MPAS on your HPC cluster.
In case you want a view of what your mesh looks like after this process, I've provided a `mesh_resolution.py` script from NCAR's MPAS-A tutorial in this repo's scripts (/scripts) that will output an image with approx. mesh resolutions contoured. You can run it with `python ./mesh_resolution.py (grid file)`. This'll output a plot of the mesh resolution to your pwd under `mesh_resolution.png`.
diff --git a/scripts/gfs_pre01_download.sh b/scripts/gfs_pre01_download.sh
new file mode 100644
index 0000000..14796d5
--- /dev/null
+++ b/scripts/gfs_pre01_download.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+# Adapted from UGAWRF download script
+# Downloads 0.25-degree GFS pgrb2 files for the newest fully-published run.
+DATA_DIR="/path/to/download/DATA/GFS"
+START_FHR=0
+END_FHR=${1:?usage: $0 <end_forecast_hour>} # 27hr is good for a 24hr run
+# --- End Configuration ---
+
+# --- Main Script ---
+mkdir -p "$DATA_DIR"
+cd "$DATA_DIR" || exit 1
+rm -f gfs.* # clear leftovers from a previous run
+
+hour_utc=$(date -u +"%H")
+utc_date=$(date -u +"%Y%m%d")
+
+echo "Current UTC date is: $utc_date"
+echo "Current UTC hour is: $hour_utc"
+
+# Pick the newest cycle NOMADS should have finished posting (a run is
+# usually complete ~4 hours after its cycle time). The 10# prefix forces
+# base-10: %H yields "08"/"09", which bash arithmetic would otherwise
+# reject as invalid octal numbers.
+if (( 10#$hour_utc >= 4 && 10#$hour_utc < 10 )); then
+    run_hour="00"
+elif (( 10#$hour_utc >= 10 && 10#$hour_utc < 16 )); then
+    run_hour="06"
+elif (( 10#$hour_utc >= 16 && 10#$hour_utc < 22 )); then
+    run_hour="12"
+else
+    run_hour="18"
+    if (( 10#$hour_utc < 4 )); then
+        utc_date=$(date -u -d "yesterday" +"%Y%m%d")
+        echo "Fetching previous day's 18Z run for date: $utc_date"
+    fi
+fi
+
+echo "Selected GFS run: ${run_hour}Z for date $utc_date"
+
+BASE_URL="https://nomads.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.${utc_date}/${run_hour}/atmos"
+
+echo "Downloading GFS forecast hours from F${START_FHR} to F${END_FHR}..."
+pids=()
+for fhr_num in $(seq "$START_FHR" "$END_FHR"); do
+    fhr=$(printf "%03d" "$fhr_num")
+
+    FILENAME="gfs.t${run_hour}z.pgrb2.0p25.f${fhr}"
+    FULL_URL="${BASE_URL}/${FILENAME}"
+
+    echo "Queueing download: ${FILENAME}"
+
+    wget -nv "$FULL_URL" &
+    pids+=($!)
+done
+
+# Reap every download and surface failures instead of claiming success.
+status=0
+for pid in "${pids[@]}"; do
+    wait "$pid" || status=1
+done
+
+if (( status == 0 )); then
+    echo "All GFS files downloaded successfully to ${DATA_DIR}"
+else
+    echo "One or more GFS downloads failed" >&2
+fi
+exit "$status"
From 6d10eea6457a30cee6cafb4ec847fa46b055f038 Mon Sep 17 00:00:00 2001
From: scout teehee <34626346+luceboxed@users.noreply.github.com>
Date: Thu, 19 Mar 2026 01:30:35 -0400
Subject: [PATCH 6/6] update docs to be actually right
---
doc/basic_setup.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/basic_setup.md b/doc/basic_setup.md
index 6e2b8e0..07d52d4 100644
--- a/doc/basic_setup.md
+++ b/doc/basic_setup.md
@@ -8,11 +8,11 @@ You will need access to the following modules/compilers:
- mpicc
-- netCDF
+- netCDF netCDF/4.9.3-gompi-2025a
- netCDF-Fortran
-- PnetCDF
+- PnetCDF PnetCDF/1.12.3-gompi-2023b
---
## Compilation!