Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
165 changes: 160 additions & 5 deletions srcPython/pGITM.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,14 @@
from scipy.io import FortranFile
from struct import unpack

# check if netcdf can be imported. if not, don't error unless writing netcdf files
canWriteCDF = True
try:
from netCDF4 import Dataset
from pyitm.fileio import variables
except:
canWriteCDF = False

# This should work on most systems:
endChar='<'

Expand All @@ -28,7 +36,8 @@

def get_args_pgitm():
parser = argparse.ArgumentParser(
description = 'Post-process GITM model results')
description = "Post-process GITM model results."
" (note: NetCDF functionality is only available through post_process.py)")
parser.add_argument('-v', \
action='store_true', default = False, \
help = 'set verbose to true')
Expand Down Expand Up @@ -86,6 +95,132 @@ def write_gitm_file(file, data, isVerbose = False):
fp.close()


def create_netcdf(filename, data, isVerbose=False):
    """
    Create a new netCDF4 file from a single GITM output snapshot.

    inputs:
        filename (str): path of the netCDF file to create.
        data (dict): GITM outputs. Must contain 'time' (datetime),
            'version', 'vars' (list of variable names), and (nz, ny, nx)
            arrays for 'Longitude', 'Latitude', 'Altitude' and each
            name listed in data['vars'].
        isVerbose (bool): whether to print extra info.
    """

    if isVerbose:
        print(" --> Creating netCDF file:", filename)

    # GITM arrays are stored (alt, lat, lon):
    nz, ny, nx = data["Longitude"].shape

    with Dataset(filename, mode="w", format="NETCDF4") as ncfile:
        # Dimensions ('time' is unlimited so later snapshots can be appended):
        ncfile.createDimension('time', None)
        ncfile.createDimension('lon', nx)
        ncfile.createDimension('lat', ny)
        ncfile.createDimension('z', nz)

        # Time is stored as seconds since midnight of this snapshot's date:
        reftime = data['time'].date()
        time = ncfile.createVariable('time', np.float64, ('time',))
        time.units = 'seconds since ' + str(reftime)
        time.long_name = 'time'
        time[:] = [(data['time']
                    - datetime.datetime(reftime.year, reftime.month, reftime.day)
                    ).total_seconds()]

        if isVerbose:
            print(" --> Created dataset with dimensions:")
            [print(dim) for dim in ncfile.dimensions.items()]

        # Add in attributes. Version, etc.
        ncfile.title = f"{filename[:5]} GITM Outputs"
        ncfile.model = "GITM"
        ncfile.version = data['version']

        if isVerbose:
            print(" --> Attributes added. Current ncfile:")
            print(ncfile)

        # We'll try to add the coordinate variables, but only if they can be
        # added cleanly (each coordinate varies along its own axis only):
        if np.all(data['Longitude'][0, 0, :] == data['Longitude']):
            lon = ncfile.createVariable('lon', np.float64, ('lon'))
            lon[:] = np.rad2deg(data['Longitude'][0, 0, :])
            lon.units = 'degrees_east'
            lon.long_name = 'Longitude'

        # NOTE: the original looped "for i in range(nx)" over the first
        # (z) axis, which breaks whenever nx != nz (IndexError for nx > nz,
        # unchecked slices for nz > nx). Broadcast against the 1D latitude
        # profile instead, putting 'lat' on the last axis via transpose:
        if np.all(data['Latitude'][0, :, 0]
                  == np.transpose(data['Latitude'], (0, 2, 1))):
            lat = ncfile.createVariable('lat', np.float64, ('lat'))
            lat[:] = np.rad2deg(data['Latitude'][0, :, 0])
            lat.units = 'degrees_north'
            lat.long_name = 'Latitude'

        if np.all(data['Altitude'][:, 0, 0] == data['Altitude'].T):
            alt = ncfile.createVariable('z', np.float64, ('z'))
            alt[:] = data['Altitude'][:, 0, 0] / 1000  # m -> km
            alt.units = 'km'
            alt.long_name = 'Altitude'

        # Then put the data variables in, transposed to (lon, lat, z):
        for varname in data['vars']:
            unit = None
            newname = variables.get_short_names([varname])[0]
            thisvar = ncfile.createVariable(newname, np.float64,
                                            ('time', 'lon', 'lat', 'z'))
            thisvar[:] = data[varname].T
            # Crude unit guesses from the GITM variable name:
            if 'V' in newname:
                unit = 'm/s'
            elif '[' in varname:
                unit = '/m3'
            elif 'Temp' in varname:
                unit = 'K'
            if unit:
                ncfile[newname].units = unit

            ncfile[newname].long_name = variables.get_long_names([varname])[0]

    return

def append_netcdf(filename, data, isVerbose=False):
    """
    Append one GITM snapshot to an existing netCDF file.

    inputs:
        filename (str): path of the netCDF file to append to. Must have been
            created by create_netcdf (unlimited 'time' dimension, time units
            of the form 'seconds since YYYY-MM-DD').
        data (dict): GITM outputs; must contain 'time' (datetime), 'vars',
            and an array for each name in data['vars'].
        isVerbose (bool): whether to print extra info.
            (default added for consistency with create_netcdf.)
    """

    if isVerbose:
        print(" --> file already found. appending...")

    with Dataset(filename, mode='a', format='NETCDF4') as ncfile:
        time = ncfile['time']

        # Recover the reference date written by create_netcdf
        # ('seconds since YYYY-MM-DD'):
        reftime = datetime.datetime.strptime(time.units.split("since ")[-1], "%Y-%m-%d")
        # Writing at index len(time) extends the unlimited dimension:
        time[len(time)] = (data["time"] - reftime).total_seconds()

        for varname in data['vars']:
            newname = variables.get_short_names([varname])[0]
            # Fill the record just created (last time index), transposed
            # to the file's (lon, lat, z) layout:
            ncfile[newname][-1, ...] = data[varname].T

    return


def write_to_netcdf(file, data, combine=False, runname='', isVerbose=False):
    """
    Dispatcher for writing/appending netCDFs

    inputs:
        file (str): file name (that the bin would have been written to). Used to
            determine if the data is 2D/3D & whether we need to drop ghost cells
        data (dict): The gitm outputs.
        combine (bool, optional): write one file per output type (all times
            appended into e.g. '3DALL.nc') instead of one file per time.
        runname (str, optional): name to add to processed files. output format is:
            'runname_3DALL.nc', or if no runname is given its just '3DALL.nc'
        isVerbose (bool): whether to print extra info.

    raises:
        ValueError: for 1D output files, which are not supported yet.
    """

    # Reject 1D outputs BEFORE any renaming; the original checked after
    # prefixing with runname, so any runname silently bypassed this guard.
    if file.startswith('1D'):
        raise ValueError("Cannot write netcdf files for 1D outputs yet!")

    if combine:
        # One file per output type: '3DALL_t110926_000000.bin' -> '3DALL.nc'
        file = file.split('_')[0] + '.nc'
        if runname:
            file = runname + '_' + file
    else:
        # Swap only the extension; str.replace('bin', 'nc') would also
        # mangle a 'bin' appearing elsewhere in the name.
        file = os.path.splitext(file)[0] + '.nc'

    if (isVerbose):
        print(' -> Writing NetCDF for: ', file, " to:", runname)

    # Create the file on first sight, append subsequent snapshots:
    if not os.path.exists(file):
        create_netcdf(file, data, isVerbose)
    else:
        append_netcdf(file, data, isVerbose)

# ----------------------------------------------------------------------------
#
# ----------------------------------------------------------------------------
Expand Down Expand Up @@ -298,7 +433,8 @@ def remove_files(header, isVerbose = False):
#
# ----------------------------------------------------------------------------

def process_one_file(header, isVerbose = False):
def process_one_file(header, isVerbose = False, dowrite=True,
write_nc=False, combine=False, runname=''):

fileOut = header.replace('.header','.bin')

Expand Down Expand Up @@ -362,11 +498,23 @@ def process_one_file(header, isVerbose = False):
iLonStart:iLonEnd]
iBlock += 1

write_gitm_file(fileOut, data, isVerbose = isVerbose)
if dowrite:
if write_nc:
if canWriteCDF:
write_to_netcdf(fileOut, data, runname=runname, combine=combine,
isVerbose=isVerbose)
else:
raise ImportError("Cant import netCDF4")
else: # default !!
write_gitm_file(fileOut, data, isVerbose = isVerbose)
return

# return data if dowrite is false (for debugging)
return data


def post_process_gitm(dir, doRemove, isVerbose = False):
def post_process_gitm(dir, doRemove, isVerbose = False,
write_nc=False, combine=False, runname=''):

# Get into the correct directory for processing
processDir = dir
Expand All @@ -384,6 +532,8 @@ def post_process_gitm(dir, doRemove, isVerbose = False):
headers = sorted(glob('*.header'))
if (isVerbose):
print('Found %i files to process' % len(headers))
elif len(headers) == 0:
print(" -> Did not find any files to process.")

# Process each single header:
for head in headers:
Expand All @@ -392,11 +542,16 @@ def post_process_gitm(dir, doRemove, isVerbose = False):
print(f' --> Processing {head}')

if os.path.exists("../../PostGITM.exe"):
if write_nc:
raise ValueError(
"Cannot use PostGITM and netCDF at the same time, yet."
"\n >> either remove PostGITM.exe or disable -nc option")
# Check if we can use the old postprocessor. it's faster.
# Pipe the header filename into PostGITM.exe
run(f"echo {head} | ../../PostGITM.exe ", check=True, shell=True)
else:
data = process_one_file(head, isVerbose = isVerbose)
process_one_file(head, isVerbose=isVerbose,
write_nc=write_nc, combine=combine, runname=runname)

if (doRemove):
remove_files(head, isVerbose = isVerbose)
Expand Down
Loading
Loading