diff --git a/README.md b/README.md index 6a90086..201439c 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,63 @@ # webplot plotting package -This is a simplified version of the webplot plotting library used to create graphics for the NCAR ensemble system. +### create graphics from MPAS ensemble -For example, to create a 2-m ensemble mean temperature plot using ensemble data initialized at 2017061900, valid at 2017061912: +For example, to plot max precipitation accumulation with contours of 500hPa mean height, and +mean 500hPa wind barbs from forecast hours [12, 18]: -make_webplot.py -d=2017070500 -f=t2_mean -tr=12 -t='2-m Ensemble Mean Temperature (F)' +``` +python webplot.py 20170518 --fill precipacc/max --fhr 12 18 --contour hgt500/mean \ + --barb wind500/mean --title 'Max precip acc, mean 500hPa hgt [m] and wind barbs [kt]' \ + --idir /glade/campaign/mmm/parc/schwartz/MPAS_JEDI/15-3km_mesh/cold_start \ + --init_file /glade/scratch/schwartz/MPAS/15-3km_mesh/mpas_init/2019042518/init.nc \ + --meshstr 15-3km_mesh +``` -The package uses basemap objects that are stored as pickle files. These should be created with the saveNewMap() function in webplot.py before attempting to plot. +The MPAS initialization time is provided as the first positional argument.
+ +``` +usage: webplot.py [-h] [--autolevels] [-b BARB] [-c CONTOUR] [-con] [-d] [--domain {CONUS,NA,SGP,NGP,CGP,SW,NW,SE,NE,MATL}] [--ENS_SIZE ENS_SIZE] [-f FILL] [--fhr FHR [FHR ...]] [--idir IDIR] + [--init_file INIT_FILE] [--meshstr MESHSTR] [--nbarbs NBARBS] [--nlon_max NLON_MAX] [--nlat_max NLAT_MAX] [--sigma SIGMA] [-t TITLE] + date + +Web plotting script for NCAR ensemble + +positional arguments: + date model initialization time + +options: + -h, --help show this help message and exit + --autolevels use min/max to determine levels for plot (default: False) + -b BARB, --barb BARB barb field (FIELD_PRODUCT_THRESH) (default: None) + -c CONTOUR, --contour CONTOUR + contour field (FIELD_PRODUCT_THRESH) (default: None) + -con, --convert run final image through imagemagick (default: True) + -d, --debug turn on debugging (default: False) + --domain {CONUS,NA,SGP,NGP,CGP,SW,NW,SE,NE,MATL} + domain of plot (default: CONUS) + --ENS_SIZE ENS_SIZE number of members in ensemble (default: 1) + -f FILL, --fill FILL fill field (FIELD_PRODUCT_THRESH), FIELD options:precip,precip-24hr,precip-48hr,precipacc,sbcape,mlcape,mucape,sbcinh,mlcinh,pwat,t2,t2depart,t2- + 0c,mslp,td2,td2depart,thetae,thetapv,rh2m,pblh,hmuh,hmneguh,hmuh03,hmuh01,rvort1,sspf,hmup,hmdn,hmwind,hmgrp,cref,ref1km,srh3,srh1,shr06mag,shr01mag,zlfc,zlcl,ltg1,ltg2,ltg3,olrtoa,thck1000- + 500,thck1000- + 850,hgt200,hgt250,hgt300,hgt500,hgt700,hgt850,hgt925,speed200,speed250,speed300,speed500,speed700,speed850,speed925,temp200,temp250,temp300,temp500,temp700,temp850,temp925,td500,td700,td850, + td925,rh300,rh500,rh700,rh850,rh925,pvort320k,speed10m,speed10m- + tc,stp,uhratio,crefuh,wind10m,windsfc,wind1km,wind6km,windpv,shr06,shr01,bunkers,wind200,wind250,wind300,wind500,wind700,wind850,wind925,vort500,vort700,vort850,vortpv PRODUCT may be one of + [max,maxstamp,min,mean,meanstamp,prob,neprob,problt,neproblt,paintball,stamp,spaghetti] (default: None) + --fhr FHR [FHR ...] 
list of forecast hours (default: [12]) + --idir IDIR path to model output (default: /glade/campaign/mmm/parc/schwartz/MPAS_ensemble_paper) + --init_file INIT_FILE + path to file with lat/lon/area of mesh (default: None) + --meshstr MESHSTR mesh id. used to prefix output file and pickle file (default: None) + --nbarbs NBARBS max barbs in one dimension (default: 32) + --nlon_max NLON_MAX max pts in longitude dimension (default: 1500) + --nlat_max NLAT_MAX max pts in latitude dimension (default: 1500) + --sigma SIGMA smooth probabilities using gaussian smoother (default: 2) + -t TITLE, --title TITLE + title for plot (default: None) +``` + +### Installation + +Create conda environment described in environment.yaml. + +Use f2py3 to install vert2cell. diff --git a/environment.yaml b/environment.yaml new file mode 100644 index 0000000..6e7254a --- /dev/null +++ b/environment.yaml @@ -0,0 +1,12 @@ +name: webplot +channels: + - conda-forge + - defaults +dependencies: + - pygrib + - scipy + - xarray + - netcdf4 + - dask + - metpy +prefix: /glade/u/home/ahijevyc/miniconda3/envs/webplot diff --git a/fieldinfo.py b/fieldinfo.py index 2e26083..00ad994 100755 --- a/fieldinfo.py +++ b/fieldinfo.py @@ -1,153 +1,160 @@ +from collections import defaultdict +import os # for NCARG_ROOT +import numpy as np + def readcm(name): '''Read colormap from file formatted as 0-1 RGB CSV''' - rgb = [] fh = open(name, 'r') - for line in fh.read().splitlines(): rgb.append(map(float,line.split())) - return rgb + rgb = np.loadtxt(fh) + fh.close() + return rgb.tolist() def readNCLcm(name): - '''Read in NCL colormap for use in matplotlib''' - import os - rgb, appending = [], False - ### directories to NCL colormaps on yellowstone/cheyenne - rgb_dir_ys = '/glade/apps/opt/ncl/6.2.0/intel/12.1.5/lib/ncarg/colormaps' - rgb_dir_ch = '/glade/u/apps/ch/opt/ncl/6.4.0/intel/16.0.3/lib/ncarg/colormaps' - if os.path.isdir(rgb_dir_ys): fh = open('%s/%s.rgb'%(rgb_dir_ys,name), 'r') - else: fh = 
open('%s/%s.rgb'%(rgb_dir_ch,name), 'r') + '''Read in NCL colormap for use in matplotlib + Replaces original function in /glade/u/home/wrfrt/wwe/python_scripts/fieldinfo.py + ''' + + # comments start with ; or # + # first real line is ncolors = 256 (or something like that) + # The rest is bunch of rgb values, one trio per line. + + fh = open(os.getenv('NCARG_ROOT','/glade/u/apps/opt/ncl/6.5.0/intel/17.0.1')+'/lib/ncarg/colormaps/%s.rgb'%name, 'r') + rgb = np.loadtxt(fh, comments=[';', '#', 'n']) # treat ncolors=x as a comment + fh.close() + if rgb.max() > 1: + rgb = rgb/255.0 + return rgb.tolist() + - for line in fh.read().splitlines(): - if appending: rgb.append(map(float,line.split())) - if ''.join(line.split()) in ['#rgb',';RGB']: appending = True - maxrgb = max([ x for y in rgb for x in y ]) - if maxrgb > 1: rgb = [ [ x/255.0 for x in a ] for a in rgb ] - return rgb +fieldinfo = defaultdict(dict) -fieldinfo = { +fieldinfo.update({ # surface and convection-related entries - 'precip' :{ 'levels' : [0,0.01,0.05,0.1,0.2,0.3,0.4,0.5,0.75,1,1.5,2,2.5,3.0], 'cmap': [readNCLcm('precip2_17lev')[i] for i in (0,1,2,4,5,6,7,8,10,12,13,14,15)], 'fname':['PREC_ACC_NC'] }, - 'precip-24hr' :{ 'levels' : [0,0.01,0.05,0.1,0.25,0.5,0.75,1.0,1.25,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0], 'cmap': [readNCLcm('precip2_17lev')[i] for i in (0,1,2,4,5,6,7,8,10,12,13,14,15,16,17)]+['#777777', '#AAAAAA', '#CCCCCC', '#EEEEEE']+[readNCLcm('sunshine_diff_12lev')[i] for i in (4,2,1)], 'fname':['PREC_ACC_NC'] }, - 'precipacc':{ 'levels' : [0,0.01,0.05,0.1,0.25,0.5,0.75,1.0,1.25,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0], 'cmap': [readNCLcm('precip2_17lev')[i] for i in (0,1,2,4,5,6,7,8,10,12,13,14,15,16,17)]+['#777777', '#AAAAAA', '#CCCCCC', '#EEEEEE']+[readNCLcm('sunshine_diff_12lev')[i] for i in (4,2,1)], 'fname':['RAINNC'] }, + 'precip' :{ 'levels' : [0,0.01,0.05,0.1,0.2,0.3,0.4,0.5,0.75,1,1.5,2,2.5,3.0], 'cmap': [readNCLcm('precip2_17lev')[i] for i in 
(0,1,2,4,5,6,7,8,10,12,13,14,15)], 'fname':['rainnc'] }, + 'precip-24hr' :{ 'levels' : [0,0.01,0.05,0.1,0.25,0.5,0.75,1.0,1.25,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12,13], 'cmap': [readNCLcm('precip2_17lev')[i] for i in (0,1,2,4,5,6,7,8,10,12,13,14,15,16,17)]+['#777777', '#AAAAAA', '#CCCCCC', '#EEEEEE']+[readNCLcm('sunshine_diff_12lev')[i] for i in (4,2,1)], 'fname':['rainnc'] }, + 'precip-48hr' :{ 'levels' : [0,0.05,0.1,0.25,0.5,0.75,1.0,1.25,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,12.0,15.0,20,25], 'cmap': [readNCLcm('precip2_17lev')[i] for i in (0,1,2,4,5,6,7,8,10,12,13,14,15,16,17)]+['#777777', '#AAAAAA', '#CCCCCC', '#EEEEEE']+[readNCLcm('sunshine_diff_12lev')[i] for i in (4,2,1)], 'fname':['rainnc'] }, + 'precipacc' :{ 'levels' : [0,0.01,0.05,0.1,0.25,0.5,0.75,1.0,1.25,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0,11.0,12.0,13.0], 'cmap': [readNCLcm('precip2_17lev')[i] for i in (0,1,2,4,5,6,7,8,10,12,13,14,15,16,17)]+['#777777', '#AAAAAA', '#CCCCCC', '#EEEEEE']+[readNCLcm('sunshine_diff_12lev')[i] for i in (4,2,1)], 'fname':['rainnc'] }, 'sbcape' :{ 'levels' : [100,250,500,750,1000,1250,1500,1750,2000,2500,3000,3500,4000,4500,5000,5500,6000], - 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['SBCAPE'], 'filename':'upp' }, + 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['sbcape'] }, 'mlcape' :{ 'levels' : [100,250,500,750,1000,1250,1500,1750,2000,2500,3000,3500,4000,4500,5000,5500,6000], - 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['MLCAPE'], 'filename':'upp' }, + 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['mlcape'] }, 'mucape' :{ 'levels' : [10,25,50,100,250,500,750,1000,1250,1500,1750,2000,2500,3000,3500,4000,4500,5000,5500,6000], - 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['MUCAPE'], 
'filename':'upp' }, - 'sbcinh' :{ 'levels' : [50,75,100,150,200,250,500], 'cmap': readNCLcm('topo_15lev')[1:], 'fname': ['SBCINH'], 'filename':'upp' }, - 'mlcinh' :{ 'levels' : [50,75,100,150,200,250,500], 'cmap': readNCLcm('topo_15lev')[1:], 'fname': ['MLCINH'], 'filename':'upp' }, + 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['cape'] }, + 'sbcinh' :{ 'levels' : [50,75,100,150,200,250,500], 'cmap': readNCLcm('topo_15lev')[1:], 'fname': ['sbcin'] }, + 'mlcinh' :{ 'levels' : [50,75,100,150,200,250,500], 'cmap': readNCLcm('topo_15lev')[1:], 'fname': ['mlcin'] }, 'pwat' :{ 'levels' : [0.25,0.5,0.75,1.0,1.25,1.5,1.75,2.0,2.5,3.0,3.5,4.0], 'cmap' : ['#dddddd', '#cccccc', '#e1e1d3', '#e1d5b1', '#ffffd5', '#e5ffa7', '#addd8e', '#41ab5d', '#007837', '#005529', '#0029b1'], - 'fname' : ['PWAT'], 'filename':'upp'}, - 'hailk1' :{ 'levels' : [0.25,0.5,0.75,1.0,1.25,1.5,1.75,2.0,2.5,3.0,3.5,4.0], 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname':['HAIL_MAXK1'], 'filename': 'diag' }, - 'hail2d' :{ 'levels' : [0.25,0.5,0.75,1.0,1.25,1.5,1.75,2.0,2.5,3.0,3.5,4.0], 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname':['HAIL_MAX2D'], 'filename': 'diag' }, - 'afhail' :{ 'levels' : [0.25,0.5,0.75,1.0,1.25,1.5,1.75,2.0,2.5,3.0,3.5,4.0], 'cmap' : ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['AFWA_HAIL_NEWMEAN'], 'filename':'diag' }, - 't2' :{ 'levels' : [-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110,115,120], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T2'] }, - 't2depart' :{ 'levels' : [-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110,115,120], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T2'] }, - 'mslp' :{ 'levels' : 
[960,964,968,972,976,980,984,988,992,996,1000,1004,1008,1012,1016,1020,1024,1028,1032,1036,1040,1044,1048,1052], 'cmap':readNCLcm('nice_gfdl')[3:193], 'fname':['MSLP'], 'filename': 'upp' }, + 'fname' : ['precipw']}, + 't2' :{ 'levels' : range(-35,125,5), 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['t2m'] }, + 't2depart' :{ 'levels' : range(-35,125,5), 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['t2m'] }, + 't2-0c' :{ 'levels' : [32], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['t2m'] }, + 'mslp' :{ 'levels' : range(960,1056,4), 'cmap':readNCLcm('nice_gfdl')[3:193], 'fname':['mslp'] }, 'td2' :{ 'levels' : [-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30,35,40,45,50,55,60,64,68,72,76,80,84], 'cmap':['#ad598a', '#c589ac','#dcb8cd','#e7cfd1','#d0a0a4','#ad5960', '#8b131d', '#8b4513','#ad7c59', '#c5a289','#dcc7b8','#eeeeee', '#dddddd', '#bbbbbb', '#e1e1d3', '#e1d5b1','#ccb77a','#ffffe5','#f7fcb9', '#addd8e', '#41ab5d', '#006837', '#004529', '#195257', '#4c787c'], - 'fname' : ['DEWPOINT_2M'], 'filename':'upp'}, + 'fname' : ['dewpoint_surface']}, 'td2depart' :{ 'levels' : [20,25,30,35,40,45,50,55,60,64,68,72,76,80,84], 'cmap' : ['#eeeeee', '#dddddd', '#bbbbbb', '#e1e1d3', '#e1d5b1','#ccb77a','#ffffe5','#f7fcb9', '#addd8e', '#41ab5d', '#006837', '#004529', '#195257', '#4c787c'], - 'fname' : ['DEWPOINT_2M'], 'filename':'upp'}, - 'thetae' :{ 'levels' : [300,305,310,315,320,325,330,335,340,345,350,355,360], 'cmap': ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname' : ['T2', 'Q2', 'PSFC'], 'filename': 'diag'}, - 'rh2m' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100,110], 'cmap': readNCLcm('precip2_17lev')[:17][::-1], 'fname': ['T2', 'PSFC', 'Q2'], 'filename': 'diag'}, - 'heatindex' :{ 'levels' : [65,70,75,80,85,90,95,100,105,110,115,120,125,130], 'cmap': readNCLcm('MPL_hot')[::-1], 'fname': ['AFWA_HEATIDX'], 'filename':'diag' }, + 'fname' : ['dewpoint_surface']}, + 'thetae' :{ 'levels' : 
[300,305,310,315,320,325,330,335,340,345,350,355,360], 'cmap': ['#eeeeee', '#dddddd', '#cccccc', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname' : ['t2m', 'q2', 'surface_pressure']}, + 'thetapv' :{ 'levels' : np.arange(278,386,4), 'cmap': readNCLcm('WhiteBlueGreenYellowRed'), 'fname' : ['theta_pv']}, + 'rh2m' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100,110], 'cmap': readNCLcm('precip2_17lev')[:17][::-1], 'fname': ['t2m', 'surface_pressure', 'q2']}, 'pblh' :{ 'levels' : [0,250,500,750,1000,1250,1500,1750,2000,2500,3000,3500,4000], - 'cmap': ['#eeeeee', '#dddddd', '#cccccc', '#bbbbbb', '#44aaee','#88bbff', '#aaccff', '#bbddff', '#efd6c1', '#e5c1a1', '#eebb32', '#bb9918'], 'fname': ['PBLH'] }, - 'hmuh' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['UP_HELI_MAX'], 'filename':'diag'}, - 'hmneguh' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['UP_HELI_MIN'], 'filename':'diag'}, - 'hmuh03' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['UP_HELI_MAX03'], 'filename':'diag'}, - 'rvort1' :{ 'levels' : [0.005,0.006,0.007,0.008,0.009,0.01,0.011,0.012,0.013,0.014,0.015], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['RVORT1_MAX'], 'filename':'diag'}, - 'hmup' :{ 'levels' : [4,6,8,10,12,14,16,18,20,24,28,32,36,40,44,48], 'cmap': readNCLcm('prcp_1')[1:16], 'fname': ['W_UP_MAX'], 'filename':'diag' }, - 'hmdn' :{ 'levels' : [2,3,4,6,8,10,12,14,16,18,20,22,24,26,28,30], 'cmap': readNCLcm('prcp_1')[1:16], 'fname': ['W_DN_MAX'], 'filename':'diag' }, - 'hmwind' :{ 'levels' : [10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42], 'cmap': readNCLcm('prcp_1')[:16], 'fname': ['WSPD10MAX'], 'filename':'diag' }, - 'hmgrp' :{ 'levels' : [0.01,0.1,0.25,0.5,0.75,1.0,1.5,2.0,2.5,3.0,4.0,5.0], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GRPL_MAX'], 'filename':'diag' }, - 'cref' :{ 'levels' : 
[5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('cmap_rad.rgb')[1:14], 'fname': ['REFL_MAX_COL'], 'filename':'upp' }, - 'lmlref' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('cmap_rad.rgb')[1:14], 'fname': ['REFL_10CM'], 'arraylevel':0 }, - 'ref1km' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('cmap_rad.rgb')[1:14], 'fname': ['REFL_1KM_AGL'], 'filename':'upp' }, - 'echotop' :{ 'levels' : [1000,5000,10000,15000,20000,25000,30000,35000,40000,45000,50000,55000,60000,65000], 'cmap': readNCLcm('precip3_16lev')[1::], 'fname': ['ECHOTOP'], 'filename':'diag' }, - 'srh3' :{ 'levels' : [50,100,150,200,250,300,400,500], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['SR_HELICITY_3KM'], 'filename' : 'upp' }, - 'srh1' :{ 'levels' : [50,100,150,200,250,300,400,500], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['SR_HELICITY_1KM'], 'filename' : 'upp' }, - 'shr06mag' :{ 'levels' : [30,35,40,45,50,55,60,65,70,75,80], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['UBSHR6', 'VBSHR6'], 'filename':'upp' }, - 'shr01mag' :{ 'levels' : [10,15,20,25,30,35,40,45,50,55], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['UBSHR1', 'VBSHR1'], 'filename':'upp' }, - 'zlfc' :{ 'levels' : [0,250,500,750,1000,1250,1500,2000,2500,3000,3500,4000,5000], 'cmap': [readNCLcm('nice_gfdl')[i] for i in [3,20,37,54,72,89,106,123,141,158,175,193]], 'fname': ['AFWA_ZLFC'], 'filename':'diag' }, - 'zlcl' :{ 'levels' : [0,250,500,750,1000,1250,1500,2000,2500,3000,3500,4000,5000], 'cmap': [readNCLcm('nice_gfdl')[i] for i in [3,20,37,54,72,89,106,123,141,158,175,193]], 'fname': ['LCL_HEIGHT'], 'filename':'upp' }, - 'ltg1' :{ 'levels' : [0.1,0.5,1,1.5,2,2.5,3,4,5,6,7,8,10,12], 'cmap': readNCLcm('prcp_1')[:15], 'fname': ['LTG1_MAX'], 'filename':'diag' }, - 'ltg2' :{ 'levels' : [0.1,0.5,1,1.5,2,2.5,3,4,5,6,7,8,10,12], 'cmap': readNCLcm('prcp_1')[:15], 'fname': ['LTG2_MAX'], 'filename':'diag' }, - 'ltg3' :{ 'levels' : [0.1,0.5,1,1.5,2,2.5,3,4,5,6,7,8,10,12], 'cmap': 
readNCLcm('prcp_1')[:15], 'fname': ['LTG3_MAX'], 'filename':'diag' }, - 'liftidx' :{ 'levels' : [-14,-12,-10,-8,-6,-4,-2,0,2,4,6,8], 'cmap': readNCLcm('nice_gfdl')[193:3:-1]+['#ffffff'], 'fname': ['SFC_LI'], 'filename':'upp'}, - 'bmin' :{ 'levels' : [-20,-16,-12,-10,-8,-6,-4,-2,-1,-0.5,0,0.5], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['MLBMIN'], 'filename':'upp' }, - 'goesch3' :{ 'levels' : [-80,-78,-76,-74,-72,-70,-68,-66,-64,-62,-60,-58,-56,-54,-52,-50,-48,-46,-44,-42,-40,-38,-36,-34,-32,-30,-28,-26,-24,-22,-20,-18,-16,-14,-12,-10], 'cmap': readcm('cmap_sat2.rgb')[38:1:-1], 'fname': ['GOESE_WV'], 'filename':'upp' }, - 'goesch4' :{ 'levels' : [-80,-76,-72,-68,-64,-60,-56,-52,-48,-44,-40,-36,-32,-28,-24,-20,-16,-12,-8,-4,0,4,8,12,16,20,24,28,32,36,40], 'cmap': readcm('cmap_satir.rgb')[32:1:-1], 'fname': ['GOESE_IR'], 'filename':'upp' }, - 'afwavis' :{ 'levels' : [0.0,0.1,0.25,0.5,1.0,2.0,3.0,4.0,5.0,6.0,8.0,10.0,12.0], 'cmap': readNCLcm('nice_gfdl')[3:175]+['#ffffff'], 'fname': ['VISIBILITY'], 'filename':'upp' }, - 'pbmin' :{ 'levels' : [0,30,60,90,120,150,180],'cmap': ['#dddddd', '#aaaaaa']+readNCLcm('precip2_17lev')[3:-1], 'fname': ['MLPBMIN','PBMIN_SFC'], 'filename':'upp' }, + 'cmap': ['#eeeeee', '#dddddd', '#cccccc', '#bbbbbb', '#44aaee','#88bbff', '#aaccff', '#bbddff', '#efd6c1', '#e5c1a1', '#eebb32', '#bb9918'], 'fname': ['hpbl']}, + 'hmuh' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['updraft_helicity_max']}, + 'hmneguh' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['UP_HELI_MIN']}, + 'hmuh03' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['updraft_helicity_max03']}, + 'hmuh01' :{ 'levels' : [5,10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['updraft_helicity_max01']}, + 'rvort1' :{ 'levels' : 
[0.005,0.006,0.007,0.008,0.009,0.01,0.011,0.012,0.013,0.014,0.015], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['rvort1_max']}, + 'sspf' :{ 'levels' : [10,25,50,75,100,125,150,175,200,250,300,400,500], 'cmap': readNCLcm('prcp_1')[:15], 'fname': ['updraft_helicity_max','WSPD10MAX','HAIL_MAXK1']}, + 'hmup' :{ 'levels' : [4,6,8,10,12,14,16,18,20,24,28,32,36,40,44,48], 'cmap': readNCLcm('prcp_1')[1:16], 'fname': ['w_velocity_max'] }, + #'hmdn' :{ 'levels' : [-19,-17,-15,-13,-11,-9,-7,-5,-3,-1,0], 'cmap': readNCLcm('prcp_1')[16:1:-1]+['#ffffff'], 'fname': ['w_velocity_min'] }, + 'hmdn' :{ 'levels' : [2,3,4,6,8,10,12,14,16,18,20,22,24,26,28,30], 'cmap': readNCLcm('prcp_1')[1:16], 'fname': ['w_velocity_min'] }, + #'hmwind' :{ 'levels' : [10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42], 'cmap': readNCLcm('prcp_1')[:16], 'fname': ['WSPD10MAX'] }, + 'hmwind' :{ 'levels' : [10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42], 'cmap': readNCLcm('prcp_1')[:16], 'fname': ['wind_speed_level1_max'] }, + #'hmwind' :{ 'levels' : [10,12,14,16,18,20,22,24,26,28,30,32,34], 'cmap': readNCLcm('prcp_1')[1:15], 'fname': ['WSPD10MAX'] }, + #'hmwind' :{ 'levels' : [20,25,30,35,40,45,50,55,60,65,70,75,80,85], 'cmap': readNCLcm('prcp_1')[1:16], 'fname': ['WSPD10MAX'] }, + 'hmgrp' :{ 'levels' : [0.01,0.1,0.25,0.5,0.75,1.0,1.5,2.0,2.5,3.0,4.0,5.0], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['graupelnc'] }, + 'cref' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('/glade/u/home/sobash/RT2015_gpx/cmap_rad.rgb')[1:14], 'fname': ['refl10cm_max'] }, + 'ref1km' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('/glade/u/home/sobash/RT2015_gpx/cmap_rad.rgb')[1:14], 'fname': ['refl10cm_1km'] }, + 'srh3' :{ 'levels' : [50,100,150,200,250,300,400,500], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['srh_0_3km'] }, + 'srh1' :{ 'levels' : [50,100,150,200,250,300,400,500], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['srh_0_1km'] }, + 'shr06mag' :{ 
'levels' : [30,35,40,45,50,55,60,65,70,75,80], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['uzonal_6km', 'umeridional_6km', 'uzonal_surface', 'umeridional_surface']}, + 'shr01mag' :{ 'levels' : [30,35,40,45,50,55,60,65,70,75,80], 'cmap': readNCLcm('perc2_9lev'), 'fname': ['uzonal_1km', 'umeridional_1km', 'uzonal_surface', 'umeridional_surface']}, + 'zlfc' :{ 'levels' : [0,250,500,750,1000,1250,1500,2000,2500,3000,3500,4000,5000], 'cmap': [readNCLcm('nice_gfdl')[i] for i in [3,20,37,54,72,89,106,123,141,158,175,193]], 'fname': ['lfc'] }, + 'zlcl' :{ 'levels' : [0,250,500,750,1000,1250,1500,2000,2500,3000,3500,4000,5000], 'cmap': [readNCLcm('nice_gfdl')[i] for i in [3,20,37,54,72,89,106,123,141,158,175,193]], 'fname': ['lcl'] }, + 'ltg1' :{ 'levels' : [0.1,0.5,1,1.5,2,2.5,3,4,5,6,7,8,10,12], 'cmap': readNCLcm('prcp_1')[:15], 'fname': ['LTG1_MAX'] }, + 'ltg2' :{ 'levels' : [0.1,0.5,1,1.5,2,2.5,3,4,5,6,7,8,10,12], 'cmap': readNCLcm('prcp_1')[:15], 'fname': ['LTG2_MAX'] }, + 'ltg3' :{ 'levels' : [0.1,0.5,1,1.5,2,2.5,3,4,5,6,7,8,10,12], 'cmap': readNCLcm('prcp_1')[:15], 'fname': ['LTG3_MAX'] }, + 'olrtoa' :{ 'levels' : range(70,340,10), 'cmap': readcm('/glade/u/home/sobash/RT2015_gpx/cmap_satir.rgb')[32:1:-1], 'fname': ['olrtoa'] }, # winter fields - 'snow' :{ 'levels' : [0.01,0.1,0.25,0.5,0.75,1,1.5,2,2.5,3,4,5], 'cmap':['#dddddd','#aaaaaa']+[readNCLcm('precip3_16lev')[i] for i in [1,3,5,8,10,12,14,16]]+['#FF99FF'], 'fname':['AFWA_SNOWFALL_HRLY'] }, # CSS mod - 'snow-6hr' :{ 'levels' : [0.25,0.5,0.75,1,2,3,4,5,6,8,10,12], 'cmap':['#dddddd','#aaaaaa']+[readNCLcm('precip3_16lev')[i] for i in [1,3,5,8,10,12,14,16]]+['#FF99FF'], 'fname':['AFWA_SNOWFALL_HRLY'] }, # CSS mod - 'snow-12hr' :{ 'levels' : [0.5,1,2,3,6,8,10,12,14,16,18,20], 'cmap':['#dddddd','#aaaaaa']+[readNCLcm('precip3_16lev')[i] for i in [1,3,5,8,10,12,14,16]]+['#FF99FF'], 'fname':['AFWA_SNOWFALL_HRLY'] }, # CSS mod - 'snow-24hr' :{ 'levels' : [1,3,6,8,10,12,15,18,21,24,30,36], 
'cmap':['#dddddd','#aaaaaa']+[readNCLcm('precip3_16lev')[i] for i in [1,3,5,8,10,12,14,16]]+['#FF99FF'], 'fname':['AFWA_SNOWFALL_HRLY'] }, # CSS mod - 'snowacc' :{ 'levels' : [0.01,0.1,0.5,1,2,3,4,5,6,8,10,12,18,24,36,48,60], 'cmap':['#dddddd','#aaaaaa']+[readNCLcm('precip3_16lev')[i] for i in [1,2,3,4,5,6,8,10,11,12,13,15,16]]+['#FF99FF'], 'fname':['AFWA_SNOWFALL'], 'filename':'diag'}, # CSS mod colortable - 'iceacc' :{ 'levels' : [0.01,0.05,0.1,0.15,0.2,0.25,0.3,0.4,0.5,0.75,1,1.25], 'cmap':[readNCLcm('precip3_16lev')[i] for i in [1,2,3,4,5,6,8,10,11,12,13,15,16]], 'fname':['AFWA_ICE'], 'filename':'diag'}, - 'fzra' :{ 'levels' : [0.01,0.05,0.1,0.15,0.2,0.25,0.3,0.4,0.5,0.75,1,1.25], 'cmap':[readNCLcm('precip3_16lev')[i] for i in [1,2,3,4,5,6,8,10,11,12,13,15,16]], 'fname':['AFWA_FZRA_HRLY'] }, # CSS added, hrly - 'fzraacc' :{ 'levels' : [0.01,0.05,0.1,0.15,0.2,0.25,0.3,0.4,0.5,0.75,1,1.25], 'cmap':[readNCLcm('precip3_16lev')[i] for i in [1,2,3,4,5,6,8,10,11,12,13,15,16]], 'fname':['AFWA_FZRA'], 'filename':'diag'}, - 'windchill' :{ 'levels' : [-40,-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30,35,40,45], 'cmap':readNCLcm('GMT_ocean')[20:], 'fname':['AFWA_WCHILL'], 'filename':'diag'}, - 'freezelev' :{ 'levels' : [0, 500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 4500, 5000], 'cmap':readNCLcm('nice_gfdl')[3:193], 'fname':['FZLEV'], 'filename':'diag'}, + 'thck1000-500' :{ 'levels' : [480,486,492,498,504,510,516,522,528,534,540,546,552,558,564,570,576,582,588,592,600], 'cmap':readNCLcm('perc2_9lev'), 'fname':['GHT_PL', 'GHT_PL'], 'arraylevel':[0,5]}, # CSS mod + 'thck1000-850' :{ 'levels' : [82,85,88,91,94,97,100,103,106,109,112,115,118,121,124,127,130,133,136,139,142,145,148,151,154,157,160], 'cmap':readNCLcm('perc2_9lev'), 'fname':['GHT_PL', 'GHT_PL'], 'arraylevel':[0,2]}, # CSS mod # pressure level entries - 'hgt250' :{ 'levels' : [9700,9760,9820,9880,9940,10000,10060,10120,10180,10240,10300,10360,10420,10480,10540,10600,10660,10720,10780,10840,10900,10960,11020], 
'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GHT_PL'], 'filename':'diag', 'arraylevel':8 }, - 'hgt300' :{ 'levels' : [8400,8460,8520,8580,8640,8700,8760,8820,8880,8940,9000,9060,9120,9180,9240,9300,9360,9420,9480,9540,9600,9660,9720,9780,9840,9900,9960,10020], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GHT_PL'], 'filename':'diag', 'arraylevel':7 }, - 'hgt500' :{ 'levels' : [4800,4860,4920,4980,5040,5100,5160,5220,5280,5340,5400,5460,5520,5580,5640,5700,5760,5820,5880,5940,6000], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GHT_PL'], 'filename':'diag', 'arraylevel':5 }, - 'hgt700' :{ 'levels' : [2700,2730,2760,2790,2820,2850,2880,2910,2940,2970,3000,3030,3060,3090,3120,3150,3180,3210,3240,3270,3300], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GHT_PL'], 'filename':'diag', 'arraylevel':3 }, - 'hgt850' :{ 'levels' : [1200,1230,1260,1290,1320,1350,1380,1410,1440,1470,1500,1530,1560,1590,1620,1650], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GHT_PL'], 'filename':'diag', 'arraylevel':2 }, - 'hgt925' :{ 'levels' : [550,580,610,640,670,700,730,760,790,820,850,880,910,940,970,1000,1030], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['GHT_PL'], 'filename':'diag', 'arraylevel':1 }, - 'speed250' :{ 'levels' : [10,20,30,40,50,60,70,80,90,100,110,120,130,140,150,160,170], 'cmap': readNCLcm('wind_17lev'), 'fname': ['S_PL'], 'filename':'diag', 'arraylevel':8 }, - 'speed300' :{ 'levels' : [10,20,30,40,50,60,70,80,90,100,110,120,130,140,150,160,170], 'cmap': readNCLcm('wind_17lev'), 'fname': ['S_PL'], 'filename':'diag', 'arraylevel':7 }, - 'speed500' :{ 'levels' : [15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100], 'cmap': readNCLcm('wind_17lev'), 'fname': ['S_PL'], 'filename':'diag', 'arraylevel':5 }, - 'speed700' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85], 'cmap': readNCLcm('wind_17lev'), 'fname': ['S_PL'], 'filename':'diag', 'arraylevel':3 }, - 'speed850' :{ 'levels' : 
[6,10,14,18,22,26,30,34,38,42,46,50,54,58,62,66,70,74], 'cmap': readNCLcm('wind_17lev'), 'fname': ['S_PL'], 'filename':'diag', 'arraylevel':2 }, - 'speed925' :{ 'levels' : [6,10,14,18,22,26,30,34,38,42,46,50,54,58,62,66,70,74], 'cmap': readNCLcm('wind_17lev'), 'fname': ['S_PL'], 'filename':'diag', 'arraylevel':1 }, - 'temp250' :{ 'levels' : [-65,-63,-61,-59,-57,-55,-53,-51,-49,-47,-45,-43,-41,-39,-37,-35,-33,-31,-29], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T_PL'], 'filename':'diag', 'arraylevel':8 }, - 'temp300' :{ 'levels' : [-65,-63,-61,-59,-57,-55,-53,-51,-49,-47,-45,-43,-41,-39,-37,-35,-33,-31,-29], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T_PL'], 'filename':'diag', 'arraylevel':7 }, - 'temp500' :{ 'levels' : [-41,-39,-37,-35,-33,-31,-29,-26,-23,-20,-17,-14,-11,-8,-5,-2], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T_PL'], 'filename':'diag', 'arraylevel':5 }, - 'temp700' :{ 'levels' : [-36,-33,-30,-27,-24,-21,-18,-15,-12,-9,-6,-3,0,3,6,9,12,15,18,21], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T_PL'], 'filename':'diag', 'arraylevel':3 }, - 'temp850' :{ 'levels' : [-30,-27,-24,-21,-18,-15,-12,-9,-6,-3,0,3,6,9,12,15,18,21,24,27,30], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T_PL'], 'filename':'diag', 'arraylevel':2 }, - 'temp925' :{ 'levels' : [-24,-21,-18,-15,-12,-9,-6,-3,0,3,6,9,12,15,18,21,24,27,30,33], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['T_PL'], 'filename':'diag', 'arraylevel':1 }, - 'td700' :{ 'levels' : [-30,-25,-20,-15,-10,-5,0,5,10], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['TD_PL'], 'filename':'diag', 'arraylevel':3 }, - 'td850' :{ 'levels' : [-40,-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['TD_PL'], 'filename':'diag', 'arraylevel':2 }, - 'td925' :{ 'levels' : [-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['TD_PL'], 'filename':'diag', 'arraylevel':1 }, - 'rh300' :{ 'levels' : 
[0,10,20,30,40,50,60,70,80,90,100], 'cmap' : [readNCLcm('MPL_PuOr')[i] for i in (2,18,34,50)]+[readNCLcm('MPL_Greens')[j] for j in (2,17,50,75,106,125)], 'fname': ['RH_PL'], 'filename':'diag', 'arraylevel':7 }, - 'rh500' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : [readNCLcm('MPL_PuOr')[i] for i in (2,18,34,50)]+[readNCLcm('MPL_Greens')[j] for j in (2,17,50,75,106,125)], 'fname': ['RH_PL'], 'filename':'diag', 'arraylevel':5 }, - 'rh700' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : readNCLcm('CBR_drywet'), 'fname': ['RH_PL'], 'filename':'diag', 'arraylevel':3 }, - 'rh850' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : [readNCLcm('MPL_PuOr')[i] for i in (2,18,34,50)]+[readNCLcm('MPL_Greens')[j] for j in (2,17,50,75,106,125)], 'fname': ['RH_PL'], 'filename':'diag', 'arraylevel':2 }, - 'rh925' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : [readNCLcm('MPL_PuOr')[i] for i in (2,18,34,50)]+[readNCLcm('MPL_Greens')[j] for j in (2,17,50,75,106,125)], 'fname': ['RH_PL'], 'filename':'diag', 'arraylevel':1 }, - 'avo500' :{ 'levels' : [0,9,12,15,18,21,24,27,30,33], 'cmap': readNCLcm('prcp_1'), 'fname': ['AVORT_PL'], 'filename':'diag', 'arraylevel':5 }, -'pvort320k' :{ 'levels' : [0,0.1,0.2,0.3,0.4,0.5,0.75,1,1.5,2,3,4,5,7,10], + 'hgt200' :{ 'levels' : list(range(10900,12500,60)), 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_200hPa']}, + 'hgt250' :{ 'levels' : [9700,9760,9820,9880,9940,10000,10060,10120,10180,10240,10300,10360,10420,10480,10540,10600,10660,10720,10780,10840,10900,10960,11020], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_250hPa']}, + 'hgt300' :{ 'levels' : [8400,8460,8520,8580,8640,8700,8760,8820,8880,8940,9000,9060,9120,9180,9240,9300,9360,9420,9480,9540,9600,9660,9720,9780,9840,9900,9960,10020], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_300hPa']}, + 'hgt500' :{ 'levels' : 
[4800,4860,4920,4980,5040,5100,5160,5220,5280,5340,5400,5460,5520,5580,5640,5700,5760,5820,5880,5940,6000], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_500hPa']}, + 'hgt700' :{ 'levels' : [2700,2730,2760,2790,2820,2850,2880,2910,2940,2970,3000,3030,3060,3090,3120,3150,3180,3210,3240,3270,3300], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_700hPa']}, + 'hgt850' :{ 'levels' : [1200,1230,1260,1290,1320,1350,1380,1410,1440,1470,1500,1530,1560,1590,1620,1650], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_850hPa']}, + 'hgt925' :{ 'levels' : [550,580,610,640,670,700,730,760,790,820,850,880,910,940,970,1000,1030], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['height_925hPa']}, + # RAS: adjusted these ranges - need to capture higher wind speeds + 'speed200' :{ 'levels' : [25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_200hPa','umeridional_200hPa']}, + 'speed250' :{ 'levels' : [25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_250hPa','umeridional_250hPa']}, + 'speed300' :{ 'levels' : [25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_300hPa','umeridional_300hPa']}, + 'speed500' :{ 'levels' : [15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_500hPa','umeridional_500hPa']}, + 'speed700' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_700hPa','umeridional_700hPa']}, + 'speed850' :{ 'levels' : [6,10,14,18,22,26,30,34,38,42,46,50,54,58,62,66,70,74], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_850hPa','umeridional_850hPa']}, + 'speed925' :{ 'levels' : [6,10,14,18,22,26,30,34,38,42,46,50,54,58,62,66,70,74], 'cmap': readNCLcm('wind_17lev'), 'fname': ['uzonal_925hPa','umeridional_925hPa']}, + 'temp200' :{ 'levels' : 
[-65,-63,-61,-59,-57,-55,-53,-51,-49,-47,-45,-43,-41,-39,-37,-35,-33,-31,-29], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_200hPa']}, + 'temp250' :{ 'levels' : [-65,-63,-61,-59,-57,-55,-53,-51,-49,-47,-45,-43,-41,-39,-37,-35,-33,-31,-29], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_250hPa']}, + 'temp300' :{ 'levels' : [-65,-63,-61,-59,-57,-55,-53,-51,-49,-47,-45,-43,-41,-39,-37,-35,-33,-31,-29], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_300hPa']}, + 'temp500' :{ 'levels' : [-41,-39,-37,-35,-33,-31,-29,-26,-23,-20,-17,-14,-11,-8,-5,-2], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_500hPa']}, + 'temp700' :{ 'levels' : [-36,-33,-30,-27,-24,-21,-18,-15,-12,-9,-6,-3,0,3,6,9,12,15,18,21], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_700hPa']}, + 'temp850' :{ 'levels' : [-30,-27,-24,-21,-18,-15,-12,-9,-6,-3,0,3,6,9,12,15,18,21,24,27,30], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_850hPa']}, + 'temp925' :{ 'levels' : [-24,-21,-18,-15,-12,-9,-6,-3,0,3,6,9,12,15,18,21,24,27,30,33], 'cmap': readNCLcm('nice_gfdl')[3:193], 'fname': ['temperature_925hPa']}, + 'td500' :{ 'levels' : [-30,-25,-20,-15,-10,-5,0,5,10], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['dewpoint_500hPa']}, + 'td700' :{ 'levels' : [-30,-25,-20,-15,-10,-5,0,5,10], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['dewpoint_700hPa']}, + 'td850' :{ 'levels' : [-40,-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['dewpoint_850hPa']}, + 'td925' :{ 'levels' : [-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30], 'cmap' : readNCLcm('nice_gfdl')[3:193], 'fname': ['dewpoint_925hPa']}, + 'rh300' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : [readNCLcm('MPL_PuOr')[i] for i in (2,18,34,50)]+[readNCLcm('MPL_Greens')[j] for j in (2,17,50,75,106,125)], 'fname': ['relhum_300hPa']}, + 'rh500' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : 
[readNCLcm('MPL_PuOr')[i] for i in (2,18,34,50)]+[readNCLcm('MPL_Greens')[j] for j in (2,17,50,75,106,125)], 'fname': ['relhum_500hPa']}, + 'rh700' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : readNCLcm('CBR_drywet'), 'fname': ['relhum_700hPa']}, + 'rh850' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : readNCLcm('CBR_drywet'), 'fname': ['relhum_850hPa']}, + 'rh925' :{ 'levels' : [0,10,20,30,40,50,60,70,80,90,100], 'cmap' : readNCLcm('CBR_drywet'), 'fname': ['relhum_925hPa']}, + 'pvort320k' :{ 'levels' : [0,0.1,0.2,0.3,0.4,0.5,0.75,1,1.5,2,3,4,5,7,10], 'cmap' : ['#ffffff','#eeeeee','#dddddd','#cccccc','#bbbbbb','#d1c5b1','#e1d5b9','#f1ead3','#003399','#0033FF','#0099FF','#00CCFF','#8866FF','#9933FF','#660099'], - 'fname': ['PVORT_320K'], 'filename':'upp' }, - 'bunkmag' :{ 'levels' : [20,25,30,35,40,45,50,55,60], 'cmap':readNCLcm('wind_17lev')[1:], 'fname':['U_COMP_STM_6KM', 'V_COMP_STM_6KM'], 'filename':'upp' }, - 'speed10m' :{ 'levels' : [3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51], 'cmap': readNCLcm('wind_17lev')[1:],'fname' : ['U10', 'V10'], 'filename':'diag'}, - 'stp' :{ 'levels' : [0.5,0.75,1.0,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0], 'cmap':readNCLcm('perc2_9lev'), 'fname':['SBCAPE','LCL_HEIGHT','SR_HELICITY_1KM','UBSHR6','VBSHR6'], 'arraylevel':[None,None,None,None,None], 'filename':'upp'}, - 'ptype' :{ 'levels' : [0.01,0.1,0.25,0.5,0.75,1,1.5,2,2.5,3,3.5,4], 'cmap':['#dddddd','#aaaaaa']+readNCLcm('precip3_16lev')[1:], 'fname':['AFWA_RAIN_HRLY', 'AFWA_FZRA_HRLY', 'AFWA_ICE_HRLY', 'AFWA_SNOWFALL_HRLY'], 'filename':'wrfout'}, - 'winter' :{ 'levels' : [0.01,0.1,0.25,0.5,0.75,1,1.5,2,2.5,3,3.5,4], 'cmap':['#dddddd','#aaaaaa']+readNCLcm('precip3_16lev')[1:], 'fname':['AFWA_RAIN_HRLY', 'AFWA_FZRA_HRLY', 'AFWA_ICE_HRLY', 'AFWA_SNOWFALL_HRLY'], 'filename':'wrfout'}, - 'crefuh' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('cmap_rad.rgb')[0:13], 'fname': ['REFL_MAX_COL', 'MAX_UPDRAFT_HELICITY'], 'filename':'upp' }, + 
'fname': ['PVORT_320K'], 'filename':'upp' }, + 'speed10m' :{ 'levels' : [3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51], 'cmap': readNCLcm('wind_17lev')[1:],'fname' : ['u10', 'v10']}, + 'speed10m-tc' :{ 'levels' : [6,12,18,24,30,36,42,48,54,60,66,72,78,84,90,96,102], 'cmap': readNCLcm('wind_17lev')[1:],'fname' : ['u10', 'v10']}, + 'stp' :{ 'levels' : [0.5,0.75,1.0,1.5,2.0,3.0,4.0,5.0,6.0,7.0,8.0], 'cmap':readNCLcm('perc2_9lev'), 'fname':['CAPE_221_SFC','CIN_221_SFC','HLCY_221_HTGY'] }, + 'uhratio' :{ 'levels' : [0.1,0.3,0.5,0.7,0.9,1.0,1.1,1.2,1.3,1.4,1.5], 'cmap':readNCLcm('perc2_9lev'), 'fname':['updraft_helicity_max03', 'updraft_helicity_max']}, + 'crefuh' :{ 'levels' : [5,10,15,20,25,30,35,40,45,50,55,60,65,70], 'cmap': readcm('/glade/u/home/sobash/RT2015_gpx/cmap_rad.rgb')[0:13], 'fname': ['refl10cm_max', 'updraft_helicity_max'] }, # wind barb entries - 'wind10m' :{ 'fname' : ['U10', 'V10'], 'filename':'diag', 'skip':40 }, - 'wind250' :{ 'fname' : ['U_PL', 'V_PL'], 'filename':'diag', 'arraylevel':8, 'skip':40 }, - 'wind300' :{ 'fname' : ['U_PL', 'V_PL'], 'filename':'diag', 'arraylevel':7, 'skip':40 }, - 'wind500' :{ 'fname' : ['U_PL', 'V_PL'], 'filename':'diag', 'arraylevel':5, 'skip':40 }, - 'wind700' :{ 'fname' : ['U_PL', 'V_PL'], 'filename':'diag', 'arraylevel':3, 'skip':40 }, - 'wind850' :{ 'fname' : ['U_PL', 'V_PL'], 'filename':'diag', 'arraylevel':2, 'skip':40 }, - 'wind925' :{ 'fname' : ['U_PL', 'V_PL'], 'filename':'diag', 'arraylevel':1, 'skip':40 }, - 'shr06' :{ 'fname' : ['UBSHR6','VBSHR6'], 'filename': 'upp', 'skip':40 }, - 'shr01' :{ 'fname' : ['UBSHR1', 'VBSHR1'], 'filename': 'upp', 'skip':40 }, + 'wind10m' :{ 'fname' : ['u10', 'v10'], 'skip':50 }, + 'windsfc' :{ 'fname' : ['uzonal_surface','umeridional_surface'], 'skip':50 }, + 'wind1km' :{ 'fname' : ['uzonal_1km','umeridional_1km'], 'filename': 'diag', 'skip':50 }, + 'wind6km' :{ 'fname' : ['uzonal_6km','umeridional_6km'], 'filename': 'diag', 'skip':50 }, + 'windpv' :{ 'fname' : 
['u_pv','v_pv'], 'filename': 'diag', 'skip':50 }, + 'shr06' :{ 'fname' : ['uzonal_6km','umeridional_6km','uzonal_surface','umeridional_surface'], 'skip':50 }, + 'shr01' :{ 'fname' : ['uzonal_1km','umeridional_1km','uzonal_surface','umeridional_surface'], 'skip':50 }, 'bunkers' :{ 'fname' : ['U_COMP_STM_6KM', 'V_COMP_STM_6KM'], 'filename': 'upp', 'skip':40 }, -} +}) + +# Enter wind barb info for list of pressure levels +for plev in ['200', '250', '300', '500', '700', '850', '925']: + fieldinfo['wind'+plev] = { 'fname' : ['uzonal_'+plev+'hPa', 'umeridional_'+plev+'hPa'], 'skip':50} + + +# Combine levels from RAIN, FZRA, ICE, and SNOW for plotting 1-hr accumulated precip for each type. Ahijevych added this +#fieldinfo['ptypes']['levels'] = [fieldinfo['precip']['levels'][1:],fieldinfo['snow']['levels'],fieldinfo['ice']['levels'],fieldinfo['fzra']['levels']] +# domains = { 'domainname': { 'corners':[ll_lat,ll_lon,ur_lat,ur_lon], 'figsize':[w,h] } } domains = { 'CONUS' :{ 'corners': [23.1593,-120.811,46.8857,-65.0212], 'fig_width': 1080 }, - 'SGP' :{ 'corners': [25.30,-107.00,36.00,-88.70], 'fig_width':1080 }, - 'NGP' :{ 'corners': [40.00,-105.00,50.30,-82.00], 'fig_width':1080 }, + 'NA' :{ 'corners': [15.00,-170.00,65.00,-50.00], 'fig_width':1080 }, + 'SGP' :{ 'corners': [25.3,-107.00,36.00,-88.70], 'fig_width':1080 }, + 'NGP' :{ 'corners': [40.00,-105.0,50.30,-82.00], 'fig_width':1080 }, 'CGP' :{ 'corners': [33.00,-107.50,45.00,-86.60], 'fig_width':1080 }, 'SW' :{ 'corners': [28.00,-121.50,44.39,-102.10], 'fig_width':1080 }, 'NW' :{ 'corners': [37.00,-124.40,51.60,-102.10], 'fig_width':1080 }, diff --git a/interp.py b/interp.py new file mode 100755 index 0000000..e9df1a3 --- /dev/null +++ b/interp.py @@ -0,0 +1,152 @@ +import argparse +import logging +import numpy as np +import pandas as pd +import pdb +import pickle +import pygrib +import os +from scipy.spatial import KDTree, Delaunay +import sys +import xarray + +logging.basicConfig(format='%(asctime)s - 
%(message)s', level=logging.INFO) + +class regrid: + ''' + Interpolate MPAS mesh to same grid as an example grib file. + Output netCDF and grib. + ''' + def __init__(self): + self.args = parseargs() + idate = pd.to_datetime(self.args.date) + idir = self.args.idir + var = self.args.var + + # Source mesh + path = self.args.init_file + logging.info(f"get mpas mesh file {path}") + mpas_mesh = xarray.open_dataset(path) + lonCell = mpas_mesh['lonCell'] + lonCell = np.degrees(lonCell) #radians to degrees + lonCell[lonCell>=180] -= 360 + mpas_mesh["lonCell"] = lonCell + mpas_mesh["latCell"] = np.degrees(mpas_mesh["latCell"]) #radians to degrees + + grb_with_dest_mesh = self.args.grb_with_dest_mesh + with pygrib.open(grb_with_dest_mesh) as f: + grb = f.readline() + dest_latlons = grb.latlons() + grb.dataDate = int(idate.strftime("%Y%m%d")) + grb.dataTime = int(idate.strftime("%H%M")) + # destination lats and lons + dest_lats, dest_lons = dest_latlons + + # weights and vertices for regridding + pk_file = os.path.join(os.getenv('TMPDIR'), f"{self.args.meshstr}.pk") + if not os.path.exists(pk_file): + saveNewMap(mpas_mesh, dest_latlons, pk_file) + logging.info(f"load {pk_file}") + (ibox, vtx, wts) = pickle.load(open(pk_file, 'rb')) + + # Source data + valid_times = [idate + pd.Timedelta(fhr, unit="hour") for fhr in self.args.fhr] + ifiles = [ os.path.join(idir, + idate.strftime("%Y%m%d%H"), + f'diag.{valid_time.strftime("%Y-%m-%d_%H.%M.%S")}.nc') + for valid_time in valid_times ] + + nt = len(ifiles) # used to reshape output + logging.info(f"open {len(ifiles)} files") + data = xarray.open_mfdataset(ifiles, concat_dim="Time", combine="nested") + data = data.assign_coords(Time=valid_times) + data = data[var].isel(nCells=ibox) + # dot product of nCells x 3 arrays vtx and wtx, preserving time dimension + out = np.einsum('tnj,nj->tn', np.take(data.values, vtx, axis=1), wts) + out = np.reshape(out, (nt,*dest_lats.shape)) + out = xarray.DataArray(data=out, name=var, + coords = dict( 
+ Time=data.Time, + lat=(["y","x"], dest_lats), + lon=(["y","x"], dest_lons)), + dims=["Time","y","x"], attrs=data.attrs) + + ogrb = f'{idate.strftime("%Y%m%d_%H")}.grb2' + logging.info(f"write {ogrb}") + with open(ogrb, 'wb') as f: + for i, fhr in enumerate(self.args.fhr): + grb.values = out.values[i] + grb["forecastTime"] = fhr + f.write(grb.tostring()) + + ocdf = f'{idate.strftime("%Y%m%d_%H")}.nc' + logging.info(f"to_netcdf {ocdf}") + xarray.concat(out, pd.Index(self.args.fhr, name="forecast_hour")).to_netcdf(ocdf) + + +def parseargs(): + '''Parse arguments and argparse Namespace''' + + parser = argparse.ArgumentParser(description='regrid MPAS', + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('date', help='model initialization time') + parser.add_argument('init_file', help='path to init.nc file with latCell, lonCell of mesh') + parser.add_argument('-d', '--debug', action='store_true', help='turn on debugging') + parser.add_argument('--ENS_SIZE', type=int, default=1, help='number of members in ensemble') + parser.add_argument('--fhr', nargs='+', type=float, default=[12], help='list of forecast hours') + today = pd.Timestamp.now().strftime('%Y%m%d00') + parser.add_argument('--grb_with_dest_mesh', default = f'/glade/scratch/sobash/HRRR/{today}/hrrr.t00z.wrfsfcf00.grib2', + help = 'grib file with destination mesh') + parser.add_argument('--idir', default='/glade/campaign/mmm/parc/schwartz/MPAS_ensemble_paper', + help="path to model output") + parser.add_argument('--meshstr', default="HRRR", help=f'mesh id. used to prefix pickle file') + parser.add_argument('--var', default="refl10cm_1km_max", help=f'name of variable to regrid') + + args = parser.parse_args() + if args.debug: + logging.getLogger().setLevel(logging.DEBUG) + + assert os.path.exists(args.init_file), "--init_file must be path to file with latCell, lonCell, etc." 
+ + return args + +def saveNewMap(mpas_mesh, dest_latlons, pk_file): + logging.info(f"saveNewMap: pk_file={pk_file}") + + # destination lats and lons + dest_lats, dest_lons = dest_latlons + + # Source lats and lons + latCell = mpas_mesh.latCell + lonCell = mpas_mesh.lonCell + # To save time, triangulate near destination mesh + # ibox: subscripts. speed up triangulation in interp_weights + tree = KDTree(np.c_[dest_lons.ravel(), dest_lats.ravel()]) + dd, ii = tree.query(np.c_[lonCell, latCell], distance_upper_bound=0.25) + ibox = np.isfinite(dd) + latCell = latCell[ibox] + lonCell = lonCell[ibox] + + logging.info(f"saveNewMap: triangulate {len(lonCell)} pts") + vtx, wts = interp_weights(np.vstack((lonCell,latCell)).T,np.vstack((dest_lons.flatten(), dest_lats.flatten())).T) + assert wts.min() >= 0, 'got negative weight' + + pickle.dump((ibox,vtx,wts), open(pk_file, 'wb')) + +# from https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids +def interp_weights(xyz, uvw): + logging.info("interp_weights: Delaunay") + tri = Delaunay(xyz) + logging.info("interp_weights: find_simplex") + simplex = tri.find_simplex(uvw) + logging.info("interp_weights: vertices") + vertices = np.take(tri.simplices, simplex, axis=0) + temp = np.take(tri.transform, simplex, axis=0) + d = xyz.shape[1] + delta = uvw - temp[:, d] + logging.info("interp_weights: bary") + bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) + return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) + +if __name__ == "__main__": + regrid() diff --git a/latlonfile.nc b/latlonfile.nc deleted file mode 100644 index e23a41d..0000000 Binary files a/latlonfile.nc and /dev/null differ diff --git a/make_webplot.py b/make_webplot.py deleted file mode 100755 index a19f9ab..0000000 --- a/make_webplot.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -import sys, time, os -from webplot import webPlot, readGrid, drawOverlay, saveNewMap - 
-def log(msg): print time.ctime(time.time()),':', msg - -log('Begin Script'); stime = time.time() - -regions = ['CONUS', 'NGP', 'SGP', 'CGP', 'MATL', 'NE', 'NW', 'SE', 'SW'] - -if not os.path.exists('picklefilename.pk'): - saveNewMap(wrfout='wrfout_file_containing_lat_lons', domstr='name_for_domain') - -newPlot = webPlot() -log('Reading Data'); newPlot.readEnsemble() - -for dom in regions: - file_not_created, num_attempts = True, 0 - while file_not_created and num_attempts <= 3: - newPlot.domain = dom - - newPlot.createFilename() - fname = newPlot.outfile - - log('Loading Map for %s'%newPlot.domain) - newPlot.loadMap() - - log('Plotting Data') - if newPlot.opts['interp']: - newPlot.plotInterp() - else: - newPlot.plotFields() - newPlot.plotTitleTimes() - - log('Writing Image') - newPlot.saveFigure(trans=newPlot.opts['over']) - - if os.path.exists(fname): - file_not_created = False - log('Created %s, %.1f KB'%(fname,os.stat(fname).st_size/1000.0)) - - num_attempts += 1 - -etime = time.time() -log('End Plotting (took %.2f sec)'%(etime-stime)) - diff --git a/mpas.py b/mpas.py new file mode 100644 index 0000000..f29da9f --- /dev/null +++ b/mpas.py @@ -0,0 +1,11 @@ +from fieldinfo import fieldinfo,readNCLcm +import numpy as np + +# fieldinfo should have been imported from fieldinfo module. +# Copy fieldinfo dictionary for MPAS. Change some fnames. 
+fieldinfo['td2']['fname'] = ['surface_dewpoint'] +fieldinfo['td2depart']['fname'] = ['surface_dewpoint'] +for plev in ['500', '700', '850']: + fieldinfo['vort'+plev] = {'levels' : np.array([0,9,12,15,18,21,24,27,30,33])*1e-5, 'cmap': readNCLcm('prcp_1'), 'fname': ['vorticity_'+plev+'hPa']} +fieldinfo['vortpv'] = {'levels' : [0,9,12,15,18,21,24,27,30,33], 'cmap': readNCLcm('prcp_1'), 'fname': ['vort_pv']} +fieldinfo['stp']['fname'] = ['sbcape','lcl','srh_0_1km','uzonal_6km','umeridional_6km','uzonal_surface','umeridional_surface'] diff --git a/mpas_vort_cell.f90 b/mpas_vort_cell.f90 new file mode 100644 index 0000000..d16006e --- /dev/null +++ b/mpas_vort_cell.f90 @@ -0,0 +1,25 @@ +subroutine vert2cell(nEdgesOnCell, verticesOnCell, maxEdges, nVertLevels, nCells, nVertices, fieldv, fieldc) +implicit none + +integer, intent(in) :: maxEdges, nVertLevels, nCells, nVertices +!f2py required :: maxEdges, nVertLevels, nCells, nVertices +integer, intent(in) :: nEdgesOnCell(nCells) +integer, intent(in) :: verticesOnCell(maxEdges,nCells) +real*8, intent(in) :: fieldv(nVertLevels,nVertices) +real*8, intent(out) :: fieldc(nVertLevels,nCells) + +integer i,j,k +real*8 factor + +do k=1,nVertLevels + do i=1,nCells + factor = 1./nEdgesOnCell(i) + fieldc(k,i) = 0. 
+ do j=1,nEdgesOnCell(i) + fieldc(k,i)=fieldc(k,i) + fieldv(k,verticesOnCell(j,i)) + end do + fieldc(k,i) = factor*fieldc(k,i) + end do +end do +return +end subroutine vert2cell diff --git a/ncar.png b/ncar.png new file mode 120000 index 0000000..c892200 --- /dev/null +++ b/ncar.png @@ -0,0 +1 @@ +/glade/work/ahijevyc/share/rt_ensemble/python_scripts/ncar.png \ No newline at end of file diff --git a/webplot.py b/webplot.py index 0a77211..a393d9f 100755 --- a/webplot.py +++ b/webplot.py @@ -1,106 +1,147 @@ +import argparse +import cartopy +from collections import defaultdict +import datetime +from fieldinfo import domains, readNCLcm +import logging import matplotlib.colors as colors import matplotlib.pyplot as plt -from mpl_toolkits.basemap import * -from datetime import * -import cPickle as pickle -import os, sys, time, argparse +from metpy.units import units +import mpas +from mpas import fieldinfo +import numpy as np +import pandas as pd +import pdb +import pickle +import os import scipy.ndimage as ndimage +from scipy.interpolate import griddata +from scipy.spatial import Delaunay import subprocess -from fieldinfo import * -from netCDF4 import Dataset, MFDataset +import re +import sys +import time +# created with f2py3 -c mpas_vort_cell.f90 -m vert2cell +# Had to fix /glade/u/home/ahijevyc/miniconda3/envs/webplot/lib/python3.11/site-packages/numpy/f2py/src/fortranobject.c +# Moved int i declaration outside for loop. 
(line 707) +import vert2cell +import xarray + +logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO) class webPlot: '''A class to plot data from NCAR ensemble''' def __init__(self): self.opts = parseargs() - self.initdate = datetime.strptime(self.opts['date'], '%Y%m%d%H') - self.title = self.opts['title'] - self.debug = self.opts['debug'] + self.initdate = pd.to_datetime(self.opts['date']) + self.ENS_SIZE = self.opts['ENS_SIZE'] self.autolevels = self.opts['autolevels'] + self.debug = self.opts['debug'] self.domain = self.opts['domain'] - if ',' in self.opts['timerange']: self.shr, self.ehr = map(int, self.opts['timerange'].split(',')) - else: self.shr, self.ehr = int(self.opts['timerange']), int(self.opts['timerange']) - self.createFilename() - self.ENS_SIZE = int(os.getenv('ENS_SIZE', 10)) + self.fhr = self.opts['fhr'] + self.idir = self.opts['idir'] + self.meshstr = self.opts['meshstr'] + self.nbarbs = self.opts['nbarbs'] + self.nlat_max = self.opts['nlat_max'] + self.nlon_max = self.opts['nlon_max'] + self.title = self.opts['title'] + self.get_mpas_mesh() + self.loadMap() + self.data = readEnsemble(self) + self.plotFields() + self.plotTitleTimes() + self.saveFigure() + def createFilename(self): - for f in ['fill', 'contour','barb']: + for f in ['fill', 'contour','barb']: # CSS added this for loop and everything in it + prefx = self.meshstr if 'name' in self.opts[f]: if 'thresh' in self.opts[f]: - prefx = self.opts[f]['name']+'_'+self.opts[f]['ensprod']+'_'+str(self.opts[f]['thresh']) + prefx = self.opts[f]['name']+'_'+self.opts[f]['ensprod']+'_'+str(self.opts[f]['thresh']) # CSS else: - prefx = self.opts[f]['name']+'_'+self.opts[f]['ensprod'] + prefx = self.opts[f]['name']+'_'+self.opts[f]['ensprod'] # CSS break - if self.shr == self.ehr: - self.outfile = prefx+'_f'+'%03d'%self.shr+'_'+self.domain+'.png' - else: - self.outfile = prefx+'_f'+'%03d'%self.shr+'-f'+'%03d'%self.ehr+'_'+self.domain+'.png' + shr = min(self.fhr) + ehr = 
max(self.fhr) + if len(self.fhr) == 1: + outfile = f"{prefx}_f{shr:03.0f}_{self.domain}.png" + else: # CSS + outfile = f"{prefx}_f{shr:03.0f}-f{ehr:03.0f}_{self.domain}.png" + + # create yyyymmddhh/domain/ directory if needed + subdir_path = os.path.join(os.getenv('TMPDIR'), self.opts['date'], self.domain) + if not os.path.isdir(subdir_path): + logging.warning(f"webPlot.createFilename(): making new output directory {subdir_path}") + os.makedirs(subdir_path) + # prepend subdir_path to outfile. + outfile = os.path.join(subdir_path, outfile) + outfile = os.path.realpath(outfile) + return outfile def loadMap(self): - # load pickle file containing figure and axes objects (should be pregenerated) - PYTHON_SCRIPTS_DIR = os.getenv('PYTHON_SCRIPTS_DIR', '.') - self.fig, self.ax, self.m = pickle.load(open('%s/%s.pk'%(PYTHON_SCRIPTS_DIR,self.domain), 'r')) - - # get lat/lons from file here - LATLON_FILE = os.getenv('LATLON_FILE', PYTHON_SCRIPTS_DIR+'/latlonfile.nc') - self.lats, self.lons = readGrid(LATLON_FILE) - self.x, self.y = self.m(self.lons,self.lats) - - def readEnsemble(self): - self.data, self.missing_members = readEnsemble(self.initdate, timerange=[self.shr,self.ehr], fields=self.opts, debug=self.debug, ENS_SIZE=self.ENS_SIZE) + self.pk_file = os.path.join(os.getenv('TMPDIR'), f"{self.meshstr}_{self.domain}_{self.nlon_max}x{self.nlat_max}.pk") + if not os.path.exists(self.pk_file): + saveNewMap(self) + logging.debug(f"loadMap: use old pickle file {self.pk_file}") + (self.ax, self.extent, self.lons, self.lats, + self.lon2d, self.lat2d, self.x2d, self.y2d, self.ibox, self.x, self.y, self.vtx, + self.wts) = pickle.load(open(self.pk_file, 'rb')) + + def drawOverlay(self): + domain=self.domain + logging.info(f"drawOverlay: domain={domain}") + self.ax.axis('off') + self.ax.set_extent(self.extent, crs=self.ax.projection) + plt.savefig(f'overlay_counties_{domain}.png', transparent=True) + + def get_mpas_mesh(self): + path = self.opts["init_file"] + logging.info(f"open mpas 
mesh file {path}") + mpas_mesh = xarray.open_dataset(path) + lonCell = mpas_mesh['lonCell'] + lonCell = np.degrees(lonCell) #radians to degrees + lonCell[lonCell>=180] -= 360 + mpas_mesh["lonCell"] = lonCell + mpas_mesh["latCell"] = np.degrees(mpas_mesh["latCell"]) #radians to degrees def plotTitleTimes(self): - if self.opts['over']: return fontdict = {'family':'monospace', 'size':12, 'weight':'bold'} - # place title and times above corners of map - x0, y1 = self.ax.transAxes.transform((0,1)) - x0, y0 = self.ax.transAxes.transform((0,0)) - x1, y1 = self.ax.transAxes.transform((1,1)) - self.ax.text(x0, y1+10, self.title, fontdict=fontdict, transform=None) + # place title and times above plot + verticalalignment="bottom" + self.ax.text(0, 1, self.title, fontdict=fontdict, horizontalalignment='left', + verticalalignment=verticalalignment, transform=self.ax.transAxes, wrap=True) - initstr = self.initdate.strftime('Init: %a %Y-%m-%d %H UTC') - if ((self.ehr - self.shr) == 0): - validstr = (self.initdate+timedelta(hours=self.shr)).strftime('Valid: %a %Y-%m-%d %H UTC') - else: - validstr1 = (self.initdate+timedelta(hours=(self.shr-1))).strftime('%a %Y-%m-%d %H UTC') - validstr2 = (self.initdate+timedelta(hours=self.ehr)).strftime('%a %Y-%m-%d %H UTC') - validstr = "Valid: %s - %s"%(validstr1, validstr2) - - self.ax.text(x1, y1+20, initstr, horizontalalignment='right', transform=None) - self.ax.text(x1, y1+5, validstr, horizontalalignment='right', transform=None) - - # Plot missing members (use wrfout count here, if upp missing this wont show that) - if len(self.missing_members['wrfout']) > 0: - missing_members = sorted(set([ (x%10)+1 for x in self.missing_members['wrfout'] ])) #get member number from missing indices - missing_members_string = ', '.join(str(x) for x in missing_members) - self.ax.text(x1-5, y0+5, 'Missing member #s: %s'%missing_members_string, horizontalalignment='right') + logging.info(self.title) + fontdict = {'family':'monospace'} + initstr, validstr = 
self.getInitValidStr() + logging.info(initstr+"\n"+validstr) + self.ax.text(1, 1, initstr+"\n"+validstr, fontdict=fontdict, horizontalalignment='right', + verticalalignment=verticalalignment, transform=self.ax.transAxes) def plotFields(self): if 'fill' in self.data: if self.opts['fill']['ensprod'] == 'paintball': self.plotPaintball() - elif self.opts['fill']['ensprod'] in ['stamp', 'maxstamp']: self.plotStamp() + elif self.opts['fill']['ensprod'].endswith("stamp"): self.plotStamp() else: self.plotFill() if 'contour' in self.data: if self.opts['contour']['ensprod'] == 'spaghetti': self.plotSpaghetti() - elif self.opts['contour']['ensprod'] == 'stamp': self.plotStamp() + elif self.opts['contour']['ensprod'].endswith('stamp'): self.plotStamp() else: self.plotContour() if 'barb' in self.data: - #self.plotStreamlines() + assert not self.opts['barb']['ensprod'].endswith('stamp'), "TODO: postage stamp barbs" self.plotBarbs() - + def plotFill(self): - if self.opts['fill']['name'] == 'ptype': self.plotFill_ptype(); return - elif self.opts['fill']['name'] == 'crefuh': self.plotReflectivityUH(); return - + if self.opts['fill']['name'] == 'crefuh': self.plotReflectivityUH(); return if self.autolevels: min, max = self.data['fill'][0].min(), self.data['fill'][0].max() levels = np.linspace(min, max, num=10) cmap = colors.ListedColormap(self.opts['fill']['colors']) - norm = colors.BoundaryNorm(levels, cmap.N) tick_labels = levels[:-1] else: levels = self.opts['fill']['levels'] @@ -112,97 +153,106 @@ def plotFill(self): cmap.set_over(self.opts['fill']['colors'][-1]) extend, extendfrac = 'max', 0.02 tick_labels = levels - norm = colors.BoundaryNorm(levels, cmap.N) + norm = colors.BoundaryNorm(levels, cmap.N) - # smooth some of the fill fields - if self.opts['fill']['name'] == 'avo500': self.data['fill'][0] = ndimage.gaussian_filter(self.data['fill'][0], sigma=4) - if self.opts['fill']['name'] == 'pbmin': self.data['fill'][0] = ndimage.gaussian_filter(self.data['fill'][0], sigma=2) - 
- cs1 = self.m.contourf(self.x, self.y, self.data['fill'][0], levels=levels, cmap=cmap, norm=norm, extend='max', ax=self.ax) - - self.plotColorbar(cs1, levels, tick_labels, extend, extendfrac) - - def plotFill_ptype(self): - ml_type = np.zeros(self.data['fill'][0].shape) - ml_type_prob = np.zeros(self.data['fill'][0].shape) - - for i in [1,2,3,4]: - pts = (self.data['fill'][i-1] > ml_type_prob+0.001) - ml_type_prob[pts] = self.data['fill'][i-1][pts] - ml_type[pts] = i+0.001 - - cmap = colors.ListedColormap(['#7BBF6A', 'red', 'orange', 'blue']) - norm = colors.BoundaryNorm([1,2,3,4,5], cmap.N) - - x = (self.x[1:,1:] + self.x[:-1,:-1])/2.0 - y = (self.y[1:,1:] + self.y[:-1,:-1])/2.0 - cs1 = self.m.pcolormesh(x, y, np.ma.masked_equal(ml_type[1:,1:], 0), cmap=cmap, norm=norm, edgecolors='None', ax=self.ax) - - # make axes for colorbar, 175px to left and 30px down from bottom of map - x0, y0 = self.ax.transAxes.transform((0,0)) - x, y = self.fig.transFigure.inverted().transform((x0+175,y0-29.5)) - cax = self.fig.add_axes([x,y,0.985-x,y/3.0]) - cb = plt.colorbar(cs1, cax=cax, orientation='horizontal') - cb.outline.set_linewidth(0.5) - cb.set_ticks([0.5,1.5,2.5,3.5,4.5,5.5]) - cb.set_ticklabels(['Rain', 'Freezing Rain', 'Sleet', 'Snow']) - cb.ax.tick_params(length=0) + data = self.data['fill'][0] + if not self.opts['fill']['ensprod'].startswith('neprob'): + logging.info(f"plotFill: latlonGrid") + data = self.latlonGrid(data) + if self.opts['fill']['name'] in ['avo500', 'vort500', 'pbmin']: + logging.info(f"smooth {data.name}") + # use .values to preserve DataArray attributes. 
+ data.values = ndimage.gaussian_filter(data, sigma=4) + + cs1 = self.ax.contourf(self.x2d, self.y2d, data, levels=levels, cmap=cmap, norm=norm, extend='max') + label = f"{data.long_name} [{data.units}]" + self.plotColorbar(cs1, levels, tick_labels, extend, extendfrac, label=label) def plotReflectivityUH(self): levels = self.opts['fill']['levels'] cmap = colors.ListedColormap(self.opts['fill']['colors']) + extend, extendfrac = 'neither', 0.0 norm = colors.BoundaryNorm(levels, cmap.N) tick_labels = levels[:-1] - cs1 = self.m.contourf(self.x, self.y, self.data['fill'][0], levels=levels, cmap=cmap, norm=norm, extend='max', ax=self.ax) - self.m.contourf(self.x, self.y, self.data['fill'][1], levels=[75,1000], colors='black', ax=self.ax, alpha=0.3) - self.m.contour(self.x, self.y, self.data['fill'][1], levels=[75], colors='k', linewidth=0.5, ax=self.ax) + logging.info(f"plotReflectivityUH: latlonGrid {self.data['fill'][0].long_name}") + data = self.latlonGrid(self.data["fill"][0]) + cs1 = self.ax.contourf(self.x2d, self.y2d, data, levels=levels, cmap=cmap, norm=norm, extend='max') + logging.info(f"plotReflectivityUH: latlonGrid {self.data['fill'][1].long_name}") + uh = self.latlonGrid(self.data['fill'][1]) + minUH = 100 + logging.info(f"plotReflectivityUH: UH shading and contour above {minUH}") + self.ax.contourf(self.x2d, self.y2d, uh, levels=[minUH,1000], colors='black', alpha=0.3) + cs2 = self.ax.contour( self.x2d, self.y2d, uh, levels=[minUH], colors='k', linewidths=0.5) - #maxuh = self.data['fill'][1].max() - #self.ax.text(0.03,0.03,'Domain-wide UH max %0.f'%maxuh ,ha="left",va="top",bbox=dict(boxstyle="square",lw=0.5,fc="white"), transform=self.ax.transAxes) - - self.plotColorbar(cs1, levels, tick_labels) - - def plotColorbar(self, cs, levels, tick_labels, extend='neither', extendfrac=0.0): - # make axes for colorbar, 175px to left and 30px down from bottom of map - x0, y0 = self.ax.transAxes.transform((0,0)) - x, y = 
self.fig.transFigure.inverted().transform((x0+175,y0-29.5)) - cax = self.fig.add_axes([x,y,0.985-x,y/3.0]) - cb = plt.colorbar(cs, cax=cax, orientation='horizontal', extend=extend, extendfrac=extendfrac, ticks=tick_labels) + label = f"{data.long_name} [{data.units}], shaded {uh.long_name} above ${minUH} {uh.units}$" + self.plotColorbar(cs1, levels, tick_labels, extend, extendfrac, label=label) + + def plotColorbar(self, cs, levels, tick_labels, extend='neither', extendfrac=0.0, label=""): + cb = plt.colorbar(cs, ax=self.ax, location="bottom", orientation='horizontal', extend=extend, shrink=0.65, + anchor=(1,1), panchor=(1,0), aspect=55, pad=0.03, extendfrac=extendfrac, ticks=tick_labels, label=label) + cb.ax.xaxis.set_label_position('top') cb.outline.set_linewidth(0.5) - def plotContour(self): - if self.opts['contour']['name'] in ['t2-0c']: data = ndimage.gaussian_filter(self.data['contour'][0], sigma=2) - else: data = ndimage.gaussian_filter(self.data['contour'][0], sigma=10) + def interpolatetri(self, values, vtx, wts): + return np.einsum('nj,nj->n', np.take(values, vtx), wts) + + def latlonGrid(self, data): + logging.debug(f"latlonGrid: {data.name}") + data = data.metpy.dequantify() # Allow units to transfer to gridded array via attribute + # apply ibox to data + data = data.isel(nCells=self.ibox) + if hasattr(self, "vtx") and hasattr(self, "wts"): + logging.debug("latlonGrid: interpolatetri(vtx and wts)") + # by using .values, Avoid interpolatetri ValueError: dimensions ('nCells',) must have the same length as the number of data dimensions, ndim=2 + data_gridded = self.interpolatetri(data.values, self.vtx, self.wts) + data_gridded = np.reshape(data_gridded, self.lat2d.shape) + else: + logging.info("latlonGrid: interpolate with griddata()") + data_gridded = griddata((self.lons, self.lats), data, (self.lon2d, self.lat2d), method='nearest') + data_gridded = xarray.DataArray(data = data_gridded, coords = dict(lat=self.lat2d[:,0], lon=self.lon2d[0]), 
attrs=data.attrs) + return data_gridded + def plotContour(self): if self.opts['contour']['name'] in ['sbcinh','mlcinh']: linewidth, alpha = 0.5, 0.75 else: linewidth, alpha = 1.5, 1.0 - cs2 = self.m.contour(self.x, self.y, data, levels=self.opts['contour']['levels'], colors='k', linewidths=linewidth, ax=self.ax, alpha=alpha) + data = self.data['contour'][0] + + if not self.opts['contour']['ensprod'].startswith('neprob'): + logging.info(f"plotContour: latlonGrid") + data = self.latlonGrid(data) + + if self.opts['contour']['name'] in ['t2-0c']: data.values = ndimage.gaussian_filter(data, sigma=2) + else: data.values = ndimage.gaussian_filter(data, sigma=25) + + cs2 = self.ax.contour(self.x2d, self.y2d, data, levels=self.opts['contour']['levels'], colors='k', linewidths=linewidth, alpha=alpha) plt.clabel(cs2, fontsize='small', fmt='%i') def plotBarbs(self): - skip = self.opts['barb']['skip'] - if self.domain != 'CONUS': skip = 20 + + skip = max([*self.x2d.shape, *self.y2d.shape])/self.nbarbs + skip = int(skip) + logging.debug(f"plotBarbs: nbarbs={self.nbarbs} skip={skip}") if self.opts['fill']['name'] == 'crefuh': alpha=0.5 else: alpha=1.0 - cs2 = self.m.barbs(self.x[::skip,::skip], self.y[::skip,::skip], self.data['barb'][0][::skip,::skip], self.data['barb'][1][::skip,::skip], \ - color='black', alpha=alpha, length=5.5, linewidth=0.25, sizes={'emptybarb':0.05}, ax=self.ax) + logging.debug(f"plotBarbs: starting barbs {[x.name for x in self.data['barb']]}") + # skip interval intended for 2-D fields + u = self.latlonGrid(self.data['barb'][0])[::skip,::skip].values.flatten() + v = self.latlonGrid(self.data['barb'][1])[::skip,::skip].values.flatten() + x = self.lon2d[::skip,::skip].flatten() + y = self.lat2d[::skip,::skip].flatten() + # transform orients the barbs properly on projection + logging.info(f"plotBarbs: barbs") + cs2 = self.ax.barbs(x, y, u, v, alpha=alpha, length=5.5, linewidth=0.25, sizes={'emptybarb':0.05}, transform=cartopy.crs.PlateCarree()) - def 
plotStreamlines(self): - speed = np.sqrt(self.data['barb'][0]**2 + self.data['barb'][1]**2) - lw = 5*speed/speed.max() - cs2 = self.m.streamplot(self.x[0,:], self.y[:,0], self.data['barb'][0], self.data['barb'][1], color='k', density=3, linewidth=lw, ax=self.ax) - cs2.lines.set_alpha(0.5) - cs2.arrows.set_alpha(0.5) #apparently this doesn't work? - def plotPaintball(self): rects, labels = [], [] colorlist = self.opts['fill']['colors'] levels = self.opts['fill']['levels'] for i in range(self.data['fill'][0].shape[0]): - cs = self.m.contourf(self.x, self.y, self.data['fill'][0][i,:], levels=levels, colors=[colorlist[i%len(colorlist)]], ax=self.ax, alpha=0.5) + cs = self.ax.contourf(self.x, self.y, self.data['fill'][0][i,self.ibox], tri=True, levels=levels, colors=[colorlist[i%len(colorlist)]], alpha=0.5) rects.append(plt.Rectangle((0,0),1,1,fc=colorlist[i%len(colorlist)])) labels.append("member %d"%(i+1)) @@ -213,152 +263,176 @@ def plotSpaghetti(self): proxy = [] colorlist = self.opts['contour']['colors'] levels = self.opts['contour']['levels'] - data = ndimage.gaussian_filter(self.data['contour'][0], sigma=[0,4,4]) + data = self.data['contour'][0] + data.values = ndimage.gaussian_filter(data, sigma=[0,4,4]) for i in range(self.data['contour'][0].shape[0]): - #cs = self.m.contour(self.x, self.y, data[i,:], levels=levels, colors=[colorlist[i]], linewidths=2, linestyles='solid', ax=self.ax) - cs = self.m.contour(self.x, self.y, data[i,:], levels=levels, colors='k', alpha=0.6, linewidths=1, linestyles='solid', ax=self.ax) + cs = self.ax.contour(self.x, self.y, data[i,:], levels=levels, colors='k', alpha=0.6, linewidths=1, linestyles='solid') #proxy.append(plt.Rectangle((0,0),1,1,fc=colorlist[i])) #plt.legend(proxy, ["member %d"%i for i in range(1,11)], ncol=5, loc='right', bbox_to_anchor=(1.0,-0.05), fontsize=11, \ # frameon=False, borderpad=0.25, borderaxespad=0.25, handletextpad=0.2) def plotStamp(self): - fig_width_px, dpi = 1280, 90 - fig = plt.figure(dpi=dpi) - 
num_rows, num_columns = 3, 4 - fig_width = fig_width_px/dpi - width_per_panel = fig_width/float(num_columns) - height_per_panel = width_per_panel*self.m.aspect - fig_height = height_per_panel*num_rows - fig_height_px = fig_height*dpi - fig.set_size_inches((fig_width, fig_height)) + num_rows, num_columns = 3, 4 + fig, axs = plt.subplots(nrows=num_rows, ncols=num_columns, + subplot_kw={'projection':self.ax.projection}, + figsize=(14,8)) + fig.subplots_adjust(bottom=0.01, top=0.99, left=0.01, right=0.99, wspace=0.01, hspace=0.01) - levels = self.opts['fill']['levels'] - cmap = colors.ListedColormap(self.opts['fill']['colors']) - norm = colors.BoundaryNorm(levels, cmap.N) - filename = self.opts['fill']['filename'] - - memberidx = 0 - for j in range(0,num_rows): - for i in range(0,num_columns): - member = num_columns*j+i - if member > 9: break - spacing_w, spacing_h = 5/float(fig_width_px), 5/float(fig_height_px) - spacing_w = 10/float(fig_width_px) - x, y = i*width_per_panel/float(fig_width), 1.0 - (j+1)*height_per_panel/float(fig_height) - w, h = (width_per_panel/float(fig_width))-spacing_w, (height_per_panel/float(fig_height))-spacing_h - if member == 9: y = 0 - - #print 'member', member, 'creating axes at', x, y - thisax = fig.add_axes([x,y,w,h]) - - thisax.axis('on') - for axis in ['top','bottom','left','right']: thisax.spines[axis].set_linewidth(0.5) - self.m.drawcoastlines(ax=thisax, linewidth=0.3) - self.m.drawstates(linewidth=0.15, ax=thisax) - self.m.drawcountries(ax=thisax, linewidth=0.3) - thisax.text(0.03,0.97,member+1,ha="left",va="top",bbox=dict(boxstyle="square",lw=0.5,fc="white"), transform=thisax.transAxes) - - # plot, unless file that has fill field is missing, then skip - if member not in self.missing_members[filename] and member < self.ENS_SIZE: - cs1 = self.m.contourf(self.x, self.y, self.data['fill'][0][memberidx,:], levels=levels, cmap=cmap, norm=norm, extend='max', ax=thisax) - memberidx += 1 - - # use every other tick for large colortables, 
remove last tick label for both - if self.opts['fill']['name'] in ['goesch3', 'goesch4', 't2', 'precipacc' ]: ticks = levels[:-1][::2] # CSS added precipacc - else: ticks = levels[:-1] - - # add colorbar to figure - cax = fig.add_axes([0.51,0.3,0.48,0.02]) - cb = plt.colorbar(cs1, cax=cax, orientation='horizontal', ticks=ticks, extendfrac=0.0) - cb.outline.set_linewidth(0.5) - cb.ax.tick_params(labelsize=9) - - # add init/valid text - fontdict = {'family':'monospace', 'size':13, 'weight':'bold'} - initstr = self.initdate.strftime(' Init: %a %Y-%m-%d %H UTC') - if ((self.ehr - self.shr) == 0): - validstr = (self.initdate+timedelta(hours=self.shr)).strftime('Valid: %a %Y-%m-%d %H UTC') - else: - validstr1 = (self.initdate+timedelta(hours=(self.shr-1))).strftime('%a %Y-%m-%d %H UTC') - validstr2 = (self.initdate+timedelta(hours=self.ehr)).strftime('%a %Y-%m-%d %H UTC') - validstr = "Valid: %s - %s"%(validstr1, validstr2) - - fig.text(0.51, 0.22, self.title, fontdict=fontdict, transform=fig.transFigure) - fig.text(0.51, 0.22 - 25/float(fig_height_px), initstr, transform=fig.transFigure) - fig.text(0.51, 0.22 - 40/float(fig_height_px), validstr, transform=fig.transFigure) - - # add logo and text below logo - x, y = fig.transFigure.transform((0.51,0)) - fig.figimage(plt.imread('ncar.png'), xo=x, yo=y+15, zorder=1000) - plt.text(x+10, y+5, 'ensemble.ucar.edu', fontdict={'size':9, 'color':'#505050'}, transform=None) - - def saveFigure(self, trans=False): + levels = self.opts['fill']['levels'] + cmap = colors.ListedColormap(self.opts['fill']['colors']) + norm = colors.BoundaryNorm(levels, cmap.N) + + for member, ax in enumerate(axs.flatten()): + ax.add_feature(cartopy.feature.COASTLINE.with_scale('110m'), linewidth=0.25) + ax.add_feature(cartopy.feature.BORDERS.with_scale('110m'), linewidth=0.25) + ax.add_feature(cartopy.feature.STATES.with_scale('110m'), linewidth=0.05) + ax.add_feature(cartopy.feature.LAKES.with_scale('110m'), edgecolor='k', linewidth=0.25, facecolor='k', 
alpha=0.05) + ax.text(0.03,0.97,member+1,ha="left",va="top",bbox=dict(boxstyle="square",lw=0.5,fc="white"), transform=ax.transAxes) + # plot, unless file that has fill field is missing, then skip + if member not in self.missing_members and member < self.ENS_SIZE: + data = self.latlonGrid(self.data['fill'][0].isel(mem=member)) + logging.debug(f"plotStamp: starting contourf with regridded array member {member}") + cs1 = ax.contourf(self.x2d, self.y2d, data, levels=levels, cmap=cmap, norm=norm, extend='max') + ax.set_extent(self.extent, crs=self.ax.projection) + if member >= self.ENS_SIZE: + fig.delaxes(ax) + + + # use every other tick for large colortables, remove last tick label for both + if self.opts['fill']['name'] in ['goesch3', 'goesch4', 't2', 'precipacc' ]: ticks = levels[:-1][::2] # CSS added precipacc + else: ticks = levels[:-1] + + label = f"{data.long_name} [{data.units}]" + # add colorbar to figure + cax = fig.add_axes([0.51,0.3,0.48,0.02]) + cb = plt.colorbar(cs1, cax=cax, orientation='horizontal', ticks=ticks, extendfrac=0.0, label=label) + cb.outline.set_linewidth(0.5) + cb.ax.tick_params(labelsize=9) + + # add init/valid text + fontdict = {'family':'monospace', 'size':13, 'weight':'bold'} + initstr, validstr = self.getInitValidStr() + + fig.text(0.51, 0.22, self.title, fontdict=fontdict, transform=fig.transFigure) + pos = axs.flatten()[-2].get_position() + fontdict = {'family':'monospace'} + fig.text(0.51, pos.y0+0.3*pos.height, " "+initstr+"\n"+validstr, fontdict=fontdict, ha="left", + transform=fig.transFigure) + + # add NCAR logo and text below logo + xo, yo = fig.transFigure.transform((0.51,pos.y0)) + fig.figimage(plt.imread('ncar.png'), xo=xo, yo=yo) + #plt.text(x+10, y+5, 'ensemble.ucar.edu', fontdict={'size':9, 'color':'#505050'}, transform=None) + + + def getInitValidStr(self): + initstr = self.initdate.strftime('Init: %a %Y-%m-%d %H UTC') + shr = min(self.fhr) + ehr = max(self.fhr) + fmt = '%a %Y-%m-%d %H UTC' + validstr = "Valid: " + 
(self.initdate+datetime.timedelta(hours=shr)).strftime(fmt) + if ehr > shr: # range of valid times + validstr += " - " + (self.initdate+datetime.timedelta(hours=ehr)).strftime(fmt) + return initstr, validstr + + def saveFigure(self, transparent=False): + outfile = self.createFilename() # place NCAR logo 57 pixels below bottom of map, then save image if 'ensprod' in self.opts['fill']: # CSS needed incase not a fill plot - if not trans and self.opts['fill']['ensprod'] not in ['stamp', 'maxstamp']: - x, y = self.ax.transAxes.transform((0,0)) - #self.fig.figimage(plt.imread('ncar.png'), xo=x, yo=(y-44), zorder=1000) - #plt.text(x+10, y-54, 'ensemble.ucar.edu', fontdict={'size':9, 'color':'#505050'}, transform=None) + if not transparent and not self.opts['fill']['ensprod'].endswith('stamp'): + img = plt.imread('ncar.png') + self.ax.figure.figimage(img, xo=10, yo=10) + #plt.text(x+10, y-54, 'ensemble.ucar.edu', fontdict={'size':9, 'color':'#505050'}, transform=None) - plt.savefig(self.outfile, dpi=90, transparent=trans) + self.ax.set_extent(self.extent, crs=self.ax.projection) + + plt.savefig(outfile, transparent=transparent, bbox_inches="tight") if self.opts['convert']: - #command = 'convert -colors 255 %s %s'%(self.outfile, self.outfile) + # reduce colors to shrink file size if not self.opts['fill']: ncolors = 48 #if no fill field exists - elif self.opts['fill']['ensprod'] in ['prob', 'neprob', 'probgt', 'problt', 'neprobgt', 'neproblt']: ncolors = 48 + elif "prob" in self.opts['fill']['ensprod']: ncolors = 48 elif self.opts['fill']['name'] in ['crefuh']: ncolors = 48 else: ncolors = 255 - command = 'pngquant %d %s --ext=.png --force'%(ncolors,self.outfile) + if os.environ['NCAR_HOST'] == "cheyenne": + quant = '/glade/u/home/ahijevyc/bin_cheyenne/pngquant' + else: + quant = '/glade/u/home/ahijevyc/bin/pngquant' + beforesize = os.path.getsize(outfile) + command = f"{quant} {ncolors} {outfile} --ext=.png --force" + logging.info(f"{beforesize} bytes before reducing 
colors") + logging.debug(command) ret = subprocess.check_call(command.split()) plt.clf() + fsize = os.path.getsize(outfile) + logging.info(f"created {outfile} {fsize} bytes") def parseargs(): '''Parse arguments and return dictionary of fill, contour and barb field parameters''' - parser = argparse.ArgumentParser(description='Web plotting script for NCAR ensemble') - parser.add_argument('-d', '--date', default=datetime.utcnow().strftime('%Y%m%d00'), help='initialization datetime (YYYYMMDDHH)') - parser.add_argument('-tr', '--timerange', required=True, help='time range of forecasts (START,END)') - parser.add_argument('-f', '--fill', help='fill field (FIELD_PRODUCT_THRESH), field keys:'+','.join(fieldinfo.keys())) - parser.add_argument('-c', '--contour', help='contour field (FIELD_PRODUCT_THRESH)') + parser = argparse.ArgumentParser(description='Web plotting script for NCAR ensemble', + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('date', help='model initialization time') + parser.add_argument('--autolevels', action='store_true', help='use min/max to determine levels for plot') parser.add_argument('-b', '--barb', help='barb field (FIELD_PRODUCT_THRESH)') - parser.add_argument('-bs', '--barbskip', help='barb skip interval') - parser.add_argument('-t', '--title', help='title for plot') - parser.add_argument('-dom', '--domain', default='CONUS', help='domain to plot') - parser.add_argument('-al', '--autolevels', action='store_true', help='use min/max to determine levels for plot') + parser.add_argument('-c', '--contour', help='contour field (FIELD_PRODUCT_THRESH)') parser.add_argument('-con', '--convert', default=True, action='store_false', help='run final image through imagemagick') - parser.add_argument('-sig', '--sigma', default=2, help='smooth probabilities using gaussian smoother') - parser.add_argument('--debug', action='store_true', help='turn on debugging') + parser.add_argument('-d', '--debug', action='store_true', help='turn on 
debugging') + parser.add_argument('--domain', type=str, choices=domains.keys(), default="CONUS", help='domain of plot') + parser.add_argument('--ENS_SIZE', type=int, default=1, help='number of members in ensemble') + parser.add_argument('-f', '--fill', help='fill field (FIELD_PRODUCT_THRESH), FIELD options:' + f"{','.join(list(fieldinfo.keys()))} PRODUCT may be one of [max,maxstamp,min,mean,meanstamp," + "prob,neprob,problt,neproblt,paintball,stamp,spaghetti]") + parser.add_argument('--fhr', nargs='+', type=float, default=[12], help='list of forecast hours') + parser.add_argument('--idir', type=str, default='/glade/campaign/mmm/parc/schwartz/MPAS_ensemble_paper', + help="path to model output") + parser.add_argument('--init_file', help='path to file with lat/lon/area of mesh') + parser.add_argument('--meshstr', type=str, help=f'mesh id. used to prefix output file and pickle file') + parser.add_argument('--nbarbs', type=int, default=32, help='max barbs in one dimension') + parser.add_argument('--nlon_max', type=int, default=1500, help='max pts in longitude dimension') + parser.add_argument('--nlat_max', type=int, default=1500, help='max pts in latitude dimension') + parser.add_argument('--sigma', default=2, help='smooth probabilities using gaussian smoother') + parser.add_argument('-t', '--title', help='title for plot') - opts = vars(parser.parse_args()) - if opts['interp']: opts['over'] = True - - # opts = { 'date':date, 'timerange':timerange, 'fill':'sbcape_prob_25', 'ensprod':'mean' ... } - # now, convert underscore delimited fill, contour, and barb args into dicts + opts = vars(parser.parse_args()) # argparse.Namespace in form of dictionary + if opts["debug"]: + logging.getLogger().setLevel(logging.DEBUG) + + assert os.path.exists(opts["init_file"]), "--init_file must be path to file with latCell, lonCell, etc." 
+ + # now, convert slash-delimited fill, contour, and barb args into dicts for f in ['contour','barb','fill']: thisdict = {} if opts[f] is not None: - input = opts[f].lower().split('_') + input = opts[f].split('/') + + assert len(input) > 1, f"{f} has 2-3 components separated by /. Add '/mean'?" thisdict['name'] = input[0] thisdict['ensprod'] = input[1] - thisdict['arrayname'] = fieldinfo[input[0]]['fname'] + thisdict['arrayname'] = fieldinfo[input[0]]['fname'] # name of variable in input file # assign contour levels and colors if (input[1] in ['prob', 'neprob', 'probgt', 'problt', 'neprobgt', 'neproblt', 'prob3d']): + if len(input)<3: + logging.error(f"your {f} option has less than 3 components. It needs name, ensprod, and thresh.") + sys.exit(1) thisdict['thresh'] = float(input[2]) if int(opts['sigma']) != 40: thisdict['levels'] = np.arange(0.1,1.1,0.1) else: thisdict['levels'] = [0.02,0.05,0.1,0.15,0.2,0.25,0.35,0.45,0.6] + #thisdict['levels'] = np.arange(0.1,1.1,0.2) thisdict['colors'] = readNCLcm('perc2_9lev') + #thisdict['colors'] = ['#d9d9d9', '#bdbdbd', '#969696', '#636363', '#252525'] # greyscale elif (input[1] in ['paintball', 'spaghetti']): thisdict['thresh'] = float(input[2]) thisdict['levels'] = [float(input[2]), 1000] thisdict['colors'] = readNCLcm('GMT_paired') elif (input[1] == 'var'): - if (input[0][0:3] == 'hgt'): + if input[0].startswith('hgt'): thisdict['levels'] = [2,4,6,8,10,15,20,25,30,35,40,45,50,55,60,65,70,75] #hgt thisdict['colors'] = readNCLcm('wind_17lev') - elif (input[0][0:3] == 'spe'): + elif input[0].startswith('spe'): thisdict['levels'] = [1,2,3,4,5,6,7,8,9,10,12.5,15,20,25,30,35,40,45] #iso thisdict['colors'] = readNCLcm('wind_17lev') else: @@ -372,104 +446,126 @@ def parseargs(): if 'arraylevel' in fieldinfo[input[0]]: thisdict['arraylevel'] = fieldinfo[input[0]]['arraylevel'] - # get barb-skip for barb fields - if opts['barbskip'] is not None: thisdict['skip'] = int(opts['barbskip']) - elif 'skip' in fieldinfo[input[0]]: 
thisdict['skip'] = fieldinfo[input[0]]['skip'] - - # get filename - if 'filename' in fieldinfo[input[0]]: thisdict['filename'] = fieldinfo[input[0]]['filename'] - else: thisdict['filename'] = 'wrfout' - opts[f] = thisdict return opts -def makeEnsembleList(wrfinit, timerange, ENS_SIZE): - # create lists of files (and missing file indices) for various file types - shr, ehr = timerange - file_list = { 'wrfout':[], 'upp': [], 'diag':[] } - missing_list = { 'wrfout':[], 'upp': [], 'diag':[] } - - EXP_DIR = os.getenv('EXP_DIR', '/glade/scratch/wrfrt/realtime_ensemble/ensf') - - missing_index = 0 - for hr in range(shr,ehr+1): - wrfvalidstr = (wrfinit + timedelta(hours=hr)).strftime('%Y-%m-%d_%H:%M:%S') - yyyymmddhh = wrfinit.strftime('%Y%m%d%H') - for mem in range(1,ENS_SIZE+1): - wrfout = '%s/%s/wrf_rundir/ens_%d/wrfout_d02_%s'%(EXP_DIR,yyyymmddhh,mem,wrfvalidstr) - diag = '%s/%s/wrf_rundir/ens_%d/diags_d02.%s.nc'%(EXP_DIR,yyyymmddhh,mem,wrfvalidstr) - upp = '%s/%s/post_rundir/mem_%d/fhr_%d/WRFTWO%02d.nc'%(EXP_DIR,yyyymmddhh,mem,hr,hr) - if os.path.exists(wrfout): file_list['wrfout'].append(wrfout) - else: missing_list['wrfout'].append(missing_index) - if os.path.exists(diag): file_list['diag'].append(diag) - else: missing_list['diag'].append(missing_index) - if os.path.exists(upp): file_list['upp'].append(upp) - else: missing_list['upp'].append(missing_index) - missing_index += 1 - return (file_list, missing_list) - -def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10): + +def makeEnsembleList(Plot): + """ + Return list of input files + """ + idir = Plot.idir + initdate = Plot.initdate + fhr = Plot.fhr + ENS_SIZE = Plot.ENS_SIZE + file_list = [] + yyyymmddhh = initdate.strftime('%Y%m%d%H') + for hr in fhr: + validstr = (initdate + datetime.timedelta(hours=hr)).strftime('%Y-%m-%d_%H.%M.%S') + if ENS_SIZE > 1: + file_list.extend([os.path.join(idir, f"{yyyymmddhh}/ens_{mem}/diag.{validstr}.nc") for mem in range(1,ENS_SIZE+1)]) + else: + 
file_list.append( os.path.join(idir, f"{yyyymmddhh}/diag.{validstr}.nc") ) + logging.debug(f"file_list {file_list}") + assert len(file_list), 'Empty file_list' + for f in file_list: + assert os.path.exists(f), f'{f} does not exist' + return file_list + + +def readEnsemble(Plot): + initdate = Plot.initdate + fhr = Plot.fhr + fields = Plot.opts + ENS_SIZE = Plot.ENS_SIZE ''' Reads in desired fields and returns 2-D arrays of data for each field (barb/contour/field) ''' - if debug: print fields + logging.debug(fields) datadict = {} - file_list, missing_list = makeEnsembleList(wrfinit, timerange, ENS_SIZE) #construct list of files - + file_list = makeEnsembleList(Plot) #construct list of files + assert len(file_list), "no ensemble files. Perhaps edit makeEnsembleList()" + # loop through fill field, contour field, barb field and retrieve required data for f in ['fill', 'contour', 'barb']: - if not fields[f].keys(): continue - if debug: print 'Reading field:', fields[f]['name'], 'from', fields[f]['filename'] + if not list(fields[f].keys()): continue + logging.info(f"read {fields[f]['name']}") # save some variables for use in this function - filename = fields[f]['filename'] arrays = fields[f]['arrayname'] - fieldtype = fields[f]['ensprod'] + ensprod = fields[f]['ensprod'] fieldname = fields[f]['name'] - if fieldtype in ['prob', 'neprob', 'problt', 'probgt', 'neprobgt', 'neproblt', 'prob3d']: thresh = fields[f]['thresh'] - if fieldtype[0:3]=='mem': member = int(fieldtype[3:]) + if ensprod in ['prob', 'neprob', 'problt', 'probgt', 'neprobgt', 'neproblt', 'prob3d']: thresh = fields[f]['thresh'] + if ensprod.startswith('mem'): member = int(ensprod[3:]) - # open Multi-file netcdf dataset - if debug: print file_list[filename] - fh = MFDataset(file_list[filename]) - + # open xarray Dataset + logging.info(f"opening xarray Dataset. 
{len(fhr)} forecast hours x {ENS_SIZE} members") + paths = file_list + logging.debug(f"1-d paths = {paths}") + paths = np.array(paths).reshape(len(fhr), ENS_SIZE).tolist() + logging.debug(f"2-d paths = {paths}") + # Use a nested list of paths so xarray assigns data to both "Time" and "mem" dimensions. + fh = xarray.open_mfdataset(paths,engine='netcdf4',combine="nested", concat_dim=['Time','mem']) + # turn XTIME = b'2017-05-02_00:00:00 ' to proper datetime. + # use .load() or only a '2' will be read + fh["Time"] = pd.to_datetime(fh.xtime.load().isel(mem=0).astype(str).str.strip(), format="%Y-%m-%d_%H:%M:%S") + # loop through each field, wind fields will have two fields that need to be read datalist = [] for n,array in enumerate(arrays): - if debug: print 'Reading', array - + logging.debug(f'Reading data {array}') #read in 3D array (times*members,ny,nx) from file object if 'arraylevel' in fields[f]: if isinstance(fields[f]['arraylevel'], list): level = fields[f]['arraylevel'][n] else: level = fields[f]['arraylevel'] else: level = None - if level == 'max': data = np.amax(fh.variables[array][:,:,:,:], axis=1) - elif level is None: data = fh.variables[array][:,:,:] - else: data = fh.variables[array][:,level,:,:] + if level == 'max': data = fh[array].max(dim=level) + elif level is None: data = fh[array] + else: data = fh[array].sel(level=level) + + if 'nVertices' in fh[array].dims: + logging.info(f"{fieldname} is on vertices. Put on cells") + # .load() to avoid dask PerformanceWarning: Reshaping is producing a large chunk. 
+ data = data.load().stack(TimeMem=("Time","mem")).T + mpas_mesh = Plot.mpas_mesh + nEdgesOnCell = mpas_mesh.nEdgesOnCell + verticesOnCell = mpas_mesh.verticesOnCell + maxEdges = mpas_mesh.maxEdges.size + nVertLevels = data.TimeMem.size # for now treat Time like vertical dimension + nCells = mpas_mesh.nCells.size + nVertices = data.nVertices.size + # verticesOnCell is the transpose of what mpas_vort_cell1 expects + dataCells = vert2cell.vert2cell(nEdgesOnCell, verticesOnCell.T, maxEdges, nVertLevels, nCells, nVertices, data) + # Assign to new DataArray with nCells dimension. transfer DataArray attributes + data = xarray.DataArray(data = dataCells, coords = dict(TimeMem=data.TimeMem, nCells=fh.nCells), attrs=data.attrs).unstack() # change units for certain fields - if array in ['U_PL', 'V_PL', 'UBSHR6','VBSHR6','UBSHR1', 'VBSHR1', 'U10','V10', 'U_COMP_STM', 'V_COMP_STM','S_PL','U_COMP_STM_6KM','V_COMP_STM_6KM']: data = data*1.93 # m/s > kt - elif array in ['DEWPOINT_2M', 'T2', 'AFWA_WCHILL', 'AFWA_HEATIDX']: data = (data - 273.15)*1.8 + 32.0 # K > F - elif array in ['PREC_ACC_NC', 'PREC_ACC_C', 'AFWA_PWAT', 'PWAT', 'AFWA_RAIN', 'AFWA_SNOWFALL', 'AFWA_SNOW', 'AFWA_ICE', 'AFWA_FZRA','AFWA_RAIN_HRLY','AFWA_ICE_HRLY','AFWA_SNOWFALL_HRLY', 'AFWA_FZRA_HRLY']: data = data*0.0393701 # mm > in - elif array in ['RAINNC', 'GRPL_MAX', 'SNOW_ACC_NC', 'AFWA_HAIL', 'HAILCAST_DIAM_MAX']: data = data*0.0393701 # mm > in - elif array in ['T_PL', 'TD_PL', 'SFC_LI']: data = data - 273.15 # K > C - elif array in ['AFWA_MSLP', 'MSLP']: data = data*0.01 # Pa > hPa - elif array in ['ECHOTOP']: data = data*3.28084# m > ft - elif array in ['UP_HELI_MIN']: data = np.abs(data) - elif array in ['AFWA_VIS', 'VISIBILITY']: data = (data*0.001)/1.61 # m > mi - elif array in ['SBCINH', 'MLCINH', 'W_DN_MAX']: data = data*-1.0 # make cin positive - elif array in ['PVORT_320K']: data = data*1000000 # multiply by 1e6 - elif array in ['SBT123_GDS3_NTAT','SBT124_GDS3_NTAT','GOESE_WV','GOESE_IR']: data = 
data -273.15 # K -> C - elif array in ['HAIL_MAXK1', 'HAIL_MAX2D']: data = data*39.3701 # m -> inches - elif array in ['PBMIN', 'PBMIN_SFC', 'BESTPBMIN', 'MLPBMIN', 'MUPBMIN']: data = data*0.01 # Pa -> hPa -# elif array in ['LTG1_MAX1', 'LTG2_MAX', 'LTG3_MAX']: data = data*0.20 # scale down excess values + if array in ['u10','v10']: data = data.metpy.convert_units("knot") + elif "zonal" in array or "meridional" in array: data = data.metpy.convert_units("knot") + elif array in ['dewpoint_surface', 't2m']: data = data.metpy.convert_units("degF") + elif array in ['precipw','rainnc','grpl_max','SNOW_ACC_NC']:data = data.metpy.convert_units("inch") + elif array in ['T_PL', 'TD_PL', 'SFC_LI']: data = data.metpy.convert_units("Celsius") + elif array.startswith("temp"): data = data.metpy.convert_units("Celsius") + elif array.startswith("dewp"): data = data.metpy.convert_units("Celsius") + elif array == 'mslp' or 'PB' in array: data = data.metpy.convert_units("hPa") datalist.append(data) # these are derived fields, we don't have in any of the input files but we can compute - print datalist[0].shape if 'name' in fields[f]: - if fieldname in ['shr06mag', 'shr01mag', 'bunkmag','speed10m']: datalist = [np.sqrt(datalist[0]**2 + datalist[1]**2)] + if fieldname in ['shr06mag', 'shr01mag']: + logging.info(f"derive {fieldname} from {arrays}") + # Assume datalist is 4-element list: [bottom_u, bottom_v, top_u, top_v] + shearmag = ((datalist[0]-datalist[2])**2 + (datalist[1]-datalist[3])**2)**0.5 + shearmag.attrs["long_name"] = fieldname.replace("shr","").replace("0","0-").replace("mag", "km shear magnitude") + datalist = [shearmag] + elif fieldname.startswith('speed') or fieldname == 'bunkmag': + logging.info(f"derive speed from {arrays}") + speed = (datalist[0]**2 + datalist[1]**2)**0.5 + speed.attrs["long_name"] = datalist[0].attrs["long_name"].replace("zonal wind"," wind") + datalist = [speed] + elif fieldname in ['shr06','shr01']: datalist = [datalist[0]-datalist[2], 
datalist[1]-datalist[3]] + elif fieldname == 'uhratio': datalist = [compute_uhratio(datalist)] elif fieldname == 'stp': datalist = [computestp(datalist)] # GSR in fields are T(K), mixing ratio (kg/kg), and surface pressure (Pa) elif fieldname == 'thetae': datalist = [compute_thetae(datalist)] @@ -477,163 +573,205 @@ def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10) #elif fieldname == 'pbmin': datalist = [ datalist[1] - datalist[0][:,0,:] ] elif fieldname == 'pbmin': datalist = [ datalist[1] - datalist[0] ] # CSS changed above line for GRIB2 elif fieldname in ['thck1000-500', 'thck1000-850'] : datalist = [ datalist[1]*0.1 - datalist[0]*0.1 ] # CSS added for thicknesses - elif fieldname == 'winter': datalist = [datalist[1] + datalist[2] + datalist[3]] datadict[f] = [] for data in datalist: - # perform mean/max/variance/etc to reduce 3D array to 2D - if (fieldtype == 'mean'): data = np.mean(data, axis=0) - elif (fieldtype == 'pmm'): data = compute_pmm(data) - elif (fieldtype == 'max'): data = np.amax(data, axis=0) - elif (fieldtype == 'min'): data = np.amin(data, axis=0) - elif (fieldtype == 'var'): data = np.std(data, axis=0) - elif (fieldtype == 'maxstamp'): - for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) #insert nan for missing files - data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2])) - data = np.nanmax(data, axis=0) - elif (fieldtype == 'summean'): - for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) #insert nan for missing files - data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2])) - data = np.nansum(data, axis=0) - data = np.nanmean(data, axis=0) - elif (fieldtype == 'maxmean'): - for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) #insert nan for missing files - data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2])) - data = np.nanmax(data, axis=0) - 
data = np.nanmean(data, axis=0) - elif (fieldtype == 'summax'): - for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) #insert nan for missing files - data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2])) - data = np.nansum(data, axis=0) - data = np.nanmax(data, axis=0) - elif (fieldtype[0:3] == 'mem'): - for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) #insert nan for missing files - data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2])) - print fieldname - if fieldname in ['precip', 'precipacc']: - print 'where we should be' - data = np.nanmax(data, axis=0) - else: data = np.nanmax(data, axis=0) - data = data[member-1,:] - elif (fieldtype in ['prob', 'neprob', 'problt', 'probgt', 'neprobgt', 'neproblt']): - if fieldtype in ['prob', 'neprob', 'probgt', 'neprobgt']: data = (data>=thresh).astype('float') - elif fieldtype in ['problt', 'neproblt']: data = (data=thresh).astype('float') - for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) - data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2])) - data = compute_prob3d(data, roi=14, sigma=float(fields['sigma']), type='gaussian') - if debug: print 'field', fieldname, 'has shape', data.shape, 'max', data.max(), 'min', data.min() - - # attach data arrays for each type of field (e.g. 
{ 'fill':[data], 'barb':[data,data] }) - datadict[f].append(data) - - fh.close() - - return (datadict, missing_list) - -def readGrid(file_dir): - f = Dataset(file_dir, 'r') - lats = f.variables['XLAT'][0,:] - lons = f.variables['XLONG'][0,:] - f.close() - return (lats,lons) - -def saveNewMap(domstr='CONUS', wrfout=None): - # THIS GENERATES A NEW PICKLE FILE FOR A DOMAIN (need to run once for a new domain) - # if domstr is not in the dictionary, then use provided wrfout to create new domain - if domstr not in domains: - fh = Dataset(wrfout, 'r') - lats = fh.variables['XLAT'][0,:] - lons = fh.variables['XLONG'][0,:] - ll_lat, ll_lon, ur_lat, ur_lon = lats[0,0], lons[0,0], lats[-1,-1], lons[-1,-1] - lat_1, lat_2, lon_0 = fh.TRUELAT1, fh.TRUELAT2, fh.STAND_LON - fig_width = 1080 - fh.close() - # else assume domstr is in dictionary - elif 'file' in domains[domstr]: - fh = Dataset(domains[domstr]['file'], 'r') - lats = fh.variables['XLAT'][0,:] - lons = fh.variables['XLONG'][0,:] - ll_lat, ll_lon, ur_lat, ur_lon = lats[0,0], lons[0,0], lats[-1,-1], lons[-1,-1] - lat_1, lat_2, lon_0 = fh.TRUELAT1, fh.TRUELAT2, fh.STAND_LON - if 'fig_width' in domains[domstr]: fig_width = domains[domstr]['fig_width'] - else: fig_width = 1080 - fh.close() - else: - ll_lat, ll_lon, ur_lat, ur_lon = domains[domstr]['corners'] - fig_width = domains[domstr]['fig_width'] - lat_1, lat_2, lon_0 = 32.0, 46.0, -101.0 - - dpi = 90 - fig = plt.figure(dpi=dpi) - m = Basemap(projection='lcc', resolution='i', llcrnrlon=ll_lon, llcrnrlat=ll_lat, urcrnrlon=ur_lon, urcrnrlat=ur_lat, \ - lat_1=lat_1, lat_2=lat_2, lon_0=lon_0, area_thresh=1000) - - # compute height based on figure width, map aspect ratio, then add some vertical space for labels/colorbar - fig_width = fig_width/float(dpi) - fig_height = fig_width*m.aspect + 0.93 - #fig_height = fig_width*m.aspect + 1.25 - figsize = (fig_width, fig_height) - fig.set_size_inches(figsize) - - # place map 0.7" from bottom of figure, leave rest of 0.93" at top for 
title (needs to be in figure-relative coords) - #x,y,w,h = 0.01, 0.8/float(fig_height), 0.98, 0.98*fig_width*m.aspect/float(fig_height) #too much padding at top - x,y,w,h = 0.01, 0.7/float(fig_height), 0.98, 0.98*fig_width*m.aspect/float(fig_height) - ax = fig.add_axes([x,y,w,h]) - for i in ax.spines.itervalues(): i.set_linewidth(0.5) - - m.drawcoastlines(linewidth=0.5, ax=ax) - m.drawstates(linewidth=0.25, ax=ax) - m.drawcountries(ax=ax) - #m.drawcounties(linewidth=0.1, ax=ax) - - pickle.dump((fig,ax,m), open('rt2015_%s.pk'%domstr, 'w')) - + if fieldname.startswith("precip"): + fmt = '%a %Y-%m-%d %H UTC' + logging.info(f"Derive accumulated precipitation {data.Time[0].dt.strftime(fmt).data} - {data.Time[-1].dt.strftime(fmt).data}") + # subtract first time from last time + ensemble_at_last_time = data.isel(Time=[-1]) # brackets preserve Time dimension. + ensemble_at_first_time = data.isel(Time=[0]) + data = ensemble_at_last_time # use last time for output data + data.data -= ensemble_at_first_time.data # .data preserves quantity units, metadata, avoids pint.errors.DimensionalityError: Cannot convert from 'dimensionless' to 'inch' + + logging.info(f"get {ensprod} of {data.shape} data") + if (ensprod == 'mean'): + data = data.mean(dim=["Time","mem"], keep_attrs=True) + elif (ensprod == 'pmm'): + data = compute_pmm(data) + elif (ensprod == 'max'): + data = data.max(dim=["Time","mem"], keep_attrs=True) + elif (ensprod == 'min'): + data = data.min(dim=["Time","mem"], keep_attrs=True) + elif (ensprod == 'var'): + data = data.std(dim=["Time","mem"], keep_attrs=True) + elif (ensprod == 'maxstamp'): + data = data.max(dim="Time", keep_attrs=True) + elif (ensprod == 'meanstamp'): + data = data.mean(dim="Time", keep_attrs=True) + elif (ensprod == 'summean'): + data = data.sum(dim="Time", keep_attrs=True) + data = data.mean(dim="mem", keep_attrs=True) + elif (ensprod == 'maxmean'): + data = data.max(dim="Time", keep_attrs=True) + data = data.mean(dim="mem", keep_attrs=True) + 
elif (ensprod == 'summax'):
+        data = data.sum(dim="Time", keep_attrs=True)
+        data = data.max(dim="mem", keep_attrs=True)
+    elif (ensprod[0:3] == 'mem'):
+        data = data.sel(mem=member)
+    elif 'prob' in ensprod:
+        u = data.metpy.units
+        if ensprod.endswith('lt'):
+            data.values = data.values < thresh
+            data.attrs["long_name"] = f"less than {thresh} {u}"
+        else:
+            data.values = data.values >= thresh
+            data.attrs["long_name"] = f"greater than or equal to {thresh} {u}"
+        if ensprod.startswith("ne"):
+            # grid spacing of interpolated lat-lon grid.
+            grid_spacing = units.km * np.sqrt(Plot.mpas_mesh["areaCell"].values.min())/1000
+            nbrhd = 25 * units.miles
+            roi = nbrhd / grid_spacing
+            roi = roi.to_base_units()
+            logging.info(f"compute neighborhood probability with radius {roi:.2f}")
+            data = data.stack(TimeMem=("Time","mem")).groupby("TimeMem").apply(Plot.latlonGrid)
+            data = compute_neprob(data, roi=roi, sigma=float(Plot.opts['sigma']), type='gaussian')
+            data = data.unstack()
+            data.attrs["long_name"] += f" within {nbrhd:~}"
+        data = data.mean(dim=["Time","mem"], keep_attrs=True)
+        data.attrs["long_name"] = "probability " + data.attrs["long_name"]
+        data.attrs["units"] = "dimensionless"
+    elif (ensprod in ['prob3d']):
+        data = (data.values>=thresh).astype('float')
+        data = compute_prob3d(data, roi=14, sigma=float(Plot.opts['sigma']))
+    logging.debug(f'field {fieldname} has shape {data.shape} range {data.min()}-{data.max()}')
+
+    datadict[f].append(data)
+
+    return datadict
+
+
+
+
+
+def saveNewMap(plot):
+    logging.info(f"saveNewMap: pk_file={plot.pk_file}")
+    ll_lat, ll_lon, ur_lat, ur_lon = domains[plot.domain]['corners']
+    lat_1, lat_2, lon_0 = 32., 46., -101.
+ fig = plt.figure() + + proj = cartopy.crs.LambertConformal(central_longitude=lon_0, standard_parallels=(lat_1,lat_2)) + (llx, lly, llz), (urx, ury, urz) = proj.transform_points( + cartopy.crs.PlateCarree(), + np.array([ll_lon, ur_lon]), + np.array([ll_lat, ur_lat]) + ) + ul_lon, ul_lat = cartopy.crs.PlateCarree().transform_point(llx, ury, proj) + lr_lon, lr_lat = cartopy.crs.PlateCarree().transform_point(urx, lly, proj) + lc_lon, lc_lat = cartopy.crs.PlateCarree().transform_point(0, lly, proj) + uc_lon, uc_lat = cartopy.crs.PlateCarree().transform_point(0, ury, proj) + + # To save time, triangulate within a lat/lon bounding box. + # Get extreme longitudes and latitudes within domain so the entire domain is covered. + # These were attributes of Basemap object, but not cartopy. + # Extreme longitudes are probably in upper left and upper right corners, but also check lower corners. + lonmin = min([ll_lon, ul_lon]) + lonmax = max([lr_lon, ur_lon]) + # Extreme latitudes are probably in lower center and upper center (lc_lat, uc_lat). + latmin = min([ll_lat, lc_lat, lr_lat]) + latmax = max([ul_lat, uc_lat, ur_lat]) + + extent = (llx, urx, lly, ury) # in projection coordinates (x,y) meters + + # y/x aspect ratio was an attribute of Basemap object, but not cartopy. + aspect = (ury - lly) / (urx - llx) + + # Constant figure width, no matter the aspect ratio. + fig.set_size_inches(16,16*aspect) + + ax = plt.axes(projection = proj) + for i in list(ax.spines.values()): i.set_linewidth(0.5) + + ax.add_feature(cartopy.feature.COASTLINE.with_scale('50m'), linewidth=0.25) + ax.add_feature(cartopy.feature.BORDERS.with_scale('50m'), linewidth=0.25) + ax.add_feature(cartopy.feature.STATES.with_scale('50m'), linewidth=0.05) + ax.add_feature(cartopy.feature.LAKES.with_scale('50m'), edgecolor='k', linewidth=0.25, facecolor='k', alpha=0.05) + if plot.domain != "CONUS": + logging.info("add county lines") + # Create custom cartopy feature COUNTIES that can be added to the axes. 
+ reader = cartopy.io.shapereader.Reader('/glade/work/ahijevyc/share/shapeFiles/cb_2013_us_county_500k/cb_2013_us_county_500k.shp') + counties = list(reader.geometries()) + COUNTIES = cartopy.feature.ShapelyFeature(counties, cartopy.crs.PlateCarree()) + ax.add_feature(COUNTIES, facecolor="none", linewidth=0.1, alpha=0.25) + + # lat/lons from mpas_mesh file + mpas_mesh = plot.mpas_mesh + # min_grid_spacing is the grid spacing of interpolated lat-lon grid in meters. + min_grid_spacing = np.sqrt(plot.mpas_mesh["areaCell"].values.min()) + + lats = mpas_mesh["latCell"] + lons = mpas_mesh["lonCell"] + if lons.max() > 180: + lons[lons>180] -= 360 + nlon = int((urx - llx)/min_grid_spacing) # calculate in projection space (meters) + nlat = int((ury - lly)/min_grid_spacing) + nlon = np.clip(nlon, 1, plot.nlon_max) + nlat = np.clip(nlat, 1, plot.nlat_max) + lon2d, lat2d = np.meshgrid(np.linspace(lonmin, lonmax, nlon), np.linspace(latmin,latmax,nlat)) + # Convert to map coordinates instead of latlon to avoid transform=PlateCarree in contour method. + xyz = proj.transform_points(cartopy.crs.PlateCarree(), lon2d, lat2d) + x2d = xyz[...,0] + y2d = xyz[...,1] + # ibox: subscripts within lat/lon box plus buffer. 
speed up triangulation in interp_weights + ibox = (lonmin-1 <= lons ) & (lons < lonmax+1) & (latmin-1 <= lats) & (lats < latmax+1) + lons = lons[ibox] + lats = lats[ibox] + xyz = proj.transform_points(cartopy.crs.PlateCarree(), lons, lats) + x = xyz[...,0] + y = xyz[...,1] + + logging.info(f"saveNewMap: triangulate {len(lons)} pts") + vtx, wts = interp_weights(np.vstack((lons,lats)).T,np.vstack((lon2d.flatten(), lat2d.flatten())).T) + + pickle.dump((ax,extent,lons,lats,lon2d,lat2d,x2d,y2d,ibox,x,y,vtx,wts), open(plot.pk_file, 'wb')) + def compute_pmm(ensemble): - mem, dy, dx = ensemble.shape - ens_mean = np.mean(ensemble, axis=0) - ens_dist = np.sort(ensemble.flatten())[::-1] - pmm = ens_dist[::mem] - - ens_mean_index = np.argsort(ens_mean.flatten())[::-1] + members = ensemble.Time.size + ens_mean = ensemble.mean(dim="Time", keep_attrs=True) + logging.info(f"compute_pmm: sort {ensemble.values.size} ensemble values") + ens_dist = np.sort(ensemble.values.flatten())[::-1] + pmm = ens_dist[::members] # brilliant + + logging.debug(f"compute_pmm: sort {ens_mean.values.size} ens_mean values") + ens_mean_index = np.argsort(ens_mean.values.flatten())[::-1] temp = np.empty_like(pmm) temp[ens_mean_index] = pmm - temp = np.where(ens_mean.flatten() > 0, temp, 0.0) - return temp.reshape((dy,dx)) + temp = np.where(ens_mean.values.flatten() > 0, temp, 0.0) + ens_mean.values = temp + return ens_mean def compute_neprob(ensemble, roi=0, sigma=0.0, type='gaussian'): + roi = np.rint(roi) # round to nearest integer + assert len(ensemble.dims) >= 3, 'compute_neprob: needs ensemble of 2D arrays, not 1D arrays' y,x = np.ogrid[-roi:roi+1, -roi:roi+1] kernel = x**2 + y**2 <= roi**2 ens_roi = ndimage.filters.maximum_filter(ensemble, footprint=kernel[np.newaxis,:]) - ens_mean = np.nanmean(ens_roi, axis=0) - #ens_mean = np.nanmean(ensemble, axis=0) - - if (type == 'uniform'): + if type == 'uniform': y,x = np.ogrid[-sigma:sigma+1, -sigma:sigma+1] kernel = x**2 + y**2 <= sigma**2 - ens_mean = 
ndimage.filters.convolve(ens_mean, kernel/float(kernel.sum())) - elif (type == 'gaussian'): - ens_mean = ndimage.filters.gaussian_filter(ens_mean, sigma) - return ens_mean + smoothed = ndimage.filters.convolve(ens_roi, kernel/float(kernel.sum())) + elif type == 'gaussian': + # smooth last 2 dimensions (not TimeMember dimension) + smoothed = ndimage.filters.gaussian_filter(ens_roi, (0,sigma,sigma)) + else: + logging.error(f"compute_neprob: unknown filter {type}") + sys.exit(1) + ensemble.values = smoothed + return ensemble -def compute_prob3d(ensemble, roi=0, sigma=0.0, type='gaussian'): - print ensemble.shape +def compute_prob3d(ensemble, roi=0, sigma=0.): + logging.info(f"compute_prob3d: roi={roi} sigma={sigma}") y,x = np.ogrid[-roi:roi+1, -roi:roi+1] kernel = x**2 + y**2 <= roi**2 ens_roi = ndimage.filters.maximum_filter(ensemble, footprint=kernel[np.newaxis,np.newaxis,:]) - print ens_roi.shape + logging.info(f"ens_roi.shape={ens_roi.shape}") ens_mean = np.nanmean(ens_roi, axis=1) - print ens_mean.shape + logging.info(f"ens_mean.shape={ens_mean.shape}") ens_mean = ndimage.filters.gaussian_filter(ens_mean, [2,20,20]) return ens_mean[3,:] @@ -647,7 +785,9 @@ def computestp(data): srh_term = (data[2]/150.0) - shear06 = np.sqrt(data[3]**2 + data[4]**2) #this will be in knots (converted prior to fn) + ushear06 = data[3]-data[5] + vshear06 = data[4]-data[6] + shear06 = np.sqrt(ushear06**2 + vshear06**2) #this will be in knots (converted prior to fn) shear_term = (shear06/38.87) shear_term = np.where(shear06 > 58.32, 1.5, shear_term) shear_term = np.where(shear06 < 24.3, 0.0, shear_term) @@ -658,6 +798,31 @@ def computestp(data): stp = np.ma.filled(stp, 0.0) return stp +def compute_sspf(data, cref_files): + fh = xarray.open_mfdataset(cref_files) + #cref = fh['REFD_MAX'] #in diag files + cref = fh['REFL_MAX_COL'] #in upp files + + # if all times are in one file, then need to reshape and extract desired times + #cref = cref.reshape((10,49,cref.shape[1],cref.shape[2])) # 
reshape + #cref = cref[:,13:36+1,:] # extract desired times + #cref = np.swapaxes(cref,0,1) # flip first two axes so time is first + #cref = cref.reshape((10*((13+1)-36)),cref.shape[2],cref.shape[3]) #reshape again + + # flag nearby points + #kernel = np.ones((5,5)) + cref_mask = (cref>=30.0) + #cref_mask = ndimage.filters.maximum_filter(cref_mask, footprint=kernel[np.newaxis,:]) + + #wind_cref_hits = (data[1]>=25.0) + wind_cref_hits = np.logical_and( (data[1]>=25.0), cref_mask) + + sspf = np.logical_or( np.logical_or((data[0]>=75), wind_cref_hits), (data[2]>=1)) + return sspf + +def compute_uhratio(data): + return np.where(data[1]>50, data[0]/data[1], 0.0) + def compute_thetae(data): # GSR constants for theta E calc P0 = 100000.0 # (Pa) @@ -678,3 +843,27 @@ def compute_rh(data): qsat = 0.622 * es / (psfc - es) rh = q2 / qsat return 100*rh + +# from https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids +def interp_weights(xyz, uvw): + logging.info("interp_weights: Delaunay") + tri = Delaunay(xyz) + logging.info("interp_weights: find_simplex") + simplex = tri.find_simplex(uvw) + logging.info("interp_weights: vertices") + vertices = np.take(tri.simplices, simplex, axis=0) + temp = np.take(tri.transform, simplex, axis=0) + d = xyz.shape[1] + delta = uvw - temp[:, d] + logging.info("interp_weights: bary") + bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) + return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) + + +def computefrzdepth(t): + frz_at_surface = np.where(t[0,:] < 33, True, False) #pts where surface T is below 33F + max_column_t = np.amax(t, axis=0) + above_frz_aloft = np.where(max_column_t > 32, True, False) #pts where max column T is above 32F + +if __name__ == "__main__": + webPlot()