diff --git a/fieldinfo.py b/fieldinfo.py
index 2e26083..d81541a 100755
--- a/fieldinfo.py
+++ b/fieldinfo.py
@@ -2,7 +2,7 @@ def readcm(name):
     '''Read colormap from file formatted as 0-1 RGB CSV'''
     rgb = []
     fh = open(name, 'r')
-    for line in fh.read().splitlines(): rgb.append(map(float,line.split()))
+    for line in fh.read().splitlines(): rgb.append(list(map(float,line.split())))
     return rgb
 
 def readNCLcm(name):
@@ -16,7 +16,7 @@ def readNCLcm(name):
     else: fh = open('%s/%s.rgb'%(rgb_dir_ch,name), 'r')
 
     for line in fh.read().splitlines():
-        if appending: rgb.append(map(float,line.split()))
+        if appending: rgb.append(list(map(float,line.split())))
         if ''.join(line.split()) in ['#rgb',';RGB']: appending = True
     maxrgb = max([ x for y in rgb for x in y ])
     if maxrgb > 1: rgb = [ [ x/255.0 for x in a ] for a in rgb ]
diff --git a/make_webplot.py b/make_webplot.py
index a19f9ab..6fbdede 100755
--- a/make_webplot.py
+++ b/make_webplot.py
@@ -1,9 +1,9 @@
 #!/usr/bin/env python
 
 import sys, time, os
-from webplot import webPlot, readGrid, drawOverlay, saveNewMap
+from webplot import webPlot, readGrid, saveNewMap
 
-def log(msg): print time.ctime(time.time()),':', msg
+def log(msg): print(time.ctime(time.time()),':', msg)
 
 log('Begin Script'); stime = time.time()
 
diff --git a/webplot.py b/webplot.py
index 0a77211..611d6e0 100755
--- a/webplot.py
+++ b/webplot.py
@@ -2,7 +2,7 @@
 import matplotlib.pyplot as plt
 from mpl_toolkits.basemap import *
 from datetime import *
-import cPickle as pickle
+import pickle as pickle
 import os, sys, time, argparse
 import scipy.ndimage as ndimage
 import subprocess
@@ -18,8 +18,8 @@ def __init__(self):
         self.debug = self.opts['debug']
         self.autolevels = self.opts['autolevels']
         self.domain = self.opts['domain']
-        if ',' in self.opts['timerange']: self.shr, self.ehr = map(int, self.opts['timerange'].split(','))
-        else: self.shr, self.ehr = int(self.opts['timerange']), int(self.opts['timerange'])
+        if ',' in self.opts['timerange']: self.shr, self.ehr = list(map(int, self.opts['timerange'].split(',')))
+        else: self.shr, self.ehr = int(self.opts['timerange']), int(self.opts['timerange'])
 
         self.createFilename()
         self.ENS_SIZE = int(os.getenv('ENS_SIZE', 10))
@@ -40,7 +40,7 @@ def createFilename(self):
     def loadMap(self):
         # load pickle file containing figure and axes objects (should be pregenerated)
         PYTHON_SCRIPTS_DIR = os.getenv('PYTHON_SCRIPTS_DIR', '.')
-        self.fig, self.ax, self.m = pickle.load(open('%s/%s.pk'%(PYTHON_SCRIPTS_DIR,self.domain), 'r'))
+        self.fig, self.ax, self.m = pickle.load(open('%s/%s.pk'%(PYTHON_SCRIPTS_DIR,self.domain), 'rb'))
 
         # get lat/lons from file here
         LATLON_FILE = os.getenv('LATLON_FILE', PYTHON_SCRIPTS_DIR+'/latlonfile.nc')
@@ -51,7 +51,6 @@ def readEnsemble(self):
         self.data, self.missing_members = readEnsemble(self.initdate, timerange=[self.shr,self.ehr], fields=self.opts, debug=self.debug, ENS_SIZE=self.ENS_SIZE)
 
     def plotTitleTimes(self):
-        if self.opts['over']: return
         fontdict = {'family':'monospace', 'size':12, 'weight':'bold'}
 
         # place title and times above corners of map
@@ -309,7 +308,7 @@ def saveFigure(self, trans=False):
         elif self.opts['fill']['ensprod'] in ['prob', 'neprob', 'probgt', 'problt', 'neprobgt', 'neproblt']: ncolors = 48
         elif self.opts['fill']['name'] in ['crefuh']: ncolors = 48
         else: ncolors = 255
-        command = 'pngquant %d %s --ext=.png --force'%(ncolors,self.outfile)
+        command = '/glade/u/home/sobash/pngquant/pngquant %d %s --ext=.png --force'%(ncolors,self.outfile)
         ret = subprocess.check_call(command.split())
 
         plt.clf()
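On the pngquant change above: replacing the bare `pngquant` command with an absolute path under `/glade/u/home/sobash/` ties `saveFigure` to one user's directory on one machine. A minimal sketch of a more portable lookup, assuming an environment-variable override is acceptable (the `PNGQUANT` variable name and the `quantize_png` helper are hypothetical, not part of this patch):

```python
import os
import shutil
import subprocess

def quantize_png(outfile, ncolors):
    """Quantize a PNG in place with pngquant, resolving the binary at run time."""
    # Hypothetical resolution order: env var override -> binary on PATH -> hardcoded /glade path.
    pngquant = (os.getenv('PNGQUANT')
                or shutil.which('pngquant')
                or '/glade/u/home/sobash/pngquant/pngquant')
    command = '%s %d %s --ext=.png --force' % (pngquant, ncolors, outfile)
    return subprocess.check_call(command.split())
```

`shutil.which` is available from Python 3.3 onward, so it fits the Python 3 target of this port.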
@@ -319,7 +318,7 @@ def parseargs():
     parser = argparse.ArgumentParser(description='Web plotting script for NCAR ensemble')
     parser.add_argument('-d', '--date', default=datetime.utcnow().strftime('%Y%m%d00'), help='initialization datetime (YYYYMMDDHH)')
     parser.add_argument('-tr', '--timerange', required=True, help='time range of forecasts (START,END)')
-    parser.add_argument('-f', '--fill', help='fill field (FIELD_PRODUCT_THRESH), field keys:'+','.join(fieldinfo.keys()))
+    parser.add_argument('-f', '--fill', help='fill field (FIELD_PRODUCT_THRESH), field keys:'+','.join(list(fieldinfo.keys())))
     parser.add_argument('-c', '--contour', help='contour field (FIELD_PRODUCT_THRESH)')
     parser.add_argument('-b', '--barb', help='barb field (FIELD_PRODUCT_THRESH)')
     parser.add_argument('-bs', '--barbskip', help='barb skip interval')
@@ -331,7 +330,6 @@ def parseargs():
     parser.add_argument('--debug', action='store_true', help='turn on debugging')
 
     opts = vars(parser.parse_args())
-    if opts['interp']: opts['over'] = True
 
     # opts = { 'date':date, 'timerange':timerange, 'fill':'sbcape_prob_25', 'ensprod':'mean' ... }
     # now, convert underscore delimited fill, contour, and barb args into dicts
@@ -410,15 +408,15 @@ def makeEnsembleList(wrfinit, timerange, ENS_SIZE):
 
 def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10):
     ''' Reads in desired fields and returns 2-D arrays of data for each field (barb/contour/field) '''
-    if debug: print fields
+    if debug: print(fields)
     datadict = {}
     file_list, missing_list = makeEnsembleList(wrfinit, timerange, ENS_SIZE) #construct list of files
 
     # loop through fill field, contour field, barb field and retrieve required data
     for f in ['fill', 'contour', 'barb']:
-        if not fields[f].keys(): continue
-        if debug: print 'Reading field:', fields[f]['name'], 'from', fields[f]['filename']
+        if not list(fields[f].keys()): continue
+        if debug: print('Reading field:', fields[f]['name'], 'from', fields[f]['filename'])
 
         # save some variables for use in this function
         filename = fields[f]['filename']
@@ -429,13 +427,13 @@ def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10)
         if fieldtype[0:3]=='mem': member = int(fieldtype[3:])
 
         # open Multi-file netcdf dataset
-        if debug: print file_list[filename]
+        if debug: print(file_list[filename])
         fh = MFDataset(file_list[filename])
 
         # loop through each field, wind fields will have two fields that need to be read
         datalist = []
         for n,array in enumerate(arrays):
-            if debug: print 'Reading', array
+            if debug: print('Reading', array)
 
             #read in 3D array (times*members,ny,nx) from file object
             if 'arraylevel' in fields[f]:
@@ -467,7 +465,7 @@ def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10)
             datalist.append(data)
 
         # these are derived fields, we don't have in any of the input files but we can compute
-        print datalist[0].shape
+        print(datalist[0].shape)
         if 'name' in fields[f]:
            if fieldname in ['shr06mag', 'shr01mag', 'bunkmag','speed10m']: datalist = [np.sqrt(datalist[0]**2 + datalist[1]**2)]
            elif fieldname == 'stp': datalist = [computestp(datalist)]
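One caveat for the two hunks below: they keep `data.shape[0]/ENS_SIZE` inside `np.reshape`. Under Python 3 the `/` operator is true division, so that expression becomes a float, and recent NumPy versions reject non-integer dimensions in a reshape. A minimal sketch of the likely follow-up fix, assuming the stacked first axis is always an exact multiple of the ensemble size (the `split_members` helper name is illustrative, not part of this patch):

```python
import numpy as np

def split_members(data, ens_size):
    """Reshape a stacked (times*members, ny, nx) array into (times, members, ny, nx)."""
    ntimes = data.shape[0] // ens_size  # floor division keeps the dimension an int under Python 3
    assert ntimes * ens_size == data.shape[0], 'first axis must be a multiple of ens_size'
    return np.reshape(data, (ntimes, ens_size, data.shape[1], data.shape[2]))
```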
@@ -509,9 +507,9 @@ def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10)
         elif (fieldtype[0:3] == 'mem'):
             for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0) #insert nan for missing files
             data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2]))
-            print fieldname
+            print(fieldname)
             if fieldname in ['precip', 'precipacc']:
-                print 'where we should be'
+                print('where we should be')
                 data = np.nanmax(data, axis=0)
             else: data = np.nanmax(data, axis=0)
             data = data[member-1,:]
@@ -529,7 +527,7 @@ def readEnsemble(wrfinit, timerange=None, fields=None, debug=False, ENS_SIZE=10)
             for i in missing_list[filename]: data = np.insert(data, i, np.nan, axis=0)
             data = np.reshape(data, (data.shape[0]/ENS_SIZE,ENS_SIZE,data.shape[1],data.shape[2]))
             data = compute_prob3d(data, roi=14, sigma=float(fields['sigma']), type='gaussian')
-        if debug: print 'field', fieldname, 'has shape', data.shape, 'max', data.max(), 'min', data.min()
+        if debug: print('field', fieldname, 'has shape', data.shape, 'max', data.max(), 'min', data.min())
 
         # attach data arrays for each type of field (e.g. { 'fill':[data], 'barb':[data,data] })
         datadict[f].append(data)
@@ -587,14 +585,14 @@ def saveNewMap(domstr='CONUS', wrfout=None):
     #x,y,w,h = 0.01, 0.8/float(fig_height), 0.98, 0.98*fig_width*m.aspect/float(fig_height) #too much padding at top
     x,y,w,h = 0.01, 0.7/float(fig_height), 0.98, 0.98*fig_width*m.aspect/float(fig_height)
     ax = fig.add_axes([x,y,w,h])
-    for i in ax.spines.itervalues(): i.set_linewidth(0.5)
+    for i in ax.spines.values(): i.set_linewidth(0.5)
 
     m.drawcoastlines(linewidth=0.5, ax=ax)
     m.drawstates(linewidth=0.25, ax=ax)
     m.drawcountries(ax=ax)
-    #m.drawcounties(linewidth=0.1, ax=ax)
+    m.drawcounties(linewidth=0.1, ax=ax)
 
-    pickle.dump((fig,ax,m), open('rt2015_%s.pk'%domstr, 'w'))
+    pickle.dump((fig,ax,m), open('rt2015_%s.pk'%domstr, 'wb'))
 
 def compute_pmm(ensemble):
     mem, dy, dx = ensemble.shape
@@ -626,14 +624,14 @@ def compute_neprob(ensemble, roi=0, sigma=0.0, type='gaussian'):
     return ens_mean
 
 def compute_prob3d(ensemble, roi=0, sigma=0.0, type='gaussian'):
-    print ensemble.shape
+    print(ensemble.shape)
     y,x = np.ogrid[-roi:roi+1, -roi:roi+1]
     kernel = x**2 + y**2 <= roi**2
     ens_roi = ndimage.filters.maximum_filter(ensemble, footprint=kernel[np.newaxis,np.newaxis,:])
-    print ens_roi.shape
+    print(ens_roi.shape)
     ens_mean = np.nanmean(ens_roi, axis=1)
-    print ens_mean.shape
+    print(ens_mean.shape)
     ens_mean = ndimage.filters.gaussian_filter(ens_mean, [2,20,20])
     return ens_mean[3,:]
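For reference, the recurring Python 3 pattern in this port: `map()` and `dict.keys()` return lazy iterators/views, so results are wrapped in `list()` wherever they are indexed, joined, or reused, and pickle files are opened in binary mode (`'rb'`/`'wb'`) because pickle streams are bytes. (`import pickle as pickle` works, though the `as pickle` alias is redundant; plain `import pickle` is equivalent.) A small self-contained sketch of both behaviors, with an illustrative filename:

```python
import pickle

# map() is lazy in Python 3: materialize it before indexing or re-iterating.
rgb_line = '0.5 0.25 1.0'
rgb = list(map(float, rgb_line.split()))
print(rgb[0])  # 0.5; a bare map object would raise TypeError on indexing

# pickle reads and writes bytes, so the file must be opened in binary mode.
with open('example.pk', 'wb') as fh:   # 'example.pk' is a placeholder path
    pickle.dump(rgb, fh)
with open('example.pk', 'rb') as fh:
    print(pickle.load(fh))
```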