diff --git a/util/OrographyStats.py b/util/OrographyStats.py new file mode 100644 index 00000000..d55481cf --- /dev/null +++ b/util/OrographyStats.py @@ -0,0 +1,95 @@ +import numpy as np + + +class OrographyStats: + def __init__( self, box ): + self.box = box + + self.mean = np.mean( self.box ) + # var (actulally stddev) + self.std = np.std( self.box ) + self.max = np.max( box ) + + # This currently does not use the landuse to average over only land and zero + # out on mostly water + # con (convexity) + if self.std < 1.0: + self.con = 0.0 + else: + var4 = np.sum( [ ( h - self.mean )**4 for h in np.nditer( box ) ] ) + self.con = var4 / ( self.box.size * self.std**4 ) + + # oa (orographic asymmetry) + self.oa = np.zeros( (4) ) + self.calc_oa() + + # ol (orographic effective length) + self.ol = np.zeros( (4) ) + self.calc_ol() + + def __repr__( self ): + return f"Mean : {self.mean} Std : {self.std} Max: {self.max} CON : {self.con} OA : {self.oa} OL : {self.ol}" + + def calc_oa( self ): + # Note that right now the assumption is that the box is laid out in col major order + # with [y,x] indices + + # oa1 is the orographic asymmetry in the West direction + nu = np.sum( self.box[:, :int((self.box.shape[1] + self.box.shape[1] % 2) / 2)] > self.mean ) + nd = np.sum( self.box[:, int((self.box.shape[1] - self.box.shape[1] % 2) / 2):] > self.mean ) + self.oa[0] = ( nu - nd ) / ( nu + nd ) if ( ( nu + nd ) > 0 ) else 0.0 + + # oa2 is the orographic asymmetry in the South direction + nu = np.sum( self.box[:int((self.box.shape[0] + self.box.shape[0] % 2) / 2), :] > self.mean ) + nd = np.sum( self.box[int((self.box.shape[0] - self.box.shape[0] % 2) / 2):, :] > self.mean ) + self.oa[1] = ( nu - nd ) / ( nu + nd ) if ( ( nu + nd ) > 0 ) else 0.0 + + # Pre-compute the geometric diagonal of the box + slope = self.box.shape[0] / self.box.shape[1] + j, i = np.indices( self.box.shape ) + # Corrected - all indices that lie on diagonal are counted in both upstream and downstream + vals = ( 
i + 1 ) * slope - ( self.box.shape[0] - j ) + upstream = ( vals <= slope ) + downstream = ( vals >= -1.0 ) + + # MPAS calcs - slightly accounts for diagonal when i ~= j but increasingly off for larger discrepancies + # vals = np.rint( ( i + 1 ) * slope ) - ( self.box.shape[0] - j ) + # upstream = ( vals <= 0 ) + # downstream = ( vals >= 0 ) + + # oa3 is the orographic asymmetry in the South-West direction + nu = np.sum( self.box[upstream] > self.mean ) + nd = np.sum( self.box[downstream] > self.mean ) + self.oa[2] = ( nu - nd ) / ( nu + nd ) if ( ( nu + nd ) > 0 ) else 0.0 + + # oa4 is the orographic asymmetry in the North-West direction + upstream = np.flip( upstream, axis=0 ) + downstream = np.flip( downstream, axis=0 ) + nu = np.sum( self.box[upstream] > self.mean ) + nd = np.sum( self.box[downstream] > self.mean ) + self.oa[3] = ( nu - nd ) / ( nu + nd ) if ( ( nu + nd ) > 0 ) else 0.0 + + def calc_ol( self ): + # ol1 is the effective orographic length in the West direction + interior = self.box[int(np.floor(self.box.shape[0] * .25)):int(np.ceil(self.box.shape[0] * .75)), :] + self.ol[0] = np.sum( interior > self.mean ) / interior.size + + # ol2 is the effective orographic length in the South direction + interior = self.box[:, int(np.floor(self.box.shape[1] * .25)):int(np.ceil(self.box.shape[1] * .75))] + self.ol[1] = np.sum( interior > self.mean ) / interior.size + + # The prescribed methodology uses 4 quadrants to get the diagonals and effectively + # test half of the box, however this does not actually test the interior half + # of the area of the box in the wind direction... 
+ + # ol3 is the effective orographic length in the South-West direction + interiorA = self.box[:int(self.box.shape[0] / 2), :int(self.box.shape[1] / 2)] # first half of x first half of y + interiorB = self.box[int(self.box.shape[0] / 2):, int(self.box.shape[1] / 2):] # second half of x second half of y + + self.ol[2] = ( np.sum( interiorA > self.mean ) + np.sum( interiorB > self.mean ) ) / ( interiorA.size + interiorB.size ) + + # ol4 is the effective orographic length in the North-West direction + interiorA = self.box[int(self.box.shape[0] / 2):, :int(self.box.shape[1] / 2)] # first half of x second half of y + interiorB = self.box[:int(self.box.shape[0] / 2), int(self.box.shape[1] / 2):] # second half of x first half of y + + self.ol[3] = ( np.sum( interiorA > self.mean ) + np.sum( interiorB > self.mean ) ) / ( interiorA.size + interiorB.size ) diff --git a/util/SourceData.py b/util/SourceData.py new file mode 100644 index 00000000..81ffb3b5 --- /dev/null +++ b/util/SourceData.py @@ -0,0 +1,257 @@ +import numpy as np +import cartopy + +from TileData import TileData + + +def get_kv_pair( line, delimiter="=", comment="#" ): + non_comment = line.split( comment, maxsplit=1 )[0] + + if delimiter in non_comment: + key, _, value = non_comment.partition( delimiter ) + return key.strip(), value.strip() + else: + return None, None + + +class IndexData: + def __init__( self, path ): + self.source_path = path + # Default values of index file from : + # https://www2.mmm.ucar.edu/wrf/users/wrf_users_guide/build/html/wps.html#index-options + self.projection = "required" + self.source_type = "required" + self.signed = False + self.units = "required" + self.description = "required" + self.dx = -1.0 # required + self.dy = -1.0 # required + self.known_x = 1.0 + self.known_y = 1.0 + self.known_lat = -1.0 # required + self.known_lon = -1.0 # required + self.stdlon = -2.0 # not required + self.truelat1 = -2.0 # not required + self.truelat2 = -2.0 # not required + self.wordsize = -1 # 
required + self.tile_x = -1 # required + self.tile_y = -1 # required + self.tile_z = -2 # not required + self.tile_z_start = -2 # not required + self.tile_z_end = -2 # not required + self.category_min = -2 # not required + self.category_max = -2 # not required + self.tile_bdr = 0 + self.missing_value = -2.0 # not required + self.scale_factor = 1.0 + self.row_order = "bottom_top" + self.endian = "big" + self.iswater = 16 + self.islake = -1 # i.e. no separate inland water category (does not mean required) + self.isice = 24 + self.isurban = 1 + self.isoilwater = 14 + self.mminlu = "USGS" + self.filename_digits = 5 + + self.read_file() + + def read_file( self ): + with open( self.source_path + "/index" ) as f: + for line in f: + key, value = get_kv_pair( line ) + if key is not None: + if key == "projection": self.projection = value + elif key == "source_type": self.source_type = value + elif key == "signed": self.signed = ( value == "yes" ) + elif key == "units": self.units = value + elif key == "description": self.description = value + elif key == "dx": self.dx = float( value ) + elif key == "dy": self.dy = float( value ) + elif key == "known_x": self.known_x = float( value ) + elif key == "known_y": self.known_y = float( value ) + elif key == "known_lat": self.known_lat = float( value ) + elif key == "known_lon": self.known_lon = float( value ) + elif key == "stdlon": self.stdlon = float( value ) + elif key == "truelat1": self.truelat1 = float( value ) + elif key == "truelat2": self.truelat2 = float( value ) + elif key == "wordsize": self.wordsize = int( value ) + elif key == "tile_x": self.tile_x = int( value ) + elif key == "tile_y": self.tile_y = int( value ) + elif key == "tile_z": self.tile_z = int( value ) + elif key == "tile_z_start": self.tile_z_start = int( value ) + elif key == "tile_z_end": self.tile_z_end = int( value ) + elif key == "category_min": self.category_min = int( value ) + elif key == "category_max": self.category_max = int( value ) + elif key 
== "tile_bdr": self.tile_bdr = int( value ) + elif key == "missing_value": self.missing_value = float( value ) + elif key == "scale_factor": self.scale_factor = float( value ) + elif key == "row_order": self.row_order = value + elif key == "endian": self.endian = value + elif key == "iswater": self.iswater = int( value ) + elif key == "islake": self.islake = int( value ) + elif key == "isice": self.isice = int( value ) + elif key == "isurban": self.isurban = int( value ) + elif key == "isoilwater": self.isoilwater = int( value ) + elif key == "mminlu": self.mminlu = value + elif key == "filename_digits": self.filename_digits = int( value ) + + def get_dtype( self ): + endian = ">" if self.endian == "big" else "<" + signed = "i" if self.signed else "u" + return f"{endian}{signed}{self.wordsize}" + + +class SourceData: + def __init__( self, name, path ): + self._earth_radius = 6371229.0 + self._name = name + self._source_path = path + self._index = IndexData( self._source_path ) + + self._projection = None + self._npts_x = int( 360.0 / self._index.dx ) + self._pts_per_deg = int( 1.0 / self._index.dx ) + self._subgrid_m_dx = 2.0 * np.pi * self._earth_radius / self._npts_x + + self._tile_data = TileData( + int( int( 360.0 / self._index.dx ) / self._index.tile_x ), + int( int( 180.0 / self._index.dy ) / self._index.tile_y ), + self._index.tile_x, + self._index.tile_y, + load_func=lambda i, j: + self.read_geogrid( + # Limit the view to just the tile data for now, remove border + self.get_tile_name_ij( i, j )[0] )[ + 0, + self._index.tile_bdr:self._index.tile_y + self._index.tile_bdr, + self._index.tile_bdr:self._index.tile_x + self._index.tile_bdr + ] + ) + + def read_geogrid( self, file ): + """ + Read in the geogrid raw data using the format provided here: + https://www2.mmm.ucar.edu/wrf/users/wrf_users_guide/build/html/wps.html#writing-static-data-to-the-geogrid-binary-format + + Note that data passed out is assumed to be oriented as follows: + SW + <- -x +x -> 
(increasing longitude) + ^ +---+---+---+---+---+---+ + | | | | | | | | + -y +---+---+---+---+---+---+ + | | | | | | | + +---+---+---+---+---+---+ + | | | | | | | + +---+---+---+---+---+---+ + | | | | | | | + +y +---+---+---+---+---+---+ + | | | | | | | | + v +---+---+---+---+---+---+ + (increasing latitude) + NE + """ + rawdata = np.fromfile( + file, + dtype=self._index.get_dtype() + ) + data = rawdata.astype( np.float32, casting="unsafe" ) * self._index.scale_factor + z_dim = 1 + if self._index.tile_z_start > 0 and self._index.tile_z_end > 0: + z_dim = self._index.tile_z_end - self._index.tile_z_start + elif self._index.tile_z > 0: + z_dim = self._index.tile_z + + data = data.reshape( + ( + z_dim, + self._index.tile_y + 2 * self._index.tile_bdr, + self._index.tile_x + 2 * self._index.tile_bdr + ) + ) + if self._index.row_order == "top_bottom": + data = np.flip( data, axis=1 ) + + return data + + def latlon_to_ij( self, lat, lon ): + # Ignore staggering for now + i = 0.0 + j = 0.0 + if self._index.projection == "regular_ll": + delta_lat = lat - self._index.known_lat + delta_lon = lon - self._index.known_lon + + i = ( delta_lon / self._index.dx + self._index.known_x ) % int( 360.0 / self._index.dx ) + j = ( delta_lat / self._index.dy + self._index.known_y ) + + return i, j + + def ij_to_latlon( self, i, j ): + lat = 0.0 + lon = 0.0 + if self._index.projection == "regular_ll": + lon = ( i - self._index.known_x ) * self._index.dx + self._index.known_lon + lat = ( j - self._index.known_y ) * self._index.dy + self._index.known_lat + + if lon > 180.0: + lon -= 360.0 + + return lat, lon + + def get_tile_extent( self, starti, startj ): + start_lat, start_lon = self.ij_to_latlon( starti - self._index.tile_bdr, startj - self._index.tile_bdr ) + stop_lat, stop_lon = self.ij_to_latlon( starti + self._index.tile_x - 1 + self._index.tile_bdr, startj + self._index.tile_y - 1 + self._index.tile_bdr ) + print( ( starti - self._index.tile_bdr, startj - self._index.tile_bdr ) ) + 
print( ( starti + self._index.tile_x - 1 + self._index.tile_bdr, startj + self._index.tile_y - 1 + self._index.tile_bdr ) ) + return ( start_lon, stop_lon, start_lat, stop_lat ) + + def get_tile_name_ll( self, lat, lon ): + i, j = self.latlon_to_ij( lat, lon ) + return self.get_tile_name_ij( i, j ) + + def get_tile_name_ij( self, i, j ): + tile_i = self._index.tile_x * int( int( i ) / self._index.tile_x ) + 1 + tile_j = self._index.tile_y * int( int( j ) / self._index.tile_y ) + 1 + + path = f"{self._source_path}/" + path += f"{{xstart:0{self._index.filename_digits}d}}-{{xstop:0{self._index.filename_digits}d}}." + path += f"{{ystart:0{self._index.filename_digits}d}}-{{ystop:0{self._index.filename_digits}d}}" + return path.format( + xstart=tile_i, + xstop=tile_i + self._index.tile_x - 1, + ystart=tile_j, + ystop=tile_j + self._index.tile_y - 1 + ), tile_i, tile_j + + def get_box( self, lat, lon, size_x, size_y=None ): + if size_y is None: + size_y = size_x + + # X span in points + nx = 0 + if ( + np.cos( np.deg2rad( lat ) ) + > ( 2.0 * self._pts_per_deg * size_x * 180.0 ) / ( self._npts_x * np.pi * self._earth_radius ) + ): + nx = int( + np.ceil( + ( 180.0 * size_x * self._pts_per_deg ) + / ( np.pi * self._earth_radius * np.cos( np.deg2rad( lat ) ) ) + ) + ) + else: + nx = int( self._npts_x / 2 ) + + # Y span in points + ny = int( np.ceil( ( 180.0 * size_y * self._pts_per_deg ) / ( np.pi * self._earth_radius ) ) ) + + true_i, true_j = self.latlon_to_ij( lat, lon ) + # Generate the indices for this box regardless of tile periodicity, let the tile data handle that + indices = np.indices( ( ny, nx ) ) + indices[0] += int( true_j - ny / 2 ) + indices[1] += int( true_i - nx / 2 ) + + box = self._tile_data.get_box( indices ) + # box is now a 2D box of data spanning size in meters centered on lat/lon + return box diff --git a/util/TileData.py b/util/TileData.py new file mode 100644 index 00000000..098a573a --- /dev/null +++ b/util/TileData.py @@ -0,0 +1,143 @@ +from 
collections import deque
+import numpy as np
+
+
+class TileData:
+    def __init__( self, nx, ny, tile_x, tile_y, load_func ):
+        # https://docs.python.org/3/faq/programming.html#how-do-i-create-a-multidimensional-list
+        # Beware multiplying lists
+        self._tiles = [ [None] * nx for y in range( ny ) ]
+        self._nx = nx
+        self._ny = ny
+        self._tile_x = tile_x
+        self._tile_y = tile_y
+        self._load_func = load_func
+        self._tile_cache = deque()
+        self._max_cache = 4
+        self._debug = False
+
+    def print_present_tile_grid( self ):
+        for tile_list in self._tiles:
+            r = [ 1 if c is not None else 0 for c in tile_list ]
+            print( r )
+
+    def load_tile( self, tile_i, tile_j ):
+        if len( self._tile_cache ) == self._max_cache:
+            # make space for incoming tile
+            tile = self._tile_cache.popleft()
+            if self._debug:
+                print( f"Removing tile {tile} from loaded tiles cache" )
+
+            data = self._tiles[tile[0]][tile[1]]
+            self._tiles[tile[0]][tile[1]] = None
+            del data
+
+        # Convert tile_i and tile_j to full ij index for the start of the tile
+        # This assumes geogrid start of 1,1
+        self.add_tile_by_index( tile_i, tile_j, self._load_func( tile_i * self._tile_x, tile_j * self._tile_y ) )
+
+        self._tile_cache.append( ( tile_j, tile_i ) )
+
+    def get_tile_index( self, i, j ):
+        tile_i = int( i / self._tile_x )
+        tile_j = int( j / self._tile_y )
+        return tile_i, tile_j
+
+    def add_tile_by_ij( self, i, j, data ):
+        # i and j are raw indices
+        tile_i, tile_j = self.get_tile_index( i, j )
+        self.add_tile_by_index( tile_i, tile_j, data )
+
+    def add_tile_by_index( self, tile_i, tile_j, data ):
+        self._tiles[tile_j][tile_i] = data
+
+    def get_tiles( self, i, j, tile_indices=np.array([[0, 0]]) ):
+        tiles = []
+        tile_i, tile_j = self.get_tile_index( i, j )
+
+        for ij in tile_indices:
+            tiles.append( self.get_tile_by_index( tile_i + ij[0], tile_j + ij[1] ) )
+
+        return tiles
+
+    def get_tile_by_index( self, tile_i, tile_j ):
+        """
+        Go into the tiles and grab the corresponding tile, periodicity is resolved here
+        """
+        # 
Lat-lon periodicity + true_tile_i = tile_i % self._nx + true_tile_j = tile_j + rotate = False + + if tile_j < 0 or tile_j > self._ny: + # Flip at the poles so return a view of the data rotated 180 + true_tile_i = int( tile_i + self._nx / 2 ) % self._nx + true_tile_j = self._ny - ( tile_j % ( self._ny + 1 ) ) + rotate = True + + data = self._tiles[ true_tile_j ][ true_tile_i ] + if data is None: + if self._debug: + print( f"Tile at {true_tile_i} {true_tile_j} is not loaded yet. Loading..." ) + self.load_tile( true_tile_i, true_tile_j ) + # Grab the data again + data = self._tiles[ true_tile_j ][ true_tile_i ] + else: + if self._debug: + print( f"Tile at {true_tile_i} {true_tile_j} is already loaded" ) + + if rotate: + if self._debug: + print( f"Tile at {tile_i} {tile_j} must be rotated" ) + data = np.rot90( data, 2 ) + + return data + + def get_box( self, indices ): + # First transform indices to regions of distinct tiles by dividing the x and y + # indices by the respective tile sizes + as_tile_idx = np.floor( + np.einsum( + "ijk, i -> ijk", + indices, + [ 1 / self._tile_y, 1 / self._tile_x ] + ) + ).astype( int ) + + # Create our unique set of tiles needed and load them into an analogous structure + # Naive creation by checking for uniqueness + # uniq_tiles = np.unique( np.unique( as_tile_idx, axis=1 ), axis=2 ).astype( int ) + + # We know they will increase monotonically so just generate the range + uniq_tiles = np.indices( + ( + as_tile_idx[0, -1, 0] - as_tile_idx[0, 0, 0] + 1, + as_tile_idx[1, 0, -1] - as_tile_idx[1, 0, 0] + 1 + ) + ) + uniq_tiles[0] += as_tile_idx[0, 0, 0] + uniq_tiles[1] += as_tile_idx[1, 0, 0] + + uniq_tiles_list = list( map( tuple, uniq_tiles.transpose( 1, 2, 0 ).reshape( int( uniq_tiles.size / 2 ), 2 ) ) ) + + tiles = {} + for tile in uniq_tiles_list: + tiles[tile] = self.get_tile_by_index( tile[1], tile[0] ) + + box = np.zeros( indices[0].shape, dtype=next(iter(tiles.values())).dtype ) + # Now we have our tile set easily accessible based 
on the 1-to-1 mapping of indices to tile idx + + for tile in uniq_tiles_list: + if self._debug: + print( f"Processing tile {tile}") + # Get the box indices corresponding to this tile + box_indices = np.logical_and( as_tile_idx[0] == tile[0], as_tile_idx[1] == tile[1] ) + + # Bulk assign to box this region of uniq tile by getting the relative index + # into that tile from original indices + box[box_indices] = tiles[tile][indices[0, box_indices] % self._tile_y, indices[1, box_indices] % self._tile_x] + + if self._debug: + self.print_present_tile_grid() + + return box diff --git a/util/compute_gwdo.py b/util/compute_gwdo.py new file mode 100755 index 00000000..05a8f239 --- /dev/null +++ b/util/compute_gwdo.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 +import os +import re +import argparse +from collections import OrderedDict + +import netCDF4 as nc4 + +from SourceData import SourceData +from OrographyStats import OrographyStats + + +# https://stackoverflow.com/a/34325723 +def progerss_bar( iteration, total, prefix="", suffix="", precision=1, length=100, fill="*", empty="-", end="\r"): + """Print out a progress bar based on iteration/total of fill + iteration : current iteration + total : total iterations + prefix : prefix string + suffix : suffix string + decimals : precision of float in printing + length : character length of bar + fill : bar fill character + end : end character (e.g. "\r", "\r\n") + """ + progress = iteration / float( total ) + percent = ( f"{{0:.{precision}f}}").format( 100 * progress ) + current_length = int( length * progress ) + bar = fill * current_length + empty * ( length - current_length ) + print(f'\r{prefix} [{bar}] {percent}% {suffix}', end=end) + if iteration == total: + print( "" ) + + +def read_nml( filename ): + """Read a strict subset of Fortran 90 namelist + This ONLY reads namelists in the format: + &group + key0 = value0, + key1 = value0, value1, + key2='value0', 'value1', + key3 = "value0", "value1", + / + ...and so on... 
+
+    Index slices, multiple key-value pairs on the same line,
+    and other features are NOT supported. For full support
+    please utilize f90nml.
+    Spaces between key and value and commas does not matter.
+    Multivalue entries MUST be delimited by commas.
+    """
+    contents = ""
+    with open( filename, "r" ) as f:
+        contents = f.read()
+
+    nml = OrderedDict()
+    for match in re.finditer( r"&(?P<group>\w+)(?P<kv_pairs>.*?)\n*^[ ]*/", contents, re.S | re.M ):
+        group = match.group( "group" )
+        nml[group] = OrderedDict()
+        for kv_match in re.finditer( r"^[ ]+(?P<key>\w+)[ ]*=[ ]*(?P<value>.*?)$", match.group("kv_pairs"), re.M ):
+            key = kv_match.group( "key" )
+            values = list(
+                filter(
+                    None,
+                    [
+                        s.strip("\"' " )
+                        for s in list( filter( None, kv_match.group( "value" ).split(",") ) )
+                    ]
+                )
+            )
+            nml[group][key] = values if len( values ) > 1 else values[0]
+    return nml
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        "./compute_gwdo.py",
+        description="Compute GWDO ancillary fields directly and update geogrid files",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter
+    )
+    parser.add_argument(
+        "-n", "--namelist",
+        help="WPS namelist",
+        type=str,
+        default="./namelist.wps"
+    )
+    parser.add_argument(
+        "-d", "--dataset",
+        help="Topographic dataset to use, in geogrid format",
+        type=str,
+        default="topo_gmted2010_30s"
+    )
+    parser.add_argument(
+        "-hp", "--hide_progress",
+        help="Hide the progress bar",
+        action="store_true"
+    )
+    options = parser.parse_args()
+    nml = read_nml( options.namelist )
+    datapath= nml["geogrid"]["geog_data_path"]
+
+    topo_data = os.path.join( datapath, options.dataset )
+    topo_source = SourceData( options.dataset, topo_data )
+
+    # For now just get cmdline arg
+    max_dom = 1
+    if "max_dom" in nml["share"]:
+        max_dom = int(nml["share"]["max_dom"])
+    geo_files = [ f"geo_em.d{dom_id + 1:02d}.nc" for dom_id in range( max_dom ) ]
+    box_size_x = float( nml["geogrid"]["dx"] ) * 2
+    box_size_y = float( nml["geogrid"]["dy"] ) * 2
+
+    for geo in geo_files:
+        print( 
f"Processing {geo}" ) + geo_data = nc4.Dataset( geo, "r+" ) + + xlat = geo_data.variables[ "XLAT_M" ] + xlon = geo_data.variables[ "XLONG_M" ] + + # Fields to overwrite + con = geo_data.variables[ "CON" ] + var = geo_data.variables[ "VAR" ] + + oa1 = geo_data.variables[ "OA1" ] + oa2 = geo_data.variables[ "OA2" ] + oa3 = geo_data.variables[ "OA3" ] + oa4 = geo_data.variables[ "OA4" ] + + ol1 = geo_data.variables[ "OL1" ] + ol2 = geo_data.variables[ "OL2" ] + ol3 = geo_data.variables[ "OL3" ] + ol4 = geo_data.variables[ "OL4" ] + + if "MAX_EL" in geo_data.variables: + max_el = geo_data.variables["MAX_EL"] + else: + max_el = geo_data.createVariable( "MAX_EL", "f4", ( var.dimensions ) ) + + ns_size = xlat.shape[1] + we_size = xlat.shape[2] + + if not options.hide_progress: + progerss_bar( 0, ns_size * we_size, length=50 ) + for j in range( ns_size ): + for i in range( we_size ): + lat = xlat[0, j, i] + lon = xlon[0, j, i] + box = topo_source.get_box( lat, lon, box_size_x, box_size_y ) + + oro_stats = OrographyStats( box ) + con[0, j, i] = oro_stats.con + var[0, j, i] = oro_stats.std + + oa1[0, j, i] = oro_stats.oa[0] + oa2[0, j, i] = oro_stats.oa[1] + oa3[0, j, i] = oro_stats.oa[2] + oa4[0, j, i] = oro_stats.oa[3] + + ol1[0, j, i] = oro_stats.ol[0] + ol2[0, j, i] = oro_stats.ol[1] + ol3[0, j, i] = oro_stats.ol[2] + ol4[0, j, i] = oro_stats.ol[3] + + max_el[0, j, i] = oro_stats.max + if not options.hide_progress: + progerss_bar( i + j * we_size + 1, ns_size * we_size, length=50 ) + + geo_data.close() + + print( "Done!" ) + + +if __name__ == "__main__": + main() diff --git a/util/src/CMakeLists.txt b/util/src/CMakeLists.txt index d357c8d9..18e42d8c 100644 --- a/util/src/CMakeLists.txt +++ b/util/src/CMakeLists.txt @@ -69,4 +69,4 @@ target_sources( height_ukmo PRIVATE height_ukmo.F - ) \ No newline at end of file + )