diff --git a/.circleci/config.yml b/.circleci/config.yml
index ff39565d..3241e0c2 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -30,8 +30,8 @@ jobs:
             export PATH="$HOME/miniconda/bin:$PATH"
             source $HOME/miniconda/etc/profile.d/conda.sh
             conda install mamba -n base -c conda-forge --yes
-            mamba update -y conda
-            mamba update -y mamba
+            # mamba update -y conda
+            # mamba update -y mamba
             mamba env create -f environment.yml
             mamba install -c anaconda git
@@ -55,6 +55,7 @@ jobs:
             export GDAL_HTTP_COOKIEJAR=/tmp//cookies.txt
             export VSI_CACHE=YES
             echo $CC_OT > ~/.topoapi
+            echo gdalinfo --version
             chmod 600 ~/.topoapi
             python setup.py build
             python setup.py install
diff --git a/environment.yml b/environment.yml
index 77ca6088..9626586e 100644
--- a/environment.yml
+++ b/environment.yml
@@ -11,7 +11,7 @@ dependencies:
   - python>=3.6
   - asf_search
   - cartopy
-  - gdal>=3.2.1
+  - gdal>=3.2.1, <3.4.2
   - hdf5
   - joblib
   - libgdal
diff --git a/tools/ARIAtools.egg-info/PKG-INFO b/tools/ARIAtools.egg-info/PKG-INFO
index 30ba7ee9..6fc263e9 100644
--- a/tools/ARIAtools.egg-info/PKG-INFO
+++ b/tools/ARIAtools.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ARIAtools
-Version: 1.1.0
+Version: 1.1.3
 Summary: This is the ARIA tools package without RelaxIV support
 Home-page: UNKNOWN
 License: UNKNOWN
diff --git a/tools/ARIAtools/ARIAProduct.py b/tools/ARIAtools/ARIAProduct.py
index edb39355..66437f08 100644
--- a/tools/ARIAtools/ARIAProduct.py
+++ b/tools/ARIAtools/ARIAProduct.py
@@ -14,12 +14,12 @@
 from osgeo import gdal
 from ARIAtools.url_manager import url_versions
-from ARIAtools.shapefile_util import open_shapefile,save_shapefile
+from ARIAtools.shapefile_util import open_shapefile, save_shapefile
 from ARIAtools.logger import logger
 
 gdal.UseExceptions()
 gdal.PushErrorHandler('CPLQuietErrorHandler')
-gdal.SetConfigOption('CPL_VSIL_CURL_USE_HEAD', 'NO')
+# gdal.SetConfigOption('CPL_VSIL_CURL_USE_HEAD', 'NO')
 
 log = logging.getLogger(__name__)
 
@@ -28,6 +28,7 @@ def unwrap_self_readproduct(arg):
     # arg is the self argument and the filename is of the file to be read
     return ARIA_standardproduct.__readproduct__(arg[0], arg[1])[0]
 
+
 class ARIA_standardproduct: #Input file(s) and bbox as either list or physical shape file.
     """
@@ -41,9 +42,9 @@ class ARIA_standardproduct: #Input file(s) and bbox as either list or physical s
     def __init__(self, filearg, bbox=None, workdir='./', num_threads=1,
                  url_version='None', verbose=False):
         """
-
+
         Parse products and input bounding box (if specified)
-
+
         """
         # If user wants verbose mode
         # Parse through file(s)/bbox input
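The gdal pin in environment.yml (>=3.2.1, <3.4.2) can be verified against the bindings the environment actually resolves; a minimal sketch, assuming only that the osgeo Python bindings are importable (nothing here is added by this diff):

```python
# Minimal sketch: confirm the runtime GDAL matches the environment.yml pin
# (>=3.2.1, <3.4.2). Illustrative only; not part of the PR.
from osgeo import gdal

vnum = int(gdal.VersionInfo('VERSION_NUM'))        # e.g. 3030200 for GDAL 3.3.2
version = (vnum // 1000000, vnum // 10000 % 100, vnum // 100 % 100)
if not ((3, 2, 1) <= version < (3, 4, 2)):
    raise RuntimeError(f'GDAL {".".join(map(str, version))} is outside the pinned range')
```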
""" @@ -41,9 +42,9 @@ class ARIA_standardproduct: #Input file(s) and bbox as either list or physical s def __init__(self, filearg, bbox=None, workdir='./', num_threads=1, url_version='None', verbose=False): """ - + Parse products and input bounding box (if specified) - + """ # If user wants verbose mode # Parse through file(s)/bbox input diff --git a/tools/ARIAtools/computeMisclosure.py b/tools/ARIAtools/computeMisclosure.py index c8c2a15d..3b4bdb61 100644 --- a/tools/ARIAtools/computeMisclosure.py +++ b/tools/ARIAtools/computeMisclosure.py @@ -149,6 +149,7 @@ def __init__(self,imgfile,workdir='./', self.basename = os.path.basename(self.imgfile) self.imgdir = os.path.dirname(self.imgfile) self.workdir = os.path.abspath(workdir) + self.verbose = verbose # Check if output directory exists if not os.path.exists(self.workdir): @@ -164,7 +165,8 @@ def __init__(self,imgfile,workdir='./', self.excludePairs = excludePairs # Other - if self.verbose: logger.setLevel(logging.DEBUG) + if self.verbose: + logger.setLevel(logging.DEBUG) # Read stack data and retrieve list of dates diff --git a/tools/ARIAtools/extractProduct.py b/tools/ARIAtools/extractProduct.py index 70c393fb..9146e9ba 100755 --- a/tools/ARIAtools/extractProduct.py +++ b/tools/ARIAtools/extractProduct.py @@ -49,7 +49,8 @@ def createParser(): parser.add_argument('-w', '--workdir', dest='workdir', default='./', help='Specify directory to deposit all outputs. Default is local directory where script is launched.') parser.add_argument('-tp', '--tropo_products', dest='tropo_products', type=str, default=None, - help='Path to director(ies) or tar file(s) containing GACOS products.') + help='Path to director(ies) or tar file(s) containing GACOS products. Will use new version of products (.tif) if they exist.'\ + 'Further information on GACOS available at: http://www.gacos.net.') parser.add_argument('-l', '--layers', dest='layers', default=None, help='Specify layers to extract as a comma deliminated list bounded by single quotes. Allowed keys are: "unwrappedPhase", "coherence", "amplitude", "bPerpendicular", "bParallel", "incidenceAngle", "lookAngle", "azimuthAngle", "ionosphere". If "all" is specified, then all layers are extracted. If blank, will only extract bounding box.') parser.add_argument('-d', '--demfile', dest='demfile', type=str, @@ -118,7 +119,7 @@ def __call__(self, line, pix, h): class metadata_qualitycheck: """Metadata quality control function. - + Artifacts recognized based off of covariance of cross-profiles. Bug-fix varies based off of layer of interest. 
diff --git a/tools/ARIAtools/extractProduct.py b/tools/ARIAtools/extractProduct.py
index 70c393fb..9146e9ba 100755
--- a/tools/ARIAtools/extractProduct.py
+++ b/tools/ARIAtools/extractProduct.py
@@ -49,7 +49,8 @@ def createParser():
     parser.add_argument('-w', '--workdir', dest='workdir', default='./',
         help='Specify directory to deposit all outputs. Default is local directory where script is launched.')
     parser.add_argument('-tp', '--tropo_products', dest='tropo_products', type=str, default=None,
-        help='Path to director(ies) or tar file(s) containing GACOS products.')
+        help='Path to director(ies) or tar file(s) containing GACOS products. Will use new version of products (.tif) if they exist.'\
+            'Further information on GACOS available at: http://www.gacos.net.')
     parser.add_argument('-l', '--layers', dest='layers', default=None,
         help='Specify layers to extract as a comma deliminated list bounded by single quotes. Allowed keys are: "unwrappedPhase", "coherence", "amplitude", "bPerpendicular", "bParallel", "incidenceAngle", "lookAngle", "azimuthAngle", "ionosphere". If "all" is specified, then all layers are extracted. If blank, will only extract bounding box.')
     parser.add_argument('-d', '--demfile', dest='demfile', type=str,
@@ -118,7 +119,7 @@ def __call__(self, line, pix, h):
 
 class metadata_qualitycheck:
     """Metadata quality control function.
-
+
     Artifacts recognized based off of covariance of cross-profiles.
     Bug-fix varies based off of layer of interest.
     Verbose mode generates a series of quality control plots with
@@ -1041,9 +1042,9 @@ def tropo_correction(full_product_dict, tropo_products, bbox_file,
             ]
 
         # Check and report if tropospheric product falls outside of standard product range
-        latest_start = max(aria_rsc_dict['azimuthZeroDopplerMidTime']
+        latest_start = max(aria_rsc_dict['azimuthZeroDopplerMidTime']
                            + [min(tropo_rsc_dict['TIME_OF_DAY'])])
-        earliest_end = min(aria_rsc_dict['azimuthZeroDopplerMidTime']
+        earliest_end = min(aria_rsc_dict['azimuthZeroDopplerMidTime']
                            + [max(tropo_rsc_dict['TIME_OF_DAY'])])
         delta = (earliest_end - latest_start).total_seconds() + 1
         if delta<0:
@@ -1087,7 +1088,7 @@ def tropo_correction(full_product_dict, tropo_products, bbox_file,
                     tropo_product = np.subtract(tropo_secondary, tropo_product)
 
                 # Convert troposphere to rad
-                tropo_product = np.divide(tropo_product,
+                tropo_product = np.divide(tropo_product,
                                           float(metadata_dict[1][i][0]) \
                                           / (4*np.pi))
                 # Account for lookAngle
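In tropo_correction, the np.divide call maps the differential tropospheric delay to interferometric phase by dividing by wavelength/(4*pi); a worked sketch, assuming metadata_dict[1][i][0] holds the radar wavelength in metres (all numbers illustrative):

```python
# Worked sketch of the delay-to-radians conversion above, assuming the metadata
# value is the radar wavelength in metres (Sentinel-1 C-band is roughly 0.0555 m).
import numpy as np

wavelength = 0.0555                       # assumed wavelength, metres
delay_m = np.array([[0.010, 0.020],
                    [0.015, 0.025]])      # differential zenith delay, metres
phase_rad = np.divide(delay_m, wavelength / (4 * np.pi))   # == 4*pi*delay/wavelength
print(phase_rad)
```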
diff --git a/tools/ARIAtools/mask_util.py b/tools/ARIAtools/mask_util.py
index a19fe18b..edf0595a 100755
--- a/tools/ARIAtools/mask_util.py
+++ b/tools/ARIAtools/mask_util.py
@@ -161,6 +161,7 @@ def prep_mask(product_dict, maskfilename, bbox_file, prods_TOTbbox, proj,
         except:
             pass
 
+    mask.FlushCache()
     return mask
 
 
@@ -252,6 +253,7 @@ def __init__(self, path_aria, lc=[11, 12, 90, 95]):
         self.lc = lc # landcover classes to mask
         gdal.PushErrorHandler('CPLQuietErrorHandler')
 
+
     def __call__(self, proj, bounds, arrshape, outputFormat='ENVI', test=False):
         """ view=True to plot the mask; test=True to apply mask to dummy data """
         import matplotlib.pyplot as plt
@@ -279,21 +281,24 @@ def __call__(self, proj, bounds, arrshape, outputFormat='ENVI', test=False):
         path_mask = op.join(self.path_aria, 'mask')
         os.mkdirs(path_mask) if not op.exists(path_mask) else ''
         dst = op.join(path_mask, 'NLCD_crop.msk')
-        ds = gdal.Translate(dst, ds_mask, options=gdal.TranslateOptions(format=outputFormat, outputType=gdal.GDT_Byte))
-        gdal.BuildVRT(dst + '.vrt' ,ds)
+        ds = gdal.Translate(dst, ds_mask, format=outputFormat, outputType=gdal.GDT_Byte)
+
+        ds1 = gdal.BuildVRT(dst+'.vrt', ds)
 
         ## save a view of the mask
         arr = ds.ReadAsArray()
         plt.imshow(arr, cmap=plt.cm.Greys_r, interpolation='nearest')
         plt.colorbar(); plt.title(f'Resampled mask\nDims: {arr.shape}')
-        plt.savefig(op.join(path_mask, 'NLCD_crop.msk.png'))
+        plt.savefig(f'{op.splitext(dst)[0]}.png')
 
         if test:
             self.__test__(ds_mask)
 
         ds.FlushCache()
-        del ds, ds_mask, ds_resamp, ds_crop
+        del ds, ds1, ds_crop, ds_resamp, ds_mask
+
+        return dst
+
 
     def __test__(self, ds_maskre):
         ## apply mask to dummy data FOR TESTING
         import matplotlib.pyplot as plt
@@ -305,6 +310,7 @@ def __test__(self, ds_maskre):
         plt.imshow(arr, cmap='jet_r', interpolation='nearest'); plt.colorbar()
         plt.title('Mask applied to dummy data'); plt.show()
 
+
     def _dummy_data(self, ds2match):
         """ Create raster of dummy data using the dem (for sizing); For test """
         if isinstance(ds2match, str) and op.exists(ds2match):
@@ -318,6 +324,7 @@ def _dummy_data(self, ds2match):
         arr1 = ds.ReadAsArray()
         return ds
 
+
     def _apply_mask(self, ds_mask, ds_2mask):
         """ Apply mask to test viewing """
         arr = ds_mask.ReadAsArray()
diff --git a/tools/bin/ariaAOIassist.py b/tools/bin/ariaAOIassist.py
index c7a362b4..c73c05e4 100755
--- a/tools/bin/ariaAOIassist.py
+++ b/tools/bin/ariaAOIassist.py
@@ -297,7 +297,7 @@ def checkContinuity(self,removeIncompleteDates=False):
         for date in dates:
             # Indices of frames matching non-RAW and date
             dateIndices=self.metadata[self.metadata['Common Date']==date].index
-            passIndices=set(SLCindices).intersection(dateIndices)
+            passIndices=list(set(SLCindices).intersection(dateIndices))
 
             # Sort tracks south-north and compare latitude extents
             # "satPass" refers to all the acquisitions from a single satellite pass
@@ -437,8 +437,8 @@ def plotFrameCenters(self,flagPartialCoverage=False,plotRaw=False):
         if flagPartialCoverage==True:
             slcIndices=self.metadata[self.metadata['Processing Level']=='SLC'].index
             partialIndices=self.metadata[self.metadata['Extent Covered']==False].index
-            partialIndices=set(slcIndices).intersection(partialIndices)
-            partialDates=set([date for date in self.metadata.loc[partialIndices,'Common Date']])
+            partialIndices=list(set(slcIndices).intersection(partialIndices))
+            partialDates=list(set([date for date in self.metadata.loc[partialIndices,'Common Date']]))
 
             # Change date label to red if only partial coverage
             [self.ax.get_xticklabels()[n].set_color('r') for n,date in enumerate(datelabels) if
@@ -506,7 +506,7 @@ def save2kml(self):
 
         for date in dates:
             dateIndices=self.metadata[self.metadata['Common Date']==date].index
-            dateIndices=set(slcIndices).intersection(dateIndices)
+            dateIndices=list(set(slcIndices).intersection(dateIndices))
 
             # Create KML layer
             layer=DS.CreateLayer(date,None,ogr.wkbPolygon)
@@ -597,7 +597,7 @@ def __mergeFramesbyDate__(self,date):
 
         # Collect indices of date
         dateIndices=self.metadata[self.metadata['Common Date']==date].index
-        dateIndices=set(slcIndices).intersection(dateIndices)
+        dateIndices=list(set(slcIndices).intersection(dateIndices))
 
         # Compute polygons
         datePolygons=[]
@@ -727,7 +727,7 @@ def __saveAOI__(self,AOI):
         print ('ARIA-tools Version:', get_distribution('ARIAtools').version)
     except:
         pass
-
+
 
     inps = cmdLineParse(iargs=None)
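The set(...) to list(set(...)) changes in ariaAOIassist.py are needed because recent pandas releases reject sets as label indexers for .loc; a minimal illustration with made-up metadata:

```python
# Why the set -> list casts above matter: modern pandas raises
# "Passing a set as an indexer is not supported" for df.loc[set_of_labels].
# Data below is purely illustrative.
import pandas as pd

meta = pd.DataFrame({'Common Date': ['20200101', '20200113', '20200101'],
                     'Processing Level': ['SLC', 'SLC', 'RAW']})
slcIndices = meta[meta['Processing Level'] == 'SLC'].index
dateIndices = meta[meta['Common Date'] == '20200101'].index

passIndices = list(set(slcIndices).intersection(dateIndices))   # list, not set
print(meta.loc[passIndices, 'Common Date'])
```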
diff --git a/tools/bin/ariaDownload.py b/tools/bin/ariaDownload.py
index 979b2ab5..931022f8 100755
--- a/tools/bin/ariaDownload.py
+++ b/tools/bin/ariaDownload.py
@@ -37,27 +37,25 @@ def createParser():
         '\n\t ariaDownload.py --bbox "36.75 37.225 -76.655 -75.928"'
         '\n\t ariaDownload.py -t 004,077 --start 20190101 -o count',
         formatter_class=argparse.RawDescriptionHelpFormatter)
-    parser.add_argument('-o', '--output', dest='output', default='Download', \
-        type=str,
-        help='Output type, default is "Download". "Download", "Count", and "Url"'
-        '"Kmz" are currently supported. Use "Url" for ingestion to '
-        'aria*.py')
-    parser.add_argument('-t', '--track', dest='track', default=None, type=str,
+    parser.add_argument('-o', '--output', default='Download', type=str.title,
+        choices=('Download', 'Count', 'Url'), help='Output type. '\
+        'Default="Download". Use "Url" for ingestion to aria*.py')
+    parser.add_argument('-t', '--track', default=None, type=str,
         help='track to download; single number (including leading zeros) or '
         'comma separated')
-    parser.add_argument('-b', '--bbox', dest='bbox', default=None, type=str,
+    parser.add_argument('-b', '--bbox', default=None, type=str,
         help='Lat/Lon Bounding SNWE, or GDAL-readable file containing '
        'POLYGON geometry.')
     parser.add_argument('-w', '--workdir', dest='wd', default='./products', \
         type=str, help='Specify directory to deposit all outputs. Default is '
        '"products" in local directory where script is launched.')
-    parser.add_argument('-s', '--start', dest='start', default='20140101', type=str,
+    parser.add_argument('-s', '--start', default='20140101', type=str,
         help='Start date as YYYYMMDD; If none provided, starts at beginning '
         'of Sentinel record (2014).')
-    parser.add_argument('-e', '--end', dest='end', default='21000101', type=str,
+    parser.add_argument('-e', '--end', default='21000101', type=str,
         help='End date as YYYYMMDD. If none provided, ends today.')
-    parser.add_argument('-u', '--user', dest='user', default=None, type=str,
+    parser.add_argument('-u', '--user', default=None, type=str,
         help='NASA Earthdata URS user login. Users must add "GRFN Door '
         '(PROD)" and "ASF Datapool Products" to their URS approved '
         'applications.')
@@ -74,21 +72,21 @@ def createParser():
         help='Take pairs with a temporal baseline -- days greater than this '
         'value. Example, annual pairs: ariaDownload.py -t 004 '
         '--daysmore 364.')
-    parser.add_argument('-nt', '--num_threads', dest='num_threads', \
-        default='1', type=str,
+    parser.add_argument('-nt', '--num_threads', default='1', type=str,
         help='Specify number of threads for multiprocessing '
         'download. By default "1". Can also specify "All" to use all '
         'available threads.')
-    parser.add_argument('-i', '--ifg', dest='ifg', default=None, type=str,
+    parser.add_argument('-i', '--ifg', default=None, type=str,
         help='Retrieve one interferogram by its start/end date, specified as '
         'YYYYMMDD_YYYYMMDD (order independent)')
     parser.add_argument('-d', '--direction', dest='flightdir', default=None, \
         type=str,
         help='Flight direction, options: ascending, a, descending, d')
-    parser.add_argument('--version', dest='version', default=None,
+    parser.add_argument('--version', default=None,
         help='Specify version as str, e.g. 2_0_4 or all prods; default: '
-        'newest')
-    parser.add_argument('-v', '--verbose', dest='v', action='store_true',
+        'newest. All products are downloaded. Unspecified versions are '
+        'stored in "workdir"/duplicated_products')
+    parser.add_argument('-v', '--verbose', action='store_true',
         help='Print products to be downloaded to stdout')
 
     return parser
@@ -108,12 +106,6 @@ def cmdLineParse(iargs=None):
     if not inps.track and not inps.bbox:
         raise Exception('Must specify either a bbox or track')
 
-    if not inps.output.lower() in ['count', 'kmz', 'kml', 'url', 'download']:
-        raise Exception ('Incorrect output keyword. Choose "count", "kmz", '
-                         '"url", or "download"')
-
-    inps.output = 'Kml' if inps.output.lower() == 'kmz' or \
-                  inps.output.lower() == 'kml' else inps.output.title()
 
     return inps
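Dropping the hand-rolled output-keyword check in cmdLineParse works because argparse applies type=str.title before enforcing choices, so any capitalisation of download/count/url is normalised and validated in one place; for example:

```python
# Sketch of the --output handling above: argparse runs type=str.title first,
# then enforces choices, replacing the manual .lower() validation that was removed.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-o', '--output', default='Download', type=str.title,
                    choices=('Download', 'Count', 'Url'))

print(parser.parse_args(['-o', 'url']).output)    # -> 'Url'
print(parser.parse_args(['-o', 'COUNT']).output)  # -> 'Count'
```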
@@ -235,10 +227,10 @@ def __call__(self):
         if self.inps.output == 'Count':
             log.info('\nFound -- %d -- products', len(scenes))
 
-        elif self.inps.output == 'Kml':
-            dst = self._fmt_dst()
-            self.log.error('Kml option is not yet supported. '\
-                'Revert to an older version of ARIAtools')
+        # elif self.inps.output == 'Kml':
+        #     dst = fmt_dst(inps)
+        #     log.error('Kml option is not yet supported. '\
+        #         'Revert to an older version of ARIAtools')
 
         elif self.inps.output == 'Url':
             dst = fmt_dst(inps)
@@ -251,6 +243,7 @@ def __call__(self):
             scenes = asf.ASFSearchResults(scenes)
             nt = int(self.inps.num_threads) # so legacy works
             ## allow a user to specify username / password
+            log.info (f'Downloading {len(scenes)} products...')
             if self.inps.user is not None:
                 session = asf.ASFSession()
                 session.auth_with_creds(self.inps.user, self.inps.passw)
@@ -258,7 +251,11 @@ def __call__(self):
             else:
                 scenes.download(self.inps.wd, processes=nt)
 
-            log.info(f'Wrote -- {len(scenes)} -- products to: {self.inps.wd}')
+            log.info(f'Download complete. Wrote -- {len(scenes)} -- products to: {self.inps.wd}')
+
+            if inps.verbose:
+                for scene in scenes:
+                    print(scene.geojson()['properties']['sceneName'])
 
         return
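The download path relies on asf_search; a hedged sketch of the same flow, with illustrative credentials and an empty result set standing in for the scenes gathered earlier in __call__:

```python
# Hedged sketch of the authenticated, multi-threaded download wired up above.
# Credentials and paths are illustrative; the empty ASFSearchResults stands in
# for the filtered search results, so running this downloads nothing.
import asf_search as asf

scenes = asf.ASFSearchResults([])          # stand-in for the scenes gathered earlier
session = asf.ASFSession()
# session.auth_with_creds('urs_username', 'urs_password')   # NASA Earthdata URS login

scenes.download('.', processes=2)          # thread count mirrors the -nt option above
print(f'Wrote -- {len(scenes)} -- products to: .')
```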