diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index 4d8ea8ba..5ed6a328 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -41,7 +41,7 @@ body: id: description attributes: label: Describe the feature request - description: A clear and concise description of your request. + description: A clear and concise description of your request. placeholder: Tell us what you want value: "I need or want [...]" validations: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4179bc8e..cc37f8b2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,25 @@ -# See https://pre-commit.com for more information -# See https://pre-commit.com/hooks.html for more hooks +ci: + autofix_prs: true + repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: "v5.0.0" hooks: - - id: check-added-large-files \ No newline at end of file + - id: check-added-large-files + - id: check-case-conflict + - id: check-docstring-first + - id: check-merge-conflict + - id: check-yaml + args: [--allow-multiple-documents] + - id: debug-statements + - id: end-of-file-fixer + - id: file-contents-sorter + files: (requirements.txt)$ + - id: mixed-line-ending + - id: trailing-whitespace + + - repo: https://github.com/psf/black + rev: "25.1.0" + hooks: + - id: black + args: [--preview, --enable-unstable-feature, string_processing] diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 560787cd..8ea2399d 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -19,4 +19,4 @@ python: - method: pip path: . extra_requirements: - - docs \ No newline at end of file + - docs diff --git a/build_docker_image.sh b/build_docker_image.sh index ab8bd5ff..f5e314fa 100755 --- a/build_docker_image.sh +++ b/build_docker_image.sh @@ -13,4 +13,4 @@ set -ex docker build --rm --force-rm --network host -t $REPO/$IMAGE:$TAG -f docker/Dockerfile.isce3_builder . 
-docker save $REPO/$IMAGE:$TAG > docker/dockerimg_cslc_s1_${TAG}.tar \ No newline at end of file +docker save $REPO/$IMAGE:$TAG > docker/dockerimg_cslc_s1_${TAG}.tar diff --git a/docker/specfile.txt b/docker/specfile.txt index c243bb01..3921f955 100644 --- a/docker/specfile.txt +++ b/docker/specfile.txt @@ -244,4 +244,3 @@ https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.3.0-py311hc009520 https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.5.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/geopandas-0.13.2-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/dem_stitcher-2.5.0-pyhd8ed1ab_0.conda - diff --git a/docker/specfile_isce3_builder b/docker/specfile_isce3_builder index d3b7464c..e5626fcd 100644 --- a/docker/specfile_isce3_builder +++ b/docker/specfile_isce3_builder @@ -1,10 +1,10 @@ -# This file may be used to create an environment using: -# $ conda create --name --file -# platform: linux-64 -@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 -https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_13.conda -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda +# This file may be used to create an environment using: +# $ conda create --name --file +# platform: linux-64 +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 +https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_13.conda +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 @@ -259,4 +259,4 @@ https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.3.0-py39hc236052_ https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.5.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/linux-64/pysolid-0.3.0-py39hf62ee64_0.conda https://conda.anaconda.org/conda-forge/noarch/geopandas-0.13.2-pyhd8ed1ab_1.conda -https://conda.anaconda.org/conda-forge/noarch/dem_stitcher-2.5.0-pyhd8ed1ab_0.conda \ No newline at end of file +https://conda.anaconda.org/conda-forge/noarch/dem_stitcher-2.5.0-pyhd8ed1ab_0.conda diff --git a/environment.yml b/environment.yml index e1515eca..83269b5f 100644 --- a/environment.yml +++ b/environment.yml @@ -29,4 +29,3 @@ dependencies: - rasterio - dem_stitcher - gxx_linux-64>=9,<10 - diff --git a/requirements.txt b/requirements.txt index 8a943534..e6af9ee8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,17 +1,17 @@ -python>=3.9 # sentinel1-reader requirement -numpy # sentinel1-reader requirement -lxml # sentinel1-reader requirement +#journal # as of Mar 2022, journal from conda does not support python3.9; since it is included during isce3 installation above, comment this out temporarily. gdal>=3 +h5py isce3>=0.13.0 # since the conda-installed isce3 is not the most updated version, installing isce3 from scratch is recommended, to stay in sync with isce3 development. -#journal # as of Mar 2022, journal from conda does not support python3.9; since it is included during isce3 installation above, comment this out temporarily.
+lxml # sentinel1-reader requirement +numpy # sentinel1-reader requirement pandas pyproj pysolid pytest pytest-order +python>=3.9 # sentinel1-reader requirement +requests ruamel.yaml scipy -yamale -h5py shapely -requests +yamale diff --git a/src/compass/s1_cslc.py b/src/compass/s1_cslc.py index 37cb81b8..d7fa51f4 100755 --- a/src/compass/s1_cslc.py +++ b/src/compass/s1_cslc.py @@ -1,9 +1,14 @@ #!/usr/bin/env python -'''driver for CSLC workflow in radar/geo''' - -from compass import (s1_rdr2geo, s1_geo2rdr, s1_resample, - s1_geocode_slc, s1_static_layers) +"""driver for CSLC workflow in radar/geo""" + +from compass import ( + s1_rdr2geo, + s1_geo2rdr, + s1_resample, + s1_geocode_slc, + s1_static_layers, +) from compass.utils.geo_runconfig import GeoRunConfig from compass.utils.runconfig import RunConfig from compass.utils.yaml_argparse import YamlArgparse @@ -21,10 +26,10 @@ def run(run_config_path: str, grid_type: str): Grid type of the output CSLC """ - if grid_type == 'radar': + if grid_type == "radar": # CSLC workflow in radar coordinates # get a runconfig dict from command line args - cfg = RunConfig.load_from_yaml(run_config_path, 's1_cslc_radar') + cfg = RunConfig.load_from_yaml(run_config_path, "s1_cslc_radar") if cfg.is_reference: # reference burst - run rdr2geo and archive it @@ -35,13 +40,13 @@ def run(run_config_path: str, grid_type: str): s1_geo2rdr.run(cfg) s1_resample.run(cfg) - elif grid_type == 'geo': + elif grid_type == "geo": # CSLC workflow in geo-coordinates # get a runconfig dict from command line arguments - cfg = GeoRunConfig.load_from_yaml(run_config_path, 's1_cslc_geo') + cfg = GeoRunConfig.load_from_yaml(run_config_path, "s1_cslc_geo") # Check if product_type is CSLC-S1, and produce product only - if cfg.product_type == 'CSLC_S1': + if cfg.product_type == "CSLC_S1": s1_geocode_slc.run(cfg) else: s1_static_layers.run(cfg) @@ -54,5 +59,5 @@ def main(): if __name__ == "__main__": - '''run s1_cslc from command line''' + """run s1_cslc from command line""" main() diff --git a/src/compass/s1_cslc_qa.py b/src/compass/s1_cslc_qa.py index 94fb9573..88af22c9 100644 --- a/src/compass/s1_cslc_qa.py +++ b/src/compass/s1_cslc_qa.py @@ -1,6 +1,7 @@ -''' +""" Class to compute stats for geocoded raster and corrections -''' +""" + import datetime import json import os @@ -12,56 +13,72 @@ from osgeo import ogr, osr, gdal from scipy import ndimage -from compass.s1_rdr2geo import (file_name_los_east, - file_name_los_north,file_name_local_incidence, - file_name_x, file_name_y, file_name_z) -from compass.utils.h5_helpers import (DATA_PATH, METADATA_PATH, TIME_STR_FMT, - QA_PATH, add_dataset_and_attrs, Meta) +from compass.s1_rdr2geo import ( + file_name_los_east, + file_name_los_north, + file_name_local_incidence, + file_name_x, + file_name_y, + file_name_z, +) +from compass.utils.h5_helpers import ( + DATA_PATH, + METADATA_PATH, + TIME_STR_FMT, + QA_PATH, + add_dataset_and_attrs, + Meta, +) from compass.utils.helpers import WORKFLOW_SCRIPTS_DIR # determine the path to the world land GPKG file -LAND_GPKG_FILE = os.path.join(WORKFLOW_SCRIPTS_DIR, 'data', - 'GSHHS_l_L1.shp.no_attrs.epsg3413_dissolved.gpkg') +LAND_GPKG_FILE = os.path.join( + WORKFLOW_SCRIPTS_DIR, "data", "GSHHS_l_L1.shp.no_attrs.epsg3413_dissolved.gpkg" +) + def _compute_slc_array_stats(arr: np.ndarray, pwr_phase: str): # internal to function to compute min, max, mean, and std dev of power or # phase of SLC array. Default to phase stat computation. 
- if pwr_phase == 'power': - post_op_arr = np.abs(arr)**2 + if pwr_phase == "power": + post_op_arr = np.abs(arr) ** 2 else: post_op_arr = np.angle(arr) - return [float(np_op(post_op_arr)) - for np_op in [np.nanmean, np.nanmin, np.nanmax, - np.nanstd]] + return [ + float(np_op(post_op_arr)) + for np_op in [np.nanmean, np.nanmin, np.nanmax, np.nanstd] + ] def value_description_dict(val, desc): - ''' + """ Convenience function that returns dict with description and value - ''' - return {'value': val, 'description': desc} + """ + return {"value": val, "description": desc} def _qa_items_to_h5_and_dict(h5_group, qa_dict, qa_items): - ''' + """ Convenience function that writes QA items to HDF5 group and QA dict - ''' + """ # write items to HDF5 and dict for qa_item in qa_items: # write to HDF5 group RFI info add_dataset_and_attrs(h5_group, qa_item) # add items to RFI dict - qa_dict[qa_item.name] = value_description_dict(qa_item.value, - qa_item.description) + qa_dict[qa_item.name] = value_description_dict( + qa_item.value, qa_item.description + ) class QualityAssuranceCSLC: - ''' + """ Class to compute stats for geocoded raster and corrections - ''' - stat_names = ['mean', 'min', 'max', 'std'] + """ + + stat_names = ["mean", "min", "max", "std"] def __init__(self): self.stats_dict = {} @@ -71,9 +88,8 @@ def __init__(self): self.orbit_dict = {} self.output_to_json = False - def compute_CSLC_raster_stats(self, cslc_h5py_root, bursts): - ''' + """ Compute CSLC raster stats. Stats written to HDF5 and saved to class dict for later JSON output @@ -83,12 +99,12 @@ def compute_CSLC_raster_stats(self, cslc_h5py_root, bursts): Root of CSLC HDF5 bursts: list Bursts whose geocoded raster stats are to be computed - ''' + """ for b in bursts: pol = b.polarization # get dataset and compute stats according to dtype - pol_path = f'{DATA_PATH}/{pol}' + pol_path = f"{DATA_PATH}/{pol}" pol_arr = cslc_h5py_root[pol_path][()] # create dict for current polarization @@ -96,29 +112,27 @@ def compute_CSLC_raster_stats(self, cslc_h5py_root, bursts): pol_dict = self.stats_dict[pol] # compute power or phase then write stats to HDF5 for CSLC - for pwr_phase in ['power', 'phase']: + for pwr_phase in ["power", "phase"]: # create dict to store real/imaginary stat items pol_dict[pwr_phase] = {} # create HDF5 group for power or phase stats of current # polarization - h5_stats_path = f'{QA_PATH}/statistics/data/{pol}/{pwr_phase}' + h5_stats_path = f"{QA_PATH}/statistics/data/{pol}/{pwr_phase}" stats_group = cslc_h5py_root.require_group(h5_stats_path) # build list of QA stat items for pwr_phase qa_items = [] vals = _compute_slc_array_stats(pol_arr, pwr_phase) for val_name, val in zip(self.stat_names, vals): - desc = f'{val_name} of {pwr_phase} of {pol} geocoded SLC' + desc = f"{val_name} of {pwr_phase} of {pol} geocoded SLC" qa_items.append(Meta(val_name, val, desc)) # save stats to dict and write to HDF5 - _qa_items_to_h5_and_dict(stats_group, pol_dict[pwr_phase], - qa_items) - + _qa_items_to_h5_and_dict(stats_group, pol_dict[pwr_phase], qa_items) def compute_static_layer_stats(self, cslc_h5py_root, rdr2geo_params): - ''' + """ Compute static layer stats. Stats written to HDF5 and saved to class dict for later JSON output @@ -132,9 +146,9 @@ def compute_static_layer_stats(self, cslc_h5py_root, rdr2geo_params): Type of troposphere delay. Any of 'dry', 'wet', or 'wet_dry' for the sum of wet and dry troposphere delays. Only used if apply_tropo_corrections is true.
- ''' + """ # path to source group - static_layer_path = f'{DATA_PATH}' + static_layer_path = f"{DATA_PATH}" # Get the static layer to compute stats # Following dict tracks which static layers to generate @@ -146,20 +160,18 @@ def compute_static_layer_stats(self, cslc_h5py_root, rdr2geo_params): file_name_z: rdr2geo_params.compute_height, file_name_local_incidence: rdr2geo_params.compute_local_incidence_angle, file_name_los_east: rdr2geo_params.compute_ground_to_sat_east, - file_name_los_north: rdr2geo_params.compute_ground_to_sat_north + file_name_los_north: rdr2geo_params.compute_ground_to_sat_north, } - static_layers = [key for key, val in static_layers_dict.items() - if val] + static_layers = [key for key, val in static_layers_dict.items() if val] - self.compute_stats_from_float_hdf5_dataset(cslc_h5py_root, - static_layer_path, - 'static_layers', - static_layers) + self.compute_stats_from_float_hdf5_dataset( + cslc_h5py_root, static_layer_path, "static_layers", static_layers + ) - - def compute_correction_stats(self, cslc_h5py_root, apply_tropo_corrections, - tropo_delay_type): - ''' + def compute_correction_stats( + self, cslc_h5py_root, apply_tropo_corrections, tropo_delay_type + ): + """ Compute correction stats. Stats written to HDF5 and saved to class dict for later JSON output @@ -173,30 +185,36 @@ def compute_correction_stats(self, cslc_h5py_root, apply_tropo_corrections, Type of troposphere delay. Any of 'dry', 'wet', or 'wet_dry' for the sum of wet and dry troposphere delays. Only used if apply_tropo_corrections is true. - ''' + """ # path to source group - corrections_src_path = f'{METADATA_PATH}/processing_information/timing_corrections' + corrections_src_path = ( + f"{METADATA_PATH}/processing_information/timing_corrections" + ) # names of datasets to compute stats for - corrections = ['bistatic_delay', 'geometry_steering_doppler', - 'azimuth_fm_rate_mismatch', 'los_ionospheric_delay', - 'los_solid_earth_tides', 'azimuth_solid_earth_tides'] + corrections = [ + "bistatic_delay", + "geometry_steering_doppler", + "azimuth_fm_rate_mismatch", + "los_ionospheric_delay", + "los_solid_earth_tides", + "azimuth_solid_earth_tides", + ] # check if tropo corrections need to be computed and saved if apply_tropo_corrections: - for delay_type in ['wet', 'dry']: + for delay_type in ["wet", "dry"]: if delay_type in tropo_delay_type: - corrections.append(f'{delay_type}_los_troposphere_delay') - - self.compute_stats_from_float_hdf5_dataset(cslc_h5py_root, - corrections_src_path, - 'timing_corrections', corrections) + corrections.append(f"{delay_type}_los_troposphere_delay") + self.compute_stats_from_float_hdf5_dataset( + cslc_h5py_root, corrections_src_path, "timing_corrections", corrections + ) - def compute_stats_from_float_hdf5_dataset(self, cslc_h5py_root, - src_group_path, qa_group_name, - qa_item_names): - ''' + def compute_stats_from_float_hdf5_dataset( + self, cslc_h5py_root, src_group_path, qa_group_name, qa_item_names + ): + """ Compute correction stats for float-type HDF5 datasets.
Stats written to HDF5 and saved to class dict for later JSON output @@ -207,7 +225,7 @@ def compute_stats_from_float_hdf5_dataset(self, cslc_h5py_root, src_group_path: str qa_group_name: str qa_item_names: list[str] - ''' + """ # init dict to save all QA item stats to self.stats_dict[qa_group_name] = {} qa_dict = self.stats_dict[qa_group_name] @@ -219,61 +237,66 @@ def compute_stats_from_float_hdf5_dataset(self, cslc_h5py_root, qa_item_dict = qa_dict[qa_item_name] # get dataset and compute stats according to dtype - qa_item_path = f'{src_group_path}/{qa_item_name}' + qa_item_path = f"{src_group_path}/{qa_item_name}" qa_item_ds = cslc_h5py_root[qa_item_path] # compute stats stat_obj = isce3.math.StatsFloat32(qa_item_ds[()].astype(np.float32)) # create HDF5 group for stats of current QA item - h5_stats_path = f'{QA_PATH}/statistics/{qa_group_name}/{qa_item_name}' + h5_stats_path = f"{QA_PATH}/statistics/{qa_group_name}/{qa_item_name}" qa_item_stats_group = cslc_h5py_root.require_group(h5_stats_path) # build list of QA stat items qa_items = [] - vals = [stat_obj.mean, stat_obj.min, stat_obj.max, - stat_obj.sample_stddev] + vals = [stat_obj.mean, stat_obj.min, stat_obj.max, stat_obj.sample_stddev] for val_name, val in zip(self.stat_names, vals): - desc = f'{val_name} of {qa_item_name}' + desc = f"{val_name} of {qa_item_name}" qa_items.append(Meta(val_name, val, desc)) # save stats to dict and write to HDF5 - _qa_items_to_h5_and_dict(qa_item_stats_group, qa_item_dict, - qa_items) - + _qa_items_to_h5_and_dict(qa_item_stats_group, qa_item_dict, qa_items) def shadow_pixel_classification(self, cslc_h5py_root): - ''' + """ Populate classification of shadow layover pixels Parameters ---------- cslc_h5py_root: h5py.File Root of CSLC HDF5 - ''' + """ - percent_shadow, percent_layover, percent_combined =\ + percent_shadow, percent_layover, percent_combined = ( self.compute_layover_shadow_pixel_percent(cslc_h5py_root) + ) pxl_qa_items = [ - Meta('percent_layover_pixels', percent_layover, - 'Percentage of output pixels labeled layover'), - Meta('percent_shadow_pixels', percent_shadow, - 'Percentage of output pixels labeled shadow'), - Meta('percent_combined_pixels', percent_combined, - 'Percentage of output pixels labeled layover and shadow') + Meta( + "percent_layover_pixels", + percent_layover, + "Percentage of output pixels labeled layover", + ), + Meta( + "percent_shadow_pixels", + percent_shadow, + "Percentage of output pixels labeled shadow", + ), + Meta( + "percent_combined_pixels", + percent_combined, + "Percentage of output pixels labeled layover and shadow", + ), ] # create HDF5 group for pixel classification info - h5_pxl_path = f'{QA_PATH}/pixel_classification' + h5_pxl_path = f"{QA_PATH}/pixel_classification" pxl_group = cslc_h5py_root.require_group(h5_pxl_path) # write items to HDF5 and dict - _qa_items_to_h5_and_dict(pxl_group, self.pixel_percentage_dict, - pxl_qa_items) - + _qa_items_to_h5_and_dict(pxl_group, self.pixel_percentage_dict, pxl_qa_items) def percent_land_and_valid_pixels(self, cslc_h5py_root, pol): - ''' + """ Populate classification of geocoded pixel types Parameters @@ -283,28 +306,32 @@ def percent_land_and_valid_pixels(self, cslc_h5py_root, pol): pol: str Polarization of the CSLC layer - ''' - percent_land_pixels, percent_valid_pixels = \ - self.compute_valid_land_and_pixel_percents(cslc_h5py_root, - pol) + """ + percent_land_pixels, percent_valid_pixels = ( + self.compute_valid_land_and_pixel_percents(cslc_h5py_root, pol) + ) pxl_qa_items = [ - Meta('percent_land_pixels', 
percent_land_pixels, - 'Percentage of output pixels labeled as land'), - Meta('percent_valid_pixels', percent_valid_pixels, - 'Percentage of output pixels are valid') + Meta( + "percent_land_pixels", + percent_land_pixels, + "Percentage of output pixels labeled as land", + ), + Meta( + "percent_valid_pixels", + percent_valid_pixels, + "Percentage of output pixels that are valid", + ), ] # create HDF5 group for pixel classification info - h5_pxl_path = f'{QA_PATH}/pixel_classification' + h5_pxl_path = f"{QA_PATH}/pixel_classification" pxl_group = cslc_h5py_root.require_group(h5_pxl_path) # write items to HDF5 and dict - _qa_items_to_h5_and_dict(pxl_group, self.pixel_percentage_dict, - pxl_qa_items) - + _qa_items_to_h5_and_dict(pxl_group, self.pixel_percentage_dict, pxl_qa_items) def populate_rfi_dict(self, cslc_h5py_root, bursts): - ''' + """ Placeholder for populating SAFE RFI information Parameters @@ -313,29 +340,37 @@ def populate_rfi_dict(self, cslc_h5py_root, bursts): Root of CSLC HDF5 bursts: list[Sentinel1BurstSlc] List of burst SLC objects with RFI info - ''' + """ for burst in bursts: is_rfi_info_available = burst.burst_rfi_info is not None - rfi_qa_items_pol = [Meta('is_rfi_info_available', - is_rfi_info_available, - 'Whether or not RFI information is available')] + rfi_qa_items_pol = [ + Meta( + "is_rfi_info_available", + is_rfi_info_available, + "Whether or not RFI information is available", + ) + ] if is_rfi_info_available: # Following key/values only assigned if RFI info is available rfi_info_list = [ - Meta('rfi_mitigation_performed', - burst.burst_rfi_info.rfi_mitigation_performed, - ('Activation strategy of RFI mitigation' - '["never", "BasedOnNoiseMeas", "always"]')), - Meta('rfi_mitigation_domain', - burst.burst_rfi_info.rfi_mitigation_domain, - 'Domain the RFI mitigation step was performed') + Meta( + "rfi_mitigation_performed", + burst.burst_rfi_info.rfi_mitigation_performed, + "Activation strategy of RFI mitigation " + '["never", "BasedOnNoiseMeas", "always"]', + ), + Meta( + "rfi_mitigation_domain", + burst.burst_rfi_info.rfi_mitigation_domain, + "Domain the RFI mitigation step was performed", + ), ] rfi_qa_items_pol += rfi_info_list # create HDF5 group for RFI info for current polarization - h5_rfi_path = f'{QA_PATH}/rfi_information/{burst.polarization}' + h5_rfi_path = f"{QA_PATH}/rfi_information/{burst.polarization}" rfi_group = cslc_h5py_root.require_group(h5_rfi_path) # write items to HDF5 and dict @@ -350,93 +385,135 @@ def populate_rfi_dict(self, cslc_h5py_root, bursts): # Add the metadata of the burst RFI report rfi_burst_report_list = [ - Meta('swath', - rfi_burst_report['swath'], - 'Swath of the burst'), - Meta('azimuth_time', - datetime.datetime.strftime(rfi_burst_report['azimuthTime'], - TIME_STR_FMT), - 'Azimuth time of the burst report'), - Meta('in_band_out_band_power_ratio', - rfi_burst_report['inBandOutBandPowerRatio'], - 'Ratio between the in-band and out-of-band power of the burst') + Meta("swath", rfi_burst_report["swath"], "Swath of the burst"), + Meta( + "azimuth_time", + datetime.datetime.strftime( + rfi_burst_report["azimuthTime"], TIME_STR_FMT + ), + "Azimuth time of the burst report", + ), + Meta( + "in_band_out_band_power_ratio", + rfi_burst_report["inBandOutBandPowerRatio"], + "Ratio between the in-band and out-of-band power of the burst", + ), ] - self.rfi_dict['rfi_burst_report'] = {} - rfi_burst_report_group = rfi_group.require_group('rfi_burst_report') - _qa_items_to_h5_and_dict(rfi_burst_report_group, - self.rfi_dict['rfi_burst_report'], -
rfi_burst_report_list) + self.rfi_dict["rfi_burst_report"] = {} + rfi_burst_report_group = rfi_group.require_group("rfi_burst_report") + _qa_items_to_h5_and_dict( + rfi_burst_report_group, + self.rfi_dict["rfi_burst_report"], + rfi_burst_report_list, + ) # Take care of the time domain portion of the burst report - if 'timeDomainRfiReport' in rfi_burst_report.keys(): - time_domain_report = rfi_burst_report['timeDomainRfiReport'] + if "timeDomainRfiReport" in rfi_burst_report.keys(): + time_domain_report = rfi_burst_report["timeDomainRfiReport"] burst_time_domain_report_item = [ - Meta('percentage_affected_lines', - time_domain_report['percentageAffectedLines'], - 'Percentage of level-0 lines affected by RFI.'), - Meta('avg_percentage_affected_samples', - time_domain_report['avgPercentageAffectedSamples'], - 'Average percentage of affected level-0 samples in the lines containing RFI'), - Meta('max_percentage_affected_samples', - time_domain_report['maxPercentageAffectedSamples'], - 'Maximum percentage of level-0 samples affected by RFI in the same line'), + Meta( + "percentage_affected_lines", + time_domain_report["percentageAffectedLines"], + "Percentage of level-0 lines affected by RFI.", + ), + Meta( + "avg_percentage_affected_samples", + time_domain_report["avgPercentageAffectedSamples"], + "Average percentage of affected level-0 samples in the lines" + " containing RFI", + ), + Meta( + "max_percentage_affected_samples", + time_domain_report["maxPercentageAffectedSamples"], + "Maximum percentage of level-0 samples affected by RFI in the" + " same line", + ), ] - self.rfi_dict['rfi_burst_report']['time_domain_rfi_report'] = {} - rfi_burst_report_time_domain_group =\ - rfi_burst_report_group.require_group('time_domain_rfi_report') - _qa_items_to_h5_and_dict(rfi_burst_report_time_domain_group, - self.rfi_dict['rfi_burst_report']['time_domain_rfi_report'], - burst_time_domain_report_item) + self.rfi_dict["rfi_burst_report"]["time_domain_rfi_report"] = {} + rfi_burst_report_time_domain_group = ( + rfi_burst_report_group.require_group("time_domain_rfi_report") + ) + _qa_items_to_h5_and_dict( + rfi_burst_report_time_domain_group, + self.rfi_dict["rfi_burst_report"]["time_domain_rfi_report"], + burst_time_domain_report_item, + ) # Take care of the frequency time domain portion of the burst report - if 'frequencyDomainRfiBurstReport' in rfi_burst_report.keys(): - freq_domain_report = rfi_burst_report['frequencyDomainRfiBurstReport'] + if "frequencyDomainRfiBurstReport" in rfi_burst_report.keys(): + freq_domain_report = rfi_burst_report["frequencyDomainRfiBurstReport"] burst_freq_domain_report_item = [ - Meta('num_sub_blocks', - freq_domain_report['numSubBlocks'], - 'Number of sub-blocks in the current burst'), - Meta('sub_block_size', - freq_domain_report['subBlockSize'], - 'Number of lines in each sub-block'), - Meta('percentage_blocks_persistent_rfi', - freq_domain_report['percentageBlocksPersistentRfi'], - ('Percentage of processing blocks affected by persistent RFI. 
' - 'In this case the RFI detection is performed on the mean PSD of ' - 'each processing block')), - Meta('max_percentage_bw_affected_persistent_rfi', - freq_domain_report['maxPercentageBWAffectedPersistentRfi'], - ('Max percentage bandwidth affected by ' - 'persistent RFI in a single processing block.')) + Meta( + "num_sub_blocks", + freq_domain_report["numSubBlocks"], + "Number of sub-blocks in the current burst", + ), + Meta( + "sub_block_size", + freq_domain_report["subBlockSize"], + "Number of lines in each sub-block", + ), + Meta( + "percentage_blocks_persistent_rfi", + freq_domain_report["percentageBlocksPersistentRfi"], + "Percentage of processing blocks affected by persistent RFI. In" + " this case the RFI detection is performed on the mean PSD of" + " each processing block", + ), + Meta( + "max_percentage_bw_affected_persistent_rfi", + freq_domain_report["maxPercentageBWAffectedPersistentRfi"], + "Max percentage bandwidth affected by " + "persistent RFI in a single processing block.", + ), ] - self.rfi_dict['rfi_burst_report']['frequency_domain_rfi_report'] = {} - rfi_burst_report_freq_domain_group = rfi_burst_report_group.require_group('frequency_domain_rfi_report') - _qa_items_to_h5_and_dict(rfi_burst_report_freq_domain_group, - self.rfi_dict['rfi_burst_report']['frequency_domain_rfi_report'], - burst_freq_domain_report_item) + self.rfi_dict["rfi_burst_report"]["frequency_domain_rfi_report"] = {} + rfi_burst_report_freq_domain_group = ( + rfi_burst_report_group.require_group("frequency_domain_rfi_report") + ) + _qa_items_to_h5_and_dict( + rfi_burst_report_freq_domain_group, + self.rfi_dict["rfi_burst_report"]["frequency_domain_rfi_report"], + burst_freq_domain_report_item, + ) # Take care of isolated RFI report inside frequency burst RFI report - isolated_rfi_report = freq_domain_report['isolatedRfiReport'] + isolated_rfi_report = freq_domain_report["isolatedRfiReport"] isolated_report_item = [ - Meta('percentage_affected_lines', - isolated_rfi_report['percentageAffectedLines'], - 'Percentage of level-0 lines affected by isolated RFI'), - Meta('max_percentage_affected_bw', - isolated_rfi_report['maxPercentageAffectedBW'], - 'Max. percentage of bandwidth affected by isolated RFI in a single line') + Meta( + "percentage_affected_lines", + isolated_rfi_report["percentageAffectedLines"], + "Percentage of level-0 lines affected by isolated RFI", + ), + Meta( + "max_percentage_affected_bw", + isolated_rfi_report["maxPercentageAffectedBW"], + "Max. 
percentage of bandwidth affected by isolated RFI in a" + " single line", + ), ] - self.rfi_dict['rfi_burst_report']['time_domain_rfi_report']['isolated_rfi_report'] = {} - isolated_rfi_report_group = rfi_burst_report_freq_domain_group.require_group('isolated_rfi_report') - _qa_items_to_h5_and_dict(isolated_rfi_report_group, - self.rfi_dict['rfi_burst_report']['time_domain_rfi_report']['isolated_rfi_report'], - isolated_report_item) - + self.rfi_dict["rfi_burst_report"]["time_domain_rfi_report"][ + "isolated_rfi_report" + ] = {} + isolated_rfi_report_group = ( + rfi_burst_report_freq_domain_group.require_group( + "isolated_rfi_report" + ) + ) + _qa_items_to_h5_and_dict( + isolated_rfi_report_group, + self.rfi_dict["rfi_burst_report"]["time_domain_rfi_report"][ + "isolated_rfi_report" + ], + isolated_report_item, + ) def set_orbit_type(self, cfg, cslc_h5py_root): - ''' + """ Populate QA orbit information Parameters @@ -445,49 +522,51 @@ def set_orbit_type(self, cfg, cslc_h5py_root): Runconfig dict containing orbit path cslc_h5py_root: h5py.File Root of CSLC HDF5 - ''' + """ orbit_file_path = Path(cfg.orbit_path[0]).name - if 'RESORB' in orbit_file_path: - orbit_type = 'restituted orbit file' - if 'POEORB' in orbit_file_path: - orbit_type = 'precise orbit file' + if "RESORB" in orbit_file_path: + orbit_type = "restituted orbit file" + if "POEORB" in orbit_file_path: + orbit_type = "precise orbit file" orbit_qa_items = [ - Meta('orbit_type', orbit_type, - 'Type of orbit file used for processing. ' - 'RESORB: restituted orbit ephemeris or POEORB: precise orbit ephemeris') + Meta( + "orbit_type", + orbit_type, + "Type of orbit file used for processing. " + "RESORB: restituted orbit ephemeris or POEORB: precise orbit ephemeris", + ) ] # create HDF5 group for orbit info - h5_orbit_path = f'{QA_PATH}/orbit_information' + h5_orbit_path = f"{QA_PATH}/orbit_information" orbit_group = cslc_h5py_root.require_group(h5_orbit_path) # write to HDF5 group orbit info _qa_items_to_h5_and_dict(orbit_group, self.orbit_dict, orbit_qa_items) - def write_qa_dicts_to_json(self, file_path): - ''' + """ Write computed stats in dict to JSON file Parameters ---------- file_path: str JSON file to write stats to - ''' + """ # combine all the dicts into one for output output_dict = { - 'raster_statistics': self.stats_dict, - 'pixel_classification_percentatges': self.pixel_percentage_dict, - 'rfi_information': self.rfi_dict, - 'orbit_information': self.orbit_dict} + "raster_statistics": self.stats_dict, + "pixel_classification_percentatges": self.pixel_percentage_dict, + "rfi_information": self.rfi_dict, + "orbit_information": self.orbit_dict, + } # write combined dict to JSON - with open(file_path, 'w') as f: + with open(file_path, "w") as f: json.dump(output_dict, f, indent=4) - def compute_valid_land_and_pixel_percents(self, cslc_h5py_root, pol): - ''' + """ Compute the percentage of valid pixels on land area Parameters @@ -506,23 +585,23 @@ def compute_valid_land_and_pixel_percents(self, cslc_h5py_root, pol): percent_valid_px: float Percentage of valid pixels in the geocoded burst area - ''' + """ # extract the geogrid information - epsg_cslc = int(cslc_h5py_root[f'{DATA_PATH}/projection'][()]) + epsg_cslc = int(cslc_h5py_root[f"{DATA_PATH}/projection"][()]) - x_spacing = float(cslc_h5py_root[f'{DATA_PATH}/x_spacing'][()]) - y_spacing = float(cslc_h5py_root[f'{DATA_PATH}/y_spacing'][()]) + x_spacing = float(cslc_h5py_root[f"{DATA_PATH}/x_spacing"][()]) + y_spacing = float(cslc_h5py_root[f"{DATA_PATH}/y_spacing"][()])
- x0 = list(cslc_h5py_root[f'{DATA_PATH}/x_coordinates'][()])[0] - x_spacing / 2 - y0 = list(cslc_h5py_root[f'{DATA_PATH}/y_coordinates'][()])[0] - y_spacing / 2 + x0 = list(cslc_h5py_root[f"{DATA_PATH}/x_coordinates"][()])[0] - x_spacing / 2 + y0 = list(cslc_h5py_root[f"{DATA_PATH}/y_coordinates"][()])[0] - y_spacing / 2 - cslc_array = np.array(cslc_h5py_root[f'{DATA_PATH}/{pol}']) + cslc_array = np.array(cslc_h5py_root[f"{DATA_PATH}/{pol}"]) height_cslc, width_cslc = cslc_array.shape - mask_land = _get_land_mask(epsg_cslc, - (x0, x_spacing, 0, y0, 0, y_spacing), - (height_cslc, width_cslc)) + mask_land = _get_land_mask( + epsg_cslc, (x0, x_spacing, 0, y0, 0, y_spacing), (height_cslc, width_cslc) + ) mask_geocoded_burst = _get_valid_pixel_mask(cslc_array) @@ -536,9 +615,8 @@ def compute_valid_land_and_pixel_percents(self, cslc_h5py_root, pol): return percent_valid_land_px, percent_valid_px - def compute_layover_shadow_pixel_percent(self, cslc_h5py_root): - ''' + """ Compute the percentage of layover, shadow, and layover/shadow pixels in the geocoded burst area @@ -558,27 +636,35 @@ def compute_layover_shadow_pixel_percent(self, cslc_h5py_root): percent_combined: float Percentage of the shadow and layover pixels in the geocoded burst area - ''' - layover_shadow_mask_array = cslc_h5py_root[f'{DATA_PATH}/layover_shadow_mask'][()] + """ + layover_shadow_mask_array = cslc_h5py_root[f"{DATA_PATH}/layover_shadow_mask"][ + () + ] mask_geocoded_burst = layover_shadow_mask_array != 127 n_unmasked_pxls = mask_geocoded_burst.sum() - mask_shadow_inside_burst = mask_geocoded_burst & (layover_shadow_mask_array == 1) + mask_shadow_inside_burst = mask_geocoded_burst & ( + layover_shadow_mask_array == 1 + ) percent_shadow = mask_shadow_inside_burst.sum() / n_unmasked_pxls * 100 - mask_layover_inside_burst = mask_geocoded_burst & (layover_shadow_mask_array == 2) + mask_layover_inside_burst = mask_geocoded_burst & ( + layover_shadow_mask_array == 2 + ) percent_layover = mask_layover_inside_burst.sum() / n_unmasked_pxls * 100 - mask_combined_inside_burst = mask_geocoded_burst & (layover_shadow_mask_array == 3) + mask_combined_inside_burst = mask_geocoded_burst & ( + layover_shadow_mask_array == 3 + ) percent_combined = mask_combined_inside_burst.sum() / n_unmasked_pxls * 100 return percent_shadow, percent_layover, percent_combined def _get_valid_pixel_mask(arr_cslc): - ''' + """ Get the binary index of the pixels in the geocoded burst area Parameters @@ -591,21 +677,19 @@ def _get_valid_pixel_mask(arr_cslc): valid_pixel_index: np.ndarray binary index that identifies the pixels in the geocoded burst area - ''' + """ mask_nan = np.isnan(arr_cslc) labeled_arr, _ = ndimage.label(mask_nan) - labels_along_edges = np.concatenate((labeled_arr[0, :], - labeled_arr[-1, :], - labeled_arr[:, 0], - labeled_arr[:, -1])) + labels_along_edges = np.concatenate( + (labeled_arr[0, :], labeled_arr[-1, :], labeled_arr[:, 0], labeled_arr[:, -1]) + ) # Filter out the valid pixels that touches the edges labels_along_edges = labels_along_edges[labels_along_edges != 0] labels_edge_list = list(set(labels_along_edges)) - # Initial binary index array. 
Filled with `True` valid_pixel_index = np.full(labeled_arr.shape, True) @@ -617,7 +701,7 @@ def _get_valid_pixel_mask(arr_cslc): def _get_land_mask(epsg_cslc: int, geotransform: tuple, shape_mask: tuple): - ''' + """ Get the land mask within the CSLC bounding box Parameters @@ -633,7 +717,7 @@ def _get_land_mask(epsg_cslc: int, geotransform: tuple, shape_mask: tuple): ------- mask_land: np.ndarray Raster Mask for land area. `1` is land, `0` otherwise - ''' + """ # Extract the land polygon ds_land = ogr.Open(LAND_GPKG_FILE, 0) layer_land = ds_land.GetLayer() @@ -679,21 +763,23 @@ def _get_land_mask(epsg_cslc: int, geotransform: tuple, shape_mask: tuple): intersection_land.Transform(transformer_land_to_cslc) # Build up a vector layer, and add a feature that has `intersection_land` as geometry - drv_intersection_polygon = ogr.GetDriverByName('Memory') - ds_intersection_polygon = drv_intersection_polygon.CreateDataSource(str(time.time_ns)) - layer_intersection = ds_intersection_polygon.CreateLayer('layer_intersection', - srs_cslc, - ogr.wkbPolygon) + drv_intersection_polygon = ogr.GetDriverByName("Memory") + ds_intersection_polygon = drv_intersection_polygon.CreateDataSource( + str(time.time_ns) + ) + layer_intersection = ds_intersection_polygon.CreateLayer( + "layer_intersection", srs_cslc, ogr.wkbPolygon + ) feature_defn = layer_intersection.GetLayerDefn() feature = ogr.Feature(feature_defn) feature.SetGeometry(intersection_land) layer_intersection.CreateFeature(feature) # Prepare the output layer for the rasterization - drv_raster_out = gdal.GetDriverByName('MEM') - rasterized_land = drv_raster_out.Create(str(time.time_ns), - shape_mask[1], shape_mask[0], - 1, gdal.GDT_Byte) + drv_raster_out = gdal.GetDriverByName("MEM") + rasterized_land = drv_raster_out.Create( + str(time.time_ns), shape_mask[1], shape_mask[0], 1, gdal.GDT_Byte + ) rasterized_land.SetGeoTransform(geotransform) rasterized_land.SetProjection(srs_cslc.ExportToWkt()) diff --git a/src/compass/s1_geo2rdr.py b/src/compass/s1_geo2rdr.py index 04b5dc70..04217407 100755 --- a/src/compass/s1_geo2rdr.py +++ b/src/compass/s1_geo2rdr.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -'''wrapper for geo2rdr''' +"""wrapper for geo2rdr""" import os import time @@ -25,7 +25,7 @@ def run(cfg: dict): """ module_name = get_module_name(__file__) info_channel = journal.info(f"{module_name}.run") - info_channel.log(f'Starting {module_name} burst') + info_channel.log(f"Starting {module_name} burst") # Tracking time elapsed for processing t_start = time.perf_counter() @@ -69,17 +69,16 @@ def run(cfg: dict): # Get topo layers from vrt ref_burst_path = cfg.reference_radar_info.path - topo_raster = isce3.io.Raster(f'{ref_burst_path}/topo.vrt') + topo_raster = isce3.io.Raster(f"{ref_burst_path}/topo.vrt") # Get radar grid and orbit rdr_grid = burst.as_isce3_radargrid() orbit = burst.orbit # Initialize geo2rdr object - geo2rdr_obj = geo2rdr(rdr_grid, orbit, ellipsoid, - isce3.core.LUT2d(), - threshold, iters, - blocksize) + geo2rdr_obj = geo2rdr( + rdr_grid, orbit, ellipsoid, isce3.core.LUT2d(), threshold, iters, blocksize + ) # Execute geo2rdr geo2rdr_obj.geo2rdr(topo_raster, out_paths.output_directory) @@ -93,8 +92,9 @@ def run(cfg: dict): parser = YamlArgparse() # Get a runconfig dict from command line arguments - cfg = RunConfig.load_from_yaml(parser.args.run_config_path, - workflow_name='s1_cslc_radar') + cfg = RunConfig.load_from_yaml( + parser.args.run_config_path, workflow_name="s1_cslc_radar" + ) # Run geo2rdr run(cfg) diff --git
a/src/compass/s1_geocode_metadata.py b/src/compass/s1_geocode_metadata.py index 34f89d8b..7520676e 100755 --- a/src/compass/s1_geocode_metadata.py +++ b/src/compass/s1_geocode_metadata.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -'''wrapper to geocode metadata layers''' +"""wrapper to geocode metadata layers""" import time @@ -12,37 +12,47 @@ from scipy.interpolate import InterpolatedUnivariateSpline from compass import s1_rdr2geo -from compass.s1_rdr2geo import (file_name_los_east, - file_name_los_north, file_name_local_incidence, - file_name_layover, file_name_x, - file_name_y, file_name_z) +from compass.s1_rdr2geo import ( + file_name_los_east, + file_name_los_north, + file_name_local_incidence, + file_name_layover, + file_name_x, + file_name_y, + file_name_z, +) from compass.s1_cslc_qa import QualityAssuranceCSLC from compass.utils.geo_runconfig import GeoRunConfig -from compass.utils.h5_helpers import (algorithm_metadata_to_h5group, - identity_to_h5group, - init_geocoded_dataset, - metadata_to_h5group, DATA_PATH, - ROOT_PATH) -from compass.utils.helpers import (bursts_grouping_generator, get_module_name, - get_time_delta_str, - OPERA_OPERATION_CONTACT_EMAIL) +from compass.utils.h5_helpers import ( + algorithm_metadata_to_h5group, + identity_to_h5group, + init_geocoded_dataset, + metadata_to_h5group, + DATA_PATH, + ROOT_PATH, +) +from compass.utils.helpers import ( + bursts_grouping_generator, + get_module_name, + get_time_delta_str, + OPERA_OPERATION_CONTACT_EMAIL, +) from compass.utils.yaml_argparse import YamlArgparse from compass.utils.radar_grid import get_decimated_rdr_grd -def _fix_layover_shadow_mask(static_layers_dict, h5_root, geo_grid, - output_params): - ''' +def _fix_layover_shadow_mask(static_layers_dict, h5_root, geo_grid, output_params): + """ kludge to correctly mask invalid pixels in geocoded layover shadow to address isce3::geocode::geocodeCov's inability to take in a user-defined invalid value. Layover shadow invalid value is 127 but isce3::geocode::geocodeCov uses 0, which conflicts with the value for non-layover, non-shadow pixels - ''' - dst_ds_name = 'layover_shadow_mask' + """ + dst_ds_name = "layover_shadow_mask" # find if a correctly masked dataset exists - correctly_masked_dataset_name = '' + correctly_masked_dataset_name = "" # only "enabled" from static_layers_dict.items() needed; ignore others for dataset_name, (enabled, *_) in static_layers_dict.items(): if enabled and dataset_name != dst_ds_name: @@ -51,27 +61,36 @@ def _fix_layover_shadow_mask(static_layers_dict, h5_root, geo_grid, if correctly_masked_dataset_name: # get mask from correctly masked dataset - correctly_masked_dataset_arr = \ - h5_root[f'{DATA_PATH}/{correctly_masked_dataset_name}'][()] + correctly_masked_dataset_arr = h5_root[ + f"{DATA_PATH}/{correctly_masked_dataset_name}" + ][()] mask = np.isnan(correctly_masked_dataset_arr) # use mask from above to correctly mask shadow layover # save existing to temp with mask - layover_shadow_path = f'{DATA_PATH}/{dst_ds_name}' + layover_shadow_path = f"{DATA_PATH}/{dst_ds_name}" temp_arr = h5_root[layover_shadow_path][()] temp_arr[mask] = 127 # delete existing and rewrite with masked data del h5_root[layover_shadow_path] - desc = 'Layover shadow mask. 0=no layover, no shadow; 1=shadow; 2=layover; 3=shadow and layover.' - _ = init_geocoded_dataset(h5_root[DATA_PATH], dst_ds_name, geo_grid, - dtype=None, - description=np.string_(desc), - data=temp_arr, output_cfg=output_params) + desc = ( + "Layover shadow mask. 
0=no layover, no shadow; 1=shadow; 2=layover;" + " 3=shadow and layover." + ) + _ = init_geocoded_dataset( + h5_root[DATA_PATH], + dst_ds_name, + geo_grid, + dtype=None, + description=np.string_(desc), + data=temp_arr, + output_cfg=output_params, + ) def run(cfg, burst, fetch_from_scratch=False): - ''' + """ Geocode metadata layers in single HDF5 Parameters @@ -82,7 +101,7 @@ def run(cfg, burst, fetch_from_scratch=False): Object containing burst parameters needed for geocoding fetch_from_scratch: bool If True grabs metadata layers from scratch dir - ''' + """ module_name = get_module_name(__file__) info_channel = journal.info(f"{module_name}.run") info_channel.log(f"Starting {module_name} burst") @@ -126,43 +145,71 @@ def run(cfg, burst, fetch_from_scratch=False): geocode_obj.numiter_geo2rdr = iters float_bytes = 4 block_size = lines_per_block * geo_grid.width * float_bytes - geocode_obj.geogrid(geo_grid.start_x, geo_grid.start_y, - geo_grid.spacing_x, geo_grid.spacing_y, - geo_grid.width, geo_grid.length, geo_grid.epsg) + geocode_obj.geogrid( + geo_grid.start_x, + geo_grid.start_y, + geo_grid.spacing_x, + geo_grid.spacing_y, + geo_grid.width, + geo_grid.length, + geo_grid.epsg, + ) # Init geotransform to be set in geocoded product - geotransform = [geo_grid.start_x, geo_grid.spacing_x, 0, - geo_grid.start_y, 0, geo_grid.spacing_y] + geotransform = [ + geo_grid.start_x, + geo_grid.spacing_x, + 0, + geo_grid.start_y, + 0, + geo_grid.spacing_y, + ] # Dict containing which layers to geocode and their respective file names # key: dataset name # value: (bool flag if dataset is to be written, raster layer name, description) - static_layers = \ - {file_name_x: (cfg.rdr2geo_params.compute_longitude, 'x', - 'Longitude coordinate in degrees'), - file_name_y: (cfg.rdr2geo_params.compute_latitude, 'y', - 'Latitude coordinate in degrees'), - file_name_z: (cfg.rdr2geo_params.compute_height, 'z', - 'Height in meters'), - file_name_local_incidence: (cfg.rdr2geo_params.compute_local_incidence_angle, - 'local_incidence_angle', - 'Local incidence angle in degrees'), - file_name_los_east: (cfg.rdr2geo_params.compute_ground_to_sat_east, - 'los_east', - 'East component of LOS unit vector from target to sensor'), - file_name_los_north: (cfg.rdr2geo_params.compute_ground_to_sat_north, - 'los_north', - 'North component of LOS unit vector from target to sensor'), - file_name_layover: (cfg.rdr2geo_params.compute_layover_shadow_mask, - 'layover_shadow_mask', - 'Layover shadow mask. 
0=no layover, no shadow; 1=shadow; 2=layover; 3=shadow and layover.') - } - - out_h5 = f'{out_paths.output_directory}/static_layers_{burst_id}.h5' - with h5py.File(out_h5, 'w') as h5_root: + static_layers = { + file_name_x: ( + cfg.rdr2geo_params.compute_longitude, + "x", + "Longitude coordinate in degrees", + ), + file_name_y: ( + cfg.rdr2geo_params.compute_latitude, + "y", + "Latitude coordinate in degrees", + ), + file_name_z: (cfg.rdr2geo_params.compute_height, "z", "Height in meters"), + file_name_local_incidence: ( + cfg.rdr2geo_params.compute_local_incidence_angle, + "local_incidence_angle", + "Local incidence angle in degrees", + ), + file_name_los_east: ( + cfg.rdr2geo_params.compute_ground_to_sat_east, + "los_east", + "East component of LOS unit vector from target to sensor", + ), + file_name_los_north: ( + cfg.rdr2geo_params.compute_ground_to_sat_north, + "los_north", + "North component of LOS unit vector from target to sensor", + ), + file_name_layover: ( + cfg.rdr2geo_params.compute_layover_shadow_mask, + "layover_shadow_mask", + ( + "Layover shadow mask. 0=no layover, no shadow; 1=shadow; 2=layover;" + " 3=shadow and layover." + ), + ), + } + + out_h5 = f"{out_paths.output_directory}/static_layers_{burst_id}.h5" + with h5py.File(out_h5, "w") as h5_root: # Global attributes for static layers - h5_root.attrs['conventions'] = "CF-1.8" + h5_root.attrs["conventions"] = "CF-1.8" h5_root.attrs["contact"] = np.string_(OPERA_OPERATION_CONTACT_EMAIL) h5_root.attrs["institution"] = np.string_("NASA JPL") h5_root.attrs["project_name"] = np.string_("OPERA") @@ -171,9 +218,14 @@ def run(cfg, burst, fetch_from_scratch=False): # write identity and metadata to HDF5 root_group = h5_root[ROOT_PATH] - metadata_to_h5group(root_group, burst, cfg, save_noise_and_cal=False, - save_processing_parameters=False) - identity_to_h5group(root_group, burst, cfg, 'Static layers CSLC-S1') + metadata_to_h5group( + root_group, + burst, + cfg, + save_noise_and_cal=False, + save_processing_parameters=False, + ) + identity_to_h5group(root_group, burst, cfg, "Static layers CSLC-S1") algorithm_metadata_to_h5group(root_group, is_static_layers=True) # Create group static_layers group under DATA_PATH for consistency with @@ -181,8 +233,11 @@ def run(cfg, burst, fetch_from_scratch=False): static_layer_data_group = h5_root.require_group(DATA_PATH) # Geocode designated layers - for dataset_name, (enabled, raster_file_name, - description) in static_layers.items(): + for dataset_name, ( + enabled, + raster_file_name, + description, + ) in static_layers.items(): if not enabled: continue @@ -190,43 +245,51 @@ def run(cfg, burst, fetch_from_scratch=False): dtype = np.single # layoverShadowMask is last option, no need to change data type # and interpolator afterwards - if dataset_name == 'layover_shadow_mask': - geocode_obj.data_interpolator = 'NEAREST' + if dataset_name == "layover_shadow_mask": + geocode_obj.data_interpolator = "NEAREST" dtype = np.byte # layover shadow is a char (no NaN char, 0 represents unmasked # value) # Create dataset with x/y coords/spacing and projection - topo_ds = init_geocoded_dataset(static_layer_data_group, - dataset_name, geo_grid, dtype, - description, - output_cfg=cfg.output_params) + topo_ds = init_geocoded_dataset( + static_layer_data_group, + dataset_name, + geo_grid, + dtype, + description, + output_cfg=cfg.output_params, + ) # Init output and input isce3.io.Raster objects for geocoding - output_raster = isce3.io.Raster(f"IH5:::ID={topo_ds.id.id}".encode("utf-8"), - update=True) - - 
input_raster = isce3.io.Raster(f'{input_path}/{raster_file_name}.tif') - - geocode_obj.geocode(radar_grid=radar_grid, - input_raster=input_raster, - output_raster=output_raster, - dem_raster=dem_raster, - output_mode=isce3.geocode.GeocodeOutputMode.INTERP, - min_block_size=block_size, - max_block_size=block_size) + output_raster = isce3.io.Raster( + f"IH5:::ID={topo_ds.id.id}".encode("utf-8"), update=True + ) + + input_raster = isce3.io.Raster(f"{input_path}/{raster_file_name}.tif") + + geocode_obj.geocode( + radar_grid=radar_grid, + input_raster=input_raster, + output_raster=output_raster, + dem_raster=dem_raster, + output_mode=isce3.geocode.GeocodeOutputMode.INTERP, + min_block_size=block_size, + max_block_size=block_size, + ) output_raster.set_geotransform(geotransform) output_raster.set_epsg(output_epsg) del input_raster del output_raster - if dataset_name == 'layover_shadow_mask': - _fix_layover_shadow_mask(static_layers, h5_root, geo_grid, - cfg.output_params) + if dataset_name == "layover_shadow_mask": + _fix_layover_shadow_mask( + static_layers, h5_root, geo_grid, cfg.output_params + ) if cfg.quality_assurance_params.perform_qa: cslc_qa = QualityAssuranceCSLC() - with h5py.File(out_h5, 'a') as h5_root: + with h5py.File(out_h5, "a") as h5_root: cslc_qa.compute_static_layer_stats(h5_root, cfg.rdr2geo_params) cslc_qa.shadow_pixel_classification(h5_root) cslc_qa.set_orbit_type(cfg, h5_root) @@ -234,13 +297,20 @@ def run(cfg, burst, fetch_from_scratch=False): cslc_qa.write_qa_dicts_to_json(out_paths.stats_json_path) dt = get_time_delta_str(t_start) - info_channel.log( - f"{module_name} burst successfully ran in {dt} (hr:min:sec)") - - -def geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict, - output_params, dec_factor_x_rng=20, dec_factor_y_az=5): - ''' + info_channel.log(f"{module_name} burst successfully ran in {dt} (hr:min:sec)") + + +def geocode_luts( + geo_burst_h5, + burst, + cfg, + dst_group_path, + item_dict, + output_params, + dec_factor_x_rng=20, + dec_factor_y_az=5, +): + """ Geocode the radiometric calibration parameters, and write them into output HDF5. 
@@ -262,7 +332,7 @@ def geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict, dec_factor_y_az: int Decimation factor to downsample the LUT in y or azimuth direction - ''' + """ dem_raster = isce3.io.Raster(cfg.dem) epsg = dem_raster.get_epsg() proj = isce3.core.make_projection(epsg) @@ -281,13 +351,14 @@ def geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict, # generate decimated radar and geo grids for LUT(s) decimated_geogrid = isce3.product.GeoGridParameters( - geo_grid.start_x, - geo_grid.start_y, - geo_grid.spacing_x * dec_factor_x_rng, - geo_grid.spacing_y * dec_factor_y_az, - int(np.ceil(geo_grid.width // dec_factor_x_rng)), - int(np.ceil(geo_grid.length // dec_factor_y_az)), - geo_grid.epsg) + geo_grid.start_x, + geo_grid.start_y, + geo_grid.spacing_x * dec_factor_x_rng, + geo_grid.spacing_y * dec_factor_y_az, + int(np.ceil(geo_grid.width // dec_factor_x_rng)), + int(np.ceil(geo_grid.length // dec_factor_y_az)), + geo_grid.epsg, + ) # initialize geocode object geocode_obj = isce3.geocode.GeocodeFloat32() @@ -296,70 +367,80 @@ def geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict, geocode_obj.doppler = isce3.core.LUT2d() geocode_obj.threshold_geo2rdr = threshold geocode_obj.numiter_geo2rdr = iters - geocode_obj.geogrid(decimated_geogrid.start_x, - decimated_geogrid.start_y, - decimated_geogrid.spacing_x, - decimated_geogrid.spacing_y, - decimated_geogrid.width, - decimated_geogrid.length, - decimated_geogrid.epsg) - dst_group =\ - geo_burst_h5.require_group(dst_group_path) - - gdal_geotiff_driver = gdal.GetDriverByName('GTiff') + geocode_obj.geogrid( + decimated_geogrid.start_x, + decimated_geogrid.start_y, + decimated_geogrid.spacing_x, + decimated_geogrid.spacing_y, + decimated_geogrid.width, + decimated_geogrid.length, + decimated_geogrid.epsg, + ) + dst_group = geo_burst_h5.require_group(dst_group_path) + + gdal_geotiff_driver = gdal.GetDriverByName("GTiff") # Define the radargrid for LUT interpolation # The resultant radargrid will have # the very first and the last LUT values be included in the grid. 
- radargrid_interp = get_decimated_rdr_grd(burst.as_isce3_radargrid(), - dec_factor_x_rng, dec_factor_y_az) + radargrid_interp = get_decimated_rdr_grd( + burst.as_isce3_radargrid(), dec_factor_x_rng, dec_factor_y_az + ) range_px_interp_vec = np.linspace(0, burst.width - 1, radargrid_interp.width) azimuth_px_interp_vec = np.linspace(0, burst.length - 1, radargrid_interp.length) - for item_name, (rg_lut_grid, rg_lut_val, - az_lut_grid, az_lut_val) in item_dict.items(): + for item_name, ( + rg_lut_grid, + rg_lut_val, + az_lut_grid, + az_lut_val, + ) in item_dict.items(): # prepare input dataset in output HDF5 - init_geocoded_dataset(dst_group, - item_name, - decimated_geogrid, - 'float32', - f'geocoded {item_name}', - output_cfg=cfg.output_params) + init_geocoded_dataset( + dst_group, + item_name, + decimated_geogrid, + "float32", + f"geocoded {item_name}", + output_cfg=cfg.output_params, + ) - dst_dataset = geo_burst_h5[f'{dst_group_path}/{item_name}'] + dst_dataset = geo_burst_h5[f"{dst_group_path}/{item_name}"] # prepare output raster - geocoded_cal_lut_raster =\ - isce3.io.Raster( - f"IH5:::ID={dst_dataset.id.id}".encode("utf-8"), update=True) + geocoded_cal_lut_raster = isce3.io.Raster( + f"IH5:::ID={dst_dataset.id.id}".encode("utf-8"), update=True + ) if az_lut_grid is not None: azimuth_px_interp_vec += az_lut_grid[0] # Get the interpolated range LUT - param_interp_obj_rg = InterpolatedUnivariateSpline(rg_lut_grid, - rg_lut_val, - k=1) + param_interp_obj_rg = InterpolatedUnivariateSpline(rg_lut_grid, rg_lut_val, k=1) range_lut_interp = param_interp_obj_rg(range_px_interp_vec) # Get the interpolated azimuth LUT if az_lut_grid is None or az_lut_val is None: azimuth_lut_interp = np.ones(radargrid_interp.length) else: - param_interp_obj_az = InterpolatedUnivariateSpline(az_lut_grid, - az_lut_val, - k=1) + param_interp_obj_az = InterpolatedUnivariateSpline( + az_lut_grid, az_lut_val, k=1 + ) azimuth_lut_interp = param_interp_obj_az(azimuth_px_interp_vec) - lut_arr = np.matmul(azimuth_lut_interp[..., np.newaxis], - range_lut_interp[np.newaxis, ...]) - - lut_path = f'{scratch_path}/{item_name}_radargrid.tif' - lut_gdal_raster = gdal_geotiff_driver.Create(lut_path, - radargrid_interp.width, - radargrid_interp.length, - 1, gdal.GDT_Float32) + lut_arr = np.matmul( + azimuth_lut_interp[..., np.newaxis], range_lut_interp[np.newaxis, ...] 
+ ) + + lut_path = f"{scratch_path}/{item_name}_radargrid.tif" + lut_gdal_raster = gdal_geotiff_driver.Create( + lut_path, + radargrid_interp.width, + radargrid_interp.length, + 1, + gdal.GDT_Float32, + ) lut_band = lut_gdal_raster.GetRasterBand(1) lut_band.WriteArray(lut_arr) lut_band.FlushCache() @@ -368,15 +449,22 @@ def geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict, input_raster = isce3.io.Raster(lut_path) # geocode then set transform and EPSG in output raster - geocode_obj.geocode(radar_grid=radargrid_interp, - input_raster=input_raster, - output_raster=geocoded_cal_lut_raster, - dem_raster=dem_raster, - output_mode=isce3.geocode.GeocodeOutputMode.INTERP) - - geotransform = \ - [decimated_geogrid.start_x, decimated_geogrid.spacing_x, 0, - decimated_geogrid.start_y, 0, decimated_geogrid.spacing_y] + geocode_obj.geocode( + radar_grid=radargrid_interp, + input_raster=input_raster, + output_raster=geocoded_cal_lut_raster, + dem_raster=dem_raster, + output_mode=isce3.geocode.GeocodeOutputMode.INTERP, + ) + + geotransform = [ + decimated_geogrid.start_x, + decimated_geogrid.spacing_x, + 0, + decimated_geogrid.start_y, + 0, + decimated_geogrid.spacing_y, + ] geocoded_cal_lut_raster.set_geotransform(geotransform) geocoded_cal_lut_raster.set_epsg(epsg) @@ -385,10 +473,10 @@ def geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict, del geocoded_cal_lut_raster -def geocode_calibration_luts(geo_burst_h5, burst, cfg, - dec_factor_x_rng=20, - dec_factor_y_az=5): - ''' +def geocode_calibration_luts( + geo_burst_h5, burst, cfg, dec_factor_x_rng=20, dec_factor_y_az=5 +): + """ Geocode the radiometric calibration parameters, and write them into output HDF5. @@ -406,35 +494,44 @@ def geocode_calibration_luts(geo_burst_h5, burst, cfg, dec_factor_y_az: int Decimation factor to downsample the LUT in y or azimuth direction - ''' - dst_group_path = f'{ROOT_PATH}/metadata/calibration_information' + """ + dst_group_path = f"{ROOT_PATH}/metadata/calibration_information" - #[Range grid of the source in pixel, + # [Range grid of the source in pixel, # range LUT value, # azimuth grid of the source in pixel, # azimuth LUT value] item_dict_calibration = { - 'gamma':[burst.burst_calibration.pixel, - burst.burst_calibration.gamma, - None, - None], - 'sigma_naught':[burst.burst_calibration.pixel, - burst.burst_calibration.sigma_naught, - None, - None], - 'dn':[burst.burst_calibration.pixel, - burst.burst_calibration.dn, - None, - None] - } - geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict_calibration, - cfg.output_params, dec_factor_x_rng, dec_factor_y_az) - - -def geocode_noise_luts(geo_burst_h5, burst, cfg, - dec_factor_x_rng=20, - dec_factor_y_az=5): - ''' + "gamma": [ + burst.burst_calibration.pixel, + burst.burst_calibration.gamma, + None, + None, + ], + "sigma_naught": [ + burst.burst_calibration.pixel, + burst.burst_calibration.sigma_naught, + None, + None, + ], + "dn": [burst.burst_calibration.pixel, burst.burst_calibration.dn, None, None], + } + geocode_luts( + geo_burst_h5, + burst, + cfg, + dst_group_path, + item_dict_calibration, + cfg.output_params, + dec_factor_x_rng, + dec_factor_y_az, + ) + + +def geocode_noise_luts( + geo_burst_h5, burst, cfg, dec_factor_x_rng=20, dec_factor_y_az=5 +): + """ Geocode the noise LUT, and write that into output HDF5. 
Parameters @@ -451,24 +548,36 @@ def geocode_noise_luts(geo_burst_h5, burst, cfg, dec_factor_y_az: int Decimation factor to downsample the LUT in y or azimuth direction - ''' - dst_group_path = f'{ROOT_PATH}/metadata/noise_information' - item_dict_noise = {'thermal_noise_lut': [burst.burst_noise.range_pixel, - burst.burst_noise.range_lut, - burst.burst_noise.azimuth_line, - burst.burst_noise.azimuth_lut] - } - geocode_luts(geo_burst_h5, burst, cfg, dst_group_path, item_dict_noise, - cfg.output_params, dec_factor_x_rng, dec_factor_y_az) + """ + dst_group_path = f"{ROOT_PATH}/metadata/noise_information" + item_dict_noise = { + "thermal_noise_lut": [ + burst.burst_noise.range_pixel, + burst.burst_noise.range_lut, + burst.burst_noise.azimuth_line, + burst.burst_noise.azimuth_lut, + ] + } + geocode_luts( + geo_burst_h5, + burst, + cfg, + dst_group_path, + item_dict_noise, + cfg.output_params, + dec_factor_x_rng, + dec_factor_y_az, + ) if __name__ == "__main__": - ''' run geocode metadata layers from command line''' + """run geocode metadata layers from command line""" parser = YamlArgparse() # Get a runconfig dict from command line args - cfg = GeoRunConfig.load_from_yaml(parser.args.run_config_path, - workflow_name='s1_cslc_geo') + cfg = GeoRunConfig.load_from_yaml( + parser.args.run_config_path, workflow_name="s1_cslc_geo" + ) for _, bursts in bursts_grouping_generator(cfg.bursts): burst = bursts[0] diff --git a/src/compass/s1_geocode_slc.py b/src/compass/s1_geocode_slc.py index 684d2841..2bb1158c 100755 --- a/src/compass/s1_geocode_slc.py +++ b/src/compass/s1_geocode_slc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -'''wrapper for geocoded CSLC''' +"""wrapper for geocoded CSLC""" import re @@ -18,20 +18,27 @@ from compass.utils.browse_image import make_browse_image from compass.utils.elevation_antenna_pattern import apply_eap_correction from compass.utils.geo_runconfig import GeoRunConfig -from compass.utils.h5_helpers import (algorithm_metadata_to_h5group, - corrections_to_h5group, - identity_to_h5group, - init_geocoded_dataset, - metadata_to_h5group, - DATA_PATH, METADATA_PATH, ROOT_PATH) -from compass.utils.helpers import (bursts_grouping_generator, - get_time_delta_str, get_module_name, - OPERA_OPERATION_CONTACT_EMAIL) +from compass.utils.h5_helpers import ( + algorithm_metadata_to_h5group, + corrections_to_h5group, + identity_to_h5group, + init_geocoded_dataset, + metadata_to_h5group, + DATA_PATH, + METADATA_PATH, + ROOT_PATH, +) +from compass.utils.helpers import ( + bursts_grouping_generator, + get_time_delta_str, + get_module_name, + OPERA_OPERATION_CONTACT_EMAIL, +) from compass.utils.lut import cumulative_correction_luts from compass.utils.yaml_argparse import YamlArgparse # TEMPORARY MEASURE TODO refactor types functions to isce3 namespace -from isce3.core.types import (truncate_mantissa, to_complex32) +from isce3.core.types import truncate_mantissa, to_complex32 def _wrap_phase(phase_arr): @@ -40,7 +47,7 @@ def _wrap_phase(phase_arr): def run(cfg: GeoRunConfig): - ''' + """ Run geocode burst workflow with user-defined args stored in dictionary runconfig *cfg* @@ -48,7 +55,7 @@ def run(cfg: GeoRunConfig): --------- cfg: GeoRunConfig GeoRunConfig object with user runconfig options - ''' + """ module_name = get_module_name(__file__) info_channel = journal.info(f"{module_name}.run") info_channel.log(f"Starting {module_name} burst") @@ -69,7 +76,7 @@ def run(cfg: GeoRunConfig): geo_grid = cfg.geogrids[burst_id] out_shape = (geo_grid.length, geo_grid.width) - info_channel.log(f'Starting 
geocoding of {burst_id} for {date_str}') + info_channel.log(f"Starting geocoding of {burst_id} for {date_str}") # Reinitialize the dem raster per burst to prevent raster artifacts # caused by modification in geocodeSlc @@ -88,15 +95,17 @@ def run(cfg: GeoRunConfig): # If enabled, get range and azimuth LUTs t_corrections = time.perf_counter() if cfg.lut_params.enabled: - rg_lut, az_lut = \ - cumulative_correction_luts(burst, dem_path=cfg.dem, - tec_path=cfg.tec_file, - scratch_path=scratch_path, - weather_model_path=cfg.weather_model_file, - rg_step=cfg.lut_params.range_spacing, - az_step=cfg.lut_params.azimuth_spacing, - delay_type=cfg.tropo_params.delay_type, - geo2rdr_params=cfg.geo2rdr_params) + rg_lut, az_lut = cumulative_correction_luts( + burst, + dem_path=cfg.dem, + tec_path=cfg.tec_file, + scratch_path=scratch_path, + weather_model_path=cfg.weather_model_file, + rg_step=cfg.lut_params.range_spacing, + az_step=cfg.lut_params.azimuth_spacing, + delay_type=cfg.tropo_params.delay_type, + geo2rdr_params=cfg.geo2rdr_params, + ) else: rg_lut = isce3.core.LUT2d() az_lut = isce3.core.LUT2d() @@ -110,16 +119,18 @@ def run(cfg: GeoRunConfig): az_carrier_poly2d = burst.get_az_carrier_poly() # Extract burst boundaries - b_bounds = np.s_[burst.first_valid_line:burst.last_valid_line, - burst.first_valid_sample:burst.last_valid_sample] + b_bounds = np.s_[ + burst.first_valid_line : burst.last_valid_line, + burst.first_valid_sample : burst.last_valid_sample, + ] # Create sliced radar grid representing valid region of the burst sliced_radar_grid = burst.as_isce3_radargrid()[b_bounds] output_hdf5 = out_paths.hdf5_path - with h5py.File(output_hdf5, 'w') as geo_burst_h5: - geo_burst_h5.attrs['conventions'] = "CF-1.8" + with h5py.File(output_hdf5, "w") as geo_burst_h5: + geo_burst_h5.attrs["conventions"] = "CF-1.8" geo_burst_h5.attrs["contact"] = np.string_(OPERA_OPERATION_CONTACT_EMAIL) geo_burst_h5.attrs["institution"] = np.string_("NASA JPL") geo_burst_h5.attrs["project_name"] = np.string_("OPERA") @@ -128,7 +139,7 @@ def run(cfg: GeoRunConfig): # add type to root for GDAL recognition of datasets ctype = h5py.h5t.py_create(np.complex64) - ctype.commit(geo_burst_h5['/'].id, np.string_('complex64')) + ctype.commit(geo_burst_h5["/"].id, np.string_("complex64")) grid_group = geo_burst_h5.require_group(DATA_PATH) check_eap = is_eap_correction_necessary(burst.ipf_version) @@ -145,19 +156,18 @@ def run(cfg: GeoRunConfig): pol = burst.polarization # Load the input burst SLC - temp_slc_path = f'{scratch_path}/{out_paths.file_name_pol}_temp.vrt' + temp_slc_path = f"{scratch_path}/{out_paths.file_name_pol}_temp.vrt" burst.slc_to_vrt_file(temp_slc_path) # Apply EAP correction if necessary if check_eap.phase_correction: - temp_slc_path_corrected = \ - temp_slc_path.replace('_temp.vrt', - '_corrected_temp.rdr') + temp_slc_path_corrected = temp_slc_path.replace( + "_temp.vrt", "_corrected_temp.rdr" + ) - apply_eap_correction(burst, - temp_slc_path, - temp_slc_path_corrected, - check_eap) + apply_eap_correction( + burst, temp_slc_path, temp_slc_path_corrected, check_eap + ) # Replace the input burst if the correction is applied temp_slc_path = temp_slc_path_corrected @@ -168,15 +178,20 @@ def run(cfg: GeoRunConfig): rdr_data_blks.append(rdr_dataset.ReadAsArray()) # Prepare output dataset of current polarization in HDF5 - geo_ds = init_geocoded_dataset(grid_group, pol, geo_grid, - 'complex64', - f'{pol} geocoded CSLC image', - output_cfg=cfg.output_params) + geo_ds = init_geocoded_dataset( + grid_group, + pol, + 
geo_grid, + "complex64", + f"{pol} geocoded CSLC image", + output_cfg=cfg.output_params, + ) geo_datasets.append(geo_ds) # Init geocoded output blocks/arrays lists to NaN geo_data_blks.append( - np.full(out_shape, np.nan + 1j * np.nan).astype(np.complex64)) + np.full(out_shape, np.nan + 1j * np.nan).astype(np.complex64) + ) dt_prep = get_time_delta_str(t_prep) @@ -185,89 +200,121 @@ def run(cfg: GeoRunConfig): # Declare names, types, and descriptions of carrier and flatten # outputs - phase_names = ['azimuth_carrier_phase', 'flattening_phase'] - phase_descrs = ['azimuth carrier phase', 'flattening phase'] + phase_names = ["azimuth_carrier_phase", "flattening_phase"] + phase_descrs = ["azimuth carrier phase", "flattening phase"] # Prepare arrays and datasets for carrier phase and flattening # phase - ((carrier_phase_data_blk, carrier_phase_ds), - (flatten_phase_data_blk, flatten_phase_ds)) = \ - [(np.full(out_shape, np.nan).astype(np.float64), - init_geocoded_dataset(grid_group, ds_name, geo_grid, - np.float64, ds_desc, - output_cfg=cfg.output_params)) - for ds_name, ds_desc in zip(phase_names, phase_descrs)] + ( + (carrier_phase_data_blk, carrier_phase_ds), + (flatten_phase_data_blk, flatten_phase_ds), + ) = [ + ( + np.full(out_shape, np.nan).astype(np.float64), + init_geocoded_dataset( + grid_group, + ds_name, + geo_grid, + np.float64, + ds_desc, + output_cfg=cfg.output_params, + ), + ) + for ds_name, ds_desc in zip(phase_names, phase_descrs) + ] # Geocode - isce3.geocode.geocode_slc(geo_data_blocks=geo_data_blks, - rdr_data_blocks=rdr_data_blks, - dem_raster=dem_raster, - radargrid=radar_grid, - geogrid=geo_grid, orbit=orbit, - native_doppler=native_doppler, - image_grid_doppler=image_grid_doppler, - ellipsoid=ellipsoid, - threshold_geo2rdr=threshold, - num_iter_geo2rdr=iters, - sliced_radargrid=sliced_radar_grid, - first_azimuth_line=0, - first_range_sample=0, - flatten=flatten, reramp=True, - az_carrier=az_carrier_poly2d, - rg_carrier=isce3.core.Poly2d(np.array([0])), - az_time_correction=az_lut, - srange_correction=rg_lut, - carrier_phase_block=carrier_phase_data_blk, - flatten_phase_block=flatten_phase_data_blk) + isce3.geocode.geocode_slc( + geo_data_blocks=geo_data_blks, + rdr_data_blocks=rdr_data_blks, + dem_raster=dem_raster, + radargrid=radar_grid, + geogrid=geo_grid, + orbit=orbit, + native_doppler=native_doppler, + image_grid_doppler=image_grid_doppler, + ellipsoid=ellipsoid, + threshold_geo2rdr=threshold, + num_iter_geo2rdr=iters, + sliced_radargrid=sliced_radar_grid, + first_azimuth_line=0, + first_range_sample=0, + flatten=flatten, + reramp=True, + az_carrier=az_carrier_poly2d, + rg_carrier=isce3.core.Poly2d(np.array([0])), + az_time_correction=az_lut, + srange_correction=rg_lut, + carrier_phase_block=carrier_phase_data_blk, + flatten_phase_block=flatten_phase_data_blk, + ) # write geocoded data blocks to respective HDF5 datasets - geo_datasets.extend([carrier_phase_ds, - flatten_phase_ds]) - geo_data_blks.extend([_wrap_phase(carrier_phase_data_blk), - _wrap_phase(flatten_phase_data_blk)]) - for cslc_dataset, cslc_data_blk in zip(geo_datasets, - geo_data_blks): + geo_datasets.extend([carrier_phase_ds, flatten_phase_ds]) + geo_data_blks.extend( + [ + _wrap_phase(carrier_phase_data_blk), + _wrap_phase(flatten_phase_data_blk), + ] + ) + for cslc_dataset, cslc_data_blk in zip(geo_datasets, geo_data_blks): # only convert/modify output if type not 'complex64' # do nothing if type is 'complex64' output_type = cfg.output_params.cslc_data_type - if output_type == 'complex32': + if 
output_type == "complex32": cslc_data_blk = to_complex32(cslc_data_blk) - if output_type == 'complex64_zero_mantissa': + if output_type == "complex64_zero_mantissa": # use default nonzero_mantissa_bits = 10 below truncate_mantissa(cslc_data_blk) # write to data block HDF5 cslc_dataset.write_direct(cslc_data_blk) - del dem_raster # modified in geocodeSlc + del dem_raster # modified in geocodeSlc dt_geocoding = get_time_delta_str(t_geocoding) # Save burst corrections and metadata with new h5py File instance # because io.Raster things t_qa_meta = time.perf_counter() - with h5py.File(output_hdf5, 'a') as geo_burst_h5: + with h5py.File(output_hdf5, "a") as geo_burst_h5: root_group = geo_burst_h5[ROOT_PATH] - identity_to_h5group(root_group, burst, cfg, 'CSLC-S1') - - metadata_to_h5group(root_group, burst, cfg, - eap_correction_applied=check_eap.phase_correction) + identity_to_h5group(root_group, burst, cfg, "CSLC-S1") + + metadata_to_h5group( + root_group, + burst, + cfg, + eap_correction_applied=check_eap.phase_correction, + ) algorithm_metadata_to_h5group(root_group) if cfg.lut_params.enabled: correction_group = geo_burst_h5.require_group( - f'{METADATA_PATH}/processing_information') - corrections_to_h5group(correction_group, burst, cfg, rg_lut, az_lut, - scratch_path, - weather_model_path=cfg.weather_model_file, - delay_type=cfg.tropo_params.delay_type) + f"{METADATA_PATH}/processing_information" + ) + corrections_to_h5group( + correction_group, + burst, + cfg, + rg_lut, + az_lut, + scratch_path, + weather_model_path=cfg.weather_model_file, + delay_type=cfg.tropo_params.delay_type, + ) # If needed, make browse image and compute CSLC raster stats browse_params = cfg.browse_image_params if browse_params.enabled: - make_browse_image(out_paths.browse_path, output_hdf5, - bursts, browse_params.complex_to_real, - browse_params.percent_low, - browse_params.percent_high, - browse_params.gamma, browse_params.equalize) + make_browse_image( + out_paths.browse_path, + output_hdf5, + bursts, + browse_params.complex_to_real, + browse_params.percent_low, + browse_params.percent_high, + browse_params.gamma, + browse_params.equalize, + ) # If needed, perform QA and write results to JSON if cfg.quality_assurance_params.perform_qa: @@ -276,12 +323,15 @@ def run(cfg: GeoRunConfig): # apply tropo corrections if weather file provided apply_tropo_corrections = cfg.weather_model_file is not None cslc_qa.compute_correction_stats( - geo_burst_h5, apply_tropo_corrections, - cfg.tropo_params.delay_type) + geo_burst_h5, + apply_tropo_corrections, + cfg.tropo_params.delay_type, + ) cslc_qa.compute_CSLC_raster_stats(geo_burst_h5, bursts) cslc_qa.populate_rfi_dict(geo_burst_h5, bursts) - cslc_qa.percent_land_and_valid_pixels(geo_burst_h5, - bursts[0].polarization) + cslc_qa.percent_land_and_valid_pixels( + geo_burst_h5, bursts[0].polarization + ) cslc_qa.set_orbit_type(cfg, geo_burst_h5) if cfg.quality_assurance_params.output_to_json: @@ -289,19 +339,17 @@ def run(cfg: GeoRunConfig): if burst.burst_calibration is not None: # Geocode the calibration parameters and write them into HDF5 - s1_geocode_metadata.geocode_calibration_luts(geo_burst_h5, - burst, - cfg) + s1_geocode_metadata.geocode_calibration_luts(geo_burst_h5, burst, cfg) if burst.burst_noise is not None: # Geocode the calibration parameters and write them into HDF5 - s1_geocode_metadata.geocode_noise_luts(geo_burst_h5, - burst, - cfg) + s1_geocode_metadata.geocode_noise_luts(geo_burst_h5, burst, cfg) dt_qa_meta = get_time_delta_str(t_qa_meta) dt = 
get_time_delta_str(t_start) - info_channel.log(f"{module_name} corrections computation time {dt_corrections} (hr:min:sec)") + info_channel.log( + f"{module_name} corrections computation time {dt_corrections} (hr:min:sec)" + ) info_channel.log(f"{module_name} geocode prep time {dt_prep} (hr:min:sec)") info_channel.log(f"{module_name} geocoding time {dt_geocoding} (hr:min:sec)") info_channel.log(f"{module_name} QA meta processing time {dt_qa_meta} (hr:min:sec)") @@ -309,13 +357,14 @@ def run(cfg: GeoRunConfig): if __name__ == "__main__": - '''Run geocode cslc workflow from command line''' + """Run geocode cslc workflow from command line""" # load arguments from command line parser = YamlArgparse() # Get a runconfig dict from command line arguments - cfg = GeoRunConfig.load_from_yaml(parser.run_config_path, - workflow_name='s1_cslc_geo') + cfg = GeoRunConfig.load_from_yaml( + parser.run_config_path, workflow_name="s1_cslc_geo" + ) # Run geocode burst workflow run(cfg) diff --git a/src/compass/s1_geocode_stack.py b/src/compass/s1_geocode_stack.py index a924ebed..c9b1b607 100755 --- a/src/compass/s1_geocode_stack.py +++ b/src/compass/s1_geocode_stack.py @@ -16,67 +16,158 @@ from compass.utils import helpers from compass.utils.geo_grid import get_point_epsg - -DEFAULT_BURST_DB_FILE = os.path.abspath("/u/aurora-r0/staniewi/dev/burst_map_bbox_only.sqlite3") # noqa +DEFAULT_BURST_DB_FILE = os.path.abspath( + "/u/aurora-r0/staniewi/dev/burst_map_bbox_only.sqlite3" +) # noqa def create_parser(): parser = argparse.ArgumentParser( - description='S1-A/B geocoded CSLC stack processor.', - formatter_class=argparse.ArgumentDefaultsHelpFormatter) + description="S1-A/B geocoded CSLC stack processor.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) # Separate the required options from the optional ones # https://stackoverflow.com/a/41747010/ parser._action_groups.pop() - required = parser.add_argument_group('required arguments') - optional = parser.add_argument_group('optional arguments') - required.add_argument('-s', '--slc-dir', required=True, - help='Directory containing the S1-A/B SLCs (zip files)') - required.add_argument('-d', '--dem-file', required=True, - help='File path to a GDAL-readable DEM to use for processing.') - optional.add_argument('-o', '--orbit-dir', default=None, - help='Directory with orbit files. If None, downloads orbit files') - optional.add_argument('-w', '--working-dir', dest='work_dir', default='stack', - help='Directory to store intermediate and final results') - optional.add_argument('-sd', '--start-date', help='Start date of the stack to process') - optional.add_argument('-ed', '--end-date', help='End date of the stack to process') - optional.add_argument('-b', '--burst-id', nargs='+', default=None, - help='List of burst IDs to process. 
If None, burst IDs ' - 'common to all dates are processed.') - optional.add_argument('--common-bursts-only', action='store_true', - help='If flag is set, only bursts present in all dates' - ' are processed.') - optional.add_argument('-exd', '--exclude-dates', nargs='+', - help='Date to be excluded from stack processing (format: YYYYMMDD)') - optional.add_argument('-p', '--pol', dest='pol', nargs='+', default='co-pol', - choices=['co-pol', 'cross-pol', 'dual-pol'], - help='Polarization to process: %(choices)s ') - optional.add_argument('-dx', '--x-spac', type=float, default=5, - help='Spacing in meters of geocoded CSLC along X-direction.') - optional.add_argument('-dy', '--y-spac', type=float, default=10, - help='Spacing in meters of geocoded CSLC along Y-direction.') - optional.add_argument('--bbox', nargs=4, type=float, default=None, - metavar=('xmin', 'ymin', 'xmax', 'ymax'), - help='Bounding box of the geocoded stack.') - optional.add_argument('--bbox-epsg', type=int, default=4326, - help='EPSG code of the bounding box. ' - 'If 4326, the bounding box is in lon/lat degrees.') - optional.add_argument('-e', '--output-epsg', type=int, default=None, - help='Output EPSG projection code for geocoded bursts. ' - 'If None, looks up the UTM zone for each burst.') - optional.add_argument('--burst-db-file', type=str, default=DEFAULT_BURST_DB_FILE, - help='Sqlite3 database file with burst bounding boxes.') - optional.add_argument('-nf', '--no-flatten', action='store_true', - help='If flag is set, disables topographic phase flattening.') - optional.add_argument('-nc', '--no-corrections', action='store_true', - help='If flag is set, skip the geocoding LUT corrections.') - optional.add_argument('--unzipped', action='store_true', - help='If flag is set, assumes that the SLCs are unzipped, ' - 'and only the SAFE directory is provided.') + required = parser.add_argument_group("required arguments") + optional = parser.add_argument_group("optional arguments") + required.add_argument( + "-s", + "--slc-dir", + required=True, + help="Directory containing the S1-A/B SLCs (zip files)", + ) + required.add_argument( + "-d", + "--dem-file", + required=True, + help="File path to a GDAL-readable DEM to use for processing.", + ) + optional.add_argument( + "-o", + "--orbit-dir", + default=None, + help="Directory with orbit files. If None, downloads orbit files", + ) + optional.add_argument( + "-w", + "--working-dir", + dest="work_dir", + default="stack", + help="Directory to store intermediate and final results", + ) + optional.add_argument( + "-sd", "--start-date", help="Start date of the stack to process" + ) + optional.add_argument("-ed", "--end-date", help="End date of the stack to process") + optional.add_argument( + "-b", + "--burst-id", + nargs="+", + default=None, + help=( + "List of burst IDs to process. If None, burst IDs " + "common to all dates are processed." 
+ ), + ) + optional.add_argument( + "--common-bursts-only", + action="store_true", + help="If flag is set, only bursts present in all dates are processed.", + ) + optional.add_argument( + "-exd", + "--exclude-dates", + nargs="+", + help="Date to be excluded from stack processing (format: YYYYMMDD)", + ) + optional.add_argument( + "-p", + "--pol", + dest="pol", + nargs="+", + default="co-pol", + choices=["co-pol", "cross-pol", "dual-pol"], + help="Polarization to process: %(choices)s ", + ) + optional.add_argument( + "-dx", + "--x-spac", + type=float, + default=5, + help="Spacing in meters of geocoded CSLC along X-direction.", + ) + optional.add_argument( + "-dy", + "--y-spac", + type=float, + default=10, + help="Spacing in meters of geocoded CSLC along Y-direction.", + ) + optional.add_argument( + "--bbox", + nargs=4, + type=float, + default=None, + metavar=("xmin", "ymin", "xmax", "ymax"), + help="Bounding box of the geocoded stack.", + ) + optional.add_argument( + "--bbox-epsg", + type=int, + default=4326, + help=( + "EPSG code of the bounding box. " + "If 4326, the bounding box is in lon/lat degrees." + ), + ) + optional.add_argument( + "-e", + "--output-epsg", + type=int, + default=None, + help=( + "Output EPSG projection code for geocoded bursts. " + "If None, looks up the UTM zone for each burst." + ), + ) + optional.add_argument( + "--burst-db-file", + type=str, + default=DEFAULT_BURST_DB_FILE, + help="Sqlite3 database file with burst bounding boxes.", + ) + optional.add_argument( + "-nf", + "--no-flatten", + action="store_true", + help="If flag is set, disables topographic phase flattening.", + ) + optional.add_argument( + "-nc", + "--no-corrections", + action="store_true", + help="If flag is set, skip the geocoding LUT corrections.", + ) + optional.add_argument( + "--unzipped", + action="store_true", + help=( + "If flag is set, assumes that the SLCs are unzipped, " + "and only the SAFE directory is provided." + ), + ) return parser.parse_args() -def generate_burst_map(zip_files, orbit_dir, output_epsg=None, bbox=None, - bbox_epsg=4326, burst_db_file=DEFAULT_BURST_DB_FILE): +def generate_burst_map( + zip_files, + orbit_dir, + output_epsg=None, + bbox=None, + bbox_epsg=4326, + burst_db_file=DEFAULT_BURST_DB_FILE, +): """Generates a dataframe of geogrid infos for each burst ID in `zip_files`. 
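`create_parser()` above uses a small argparse pattern to list a "required arguments" group ahead of the optional one in `--help`: the default action groups are popped and replaced with explicit groups. A stripped-down sketch of the pattern; the two arguments are placeholders, not the stack processor's full interface:

```python
import argparse

parser = argparse.ArgumentParser(
    description="required/optional group demo",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
# Drop the default groups so our own groups control the --help ordering
parser._action_groups.pop()
required = parser.add_argument_group("required arguments")
optional = parser.add_argument_group("optional arguments")
required.add_argument("-s", "--slc-dir", required=True, help="input SLC directory")
optional.add_argument("-w", "--work-dir", default="stack", help="output directory")

args = parser.parse_args(["-s", "slcs/"])
print(args.slc_dir, args.work_dir)
```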
Parameters @@ -120,14 +211,14 @@ def generate_burst_map(zip_files, orbit_dir, output_epsg=None, bbox=None, if epsg is None: # Flag for skipping burst continue - burst_map['burst_id'].append(str(burst.burst_id)) + burst_map["burst_id"].append(str(burst.burst_id)) # keep the burst object so we don't have to re-parse - burst_map['burst'].append(burst) + burst_map["burst"].append(burst) - burst_map['date'].append(burst.sensing_start.strftime("%Y%m%d")) + burst_map["date"].append(burst.sensing_start.strftime("%Y%m%d")) # Save the file paths for creating the runconfig - burst_map['orbit_path'].append(orbit_path) - burst_map['zip_file'].append(zip_file) + burst_map["orbit_path"].append(orbit_path) + burst_map["zip_file"].append(zip_file) burst_map = pd.DataFrame(data=burst_map) return burst_map @@ -141,20 +232,15 @@ def _get_burst_epsg_and_bbox(burst, output_epsg, bbox, bbox_epsg, burst_db_file) # # Get the UTM zone of the first burst from the database if output_epsg is None: if os.path.exists(burst_db_file): - epsg, _ = helpers.burst_bbox_from_db( - str(burst.burst_id), burst_db_file - ) + epsg, _ = helpers.burst_bbox_from_db(str(burst.burst_id), burst_db_file) else: # Fallback: ust the burst center UTM zone - epsg = get_point_epsg(burst.center.y, - burst.center.x) + epsg = get_point_epsg(burst.center.y, burst.center.x) else: epsg = output_epsg if bbox is not None: - bbox_utm = helpers.bbox_to_utm( - bbox, epsg_src=bbox_epsg, epsg_dst=epsg - ) + bbox_utm = helpers.bbox_to_utm(bbox, epsg_src=bbox_epsg, epsg_dst=epsg) burst_border_utm = helpers.polygon_to_utm( burst.border[0], epsg_src=4326, epsg_dst=epsg ) @@ -166,9 +252,7 @@ def _get_burst_epsg_and_bbox(burst, output_epsg, bbox, bbox_epsg, burst_db_file) str(burst.burst_id), burst_db_file ) if epsg_db != epsg: - bbox_utm = helpers.bbox_to_utm( - bbox_utm, epsg_src=epsg_db, epsg_dst=epsg - ) + bbox_utm = helpers.bbox_to_utm(bbox_utm, epsg_src=epsg_db, epsg_dst=epsg) return epsg, bbox_utm @@ -192,7 +276,7 @@ def prune_dataframe(data, id_col, id_list): data: pandas.DataFrame Pruned dataframe with rows in 'id_list' """ - pattern = '|'.join(id_list) + pattern = "|".join(id_list) df = data.loc[data[id_col].str.contains(pattern, case=False)] return df @@ -211,7 +295,7 @@ def get_common_burst_ids(data): List containing common burst IDs among all the dates """ # Identify all the dates for the bursts to stitch - unique_dates = list(set(data['date'])) + unique_dates = list(set(data["date"])) # Initialize list of unique burst IDs common_id = data.burst_id[data.date == unique_dates[0]] @@ -222,8 +306,17 @@ def get_common_burst_ids(data): return common_id -def create_runconfig(burst_map_row, dem_file, work_dir, flatten, pol, x_spac, - y_spac, enable_corrections, burst_db_file): +def create_runconfig( + burst_map_row, + dem_file, + work_dir, + flatten, + pol, + x_spac, + y_spac, + enable_corrections, + burst_db_file, +): """ Create runconfig to process geocoded bursts @@ -254,40 +347,42 @@ def create_runconfig(burst_map_row, dem_file, work_dir, flatten, pol, x_spac, Path to runconfig file """ # Load default runconfig and fill it with user-defined options - yaml_path = f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/s1_cslc_geo.yaml' - with open(yaml_path, 'r') as stream: + yaml_path = f"{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/s1_cslc_geo.yaml" + with open(yaml_path, "r") as stream: yaml_cfg = yaml.safe_load(stream) - groups = yaml_cfg['runconfig']['groups'] - inputs = groups['input_file_group'] - product = groups['product_path_group'] - process = 
groups['processing'] - geocode = process['geocoding'] + groups = yaml_cfg["runconfig"]["groups"] + inputs = groups["input_file_group"] + product = groups["product_path_group"] + process = groups["processing"] + geocode = process["geocoding"] # Allocate Inputs burst = burst_map_row.burst - inputs['safe_file_path'] = [burst_map_row.zip_file] - inputs['orbit_file_path'] = [burst_map_row.orbit_path] - inputs['burst_id'] = [str(burst.burst_id)] - groups['dynamic_ancillary_file_group']['dem_file'] = dem_file - groups['static_ancillary_file_group']['burst_database_file'] = burst_db_file + inputs["safe_file_path"] = [burst_map_row.zip_file] + inputs["orbit_file_path"] = [burst_map_row.orbit_path] + inputs["burst_id"] = [str(burst.burst_id)] + groups["dynamic_ancillary_file_group"]["dem_file"] = dem_file + groups["static_ancillary_file_group"]["burst_database_file"] = burst_db_file # Product path - product['product_path'] = work_dir - product['scratch_path'] = f'{work_dir}/scratch' - product['sas_output_file'] = work_dir + product["product_path"] = work_dir + product["scratch_path"] = f"{work_dir}/scratch" + product["sas_output_file"] = work_dir # Geocoding - process['polarization'] = pol - process['correction_luts']['enabled'] = enable_corrections - geocode['flatten'] = flatten - geocode['x_posting'] = x_spac - geocode['y_posting'] = y_spac + process["polarization"] = pol + process["correction_luts"]["enabled"] = enable_corrections + geocode["flatten"] = flatten + geocode["x_posting"] = x_spac + geocode["y_posting"] = y_spac date_str = burst.sensing_start.strftime("%Y%m%d") - os.makedirs(f'{work_dir}/runconfigs', exist_ok=True) - runconfig_path = f'{work_dir}/runconfigs/geo_runconfig_{date_str}_{str(burst.burst_id)}.yaml' - with open(runconfig_path, 'w') as yaml_file: + os.makedirs(f"{work_dir}/runconfigs", exist_ok=True) + runconfig_path = ( + f"{work_dir}/runconfigs/geo_runconfig_{date_str}_{str(burst.burst_id)}.yaml" + ) + with open(runconfig_path, "w") as yaml_file: yaml.dump(yaml_cfg, yaml_file, default_flow_style=False) return runconfig_path @@ -314,18 +409,17 @@ def _filter_by_date(zip_file_list, start_date, end_date, exclude_dates): """ safe_datetimes = [_parse_safe_filename(zip_file)[2] for zip_file in zip_file_list] if start_date: - start_datetime = datetime.datetime.strptime(start_date, '%Y%m%d') + start_datetime = datetime.datetime.strptime(start_date, "%Y%m%d") else: start_datetime = min(safe_datetimes) if end_date: - end_datetime = datetime.datetime.strptime(end_date, '%Y%m%d') + end_datetime = datetime.datetime.strptime(end_date, "%Y%m%d") else: end_datetime = max(safe_datetimes) if exclude_dates is not None: exclude_datetimes = [ - datetime.datetime.strptime(d, '%Y%m%d').date - for d in exclude_dates + datetime.datetime.strptime(d, "%Y%m%d").date for d in exclude_dates ] else: exclude_datetimes = [] @@ -339,11 +433,27 @@ def _filter_by_date(zip_file_list, start_date, end_date, exclude_dates): return zip_file_list -def run(slc_dir, dem_file, burst_id=None, common_bursts_only=False, start_date=None, - end_date=None, exclude_dates=None, orbit_dir=None, work_dir='stack', - pol='co-pol', x_spac=5, y_spac=10, bbox=None, bbox_epsg=4326, - output_epsg=None, burst_db_file=DEFAULT_BURST_DB_FILE, flatten=True, - enable_corrections=True, using_zipped=True): +def run( + slc_dir, + dem_file, + burst_id=None, + common_bursts_only=False, + start_date=None, + end_date=None, + exclude_dates=None, + orbit_dir=None, + work_dir="stack", + pol="co-pol", + x_spac=5, + y_spac=10, + bbox=None, + 
bbox_epsg=4326,
+    output_epsg=None,
+    burst_db_file=DEFAULT_BURST_DB_FILE,
+    flatten=True,
+    enable_corrections=True,
+    using_zipped=True,
+):
     """Create runconfigs and runfiles generating geocoded bursts for
     a static stack of Sentinel-1 A/B SAFE files.
@@ -394,38 +504,38 @@
         Will search for .zip files if True, and .SAFE directories if False.
     """
     start_time = time.perf_counter()
-    error = journal.error('s1_geo_stack_processor.main')
-    info = journal.info('s1_geo_stack_processor.main')
+    error = journal.error("s1_geo_stack_processor.main")
+    info = journal.info("s1_geo_stack_processor.main")

     # Check if SLC dir and DEM exists
     if not os.path.isdir(slc_dir):
-        err_str = f'{slc_dir} SLC directory does not exist'
+        err_str = f"{slc_dir} SLC directory does not exist"
         error.log(err_str)
         raise FileNotFoundError(err_str)

     if not os.path.isfile(dem_file):
-        err_str = f'{dem_file} DEM file does not exists'
+        err_str = f"{dem_file} DEM file does not exist"
         error.log(err_str)
         raise FileNotFoundError(err_str)

     # Create directory for runfiles
-    run_dir = f'{work_dir}/run_files'
+    run_dir = f"{work_dir}/run_files"
     os.makedirs(run_dir, exist_ok=True)

     # Check if orbit are provided, if Not download
     if orbit_dir is None:
-        orbit_dir = f'{work_dir}/orbits'
-        info.log(f'Orbit directory not assigned. Using {orbit_dir} to download orbits')
+        orbit_dir = f"{work_dir}/orbits"
+        info.log(f"Orbit directory not assigned. Using {orbit_dir} to download orbits")
         os.makedirs(orbit_dir, exist_ok=True)
         # Note: Specific files will be downloaded as needed during `generate_burst_map`

     # Generate burst map and prune it if a list of burst ID is provided
-    search_ext = 'zip' if using_zipped else 'SAFE'
-    zip_file_list = sorted(glob.glob(f'{slc_dir}/S1[AB]_*.{search_ext}'))
+    search_ext = "zip" if using_zipped else "SAFE"
+    zip_file_list = sorted(glob.glob(f"{slc_dir}/S1[AB]_*.{search_ext}"))

     # Remove zip files that are not in the date range before generating burst map
     zip_file_list = _filter_by_date(zip_file_list, start_date, end_date, exclude_dates)

-    info.log(f'Generating burst map for {len(zip_file_list)} SAFE files')
+    info.log(f"Generating burst map for {len(zip_file_list)} SAFE files")
     burst_map = generate_burst_map(
         zip_file_list, orbit_dir, output_epsg, bbox, bbox_epsg, burst_db_file
     )
@@ -434,11 +544,11 @@ def run(slc_dir, dem_file, burst_id=None, common_bursts_only=False, start_date=N
     # burst IDs that are not in common
     if common_bursts_only:
         common_ids = get_common_burst_ids(burst_map)
-        burst_map = prune_dataframe(burst_map, 'burst_id', common_ids)
+        burst_map = prune_dataframe(burst_map, "burst_id", common_ids)

     # If user selects burst IDs to process, prune unnecessary bursts
     if burst_id is not None:
-        burst_map = prune_dataframe(burst_map, 'burst_id', burst_id)
+        burst_map = prune_dataframe(burst_map, "burst_id", burst_id)

     # Ready to geocode bursts
     for row in burst_map.itertuples():
@@ -454,14 +564,13 @@ def run(slc_dir, dem_file, burst_id=None, common_bursts_only=False, start_date=N
             burst_db_file=burst_db_file,
         )
         date_str = row.burst.sensing_start.strftime("%Y%m%d")
-        runfile_name = f'{run_dir}/run_{date_str}_{row.burst.burst_id}.sh'
-        with open(runfile_name, 'w') as rsh:
+        runfile_name = f"{run_dir}/run_{date_str}_{row.burst.burst_id}.sh"
+        with open(runfile_name, "w") as rsh:
             path = os.path.dirname(os.path.realpath(__file__))
-            rsh.write(
-                f'python {path}/s1_cslc.py {runconfig_path}\n')
+            rsh.write(f"python {path}/s1_cslc.py 
{runconfig_path}\n") end_time = time.perf_counter() - print('Elapsed time (min):', (end_time - start_time) / 60.0) + print("Elapsed time (min):", (end_time - start_time) / 60.0) def main(): @@ -492,6 +601,5 @@ def main(): ) - -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/compass/s1_rdr2geo.py b/src/compass/s1_rdr2geo.py index 770d01a2..d7c6f5ab 100755 --- a/src/compass/s1_rdr2geo.py +++ b/src/compass/s1_rdr2geo.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -'''wrapper for rdr2geo''' +"""wrapper for rdr2geo""" import time @@ -13,17 +13,17 @@ from compass.utils.runconfig import RunConfig from compass.utils.yaml_argparse import YamlArgparse -file_name_los_east = 'los_east' -file_name_los_north = 'los_north' -file_name_local_incidence = 'local_incidence_angle' -file_name_layover = 'layover_shadow_mask' -file_name_x = 'x' -file_name_y = 'y' -file_name_z = 'z' +file_name_los_east = "los_east" +file_name_los_north = "los_north" +file_name_local_incidence = "local_incidence_angle" +file_name_layover = "layover_shadow_mask" +file_name_x = "x" +file_name_y = "y" +file_name_z = "z" def run(cfg, burst=None, save_in_scratch=False): - '''run rdr2geo with provided runconfig + """run rdr2geo with provided runconfig Parameters ---------- @@ -33,7 +33,7 @@ def run(cfg, burst=None, save_in_scratch=False): Burst to run rdr2geo. If `None`, it will process all bursts in `cfg` save_in_scratch: bool Flag to save output in scratch dir instead of product dir - ''' + """ module_name = get_module_name(__file__) info_channel = journal.info(f"{module_name}.run") info_channel.log(f"Starting {module_name} burst") @@ -84,7 +84,7 @@ def run(cfg, burst=None, save_in_scratch=False): # save SLC to Geotiff for all bursts # run rdr2geo for only 1 burst avoid redundancy - burst.slc_to_file(f'{output_path}/{out_paths.file_name_pol}.slc.tif', 'GTiff') + burst.slc_to_file(f"{output_path}/{out_paths.file_name_pol}.slc.tif", "GTiff") # skip burst if id already rdr2geo processed # save id if not processed to avoid rdr2geo reprocessing @@ -94,7 +94,7 @@ def run(cfg, burst=None, save_in_scratch=False): # get radar grid of last SLC written and save for resample flattening rdr_grid = burst.as_isce3_radargrid() - ref_grid_path = f'{output_path}/radar_grid.txt' + ref_grid_path = f"{output_path}/radar_grid.txt" rdr_grid_to_file(ref_grid_path, rdr_grid) # get isce3 objs from burst @@ -104,67 +104,99 @@ def run(cfg, burst=None, save_in_scratch=False): grid_doppler = isce3.core.LUT2d() # init rdr2geo obj - rdr2geo_obj = Rdr2Geo(rdr_grid, isce3_orbit, ellipsoid, grid_doppler, - threshold=rdr2geo_cfg.threshold, - numiter=rdr2geo_cfg.numiter, - extraiter=rdr2geo_cfg.extraiter, - lines_per_block=rdr2geo_cfg.lines_per_block) + rdr2geo_obj = Rdr2Geo( + rdr_grid, + isce3_orbit, + ellipsoid, + grid_doppler, + threshold=rdr2geo_cfg.threshold, + numiter=rdr2geo_cfg.numiter, + extraiter=rdr2geo_cfg.extraiter, + lines_per_block=rdr2geo_cfg.lines_per_block, + ) # Dict containing the rdr2geo layers to generate and their filenames # key: rdr2geo layer name # value: (boolean flag; True if layers needs to be generated, layer name) - topo_output = {file_name_x: (rdr2geo_cfg.compute_longitude, gdal.GDT_Float64), - file_name_y: (rdr2geo_cfg.compute_latitude, gdal.GDT_Float64), - file_name_z: (rdr2geo_cfg.compute_height, gdal.GDT_Float64), - file_name_layover: ( - cfg.rdr2geo_params.compute_layover_shadow_mask, - gdal.GDT_Byte), - file_name_local_incidence: ( - rdr2geo_cfg.compute_local_incidence_angle, - gdal.GDT_Float32), - 
file_name_los_east: ( - rdr2geo_cfg.compute_ground_to_sat_east, gdal.GDT_Float32), - file_name_los_north: ( - rdr2geo_cfg.compute_ground_to_sat_north, gdal.GDT_Float32), - } + topo_output = { + file_name_x: (rdr2geo_cfg.compute_longitude, gdal.GDT_Float64), + file_name_y: (rdr2geo_cfg.compute_latitude, gdal.GDT_Float64), + file_name_z: (rdr2geo_cfg.compute_height, gdal.GDT_Float64), + file_name_layover: ( + cfg.rdr2geo_params.compute_layover_shadow_mask, + gdal.GDT_Byte, + ), + file_name_local_incidence: ( + rdr2geo_cfg.compute_local_incidence_angle, + gdal.GDT_Float32, + ), + file_name_los_east: ( + rdr2geo_cfg.compute_ground_to_sat_east, + gdal.GDT_Float32, + ), + file_name_los_north: ( + rdr2geo_cfg.compute_ground_to_sat_north, + gdal.GDT_Float32, + ), + } raster_list = [ - isce3.io.Raster(f'{output_path}/{fname}.tif', rdr_grid.width, - rdr_grid.length, 1, dtype, 'GTiff') - if enabled else None - for fname, (enabled, dtype) in topo_output.items()] - - (x_raster, y_raster, z_raster, layover_shadow_raster, - local_incident_angle_raster, los_east_raster, - los_north_raster) = raster_list + ( + isce3.io.Raster( + f"{output_path}/{fname}.tif", + rdr_grid.width, + rdr_grid.length, + 1, + dtype, + "GTiff", + ) + if enabled + else None + ) + for fname, (enabled, dtype) in topo_output.items() + ] + + ( + x_raster, + y_raster, + z_raster, + layover_shadow_raster, + local_incident_angle_raster, + los_east_raster, + los_north_raster, + ) = raster_list # run rdr2geo - rdr2geo_obj.topo(dem_raster, x_raster=x_raster, y_raster=y_raster, - height_raster=z_raster, - local_incidence_angle_raster=local_incident_angle_raster, - layover_shadow_raster=layover_shadow_raster, - ground_to_sat_east_raster=los_east_raster, - ground_to_sat_north_raster=los_north_raster) + rdr2geo_obj.topo( + dem_raster, + x_raster=x_raster, + y_raster=y_raster, + height_raster=z_raster, + local_incidence_angle_raster=local_incident_angle_raster, + layover_shadow_raster=layover_shadow_raster, + ground_to_sat_east_raster=los_east_raster, + ground_to_sat_north_raster=los_north_raster, + ) # remove undesired/None rasters from raster list raster_list = [raster for raster in raster_list if raster is not None] # save non-None rasters to vrt - output_vrt = isce3.io.Raster(f'{output_path}/topo.vrt', raster_list) + output_vrt = isce3.io.Raster(f"{output_path}/topo.vrt", raster_list) output_vrt.set_epsg(rdr2geo_obj.epsg_out) dt = get_time_delta_str(t_start) - info_channel.log( - f"{module_name} burst successfully ran in {dt} (hr:min:sec)") + info_channel.log(f"{module_name} burst successfully ran in {dt} (hr:min:sec)") if __name__ == "__main__": - '''run rdr2geo from command line''' + """run rdr2geo from command line""" # load command line args parser = YamlArgparse() # get a runconfig dict from command line args - cfg = RunConfig.load_from_yaml(parser.args.run_config_path, - workflow_name='s1_cslc_radar') + cfg = RunConfig.load_from_yaml( + parser.args.run_config_path, workflow_name="s1_cslc_radar" + ) # run rdr2geo run(cfg) diff --git a/src/compass/s1_resample.py b/src/compass/s1_resample.py index 6e46480d..57145139 100755 --- a/src/compass/s1_resample.py +++ b/src/compass/s1_resample.py @@ -63,31 +63,43 @@ def run(cfg: dict): az_poly = burst.get_az_carrier_poly() # Init resample SLC object - resamp_obj = resamp(rdr_grid, burst.doppler.lut2d, - az_poly, ref_rdr_grid=ref_rdr_grid) + resamp_obj = resamp( + rdr_grid, burst.doppler.lut2d, az_poly, ref_rdr_grid=ref_rdr_grid + ) resamp_obj.lines_per_tile = blocksize # Get range and azimuth 
offsets offset_path = out_paths.scratch_directory - rg_off_raster = isce3.io.Raster(f'{offset_path}/range.off') - az_off_raster = isce3.io.Raster(f'{offset_path}/azimuth.off') + rg_off_raster = isce3.io.Raster(f"{offset_path}/range.off") + az_off_raster = isce3.io.Raster(f"{offset_path}/azimuth.off") # Get original SLC as raster object - sec_burst_path = f'{out_paths.scratch_directory}/{out_paths.file_name_pol}.slc.vrt' + sec_burst_path = ( + f"{out_paths.scratch_directory}/{out_paths.file_name_pol}.slc.vrt" + ) burst.slc_to_vrt_file(sec_burst_path) original_raster = isce3.io.Raster(sec_burst_path) # Prepare resampled SLC as raster object - coreg_burst_path = f'{out_paths.output_directory}/{out_paths.file_name_stem}.slc.tif' - resampled_raster = isce3.io.Raster(coreg_burst_path, - rg_off_raster.width, - rg_off_raster.length, - 1, gdal.GDT_CFloat32, - 'GTiff') - - resamp_obj.resamp(original_raster, resampled_raster, - rg_off_raster, az_off_raster, - flatten=cfg.resample_params.flatten) + coreg_burst_path = ( + f"{out_paths.output_directory}/{out_paths.file_name_stem}.slc.tif" + ) + resampled_raster = isce3.io.Raster( + coreg_burst_path, + rg_off_raster.width, + rg_off_raster.length, + 1, + gdal.GDT_CFloat32, + "GTiff", + ) + + resamp_obj.resamp( + original_raster, + resampled_raster, + rg_off_raster, + az_off_raster, + flatten=cfg.resample_params.flatten, + ) dt = get_time_delta_str(t_start) info_channel.log(f"{module_name} burst successfully ran in {dt} (hr:min:sec)") @@ -98,8 +110,9 @@ def run(cfg: dict): parser = YamlArgparse() # Get a runconfig dict from command line arguments - cfg = RunConfig.load_from_yaml(parser.args.run_config_path, - workflow_name='s1_cslc_radar') + cfg = RunConfig.load_from_yaml( + parser.args.run_config_path, workflow_name="s1_cslc_radar" + ) # Run resample burst run(cfg) diff --git a/src/compass/s1_static_layers.py b/src/compass/s1_static_layers.py index 7125d464..571df524 100644 --- a/src/compass/s1_static_layers.py +++ b/src/compass/s1_static_layers.py @@ -5,13 +5,16 @@ from compass import s1_geocode_metadata, s1_rdr2geo from compass.utils.geo_runconfig import GeoRunConfig -from compass.utils.helpers import (bursts_grouping_generator, get_module_name, - get_time_delta_str) +from compass.utils.helpers import ( + bursts_grouping_generator, + get_module_name, + get_time_delta_str, +) from compass.utils.yaml_argparse import YamlArgparse def _make_rdr2geo_cfg(yaml_runconfig_str): - ''' + """ Make a rdr2geo specific runconfig with latitude, longitude, and height layers enabled for static layer product generation while preserving all other rdr2geo config settings @@ -26,21 +29,23 @@ def _make_rdr2geo_cfg(yaml_runconfig_str): rdr2geo_cfg: dict Dictionary with rdr2geo longitude, latitude, and height layers enabled. 
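The `topo_output` mapping above drives which rdr2geo layers get output rasters: each entry pairs an enable flag with a GDAL data type, and disabled layers stay as `None` placeholders so the positional tuple unpack that follows still lines up. A compact sketch of the same pattern using plain GDAL datasets; the layer names, flags, and output directory are examples only:

```python
from osgeo import gdal

width, length = 100, 80
gtiff = gdal.GetDriverByName("GTiff")

# layer name -> (enabled flag, GDAL data type), mirroring topo_output
layers = {
    "x": (True, gdal.GDT_Float64),
    "y": (True, gdal.GDT_Float64),
    "layover_shadow_mask": (False, gdal.GDT_Byte),
}

# Create a dataset only for enabled layers; keep None so unpacking stays aligned
rasters = [
    gtiff.Create(f"/tmp/{name}.tif", width, length, 1, dtype) if enabled else None
    for name, (enabled, dtype) in layers.items()
]
x_ds, y_ds, layover_ds = rasters
assert layover_ds is None  # the disabled layer produced no file
```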
All other rdr2geo parameters are from *yaml_runconfig_str*
-    '''
+    """
     # If any of the requisite layers are false, make them true in yaml cfg str
-    for layer in ['latitude', 'longitude', 'incidence_angle']:
-        re.sub(f'compute_{layer}:\s+[Ff]alse', f'compute_{layer}: true',
-               yaml_runconfig_str)
+    for layer in ["latitude", "longitude", "incidence_angle"]:
+        yaml_runconfig_str = re.sub(
+            rf"compute_{layer}:\s+[Ff]alse", f"compute_{layer}: true", yaml_runconfig_str
+        )

     # Load a GeoRunConfig from modified yaml cfg string
-    rdr2geo_cfg = GeoRunConfig.load_from_yaml(yaml_runconfig_str,
-                                              workflow_name='s1_cslc_geo')
+    rdr2geo_cfg = GeoRunConfig.load_from_yaml(
+        yaml_runconfig_str, workflow_name="s1_cslc_geo"
+    )

     return rdr2geo_cfg


 def run(cfg: GeoRunConfig):
-    '''
+    """
     Run static layers workflow (i.e., generate static layers,
     geocode them, create product HDF5) with user-defined
     args stored in dictionary runconfig *cfg*
@@ -49,7 +54,7 @@
     ---------
     cfg: GeoRunConfig
         GeoRunConfig object with user runconfig options
-    '''
+    """
     module_name = get_module_name(__file__)
     info_channel = journal.info(f"{module_name}.run")
@@ -63,7 +68,7 @@

         date_str = burst.sensing_start.strftime("%Y%m%d")

-        info_channel.log(f'Starting geocoding of {burst_id} for {date_str}')
+        info_channel.log(f"Starting geocoding of {burst_id} for {date_str}")

         # Generate required static layers
         rdr2geo_cfg = _make_rdr2geo_cfg(cfg.yaml_string)
@@ -80,10 +85,12 @@ def main():
     parser = YamlArgparse()

     # Get a runconfig dict from command line arguments
-    cfg = GeoRunConfig.load_from_yaml(parser.run_config_path,
-                                      workflow_name='s1_cslc_geo')
+    cfg = GeoRunConfig.load_from_yaml(
+        parser.run_config_path, workflow_name="s1_cslc_geo"
+    )

     run(cfg)

+
 if __name__ == "__main__":
     main()
diff --git a/src/compass/utils/age.py b/src/compass/utils/age.py
index 55dc4d12..b82d4830 100644
--- a/src/compass/utils/age.py
+++ b/src/compass/utils/age.py
@@ -11,15 +11,23 @@
 from osgeo import gdal, osr
 from pyproj import CRS, Proj
 from shapely import geometry

-from compass.utils.h5_helpers import (DATA_PATH,
-                                      METADATA_PATH,
-                                      TIME_STR_FMT)
-
-
-def run(cslc_file, cr_file, csv_output_file=None, plot_age=False,
-        correct_set=False, mission_id='S1', pol='VV', ovs_factor=128,
-        margin=32, apply_az_ramp=True, unflatten=True):
-    '''
+from compass.utils.h5_helpers import DATA_PATH, METADATA_PATH, TIME_STR_FMT
+
+
+def run(
+    cslc_file,
+    cr_file,
+    csv_output_file=None,
+    plot_age=False,
+    correct_set=False,
+    mission_id="S1",
+    pol="VV",
+    ovs_factor=128,
+    margin=32,
+    apply_az_ramp=True,
+    unflatten=True,
+):
+    """
     Compute Absolute Geolocation Error (AGE) for geocoded SLC
     products from Sentinel-1 or NISAR missions. AGE is computed
     by differencing the surveyed corner reflector (CR) positions from
@@ -64,16 +72,16 @@
        CR peak.
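Note that `re.sub` returns a new string rather than editing its input in place, which is why the layer-enabling loop above has to assign the result back to `yaml_runconfig_str`. A toy illustration of the flag toggling, with a hypothetical two-flag runconfig fragment:

```python
import re

# Hypothetical runconfig fragment; the real default YAML has many more groups
runconfig = """
rdr2geo:
  compute_latitude: False
  compute_longitude: false
"""

for layer in ["latitude", "longitude"]:
    # Reassign: re.sub never mutates the string it is given
    runconfig = re.sub(
        rf"compute_{layer}:\s+[Ff]alse", f"compute_{layer}: true", runconfig
    )

assert "compute_latitude: true" in runconfig
assert "compute_longitude: true" in runconfig
```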
Set this option to True for AGE computation as it dramatically affect the correct determination of the peak location - ''' + """ # Check that the CSLC-S1 product file exists if not os.path.exists(cslc_file): - err_str = f'{cslc_file} input geocoded SLC product does not exist' + err_str = f"{cslc_file} input geocoded SLC product does not exist" raise FileNotFoundError(err_str) # Check corner reflector file exists if not os.path.exists(cr_file): - err_str = f'{cr_file} CSV CR position file does not exist' + err_str = f"{cr_file} CSV CR position file does not exist" raise FileNotFoundError(err_str) # Open and load CSV CR file in pandas dataframe @@ -81,8 +89,7 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, # Identify CRs contained in the usable part of the # geocoded SLC - cslc_poly = get_cslc_polygon(cslc_file, - mission_id=mission_id) + cslc_poly = get_cslc_polygon(cslc_file, mission_id=mission_id) # Initialize empty lists to include in the CR # pandas dataframe cr_x = [] @@ -92,8 +99,8 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, for idx, row in cr_df.iterrows(): # Extract surveyed CR positions from pandas dataframe - cr_lat = row['Latitude (deg)'] - cr_lon = row['Longitude (deg)'] + cr_lat = row["Latitude (deg)"] + cr_lon = row["Longitude (deg)"] cr_loc = geometry.Point(cr_lon, cr_lat) # Add buffer of approx. 30 m to CR location @@ -104,14 +111,14 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, # the CR from the pandas dataframe if cslc_poly.contains(buff_cr_loc): # Convert corner lat/lon coordinates in UTM - cslc_epsg = get_cslc_epsg(cslc_file, mission_id=mission_id, - pol=pol) + cslc_epsg = get_cslc_epsg(cslc_file, mission_id=mission_id, pol=pol) # Correct corner reflector position for solid Earth tides # otherwise just transform coordinates to UTM if correct_set: - x, y = correct_cr_tides(cslc_file, cr_lat, cr_lon, - mission_id=mission_id, pol=pol) + x, y = correct_cr_tides( + cslc_file, cr_lat, cr_lon, mission_id=mission_id, pol=pol + ) else: x, y = latlon2utm(cr_lat, cr_lon, cslc_epsg) @@ -119,9 +126,9 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, cr_y.append(y) # Compute CR location in the geocoded SLC at pixel-precision - x_start, dx, y_start, dy = get_xy_info(cslc_file, - mission_id=mission_id, - pol=pol) + x_start, dx, y_start, dy = get_xy_info( + cslc_file, mission_id=mission_id, pol=pol + ) cr_x_cslc.append(int((x - x_start) / dx)) cr_y_cslc.append(int((y - y_start) / dy)) else: @@ -130,10 +137,10 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, cr_df.drop(idx, inplace=True) # Assign computed data - cr_df['CR_X'] = cr_x - cr_df['CR_Y'] = cr_y - cr_df['CR_X_CSLC'] = cr_x_cslc - cr_df['CR_Y_CSLC'] = cr_y_cslc + cr_df["CR_X"] = cr_x + cr_df["CR_Y"] = cr_y + cr_df["CR_X_CSLC"] = cr_x_cslc + cr_df["CR_Y_CSLC"] = cr_y_cslc x_peak_vect = [] y_peak_vect = [] @@ -141,9 +148,7 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, # Find peak location for every corner reflector in DataFrame # Open CSLC and apply deramping and flattening if desired - arr = get_cslc(cslc_file, - mission_id=mission_id, - pol=pol) + arr = get_cslc(cslc_file, mission_id=mission_id, pol=pol) # If True, remove azimuth carrier ramp if apply_az_ramp: carrier_phase = get_carrier_phase(cslc_file, mission_id=mission_id) @@ -155,48 +160,52 @@ def run(cslc_file, cr_file, csv_output_file=None, plot_age=False, arr *= np.exp(-1j * flatten_phase) for idx, row in cr_df.iterrows(): - x_peak, y_peak, 
snr_cr = find_peak(arr, cslc_file, int(row['CR_X_CSLC']), - int(row['CR_Y_CSLC']), pol=pol, - mission_id=mission_id, ovs_factor=ovs_factor, - margin=margin) + x_peak, y_peak, snr_cr = find_peak( + arr, + cslc_file, + int(row["CR_X_CSLC"]), + int(row["CR_Y_CSLC"]), + pol=pol, + mission_id=mission_id, + ovs_factor=ovs_factor, + margin=margin, + ) x_peak_vect.append(x_peak) y_peak_vect.append(y_peak) cr_snr_vect.append(snr_cr) - cr_df['CR_X_CSLC_PEAK'] = x_peak_vect - cr_df['CR_Y_CSLC_PEAK'] = y_peak_vect - cr_df['CR_SNR'] = cr_snr_vect + cr_df["CR_X_CSLC_PEAK"] = x_peak_vect + cr_df["CR_Y_CSLC_PEAK"] = y_peak_vect + cr_df["CR_SNR"] = cr_snr_vect # Compute absolute geolocation error along X and Y direction - cr_df['ALE_X'] = cr_df['CR_X_CSLC_PEAK'] - cr_df['CR_X'] - cr_df['ALE_Y'] = cr_df['CR_Y_CSLC_PEAK'] - cr_df['CR_Y'] + cr_df["ALE_X"] = cr_df["CR_X_CSLC_PEAK"] - cr_df["CR_X"] + cr_df["ALE_Y"] = cr_df["CR_Y_CSLC_PEAK"] - cr_df["CR_Y"] if csv_output_file is not None: cr_df.to_csv(csv_output_file) else: - print('Print to screen AGE results') + print("Print to screen AGE results") print(cr_df) if plot_age: fig, ax = plt.subplots(figsize=(8, 6)) - ax.scatter(cr_df['ALE_X'], cr_df['ALE_Y'], s=200, alpha=0.8, - marker='o') + ax.scatter(cr_df["ALE_X"], cr_df["ALE_Y"], s=200, alpha=0.8, marker="o") ax.grid(True) ax.set_xlim(-10, 10) ax.set_ylim(-10, 10) - ax.axhline(0, color='black') - ax.axvline(0, color='black') - ax.set_xlabel('Easting error (m)') - ax.set_ylabel('Northing error (m)') - fig.suptitle('Absolute geolocation error (AGE)') + ax.axhline(0, color="black") + ax.axvline(0, color="black") + ax.set_xlabel("Easting error (m)") + ax.set_ylabel("Northing error (m)") + fig.suptitle("Absolute geolocation error (AGE)") plt.show() -def correct_cr_tides(cslc_file, cr_lat, cr_lon, - mission_id='S1', pol='VV'): - ''' +def correct_cr_tides(cslc_file, cr_lat, cr_lon, mission_id="S1", pol="VV"): + """ Correct Corner reflector position for Solid Earth tides Parameters ---------- @@ -217,45 +226,46 @@ def correct_cr_tides(cslc_file, cr_lat, cr_lon, y_tide_cr: float Corner reflector position along Y-direction corrected for Solid Earth tide - ''' + """ import pysolid + # Get geocode SLC sensing start and stop - if mission_id == 'S1': - start_path = f'{METADATA_PATH}/processing_information/input_burst_metadata/sensing_start' - stop_path = f'{METADATA_PATH}/processing_information/input_burst_metadata/sensing_stop' - elif mission_id == 'NI': - start_path = '/science/LSAR/GSLC/identification/zeroDopplerStartTime' - stop_path = '/science/LSAR/GSLC/identification/zeroDopplerEndTime' + if mission_id == "S1": + start_path = ( + f"{METADATA_PATH}/processing_information/input_burst_metadata/sensing_start" + ) + stop_path = ( + f"{METADATA_PATH}/processing_information/input_burst_metadata/sensing_stop" + ) + elif mission_id == "NI": + start_path = "/science/LSAR/GSLC/identification/zeroDopplerStartTime" + stop_path = "/science/LSAR/GSLC/identification/zeroDopplerEndTime" else: - err_str = f'{mission_id} is not a valid mission identifier' + err_str = f"{mission_id} is not a valid mission identifier" raise ValueError(err_str) - with h5py.File(cslc_file, 'r') as h5: + with h5py.File(cslc_file, "r") as h5: start = h5[start_path][()] stop = h5[stop_path][()] - sensing_start = dt.datetime.strptime(start.decode('UTF-8'), - TIME_STR_FMT) - sensing_stop = dt.datetime.strptime(stop.decode('UTF-8'), - TIME_STR_FMT) + sensing_start = dt.datetime.strptime(start.decode("UTF-8"), TIME_STR_FMT) + sensing_stop = 
dt.datetime.strptime(stop.decode("UTF-8"), TIME_STR_FMT) # Compute SET in ENU using pySolid - (_, - tide_e, - tide_n, - _) = pysolid.calc_solid_earth_tides_point(cr_lat, cr_lon, - sensing_start, - sensing_stop, - step_sec=5, - display=False, - verbose=False) + (_, tide_e, tide_n, _) = pysolid.calc_solid_earth_tides_point( + cr_lat, + cr_lon, + sensing_start, + sensing_stop, + step_sec=5, + display=False, + verbose=False, + ) tide_e = np.mean(tide_e[0:2]) tide_n = np.mean(tide_n[0:2]) # Transform CR coordinates to UTM - cslc_epsg = get_cslc_epsg(cslc_file, - mission_id=mission_id, - pol=pol) + cslc_epsg = get_cslc_epsg(cslc_file, mission_id=mission_id, pol=pol) x, y = latlon2utm(cr_lat, cr_lon, cslc_epsg) x_tide_cr = x + tide_e y_tide_cr = y + tide_n @@ -263,9 +273,10 @@ def correct_cr_tides(cslc_file, cr_lat, cr_lon, return x_tide_cr, y_tide_cr -def find_peak(arr, cslc_file, x_loc, y_loc, pol='VV', - mission_id='S1', ovs_factor=128, margin=32): - ''' +def find_peak( + arr, cslc_file, x_loc, y_loc, pol="VV", mission_id="S1", ovs_factor=128, margin=32 +): + """ Find peak location in 'arr' Parameters ---------- @@ -294,11 +305,11 @@ def find_peak(arr, cslc_file, x_loc, y_loc, pol='VV', Peak location along Y-coordinate snr_cr_db: np.float Peak SNR for identified corner reflector - ''' + """ - x_start, x_spac, y_start, y_spac = get_xy_info(cslc_file, - mission_id=mission_id, - pol=pol) + x_start, x_spac, y_start, y_spac = get_xy_info( + cslc_file, mission_id=mission_id, pol=pol + ) # Check if the X/Y coordinate in the image are withing the input CSLC upperleft_x = int(np.round(x_loc)) - margin // 2 @@ -306,9 +317,13 @@ def find_peak(arr, cslc_file, x_loc, y_loc, pol='VV', lowerright_x = upperleft_x + margin lowerright_y = upperleft_y + margin - if (upperleft_x < 0) or (upperleft_y < 0) or \ - (lowerright_x > arr.shape[1]) or (lowerright_y > arr.shape[0]): - err_msg = 'The corner reflector input coordinates are outside of the CSLC' + if ( + (upperleft_x < 0) + or (upperleft_y < 0) + or (lowerright_x > arr.shape[1]) + or (lowerright_y > arr.shape[0]) + ): + err_msg = "The corner reflector input coordinates are outside of the CSLC" raise ValueError(err_msg) # Extract an area around x_loc, y_loc @@ -318,8 +333,7 @@ def find_peak(arr, cslc_file, x_loc, y_loc, pol='VV', snr_cr_db = get_snr_cr(img) # Oversample CSLC subset and get amplitude - img_ovs = isce3.cal.point_target_info.oversample( - img, ovs_factor) + img_ovs = isce3.cal.point_target_info.oversample(img, ovs_factor) idx_peak_ovs = np.argmax(np.abs(img_ovs)) img_peak_ovs = np.unravel_index(idx_peak_ovs, img_ovs.shape) @@ -335,8 +349,8 @@ def find_peak(arr, cslc_file, x_loc, y_loc, pol='VV', return x_cr, y_cr, snr_cr_db -def get_carrier_phase(cslc_file, mission_id='S1'): - ''' +def get_carrier_phase(cslc_file, mission_id="S1"): + """ Get azimuth carrier phase from CSLC product Note, this is implemented only for CSLC-S1 @@ -352,21 +366,21 @@ def get_carrier_phase(cslc_file, mission_id='S1'): ------- carrier_phase: np.ndarray, float64 Numpy array containing azimuth carrier phase - ''' + """ - if mission_id == 'S1': - carrier_path = f'{DATA_PATH}/azimuth_carrier_phase' + if mission_id == "S1": + carrier_path = f"{DATA_PATH}/azimuth_carrier_phase" else: err_str = f"Azimuth carrier phase not present for {mission_id} CSLC product" raise ValueError(err_str) - with h5py.File(cslc_file, 'r') as h5: + with h5py.File(cslc_file, "r") as h5: carrier_phase = h5[carrier_path][()] return carrier_phase -def get_flatten_phase(cslc_file, mission_id='S1'): - ''' 
+def get_flatten_phase(cslc_file, mission_id="S1"): + """ Get flattening phase from CSLC product Note, this is implemented only for CSLC-S1 @@ -382,22 +396,22 @@ def get_flatten_phase(cslc_file, mission_id='S1'): ------- flatten_phase: np.ndarray, float64 Numpy array containing flattening phase - ''' + """ - if mission_id == 'S1': - rg_off_path = f'{DATA_PATH}/flattening_phase' + if mission_id == "S1": + rg_off_path = f"{DATA_PATH}/flattening_phase" else: err_str = f"Range offsets not present for {mission_id} CSLC product" raise ValueError(err_str) - with h5py.File(cslc_file, 'r') as h5: + with h5py.File(cslc_file, "r") as h5: flatten_phase = h5[rg_off_path][()] return flatten_phase -def get_cslc(cslc_file, mission_id='S1', pol='VV') -> np.ndarray : - ''' +def get_cslc(cslc_file, mission_id="S1", pol="VV") -> np.ndarray: + """ Get CSLC-S1 array associated to 'pol' Parameters @@ -416,28 +430,28 @@ def get_cslc(cslc_file, mission_id='S1', pol='VV') -> np.ndarray : cslc: np.ndarray Geocoded SLC image corresponding to 'pol' polarization channel. - ''' - - if mission_id == 'S1': - cslc_path = f'{DATA_PATH}/{pol}' - elif mission_id == 'NI': - with h5py.File(cslc_file, 'r') as h5: - frequencies = h5["/science/LSAR/identification/listOfFrequencies"][()] - freq = frequencies[0].decode('utf-8') - frequency = f'frequency{freq}' - cslc_path = f'/science/LSAR/GSLC/grids/{frequency}/{pol}' + """ + + if mission_id == "S1": + cslc_path = f"{DATA_PATH}/{pol}" + elif mission_id == "NI": + with h5py.File(cslc_file, "r") as h5: + frequencies = h5["/science/LSAR/identification/listOfFrequencies"][()] + freq = frequencies[0].decode("utf-8") + frequency = f"frequency{freq}" + cslc_path = f"/science/LSAR/GSLC/grids/{frequency}/{pol}" else: - err_str = f'{mission_id} is not a valid mission identifier' + err_str = f"{mission_id} is not a valid mission identifier" raise ValueError(err_str) - with h5py.File(cslc_file, 'r') as h5: + with h5py.File(cslc_file, "r") as h5: cslc = h5[cslc_path][()] return cslc -def get_xy_info(cslc_file, mission_id='S1', pol='VV'): - ''' +def get_xy_info(cslc_file, mission_id="S1", pol="VV"): + """ Get X/Y spacings and coordinate vectors from the geocoded SLC contained in 'cslc_file' @@ -462,17 +476,17 @@ def get_xy_info(cslc_file, mission_id='S1', pol='VV'): CSLC-S1 spacing along X-direction y_spac: np.float CSLC-S1 spacing along Y-direction - ''' - if mission_id == 'S1': + """ + if mission_id == "S1": cslc_path = DATA_PATH - elif mission_id == 'NI': - cslc_path = '/science/LSAR/GSLC/grids/frequencyA' + elif mission_id == "NI": + cslc_path = "/science/LSAR/GSLC/grids/frequencyA" else: - err_str = f'{mission_id} is not a valid mission identifier' + err_str = f"{mission_id} is not a valid mission identifier" raise ValueError(err_str) # Open geocoded SLC with a NetCDF driver - ds_in = gdal.Open(f'NETCDF:{cslc_file}:{cslc_path}/{pol}') + ds_in = gdal.Open(f"NETCDF:{cslc_file}:{cslc_path}/{pol}") geo_trans = ds_in.GetGeoTransform() x_spac = geo_trans[1] @@ -486,7 +500,7 @@ def get_xy_info(cslc_file, mission_id='S1', pol='VV'): def latlon2utm(lat, lon, out_epsg): - ''' + """ Converts lat/lon to x/y coordinates specified by 'out_epsg' @@ -506,15 +520,15 @@ def latlon2utm(lat, lon, out_epsg): X-coordinate y: np.float Y-coordinate - ''' + """ _proj = Proj(CRS.from_epsg(out_epsg)) x, y = _proj(lon, lat, inverse=False) return x, y -def get_cslc_polygon(cslc_file, mission_id='S1'): - ''' +def get_cslc_polygon(cslc_file, mission_id="S1"): + """ Get bounding polygon identifying the valid portion of 
the geocoded SLC product on the ground
@@ -530,25 +544,25 @@ def get_cslc_polygon(cslc_file, mission_id='S1'):
     -------
     cslc_poly: shapely.Polygon
         Shapely polygon including CSLC-S1 valid values
-    '''
-    if mission_id == 'S1':
-        poly_path = 'identification/bounding_polygon'
-    elif mission_id == 'NI':
-        poly_path = 'science/LSAR/identification/boundingPolygon'
+    """
+    if mission_id == "S1":
+        poly_path = "identification/bounding_polygon"
+    elif mission_id == "NI":
+        poly_path = "science/LSAR/identification/boundingPolygon"
     else:
-        err_str = f'{mission_id} is not a valid mission identifier'
+        err_str = f"{mission_id} is not a valid mission identifier"
         raise ValueError(err_str)

-    with h5py.File(cslc_file, 'r') as h5:
+    with h5py.File(cslc_file, "r") as h5:
         poly = h5[poly_path][()]

-    cslc_poly = wkt.loads(poly.decode('UTF-8'))
+    cslc_poly = wkt.loads(poly.decode("UTF-8"))

     return cslc_poly


-def get_cslc_epsg(cslc_file, mission_id='S1', pol='VV'):
-    '''
+def get_cslc_epsg(cslc_file, mission_id="S1", pol="VV"):
+    """
     Returns EPSG projection code for geocoded SLC
     contained in 'cslc_file'

@@ -567,30 +581,30 @@
     epsg: int
         EPSG code identifying the projection of the geocoded
         SLC product
-    '''
-    if mission_id == 'S1':
-        epsg_path = f'{DATA_PATH}/projection'
-        with h5py.File(cslc_file, 'r') as h5:
+    """
+    if mission_id == "S1":
+        epsg_path = f"{DATA_PATH}/projection"
+        with h5py.File(cslc_file, "r") as h5:
             epsg = h5[epsg_path][()]
-    elif mission_id == 'NI':
-        with h5py.File(cslc_file, 'r') as h5:
+    elif mission_id == "NI":
+        with h5py.File(cslc_file, "r") as h5:
             frequencies = h5["/science/LSAR/identification/listOfFrequencies"]
-            freq = frequencies[0].decode('utf-8')
-            frequency = f'frequency{freq}'
-            dataset_path = f'NETCDF:{cslc_file}://science/LSAR/GSLC/grids/{frequency}/{pol}'
+            freq = frequencies[0].decode("utf-8")
+            frequency = f"frequency{freq}"
+            dataset_path = f"NETCDF:{cslc_file}://science/LSAR/GSLC/grids/{frequency}/{pol}"
             ds = gdal.Open(dataset_path, gdal.GA_ReadOnly)
             s = osr.SpatialReference(wkt=ds.GetProjection()).ExportToProj4()
             crs = CRS.from_proj4(s)
             epsg = crs.to_epsg()
     else:
-        err_str = f'{mission_id} is not a valid mission identifier'
+        err_str = f"{mission_id} is not a valid mission identifier"
         raise ValueError(err_str)

     return epsg


-def get_snr_cr(img: np.ndarray, cutoff_percentile: float=3.0):
-    '''
+def get_snr_cr(img: np.ndarray, cutoff_percentile: float = 3.0):
+    """
     Estimate the signal-to-noise ration (SNR) of the corner reflector
     contained in img in the input image patch

@@ -605,20 +619,18 @@
     -------
     snr_cr_db: float
         SNR of the peak in decibel (db)
-    '''
+    """

-    power_arr = img.real ** 2 + img.imag ** 2
+    power_arr = img.real**2 + img.imag**2

     # build up the mask array
     thres_low = np.nanpercentile(power_arr, cutoff_percentile)
     thres_high = np.nanpercentile(power_arr, 100 - cutoff_percentile)
-    mask_threshold = np.logical_and(power_arr < thres_low,
-                                    power_arr > thres_high)
-    mask_invalid_pixel = np.logical_and(power_arr <= 0.0,
-                                        np.isnan(power_arr))
-    ma_power_arr = np.ma.masked_array(power_arr,
-                                      mask=np.logical_and(mask_threshold,
-                                                          mask_invalid_pixel))
+    mask_threshold = np.logical_or(power_arr < thres_low, power_arr > thres_high)
+    mask_invalid_pixel = np.logical_or(power_arr <= 0.0, np.isnan(power_arr))
+    ma_power_arr = np.ma.masked_array(
+        power_arr, mask=np.logical_or(mask_threshold, mask_invalid_pixel)
+    )

     peak_power = power_arr.max()
mean_background_power = np.mean(ma_power_arr)
@@ -629,56 +641,127 @@ def get_snr_cr(img: np.ndarray, cutoff_percentile: float=3.0):


 def create_parser():
-    '''
+    """
     Generate command line parser
-    '''
+    """
     parser = argparse.ArgumentParser(
-        description="Compute absolute geolocation error (AGE) for geocoded SLC"
-        "from Sentinel-1 or NISAR missions",
-        formatter_class=argparse.ArgumentDefaultsHelpFormatter
+        description=(
+            "Compute absolute geolocation error (AGE) for geocoded SLC "
+            "from Sentinel-1 or NISAR missions"
+        ),
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
     )
     parser._action_groups.pop()
-    required = parser.add_argument_group('required arguments')
-    optional = parser.add_argument_group('optional arguments')
-
-    required.add_argument('-p', '--cslc-s1', required=True, dest='cslc_file',
-                          help='File path to geocoded SLC product')
-    required.add_argument('-c', '--cr-file', required=True, dest='cr_file',
-                          help='File path to CSV corner reflector position file')
-    optional.add_argument('-s', '--save-csv', dest='save_csv', default=None,
-                          help='File path to save AGE results in CSV format')
-    optional.add_argument('-i', '--plot-age', dest='plot_age', default=False,
-                          help='If True, plots AGE results ')
-    optional.add_argument('-t', '--set', dest='set', default=False,
-                          help='If True, corrects CSV corner reflector positions for Solid Earth Tides (default: False)')
-    optional.add_argument('-m', '--mission-id', dest='mission_id', default='S1',
-                          help='Mission identifier; S1: Sentinel1, NI: NISAR')
-    optional.add_argument('-pol', '--polarization', dest='pol', default='VV',
-                          help='Polarization channel to use to evaluate AGE ')
-    optional.add_argument('-o', '--ovs', dest='ovs_factor', default=128, type=int,
-                          help='Oversample factor for determining CR location in the '
-                               'geocoded SLC with sub-pixel accuracy')
-    optional.add_argument('-mm', '--margin', dest='margin', default=32, type=int,
-                          help='Padding margin around CR position detected in the geocoded SLC '
-                               'image. Actual margin is 2*margin from left-to-right and from'
-                               'top-to-bottom')
-    optional.add_argument('-r', '--azimuth-ramp', dest='apply_az_ramp', default=True,
-                          help='If True, removes azimuth carrier ramp prior to CR peak location. '
-                               'This option should be set to True for S1-A/B AGE analyses')
-    optional.add_argument('-u', '--unflatten', dest='unflatten', default=True,
-                          help='If True, adds back the flatten phase. This option should be set'
-                               'to True if the geocoded SLC product has not been flattened')
+    required = parser.add_argument_group("required arguments")
+    optional = parser.add_argument_group("optional arguments")
+
+    required.add_argument(
+        "-p",
+        "--cslc-s1",
+        required=True,
+        dest="cslc_file",
+        help="File path to geocoded SLC product",
+    )
+    required.add_argument(
+        "-c",
+        "--cr-file",
+        required=True,
+        dest="cr_file",
+        help="File path to CSV corner reflector position file",
+    )
+    optional.add_argument(
+        "-s",
+        "--save-csv",
+        dest="save_csv",
+        default=None,
+        help="File path to save AGE results in CSV format",
+    )
+    optional.add_argument(
+        "-i",
+        "--plot-age",
+        dest="plot_age",
+        default=False,
+        help="If True, plots AGE results",
+    )
+    optional.add_argument(
+        "-t",
+        "--set",
+        dest="set",
+        default=False,
+        help=(
+            "If True, corrects CSV corner reflector positions for Solid Earth Tides"
+            " (default: False)"
+        ),
+    )
+    optional.add_argument(
+        "-m",
+        "--mission-id",
+        dest="mission_id",
+        default="S1",
+        help="Mission identifier; S1: Sentinel1, NI: NISAR",
+    )
+    optional.add_argument(
+        "-pol",
+        "--polarization",
+        dest="pol",
+        default="VV",
+        help="Polarization channel to use to evaluate AGE",
+    )
+    optional.add_argument(
+        "-o",
+        "--ovs",
+        dest="ovs_factor",
+        default=128,
+        type=int,
+        help=(
+            "Oversample factor for determining CR location in the "
+            "geocoded SLC with sub-pixel accuracy"
+        ),
+    )
+    optional.add_argument(
+        "-mm",
+        "--margin",
+        dest="margin",
+        default=32,
+        type=int,
+        help=(
+            "Padding margin around CR position detected in the geocoded SLC "
+            "image. Actual margin is 2*margin from left-to-right and from "
+            "top-to-bottom"
+        ),
+    )
+    optional.add_argument(
+        "-r",
+        "--azimuth-ramp",
+        dest="apply_az_ramp",
+        default=True,
+        help=(
+            "If True, removes azimuth carrier ramp prior to CR peak location. "
+            "This option should be set to True for S1-A/B AGE analyses"
+        ),
+    )
+    optional.add_argument(
+        "-u",
+        "--unflatten",
+        dest="unflatten",
+        default=True,
+        help=(
+            "If True, adds back the flatten phase. This option should be set "
+            "to True if the geocoded SLC product has not been flattened"
+        ),
+    )

     return parser.parse_args()


 def main():
-    '''
+    """
     Create command line interface and run geolocation error script
-    '''
+    """
     args = create_parser()

-    run(cslc_file=args.cslc_file,
+    run(
+        cslc_file=args.cslc_file,
         cr_file=args.cr_file,
         csv_output_file=args.save_csv,
         plot_age=args.plot_age,
@@ -688,8 +771,9 @@ def main():
         ovs_factor=args.ovs_factor,
         margin=args.margin,
         apply_az_ramp=args.apply_az_ramp,
-        unflatten=args.unflatten)
+        unflatten=args.unflatten,
+    )


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/src/compass/utils/browse_image.py b/src/compass/utils/browse_image.py
index ba0ef807..9bc0bf91 100644
--- a/src/compass/utils/browse_image.py
+++ b/src/compass/utils/browse_image.py
@@ -1,6 +1,7 @@
-'''
+"""
 function to generate CSLC browse image and image manipulation helper functions
-'''
+"""
+
 import argparse

 import h5py
@@ -13,7 +14,7 @@


 def _scale_to_max_pixel_dimension(orig_shape, max_dim_allowed=2048):
-    '''
+    """
     Scale up or down length and width represented by a shape to a maximum
     dimension. The larger of length or width used to compute scaling ratio.
@@ -28,7 +29,7 @@ def _scale_to_max_pixel_dimension(orig_shape, max_dim_allowed=2048): ------- _: list(int) Shape (length, width) scaled up or down from original shape - ''' + """ # compute scaling ratio based on larger dimension scaling_ratio = max([xy / max_dim_allowed for xy in orig_shape]) @@ -38,7 +39,7 @@ def _scale_to_max_pixel_dimension(orig_shape, max_dim_allowed=2048): def _clip_by_percentage(image, percent_low, percent_high): - ''' + """ Clip image by low and high percentiles Parameters @@ -58,9 +59,9 @@ def _clip_by_percentage(image, percent_low, percent_high): Minimum value of image determined by percent_low vmax_: float Maximum value of image determined by percent_high - ''' + """ if percent_high <= percent_low: - raise ValueError('upper percentile not > lower percentile') + raise ValueError("upper percentile not > lower percentile") # get max/min values by percentile vmax = np.nanpercentile(image, percent_high) @@ -74,7 +75,7 @@ def _clip_by_percentage(image, percent_low, percent_high): def _normalize_apply_gamma(image, vmin, vmax, gamma=1.0): - ''' + """ Normal and gamma correct an image array Parameters @@ -92,9 +93,9 @@ def _normalize_apply_gamma(image, vmin, vmax, gamma=1.0): ------- image: np.ndarray Normalized and gamma corrected image - ''' + """ if vmax <= vmin: - raise ValueError(f'maximum value {vmax} not > minimum value {vmin}') + raise ValueError(f"maximum value {vmax} not > minimum value {vmin}") # scale to 0-1 for gray scale and then apply gamma correction image = (image - vmin) / (vmax - vmin) @@ -107,7 +108,7 @@ def _normalize_apply_gamma(image, vmin, vmax, gamma=1.0): def _image_histogram_equalization(image, number_bins=256): - ''' + """ Apply histogram equalization to an image array Parameters @@ -125,21 +126,22 @@ def _image_histogram_equalization(image, number_bins=256): Reference --------- http://www.janeriksolem.net/histogram-equalization-with-python-and.html - ''' + """ if number_bins <= 0: - raise ValueError('number of histogram bins must be >= 1') + raise ValueError("number of histogram bins must be >= 1") mask = np.isnan(image) # get image histogram based on non-nan values - image_histogram, bins = np.histogram(image[~mask].flatten(), - number_bins, density=True) + image_histogram, bins = np.histogram( + image[~mask].flatten(), number_bins, density=True + ) # cumulative distribution function cdf = image_histogram.cumsum() # normalize - cdf = (number_bins-1) * cdf / cdf[-1] + cdf = (number_bins - 1) * cdf / cdf[-1] # use linear interpolation of cdf to find new pixel values image_eq = np.interp(image.flatten(), bins[:-1], cdf).reshape(image.shape) @@ -149,7 +151,7 @@ def _image_histogram_equalization(image, number_bins=256): def _save_to_disk_as_greyscale(image, fname): - ''' + """ Save image array as greyscale to file Parameters @@ -158,7 +160,7 @@ def _save_to_disk_as_greyscale(image, fname): Numpy array representing an image to be saved to png file fname: str File name of output browse image - ''' + """ # scale to 1-255 # 0 reserved for transparency nan_mask = np.isnan(image) @@ -168,13 +170,21 @@ def _save_to_disk_as_greyscale(image, fname): image[nan_mask] = 0 # save to disk in grayscale ('L') - img = Image.fromarray(image, mode='L') + img = Image.fromarray(image, mode="L") img.save(fname, transparency=0) -def make_browse_image(filename, path_h5, bursts, complex_to_real='amplitude', percent_low=0.0, - percent_high=100.0, gamma=1.0, equalize=False): - ''' +def make_browse_image( + filename, + path_h5, + bursts, + complex_to_real="amplitude", + 
percent_low=0.0, + percent_high=100.0, + gamma=1.0, + equalize=False, +): + """ Make browse image(s) for geocoded CSLC raster(s) Parameters @@ -196,16 +206,16 @@ def make_browse_image(filename, path_h5, bursts, complex_to_real='amplitude', pe Exponent value used to gamma correct image equalize: bool Enable/disable histogram equalization - ''' + """ # determine how to transform complex imagery in gdal warp - if complex_to_real not in ['amplitude', 'intensity', 'logamplitude']: - raise ValueError(f'{complex_to_real} invalid complex to real transform') - derived_ds_str = f'DERIVED_SUBDATASET:{complex_to_real.upper()}' + if complex_to_real not in ["amplitude", "intensity", "logamplitude"]: + raise ValueError(f"{complex_to_real} invalid complex to real transform") + derived_ds_str = f"DERIVED_SUBDATASET:{complex_to_real.upper()}" # prepend transform to NETCDF path to grid - derived_netcdf_to_grid = f'{derived_ds_str}:NETCDF:{path_h5}:/{DATA_PATH}' + derived_netcdf_to_grid = f"{derived_ds_str}:NETCDF:{path_h5}:/{DATA_PATH}" - with h5py.File(path_h5, 'r', swmr=True) as h5_obj: + with h5py.File(path_h5, "r", swmr=True) as h5_obj: grid_group = h5_obj[DATA_PATH] for b in bursts: @@ -217,31 +227,34 @@ def make_browse_image(filename, path_h5, bursts, complex_to_real='amplitude', pe browse_h, browse_w = _scale_to_max_pixel_dimension(full_shape) # create in memory GDAL raster for GSLC as real value array - src_raster = f'{derived_netcdf_to_grid}/{pol}' + src_raster = f"{derived_netcdf_to_grid}/{pol}" min_x, max_x, min_y, max_y = get_georaster_bounds(path_h5, pol) # Check if the raster crosses antimeridian if max_x - min_x > 180.0: - gdal.SetConfigOption('CENTER_LONG', '180') + gdal.SetConfigOption("CENTER_LONG", "180") # Adjust the min / max in the X direction (longitude) min_x, max_x = max_x, min_x + 360.0 else: - gdal.SetConfigOption('CENTER_LONG', None) + gdal.SetConfigOption("CENTER_LONG", None) # gdal warp to right geo extents, image shape and EPSG - ds_wgs84 = gdal.Warp('', src_raster, format='MEM', - dstSRS='EPSG:4326', - width=browse_w, height=browse_h, - resampleAlg=gdal.GRIORA_Bilinear, - dstNodata=float('nan'), - outputBounds=(min_x, min_y, max_x, max_y) - ) + ds_wgs84 = gdal.Warp( + "", + src_raster, + format="MEM", + dstSRS="EPSG:4326", + width=browse_w, + height=browse_h, + resampleAlg=gdal.GRIORA_Bilinear, + dstNodata=float("nan"), + outputBounds=(min_x, min_y, max_x, max_y), + ) image = ds_wgs84.ReadAsArray() # get hi/lo values by percentile - image, vmin, vmax = _clip_by_percentage(image, percent_low, - percent_high) + image, vmin, vmax = _clip_by_percentage(image, percent_low, percent_high) if equalize: image = _image_histogram_equalization(image) @@ -255,28 +268,51 @@ def make_browse_image(filename, path_h5, bursts, complex_to_real='amplitude', pe if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Create browse images for the geocode cslc workflow from command line', - formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('run-config-path', nargs='?', - default=None, help='Path to run config file') - parser.add_argument('-o', '--out-fname', - help='Path to output png file') - parser.add_argument('-c', '--complex-to-real', - choices=['amplitude', 'intensity', 'logamplitude'], - default='amplitude', help='Method to convert complex data to real') - parser.add_argument('-l', '--percent-low', type=float, default=0.0, - help='Lower percentage of non-NaN pixels to be clipped') - parser.add_argument('-u', '--percent-up', type=float, 
default=100.0,
-                        help='Upper percentage of non-NaN pixels to be clipped')
-    parser.add_argument('-g', '--gamma', type=float, default=0.5,
-                        help='Exponent value used for gamma correction')
-    parser.add_argument('-e', '--equalize', action='store_true',
-                        help='Enable histogram equalization')
+    parser = argparse.ArgumentParser(
+        description=(
+            "Create browse images for the geocode cslc workflow from command line"
+        ),
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+    )
+    parser.add_argument(
+        "run_config_path", nargs="?", default=None, help="Path to run config file"
+    )
+    parser.add_argument("-o", "--out-fname", help="Path to output png file")
+    parser.add_argument(
+        "-c",
+        "--complex-to-real",
+        choices=["amplitude", "intensity", "logamplitude"],
+        default="amplitude",
+        help="Method to convert complex data to real",
+    )
+    parser.add_argument(
+        "-l",
+        "--percent-low",
+        type=float,
+        default=0.0,
+        help="Lower percentage of non-NaN pixels to be clipped",
+    )
+    parser.add_argument(
+        "-u",
+        "--percent-up",
+        type=float,
+        default=100.0,
+        help="Upper percentage of non-NaN pixels to be clipped",
+    )
+    parser.add_argument(
+        "-g",
+        "--gamma",
+        type=float,
+        default=0.5,
+        help="Exponent value used for gamma correction",
+    )
+    parser.add_argument(
+        "-e", "--equalize", action="store_true", help="Enable histogram equalization"
+    )

     args = parser.parse_args()

     # Get a runconfig dict from command line argumens
-    cfg = GeoRunConfig.load_from_yaml(args.run_config_path,
-                                      workflow_name='s1_cslc_geo')
+    cfg = GeoRunConfig.load_from_yaml(args.run_config_path, workflow_name="s1_cslc_geo")

     # unpack args to make browse image
     bursts = cfg.bursts
@@ -287,6 +323,13 @@ def make_browse_image(filename, path_h5, bursts, complex_to_real='amplitude', pe
     output_hdf5 = out_paths.hdf5_path

     # Run geocode burst workflow
-    make_browse_image(args.out_fname, output_hdf5, bursts,
-                      args.complex_to_real, args.percent_low, args.percent_up,
-                      args.gamma, args.equalize)
+    make_browse_image(
+        args.out_fname,
+        output_hdf5,
+        bursts,
+        args.complex_to_real,
+        args.percent_low,
+        args.percent_up,
+        args.gamma,
+        args.equalize,
+    )
diff --git a/src/compass/utils/elevation_antenna_pattern.py b/src/compass/utils/elevation_antenna_pattern.py
index d762e6f6..5071d426 100644
--- a/src/compass/utils/elevation_antenna_pattern.py
+++ b/src/compass/utils/elevation_antenna_pattern.py
@@ -1,13 +1,12 @@
-''' A routine to apply elevation antenna pattern correction
-(EAP correction)'''
-
+"""A routine to apply elevation antenna pattern correction
+(EAP correction)"""

 import numpy as np
 from osgeo import gdal


 def apply_eap_correction(burst, path_slc_vrt, path_slc_corrected, check_eap):
-    '''
+    """
     Apply Elevation Antenna Pattern correction (EAP correction)
     on the input burst

@@ -23,7 +22,7 @@
         A namespace that contains flags if phase and/or
         magnitude EAP correction are necessary

-    '''
+    """
     # Retrieve the EAP correction in range
     vec_eap_line = burst.eap_compensation_lut

@@ -37,18 +36,18 @@
     arr_slc_in = slc_in.ReadAsArray()

     # Shape the correction vector to the size of burst
-    array_eap_line = ( vec_eap_line[np.newaxis, ...]
-                       * np.ones((arr_slc_in.shape[0], 1)))
+    array_eap_line = vec_eap_line[np.newaxis, ...] * np.ones((arr_slc_in.shape[0], 1))

     # Apply the correction
     arr_slc_corrected = arr_slc_in / array_eap_line

     # Write out the EAP-corrected SLC
     dtype = slc_in.GetRasterBand(1).DataType
-    drvout = gdal.GetDriverByName('GTiff')
-    raster_out = drvout.Create(path_slc_corrected, burst.shape[1],
-                               burst.shape[0], 1, dtype)
+    drvout = gdal.GetDriverByName("GTiff")
+    raster_out = drvout.Create(
+        path_slc_corrected, burst.shape[1], burst.shape[0], 1, dtype
+    )
     band_out = raster_out.GetRasterBand(1)
     band_out.WriteArray(arr_slc_corrected)
     band_out.FlushCache()
-    del band_out
\ No newline at end of file
+    del band_out
diff --git a/src/compass/utils/geo_grid.py b/src/compass/utils/geo_grid.py
index fa681870..e38fd673 100644
--- a/src/compass/utils/geo_grid.py
+++ b/src/compass/utils/geo_grid.py
@@ -1,6 +1,6 @@
-'''
+"""
 Collection of function for determining and setting the geogrid
-'''
+"""

 import numpy as np
 import journal
@@ -10,8 +10,9 @@
 from compass.utils import helpers


+
 def assign_check_epsg(epsg, epsg_default):
-    '''
+    """
     Assign and check user-defined epsg

     Parameters
@@ -25,15 +26,15 @@
     -------
     epsg: int
         Checked EPSG code to use in geogrid
-    '''
-    if epsg is None: epsg = epsg_default
+    """
+    if epsg is None:
+        epsg = epsg_default
     assert 1024 <= epsg <= 32767
     return epsg


-def assign_check_spacing(x_spacing, y_spacing,
-                         x_default_spacing, y_default_spacing):
-    '''
+def assign_check_spacing(x_spacing, y_spacing, x_default_spacing, y_default_spacing):
+    """
     Check validity of input spacings and assign default spacings
     if one or both input spacings are None

@@ -54,8 +55,8 @@
         Verified geogrid spacing along X-direction
     y_spacing: float
         Verified geogrid spacing along Y-direction
-    '''
-    error_channel = journal.error('geogrid.assign_check_spacing')
+    """
+    error_channel = journal.error("geogrid.assign_check_spacing")

     # Check tha x-y_spacings are valid (positive)
     if y_spacing is not None:
@@ -73,19 +74,24 @@
     # Check that x-y_spacings have been correctly assigned
     # (check on default spacings)
     if x_spacing <= 0:
-        err_str = f'Pixel spacing in X/longitude direction needs to be >=0 (x_spacing: {x_spacing})'
+        err_str = (
+            "Pixel spacing in X/longitude direction needs to be > 0 (x_spacing:"
+            f" {x_spacing})"
+        )
         error_channel.log(err_str)
         raise ValueError(err_str)
     if y_spacing >= 0:
-        err_str = f'Pixel spacing in Y/latitude direction needs to be <=0 (y_spacing: {y_spacing})'
+        err_str = (
+            "Pixel spacing in Y/latitude direction needs to be < 0 (y_spacing:"
+            f" {y_spacing})"
+        )
         error_channel.log(err_str)
         raise ValueError(err_str)
     return x_spacing, y_spacing


-def assign_check_geogrid(geo_grid, x_start=None, y_start=None,
-                         x_end=None, y_end=None):
-    '''
+def assign_check_geogrid(geo_grid, x_start=None, y_start=None, x_end=None, y_end=None):
+    """
     Initialize geogrid with user defined parameters.
Check the validity of user-defined parameters @@ -106,42 +112,46 @@ def assign_check_geogrid(geo_grid, x_start=None, y_start=None, ------- geo_grid: isce3.product.geogrid ISCE3 geogrid initialized with user-defined inputs - ''' + """ # Check assigned input coordinates and initialize geogrid accordingly if None in [x_start, y_start, x_end, y_end]: if x_start is not None: new_end_x = geo_grid.start_x + geo_grid.spacing_x * geo_grid.width geo_grid.start_x = x_start - geo_grid.width = int(np.ceil((new_end_x - x_start) / - geo_grid.spacing_x)) + geo_grid.width = int(np.ceil((new_end_x - x_start) / geo_grid.spacing_x)) # Restore geogrid end point if provided by the user if x_end is not None: - geo_grid.width = int(np.ceil((x_end - geo_grid.start_x) / - geo_grid.spacing_x)) + geo_grid.width = int( + np.ceil((x_end - geo_grid.start_x) / geo_grid.spacing_x) + ) if y_start is not None: new_end_y = geo_grid.start_y + geo_grid.spacing_y * geo_grid.length geo_grid.start_y = y_start - geo_grid.length = int(np.ceil((new_end_y - y_start) / - geo_grid.spacing_y)) + geo_grid.length = int(np.ceil((new_end_y - y_start) / geo_grid.spacing_y)) if y_end is not None: - geo_grid.length = int(np.ceil((y_end - geo_grid.start_y) / - geo_grid.spacing_y)) + geo_grid.length = int( + np.ceil((y_end - geo_grid.start_y) / geo_grid.spacing_y) + ) else: # If all the start/end coordinates have been assigned, # initialize the geogrid with them width = _grid_size(x_end, x_start, geo_grid.spacing_x) length = _grid_size(y_end, y_start, geo_grid.spacing_y) - geo_grid = isce3.product.GeoGridParameters(x_start, y_start, - geo_grid.spacing_x, - geo_grid.spacing_y, - width, length, - geo_grid.epsg) + geo_grid = isce3.product.GeoGridParameters( + x_start, + y_start, + geo_grid.spacing_x, + geo_grid.spacing_y, + width, + length, + geo_grid.epsg, + ) return geo_grid def check_geogrid_endpoints(geo_grid, x_end=None, y_end=None): - ''' + """ Check validity of geogrid end points Parameters @@ -159,7 +169,7 @@ def check_geogrid_endpoints(geo_grid, x_end=None, y_end=None): Verified geogrid bottom-right X coordinate y_end: float Verified geogrid bottom-right Y coordinate - ''' + """ end_pt = lambda start, sz, spacing: start + spacing * sz if x_end is None: @@ -170,7 +180,7 @@ def check_geogrid_endpoints(geo_grid, x_end=None, y_end=None): def check_snap_values(x_snap, y_snap, x_spacing, y_spacing): - ''' + """ Check validity of snap values Parameters @@ -183,33 +193,39 @@ def check_snap_values(x_snap, y_snap, x_spacing, y_spacing): Spacing of the geogrid along X-direction y_spacing: float Spacing of the geogrid along Y-direction - ''' - error_channel = journal.error('geogrid.check_snap_values') + """ + error_channel = journal.error("geogrid.check_snap_values") # Check that snap values in X/Y-directions are positive if x_snap is not None and x_snap <= 0: - err_str = f'Snap value in X direction must be > 0 (x_snap: {x_snap})' + err_str = f"Snap value in X direction must be > 0 (x_snap: {x_snap})" error_channel.log(err_str) raise ValueError(err_str) if y_snap is not None and y_snap <= 0: - err_str = f'Snap value in Y direction must be > 0 (y_snap: {y_snap})' + err_str = f"Snap value in Y direction must be > 0 (y_snap: {y_snap})" error_channel.log(err_str) raise ValueError(err_str) # Check that snap values in X/Y are integer multiples of the geogrid # spacings in X/Y directions if x_snap is not None and x_snap % x_spacing != 0.0: - err_str = 'x_snap must be exact multiple of spacing in X direction (x_snap % x_spacing !=0)' + err_str = ( + "x_snap 
must be exact multiple of spacing in X direction (x_snap %" + " x_spacing !=0)" + ) error_channel.log(err_str) raise ValueError(err_str) if y_snap is not None and y_snap % y_spacing != 0.0: - err_str = 'y_snap must be exact multiple of spacing in Y direction (y_snap % y_spacing !=0)' + err_str = ( + "y_snap must be exact multiple of spacing in Y direction (y_snap %" + " y_spacing !=0)" + ) error_channel.log(err_str) raise ValueError(err_str) def snap_geogrid(geo_grid, x_snap, y_snap, x_end, y_end): - ''' + """ Snap geogrid based on user-defined snapping values Parameters @@ -229,25 +245,25 @@ def snap_geogrid(geo_grid, x_snap, y_snap, x_end, y_end): ------- geo_grid: isce3.product.geogrid ISCE3 object containing the snapped geogrid - ''' - if x_end is None: x_end = geo_grid.end_x - if y_end is None: y_end = geo_grid.end_y + """ + if x_end is None: + x_end = geo_grid.end_x + if y_end is None: + y_end = geo_grid.end_y if x_snap is not None or y_snap is not None: - snap_coord = lambda val, snap, round_func: round_func( - float(val) / snap) * snap + snap_coord = lambda val, snap, round_func: round_func(float(val) / snap) * snap geo_grid.start_x = snap_coord(geo_grid.start_x, x_snap, np.floor) geo_grid.start_y = snap_coord(geo_grid.start_y, y_snap, np.ceil) end_x = snap_coord(x_end, x_snap, np.ceil) end_y = snap_coord(y_end, y_snap, np.floor) - geo_grid.length = _grid_size(end_y, geo_grid.start_y, - geo_grid.spacing_y) + geo_grid.length = _grid_size(end_y, geo_grid.start_y, geo_grid.spacing_y) geo_grid.width = _grid_size(end_x, geo_grid.start_x, geo_grid.spacing_x) return geo_grid def get_point_epsg(lat, lon): - ''' + """ Get EPSG code based on latitude and longitude coordinates of a point @@ -262,8 +278,8 @@ def get_point_epsg(lat, lon): ------- epsg: int UTM zone - ''' - error_channel = journal.error('geogrid.get_point_epsg') + """ + error_channel = journal.error("geogrid.get_point_epsg") if lon >= 180.0: lon = lon - 360.0 @@ -283,7 +299,7 @@ def get_point_epsg(lat, lon): def generate_geogrids_from_db(bursts, geo_dict, dem, burst_db_file): - ''' Create a geogrid for all bursts in given list from provided burst + """Create a geogrid for all bursts in given list from provided burst database Parameters @@ -301,14 +317,14 @@ def generate_geogrids_from_db(bursts, geo_dict, dem, burst_db_file): ------- geo_grids: dict Dict of burst ID keys to isce3.product.GeoGridParameters values - ''' + """ dem_raster = isce3.io.Raster(dem) # Unpack values from geocoding dictionary - x_spacing_dict = geo_dict['x_posting'] - y_spacing_dict = geo_dict['y_posting'] - x_snap_dict = geo_dict['x_snap'] - y_snap_dict = geo_dict['y_snap'] + x_spacing_dict = geo_dict["x_posting"] + y_spacing_dict = geo_dict["y_posting"] + x_snap_dict = geo_dict["x_snap"] + y_snap_dict = geo_dict["y_snap"] geo_grids = {} @@ -332,19 +348,19 @@ def generate_geogrids_from_db(bursts, geo_dict, dem, burst_db_file): # Check spacing in X/Y direction if epsg == dem_raster.get_epsg(): - x_spacing, y_spacing = assign_check_spacing(x_spacing_dict, - y_spacing_dict, - 4.5e-5, 9.0e-5) + x_spacing, y_spacing = assign_check_spacing( + x_spacing_dict, y_spacing_dict, 4.5e-5, 9.0e-5 + ) else: # Assign spacing in meters - x_spacing, y_spacing = assign_check_spacing(x_spacing_dict, - y_spacing_dict, - 5.0, 10.0) + x_spacing, y_spacing = assign_check_spacing( + x_spacing_dict, y_spacing_dict, 5.0, 10.0 + ) # Initialize geogrid with the info checked at this stage - geo_grid_in = isce3.product.bbox_to_geogrid(radar_grid, orbit, - isce3.core.LUT2d(), - 
x_spacing, y_spacing, epsg) + geo_grid_in = isce3.product.bbox_to_geogrid( + radar_grid, orbit, isce3.core.LUT2d(), x_spacing, y_spacing, epsg + ) # Check and further initialize geo_grid geo_grid = assign_check_geogrid(geo_grid_in, xmin, ymax, xmax, ymin) @@ -361,7 +377,7 @@ def generate_geogrids_from_db(bursts, geo_dict, dem, burst_db_file): def generate_geogrids(bursts, geo_dict, dem): - ''' Create a geogrid for all bursts in given list + """Create a geogrid for all bursts in given list Parameters ---------- @@ -376,14 +392,14 @@ def generate_geogrids(bursts, geo_dict, dem): ------- geo_grids: dict Dict of burst ID keys to isce3.product.GeoGridParameters values - ''' + """ dem_raster = isce3.io.Raster(dem) # Unpack values from geocoding dictionary - x_spacing_dict = geo_dict['x_posting'] - y_spacing_dict = geo_dict['y_posting'] - x_snap_dict = geo_dict['x_snap'] - y_snap_dict = geo_dict['y_snap'] + x_spacing_dict = geo_dict["x_posting"] + y_spacing_dict = geo_dict["y_posting"] + x_snap_dict = geo_dict["x_snap"] + y_snap_dict = geo_dict["y_snap"] geo_grids = {} for burst in bursts: @@ -400,19 +416,19 @@ def generate_geogrids(bursts, geo_dict, dem): # Check spacing in X/Y direction if epsg == dem_raster.get_epsg(): - x_spacing, y_spacing = assign_check_spacing(x_spacing_dict, - y_spacing_dict, - 4.5e-5, 9.0e-5) + x_spacing, y_spacing = assign_check_spacing( + x_spacing_dict, y_spacing_dict, 4.5e-5, 9.0e-5 + ) else: # Assign spacing in meters - x_spacing, y_spacing = assign_check_spacing(x_spacing_dict, - y_spacing_dict, - 5.0, 10.0) + x_spacing, y_spacing = assign_check_spacing( + x_spacing_dict, y_spacing_dict, 5.0, 10.0 + ) # Initialize geogrid with the info checked at this stage - geo_grid = isce3.product.bbox_to_geogrid(radar_grid, orbit, - isce3.core.LUT2d(), - x_spacing, y_spacing, epsg) + geo_grid = isce3.product.bbox_to_geogrid( + radar_grid, orbit, isce3.core.LUT2d(), x_spacing, y_spacing, epsg + ) # Check end point of geogrid before compute snaps x_end, y_end = check_geogrid_endpoints(geo_grid) @@ -427,6 +443,9 @@ def generate_geogrids(bursts, geo_dict, dem): def geogrid_as_dict(grid): - geogrid_dict = {attr:getattr(grid, attr) for attr in grid.__dir__() - if attr != 'print' and attr[:2] != '__'} + geogrid_dict = { + attr: getattr(grid, attr) + for attr in grid.__dir__() + if attr != "print" and attr[:2] != "__" + } return geogrid_dict diff --git a/src/compass/utils/geo_runconfig.py b/src/compass/utils/geo_runconfig.py index 46710278..b6dbfb29 100644 --- a/src/compass/utils/geo_runconfig.py +++ b/src/compass/utils/geo_runconfig.py @@ -10,43 +10,48 @@ import journal from ruamel.yaml import YAML -from compass.utils.geo_grid import (generate_geogrids_from_db, - generate_geogrids, geogrid_as_dict) +from compass.utils.geo_grid import ( + generate_geogrids_from_db, + generate_geogrids, + geogrid_as_dict, +) from compass.utils.helpers import check_file_path from compass.utils.runconfig import ( create_output_paths, runconfig_to_bursts, load_validate_yaml, - RunConfig) + RunConfig, +) from compass.utils.wrap_namespace import wrap_namespace def check_geocode_dict(geocode_cfg: dict) -> None: - error_channel = journal.error('runconfig.check_geocode_dict') + error_channel = journal.error("runconfig.check_geocode_dict") - for xy in 'xy': + for xy in "xy": # check posting value in current axis - posting_key = f'{xy}_posting' + posting_key = f"{xy}_posting" if geocode_cfg[posting_key] is not None: posting = geocode_cfg[posting_key] if posting <= 0: - err_str = '{xy} posting from config of 
{posting} <= 0'
+                err_str = f"{xy} posting from config of {posting} <= 0"
                 error_channel.log(err_str)
                 raise ValueError(err_str)

         # check snap value in current axis
-        snap_key = f'{xy}_snap'
+        snap_key = f"{xy}_snap"
         if geocode_cfg[snap_key] is not None:
             snap = geocode_cfg[snap_key]
             if snap <= 0:
-                err_str = f'{xy} snap from config of {snap} <= 0'
+                err_str = f"{xy} snap from config of {snap} <= 0"
                 error_channel.log(err_str)
                 raise ValueError(err_str)


 @dataclass(frozen=True)
 class GeoRunConfig(RunConfig):
-    '''dataclass containing GSLC runconfig'''
+    """dataclass containing GSLC runconfig"""
+
     # dict of geogrids associated to burst IDs
     geogrids: dict[str, GeoGridParameters]

@@ -62,30 +67,32 @@ def load_from_yaml(cls, yaml_runconfig: str, workflow_name: str) -> GeoRunConfig
         workflow_name: str
             Name of the workflow for which uploading default options
         """
-        error_channel = journal.error('runconfig.load_from_yaml')
+        error_channel = journal.error("runconfig.load_from_yaml")

         cfg = load_validate_yaml(yaml_runconfig, workflow_name)
-        groups_cfg = cfg['runconfig']['groups']
+        groups_cfg = cfg["runconfig"]["groups"]

-        burst_database_file = groups_cfg['static_ancillary_file_group']['burst_database_file']
+        burst_database_file = groups_cfg["static_ancillary_file_group"][
+            "burst_database_file"
+        ]
         if not os.path.isfile(burst_database_file):
-            err_str = '{burst_database_file} not found'
+            err_str = f"{burst_database_file} not found"
             error_channel.log(err_str)
             raise FileNotFoundError(err_str)

-        geocoding_dict = groups_cfg['processing']['geocoding']
+        geocoding_dict = groups_cfg["processing"]["geocoding"]
         check_geocode_dict(geocoding_dict)

         # Check TEC file if not None.
         # The ionosphere correction will be applied only if
         # the TEC file is not None.
-        tec_file_path = groups_cfg['dynamic_ancillary_file_group']['tec_file']
+        tec_file_path = groups_cfg["dynamic_ancillary_file_group"]["tec_file"]
        if tec_file_path is not None:
             check_file_path(tec_file_path)

         # Check troposphere weather model file if not None. 
This # troposphere correction is applied only if this file is not None - weather_model_path = groups_cfg['dynamic_ancillary_file_group'][ - 'weather_model_file' + weather_model_path = groups_cfg["dynamic_ancillary_file_group"][ + "weather_model_file" ] if weather_model_path is not None: check_file_path(weather_model_path) @@ -97,12 +104,13 @@ def load_from_yaml(cls, yaml_runconfig: str, workflow_name: str) -> GeoRunConfig bursts = runconfig_to_bursts(sns) # Load geogrids - dem_file = groups_cfg['dynamic_ancillary_file_group']['dem_file'] + dem_file = groups_cfg["dynamic_ancillary_file_group"]["dem_file"] if burst_database_file is None: geogrids = generate_geogrids(bursts, geocoding_dict, dem_file) else: - geogrids = generate_geogrids_from_db(bursts, geocoding_dict, - dem_file, burst_database_file) + geogrids = generate_geogrids_from_db( + bursts, geocoding_dict, dem_file, burst_database_file + ) # Empty reference dict for base runconfig class constructor empty_ref_dict = {} @@ -114,8 +122,15 @@ def load_from_yaml(cls, yaml_runconfig: str, workflow_name: str) -> GeoRunConfig # Get scratch and output paths output_paths = create_output_paths(sns, bursts) - return cls(cfg['runconfig']['name'], sns, bursts, empty_ref_dict, - user_plus_default_yaml_str, output_paths, geogrids) + return cls( + cfg["runconfig"]["name"], + sns, + bursts, + empty_ref_dict, + user_plus_default_yaml_str, + output_paths, + geogrids, + ) @property def product_group(self) -> types.SimpleNamespace: @@ -158,21 +173,21 @@ def output_params(self) -> types.SimpleNamespace: return self.groups.output def as_dict(self): - ''' Convert self to dict for write to YAML/JSON + """Convert self to dict for write to YAML/JSON Unable to dataclasses.asdict() because isce3 objects can not be pickled - ''' + """ # convert to dict first then dump to yaml self_as_dict = super().as_dict() - self_as_dict['geogrids']= {b_id:geogrid_as_dict(geogrid) - for b_id, geogrid in self.geogrids.items()} + self_as_dict["geogrids"] = { + b_id: geogrid_as_dict(geogrid) for b_id, geogrid in self.geogrids.items() + } return self_as_dict - - def to_file(self, dst, fmt:str): - ''' Write self to file + def to_file(self, dst, fmt: str): + """Write self to file Parameter: --------- @@ -180,17 +195,17 @@ def to_file(self, dst, fmt:str): File object to write metadata to fmt: ['yaml', 'json'] Format of output - ''' + """ self_as_dict = self.as_dict() - self_as_dict['nodata'] = 'NO_DATA_VALUE' - self_as_dict['input_data_ipf_version'] = '?' - self_as_dict['isce3_version'] = isce3.__version__ + self_as_dict["nodata"] = "NO_DATA_VALUE" + self_as_dict["input_data_ipf_version"] = "?" + self_as_dict["isce3_version"] = isce3.__version__ - if fmt == 'yaml': - yaml_obj = YAML(typ='safe') + if fmt == "yaml": + yaml_obj = YAML(typ="safe") yaml_obj.dump(self_as_dict, dst) - elif fmt == 'json': + elif fmt == "json": json.dumps(self_as_dict, dst, indent=4) else: raise ValueError(f'{fmt} unsupported. Only "json" or "yaml" supported') diff --git a/src/compass/utils/geometry_utils.py b/src/compass/utils/geometry_utils.py index 8789110f..90e7764d 100644 --- a/src/compass/utils/geometry_utils.py +++ b/src/compass/utils/geometry_utils.py @@ -9,7 +9,8 @@ import numpy as np import isce3 -def los2orbit_azimuth_angle(los_az_angle, look_direction='right'): + +def los2orbit_azimuth_angle(los_az_angle, look_direction="right"): """ Convert the azimuth angle of the LOS vector to the one of the orbit flight vector. The conversion done for this function only works for zero-Doppler geometry. 
@@ -27,15 +28,15 @@ def los2orbit_azimuth_angle(los_az_angle, look_direction='right'): the north with anti-clockwise direction as positive, in the unit of degrees """ - if look_direction == 'right': + if look_direction == "right": orb_az_angle = los_az_angle - 90 else: orb_az_angle = los_az_angle + 90 - orb_az_angle -= np.round(orb_az_angle / 360.) * 360. + orb_az_angle -= np.round(orb_az_angle / 360.0) * 360.0 return orb_az_angle -def azimuth2heading_angle(az_angle, look_direction='right'): +def azimuth2heading_angle(az_angle, look_direction="right"): """ Convert azimuth angle from ISCE los.tif band2 into satellite orbit heading angle ISCE-2 los.* file band2 is azimuth angle of LOS vector from ground target to the satellite @@ -60,15 +61,15 @@ def azimuth2heading_angle(az_angle, look_direction='right'): from the North in anti-clockwise direction as positive """ - if look_direction == 'right': + if look_direction == "right": head_angle = (az_angle - 90) * -1 else: head_angle = (az_angle + 90) * -1 - head_angle -= np.round(head_angle / 360.) * 360. + head_angle -= np.round(head_angle / 360.0) * 360.0 return head_angle -def heading2azimuth_angle(head_angle, look_direction='right'): +def heading2azimuth_angle(head_angle, look_direction="right"): """ Convert satellite orbit heading angle into azimuth angle as defined in ISCE-2 @@ -86,11 +87,11 @@ def heading2azimuth_angle(head_angle, look_direction='right'): Measured from the North in anti-clockwise direction. Same definition as ISCE2 azimuth angle (second band of *los raster) """ - if look_direction == 'right': + if look_direction == "right": az_angle = (head_angle - 90) * -1 else: az_angle = (head_angle + 90) * -1 - az_angle -= np.round(az_angle / 360.) * 360. + az_angle -= np.round(az_angle / 360.0) * 360.0 return az_angle @@ -127,12 +128,14 @@ def enu2los(v_e, v_n, v_u, inc_angle, head_angle=None, az_angle=None): if head_angle is not None: az_angle = heading2azimuth_angle(head_angle) else: - raise ValueError(f'invalid az_angle: {az_angle}!') + raise ValueError(f"invalid az_angle: {az_angle}!") # project ENU onto LOS - v_los = ( v_e * np.sin(np.deg2rad(inc_angle)) * np.sin(np.deg2rad(az_angle)) * -1 - + v_n * np.sin(np.deg2rad(inc_angle)) * np.cos(np.deg2rad(az_angle)) - + v_u * np.cos(np.deg2rad(inc_angle))) + v_los = ( + v_e * np.sin(np.deg2rad(inc_angle)) * np.sin(np.deg2rad(az_angle)) * -1 + + v_n * np.sin(np.deg2rad(inc_angle)) * np.cos(np.deg2rad(az_angle)) + + v_u * np.cos(np.deg2rad(inc_angle)) + ) return v_los @@ -158,8 +161,9 @@ def en2az(v_e, v_n, orb_az_angle): motion along flight direction as positive """ # project EN onto azimuth - v_az = ( v_e * np.sin(np.deg2rad(orb_az_angle)) * -1 - + v_n * np.cos(np.deg2rad(orb_az_angle))) + v_az = v_e * np.sin(np.deg2rad(orb_az_angle)) * -1 + v_n * np.cos( + np.deg2rad(orb_az_angle) + ) return v_az @@ -185,7 +189,9 @@ def calc_azimuth_from_east_north_obs(east, north): return az_angle -def get_unit_vector4component_of_interest(los_inc_angle, los_az_angle, comp='enu2los', horz_az_angle=None): +def get_unit_vector4component_of_interest( + los_inc_angle, los_az_angle, comp="enu2los", horz_az_angle=None +): """ Get the unit vector for the component of interest. 
@@ -211,42 +217,53 @@ def get_unit_vector4component_of_interest(los_inc_angle, los_az_angle, comp='enu # check input arguments comps = [ - 'enu2los', 'en2los', 'hz2los', 'horz2los', 'u2los', 'vert2los', # radar LOS / cross-track - 'en2az', 'hz2az', 'orb_az', 'orbit_az', # radar azimuth / along-track - 'vert', 'vertical', 'horz', 'horizontal', # vertical / horizontal + "enu2los", + "en2los", + "hz2los", + "horz2los", + "u2los", + "vert2los", # radar LOS / cross-track + "en2az", + "hz2az", + "orb_az", + "orbit_az", # radar azimuth / along-track + "vert", + "vertical", + "horz", + "horizontal", # vertical / horizontal ] if comp not in comps: - raise ValueError(f'un-recognized comp input: {comp}.\nchoose from: {comps}') + raise ValueError(f"un-recognized comp input: {comp}.\nchoose from: {comps}") - if comp == 'horz' and horz_az_angle is None: - raise ValueError('comp=horz requires horz_az_angle input!') + if comp == "horz" and horz_az_angle is None: + raise ValueError("comp=horz requires horz_az_angle input!") # initiate output unit_vec = None - if comp in ['enu2los']: + if comp in ["enu2los"]: unit_vec = [ np.sin(np.deg2rad(los_inc_angle)) * np.sin(np.deg2rad(los_az_angle)) * -1, np.sin(np.deg2rad(los_inc_angle)) * np.cos(np.deg2rad(los_az_angle)), np.cos(np.deg2rad(los_inc_angle)), ] - elif comp in ['en2los', 'hz2los', 'horz2los']: + elif comp in ["en2los", "hz2los", "horz2los"]: unit_vec = [ np.sin(np.deg2rad(los_inc_angle)) * np.sin(np.deg2rad(los_az_angle)) * -1, np.sin(np.deg2rad(los_inc_angle)) * np.cos(np.deg2rad(los_az_angle)), np.zeros_like(los_inc_angle), ] - elif comp in ['u2los', 'vert2los']: + elif comp in ["u2los", "vert2los"]: unit_vec = [ np.zeros_like(los_inc_angle), np.zeros_like(los_inc_angle), np.cos(np.deg2rad(los_inc_angle)), ] - elif comp in ['en2az', 'hz2az', 'orb_az', 'orbit_az']: + elif comp in ["en2az", "hz2az", "orb_az", "orbit_az"]: orb_az_angle = los2orbit_azimuth_angle(los_az_angle) unit_vec = [ np.sin(np.deg2rad(orb_az_angle)) * -1, @@ -254,10 +271,10 @@ def get_unit_vector4component_of_interest(los_inc_angle, los_az_angle, comp='enu np.zeros_like(orb_az_angle), ] - elif comp in ['vert', 'vertical']: + elif comp in ["vert", "vertical"]: unit_vec = [0, 0, 1] - elif comp in ['horz', 'horizontal']: + elif comp in ["horz", "horizontal"]: unit_vec = [ np.sin(np.deg2rad(horz_az_angle)) * -1, np.cos(np.deg2rad(horz_az_angle)), @@ -267,11 +284,19 @@ def get_unit_vector4component_of_interest(los_inc_angle, los_az_angle, comp='enu return unit_vec -def enu2rgaz(radargrid_ref, orbit, ellipsoid, - lon_arr, lat_arr, hgt_arr, - e_arr, n_arr, u_arr, - geo2rdr_params=None): - ''' +def enu2rgaz( + radargrid_ref, + orbit, + ellipsoid, + lon_arr, + lat_arr, + hgt_arr, + e_arr, + n_arr, + u_arr, + geo2rdr_params=None, +): + """ Convert ENU displacement into range / azimuth displacement, based on the idea mentioned in ETAD ATBD, available in the link below: https://sentinels.copernicus.eu/documents/247904/4629150/ETAD-DLR-DD-0008_Algorithm-Technical-Baseline-Document_2.3.pdf/5cb45b43-76dc-8dec-04ef-ca1252ace434?t=1680181574715 # noqa @@ -321,7 +346,7 @@ def enu2rgaz(radargrid_ref, orbit, ellipsoid, threshold and max # iterations are set to `1.0e-8` and `25` respectively. 
- ''' + """ if geo2rdr_params is None: # default threshold and # iteration for geo2rdr threshold = 1.0e-8 @@ -342,36 +367,38 @@ def enu2rgaz(radargrid_ref, orbit, ellipsoid, vec_e, vec_n, vec_u = get_enu_vector_ecef(lon_deg, lat_deg) - llh_ref = np.array([np.deg2rad(lon_deg), - np.deg2rad(lat_deg), - hgt]) + llh_ref = np.array([np.deg2rad(lon_deg), np.deg2rad(lat_deg), hgt]) xyz_before = ellipsoid.lon_lat_to_xyz(llh_ref) - xyz_after_set = (xyz_before - + vec_e * e_arr[index_arr] - + vec_n * n_arr[index_arr] - + vec_u * u_arr[index_arr]) + xyz_after_set = ( + xyz_before + + vec_e * e_arr[index_arr] + + vec_n * n_arr[index_arr] + + vec_u * u_arr[index_arr] + ) llh_displaced = ellipsoid.xyz_to_lon_lat(xyz_after_set) - aztime_ref, slant_range_ref =\ - isce3.geometry.geo2rdr(llh_ref, - ellipsoid, - orbit, - isce3.core.LUT2d(), - radargrid_ref.wavelength, - radargrid_ref.lookside, - threshold=threshold, - maxiter=maxiter) - - aztime_displaced, slant_range_displaced =\ - isce3.geometry.geo2rdr(llh_displaced, - ellipsoid, - orbit, - isce3.core.LUT2d(), - radargrid_ref.wavelength, - radargrid_ref.lookside, - threshold=threshold, - maxiter=maxiter) + aztime_ref, slant_range_ref = isce3.geometry.geo2rdr( + llh_ref, + ellipsoid, + orbit, + isce3.core.LUT2d(), + radargrid_ref.wavelength, + radargrid_ref.lookside, + threshold=threshold, + maxiter=maxiter, + ) + + aztime_displaced, slant_range_displaced = isce3.geometry.geo2rdr( + llh_displaced, + ellipsoid, + orbit, + isce3.core.LUT2d(), + radargrid_ref.wavelength, + radargrid_ref.lookside, + threshold=threshold, + maxiter=maxiter, + ) rg_arr[index_arr] = slant_range_displaced - slant_range_ref az_arr[index_arr] = aztime_displaced - aztime_ref @@ -379,8 +406,8 @@ def enu2rgaz(radargrid_ref, orbit, ellipsoid, return rg_arr, az_arr -def get_enu_vector_ecef(lon, lat, units='degrees'): - ''' +def get_enu_vector_ecef(lon, lat, units="degrees"): + """ Calculate the east, north, and up vectors in ECEF for lon / lat provided Parameters @@ -401,27 +428,37 @@ def get_enu_vector_ecef(lon, lat, units='degrees'): unit vector of "north" direction in ECEF vec_u: np.ndarray unit vector of "up" direction in ECEF - ''' - if units == 'degrees': + """ + if units == "degrees": lon_rad = np.deg2rad(lon) lat_rad = np.deg2rad(lat) - elif units == 'radians': + elif units == "radians": lon_rad = lon lat_rad = lat else: - raise ValueError(f'"{units}" was provided for `units`, ' - 'which needs to be either `degrees` or `radians`') + raise ValueError( + f'"{units}" was provided for `units`, ' + "which needs to be either `degrees` or `radians`" + ) # Calculate up, north, and east vectors # reference: https://github.com/isce-framework/isce3/blob/944eba17f4a5b1c88c6a035c2d58ddd0d4f0709c/cxx/isce3/core/Ellipsoid.h#L154-L157 # noqa # https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#From_ECEF_to_ENU # noqa - vec_u = np.array([np.cos(lon_rad) * np.cos(lat_rad), - np.sin(lon_rad) * np.cos(lat_rad), - np.sin(lat_rad)]) + vec_u = np.array( + [ + np.cos(lon_rad) * np.cos(lat_rad), + np.sin(lon_rad) * np.cos(lat_rad), + np.sin(lat_rad), + ] + ) - vec_n = np.array([-np.cos(lon_rad) * np.sin(lat_rad), - -np.sin(lon_rad) * np.sin(lat_rad), - np.cos(lat_rad)]) + vec_n = np.array( + [ + -np.cos(lon_rad) * np.sin(lat_rad), + -np.sin(lon_rad) * np.sin(lat_rad), + np.cos(lat_rad), + ] + ) vec_e = np.cross(vec_n, vec_u, axis=0) diff --git a/src/compass/utils/h5_helpers.py b/src/compass/utils/h5_helpers.py index 92833f4e..41de1163 100644 --- a/src/compass/utils/h5_helpers.py +++ 
b/src/compass/utils/h5_helpers.py @@ -1,6 +1,6 @@ -''' +""" Collection of functions to help write HDF5 datasets and metadata -''' +""" from dataclasses import dataclass, field from datetime import datetime @@ -15,18 +15,19 @@ import compass +TIME_STR_FMT = "%Y-%m-%d %H:%M:%S.%f" +ROOT_PATH = "/" +DATA_PATH = "/data" +QA_PATH = "/quality_assurance" +METADATA_PATH = "/metadata" -TIME_STR_FMT = '%Y-%m-%d %H:%M:%S.%f' -ROOT_PATH = '/' -DATA_PATH = '/data' -QA_PATH = '/quality_assurance' -METADATA_PATH = '/metadata' @dataclass class Meta: - ''' + """ Convenience dataclass for passing parameters to be written to h5py.Dataset - ''' + """ + # Dataset name name: str # Data to be stored in Dataset @@ -38,16 +39,16 @@ class Meta: def _as_np_string_if_needed(val): - ''' + """ If type str encountered, convert and return as np.string_. Otherwise return as is. - ''' + """ val = np.string_(val) if isinstance(val, str) else val return val def add_dataset_and_attrs(group, meta_item): - '''Write isce3.core.Poly1d properties to hdf5 + """Write isce3.core.Poly1d properties to hdf5 Parameters ---------- @@ -55,7 +56,7 @@ def add_dataset_and_attrs(group, meta_item): h5py Group to store poly1d parameters in meta_item: Meta Name of dataset to add - ''' + """ # Ensure it is clear to write by deleting pre-existing Dataset if meta_item.name in group: del group[meta_item.name] @@ -68,19 +69,20 @@ def add_dataset_and_attrs(group, meta_item): else: group[meta_item.name] = val except TypeError: - raise TypeError(f'unable to write {meta_item.name}') + raise TypeError(f"unable to write {meta_item.name}") # Write data and attributes val_ds = group[meta_item.name] desc = _as_np_string_if_needed(meta_item.description) - val_ds.attrs['description'] = desc + val_ds.attrs["description"] = desc for key, val in meta_item.attr_dict.items(): val_ds.attrs[key] = _as_np_string_if_needed(val) -def init_geocoded_dataset(grid_group, dataset_name, geo_grid, dtype, - description, data=None, output_cfg=None): - ''' +def init_geocoded_dataset( + grid_group, dataset_name, geo_grid, dtype, description, data=None, output_cfg=None +): + """ Create and allocate dataset for isce.geocode.geocode_slc to write to that is CF-compliant @@ -106,28 +108,28 @@ def init_geocoded_dataset(grid_group, dataset_name, geo_grid, dtype, ------- cslc_ds: h5py.Dataset NCDF compliant h5py dataset ready to be populated with geocoded raster - ''' + """ # Default to no dataset keyword args output_kwargs = {} # Always set chunks kwarg - output_kwargs['chunks'] = tuple(output_cfg.chunk_size) + output_kwargs["chunks"] = tuple(output_cfg.chunk_size) # If compression is enabled, populate kwargs from runconfig contents if output_cfg.compression_enabled: - output_kwargs['compression'] = 'gzip' - output_kwargs['compression_opts'] = output_cfg.compression_level - output_kwargs['shuffle'] = output_cfg.shuffle + output_kwargs["compression"] = "gzip" + output_kwargs["compression_opts"] = output_cfg.compression_level + output_kwargs["shuffle"] = output_cfg.shuffle shape = (geo_grid.length, geo_grid.width) if data is None: - cslc_ds = grid_group.require_dataset(dataset_name, dtype=dtype, - shape=shape, **output_kwargs) + cslc_ds = grid_group.require_dataset( + dataset_name, dtype=dtype, shape=shape, **output_kwargs + ) else: - cslc_ds = grid_group.create_dataset(dataset_name, data=data, - **output_kwargs) + cslc_ds = grid_group.create_dataset(dataset_name, data=data, **output_kwargs) - cslc_ds.attrs['description'] = description + cslc_ds.attrs["description"] = description # Compute x 
scale dx = geo_grid.spacing_x @@ -143,10 +145,12 @@ def init_geocoded_dataset(grid_group, dataset_name, geo_grid, dtype, # following copied and pasted (and slightly modified) from: # https://github-fn.jpl.nasa.gov/isce-3/isce/wiki/CF-Conventions-and-Map-Projections - x_ds = grid_group.require_dataset('x_coordinates', dtype='float64', - data=x_vect, shape=x_vect.shape) - y_ds = grid_group.require_dataset('y_coordinates', dtype='float64', - data=y_vect, shape=y_vect.shape) + x_ds = grid_group.require_dataset( + "x_coordinates", dtype="float64", data=x_vect, shape=x_vect.shape + ) + y_ds = grid_group.require_dataset( + "y_coordinates", dtype="float64", data=y_vect, shape=y_vect.shape + ) # Mapping of dimension scales to datasets is not done automatically in HDF5 # We should label appropriate arrays as scales and attach them to datasets @@ -157,15 +161,21 @@ def init_geocoded_dataset(grid_group, dataset_name, geo_grid, dtype, cslc_ds.dims[0].attach_scale(y_ds) # Associate grid mapping with data - projection created later - cslc_ds.attrs['grid_mapping'] = np.string_("projection") + cslc_ds.attrs["grid_mapping"] = np.string_("projection") grid_meta_items = [ - Meta('x_spacing', geo_grid.spacing_x, - 'Spacing of the geographical grid along X-direction', - {'units': 'meters'}), - Meta('y_spacing', geo_grid.spacing_y, - 'Spacing of the geographical grid along Y-direction', - {'units': 'meters'}) + Meta( + "x_spacing", + geo_grid.spacing_x, + "Spacing of the geographical grid along X-direction", + {"units": "meters"}, + ), + Meta( + "y_spacing", + geo_grid.spacing_y, + "Spacing of the geographical grid along Y-direction", + {"units": "meters"}, + ), ] for meta_item in grid_meta_items: add_dataset_and_attrs(grid_group, meta_item) @@ -174,25 +184,25 @@ def init_geocoded_dataset(grid_group, dataset_name, geo_grid, dtype, srs = osr.SpatialReference() srs.ImportFromEPSG(geo_grid.epsg) - #Create a new single int dataset for projections - projection_ds = grid_group.require_dataset('projection', (), dtype='i') + # Create a new single int dataset for projections + projection_ds = grid_group.require_dataset("projection", (), dtype="i") projection_ds[()] = geo_grid.epsg # Add description as an attribute to projection - projection_ds.attrs['description'] = np.string_("Projection system") + projection_ds.attrs["description"] = np.string_("Projection system") # WGS84 ellipsoid - projection_ds.attrs['semi_major_axis'] = 6378137.0 - projection_ds.attrs['inverse_flattening'] = 298.257223563 - projection_ds.attrs['ellipsoid'] = np.string_("WGS84") + projection_ds.attrs["semi_major_axis"] = 6378137.0 + projection_ds.attrs["inverse_flattening"] = 298.257223563 + projection_ds.attrs["ellipsoid"] = np.string_("WGS84") # Additional fields - projection_ds.attrs['epsg_code'] = geo_grid.epsg + projection_ds.attrs["epsg_code"] = geo_grid.epsg # CF 1.7+ requires this attribute to be named "crs_wkt" # spatial_ref is old GDAL way. Using that for testing only. 
 # For NISAR replace with "crs_wkt"
-    projection_ds.attrs['spatial_ref'] = np.string_(srs.ExportToWkt())
+    projection_ds.attrs["spatial_ref"] = np.string_(srs.ExportToWkt())
 
     # Here we have handcoded the attributes for the different cases
     # Recommended method is to use pyproj.CRS.to_cf() as shown above
@@ -200,119 +210,130 @@ def init_geocoded_dataset(grid_group, dataset_name, geo_grid, dtype,
     # Geodetic latitude / longitude
     if geo_grid.epsg == 4326:
-        #Set up grid mapping
-        projection_ds.attrs['grid_mapping_name'] = np.string_('latitude_longitude')
-        projection_ds.attrs['longitude_of_prime_meridian'] = 0.0
+        # Set up grid mapping
+        projection_ds.attrs["grid_mapping_name"] = np.string_("latitude_longitude")
+        projection_ds.attrs["longitude_of_prime_meridian"] = 0.0
 
-        #Setup units for x and y
-        x_ds.attrs['standard_name'] = np.string_("longitude")
-        x_ds.attrs['units'] = np.string_("degrees_east")
+        # Setup units for x and y
+        x_ds.attrs["standard_name"] = np.string_("longitude")
+        x_ds.attrs["units"] = np.string_("degrees_east")
 
-        y_ds.attrs['standard_name'] = np.string_("latitude")
-        y_ds.attrs['units'] = np.string_("degrees_north")
+        y_ds.attrs["standard_name"] = np.string_("latitude")
+        y_ds.attrs["units"] = np.string_("degrees_north")
 
     # UTM zones
-    elif (geo_grid.epsg > 32600 and geo_grid.epsg < 32661) or \
-         (geo_grid.epsg > 32700 and geo_grid.epsg < 32761):
-        #Set up grid mapping
-        projection_ds.attrs['grid_mapping_name'] = np.string_('universal_transverse_mercator')
-        projection_ds.attrs['utm_zone_number'] = geo_grid.epsg % 100
-
-        #Setup units for x and y
-        x_ds.attrs['description'] = np.string_("CF compliant dimension associated with the X coordinate")
-        x_ds.attrs['standard_name'] = np.string_("projection_x_coordinate")
-        x_ds.attrs['long_name'] = np.string_("x coordinate of projection")
-        x_ds.attrs['units'] = np.string_("meters")
-
-        y_ds.attrs['description'] = np.string_("CF compliant dimension associated with the Y coordinate")
-        y_ds.attrs['standard_name'] = np.string_("projection_y_coordinate")
-        y_ds.attrs['long_name'] = np.string_("y coordinate of projection")
-        y_ds.attrs['units'] = np.string_("meters")
+    elif (geo_grid.epsg > 32600 and geo_grid.epsg < 32661) or (
+        geo_grid.epsg > 32700 and geo_grid.epsg < 32761
+    ):
+        # Set up grid mapping
+        projection_ds.attrs["grid_mapping_name"] = np.string_(
+            "universal_transverse_mercator"
+        )
+        projection_ds.attrs["utm_zone_number"] = geo_grid.epsg % 100
+
+        # Setup units for x and y
+        x_ds.attrs["description"] = np.string_(
+            "CF compliant dimension associated with the X coordinate"
+        )
+        x_ds.attrs["standard_name"] = np.string_("projection_x_coordinate")
+        x_ds.attrs["long_name"] = np.string_("x coordinate of projection")
+        x_ds.attrs["units"] = np.string_("meters")
+
+        y_ds.attrs["description"] = np.string_(
+            "CF compliant dimension associated with the Y coordinate"
+        )
+        y_ds.attrs["standard_name"] = np.string_("projection_y_coordinate")
+        y_ds.attrs["long_name"] = np.string_("y coordinate of projection")
+        y_ds.attrs["units"] = np.string_("meters")
 
     # Polar Stereo North
     elif geo_grid.epsg == 3413:
-        #Set up grid mapping
-        projection_ds.attrs['grid_mapping_name'] = np.string_("polar_stereographic")
-        projection_ds.attrs['latitude_of_projection_origin'] = 90.0
-        projection_ds.attrs['standard_parallel'] = 70.0
-        projection_ds.attrs['straight_vertical_longitude_from_pole'] = -45.0
-        projection_ds.attrs['false_easting'] = 0.0
-        projection_ds.attrs['false_northing'] = 0.0
-
-        #Setup units for x and y
-        x_ds.attrs['standard_name'] = np.string_("projection_x_coordinate")
-        x_ds.attrs['long_name'] = np.string_("x coordinate of projection")
-        x_ds.attrs['units'] = np.string_("m")
-
-        y_ds.attrs['standard_name'] = np.string_("projection_y_coordinate")
-        y_ds.attrs['long_name'] = np.string_("y coordinate of projection")
-        y_ds.attrs['units'] = np.string_("m")
+        # Set up grid mapping
+        projection_ds.attrs["grid_mapping_name"] = np.string_("polar_stereographic")
+        projection_ds.attrs["latitude_of_projection_origin"] = 90.0
+        projection_ds.attrs["standard_parallel"] = 70.0
+        projection_ds.attrs["straight_vertical_longitude_from_pole"] = -45.0
+        projection_ds.attrs["false_easting"] = 0.0
+        projection_ds.attrs["false_northing"] = 0.0
+
+        # Setup units for x and y
+        x_ds.attrs["standard_name"] = np.string_("projection_x_coordinate")
+        x_ds.attrs["long_name"] = np.string_("x coordinate of projection")
+        x_ds.attrs["units"] = np.string_("m")
+
+        y_ds.attrs["standard_name"] = np.string_("projection_y_coordinate")
+        y_ds.attrs["long_name"] = np.string_("y coordinate of projection")
+        y_ds.attrs["units"] = np.string_("m")
 
     # Polar Stereo south
     elif geo_grid.epsg == 3031:
-        #Set up grid mapping
-        projection_ds.attrs['grid_mapping_name'] = np.string_("polar_stereographic")
-        projection_ds.attrs['latitude_of_projection_origin'] = -90.0
-        projection_ds.attrs['standard_parallel'] = -71.0
-        projection_ds.attrs['straight_vertical_longitude_from_pole'] = 0.0
-        projection_ds.attrs['false_easting'] = 0.0
-        projection_ds.attrs['false_northing'] = 0.0
-
-        #Setup units for x and y
-        x_ds.attrs['standard_name'] = np.string_("projection_x_coordinate")
-        x_ds.attrs['long_name'] = np.string_("x coordinate of projection")
-        x_ds.attrs['units'] = np.string_("m")
-
-        y_ds.attrs['standard_name'] = np.string_("projection_y_coordinate")
-        y_ds.attrs['long_name'] = np.string_("y coordinate of projection")
-        y_ds.attrs['units'] = np.string_("m")
+        # Set up grid mapping
+        projection_ds.attrs["grid_mapping_name"] = np.string_("polar_stereographic")
+        projection_ds.attrs["latitude_of_projection_origin"] = -90.0
+        projection_ds.attrs["standard_parallel"] = -71.0
+        projection_ds.attrs["straight_vertical_longitude_from_pole"] = 0.0
+        projection_ds.attrs["false_easting"] = 0.0
+        projection_ds.attrs["false_northing"] = 0.0
+
+        # Setup units for x and y
+        x_ds.attrs["standard_name"] = np.string_("projection_x_coordinate")
+        x_ds.attrs["long_name"] = np.string_("x coordinate of projection")
+        x_ds.attrs["units"] = np.string_("m")
+
+        y_ds.attrs["standard_name"] = np.string_("projection_y_coordinate")
+        y_ds.attrs["long_name"] = np.string_("y coordinate of projection")
+        y_ds.attrs["units"] = np.string_("m")
 
     # EASE 2 for soil moisture L3
     elif geo_grid.epsg == 6933:
-        #Set up grid mapping
-        projection_ds.attrs['grid_mapping_name'] = np.string_("lambert_cylindrical_equal_area")
-        projection_ds.attrs['longitude_of_central_meridian'] = 0.0
-        projection_ds.attrs['standard_parallel'] = 30.0
-        projection_ds.attrs['false_easting'] = 0.0
-        projection_ds.attrs['false_northing'] = 0.0
-
-        #Setup units for x and y
-        x_ds.attrs['standard_name'] = np.string_("projection_x_coordinate")
-        x_ds.attrs['long_name'] = np.string_("x coordinate of projection")
-        x_ds.attrs['units'] = np.string_("m")
-
-        y_ds.attrs['standard_name'] = np.string_("projection_y_coordinate")
-        y_ds.attrs['long_name'] = np.string_("y coordinate of projection")
-        y_ds.attrs['units'] = np.string_("m")
+        # Set up grid mapping
+        projection_ds.attrs["grid_mapping_name"] = np.string_(
+            "lambert_cylindrical_equal_area"
+        )
+        projection_ds.attrs["longitude_of_central_meridian"] = 0.0
+        projection_ds.attrs["standard_parallel"] = 30.0
+        projection_ds.attrs["false_easting"] = 0.0
+        projection_ds.attrs["false_northing"] = 0.0
+
+        # Setup units for x and y
+        x_ds.attrs["standard_name"] = np.string_("projection_x_coordinate")
+        x_ds.attrs["long_name"] = np.string_("x coordinate of projection")
+        x_ds.attrs["units"] = np.string_("m")
+
+        y_ds.attrs["standard_name"] = np.string_("projection_y_coordinate")
+        y_ds.attrs["long_name"] = np.string_("y coordinate of projection")
+        y_ds.attrs["units"] = np.string_("m")
 
     # Europe Equal Area for Deformation map (to be implemented in isce3)
     elif geo_grid.epsg == 3035:
-        #Set up grid mapping
-        projection_ds.attrs['grid_mapping_name'] = np.string_("lambert_azimuthal_equal_area")
-        projection_ds.attrs['longitude_of_projection_origin']= 10.0
-        projection_ds.attrs['latitude_of_projection_origin'] = 52.0
-        projection_ds.attrs['standard_parallel'] = -71.0
-        projection_ds.attrs['straight_vertical_longitude_from_pole'] = 0.0
-        projection_ds.attrs['false_easting'] = 4321000.0
-        projection_ds.attrs['false_northing'] = 3210000.0
-
-        #Setup units for x and y
-        x_ds.attrs['standard_name'] = np.string_("projection_x_coordinate")
-        x_ds.attrs['long_name'] = np.string_("x coordinate of projection")
-        x_ds.attrs['units'] = np.string_("m")
-
-        y_ds.attrs['standard_name'] = np.string_("projection_y_coordinate")
-        y_ds.attrs['long_name'] = np.string_("y coordinate of projection")
-        y_ds.attrs['units'] = np.string_("m")
+        # Set up grid mapping
+        projection_ds.attrs["grid_mapping_name"] = np.string_(
+            "lambert_azimuthal_equal_area"
+        )
+        projection_ds.attrs["longitude_of_projection_origin"] = 10.0
+        projection_ds.attrs["latitude_of_projection_origin"] = 52.0
+        projection_ds.attrs["standard_parallel"] = -71.0
+        projection_ds.attrs["straight_vertical_longitude_from_pole"] = 0.0
+        projection_ds.attrs["false_easting"] = 4321000.0
+        projection_ds.attrs["false_northing"] = 3210000.0
+
+        # Setup units for x and y
+        x_ds.attrs["standard_name"] = np.string_("projection_x_coordinate")
+        x_ds.attrs["long_name"] = np.string_("x coordinate of projection")
+        x_ds.attrs["units"] = np.string_("m")
+
+        y_ds.attrs["standard_name"] = np.string_("projection_y_coordinate")
+        y_ds.attrs["long_name"] = np.string_("y coordinate of projection")
+        y_ds.attrs["units"] = np.string_("m")
 
     else:
-        raise NotImplementedError('Waiting for implementation / Not supported in ISCE3')
+        raise NotImplementedError("Waiting for implementation / Not supported in ISCE3")
 
     return cslc_ds
 
 
 def save_orbit(orbit, orbit_direction, orbit_type, orbit_group):
-    '''
+    """
     Write orbit to HDF5
 
     Parameter
@@ -325,39 +346,60 @@ def save_orbit(orbit, orbit_direction, orbit_type, orbit_group):
         Type of orbit: RESORB or POEORB
     orbit_group: h5py.Group
         HDF5 group where orbit parameters will be written
-    '''
+    """
     # isce isoformat gives 9 decimal places, but python `fromisoformat` wants 6
-    ref_epoch = orbit.reference_epoch.isoformat().replace('T', ' ')[:-3]
+    ref_epoch = orbit.reference_epoch.isoformat().replace("T", " ")[:-3]
 
     orbit_items = [
-        Meta('reference_epoch', ref_epoch, 'Reference epoch of the state vectors',
-             {'format': 'YYYY-MM-DD HH:MM:SS.6f'}),
-        Meta('time', np.linspace(orbit.time.first,
-                                 orbit.time.last,
-                                 orbit.time.size),
-             'Time of the orbit state vectors relative to the reference epoch',
-             {'units': 'seconds'}),
-        Meta('orbit_direction', orbit_direction,
-             'Direction of sensor orbit ephemeris (e.g., ascending, descending)')
+        Meta(
"reference_epoch", + ref_epoch, + "Reference epoch of the state vectors", + {"format": "YYYY-MM-DD HH:MM:SS.6f"}, + ), + Meta( + "time", + np.linspace(orbit.time.first, orbit.time.last, orbit.time.size), + "Time of the orbit state vectors relative to the reference epoch", + {"units": "seconds"}, + ), + Meta( + "orbit_direction", + orbit_direction, + "Direction of sensor orbit ephemeris (e.g., ascending, descending)", + ), ] - for i_ax, axis in enumerate('xyz'): - desc_suffix = f'{axis}-direction with respect to WGS84 G1762 reference frame' - orbit_items.append(Meta(f'position_{axis}', orbit.position[:, i_ax], - f'Platform position along {desc_suffix}', - {'units': 'meters'})) - orbit_items.append(Meta(f'velocity_{axis}', orbit.velocity[:, i_ax], - f'Platform velocity along {desc_suffix}', - {'units': 'meters/second'})) + for i_ax, axis in enumerate("xyz"): + desc_suffix = f"{axis}-direction with respect to WGS84 G1762 reference frame" + orbit_items.append( + Meta( + f"position_{axis}", + orbit.position[:, i_ax], + f"Platform position along {desc_suffix}", + {"units": "meters"}, + ) + ) + orbit_items.append( + Meta( + f"velocity_{axis}", + orbit.velocity[:, i_ax], + f"Platform velocity along {desc_suffix}", + {"units": "meters/second"}, + ) + ) for meta_item in orbit_items: add_dataset_and_attrs(orbit_group, meta_item) - orbit_ds = orbit_group.require_dataset("orbit_type", (), "S10", - data=np.string_(orbit_type)) - orbit_ds.attrs["description"] = np.string_("Type of orbit file used for processing. " - "RESORB: restituted orbit ephemeris or POEORB: precise orbit ephemeris") + orbit_ds = orbit_group.require_dataset( + "orbit_type", (), "S10", data=np.string_(orbit_type) + ) + orbit_ds.attrs["description"] = np.string_( + "Type of orbit file used for processing. 
" + "RESORB: restituted orbit ephemeris or POEORB: precise orbit ephemeris" + ) def get_polygon_wkt(burst: Sentinel1BurstSlc): - ''' + """ Get WKT for butst's bounding polygon It returns "POLYGON" when there is only one polygon that defines the burst's border @@ -372,9 +414,9 @@ def get_polygon_wkt(burst: Sentinel1BurstSlc): "POLYGON" or "MULTIPOLYGON" in WKT as the bounding polygon of the input burst - ''' + """ - if len(burst.border) ==1: + if len(burst.border) == 1: geometry_polygon = burst.border[0] else: geometry_polygon = shapely.geometry.MultiPolygon(burst.border) @@ -383,7 +425,7 @@ def get_polygon_wkt(burst: Sentinel1BurstSlc): def identity_to_h5group(dst_group, burst, cfg, product_type): - ''' + """ Write burst metadata to HDF5 Parameter: @@ -397,47 +439,90 @@ def identity_to_h5group(dst_group, burst, cfg, product_type): product_type: str Type of COMPASS product - ''' + """ # identification datasets id_meta_items = [ - Meta('product_version', f'{cfg.product_group.product_version}', 'CSLC-S1 product version'), - Meta('product_specification_version', f'{cfg.product_group.product_specification_version}', - 'CSLC-S1 product specification version'), - Meta('absolute_orbit_number', burst.abs_orbit_number, 'Absolute orbit number'), - Meta('track_number', burst.burst_id.track_number, 'Track number', - {'units': 'unitless'}), - Meta('burst_id', str(burst.burst_id), 'Burst identification string (burst ID)'), - Meta('bounding_polygon', get_polygon_wkt(burst), - 'OGR compatible WKT representation of bounding polygon of the image', - {'units':'degrees'}), - Meta('mission_id', burst.platform_id, 'Mission identifier'), - Meta('processing_date_time', datetime.now().strftime(TIME_STR_FMT), - 'Data processing date and time'), - Meta('product_type', product_type, 'Product type'), - Meta('product_level', 'L2', 'L0A: Unprocessed instrument data; L0B: Reformatted, ' - 'unprocessed instrument data; L1: Processed instrument data in radar coordinates system; ' - 'and L2: Processed instrument data in geocoded coordinates system'), - Meta('look_direction', 'Right', 'Look direction can be left or right'), - Meta('instrument_name', 'C-SAR', 'Instrument name'), - Meta('orbit_pass_direction', burst.orbit_direction, - 'Orbit direction can be ascending or descending'), - Meta('radar_band', 'C', 'Radar band'), - Meta('zero_doppler_start_time', burst.sensing_start.strftime(TIME_STR_FMT), - 'Azimuth start time of product'), - Meta('zero_doppler_end_time', burst.sensing_stop.strftime(TIME_STR_FMT), - 'Azimuth stop time of product'), - Meta('is_geocoded', 'True', 'Boolean indicating if product is in radar geometry or geocoded'), - Meta('processing_center', 'Jet Propulsion Laboratory', 'Name of the processing center that produced the product') - ] - id_group = dst_group.require_group('identification') + Meta( + "product_version", + f"{cfg.product_group.product_version}", + "CSLC-S1 product version", + ), + Meta( + "product_specification_version", + f"{cfg.product_group.product_specification_version}", + "CSLC-S1 product specification version", + ), + Meta("absolute_orbit_number", burst.abs_orbit_number, "Absolute orbit number"), + Meta( + "track_number", + burst.burst_id.track_number, + "Track number", + {"units": "unitless"}, + ), + Meta("burst_id", str(burst.burst_id), "Burst identification string (burst ID)"), + Meta( + "bounding_polygon", + get_polygon_wkt(burst), + "OGR compatible WKT representation of bounding polygon of the image", + {"units": "degrees"}, + ), + Meta("mission_id", burst.platform_id, "Mission 
identifier"), + Meta( + "processing_date_time", + datetime.now().strftime(TIME_STR_FMT), + "Data processing date and time", + ), + Meta("product_type", product_type, "Product type"), + Meta( + "product_level", + "L2", + "L0A: Unprocessed instrument data; L0B: Reformatted, unprocessed instrument" + " data; L1: Processed instrument data in radar coordinates system; and L2:" + " Processed instrument data in geocoded coordinates system", + ), + Meta("look_direction", "Right", "Look direction can be left or right"), + Meta("instrument_name", "C-SAR", "Instrument name"), + Meta( + "orbit_pass_direction", + burst.orbit_direction, + "Orbit direction can be ascending or descending", + ), + Meta("radar_band", "C", "Radar band"), + Meta( + "zero_doppler_start_time", + burst.sensing_start.strftime(TIME_STR_FMT), + "Azimuth start time of product", + ), + Meta( + "zero_doppler_end_time", + burst.sensing_stop.strftime(TIME_STR_FMT), + "Azimuth stop time of product", + ), + Meta( + "is_geocoded", + "True", + "Boolean indicating if product is in radar geometry or geocoded", + ), + Meta( + "processing_center", + "Jet Propulsion Laboratory", + "Name of the processing center that produced the product", + ), + ] + id_group = dst_group.require_group("identification") for meta_item in id_meta_items: add_dataset_and_attrs(id_group, meta_item) -def metadata_to_h5group(parent_group, burst, cfg, save_noise_and_cal=True, - save_processing_parameters=True, - eap_correction_applied='None'): - ''' +def metadata_to_h5group( + parent_group, + burst, + cfg, + save_noise_and_cal=True, + save_processing_parameters=True, + eap_correction_applied="None", +): + """ Write burst metadata to HDF5 Parameter: @@ -452,44 +537,47 @@ def metadata_to_h5group(parent_group, burst, cfg, save_noise_and_cal=True, If true, to save noise and calibration metadata in metadata save_processing_parameters: bool If true, to save processing parameters in metadata - ''' - if 'metadata' in parent_group: - del parent_group['metadata'] + """ + if "metadata" in parent_group: + del parent_group["metadata"] # create metadata group to write datasets to - meta_group = parent_group.require_group('metadata') + meta_group = parent_group.require_group("metadata") # orbit items - if 'orbit' in meta_group: - del meta_group['orbit'] - orbit_group = meta_group.require_group('orbit') + if "orbit" in meta_group: + del meta_group["orbit"] + orbit_group = meta_group.require_group("orbit") # Get orbit type orbit_file_path = os.path.basename(cfg.orbit_path[0]) - if 'RESORB' in orbit_file_path: - orbit_type = 'RESORB' - elif 'POEORB' in orbit_file_path: - orbit_type = 'POEORB' + if "RESORB" in orbit_file_path: + orbit_type = "RESORB" + elif "POEORB" in orbit_file_path: + orbit_type = "POEORB" else: - err_str = f'{cfg.orbit_path[0]} is not a valid RESORB/POERB file' + err_str = f"{cfg.orbit_path[0]} is not a valid RESORB/POERB file" raise ValueError(err_str) - save_orbit(burst.orbit, burst.orbit_direction, - orbit_type, orbit_group) + save_orbit(burst.orbit, burst.orbit_direction, orbit_type, orbit_group) # create metadata group to write datasets to - processing_group = meta_group.require_group('processing_information') + processing_group = meta_group.require_group("processing_information") # write out calibration metadata, if present if burst.burst_calibration is not None and save_noise_and_cal: cal = burst.burst_calibration cal_items = [ - Meta('azimuth_time', cal.azimuth_time.strftime(TIME_STR_FMT), - 'Start time', {'format': 'YYYY-MM-DD HH:MM:SS.6f'}), - 
-            Meta('beta_naught', cal.beta_naught, 'beta_naught')
+            Meta(
+                "azimuth_time",
+                cal.azimuth_time.strftime(TIME_STR_FMT),
+                "Start time",
+                {"format": "YYYY-MM-DD HH:MM:SS.6f"},
+            ),
+            Meta("beta_naught", cal.beta_naught, "beta_naught"),
         ]
-        cal_group = meta_group.require_group('calibration_information')
+        cal_group = meta_group.require_group("calibration_information")
         for meta_item in cal_items:
             add_dataset_and_attrs(cal_group, meta_item)
 
@@ -497,169 +585,313 @@ def metadata_to_h5group(parent_group, burst, cfg, save_noise_and_cal=True,
     if burst.burst_noise is not None and save_noise_and_cal:
         noise = burst.burst_noise
         noise_items = [
-            Meta('range_azimuth_time',
-                 noise.range_azimuth_time.strftime(TIME_STR_FMT),
-                 'Start time', {'format': 'YYYY-MM-DD HH:MM:SS.6f'})
+            Meta(
+                "range_azimuth_time",
+                noise.range_azimuth_time.strftime(TIME_STR_FMT),
+                "Start time",
+                {"format": "YYYY-MM-DD HH:MM:SS.6f"},
+            )
         ]
-        noise_group = meta_group.require_group('noise_information')
+        noise_group = meta_group.require_group("noise_information")
         for meta_item in noise_items:
             add_dataset_and_attrs(noise_group, meta_item)
 
     # runconfig yaml text
-    processing_group['runconfig'] = cfg.yaml_string
-    processing_group['runconfig'].attrs['description'] = np.string_('Run configuration file used to generate the CSLC-S1 product')
+    processing_group["runconfig"] = cfg.yaml_string
+    processing_group["runconfig"].attrs["description"] = np.string_(
+        "Run configuration file used to generate the CSLC-S1 product"
+    )
 
     # input items
     orbit_files = [os.path.basename(f) for f in cfg.orbit_path]
     input_items = [
-        Meta('l1_slc_files', burst.safe_filename,
-             'List of input L1 RSLC files used for processing'),
-        Meta('orbit_files', orbit_files, 'List of input orbit files used for processing'),
-        Meta('calibration_files', burst.burst_calibration.basename_cads,
-             'List of input calibration files used for processing'),
-        Meta('noise_files', burst.burst_noise.basename_nads,
-             'List of input noise files used for processing'),
-        Meta('dem_source',
-             os.path.basename(cfg.groups.dynamic_ancillary_file_group.dem_description),
-             'Description of the DEM used for processing'),
+        Meta(
+            "l1_slc_files",
+            burst.safe_filename,
+            "List of input L1 RSLC files used for processing",
+        ),
+        Meta(
+            "orbit_files", orbit_files, "List of input orbit files used for processing"
+        ),
+        Meta(
+            "calibration_files",
+            burst.burst_calibration.basename_cads,
+            "List of input calibration files used for processing",
+        ),
+        Meta(
+            "noise_files",
+            burst.burst_noise.basename_nads,
+            "List of input noise files used for processing",
+        ),
+        Meta(
+            "dem_source",
+            os.path.basename(cfg.groups.dynamic_ancillary_file_group.dem_description),
+            "Description of the DEM used for processing",
+        ),
     ]
-    input_group = processing_group.require_group('inputs')
+    input_group = processing_group.require_group("inputs")
     for meta_item in input_items:
         add_dataset_and_attrs(input_group, meta_item)
 
     vrt_items = [
-        Meta('tiff_path', burst.tiff_path,
-             'Path to measurement tiff file inside the SAFE file'),
-        Meta('burst_index', burst.i_burst,
-             'Burst index relative other bursts in swath'),
-        Meta('first_valid_sample', burst.first_valid_sample,
-             'First valid sample for burst in measurement tiff'),
-        Meta('last_valid_sample', burst.last_valid_sample,
-             'Last valid sample for burst in measurement tiff'),
-        Meta('first_valid_line', burst.first_valid_line,
-             'First valid line for burst in measurement tiff'),
-        Meta('last_valid_line', burst.last_valid_line,
-             'Last valid line for burst in measurement tiff')
+        Meta(
+            "tiff_path",
+            burst.tiff_path,
+            "Path to measurement tiff file inside the SAFE file",
+        ),
+        Meta(
+            "burst_index", burst.i_burst, "Burst index relative to other bursts in swath"
+        ),
+        Meta(
+            "first_valid_sample",
+            burst.first_valid_sample,
+            "First valid sample for burst in measurement tiff",
+        ),
+        Meta(
+            "last_valid_sample",
+            burst.last_valid_sample,
+            "Last valid sample for burst in measurement tiff",
+        ),
+        Meta(
+            "first_valid_line",
+            burst.first_valid_line,
+            "First valid line for burst in measurement tiff",
+        ),
+        Meta(
+            "last_valid_line",
+            burst.last_valid_line,
+            "Last valid line for burst in measurement tiff",
+        ),
    ]
-    vrt_group = input_group.require_group('burst_location_parameters')
+    vrt_group = input_group.require_group("burst_location_parameters")
     for meta_item in vrt_items:
         add_dataset_and_attrs(vrt_group, meta_item)
 
     # burst items
     burst_meta_items = [
-        Meta('ipf_version', str(burst.ipf_version),
-             'ESA Instrument Processing Facility software version'),
-        Meta('sensing_start', burst.sensing_start.strftime(TIME_STR_FMT),
-             'Sensing start time of the burst',
-             {'format': 'YYYY-MM-DD HH:MM:SS.6f'}),
-        Meta('radar_center_frequency', burst.radar_center_frequency,
-             'Radar center frequency', {'units':'Hertz'}),
-        Meta('wavelength', burst.wavelength,
-             'Wavelength of the transmitted signal', {'units':'meters'}),
-        Meta('azimuth_steering_rate', burst.azimuth_steer_rate,
-             'Azimuth steering rate of IW and EW modes',
-             {'units':'degrees/second'}),
-        Meta('azimuth_time_interval', burst.azimuth_time_interval,
-             'Time spacing between azimuth lines of the burst',
-             {'units':'seconds'}),
-        Meta('slant_range_time', burst.slant_range_time,
-             'two-way slant range time of Doppler centroid frequency estimate',
-             {'units':'seconds'}),
-        Meta('starting_range', burst.starting_range,
-             'Slant range of the first sample of the input burst',
-             {'units':'meters'}),
-        Meta('sensing_stop', burst.sensing_stop.strftime(TIME_STR_FMT),
-             'Sensing stop time of the burst',
-             {'format': 'YYYY-MM-DD HH:MM:SS.6f'}),
-        Meta('iw2_mid_range', burst.iw2_mid_range,
-             'Slant range of the middle of the IW2 swath',
-             {'units':'meters'}),
-        Meta('range_sampling_rate', burst.range_sampling_rate,
-             'Sampling rate of slant range in the input burst SLC',
-             {'units':'Hertz'}),
-        Meta('range_pixel_spacing', burst.range_pixel_spacing,
-             'Pixel spacing between slant range samples in the input burst SLC',
-             {'units':'meters'}),
-        Meta('shape', burst.shape, 'Shape (length, width) of the burst in radar coordinates',
-             {'units':'pixels'}),
-        Meta('range_bandwidth', burst.range_bandwidth,
-             'Slant range bandwidth of the signal', {'units':'Hertz'}),
-        Meta('polarization', burst.polarization, 'Polarization of the burst'),
-        Meta('platform_id', burst.platform_id,
-             'Sensor platform identification string (e.g., S1A or S1B)'),
-        Meta('center', [xy[0] for xy in burst.center.coords.xy],
-             'Longitude, latitude center of burst', {'units':'degrees'}),
+        Meta(
+            "ipf_version",
+            str(burst.ipf_version),
+            "ESA Instrument Processing Facility software version",
+        ),
+        Meta(
+            "sensing_start",
+            burst.sensing_start.strftime(TIME_STR_FMT),
+            "Sensing start time of the burst",
+            {"format": "YYYY-MM-DD HH:MM:SS.6f"},
+        ),
+        Meta(
+            "radar_center_frequency",
+            burst.radar_center_frequency,
+            "Radar center frequency",
+            {"units": "Hertz"},
+        ),
+        Meta(
+            "wavelength",
+            burst.wavelength,
+            "Wavelength of the transmitted signal",
+            {"units": "meters"},
+        ),
+        Meta(
+            "azimuth_steering_rate",
+            burst.azimuth_steer_rate,
+            "Azimuth steering rate of IW and EW modes",
+            {"units": "degrees/second"},
+        ),
+        Meta(
+            "azimuth_time_interval",
+            burst.azimuth_time_interval,
+            "Time spacing between azimuth lines of the burst",
+            {"units": "seconds"},
+        ),
+        Meta(
+            "slant_range_time",
+            burst.slant_range_time,
+            "two-way slant range time of Doppler centroid frequency estimate",
+            {"units": "seconds"},
+        ),
+        Meta(
+            "starting_range",
+            burst.starting_range,
+            "Slant range of the first sample of the input burst",
+            {"units": "meters"},
+        ),
+        Meta(
+            "sensing_stop",
+            burst.sensing_stop.strftime(TIME_STR_FMT),
+            "Sensing stop time of the burst",
+            {"format": "YYYY-MM-DD HH:MM:SS.6f"},
+        ),
+        Meta(
+            "iw2_mid_range",
+            burst.iw2_mid_range,
+            "Slant range of the middle of the IW2 swath",
+            {"units": "meters"},
+        ),
+        Meta(
+            "range_sampling_rate",
+            burst.range_sampling_rate,
+            "Sampling rate of slant range in the input burst SLC",
+            {"units": "Hertz"},
+        ),
+        Meta(
+            "range_pixel_spacing",
+            burst.range_pixel_spacing,
+            "Pixel spacing between slant range samples in the input burst SLC",
+            {"units": "meters"},
+        ),
+        Meta(
+            "shape",
+            burst.shape,
+            "Shape (length, width) of the burst in radar coordinates",
+            {"units": "pixels"},
+        ),
+        Meta(
+            "range_bandwidth",
+            burst.range_bandwidth,
+            "Slant range bandwidth of the signal",
+            {"units": "Hertz"},
+        ),
+        Meta("polarization", burst.polarization, "Polarization of the burst"),
+        Meta(
+            "platform_id",
+            burst.platform_id,
+            "Sensor platform identification string (e.g., S1A or S1B)",
+        ),
+        Meta(
+            "center",
+            [xy[0] for xy in burst.center.coords.xy],
+            "Longitude, latitude center of burst",
+            {"units": "degrees"},
+        ),
         # window parameters
-        Meta('range_window_type', burst.range_window_type,
-             'Name of the weighting window type used during processing'),
-        Meta('range_window_coefficient', burst.range_window_coefficient,
-             'Value of the weighting window coefficient used during processing'),
-        Meta('rank', burst.rank,
-             "The number of Pulse Repetition Intervals (PRI) between transmitted pulse and return echo"),
-        Meta('prf_raw_data', burst.prf_raw_data,
-             'Pulse repetition frequency (PRF) of the raw data',
-             {'units':'Hertz'}),
-        Meta('range_chirp_rate', burst.range_chirp_rate,
-             'Range chirp rate', {'units':'Hertz'})
+        Meta(
+            "range_window_type",
+            burst.range_window_type,
+            "Name of the weighting window type used during processing",
+        ),
+        Meta(
+            "range_window_coefficient",
+            burst.range_window_coefficient,
+            "Value of the weighting window coefficient used during processing",
+        ),
+        Meta(
+            "rank",
+            burst.rank,
+            "The number of Pulse Repetition Intervals (PRI) between transmitted pulse"
+            " and return echo",
+        ),
+        Meta(
+            "prf_raw_data",
+            burst.prf_raw_data,
+            "Pulse repetition frequency (PRF) of the raw data",
+            {"units": "Hertz"},
+        ),
+        Meta(
+            "range_chirp_rate",
+            burst.range_chirp_rate,
+            "Range chirp rate",
+            {"units": "Hertz"},
+        ),
     ]
-    burst_meta_group = processing_group.require_group('input_burst_metadata')
+    burst_meta_group = processing_group.require_group("input_burst_metadata")
     for meta_item in burst_meta_items:
         add_dataset_and_attrs(burst_meta_group, meta_item)
 
     # Add parameters group in processing information
     if save_processing_parameters:
-        dry_tropo_corr_enabled = \
-            True if (cfg.weather_model_file is not None) and \
-            ('dry' in cfg.tropo_params.delay_type) else False
-        wet_tropo_corr_enabled = \
-            True if (cfg.weather_model_file is not None) and \
-            ('wet' in cfg.tropo_params.delay_type) else False
+        dry_tropo_corr_enabled = (
+            True
+            if (cfg.weather_model_file is not None)
+            and ("dry" in cfg.tropo_params.delay_type)
+            else False
+        )
+        wet_tropo_corr_enabled = (
+            True
+            if (cfg.weather_model_file is not None)
+            and ("wet" in cfg.tropo_params.delay_type)
+            else False
+        )
         tec_corr_enabled = True if cfg.tec_file is not None else False
-        who_applied_eap_correction = 'OPERA' if eap_correction_applied else 'ESA'
+        who_applied_eap_correction = "OPERA" if eap_correction_applied else "ESA"
 
         par_meta_items = [
-            Meta('ellipsoidal_flattening_applied',
-                 bool(cfg.geocoding_params.flatten),
-                 "If True, CSLC-S1 phase has been flattened with respect to a zero height ellipsoid"),
-            Meta('elevation_antenna_pattern_correction_applied',
-                 who_applied_eap_correction,
-                 ("Elevation antenna pattern correction. "
-                  "OPERA: correction applied by s1-reader and COMPASS. "
-                  "ESA: correction was applied by ESA. "
-                  "None: when the correction was not applied.")),
-            Meta('topographic_flattening_applied',
-                 bool(cfg.geocoding_params.flatten),
-                 "If True, CSLC-S1 phase has been flattened with respect to topographic height using a DEM"),
-            Meta('bistatic_delay_applied',
-                 bool(cfg.lut_params.enabled),
-                 "If True, bistatic delay timing correction has been applied"),
-            Meta('azimuth_fm_rate_applied',
-                 bool(cfg.lut_params.enabled),
-                 "If True, azimuth FM-rate mismatch timing correction has been applied"),
-            Meta('geometry_doppler_applied',
-                 bool(cfg.lut_params.enabled),
-                 "If True, geometry steering doppler timing correction has been applied"),
-            Meta('los_solid_earth_tides_applied', bool(cfg.lut_params.enabled),
-                 "If True, solid Earth tides correction has been applied in slant range direction"),
-            Meta('azimuth_solid_earth_tides_applied', False,
-                 "If True, solid Earth tides correction has been applied in azimuth direction"),
-            Meta('static_troposphere_applied',
-                 bool(cfg.lut_params.enabled),
-                 "If True, troposphere correction based on a static model has been applied"),
-            Meta('ionosphere_tec_applied', tec_corr_enabled,
-                 "If True, ionosphere correction based on TEC data has been applied"),
-            Meta('dry_troposphere_weather_model_applied',
-                 dry_tropo_corr_enabled,
-                 "If True, dry troposphere correction based on weather model has been applied"),
-            Meta('wet_troposphere_weather_model_applied',
-                 wet_tropo_corr_enabled,
-                 "If True, wet troposphere correction based on weather model has been applied")
+            Meta(
+                "ellipsoidal_flattening_applied",
+                bool(cfg.geocoding_params.flatten),
+                "If True, CSLC-S1 phase has been flattened with respect to a zero"
+                " height ellipsoid",
+            ),
+            Meta(
+                "elevation_antenna_pattern_correction_applied",
+                who_applied_eap_correction,
+                "Elevation antenna pattern correction. "
+                "OPERA: correction applied by s1-reader and COMPASS. "
+                "ESA: correction was applied by ESA. "
" + "None: when the correction was not applied.", + ), + Meta( + "topographic_flattening_applied", + bool(cfg.geocoding_params.flatten), + "If True, CSLC-S1 phase has been flattened with respect to topographic" + " height using a DEM", + ), + Meta( + "bistatic_delay_applied", + bool(cfg.lut_params.enabled), + "If True, bistatic delay timing correction has been applied", + ), + Meta( + "azimuth_fm_rate_applied", + bool(cfg.lut_params.enabled), + "If True, azimuth FM-rate mismatch timing correction has been applied", + ), + Meta( + "geometry_doppler_applied", + bool(cfg.lut_params.enabled), + "If True, geometry steering doppler timing correction has been applied", + ), + Meta( + "los_solid_earth_tides_applied", + bool(cfg.lut_params.enabled), + "If True, solid Earth tides correction has been applied in slant range" + " direction", + ), + Meta( + "azimuth_solid_earth_tides_applied", + False, + "If True, solid Earth tides correction has been applied in azimuth" + " direction", + ), + Meta( + "static_troposphere_applied", + bool(cfg.lut_params.enabled), + "If True, troposphere correction based on a static model has been" + " applied", + ), + Meta( + "ionosphere_tec_applied", + tec_corr_enabled, + "If True, ionosphere correction based on TEC data has been applied", + ), + Meta( + "dry_troposphere_weather_model_applied", + dry_tropo_corr_enabled, + "If True, dry troposphere correction based on weather model has been" + " applied", + ), + Meta( + "wet_troposphere_weather_model_applied", + wet_tropo_corr_enabled, + "If True, wet troposphere correction based on weather model has been" + " applied", + ), ] - par_meta_group = processing_group.require_group('parameters') + par_meta_group = processing_group.require_group("parameters") for meta_item in par_meta_items: add_dataset_and_attrs(par_meta_group, meta_item) def poly1d_to_h5(group, poly1d_name, poly1d): - '''Write isce3.core.Poly1d properties to hdf5 + """Write isce3.core.Poly1d properties to hdf5 Parameters ---------- group: h5py.Group @@ -668,23 +900,23 @@ def poly1d_to_h5(group, poly1d_name, poly1d): Name of Poly1d whose parameters are to be stored poly1d: isce3.core.Poly1d Poly1d ojbect whose parameters are to be stored - ''' + """ poly1d_items = [ - Meta('order', poly1d.order, 'order of the polynomial'), - Meta('mean', poly1d.mean, 'mean of the polynomial'), - Meta('std', poly1d.std, 'standard deviation of the polynomial'), - Meta('coeffs', poly1d.coeffs, 'coefficients of the polynomial'), + Meta("order", poly1d.order, "order of the polynomial"), + Meta("mean", poly1d.mean, "mean of the polynomial"), + Meta("std", poly1d.std, "standard deviation of the polynomial"), + Meta("coeffs", poly1d.coeffs, "coefficients of the polynomial"), ] poly1d_group = group.require_group(poly1d_name) for meta_item in poly1d_items: add_dataset_and_attrs(poly1d_group, meta_item) - poly1d_to_h5(burst_meta_group, 'azimuth_fm_rate', burst.azimuth_fm_rate) - poly1d_to_h5(burst_meta_group, 'doppler', burst.doppler.poly1d) + poly1d_to_h5(burst_meta_group, "azimuth_fm_rate", burst.azimuth_fm_rate) + poly1d_to_h5(burst_meta_group, "doppler", burst.doppler.poly1d) def algorithm_metadata_to_h5group(parent_group, is_static_layers=False): - ''' + """ Write algorithm information to HDF5 Parameter: @@ -693,42 +925,68 @@ def algorithm_metadata_to_h5group(parent_group, is_static_layers=False): HDF5 group Meta data will be written to is_static_layers: bool True if writing algorithm metadata for static layer product - ''' + """ # common algorithm items algorithm_items = [ - 
-        Meta('dem_interpolation', 'biquintic', 'DEM interpolation method'),
-        Meta('float_data_geocoding_interpolator', 'biquintic interpolation',
-             'Floating-point data geocoding interpolation method'),
-        Meta('ISCE3_version', isce3.__version__,
-             'ISCE3 version used for processing'),
-        Meta('s1_reader_version', s1reader.__version__,
-             'S1 reader version used for processing'),
-        Meta('COMPASS_version', compass.__version__,
-             'COMPASS (CSLC-S1 processor) version used for processing')
+        Meta("dem_interpolation", "biquintic", "DEM interpolation method"),
+        Meta(
+            "float_data_geocoding_interpolator",
+            "biquintic interpolation",
+            "Floating-point data geocoding interpolation method",
+        ),
+        Meta("ISCE3_version", isce3.__version__, "ISCE3 version used for processing"),
+        Meta(
+            "s1_reader_version",
+            s1reader.__version__,
+            "S1 reader version used for processing",
+        ),
+        Meta(
+            "COMPASS_version",
+            compass.__version__,
+            "COMPASS (CSLC-S1 processor) version used for processing",
+        ),
     ]
     if is_static_layers:
-        algorithm_items.extend([
-            Meta('uint_data_geocoding_interpolator',
-                 'nearest neighbor interpolation',
-                 'Unsigned int geocoding interpolation method'),
-            Meta('topography_algorithm', 'isce3.geometry.topo',
-                 'Topography generation algorithm')
-        ])
+        algorithm_items.extend(
+            [
+                Meta(
+                    "uint_data_geocoding_interpolator",
+                    "nearest neighbor interpolation",
+                    "Unsigned int geocoding interpolation method",
+                ),
+                Meta(
+                    "topography_algorithm",
+                    "isce3.geometry.topo",
+                    "Topography generation algorithm",
+                ),
+            ]
+        )
     if not is_static_layers:
         algorithm_items.append(
-            Meta('complex_data_geocoding_interpolator', 'sinc interpolation',
-                 'Complex data geocoding interpolation method'),
+            Meta(
+                "complex_data_geocoding_interpolator",
+                "sinc interpolation",
+                "Complex data geocoding interpolation method",
+            ),
        )
-    algorithm_group = \
-        parent_group.require_group('metadata/processing_information/algorithms')
+    algorithm_group = parent_group.require_group(
+        "metadata/processing_information/algorithms"
+    )
     for meta_item in algorithm_items:
         add_dataset_and_attrs(algorithm_group, meta_item)
 
 
-def corrections_to_h5group(parent_group, burst, cfg, rg_lut, az_lut,
-                           scratch_path, weather_model_path=None,
-                           delay_type='dry'):
-    '''
+def corrections_to_h5group(
+    parent_group,
+    burst,
+    cfg,
+    rg_lut,
+    az_lut,
+    scratch_path,
+    weather_model_path=None,
+    delay_type="dry",
+):
+    """
     Write azimuth, slant range, and EAP (if needed) correction LUT2ds to HDF5
 
     Parameter:
@@ -753,65 +1011,102 @@ def corrections_to_h5group(parent_group, burst, cfg, rg_lut, az_lut,
     delay_type: str
         Type of troposphere delay. Any of 'dry', 'wet', or 'wet_dry' for
         the sum of wet and dry troposphere delays.
-    '''
+    """
     # Skip saving correction LUTs if not enabled
     if not cfg.lut_params.enabled:
         return
 
     # Open GDAL dataset to fetch corrections
-    ds = gdal.Open(f'{scratch_path}/corrections/corrections',
-                   gdal.GA_ReadOnly)
-    correction_group = parent_group.require_group('timing_corrections')
+    ds = gdal.Open(f"{scratch_path}/corrections/corrections", gdal.GA_ReadOnly)
+    correction_group = parent_group.require_group("timing_corrections")
 
     # create slant range and azimuth vectors shared by the LUTs
     x_end = rg_lut.x_start + rg_lut.width * rg_lut.x_spacing
-    slant_range = np.linspace(rg_lut.x_start, x_end,
-                              rg_lut.width, dtype=np.float64)
+    slant_range = np.linspace(rg_lut.x_start, x_end, rg_lut.width, dtype=np.float64)
     y_end = az_lut.y_start + az_lut.length * az_lut.y_spacing
-    azimuth = np.linspace(az_lut.y_start, y_end,
-                          az_lut.length, dtype=np.float64)
+    azimuth = np.linspace(az_lut.y_start, y_end, az_lut.length, dtype=np.float64)
 
     # correction LUTs axis and doppler correction LUTs
-    desc = 'correction as a function of slant range and azimuth time'
+    desc = "correction as a function of slant range and azimuth time"
     correction_items = [
-        Meta('slant_range', slant_range, 'slant range of LUT data',
-             {'units': 'meters'}),
-        Meta('slant_range_spacing', rg_lut.x_spacing,
-             'spacing of slant range of LUT data', {'units': 'meters'}),
-        Meta('zero_doppler_time', azimuth, 'azimuth time of LUT data',
-             {'units': 'seconds'}),
-        Meta('zero_doppler_time_spacing', rg_lut.y_spacing,
-             'spacing of azimuth time of LUT data', {'units': 'seconds'}),
-        Meta('bistatic_delay', ds.GetRasterBand(2).ReadAsArray(),
-             f'bistatic delay (azimuth) {desc}', {'units': 'seconds'}),
-        Meta('geometry_steering_doppler', ds.GetRasterBand(1).ReadAsArray(),
-             f'geometry steering doppler (range) {desc}',
-             {'units': 'meters'}),
-        Meta('azimuth_fm_rate_mismatch', ds.GetRasterBand(3).ReadAsArray(),
-             f'azimuth FM rate mismatch mitigation (azimuth) {desc}',
-             {'units': 'seconds'}),
-        Meta('los_solid_earth_tides', ds.GetRasterBand(4).ReadAsArray(),
-             f'Solid Earth tides (range) {desc}',
-             {'units': 'meters'}),
-        Meta('azimuth_solid_earth_tides', ds.GetRasterBand(5).ReadAsArray(),
-             f'Solid Earth tides (azimuth) {desc}',
-             {'units': 'seconds'}),
-        Meta('los_ionospheric_delay', ds.GetRasterBand(6).ReadAsArray(),
-             f'Ionospheric delay (range) {desc}',
-             {'units': 'meters'}),
+        Meta(
+            "slant_range", slant_range, "slant range of LUT data", {"units": "meters"}
+        ),
+        Meta(
+            "slant_range_spacing",
+            rg_lut.x_spacing,
+            "spacing of slant range of LUT data",
+            {"units": "meters"},
+        ),
+        Meta(
+            "zero_doppler_time",
+            azimuth,
+            "azimuth time of LUT data",
+            {"units": "seconds"},
+        ),
+        Meta(
+            "zero_doppler_time_spacing",
+            rg_lut.y_spacing,
+            "spacing of azimuth time of LUT data",
+            {"units": "seconds"},
+        ),
+        Meta(
+            "bistatic_delay",
+            ds.GetRasterBand(2).ReadAsArray(),
+            f"bistatic delay (azimuth) {desc}",
+            {"units": "seconds"},
+        ),
+        Meta(
+            "geometry_steering_doppler",
+            ds.GetRasterBand(1).ReadAsArray(),
+            f"geometry steering doppler (range) {desc}",
+            {"units": "meters"},
+        ),
+        Meta(
+            "azimuth_fm_rate_mismatch",
+            ds.GetRasterBand(3).ReadAsArray(),
+            f"azimuth FM rate mismatch mitigation (azimuth) {desc}",
+            {"units": "seconds"},
+        ),
+        Meta(
+            "los_solid_earth_tides",
+            ds.GetRasterBand(4).ReadAsArray(),
+            f"Solid Earth tides (range) {desc}",
+            {"units": "meters"},
+        ),
+        Meta(
+            "azimuth_solid_earth_tides",
+            ds.GetRasterBand(5).ReadAsArray(),
+            f"Solid Earth tides (azimuth) {desc}",
+            {"units": "seconds"},
+        ),
+        Meta(
"los_ionospheric_delay", + ds.GetRasterBand(6).ReadAsArray(), + f"Ionospheric delay (range) {desc}", + {"units": "meters"}, + ), ] if weather_model_path is not None: - if 'wet' in delay_type: - correction_items.append(Meta('wet_los_troposphere_delay', - ds.GetRasterBand(7).ReadAsArray(), - f'Wet LOS troposphere delay {desc}', - {'units': 'meters'})) - if 'dry' in delay_type: - correction_items.append(Meta('dry_los_troposphere_delay', - ds.GetRasterBand(8).ReadAsArray(), - f'Dry LOS troposphere delay {desc}', - {'units': 'meters'})) + if "wet" in delay_type: + correction_items.append( + Meta( + "wet_los_troposphere_delay", + ds.GetRasterBand(7).ReadAsArray(), + f"Wet LOS troposphere delay {desc}", + {"units": "meters"}, + ) + ) + if "dry" in delay_type: + correction_items.append( + Meta( + "dry_los_troposphere_delay", + ds.GetRasterBand(8).ReadAsArray(), + f"Dry LOS troposphere delay {desc}", + {"units": "meters"}, + ) + ) for meta_item in correction_items: add_dataset_and_attrs(correction_group, meta_item) @@ -819,35 +1114,53 @@ def corrections_to_h5group(parent_group, burst, cfg, rg_lut, az_lut, # Extended FM rate and doppler centroid polynomial coefficients for azimuth # FM rate mismatch mitigation extended_coeffs = burst.extended_coeffs - fm_rate_aztime_vec = [t.strftime(TIME_STR_FMT) - for t in extended_coeffs.fm_rate_aztime_vec] - dc_aztime_vec = [t.strftime(TIME_STR_FMT) - for t in extended_coeffs.dc_aztime_vec] + fm_rate_aztime_vec = [ + t.strftime(TIME_STR_FMT) for t in extended_coeffs.fm_rate_aztime_vec + ] + dc_aztime_vec = [t.strftime(TIME_STR_FMT) for t in extended_coeffs.dc_aztime_vec] extended_coeffs_items = [ - Meta('fm_rate_azimuth_time', fm_rate_aztime_vec, - 'Azimuth time for FM rate coefficient data', - {'format': 'YYYY-MM-DD HH:MM:SS.6f'}), - Meta('fm_rate_slant_range_time', extended_coeffs.fm_rate_tau0_vec, - 'Slant range time for FM rate coefficient data', - {'units':'seconds'}), - Meta('fm_rate_coefficients', extended_coeffs.fm_rate_coeff_arr, - 'FM rate coefficient data'), - Meta('doppler_centroid_azimuth_time', dc_aztime_vec, - 'Azimuth time for doppler centroid coefficient data', - {'format': 'YYYY-MM-DD HH:MM:SS.6f'}), - Meta('doppler_centroid_slant_range_time', extended_coeffs.dc_tau0_vec, - 'Slant range time for doppler centroid coefficient data', - {'units':'seconds'}), - Meta('doppler_centroid_coefficients', extended_coeffs.dc_coeff_arr, - 'Doppler centroid coefficient data') + Meta( + "fm_rate_azimuth_time", + fm_rate_aztime_vec, + "Azimuth time for FM rate coefficient data", + {"format": "YYYY-MM-DD HH:MM:SS.6f"}, + ), + Meta( + "fm_rate_slant_range_time", + extended_coeffs.fm_rate_tau0_vec, + "Slant range time for FM rate coefficient data", + {"units": "seconds"}, + ), + Meta( + "fm_rate_coefficients", + extended_coeffs.fm_rate_coeff_arr, + "FM rate coefficient data", + ), + Meta( + "doppler_centroid_azimuth_time", + dc_aztime_vec, + "Azimuth time for doppler centroid coefficient data", + {"format": "YYYY-MM-DD HH:MM:SS.6f"}, + ), + Meta( + "doppler_centroid_slant_range_time", + extended_coeffs.dc_tau0_vec, + "Slant range time for doppler centroid coefficient data", + {"units": "seconds"}, + ), + Meta( + "doppler_centroid_coefficients", + extended_coeffs.dc_coeff_arr, + "Doppler centroid coefficient data", + ), ] - extended_coeffs_group = correction_group.require_group('extended_coefficients') + extended_coeffs_group = correction_group.require_group("extended_coefficients") for meta_item in extended_coeffs_items: 
         add_dataset_and_attrs(extended_coeffs_group, meta_item)
 
 
 def get_cslc_geotransform(filename: str, pol: str = "VV"):
-    '''
+    """
     Extract and return geotransform of a geocoded CSLC raster in an HDF5 file
 
     Parameters
@@ -861,13 +1174,13 @@ def get_cslc_geotransform(filename: str, pol: str = "VV"):
     -------
     list
         Geotransform of the geocoded raster
-    '''
-    gdal_str = f'NETCDF:{filename}:/{DATA_PATH}/{pol}'
-    return gdal.Info(gdal_str, format='json')['geoTransform']
+    """
+    gdal_str = f"NETCDF:{filename}:/{DATA_PATH}/{pol}"
+    return gdal.Info(gdal_str, format="json")["geoTransform"]
 
 
-def get_georaster_bounds(filename: str, pol: str = 'VV'):
-    '''
+def get_georaster_bounds(filename: str, pol: str = "VV"):
+    """
     Compute CSLC raster boundary of a given polarization
 
     Parameters
@@ -882,8 +1195,8 @@ def get_georaster_bounds(filename: str, pol: str = 'VV'):
     tuple
         WGS84 coordinates of the geocoded raster boundary given as
         min_x, max_x, min_y, max_y
-    '''
-    nfo = gdal.Info(f'NETCDF:{filename}:/{DATA_PATH}/{pol}', format='json')
+    """
+    nfo = gdal.Info(f"NETCDF:{filename}:/{DATA_PATH}/{pol}", format="json")
 
     # set extreme initial values for min/max x/y
     min_x = 999999
@@ -892,7 +1205,7 @@ def get_georaster_bounds(filename: str, pol: str = 'VV'):
     max_y = -999999
 
     # extract wgs84 extent and find min/max x/y
-    wgs84_coords = nfo['wgs84Extent']['coordinates'][0]
+    wgs84_coords = nfo["wgs84Extent"]["coordinates"][0]
     for x, y in wgs84_coords:
         min_x = min(min_x, x)
         max_x = max(max_x, x)
diff --git a/src/compass/utils/helpers.py b/src/compass/utils/helpers.py
index 3353a134..bed3ca7b 100644
--- a/src/compass/utils/helpers.py
+++ b/src/compass/utils/helpers.py
@@ -1,4 +1,4 @@
-'''collection of useful functions used across workflows'''
+"""collection of useful functions used across workflows"""
 
 from datetime import timedelta
 import itertools
@@ -15,13 +15,12 @@
 import compass
 
-
 WORKFLOW_SCRIPTS_DIR = os.path.dirname(compass.__file__)
-OPERA_OPERATION_CONTACT_EMAIL = 'opera-sds-ops@jpl.nasa.gov'
+OPERA_OPERATION_CONTACT_EMAIL = "opera-sds-ops@jpl.nasa.gov"
 
 # get the basename given an input file path
 # example: get_module_name(__file__)
-get_module_name = lambda x : os.path.basename(x).split('.')[0]
+get_module_name = lambda x: os.path.basename(x).split(".")[0]
 
 
 def check_file_path(file_path: str) -> None:
@@ -32,9 +31,9 @@ def check_file_path(file_path: str) -> None:
     file_path : str
         Path to file to be checked
     """
-    error_channel = journal.error('helpers.check_file_path')
+    error_channel = journal.error("helpers.check_file_path")
     if not os.path.exists(file_path):
-        err_str = f'{file_path} not found'
+        err_str = f"{file_path} not found"
         error_channel.log(err_str)
         raise FileNotFoundError(err_str)
 
@@ -47,15 +46,15 @@ def check_directory(file_path: str) -> None:
     file_path: str
         Path to directory to be checked
     """
-    error_channel = journal.error('helpers.check_directory')
+    error_channel = journal.error("helpers.check_directory")
     if not os.path.isdir(file_path):
-        err_str = f'{file_path} not found'
+        err_str = f"{file_path} not found"
         error_channel.log(err_str)
         raise FileNotFoundError(err_str)
 
 
 def get_file_polarization_mode(file_path: str) -> str:
-    '''Check polarization mode from file name
+    """Check polarization mode from file name
 
     Taking PP from SAFE file name with following format:
     MMM_BB_TTTR_LFPP_YYYYMMDDTHHMMSS_YYYYMMDDTHHMMSS_OOOOOO_DDDDDD_CCCC.SAFE
@@ -73,10 +72,10 @@ def get_file_polarization_mode(file_path: str) -> str:
     References
     ----------
     https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/naming-conventions
-    '''
+    """
     # index split tokens from rear to account for R in TTTR being possibly
     # replaced with '_'
-    safe_pol_mode = os.path.basename(file_path).split('_')[-6][2:]
+    safe_pol_mode = os.path.basename(file_path).split("_")[-6][2:]
 
     return safe_pol_mode
 
@@ -119,9 +118,9 @@ def check_write_dir(dst_path: str):
         File path to directory for which to check writing permission
     """
     if not dst_path:
-        dst_path = '.'
+        dst_path = "."
 
-    error_channel = journal.error('helpers.check_write_dir')
+    error_channel = journal.error("helpers.check_write_dir")
 
     # check if scratch path exists
     dst_path_ok = os.path.isdir(dst_path)
@@ -150,17 +149,17 @@ def check_dem(dem_path: str):
     dem_path : str
         File path to DEM for which to check GDAL-compatibility
     """
-    error_channel = journal.error('helpers.check_dem')
+    error_channel = journal.error("helpers.check_dem")
     try:
         gdal.Open(dem_path, gdal.GA_ReadOnly)
     except ValueError:
-        err_str = f'{dem_path} cannot be opened by GDAL'
+        err_str = f"{dem_path} cannot be opened by GDAL"
         error_channel.log(err_str)
         raise ValueError(err_str)
 
     epsg = isce3.io.Raster(dem_path).get_epsg()
     if not 1024 <= epsg <= 32767:
-        err_str = f'DEM epsg of {epsg} out of bounds'
+        err_str = f"DEM epsg of {epsg} out of bounds"
         error_channel.log(err_str)
         raise ValueError(err_str)
 
@@ -257,7 +256,9 @@ def burst_bbox_from_db(burst_id, burst_db_file=None, burst_db_conn=None):
         burst_db_conn = sqlite3.connect(burst_db_file)
     burst_db_conn.row_factory = sqlite3.Row  # return rows as dicts
 
-    query = "SELECT epsg, xmin, ymin, xmax, ymax FROM burst_id_map WHERE burst_id_jpl = ?"
+    query = (
+        "SELECT epsg, xmin, ymin, xmax, ymax FROM burst_id_map WHERE burst_id_jpl = ?"
+    )
     cur = burst_db_conn.execute(query, (burst_id,))
     result = cur.fetchone()
 
@@ -303,7 +304,7 @@ def burst_bboxes_from_db(burst_ids, burst_db_file=None, burst_db_conn=None):
     burst_db_conn.row_factory = sqlite3.Row  # return rows as dicts
 
     # concatenate '?, ' with for each burst ID for IN query
-    qs_in_query = ', '.join('?' for _ in burst_ids)
+    qs_in_query = ", ".join("?" for _ in burst_ids)
     query = f"SELECT * FROM burst_id_map WHERE burst_id_jpl IN ({qs_in_query})"
 
     cur = burst_db_conn.execute(query, burst_ids)
     results = cur.fetchall()
@@ -317,8 +318,12 @@ def burst_bboxes_from_db(burst_ids, burst_db_file=None, burst_db_conn=None):
     burst_ids = [[]] * n_results
     for i_result, result in enumerate(results):
         epsgs[i_result] = result["epsg"]
-        bboxes[i_result] = (result["xmin"], result["ymin"],
-                            result["xmax"], result["ymax"])
+        bboxes[i_result] = (
+            result["xmin"],
+            result["ymin"],
+            result["xmax"],
+            result["ymax"],
+        )
         burst_ids[i_result] = result["burst_id_jpl"]
 
     # TODO add warning if not all burst bounding boxes found
@@ -326,7 +331,7 @@ def burst_bboxes_from_db(burst_ids, burst_db_file=None, burst_db_conn=None):
 
 
 def open_raster(filename, band=1):
-    '''
+    """
     Return band as numpy array from gdal-friendly raster
 
     Parameters
@@ -340,10 +345,10 @@ def open_raster(filename, band=1):
     -------
     raster: np.ndarray
         Numpy array containing the raster band to open
-    '''
-    error_channel = journal.error('helpers.open_raster')
+    """
+    error_channel = journal.error("helpers.open_raster")
 
     if not os.path.isfile(filename):
-        err_str = f'{filename} '
+        err_str = f"{filename} not found"
         error_channel.log(err_str)
         raise FileNotFoundError(err_str)
 
@@ -363,15 +368,15 @@ def open_raster(filename, band=1):
     # bytes of flat binary is that of a jpeg but the binary is not then
     # GDAL throws a libjpeg runtime error. The following specifically tries
     # to load as an ENVI file.
-    ds = gdal.OpenEx(filename, gdal.OF_VERBOSE_ERROR,
-                     allowed_drivers=['ENVI'])
+    ds = gdal.OpenEx(filename, gdal.OF_VERBOSE_ERROR, allowed_drivers=["ENVI"])
     arr = ds.GetRasterBand(band).ReadAsArray()
 
     return arr
 
 
-def write_raster(filename, data_list, descriptions,
-                 data_type=gdal.GDT_Float32, data_format='GTiff'):
-    '''
+def write_raster(
+    filename, data_list, descriptions, data_type=gdal.GDT_Float32, data_format="GTiff"
+):
+    """
     Write a multiband GDAL-friendly raster to disk.
     Each dataset allocated in the output file contains
     a description of the dataset allocated for that band
@@ -391,15 +396,17 @@ def write_raster(filename, data_list, descriptions,
         GDAL dataset type
     format: gdal.Format
         Format for GDAL output file
-    '''
+    """
 
-    error_channel = journal.error('helpers.write_raster')
+    error_channel = journal.error("helpers.write_raster")
 
     # Check number of datasets match number of descriptions
     if len(data_list) != len(descriptions):
-        err_str = f'Number of datasets to write does not match' \
-                  f'the number of descriptions ' \
-                  f'{len(data_list)} != {len(descriptions)}'
+        err_str = (
+            "Number of datasets to write does not match "
+            "the number of descriptions "
+            f"{len(data_list)} != {len(descriptions)}"
+        )
         error_channel.log(err_str)
         raise ValueError(err_str)
 
@@ -422,7 +429,7 @@ def write_raster(filename, data_list, descriptions,
 
 
 def bursts_grouping_generator(bursts):
-    '''
+    """
     Dict to group bursts with the same burst ID
     but different polarizations
     key: burst ID, value: list[S1BurstSlc]
@@ -437,7 +444,7 @@ def bursts_grouping_generator(bursts):
         Burst ID of grouped list of bursts
     v: list[Sentinel1BurstSlc]
         List of bursts with the same burst ID
-    '''
+    """
     grouped_bursts = itertools.groupby(bursts, key=lambda b: str(b.burst_id))
 
     for k, v in grouped_bursts:
@@ -445,7 +452,7 @@ def bursts_grouping_generator(bursts):
 
 
 def get_time_delta_str(t_prev: time) -> str:
-    '''
+    """
     Helper function that computes difference between current time
     and a given time object and returns it as a str
 
@@ -456,6 +463,7 @@ def get_time_delta_str(t_prev: time) -> str:
     _: str
         Difference from current time and t_prev represented
         as a string
-    '''
-    return str(timedelta(seconds=time.perf_counter() -
-                         t_prev)).split(".", maxsplit=1)[0]
+    """
+    return str(timedelta(seconds=time.perf_counter() - t_prev)).split(".", maxsplit=1)[
+        0
+    ]
diff --git a/src/compass/utils/iono.py b/src/compass/utils/iono.py
index fd3d148d..f0b510c4 100644
--- a/src/compass/utils/iono.py
+++ b/src/compass/utils/iono.py
@@ -19,7 +19,7 @@
 
 def read_ionex(tec_file):
-    '''
+    """
     Read Total Electron Content (TEC) file in
     IONEX format
 
@@ -40,30 +40,32 @@ def read_ionex(tec_file):
         3D array with vertical TEC in TECU
     rms_maps: np.ndarray
         3d array with vertical TEC RMS in TECU
-    '''
+    """
     # functions for parsing strings from ionex file
     # link: https://github.com/daniestevez/jupyter_notebooks/blob/master/IONEX.ipynb
-    def parse_map(tec_map_str, key='TEC', exponent=-1):
-        tec_map_str = re.split(f'.*END OF {key} MAP', tec_map_str)[0]
-        tec_map = [np.fromstring(x, sep=' ') for x in
-                   re.split('.*LAT/LON1/LON2/DLON/H\\n', tec_map_str)[1:]]
-        return np.stack(tec_map) * 10 ** exponent
+    def parse_map(tec_map_str, key="TEC", exponent=-1):
+        tec_map_str = re.split(f".*END OF {key} MAP", tec_map_str)[0]
+        tec_map = [
+            np.fromstring(x, sep=" ")
+            for x in re.split(".*LAT/LON1/LON2/DLON/H\\n", tec_map_str)[1:]
+        ]
+        return np.stack(tec_map) * 10**exponent
 
     # read IONEX file
     with open(tec_file) as f:
         fc = f.read()
 
     # read header
-    header = fc.split('END OF HEADER')[0].split('\n')
+    header = fc.split("END OF HEADER")[0].split("\n")
     for line in header:
-        if line.strip().endswith('# OF MAPS IN FILE'):
+        if line.strip().endswith("# OF MAPS IN FILE"):
             num_map = int(line.split()[0])
-        elif line.strip().endswith('DLAT'):
+        elif line.strip().endswith("DLAT"):
             lat0, lat1, lat_step = (float(x) for x in line.split()[:3])
-        elif line.strip().endswith('DLON'):
+        elif line.strip().endswith("DLON"):
             lon0, lon1, lon_step = (float(x) for x in line.split()[:3])
-        elif line.strip().endswith('EXPONENT'):
+        elif line.strip().endswith("EXPONENT"):
             exponent = float(line.split()[0])
 
     # spatial coordinates
@@ -77,19 +79,28 @@ def parse_map(tec_map_str, key='TEC', exponent=-1):
     mins = np.arange(0, num_map * min_step, min_step)
 
     # read TEC and its RMS maps
-    tec_maps = np.array([parse_map(t, key='TEC', exponent=exponent)
-                         for t in fc.split('START OF TEC MAP')[1:]],
-                        dtype=np.float32)
-    rms_maps = np.array([parse_map(t, key='RMS', exponent=exponent)
-                         for t in fc.split('START OF RMS MAP')[1:]],
-                        dtype=np.float32)
+    tec_maps = np.array(
+        [
+            parse_map(t, key="TEC", exponent=exponent)
+            for t in fc.split("START OF TEC MAP")[1:]
+        ],
+        dtype=np.float32,
+    )
+    rms_maps = np.array(
+        [
+            parse_map(t, key="RMS", exponent=exponent)
+            for t in fc.split("START OF RMS MAP")[1:]
+        ],
+        dtype=np.float32,
+    )
 
     return mins, lats, lons, tec_maps, rms_maps
 
 
-def get_ionex_value(tec_file, utc_sec, lat, lon,
-                    interp_method='linear3d', rotate_tec_map=False):
-    '''
+def get_ionex_value(
+    tec_file, utc_sec, lat, lon, interp_method="linear3d", rotate_tec_map=False
+):
+    """
     Get the TEC value from input IONEX file for the input lat/lon/datetime.
     Reference: Schaer, S., Gurtner, W., & Feltens, J. (1998). IONEX: The
     ionosphere map exchange format
@@ -114,33 +125,34 @@ def get_ionex_value(tec_file, utc_sec, lat, lon,
     -------
     tec_val: float or 1D np.ndarray
         Vertical TEC value in TECU
-    '''
+    """
+
     def interp_3d_rotate(interpfs, mins, lats, lons, utc_min, lat, lon):
         ind0 = np.where((mins - utc_min) <= 0)[0][-1]
         ind1 = ind0 + 1
-        lon0 = lon + (utc_min - mins[ind0]) * 360. / (24. * 60.)
-        lon1 = lon + (utc_min - mins[ind1]) * 360. / (24. * 60.)
+        lon0 = lon + (utc_min - mins[ind0]) * 360.0 / (24.0 * 60.0)
+        lon1 = lon + (utc_min - mins[ind1]) * 360.0 / (24.0 * 60.0)
         tec_val0 = interpfs[ind0](lon0, lat)
         tec_val1 = interpfs[ind1](lon1, lat)
-        tec_val = ((mins[ind1] - utc_min) / (mins[ind1] - mins[ind0]) * tec_val0
-                   + (utc_min - mins[ind0]) / (
-                           mins[ind1] - mins[ind0]) * tec_val1)
+        tec_val = (mins[ind1] - utc_min) / (mins[ind1] - mins[ind0]) * tec_val0 + (
+            utc_min - mins[ind0]
+        ) / (mins[ind1] - mins[ind0]) * tec_val1
         return tec_val
 
     # time info
-    utc_min = utc_sec / 60.
+    utc_min = utc_sec / 60.0
 
     # read TEC file
     mins, lats, lons, tec_maps = read_ionex(tec_file)[:4]
 
     # resample
-    if interp_method == 'nearest':
+    if interp_method == "nearest":
         lon_ind = np.abs(lons - lon).argmin()
         lat_ind = np.abs(lats - lat).argmin()
         time_ind = np.abs(mins - utc_min).argmin()
         tec_val = tec_maps[time_ind, lat_ind, lon_ind]
 
-    elif interp_method in ['linear', 'linear2d', 'bilinear']:
+    elif interp_method in ["linear", "linear2d", "bilinear"]:
         time_ind = np.abs(mins.reshape(-1, 1) - utc_min).argmin(axis=0)
 
         if isinstance(utc_min, np.ndarray):
@@ -151,16 +163,16 @@ def interp_3d_rotate(interpfs, mins, lats, lons, utc_min, lat, lon):
                     x=lons,
                     y=lats,
                     z=tec_maps[time_ind[i], :, :],
-                    kind='linear',
+                    kind="linear",
                 )(lon[i], lat[i])
         else:
             tec_val = interpolate.interp2d(
                 x=lons,
                 y=lats,
                 z=tec_maps[time_ind[0], :, :],
-                kind='linear',
+                kind="linear",
             )(lon, lat)
-    elif interp_method in ['linear3d', 'trilinear']:
+    elif interp_method in ["linear3d", "trilinear"]:
         if not rotate_tec_map:
             # option 1: interpolate between consecutive TEC maps
             # testings shows better agreement with SAR obs than option 2.
@@ -168,7 +180,7 @@ def interp_3d_rotate(interpfs, mins, lats, lons, utc_min, lat, lon):
                 points=(mins, np.ascontiguousarray(np.flip(lats)), lons),
                 values=np.flip(tec_maps, axis=1),
                 xi=(utc_min, lat, lon),
-                method='linear',
+                method="linear",
             )
         else:
             # option 2: interpolate between consecutive rotated TEC maps
@@ -182,7 +194,7 @@ def interp_3d_rotate(interpfs, mins, lats, lons, utc_min, lat, lon):
                         x=lons,
                         y=lats,
                         z=tec_maps[i, :, :],
-                        kind='linear',
+                        kind="linear",
                     ),
                 )
 
@@ -192,27 +204,35 @@ def interp_3d_rotate(interpfs, mins, lats, lons, utc_min, lat, lon):
                 for i in range(num_pts):
                     tec_val[i] = interp_3d_rotate(
                         interpfs,
-                        mins, lats, lons,
-                        utc_min[i], lat[i], lon[i],
+                        mins,
+                        lats,
+                        lons,
+                        utc_min[i],
+                        lat[i],
+                        lon[i],
                     )
             else:
                 tec_val = interp_3d_rotate(
                     interpfs,
-                    mins, lats, lons,
-                    utc_min, lat, lon,
+                    mins,
+                    lats,
+                    lons,
+                    utc_min,
+                    lat,
+                    lon,
                 )
 
     else:
-        msg = f'Un-recognized interp_method input: {interp_method}!'
-        msg += '\nSupported inputs: nearest, linear2d, linear3d.'
+        msg = f"Un-recognized interp_method input: {interp_method}!"
+        msg += "\nSupported inputs: nearest, linear2d, linear3d."
logging.error(msg) raise ValueError(msg) return tec_val -def download_ionex(date_str, tec_dir, sol_code='jpl', date_fmt='%Y%m%d'): - ''' +def download_ionex(date_str, tec_dir, sol_code="jpl", date_fmt="%Y%m%d"): + """ Download IGS vertical TEC files in IONEX format Parameters @@ -230,20 +250,20 @@ def download_ionex(date_str, tec_dir, sol_code='jpl', date_fmt='%Y%m%d'): ------- fname_dst_uncomp: str Path to local uncompressed IONEX file - ''' + """ # get the source (remote) and destination (local) file path/url kwargs = dict(sol_code=sol_code, date_fmt=date_fmt) fname_src = get_ionex_filename(date_str, tec_dir=None, **kwargs) - fname_dst = get_ionex_filename(date_str, tec_dir=tec_dir, **kwargs) + '.Z' + fname_dst = get_ionex_filename(date_str, tec_dir=tec_dir, **kwargs) + ".Z" fname_dst_uncomp = fname_dst[:-2] # download - compose cmd cmd = f'wget --continue --auth-no-challenge "{fname_src}"' if os.path.isfile(fname_dst) and os.path.getsize(fname_dst) > 1000: - cmd += ' --timestamping' + cmd += " --timestamping" # Record executed command line in logging file - logging.info(f'Execute command: {cmd}') + logging.info(f"Execute command: {cmd}") # download - run cmd in output dir pwd = os.getcwd() @@ -253,20 +273,20 @@ def download_ionex(date_str, tec_dir, sol_code='jpl', date_fmt='%Y%m%d'): # uncompress # if output file 1) does not exist or 2) smaller than 400k in size or 3) older - if (not os.path.isfile(fname_dst_uncomp) - or os.path.getsize(fname_dst_uncomp) < 400e3 - or os.path.getmtime(fname_dst_uncomp) < os.path.getmtime( - fname_dst)): + if ( + not os.path.isfile(fname_dst_uncomp) + or os.path.getsize(fname_dst_uncomp) < 400e3 + or os.path.getmtime(fname_dst_uncomp) < os.path.getmtime(fname_dst) + ): cmd = f"gzip --force --keep --decompress {fname_dst}" - logging.info(f'Execute command: {cmd}') + logging.info(f"Execute command: {cmd}") os.system(cmd) return fname_dst_uncomp -def get_ionex_filename(date_str, tec_dir=None, sol_code='jpl', - date_fmt='%Y%m%d'): - ''' +def get_ionex_filename(date_str, tec_dir=None, sol_code="jpl", date_fmt="%Y%m%d"): + """ Get the file name of the IONEX file Parameters @@ -285,10 +305,10 @@ def get_ionex_filename(date_str, tec_dir=None, sol_code='jpl', ------- tec_file: str Path to the local uncompressed (or remote compressed) TEC file - ''' + """ dd = dt.datetime.strptime(date_str, date_fmt) - doy = f'{dd.timetuple().tm_yday:03d}' + doy = f"{dd.timetuple().tm_yday:03d}" yy = str(dd.year)[2:4] # file name base @@ -307,7 +327,7 @@ def get_ionex_filename(date_str, tec_dir=None, sol_code='jpl', def get_ionex_height(tec_file): - ''' + """ Get the height of the thin-shell ionosphere from IONEX file Parameters @@ -319,20 +339,19 @@ def get_ionex_height(tec_file): ------- iono_hgt: float Height above the surface in meters - ''' + """ with open(tec_file) as f: lines = f.readlines() for line in lines: - if line.strip().endswith('DHGT'): + if line.strip().endswith("DHGT"): ion_hgt = float(line.split()[0]) break return ion_hgt -def ionosphere_delay(utc_time, wavelength, - tec_file, lon_arr, lat_arr, inc_arr): - ''' +def ionosphere_delay(utc_time, wavelength, tec_file, lon_arr, lat_arr, inc_arr): + """ Calculate ionosphere delay for geolocation Parameters @@ -354,24 +373,20 @@ def ionosphere_delay(utc_time, wavelength, ------- los_iono_delay: np.ndarray Ionospheric delay in line of sight in meters - ''' + """ if not tec_file: - print('"tec_file" was not provided. ' - 'Ionosphere correction will not be applied.') + print('"tec_file" was not provided. 
Ionosphere correction will not be applied.') return np.zeros(lon_arr.shape) if not os.path.exists(tec_file): - raise RuntimeError(f'IONEX file was not found: {tec_file}') + raise RuntimeError(f"IONEX file was not found: {tec_file}") - utc_tod_sec = (utc_time.hour * 3600.0 - + utc_time.minute * 60.0 - + utc_time.second) + utc_tod_sec = utc_time.hour * 3600.0 + utc_time.minute * 60.0 + utc_time.second - ionex_val = get_ionex_value(tec_file, - utc_tod_sec, - lat_arr.flatten(), - lon_arr.flatten()) + ionex_val = get_ionex_value( + tec_file, utc_tod_sec, lat_arr.flatten(), lon_arr.flatten() + ) ionex_val = ionex_val.reshape(lon_arr.shape) @@ -383,7 +398,6 @@ def ionosphere_delay(utc_time, wavelength, # Constant in m3/s2 K = 40.31 - los_iono_delay = (K * ELECTRON_PER_SQM / freq_sensor**2 - / np.cos(np.deg2rad(inc_arr))) + los_iono_delay = K * ELECTRON_PER_SQM / freq_sensor**2 / np.cos(np.deg2rad(inc_arr)) return los_iono_delay diff --git a/src/compass/utils/lut.py b/src/compass/utils/lut.py index ab739c40..5a49c5b6 100644 --- a/src/compass/utils/lut.py +++ b/src/compass/utils/lut.py @@ -1,6 +1,7 @@ -''' +""" Placeholder for model-based correction LUT -''' +""" + import os import isce3 import numpy as np @@ -15,13 +16,18 @@ from compass.utils.helpers import write_raster -def cumulative_correction_luts(burst, dem_path, tec_path, - scratch_path=None, - weather_model_path=None, - rg_step=200, az_step=0.25, - delay_type='dry', - geo2rdr_params=None): - ''' +def cumulative_correction_luts( + burst, + dem_path, + tec_path, + scratch_path=None, + weather_model_path=None, + rg_step=200, + az_step=0.25, + delay_type="dry", + geo2rdr_params=None, +): + """ Sum correction LUTs and returns cumulative correction LUT in slant range and azimuth directions @@ -56,73 +62,101 @@ def cumulative_correction_luts(burst, dem_path, tec_path, az_lut: isce3.core.LUT2d Sum of azimuth correction LUTs in seconds as a function of azimuth time and slant range - ''' + """ # Get individual LUTs - geometrical_steer_doppler, bistatic_delay, az_fm_mismatch, [tide_rg, tide_az], \ - los_ionosphere, [wet_los_tropo, dry_los_tropo], los_static_tropo = \ - compute_geocoding_correction_luts(burst, - dem_path=dem_path, - tec_path=tec_path, - scratch_path=scratch_path, - weather_model_path=weather_model_path, - rg_step=rg_step, - az_step=az_step, - geo2rdr_params=geo2rdr_params) + ( + geometrical_steer_doppler, + bistatic_delay, + az_fm_mismatch, + [tide_rg, tide_az], + los_ionosphere, + [wet_los_tropo, dry_los_tropo], + los_static_tropo, + ) = compute_geocoding_correction_luts( + burst, + dem_path=dem_path, + tec_path=tec_path, + scratch_path=scratch_path, + weather_model_path=weather_model_path, + rg_step=rg_step, + az_step=az_step, + geo2rdr_params=geo2rdr_params, + ) # Convert to geometrical doppler from range time (seconds) to range (m) geometry_doppler = geometrical_steer_doppler.data * isce3.core.speed_of_light * 0.5 rg_lut_data = geometry_doppler + tide_rg + los_ionosphere + los_static_tropo # Add troposphere delay to range LUT - if 'wet' in delay_type: + if "wet" in delay_type: rg_lut_data += wet_los_tropo - if 'dry' in delay_type: + if "dry" in delay_type: rg_lut_data += dry_los_tropo # Invert signs to correct for convention az_lut_data = -(bistatic_delay.data + az_fm_mismatch.data) - rg_lut = isce3.core.LUT2d(bistatic_delay.x_start, - bistatic_delay.y_start, - bistatic_delay.x_spacing, - bistatic_delay.y_spacing, - rg_lut_data) - az_lut = isce3.core.LUT2d(bistatic_delay.x_start, - bistatic_delay.y_start, - 
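
# Back-of-the-envelope check of the delay formula above, assuming Sentinel-1's
# C-band carrier (~5.405 GHz) purely for illustration: the group delay is
# K * TEC / f**2, stretched along the line of sight by 1/cos(incidence).
import numpy as np

K = 40.31                 # m^3 / s^2, as in the code above
tec = 20.0 * 1e16         # 20 TECU expressed in electrons / m^2
freq = 5.405e9            # Hz; assumed C-band carrier for this example
inc = 35.0                # incidence angle in degrees

zenith_delay = K * tec / freq**2                     # ~0.28 m one way
los_delay = zenith_delay / np.cos(np.deg2rad(inc))   # ~0.34 m in slant range
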
bistatic_delay.x_spacing, - bistatic_delay.y_spacing, - az_lut_data) + rg_lut = isce3.core.LUT2d( + bistatic_delay.x_start, + bistatic_delay.y_start, + bistatic_delay.x_spacing, + bistatic_delay.y_spacing, + rg_lut_data, + ) + az_lut = isce3.core.LUT2d( + bistatic_delay.x_start, + bistatic_delay.y_start, + bistatic_delay.x_spacing, + bistatic_delay.y_spacing, + az_lut_data, + ) # Save corrections on disk. In this way, we should avoid running # the corrections again when allocating data inside the HDF5 product # Create a directory in the scratch path to save corrections - output_path = f'{scratch_path}/corrections' + output_path = f"{scratch_path}/corrections" os.makedirs(output_path, exist_ok=True) - data_list = [geometry_doppler, bistatic_delay.data, az_fm_mismatch.data, - tide_rg, tide_az, los_ionosphere] - descr = ['slant range geometrical doppler', 'azimuth bistatic delay', - 'azimuth FM rate mismatch', 'slant range Solid Earth tides', - 'azimuth time Solid Earth tides', 'line-of-sight ionospheric delay'] + data_list = [ + geometry_doppler, + bistatic_delay.data, + az_fm_mismatch.data, + tide_rg, + tide_az, + los_ionosphere, + ] + descr = [ + "slant range geometrical doppler", + "azimuth bistatic delay", + "azimuth FM rate mismatch", + "slant range Solid Earth tides", + "azimuth time Solid Earth tides", + "line-of-sight ionospheric delay", + ] if weather_model_path is not None: - if 'wet' in delay_type: + if "wet" in delay_type: data_list.append(wet_los_tropo) - descr.append('wet LOS troposphere') - if 'dry' in delay_type: + descr.append("wet LOS troposphere") + if "dry" in delay_type: data_list.append(dry_los_tropo) - descr.append('dry LOS troposphere') + descr.append("dry LOS troposphere") - write_raster(f'{output_path}/corrections', data_list, descr) + write_raster(f"{output_path}/corrections", data_list, descr) return rg_lut, az_lut -def compute_geocoding_correction_luts(burst, dem_path, tec_path, - scratch_path=None, - weather_model_path=None, - rg_step=200, az_step=0.25, - geo2rdr_params=None): - ''' +def compute_geocoding_correction_luts( + burst, + dem_path, + tec_path, + scratch_path=None, + weather_model_path=None, + rg_step=200, + az_step=0.25, + geo2rdr_params=None, +): + """ Compute slant range and azimuth LUTs corrections to be applied during burst geocoding @@ -181,7 +215,7 @@ def compute_geocoding_correction_luts(burst, dem_path, tec_path, List of numpy.ndarray containing the LOS wet and dry troposphere delays computed from the file specified under 'weather_model_path'. These delays need to be added to the slant range correction LUT2D. 
- ''' + """ # Get DEM raster dem_raster = isce3.io.Raster(dem_path) @@ -190,58 +224,63 @@ def compute_geocoding_correction_luts(burst, dem_path, tec_path, ellipsoid = proj.ellipsoid # Create directory to store SET temp results - output_path = f'{scratch_path}/corrections' + output_path = f"{scratch_path}/corrections" os.makedirs(output_path, exist_ok=True) # Compute Geometrical Steering Doppler - geometrical_steering_doppler = \ - burst.doppler_induced_range_shift(range_step=rg_step, az_step=az_step) + geometrical_steering_doppler = burst.doppler_induced_range_shift( + range_step=rg_step, az_step=az_step + ) # Compute bistatic delay bistatic_delay = burst.bistatic_delay(range_step=rg_step, az_step=az_step) # Run rdr2geo to obtain the required layers # return contents: lon_path, lat_path, height_path, inc_path, head_path - rdr2geo_raster_paths = compute_rdr2geo_rasters(burst, dem_raster, - output_path, rg_step, - az_step) + rdr2geo_raster_paths = compute_rdr2geo_rasters( + burst, dem_raster, output_path, rg_step, az_step + ) # Open rdr2geo layers - lon, lat, height, inc_angle, head_angle = \ - [open_raster(raster_path) for raster_path in rdr2geo_raster_paths] + lon, lat, height, inc_angle, head_angle = [ + open_raster(raster_path) for raster_path in rdr2geo_raster_paths + ] # Compute azimuth FM-rate mismatch - az_fm_mismatch = burst.az_fm_rate_mismatch_from_llh(lat, lon, height, - ellipsoid, - burst.as_isce3_radargrid( - az_step=az_step, - rg_step=rg_step) - ) + az_fm_mismatch = burst.az_fm_rate_mismatch_from_llh( + lat, + lon, + height, + ellipsoid, + burst.as_isce3_radargrid(az_step=az_step, rg_step=rg_step), + ) # compute Solid Earth Tides using pySolid. Decimate the rdr2geo layers. # compute decimation factor assuming a 5 km spacing along slant range dec_factor = int(np.round(5000.0 / rg_step)) dec_slice = np.s_[::dec_factor, ::dec_factor] - rg_set_temp, az_set_temp = solid_earth_tides(burst, - lat[dec_slice], - lon[dec_slice], - height[dec_slice], - ellipsoid, - geo2rdr_params) + rg_set_temp, az_set_temp = solid_earth_tides( + burst, + lat[dec_slice], + lon[dec_slice], + height[dec_slice], + ellipsoid, + geo2rdr_params, + ) out_shape = bistatic_delay.data.shape - kwargs = dict(order=1, mode='edge', anti_aliasing=True, - preserve_range=True) + kwargs = dict(order=1, mode="edge", anti_aliasing=True, preserve_range=True) rg_set = resize(rg_set_temp, out_shape, **kwargs) az_set = resize(az_set_temp, out_shape, **kwargs) # Compute ionosphere delay - los_ionosphere = ionosphere_delay(burst.sensing_mid, - burst.wavelength, - tec_path, lon, lat, inc_angle) + los_ionosphere = ionosphere_delay( + burst.sensing_mid, burst.wavelength, tec_path, lon, lat, inc_angle + ) # Compute wet and dry troposphere delays using RAiDER - wet_los_tropo, dry_los_tropo, los_static_tropo =\ - [np.zeros(out_shape) for _ in range(3)] + wet_los_tropo, dry_los_tropo, los_static_tropo = [ + np.zeros(out_shape) for _ in range(3) + ] if weather_model_path is None: # Compute static troposphere correction @@ -251,18 +290,20 @@ def compute_geocoding_correction_luts(burst, dem_path, tec_path, from RAiDER.delay import tropo_delay from RAiDER.llreader import RasterRDR from RAiDER.losreader import Zenith + # Instantiate an "aoi" object to read lat/lon/height files - aoi = RasterRDR(rdr2geo_raster_paths[1], rdr2geo_raster_paths[0], - rdr2geo_raster_paths[2]) + aoi = RasterRDR( + rdr2geo_raster_paths[1], rdr2geo_raster_paths[0], rdr2geo_raster_paths[2] + ) # Instantiate the Zenith object. 
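
# Standalone sketch of the decimate-then-resize pattern used for the Solid
# Earth Tides above: the correction varies slowly, so it is evaluated on a
# grid coarsened to roughly 5 km posting and interpolated back to the full
# LUT shape. The array shape below is illustrative only.
import numpy as np
from skimage.transform import resize

rg_step = 200.0                                  # m, the LUT default above
dec_factor = int(np.round(5000.0 / rg_step))     # -> 25
full = np.random.default_rng(0).random((100, 200))
coarse = full[::dec_factor, ::dec_factor]        # cheap computation grid
back = resize(coarse, full.shape, order=1, mode="edge",
              anti_aliasing=True, preserve_range=True)
assert back.shape == full.shape
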
         # Note: the RAiDER LOS object requires the orbit files.
         los = Zenith()

         # Compute the troposphere delay along the Zenith
-        zen_wet, zen_dry = tropo_delay(burst.sensing_start,
-                                       weather_model_path,
-                                       aoi, los)
+        zen_wet, zen_dry = tropo_delay(
+            burst.sensing_start, weather_model_path, aoi, los
+        )

         # RAiDER delay is one-way and along the zenith. Get the two-way LOS
         # delay by doubling and dividing by the cosine of the incidence angle
@@ -270,15 +311,25 @@ def compute_geocoding_correction_luts(burst, dem_path, tec_path,
         dry_los_tropo = 2.0 * zen_dry / np.cos(np.deg2rad(inc_angle))

     return (
-        geometrical_steering_doppler, bistatic_delay, az_fm_mismatch,
-        [rg_set, az_set], los_ionosphere,
-        [wet_los_tropo, dry_los_tropo], los_static_tropo
+        geometrical_steering_doppler,
+        bistatic_delay,
+        az_fm_mismatch,
+        [rg_set, az_set],
+        los_ionosphere,
+        [wet_los_tropo, dry_los_tropo],
+        los_static_tropo,
     )


-def solid_earth_tides(burst, lat_radar_grid, lon_radar_grid, hgt_radar_grid,
-                      ellipsoid, geo2rdr_params=None):
-    '''
+def solid_earth_tides(
+    burst,
+    lat_radar_grid,
+    lon_radar_grid,
+    hgt_radar_grid,
+    ellipsoid,
+    geo2rdr_params=None,
+):
+    """
     Compute displacement due to Solid Earth Tides (SET)
     in slant range and azimuth directions

@@ -301,7 +352,7 @@ def solid_earth_tides(burst, lat_radar_grid, lon_radar_grid, hgt_radar_grid,
         2D array with SET displacement along LOS
     az_set: np.ndarray
         2D array with SET displacement along azimuth
-    '''
+    """

     # Extract top-left coordinates from burst polygon
     lon_min, lat_min, _, _ = burst.border[0].bounds

@@ -313,48 +364,56 @@ def solid_earth_tides(burst, lat_radar_grid, lon_radar_grid, hgt_radar_grid,
     lon_start = lon_min - margin

     atr = {
-        'LENGTH': 25,
-        'WIDTH': 100,
-        'X_FIRST': lon_start,
-        'Y_FIRST': lat_start,
-        'X_STEP': 0.023,
-        'Y_STEP': 0.023
+        "LENGTH": 25,
+        "WIDTH": 100,
+        "X_FIRST": lon_start,
+        "Y_FIRST": lat_start,
+        "X_STEP": 0.023,
+        "Y_STEP": 0.023,
     }

     # Run pySolid and get SET in ENU coordinate system
-    (set_e,
-     set_n,
-     set_u) = pysolid.calc_solid_earth_tides_grid(burst.sensing_start, atr,
-                                                  display=False, verbose=True)
+    (set_e, set_n, set_u) = pysolid.calc_solid_earth_tides_grid(
+        burst.sensing_start, atr, display=False, verbose=True
+    )

     # Resample SET from geographical grid to radar grid
     # Generate the lat/lon arrays for the SET geogrid
-    lat_geo_array = np.linspace(atr['Y_FIRST'],
-                                lat_start + atr['Y_STEP'] * atr['LENGTH'],
-                                num=atr['LENGTH'])
-    lon_geo_array = np.linspace(atr['X_FIRST'],
-                                lon_start + atr['X_STEP'] * atr['WIDTH'],
-                                num=atr['WIDTH'])
+    lat_geo_array = np.linspace(
+        atr["Y_FIRST"], lat_start + atr["Y_STEP"] * atr["LENGTH"], num=atr["LENGTH"]
+    )
+    lon_geo_array = np.linspace(
+        atr["X_FIRST"], lon_start + atr["X_STEP"] * atr["WIDTH"], num=atr["WIDTH"]
+    )

     # Use scipy RGI to resample SET from geocoded to radar coordinates
     pts_src = (np.flipud(lat_geo_array), lon_geo_array)
     pts_dst = (lat_radar_grid.flatten(), lon_radar_grid.flatten())

-    rdr_set_e, rdr_set_n, rdr_set_u = \
-        [resample_set(set_enu, pts_src, pts_dst).reshape(lat_radar_grid.shape)
-         for set_enu in [set_e, set_n, set_u]]
+    rdr_set_e, rdr_set_n, rdr_set_u = [
+        resample_set(set_enu, pts_src, pts_dst).reshape(lat_radar_grid.shape)
+        for set_enu in [set_e, set_n, set_u]
+    ]

     # Convert SET from ENU to range/azimuth coordinates
-    set_rg, set_az = enu2rgaz(burst.as_isce3_radargrid(), burst.orbit, ellipsoid,
-                              lon_radar_grid, lat_radar_grid, hgt_radar_grid,
-                              rdr_set_e, rdr_set_n, rdr_set_u, geo2rdr_params)
+    set_rg, set_az = enu2rgaz(
+        burst.as_isce3_radargrid(),
+        burst.orbit,
+        ellipsoid,
+        lon_radar_grid,
+
lat_radar_grid, + hgt_radar_grid, + rdr_set_e, + rdr_set_n, + rdr_set_u, + geo2rdr_params, + ) return set_rg, set_az -def compute_rdr2geo_rasters(burst, dem_raster, output_path, - rg_step, az_step): - ''' +def compute_rdr2geo_rasters(burst, dem_raster, output_path, rg_step, az_step): + """ Get latitude, longitude, incidence and azimuth angle on multi-looked radar grid @@ -381,7 +440,7 @@ def compute_rdr2geo_rasters(burst, dem_raster, output_path, Path to incidence angle raster head_path: str Path to heading angle raster - ''' + """ # Some ancillary inputs epsg = dem_raster.get_epsg() @@ -389,33 +448,38 @@ def compute_rdr2geo_rasters(burst, dem_raster, output_path, ellipsoid = proj.ellipsoid # Get radar grid for the correction grid - rdr_grid = burst.as_isce3_radargrid(az_step=az_step, - rg_step=rg_step) + rdr_grid = burst.as_isce3_radargrid(az_step=az_step, rg_step=rg_step) grid_doppler = isce3.core.LUT2d() # Initialize the rdr2geo object - rdr2geo_obj = isce3.geometry.Rdr2Geo(rdr_grid, burst.orbit, - ellipsoid, grid_doppler, - threshold=1.0e-8) + rdr2geo_obj = isce3.geometry.Rdr2Geo( + rdr_grid, burst.orbit, ellipsoid, grid_doppler, threshold=1.0e-8 + ) # Get the rdr2geo raster needed for SET computation - topo_output = {f'{output_path}/x.tif': gdal.GDT_Float64, - f'{output_path}/y.tif': gdal.GDT_Float64, - f'{output_path}/height.tif': gdal.GDT_Float64, - f'{output_path}/incidence_angle.tif': gdal.GDT_Float32, - f'{output_path}/heading_angle.tif': gdal.GDT_Float32} + topo_output = { + f"{output_path}/x.tif": gdal.GDT_Float64, + f"{output_path}/y.tif": gdal.GDT_Float64, + f"{output_path}/height.tif": gdal.GDT_Float64, + f"{output_path}/incidence_angle.tif": gdal.GDT_Float32, + f"{output_path}/heading_angle.tif": gdal.GDT_Float32, + } raster_list = [ - isce3.io.Raster(fname, rdr_grid.width, - rdr_grid.length, 1, dtype, 'GTiff') - for fname, dtype in topo_output.items()] + isce3.io.Raster(fname, rdr_grid.width, rdr_grid.length, 1, dtype, "GTiff") + for fname, dtype in topo_output.items() + ] x_raster, y_raster, height_raster, incidence_raster, heading_raster = raster_list # Run rdr2geo on coarse radar grid - rdr2geo_obj.topo(dem_raster, x_raster, y_raster, - height_raster=height_raster, - incidence_angle_raster=incidence_raster, - heading_angle_raster=heading_raster) + rdr2geo_obj.topo( + dem_raster, + x_raster, + y_raster, + height_raster=height_raster, + incidence_angle_raster=incidence_raster, + heading_angle_raster=heading_raster, + ) # Return file path to rdr2geo layers paths = list(topo_output.keys()) @@ -423,7 +487,7 @@ def compute_rdr2geo_rasters(burst, dem_raster, output_path, def resample_set(geo_tide, pts_src, pts_dest): - ''' + """ Use scipy RegularGridInterpolator to resample geo_tide from a geographical to a radar grid @@ -439,18 +503,19 @@ def resample_set(geo_tide, pts_src, pts_dest): ------- rdr_tide: np.ndarray Tide displacement component resampled on radar grid - ''' + """ # Flip tide displacement component to be consistent with flipped latitudes geo_tide = np.flipud(geo_tide) - rgi_func = RGI(pts_src, geo_tide, method='nearest', - bounds_error=False, fill_value=0) + rgi_func = RGI( + pts_src, geo_tide, method="nearest", bounds_error=False, fill_value=0 + ) rdr_tide = rgi_func(pts_dest) return rdr_tide def compute_static_troposphere_delay(incidence_angle_arr, hgt_arr): - ''' + """ Compute troposphere delay using static model Parameters: @@ -464,7 +529,7 @@ def compute_static_troposphere_delay(incidence_angle_arr, hgt_arr): ------- tropo: np.ndarray Troposphere delay in 
slant range - ''' + """ ZPD = 2.3 H = 6000.0 diff --git a/src/compass/utils/radar_grid.py b/src/compass/utils/radar_grid.py index 0cd0c8e3..4e0433cc 100644 --- a/src/compass/utils/radar_grid.py +++ b/src/compass/utils/radar_grid.py @@ -3,8 +3,8 @@ def file_to_rdr_grid(ref_grid_path: str) -> isce3.product.RadarGridParameters: - '''read parameters from text file needed to create radar grid object''' - with open(ref_grid_path, 'r') as f_rdr_grid: + """read parameters from text file needed to create radar grid object""" + with open(ref_grid_path, "r") as f_rdr_grid: sensing_start = float(f_rdr_grid.readline()) wavelength = float(f_rdr_grid.readline()) prf = float(f_rdr_grid.readline()) @@ -17,31 +17,39 @@ def file_to_rdr_grid(ref_grid_path: str) -> isce3.product.RadarGridParameters: ref_epoch = isce3.core.DateTime(date_str[:-1]) rdr_grid = isce3.product.RadarGridParameters( - sensing_start, wavelength, prf, starting_range, - range_pixel_spacing, "right", length, width, - ref_epoch) + sensing_start, + wavelength, + prf, + starting_range, + range_pixel_spacing, + "right", + length, + width, + ref_epoch, + ) return rdr_grid -def rdr_grid_to_file(ref_grid_path: str, - rdr_grid: isce3.product.RadarGridParameters) -> None: - '''save parameters needed to create a new radar grid object''' +def rdr_grid_to_file( + ref_grid_path: str, rdr_grid: isce3.product.RadarGridParameters +) -> None: + """save parameters needed to create a new radar grid object""" with open(ref_grid_path, "w") as f_rdr_grid: - f_rdr_grid.write(str(rdr_grid.sensing_start) + '\n') - f_rdr_grid.write(str(rdr_grid.wavelength) + '\n') - f_rdr_grid.write(str(rdr_grid.prf) + '\n') - f_rdr_grid.write(str(rdr_grid.starting_range) + '\n') - f_rdr_grid.write(str(rdr_grid.range_pixel_spacing) + '\n') - f_rdr_grid.write(str(rdr_grid.length) + '\n') - f_rdr_grid.write(str(rdr_grid.width) + '\n') - f_rdr_grid.write(str(rdr_grid.ref_epoch) + '\n') + f_rdr_grid.write(str(rdr_grid.sensing_start) + "\n") + f_rdr_grid.write(str(rdr_grid.wavelength) + "\n") + f_rdr_grid.write(str(rdr_grid.prf) + "\n") + f_rdr_grid.write(str(rdr_grid.starting_range) + "\n") + f_rdr_grid.write(str(rdr_grid.range_pixel_spacing) + "\n") + f_rdr_grid.write(str(rdr_grid.length) + "\n") + f_rdr_grid.write(str(rdr_grid.width) + "\n") + f_rdr_grid.write(str(rdr_grid.ref_epoch) + "\n") -def get_decimated_rdr_grd(rdr_grid_original, - dec_factor_rng, - dec_factor_az) -> isce3.product.RadarGridParameters: - ''' +def get_decimated_rdr_grd( + rdr_grid_original, dec_factor_rng, dec_factor_az +) -> isce3.product.RadarGridParameters: + """ Decimate the `rdr_grid_original` by the factor close to `dec_factor_rng` and `dec_factor_az` in range / azimuth direction respectively, while making sure that the very first / last samples / lines in @@ -60,7 +68,7 @@ def get_decimated_rdr_grd(rdr_grid_original, ------- rdr_grid_decimated: isce3.product.RadarGridParameters Decimated radar grid - ''' + """ rdr_grid_decimated = rdr_grid_original.copy() rdr_grid_decimated.width = int(np.ceil(rdr_grid_original.width / dec_factor_rng)) interval_rng = (rdr_grid_original.width - 1) / (rdr_grid_decimated.width - 1) diff --git a/src/compass/utils/range_split_spectrum.py b/src/compass/utils/range_split_spectrum.py index c4142686..a9ffbfaf 100644 --- a/src/compass/utils/range_split_spectrum.py +++ b/src/compass/utils/range_split_spectrum.py @@ -3,8 +3,9 @@ from isce3.splitspectrum import splitspectrum from osgeo import gdal + def find_next_power(number): - ''' + """ Finds the next power of 2 of 'number' 
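
# The body of compute_static_troposphere_delay is cut off by the hunk above;
# given the two constants it declares (ZPD = 2.3 m zenith path delay and
# H = 6000 m scale height), the sketch below shows the usual static model
# those constants imply. It is an assumption, not a copy of the function.
import numpy as np

def static_tropo_sketch(incidence_angle_arr, hgt_arr, zpd=2.3, scale=6000.0):
    # exponential decay with height, mapped to slant range by 1/cos(incidence)
    return zpd / np.cos(np.deg2rad(incidence_angle_arr)) * np.exp(-hgt_arr / scale)
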
    Parameters
@@ -16,21 +17,18 @@
    -------
    power: int
        Next power of 2 of 'number'
-    '''
+    """
    power = 1
-    if (number and not (number & (number - 1))):
+    if number and not (number & (number - 1)):
        return number
-    while (power < number):
+    while power < number:
        power <<= 1
-    return number
+    return power


-def range_split_spectrum(burst,
-                         burst_path,
-                         cfg_split_spectrum,
-                         scratch_path):
-    '''
+def range_split_spectrum(burst, burst_path, cfg_split_spectrum, scratch_path):
+    """
    Split burst range spectrum
    Parameters
    ----------
@@ -48,16 +46,19 @@ def range_split_spectrum(burst,
    burst_raster: isce3.io.Raster
        3-band ISCE3 Raster. Band #1: low band; Band #2: main band;
        Band #3: high band
-    '''
+    """
    length, width = burst.shape
    lines_per_block = cfg_split_spectrum.lines_per_block
-    burst_id_pol = f'{str(burst.burst_id)}_{burst.polarization}'
+    burst_id_pol = f"{str(burst.burst_id)}_{burst.polarization}"

    # In ISCE3, we can use a raised cosine to implement the S1-A/B Hamming window
    window_type = burst.range_window_type
-    window_type = 'Cosine' if window_type.casefold() == 'hamming' else window_type
-    window_shape = 2 * burst.range_window_coefficient - 1.0 if \
-        window_type.casefold() == 'hamming' else burst.range_window_coefficient
+    window_shape = (
+        2 * burst.range_window_coefficient - 1.0
+        if window_type.casefold() == "hamming"
+        else burst.range_window_coefficient
+    )
+    window_type = "Cosine" if window_type.casefold() == "hamming" else window_type

    # Extract bandwidths (bw) and create frequency vectors
    half_bw = 0.5 * burst.range_bandwidth
@@ -80,16 +81,21 @@ def range_split_spectrum(burst,
        rg_bandwidth=burst.range_bandwidth,
        center_frequency=burst.radar_center_frequency,
        slant_range=rdr_grid.slant_range,
-        freq='A')
-
+        freq="A",
+    )
    # The output burst will
    # contain 3 bands: Band #1: low-band image; Band #2: main-band image;
    # Band #3: high-band image.
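
# Quick spot checks for find_next_power as defined above: powers of two are
# returned unchanged and everything else is rounded up. The second assert
# shows an equivalent bit-twiddling form of the same computation.
for n, expected in [(1, 1), (5, 8), (1024, 1024), (1025, 2048)]:
    assert find_next_power(n) == expected
    assert 1 << (n - 1).bit_length() == expected  # same result, no loop
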
in_ds = gdal.Open(burst_path, gdal.GA_ReadOnly) - driver = gdal.GetDriverByName('GTiff') - out_ds = driver.Create(f'{scratch_path}/{burst_id_pol}_low_main_high.slc.tif', - width, length, 3, gdal.GDT_CFloat32) + driver = gdal.GetDriverByName("GTiff") + out_ds = driver.Create( + f"{scratch_path}/{burst_id_pol}_low_main_high.slc.tif", + width, + length, + 3, + gdal.GDT_CFloat32, + ) # Prepare necessary variables for block processing lines_per_block = min(length, lines_per_block) @@ -103,35 +109,42 @@ def range_split_spectrum(burst, block_length = lines_per_block # Read a block of valid burst data - burst_data = in_ds.GetRasterBand(1).ReadAsArray(0, line_start, - width, block_length) + burst_data = in_ds.GetRasterBand(1).ReadAsArray( + 0, line_start, width, block_length + ) # Get the low band sub-image and corresponding metadata burst_low_data, _ = split_spectrum_obj.bandpass_shift_spectrum( - slc_raster=burst_data, low_frequency=low_band_freqs[0], + slc_raster=burst_data, + low_frequency=low_band_freqs[0], high_frequency=low_band_freqs[1], new_center_frequency=low_center_freq, - fft_size=find_next_power(width), window_shape=window_shape, - window_function=window_type, resampling=False + fft_size=find_next_power(width), + window_shape=window_shape, + window_function=window_type, + resampling=False, ) # Get the high sub-image and corresponding metadata burst_high_data, _ = split_spectrum_obj.bandpass_shift_spectrum( - slc_raster=burst_data, low_frequency=high_band_freqs[0], + slc_raster=burst_data, + low_frequency=high_band_freqs[0], high_frequency=high_band_freqs[1], new_center_frequency=high_center_freq, - fft_size=find_next_power(width), window_shape=window_shape, - window_function=window_type, resampling=False + fft_size=find_next_power(width), + window_shape=window_shape, + window_function=window_type, + resampling=False, ) # Write back all the processed data - out_ds.GetRasterBand(1).WriteArray(burst_low_data[0:block_length], - yoff=line_start) - out_ds.GetRasterBand(2).WriteArray(burst_data[0:block_length], - yoff=line_start) - out_ds.GetRasterBand(3).WriteArray(burst_high_data[0:block_length], - yoff=line_start) + out_ds.GetRasterBand(1).WriteArray( + burst_low_data[0:block_length], yoff=line_start + ) + out_ds.GetRasterBand(2).WriteArray(burst_data[0:block_length], yoff=line_start) + out_ds.GetRasterBand(3).WriteArray( + burst_high_data[0:block_length], yoff=line_start + ) out_ds.FlushCache() out_ds = None - burst_raster = isce3.io.Raster( - f'{scratch_path}/{burst_id_pol}_low_main_high.slc') + burst_raster = isce3.io.Raster(f"{scratch_path}/{burst_id_pol}_low_main_high.slc") return burst_raster diff --git a/src/compass/utils/raster_polygon.py b/src/compass/utils/raster_polygon.py index 069f3f88..bd9d3fed 100644 --- a/src/compass/utils/raster_polygon.py +++ b/src/compass/utils/raster_polygon.py @@ -1,6 +1,7 @@ -''' +""" Functions for computing and adding boundary polygon to geo_runconfig dicts -''' +""" + import os import numpy as np @@ -8,9 +9,8 @@ from shapely.geometry import MultiPoint -def get_boundary_polygon(filename, invalid_value=np.nan, - dataset_path_template=None): - ''' +def get_boundary_polygon(filename, invalid_value=np.nan, dataset_path_template=None): + """ Get boundary polygon for raster in 'filename'. 
Polygon includes only valid pixels @@ -28,35 +28,38 @@ def get_boundary_polygon(filename, invalid_value=np.nan, -------- poly: shapely.Polygon Shapely polygon including valid values - ''' + """ if not os.path.isfile(filename): - raise FileNotFoundError('cannot generate raster boundary - ' - f'{filename} not found') + raise FileNotFoundError( + f"cannot generate raster boundary - {filename} not found" + ) # Optimize this with block-processing? if dataset_path_template is not None: - dataset_path = dataset_path_template.replace('%FILE_PATH%', filename) + dataset_path = dataset_path_template.replace("%FILE_PATH%", filename) else: dataset_path = filename try: ds = gdal.Open(dataset_path, gdal.GA_ReadOnly) except: - raise ValueError(f'GDAL unable to open: {dataset_path}') + raise ValueError(f"GDAL unable to open: {dataset_path}") burst = ds.GetRasterBand(1).ReadAsArray() if np.isnan(invalid_value): - idy, idx = np.where((~np.isnan(burst.real)) & - (~np.isnan(burst.imag))) + idy, idx = np.where((~np.isnan(burst.real)) & (~np.isnan(burst.imag))) else: - idy, idx = np.where((burst.real == invalid_value) & - (burst.imag == invalid_value)) + idy, idx = np.where( + (burst.real == invalid_value) & (burst.imag == invalid_value) + ) # Get geotransform defining geogrid xmin, xsize, _, ymin, _, ysize = ds.GetGeoTransform() # Use geotransform to convert indices to geogrid points - tgt_xy = [[x_idx * xsize + xmin, y_idx * ysize + ymin] - for x_idx, y_idx in zip(idx[::100], idy[::100])] + tgt_xy = [ + [x_idx * xsize + xmin, y_idx * ysize + ymin] + for x_idx, y_idx in zip(idx[::100], idy[::100]) + ] points = MultiPoint(tgt_xy) poly = points.convex_hull diff --git a/src/compass/utils/runconfig.py b/src/compass/utils/runconfig.py index c16bbadb..76deb347 100644 --- a/src/compass/utils/runconfig.py +++ b/src/compass/utils/runconfig.py @@ -35,39 +35,42 @@ def load_validate_yaml(yaml_runconfig: str, workflow_name: str) -> dict: dict Validated user runconfig dict with defaults inserted """ - error_channel = journal.error('runconfig.load_validate_yaml') + error_channel = journal.error("runconfig.load_validate_yaml") try: # Load schema corresponding to 'workflow_name' and to validate against - schema_name = workflow_name if workflow_name == 's1_cslc_geo' \ - else 's1_cslc_radar' + schema_name = ( + workflow_name if workflow_name == "s1_cslc_geo" else "s1_cslc_radar" + ) schema = yamale.make_schema( - f'{helpers.WORKFLOW_SCRIPTS_DIR}/schemas/{schema_name}.yaml', - parser='ruamel') + f"{helpers.WORKFLOW_SCRIPTS_DIR}/schemas/{schema_name}.yaml", + parser="ruamel", + ) except: - err_str = f'unable to load schema for workflow {workflow_name}.' + err_str = f"unable to load schema for workflow {workflow_name}." 
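
# Self-contained sketch of the boundary-polygon recipe used above: map the
# (row, col) indices of valid pixels through the GDAL geotransform, then take
# the convex hull of a subsample of those points. The toy geotransform and
# valid-data mask below are made up for illustration.
import numpy as np
from shapely.geometry import MultiPoint

xmin, xsize, ymin, ysize = 500_000.0, 10.0, 4_000_000.0, -10.0  # toy values
valid = np.zeros((50, 80), dtype=bool)
valid[10:40, 20:60] = True                        # fake valid-data footprint

idy, idx = np.where(valid)
tgt_xy = [
    [x_idx * xsize + xmin, y_idx * ysize + ymin]
    for x_idx, y_idx in zip(idx[::100], idy[::100])   # subsample as above
]
poly = MultiPoint(tgt_xy).convex_hull
print(poly.bounds)
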
error_channel.log(err_str) raise ValueError(err_str) # Determine run config type based on existence of newlines - run_config_is_txt = '\n' in yaml_runconfig + run_config_is_txt = "\n" in yaml_runconfig # Prepare part of load and validation error message just in case - what_is_broken = 'from runconfig string' if run_config_is_txt \ - else yaml_runconfig + what_is_broken = "from runconfig string" if run_config_is_txt else yaml_runconfig if not run_config_is_txt and not os.path.isfile(yaml_runconfig): - raise FileNotFoundError(f'Yaml file {yaml_runconfig} not found.') + raise FileNotFoundError(f"Yaml file {yaml_runconfig} not found.") # load yaml file or string from command line try: if run_config_is_txt: - data = yamale.make_data(content=yaml_runconfig, - parser='ruamel') + data = yamale.make_data(content=yaml_runconfig, parser="ruamel") else: - data = yamale.make_data(yaml_runconfig, parser='ruamel') + data = yamale.make_data(yaml_runconfig, parser="ruamel") except yamale.YamaleError as yamale_err: - err_str = f'Yamale unable to load {workflow_name} runconfig yaml {what_is_broken} for validation.' + err_str = ( + f"Yamale unable to load {workflow_name} runconfig yaml {what_is_broken} for" + " validation." + ) error_channel.log(err_str) raise yamale.YamaleError(err_str) from yamale_err @@ -75,28 +78,30 @@ def load_validate_yaml(yaml_runconfig: str, workflow_name: str) -> dict: try: yamale.validate(schema, data) except yamale.YamaleError as yamale_err: - err_str = f'Validation fail for {workflow_name} runconfig yaml {what_is_broken}.' + err_str = ( + f"Validation fail for {workflow_name} runconfig yaml {what_is_broken}." + ) error_channel.log(err_str) raise yamale.YamaleError(err_str) from yamale_err # load default runconfig - parser = YAML(typ='safe') - default_cfg_path = f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/{schema_name}.yaml' - with open(default_cfg_path, 'r') as f_default: + parser = YAML(typ="safe") + default_cfg_path = f"{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/{schema_name}.yaml" + with open(default_cfg_path, "r") as f_default: default_cfg = parser.load(f_default) # load user config based on input type if run_config_is_txt: user_cfg = parser.load(yaml_runconfig) else: - with open(yaml_runconfig, 'r') as f_yaml: + with open(yaml_runconfig, "r") as f_yaml: user_cfg = parser.load(f_yaml) # Copy user-supplied configuration options into default runconfig helpers.deep_update(default_cfg, user_cfg) # Validate YAML values under groups dict - validate_group_dict(default_cfg['runconfig']['groups'], workflow_name) + validate_group_dict(default_cfg["runconfig"]["groups"], workflow_name) return default_cfg @@ -109,21 +114,21 @@ def validate_group_dict(group_cfg: dict, workflow_name) -> None: group_cfg : dict Dictionary storing runconfig options to validate """ - error_channel = journal.error('runconfig.validate_group_dict') + error_channel = journal.error("runconfig.validate_group_dict") # Check 'input_file_group' section of runconfig - input_group = group_cfg['input_file_group'] + input_group = group_cfg["input_file_group"] # If is_reference flag is False, check that file path to reference # burst is assigned and valid (required by geo2rdr and resample) - if workflow_name == 's1_cslc_radar': - is_reference = input_group['reference_burst']['is_reference'] + if workflow_name == "s1_cslc_radar": + is_reference = input_group["reference_burst"]["is_reference"] if not is_reference: - helpers.check_directory(input_group['reference_burst']['file_path']) + 
helpers.check_directory(input_group["reference_burst"]["file_path"]) # Check SAFE files - run_pol_mode = group_cfg['processing']['polarization'] + run_pol_mode = group_cfg["processing"]["polarization"] safe_pol_modes = [] - for safe_file in input_group['safe_file_path']: + for safe_file in input_group["safe_file_path"]: # Check if files exists helpers.check_file_path(safe_file) @@ -132,8 +137,8 @@ def validate_group_dict(group_cfg: dict, workflow_name) -> None: safe_pol_modes.append(safe_pol_mode) # Raise error if given co-pol file and expecting cross-pol or dual-pol - if run_pol_mode != 'co-pol' and safe_pol_mode in ['SV', 'SH']: - err_str = f'{run_pol_mode} polarization lacks cross-pol in {safe_file}' + if run_pol_mode != "co-pol" and safe_pol_mode in ["SV", "SH"]: + err_str = f"{run_pol_mode} polarization lacks cross-pol in {safe_file}" error_channel.log(err_str) raise ValueError(err_str) @@ -142,29 +147,29 @@ def validate_group_dict(group_cfg: dict, workflow_name) -> None: first_safe_pol_mode = safe_pol_modes[0][1] for safe_pol_mode in safe_pol_modes[1:]: if safe_pol_mode[1] != first_safe_pol_mode: - err_str = 'SH/SV SAFE file mixed with DH/DV' + err_str = "SH/SV SAFE file mixed with DH/DV" error_channel.log(err_str) raise ValueError(err_str) - for orbit_file in input_group['orbit_file_path']: + for orbit_file in input_group["orbit_file_path"]: helpers.check_file_path(orbit_file) # Check 'dynamic_ancillary_file_groups' section of runconfig # Check that DEM file exists and is GDAL-compatible - dem_path = group_cfg['dynamic_ancillary_file_group']['dem_file'] + dem_path = group_cfg["dynamic_ancillary_file_group"]["dem_file"] helpers.check_file_path(dem_path) helpers.check_dem(dem_path) # Check 'product_path_group' section of runconfig. # Check that directories herein have writing permissions - product_path_group = group_cfg['product_path_group'] - helpers.check_write_dir(product_path_group['product_path']) - helpers.check_write_dir(product_path_group['scratch_path']) - helpers.check_write_dir(product_path_group['sas_output_file']) + product_path_group = group_cfg["product_path_group"] + helpers.check_write_dir(product_path_group["product_path"]) + helpers.check_write_dir(product_path_group["scratch_path"]) + helpers.check_write_dir(product_path_group["sas_output_file"]) def runconfig_to_bursts(cfg: SimpleNamespace) -> list[Sentinel1BurstSlc]: - '''Return bursts based on parameters in given runconfig + """Return bursts based on parameters in given runconfig Parameters ---------- @@ -175,8 +180,8 @@ def runconfig_to_bursts(cfg: SimpleNamespace) -> list[Sentinel1BurstSlc]: ------- _ : list[Sentinel1BurstSlc] List of bursts loaded according to given configuration. 
- ''' - error_channel = journal.error('runconfig.correlate_burst_to_orbit') + """ + error_channel = journal.error("runconfig.correlate_burst_to_orbit") # dict to store list of bursts keyed by burst_ids bursts = [] @@ -185,30 +190,39 @@ def runconfig_to_bursts(cfg: SimpleNamespace) -> list[Sentinel1BurstSlc]: for safe_file in cfg.input_file_group.safe_file_path: # get orbit file from directory of first orbit file orbit_path = get_orbit_file_from_dir( - safe_file, - Path(cfg.input_file_group.orbit_file_path[0]).parent) + safe_file, Path(cfg.input_file_group.orbit_file_path[0]).parent + ) if not orbit_path: - err_str = f"No orbit file correlates to safe file: {os.path.basename(safe_file)}" + err_str = ( + f"No orbit file correlates to safe file: {os.path.basename(safe_file)}" + ) error_channel.log(err_str) raise ValueError(err_str) # from SAFE file mode, create dict of runconfig pol mode to polarization(s) safe_pol_mode = helpers.get_file_polarization_mode(safe_file) - if safe_pol_mode == 'SV': - mode_to_pols = {'co-pol':['VV']} - elif safe_pol_mode == 'DV': - mode_to_pols = {'co-pol':['VV'], 'cross-pol':['VH'], 'dual-pol':['VV', 'VH']} - elif safe_pol_mode == 'SH': - mode_to_pols = {'co-pol':['HH']} + if safe_pol_mode == "SV": + mode_to_pols = {"co-pol": ["VV"]} + elif safe_pol_mode == "DV": + mode_to_pols = { + "co-pol": ["VV"], + "cross-pol": ["VH"], + "dual-pol": ["VV", "VH"], + } + elif safe_pol_mode == "SH": + mode_to_pols = {"co-pol": ["HH"]} else: - mode_to_pols = {'co-pol':['HH'], 'cross-pol':['HV'], 'dual-pol':['HH', 'HV']} + mode_to_pols = { + "co-pol": ["HH"], + "cross-pol": ["HV"], + "dual-pol": ["HH", "HV"], + } pols = mode_to_pols[cfg.processing.polarization] # zip pol and IW subswath indices together i_subswaths = [1, 2, 3] - pol_subswath_index_pairs = [(pol, i) - for pol in pols for i in i_subswaths] + pol_subswath_index_pairs = [(pol, i) for pol in pols for i in i_subswaths] # list of burst ID + polarization tuples # used to prevent reference repeats @@ -228,8 +242,10 @@ def runconfig_to_bursts(cfg: SimpleNamespace) -> list[Sentinel1BurstSlc]: # include ALL bursts if no burst IDs given # is burst_id wanted? 
skip if not given in config
-            if (cfg.input_file_group.burst_id is not None and
-                    burst_id not in cfg.input_file_group.burst_id):
+            if (
+                cfg.input_file_group.burst_id is not None
+                and burst_id not in cfg.input_file_group.burst_id
+            ):
                 continue

             # get polarization and save as tuple with burst ID
@@ -244,7 +260,7 @@ def runconfig_to_bursts(cfg: SimpleNamespace) -> list[Sentinel1BurstSlc]:
                 continue

             # check if not a reference burst (radar grid workflow only)
-            if 'reference_burst' in cfg.input_file_group.__dict__:
+            if "reference_burst" in cfg.input_file_group.__dict__:
                 not_ref = not cfg.input_file_group.reference_burst.is_reference
             else:
                 not_ref = True
@@ -266,7 +282,7 @@ def runconfig_to_bursts(cfg: SimpleNamespace) -> list[Sentinel1BurstSlc]:


 def get_ref_radar_grid_info(ref_path):
-    ''' Find all reference radar grids info
+    """Find all reference radar grids info

     Parameters
     ----------
@@ -278,11 +294,11 @@ def get_ref_radar_grid_info(ref_path):
     ref_radar_grids:
         reference radar path and grid values found associated with burst ID keys
-    '''
-    rdr_grid_files = f'{ref_path}/radar_grid.txt'
+    """
+    rdr_grid_files = f"{ref_path}/radar_grid.txt"

     if not os.path.isfile(rdr_grid_files):
-        raise FileNotFoundError(f'No reference radar grids not found in {ref_path}')
+        raise FileNotFoundError(f"No reference radar grid found in {ref_path}")

     ref_rdr_path = os.path.dirname(rdr_grid_files)
     ref_rdr_grid = file_to_rdr_grid(rdr_grid_files)

@@ -310,7 +326,7 @@ def create_output_paths(sns, bursts):

         # Save output dir, output hdf5 and scratch dir to dict as
         # SimpleNamespace
-        out_dir = f'{product_paths.product_path}/{burst_id}/{date_str}'
+        out_dir = f"{product_paths.product_path}/{burst_id}/{date_str}"
         os.makedirs(out_dir, exist_ok=True)

         fname_stem = f"{burst_id}_{date_str}"
@@ -319,21 +335,25 @@ def create_output_paths(sns, bursts):
         browse_path = f"{out_dir}/{fname_stem}.png"
         stats_json_path = f"{out_dir}/{fname_stem}.json"

-        scratch_path = f'{product_paths.scratch_path}/{burst_id}/{date_str}'
+        scratch_path = f"{product_paths.scratch_path}/{burst_id}/{date_str}"
         os.makedirs(scratch_path, exist_ok=True)

-        output_paths[path_key] = SimpleNamespace(output_directory=out_dir,
-                                                 file_name_stem=fname_stem,
-                                                 file_name_pol=fname_pol,
-                                                 hdf5_path=h5_path,
-                                                 browse_path=browse_path,
-                                                 stats_json_path=stats_json_path,
-                                                 scratch_directory=scratch_path)
+        output_paths[path_key] = SimpleNamespace(
+            output_directory=out_dir,
+            file_name_stem=fname_stem,
+            file_name_pol=fname_pol,
+            hdf5_path=h5_path,
+            browse_path=browse_path,
+            stats_json_path=stats_json_path,
+            scratch_directory=scratch_path,
+        )

     return output_paths


+
 @dataclass(frozen=True)
 class RunConfig:
-    '''dataclass containing CSLC runconfig'''
+    """dataclass containing CSLC runconfig"""
+
     # workflow name
     name: str
     # runconfig options converted from dict
@@ -351,8 +371,7 @@ class RunConfig:
     output_paths: dict[tuple[str, str], SimpleNamespace]

     @classmethod
-    def load_from_yaml(cls, yaml_runconfig: str,
-                       workflow_name: str) -> RunConfig:
+    def load_from_yaml(cls, yaml_runconfig: str, workflow_name: str) -> RunConfig:
         """Initialize RunConfig class with options from given yaml file.
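
# Illustration of the per-burst output layout built by create_output_paths
# above, using a made-up product root and the burst-ID/date conventions
# visible in the code. The .h5 name is assumed to share the same stem, since
# hdf5_path is set in context elided from this hunk.
product_path = "/tmp/products"                 # hypothetical root
burst_id, date_str = "t064_135518_iw1", "20221016"   # hypothetical burst

out_dir = f"{product_path}/{burst_id}/{date_str}"
fname_stem = f"{burst_id}_{date_str}"
print(f"{out_dir}/{fname_stem}.h5")            # HDF5 product (assumed name)
print(f"{out_dir}/{fname_stem}.png")           # browse image
print(f"{out_dir}/{fname_stem}.json")          # stats JSON
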
Parameters @@ -366,7 +385,7 @@ def load_from_yaml(cls, yaml_runconfig: str, cfg = load_validate_yaml(yaml_runconfig, workflow_name) # Convert runconfig dict to SimpleNamespace - sns = wrap_namespace(cfg['runconfig']['groups']) + sns = wrap_namespace(cfg["runconfig"]["groups"]) bursts = runconfig_to_bursts(sns) @@ -374,7 +393,8 @@ def load_from_yaml(cls, yaml_runconfig: str, ref_rdr_grid_info = None if not sns.input_file_group.reference_burst.is_reference: ref_rdr_grid_info = get_ref_radar_grid_info( - sns.input_file_group.reference_burst.file_path) + sns.input_file_group.reference_burst.file_path + ) # For saving entire file with defaults filled-in as string to metadata. # Stop gap for writing dict to individual elements to HDF5 metadata @@ -382,8 +402,14 @@ def load_from_yaml(cls, yaml_runconfig: str, output_paths = create_output_paths(sns, bursts) - return cls(cfg['runconfig']['name'], sns, bursts, ref_rdr_grid_info, - user_plus_default_yaml_str, output_paths) + return cls( + cfg["runconfig"]["name"], + sns, + bursts, + ref_rdr_grid_info, + user_plus_default_yaml_str, + output_paths, + ) @property def burst_id(self) -> list[str]: @@ -454,33 +480,31 @@ def gpu_id(self): return self.groups.worker.gpu_id def as_dict(self): - '''Convert self to dict for write to YAML/JSON + """Convert self to dict for write to YAML/JSON Unable to dataclasses.asdict() because isce3 objects can not be pickled - ''' + """ + # Convenience functions def date_str(burst): - '''Burst datetime sensing_start to str conversion - ''' - return burst.sensing_start.date().strftime('%Y%m%d') + """Burst datetime sensing_start to str conversion""" + return burst.sensing_start.date().strftime("%Y%m%d") def burst_as_key(burst): - '''Create an unique key of burst ID, date string, and polarization - ''' - return '_'.join([str(burst.burst_id), date_str(burst), burst.polarization]) + """Create an unique key of burst ID, date string, and polarization""" + return "_".join([str(burst.burst_id), date_str(burst), burst.polarization]) self_as_dict = {} for key, val in self.__dict__.items(): - if key == 'groups': + if key == "groups": val = unwrap_to_dict(val) - elif key == 'bursts': + elif key == "bursts": val = {burst_as_key(burst): burst.as_dict() for burst in val} self_as_dict[key] = val return self_as_dict def to_yaml(self): - '''Dump runconfig as string to sys.stdout - ''' + """Dump runconfig as string to sys.stdout""" self_as_dict = self.as_dict() - yaml_obj = YAML(typ='safe') + yaml_obj = YAML(typ="safe") yaml_obj.dump(self_as_dict, sys.stdout) diff --git a/src/compass/utils/validate_product.py b/src/compass/utils/validate_product.py index 2e0dccae..b89f3b87 100644 --- a/src/compass/utils/validate_product.py +++ b/src/compass/utils/validate_product.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -''' +""" Collection of functions to compare 2 CSLC HDF5 contents and metadata -''' +""" import argparse import os @@ -18,16 +18,31 @@ def cmd_line_parser(): """ parser = argparse.ArgumentParser( description="""Validate reference and generated (secondary) S1 CSLC products""", - formatter_class=argparse.ArgumentDefaultsHelpFormatter) + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) parser.add_argument( - '-r', '--ref-product', type=str, dest='ref_product', - help='Reference CSLC or static layer product (i.e., golden dataset)') + "-r", + "--ref-product", + type=str, + dest="ref_product", + help="Reference CSLC or static layer product (i.e., golden dataset)", + ) parser.add_argument( - '-s', '--sec-product', type=str, 
dest='sec_product', - help='Secondary CSLC or static layer product to compare with reference') - parser.add_argument('-p', '--product-type', type=str, dest='product_type', - choices=['CSLC', 'static_layers'], - default='CSLC', help='Type of file to be validated') + "-s", + "--sec-product", + type=str, + dest="sec_product", + help="Secondary CSLC or static layer product to compare with reference", + ) + parser.add_argument( + "-p", + "--product-type", + type=str, + dest="product_type", + choices=["CSLC", "static_layers"], + default="CSLC", + help="Type of file to be validated", + ) return parser.parse_args() @@ -57,14 +72,15 @@ def _grid_info_retrieve(path_h5, dataset_names, is_static_layer): data_path = DATA_PATH # Extract existing dataset names with h5py - with h5py.File(path_h5, 'r') as h: - datasets_found = [ds_name for ds_name in dataset_names - if ds_name in h[data_path]] + with h5py.File(path_h5, "r") as h: + datasets_found = [ + ds_name for ds_name in dataset_names if ds_name in h[data_path] + ] ds_name = datasets_found[0] # Extract some info from reference/secondary products with GDAL - h5_gdal_path = f'NETCDF:{path_h5}://{data_path}/{ds_name}' + h5_gdal_path = f"NETCDF:{path_h5}://{data_path}/{ds_name}" dataset = gdal.Open(h5_gdal_path, gdal.GA_ReadOnly) geotransform = dataset.GetGeoTransform() proj = dataset.GetProjection() @@ -73,7 +89,7 @@ def _grid_info_retrieve(path_h5, dataset_names, is_static_layer): def compare_products(file_ref, file_sec, product_type): - ''' + """ Compare a reference and a newly generated (i.e., secondary) CSLC or static layer product @@ -85,26 +101,38 @@ def compare_products(file_ref, file_sec, product_type): File path to generated product to use for comparison product_type: str Product type of CSLC or static_layers - ''' + """ # Check if file paths exits if not os.path.exists(file_ref): - print(f'ERROR reference {product_type} product not found: {file_ref}') + print(f"ERROR reference {product_type} product not found: {file_ref}") return if not os.path.exists(file_sec): - print(f'ERROR secondary {product_type} product not found: {file_sec}') + print(f"ERROR secondary {product_type} product not found: {file_sec}") return # Extract some info from reference/secondary products - dataset_names = ['VV', 'VH', 'HH', 'HV'] if product_type == 'CSLC' else \ - ['x', 'y', 'z', 'incidence', 'local_incidence', 'heading', - 'layover_shadow_mask'] - is_static_layer = product_type == 'static_layers' - items_ref, geotransform_ref, proj_ref = \ - _grid_info_retrieve(file_ref, dataset_names, is_static_layer) - items_sec, geotransform_sec, proj_sec = \ - _grid_info_retrieve(file_sec, dataset_names, is_static_layer) + dataset_names = ( + ["VV", "VH", "HH", "HV"] + if product_type == "CSLC" + else [ + "x", + "y", + "z", + "incidence", + "local_incidence", + "heading", + "layover_shadow_mask", + ] + ) + is_static_layer = product_type == "static_layers" + items_ref, geotransform_ref, proj_ref = _grid_info_retrieve( + file_ref, dataset_names, is_static_layer + ) + items_sec, geotransform_sec, proj_sec = _grid_info_retrieve( + file_sec, dataset_names, is_static_layer + ) # Intersect grid items found set_ref_minus_sec = items_ref - items_sec @@ -112,32 +140,37 @@ def compare_products(file_ref, file_sec, product_type): err_str = "Grid items do not match.\n" if set_ref_minus_sec: - err_str += \ - f'\nReference {product_type} extra entries: {set_ref_minus_sec}' + err_str += f"\nReference {product_type} extra entries: {set_ref_minus_sec}" if set_sec_minus_ref: - err_str += \ - 
f'\nSecondary {product_type} extra entries: {set_sec_minus_ref}'
+        err_str += f"\nSecondary {product_type} extra entries: {set_sec_minus_ref}"

     # Bail out if the grid item keys differ
     if set_ref_minus_sec or set_sec_minus_ref:
         print(err_str)
         return

-    print(f'Comparing {product_type} projection ...')
+    print(f"Comparing {product_type} projection ...")
     if not proj_ref == proj_sec:
-        print(f'ERROR projection in reference {proj_ref} differs '
-              f'from projection in secondary {proj_sec}')
+        print(
+            f"ERROR projection in reference {proj_ref} differs "
+            f"from projection in secondary {proj_sec}"
+        )
         return

-    print('Comparing geo transform arrays ...')
+    print("Comparing geo transform arrays ...")
     if not np.array_equal(geotransform_ref, geotransform_sec):
-        print(f'ERROR Reference geo transform array {geotransform_ref} differs'
-              f'from secondary geo transform array {geotransform_sec}')
+        print(
+            f"ERROR Reference geo transform array {geotransform_ref} differs "
+            f"from secondary geo transform array {geotransform_sec}"
+        )
         return

-    print('Comparing raster arrays...')
-    compare_rasters = _compare_complex_slc_rasters if product_type == 'CSLC' \
+    print("Comparing raster arrays...")
+    compare_rasters = (
+        _compare_complex_slc_rasters
+        if product_type == "CSLC"
         else _compare_static_layer_rasters
+    )
     compare_rasters(file_ref, file_sec, items_ref)

@@ -155,14 +188,14 @@
     static_layer_items: list[str]
         List of names of static layers to compare
     """
-    data_path = f'{DATA_PATH}'
-    with h5py.File(file_ref, 'r') as h_ref, h5py.File(file_sec, 'r') as h_sec:
+    data_path = f"{DATA_PATH}"
+    with h5py.File(file_ref, "r") as h_ref, h5py.File(file_sec, "r") as h_sec:
         for static_layer_item in static_layer_items:
-            if static_layer_item == 'layover_shadow_mask':
+            if static_layer_item == "layover_shadow_mask":
                 continue

             # Retrieve static layer raster from ref and sec HDF5
-            static_path = f'{data_path}/{static_layer_item}'
+            static_path = f"{data_path}/{static_layer_item}"
             slc_ref = h_ref[static_path]
             slc_sec = h_sec[static_path]

@@ -180,8 +213,7 @@
             # Count the number of pixels in real and imaginary part above threshold
             pixel_diff_threshold = 1e-5
-            failed_pixels = np.count_nonzero(
-                ref_sec_diff > pixel_diff_threshold)
+            failed_pixels = np.count_nonzero(ref_sec_diff > pixel_diff_threshold)

             # Compute percentage of pixels above threshold
             tot_pixels_ref = np.count_nonzero(ref_nan)

             # Check that percentage of pixels above threshold is lower than 0.1 %
             total_fail_threshold = 0.001
-            assert percentage_fail < total_fail_threshold, \
-                f'{static_layer_item} exceeded {total_fail_threshold * 100} ' \
-                '% of pixels where reference and secondary differed by more than ' \
-                f'{pixel_diff_threshold}.'
+            assert percentage_fail < total_fail_threshold, (
+                f"{static_layer_item} exceeded {total_fail_threshold * 100} "
+                "% of pixels where reference and secondary differed by more than "
+                f"{pixel_diff_threshold}."
+            )


 def _compare_complex_slc_rasters(file_ref, file_sec, pols):
@@ -208,11 +241,11 @@
     pols: list[str]
         List of polarizations of rasters to compare
     """
-    with h5py.File(file_ref, 'r') as h_ref, h5py.File(file_sec, 'r') as h_sec:
+    with h5py.File(file_ref, "r") as h_ref, h5py.File(file_sec, "r") as h_sec:
         for pol in pols:
             # Retrieve SLC raster from ref and sec HDF5
-            slc_ref = h_ref[f'{DATA_PATH}/{pol}']
-            slc_sec = h_sec[f'{DATA_PATH}/{pol}']
+            slc_ref = h_ref[f"{DATA_PATH}/{pol}"]
+            slc_sec = h_sec[f"{DATA_PATH}/{pol}"]

             # Compute total number of pixels different from nan from ref and sec
             ref_nan = np.isnan(slc_ref)
@@ -224,10 +257,8 @@
             # Compute absolute difference between real and imaginary
             ma_slc_ref = np.ma.masked_array(slc_ref, mask=ref_nan)
             ma_slc_sec = np.ma.masked_array(slc_sec, mask=sec_nan)
-            diff_real = \
-                np.abs((ma_slc_ref.real - ma_slc_sec.real) / ma_slc_sec.real)
-            diff_imag = \
-                np.abs((ma_slc_ref.imag - ma_slc_sec.imag) / ma_slc_sec.imag)
+            diff_real = np.abs((ma_slc_ref.real - ma_slc_sec.real) / ma_slc_sec.real)
+            diff_imag = np.abs((ma_slc_ref.imag - ma_slc_sec.imag) / ma_slc_sec.imag)

             # Count the number of pixels in real and imaginary part above threshold
             pixel_diff_threshold = 1e-5
@@ -243,19 +274,21 @@
             total_fail_threshold = 0.001
             fails = []
             if percentage_real >= total_fail_threshold:
-                fails.append('real')
+                fails.append("real")
             if percentage_imag >= total_fail_threshold:
-                fails.append('imaginary')
+                fails.append("imaginary")

             # Format fails. join() doesn't affect empty lists
-            fails = ', '.join(fails)
-            assert len(fails) == 0, f'{fails} exceeded {total_fail_threshold * 100} ' \
-                '% of pixels where reference and secondary differed by more than ' \
-                f'{pixel_diff_threshold} in polarization {pol}.'
+            fails = ", ".join(fails)
+            assert len(fails) == 0, (
+                f"{fails} exceeded {total_fail_threshold * 100} "
+                "% of pixels where reference and secondary differed by more than "
+                f"{pixel_diff_threshold} in polarization {pol}."
+            )


 def _get_group_item_paths(h5py_group):
-    '''
+    """
    Get paths for all datasets and groups nested within a h5py.Group

    Parameters
@@ -267,14 +300,14 @@
    -------
    paths: list[str]
        Paths of all items in given h5py.Group
-    '''
+    """
    paths = []
    h5py_group.visit(lambda path: paths.append(path))
    return paths


def compare_cslc_metadata(file_ref, file_sec):
-    '''
+    """
    Compare reference and generated CSLC metadata
    Parameters
    ----------
@@ -282,19 +315,19 @@
        File path to reference metadata file (golden dataset)
    file_sec: str
        File path to secondary metadata file to use for comparison
-    '''
+    """
    # Check if metadata files exist
    if not os.path.exists(file_ref):
-        print(f'ERROR reference CSLC metadata not found: {file_ref}')
+        print(f"ERROR reference CSLC metadata not found: {file_ref}")
        return

    if not os.path.exists(file_sec):
-        print(f'ERROR CSLC metadata not found: {file_sec}')
+        print(f"ERROR CSLC metadata not found: {file_sec}")
        return

    # Get metadata keys
-    with h5py.File(file_ref, 'r') as h_ref, h5py.File(file_sec, 'r') as h_sec:
+    with h5py.File(file_ref, "r") as h_ref, h5py.File(file_sec, "r") as h_sec:
        metadata_ref = set(_get_group_item_paths(h_ref[ROOT_PATH]))
        metadata_sec = set(_get_group_item_paths(h_sec[ROOT_PATH]))

@@ -304,26 +337,26 @@
    err_str = "Metadata keys do not match.\n"
    if set_ref_minus_sec:
-        err_str += f'\nReference CSLC metadata extra entries: {set_ref_minus_sec}'
+        err_str += f"\nReference CSLC metadata extra entries: {set_ref_minus_sec}"
    if set_sec_minus_ref:
-        err_str += f'\nSecondary CSLC metadata extra entries: {set_sec_minus_ref}'
+        err_str += f"\nSecondary CSLC metadata extra entries: {set_sec_minus_ref}"

    # Fail if either side has extra metadata keys
-    assert not set_ref_minus_sec or not set_sec_minus_ref, err_str
+    assert not (set_ref_minus_sec or set_sec_minus_ref), err_str


def main():
-    '''Entrypoint of the script'''
+    """Entrypoint of the script"""
    cmd = cmd_line_parser()

    # Check CSLC products
    compare_products(cmd.ref_product, cmd.sec_product, cmd.product_type)
-    print('All CSLC product checks have passed')
+    print("All CSLC product checks have passed")

    # Check CSLC metadata
    compare_cslc_metadata(cmd.ref_product, cmd.sec_product)
-    print('All CSLC metadata checks have passed')
+    print("All CSLC metadata checks have passed")


-if __name__ == '__main__':
+if __name__ == "__main__":
    main()
diff --git a/src/compass/utils/wrap_namespace.py b/src/compass/utils/wrap_namespace.py
index fe973f70..6c6f9795 100644
--- a/src/compass/utils/wrap_namespace.py
+++ b/src/compass/utils/wrap_namespace.py
@@ -1,26 +1,29 @@
-'''Utility functions to convert to/from nested dicts or lists, and nested namespaces
+"""Utility functions to convert to/from nested dicts or lists, and nested namespaces

References
----------
https://stackoverflow.com/a/50491016
-'''
+"""

from functools import singledispatch
from types import SimpleNamespace

+
@singledispatch
def wrap_namespace(ob):
    return ob

+
@wrap_namespace.register(dict)
def _wrap_dict(ob):
-    return SimpleNamespace(**{key: wrap_namespace(val)
-                              for key, val in ob.items()})
+    return SimpleNamespace(**{key: wrap_namespace(val) for key, val in ob.items()})

+
@wrap_namespace.register(list)
def _wrap_list(ob):
    return [wrap_namespace(val) for val in ob]

+
def unwrap_to_dict(sns: SimpleNamespace) -> dict:
    sns_as_dict = {}
    for key, val in sns.__dict__.items():
diff --git a/src/compass/utils/yaml_argparse.py b/src/compass/utils/yaml_argparse.py
index 689e7a58..2d042e6c 100644
---
diff --git a/src/compass/utils/yaml_argparse.py b/src/compass/utils/yaml_argparse.py index 689e7a58..2d042e6c 100644 --- a/src/compass/utils/yaml_argparse.py +++ b/src/compass/utils/yaml_argparse.py @@ -1,16 +1,32 @@ import argparse -class YamlArgparse(): + +class YamlArgparse: def __init__(self, add_grid_type=False): - '''Initialize YamlArgparse class and parse CLI arguments for COMPASS.''' - parser = argparse.ArgumentParser(description='', formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('run_config_path', type=str, nargs='?', default=None, help='Path to run config file') + """Initialize YamlArgparse class and parse CLI arguments for COMPASS.""" + parser = argparse.ArgumentParser( + description="", formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "run_config_path", + type=str, + nargs="?", + default=None, + help="Path to run config file", + ) # additional arguments for s1_cslc.py if add_grid_type: - parser.add_argument('-g','--grid','--grid-type', dest='grid_type', type=str, - default='geo', choices=['geo', 'radar'], - help='Grid (coordinates) type of the output CSLC') + parser.add_argument( + "-g", + "--grid", + "--grid-type", + dest="grid_type", + type=str, + default="geo", + choices=["geo", "radar"], + help="Grid (coordinates) type of the output CSLC", + ) # parse arguments self.args = parser.parse_args() diff --git a/src/compass/version.py b/src/compass/version.py index 2980a3f0..563c7fe6 100644 --- a/src/compass/version.py +++ b/src/compass/version.py @@ -1,29 +1,28 @@ -''' +""" release history -''' +""" import collections - # release history -Tag = collections.namedtuple('Tag', 'version date') +Tag = collections.namedtuple("Tag", "version date") release_history = ( - Tag('0.5.5', '2024-03-15'), - Tag('0.5.4', '2023-10-13'), - Tag('0.5.3', '2023-10-05'), - Tag('0.5.2', '2023-09-21'), - Tag('0.5.1', '2023-09-09'), - Tag('0.5.0', '2023-08-25'), - Tag('0.4.1', '2023-08-14'), - Tag('0.4.0', '2023-07-26'), - Tag('0.3.1', '2023-06-01'), - Tag('0.3.0', '2023-05-31'), - Tag('0.1.5', '2023-05-10'), - Tag('0.1.4', '2023-03-23'), - Tag('0.1.3', '2022-12-21'), - Tag('0.1.2', '2022-07-21'), - Tag('0.1.1', '2022-06-08'), - Tag('0.1.0', '2022-06-07'), + Tag("0.5.5", "2024-03-15"), + Tag("0.5.4", "2023-10-13"), + Tag("0.5.3", "2023-10-05"), + Tag("0.5.2", "2023-09-21"), + Tag("0.5.1", "2023-09-09"), + Tag("0.5.0", "2023-08-25"), + Tag("0.4.1", "2023-08-14"), + Tag("0.4.0", "2023-07-26"), + Tag("0.3.1", "2023-06-01"), + Tag("0.3.0", "2023-05-31"), + Tag("0.1.5", "2023-05-10"), + Tag("0.1.4", "2023-03-23"), + Tag("0.1.3", "2022-12-21"), + Tag("0.1.2", "2022-07-21"), + Tag("0.1.1", "2022-06-08"), + Tag("0.1.0", "2022-06-07"), ) # latest release version number and date
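
As the trailing comment notes, the latest release sits at the head of the newest-first release_history tuple; a small sketch of reading it (attribute names are those of the Tag namedtuple above):

from compass.version import release_history

# release_history is ordered newest-first, so index 0 is the latest release
latest = release_history[0]
print(f"compass {latest.version}, released {latest.date}")  # compass 0.5.5, released 2024-03-15
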
diff --git a/tests/conftest.py b/tests/conftest.py index 487726d4..defa8406 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,14 +12,14 @@ def download_if_needed(local_path): - ''' + """ Check if given path to file exists. Download it from zenodo if it does not. Parameters ---------- local_path: str Path to file - ''' + """ # return if file is found if os.path.isfile(local_path): return @@ -33,52 +33,59 @@ def download_if_needed(local_path): os.makedirs(dst_dir, exist_ok=True) # download data - dataset_url = 'https://zenodo.org/record/7668411/files/' - target_url = f'{dataset_url}/{file_name}' - with open(local_path, 'wb') as f: + dataset_url = "https://zenodo.org/record/7668411/files/" + target_url = f"{dataset_url}/{file_name}" + with open(local_path, "wb") as f: f.write(requests.get(target_url).content) @pytest.fixture(scope="session") def geocode_slc_params(): - ''' + """ Parameters to be used by geocode SLC unit test Returns ------- test_params: SimpleNamespace SimpleNamespace containing geocode SLC unit test parameters - ''' + """ test_params = types.SimpleNamespace() # burst ID and date of burst - burst_id = 't064_135523_iw2' - burst_date = '20221016' + burst_id = "t064_135523_iw2" + burst_date = "20221016" # get test working directory test_path = pathlib.Path(__file__).parent.resolve() # set other paths relative to working directory - test_data_path = f'{test_path}/data' + test_data_path = f"{test_path}/data" # paths for template and actual runconfig - gslc_template_path = f'{test_data_path}/geo_cslc_s1_template.yaml' - test_params.gslc_cfg_path = f'{test_data_path}/geo_cslc_s1.yaml' + gslc_template_path = f"{test_data_path}/geo_cslc_s1_template.yaml" + test_params.gslc_cfg_path = f"{test_data_path}/geo_cslc_s1.yaml" # read runconfig template, replace pieces, write to runconfig - with open(gslc_template_path, 'r') as f_template, \ open(test_params.gslc_cfg_path, 'w') as f_cfg: - cfg = f_template.read().replace('@TEST_PATH@', str(test_path)).\ replace('@DATA_PATH@', test_data_path).\ replace('@BURST_ID@', burst_id) + with open(gslc_template_path, "r") as f_template, open( + test_params.gslc_cfg_path, "w" + ) as f_cfg: + cfg = ( + f_template.read() + .replace("@TEST_PATH@", str(test_path)) + .replace("@DATA_PATH@", test_data_path) + .replace("@BURST_ID@", burst_id) + ) f_cfg.write(cfg) # files needed for geocode SLC unit test - test_files = ['S1A_IW_SLC__1SDV_20221016T015043_20221016T015111_045461_056FC0_6681.zip', - 'orbits/S1A_OPER_AUX_POEORB_OPOD_20221105T083813_V20221015T225942_20221017T005942.EOF', - 'test_dem.tiff', 'test_burst_map.sqlite3', - '2022-10-16_0000_Rosamond-corner-reflectors.csv'] - test_files = [f'{test_data_path}/{test_file}' for test_file in test_files] + test_files = [ + "S1A_IW_SLC__1SDV_20221016T015043_20221016T015111_045461_056FC0_6681.zip", + "orbits/S1A_OPER_AUX_POEORB_OPOD_20221105T083813_V20221015T225942_20221017T005942.EOF", + "test_dem.tiff", + "test_burst_map.sqlite3", + "2022-10-16_0000_Rosamond-corner-reflectors.csv", + ] + test_files = [f"{test_data_path}/{test_file}" for test_file in test_files] # parallel download of test files (if necessary) with mp.Pool(len(test_files)) as pool: @@ -88,19 +95,20 @@ def geocode_slc_params(): test_params.corner_coord_csv_path = test_files[-1] # path to the output HDF5 - output_path = f'{test_path}/product/{burst_id}/{burst_date}' - output_file_name = f'{burst_id}_{burst_date}.h5' - test_params.output_hdf5_path = f'{output_path}/{output_file_name}' + output_path = f"{test_path}/product/{burst_id}/{burst_date}" + output_file_name = f"{burst_id}_{burst_date}.h5" + test_params.output_hdf5_path = f"{output_path}/{output_file_name}" # path to groups and datasets in output HDF5 test_params.grid_group_path = DATA_PATH - test_params.raster_path = 
f'{test_params.grid_group_path}/VV' + test_params.raster_path = f"{test_params.grid_group_path}/VV" return test_params -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def ionex_params(download_data=True): - ''' + """ Prepare IONEX data for unit test Parameters @@ -113,31 +121,33 @@ def ionex_params(download_data=True): ------- test_params : SimpleNamespace SimpleNamespace containing parameters needed for ionex unit test - ''' + """ test_params = types.SimpleNamespace() # Set the path to fetch data for the test test_params.tec_dir = os.path.join(os.path.dirname(__file__), "data") - test_params.date_str = '20151115' - test_params.sol_code = 'jpl' + test_params.date_str = "20151115" + test_params.sol_code = "jpl" # Create TEC directory os.makedirs(test_params.tec_dir, exist_ok=True) # Generate the TEC filename - test_params.tec_file = iono.get_ionex_filename(test_params.date_str, - tec_dir=test_params.tec_dir, - sol_code=test_params.sol_code) + test_params.tec_file = iono.get_ionex_filename( + test_params.date_str, tec_dir=test_params.tec_dir, sol_code=test_params.sol_code + ) # TODO figure out how to toggle download # If download_data=True, download data if download_data: if not os.path.isfile(test_params.tec_file): - print(f'Download IONEX file at {test_params.date_str} from ' - f'{test_params.sol_code} to {test_params.tec_dir}') - test_params.tec_file = iono.download_ionex(test_params.date_str, - test_params.tec_dir, - sol_code=test_params.sol_code) + print( + f"Download IONEX file at {test_params.date_str} from " + f"{test_params.sol_code} to {test_params.tec_dir}" + ) + test_params.tec_file = iono.download_ionex( + test_params.date_str, test_params.tec_dir, sol_code=test_params.sol_code + ) return test_params diff --git a/tests/data/jplg3190.15i b/tests/data/jplg3190.15i index 7950755a..27fb9dbb 100644 --- a/tests/data/jplg3190.15i +++ b/tests/data/jplg3190.15i @@ -1,63 +1,63 @@ 1.0 IONOSPHERE MAPS GPS IONEX VERSION / TYPE -GIM V3.0 JPL - GNISD 18-nov-2015 02:09 PGM / RUN BY / DATE -JPL'S GLOBAL IONOSPHERE MAPS YEAR 2015 DAY 319 COMMENT -Global Ionospheric Maps (GIM) are generated on an hourly DESCRIPTION -and daily basis at JPL using data from up to 100 GPS sites DESCRIPTION -of the IGS and others institutions. DESCRIPTION -The vertical TEC is modeled in a solar-geomagnetic DESCRIPTION -reference frame using bi-cubic splines on a spherical grid. DESCRIPTION -A Kalman filter is used to solve simultaneously for DESCRIPTION -instrumental biases and VTEC on the grid (as stochastic DESCRIPTION -parameters). DESCRIPTION -Contact Address: gpsiono@cobra.jpl.nasa.gov . 
DESCRIPTION - 2015 11 15 0 0 0 EPOCH OF FIRST MAP - 2015 11 16 0 0 0 EPOCH OF LAST MAP - 7200 INTERVAL - 13 # OF MAPS IN FILE - NONE MAPPING FUNCTION - 10.0 ELEVATION CUTOFF -One-way carrier phase leveled to code OBSERVABLES USED - 170 # OF STATIONS - 31 # OF SATELLITES - 6371.0 BASE RADIUS - 2 MAP DIMENSION - 450.0 450.0 0.0 HGT1 / HGT2 / DHGT - 87.5 -87.5 -2.5 LAT1 / LAT2 / DLAT - -180.0 180.0 5.0 LON1 / LON2 / DLON - -1 EXPONENT -TEC/RMS values in 0.1 TECU; 9999, if no value available COMMENT -DIFFERENTIAL CODE BIASES START OF AUX DATA - 01 -7.571 0.007 PRN / BIAS / RMS - 02 8.990 0.004 PRN / BIAS / RMS - 03 -5.325 0.007 PRN / BIAS / RMS - 05 2.745 0.004 PRN / BIAS / RMS - 06 -7.010 0.007 PRN / BIAS / RMS - 07 3.025 0.004 PRN / BIAS / RMS - 08 -7.290 0.007 PRN / BIAS / RMS - 09 -4.799 0.004 PRN / BIAS / RMS - 10 -5.817 0.007 PRN / BIAS / RMS - 11 3.657 0.007 PRN / BIAS / RMS - 12 3.692 0.004 PRN / BIAS / RMS - 13 3.166 0.004 PRN / BIAS / RMS - 14 1.938 0.004 PRN / BIAS / RMS - 15 2.745 0.004 PRN / BIAS / RMS - 16 2.534 0.004 PRN / BIAS / RMS - 17 2.885 0.004 PRN / BIAS / RMS - 18 3.131 0.004 PRN / BIAS / RMS - 19 5.692 0.007 PRN / BIAS / RMS - 20 1.201 0.000 PRN / BIAS / RMS - 21 3.271 0.004 PRN / BIAS / RMS - 22 7.306 0.004 PRN / BIAS / RMS - 23 8.885 0.004 PRN / BIAS / RMS - 24 -5.992 0.004 PRN / BIAS / RMS - 25 -7.852 0.004 PRN / BIAS / RMS - 26 -8.904 0.004 PRN / BIAS / RMS - 27 -5.220 0.007 PRN / BIAS / RMS - 28 2.780 0.004 PRN / BIAS / RMS - 29 2.324 0.004 PRN / BIAS / RMS - 30 -6.589 0.004 PRN / BIAS / RMS - 31 4.394 0.004 PRN / BIAS / RMS - 32 -1.992 0.007 PRN / BIAS / RMS +GIM V3.0 JPL - GNISD 18-nov-2015 02:09 PGM / RUN BY / DATE +JPL'S GLOBAL IONOSPHERE MAPS YEAR 2015 DAY 319 COMMENT +Global Ionospheric Maps (GIM) are generated on an hourly DESCRIPTION +and daily basis at JPL using data from up to 100 GPS sites DESCRIPTION +of the IGS and others institutions. DESCRIPTION +The vertical TEC is modeled in a solar-geomagnetic DESCRIPTION +reference frame using bi-cubic splines on a spherical grid. DESCRIPTION +A Kalman filter is used to solve simultaneously for DESCRIPTION +instrumental biases and VTEC on the grid (as stochastic DESCRIPTION +parameters). DESCRIPTION +Contact Address: gpsiono@cobra.jpl.nasa.gov . 
DESCRIPTION + 2015 11 15 0 0 0 EPOCH OF FIRST MAP + 2015 11 16 0 0 0 EPOCH OF LAST MAP + 7200 INTERVAL + 13 # OF MAPS IN FILE + NONE MAPPING FUNCTION + 10.0 ELEVATION CUTOFF +One-way carrier phase leveled to code OBSERVABLES USED + 170 # OF STATIONS + 31 # OF SATELLITES + 6371.0 BASE RADIUS + 2 MAP DIMENSION + 450.0 450.0 0.0 HGT1 / HGT2 / DHGT + 87.5 -87.5 -2.5 LAT1 / LAT2 / DLAT + -180.0 180.0 5.0 LON1 / LON2 / DLON + -1 EXPONENT +TEC/RMS values in 0.1 TECU; 9999, if no value available COMMENT +DIFFERENTIAL CODE BIASES START OF AUX DATA + 01 -7.571 0.007 PRN / BIAS / RMS + 02 8.990 0.004 PRN / BIAS / RMS + 03 -5.325 0.007 PRN / BIAS / RMS + 05 2.745 0.004 PRN / BIAS / RMS + 06 -7.010 0.007 PRN / BIAS / RMS + 07 3.025 0.004 PRN / BIAS / RMS + 08 -7.290 0.007 PRN / BIAS / RMS + 09 -4.799 0.004 PRN / BIAS / RMS + 10 -5.817 0.007 PRN / BIAS / RMS + 11 3.657 0.007 PRN / BIAS / RMS + 12 3.692 0.004 PRN / BIAS / RMS + 13 3.166 0.004 PRN / BIAS / RMS + 14 1.938 0.004 PRN / BIAS / RMS + 15 2.745 0.004 PRN / BIAS / RMS + 16 2.534 0.004 PRN / BIAS / RMS + 17 2.885 0.004 PRN / BIAS / RMS + 18 3.131 0.004 PRN / BIAS / RMS + 19 5.692 0.007 PRN / BIAS / RMS + 20 1.201 0.000 PRN / BIAS / RMS + 21 3.271 0.004 PRN / BIAS / RMS + 22 7.306 0.004 PRN / BIAS / RMS + 23 8.885 0.004 PRN / BIAS / RMS + 24 -5.992 0.004 PRN / BIAS / RMS + 25 -7.852 0.004 PRN / BIAS / RMS + 26 -8.904 0.004 PRN / BIAS / RMS + 27 -5.220 0.007 PRN / BIAS / RMS + 28 2.780 0.004 PRN / BIAS / RMS + 29 2.324 0.004 PRN / BIAS / RMS + 30 -6.589 0.004 PRN / BIAS / RMS + 31 4.394 0.004 PRN / BIAS / RMS + 32 -1.992 0.007 PRN / BIAS / RMS AJAC 29.466 0.011 STATION / BIAS / RMS ALBH -7.517 0.011 STATION / BIAS / RMS ALGO 10.799 0.011 STATION / BIAS / RMS @@ -255,9 +255,9 @@ DIFFERENTIAL CODE BIASES START OF AUX DATA ZECK 9.185 0.011 STATION / BIAS / RMS ZIMJ 8.308 0.011 STATION / BIAS / RMS ZIMM -11.131 0.011 STATION / BIAS / RMS -DIFFERENTIAL CODE BIASES END OF AUX DATA - END OF HEADER - 1 START OF TEC MAP +DIFFERENTIAL CODE BIASES END OF AUX DATA + END OF HEADER + 1 START OF TEC MAP 2015 11 15 0 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 96 97 97 97 97 98 98 98 97 97 97 97 96 96 96 95 @@ -685,8 +685,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 180 179 178 177 176 175 175 174 173 172 172 171 171 170 170 170 169 169 169 169 169 169 169 168 168 168 168 168 168 168 168 168 168 168 168 168 168 169 169 170 170 - 1 END OF TEC MAP - 2 START OF TEC MAP + 1 END OF TEC MAP + 2 START OF TEC MAP 2015 11 15 2 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 90 90 89 89 88 88 87 87 86 86 85 84 84 83 83 82 @@ -1114,8 +1114,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 184 183 182 180 179 178 177 175 174 173 172 172 171 170 170 170 169 169 169 169 170 170 170 171 171 172 172 173 173 174 174 175 175 176 177 177 178 179 180 180 181 - 2 END OF TEC MAP - 3 START OF TEC MAP + 2 END OF TEC MAP + 3 START OF TEC MAP 2015 11 15 4 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 81 81 80 80 80 79 79 79 78 78 78 78 77 77 77 77 @@ -1543,8 +1543,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 178 177 176 174 173 172 171 170 169 168 168 167 167 167 167 167 167 168 168 169 170 170 171 172 173 174 175 176 177 178 179 179 180 181 182 182 183 184 184 185 185 - 3 END OF TEC MAP - 4 START OF TEC MAP + 3 END OF TEC MAP + 4 START OF TEC MAP 2015 11 15 6 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 71 70 70 70 70 70 70 70 70 70 70 70 70 70 70 70 @@ -1972,8 +1972,8 @@ DIFFERENTIAL 
CODE BIASES END OF AUX DATA 167 166 166 166 165 165 165 166 166 166 167 167 168 169 169 170 171 172 174 175 176 177 178 180 181 182 183 184 185 186 187 188 189 189 190 190 190 190 191 190 190 - 4 END OF TEC MAP - 5 START OF TEC MAP + 4 END OF TEC MAP + 5 START OF TEC MAP 2015 11 15 8 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 58 58 59 59 60 60 60 61 61 61 61 61 61 61 61 61 @@ -2401,8 +2401,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 156 155 154 153 153 152 151 150 150 149 149 149 148 148 148 148 148 149 149 149 150 150 150 151 151 152 152 153 153 153 154 154 154 155 155 155 155 156 156 156 156 - 5 END OF TEC MAP - 6 START OF TEC MAP + 5 END OF TEC MAP + 6 START OF TEC MAP 2015 11 15 10 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 58 59 59 59 59 59 59 60 60 60 60 60 60 60 60 60 @@ -2830,8 +2830,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 165 165 164 164 164 164 164 163 163 162 162 161 161 160 159 159 158 157 156 155 154 153 152 151 150 149 148 147 146 145 145 144 144 143 143 143 143 143 143 143 143 - 6 END OF TEC MAP - 7 START OF TEC MAP + 6 END OF TEC MAP + 7 START OF TEC MAP 2015 11 15 12 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 51 51 51 51 51 51 52 52 52 52 52 52 53 53 53 53 @@ -3259,8 +3259,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 160 161 161 162 162 162 162 162 162 161 161 160 159 158 157 156 154 153 151 149 147 145 143 142 140 138 136 134 132 130 129 127 126 125 124 123 123 122 122 122 122 - 7 END OF TEC MAP - 8 START OF TEC MAP + 7 END OF TEC MAP + 8 START OF TEC MAP 2015 11 15 14 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 61 61 61 61 61 61 61 61 61 61 61 61 61 61 60 60 @@ -3688,8 +3688,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 168 168 168 168 168 168 167 167 166 165 164 163 162 160 159 157 156 154 152 151 149 148 146 145 143 142 141 139 138 138 137 136 136 135 135 135 135 136 136 136 137 - 8 END OF TEC MAP - 9 START OF TEC MAP + 8 END OF TEC MAP + 9 START OF TEC MAP 2015 11 15 16 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 70 70 70 70 70 70 70 70 70 70 70 70 70 70 70 70 @@ -4117,8 +4117,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 170 169 169 168 167 166 165 164 163 162 160 159 158 156 155 154 152 151 150 149 147 146 145 144 143 143 142 141 141 141 140 140 140 140 141 141 141 142 142 143 144 - 9 END OF TEC MAP - 10 START OF TEC MAP + 9 END OF TEC MAP + 10 START OF TEC MAP 2015 11 15 18 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 65 65 65 65 65 65 65 65 65 65 65 65 65 65 65 65 @@ -4546,8 +4546,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 139 139 138 137 136 135 134 133 132 132 131 130 129 128 127 126 125 125 124 123 123 122 122 122 121 121 121 121 121 122 122 122 123 123 124 125 126 127 127 128 129 - 10 END OF TEC MAP - 11 START OF TEC MAP + 10 END OF TEC MAP + 11 START OF TEC MAP 2015 11 15 20 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 100 100 100 101 101 101 102 102 102 103 103 103 103 103 103 103 @@ -4975,8 +4975,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 176 175 174 173 172 172 171 170 169 168 167 166 165 164 163 162 162 161 160 160 159 159 158 158 158 158 158 158 158 158 159 159 160 161 161 162 163 164 165 166 167 - 11 END OF TEC MAP - 12 START OF TEC MAP + 11 END OF TEC MAP + 12 START OF TEC MAP 2015 11 15 22 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 115 116 117 118 118 119 120 120 121 121 121 121 121 121 120 120 @@ -5404,8 
+5404,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 242 240 238 237 235 233 232 230 229 227 226 225 224 223 222 221 220 220 219 219 219 219 219 219 219 219 220 220 221 222 223 224 225 226 227 229 230 232 233 235 236 - 12 END OF TEC MAP - 13 START OF TEC MAP + 12 END OF TEC MAP + 13 START OF TEC MAP 2015 11 16 0 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 116 117 118 119 119 120 121 121 121 122 122 122 122 122 121 121 @@ -5833,8 +5833,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 213 211 210 209 208 207 207 206 206 206 206 207 207 208 209 210 211 212 214 215 217 219 221 223 225 227 230 232 234 236 238 241 243 245 247 248 250 252 253 254 255 - 13 END OF TEC MAP - 1 START OF RMS MAP + 13 END OF TEC MAP + 1 START OF RMS MAP 2015 11 15 0 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 24 24 24 24 24 24 25 25 25 25 25 25 25 @@ -6262,8 +6262,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 23 23 23 23 23 23 23 23 23 23 23 23 23 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 - 1 END OF RMS MAP - 2 START OF RMS MAP + 1 END OF RMS MAP + 2 START OF RMS MAP 2015 11 15 2 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 26 26 26 26 26 26 26 26 26 26 26 26 26 @@ -6691,8 +6691,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 25 25 25 25 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 25 25 25 25 25 25 25 26 26 26 26 26 26 26 26 26 - 2 END OF RMS MAP - 3 START OF RMS MAP + 2 END OF RMS MAP + 3 START OF RMS MAP 2015 11 15 4 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 17 @@ -7120,8 +7120,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 25 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 27 27 27 27 - 3 END OF RMS MAP - 4 START OF RMS MAP + 3 END OF RMS MAP + 4 START OF RMS MAP 2015 11 15 6 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 26 26 26 26 26 26 26 26 26 17 17 17 17 17 17 16 @@ -7549,8 +7549,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 26 26 26 26 26 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 27 27 27 27 27 27 27 27 27 27 - 4 END OF RMS MAP - 5 START OF RMS MAP + 4 END OF RMS MAP + 5 START OF RMS MAP 2015 11 15 8 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 25 25 25 15 15 15 15 15 15 15 15 15 15 15 15 15 @@ -7978,8 +7978,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 26 26 26 26 26 26 26 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 - 5 END OF RMS MAP - 6 START OF RMS MAP + 5 END OF RMS MAP + 6 START OF RMS MAP 2015 11 15 10 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 16 16 16 16 16 16 16 16 16 16 16 16 16 16 16 24 @@ -8407,8 +8407,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 26 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 24 24 24 24 - 6 END OF RMS MAP - 7 START OF RMS MAP + 6 END OF RMS MAP + 7 START OF RMS MAP 2015 11 15 12 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 17 17 17 17 17 17 17 17 17 24 24 24 24 24 24 24 @@ -8836,8 +8836,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 25 25 25 25 25 25 25 25 25 25 25 25 25 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 23 23 24 24 24 24 24 24 24 24 - 7 END OF RMS MAP - 8 START OF RMS MAP + 7 END OF RMS MAP + 8 START OF 
RMS MAP 2015 11 15 14 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 18 18 18 24 24 24 24 24 24 24 24 24 24 24 24 24 @@ -9265,8 +9265,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 26 26 26 26 26 26 26 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 25 25 25 25 26 26 26 26 26 26 26 26 26 27 27 27 - 8 END OF RMS MAP - 9 START OF RMS MAP + 8 END OF RMS MAP + 9 START OF RMS MAP 2015 11 15 16 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 @@ -9694,8 +9694,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 27 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29 29 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 28 28 28 28 - 9 END OF RMS MAP - 10 START OF RMS MAP + 9 END OF RMS MAP + 10 START OF RMS MAP 2015 11 15 18 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 @@ -10123,8 +10123,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 28 28 28 28 28 28 28 28 28 28 28 28 28 27 27 27 27 27 27 26 26 26 26 26 26 26 26 26 26 26 26 27 27 27 27 27 27 27 27 27 27 - 10 END OF RMS MAP - 11 START OF RMS MAP + 10 END OF RMS MAP + 11 START OF RMS MAP 2015 11 15 20 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 @@ -10552,8 +10552,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 28 28 28 28 28 28 28 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 26 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 - 11 END OF RMS MAP - 12 START OF RMS MAP + 11 END OF RMS MAP + 12 START OF RMS MAP 2015 11 15 22 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 24 24 24 24 24 24 24 24 24 24 24 24 26 @@ -10981,8 +10981,8 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 29 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 28 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 27 28 26 26 27 27 - 12 END OF RMS MAP - 13 START OF RMS MAP + 12 END OF RMS MAP + 13 START OF RMS MAP 2015 11 16 0 0 0 EPOCH OF CURRENT MAP 87.5-180.0 180.0 5.0 450.0 LAT/LON1/LON2/DLON/H 24 24 24 24 24 24 24 24 24 25 25 25 25 25 25 25 @@ -11410,5 +11410,5 @@ DIFFERENTIAL CODE BIASES END OF AUX DATA 22 21 21 21 21 21 21 21 21 21 21 21 21 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 25 - 13 END OF RMS MAP - END OF FILE + 13 END OF RMS MAP + END OF FILE diff --git a/tests/requirements.txt b/tests/requirements.txt index 7fa5e959..114768e8 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,2 +1,2 @@ pytest -pytest-order \ No newline at end of file +pytest-order diff --git a/tests/test_ionex.py b/tests/test_ionex.py index acba3404..a397d334 100644 --- a/tests/test_ionex.py +++ b/tests/test_ionex.py @@ -1,31 +1,34 @@ -''' +""" Test IONEX functionalities: file reading and interpolation -''' +""" import numpy as np from compass.utils import iono + def test_read_ionex(ionex_params): - ''' + """ Test the reader for IONEX data Parameters ---------- ionex_params: types.SimpleNamespace Variable containing IONEX parameters - ''' + """ time_ind = 1 x0, x1, y0, y1 = 3, 9, 28, 33 # Create TEC data on a region of interest (AOI) tec_aoi = np.array( - [[71.8, 64.2, 55.9, 47.1, 38.6, 31.2], - [80.2, 73.9, 66.6, 58.2, 49.5, 41.1], - [83.2, 79.6, 74.6, 68., 60.1, 51.6], - [79.6, 79.5, 78.1, 74.5, 68.5, 60.9], - [71.9, 74.5, 76.5, 76.2, 73.1, 67.3]], + [ + [71.8, 64.2, 55.9, 47.1, 38.6, 31.2], + [80.2, 73.9, 66.6, 58.2, 49.5, 41.1], + [83.2, 79.6, 74.6, 68.0, 60.1, 
51.6], + [79.6, 79.5, 78.1, 74.5, 68.5, 60.9], + [71.9, 74.5, 76.5, 76.2, 73.1, 67.3], + ], ) # Read IONEX tec_maps data - ignore mins, lats, and lons @@ -34,32 +37,34 @@ def test_read_ionex(ionex_params): def test_get_ionex_value(ionex_params): - ''' + """ Test IONEX TEC data interpolation Parameters ---------- ionex_params: types.SimpleNamespace Variable containing IONEX parameters - ''' + """ # Lat/Lon coordinates over Chile lat, lon = -21.3, -67.4 # 23:07 UTC time - utc_sec = 23* 3600 +7 *60 + utc_sec = 23 * 3600 + 7 * 60 # Interpolation methods - methods = ['nearest', 'linear2d', 'linear3d', 'linear3d'] + methods = ["nearest", "linear2d", "linear3d", "linear3d"] rotates = [False, False, False, True] values = [60.8, 58.90687978, 64.96605174, 65.15525905] # Perform comparison for method, rotate, value in zip(methods, rotates, values): - tec_val = iono.get_ionex_value(ionex_params.tec_file, - utc_sec, lat, lon, - interp_method=method, - rotate_tec_map=rotate, - ) + tec_val = iono.get_ionex_value( + ionex_params.tec_file, + utc_sec, + lat, + lon, + interp_method=method, + rotate_tec_map=rotate, + ) assert np.allclose(tec_val, value, atol=1e-05, rtol=1e-05) - diff --git a/tests/test_qa.py b/tests/test_qa.py index c7538655..02071a0e 100644 --- a/tests/test_qa.py +++ b/tests/test_qa.py @@ -5,6 +5,7 @@ TOLERANCE_TRUNCATION_ERR = 1.0e-7 + def test_qa_power_stats(geocode_slc_params): def _power_test(arr): return arr > 0.0 @@ -13,12 +14,11 @@ def _phase_test(arr): return abs(arr) <= np.pi + TOLERANCE_TRUNCATION_ERR # basic sanity checks of mean, min, and max - with h5py.File(geocode_slc_params.output_hdf5_path, 'r') as h5_obj: - for pwr_phase, test in {'power':_power_test, - 'phase':_phase_test}.items(): - h5_stats_path = f'{QA_PATH}/statistics/data/VV/{pwr_phase}' - stat_names = ['mean', 'min', 'max'] + with h5py.File(geocode_slc_params.output_hdf5_path, "r") as h5_obj: + for pwr_phase, test in {"power": _power_test, "phase": _phase_test}.items(): + h5_stats_path = f"{QA_PATH}/statistics/data/VV/{pwr_phase}" + stat_names = ["mean", "min", "max"] for stat_name in stat_names: - stat_val = h5_obj[f'{h5_stats_path}/{stat_name}'][()] - print(f'Testing: {stat_name} of {pwr_phase} = {stat_val}') + stat_val = h5_obj[f"{h5_stats_path}/{stat_name}"][()] + print(f"Testing: {stat_name} of {pwr_phase} = {stat_val}") assert test(stat_val)
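
For context on the sanity checks above: power and phase statistics of a complex raster are bounded by construction, which is presumably what the QA code relies on. A synthetic illustration:

import numpy as np

slc = np.array([1 + 1j, 2 - 1j, -0.5 + 0.25j], dtype=np.complex64)

power = np.abs(slc) ** 2  # |z|^2 is strictly positive for non-zero samples
phase = np.angle(slc)     # np.angle returns values in (-pi, pi]

# hence mean/min/max of power are > 0, and |phase| stats stay within pi
# (plus a small float-truncation tolerance in the actual test)
assert all(stat > 0.0 for stat in (power.mean(), power.min(), power.max()))
assert all(abs(stat) <= np.pi for stat in (phase.mean(), phase.min(), phase.max()))
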
diff --git a/tests/test_s1_geocode_slc.py b/tests/test_s1_geocode_slc.py index 48c61839..6c327a64 100644 --- a/tests/test_s1_geocode_slc.py +++ b/tests/test_s1_geocode_slc.py @@ -11,24 +11,25 @@ @pytest.mark.order(1) def test_geocode_slc_run(geocode_slc_params): - ''' + """ Run s1_geocode_slc to ensure it does not crash Parameters ---------- geocode_slc_params: SimpleNamespace SimpleNamespace containing geocode SLC unit test parameters - ''' + """ # load yaml to cfg - cfg = GeoRunConfig.load_from_yaml(geocode_slc_params.gslc_cfg_path, - workflow_name='s1_cslc_geo') + cfg = GeoRunConfig.load_from_yaml( + geocode_slc_params.gslc_cfg_path, workflow_name="s1_cslc_geo" + ) # pass cfg to s1_geocode_slc s1_geocode_slc.run(cfg) def _get_nearest_index(arr, val): - ''' + """ Find index of element in given array closest to given value Parameters @@ -42,12 +43,12 @@ def _get_nearest_index(arr, val): ------- _: int Index of element in arr where val is closest - ''' + """ return np.abs(arr - val).argmin() def _get_reflectors_bounding_slice(geocode_slc_params): - ''' + """ Get latitude, longitude slice that contains all the corner reflectors in the CSV list @@ -55,23 +56,25 @@ ---------- geocode_slc_params: SimpleNamespace SimpleNamespace containing geocode SLC unit test parameters - ''' + """ # extract from HDF5 - with h5py.File(geocode_slc_params.output_hdf5_path, 'r') as h5_obj: + with h5py.File(geocode_slc_params.output_hdf5_path, "r") as h5_obj: grid_group = h5_obj[geocode_slc_params.grid_group_path] # create projection to convert from UTM to LLH - epsg = int(grid_group['projection'][()]) + epsg = int(grid_group["projection"][()]) proj = isce3.core.UTM(epsg) - x_coords_utm = grid_group['x_coordinates'][()] - y_coords_utm = grid_group['y_coordinates'][()] + x_coords_utm = grid_group["x_coordinates"][()] + y_coords_utm = grid_group["y_coordinates"][()] - lons = np.array([np.degrees(proj.inverse([x, y_coords_utm[0], 0])[0]) - for x in x_coords_utm]) + lons = np.array( + [np.degrees(proj.inverse([x, y_coords_utm[0], 0])[0]) for x in x_coords_utm] + ) - lats = np.array([np.degrees(proj.inverse([x_coords_utm[0], y, 0])[1]) - for y in y_coords_utm]) + lats = np.array( + [np.degrees(proj.inverse([x_coords_utm[0], y, 0])[1]) for y in y_coords_utm] + ) # get array shape for later check of slice with margins applied height, width = h5_obj[geocode_slc_params.raster_path].shape @@ -79,21 +82,19 @@ def _get_reflectors_bounding_slice(geocode_slc_params): # extract all lat/lon corner reflector coordinates corner_lats = [] corner_lons = [] - with open(geocode_slc_params.corner_coord_csv_path, 'r') as csvfile: + with open(geocode_slc_params.corner_coord_csv_path, "r") as csvfile: corner_reader = csv.DictReader(csvfile) for row in corner_reader: - corner_lats.append(float(row['Latitude (deg)'])) - corner_lons.append(float(row['Longitude (deg)'])) + corner_lats.append(float(row["Latitude (deg)"])) + corner_lons.append(float(row["Longitude (deg)"])) # find nearest index for min/max of lats/lons and apply margin # apply margin to bounding box and ensure raster bounds are not exceeded # application of margin y indices reversed due to the descending order of the lats vector margin = 50 i_max_y = max(_get_nearest_index(lats, np.max(corner_lats)) - margin, 0) - i_min_y = min(_get_nearest_index(lats, np.min(corner_lats)) + margin, - height - 1) - i_max_x = min(_get_nearest_index(lons, np.max(corner_lons)) + margin, - width - 1) + i_min_y = min(_get_nearest_index(lats, np.min(corner_lats)) + margin, height - 1) + i_max_x = min(_get_nearest_index(lons, np.max(corner_lons)) + margin, width - 1) i_min_x = max(_get_nearest_index(lons, np.min(corner_lons)) - margin, 0) # return as slice @@ -102,19 +103,19 @@ def test_geocode_slc_validate(geocode_slc_params): - ''' + """ Check for presence of any reflectors in geocoded output Parameters ---------- geocode_slc_params: SimpleNamespace SimpleNamespace containing geocode SLC unit test parameters - ''' + """ # get slice where corner reflectors should be s_ = _get_reflectors_bounding_slice(geocode_slc_params) # slice raster array - with h5py.File(geocode_slc_params.output_hdf5_path, 'r') as h5_obj: + with h5py.File(geocode_slc_params.output_hdf5_path, "r") as h5_obj: arr = h5_obj[geocode_slc_params.raster_path][s_] # check for bright spots in sliced array @@ -123,6 +124,8 @@ def test_metadata(geocode_slc_params): - with h5py.File(geocode_slc_params.output_hdf5_path, 'r') as h5_obj: - assert (h5_obj['/metadata/processing_information/inputs/dem_source'][()].decode() == - 'DEM description was 
not provided.') + with h5py.File(geocode_slc_params.output_hdf5_path, "r") as h5_obj: + assert ( + h5_obj["/metadata/processing_information/inputs/dem_source"][()].decode() + == "DEM description was not provided." + )
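
A closing note on the "bright spots" check in test_geocode_slc_validate, whose assertion is truncated in the hunk above: the idea presumably reduces to thresholding amplitude inside the reflector-bounding slice. A minimal sketch with synthetic data (the threshold value is illustrative, not the test's):

import numpy as np

# synthetic geocoded SLC slice: low-amplitude clutter with one
# corner-reflector-like bright response
arr = np.full((50, 50), 0.1 + 0.1j, dtype=np.complex64)
arr[25, 25] = 100.0 + 100.0j

bright_threshold = 10.0  # hypothetical amplitude threshold
assert np.any(np.abs(arr) > bright_threshold)
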