diff --git a/my_sites.txt b/my_sites.txt
index b372056..a2c34fa 100644
--- a/my_sites.txt
+++ b/my_sites.txt
@@ -613,6 +613,25 @@
"do_tropo" : "True",
"tropo_model" : "ERA5"},
+ "NISAR_ALOS1_Hawaii_0380" : {
+ "calval_location" : "ALOS_Hawaii0380",
+ "download_region" : "'18.95 19.92 -155.72 -154.83'",
+ "analysis_region" : "'19.01 19.91 -155.68 -154.89'",
+ "reference_lalo" : "auto",
+ "download_start_date" : "20060526",
+ "download_end_date" : "20110309",
+ "tempBaseMax" : "auto",
+ "ifgExcludePair" : "auto",
+ "ifgExcludeDate" : "auto",
+ "ifgExcludeIndex" : "auto",
+ "maskWater" : "True",
+ "do_SET" : "True",
+ "SET_source" : "mintpy",
+ "do_iono" : "True",
+ "do_tropo" : "False",
+ "tropo_model" : "ERA5"},
+
+
"Info" : "######## PERMAFROST #########",
"NorthSlopeEastD102" : {
@@ -627,6 +646,26 @@
"maskWater" : "True",
"sentinel_direction" : "DESCENDING",
"sentinel_path" : "102",
- "sentinel_frame" : "362"}
+ "sentinel_frame" : "362"},
+
+ "Info" : "######## NISAR DATA ########",
+
+ "Erta_Ale" : {
+ "calval_location" : "Erta_Ale",
+ "download_region" : "'11.47 14.12 40.19 42.97'",
+ "analysis_region" : "'11.47 14.12 40.19 42.97'",
+ "reference_lalo" : "auto",
+ "download_start_date" : "20251122",
+ "download_end_date" : "20251204",
+ "tempBaseMax" : "auto",
+ "ifgExcludePair" : "auto",
+ "ifgExcludeDate" : "auto",
+ "ifgExcludeIndex" : "auto",
+ "maskWater" : "True",
+ "do_SET" : "True",
+ "SET_source" : "mintpy",
+ "do_iono" : "True",
+ "do_tropo" : "True",
+ "tropo_model" : "NISAR"}
}
}
diff --git a/prep/NISAR_prep.ipynb b/prep/NISAR_prep.ipynb
index 21c1a38..1f637ce 100644
--- a/prep/NISAR_prep.ipynb
+++ b/prep/NISAR_prep.ipynb
@@ -2,304 +2,425 @@
"cells": [
{
"cell_type": "markdown",
- "id": "53e1ad8e-bf0c-4dcb-8db2-777650e45566",
- "metadata": {
- "tags": []
- },
+ "id": "0b8d77a2-b3ea-47a1-9f9f-f49d0ed160ab",
+ "metadata": {},
"source": [
- "# Preparing NISAR-like data for validation of Solid Earth requirements\n",
+ "# Preparing NISAR data for validation of Solid Earth requirements\n",
"\n",
- "Prepared by Ekaterina Tymofyeyeva, Heresh Fattahi, Sara Mirzaee, Max Zhan, and Jeff Pon March 2024, \n",
+ "**Prepared by:** Emre Havazli in October 2025, replacing the initial notebook prepared by Ekaterina Tymofyeyeva, Heresh Fattahi, Sara Mirzaee, Max Zhan, and Jeff Pon in March 2024\n",
"\n",
"
\n",
- "Both the initial setup (Prep A section) and download of the data (Prep B section) should be run at the start of the notebook. Methods for validation of transient, secular, and coseismic requirements using Sentinel-1 ARIA data can be run subsequently.\n",
- "\n",
- "\n",
- "\n",
- "\n"
+ "This notebook pre-processes data for the different NISAR Solid Earth Cal/Val sites and requirements. Subsequent validation is done via separate notebooks for the Transient, Secular, and Coseismic requirements, located under /ATBD_main/methods/.\n",
+ ""
]
},
{
- "cell_type": "code",
- "execution_count": null,
- "id": "fa5223dc-ffcb-4c8d-bb47-1720e9f7652d",
+ "cell_type": "markdown",
+ "id": "397daf73-728b-4e62-a25e-2d321f41384f",
"metadata": {
"tags": []
},
- "outputs": [],
"source": [
- "## Define CalVal Site \n",
- "# This is a NISAR-like test dataset from California derived from ALOS-1\n",
+ "\n",
+ "\n",
+ "## Table of Contents: \n",
"\n",
- "site='NISAR_ALOS1' \n",
+ "[**Environment Setup**](#setup)\n",
+ "- [Load Python Packages](#load_packages)\n",
+ "- [Define CalVal Site and Parameters](#set_calval_params)\n",
+ "- [Set Directories and Files](#set_directories)\n",
+ "- [Authentication](#set_authentication)\n",
"\n",
- "# Specify the directory where you want to do all your work and store your output time series\n",
- "my_directory = '/scratch/katia/ATBD_outputs'\n",
+ "[**1. Download and Prepare Interferograms**](#prep_ifg)\n",
+ "- [1.1. Get Data List](#prep_data)\n",
+ "- [1.2. Download DEM](#prep_dem)\n",
+ "- [1.3. Create MintPy configuration file](#create_config)\n",
+ "- [1.4. Load Data into MintPy Cubes](#prep_load_data)\n",
"\n",
- "# Where the raw data are stored (staged for now, later searchable and streamed directly from the DAAC)\n",
- "gunw_dir = '/scratch/katia/ALOS_ROSAMOND_20242610'"
+ ""
]
},
{
"cell_type": "markdown",
- "id": "10583e4f-f160-4447-b92b-0dd1ad87365c",
+ "id": "5a9b4566-7905-4d0e-8e4a-097266495e61",
"metadata": {},
"source": [
- "# Environment Setup\n",
- "Setup your environment for processing data"
+ "<a id='setup'></a>\n",
+ "## Environment Setup"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "831854a8-9258-46fd-a136-ffa7ef9f72d8",
+ "metadata": {},
+ "source": [
+ "### Load Python Packages <a id='load_packages'></a>"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "06ae0da4-5814-4738-9b83-2724211d677e",
+ "id": "ae784898-3e41-4463-a3f9-bd701a80dd23",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
- "#Load Packages\n",
- "import glob\n",
+ "import json\n",
+ "import netrc\n",
"import os\n",
"import subprocess\n",
- "from datetime import datetime as dt\n",
- "from pathlib import Path\n",
- "\n",
- "import numpy as np\n",
- "import pandas as pd\n",
- "from matplotlib import pyplot as plt\n",
- "#from mintpy import view, plot_network\n",
- "from mintpy.cli import view, plot_network\n",
- "from mintpy.objects import gps, timeseries\n",
- "from mintpy.smallbaselineApp import TimeSeriesAnalysis\n",
- "from mintpy.utils import ptime, readfile, utils as ut\n",
- "from scipy import signal\n",
- "\n",
- "from solid_utils.sampling import load_geo, samp_pair, profile_samples, haversine_distance\n",
- "\n",
- "#Set Global Plot Parameters\n",
- "plt.rcParams.update({'font.size': 12})\n",
- "\n",
- "################# Set Directories ##########################################\n",
- "\n",
- "# Where the raw data are stored\n",
- "data_dir = '/scratch/katia/NISAR_20240321'\n",
- "\n",
- "# Bounding box\n",
- "bbox='-119.0 33.0 -117.0 35.0'\n",
- "\n",
- "print(\" GUNW dir:\", gunw_dir) \n",
- "\n",
- "# Directory for MintPy processing\n",
- "mintpy_dir = os.path.join(my_directory,site,'MintPy')\n",
- "os.makedirs(mintpy_dir,exist_ok=True)\n",
- "\n",
- "print(\" MintPy dir:\", mintpy_dir)\n",
- "\n",
- "### Change to MintPy workdir\n",
- "vel_file = os.path.join(mintpy_dir, 'velocity.h5')\n",
- "msk_file = os.path.join(mintpy_dir, 'maskConnComp.h5') # maskTempCoh.h5 maskConnComp.h5\n",
- "\n",
- "os.chdir(mintpy_dir)"
+ "import glob"
]
},
{
"cell_type": "markdown",
- "id": "cf7861dd-a341-4296-9d1e-49588af5129a",
- "metadata": {},
+ "id": "07a433c2-ce25-40bc-a1db-f5c4506d3e01",
+ "metadata": {
+ "tags": []
+ },
"source": [
- "\n",
- "## Set up site parameters"
+ "### Define CalVal Site and Parameters <a id='set_calval_params'></a>"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "9ce69b32-a30d-4b59-85ef-44bd6080a22f",
+ "id": "18978e7b-b33d-4f6d-a38d-a87044275edb",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# === Basic Configuration ===\n",
+ "site = \"Erta_Ale\" # Cal/Val location ID\n",
+ "requirement = \"Transient\" # Options: 'Secular', 'Coseismic', 'Transient'\n",
+ "dataset = \"NISAR_sample\" # Dataset type: 'NISAR_sample', 'ARIA_S1', 'ARIA_S1_new'\n",
+ "\n",
+ "\n",
+ "rundate = \"20260205\" # Date of this Cal/Val run\n",
+ "version = \"1\" # Version of this Cal/Val run\n",
+ "custom_sites = \"/home/jovyan/my_sites.txt\" # Path to custom site metadata\n",
+ "\n",
+ "# === Username Detection / Creation ===\n",
+ "user_file = \"/home/jovyan/me.txt\"\n",
+ "if os.path.exists(user_file):\n",
+ " with open(user_file, \"r\") as f:\n",
+ " you = f.readline().strip()\n",
+ "else:\n",
+ " you = input(\"Please type a username for your Cal/Val outputs: \").strip()\n",
+ " with open(user_file, \"w\") as f:\n",
+ " f.write(you)\n",
+ "\n",
+ "# === Load Cal/Val Site Metadata ===\n",
+ "try:\n",
+ " with open(custom_sites, \"r\") as f:\n",
+ " sitedata = json.load(f)\n",
+ " site_info = sitedata[\"sites\"][site]\n",
+ "except (FileNotFoundError, json.JSONDecodeError) as e:\n",
+ " raise RuntimeError(f\"Failed to load site metadata from {custom_sites}: {e}\")\n",
+ "except KeyError:\n",
+ " raise ValueError(f\"Site ID '{site}' not found in {custom_sites}\")\n",
+ "\n",
+ "print(f\"Loaded site: {site}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "9f00c720-2ba2-4843-a667-f2411f468204",
"metadata": {
"tags": []
},
+ "source": [
+ "### Set Directories and Files <a id='set_directories'></a>"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8c3be183-ef89-4ad9-b4e8-498bd85d3b60",
+ "metadata": {},
"outputs": [],
"source": [
- "sites = {\n",
- " ########## NISAR_ALOS1 ##############\n",
- " 'NISAR_ALOS1' : {'calval_location' : 'NISAR_ALOS1',\n",
- " 'reference_lalo' : 'auto',\n",
- " 'download_start_date' : '20071210',\n",
- " 'download_end_date' : '20110320',\n",
- " 'analysis_region' : ' 33.0 35.0 -119.0 -117.0',\n",
- " 'tempBaseMax' : 'auto',\n",
- " 'ifgExcludeList' : 'auto',\n",
- " 'maskWater' : False}} \n",
- "\n",
- "print(sites[site]['analysis_region'])\n",
- "\n",
- "S = sites[site]['analysis_region'].strip().split(' ')[0]\n",
- "N = sites[site]['analysis_region'].strip().split(' ')[1]\n",
- "W = sites[site]['analysis_region'].strip().split(' ')[2]\n",
- "E = sites[site]['analysis_region'].strip().split(' ')[3]\n",
- "bbox = W + ' ' + S + ' ' + E + ' ' + N\n",
- "print('bbox: ', bbox)"
+ "# Static base directory for Cal/Val processing\n",
+ "BASE_DIR = \"/scratch/nisar-st-calval-solidearth\"\n",
+ "\n",
+ "# Define key path components\n",
+ "site_dir = os.path.join(BASE_DIR, dataset, site)\n",
+ "work_dir = os.path.join(site_dir, requirement, you, rundate, f\"v{version}\")\n",
+ "gunw_dir = os.path.join(site_dir, \"products\")\n",
+ "mintpy_dir = os.path.join(work_dir, \"MintPy\")\n",
+ "dem_dir = os.path.join(work_dir, \"DEM\")\n",
+ "\n",
+ "# Create required directories\n",
+ "for path in [work_dir, gunw_dir, mintpy_dir, dem_dir]:\n",
+ " os.makedirs(path, exist_ok=True)\n",
+ "\n",
+ "# Set working directory\n",
+ "os.chdir(work_dir)\n",
+ "\n",
+ "# Log directory structure\n",
+ "print(f\" Work directory: {work_dir}\")\n",
+ "print(f\" GUNW directory: {gunw_dir}\")\n",
+ "print(f\"MintPy directory: {mintpy_dir}\")\n",
+ "print(f\"DEM directory: {dem_dir}\")\n",
+ "\n",
+ "# Configuration file path\n",
+ "site_code = site_info.get('calval_location')\n",
+ "config_file = os.path.join(mintpy_dir, f\"{site_code}.cfg\")"
]
},
{
"cell_type": "markdown",
- "id": "a0430b2e-1d50-43c6-a0eb-2b3e84eb8614",
+ "id": "f1a50541-6604-400f-ac7e-c14361dc8dec",
"metadata": {
"tags": []
},
"source": [
- "\n",
- "# 1. Data access and prep\n",
- "### Under Construction \n",
- "\n",
- "## 1.1 Search for data on the DAAC \n",
- "\n",
- "Currently, the data are staged locally, but we will be exercising searching for them on the DAAC using bounding boxes and time spans of interest.\n",
+ "### Authentication <a id='set_authentication'></a>"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e1328ea4-a4a3-452d-905e-0e4f3c5d795c",
+ "metadata": {
+ "tags": []
+ },
+ "outputs": [],
+ "source": [
+ "def ensure_permission(path, mode=0o600):\n",
+ " if os.path.exists(path):\n",
+ " os.chmod(path, mode)\n",
+ "\n",
+ "# === Earthdata Login ===\n",
+ "fnetrc = os.path.expanduser(\"~/.netrc\")\n",
+ "earthdata_host = \"urs.earthdata.nasa.gov\"\n",
+ "earthdata = False\n",
+ "\n",
+ "if os.path.exists(fnetrc):\n",
+ " ensure_permission(fnetrc)\n",
+ " nrc = netrc.netrc()\n",
+ " credentials = nrc.authenticators(earthdata_host)\n",
+ " if credentials:\n",
+ " earthdata_user, _, earthdata_password = credentials\n",
+ " earthdata = True\n",
+ " print(f\"Earthdata credentials found for user: {earthdata_user}\")\n",
+ "\n",
+ "if not earthdata:\n",
+ " print(\"\\nNEEDED to Download ARIA GUNWs\")\n",
+ " print(\"Create account at: https://urs.earthdata.nasa.gov/\")\n",
+ " earthdata_user = input(\"Earthdata username: \").strip()\n",
+ " earthdata_password = input(\"Earthdata password: \").strip()\n",
+ " with open(fnetrc, \"a\") as f:\n",
+ " f.write(f\"machine {earthdata_host}\\nlogin {earthdata_user}\\npassword {earthdata_password}\\n\")\n",
+ " ensure_permission(fnetrc)\n",
+ " print(\"Earthdata credentials saved.\")\n",
+ "\n",
+ "# === OpenTopography API Key ===\n",
+ "fopentopo = os.path.expanduser(\"~/.topoapi\")\n",
+ "if os.path.exists(fopentopo):\n",
+ " ensure_permission(fopentopo)\n",
+ " with open(fopentopo) as f:\n",
+ " opentopography_api_key = f.read().strip()\n",
+ "else:\n",
+ " print(\"\\nNEEDED To Download DEMs:\")\n",
+ " print(\"Register at: https://portal.opentopography.org/login\")\n",
+ " print(\"Navigate: My Account → myOpenTopo Authorizations and API Key → Request API key\")\n",
+ " opentopography_api_key = input(\"OpenTopo API key: \").strip()\n",
+ " with open(fopentopo, \"w\") as f:\n",
+ " f.write(opentopography_api_key + \"\\n\")\n",
+ " ensure_permission(fopentopo)\n",
+ " print(\"OpenTopography API key saved.\")\n",
+ "\n",
+ "# === CDS (ERA5) API Key ===\n",
+ "cds_config_path = os.path.expanduser(\"~/.cdsapirc\")\n",
+ "if not os.path.exists(cds_config_path):\n",
+ " print(\"\\nNEEDED to use ERA5 correction:\")\n",
+ " print(\"Register and get token: https://cds.climate.copernicus.eu/how-to-api\")\n",
+ " cds_key = input(\"CDS API key (uid:api-key): \").strip()\n",
+ " with open(cds_config_path, \"w\") as f:\n",
+ " f.write(\"url: https://cds.climate.copernicus.eu/api\\n\")\n",
+ " f.write(f\"key: {cds_key}\\n\")\n",
+ " ensure_permission(cds_config_path)\n",
+ " print(\"CDS API config created.\")\n",
+ "else:\n",
+ " print(\"CDS API config file detected. (Ensure it is current)\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b939e0c7-6b25-488a-a4b6-c5fc72b3a7de",
+ "metadata": {},
+ "source": [
+ "\n",
+ "\n",
"\n",
- "## 1.2 Create additional interferograms to supplement your network \n",
+ "<a id='prep_ifg'></a>\n",
+ "## 1. Download and Prepare Interferograms\n",
"\n",
- "- Define your network by specifying how many interferograms you want to make (n+1, n+2?)\n",
- "- Optionally, recreate the entire network (including nearest neighbor pairs) using custom user-defined parameters. These can include:\n",
- " - Resampling, \n",
- " - Stitching together multiple frames at the RSLC level,\n",
- " - Changing the unwrapping algorithm and other processing options\n",
- "- Stage the additional products on the DAAC, where they will be searchable."
+ "In this initial processing step, all the necessary Level-2 unwrapped interferogram products are gathered and organized for analysis with MintPy."
]
},
{
"cell_type": "markdown",
- "id": "1fe5d56e-f84f-44c3-8958-7fc9c7401773",
+ "id": "df51f44d-b58d-4862-885c-180e8e5c3df6",
"metadata": {
"tags": []
},
"source": [
- "\n",
- "# 2. Making the NISAR data cube\n",
- "\n",
- "InSAR time series (i.e., the unfiltered displacement of each pixel vs. time) are estimated from a processed InSAR stack from Section 3.1 (either ascending or descending) using a variant of the small baseline subset (SBAS) approach and then parameterized using the approach described in Section 4. This step uses tools available in the MintPy software package (REF), which provides both SBAS time series and model-based time series parameterization. Recent results on InSAR closure phase and “fading signal” recommend the of use all available interferograms to avoid systematic bias (_Ansari et al._, 2020; _Zheng Y.J. et al._, 2022). As we expect high-quality orbital control for NISAR, we anticipate that the interferogram stack will typically include all nearest-neighbor (i.e., ~12-day pairs) and skip-1 interferograms, which will be the minimum inputs into the SBAS generation step.\n",
- "\n",
- "We use the open-source ARIA-tools package to download processed L2 interferograms over selected cal/val regions from the Alaska Satellite Facility archive and to stitch/crop the frame-based NISAR GUNW products to stacks that can be directly ingested into MintPy for time-series processing. ARIA-tools uses a phase-minimization approach in the product overlap region to stitch the unwrapped and ionospheric phase, a mosaicing approach for coherence and amplitude, and extracts the geometric information from the 3D data cubes through a mosaicking of the 3D datacubes and subsequent intersection with a DEM. ARIA has been used to pre-process NISAR beta products derived from Sentinel-1 which have revealed interseismic deformation and creep along the San Andreas Fault system, along with subsidence, landsliding, and other signals. \n",
- "\n",
- "We use MintPy to validate and modify the InSAR stack, removing interferograms that do not meet minimum coherence criteria, generating a quality control mask for the purpose of identifying noisy pixels within the stack, and referencing estimated deformation to a common location in all interferograms."
+ "### 1.1. Get Data List <a id='prep_data'></a>"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7f9211df-fd07-4ad2-a6c4-6890ddeaf559",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Parse date range from site metadata\n",
+ "start_date = int(site_info.get('download_start_date'))\n",
+ "end_date = int(site_info.get('download_end_date'))\n",
+ "\n",
+ "# Filter GUNW files based on date range and version\n",
+ "gunw_list = []\n",
+ "for filename in os.listdir(gunw_dir):\n",
+ "    if not filename.endswith(\".h5\"):\n",
+ "        continue\n",
+ "    try:\n",
+ "        # Dates are parsed positionally from the standard NISAR GUNW filename\n",
+ "        ref_date = int(filename.split('_')[12].split('T')[0])\n",
+ "        sec_date = int(filename.split('_')[13].split('T')[0])\n",
+ "    except (IndexError, ValueError):\n",
+ "        print(f\"Warning: Skipping malformed filename: {filename}\")\n",
+ "        continue\n",
+ "    if start_date <= ref_date and sec_date <= end_date:\n",
+ "        gunw_list.append(os.path.join(gunw_dir, filename))\n",
+ "\n",
+ "# Sort and write list to product file for future use with ARIA-tools\n",
+ "gunw_list.sort()\n",
+ "product_file = os.path.join(work_dir, \"product_file.txt\")\n",
+ "with open(product_file, \"w\") as f:\n",
+ " f.write(\"\\n\".join(gunw_list))\n",
+ "\n",
+ "print(f\"Wrote {len(gunw_list)} GUNW files to: {product_file}\")\n"
]
},
{
"cell_type": "markdown",
- "id": "c58d8886-766c-43fe-b64a-e4a730f67e77",
+ "id": "dea2d918-ccc8-4b15-b45f-a9d9e935b879",
"metadata": {},
"source": [
- "\n",
- "## 2.1. Set Up MintPy Configuration file\n",
- "\n",
- "\n",
- "The default processing parameters for MintPy's **smallbaselineApp.py** need to be modified by including the following lines in config_file (which must be manually created and placed into mint_dir):\n",
- "\n",
- "- mintpy.load.processor = aria\n",
- "- mintpy.load.unwFile = ../stack/unwrapStack.vrt\n",
- "- mintpy.load.corFile = ../stack/cohStack.vrt\n",
- "- mintpy.load.connCompFile = ../stack/connCompStack.vrt\n",
- "- mintpy.load.demFile = ../DEM/SRTM_3arcsec.dem\n",
- "- mintpy.load.incAngleFile = ../incidenceAngle/{download_start_date}_{download_edn_date}.vrt\n",
- "- mintpy.load.azAngleFile = ../azimuthAngle/{download_start_date}_{download_edn_date}.vrt\n",
- "- mintpy.load.waterMaskFile = ../mask/watermask.msk\n",
- "- mintpy.reference.lalo = auto, or somewhere in your bounding box\n",
- "- mintpy.topographicResidual.pixelwiseGeometry = no\n",
- "- mintpy.troposphericDelay.method = no\n",
- "- mintpy.topographicResidual = no"
+ "### 1.2. Download DEM <a id='prep_dem'></a>"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "dc0f67bb-3a9c-48e1-97df-cc9a85b5828d",
- "metadata": {
- "tags": []
- },
+ "id": "454a2d9e-a276-4538-b627-e65baeeaf4a2",
+ "metadata": {},
"outputs": [],
"source": [
- "config_file = Path(mintpy_dir)/(sites[site]['calval_location'] + '.cfg')\n",
- "\n",
- "\n",
- "####################################################################\n",
- "### Write smallbaseline.py config file\n",
- "# TODO fix \n",
- "config_file_content = \"\"\"\n",
- "mintpy.load.processor = nisar\n",
- "mintpy.compute.numWorker = auto\n",
- "mintpy.load.unwFile = {workdir}/products/*h5\n",
- "mintpy.load.demFile = {mintpydir}/DEM/elevation.dem\n",
- "mintpy.topographicResidual.pixelwiseGeometry = no\n",
- "mintpy.troposphericDelay.method = no\n",
- "mintpy.topographicResidual = no\n",
- "mintpy.network.tempBaseMax = {tempmax}\n",
- "mintpy.network.startDate = {startdatenet}\n",
- "mintpy.network.endDate = {enddatenet}\n",
- "mintpy.velocity.startDate = {startdatevel}\n",
- "mintpy.velocity.endDate = {enddatevel}\n",
- "mintpy.reference.lalo = {reference_lalo}\n",
- "mintpy.network.excludeIfgIndex = {excludeIfg}\"\"\".format(workdir = work_dir,\n",
- " mintpydir = mintpy_dir,\n",
- " tempmax=sites[site]['tempBaseMax'],\n",
- " excludeIfg=sites[site]['ifgExcludeList'],\n",
- " startdatenet=sites[site]['download_start_date'],\n",
- " enddatenet=sites[site]['download_end_date'],\n",
- " startdatevel=sites[site]['download_start_date'],\n",
- " enddatevel=sites[site]['download_end_date'],\n",
- " reference_lalo=sites[site]['reference_lalo'])\n",
- "\n",
- "config_file.write_text(config_file_content)\n",
- "\n",
- "print('MintPy config file:\\n {}:'.format(config_file))\n",
- "print(config_file.read_text())"
+ "# Parse \"lat_min lat_max lon_min lon_max\" (e.g., \"18.50 20.50 -156 -154\")\n",
+ "region_str = site_info.get('analysis_region').strip(\"'\\\"\")\n",
+ "lat_min, lat_max, lon_min, lon_max = map(float, region_str.split())\n",
+ "\n",
+ "# Reorder to left,bottom,right,top for --bbox\n",
+ "bbox = [str(lon_min), str(lat_min), str(lon_max), str(lat_max)]\n",
+ "\n",
+ "out_dem = os.path.join(dem_dir, f\"{site}_elevation.dem\")\n",
+ "\n",
+ "if os.path.isfile(out_dem):\n",
+ " print(f\"{out_dem} exists, skip downloading\")\n",
+ "else:\n",
+ " cmd = [\n",
+ " \"sardem\",\n",
+ " \"--bbox\", *bbox,\n",
+ " \"--output\", out_dem,\n",
+ " \"--output-format\", \"ENVI\"\n",
+ " ]\n",
+ " \n",
+ " print(\"Running:\", \" \".join(cmd))\n",
+ " try:\n",
+ " res = subprocess.run(cmd, check=True, capture_output=True, text=True)\n",
+ " if res.stdout:\n",
+ " print(res.stdout)\n",
+ " print(\"DEM files:\", os.listdir(dem_dir))\n",
+ " except subprocess.CalledProcessError as e:\n",
+ " print(\"sardem failed:\")\n",
+ " print(e.stdout or \"\")\n",
+ " print(e.stderr or \"\")\n",
+ " raise"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "54c045a9-65ce-47ac-9fd3-d32d560667f8",
+ "metadata": {},
+ "source": [
+ "### 1.3. Create MintPy configuration file <a id='create_config'></a>"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "9d882244-5f5b-46c0-9552-ad0cf602929c",
- "metadata": {
- "tags": []
- },
+ "id": "c3817649-4966-4ab9-a213-27c6dddbfd00",
+ "metadata": {},
"outputs": [],
"source": [
- "# Get the DEM\n",
- "dem_dir = os.path.join(mintpy_dir,'DEM')\n",
- "os.makedirs(dem_dir,exist_ok=True)\n",
- "demfile = os.path.join(dem_dir,'elevation.dem')\n",
+ "os.chdir(mintpy_dir)\n",
"\n",
- "cmd = 'sardem --bbox ' + bbox + ' --data-source NASA -o ' + demfile\n",
- "os.system(cmd)"
+ "# Build config as a dictionary first\n",
+ "config_file_content = {\n",
+ " \"mintpy.load.processor\": \"nisar\",\n",
+ " \"mintpy.compute.cluster\": \"local\",\n",
+ " \"mintpy.compute.numWorker\": \"auto\",\n",
+ " \"mintpy.topographicResidual.pixelwiseGeometry\": \"no\",\n",
+ " \"mintpy.troposphericDelay.method\": \"no\",\n",
+ " \"mintpy.topographicResidual\": \"no\",\n",
+ " \"mintpy.network.tempBaseMax\": site_info.get('tempBaseMax'),\n",
+ " \"mintpy.network.startDate\": site_info.get('download_start_date'),\n",
+ " \"mintpy.network.endDate\": site_info.get('download_end_date'),\n",
+ " \"mintpy.velocity.startDate\": site_info.get('download_start_date'),\n",
+ " \"mintpy.velocity.endDate\": site_info.get('download_end_date'),\n",
+ " \"mintpy.reference.lalo\": site_info.get('reference_lalo'),\n",
+ " \"mintpy.network.excludeDate12\": site_info.get('ifgExcludePair'),\n",
+ " \"mintpy.network.excludeDate\" : site_info.get('ifgExcludeDate'),\n",
+ " \"mintpy.network.excludeIfgIndex\" : site_info.get('ifgExcludeIndex'),\n",
+ "}\n",
+ "\n",
+ "# Write config dictionary to text file\n",
+ "with open(config_file, \"w\") as f:\n",
+ " f.writelines(f\"{k} = {v}\\n\" for k, v in config_file_content.items())\n",
+ "\n",
+ "# Confirm output\n",
+ "print(f\"MintPy config file written to:\\n {config_file}\\n\")\n",
+ "with open(config_file, \"r\") as f:\n",
+ " print(f.read())\n"
]
},
{
"cell_type": "markdown",
- "id": "7b536b6f-aac9-4cee-a391-8a2e152de55d",
+ "id": "0de4a38e-60f3-421b-949f-aff1145e5881",
"metadata": {},
"source": [
- "\n",
- "## 2.2. Load Data into MintPy\n",
+ "### 1.4. Load Data into MintPy Cubes <a id='prep_load_data'></a>\n",
"\n",
- "The output of this step is an \"inputs\" directory in 'calval_directory/calval_location/MintPy/\" containing two HDF5 files:\n",
- "- ifgramStack.h5: This file contains 6 dataset cubes (e.g. unwrapped phase, coherence, connected components etc.) and multiple metadata\n",
- "- geometryGeo.h5: This file contains geometrical datasets (e.g., incidence/azimuth angle, masks, etc.)"
+ "The output of this step is an \"inputs\" directory under calval_directory/calval_location/MintPy/ containing four HDF5 files:\n",
+ "- ifgramStack.h5: Contains 6 dataset cubes (e.g. unwrapped phase, coherence, connected components etc.) and multiple metadata.\n",
+ "- geometryGeo.h5: Contains geometrical datasets (e.g., incidence/azimuth angle, masks, etc.).\n",
+ "- ionStack.h5: Contains pairwise ionosphere data present in the NISAR L2 GUNW.\n",
+ "- tropoStack.h5: Contains 3D interpolated total tropospheric delay calculated from the NISAR radar grid tropospheric delay layers.\n",
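+ "\n",
+ "A quick way to sanity-check the loaded stacks after this step (a minimal sketch; file names as listed above):\n",
+ "\n",
+ "```python\n",
+ "import os\n",
+ "import h5py\n",
+ "\n",
+ "# Print the top-level datasets of each stack written by the load step\n",
+ "for fname in ['ifgramStack.h5', 'geometryGeo.h5', 'ionStack.h5', 'tropoStack.h5']:\n",
+ "    path = os.path.join(mintpy_dir, 'inputs', fname)\n",
+ "    if os.path.exists(path):\n",
+ "        with h5py.File(path, 'r') as f:\n",
+ "            print(fname, '->', list(f.keys()))\n",
+ "```"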
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "e0cb7791-8cfc-4ec1-9afe-dc30f1201820",
- "metadata": {
- "tags": []
- },
+ "id": "40b57cd4-a7b0-44e8-b0df-1a1a33c90a9d",
+ "metadata": {},
"outputs": [],
"source": [
- "os.chdir(mintpy_dir)\n",
- "print(mintpy_dir)\n",
- "command = 'smallbaselineApp.py ' + str(config_file) + ' --dir ' + mintpy_dir + ' --dostep load_data'\n",
+ "command = f'prep_nisar.py -i \"{gunw_dir}/*.h5\" -d {out_dem} -o {mintpy_dir}'\n",
"process = subprocess.run(command, shell=True)\n",
"print('Mintpy input files:')\n",
- "[x for x in os.listdir('inputs') if x.endswith('.h5')]"
+ "[x for x in os.listdir(os.path.join(mintpy_dir, 'inputs')) if x.endswith('.h5')]"
]
}
],
@@ -319,7 +440,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.13.5"
+ "version": "3.14.3"
}
},
"nbformat": 4,
diff --git a/solid_utils/nisar_l2_cube_interpolator.py b/solid_utils/nisar_l2_cube_interpolator.py
new file mode 100644
index 0000000..3e5101a
--- /dev/null
+++ b/solid_utils/nisar_l2_cube_interpolator.py
@@ -0,0 +1,426 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on 10/23/2024
+
+@author: Xiaodong Huang @JPL Caltech
+"""
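+
+# Example CLI usage (paths are illustrative):
+#   nisar_l2_cube_interpolator.py NISAR_L2_GUNW.h5 --dem dem.tif \
+#       --out incidenceAngle.tif --cube-ds-name incidenceAngle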
+
+import argparse
+import os
+import time
+import warnings
+
+import h5py
+import numpy as np
+import rasterio
+from osgeo import gdal, osr
+from rasterio.coords import BoundingBox
+from rasterio.warp import transform_bounds
+from scipy.interpolate import RegularGridInterpolator
+
+# GCOV datasets in its radar grid datacube
+GCOV_RADAR_GRID_DATASETS = ['alongTrackUnitVectorX',
+ 'alongTrackUnitVectorY',
+ 'elevationAngle',
+ 'groundTrackVelocity',
+ 'incidenceAngle',
+ 'losUnitVectorX',
+ 'losUnitVectorY',
+ 'zeroDopplerAzimuthTime',
+ 'slantRange']
+
+# GSLC datasets in its radar grid datacube
+GSLC_RADAR_GRID_DATASETS = GCOV_RADAR_GRID_DATASETS
+
+# GOFF datasets in its radar grid datacube
+GOFF_RADAR_GRID_DATASETS = \
+ GCOV_RADAR_GRID_DATASETS[:-2] + ['parallelBaseline',
+ 'perpendicularBaseline',
+ 'referenceSlantRange',
+ 'referenceZeroDopplerAzimuthTime',
+ 'secondarySlantRange',
+ 'secondaryZeroDopplerAzimuthTime']
+
+# GUNW datasets in its radar grid datacube
+GUNW_RADAR_GRID_DATASETS = \
+ GOFF_RADAR_GRID_DATASETS + ['hydrostaticTroposphericPhaseScreen',
+ 'wetTroposphericPhaseScreen',
+ 'combinedTroposphericPhaseScreen',
+ 'slantRangeSolidEarthTidesPhase']
+
+def get_parser():
+ '''
+ Command line parser.
+ '''
+ descr = 'Interpolate the NISAR L2 3D metadata datacube'
+ parser = argparse.ArgumentParser(description=descr)
+
+ parser.add_argument(type=str,
+ dest='input_file',
+ help='Input NISAR L2 file')
+
+ parser.add_argument('--dem',
+ '--dem-file',
+ dest='input_dem_file',
+ required=True,
+ type=str,
+ help='Reference DEM file')
+
+ parser.add_argument('--out',
+ '--output',
+ dest='output_file',
+ type=str,
+ required=True,
+                        help='Output file of the interpolated 2D dataset')
+
+ parser.add_argument('--frequency',
+ '--freq',
+ type=str,
+ default='A',
+ dest='frequency',
+ choices=['A', 'B'],
+ help='Frequency band, default: A')
+
+ parser.add_argument('--polarization',
+ '--pol',
+ type=str,
+ default='HH',
+ dest='polarization',
+ choices=['HH', 'HV', 'VH', 'VV'],
+ help='Polarizations, default: HH')
+
+ parser.add_argument('--cube-interp-method',
+ dest='cube_interp_method',
+ type=str,
+ default='linear',
+ choices=['linear', 'nearest', 'slinear', 'cubic',
+ 'quintic', 'pchip'],
+                        help='Datacube interpolation method'
+                             ' of the SciPy RegularGridInterpolator, default: linear')
+
+ parser.add_argument('--dem-resample-method',
+ dest='dem_resample_method',
+ type=str,
+ default='cubicspline',
+ choices=['near', 'bilinear', 'cubic', 'cubicspline',
+ 'lanczos', 'average','med'],
+ help='DEM interpolation method, default: cubicspline')
+
+ parser.add_argument('--out-resampled-dem',
+ '--out-dem',
+ type=str,
+ default=None,
+ dest='out_resampled_dem',
+                        help='Output path for the resampled DEM, default: None')
+
+ parser.add_argument('--out-format',
+ dest='out_format',
+ default='GTiff',
+                        help="The raster format of the output,"
+                             " must be a GDAL-supported raster format, default: GTiff")
+
+ parser.add_argument('--force-resample-dem',
+ action='store_true',
+ dest='force_resample_dem',
+                        help='Force resampling of the DEM')
+
+ parser.add_argument('--gunw-ds-name',
+ dest='gunw_dataset_name',
+ default='unwrappedInterferogram',
+ choices=['unwrappedInterferogram',
+ 'wrappedInterferogram',
+ 'pixelOffsets'],
+ help='GUNW dataset names, default: unwrappedInterferogram')
+
+ parser.add_argument('--cube-ds-name',
+ dest='cube_dataset_name',
+ required=True,
+ default='incidenceAngle',
+ choices=GUNW_RADAR_GRID_DATASETS + ['zeroDopplerAzimuthTime',
+ 'slantRange'],
+ help='3D metadata datacube names')
+ return parser.parse_args()
+
+
+def compute_coverage_area(dem_bounds, ds_bounds):
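+    """Percentage of the dataset bounding box that is covered by the DEM."""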
+ x_min = max(dem_bounds.left, ds_bounds.left)
+ y_min = max(dem_bounds.bottom, ds_bounds.bottom)
+ x_max = min(dem_bounds.right, ds_bounds.right)
+ y_max = min(dem_bounds.top, ds_bounds.top)
+
+ if x_min < x_max and y_min < y_max:
+ return (x_max - x_min) * (y_max - y_min) / \
+ ((ds_bounds.right - ds_bounds.left)*\
+ (ds_bounds.top - ds_bounds.bottom)) * 100.0
+ else:
+ return 0.0
+
+def resample_dem(input_dem_path,
+ out_epsg,
+ out_start_x,
+ out_start_y,
+ out_spacing_x,
+ out_spacing_y,
+ out_length,
+ out_width,
+ force_resample = False,
+ dem_resample_method = 'cubicspline',
+ out_path = None):
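+    """Resample the DEM onto the product geogrid defined by the origin,
+    spacing, and size arguments; optionally write the result to out_path."""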
+
+ # Check the DEM coverage over the dataset
+ # raise warning if the coverage is less than 100.0
+ with rasterio.open(input_dem_path) as src:
+ dem_bounds = src.bounds
+ original_crs = src.crs
+ target_crs = f'EPSG:{out_epsg}'
+
+ # DEM bounds to the target CRS
+ dem_bounds = \
+ transform_bounds(original_crs, target_crs,
+ dem_bounds.left, dem_bounds.bottom,
+ dem_bounds.right, dem_bounds.top)
+ dem_bounds = BoundingBox(left = dem_bounds[0],
+ bottom = dem_bounds[1],
+ right = dem_bounds[2],
+ top = dem_bounds[3])
+ # the dataset bounds
+ ds_bounds = BoundingBox(left=out_start_x,
+ bottom=out_start_y + out_length * out_spacing_y,
+ right=out_start_x + out_width * out_spacing_x,
+ top=out_start_y)
+ coverage_area = round(compute_coverage_area(dem_bounds, ds_bounds),2)
+ if coverage_area < 100.0:
+ warnings.warn(
+ f"DEM only covers {coverage_area}% of the dataset!",
+ UserWarning)
+
+ # If the DEM exists and the resample is not forced,
+ # then read it from the existing file
+ if ((out_path is not None) and
+ (os.path.exists(out_path)) and
+ (not force_resample)):
+ des = gdal.Open(out_path)
+ band = des.GetRasterBand(1)
+ resampled_dem = band.ReadAsArray()
+ else:
+ # Resample the DEM using GDALWarp
+ input_raster = gdal.Open(input_dem_path)
+ output_extent = (out_start_x,
+ out_start_y + out_length * out_spacing_y,
+ out_start_x + out_width * out_spacing_x,
+ out_start_y)
+
+ gdalwarp_options = gdal.WarpOptions(format="MEM",
+ outputType = gdal.GDT_Float32,
+ dstSRS=f"EPSG:{out_epsg}",
+ xRes=out_spacing_x,
+ yRes=np.abs(out_spacing_y),
+ resampleAlg=dem_resample_method,
+ outputBounds=output_extent)
+
+ dst_ds = gdal.Warp("", input_raster, options=gdalwarp_options)
+ resampled_dem = dst_ds.ReadAsArray()
+
+ # Save to the harddrive if the out_path is not None
+ if out_path is not None:
+ driver = gdal.GetDriverByName('GTiff')
+ data_type = gdal.GDT_Float32
+ des = driver.Create(out_path, out_width, out_length, 1, data_type)
+
+ # Write data to the hard drive
+ band = des.GetRasterBand(1)
+ band.WriteArray(resampled_dem)
+
+ # Set the geotransform
+ geotransform = (out_start_x, out_spacing_x, 0,
+ out_start_y, 0, out_spacing_y)
+ des.SetGeoTransform(geotransform)
+
+ # Set the projection
+ srs = osr.SpatialReference()
+ srs.ImportFromEPSG(out_epsg)
+ des.SetProjection(srs.ExportToWkt())
+
+ return resampled_dem
+
+
+def interpolate_L2_datacube(nisar_L2_product_path,
+ dem_path,
+ cube_ds_name,
+ out_ds_path,
+ cube_interp_method = 'linear',
+ gunw_geogrid_ds_name = None,
+ frequency = 'A',
+ polarization = 'HH',
+ force_resample_dem = False,
+ dem_resample_method = 'cubicspline',
+ out_dem_file = None,
+ out_format = "GTiff"):
+ """
+ Interpolate the 3D metadata datacube in the NISAR L2 product
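+
+    Arguments mirror the CLI options in get_parser(): the input L2 HDF5
+    product, a reference DEM, the radar-grid dataset to interpolate, the
+    output raster path, and optional interpolation/resampling settings.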
+ """
+
+ with h5py.File(nisar_L2_product_path) as f:
+ # get the product type in the HDF5, and it must be
+ # 'GCOV', 'GSLC', 'GUNW', or 'GOFF'
+ product_type = f['science/LSAR/identification/productType'][()].astype(str)
+ if product_type not in ['GCOV', 'GSLC', 'GUNW', 'GOFF']:
+            err_str = f'Product type {product_type} is not supported'
+ raise ValueError(err_str)
+
+ # Check the cube dataset name in the radar grid metadata cube
+        radar_grid_datasets = {'GCOV': GCOV_RADAR_GRID_DATASETS,
+                               'GSLC': GSLC_RADAR_GRID_DATASETS,
+                               'GOFF': GOFF_RADAR_GRID_DATASETS,
+                               'GUNW': GUNW_RADAR_GRID_DATASETS}[product_type]
+        if cube_ds_name not in radar_grid_datasets:
+            err_str = (f'{product_type} does not have {cube_ds_name} in its'
+                       ' datacube, please pick one of:\n' +
+                       ','.join(radar_grid_datasets))
+            raise ValueError(err_str)
+
+        # Get the X, Y, and Z coordinates of the datacube
+ xcoords = f[f'/science/LSAR/{product_type}'
+ '/metadata/radarGrid/xCoordinates'][()]
+ ycoords = f[f'/science/LSAR/{product_type}'
+ '/metadata/radarGrid/yCoordinates'][()]
+ zcoords = f[f'/science/LSAR/{product_type}'
+ '/metadata/radarGrid/heightAboveEllipsoid'][()]
+
+        # Get the datacube dataset
+ ds_name = f'/science/LSAR/{product_type}/metadata/radarGrid/{cube_ds_name}'
+ if ds_name not in f:
+ err_str = f'{ds_name} is not in the radar grid metadata cube of {product_type}'
+ raise ValueError(err_str)
+
+ # cube dataset
+ ds_cube = f[ds_name][()]
+
+ # Build the regular grid interpolator
+ # and check the first dimension since the Baseline top/bottom mode only has 2 heights
+ ds_cube_shape = ds_cube.shape
+ if ds_cube_shape[0] == 2:
+            interpolator = RegularGridInterpolator(
+                (np.array([zcoords[0], zcoords[-1]]),
+                 ycoords, xcoords), ds_cube, method = cube_interp_method)
+        else:
+            interpolator = RegularGridInterpolator(
+                (zcoords, ycoords, xcoords), ds_cube, method = cube_interp_method)
+
+ # Get the geogrid of the product in the L2 product and check if
+ # the frequency is in the product
+ group_name = f'/science/LSAR/{product_type}/grids/frequency{frequency}/'
+ if group_name not in f:
+ err_str = f'Frequency {frequency} is not in the product'
+ raise ValueError(err_str)
+
+ out_epsg = 4326
+ if product_type == 'GUNW':
+ if gunw_geogrid_ds_name is None:
+ gunw_ds_name = 'unwrappedInterferogram'
+ else:
+ gunw_ds_name = gunw_geogrid_ds_name
+ ds_x = f[f'{group_name}/{gunw_ds_name}/{polarization}/xCoordinates'][()]
+ ds_y = f[f'{group_name}/{gunw_ds_name}/{polarization}/yCoordinates'][()]
+ out_epsg = int(f[f'{group_name}/{gunw_ds_name}/{polarization}/projection'][()])
+ elif product_type == 'GOFF':
+ ds_x = f[f'{group_name}/pixelOffsets/{polarization}/layer1/xCoordinates'][()]
+ ds_y = f[f'{group_name}/pixelOffsets/{polarization}/layer1/yCoordinates'][()]
+ out_epsg = int(f[f'{group_name}/pixelOffsets/{polarization}/layer1/projection'][()])
+ else:
+ ds_x = f[f'{group_name}/xCoordinates'][()]
+ ds_y = f[f'{group_name}/yCoordinates'][()]
+ out_epsg = int(f[f'{group_name}/projection'][()])
+
+ # Resample the DEM to be the same geogrid with the dataset
+ ds_dem_data = resample_dem(dem_path, out_epsg,
+ ds_x[0], ds_y[0],
+ ds_x[1] - ds_x[0],
+ ds_y[1] - ds_y[0],
+ len(ds_y), len(ds_x),
+ force_resample_dem,
+ dem_resample_method,
+ out_dem_file)
+
+ # Build the meshgrid for the interpolator
+ y, x = np.meshgrid(ds_y, ds_x,
+ indexing='ij')
+ pts = np.stack([ds_dem_data.ravel(),
+ y.ravel(),
+ x.ravel()]).T
+
+ # Interpolating the points
+    interp_pts = interpolator(pts)
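+    # Open the matching L2 grid layer via GDAL's netCDF driver so the output inherits its georeferencing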
+ group_name = \
+ f"NETCDF:{nisar_L2_product_path}://science/LSAR/{product_type}/grids/frequency{frequency}"
+ if product_type == 'GCOV':
+        # GCOV grid layers are named by the doubled polarization (e.g. HHHH)
+        src = rasterio.open(f"{group_name}/{polarization * 2}",
+ driver = 'netCDF')
+ if product_type == 'GSLC':
+ src = rasterio.open(f"{group_name}/{polarization}",
+ driver = 'netCDF')
+ if product_type == 'GUNW':
+        # gunw_ds_name was resolved above and defaults to 'unwrappedInterferogram'
+        if gunw_ds_name == 'pixelOffsets':
+            gunw_ds_name = f'{gunw_ds_name}/{polarization}/alongTrackOffset'
+        else:
+            # unwrapped and wrapped interferograms both expose coherenceMagnitude
+            gunw_ds_name = f'{gunw_ds_name}/{polarization}/coherenceMagnitude'
+        src = rasterio.open(f"{group_name}/{gunw_ds_name}",
+                            driver = 'netCDF')
+ if product_type == 'GOFF':
+ src = rasterio.open(
+ f"{group_name}/pixelOffsets/{polarization}/layer1/alongTrackOffset",
+ driver = 'netCDF')
+
+ # Write the data to the harddrive
+ out_meta = src.meta.copy()
+ out_meta.update({"driver": out_format,
+ "height": src.shape[0],
+ "width": src.shape[1],
+ "dtype": 'float32',
+ 'nodata': 0.0,
+ })
+
+ with rasterio.open(out_ds_path, "w", **out_meta) as dest:
+ dest.write(interp_pts.reshape(src.shape),1)
+
+def main():
+    args = get_parser()
+    t_all = time.time()
+    interpolate_L2_datacube(args.input_file,
+                            args.input_dem_file,
+                            args.cube_dataset_name,
+                            args.output_file,
+                            args.cube_interp_method,
+                            args.gunw_dataset_name,
+                            args.frequency,
+                            args.polarization,
+                            args.force_resample_dem,
+                            args.dem_resample_method,
+                            args.out_resampled_dem,
+                            args.out_format)
+    t_all_elapsed = time.time() - t_all
+    print(f"Successfully ran the {args.cube_dataset_name}"
+          f" interpolation in {t_all_elapsed:.3f} seconds")
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file