diff --git a/.github/workflows/auto-update-bulletin-a.yml b/.github/workflows/auto-update-bulletin-a.yml
index 5945c40..b6106d0 100644
--- a/.github/workflows/auto-update-bulletin-a.yml
+++ b/.github/workflows/auto-update-bulletin-a.yml
@@ -1,6 +1,6 @@
# This workflow will install Python dependencies and get the latest bulletin-A file
-name: Auto-Update Bulletin-A files
+name: Update Bulletin-A
on:
schedule:
diff --git a/.github/workflows/auto-update-files.yml b/.github/workflows/auto-update-files.yml
index fa53562..a6b2c39 100644
--- a/.github/workflows/auto-update-files.yml
+++ b/.github/workflows/auto-update-files.yml
@@ -1,6 +1,6 @@
# This workflow will install Python dependencies and update the time and EOP files
-name: Auto-Update Files
+name: Update Data
on:
schedule:
diff --git a/.github/workflows/python-request.yml b/.github/workflows/python-request.yml
index 2dce92e..485ebff 100644
--- a/.github/workflows/python-request.yml
+++ b/.github/workflows/python-request.yml
@@ -1,7 +1,7 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-name: Python on pull request
+name: pytest Build
on:
pull_request:
@@ -42,11 +42,19 @@ jobs:
run: |
# install the package in editable mode
pip install --no-deps --editable .
- pixi run --environment dev pytest -n 0 --cov=./ --cov-report=xml \
+ pixi run --environment dev pytest -n 0 --cov=timescale \
+ --junitxml=pytest.xml --cov-report=xml \
--username=${{ secrets.EARTHDATA_USERNAME }} \
--password=${{ secrets.EARTHDATA_PASSWORD }}
+ - name: Create coverage comment
+ uses: MishaKav/pytest-coverage-comment@main
+ with:
+ pytest-xml-coverage-path: coverage.xml
+ junitxml-path: pytest.xml
+ unique-id-for-comment: ${{ matrix.os }}
+ title: Coverage for Python on ${{ matrix.os }}
- name: Archive code coverage results
uses: actions/upload-artifact@v4
with:
name: code-coverage-report-${{ matrix.os }}
- path: ./coverage.xml
+ path: coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/ruff-format.yml b/.github/workflows/ruff-format.yml
new file mode 100644
index 0000000..19e1a6f
--- /dev/null
+++ b/.github/workflows/ruff-format.yml
@@ -0,0 +1,18 @@
+name: Ruff Format
+
+on:
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ branches:
+ - main
+
+jobs:
+ ruff-format:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Format and annotate PR
+ uses: astral-sh/ruff-action@v3
+ with:
+ version: "latest"
+ args: "format --check --diff"
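
The same check can be reproduced locally before pushing; a minimal sketch, assuming ruff is installed in the working environment (e.g. via pip install ruff):

import subprocess

# run the identical check the workflow performs; check=True raises
# CalledProcessError when any file would be reformatted
subprocess.run(["ruff", "format", "--check", "--diff", "."], check=True)
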
diff --git a/.gitignore b/.gitignore
index 000cf60..4302522 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@
*.gz
*.iso
*.jar
+*.mat
*.rar
*.tar
*.zip
@@ -46,6 +47,8 @@ pythonenv*/
venv/
*build-commands.txt
setup-miniconda-patched-environment.yml
+pytest.xml
+coverage.xml
# OS generated files #
######################
.DS_Store
diff --git a/MANIFEST.in b/MANIFEST.in
index 86e54ec..47625a7 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -3,5 +3,10 @@ prune .github*
prune doc*
prune run*
prune test*
-exclude *.cfg
+exclude *.cff
+exclude *.md
exclude *.yml
+exclude pixi.lock
+exclude .gitattributes
+exclude .gitignore
+exclude .zenodo.json
diff --git a/README.md b/README.md
index a677696..04a19c4 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,40 @@
# timescale
-[License](https://github.com/pyTMD/timescale/blob/main/LICENSE)
-[Documentation Status](https://timescale.readthedocs.io/en/latest/?badge=latest)
-[PyPI](https://pypi.python.org/pypi/timescale/)
-[conda-forge](https://anaconda.org/conda-forge/timescale)
-[Latest Release](https://github.com/pyTMD/timescale/releases/latest)
-[DOI](https://doi.org/10.5281/zenodo.5555395)
-
Python tools for time and astronomical calculations
+## About
+
+| | |
+| --- | --- |
+| Version: | *(PyPI and conda-forge badges)* |
+| Citation: | *(Zenodo DOI badge)* |
+| Tests: | *(test status and coverage badges)* |
+| License: | *(MIT license badge)* |
+
For more information: see the documentation at [timescale.readthedocs.io](https://timescale.readthedocs.io/)
## Installation
diff --git a/pixi.lock b/pixi.lock
index f514993..1580dba 100644
--- a/pixi.lock
+++ b/pixi.lock
@@ -6168,7 +6168,7 @@ packages:
- pypi: ./
name: timescale
version: 0.1.1
- sha256: 7407852dfdd8082e9072aa56621ecac7e30a68a029fd574e41decbe8c6337e31
+ sha256: 43965d2a04b4245f267d6a0333ee2967d9162636fa2e7c75db22cefd9e811980
requires_dist:
- lxml
- numpy
@@ -6188,7 +6188,7 @@ packages:
- pytest>=4.6 ; extra == 'dev'
- pytest-cov ; extra == 'dev'
- pytest-xdist ; extra == 'dev'
- requires_python: ~=3.6
+ requires_python: ~=3.9
editable: true
- conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_hd72426e_102.conda
sha256: a84ff687119e6d8752346d1d408d5cf360dee0badd487a472aa8ddedfdc219e1
diff --git a/pyproject.toml b/pyproject.toml
index dc97134..f2b97e2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,18 +17,15 @@ keywords = [
authors = [
{name = "Tyler Sutterley"},
- {name = "Karen Alley"},
- {name = "Kelly Brunt"},
- {name = "Susan Howard"},
- {name = "Laurie Padman"},
- {name = "Matt Siegfried"},
{email = "tsutterl@uw.edu"}
]
-maintainers = [{ name = "timescale contributors" }]
+maintainers = [
+ { name = "timescale contributors" }
+]
license = {file = "LICENSE"}
readme = {file = "README.md", content-type = "text/markdown"}
-requires-python = "~=3.6"
+requires-python = "~=3.9"
dependencies = [
"lxml",
"numpy",
@@ -42,13 +39,12 @@ classifiers=[
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
"Topic :: Scientific/Engineering :: Physics",
]
@@ -83,8 +79,7 @@ source = [
"test",
]
omit = [
- "setup.py",
- "conf.py",
+ "doc/*",
]
[tool.coverage.report]
@@ -94,13 +89,37 @@ precision = 2
[tool.ruff]
line-length = 80
indent-width = 4
+exclude = [
+ ".git",
+ ".pixi",
+ "build",
+ "doc",
+ "run",
+ "test",
+]
+
+[tool.ruff.lint]
+select = [
+ # Pyflakes
+ "F",
+ # pyupgrade
+ "UP",
+ # flake8-bugbear
+ "B",
+ # flake8-simplify
+ "SIM",
+ # isort
+ "I",
+ # ruff-specific
+ "RUF",
+]
[tool.ruff.lint.pydocstyle]
convention = "numpy"
[tool.ruff.format]
-quote-style = "single"
indent-style = "space"
+line-ending = "auto"
docstring-code-format = false
[tool.pixi.workspace]
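
The newly selected lint families translate into concrete rewrites; a hypothetical sketch (illustrative code, not from this repository) of patterns that UP (pyupgrade) and SIM (flake8-simplify) flag:

# UP031: printf-style formatting should become an f-string
name = "timescale"
tag_old = "%s-%s" % (name, "0.1.1")
tag_new = f"{name}-0.1.1"

# SIM210: a True/False conditional expression collapses to the comparison
x = 5
flag_old = True if x > 0 else False
flag_new = x > 0
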
diff --git a/timescale/__init__.py b/timescale/__init__.py
index 14db22d..5b3ff0b 100644
--- a/timescale/__init__.py
+++ b/timescale/__init__.py
@@ -6,48 +6,57 @@
Documentation is available at https://timescale.readthedocs.io
"""
+
import timescale.eop
import timescale.time
import timescale.utilities
import timescale.version
+
# shortcut wrapper functions for timescale.time.Timescale methods
# delta time
def from_deltatime(*args, **kwargs):
- """Wrapper for ``timescale.time.Timescale().from_deltatime``
- """
+ """Wrapper for ``timescale.time.Timescale().from_deltatime``"""
return timescale.time.Timescale.from_deltatime(*args, **kwargs)
+
+
# Julian dates
def from_julian(ut1, **kwargs):
return timescale.time.Timescale(ut1 - 2400000.5)
+
+
# Besselian years
def from_besselian(B, **kwargs):
- """Wrapper for creating a ``Timescale`` object from Besselian years
- """
+ """Wrapper for creating a ``Timescale`` object from Besselian years"""
# convert Besselian year to MJD
MJD = 15019.81352 + (B - 1900.0) * 365.242198781
return timescale.time.Timescale(MJD)
+
+
# calendar dates
def from_calendar(*args, **kwargs):
- """Wrapper for ``timescale.time.Timescale().from_calendar``
- """
+ """Wrapper for ``timescale.time.Timescale().from_calendar``"""
return timescale.time.Timescale.from_calendar(*args, **kwargs)
+
+
# datetime arrays
def from_datetime(*args, **kwargs):
- """Wrapper for ``timescale.time.Timescale().from_datetime``
- """
+ """Wrapper for ``timescale.time.Timescale().from_datetime``"""
return timescale.time.Timescale.from_datetime(*args, **kwargs)
+
+
# range of dates
def from_range(start, end, *args, **kwargs):
- """Wrapper for creating a ``Timescale`` object from a range of dates
- """
+ """Wrapper for creating a ``Timescale`` object from a range of dates"""
d = timescale.time.date_range(start, end, *args, **kwargs)
return timescale.time.Timescale.from_datetime(d)
+
+
# list of timescale objects
def from_list(*args, **kwargs):
- """Wrapper for ``timescale.time.Timescale().from_list``
- """
+ """Wrapper for ``timescale.time.Timescale().from_list``"""
return timescale.time.Timescale.from_list(*args, **kwargs)
+
# get version information
__version__ = timescale.version.version
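
As a sanity check on the constants in from_besselian, the standard epoch B1950.0 lands on its canonical Julian date:

# Besselian epoch B1950.0 with the same constants as from_besselian
B = 1950.0
MJD = 15019.81352 + (B - 1900.0) * 365.242198781
JD = MJD + 2400000.5
# MJD ~= 33281.9235 and JD ~= 2433282.4235, the canonical B1950.0 epoch
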
diff --git a/timescale/eop.py b/timescale/eop.py
index 16ee8b9..7bb6239 100644
--- a/timescale/eop.py
+++ b/timescale/eop.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-u"""
+"""
eop.py
Written by Tyler Sutterley (07/2025)
Utilities for maintaining and calculating Earth Orientation Parameters (EOP)
@@ -27,6 +27,7 @@
Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings
Written 11/2020
"""
+
from __future__ import annotations
import logging
@@ -37,9 +38,10 @@
import timescale.utilities
# IERS mean pole file for 2015 conventional mean pole
-_mean_pole_file = timescale.utilities.get_data_path(['data','mean-pole.tab'])
+_mean_pole_file = timescale.utilities.get_data_path(["data", "mean-pole.tab"])
# daily polar motion file from IERS
-_finals_file = timescale.utilities.get_data_path(['data','finals.all'])
+_finals_file = timescale.utilities.get_data_path(["data", "finals.all"])
+
# PURPOSE: connects to servers and downloads mean pole files
def update_mean_pole(verbose: bool = False, mode: oct = 0o775):
@@ -65,14 +67,10 @@ def update_mean_pole(verbose: bool = False, mode: oct = 0o775):
HASH = timescale.utilities.get_hash(_mean_pole_file)
# try downloading from Paris Observatory IERS Centers ftp servers
- HOST = ['hpiers.obspm.fr', 'iers', 'eop', 'eopc01', 'mean-pole.tab']
+ HOST = ["hpiers.obspm.fr", "iers", "eop", "eopc01", "mean-pole.tab"]
try:
- timescale.utilities.from_ftp(HOST,
- timeout=20,
- local=LOCAL,
- hash=HASH,
- verbose=verbose,
- mode=mode
+ timescale.utilities.from_ftp(
+ HOST, timeout=20, local=LOCAL, hash=HASH, verbose=verbose, mode=mode
)
except Exception as exc:
logging.debug(traceback.format_exc(exc))
@@ -81,14 +79,10 @@ def update_mean_pole(verbose: bool = False, mode: oct = 0o775):
return
# try downloading from Paris Observatory IERS Centers https servers
- HOST = ['http://hpiers.obspm.fr', 'eoppc', 'eop', 'eopc01', 'mean-pole.tab']
+ HOST = ["http://hpiers.obspm.fr", "eoppc", "eop", "eopc01", "mean-pole.tab"]
try:
- timescale.utilities.from_http(HOST,
- timeout=20,
- local=LOCAL,
- hash=HASH,
- verbose=verbose,
- mode=mode
+ timescale.utilities.from_http(
+ HOST, timeout=20, local=LOCAL, hash=HASH, verbose=verbose, mode=mode
)
except Exception as exc:
logging.debug(traceback.format_exc(exc))
@@ -97,7 +91,8 @@ def update_mean_pole(verbose: bool = False, mode: oct = 0o775):
return
# raise exception
- raise RuntimeError(f'Unable to download {LOCAL}')
+ raise RuntimeError(f"Unable to download {LOCAL}")
+
# PURPOSE: read table of IERS pole coordinates and calculate Gaussian average
def calculate_mean_pole(verbose: bool = False, mode: oct = 0o775):
@@ -121,17 +116,17 @@ def calculate_mean_pole(verbose: bool = False, mode: oct = 0o775):
permissions mode of output file
"""
# download the IERS pole coordinates file from remote servers
- FILE = 'eopc01.1900-now.dat'
+ FILE = "eopc01.1900-now.dat"
try:
remote_buffer = pull_pole_coordinates(FILE, verbose=verbose)
except Exception as exc:
return
# read contents from input file object
- file_contents = remote_buffer.read().decode('utf8').splitlines()
+ file_contents = remote_buffer.read().decode("utf8").splitlines()
header = file_contents[0][1:].split()
nlines = len(file_contents) - 1
- data = {h:np.zeros((nlines)) for h in header}
+ data = {h: np.zeros((nlines)) for h in header}
# extract data for all lines
for i, line in enumerate(file_contents[1:]):
line_contents = line.split()
@@ -142,19 +137,20 @@ def calculate_mean_pole(verbose: bool = False, mode: oct = 0o775):
ym = np.zeros((nlines))
# output file with mean pole coordinates
LOCAL = _mean_pole_file
- fid = LOCAL.open(mode='w', encoding='utf8')
+ fid = LOCAL.open(mode="w", encoding="utf8")
logging.info(str(LOCAL))
- for i, T in enumerate(data['an']):
+ for i, T in enumerate(data["an"]):
# mean pole is a Gaussian-weighted average over all dates with a = 3.40 years.
- Wi = np.exp(-0.5*((data['an'] - T)/3.4)**2)
- xm[i] = np.sum(Wi*data['x(")'])/np.sum(Wi)
- ym[i] = np.sum(Wi*data['y(")'])/np.sum(Wi)
- print(f'{T:6.2f} {xm[i]:11.7f} {ym[i]:11.7f}', file=fid)
+ Wi = np.exp(-0.5 * ((data["an"] - T) / 3.4) ** 2)
+ xm[i] = np.sum(Wi * data['x(")']) / np.sum(Wi)
+ ym[i] = np.sum(Wi * data['y(")']) / np.sum(Wi)
+ print(f"{T:6.2f} {xm[i]:11.7f} {ym[i]:11.7f}", file=fid)
# close the output file
fid.close()
# change the permissions mode of the output mean pole file
LOCAL.chmod(mode)
+
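
The smoothing in calculate_mean_pole is a Gaussian-weighted running mean; a standalone vectorized sketch of the same operation (function and argument names are illustrative):

import numpy as np

def gaussian_mean(t, x, a=3.4):
    # weight every sample by a Gaussian of width a (years) centered on
    # each epoch t[i], matching the a = 3.40 yr kernel above, and normalize
    W = np.exp(-0.5 * ((t[None, :] - t[:, None]) / a) ** 2)
    return W @ x / W.sum(axis=1)
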
# PURPOSE: connects to servers and downloads IERS pole coordinates files
def pull_pole_coordinates(FILE: str, verbose: bool = False):
"""
@@ -179,35 +175,28 @@ def pull_pole_coordinates(FILE: str, verbose: bool = False):
print file information about output file
"""
# try downloading from IERS ftp server
- HOST = ['ftp.iers.org', 'products', 'eop', 'long-term', 'c01', FILE]
+ HOST = ["ftp.iers.org", "products", "eop", "long-term", "c01", FILE]
try:
- buffer = timescale.utilities.from_ftp(HOST,
- verbose=verbose,
- timeout=20
- )
+ buffer = timescale.utilities.from_ftp(HOST, verbose=verbose, timeout=20)
except Exception as exc:
pass
else:
return buffer
# try downloading from Paris Observatory IERS Centers ftp servers
- HOST = ['hpiers.obspm.fr', 'iers', 'eop', 'eopc01', FILE]
+ HOST = ["hpiers.obspm.fr", "iers", "eop", "eopc01", FILE]
try:
- buffer = timescale.utilities.from_ftp(HOST,
- verbose=verbose,
- timeout=20
- )
+ buffer = timescale.utilities.from_ftp(HOST, verbose=verbose, timeout=20)
except Exception as exc:
pass
else:
return buffer
# try downloading from Paris Observatory IERS Centers https servers
- HOST = ['http://hpiers.obspm.fr', 'eoppc', 'eop', 'eopc01', FILE]
+ HOST = ["http://hpiers.obspm.fr", "eoppc", "eop", "eopc01", FILE]
try:
- buffer = timescale.utilities.from_http(HOST,
- verbose=verbose,
- timeout=20
+ buffer = timescale.utilities.from_http(
+ HOST, verbose=verbose, timeout=20
)
except Exception as exc:
pass
@@ -215,16 +204,17 @@ def pull_pole_coordinates(FILE: str, verbose: bool = False):
return buffer
# raise exception
- raise RuntimeError(f'Unable to download {FILE}')
+ raise RuntimeError(f"Unable to download {FILE}")
+
# PURPOSE: connects to servers and downloads finals files
def update_finals_file(
- username: str | None = None,
- password: str | None = None,
- timeout: int | None = 20,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ username: str | None = None,
+ password: str | None = None,
+ timeout: int | None = 20,
+ verbose: bool = False,
+ mode: int = 0o775,
+):
"""
Connects to servers and downloads finals EOP files
@@ -253,14 +243,15 @@ def update_finals_file(
HASH = timescale.utilities.get_hash(LOCAL)
# try downloading from US Naval Oceanography Portal
- HOST = ['http://maia.usno.navy.mil', 'ser7', 'finals.all']
+ HOST = ["http://maia.usno.navy.mil", "ser7", "finals.all"]
try:
- timescale.utilities.from_http(HOST,
+ timescale.utilities.from_http(
+ HOST,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode
+ mode=mode,
)
except Exception as exc:
pass
@@ -271,16 +262,17 @@ def update_finals_file(
# note: anonymous ftp access will be discontinued on 2020-10-31
# will require using the following https Earthdata server after that date
server = []
- server.append(['cddis.nasa.gov', 'pub', 'products', 'iers', 'finals.all'])
- server.append(['cddis.gsfc.nasa.gov', 'products', 'iers', 'finals.all'])
+ server.append(["cddis.nasa.gov", "pub", "products", "iers", "finals.all"])
+ server.append(["cddis.gsfc.nasa.gov", "products", "iers", "finals.all"])
for HOST in server:
try:
- timescale.utilities.from_ftp(HOST,
+ timescale.utilities.from_ftp(
+ HOST,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode
+ mode=mode,
)
except Exception as exc:
pass
@@ -289,30 +281,36 @@ def update_finals_file(
# try downloading from NASA Crustal Dynamics Data Information System
# using NASA Earthdata credentials stored in netrc file
- HOST = ['https://cddis.nasa.gov', 'archive', 'products', 'iers', 'finals.all']
+ HOST = [
+ "https://cddis.nasa.gov",
+ "archive",
+ "products",
+ "iers",
+ "finals.all",
+ ]
try:
- timescale.utilities.from_cddis(HOST,
+ timescale.utilities.from_cddis(
+ HOST,
username=username,
password=password,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode
+ mode=mode,
)
except Exception as exc:
pass
else:
return
+
# IERS mean or secular pole conventions
-_conventions = ('2003', '2010', '2015', 'Desai', '2018')
+_conventions = ("2003", "2010", "2015", "Desai", "2018")
+
+
# read table of mean pole values, calculate angular coordinates at epoch
-def iers_mean_pole(
- input_epoch: np.ndarray,
- convention: str = '2018',
- **kwargs
- ):
+def iers_mean_pole(input_epoch: np.ndarray, convention: str = "2018", **kwargs):
"""
Calculates the angular coordinates of the IERS Conventional Mean Pole (CMP)
or IERS Secular Pole (2018 convention) :cite:p:`Petit:2010tp,Desai:2015jr`
@@ -345,64 +343,85 @@ def iers_mean_pole(
epoch is valid for version and version number is valid
"""
# set default keyword arguments
- kwargs.setdefault('file', _mean_pole_file)
- kwargs.setdefault('fill_value', np.nan)
+ kwargs.setdefault("file", _mean_pole_file)
+ kwargs.setdefault("fill_value", np.nan)
# verify IERS model version
assert convention in _conventions, "Incorrect IERS model convention"
# read the conventional mean pole file
- if (convention == '2015'):
+ if convention == "2015":
# read mean pole file
- input_file = pathlib.Path(kwargs['file']).expanduser().absolute()
+ input_file = pathlib.Path(kwargs["file"]).expanduser().absolute()
table = np.loadtxt(input_file)
# Reduce table following 2015 conventions:
# 1. trim dates prior to 1971
# 2. only keep rows falling on exact years
- ii, = np.nonzero((table[:, 0] >= 1971) & ((table[:, 0] % 1) == 0.0))
- table = np.copy(table[ii,:])
+ (ii,) = np.nonzero((table[:, 0] >= 1971) & ((table[:, 0] % 1) == 0.0))
+ table = np.copy(table[ii, :])
# allocate for output arrays
- x = np.full_like(input_epoch, kwargs['fill_value'])
- y = np.full_like(input_epoch, kwargs['fill_value'])
+ x = np.full_like(input_epoch, kwargs["fill_value"])
+ y = np.full_like(input_epoch, kwargs["fill_value"])
flag = np.zeros_like(input_epoch, dtype=bool)
for t, epoch in enumerate(input_epoch):
# Conventional mean pole model in IERS Conventions 2003
- if (convention == '2003') and (epoch >= 1975):
- x[t] = 0.054 + 0.00083*(epoch - 2000.0)
- y[t] = 0.357 + 0.00395*(epoch - 2000.0)
+ if (convention == "2003") and (epoch >= 1975):
+ x[t] = 0.054 + 0.00083 * (epoch - 2000.0)
+ y[t] = 0.357 + 0.00395 * (epoch - 2000.0)
flag[t] = True
# Conventional mean pole model in IERS Conventions 2010
- elif (convention == '2010') and (epoch >= 1975):
+ elif (convention == "2010") and (epoch >= 1975):
dx = epoch - 2000.0
- if (dx < 10.0):
- x[t] = 0.055974 + 1.8243e-3*dx + 1.8413e-4*dx**2 + 7.024e-6*dx**3
- y[t] = 0.346346 + 1.7896e-3*dx - 1.0729e-4*dx**2 - 0.908e-6*dx**3
+ if dx < 10.0:
+ x[t] = (
+ 0.055974
+ + 1.8243e-3 * dx
+ + 1.8413e-4 * dx**2
+ + 7.024e-6 * dx**3
+ )
+ y[t] = (
+ 0.346346
+ + 1.7896e-3 * dx
+ - 1.0729e-4 * dx**2
+ - 0.908e-6 * dx**3
+ )
else:
- x[t] = 0.023513 + 0.0076141*dx
- y[t] = 0.358891 - 0.0006287*dx
+ x[t] = 0.023513 + 0.0076141 * dx
+ y[t] = 0.358891 - 0.0006287 * dx
flag[t] = True
# Conventional mean pole model in IERS Conventions 2015
# epoch must be within the dates in the mean pole file
- elif (convention == '2015') and (epoch >= 1975):
+ elif (convention == "2015") and (epoch >= 1975):
# interpolate using times in table
- x[t] = np.interp(epoch, table[:,0], table[:,1],
- left=kwargs['fill_value'], right=kwargs['fill_value'])
- y[t] = np.interp(epoch, table[:,0], table[:,2],
- left=kwargs['fill_value'], right=kwargs['fill_value'])
- flag[t] = (x[t] != kwargs['fill_value'])
+ x[t] = np.interp(
+ epoch,
+ table[:, 0],
+ table[:, 1],
+ left=kwargs["fill_value"],
+ right=kwargs["fill_value"],
+ )
+ y[t] = np.interp(
+ epoch,
+ table[:, 0],
+ table[:, 2],
+ left=kwargs["fill_value"],
+ right=kwargs["fill_value"],
+ )
+ flag[t] = x[t] != kwargs["fill_value"]
# Secular pole model in Desai et al. (2015)
- elif (convention == 'Desai'):
+ elif convention == "Desai":
# calculate secular pole using equation 10a/b of Desai (2015)
- x[t] = 0.05097 + 0.00062*(epoch - 2000.0)
- y[t] = 0.33449 + 0.00348*(epoch - 2000.0)
+ x[t] = 0.05097 + 0.00062 * (epoch - 2000.0)
+ y[t] = 0.33449 + 0.00348 * (epoch - 2000.0)
flag[t] = True
# Secular pole model in IERS Conventions 2018
- elif (convention == '2018'):
+ elif convention == "2018":
# calculate secular pole
- x[t] = 0.055 + 0.001677*(epoch - 2000.0)
- y[t] = 0.3205 + 0.00346*(epoch - 2000.0)
+ x[t] = 0.055 + 0.001677 * (epoch - 2000.0)
+ y[t] = 0.3205 + 0.00346 * (epoch - 2000.0)
flag[t] = True
# return mean/secular pole values
return (x, y, flag)
+
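
The default 2018 secular-pole model is linear in time, so a single epoch is easy to verify by hand; at epoch 2020.0:

# IERS 2018 secular pole at epoch 2020.0, using the coefficients above
epoch = 2020.0
x = 0.055 + 0.001677 * (epoch - 2000.0)   # 0.08854 arcsec
y = 0.3205 + 0.00346 * (epoch - 2000.0)   # 0.3897 arcsec
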
# PURPOSE: read daily earth orientation parameters (EOP) file from IERS
def iers_daily_EOP(input_file: str | pathlib.Path = _finals_file):
"""
@@ -429,40 +448,42 @@ def iers_daily_EOP(input_file: str | pathlib.Path = _finals_file):
if not input_file.exists():
raise FileNotFoundError(input_file)
# read data file splitting at line breaks
- with input_file.open(mode='r', encoding='utf8') as f:
+ with input_file.open(mode="r", encoding="utf8") as f:
file_contents = f.read().splitlines()
# number of data lines
n_lines = len(file_contents)
dinput = {}
- dinput['MJD'] = np.zeros((n_lines))
- dinput['x'] = np.zeros((n_lines))
- dinput['y'] = np.zeros((n_lines))
+ dinput["MJD"] = np.zeros((n_lines))
+ dinput["x"] = np.zeros((n_lines))
+ dinput["y"] = np.zeros((n_lines))
# for each line in the file
- flag = 'I'
+ flag = "I"
counter = 0
- while (flag == 'I'):
+ while flag == "I":
line = file_contents[counter]
- i = 2+2+2+1; j = i+8
- dinput['MJD'][counter] = np.float64(line[i:j])
- i = j+1
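+ # finals.all is fixed width: the 2-digit year, month and day plus a
+ # separator fill columns 0-6, so the MJD field starts at column 7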
+ i = 2 + 2 + 2 + 1
+ j = i + 8
+ dinput["MJD"][counter] = np.float64(line[i:j])
+ i = j + 1
flag = line[i]
- i += 2; j = i+9
- dinput['x'][counter] = np.float64(line[i:j])
- i = j+10; j = i+9
- dinput['y'][counter] = np.float64(line[i:j])
+ i += 2
+ j = i + 9
+ dinput["x"][counter] = np.float64(line[i:j])
+ i = j + 10
+ j = i + 9
+ dinput["y"][counter] = np.float64(line[i:j])
counter += 1
# reduce to data values
- dinput['MJD'] = dinput['MJD'][:counter]
- dinput['x'] = dinput['x'][:counter]
- dinput['y'] = dinput['y'][:counter]
+ dinput["MJD"] = dinput["MJD"][:counter]
+ dinput["x"] = dinput["x"][:counter]
+ dinput["y"] = dinput["y"][:counter]
# return the date, flag and polar motion values
return dinput
+
def iers_polar_motion(
- MJD: float | np.ndarray,
- file: str | pathlib.Path = _finals_file,
- **kwargs
- ):
+ MJD: float | np.ndarray, file: str | pathlib.Path = _finals_file, **kwargs
+):
"""
Interpolates daily Earth Orientation Parameters (EOP) from the IERS file
:cite:p:`Petit:2010tp`
@@ -486,13 +507,13 @@ def iers_polar_motion(
Angular coordinate y [arcsec]
"""
# set default parameters
- kwargs.setdefault('k', 3)
- kwargs.setdefault('s', 0)
+ kwargs.setdefault("k", 3)
+ kwargs.setdefault("s", 0)
# read IERS daily polar motion values
EOP = timescale.eop.iers_daily_EOP(file)
# interpolate daily polar motion values to MJD using cubic splines
- xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['x'], **kwargs)
- ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'], EOP['y'], **kwargs)
+ xSPL = scipy.interpolate.UnivariateSpline(EOP["MJD"], EOP["x"], **kwargs)
+ ySPL = scipy.interpolate.UnivariateSpline(EOP["MJD"], EOP["y"], **kwargs)
px = xSPL(MJD)
py = ySPL(MJD)
return (px, py)
diff --git a/timescale/time.py b/timescale/time.py
index e080a8c..b9bd904 100755
--- a/timescale/time.py
+++ b/timescale/time.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-u"""
+"""
time.py
Written by Tyler Sutterley (12/2025)
Utilities for calculating time operations
@@ -66,6 +66,7 @@
Updated 08/2020: added NASA Earthdata routines for downloading from CDDIS
Written 07/2020
"""
+
from __future__ import annotations
import re
@@ -81,34 +82,57 @@
import timescale.utilities
# conversion factors between time units and seconds
-_to_sec = {'nanoseconds': 1e-9, 'nanosecond': 1e-9,
- 'nanosec': 1e-9, 'nanosecs': 1e-9,
- 'nsec': 1e-9, 'nsecs': 1e-9, 'ns': 1e-9,
- 'microseconds': 1e-6, 'microsecond': 1e-6,
- 'microsec': 1e-6, 'microsecs': 1e-6, 'us': 1e-6,
- 'milliseconds': 1e-3, 'millisecond': 1e-3,
- 'millisec': 1e-3, 'millisecs': 1e-3,
- 'msec': 1e-3, 'msecs': 1e-3, 'ms': 1e-3,
- 'seconds': 1.0, 'second': 1.0, 'sec': 1.0,
- 'secs': 1.0, 's': 1.0,
- 'minutes': 60.0, 'minute': 60.0,
- 'min': 60.0, 'mins': 60.0,
- 'hours': 3600.0, 'hour': 3600.0,
- 'hr': 3600.0, 'hrs': 3600.0, 'h': 3600.0,
- 'day': 86400.0, 'days': 86400.0, 'd': 86400.0,
- 'D': 86400.0}
+_to_sec = {
+ "nanoseconds": 1e-9,
+ "nanosecond": 1e-9,
+ "nanosec": 1e-9,
+ "nanosecs": 1e-9,
+ "nsec": 1e-9,
+ "nsecs": 1e-9,
+ "ns": 1e-9,
+ "microseconds": 1e-6,
+ "microsecond": 1e-6,
+ "microsec": 1e-6,
+ "microsecs": 1e-6,
+ "us": 1e-6,
+ "milliseconds": 1e-3,
+ "millisecond": 1e-3,
+ "millisec": 1e-3,
+ "millisecs": 1e-3,
+ "msec": 1e-3,
+ "msecs": 1e-3,
+ "ms": 1e-3,
+ "seconds": 1.0,
+ "second": 1.0,
+ "sec": 1.0,
+ "secs": 1.0,
+ "s": 1.0,
+ "minutes": 60.0,
+ "minute": 60.0,
+ "min": 60.0,
+ "mins": 60.0,
+ "hours": 3600.0,
+ "hour": 3600.0,
+ "hr": 3600.0,
+ "hrs": 3600.0,
+ "h": 3600.0,
+ "day": 86400.0,
+ "days": 86400.0,
+ "d": 86400.0,
+ "D": 86400.0,
+}
# approximate conversions for longer periods
-_to_sec['mon'] = 30.0 * 86400.0
-_to_sec['month'] = 30.0 * 86400.0
-_to_sec['months'] = 30.0 * 86400.0
-_to_sec['common_year'] = 365.0 * 86400.0
-_to_sec['common_years'] = 365.0 * 86400.0
-_to_sec['year'] = 365.25 * 86400.0
-_to_sec['years'] = 365.25 * 86400.0
-_to_sec['quarter'] = 365.25 * 86400.0 / 4.0
-_to_sec['quarters'] = 365.25 * 86400.0 / 4.0
+_to_sec["mon"] = 30.0 * 86400.0
+_to_sec["month"] = 30.0 * 86400.0
+_to_sec["months"] = 30.0 * 86400.0
+_to_sec["common_year"] = 365.0 * 86400.0
+_to_sec["common_years"] = 365.0 * 86400.0
+_to_sec["year"] = 365.25 * 86400.0
+_to_sec["years"] = 365.25 * 86400.0
+_to_sec["quarter"] = 365.25 * 86400.0 / 4.0
+_to_sec["quarters"] = 365.25 * 86400.0 / 4.0
# conversion factors from seconds to named time units
-_from_sec = {k: 1.0/v for k,v in _to_sec.items()}
+_from_sec = {k: 1.0 / v for k, v in _to_sec.items()}
# standard (common) epochs
_mjd_epoch = (1858, 11, 17, 0, 0, 0)
@@ -134,6 +158,7 @@
_mjd_atlas_sdp = 58119
_mjd_serial = _jd_serial - _jd_mjd
+
# PURPOSE: parse a date string and convert to a datetime object in UTC
def parse(date_string: str):
"""
@@ -158,6 +183,7 @@ def parse(date_string: str):
# return the datetime object
return date
+
# PURPOSE: parse a date string into epoch and units scale
def parse_date_string(date_string: str):
"""
@@ -189,10 +215,11 @@ def parse_date_string(date_string: str):
# split the date string into units and epoch
units, epoch = split_date_string(date_string)
if units not in _to_sec.keys():
- raise ValueError(f'Invalid units: {units}')
+ raise ValueError(f"Invalid units: {units}")
# return the epoch (as list) and the time unit conversion factors
return (datetime_to_list(epoch), _to_sec[units])
+
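
A quick illustration of the round trip through split_date_string and the _to_sec table:

from timescale.time import parse_date_string

# parse a CF-style units attribute into an epoch list and a scale factor
epoch, to_secs = parse_date_string("days since 1992-01-01T00:00:00")
# epoch == [1992, 1, 1, 0, 0, 0] and to_secs == 86400.0
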
# PURPOSE: split a date string into units and epoch
def split_date_string(date_string: str):
"""
@@ -204,12 +231,13 @@ def split_date_string(date_string: str):
time-units since yyyy-mm-dd hh:mm:ss
"""
try:
- units,_,epoch = date_string.split(None, 2)
+ units, _, epoch = date_string.split(None, 2)
except ValueError:
- raise ValueError(f'Invalid format: {date_string}')
+ raise ValueError(f"Invalid format: {date_string}")
else:
return (units.lower(), parse(epoch))
+
# PURPOSE: convert a datetime object into a list
def datetime_to_list(date):
"""
@@ -225,16 +253,23 @@ def datetime_to_list(date):
date: list
[year,month,day,hour,minute,second]
"""
- return [date.year, date.month, date.day,
- date.hour, date.minute, date.second]
+ return [
+ date.year,
+ date.month,
+ date.day,
+ date.hour,
+ date.minute,
+ date.second,
+ ]
+
# PURPOSE: create a range of dates
def date_range(
- start: str | np.datetime64 | datetime.datetime,
- end: str | np.datetime64 | datetime.datetime,
- step: int | float = 1,
- units: str = 'D'
- ):
+ start: str | np.datetime64 | datetime.datetime,
+ end: str | np.datetime64 | datetime.datetime,
+ step: int | float = 1,
+ units: str = "D",
+):
"""
Create a range of dates
@@ -260,17 +295,19 @@ def date_range(
"""
# convert start and end dates to datetime64
if isinstance(start, str):
- start = np.array(parse(start), dtype=f'datetime64[{units}]')
+ start = np.array(parse(start), dtype=f"datetime64[{units}]")
if isinstance(end, str):
- end = np.array(parse(end), dtype=f'datetime64[{units}]')
+ end = np.array(parse(end), dtype=f"datetime64[{units}]")
# create date range
return np.arange(start, end + step, step)
+
# days per month in a leap and a standard year
# only difference is February (29 vs. 28)
_dpm_leap = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
_dpm_stnd = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
+
def is_leap(year: int | float) -> bool:
"""
Determines if a year is a leap year
@@ -287,12 +324,13 @@ def is_leap(year: int | float) -> bool:
# Subtracting a leap year every 100 years ==> average 365.24
# Adding a leap year back every 400 years ==> average 365.2425
# Subtracting a leap year every 4000 years ==> average 365.24225
- m4 = (year % 4)
- m100 = (year % 100)
- m400 = (year % 400)
- m4000 = (year % 4000)
+ m4 = year % 4
+ m100 = year % 100
+ m400 = year % 400
+ m4000 = year % 4000
# determine if the year is a leap year using criteria
- return ((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0))
+ return (m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0)
+
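
These criteria reproduce the Gregorian rules plus the 4000-year refinement described in the comments:

from timescale.time import is_leap

# spot checks of the leap-year criteria above
assert is_leap(2024) and is_leap(2000)
assert not is_leap(1900)   # divisible by 100 but not by 400
assert not is_leap(4000)   # removed by the 4000-year correction
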
# PURPOSE: gets the number of days per month for a given year
def calendar_days(year: int | float) -> np.ndarray:
@@ -316,11 +354,12 @@ def calendar_days(year: int | float) -> np.ndarray:
else:
return np.array(_dpm_stnd, dtype=np.float64)
+
# PURPOSE: convert a numpy datetime array to delta times since an epoch
def convert_datetime(
- date: float | np.ndarray,
- epoch: str | tuple | list | np.datetime64 = _unix_epoch
- ):
+ date: float | np.ndarray,
+ epoch: str | tuple | list | np.datetime64 = _unix_epoch,
+):
"""
Convert a ``numpy`` ``datetime`` array to seconds since ``epoch``
@@ -342,15 +381,16 @@ def convert_datetime(
elif isinstance(epoch, str):
epoch = np.datetime64(parse(epoch))
# convert to delta time
- return (date - epoch) / np.timedelta64(1, 's')
+ return (date - epoch) / np.timedelta64(1, "s")
+
# PURPOSE: convert times from seconds since epoch1 to time since epoch2
def convert_delta_time(
- delta_time: np.ndarray,
- epoch1: str | tuple | list | np.datetime64 | None = None,
- epoch2: str | tuple | list | np.datetime64 | None = None,
- scale: float = 1.0
- ):
+ delta_time: np.ndarray,
+ epoch1: str | tuple | list | np.datetime64 | None = None,
+ epoch2: str | tuple | list | np.datetime64 | None = None,
+ scale: float = 1.0,
+):
"""
Convert delta time from seconds since ``epoch1`` to time since ``epoch2``
@@ -375,22 +415,23 @@ def convert_delta_time(
elif isinstance(epoch2, str):
epoch2 = np.datetime64(parse(epoch2))
# calculate the total difference in time in seconds
- delta_time_epochs = (epoch2 - epoch1) / np.timedelta64(1, 's')
+ delta_time_epochs = (epoch2 - epoch1) / np.timedelta64(1, "s")
# subtract difference in time and rescale to output units
- return scale*(delta_time - delta_time_epochs)
+ return scale * (delta_time - delta_time_epochs)
+
# PURPOSE: calculate the delta time from calendar date
# http://scienceworld.wolfram.com/astronomy/JulianDate.html
def convert_calendar_dates(
- year: np.ndarray,
- month: np.ndarray,
- day: np.ndarray,
- hour: np.ndarray | float = 0.0,
- minute: np.ndarray | float = 0.0,
- second: np.ndarray | float = 0.0,
- epoch: tuple | list | np.datetime64 = _tide_epoch,
- scale: float = 1.0
- ) -> np.ndarray:
+ year: np.ndarray,
+ month: np.ndarray,
+ day: np.ndarray,
+ hour: np.ndarray | float = 0.0,
+ minute: np.ndarray | float = 0.0,
+ second: np.ndarray | float = 0.0,
+ epoch: tuple | list | np.datetime64 = _tide_epoch,
+ scale: float = 1.0,
+) -> np.ndarray:
"""
Calculate the time in units since ``epoch`` from calendar dates
@@ -427,10 +468,20 @@ def convert_calendar_dates(
second = np.array(second, dtype=np.float64)
# calculate date in Modified Julian Days (MJD) from calendar date
# MJD: days since November 17, 1858 (1858-11-17T00:00:00)
- MJD = 367.0*year - np.floor(7.0*(year + np.floor((month+9.0)/12.0))/4.0) - \
- np.floor(3.0*(np.floor((year + (month - 9.0)/7.0)/100.0) + 1.0)/4.0) + \
- np.floor(275.0*month/9.0) + day + hour/24.0 + minute/1440.0 + \
- second/86400.0 + 1721028.5 - _jd_mjd
+ MJD = (
+ 367.0 * year
+ - np.floor(7.0 * (year + np.floor((month + 9.0) / 12.0)) / 4.0)
+ - np.floor(
+ 3.0 * (np.floor((year + (month - 9.0) / 7.0) / 100.0) + 1.0) / 4.0
+ )
+ + np.floor(275.0 * month / 9.0)
+ + day
+ + hour / 24.0
+ + minute / 1440.0
+ + second / 86400.0
+ + 1721028.5
+ - _jd_mjd
+ )
# convert epochs to datetime variables
epoch1 = np.datetime64(datetime.datetime(*_mjd_epoch))
if isinstance(epoch, (tuple, list)):
@@ -438,20 +489,21 @@ def convert_calendar_dates(
elif isinstance(epoch, str):
epoch = np.datetime64(parse(epoch))
# calculate the total difference in time in days
- delta_time_epochs = (epoch - epoch1) / np.timedelta64(1, 'D')
+ delta_time_epochs = (epoch - epoch1) / np.timedelta64(1, "D")
# return the date in units (default days) since epoch
- return scale*np.array(MJD - delta_time_epochs, dtype=np.float64)
+ return scale * np.array(MJD - delta_time_epochs, dtype=np.float64)
+
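
As a spot check on the calendar-to-MJD expression, the tide epoch evaluates to its well-known day number:

import timescale.time

# 1992-01-01T00:00:00 relative to the MJD epoch (1858-11-17T00:00:00)
days = timescale.time.convert_calendar_dates(
    1992, 1, 1, epoch=(1858, 11, 17, 0, 0, 0)
)
# days == 48622.0, the Modified Julian Day of 1992-01-01
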
# PURPOSE: Converts from calendar dates into decimal years
def convert_calendar_decimal(
- year: np.ndarray,
- month: np.ndarray,
- day: np.ndarray,
- hour: np.ndarray | float | None = None,
- minute: np.ndarray | float | None = None,
- second: np.ndarray | float | None = None,
- DofY: np.ndarray | float | None = None,
- ) -> np.ndarray:
+ year: np.ndarray,
+ month: np.ndarray,
+ day: np.ndarray,
+ hour: np.ndarray | float | None = None,
+ minute: np.ndarray | float | None = None,
+ second: np.ndarray | float | None = None,
+ DofY: np.ndarray | float | None = None,
+) -> np.ndarray:
"""
Converts from calendar date into decimal years taking into
account leap years :cite:p:`Dershowitz:2007cc`
@@ -484,18 +536,18 @@ def convert_calendar_decimal(
# create arrays for calendar date variables
cal_date = {}
- cal_date['year'] = np.zeros((n_dates))
- cal_date['month'] = np.zeros((n_dates))
- cal_date['day'] = np.zeros((n_dates))
- cal_date['hour'] = np.zeros((n_dates))
- cal_date['minute'] = np.zeros((n_dates))
- cal_date['second'] = np.zeros((n_dates))
+ cal_date["year"] = np.zeros((n_dates))
+ cal_date["month"] = np.zeros((n_dates))
+ cal_date["day"] = np.zeros((n_dates))
+ cal_date["hour"] = np.zeros((n_dates))
+ cal_date["minute"] = np.zeros((n_dates))
+ cal_date["second"] = np.zeros((n_dates))
# day of the year
- cal_date['DofY'] = np.zeros((n_dates))
+ cal_date["DofY"] = np.zeros((n_dates))
# remove singleton dimensions and use year and month
- cal_date['year'][:] = np.squeeze(year)
- cal_date['month'][:] = np.squeeze(month)
+ cal_date["year"][:] = np.squeeze(year)
+ cal_date["month"][:] = np.squeeze(month)
# create output date variable
t_date = np.zeros((n_dates))
@@ -512,34 +564,34 @@ def convert_calendar_decimal(
# Subtracting a leap year every 100 years ==> average 365.24
# Adding a leap year back every 400 years ==> average 365.2425
# Subtracting a leap year every 4000 years ==> average 365.24225
- m4 = (cal_date['year'] % 4)
- m100 = (cal_date['year'] % 100)
- m400 = (cal_date['year'] % 400)
- m4000 = (cal_date['year'] % 4000)
+ m4 = cal_date["year"] % 4
+ m100 = cal_date["year"] % 100
+ m400 = cal_date["year"] % 400
+ m4000 = cal_date["year"] % 4000
# find indices for standard years and leap years using criteria
- leap, = np.nonzero((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0))
- stnd, = np.nonzero((m4 != 0) | (m100 == 0) & (m400 != 0) | (m4000 == 0))
+ (leap,) = np.nonzero((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0))
+ (stnd,) = np.nonzero((m4 != 0) | (m100 == 0) & (m400 != 0) | (m4000 == 0))
# calculate the day of the year
if DofY is not None:
# if entered directly as an input
# remove 1 so day 1 (Jan 1st) = 0.0 in decimal format
- cal_date['DofY'][:] = np.squeeze(DofY)-1
+ cal_date["DofY"][:] = np.squeeze(DofY) - 1
else:
# use calendar month and day of the month to calculate day of the year
# month minus 1: January = 0, February = 1, etc. (index of month)
# in decimal form: January = 0.0
- month_m1 = np.array(cal_date['month'],dtype=int) - 1
+ month_m1 = np.array(cal_date["month"], dtype=int) - 1
# day of month
if day is not None:
# remove 1 so 1st day of month = 0.0 in decimal format
- cal_date['day'][:] = np.squeeze(day)-1.0
+ cal_date["day"][:] = np.squeeze(day) - 1.0
else:
# if not entering days as an input
# will use the mid-month value
- cal_date['day'][leap] = dpm_leap[month_m1[leap]]/2.0
- cal_date['day'][stnd] = dpm_stnd[month_m1[stnd]]/2.0
+ cal_date["day"][leap] = dpm_leap[month_m1[leap]] / 2.0
+ cal_date["day"][stnd] = dpm_stnd[month_m1[stnd]] / 2.0
# create matrix with the lower half = 1
# this matrix will be used in a matrix multiplication
@@ -547,7 +599,7 @@ def convert_calendar_decimal(
# the -1 will make the diagonal == 0
# i.e. first row == all zeros and the
# last row == ones for all but the last element
- mon_mat = np.tri(12,12,-1)
+ mon_mat = np.tri(12, 12, -1)
# using a dot product to calculate total number of days
# for the months before the input date
# basically is sum(i*dpm)
@@ -559,39 +611,46 @@ def convert_calendar_decimal(
# calculate the day of the year for leap and standard
# use total days of all months before date
# and add number of days before date in month
- cal_date['DofY'][stnd] = cal_date['day'][stnd] + \
- np.dot(mon_mat[month_m1[stnd],:],dpm_stnd)
- cal_date['DofY'][leap] = cal_date['day'][leap] + \
- np.dot(mon_mat[month_m1[leap],:],dpm_leap)
+ cal_date["DofY"][stnd] = cal_date["day"][stnd] + np.dot(
+ mon_mat[month_m1[stnd], :], dpm_stnd
+ )
+ cal_date["DofY"][leap] = cal_date["day"][leap] + np.dot(
+ mon_mat[month_m1[leap], :], dpm_leap
+ )
# hour of day (else is zero)
if hour is not None:
- cal_date['hour'][:] = np.squeeze(hour)
+ cal_date["hour"][:] = np.squeeze(hour)
# minute of hour (else is zero)
if minute is not None:
- cal_date['minute'][:] = np.squeeze(minute)
+ cal_date["minute"][:] = np.squeeze(minute)
# second in minute (else is zero)
if second is not None:
- cal_date['second'][:] = np.squeeze(second)
+ cal_date["second"][:] = np.squeeze(second)
# calculate decimal date
# convert hours, minutes and seconds into days
# convert calculated fractional days into decimal fractions of the year
# Leap years
- t_date[leap] = cal_date['year'][leap] + \
- (cal_date['DofY'][leap] + cal_date['hour'][leap]/24. + \
- cal_date['minute'][leap]/1440. + \
- cal_date['second'][leap]/86400.)/np.sum(dpm_leap)
+ t_date[leap] = cal_date["year"][leap] + (
+ cal_date["DofY"][leap]
+ + cal_date["hour"][leap] / 24.0
+ + cal_date["minute"][leap] / 1440.0
+ + cal_date["second"][leap] / 86400.0
+ ) / np.sum(dpm_leap)
# Standard years
- t_date[stnd] = cal_date['year'][stnd] + \
- (cal_date['DofY'][stnd] + cal_date['hour'][stnd]/24. + \
- cal_date['minute'][stnd]/1440. + \
- cal_date['second'][stnd]/86400.)/np.sum(dpm_stnd)
+ t_date[stnd] = cal_date["year"][stnd] + (
+ cal_date["DofY"][stnd]
+ + cal_date["hour"][stnd] / 24.0
+ + cal_date["minute"][stnd] / 1440.0
+ + cal_date["second"][stnd] / 86400.0
+ ) / np.sum(dpm_stnd)
return t_date
+
# PURPOSE: Converts from Julian day to calendar date and time
def convert_julian(JD: np.ndarray, **kwargs):
"""
@@ -627,19 +686,22 @@ def convert_julian(JD: np.ndarray, **kwargs):
second of the minute
"""
# set default keyword arguments
- kwargs.setdefault('astype', None)
- kwargs.setdefault('format', 'dict')
+ kwargs.setdefault("astype", None)
+ kwargs.setdefault("format", "dict")
# raise warnings for deprecated keyword arguments
- deprecated_keywords = dict(ASTYPE='astype', FORMAT='format')
- for old,new in deprecated_keywords.items():
+ deprecated_keywords = dict(ASTYPE="astype", FORMAT="format")
+ for old, new in deprecated_keywords.items():
if old in kwargs.keys():
- warnings.warn(f"""Deprecated keyword argument {old}.
- Changed to '{new}'""", DeprecationWarning)
+ warnings.warn(
+ f"""Deprecated keyword argument {old}.
+ Changed to '{new}'""",
+ DeprecationWarning,
+ )
# set renamed argument to not break workflows
kwargs[new] = copy.copy(kwargs[old])
# convert to array if only a single value was imported
- if (np.ndim(JD) == 0):
+ if np.ndim(JD) == 0:
JD = np.atleast_1d(JD)
single_value = True
else:
@@ -650,33 +712,33 @@ def convert_julian(JD: np.ndarray, **kwargs):
C = np.zeros_like(JD)
# calculate C for dates before and after the switch to Gregorian
IGREG = 2299161.0
- ind1, = np.nonzero(JDO < IGREG)
+ (ind1,) = np.nonzero(JDO < IGREG)
C[ind1] = JDO[ind1] + 1524.0
- ind2, = np.nonzero(JDO >= IGREG)
- B = np.floor((JDO[ind2] - 1867216.25)/36524.25)
- C[ind2] = JDO[ind2] + B - np.floor(B/4.0) + 1525.0
+ (ind2,) = np.nonzero(JDO >= IGREG)
+ B = np.floor((JDO[ind2] - 1867216.25) / 36524.25)
+ C[ind2] = JDO[ind2] + B - np.floor(B / 4.0) + 1525.0
# calculate coefficients for date conversion
- D = np.floor((C - 122.1)/365.25)
- E = np.floor((365.0 * D) + np.floor(D/4.0))
- F = np.floor((C - E)/30.6001)
+ D = np.floor((C - 122.1) / 365.25)
+ E = np.floor((365.0 * D) + np.floor(D / 4.0))
+ F = np.floor((C - E) / 30.6001)
# calculate day, month, year and hour
- day = np.floor(C - E + 0.5) - np.floor(30.6001*F)
- month = F - 1.0 - 12.0*np.floor(F/14.0)
- year = D - 4715.0 - np.floor((7.0 + month)/10.0)
- hour = np.floor(24.0*(JD + 0.5 - JDO))
+ day = np.floor(C - E + 0.5) - np.floor(30.6001 * F)
+ month = F - 1.0 - 12.0 * np.floor(F / 14.0)
+ year = D - 4715.0 - np.floor((7.0 + month) / 10.0)
+ hour = np.floor(24.0 * (JD + 0.5 - JDO))
# calculate minute and second
- G = (JD + 0.5 - JDO) - hour/24.0
- minute = np.floor(G*1440.0)
- second = (G - minute/1440.0) * 86400.0
+ G = (JD + 0.5 - JDO) - hour / 24.0
+ minute = np.floor(G * 1440.0)
+ second = (G - minute / 1440.0) * 86400.0
# convert all variables to output type (from float)
- if kwargs['astype'] is not None:
- year = year.astype(kwargs['astype'])
- month = month.astype(kwargs['astype'])
- day = day.astype(kwargs['astype'])
- hour = hour.astype(kwargs['astype'])
- minute = minute.astype(kwargs['astype'])
- second = second.astype(kwargs['astype'])
+ if kwargs["astype"] is not None:
+ year = year.astype(kwargs["astype"])
+ month = month.astype(kwargs["astype"])
+ day = day.astype(kwargs["astype"])
+ hour = hour.astype(kwargs["astype"])
+ minute = minute.astype(kwargs["astype"])
+ second = second.astype(kwargs["astype"])
# if only a single value was imported initially: remove singleton dims
if single_value:
@@ -688,16 +750,24 @@ def convert_julian(JD: np.ndarray, **kwargs):
second = second.item(0)
# return date variables in output format
- if (kwargs['format'] == 'dict'):
- return dict(year=year, month=month, day=day,
- hour=hour, minute=minute, second=second)
- elif (kwargs['format'] == 'tuple'):
+ if kwargs["format"] == "dict":
+ return dict(
+ year=year,
+ month=month,
+ day=day,
+ hour=hour,
+ minute=minute,
+ second=second,
+ )
+ elif kwargs["format"] == "tuple":
return (year, month, day, hour, minute, second)
- elif (kwargs['format'] == 'zip'):
+ elif kwargs["format"] == "zip":
return zip(year, month, day, hour, minute, second)
+
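
Inverting the J2000.0 epoch makes a convenient spot check of the conversion:

import timescale.time

# JD 2451545.0 is 2000-01-01T12:00:00, the J2000.0 epoch
cal = timescale.time.convert_julian(2451545.0, astype=int, format="tuple")
# cal == (2000, 1, 1, 12, 0, 0)
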
# delta time (TT - UT1) file
-_delta_file = timescale.utilities.get_data_path(['data', 'merged_deltat.data'])
+_delta_file = timescale.utilities.get_data_path(["data", "merged_deltat.data"])
+
class Timescale:
"""
@@ -710,6 +780,7 @@ class Timescale:
MJD: np.ndarray
Modified Julian Days
"""
+
# Julian century
century = 36525.0
# seconds per day
@@ -717,9 +788,9 @@ class Timescale:
# 360 degrees
turn = 1.0
turndeg = 360.0
- tau = 2.0*np.pi
+ tau = 2.0 * np.pi
# degrees to radians
- deg2rad = np.pi/180.0
+ deg2rad = np.pi / 180.0
# degrees to arcseconds
deg2asec = 3600.0
@@ -732,11 +803,12 @@ def __init__(self, MJD=None, leaps=None):
self.__index__ = 0
@classmethod
- def from_deltatime(cls,
- delta_time: np.ndarray,
- epoch: str | tuple | list | np.ndarray,
- standard: str = 'UTC'
- ):
+ def from_deltatime(
+ cls,
+ delta_time: np.ndarray,
+ epoch: str | tuple | list | np.ndarray,
+ standard: str = "UTC",
+ ):
"""
Converts a delta time array into a ``Timescale`` object
@@ -752,51 +824,65 @@ def from_deltatime(cls,
# assert delta time is an array
delta_time = np.atleast_1d(delta_time)
# calculate leap seconds if specified
- if (standard.upper() == 'GPS'):
- GPS_Epoch_Time = convert_delta_time(0, epoch1=epoch,
- epoch2= _gps_epoch, scale=1.0)
- GPS_Time = convert_delta_time(delta_time, epoch1=epoch,
- epoch2=_gps_epoch, scale=1.0)
+ if standard.upper() == "GPS":
+ GPS_Epoch_Time = convert_delta_time(
+ 0, epoch1=epoch, epoch2=_gps_epoch, scale=1.0
+ )
+ GPS_Time = convert_delta_time(
+ delta_time, epoch1=epoch, epoch2=_gps_epoch, scale=1.0
+ )
# calculate difference in leap seconds from start of epoch
- leaps = count_leap_seconds(GPS_Time) - \
- count_leap_seconds(np.atleast_1d(GPS_Epoch_Time))
- elif (standard.upper() == 'LORAN'):
+ leaps = count_leap_seconds(GPS_Time) - count_leap_seconds(
+ np.atleast_1d(GPS_Epoch_Time)
+ )
+ elif standard.upper() == "LORAN":
# LORAN time is ahead of GPS time by 9 seconds
- GPS_Epoch_Time = convert_delta_time(-9.0, epoch1=epoch,
- epoch2=_gps_epoch, scale=1.0)
- GPS_Time = convert_delta_time(delta_time - 9.0, epoch1=epoch,
- epoch2=_gps_epoch, scale=1.0)
+ GPS_Epoch_Time = convert_delta_time(
+ -9.0, epoch1=epoch, epoch2=_gps_epoch, scale=1.0
+ )
+ GPS_Time = convert_delta_time(
+ delta_time - 9.0, epoch1=epoch, epoch2=_gps_epoch, scale=1.0
+ )
# calculate difference in leap seconds from start of epoch
- leaps = count_leap_seconds(GPS_Time) - \
- count_leap_seconds(np.atleast_1d(GPS_Epoch_Time))
- elif (standard.upper() == 'TAI'):
+ leaps = count_leap_seconds(GPS_Time) - count_leap_seconds(
+ np.atleast_1d(GPS_Epoch_Time)
+ )
+ elif standard.upper() == "TAI":
# TAI time is ahead of GPS time by 19 seconds
- GPS_Epoch_Time = convert_delta_time(-19.0, epoch1=epoch,
- epoch2=_gps_epoch, scale=1.0)
- GPS_Time = convert_delta_time(delta_time-19.0, epoch1=epoch,
- epoch2=_gps_epoch, scale=1.0)
+ GPS_Epoch_Time = convert_delta_time(
+ -19.0, epoch1=epoch, epoch2=_gps_epoch, scale=1.0
+ )
+ GPS_Time = convert_delta_time(
+ delta_time - 19.0, epoch1=epoch, epoch2=_gps_epoch, scale=1.0
+ )
# calculate difference in leap seconds from start of epoch
- leaps = count_leap_seconds(GPS_Time) - \
- count_leap_seconds(np.atleast_1d(GPS_Epoch_Time))
- elif (standard.upper() == 'GLONASS'):
+ leaps = count_leap_seconds(GPS_Time) - count_leap_seconds(
+ np.atleast_1d(GPS_Epoch_Time)
+ )
+ elif standard.upper() == "GLONASS":
# GLONASS time is ahead of UTC time by 3 hours
- leaps = 3.0*3600.0
+ leaps = 3.0 * 3600.0
else:
leaps = 0.0
# convert time to days relative to Modified Julian days in UTC
- MJD = convert_delta_time(delta_time - leaps,
- epoch1=epoch, epoch2=_mjd_epoch, scale=(1.0/86400.0))
+ MJD = convert_delta_time(
+ delta_time - leaps,
+ epoch1=epoch,
+ epoch2=_mjd_epoch,
+ scale=(1.0 / 86400.0),
+ )
return cls(MJD=MJD, leaps=leaps)
@classmethod
- def from_calendar(cls,
+ def from_calendar(
+ cls,
year: np.ndarray,
month: np.ndarray,
day: np.ndarray,
hour: np.ndarray | float = 0.0,
minute: np.ndarray | float = 0.0,
second: np.ndarray | float = 0.0,
- ):
+ ):
"""
Converts calendar date arrays into a ``Timescale`` object
@@ -824,11 +910,20 @@ def from_calendar(cls,
second = np.array(second, dtype=np.float64)
# calculate date in Modified Julian Days (MJD) from calendar date
# MJD: days since November 17, 1858 (1858-11-17T00:00:00)
- MJD = 367.0*year - \
- np.floor(1.75*(year + np.floor((month + 9.0)/12.0))) - \
- np.floor(0.75*(np.floor((year + (month - 9.0)/7.0)/100.0) + 1.0)) + \
- np.floor(275.0*month/9.0) + day + hour/24.0 + minute/1440.0 + \
- second/86400.0 + 1721028.5 - _jd_mjd
+ MJD = (
+ 367.0 * year
+ - np.floor(1.75 * (year + np.floor((month + 9.0) / 12.0)))
+ - np.floor(
+ 0.75 * (np.floor((year + (month - 9.0) / 7.0) / 100.0) + 1.0)
+ )
+ + np.floor(275.0 * month / 9.0)
+ + day
+ + hour / 24.0
+ + minute / 1440.0
+ + second / 86400.0
+ + 1721028.5
+ - _jd_mjd
+ )
return cls(MJD=MJD)
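
A one-line check of the reformatted expression against the J2000.0 epoch:

import timescale.time

ts = timescale.time.Timescale.from_calendar(2000, 1, 1, hour=12.0)
# ts.MJD == 51544.5, the Modified Julian Day of 2000-01-01T12:00:00
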
@classmethod
@@ -843,7 +938,7 @@ def from_datetime(cls, dtime: np.ndarray):
"""
# convert delta time array from datetime object
# to days relative to the MJD epoch (1858-11-17T00:00:00)
- MJD = convert_datetime(dtime, epoch=_mjd_epoch)/86400.0
+ MJD = convert_datetime(dtime, epoch=_mjd_epoch) / 86400.0
return cls(MJD=MJD)
@classmethod
@@ -867,10 +962,9 @@ def to_calendar(self):
"""
return Calendar(self.utc)
- def to_deltatime(self,
- epoch: str | tuple | list | np.ndarray,
- scale: float = 1.0
- ):
+ def to_deltatime(
+ self, epoch: str | tuple | list | np.ndarray, scale: float = 1.0
+ ):
"""
Convert a ``Timescale`` object to a delta time array
@@ -893,11 +987,11 @@ def to_deltatime(self,
elif isinstance(epoch, str):
epoch = np.datetime64(parse(epoch))
# calculate the difference in epochs in days
- delta_time_epochs = (epoch - epoch1) / np.timedelta64(1, 'D')
+ delta_time_epochs = (epoch - epoch1) / np.timedelta64(1, "D")
# return the date in time (default days) since epoch
- return scale*np.array(self.MJD - delta_time_epochs, dtype=np.float64)
+ return scale * np.array(self.MJD - delta_time_epochs, dtype=np.float64)
- def to_datetime(self, unit='ns'):
+ def to_datetime(self, unit="ns"):
"""
Convert a ``Timescale`` object to a ``datetime`` array
@@ -907,16 +1001,16 @@ def to_datetime(self, unit='ns'):
``numpy.datetime64`` array
"""
# verify that units are numpy datetime compatible
- assert unit in ['D','h','m','s','ms','us','ns']
+ assert unit in ["D", "h", "m", "s", "ms", "us", "ns"]
# convert Modified Julian Day epoch to datetime variable
epoch = np.datetime64(datetime.datetime(*_mjd_epoch))
# calculate the delta time in the specified units
- scale = self.day*_from_sec[unit]
- delta_time = np.atleast_1d(self.MJD*scale).astype(np.int64)
+ scale = self.day * _from_sec[unit]
+ delta_time = np.atleast_1d(self.MJD * scale).astype(np.int64)
# return the datetime array
- return np.array(epoch + delta_time.astype(f'timedelta64[{unit}]'))
+ return np.array(epoch + delta_time.astype(f"timedelta64[{unit}]"))
- def to_string(self, unit: str = 's', **kwargs):
+ def to_string(self, unit: str = "s", **kwargs):
"""
Convert a ``Timescale`` object to a formatted string array
@@ -928,9 +1022,7 @@ def to_string(self, unit: str = 's', **kwargs):
keyword arguments for datetime formatting
"""
return np.datetime_as_string(
- self.to_datetime(unit=unit),
- unit=unit,
- **kwargs
+ self.to_datetime(unit=unit), unit=unit, **kwargs
)
# PURPOSE: calculate the sum of a polynomial function of time
@@ -947,71 +1039,67 @@ def polynomial_sum(self, coefficients: list | np.ndarray, t: np.ndarray):
"""
# convert time to array if importing a single value
t = np.atleast_1d(t)
- return np.sum([c * (t ** i) for i, c in enumerate(coefficients)], axis=0)
+ return np.sum([c * (t**i) for i, c in enumerate(coefficients)], axis=0)
@timescale.utilities.reify
def era(self):
- """Earth Rotation Angle (ERA) in degrees
- """
+ """Earth Rotation Angle (ERA) in degrees"""
# earth rotation angle using Universal Time
_jd_j2000 = _jd_mjd + _mjd_j2000
# UT1 in days since J2000
J = self.ut1 - _jd_j2000
fraction = np.mod(J, self.turn)
- theta = np.mod(0.7790572732640 + 0.00273781191135448*J, self.turn)
- return self.turndeg*np.mod(theta + fraction, self.turn)
+ theta = np.mod(0.7790572732640 + 0.00273781191135448 * J, self.turn)
+ return self.turndeg * np.mod(theta + fraction, self.turn)
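
At J2000.0 (UT1 = JD 2451545.0, so J = 0) the series reduces to its constant term, recovering the standard IAU 2000 value:

import numpy as np

# Earth Rotation Angle at J2000.0: only the constant term survives
era_deg = 360.0 * np.mod(0.7790572732640, 1.0)
# era_deg ~= 280.4606184 degrees
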
@timescale.utilities.reify
def gha(self):
- """Greenwich Hour Angle (GHA) in degrees
- """
- return np.mod(self.gmst*self.turndeg +
- self.turndeg*self.T*self.century +
- self.turndeg/2.0, self.turndeg)
+ """Greenwich Hour Angle (GHA) in degrees"""
+ return np.mod(
+ self.gmst * self.turndeg
+ + self.turndeg * self.T * self.century
+ + self.turndeg / 2.0,
+ self.turndeg,
+ )
@timescale.utilities.reify
def gmst(self):
- """Greenwich Mean Sidereal Time (GMST) in fractions of day
- """
+ """Greenwich Mean Sidereal Time (GMST) in fractions of day"""
GMST = np.array([24110.54841, 8640184.812866, 9.3104e-2, -6.2e-6])
# UT1 as Julian centuries
_jd_j2000 = _jd_mjd + _mjd_j2000
- ut1 = (self.ut1 - _jd_j2000)/self.century
+ ut1 = (self.ut1 - _jd_j2000) / self.century
# convert from seconds to fractions of day
- return np.mod(self.polynomial_sum(GMST, ut1)/self.day, self.turn)
+ return np.mod(self.polynomial_sum(GMST, ut1) / self.day, self.turn)
@timescale.utilities.reify
def gps(self):
- """Seconds since 1980-01-06T00:00:00
- """
+ """Seconds since 1980-01-06T00:00:00"""
# return the GPS time
- return (self.utc - _jd_gps)*self.day + self.gps_utc
+ return (self.utc - _jd_gps) * self.day + self.gps_utc
@timescale.utilities.reify
def gps_utc(self):
- """Leap seconds between GPS and UTC time
- """
+ """Leap seconds between GPS and UTC time"""
# dynamic time is ahead of TAI by 32.184 seconds
_tt_tai = 32.184
# TAI time is ahead of GPS by 19 seconds
_tai_gps = 19.0
# convert from dynamic time to TAI
- TAI = np.atleast_1d(self.tt - _jd_gps)*self.day - _tt_tai
+ TAI = np.atleast_1d(self.tt - _jd_gps) * self.day - _tt_tai
# calculate the number of leap seconds
return count_leap_seconds(TAI - _tai_gps)
@timescale.utilities.reify
def gps_week(self):
- """GPS week number since 1980-01-06T00:00:00
- """
- return (self.gps/(self.day*7)).astype(np.int64)
+ """GPS week number since 1980-01-06T00:00:00"""
+ return (self.gps / (self.day * 7)).astype(np.int64)
@timescale.utilities.reify
def J2000(self):
- """Seconds (Terrestrial Time) since 2000-01-01T12:00:00
- """
+ """Seconds (Terrestrial Time) since 2000-01-01T12:00:00"""
_jd_j2000 = _jd_mjd + _mjd_j2000
- return (self.tt - _jd_j2000)*self.day
+ return (self.tt - _jd_j2000) * self.day
@timescale.utilities.reify
def st(self):
@@ -1020,17 +1108,19 @@ def st(self):
"""
# IAU 2000 model for GMST
# sidereal time polynomial coefficients in arcseconds
- sidereal_time = np.array([0.014506, 4612.156534, 1.3915817, -4.4e-7,
- -2.9956e-05, -3.68e-08])
+ sidereal_time = np.array(
+ [0.014506, 4612.156534, 1.3915817, -4.4e-7, -2.9956e-05, -3.68e-08]
+ )
ST = self.polynomial_sum(sidereal_time, self.T)
# get earth rotation angle and convert to arcseconds
# convert from arcseconds to fractions of day
- return np.mod(ST + self.era*self.deg2asec, self.turnasec)/self.turnasec
+ return (
+ np.mod(ST + self.era * self.deg2asec, self.turnasec) / self.turnasec
+ )
@timescale.utilities.reify
def tdb(self):
- """Approximate Barycentric Dynamical Time (TDB) as Julian Days
- """
+ """Approximate Barycentric Dynamical Time (TDB) as Julian Days"""
# calculate the approximate TDB time
return self.tt + self.tdb_tt
@@ -1041,26 +1131,26 @@ def tdb_tt(self):
terrestrial time (TT) :cite:p:`Fairhead:1990vz,Kaplan:2005kj`
"""
# truncated Fairhead and Bretagnon series
- FB = 0.001657 * np.sin(628.3076 * self.T + 6.2401) + \
- 0.000022 * np.sin(575.3385 * self.T + 4.2970) + \
- 0.000014 * np.sin(1256.6152 * self.T + 6.1969) + \
- 0.000005 * np.sin(606.9777 * self.T + 4.0212) + \
- 0.000005 * np.sin(52.9691 * self.T + 0.4444) + \
- 0.000002 * np.sin(21.3299 * self.T + 5.5431) + \
- 0.000010 * self.T * np.sin(628.3076 * self.T + 4.2490)
+ FB = (
+ 0.001657 * np.sin(628.3076 * self.T + 6.2401)
+ + 0.000022 * np.sin(575.3385 * self.T + 4.2970)
+ + 0.000014 * np.sin(1256.6152 * self.T + 6.1969)
+ + 0.000005 * np.sin(606.9777 * self.T + 4.0212)
+ + 0.000005 * np.sin(52.9691 * self.T + 0.4444)
+ + 0.000002 * np.sin(21.3299 * self.T + 5.5431)
+ + 0.000010 * self.T * np.sin(628.3076 * self.T + 4.2490)
+ )
# convert from seconds to days
- return FB/self.day
+ return FB / self.day
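
The series is dominated by its 1.657 ms annual term; a standalone scan over one Julian century confirms the overall amplitude:

import numpy as np

# evaluate the truncated Fairhead & Bretagnon series (coefficients above)
T = np.linspace(0.0, 1.0, 100_000)
FB = (
    0.001657 * np.sin(628.3076 * T + 6.2401)
    + 0.000022 * np.sin(575.3385 * T + 4.2970)
    + 0.000014 * np.sin(1256.6152 * T + 6.1969)
    + 0.000005 * np.sin(606.9777 * T + 4.0212)
    + 0.000005 * np.sin(52.9691 * T + 0.4444)
    + 0.000002 * np.sin(21.3299 * T + 5.5431)
    + 0.000010 * T * np.sin(628.3076 * T + 4.2490)
)
# np.abs(FB).max() is ~1.7e-3 s: TDB-TT stays within about +/- 1.7 ms
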
@timescale.utilities.reify
def tide(self):
- """Days since 1992-01-01T00:00:00
- """
+ """Days since 1992-01-01T00:00:00"""
return self.MJD - _mjd_tide
@timescale.utilities.reify
def tt(self):
- """Terrestrial Time (TT) as Julian Days
- """
+ """Terrestrial Time (TT) as Julian Days"""
return self.MJD + self.tt_ut1 + _jd_mjd
@timescale.utilities.reify
@@ -1070,7 +1160,7 @@ def tt_ut1(self):
"""
# return the delta time for the input date converted to days
return interpolate_delta_time(_delta_file, self.tide)
-
+
@timescale.utilities.reify
def ut1_utc(self):
"""
@@ -1082,90 +1172,81 @@ def ut1_utc(self):
# TAI time is ahead of GPS by 19 seconds
_tai_gps = 19.0
# convert from delta times back to seconds
- _tt_ut1 = self.day*self.tt_ut1
+ _tt_ut1 = self.day * self.tt_ut1
# recalculate UT1-UTC (seconds)
return _tt_tai + _tai_gps + self.gps_utc - _tt_ut1
@timescale.utilities.reify
def T(self):
- """Centuries since 2000-01-01T12:00:00
- """
+ """Centuries since 2000-01-01T12:00:00"""
_jd_j2000 = _jd_mjd + _mjd_j2000
- return (self.tt - _jd_j2000)/self.century
-
+ return (self.tt - _jd_j2000) / self.century
+
@timescale.utilities.reify
def B(self):
- """Time in Besselian years :cite:p:`Lieske:1979wv`
- """
+ """Time in Besselian years :cite:p:`Lieske:1979wv`"""
return 1900.0 + (self.MJD - 15019.81352) / 365.242198781
@timescale.utilities.reify
def ut1(self):
- """Universal Time (UT) as Julian Days
- """
+ """Universal Time (UT) as Julian Days"""
# convert UT1-UTC to days
- return self.utc + self.ut1_utc/self.day
-
+ return self.utc + self.ut1_utc / self.day
+
@timescale.utilities.reify
def ut2(self):
- """UT0 corrected for polar motion and seasonal variation
- """
- theta = 2.0*np.pi*self.B
- ut2_ut1 = 0.022*np.sin(theta) - 0.012*np.cos(theta) - \
- 0.006*np.sin(2.0*theta) + 0.007*np.cos(2.0*theta)
+ """UT0 corrected for polar motion and seasonal variation"""
+ theta = 2.0 * np.pi * self.B
+ ut2_ut1 = (
+ 0.022 * np.sin(theta)
+ - 0.012 * np.cos(theta)
+ - 0.006 * np.sin(2.0 * theta)
+ + 0.007 * np.cos(2.0 * theta)
+ )
return self.ut1 + ut2_ut1
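
# A standalone sketch of the UT2-UT1 seasonal model above (B is the
# Besselian year); the correction amounts to a few tens of milliseconds:
import numpy as np

def ut2_minus_ut1(B):
    # annual and semi-annual terms copied from ut2 above (seconds)
    theta = 2.0 * np.pi * B
    return (0.022 * np.sin(theta) - 0.012 * np.cos(theta)
        - 0.006 * np.sin(2.0 * theta) + 0.007 * np.cos(2.0 * theta))

B = np.linspace(2024.0, 2025.0, 365)
print(ut2_minus_ut1(B).min(), ut2_minus_ut1(B).max())
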
@timescale.utilities.reify
def utc(self):
- """Coordinated Universal Time (UTC) as Julian Days
- """
- return self.MJD + _jd_mjd
-
+ """Coordinated Universal Time (UTC) as Julian Days"""
+ return self.MJD + _jd_mjd
+
@timescale.utilities.reify
def year(self):
- """Universal Time (UT) as calendar year
- """
- Y, M, D, h, m, s = convert_julian(self.utc, format='tuple')
+ """Universal Time (UT) as calendar year"""
+ Y, M, D, h, m, s = convert_julian(self.utc, format="tuple")
return convert_calendar_decimal(Y, M, D, hour=h, minute=m, second=s)
-
+
@timescale.utilities.reify
def nominal_year(self):
- """Universal Time (UT) as nominal years of 365.25 days
- """
- return 1992.0 + self.tide/365.25
+ """Universal Time (UT) as nominal years of 365.25 days"""
+ return 1992.0 + self.tide / 365.25
def min(self):
- """Minimum time value as a ``Timescale`` object
- """
+ """Minimum time value as a ``Timescale`` object"""
return Timescale(MJD=np.nanmin(self.MJD))
def max(self):
- """Maximum time value as a ``Timescale`` object
- """
+ """Maximum time value as a ``Timescale`` object"""
return Timescale(MJD=np.nanmax(self.MJD))
def mean(self):
- """Mean time value as a ``Timescale`` object
- """
+ """Mean time value as a ``Timescale`` object"""
return Timescale(MJD=np.nanmean(self.MJD))
@property
def turnasec(self):
- """Arcseconds in a full turn
- """
- return self.turndeg*self.deg2asec
+ """Arcseconds in a full turn"""
+ return self.turndeg * self.deg2asec
@property
def asec2rad(self):
- """Arcseconds to radians
- """
- return self.deg2rad/self.deg2asec
+ """Arcseconds to radians"""
+ return self.deg2rad / self.deg2asec
@property
def masec2rad(self):
- """Microarcseconds to radians
- """
- return self.asec2rad/1.0e6
+ """Microarcseconds to radians"""
+ return self.asec2rad / 1.0e6
@property
def dtype(self):
@@ -1174,43 +1255,36 @@ def dtype(self):
@property
def shape(self):
- """Dimensions of ``Timescale`` object
- """
+ """Dimensions of ``Timescale`` object"""
return np.shape(self.MJD)
@property
def ndim(self):
- """Number of dimensions in ``Timescale`` object
- """
+ """Number of dimensions in ``Timescale`` object"""
return np.ndim(self.MJD)
def __str__(self):
- """String representation of the ``Timescale`` object
- """
- properties = ['timescale.time.Timescale']
- return '\n'.join(properties)
+ """String representation of the ``Timescale`` object"""
+ properties = ["timescale.time.Timescale"]
+ return "\n".join(properties)
def __len__(self):
- """Number of time values
- """
+ """Number of time values"""
return len(np.atleast_1d(self.MJD))
def __getitem__(self, ind):
- """Subset ``Timescale`` object to indices
- """
+ """Subset ``Timescale`` object to indices"""
temp = Timescale()
temp.MJD = np.atleast_1d(self.MJD)[ind].copy()
return temp
def __iter__(self):
- """Iterate over time values
- """
+ """Iterate over time values"""
self.__index__ = 0
return self
def __next__(self):
- """Get the next time step
- """
+ """Get the next time step"""
temp = Timescale()
try:
temp.MJD = np.atleast_1d(self.MJD)[self.__index__].copy()
@@ -1220,10 +1294,12 @@ def __next__(self):
self.__index__ += 1
return temp
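
# The dunder methods above make a Timescale sliceable and iterable over
# its MJD values; a minimal sketch, assuming the Timescale(MJD=...)
# constructor shown earlier in this diff:
import numpy as np
import timescale.time

ts = timescale.time.Timescale(MJD=np.array([58849.0, 58850.0, 58851.0]))
print(len(ts), ts.shape)  # 3 (3,)
first = ts[0]             # __getitem__ returns a new single-value Timescale
for step in ts:           # __iter__/__next__ walk the MJD values
    print(step.MJD)
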
+
class Calendar:
"""
Class for converting from Julian dates to calendar dates
"""
+
def __init__(self, utc=None):
# Julian Days
self.utc = utc
@@ -1244,42 +1320,35 @@ def dtype(self):
@property
def shape(self):
- """Dimensions of ``Calendar`` object
- """
+ """Dimensions of ``Calendar`` object"""
return np.shape(self.utc)
@property
def ndim(self):
- """Number of dimensions in ``Calendar`` object
- """
+ """Number of dimensions in ``Calendar`` object"""
return np.ndim(self.utc)
def __str__(self):
- """String representation of the ``Calendar`` object
- """
- properties = ['timescale.time.Calendar']
- return '\n'.join(properties)
+ """String representation of the ``Calendar`` object"""
+ properties = ["timescale.time.Calendar"]
+ return "\n".join(properties)
def __len__(self):
- """Number of time values
- """
+ """Number of time values"""
return len(np.atleast_1d(self.utc))
def __getitem__(self, ind):
- """Subset ``Calendar`` object to indices
- """
+ """Subset ``Calendar`` object to indices"""
utc = np.atleast_1d(self.utc)[ind].copy()
return Calendar(utc=utc)
def __iter__(self):
- """Iterate over time values
- """
+ """Iterate over time values"""
self.__index__ = 0
return self
def __next__(self):
- """Get the next time step
- """
+ """Get the next time step"""
try:
utc = np.atleast_1d(self.utc)[self.__index__].copy()
except IndexError as exc:
@@ -1288,12 +1357,13 @@ def __next__(self):
self.__index__ += 1
return Calendar(utc=utc)
+
# PURPOSE: calculate the difference between universal time and dynamical time
# by interpolating a delta time file to a given date
def interpolate_delta_time(
- delta_file: str | pathlib.Path | None,
- idays: np.ndarray,
- ):
+ delta_file: str | pathlib.Path | None,
+ idays: np.ndarray,
+):
"""
Calculates the difference between universal time (UT) and
dynamical time (TT) :cite:p:`Meeus:1991vh`
@@ -1315,18 +1385,18 @@ def interpolate_delta_time(
dinput = np.loadtxt(delta_file)
# calculate Julian days and then convert to days since 1992-01-01T00:00:00
days = convert_calendar_dates(
- dinput[:,0], dinput[:,1], dinput[:,2],
- epoch=_tide_epoch)
+ dinput[:, 0], dinput[:, 1], dinput[:, 2], epoch=_tide_epoch
+ )
# use scipy interpolating splines to interpolate delta times
- spl = scipy.interpolate.UnivariateSpline(days, dinput[:,3], k=1, s=0, ext=0)
+ spl = scipy.interpolate.UnivariateSpline(
+ days, dinput[:, 3], k=1, s=0, ext=0
+ )
# return the delta time for the input date converted to days
- return spl(idays)/86400.0
+ return spl(idays) / 86400.0
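
# A usage sketch for interpolate_delta_time: the merged_deltat.data file
# produced by merge_delta_time() below stores year, month, day, and delta
# time (seconds); idays counts days since 1992-01-01, so 10227.0 here is
# 2020-01-01 (this assumes the merged file has already been generated).
import numpy as np
import timescale.time
import timescale.utilities

delta_file = timescale.utilities.get_data_path(["data", "merged_deltat.data"])
print(timescale.time.interpolate_delta_time(delta_file, np.array([10227.0])))
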
+
# PURPOSE: Count number of leap seconds that have passed for each GPS time
-def count_leap_seconds(
- GPS_Time: np.ndarray | float,
- truncate: bool = True
- ):
+def count_leap_seconds(GPS_Time: np.ndarray | float, truncate: bool = True):
"""
Counts the number of leap seconds between a given GPS time and UTC
@@ -1345,15 +1415,16 @@ def count_leap_seconds(
# get the valid leap seconds
leaps = get_leap_seconds(truncate=truncate)
# number of leap seconds prior to GPS_Time
- n_leaps = np.zeros_like(GPS_Time,dtype=np.float64)
- for i,leap in enumerate(leaps):
+ n_leaps = np.zeros_like(GPS_Time, dtype=np.float64)
+ for i, leap in enumerate(leaps):
count = np.count_nonzero(GPS_Time >= leap)
- if (count > 0):
+ if count > 0:
indices = np.nonzero(GPS_Time >= leap)
n_leaps[indices] += 1.0
# return the number of leap seconds for converting to UTC
return n_leaps
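
# A usage sketch for count_leap_seconds, which maps GPS seconds to the
# number of leap seconds elapsed since 1980-01-06 (15 had accumulated
# by late 2011):
import numpy as np
import timescale.time

gps_seconds = np.array([0.0, 1.0e9])  # GPS epoch and ~2011-09-14
print(timescale.time.count_leap_seconds(gps_seconds))  # -> [ 0. 15.]
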
+
# PURPOSE: Define GPS leap seconds
def get_leap_seconds(truncate: bool = True):
"""
@@ -1369,35 +1440,40 @@ def get_leap_seconds(truncate: bool = True):
leap_GPS: float
GPS seconds when leap seconds occurred
"""
- leap_secs = timescale.utilities.get_data_path(['data','leap-seconds.list'])
+ leap_secs = timescale.utilities.get_data_path(["data", "leap-seconds.list"])
# find line with file expiration as delta time
- with leap_secs.open(mode='r', encoding='utf8') as fid:
- secs, = [re.findall(r'\d+',i).pop() for i in fid.read().splitlines()
- if re.match(r'^(?=#@)',i)]
+ with leap_secs.open(mode="r", encoding="utf8") as fid:
+ (secs,) = [
+ re.findall(r"\d+", i).pop()
+ for i in fid.read().splitlines()
+ if re.match(r"^(?=#@)", i)
+ ]
# check that leap seconds file is still valid
- expiry = datetime.datetime(*_ntp_epoch) + datetime.timedelta(seconds=int(secs))
+ expiry = datetime.datetime(*_ntp_epoch) + datetime.timedelta(
+ seconds=int(secs)
+ )
today = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
update_leap_seconds() if (expiry < today) else None
# get leap seconds
- leap_UTC,TAI_UTC = np.loadtxt(leap_secs).T
+ leap_UTC, TAI_UTC = np.loadtxt(leap_secs).T
# TAI time is ahead of GPS by 19 seconds
TAI_GPS = 19.0
# convert leap second epochs from NTP to GPS
# convert from time of 2nd leap second to time of 1st leap second
- leap_GPS = convert_delta_time(leap_UTC + TAI_UTC - TAI_GPS - 1,
- epoch1=_ntp_epoch, epoch2=_gps_epoch)
+ leap_GPS = convert_delta_time(
+ leap_UTC + TAI_UTC - TAI_GPS - 1, epoch1=_ntp_epoch, epoch2=_gps_epoch
+ )
# return the GPS times of leap second occurrence
if truncate:
return leap_GPS[leap_GPS >= 0].astype(np.float64)
else:
return leap_GPS.astype(np.float64)
+
# PURPOSE: connects to servers and downloads leap second files
def update_leap_seconds(
- timeout: int | None = 20,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ timeout: int | None = 20, verbose: bool = False, mode: oct = 0o775
+):
"""
Connects to NIST, IERS, and IANA servers to download leap-seconds.list files
@@ -1419,20 +1495,22 @@ def update_leap_seconds(
permissions mode of output file
"""
# local version of file
- FILE = 'leap-seconds.list'
- LOCAL = timescale.utilities.get_data_path(['data',FILE])
+ FILE = "leap-seconds.list"
+ LOCAL = timescale.utilities.get_data_path(["data", FILE])
HASH = timescale.utilities.get_hash(LOCAL)
# try downloading from NIST Boulder ftp servers
- HOST = ['ftp.boulder.nist.gov','pub','time',FILE]
+ HOST = ["ftp.boulder.nist.gov", "pub", "time", FILE]
try:
timescale.utilities.check_ftp_connection(HOST[0])
- timescale.utilities.from_ftp(HOST,
+ timescale.utilities.from_ftp(
+ HOST,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode)
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
@@ -1440,14 +1518,16 @@ def update_leap_seconds(
return
# try downloading from Paris Observatory IERS Centers
- REMOTE = ['https://hpiers.obspm.fr','iers','bul','bulc','ntp',FILE]
+ REMOTE = ["https://hpiers.obspm.fr", "iers", "bul", "bulc", "ntp", FILE]
try:
- timescale.utilities.from_http(REMOTE,
+ timescale.utilities.from_http(
+ REMOTE,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode)
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
@@ -1455,27 +1535,30 @@ def update_leap_seconds(
return
# try downloading from Internet Assigned Numbers Authority (IANA)
- REMOTE = ['https://data.iana.org','time-zones','data',FILE]
+ REMOTE = ["https://data.iana.org", "time-zones", "data", FILE]
try:
- timescale.utilities.from_http(REMOTE,
+ timescale.utilities.from_http(
+ REMOTE,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode)
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
else:
return
+
# PURPOSE: Download delta time files and merge into a single file
def merge_delta_time(
- username: str | None = None,
- password: str | None = None,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ username: str | None = None,
+ password: str | None = None,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
Connects to servers to download historic_deltat.data and deltat.data files
@@ -1503,65 +1586,71 @@ def merge_delta_time(
Delta times are the difference between universal time and dynamical time
"""
# retrieve history delta time files
- pull_deltat_file('historic_deltat.data',
+ pull_deltat_file(
+ "historic_deltat.data",
username=username,
password=password,
verbose=verbose,
- mode=mode
+ mode=mode,
)
# read historic delta time file
- historic_file=timescale.utilities.get_data_path(['data','historic_deltat.data'])
+ historic_file = timescale.utilities.get_data_path(
+ ["data", "historic_deltat.data"]
+ )
historic = np.loadtxt(historic_file, skiprows=2)
- HY = np.floor(historic[:,0])
- HM = 12.0*np.mod(historic[:,0], 1.0) + 1.0
- HD = np.ones_like(historic[:,0])
+ HY = np.floor(historic[:, 0])
+ HM = 12.0 * np.mod(historic[:, 0], 1.0) + 1.0
+ HD = np.ones_like(historic[:, 0])
# retrieve monthly delta time files
- pull_deltat_file('deltat.data',
+ pull_deltat_file(
+ "deltat.data",
username=username,
password=password,
verbose=verbose,
- mode=mode
+ mode=mode,
)
# read modern monthly delta time file
- monthly_file = timescale.utilities.get_data_path(['data','deltat.data'])
+ monthly_file = timescale.utilities.get_data_path(["data", "deltat.data"])
monthly = np.loadtxt(monthly_file)
- monthly_time = convert_calendar_decimal(monthly[:,0],monthly[:,1],
- day=monthly[:,2])
+ monthly_time = convert_calendar_decimal(
+ monthly[:, 0], monthly[:, 1], day=monthly[:, 2]
+ )
# retrieve daily delta time files
merge_bulletin_a_files(
- username=username,
- password=password,
- verbose=verbose,
- mode=mode
+ username=username, password=password, verbose=verbose, mode=mode
)
# read modern daily delta time file from IERS Bulletin A files
- daily_file = timescale.utilities.get_data_path(['data','iers_deltat.data'])
+ daily_file = timescale.utilities.get_data_path(["data", "iers_deltat.data"])
daily = np.loadtxt(daily_file)
- daily_time = convert_calendar_decimal(daily[:,0], daily[:,1],
- day=daily[:,2])
+ daily_time = convert_calendar_decimal(
+ daily[:, 0], daily[:, 1], day=daily[:, 2]
+ )
# write to new merged file
- merged_file = timescale.utilities.get_data_path(['data','merged_deltat.data'])
- fid = merged_file.open(mode='w', encoding='utf8')
+ merged_file = timescale.utilities.get_data_path(
+ ["data", "merged_deltat.data"]
+ )
+ fid = merged_file.open(mode="w", encoding="utf8")
logging.info(str(merged_file))
- file_format = ' {0:4.0f} {1:2.0f} {2:2.0f} {3:7.4f}'
+ file_format = " {0:4.0f} {1:2.0f} {2:2.0f} {3:7.4f}"
# use historical values for times prior to monthly
- ind1, = np.nonzero(historic[:,0] < monthly_time[0])
+ (ind1,) = np.nonzero(historic[:, 0] < monthly_time[0])
for i in ind1:
- args = (HY[i], HM[i], HD[i], historic[i,1])
+ args = (HY[i], HM[i], HD[i], historic[i, 1])
print(file_format.format(*args), file=fid)
# use monthly values for times prior to daily
- ind2, = np.nonzero(monthly_time < np.min(daily_time))
+ (ind2,) = np.nonzero(monthly_time < np.min(daily_time))
for i in ind2:
- args = (monthly[i,0], monthly[i,1], monthly[i,2], monthly[i,3])
+ args = (monthly[i, 0], monthly[i, 1], monthly[i, 2], monthly[i, 3])
print(file_format.format(*args), file=fid)
# use daily values for all times available
for i in np.argsort(daily_time):
- args = (daily[i,0], daily[i,1], daily[i,2], daily[i,3])
+ args = (daily[i, 0], daily[i, 1], daily[i, 2], daily[i, 3])
print(file_format.format(*args), file=fid)
# close the merged file and change the permissions mode
fid.close()
merged_file.chmod(mode)
+
# PURPOSE: Append Bulletin-A file to merged delta time file
def append_delta_time(verbose: bool = False, mode: oct = 0o775):
"""
@@ -1579,40 +1668,43 @@ def append_delta_time(verbose: bool = False, mode: oct = 0o775):
Delta times are the difference between universal time and dynamical time
"""
# merged delta time file
- merged_file = timescale.utilities.get_data_path(['data','merged_deltat.data'])
+ merged_file = timescale.utilities.get_data_path(
+ ["data", "merged_deltat.data"]
+ )
# read merged delta time file
dinput = np.loadtxt(merged_file)
- merged_time = convert_calendar_decimal(dinput[:,0], dinput[:,1],
- day=dinput[:,2])
+ merged_time = convert_calendar_decimal(
+ dinput[:, 0], dinput[:, 1], day=dinput[:, 2]
+ )
# append to merged file
- fid = merged_file.open(mode='a', encoding='utf8')
+ fid = merged_file.open(mode="a", encoding="utf8")
logging.info(str(merged_file))
# read latest Bulletin-A file from IERS
- bulletin_file = timescale.utilities.get_data_path(['data','ser7.dat'])
+ bulletin_file = timescale.utilities.get_data_path(["data", "ser7.dat"])
logging.info(str(bulletin_file))
- with bulletin_file.open(mode='rb') as fileID:
- YY,MM,DD,DELTAT = read_iers_bulletin_a(fileID)
+ with bulletin_file.open(mode="rb") as fileID:
+ YY, MM, DD, DELTAT = read_iers_bulletin_a(fileID)
# append latest delta time values to merged file
for Y, M, D, T in zip(YY, MM, DD, DELTAT):
- daily_time = convert_calendar_decimal(float(Y), float(M),
- day=float(D))
+ daily_time = convert_calendar_decimal(float(Y), float(M), day=float(D))
# check if date is already in merged file
if daily_time in merged_time:
- logging.info(f'{Y:4.0f}-{M:02.0f}-{D:02.0f} exists in merged file')
+ logging.info(f"{Y:4.0f}-{M:02.0f}-{D:02.0f} exists in merged file")
continue
# write to merged file
- print(f' {Y:4.0f} {M:2.0f} {D:2.0f} {T:7.4f}', file=fid)
+ print(f" {Y:4.0f} {M:2.0f} {D:2.0f} {T:7.4f}", file=fid)
# close the merged file and change the permissions mode
fid.close()
merged_file.chmod(mode)
+
# PURPOSE: connect to IERS or CDDIS server and merge Bulletin-A files
def merge_bulletin_a_files(
- username: str | None = None,
- password: str | None = None,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ username: str | None = None,
+ password: str | None = None,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
Attempts to connect to the IERS server and the CDDIS Earthdata server
to download and merge Bulletin-A files
@@ -1641,14 +1733,11 @@ def merge_bulletin_a_files(
Delta times are the difference between universal time and dynamical time
"""
# if complete: replace previous version of file
- LOCAL = timescale.utilities.get_data_path(['data','iers_deltat.data'])
- COPY = timescale.utilities.get_data_path(['data','iers_deltat.temp'])
+ LOCAL = timescale.utilities.get_data_path(["data", "iers_deltat.data"])
+ COPY = timescale.utilities.get_data_path(["data", "iers_deltat.temp"])
# try connecting to IERS http servers and merge Bulletin-A files
try:
- iers_delta_time(COPY,
- verbose=verbose,
- mode=mode
- )
+ iers_delta_time(COPY, verbose=verbose, mode=mode)
except Exception as exc:
logging.debug(traceback.format_exc())
COPY.unlink() if COPY.exists() else None
@@ -1659,10 +1748,7 @@ def merge_bulletin_a_files(
# try connecting to IERS ftp servers and merge Bulletin-A files
try:
- iers_ftp_delta_time(COPY,
- verbose=verbose,
- mode=mode
- )
+ iers_ftp_delta_time(COPY, verbose=verbose, mode=mode)
except Exception as exc:
logging.debug(traceback.format_exc())
COPY.unlink() if COPY.exists() else None
@@ -1673,11 +1759,12 @@ def merge_bulletin_a_files(
# try connecting to CDDIS https servers and merge Bulletin-A files
try:
- cddis_delta_time(COPY,
+ cddis_delta_time(
+ COPY,
username=username,
password=password,
verbose=verbose,
- mode=mode
+ mode=mode,
)
except Exception as exc:
logging.debug(traceback.format_exc())
@@ -1687,13 +1774,14 @@ def merge_bulletin_a_files(
timescale.utilities.copy(COPY, LOCAL, move=True)
return
+
# PURPOSE: connects to IERS ftp servers and finds Bulletin-A files
def iers_ftp_delta_time(
- daily_file: str | pathlib.Path,
- timeout: int | None = 120,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ daily_file: str | pathlib.Path,
+ timeout: int | None = 120,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
Connects to the IERS ftp server to download Bulletin-A files
@@ -1721,44 +1809,38 @@ def iers_ftp_delta_time(
Delta times are the difference between universal time and dynamical time
"""
# connect to ftp host for IERS bulletins
- HOST = ['ftp.iers.org','products','eop','rapid','bulletina']
+ HOST = ["ftp.iers.org", "products", "eop", "rapid", "bulletina"]
timescale.utilities.check_ftp_connection(HOST[0])
# regular expression pattern for finding files
- rx = re.compile(r'bulletina-(.*?)-(\d+).txt$',re.VERBOSE)
+ rx = re.compile(r"bulletina-(.*?)-(\d+).txt$", re.VERBOSE)
# open output daily delta time file
daily_file = pathlib.Path(daily_file).expanduser().absolute()
- fid = daily_file.open(mode='w', encoding='utf8')
+ fid = daily_file.open(mode="w", encoding="utf8")
# find subdirectories
- subdirectory,_ = timescale.utilities.ftp_list(HOST,
- timeout=timeout,
- basename=True,
- sort=True
+ subdirectory, _ = timescale.utilities.ftp_list(
+ HOST, timeout=timeout, basename=True, sort=True
)
# for each subdirectory
for SUB in subdirectory:
# find Bulletin-A files in ftp subdirectory
HOST.append(SUB)
logging.info(SUB)
- bulletin_files,_ = timescale.utilities.ftp_list(HOST,
- timeout=timeout,
- basename=True,
- sort=True,
- pattern=rx
+ bulletin_files, _ = timescale.utilities.ftp_list(
+ HOST, timeout=timeout, basename=True, sort=True, pattern=rx
)
# for each Bulletin-A file
for f in sorted(bulletin_files):
logging.info(f)
# copy remote file contents to BytesIO object
HOST.append(f)
- remote_buffer = timescale.utilities.from_ftp(HOST,
- timeout=timeout,
- verbose=verbose
+ remote_buffer = timescale.utilities.from_ftp(
+ HOST, timeout=timeout, verbose=verbose
)
# read Bulletin-A file from BytesIO object
- YY,MM,DD,DELTAT = read_iers_bulletin_a(remote_buffer)
+ YY, MM, DD, DELTAT = read_iers_bulletin_a(remote_buffer)
# print delta time for week to output file
for Y, M, D, T in zip(YY, MM, DD, DELTAT):
- print(f' {Y:4.0f} {M:2.0f} {D:2.0f} {T:7.4f}', file=fid)
+ print(f" {Y:4.0f} {M:2.0f} {D:2.0f} {T:7.4f}", file=fid)
# close the bytesIO object
remote_buffer.close()
# remove the file from the list
@@ -1770,13 +1852,14 @@ def iers_ftp_delta_time(
# change the permissions mode
daily_file.chmod(mode)
+
# PURPOSE: connects to IERS http servers and finds Bulletin-A files
def iers_delta_time(
- daily_file: str | pathlib.Path,
- timeout: int | None = 120,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ daily_file: str | pathlib.Path,
+ timeout: int | None = 120,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
Connects to the IERS server to download Bulletin-A files
@@ -1805,20 +1888,20 @@ def iers_delta_time(
"""
# open output daily delta time file
daily_file = pathlib.Path(daily_file).expanduser().absolute()
- fid = daily_file.open(mode='w', encoding='utf8')
- file_format = ' {0:4.0f} {1:2.0f} {2:2.0f} {3:7.4f}'
+ fid = daily_file.open(mode="w", encoding="utf8")
+ file_format = " {0:4.0f} {1:2.0f} {2:2.0f} {3:7.4f}"
# connect to http host for IERS Bulletin-A files
- HOST = 'https://datacenter.iers.org/availableVersions.php?id=6'
- bulletin_files,_ = timescale.utilities.iers_list(HOST, timeout=timeout)
+ HOST = "https://datacenter.iers.org/availableVersions.php?id=6"
+ bulletin_files, _ = timescale.utilities.iers_list(HOST, timeout=timeout)
# for each Bulletin-A file
for f in bulletin_files:
logging.info(f)
remote_buffer = timescale.utilities.from_http(f, timeout=timeout)
# read Bulletin-A file from BytesIO object
- YY,MM,DD,DELTAT = read_iers_bulletin_a(remote_buffer)
+ YY, MM, DD, DELTAT = read_iers_bulletin_a(remote_buffer)
# print delta time for week to output file
- for Y,M,D,T in zip(YY,MM,DD,DELTAT):
- print(file_format.format(Y,M,D,T), file=fid)
+ for Y, M, D, T in zip(YY, MM, DD, DELTAT):
+ print(file_format.format(Y, M, D, T), file=fid)
# close the bytesIO object
remote_buffer.close()
# close the output file
@@ -1826,14 +1909,15 @@ def iers_delta_time(
# change the permissions mode
daily_file.chmod(mode)
+
# PURPOSE: connects to CDDIS Earthdata https server and finds Bulletin-A files
def cddis_delta_time(
- daily_file: str | pathlib.Path,
- username: str | None = None,
- password: str | None = None,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ daily_file: str | pathlib.Path,
+ username: str | None = None,
+ password: str | None = None,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
Connects to the CDDIS Earthdata server to download Bulletin-A files
@@ -1861,45 +1945,59 @@ def cddis_delta_time(
Delta times are the difference between universal time and dynamical time
"""
# connect to CDDIS Earthdata host for IERS bulletins
- HOST = ['https://cddis.nasa.gov','archive','products','iers',
- 'iers_bulletins','bulletin_a']
+ HOST = [
+ "https://cddis.nasa.gov",
+ "archive",
+ "products",
+ "iers",
+ "iers_bulletins",
+ "bulletin_a",
+ ]
# build NASA Earthdata opener for CDDIS and check credentials
timescale.utilities.build_opener(username, password)
timescale.utilities.check_credentials()
# regular expression pattern for finding directories
- R1 = re.compile(r'volume_(.*?)$',re.VERBOSE)
+ R1 = re.compile(r"volume_(.*?)$", re.VERBOSE)
# regular expression pattern for finding files
- R2 = re.compile(r'iers_bulletina\.(.*?)_(\d+)$',re.VERBOSE)
+ R2 = re.compile(r"iers_bulletina\.(.*?)_(\d+)$", re.VERBOSE)
# open output daily delta time file
daily_file = pathlib.Path(daily_file).expanduser().absolute()
- fid = daily_file.open(mode='w', encoding='utf8')
- file_format = ' {0:4.0f} {1:2.0f} {2:2.0f} {3:7.4f}'
+ fid = daily_file.open(mode="w", encoding="utf8")
+ file_format = " {0:4.0f} {1:2.0f} {2:2.0f} {3:7.4f}"
# find subdirectories
subdirectory, mtimes = timescale.utilities.cddis_list(
- HOST, build=False, pattern=R1)
+ HOST, build=False, pattern=R1
+ )
# extract roman numerals from subdirectories
roman = [R1.findall(s).pop() for s in subdirectory]
# sort the list of Roman numerals
- subdirectory = [subdirectory[i] for i,j in sorted(enumerate(roman),
- key=lambda i: timescale.utilities.roman_to_int(i[1]))]
+ subdirectory = [
+ subdirectory[i]
+ for i, j in sorted(
+ enumerate(roman),
+ key=lambda i: timescale.utilities.roman_to_int(i[1]),
+ )
+ ]
# for each subdirectory
for SUB in subdirectory:
# find Bulletin-A files in https subdirectory
HOST.append(SUB)
bulletin_files, mtimes = timescale.utilities.cddis_list(
- HOST, build=False, sort=True, pattern=R2)
+ HOST, build=False, sort=True, pattern=R2
+ )
# for each Bulletin-A file
for f in sorted(bulletin_files):
logging.info(f)
# copy remote file contents to BytesIO object
HOST.append(f)
- remote_buffer = timescale.utilities.from_cddis(HOST,
- build=False,timeout=20)
+ remote_buffer = timescale.utilities.from_cddis(
+ HOST, build=False, timeout=20
+ )
# read Bulletin-A file from BytesIO object
- YY,MM,DD,DELTAT = read_iers_bulletin_a(remote_buffer)
+ YY, MM, DD, DELTAT = read_iers_bulletin_a(remote_buffer)
# print delta time for week to output file
- for Y,M,D,T in zip(YY,MM,DD,DELTAT):
- print(file_format.format(Y,M,D,T),file=fid)
+ for Y, M, D, T in zip(YY, MM, DD, DELTAT):
+ print(file_format.format(Y, M, D, T), file=fid)
# close the bytesIO object
remote_buffer.close()
# remove the file from the list
@@ -1911,6 +2009,7 @@ def cddis_delta_time(
# change the permissions mode
daily_file.chmod(mode)
+
# PURPOSE: reads IERS Bulletin-A and calculates the delta times
def read_iers_bulletin_a(fileID):
"""
@@ -1938,7 +2037,7 @@ def read_iers_bulletin_a(fileID):
Delta times are the difference between universal time and dynamical time
"""
# read contents from input file object
- file_contents = fileID.read().decode('utf8').splitlines()
+ file_contents = fileID.read().decode("utf8").splitlines()
# parse header text to find time offsets
# TT-TAI
@@ -1953,12 +2052,14 @@ def read_iers_bulletin_a(fileID):
# file line at count
l = file_contents[count]
# check if line contains time offsets
- if re.search(r'TT\s\=\sTAI',l):
- TT_TAI = np.float64(re.findall(r'(\d+\.\d+)',l).pop())
- if re.search(r'TAI-UTC',l):
- TAI_UTC = np.float64(re.findall(r'=\s(\d+\.\d+)',l).pop())
+ if re.search(r"TT\s\=\sTAI", l):
+ TT_TAI = np.float64(re.findall(r"(\d+\.\d+)", l).pop())
+ if re.search(r"TAI-UTC", l):
+ TAI_UTC = np.float64(re.findall(r"=\s(\d+\.\d+)", l).pop())
# find line to set HEADER flag to True
- HEADER = bool(re.search(r'COMBINED\sEARTH\sORIENTATION\sPARAMETERS:',l))
+ HEADER = bool(
+ re.search(r"COMBINED\sEARTH\sORIENTATION\sPARAMETERS:", l)
+ )
# add 1 to counter
count += 1
@@ -1970,13 +2071,13 @@ def read_iers_bulletin_a(fileID):
for i in range(7):
try:
# split numerical instances from data line
- line_contents = file_contents[count+i+4].split()
+ line_contents = file_contents[count + i + 4].split()
# years are not always complete in the bulletin file
# Modified Julian Day (days since 1858-11-17T00:00:00)
MJD[i] = np.float64(line_contents[3])
# difference between UT1 and UTC times
UT1_UTC[i] = np.float64(line_contents[8])
- except (IndexError,ValueError):
+ except (IndexError, ValueError):
pass
else:
valid += 1
@@ -1985,11 +2086,19 @@ def read_iers_bulletin_a(fileID):
# TAI time is ahead of GPS by 19 seconds
TAI_GPS = 19.0
# calculate calendar dates from Modified Julian days
- Y,M,D,h,m,s = convert_julian(MJD[:valid] + _jd_mjd, format='tuple')
+ Y, M, D, h, m, s = convert_julian(MJD[:valid] + _jd_mjd, format="tuple")
# calculate GPS Time (seconds since 1980-01-06T00:00:00)
# by converting the Modified Julian days (days since 1858-11-17T00:00:00)
- GPS_Time = convert_delta_time(MJD[:valid]*8.64e4, epoch1=_mjd_epoch,
- epoch2=_gps_epoch, scale=1.0) + TAI_UTC - TAI_GPS
+ GPS_Time = (
+ convert_delta_time(
+ MJD[:valid] * 8.64e4,
+ epoch1=_mjd_epoch,
+ epoch2=_gps_epoch,
+ scale=1.0,
+ )
+ + TAI_UTC
+ - TAI_GPS
+ )
# number of leap seconds between GPS and UTC
# this finds the daily correction for weeks with leap seconds
GPS_UTC = count_leap_seconds(GPS_Time)
@@ -1998,14 +2107,13 @@ def read_iers_bulletin_a(fileID):
DELTAT = TT_TAI + TAI_GPS + GPS_UTC - UT1_UTC[:valid]
# return dates and delta times
- return (Y,M,D,DELTAT)
+ return (Y, M, D, DELTAT)
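
# A usage sketch for read_iers_bulletin_a, mirroring the call in
# append_delta_time above with the locally cached ser7.dat file:
import timescale.time
import timescale.utilities

bulletin = timescale.utilities.get_data_path(["data", "ser7.dat"])
with bulletin.open(mode="rb") as fileID:
    YY, MM, DD, DELTAT = timescale.time.read_iers_bulletin_a(fileID)
for Y, M, D, T in zip(YY, MM, DD, DELTAT):
    print(f"{Y:4.0f}-{M:02.0f}-{D:02.0f}: {T:7.4f}")
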
+
# PURPOSE: connects to servers and downloads the latest Bulletin-A file
def update_bulletin_a(
- timeout: int | None = 20,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ timeout: int | None = 20, verbose: bool = False, mode: oct = 0o775
+):
"""
Connects to IERS Rapid Service/Prediction Center (RS/PC) and
downloads the latest Bulletin-A file
@@ -2026,29 +2134,36 @@ def update_bulletin_a(
permissions mode of output file
"""
# local version of file
- LOCAL = timescale.utilities.get_data_path(['data','ser7.dat'])
+ LOCAL = timescale.utilities.get_data_path(["data", "ser7.dat"])
HASH = timescale.utilities.get_hash(LOCAL)
# try downloading from IERS Rapid Service/Prediction Center (RS/PC)
- REMOTE = ['https://maia.usno.navy.mil','ser7','ser7.dat']
+ REMOTE = ["https://maia.usno.navy.mil", "ser7", "ser7.dat"]
try:
- timescale.utilities.from_http(REMOTE, timeout=timeout, local=LOCAL,
- hash=HASH, verbose=verbose, mode=mode)
+ timescale.utilities.from_http(
+ REMOTE,
+ timeout=timeout,
+ local=LOCAL,
+ hash=HASH,
+ verbose=verbose,
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
else:
return
+
# PURPOSE: connects to servers and downloads delta time files
def pull_deltat_file(
- FILE: str,
- username: str | None = None,
- password: str | None = None,
- timeout: int | None = 20,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ FILE: str,
+ username: str | None = None,
+ password: str | None = None,
+ timeout: int | None = 20,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
Connects to servers and downloads delta time files
@@ -2082,18 +2197,20 @@ def pull_deltat_file(
Delta times are the difference between universal time and dynamical time
"""
# local version of file
- LOCAL = timescale.utilities.get_data_path(['data',FILE])
+ LOCAL = timescale.utilities.get_data_path(["data", FILE])
HASH = timescale.utilities.get_hash(LOCAL)
# try downloading from US Naval Oceanography Portal
- HOST = ['http://maia.usno.navy.mil','ser7',FILE]
+ HOST = ["http://maia.usno.navy.mil", "ser7", FILE]
try:
- timescale.utilities.from_http(HOST,
+ timescale.utilities.from_http(
+ HOST,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode)
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
@@ -2109,12 +2226,14 @@ def pull_deltat_file(
for HOST in server:
try:
timescale.utilities.check_ftp_connection(HOST[0])
- timescale.utilities.from_ftp(HOST,
+ timescale.utilities.from_ftp(
+ HOST,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode)
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
@@ -2123,19 +2242,20 @@ def pull_deltat_file(
# try downloading from NASA Crustal Dynamics Data Information System
# using NASA Earthdata credentials stored in netrc file
- HOST = ['https://cddis.nasa.gov','archive','products','iers',FILE]
+ HOST = ["https://cddis.nasa.gov", "archive", "products", "iers", FILE]
try:
- timescale.utilities.from_cddis(HOST,
+ timescale.utilities.from_cddis(
+ HOST,
username=username,
password=password,
timeout=timeout,
local=LOCAL,
hash=HASH,
verbose=verbose,
- mode=mode)
+ mode=mode,
+ )
except Exception as exc:
logging.debug(traceback.format_exc())
pass
else:
return
-
diff --git a/timescale/utilities.py b/timescale/utilities.py
index 6f08908..8c612e2 100644
--- a/timescale/utilities.py
+++ b/timescale/utilities.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-u"""
+"""
utilities.py
Written by Tyler Sutterley (08/2024)
Download and management utilities for syncing time and auxiliary files
@@ -7,6 +7,8 @@
PYTHON DEPENDENCIES:
lxml: processing XML and HTML in Python
https://pypi.python.org/pypi/lxml
+ dateutil: powerful extensions to datetime
+ https://dateutil.readthedocs.io/en/stable/
UPDATE HISTORY:
Updated 08/2024: generalize hash function to use any available algorithm
@@ -40,6 +42,7 @@
Updated 08/2020: add GSFC CDDIS opener, login and download functions
Written 08/2020
"""
+
from __future__ import print_function, division, annotations
import sys
@@ -65,6 +68,7 @@
import lxml.etree
import calendar, time
import dateutil.parser
+
if sys.version_info[0] == 2:
from urllib import quote_plus
from cookielib import CookieJar
@@ -74,6 +78,7 @@
from http.cookiejar import CookieJar
import urllib.request as urllib2
+
# PURPOSE: get absolute path within a package from a relative path
def get_data_path(relpath: list | str | pathlib.Path):
"""
@@ -93,9 +98,11 @@ def get_data_path(relpath: list | str | pathlib.Path):
elif isinstance(relpath, (str, pathlib.Path)):
return filepath.joinpath(relpath)
+
class reify(object):
"""Class decorator that puts the result of the method it
decorates into the instance"""
+
def __init__(self, wrapped):
self.wrapped = wrapped
self.__name__ = wrapped.__name__
@@ -108,6 +115,7 @@ def __get__(self, inst, objtype=None):
setattr(inst, self.wrapped.__name__, val)
return val
+
# PURPOSE: platform independent file opener
def file_opener(filename: str | pathlib.Path):
"""
@@ -119,18 +127,16 @@ def file_opener(filename: str | pathlib.Path):
path to file
"""
filename = pathlib.Path(filename).expanduser()
- if (sys.platform == "win32"):
+ if sys.platform == "win32":
os.startfile(filename, "explore")
- elif (sys.platform == "darwin"):
+ elif sys.platform == "darwin":
subprocess.call(["open", filename])
else:
subprocess.call(["xdg-open", filename])
+
# PURPOSE: get the hash value of a file
-def get_hash(
- local: str | io.IOBase | pathlib.Path,
- algorithm: str = 'md5'
- ):
+def get_hash(local: str | io.IOBase | pathlib.Path, algorithm: str = "md5"):
"""
Get the hash value from a local file or ``BytesIO`` object
@@ -147,28 +153,26 @@ def get_hash(
if algorithm in hashlib.algorithms_available:
return hashlib.new(algorithm, local.getvalue()).hexdigest()
else:
- raise ValueError(f'Invalid hashing algorithm: {algorithm}')
+ raise ValueError(f"Invalid hashing algorithm: {algorithm}")
elif isinstance(local, (str, pathlib.Path)):
# generate checksum hash for local file
local = pathlib.Path(local).expanduser()
# if file currently doesn't exist, return empty string
if not local.exists():
- return ''
+ return ""
# open the local_file in binary read mode
- with local.open(mode='rb') as local_buffer:
+ with local.open(mode="rb") as local_buffer:
# generate checksum hash for a given type
if algorithm in hashlib.algorithms_available:
return hashlib.new(algorithm, local_buffer.read()).hexdigest()
else:
- raise ValueError(f'Invalid hashing algorithm: {algorithm}')
+ raise ValueError(f"Invalid hashing algorithm: {algorithm}")
else:
- return ''
+ return ""
+
# PURPOSE: get the git hash value
-def get_git_revision_hash(
- refname: str = 'HEAD',
- short: bool = False
- ):
+def get_git_revision_hash(refname: str = "HEAD", short: bool = False):
"""
Get the ``git`` hash value for a particular reference
@@ -182,28 +186,29 @@ def get_git_revision_hash(
# get path to .git directory from current file path
filename = inspect.getframeinfo(inspect.currentframe()).filename
basepath = pathlib.Path(filename).absolute().parent.parent
- gitpath = basepath.joinpath('.git')
+ gitpath = basepath.joinpath(".git")
# build command
- cmd = ['git', f'--git-dir={gitpath}', 'rev-parse']
- cmd.append('--short') if short else None
+ cmd = ["git", f"--git-dir={gitpath}", "rev-parse"]
+ cmd.append("--short") if short else None
cmd.append(refname)
# get output
with warnings.catch_warnings():
- return str(subprocess.check_output(cmd), encoding='utf8').strip()
+ return str(subprocess.check_output(cmd), encoding="utf8").strip()
+
# PURPOSE: get the current git status
def get_git_status():
- """Get the status of a ``git`` repository as a boolean value
- """
+ """Get the status of a ``git`` repository as a boolean value"""
# get path to .git directory from current file path
filename = inspect.getframeinfo(inspect.currentframe()).filename
basepath = pathlib.Path(filename).absolute().parent.parent
- gitpath = basepath.joinpath('.git')
+ gitpath = basepath.joinpath(".git")
# build command
- cmd = ['git', f'--git-dir={gitpath}', 'status', '--porcelain']
+ cmd = ["git", f"--git-dir={gitpath}", "status", "--porcelain"]
with warnings.catch_warnings():
return bool(subprocess.check_output(cmd))
+
# PURPOSE: recursively split a url path
def url_split(s: str):
"""
@@ -215,12 +220,13 @@ def url_split(s: str):
url string
"""
head, tail = posixpath.split(s)
- if head in ('http:','https:','ftp:','s3:'):
- return s,
- elif head in ('', posixpath.sep):
- return tail,
+ if head in ("http:", "https:", "ftp:", "s3:"):
+ return (s,)
+ elif head in ("", posixpath.sep):
+ return (tail,)
return url_split(head) + (tail,)
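
# A usage sketch for url_split, which breaks a URL into the host and
# path pieces that the download helpers re-join with posixpath.join:
import timescale.utilities

parts = timescale.utilities.url_split("https://cddis.nasa.gov/archive/products/iers")
print(parts)  # ('https://cddis.nasa.gov', 'archive', 'products', 'iers')
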
+
# PURPOSE: convert file lines to arguments
def convert_arg_line_to_args(arg_line):
"""
@@ -232,11 +238,12 @@ def convert_arg_line_to_args(arg_line):
line string containing a single argument and/or comments
"""
# remove commented lines and after argument comments
- for arg in re.sub(r'\#(.*?)$',r'',arg_line).split():
+ for arg in re.sub(r"\#(.*?)$", r"", arg_line).split():
if not arg.strip():
continue
yield arg
+
# PURPOSE: build a logging instance with a specified name
def build_logger(name: str, **kwargs):
"""
@@ -256,24 +263,25 @@ def build_logger(name: str, **kwargs):
specified stream to initialize StreamHandler
"""
# set default arguments
- kwargs.setdefault('format', '%(levelname)s:%(name)s:%(message)s')
- kwargs.setdefault('level', logging.CRITICAL)
- kwargs.setdefault('propagate',False)
- kwargs.setdefault('stream',None)
+ kwargs.setdefault("format", "%(levelname)s:%(name)s:%(message)s")
+ kwargs.setdefault("level", logging.CRITICAL)
+ kwargs.setdefault("propagate", False)
+ kwargs.setdefault("stream", None)
# build logger
logger = logging.getLogger(name)
- logger.setLevel(kwargs['level'])
- logger.propagate = kwargs['propagate']
+ logger.setLevel(kwargs["level"])
+ logger.propagate = kwargs["propagate"]
# create and add handlers to logger
if not logger.handlers:
# create handler for logger
- handler = logging.StreamHandler(stream=kwargs['stream'])
- formatter = logging.Formatter(kwargs['format'])
+ handler = logging.StreamHandler(stream=kwargs["stream"])
+ formatter = logging.Formatter(kwargs["format"])
handler.setFormatter(formatter)
# add handler to logger
logger.addHandler(handler)
return logger
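
# A usage sketch for build_logger: handlers are only attached on the
# first call for a given name, so repeated calls reuse the same logger.
import logging
import timescale.utilities

logger = timescale.utilities.build_logger("timescale", level=logging.INFO)
logger.info("downloading Bulletin-A files")
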
+
# PURPOSE: convert Roman numerals to (Arabic) integers
def roman_to_int(roman: str):
"""
@@ -285,24 +293,30 @@ def roman_to_int(roman: str):
Roman numeral string
"""
# mapping between Roman and Arabic numerals
- roman_map = {'i':1, 'v':5, 'x':10, 'l':50, 'c':100, 'd':500, 'm':1000}
+ roman_map = {
+ "i": 1,
+ "v": 5,
+ "x": 10,
+ "l": 50,
+ "c": 100,
+ "d": 500,
+ "m": 1000,
+ }
# verify case
roman = roman.lower()
output = 0
# iterate through roman numerals in string and calculate total
- for i,s in enumerate(roman):
- if (i > 0) and (roman_map[s] > roman_map[roman[i-1]]):
- output += roman_map[s] - 2*roman_map[roman[i-1]]
+ for i, s in enumerate(roman):
+ if (i > 0) and (roman_map[s] > roman_map[roman[i - 1]]):
+ output += roman_map[s] - 2 * roman_map[roman[i - 1]]
else:
output += roman_map[s]
# return the integer value
return output
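
# A usage sketch for roman_to_int, which handles subtractive notation by
# subtracting twice the previously added numeral; cddis_delta_time above
# uses it to sort the volume_* subdirectories:
import timescale.utilities

print(timescale.utilities.roman_to_int("xiv"))    # -> 14
print(timescale.utilities.roman_to_int("MCMXC"))  # -> 1990
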
+
# PURPOSE: returns the Unix timestamp value for a formatted date string
-def get_unix_time(
- time_string: str,
- format: str = '%Y-%m-%d %H:%M:%S'
- ):
+def get_unix_time(time_string: str, format: str = "%Y-%m-%d %H:%M:%S"):
"""
Get the Unix timestamp value for a formatted date string
@@ -327,6 +341,7 @@ def get_unix_time(
else:
return parsed_time.timestamp()
+
# PURPOSE: output a time string in isoformat
def isoformat(time_string: str):
"""
@@ -345,6 +360,7 @@ def isoformat(time_string: str):
else:
return parsed_time.isoformat()
+
# PURPOSE: rounds a number to an even number less than or equal to the original
def even(value: float):
"""
@@ -355,7 +371,8 @@ def even(value: float):
value: float
number to be rounded
"""
- return 2*int(value//2)
+ return 2 * int(value // 2)
+
# PURPOSE: rounds a number upward to its nearest integer
def ceil(value: float):
@@ -367,15 +384,16 @@ def ceil(value: float):
value: float
number to be rounded upward
"""
- return -int(-value//1)
+ return -int(-value // 1)
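
# A usage sketch for even() and ceil(): even() rounds down to an even
# integer; ceil() rounds upward via floor division of the negated value.
import timescale.utilities

print(timescale.utilities.even(7.9))   # -> 6
print(timescale.utilities.ceil(3.2))   # -> 4
print(timescale.utilities.ceil(-3.2))  # -> -3
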
+
# PURPOSE: make a copy of a file with all system information
def copy(
- source: str | pathlib.Path,
- destination: str | pathlib.Path,
- move: bool = False,
- **kwargs
- ):
+ source: str | pathlib.Path,
+ destination: str | pathlib.Path,
+ move: bool = False,
+ **kwargs,
+):
"""
Copy or move a file with all system information
@@ -391,19 +409,18 @@ def copy(
source = pathlib.Path(source).expanduser().absolute()
destination = pathlib.Path(destination).expanduser().absolute()
# log source and destination
- logging.info(f'{str(source)} -->\n\t{str(destination)}')
+ logging.info(f"{str(source)} -->\n\t{str(destination)}")
shutil.copyfile(source, destination)
shutil.copystat(source, destination)
# remove the original file if moving
if move:
source.unlink()
+
# PURPOSE: check ftp connection
def check_ftp_connection(
- HOST: str,
- username: str | None = None,
- password: str | None = None
- ):
+ HOST: str, username: str | None = None, password: str | None = None
+):
"""
Check internet connection with an ftp host
@@ -422,22 +439,23 @@ def check_ftp_connection(
f.login(username, password)
f.voidcmd("NOOP")
except IOError:
- raise RuntimeError('Check internet connection')
+ raise RuntimeError("Check internet connection")
except ftplib.error_perm:
- raise RuntimeError('Check login credentials')
+ raise RuntimeError("Check login credentials")
else:
return True
+
# PURPOSE: list a directory on an ftp host
def ftp_list(
- HOST: str | list,
- username: str | None = None,
- password: str | None = None,
- timeout: int | None = None,
- basename: bool = False,
- pattern: str | None = None,
- sort: bool = False
- ):
+ HOST: str | list,
+ username: str | None = None,
+ password: str | None = None,
+ timeout: int | None = None,
+ basename: bool = False,
+ pattern: str | None = None,
+ sort: bool = False,
+):
"""
List a directory on an ftp host
@@ -470,20 +488,20 @@ def ftp_list(
HOST = url_split(HOST)
# try to connect to ftp host
try:
- ftp = ftplib.FTP(HOST[0],timeout=timeout)
- except (socket.gaierror,IOError):
- raise RuntimeError(f'Unable to connect to {HOST[0]}')
+ ftp = ftplib.FTP(HOST[0], timeout=timeout)
+ except (socket.gaierror, IOError):
+ raise RuntimeError(f"Unable to connect to {HOST[0]}")
else:
- ftp.login(username,password)
+ ftp.login(username, password)
# list remote path
output = ftp.nlst(posixpath.join(*HOST[1:]))
# get last modified date of ftp files and convert into unix time
- mtimes = [None]*len(output)
+ mtimes = [None] * len(output)
# iterate over each file in the list and get the modification time
- for i,f in enumerate(output):
+ for i, f in enumerate(output):
try:
# try sending modification time command
- mdtm = ftp.sendcmd(f'MDTM {f}')
+ mdtm = ftp.sendcmd(f"MDTM {f}")
except ftplib.error_perm:
# directories will return with an error
pass
@@ -495,13 +513,13 @@ def ftp_list(
output = [posixpath.basename(i) for i in output]
# reduce using regular expression pattern
if pattern:
- i = [i for i,f in enumerate(output) if re.search(pattern,f)]
+ i = [i for i, f in enumerate(output) if re.search(pattern, f)]
# reduce list of listed items and last modified times
output = [output[indice] for indice in i]
mtimes = [mtimes[indice] for indice in i]
# sort the list
if sort:
- i = [i for i,j in sorted(enumerate(output), key=lambda i: i[1])]
+ i = [i for i, j in sorted(enumerate(output), key=lambda i: i[1])]
# sort list of listed items and last modified times
output = [output[indice] for indice in i]
mtimes = [mtimes[indice] for indice in i]
@@ -510,19 +528,20 @@ def ftp_list(
# return the list of items and last modified times
return (output, mtimes)
+
# PURPOSE: download a file from an ftp host
def from_ftp(
- HOST: str | list,
- username: str | None = None,
- password: str | None = None,
- timeout: int | None = None,
- local: str | pathlib.Path | None = None,
- hash: str = '',
- chunk: int = 8192,
- verbose: bool = False,
- fid=sys.stdout,
- mode: oct = 0o775
- ):
+ HOST: str | list,
+ username: str | None = None,
+ password: str | None = None,
+ timeout: int | None = None,
+ local: str | pathlib.Path | None = None,
+ hash: str = "",
+ chunk: int = 8192,
+ verbose: bool = False,
+ fid=sys.stdout,
+ mode: oct = 0o775,
+):
"""
Download a file from an ftp host
@@ -564,23 +583,24 @@ def from_ftp(
try:
# try to connect to ftp host
ftp = ftplib.FTP(HOST[0], timeout=timeout)
- except (socket.gaierror,IOError):
- raise RuntimeError(f'Unable to connect to {HOST[0]}')
+ except (socket.gaierror, IOError):
+ raise RuntimeError(f"Unable to connect to {HOST[0]}")
else:
- ftp.login(username,password)
+ ftp.login(username, password)
# remote path
ftp_remote_path = posixpath.join(*HOST[1:])
# copy remote file contents to bytesIO object
remote_buffer = io.BytesIO()
- ftp.retrbinary(f'RETR {ftp_remote_path}',
- remote_buffer.write, blocksize=chunk)
+ ftp.retrbinary(
+ f"RETR {ftp_remote_path}", remote_buffer.write, blocksize=chunk
+ )
remote_buffer.seek(0)
# save file basename with bytesIO object
remote_buffer.filename = HOST[-1]
# generate checksum hash for remote file
remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest()
# get last modified date of remote file and convert into unix time
- mdtm = ftp.sendcmd(f'MDTM {ftp_remote_path}')
+ mdtm = ftp.sendcmd(f"MDTM {ftp_remote_path}")
remote_mtime = get_unix_time(mdtm[4:], format="%Y%m%d%H%M%S")
# compare checksums
if local and (hash != remote_hash):
@@ -590,10 +610,10 @@ def from_ftp(
local.parent.mkdir(mode=mode, parents=True, exist_ok=True)
# print file information
args = (posixpath.join(*HOST), str(local))
- logging.info('{0} -->\n\t{1}'.format(*args))
+ logging.info("{0} -->\n\t{1}".format(*args))
# store bytes to file using chunked transfer encoding
remote_buffer.seek(0)
- with local.open(mode='wb') as f:
+ with local.open(mode="wb") as f:
shutil.copyfileobj(remote_buffer, f, chunk)
# change the permissions mode
local.chmod(mode)
@@ -605,25 +625,25 @@ def from_ftp(
remote_buffer.seek(0)
return remote_buffer
+
def _create_default_ssl_context() -> ssl.SSLContext:
- """Creates the default SSL context
- """
+ """Creates the default SSL context"""
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
_set_ssl_context_options(context)
context.options |= ssl.OP_NO_COMPRESSION
return context
+
def _create_ssl_context_no_verify() -> ssl.SSLContext:
- """Creates an SSL context for unverified connections
- """
+ """Creates an SSL context for unverified connections"""
context = _create_default_ssl_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
return context
+
def _set_ssl_context_options(context: ssl.SSLContext) -> None:
- """Sets the default options for the SSL context
- """
+ """Sets the default options for the SSL context"""
if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7):
context.minimum_version = ssl.TLSVersion.TLSv1_2
else:
@@ -632,14 +652,16 @@ def _set_ssl_context_options(context: ssl.SSLContext) -> None:
context.options |= ssl.OP_NO_TLSv1
context.options |= ssl.OP_NO_TLSv1_1
+
# default ssl context
_default_ssl_context = _create_ssl_context_no_verify()
+
# PURPOSE: check internet connection
def check_connection(
- HOST: str,
- context: ssl.SSLContext = _default_ssl_context,
- ):
+ HOST: str,
+ context: ssl.SSLContext = _default_ssl_context,
+):
"""
Check internet connection with http host
@@ -658,20 +680,21 @@ def check_connection(
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- raise RuntimeError('Check internet connection') from exc
+ raise RuntimeError("Check internet connection") from exc
else:
return True
+
# PURPOSE: list a directory on an Apache http Server
def http_list(
- HOST: str | list,
- timeout: int | None = None,
- context: ssl.SSLContext = _default_ssl_context,
- parser = lxml.etree.HTMLParser(),
- format: str = '%Y-%m-%d %H:%M',
- pattern: str = '',
- sort: bool = False
- ):
+ HOST: str | list,
+ timeout: int | None = None,
+ context: ssl.SSLContext = _default_ssl_context,
+ parser=lxml.etree.HTMLParser(),
+ format: str = "%Y-%m-%d %H:%M",
+ pattern: str = "",
+ sort: bool = False,
+):
"""
List a directory on an Apache http Server
@@ -712,42 +735,45 @@ def http_list(
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- msg = 'List error from {0}'.format(posixpath.join(*HOST))
+ msg = "List error from {0}".format(posixpath.join(*HOST))
raise Exception(msg) from exc
else:
# read and parse request for files (column names and modified times)
tree = lxml.etree.parse(response, parser)
- colnames = tree.xpath('//tr/td[not(@*)]//a/@href')
+ colnames = tree.xpath("//tr/td[not(@*)]//a/@href")
# get the Unix timestamp value for a modification time
- collastmod = [get_unix_time(i,format=format)
- for i in tree.xpath('//tr/td[@align="right"][1]/text()')]
+ collastmod = [
+ get_unix_time(i, format=format)
+ for i in tree.xpath('//tr/td[@align="right"][1]/text()')
+ ]
# reduce using regular expression pattern
if pattern:
- i = [i for i,f in enumerate(colnames) if re.search(pattern, f)]
+ i = [i for i, f in enumerate(colnames) if re.search(pattern, f)]
# reduce list of column names and last modified times
colnames = [colnames[indice] for indice in i]
collastmod = [collastmod[indice] for indice in i]
# sort the list
if sort:
- i = [i for i,j in sorted(enumerate(colnames), key=lambda i: i[1])]
+ i = [i for i, j in sorted(enumerate(colnames), key=lambda i: i[1])]
# sort list of column names and last modified times
colnames = [colnames[indice] for indice in i]
collastmod = [collastmod[indice] for indice in i]
# return the list of column names and last modified times
return (colnames, collastmod)
+
# PURPOSE: download a file from a http host
def from_http(
- HOST: str | list,
- timeout: int | None = None,
- context: ssl.SSLContext = _default_ssl_context,
- local: str | pathlib.Path | None = None,
- hash: str = '',
- chunk: int = 16384,
- verbose: bool = False,
- fid = sys.stdout,
- mode: oct = 0o775
- ):
+ HOST: str | list,
+ timeout: int | None = None,
+ context: ssl.SSLContext = _default_ssl_context,
+ local: str | pathlib.Path | None = None,
+ hash: str = "",
+ chunk: int = 16384,
+ verbose: bool = False,
+ fid=sys.stdout,
+ mode: oct = 0o775,
+):
"""
Download a file from a http host
@@ -789,7 +815,7 @@ def from_http(
request = urllib2.Request(posixpath.join(*HOST))
response = urllib2.urlopen(request, timeout=timeout, context=context)
except:
- raise Exception('Download error from {0}'.format(posixpath.join(*HOST)))
+ raise Exception("Download error from {0}".format(posixpath.join(*HOST)))
else:
# copy remote file contents to bytesIO object
remote_buffer = io.BytesIO()
@@ -807,10 +833,10 @@ def from_http(
local.parent.mkdir(mode=mode, parents=True, exist_ok=True)
# print file information
args = (posixpath.join(*HOST), str(local))
- logging.info('{0} -->\n\t{1}'.format(*args))
+ logging.info("{0} -->\n\t{1}".format(*args))
# store bytes to file using chunked transfer encoding
remote_buffer.seek(0)
- with local.open(mode='wb') as f:
+ with local.open(mode="wb") as f:
shutil.copyfileobj(remote_buffer, f, chunk)
# change the permissions mode
local.chmod(mode)
@@ -818,56 +844,58 @@ def from_http(
remote_buffer.seek(0)
return remote_buffer
+
# NASA on-prem DAAC providers
_daac_providers = {
- 'gesdisc': 'GES_DISC',
- 'ghrcdaac': 'GHRC_DAAC',
- 'lpdaac': 'LPDAAC_ECS',
- 'nsidc': 'NSIDC_ECS',
- 'ornldaac': 'ORNL_DAAC',
- 'podaac': 'PODAAC',
+ "gesdisc": "GES_DISC",
+ "ghrcdaac": "GHRC_DAAC",
+ "lpdaac": "LPDAAC_ECS",
+ "nsidc": "NSIDC_ECS",
+ "ornldaac": "ORNL_DAAC",
+ "podaac": "PODAAC",
}
# NASA Cumulus AWS providers
_s3_providers = {
- 'gesdisc': 'GES_DISC',
- 'ghrcdaac': 'GHRC_DAAC',
- 'lpdaac': 'LPCLOUD',
- 'nsidc': 'NSIDC_CPRD',
- 'ornldaac': 'ORNL_CLOUD',
- 'podaac': 'POCLOUD',
+ "gesdisc": "GES_DISC",
+ "ghrcdaac": "GHRC_DAAC",
+ "lpdaac": "LPCLOUD",
+ "nsidc": "NSIDC_CPRD",
+ "ornldaac": "ORNL_CLOUD",
+ "podaac": "POCLOUD",
}
# NASA Cumulus AWS S3 credential endpoints
_s3_endpoints = {
- 'gesdisc': 'https://data.gesdisc.earthdata.nasa.gov/s3credentials',
- 'ghrcdaac': 'https://data.ghrc.earthdata.nasa.gov/s3credentials',
- 'lpdaac': 'https://data.lpdaac.earthdatacloud.nasa.gov/s3credentials',
- 'nsidc': 'https://data.nsidc.earthdatacloud.nasa.gov/s3credentials',
- 'ornldaac': 'https://data.ornldaac.earthdata.nasa.gov/s3credentials',
- 'podaac': 'https://archive.podaac.earthdata.nasa.gov/s3credentials'
+ "gesdisc": "https://data.gesdisc.earthdata.nasa.gov/s3credentials",
+ "ghrcdaac": "https://data.ghrc.earthdata.nasa.gov/s3credentials",
+ "lpdaac": "https://data.lpdaac.earthdatacloud.nasa.gov/s3credentials",
+ "nsidc": "https://data.nsidc.earthdatacloud.nasa.gov/s3credentials",
+ "ornldaac": "https://data.ornldaac.earthdata.nasa.gov/s3credentials",
+ "podaac": "https://archive.podaac.earthdata.nasa.gov/s3credentials",
}
# NASA Cumulus AWS S3 buckets
_s3_buckets = {
- 'gesdisc': 'gesdisc-cumulus-prod-protected',
- 'ghrcdaac': 'ghrc-cumulus-dev',
- 'lpdaac': 'lp-prod-protected',
- 'nsidc': 'nsidc-cumulus-prod-protected',
- 'ornldaac': 'ornl-cumulus-prod-protected',
- 'podaac': 'podaac-ops-cumulus-protected'
+ "gesdisc": "gesdisc-cumulus-prod-protected",
+ "ghrcdaac": "ghrc-cumulus-dev",
+ "lpdaac": "lp-prod-protected",
+ "nsidc": "nsidc-cumulus-prod-protected",
+ "ornldaac": "ornl-cumulus-prod-protected",
+ "podaac": "podaac-ops-cumulus-protected",
}
+
# PURPOSE: attempt to build an opener with netrc
def attempt_login(
- urs: str,
- context=_default_ssl_context,
- password_manager: bool = True,
- get_ca_certs: bool = True,
- redirect: bool = True,
- authorization_header: bool = False,
- **kwargs
- ):
+ urs: str,
+ context=_default_ssl_context,
+ password_manager: bool = True,
+ get_ca_certs: bool = True,
+ redirect: bool = True,
+ authorization_header: bool = False,
+ **kwargs,
+):
"""
attempt to build a urllib opener for NASA Earthdata
@@ -900,35 +928,38 @@ def attempt_login(
OpenerDirector instance
"""
# set default keyword arguments
- kwargs.setdefault('username', os.environ.get('EARTHDATA_USERNAME'))
- kwargs.setdefault('password', os.environ.get('EARTHDATA_PASSWORD'))
- kwargs.setdefault('retries', 5)
- kwargs.setdefault('netrc', os.path.expanduser('~/.netrc'))
+ kwargs.setdefault("username", os.environ.get("EARTHDATA_USERNAME"))
+ kwargs.setdefault("password", os.environ.get("EARTHDATA_PASSWORD"))
+ kwargs.setdefault("retries", 5)
+ kwargs.setdefault("netrc", os.path.expanduser("~/.netrc"))
try:
# only necessary on jupyterhub
- os.chmod(kwargs['netrc'], 0o600)
+ os.chmod(kwargs["netrc"], 0o600)
# try retrieving credentials from netrc
- username, _, password = netrc.netrc(kwargs['netrc']).authenticators(urs)
+ username, _, password = netrc.netrc(kwargs["netrc"]).authenticators(urs)
except Exception as exc:
# try retrieving credentials from environmental variables
- username, password = (kwargs['username'], kwargs['password'])
+ username, password = (kwargs["username"], kwargs["password"])
pass
# if username or password are not available
if not username:
- username = builtins.input(f'Username for {urs}: ')
+ username = builtins.input(f"Username for {urs}: ")
if not password:
- prompt = f'Password for {username}@{urs}: '
+ prompt = f"Password for {username}@{urs}: "
password = getpass.getpass(prompt=prompt)
# for each retry
- for retry in range(kwargs['retries']):
+ for retry in range(kwargs["retries"]):
# build an opener for urs with credentials
- opener = build_opener(username, password,
+ opener = build_opener(
+ username,
+ password,
context=context,
password_manager=password_manager,
get_ca_certs=get_ca_certs,
redirect=redirect,
authorization_header=authorization_header,
- urs=urs)
+ urs=urs,
+ )
    # try logging in by checking credentials
try:
check_credentials()
@@ -937,22 +968,23 @@ def attempt_login(
else:
return opener
# reattempt login
- username = builtins.input(f'Username for {urs}: ')
+ username = builtins.input(f"Username for {urs}: ")
password = getpass.getpass(prompt=prompt)
# reached end of available retries
- raise RuntimeError('End of Retries: Check NASA Earthdata credentials')
+ raise RuntimeError("End of Retries: Check NASA Earthdata credentials")
+
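As the body above shows, `attempt_login` resolves credentials from `~/.netrc`, then the `EARTHDATA_USERNAME` and `EARTHDATA_PASSWORD` environment variables, then interactive prompts, retrying up to five times before raising `RuntimeError`. A minimal sketch, assuming the `timescale.utilities` module path:

    # build and install an authenticated opener for NASA Earthdata;
    # falls back to interactive prompts if no credentials are found
    import timescale.utilities as utilities

    opener = utilities.attempt_login("urs.earthdata.nasa.gov")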
# PURPOSE: "login" to NASA Earthdata with supplied credentials
def build_opener(
- username: str,
- password: str,
- context=_default_ssl_context,
- password_manager: bool = True,
- get_ca_certs: bool = True,
- redirect: bool = True,
- authorization_header: bool = False,
- urs: str = 'https://urs.earthdata.nasa.gov'
- ):
+ username: str,
+ password: str,
+ context=_default_ssl_context,
+ password_manager: bool = True,
+ get_ca_certs: bool = True,
+ redirect: bool = True,
+ authorization_header: bool = False,
+ urs: str = "https://urs.earthdata.nasa.gov",
+):
"""
Build ``urllib`` opener for NASA Earthdata with supplied credentials
@@ -1005,7 +1037,7 @@ def build_opener(
# Encode username/password for request authorization headers
# add Authorization header to opener
if authorization_header:
- b64 = base64.b64encode(f'{username}:{password}'.encode())
+ b64 = base64.b64encode(f"{username}:{password}".encode())
opener.addheaders = [("Authorization", f"Basic {b64.decode()}")]
# Now all calls to urllib2.urlopen use our opener.
urllib2.install_opener(opener)
@@ -1014,14 +1046,15 @@ def build_opener(
# HTTPPasswordMgrWithDefaultRealm will be confused.
return opener
+
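When credentials are already known, `build_opener` can be called directly; note that it also installs the opener globally, so later `urllib` requests authenticate implicitly. A sketch under the same module-path assumption:

    # build an opener with explicit credentials; the Authorization-header
    # variant is the one used by the token helpers below
    import os
    import timescale.utilities as utilities

    opener = utilities.build_opener(
        os.environ["EARTHDATA_USERNAME"],
        os.environ["EARTHDATA_PASSWORD"],
        authorization_header=True,
    )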
# PURPOSE: generate a NASA Earthdata user token
def get_token(
- HOST: str = 'https://urs.earthdata.nasa.gov/api/users/token',
- username: str | None = None,
- password: str | None = None,
- build: bool = True,
- urs: str = 'urs.earthdata.nasa.gov',
- ):
+ HOST: str = "https://urs.earthdata.nasa.gov/api/users/token",
+ username: str | None = None,
+ password: str | None = None,
+ build: bool = True,
+ urs: str = "urs.earthdata.nasa.gov",
+):
"""
Generate a NASA Earthdata User Token
@@ -1047,34 +1080,37 @@ def get_token(
"""
# attempt to build urllib2 opener and check credentials
if build:
- attempt_login(urs,
+ attempt_login(
+ urs,
username=username,
password=password,
password_manager=False,
get_ca_certs=False,
redirect=False,
- authorization_header=True)
+ authorization_header=True,
+ )
# create post response with Earthdata token API
try:
- request = urllib2.Request(HOST, method='POST')
+ request = urllib2.Request(HOST, method="POST")
response = urllib2.urlopen(request)
except urllib2.HTTPError as exc:
logging.debug(exc.code)
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- raise RuntimeError('Check internet connection') from exc
+ raise RuntimeError("Check internet connection") from exc
# read and return JSON response
return json.loads(response.read())
+
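With the default `build=True`, `get_token` handles the login itself before POSTing to the token API and returns the parsed JSON payload. A sketch; the `access_token` key is an assumption about the Earthdata API response:

    # generate a new user token and inspect the response payload
    import timescale.utilities as utilities

    token = utilities.get_token()
    print(token.get("access_token"))  # key assumed from the Earthdata API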
# PURPOSE: list the current NASA Earthdata user tokens
def list_tokens(
- HOST: str = 'https://urs.earthdata.nasa.gov/api/users/tokens',
- username: str | None = None,
- password: str | None = None,
- build: bool = True,
- urs: str = 'urs.earthdata.nasa.gov',
- ):
+ HOST: str = "https://urs.earthdata.nasa.gov/api/users/tokens",
+ username: str | None = None,
+ password: str | None = None,
+ build: bool = True,
+ urs: str = "urs.earthdata.nasa.gov",
+):
"""
    List the NASA Earthdata User Tokens currently associated with the account
@@ -1100,13 +1136,15 @@ def list_tokens(
"""
# attempt to build urllib2 opener and check credentials
if build:
- attempt_login(urs,
+ attempt_login(
+ urs,
username=username,
password=password,
password_manager=False,
get_ca_certs=False,
redirect=False,
- authorization_header=True)
+ authorization_header=True,
+ )
# create get response with Earthdata list tokens API
try:
request = urllib2.Request(HOST)
@@ -1116,19 +1154,20 @@ def list_tokens(
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- raise RuntimeError('Check internet connection') from exc
+ raise RuntimeError("Check internet connection") from exc
# read and return JSON response
return json.loads(response.read())
+
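`list_tokens` mirrors `get_token` with a GET request and returns the parsed JSON list. A sketch under the same assumptions:

    # list the tokens currently associated with the account
    import timescale.utilities as utilities

    tokens = utilities.list_tokens()
    print(f"{len(tokens)} token(s) found")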
# PURPOSE: revoke a NASA Earthdata user token
def revoke_token(
- token: str,
- HOST: str = f'https://urs.earthdata.nasa.gov/api/users/revoke_token',
- username: str | None = None,
- password: str | None = None,
- build: bool = True,
- urs: str = 'urs.earthdata.nasa.gov',
- ):
+ token: str,
+ HOST: str = f"https://urs.earthdata.nasa.gov/api/users/revoke_token",
+ username: str | None = None,
+ password: str | None = None,
+ build: bool = True,
+ urs: str = "urs.earthdata.nasa.gov",
+):
"""
    Revoke a NASA Earthdata User Token
@@ -1151,27 +1190,30 @@ def revoke_token(
"""
# attempt to build urllib2 opener and check credentials
if build:
- attempt_login(urs,
+ attempt_login(
+ urs,
username=username,
password=password,
password_manager=False,
get_ca_certs=False,
redirect=False,
- authorization_header=True)
+ authorization_header=True,
+ )
# full path for NASA Earthdata revoke token API
- url = f'{HOST}?token={token}'
+ url = f"{HOST}?token={token}"
# create post response with Earthdata revoke tokens API
try:
- request = urllib2.Request(url, method='POST')
+ request = urllib2.Request(url, method="POST")
response = urllib2.urlopen(request)
except urllib2.HTTPError as exc:
logging.debug(exc.code)
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- raise RuntimeError('Check internet connection') from exc
+ raise RuntimeError("Check internet connection") from exc
# verbose response
- logging.debug(f'Token Revoked: {token}')
+ logging.debug(f"Token Revoked: {token}")
+
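Combining `list_tokens` and `revoke_token` gives a simple cleanup loop; the first call builds and installs the opener, so the second can pass `build=False`. The `access_token` key is again an assumption about the API payload:

    # revoke every token currently associated with the account
    import timescale.utilities as utilities

    for token in utilities.list_tokens():
        utilities.revoke_token(token["access_token"], build=False)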
# PURPOSE: check that entered NASA Earthdata credentials are valid
def check_credentials():
@@ -1179,7 +1221,7 @@ def check_credentials():
Check that entered NASA Earthdata credentials are valid
"""
try:
- remote_path = posixpath.join('https://cddis.nasa.gov','archive')
+ remote_path = posixpath.join("https://cddis.nasa.gov", "archive")
request = urllib2.Request(url=remote_path)
response = urllib2.urlopen(request, timeout=20)
except urllib2.HTTPError as exc:
@@ -1187,22 +1229,23 @@ def check_credentials():
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- raise RuntimeError('Check internet connection') from exc
+ raise RuntimeError("Check internet connection") from exc
else:
return True
+
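`check_credentials` probes the CDDIS archive with a 20-second timeout and returns `True` on success, raising `RuntimeError` otherwise, so callers can gate transfers on it:

    # verify the installed opener before attempting any downloads
    # (module path assumed)
    import timescale.utilities as utilities

    try:
        utilities.check_credentials()
    except RuntimeError as exc:
        print(f"NASA Earthdata login failed: {exc}")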
# PURPOSE: list a directory on GSFC CDDIS https server
def cddis_list(
- HOST: str | list,
- username: str | None = None,
- password: str | None = None,
- build: bool = True,
- timeout: int | None = None,
- urs: str = 'urs.earthdata.nasa.gov',
- parser=lxml.etree.HTMLParser(),
- pattern: str = '',
- sort: bool = False
- ):
+ HOST: str | list,
+ username: str | None = None,
+ password: str | None = None,
+ build: bool = True,
+ timeout: int | None = None,
+ urs: str = "urs.earthdata.nasa.gov",
+ parser=lxml.etree.HTMLParser(),
+ pattern: str = "",
+ sort: bool = False,
+):
"""
List a directory on GSFC CDDIS archive server
@@ -1236,7 +1279,7 @@ def cddis_list(
"""
# use netrc credentials
if build and not (username or password):
- username,_,password = netrc.netrc().authenticators(urs)
+ username, _, password = netrc.netrc().authenticators(urs)
# build urllib2 opener and check credentials
if build:
# build urllib2 opener with credentials
@@ -1247,56 +1290,65 @@ def cddis_list(
if isinstance(HOST, str):
HOST = url_split(HOST)
# Encode username/password for request authorization headers
- b64 = base64.b64encode(f'{username}:{password}'.encode())
+ b64 = base64.b64encode(f"{username}:{password}".encode())
authorization_header = f"Basic {b64.decode()}"
# try listing from https
try:
# Create and submit request.
request = urllib2.Request(posixpath.join(*HOST))
request.add_header("Authorization", authorization_header)
- tree = lxml.etree.parse(urllib2.urlopen(request, timeout=timeout), parser)
+ tree = lxml.etree.parse(
+ urllib2.urlopen(request, timeout=timeout), parser
+ )
except:
- raise Exception('List error from {0}'.format(posixpath.join(*HOST)))
+ raise Exception("List error from {0}".format(posixpath.join(*HOST)))
else:
# read and parse request for files (column names and modified times)
# find directories
colnames = tree.xpath('//div[@class="archiveDir"]/div/a/text()')
- collastmod = [None]*(len(colnames))
+ collastmod = [None] * (len(colnames))
# find files
colnames.extend(tree.xpath('//div[@class="archiveItem"]/div/a/text()'))
# get the Unix timestamp value for a modification time
- collastmod.extend([get_unix_time(i[:19], format='%Y:%m:%d %H:%M:%S')
- for i in tree.xpath('//div[@class="archiveItem"]/div/span/text()')])
+ collastmod.extend(
+ [
+ get_unix_time(i[:19], format="%Y:%m:%d %H:%M:%S")
+ for i in tree.xpath(
+ '//div[@class="archiveItem"]/div/span/text()'
+ )
+ ]
+ )
# reduce using regular expression pattern
if pattern:
- i = [i for i,f in enumerate(colnames) if re.search(pattern, f)]
+ i = [i for i, f in enumerate(colnames) if re.search(pattern, f)]
# reduce list of column names and last modified times
colnames = [colnames[indice] for indice in i]
collastmod = [collastmod[indice] for indice in i]
# sort the list
if sort:
- i = [i for i,j in sorted(enumerate(colnames), key=lambda i: i[1])]
+ i = [i for i, j in sorted(enumerate(colnames), key=lambda i: i[1])]
# sort list of column names and last modified times
colnames = [colnames[indice] for indice in i]
collastmod = [collastmod[indice] for indice in i]
# return the list of column names and last modified times
return (colnames, collastmod)
+
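`cddis_list` returns parallel lists of entry names and Unix modification times, optionally filtered by a regular expression and sorted. A sketch with an illustrative archive path:

    # list Earth orientation products on CDDIS (path illustrative),
    # keeping only entries whose names match the pattern
    import timescale.utilities as utilities

    names, mtimes = utilities.cddis_list(
        "https://cddis.nasa.gov/archive/products/iers",
        pattern=r"finals",
        sort=True,
    )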
# PURPOSE: download a file from a GSFC CDDIS https server
def from_cddis(
- HOST: str | list,
- username: str | None = None,
- password: str | None = None,
- build: bool = True,
- timeout: int | None = None,
- urs: str = 'urs.earthdata.nasa.gov',
- local: str | pathlib.Path | None = None,
- hash: str = '',
- chunk: int = 16384,
- verbose: bool = False,
- fid=sys.stdout,
- mode: oct = 0o775
- ):
+ HOST: str | list,
+ username: str | None = None,
+ password: str | None = None,
+ build: bool = True,
+ timeout: int | None = None,
+ urs: str = "urs.earthdata.nasa.gov",
+ local: str | pathlib.Path | None = None,
+ hash: str = "",
+ chunk: int = 16384,
+ verbose: bool = False,
+ fid=sys.stdout,
+ mode: oct = 0o775,
+):
"""
Download a file from GSFC CDDIS archive server
@@ -1337,7 +1389,7 @@ def from_cddis(
logging.basicConfig(stream=fid, level=loglevel)
# use netrc credentials
if build and not (username or password):
- username,_,password = netrc.netrc().authenticators(urs)
+ username, _, password = netrc.netrc().authenticators(urs)
# build urllib2 opener and check credentials
if build:
# build urllib2 opener with credentials
@@ -1348,7 +1400,7 @@ def from_cddis(
if isinstance(HOST, str):
HOST = url_split(HOST)
# Encode username/password for request authorization headers
- b64 = base64.b64encode(f'{username}:{password}'.encode())
+ b64 = base64.b64encode(f"{username}:{password}".encode())
authorization_header = f"Basic {b64.decode()}"
# try downloading from https
try:
@@ -1357,7 +1409,7 @@ def from_cddis(
request.add_header("Authorization", authorization_header)
response = urllib2.urlopen(request, timeout=timeout)
except:
- raise Exception('Download error from {0}'.format(posixpath.join(*HOST)))
+ raise Exception("Download error from {0}".format(posixpath.join(*HOST)))
else:
# copy remote file contents to bytesIO object
remote_buffer = io.BytesIO()
@@ -1375,10 +1427,10 @@ def from_cddis(
local.parent.mkdir(mode=mode, parents=True, exist_ok=True)
# print file information
args = (posixpath.join(*HOST), str(local))
- logging.info('{0} -->\n\t{1}'.format(*args))
+ logging.info("{0} -->\n\t{1}".format(*args))
# store bytes to file using chunked transfer encoding
remote_buffer.seek(0)
- with local.open(mode='wb') as f:
+ with local.open(mode="wb") as f:
shutil.copyfileobj(remote_buffer, f, chunk)
# change the permissions mode
local.chmod(mode)
@@ -1386,13 +1438,14 @@ def from_cddis(
remote_buffer.seek(0)
return remote_buffer
+
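`from_cddis` always returns the remote contents as an in-memory buffer and, when `local` is set, also writes the file with the requested permissions mode. A sketch; the file name is illustrative:

    # download one file from CDDIS to a local path (file name illustrative)
    import timescale.utilities as utilities

    buffer = utilities.from_cddis(
        "https://cddis.nasa.gov/archive/products/iers/finals.all",
        local="finals.all",
        verbose=True,
    )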
# PURPOSE: list a directory on IERS https Server
def iers_list(
- HOST: str | list,
- timeout: int | None = None,
- context: ssl.SSLContext = _default_ssl_context,
- parser = lxml.etree.HTMLParser()
- ):
+ HOST: str | list,
+ timeout: int | None = None,
+ context: ssl.SSLContext = _default_ssl_context,
+ parser=lxml.etree.HTMLParser(),
+):
"""
List a directory on IERS Bulletin-A https server
@@ -1427,29 +1480,32 @@ def iers_list(
raise RuntimeError(exc.reason) from exc
except urllib2.URLError as exc:
logging.debug(exc.reason)
- msg = 'List error from {0}'.format(posixpath.join(*HOST))
+ msg = "List error from {0}".format(posixpath.join(*HOST))
raise Exception(msg) from exc
else:
# read and parse request for files (column names and modified times)
tree = lxml.etree.parse(response, parser)
colnames = tree.xpath('//tr/td[@class="$tdclass"][4]//a/@href')
# get the Unix timestamp value for a modification time
- collastmod = [get_unix_time(i,format='%Y-%m-%d')
- for i in tree.xpath('//tr/td[@class="$tdclass"][2]/span/text()')]
+ collastmod = [
+ get_unix_time(i, format="%Y-%m-%d")
+ for i in tree.xpath('//tr/td[@class="$tdclass"][2]/span/text()')
+ ]
# sort list of column names and last modified times in reverse order
# return the list of column names and last modified times
return (colnames[::-1], collastmod[::-1])
+
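`iers_list` scrapes the Bulletin-A table for file links and their dates, returning both lists newest-first. A sketch; the listing URL is an assumption about the IERS data center layout:

    # list available IERS Bulletin-A files (URL assumed)
    import timescale.utilities as utilities

    files, mtimes = utilities.iers_list(
        "https://datacenter.iers.org/availableVersions.php?id=6"
    )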
def from_jpl_ssd(
- kernel='de440s.bsp',
- timeout: int | None = None,
- context: ssl.SSLContext = _default_ssl_context,
- local: str | pathlib.Path | None = None,
- hash: str = '',
- chunk: int = 16384,
- verbose: bool = False,
- mode: oct = 0o775
- ):
+ kernel="de440s.bsp",
+ timeout: int | None = None,
+ context: ssl.SSLContext = _default_ssl_context,
+ local: str | pathlib.Path | None = None,
+ hash: str = "",
+ chunk: int = 16384,
+ verbose: bool = False,
+ mode: oct = 0o775,
+):
"""
    Download `planetary ephemeris kernels`__ from the JPL Solar
    System Dynamics server
@@ -1474,16 +1530,24 @@ def from_jpl_ssd(
permissions mode of output local file
"""
# determine which kernel file to download
- if (local is None):
+ if local is None:
# local path to kernel file
- local = get_data_path(['data',kernel])
+ local = get_data_path(["data", kernel])
elif (kernel is None) and (local is not None):
# verify inputs for remote http host
local = pathlib.Path(local).expanduser().absolute()
kernel = local.name
# remote host path to kernel file
- HOST = ['https://ssd.jpl.nasa.gov','ftp','eph','planets','bsp',kernel]
+ HOST = ["https://ssd.jpl.nasa.gov", "ftp", "eph", "planets", "bsp", kernel]
# get kernel file from remote host
- logging.info('Downloading JPL Planetary Ephemeride Kernel File')
- from_http(HOST, timeout=timeout, context=context, local=local,
- hash=hash, chunk=chunk, verbose=verbose, mode=mode)
+ logging.info("Downloading JPL Planetary Ephemeride Kernel File")
+ from_http(
+ HOST,
+ timeout=timeout,
+ context=context,
+ local=local,
+ hash=hash,
+ chunk=chunk,
+ verbose=verbose,
+ mode=mode,
+ )
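Called with defaults, `from_jpl_ssd` writes the DE440s kernel into the package data directory; with `local` set and `kernel=None`, the kernel name is taken from the local file name. A sketch, assuming the `timescale.utilities` module path:

    # fetch the default DE440s planetary ephemeris kernel into the
    # package data path, logging the transfer
    import timescale.utilities as utilities

    utilities.from_jpl_ssd(kernel="de440s.bsp", verbose=True)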
diff --git a/timescale/version.py b/timescale/version.py
index c4a6143..421845b 100644
--- a/timescale/version.py
+++ b/timescale/version.py
@@ -1,8 +1,9 @@
#!/usr/bin/env python
-u"""
+"""
version.py (11/2023)
Gets version number with importlib.metadata
"""
+
import importlib.metadata
# package metadata