diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 00ba2251..0ee26e29 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -30,15 +30,13 @@ jobs: fail-fast: false matrix: platform: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.10', '3.11', '3.12', '3.13', '3.14-dev'] + python-version: ['3.12', '3.13', '3.14'] exclude: [ # windows runners are pretty scarce, so let's only run lowest and highest python version - {platform: windows-latest, python-version: '3.11'}, - {platform: windows-latest, python-version: '3.12'}, + {platform: windows-latest, python-version: '3.13'}, # same, macos is a bit too slow and ubuntu covers python quirks well - {platform: macos-latest , python-version: '3.11' }, - {platform: macos-latest , python-version: '3.12' }, + {platform: macos-latest , python-version: '3.13' }, ] runs-on: ${{ matrix.platform }} @@ -50,16 +48,16 @@ jobs: # ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation - run: echo "$HOME/.local/bin" >> $GITHUB_PATH - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: submodules: recursive fetch-depth: 0 # nicer to have all git history when debugging/for tests - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - - uses: astral-sh/setup-uv@v5 + - uses: astral-sh/setup-uv@v7 with: enable-cache: false # we don't have lock files, so can't use them as cache key @@ -100,16 +98,16 @@ jobs: # ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation - run: echo "$HOME/.local/bin" >> $GITHUB_PATH - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: submodules: recursive fetch-depth: 0 # pull all commits to correctly infer vcs version - - uses: actions/setup-python@v5 + - uses: actions/setup-python@v6 with: python-version: '3.12' - - uses: astral-sh/setup-uv@v5 + - uses: astral-sh/setup-uv@v7 with: enable-cache: false # we don't have lock files, so can't use them as cache key diff --git a/pyproject.toml b/pyproject.toml index 93c95f84..fa5933fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ dependencies = [ "kompress>=0.2.20240918" , # for transparent access to compressed files via pathlib.Path ] -requires-python = ">=3.10" +requires-python = ">=3.12" ## these need to be set if you're planning to upload to pypi description = "A Python interface to my life" @@ -65,7 +65,7 @@ typecheck = [ { include-group = "testing" }, "mypy", "lxml", # for mypy coverage - "ty>=0.0.1a21", + "ty>=0.0.1a22", "HPI[optional]", "orgparse", # for my.core.orgmode diff --git a/ruff.toml b/ruff.toml index 29bc6f82..9147f78b 100644 --- a/ruff.toml +++ b/ruff.toml @@ -98,6 +98,10 @@ lint.ignore = [ "PLC0415", # "imports should be at the top level" -- not realistic "ARG001", # ugh, kinda annoying when using pytest fixtures + + # FIXME hmm. Need to figure out if cachew works fine with type = defined types before updating things.. 
+ "UP047", # non-pep695-generic-function + "UP040", # non-pep695-type-alias ] lint.per-file-ignores."src/my/core/compat.py" = [ diff --git a/src/my/arbtt.py b/src/my/arbtt.py index 32b73863..b1f97e78 100644 --- a/src/my/arbtt.py +++ b/src/my/arbtt.py @@ -10,13 +10,13 @@ from collections.abc import Iterable, Sequence from dataclasses import dataclass +from datetime import datetime from pathlib import Path from subprocess import PIPE, Popen import ijson # type: ignore[import-untyped] from my.core import Json, PathIsh, Stats, datetime_aware, get_files, stat -from my.core.compat import fromisoformat def inputs() -> Sequence[Path]: @@ -46,7 +46,6 @@ class Entry: @property def dt(self) -> datetime_aware: # contains utc already - # TODO after python>=3.11, could just use fromisoformat ds = self.json['date'] elen = 27 lds = len(ds) @@ -57,7 +56,7 @@ def dt(self) -> datetime_aware: # and sometimes more... ds = ds[: elen - 1] + 'Z' - return fromisoformat(ds) + return datetime.fromisoformat(ds) @property def active(self) -> str | None: diff --git a/src/my/bumble/android.py b/src/my/bumble/android.py index ee07400e..5995f856 100644 --- a/src/my/bumble/android.py +++ b/src/my/bumble/android.py @@ -11,12 +11,11 @@ from dataclasses import dataclass from datetime import datetime from pathlib import Path -from typing import Protocol +from typing import Protocol, assert_never from more_itertools import unique_everseen from my.core import Paths, Res, get_files -from my.core.compat import assert_never from my.core.sqlite import select, sqlite_connection diff --git a/src/my/codeforces.py b/src/my/codeforces.py index 9c6b7c94..02a0d9e6 100644 --- a/src/my/codeforces.py +++ b/src/my/codeforces.py @@ -1,7 +1,7 @@ import json from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from functools import cached_property from pathlib import Path @@ -45,7 +45,7 @@ def _parse_allcontests(self, p: Path) -> Iterator[Contest]: for c in j['result']: yield Contest( contest_id=c['id'], - when=datetime.fromtimestamp(c['startTimeSeconds'], tz=timezone.utc), + when=datetime.fromtimestamp(c['startTimeSeconds'], tz=UTC), name=c['name'], ) diff --git a/src/my/core/common.py b/src/my/core/common.py index 956456c1..90575542 100644 --- a/src/my/core/common.py +++ b/src/my/core/common.py @@ -5,11 +5,7 @@ from collections.abc import Callable, Iterable, Sequence from glob import glob as do_glob from pathlib import Path -from typing import ( - TYPE_CHECKING, - Generic, - TypeVar, -) +from typing import TYPE_CHECKING from . import compat, warnings @@ -106,22 +102,19 @@ def caller() -> str: return tuple(paths) -_R = TypeVar('_R') - - # https://stackoverflow.com/a/5192374/706389 # NOTE: it was added to stdlib in 3.9 and then deprecated in 3.11 # seems that the suggested solution is to use custom decorator? 
-class classproperty(Generic[_R]): - def __init__(self, f: Callable[..., _R]) -> None: +class classproperty[R]: + def __init__(self, f: Callable[..., R]) -> None: self.f = f - def __get__(self, obj, cls) -> _R: + def __get__(self, obj, cls) -> R: return self.f(cls) def test_classproperty() -> None: - from .compat import assert_type + from typing import assert_type class C: @classproperty @@ -129,7 +122,7 @@ def prop(cls) -> str: return 'hello' res = C.prop - assert_type(res, str) + assert_type(res, str) # ty: ignore[type-assertion-failure] assert res == 'hello' @@ -247,7 +240,7 @@ def asdict(*args, **kwargs): tzdatetime = datetime_aware else: - from .compat import Never + from typing import Never # make these invalid during type check while working in runtime Stats = Never diff --git a/src/my/core/compat.py b/src/my/core/compat.py index ac953ae4..e748a2b1 100644 --- a/src/my/core/compat.py +++ b/src/my/core/compat.py @@ -32,91 +32,25 @@ def removeprefix(text: str, prefix: str) -> str: def removesuffix(text: str, suffix: str) -> str: return text.removesuffix(suffix) - ## - ## used to have compat function before 3.8 for these, keeping for runtime back compatibility from bisect import bisect_left from functools import cached_property from types import NoneType - # used to have compat function before 3.9 for these, keeping for runtime back compatibility + ## + ## used to have compat function before 3.9 for these, keeping for runtime back compatibility from typing import Literal, ParamSpec, Protocol, TypeAlias, TypedDict _KwOnlyType = TypedDict("_KwOnlyType", {"kw_only": Literal[True]}) # noqa: UP013 KW_ONLY: _KwOnlyType = {"kw_only": True} -## - + ## -from datetime import datetime + ## old compat for python <3.12 + from datetime import datetime -if sys.version_info[:2] >= (3, 11): fromisoformat = datetime.fromisoformat -else: - # fromisoformat didn't support Z as "utc" before 3.11 - # https://docs.python.org/3/library/datetime.html#datetime.datetime.fromisoformat - - def fromisoformat(date_string: str) -> datetime: - if date_string.endswith('Z'): - date_string = date_string[:-1] + '+00:00' - return datetime.fromisoformat(date_string) - - -def test_fromisoformat() -> None: - from datetime import timezone - - # fmt: off - # feedbin has this format - assert fromisoformat('2020-05-01T10:32:02.925961Z') == datetime( - 2020, 5, 1, 10, 32, 2, 925961, timezone.utc, - ) - - # polar has this format - assert fromisoformat('2018-11-28T22:04:01.304Z') == datetime( - 2018, 11, 28, 22, 4, 1, 304000, timezone.utc, - ) - # stackexchange, runnerup has this format - assert fromisoformat('2020-11-30T00:53:12Z') == datetime( - 2020, 11, 30, 0, 53, 12, 0, timezone.utc, - ) - # fmt: on - - # arbtt has this format (sometimes less/more than 6 digits in milliseconds) - # TODO doesn't work atm, not sure if really should be supported... - # maybe should have flags for weird formats? - # assert isoparse('2017-07-18T18:59:38.21731Z') == datetime( - # 2017, 7, 18, 18, 59, 38, 217310, timezone.utc, - # ) - - -if sys.version_info[:2] >= (3, 11): from typing import Never, assert_never, assert_type -else: - from typing_extensions import Never, assert_never, assert_type - -if sys.version_info[:2] >= (3, 11): add_note = BaseException.add_note -else: - - def add_note(e: BaseException, note: str) -> None: - """ - Backport of BaseException.add_note - """ - - # The only (somewhat annoying) difference is it will log extra lines for notes past the main exception message: - # (i.e. 
line 2 here:) - - # 1 [ERROR 2025-02-04 22:12:21] Main exception message - # 2 ^ extra note - # 3 Traceback (most recent call last): - # 4 File "run.py", line 19, in - # 5 ee = test() - # 6 File "run.py", line 5, in test - # 7 raise RuntimeError("Main exception message") - # 8 RuntimeError: Main exception message - # 9 ^ extra note - - args = e.args - if len(args) == 1 and isinstance(args[0], str): - e.args = (e.args[0] + '\n' + note,) + ## diff --git a/src/my/core/freezer.py b/src/my/core/freezer.py index 09851c61..78ba467c 100644 --- a/src/my/core/freezer.py +++ b/src/my/core/freezer.py @@ -2,13 +2,11 @@ import dataclasses import inspect -from typing import Any, Generic, TypeVar +from typing import Any -D = TypeVar('D') - -def _freeze_dataclass(Orig: type[D]): - ofields = [(f.name, f.type, f) for f in dataclasses.fields(Orig)] # type: ignore[arg-type] # see https://github.com/python/typing_extensions/issues/115 +def _freeze_dataclass(Orig: type): + ofields = [(f.name, f.type, f) for f in dataclasses.fields(Orig)] # extract properties along with their types props = list(inspect.getmembers(Orig, lambda o: isinstance(o, property))) @@ -20,7 +18,7 @@ def _freeze_dataclass(Orig: type[D]): return props, RRR -class Freezer(Generic[D]): +class Freezer[D]: ''' Some magic which converts dataclass properties into fields. It could be useful for better serialization, for performance, for using type as a schema. diff --git a/src/my/core/pandas.py b/src/my/core/pandas.py index b34fe99a..bbd91262 100644 --- a/src/my/core/pandas.py +++ b/src/my/core/pandas.py @@ -69,8 +69,6 @@ def _check_dateish(s: SeriesT[S1]) -> Iterable[str]: def test_check_dateish() -> None: import pandas as pd - from .compat import fromisoformat - # empty series shouldn't warn assert list(_check_dateish(pd.Series([]))) == [] @@ -80,16 +78,16 @@ def test_check_dateish() -> None: # all values are datetimes, shouldn't warn # fmt: off assert list(_check_dateish(pd.Series([ - fromisoformat('2024-08-19T01:02:03'), - fromisoformat('2024-08-19T03:04:05'), + datetime.fromisoformat('2024-08-19T01:02:03'), + datetime.fromisoformat('2024-08-19T03:04:05'), ]))) == [] # fmt: on # mixture of timezones -- should warn # fmt: off assert len(list(_check_dateish(pd.Series([ - fromisoformat('2024-08-19T01:02:03'), - fromisoformat('2024-08-19T03:04:05Z'), + datetime.fromisoformat('2024-08-19T01:02:03'), + datetime.fromisoformat('2024-08-19T03:04:05Z'), ])))) == 1 # fmt: on @@ -97,7 +95,7 @@ def test_check_dateish() -> None: # fmt: off assert len(list(_check_dateish(pd.Series([ 'whatever', - fromisoformat('2024-08-19T01:02:03'), + datetime.fromisoformat('2024-08-19T01:02:03'), ])))) == 0 # fmt: on @@ -216,14 +214,11 @@ def test_as_dataframe() -> None: import pytest from pandas.testing import assert_frame_equal - from .compat import fromisoformat - it = ({'i': i, 's': f'str{i}'} for i in range(5)) with pytest.warns(UserWarning, match=r"No 'error' column") as record_warnings: # noqa: F841 df: DataFrameT = as_dataframe(it) # todo test other error col policies - # fmt: off assert_frame_equal( df, pd.DataFrame({ @@ -232,8 +227,7 @@ def test_as_dataframe() -> None: # NOTE: error column is always added 'error': [None , None , None , None , None ], }), - ) - # fmt: on + ) # fmt: skip assert_frame_equal(as_dataframe([]), pd.DataFrame(columns=['error'])) df2: DataFrameT = as_dataframe([], schema=_X) @@ -252,7 +246,6 @@ def it2() -> Iterator[Res[S]]: yield RuntimeError('i failed') df = as_dataframe(it2()) - # fmt: off assert_frame_equal( df, pd.DataFrame(data={ @@ 
-260,22 +253,19 @@ def it2() -> Iterator[Res[S]]: 'error': [np.nan, 'RuntimeError: i failed\n'], 'dt' : [np.nan, np.nan ], }).astype(dtype={'dt': 'float'}), # FIXME should be datetime64 as below - ) - # fmt: on + ) # fmt: skip def it3() -> Iterator[Res[S]]: yield S(value='aba') yield RuntimeError('whoops') yield S(value='cde') - yield RuntimeError('exception with datetime', fromisoformat('2024-08-19T22:47:01Z')) + yield RuntimeError('exception with datetime', datetime.fromisoformat('2024-08-19T22:47:01Z')) df = as_dataframe(it3()) - # fmt: off assert_frame_equal(df, pd.DataFrame(data={ 'value': ['aba' , np.nan , 'cde' , np.nan ], 'error': [np.nan, 'RuntimeError: whoops\n', np.nan, "RuntimeError: ('exception with datetime', datetime.datetime(2024, 8, 19, 22, 47, 1, tzinfo=datetime.timezone.utc))\n"], # note: dt column is added even if errors don't have an associated datetime 'dt' : [np.nan, np.nan , np.nan, '2024-08-19 22:47:01+00:00'], - }).astype(dtype={'dt': 'datetime64[ns, UTC]'})) - # fmt: on + }).astype(dtype={'dt': 'datetime64[ns, UTC]'})) # fmt: skip diff --git a/src/my/core/query_range.py b/src/my/core/query_range.py index 87b32f63..d4f857b7 100644 --- a/src/my/core/query_range.py +++ b/src/my/core/query_range.py @@ -18,7 +18,6 @@ import more_itertools -from .compat import fromisoformat from .query import ( ET, OrderFunc, @@ -80,10 +79,6 @@ def parse_datetime_float(date_str: str) -> float: return datetime.fromisoformat(ds).timestamp() except ValueError: pass - try: - return fromisoformat(ds).timestamp() - except (AssertionError, ValueError): - pass try: import dateparser diff --git a/src/my/core/sqlite.py b/src/my/core/sqlite.py index 900740a2..d95e0538 100644 --- a/src/my/core/sqlite.py +++ b/src/my/core/sqlite.py @@ -6,11 +6,10 @@ from contextlib import contextmanager from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, Literal, overload +from typing import Any, Literal, assert_never, overload from . import warnings from .common import PathIsh -from .compat import assert_never def sqlite_connect_immutable(db: PathIsh) -> sqlite3.Connection: @@ -67,7 +66,7 @@ def sqlite_connection( elif row_factory == 'dict': row_factory_ = dict_factory else: - assert_never(row_factory) + assert_never(row_factory) # ty: ignore[type-assertion-failure] # I think ty is confused about callable() if _via_apsw: diff --git a/src/my/core/stats.py b/src/my/core/stats.py index 271acf8d..8b09a66e 100644 --- a/src/my/core/stats.py +++ b/src/my/core/stats.py @@ -404,10 +404,10 @@ def funcit(): def test_stat_iterable() -> None: - from datetime import datetime, timedelta, timezone + from datetime import UTC, datetime, timedelta from typing import NamedTuple - dd = datetime.fromtimestamp(123, tz=timezone.utc) + dd = datetime.fromtimestamp(123, tz=UTC) day = timedelta(days=3) class X(NamedTuple): @@ -444,11 +444,10 @@ def _guess_datetime(x: Any) -> datetime | None: def test_guess_datetime() -> None: from dataclasses import dataclass + from datetime import datetime from typing import NamedTuple - from .compat import fromisoformat - - dd = fromisoformat('2021-02-01T12:34:56Z') + dd = datetime.fromisoformat('2021-02-01T12:34:56Z') class A(NamedTuple): x: int diff --git a/src/my/core/utils/itertools.py b/src/my/core/utils/itertools.py index 6e6e1dc1..521b7db2 100644 --- a/src/my/core/utils/itertools.py +++ b/src/my/core/utils/itertools.py @@ -115,7 +115,7 @@ def listify(func: Callable[LFP, Iterable[LV]]) -> Callable[LFP, list[LV]]: ... 
def test_listify() -> None: - from ..compat import assert_type + from typing import assert_type @listify def it() -> Iterator[int]: @@ -123,7 +123,7 @@ def it() -> Iterator[int]: yield 2 res = it() - assert_type(res, list[int]) + assert_type(res, list[int]) # ty: ignore[type-assertion-failure] assert res == [1, 2] @@ -162,7 +162,7 @@ def warn_if_empty(func: FF) -> FF: ... def test_warn_if_empty_iterator() -> None: - from ..compat import assert_type + from typing import assert_type @warn_if_empty def nonempty() -> Iterator[str]: @@ -189,7 +189,7 @@ def empty() -> Iterator[int]: def test_warn_if_empty_list() -> None: - from ..compat import assert_type + from typing import assert_type ll = [1, 2, 3] @@ -261,11 +261,10 @@ def res() -> Iterator[_HT]: # TODO different policies -- error/warn/ignore? def test_check_if_hashable() -> None: from dataclasses import dataclass + from typing import assert_type import pytest - from ..compat import assert_type - x1: list[int] = [1, 2] r1 = check_if_hashable(x1) assert_type(r1, Iterable[int]) @@ -273,12 +272,12 @@ def test_check_if_hashable() -> None: x2: Iterator[int | str] = iter((123, 'aba')) r2 = check_if_hashable(x2) - assert_type(r2, Iterable[int | str]) + assert_type(r2, Iterable[int | str]) # ty: ignore[type-assertion-failure] # atm ty is a bit confused about generics assert list(r2) == [123, 'aba'] x3: tuple[object, ...] = (789, 'aba') r3 = check_if_hashable(x3) - assert_type(r3, Iterable[object]) + assert_type(r3, Iterable[object]) # ty: ignore[type-assertion-failure] # ty thinks it's Literal[789, 'aba']? odd assert r3 is x3 # object should be unchanged x4: list[set[int]] = [{1, 2, 3}, {4, 5, 6}] diff --git a/src/my/demo.py b/src/my/demo.py index 34424989..a1c6677f 100644 --- a/src/my/demo.py +++ b/src/my/demo.py @@ -6,7 +6,7 @@ import json from collections.abc import Iterable, Sequence from dataclasses import dataclass -from datetime import datetime, timezone, tzinfo +from datetime import UTC, datetime, tzinfo from pathlib import Path from typing import Protocol @@ -20,7 +20,7 @@ class config(Protocol): username: str # this is to check optional attribute handling - timezone: tzinfo = timezone.utc + timezone: tzinfo = UTC external: PathIsh | None = None diff --git a/src/my/experimental/destructive_parsing.py b/src/my/experimental/destructive_parsing.py index dfb14d1b..adf1aa8b 100644 --- a/src/my/experimental/destructive_parsing.py +++ b/src/my/experimental/destructive_parsing.py @@ -1,9 +1,7 @@ from collections.abc import Iterator from dataclasses import dataclass from types import NoneType -from typing import Any - -from my.core.compat import assert_never +from typing import Any, assert_never # TODO Popper? 
not sure @@ -31,7 +29,7 @@ def zoom(self, key: str) -> 'Helper': return self.manager.helper(item=self.item.pop(key), path=(*self.path, key)) -def is_empty(x) -> bool: # noqa: RET503 +def is_empty(x) -> bool: if isinstance(x, dict): return len(x) == 0 elif isinstance(x, list): diff --git a/src/my/fbmessenger/android.py b/src/my/fbmessenger/android.py index e50682be..1ed7bf96 100644 --- a/src/my/fbmessenger/android.py +++ b/src/my/fbmessenger/android.py @@ -7,12 +7,12 @@ import sqlite3 from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path +from typing import assert_never from my.core import Paths, Res, datetime_aware, get_files, make_config, make_logger from my.core.common import unique_everseen -from my.core.compat import add_note, assert_never from my.core.sqlite import SqliteTool, sqlite_connection from my.config import fbmessenger as user_config # isort: skip @@ -91,7 +91,7 @@ def _entities() -> Iterator[Res[Entity]]: else: yield from _process_db_threads_db2(db) except Exception as e: - add_note(e, f'^ while processing {path}') + e.add_note(f'^ while processing {path}') yield e @@ -183,7 +183,7 @@ def _process_db_msys(db: sqlite3.Connection) -> Iterator[Res[Entity]]: yield _Message( id=r['message_id'], # TODO double check utc - dt=datetime.fromtimestamp(r['timestamp_ms'] / 1000, tz=timezone.utc), + dt=datetime.fromtimestamp(r['timestamp_ms'] / 1000, tz=UTC), # is_incoming=False, TODO?? text=r['text'], thread_id=r['thread_key'], @@ -248,7 +248,7 @@ def _process_db_threads_db2(db: sqlite3.Connection) -> Iterator[Res[Entity]]: yield _Message( id=r['msg_id'], # double checked against some messages in different timezone - dt=datetime.fromtimestamp(r['timestamp_ms'] / 1000, tz=timezone.utc), + dt=datetime.fromtimestamp(r['timestamp_ms'] / 1000, tz=UTC), # is_incoming=False, TODO?? text=r['text'], thread_id=_normalise_thread_id(r['thread_key']), @@ -291,7 +291,7 @@ def messages() -> Iterator[Res[Message]]: try: thread = threads[x.thread_id] except KeyError as e: - add_note(e, f'^ while processing {x}') + e.add_note(f'^ while processing {x}') yield e continue m = Message( diff --git a/src/my/github/common.py b/src/my/github/common.py index 6b9c3c99..50dac167 100644 --- a/src/my/github/common.py +++ b/src/my/github/common.py @@ -8,7 +8,7 @@ from collections.abc import Iterable -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import NamedTuple from my.core import datetime_aware, make_logger, warn_if_empty @@ -49,7 +49,7 @@ def merge_events(*sources: Results) -> Results: def parse_dt(s: str) -> datetime_aware: # TODO isoformat? - return datetime.strptime(s, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc) + return datetime.strptime(s, '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=UTC) # experimental way of supportint event ids... 
not sure diff --git a/src/my/github/gdpr.py b/src/my/github/gdpr.py index e6f58536..bd82dc14 100644 --- a/src/my/github/gdpr.py +++ b/src/my/github/gdpr.py @@ -19,7 +19,6 @@ stat, warnings, ) -from my.core.compat import add_note from my.core.json import json_loads from .common import Event, EventIds, parse_dt @@ -136,7 +135,7 @@ def _process_one(root: Path) -> Iterator[Res[Event]]: try: yield handler(r) except Exception as e: - add_note(e, f'^ while processing {f}') + e.add_note(f'^ while processing {f}') yield e diff --git a/src/my/google/maps/android.py b/src/my/google/maps/android.py index 621eca4c..093f764e 100644 --- a/src/my/google/maps/android.py +++ b/src/my/google/maps/android.py @@ -9,7 +9,7 @@ from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Any from urllib.parse import quote @@ -92,7 +92,7 @@ def _process_one(f: Path): for row in conn.execute('SELECT * FROM sync_item_data WHERE corpus == 11'): # this looks like 'Labeled' list ts = row['timestamp'] / 1000 - created = datetime.fromtimestamp(ts, tz=timezone.utc) + created = datetime.fromtimestamp(ts, tz=UTC) server_id = row['server_id'] [item_type, item_id] = server_id.split(':') @@ -134,10 +134,10 @@ def _process_one(f: Path): note = None # TODO double check timezone - created = datetime.fromtimestamp(msg.f1.created.seconds, tz=timezone.utc).replace(microsecond=msg.f1.created.nanos // 1000) + created = datetime.fromtimestamp(msg.f1.created.seconds, tz=UTC).replace(microsecond=msg.f1.created.nanos // 1000) # NOTE: this one seems to be the same as row['timestamp'] - updated = datetime.fromtimestamp(msg.f1.updated.seconds, tz=timezone.utc).replace(microsecond=msg.f1.updated.nanos // 1000) + updated = datetime.fromtimestamp(msg.f1.updated.seconds, tz=UTC).replace(microsecond=msg.f1.updated.nanos // 1000) address = msg.f2.addr1 # NOTE: there is also addr2, but they seem identical :shrug: if address == '': diff --git a/src/my/hackernews/dogsheep.py b/src/my/hackernews/dogsheep.py index 8043fe38..21b6a65d 100644 --- a/src/my/hackernews/dogsheep.py +++ b/src/my/hackernews/dogsheep.py @@ -6,7 +6,7 @@ from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path import my.config @@ -57,7 +57,7 @@ def items() -> Iterator[Res[Item]]: yield Item( id=r['id'], type=r['type'], - created=datetime.fromtimestamp(r['time'], tz=timezone.utc), + created=datetime.fromtimestamp(r['time'], tz=UTC), title=r['title'], # todo hmm maybe a method to strip off html tags would be nice text_html=r['text'], diff --git a/src/my/hackernews/harmonic.py b/src/my/hackernews/harmonic.py index ff3860da..674bf484 100644 --- a/src/my/hackernews/harmonic.py +++ b/src/my/hackernews/harmonic.py @@ -8,7 +8,7 @@ from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Any, TypedDict, cast @@ -65,7 +65,7 @@ class Saved(SavedBase): @property def when(self) -> datetime_aware: ts = self.raw['created_at_i'] - return datetime.fromtimestamp(ts, tz=timezone.utc) + return datetime.fromtimestamp(ts, tz=UTC) @property def uid(self) -> str: diff --git a/src/my/hackernews/materialistic.py b/src/my/hackernews/materialistic.py index cbbdc1c7..4774d056 100644 --- 
a/src/my/hackernews/materialistic.py +++ b/src/my/hackernews/materialistic.py @@ -3,7 +3,7 @@ """ from collections.abc import Iterator, Sequence -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Any, NamedTuple @@ -36,7 +36,7 @@ class Saved(NamedTuple): @property def when(self) -> datetime_aware: ts = int(self.row['time']) / 1000 - return datetime.fromtimestamp(ts, tz=timezone.utc) + return datetime.fromtimestamp(ts, tz=UTC) @property def uid(self) -> str: diff --git a/src/my/instagram/android.py b/src/my/instagram/android.py index f59838da..662728bf 100644 --- a/src/my/instagram/android.py +++ b/src/my/instagram/android.py @@ -9,6 +9,7 @@ from dataclasses import dataclass from datetime import datetime from pathlib import Path +from typing import assert_never from my.core import ( Json, @@ -21,7 +22,6 @@ ) from my.core.cachew import mcachew from my.core.common import unique_everseen -from my.core.compat import add_note, assert_never from my.core.json import json_loads from my.core.sqlite import select, sqlite_connect_immutable @@ -195,7 +195,7 @@ def _process_db(db: sqlite3.Connection) -> Iterator[Res[User | _Message]]: if m is not None: yield m except Exception as e: - add_note(e, f'^ while parsing {j}') + e.add_note(f'^ while parsing {j}') yield e @@ -212,10 +212,10 @@ def _entities() -> Iterator[Res[User | _Message]]: try: for m in _process_db(db=db): if isinstance(m, Exception): - add_note(m, f'^ while processing {path}') + m.add_note(f'^ while processing {path}') yield m except Exception as e: - add_note(e, f'^ while processing {path}') + e.add_note(f'^ while processing {path}') yield e # todo use error policy here diff --git a/src/my/instagram/gdpr.py b/src/my/instagram/gdpr.py index e59921dd..f03479ad 100644 --- a/src/my/instagram/gdpr.py +++ b/src/my/instagram/gdpr.py @@ -10,6 +10,7 @@ from dataclasses import dataclass from datetime import datetime from pathlib import Path +from typing import assert_never from more_itertools import bucket, spy @@ -22,7 +23,6 @@ make_logger, ) from my.core.common import unique_everseen -from my.core.compat import add_note, assert_never from my.config import instagram as user_config # isort: skip @@ -216,7 +216,7 @@ def _parse_message(jm: Json) -> _Message: try: yield _parse_message(jm) except Exception as e: - add_note(e, f'^ while parsing {jm}') + e.add_note(f'^ while parsing {jm}') yield e @@ -234,7 +234,7 @@ def messages() -> Iterator[Res[Message]]: try: user = id2user[x.user_id] except Exception as e: - add_note(e, f'^ while processing {x}') + e.add_note(f'^ while processing {x}') yield e continue yield Message( diff --git a/src/my/lastfm.py b/src/my/lastfm.py index 43d8d658..30f28dff 100644 --- a/src/my/lastfm.py +++ b/src/my/lastfm.py @@ -5,7 +5,7 @@ from abc import abstractmethod from collections.abc import Iterable, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Protocol @@ -52,7 +52,7 @@ class Scrobble: @property def dt(self) -> datetime_aware: ts = int(self.raw['date']) - return datetime.fromtimestamp(ts, tz=timezone.utc) + return datetime.fromtimestamp(ts, tz=UTC) @property def artist(self) -> str: diff --git a/src/my/location/fallback/common.py b/src/my/location/fallback/common.py index 1d048f6d..5177d0f4 100644 --- a/src/my/location/fallback/common.py +++ b/src/my/location/fallback/common.py @@ -2,7 +2,7 @@ from collections.abc import Callable, 
Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta from ..common import Location, LocationProtocol @@ -79,7 +79,7 @@ def _datetime_timestamp(dt: DateExact) -> float: return dt.timestamp() except ValueError: # https://github.com/python/cpython/issues/75395 - return dt.replace(tzinfo=timezone.utc).timestamp() + return dt.replace(tzinfo=UTC).timestamp() return float(dt) def _iter_estimate_from( diff --git a/src/my/location/fallback/via_home.py b/src/my/location/fallback/via_home.py index f88fee00..dfb2ddbf 100644 --- a/src/my/location/fallback/via_home.py +++ b/src/my/location/fallback/via_home.py @@ -6,7 +6,7 @@ from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, time, timezone +from datetime import UTC, datetime, time from functools import cache from typing import cast @@ -50,7 +50,7 @@ def _history(self) -> Sequence[tuple[datetime, LatLon]]: dt = datetime.combine(x, time.min) # todo not sure about doing it here, but makes it easier to compare.. if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) + dt = dt.replace(tzinfo=UTC) res.append((dt, loc)) res = sorted(res, key=lambda p: p[0]) return res @@ -87,7 +87,7 @@ def estimate_location(dt: DateExact) -> Iterator[FallbackLocation]: lat=lat, lon=lon, accuracy=config.home_accuracy, - dt=datetime.fromtimestamp(d, timezone.utc), + dt=datetime.fromtimestamp(d, UTC), datasource='via_home') return @@ -97,5 +97,5 @@ def estimate_location(dt: DateExact) -> Iterator[FallbackLocation]: lat=lat, lon=lon, accuracy=config.home_accuracy, - dt=datetime.fromtimestamp(d, timezone.utc), + dt=datetime.fromtimestamp(d, UTC), datasource='via_home') diff --git a/src/my/location/google.py b/src/my/location/google.py index 509bad86..9c3f5f3e 100644 --- a/src/my/location/google.py +++ b/src/my/location/google.py @@ -12,7 +12,7 @@ import re from collections.abc import Iterable, Sequence -from datetime import datetime, timezone +from datetime import UTC, datetime from itertools import islice from pathlib import Path from subprocess import PIPE, Popen @@ -54,7 +54,7 @@ def _iter_via_ijson(fo) -> Iterable[TsLatLon]: warnings.medium("Falling back to default ijson because 'cffi' backend isn't found. 
It's up to 2x faster, you might want to check it out") import ijson # type: ignore[import-untyped] - for d in ijson.items(fo, 'locations.item'): + for d in ijson.items(fo, 'locations.item'): # ty: ignore[possibly-missing-attribute] yield ( int(d['timestampMs']), d['latitudeE7' ], @@ -88,7 +88,7 @@ def _iter_locations_fo(fit) -> Iterable[Location]: errors = 0 for tsMs, latE7, lonE7 in fit: - dt = datetime.fromtimestamp(tsMs / 1000, tz=timezone.utc) + dt = datetime.fromtimestamp(tsMs / 1000, tz=UTC) total += 1 if total % 10000 == 0: logger.info('processing item %d %s', total, dt) diff --git a/src/my/location/gpslogger.py b/src/my/location/gpslogger.py index a67f31c9..5f91e365 100644 --- a/src/my/location/gpslogger.py +++ b/src/my/location/gpslogger.py @@ -7,7 +7,7 @@ from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from itertools import chain from pathlib import Path @@ -81,7 +81,7 @@ def _extract_locations(path: Path) -> Iterator[Location]: lon=point.longitude, accuracy=config.accuracy, elevation=point.elevation, - dt=datetime.replace(point.time, tzinfo=timezone.utc), + dt=datetime.replace(point.time, tzinfo=UTC), datasource="gpslogger", ) diff --git a/src/my/pdfs.py b/src/my/pdfs.py index 9f87a413..8b776d85 100644 --- a/src/my/pdfs.py +++ b/src/my/pdfs.py @@ -19,7 +19,6 @@ from my.core import PathIsh, Paths, Stats, get_files, make_logger, stat from my.core.cachew import mcachew -from my.core.compat import add_note from my.core.error import Res, split_errors @@ -135,7 +134,7 @@ def _iter_annotations(pdfs: Sequence[Path]) -> Iterator[Res[Annotation]]: try: yield from f.result() except Exception as e: - add_note(e, f'^ while processing {pdf}') + e.add_note(f'^ while processing {pdf}') logger.exception(e) # todo add a comment that it can be ignored... or something like that # TODO not sure if should attach pdf as well; it's a bit annoying to pass around? diff --git a/src/my/podcastaddict/android.py b/src/my/podcastaddict/android.py index 86b2718a..412c6f8c 100644 --- a/src/my/podcastaddict/android.py +++ b/src/my/podcastaddict/android.py @@ -7,7 +7,7 @@ from abc import abstractmethod from collections.abc import Iterable, Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from itertools import chain from pathlib import Path from typing import Protocol @@ -167,7 +167,7 @@ def playback_dt(self) -> datetime | None: if pb == -1: # todo might be nice to tell apart from None case? return None - return datetime.fromtimestamp(pb / 1000, tz=timezone.utc) + return datetime.fromtimestamp(pb / 1000, tz=UTC) @property def position_to_resume(self) -> int | None: @@ -203,7 +203,7 @@ def url(self) -> str | None: def publication_dt(self) -> datetime: # todo not 100% sure if it's UTC? # tricky to find out for sure, the app doesn't show podcast publication time.. 
- return datetime.fromtimestamp(self.row['publication_date'] / 1000, tz=timezone.utc) + return datetime.fromtimestamp(self.row['publication_date'] / 1000, tz=UTC) @property def short_description(self) -> str: diff --git a/src/my/polar.py b/src/my/polar.py index 5adee31a..c3b8b794 100644 --- a/src/my/polar.py +++ b/src/my/polar.py @@ -7,6 +7,7 @@ import json from collections.abc import Iterable, Sequence from dataclasses import dataclass +from datetime import datetime from pathlib import Path from typing import TYPE_CHECKING, NamedTuple, cast @@ -19,7 +20,6 @@ make_config, make_logger, ) -from my.core.compat import add_note, fromisoformat from my.core.error import sort_res_by from my.core.konsume import Wdict, Zoomable, wrap @@ -158,7 +158,7 @@ def load_item(self, meta: Zoomable) -> Iterable[Highlight]: cmap[hlid] = ccs comment = Comment( cid=cid.value, - created=fromisoformat(crt.value), + created=datetime.fromisoformat(crt.value), text=html.value, # TODO perhaps coonvert from html to text or org? ) ccs.append(comment) @@ -197,7 +197,7 @@ def load_item(self, meta: Zoomable) -> Iterable[Highlight]: yield Highlight( hid=hid, - created=fromisoformat(crt), + created=datetime.fromisoformat(crt), selection=text, comments=tuple(comments), tags=tuple(htags), @@ -234,7 +234,7 @@ def load(self) -> Iterable[Result]: path = Path(config.polar_dir) / 'stash' / filename yield Book( - created=fromisoformat(added), + created=datetime.fromisoformat(added), uid=self.uid, path=path, title=title, @@ -249,7 +249,7 @@ def iter_entries() -> Iterable[Result]: try: yield from loader.load() except Exception as e: - add_note(e, f'^ while processing {d}') + e.add_note(f'^ while processing {d}') yield e diff --git a/src/my/roamresearch.py b/src/my/roamresearch.py index 3b0e3700..7803d6ec 100644 --- a/src/my/roamresearch.py +++ b/src/my/roamresearch.py @@ -5,7 +5,7 @@ import re from collections.abc import Iterator -from datetime import datetime, timezone +from datetime import UTC, datetime from itertools import chain from pathlib import Path from typing import NamedTuple @@ -39,7 +39,7 @@ class Node(NamedTuple): def created(self) -> datetime: ct = self.raw.get(Keys.CREATED) if ct is not None: - return datetime.fromtimestamp(ct / 1000, tz=timezone.utc) + return datetime.fromtimestamp(ct / 1000, tz=UTC) # ugh. daily notes don't have create time for some reason??? title = self.title @@ -51,13 +51,13 @@ def created(self) -> datetime: return self.edited # fallback TODO log? 
# strip off 'th'/'rd' crap dts = m.group(1) + ' ' + m.group(2) + ' ' + m.group(3) - dt = datetime.strptime(dts, '%B %d %Y').replace(tzinfo=timezone.utc) + dt = datetime.strptime(dts, '%B %d %Y').replace(tzinfo=UTC) return dt @property def edited(self) -> datetime: rt = self.raw[Keys.EDITED] - return datetime.fromtimestamp(rt / 1000, tz=timezone.utc) + return datetime.fromtimestamp(rt / 1000, tz=UTC) @property def title(self) -> str | None: diff --git a/src/my/rss/feedbin.py b/src/my/rss/feedbin.py index 5f4da0a9..2d32c21a 100644 --- a/src/my/rss/feedbin.py +++ b/src/my/rss/feedbin.py @@ -4,10 +4,10 @@ import json from collections.abc import Iterator, Sequence +from datetime import datetime from pathlib import Path from my.core import Stats, get_files, stat -from my.core.compat import fromisoformat from .common import Subscription, SubscriptionState @@ -21,7 +21,7 @@ def parse_file(f: Path) -> Iterator[Subscription]: raw = json.loads(f.read_text()) for r in raw: yield Subscription( - created_at=fromisoformat(r['created_at']), + created_at=datetime.fromisoformat(r['created_at']), title=r['title'], url=r['site_url'], id=r['id'], @@ -32,7 +32,7 @@ def states() -> Iterator[SubscriptionState]: for f in inputs(): # TODO ugh. depends on my naming. not sure if useful? dts = f.stem.split('_')[-1] - dt = fromisoformat(dts) + dt = datetime.fromisoformat(dts) subs = list(parse_file(f)) yield dt, subs diff --git a/src/my/rss/feedly.py b/src/my/rss/feedly.py index 9bf5429b..716fa83b 100644 --- a/src/my/rss/feedly.py +++ b/src/my/rss/feedly.py @@ -5,7 +5,7 @@ import json from abc import abstractmethod from collections.abc import Iterator, Sequence -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Protocol @@ -51,6 +51,6 @@ def parse_file(f: Path) -> Iterator[Subscription]: def states() -> Iterator[SubscriptionState]: for f in inputs(): dts = f.stem.split('_')[-1] - dt = datetime.strptime(dts, '%Y%m%d%H%M%S').replace(tzinfo=timezone.utc) + dt = datetime.strptime(dts, '%Y%m%d%H%M%S').replace(tzinfo=UTC) subs = list(parse_file(f)) yield dt, subs diff --git a/src/my/runnerup.py b/src/my/runnerup.py index f5d7d1e4..cc67f1e8 100644 --- a/src/my/runnerup.py +++ b/src/my/runnerup.py @@ -7,14 +7,13 @@ ] from collections.abc import Iterable -from datetime import timedelta +from datetime import datetime, timedelta from pathlib import Path import tcxparser # type: ignore[import-untyped] from my.config import runnerup as config from my.core import Json, Res, get_files -from my.core.compat import fromisoformat # TODO later, use a proper namedtuple? Workout = Json @@ -42,7 +41,7 @@ def _parse(f: Path) -> Workout: return { 'id' : f.name, # not sure? 
- 'start_time' : fromisoformat(tcx.started_at), + 'start_time' : datetime.fromisoformat(tcx.started_at), 'duration' : timedelta(seconds=tcx.duration), 'sport' : sport, 'heart_rate_avg': tcx.hr_avg, diff --git a/src/my/smscalls.py b/src/my/smscalls.py index 27d08be4..5e9bb56f 100644 --- a/src/my/smscalls.py +++ b/src/my/smscalls.py @@ -24,7 +24,7 @@ class smscalls(user_config): config = make_config(smscalls) from collections.abc import Iterator -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Any, NamedTuple @@ -318,7 +318,7 @@ def _extract_mms(path: Path) -> Iterator[Res[MMS]]: # See https://github.com/karlicoss/HPI/pull/90#issuecomment-702422351 # for potentially parsing timezone from the readable_date def _parse_dt_ms(d: str) -> datetime: - return datetime.fromtimestamp(int(d) / 1000, tz=timezone.utc) + return datetime.fromtimestamp(int(d) / 1000, tz=UTC) def stats() -> Stats: diff --git a/src/my/stackexchange/gdpr.py b/src/my/stackexchange/gdpr.py index 8ed0d309..4eb0e446 100644 --- a/src/my/stackexchange/gdpr.py +++ b/src/my/stackexchange/gdpr.py @@ -4,13 +4,16 @@ # TODO need to merge gdpr and stexport -### config +from collections.abc import Iterable from dataclasses import dataclass +from datetime import datetime +from typing import NamedTuple from my.config import stackexchange as user_config from my.core import Json, PathIsh, get_files, make_config +### config @dataclass class stackexchange(user_config): gdpr_path: PathIsh # path to GDPR zip file @@ -20,12 +23,6 @@ class stackexchange(user_config): # TODO just merge all of them and then filter?.. not sure -from collections.abc import Iterable -from datetime import datetime -from typing import NamedTuple - -from my.core.compat import fromisoformat - class Vote(NamedTuple): j: Json @@ -33,7 +30,7 @@ class Vote(NamedTuple): @property def when(self) -> datetime: - return fromisoformat(self.j['eventTime']) + return datetime.fromisoformat(self.j['eventTime']) # todo Url return type? 
@property diff --git a/src/my/telegram/telegram_backup.py b/src/my/telegram/telegram_backup.py index f966e71f..6be80654 100644 --- a/src/my/telegram/telegram_backup.py +++ b/src/my/telegram/telegram_backup.py @@ -6,7 +6,7 @@ import sqlite3 from collections.abc import Iterator from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from struct import calcsize, unpack_from from my.config import telegram as user_config @@ -68,7 +68,7 @@ def _message_from_row(r: sqlite3.Row, *, chats: Chats, with_extra_media_info: bo ts = r['time'] # desktop export uses UTC (checked by exporting in winter time vs summer time) # and telegram_backup timestamps seem same as in desktop export - time = datetime.fromtimestamp(ts, tz=timezone.utc) + time = datetime.fromtimestamp(ts, tz=UTC) chat = chats[r['source_id']] sender = chats[r['sender_id']] diff --git a/src/my/tests/location/fallback.py b/src/my/tests/location/fallback.py index 8bbfb5ad..75c155b9 100644 --- a/src/my/tests/location/fallback.py +++ b/src/my/tests/location/fallback.py @@ -3,7 +3,7 @@ """ from collections.abc import Iterator -from datetime import datetime, timedelta, timezone +from datetime import UTC, datetime, timedelta import pytest from more_itertools import ilen @@ -30,32 +30,30 @@ def test_ip_fallback() -> None: # basic tests # try estimating slightly before the first IP - est = list(via_ip.estimate_location(datetime(2020, 1, 1, 11, 59, 59, tzinfo=timezone.utc))) + est = list(via_ip.estimate_location(datetime(2020, 1, 1, 11, 59, 59, tzinfo=UTC))) assert len(est) == 0 # during the duration for the first IP - est = list(via_ip.estimate_location(datetime(2020, 1, 1, 12, 30, 0, tzinfo=timezone.utc))) + est = list(via_ip.estimate_location(datetime(2020, 1, 1, 12, 30, 0, tzinfo=UTC))) assert len(est) == 1 # right after the 'for_duration' for an IP - est = list( - via_ip.estimate_location(datetime(2020, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + via_ip.config.for_duration + timedelta(seconds=1)) - ) + est = list(via_ip.estimate_location(datetime(2020, 1, 1, 12, 0, 0, tzinfo=UTC) + via_ip.config.for_duration + timedelta(seconds=1))) assert len(est) == 0 # on 2/1/2020, threes one IP if before 16:30 - est = list(via_ip.estimate_location(datetime(2020, 2, 1, 12, 30, 0, tzinfo=timezone.utc))) + est = list(via_ip.estimate_location(datetime(2020, 2, 1, 12, 30, 0, tzinfo=UTC))) assert len(est) == 1 # and two if after 16:30 - est = list(via_ip.estimate_location(datetime(2020, 2, 1, 17, 00, 0, tzinfo=timezone.utc))) + est = list(via_ip.estimate_location(datetime(2020, 2, 1, 17, 00, 0, tzinfo=UTC))) assert len(est) == 2 # the 12:30 IP should 'expire' before the 16:30 IP, use 3:30PM on the next day - est = list(via_ip.estimate_location(datetime(2020, 2, 2, 15, 30, 0, tzinfo=timezone.utc))) + est = list(via_ip.estimate_location(datetime(2020, 2, 2, 15, 30, 0, tzinfo=UTC))) assert len(est) == 1 - use_dt = datetime(2020, 3, 1, 12, 15, 0, tzinfo=timezone.utc) + use_dt = datetime(2020, 3, 1, 12, 15, 0, tzinfo=UTC) # test last IP est = list(via_ip.estimate_location(use_dt)) @@ -105,11 +103,11 @@ def _fe() -> Iterator[all.LocationEstimator]: assert all_est.datasource == "via_ip" # test that a home defined in shared_tz_config.py is used if no IP is found - loc = all.estimate_location(datetime(2021, 1, 1, 12, 30, 0, tzinfo=timezone.utc)) + loc = all.estimate_location(datetime(2021, 1, 1, 12, 30, 0, tzinfo=UTC)) assert loc.datasource == "via_home" # test a different home using location.fallback.all - bulgaria = 
all.estimate_location(datetime(2006, 1, 1, 12, 30, 0, tzinfo=timezone.utc)) + bulgaria = all.estimate_location(datetime(2006, 1, 1, 12, 30, 0, tzinfo=UTC)) assert bulgaria.datasource == "via_home" assert (bulgaria.lat, bulgaria.lon) == (42.697842, 23.325973) assert (loc.lat, loc.lon) != (bulgaria.lat, bulgaria.lon) @@ -117,11 +115,11 @@ def _fe() -> Iterator[all.LocationEstimator]: def data() -> Iterator[IP]: # random IP addresses - yield IP(addr="67.98.113.0", dt=datetime(2020, 1, 1, 12, 0, 0, tzinfo=timezone.utc)) - yield IP(addr="67.98.112.0", dt=datetime(2020, 1, 15, 12, 0, 0, tzinfo=timezone.utc)) - yield IP(addr="59.40.113.87", dt=datetime(2020, 2, 1, 12, 0, 0, tzinfo=timezone.utc)) - yield IP(addr="59.40.139.87", dt=datetime(2020, 2, 1, 16, 0, 0, tzinfo=timezone.utc)) - yield IP(addr="161.235.192.228", dt=datetime(2020, 3, 1, 12, 0, 0, tzinfo=timezone.utc)) + yield IP(addr="67.98.113.0", dt=datetime(2020, 1, 1, 12, 0, 0, tzinfo=UTC)) + yield IP(addr="67.98.112.0", dt=datetime(2020, 1, 15, 12, 0, 0, tzinfo=UTC)) + yield IP(addr="59.40.113.87", dt=datetime(2020, 2, 1, 12, 0, 0, tzinfo=UTC)) + yield IP(addr="59.40.139.87", dt=datetime(2020, 2, 1, 16, 0, 0, tzinfo=UTC)) + yield IP(addr="161.235.192.228", dt=datetime(2020, 3, 1, 12, 0, 0, tzinfo=UTC)) @pytest.fixture(autouse=True) diff --git a/src/my/tests/reddit.py b/src/my/tests/reddit.py index 4ddccf83..e6e6d851 100644 --- a/src/my/tests/reddit.py +++ b/src/my/tests/reddit.py @@ -15,7 +15,7 @@ def test_basic_1() -> None: # todo maybe this should call stat or something instead? # would ensure reasonable stat implementation as well and less duplication # note: deliberately use old module (instead of my.reddit.all) to test bwd compatibility - from my.reddit import saved + from my.reddit import saved # ty: ignore[possibly-missing-import] assert len(list(saved())) > 0 @@ -43,7 +43,7 @@ def test_saves() -> None: def test_preserves_extra_attr() -> None: # doesn't strictly belong here (not specific to reddit) # but my.reddit does a fair bit of dynamic hacking, so perhaps a good place to check nothing is lost - from my.reddit import config + from my.reddit import config # ty: ignore[possibly-missing-import] assert isinstance(getattr(config, 'please_keep_me'), str) diff --git a/src/my/tests/shared_tz_config.py b/src/my/tests/shared_tz_config.py index bcb98225..19cf82b0 100644 --- a/src/my/tests/shared_tz_config.py +++ b/src/my/tests/shared_tz_config.py @@ -2,7 +2,7 @@ Helper to test various timezone/location dependent things """ -from datetime import date, datetime, timezone +from datetime import UTC, date, datetime from pathlib import Path import pytest @@ -20,16 +20,14 @@ class google: takeout_path = _takeout_path class location: - # fmt: off home = ( # supports ISO strings - ('2005-12-04' , (42.697842, 23.325973)), # Bulgaria, Sofia + ('2005-12-04' , (42.697842, 23.325973)), # Bulgaria, Sofia # supports date/datetime objects - (date(year=1980, month=2, day=15) , (40.7128 , -74.0060 )), # NY + (date(year=1980, month=2, day=15) , (40.7128 , -74.0060 )), # NY # check tz handling.. 
- (datetime.fromtimestamp(1600000000, tz=timezone.utc), (55.7558 , 37.6173 )), # Moscow, Russia - ) - # fmt: on + (datetime.fromtimestamp(1600000000, tz=UTC), (55.7558 , 37.6173 )), # Moscow, Russia + ) # fmt: skip # note: order doesn't matter, will be sorted in the data provider class time: diff --git a/src/my/tests/twitter/archive.py b/src/my/tests/twitter/archive.py index 2c7bc9ab..7d6ff9d0 100644 --- a/src/my/tests/twitter/archive.py +++ b/src/my/tests/twitter/archive.py @@ -1,5 +1,5 @@ import json -from datetime import datetime, timezone +from datetime import UTC, datetime from my.twitter.archive import Tweet @@ -53,7 +53,7 @@ def test_tweet() -> None: """.strip() t = Tweet(json.loads(raw), screen_name='whatever') assert t.permalink == 'https://twitter.com/whatever/status/1269253350735982592' - assert t.dt == datetime(2020, 6, 6, 13, 2, 35, tzinfo=timezone.utc) + assert t.dt == datetime(2020, 6, 6, 13, 2, 35, tzinfo=UTC) assert ( t.text == 'Finally published the post about Promnesia: https://beepb00p.xyz/promnesia.html\n\nA story of how our browser history is broken and my attempt to fix it!' diff --git a/src/my/tests/tz.py b/src/my/tests/tz.py index 5c338fbd..5464f780 100644 --- a/src/my/tests/tz.py +++ b/src/my/tests/tz.py @@ -7,7 +7,6 @@ import my.time.tz.main as tz_main import my.time.tz.via_location as tz_via_location from my.core import notnone -from my.core.compat import fromisoformat from .shared_tz_config import config # noqa: F401 # autoused fixture @@ -47,7 +46,7 @@ def test_past() -> None: """ Should fallback to the 'home' location provider """ - dt = fromisoformat('2000-01-01 12:34:45') + dt = datetime.fromisoformat('2000-01-01 12:34:45') dt = tz_main.localize(dt) assert getzone(dt) == 'America/New_York' @@ -66,16 +65,16 @@ def test_get_tz(config) -> None: get_tz = tz_via_location.get_tz # not present in the test data - tz = get_tz(fromisoformat('2020-01-01 10:00:00')) + tz = get_tz(datetime.fromisoformat('2020-01-01 10:00:00')) assert notnone(tz).zone == 'Europe/Sofia' - tz = get_tz(fromisoformat('2017-08-01 11:00:00')) + tz = get_tz(datetime.fromisoformat('2017-08-01 11:00:00')) assert notnone(tz).zone == 'Europe/Vienna' - tz = get_tz(fromisoformat('2017-07-30 10:00:00')) + tz = get_tz(datetime.fromisoformat('2017-07-30 10:00:00')) assert notnone(tz).zone == 'Europe/Rome' - tz = get_tz(fromisoformat('2020-10-01 14:15:16')) + tz = get_tz(datetime.fromisoformat('2020-10-01 14:15:16')) assert tz is not None on_windows = sys.platform == 'win32' @@ -90,7 +89,7 @@ def test_get_tz(config) -> None: def test_policies() -> None: - naive = fromisoformat('2017-07-30 10:00:00') + naive = datetime.fromisoformat('2017-07-30 10:00:00') assert naive.tzinfo is None # just in case # actual timezone at the time diff --git a/src/my/tinder/android.py b/src/my/tinder/android.py index f4569867..ddbed19c 100644 --- a/src/my/tinder/android.py +++ b/src/my/tinder/android.py @@ -8,13 +8,13 @@ from collections import Counter, defaultdict from collections.abc import Iterator, Mapping, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from itertools import chain from pathlib import Path +from typing import assert_never from my.core import Paths, Res, Stats, datetime_aware, get_files, make_logger, stat from my.core.common import unique_everseen -from my.core.compat import add_note, assert_never from my.core.sqlite import sqlite_connection import my.config # isort: skip @@ -97,7 +97,7 @@ def _entities() -> Iterator[Res[_Entity]]: 
try: yield from _handle_db(db) except Exception as e: - add_note(e, f'^ while processing {path}') + e.add_note(f'^ while processing {path}') yield e @@ -120,21 +120,21 @@ def _handle_db(db: sqlite3.Connection) -> Iterator[Res[_Entity]]: try: yield _parse_person(row) except Exception as e: - add_note(e, f'^ while parsing {dict(row)}') + e.add_note(f'^ while parsing {dict(row)}') yield e for row in db.execute('SELECT * FROM match'): try: yield _parse_match(row) except Exception as e: - add_note(e, f'^ while parsing {dict(row)}') + e.add_note(f'^ while parsing {dict(row)}') yield e for row in db.execute('SELECT * FROM message'): try: yield _parse_msg(row) except Exception as e: - add_note(e, f'^ while parsing {dict(row)}') + e.add_note(f'^ while parsing {dict(row)}') yield e @@ -149,7 +149,7 @@ def _parse_match(row: sqlite3.Row) -> _Match: return _Match( id=row['id'], person_id=row['person_id'], - when=datetime.fromtimestamp(row['creation_date'] / 1000, tz=timezone.utc), + when=datetime.fromtimestamp(row['creation_date'] / 1000, tz=UTC), ) @@ -157,7 +157,7 @@ def _parse_msg(row: sqlite3.Row) -> _Message: # note it also has raw_message_data -- not sure which is best to use.. sent = row['sent_date'] return _Message( - sent=datetime.fromtimestamp(sent / 1000, tz=timezone.utc), + sent=datetime.fromtimestamp(sent / 1000, tz=UTC), id=row['id'], text=row['text'], match_id=row['match_id'], @@ -182,7 +182,7 @@ def entities() -> Iterator[Res[Entity]]: try: person = id2person[x.person_id] except Exception as e: - add_note(e, f'^ while processing {x}') + e.add_note(f'^ while processing {x}') yield e continue m = Match( @@ -199,7 +199,7 @@ def entities() -> Iterator[Res[Entity]]: from_ = id2person[x.from_id] to = id2person[x.to_id] except Exception as e: - add_note(e, f'^ while processing {x}') + e.add_note(f'^ while processing {x}') continue yield Message( sent=x.sent, @@ -235,7 +235,7 @@ def match2messages() -> Iterator[Res[Mapping[Match, Sequence[Message]]]]: try: ml = res[x.match] except Exception as e: - add_note(e, f'^ while processing {x}') + e.add_note(f'^ while processing {x}') yield e continue ml.append(x) diff --git a/src/my/topcoder.py b/src/my/topcoder.py index 40df77c5..86b64d95 100644 --- a/src/my/topcoder.py +++ b/src/my/topcoder.py @@ -1,11 +1,11 @@ import json from collections.abc import Iterator, Sequence from dataclasses import dataclass +from datetime import datetime from functools import cached_property from pathlib import Path from my.core import Res, datetime_aware, get_files -from my.core.compat import fromisoformat from my.experimental.destructive_parsing import Manager from my.config import topcoder as config # type: ignore[attr-defined] # isort: skip @@ -28,7 +28,7 @@ def uid(self) -> str: @cached_property def when(self) -> datetime_aware: - return fromisoformat(self.date_str) + return datetime.fromisoformat(self.date_str) @classmethod def make(cls, j) -> Iterator[Res['Competition']]: diff --git a/src/my/twitter/android.py b/src/my/twitter/android.py index 05e66544..950797a1 100644 --- a/src/my/twitter/android.py +++ b/src/my/twitter/android.py @@ -8,7 +8,7 @@ import sqlite3 from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from struct import unpack_from @@ -288,7 +288,7 @@ def _query_one(*, where: str, quoted: set[int]) -> Iterator[Res[Tweet]]: yield Tweet( id_str=tweet_id, # TODO double check it's utc? 
- created_at=datetime.fromtimestamp(created_ms / 1000, tz=timezone.utc), + created_at=datetime.fromtimestamp(created_ms / 1000, tz=UTC), screen_name=user_username, text=content, ) diff --git a/src/my/twitter/talon.py b/src/my/twitter/talon.py index bd1adf10..9992b8fe 100644 --- a/src/my/twitter/talon.py +++ b/src/my/twitter/talon.py @@ -9,7 +9,7 @@ from abc import abstractmethod from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from my.core import Paths, Res, datetime_aware, get_files @@ -108,7 +108,7 @@ def _parse_tweet(row: sqlite3.Row) -> Tweet: # uses https://docs.oracle.com/javase/7/docs/api/java/util/Date.html#getTime() # and it's created here, so looks like it's properly parsed from the api # https://github.com/Twitter4J/Twitter4J/blob/8376fade8d557896bb9319fb46e39a55b134b166/twitter4j-core/src/internal-json/java/twitter4j/ParseUtil.java#L69-L79 - created_at = datetime.fromtimestamp(row['time'] / 1000, tz=timezone.utc) + created_at = datetime.fromtimestamp(row['time'] / 1000, tz=UTC) text = row['text'] # try explanding URLs.. sadly there are no positions in the db diff --git a/src/my/twitter/twint.py b/src/my/twitter/twint.py index 5276c88d..c0739aad 100644 --- a/src/my/twitter/twint.py +++ b/src/my/twitter/twint.py @@ -3,7 +3,7 @@ """ from collections.abc import Iterator from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import NamedTuple @@ -53,7 +53,7 @@ def id_str(self) -> TweetId: @property def created_at(self) -> datetime_aware: seconds = self.row['created_at'] / 1000 - tz = timezone.utc + tz = UTC # NOTE: UTC seems to be the case at least for the older version of schema I was using # in twint, it was extracted from "data-time-ms" field in the scraped HML # https://github.com/twintproject/twint/blob/e3345426eb24154ff084be22e4fed5cfa4631930/twint/tweet.py#L85 diff --git a/src/my/vk/favorites.py b/src/my/vk/favorites.py index bdd0a030..2657ca92 100644 --- a/src/my/vk/favorites.py +++ b/src/my/vk/favorites.py @@ -4,7 +4,7 @@ import json from collections.abc import Iterable, Iterator from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from my.config import vk as config # type: ignore[attr-defined] from my.core import Json, Stats, datetime_aware, stat @@ -47,7 +47,7 @@ def parse_fav(j: Json) -> Favorite: # TODO would be nice to include user return Favorite( - dt=datetime.fromtimestamp(j['date'], tz=timezone.utc), + dt=datetime.fromtimestamp(j['date'], tz=UTC), title=title, url=url, text=j['text'], diff --git a/src/my/whatsapp/android.py b/src/my/whatsapp/android.py index 7f61f978..a4ec2694 100644 --- a/src/my/whatsapp/android.py +++ b/src/my/whatsapp/android.py @@ -7,12 +7,11 @@ import sqlite3 from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from my.core import Paths, Res, datetime_aware, get_files, make_config, make_logger from my.core.common import unique_everseen -from my.core.compat import add_note from my.core.error import notnone from my.core.sqlite import sqlite_connection @@ -143,7 +142,7 @@ def _process_db(db: sqlite3.Connection) -> Iterator[Entity]: ): msg_id: str = notnone(r['key_id']) ts: int = notnone(r['timestamp']) - dt = datetime.fromtimestamp(ts / 
1000, tz=timezone.utc) + dt = datetime.fromtimestamp(ts / 1000, tz=UTC) text: str | None = r['text_data'] media_file_path: str | None = r['file_path'] @@ -222,7 +221,7 @@ def _entities() -> Iterator[Res[Entity]]: try: yield from _process_db(db) except Exception as e: - add_note(e, f'^ while processing {path}') + e.add_note(f'^ while processing {path}') def entities() -> Iterator[Res[Entity]]: diff --git a/src/my/zotero.py b/src/my/zotero.py index 210e9c4b..1e6616bd 100644 --- a/src/my/zotero.py +++ b/src/my/zotero.py @@ -4,7 +4,7 @@ import sqlite3 from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from pathlib import Path from typing import Any @@ -175,7 +175,7 @@ def _parse_annotation(r: dict) -> Annotation: # fmt: on added = datetime.strptime(addeds, '%Y-%m-%d %H:%M:%S') - added = added.replace(tzinfo=timezone.utc) + added = added.replace(tzinfo=UTC) item = Item( file=Path(path), # path is a bit misleading... could mean some internal DOM path? diff --git a/src/my/zulip/organization.py b/src/my/zulip/organization.py index 0f37d8df..a3173903 100644 --- a/src/my/zulip/organization.py +++ b/src/my/zulip/organization.py @@ -8,9 +8,10 @@ from abc import abstractmethod from collections.abc import Iterator, Sequence from dataclasses import dataclass -from datetime import datetime, timezone +from datetime import UTC, datetime from itertools import count from pathlib import Path +from typing import assert_never from my.core import ( Json, @@ -23,7 +24,6 @@ stat, warnings, ) -from my.core.compat import assert_never logger = make_logger(__name__) @@ -163,7 +163,7 @@ def _parse_message(j: Json) -> _Message: # fmt: off return _Message( id = j['id'], - sent = datetime.fromtimestamp(ds, tz=timezone.utc), + sent = datetime.fromtimestamp(ds, tz=UTC), subject = j['subject'], sender_id = j['sender'], server_id = server.id, diff --git a/ty.toml b/ty.toml index 976465a9..4cd7d625 100644 --- a/ty.toml +++ b/ty.toml @@ -4,6 +4,3 @@ exclude = [ [rules] type-assertion-failure = "ignore" # TODO many false positives for now, sort out later -missing-argument = "ignore" # typed dicts/kwargs unpacking triggers these a lot https://github.com/astral-sh/ty/issues/154 -possibly-unbound-import = "ignore" # my.config; sort out later -possibly-unbound-attribute = "ignore" # see https://github.com/astral-sh/ty/issues/623
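
For context, the migration above relies on stdlib features that landed in Python 3.11/3.12, which is what lets the patch drop the my.core.compat shims and bump requires-python to ">=3.12": datetime.UTC as an alias for timezone.utc, datetime.fromisoformat accepting a trailing 'Z' suffix, BaseException.add_note, typing.assert_never / assert_type in the stdlib, and PEP 695 generic syntax (the UP040/UP047 ruff ignores keep the old TypeVar style in places until cachew is confirmed to work with it, per the FIXME in ruff.toml). Below is a minimal sketch, not part of the patch, exercising those replacements in isolation; the Box class name is purely illustrative and does not exist in the repo.

    from datetime import UTC, datetime

    # fromisoformat handles the 'Z' suffix natively on 3.11+, so the compat wrapper is redundant
    assert datetime.fromisoformat('2020-11-30T00:53:12Z') == datetime(2020, 11, 30, 0, 53, 12, tzinfo=UTC)

    class Box[T]:  # PEP 695 generic class (3.12+), no explicit TypeVar/Generic needed
        def __init__(self, value: T) -> None:
            self.value = value

    try:
        raise RuntimeError('boom')
    except RuntimeError as e:
        e.add_note('^ extra context')  # stdlib replacement for my.core.compat.add_note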