From a1de73f50e30aa185cff47f1e362412761558115 Mon Sep 17 00:00:00 2001 From: Dima Gerasimov Date: Sun, 12 Oct 2025 21:48:22 +0100 Subject: [PATCH] general: migrate to python 3.12 `type ... = ` syntax after the cachew fix --- pyproject.toml | 2 +- ruff.toml | 4 --- src/my/coding/commits.py | 4 +-- src/my/core/cachew.py | 22 +++++--------- src/my/core/cfg.py | 13 +++------ src/my/core/common.py | 1 - src/my/core/denylist.py | 10 +++---- src/my/core/discovery_pure.py | 4 +-- src/my/core/error.py | 34 +++++++--------------- src/my/core/freezer.py | 12 ++++---- src/my/core/hpi_compat.py | 7 ++--- src/my/core/internal.py | 2 +- src/my/core/konsume.py | 6 ++-- src/my/core/orgmode.py | 18 ++++-------- src/my/core/pandas.py | 22 ++++++-------- src/my/core/query.py | 24 ++++++---------- src/my/core/source.py | 19 ++++-------- src/my/core/sqlite.py | 9 +++--- src/my/core/stats.py | 1 - src/my/core/tests/denylist.py | 2 +- src/my/core/utils/concurrent.py | 7 ++--- src/my/core/utils/itertools.py | 51 ++++++++++----------------------- src/my/tests/reddit.py | 2 +- src/my/time/tz/main.py | 1 + src/my/time/tz/via_location.py | 3 +- 25 files changed, 101 insertions(+), 179 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fa5933fc..5c88192d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ optional = [ # todo document these? "orjson", # for my.core.serialize "pyfzf_iter", # for my.core.denylist - "cachew>=0.15.20231019", + "cachew>=0.22.20251013", # min version that makes type = syntax properly work "mypy", # used for config checks "colorlog", # for colored logs "enlighten", # for CLI progress bars diff --git a/ruff.toml b/ruff.toml index 9147f78b..29bc6f82 100644 --- a/ruff.toml +++ b/ruff.toml @@ -98,10 +98,6 @@ lint.ignore = [ "PLC0415", # "imports should be at the top level" -- not realistic "ARG001", # ugh, kinda annoying when using pytest fixtures - - # FIXME hmm. 
Need to figure out if cachew works fine with type = defined types before updating things.. - "UP047", # non-pep695-generic-function - "UP040", # non-pep695-type-alias ] lint.per-file-ignores."src/my/core/compat.py" = [ diff --git a/src/my/coding/commits.py b/src/my/coding/commits.py index 39bcb5a4..f87e3587 100644 --- a/src/my/coding/commits.py +++ b/src/my/coding/commits.py @@ -198,7 +198,7 @@ def _commits(_repos: list[Path]) -> Iterator[Commit]: yield from _cached_commits(r) -def _cached_commits_path(p: Path) -> str: +def _cached_commits_path(p: Path) -> Path | str: p = cache_dir() / 'my.coding.commits:_cached_commits' / str(p.absolute()).strip("/") p.mkdir(parents=True, exist_ok=True) return str(p) @@ -208,7 +208,7 @@ def _cached_commits_path(p: Path) -> str: @mcachew( depends_on=_repo_depends_on, logger=log, - cache_path=_cached_commits_path, + cache_path=_cached_commits_path, # type: ignore[arg-type] # hmm mypy seems confused here? likely a bug in type + paramspec handling... ) def _cached_commits(repo: Path) -> Iterator[Commit]: log.debug('processing %s', repo) diff --git a/src/my/core/cachew.py b/src/my/core/cachew.py index dc2eba2c..4bcab7af 100644 --- a/src/my/core/cachew.py +++ b/src/my/core/cachew.py @@ -7,7 +7,6 @@ from typing import ( TYPE_CHECKING, Any, - TypeVar, cast, overload, ) @@ -16,7 +15,7 @@ from .
import warnings -PathIsh = str | Path # avoid circular import from .common +type PathIsh = str | Path # avoid circular import from .common def disable_cachew() -> None: @@ -120,31 +119,24 @@ def _mcachew_impl(cache_path=_cache_path_dflt, **kwargs): if TYPE_CHECKING: - R = TypeVar('R') - from typing import ParamSpec - - P = ParamSpec('P') - CC = Callable[P, R] # need to give it a name, if inlined into bound=, mypy runs in a bug - PathProvider = PathIsh | Callable[P, PathIsh] + type PathProvider[**P] = PathIsh | Callable[P, PathIsh] # NOTE: in cachew, HashFunction type returns str # however in practice, cachew always calls str for its result # so perhaps better to switch it to Any in cachew as well - HashFunction = Callable[P, Any] - - F = TypeVar('F', bound=Callable) + type HashFunction[**P] = Callable[P, Any] # we need two versions due to @doublewrap # this is when we just annotate as @cachew without any args @overload - def mcachew(fun: F) -> F: ... + def mcachew[F: Callable](fun: F) -> F: ... @overload - def mcachew( - cache_path: PathProvider | None = ..., + def mcachew[F, **P]( + cache_path: PathProvider[P] | None = ..., # ty: ignore[too-many-positional-arguments] *, force_file: bool = ..., cls: type | None = ..., - depends_on: HashFunction = ..., + depends_on: HashFunction[P] = ..., # ty: ignore[too-many-positional-arguments] logger: logging.Logger | None = ..., chunk_by: int = ..., synthetic_key: str | None = ..., diff --git a/src/my/core/cfg.py b/src/my/core/cfg.py index 7ced194f..c62cdbcd 100644 --- a/src/my/core/cfg.py +++ b/src/my/core/cfg.py @@ -6,17 +6,15 @@ import sys from collections.abc import Callable, Iterator from contextlib import ExitStack, contextmanager -from typing import Any, TypeVar +from typing import Any -Attrs = dict[str, Any] - -C = TypeVar('C') +type Attrs = dict[str, Any] # todo not sure about it, could be overthinking... # but short enough to change later # TODO document why it's necessary? 
-def make_config(cls: type[C], migration: Callable[[Attrs], Attrs] = lambda x: x) -> C: +def make_config[C](cls: type[C], migration: Callable[[Attrs], Attrs] = lambda x: x) -> C: user_config = cls.__base__ old_props = { # NOTE: deliberately use gettatr to 'force' class properties here @@ -34,11 +32,8 @@ def make_config(cls: type[C], migration: Callable[[Attrs], Attrs] = lambda x: x) return cls(**params) -F = TypeVar('F') - - @contextmanager -def _override_config(config: F) -> Iterator[F]: +def _override_config[F](config: F) -> Iterator[F]: ''' Temporary override for config's parameters, useful for testing/fake data/etc. ''' diff --git a/src/my/core/common.py b/src/my/core/common.py index 90575542..0943e12b 100644 --- a/src/my/core/common.py +++ b/src/my/core/common.py @@ -92,7 +92,6 @@ def caller() -> str: traceback.print_stack() if guess_compression: - from kompress import CPath, is_compressed # note: ideally we'd just wrap everything in CPath for simplicity, however diff --git a/src/my/core/denylist.py b/src/my/core/denylist.py index e0a093aa..965b08d8 100644 --- a/src/my/core/denylist.py +++ b/src/my/core/denylist.py @@ -14,7 +14,7 @@ from collections import defaultdict from collections.abc import Iterator, Mapping from pathlib import Path -from typing import Any, TypeVar +from typing import Any import click from more_itertools import seekable @@ -22,16 +22,14 @@ from .serialize import dumps from .warnings import medium -T = TypeVar("T") +type DenyMap = Mapping[str, set[Any]] -DenyMap = Mapping[str, set[Any]] - -def _default_key_func(obj: T) -> str: +def _default_key_func[T](obj: T) -> str: return str(obj) -class DenyList: +class DenyList[T]: def __init__(self, denylist_file: Path | str) -> None: self.file = Path(denylist_file).expanduser().absolute() self._deny_raw_list: list[dict[str, Any]] = [] diff --git a/src/my/core/discovery_pure.py b/src/my/core/discovery_pure.py index 8c2f8ac8..c2b96d19 100644 --- a/src/my/core/discovery_pure.py +++ 
b/src/my/core/discovery_pure.py @@ -23,12 +23,12 @@ import re from collections.abc import Iterable, Sequence from pathlib import Path -from typing import Any, NamedTuple, TypeAlias, cast +from typing import Any, NamedTuple, cast ''' None means that requirements weren't defined (different from empty requirements) ''' -Requires: TypeAlias = Sequence[str] | None +type Requires = Sequence[str] | None class HPIModule(NamedTuple): diff --git a/src/my/core/error.py b/src/my/core/error.py index 1e5cba6d..5619aede 100644 --- a/src/my/core/error.py +++ b/src/my/core/error.py @@ -10,39 +10,30 @@ from collections.abc import Callable, Iterable, Iterator from datetime import date, datetime from itertools import tee -from typing import ( - Any, - Literal, - TypeAlias, - TypeVar, - cast, -) +from typing import Any, Literal, cast from .types import Json from .warnings import medium -T = TypeVar('T') -E = TypeVar('E', bound=Exception) # TODO make covariant? +type ResT[T, E: Exception] = T | E -ResT: TypeAlias = T | E - -Res: TypeAlias = ResT[T, Exception] +type Res[T] = ResT[T, Exception] ErrorPolicy = Literal["yield", "raise", "drop"] -def notnone(x: T | None) -> T: +def notnone[T](x: T | None) -> T: assert x is not None return x -def unwrap(res: Res[T]) -> T: +def unwrap[T](res: Res[T]) -> T: if isinstance(res, Exception): raise res return res -def drop_exceptions(itr: Iterator[Res[T]]) -> Iterator[T]: +def drop_exceptions[T](itr: Iterator[Res[T]]) -> Iterator[T]: """Return non-errors from the iterable""" for o in itr: if isinstance(o, Exception): @@ -50,7 +41,7 @@ def drop_exceptions(itr: Iterator[Res[T]]) -> Iterator[T]: yield o -def raise_exceptions(itr: Iterable[Res[T]]) -> Iterator[T]: +def raise_exceptions[T](itr: Iterable[Res[T]]) -> Iterator[T]: """Raise errors from the iterable, stops the select function""" for o in itr: if isinstance(o, Exception): @@ -58,7 +49,7 @@ def raise_exceptions(itr: Iterable[Res[T]]) -> Iterator[T]: yield o -def warn_exceptions(itr: 
Iterable[Res[T]], warn_func: Callable[[Exception], None] | None = None) -> Iterator[T]: +def warn_exceptions[T](itr: Iterable[Res[T]], warn_func: Callable[[Exception], None] | None = None) -> Iterator[T]: # if not provided, use the 'warnings' module if warn_func is None: @@ -76,12 +67,12 @@ def _warn_func(e: Exception) -> None: # TODO deprecate in favor of Exception.add_note? -def echain(ex: E, cause: Exception) -> E: +def echain[E: Exception](ex: E, cause: Exception) -> E: ex.__cause__ = cause return ex -def split_errors(l: Iterable[ResT[T, E]], ET: type[E]) -> tuple[Iterable[T], Iterable[E]]: +def split_errors[T, E: Exception](l: Iterable[ResT[T, E]], ET: type[E]) -> tuple[Iterable[T], Iterable[E]]: # TODO would be nice to have ET=Exception default? but it causes some mypy complaints? vit, eit = tee(l) # TODO ugh, not sure if I can reconcile type checking and runtime and convince mypy that ET and E are the same type? @@ -96,10 +87,7 @@ def split_errors(l: Iterable[ResT[T, E]], ET: type[E]) -> tuple[Iterable[T], Ite return (values, errors) -K = TypeVar('K') - - -def sort_res_by(items: Iterable[Res[T]], key: Callable[[Any], K]) -> list[Res[T]]: +def sort_res_by[T, K](items: Iterable[Res[T]], key: Callable[[Any], K]) -> list[Res[T]]: """ Sort a sequence potentially interleaved with errors/entries on which the key can't be computed. 
The general idea is: the error sticks to the non-error entry that follows it diff --git a/src/my/core/freezer.py b/src/my/core/freezer.py index 78ba467c..2874e06d 100644 --- a/src/my/core/freezer.py +++ b/src/my/core/freezer.py @@ -54,15 +54,17 @@ def untyped(self): def test_freezer() -> None: - val = _A(x={ - 'an_int': 123, - 'an_any': [1, 2, 3], - }) + val = _A( + x={ + 'an_int': 123, + 'an_any': [1, 2, 3], + } + ) af = Freezer(_A) fval = af.freeze(val) fd = vars(fval) - assert fd['typed'] == 123 + assert fd['typed'] == 123 assert fd['untyped'] == [1, 2, 3] diff --git a/src/my/core/hpi_compat.py b/src/my/core/hpi_compat.py index 8da35844..6ce19b65 100644 --- a/src/my/core/hpi_compat.py +++ b/src/my/core/hpi_compat.py @@ -10,7 +10,7 @@ import pkgutil import re from collections.abc import Iterator, MutableSequence, Sequence -from typing import Any, TypeVar +from typing import Any from . import warnings @@ -112,11 +112,8 @@ def _get_dal(cfg, module_name: str): return import_module(f'my.config.repos.{module_name}.dal') -V = TypeVar('V') - - # named to be kinda consistent with more_itertools, e.g. more_itertools.always_iterable -class always_supports_sequence(Iterator[V]): +class always_supports_sequence[V](Iterator[V]): """ Helper to make migration from Sequence/List to Iterable/Iterator type backwards compatible in runtime """ diff --git a/src/my/core/internal.py b/src/my/core/internal.py index e634d285..3d8769bd 100644 --- a/src/my/core/internal.py +++ b/src/my/core/internal.py @@ -1,6 +1,7 @@ """ Utils specific to hpi core, shouldn't really be used by HPI modules """ + from __future__ import annotations import importlib.metadata @@ -35,7 +36,6 @@ def warn_if_not_using_src_layout(path: Sequence[str]) -> None: # nothing to check return - from . 
import warnings MSG = ''' diff --git a/src/my/core/konsume.py b/src/my/core/konsume.py index bc42ad50..9d59de9c 100644 --- a/src/my/core/konsume.py +++ b/src/my/core/konsume.py @@ -142,9 +142,11 @@ def wrap(j, *, throw=True) -> Iterator[Zoomable]: if not c.this_consumed(): # TODO hmm. how does it figure out if it's consumed??? if throw: # TODO need to keep a full path or something... - raise UnconsumedError(f''' + raise UnconsumedError( + f''' Expected {c} to be fully consumed by the parser. -'''.lstrip()) +'''.lstrip() + ) else: # TODO log? pass diff --git a/src/my/core/orgmode.py b/src/my/core/orgmode.py index 5695a825..09f4b846 100644 --- a/src/my/core/orgmode.py +++ b/src/my/core/orgmode.py @@ -2,8 +2,13 @@ Various helpers for reading org-mode data """ +from collections.abc import Callable, Iterable from datetime import datetime +from more_itertools import one +from orgparse import OrgNode +from orgparse.extra import Table + def parse_org_datetime(s: str) -> datetime: s = s.strip('[]') @@ -23,24 +28,13 @@ def parse_org_datetime(s: str) -> datetime: # TODO I guess want to borrow inspiration from bs4? 
element type <-> tag; and similar logic for find_one, find_all -from collections.abc import Callable, Iterable -from typing import TypeVar - -from orgparse import OrgNode - -V = TypeVar('V') - -def collect(n: OrgNode, cfun: Callable[[OrgNode], Iterable[V]]) -> Iterable[V]: +def collect[V](n: OrgNode, cfun: Callable[[OrgNode], Iterable[V]]) -> Iterable[V]: yield from cfun(n) for c in n.children: yield from collect(c, cfun) -from more_itertools import one -from orgparse.extra import Table - - def one_table(o: OrgNode) -> Table: return one(collect(o, lambda n: (x for x in n.body_rich if isinstance(x, Table)))) diff --git a/src/my/core/pandas.py b/src/my/core/pandas.py index bbd91262..bbc604b2 100644 --- a/src/my/core/pandas.py +++ b/src/my/core/pandas.py @@ -14,7 +14,6 @@ TYPE_CHECKING, Any, Literal, - TypeVar, ) from decorator import decorator @@ -30,19 +29,16 @@ if TYPE_CHECKING: import pandas as pd - DataFrameT = pd.DataFrame - SeriesT = pd.Series + type DataFrameT = pd.DataFrame + type SeriesT[T] = pd.Series[T] from pandas._typing import S1 # meh - FuncT = TypeVar('FuncT', bound=Callable[..., DataFrameT]) # huh interesting -- with from __future__ import annotations don't even need else clause here? # but still if other modules import these we do need some fake runtime types here.. 
else: - from typing import Optional - - DataFrameT = Any - SeriesT = Optional # just some type with one argument - S1 = Any + type DataFrameT = Any + type SeriesT[T] = T | None # just some type with one argument + type S1 = Any def _check_dateish(s: SeriesT[S1]) -> Iterable[str]: @@ -100,13 +96,11 @@ def test_check_dateish() -> None: # fmt: on -# fmt: off ErrorColPolicy = Literal[ 'add_if_missing', # add error column if it's missing 'warn' , # warn, but do not modify 'ignore' , # no warnings -] -# fmt: on +] # fmt: skip def check_error_column(df: DataFrameT, *, policy: ErrorColPolicy) -> Iterable[str]: @@ -129,7 +123,9 @@ def check_error_column(df: DataFrameT, *, policy: ErrorColPolicy) -> Iterable[st # TODO ugh. typing this is a mess... perhaps should use ParamSpec? @decorator -def check_dataframe(f: FuncT, error_col_policy: ErrorColPolicy = 'add_if_missing', *args, **kwargs) -> DataFrameT: +def check_dataframe[FuncT: Callable[..., DataFrameT]]( + f: FuncT, error_col_policy: ErrorColPolicy = 'add_if_missing', *args, **kwargs +) -> DataFrameT: df: DataFrameT = f(*args, **kwargs) tag = '{f.__module__}:{f.__name__}' # makes sense to keep super defensive diff --git a/src/my/core/query.py b/src/my/core/query.py index d056558b..50b15b4b 100644 --- a/src/my/core/query.py +++ b/src/my/core/query.py @@ -13,11 +13,7 @@ import itertools from collections.abc import Callable, Iterable, Iterator from datetime import datetime -from typing import ( - Any, - NamedTuple, - TypeVar, -) +from typing import Any, NamedTuple import more_itertools @@ -26,17 +22,15 @@ from .types import is_namedtuple from .warnings import low -T = TypeVar("T") -ET = Res[T] +type ET[T] = Res[T] -U = TypeVar("U") # In a perfect world, the return value from a OrderFunc would just be U, # not Optional[U]. 
However, since this has to deal with so many edge # cases, there's a possibility that the functions generated by # _generate_order_by_func can't find an attribute -OrderFunc = Callable[[ET], U | None] -Where = Callable[[ET], bool] +type OrderFunc[T, U] = Callable[[ET[T]], U | None] +type Where[T] = Callable[[ET[T]], bool] # the generated OrderFunc couldn't handle sorting this @@ -82,7 +76,7 @@ def locate_qualified_function(qualified_name: str) -> Callable[[], Iterable[ET]] return locate_function(qualified_name[:rdot_index], qualified_name[rdot_index + 1 :]) -def attribute_func(obj: T, where: Where, default: U | None = None) -> OrderFunc | None: +def attribute_func[T, U](obj: T, where: Where, default: U | None = None) -> OrderFunc | None: """ Attempts to find an attribute which matches the 'where_function' on the object, using some getattr/dict checks. Returns a function which when called with @@ -126,7 +120,7 @@ class A(NamedTuple): return None -def _generate_order_by_func( +def _generate_order_by_func[T, U]( obj_res: Res[T], *, key: str | None = None, @@ -289,7 +283,7 @@ def _handle_unsorted( # different types. ***This consumes the iterator***, so # you should definitely itertoolts.tee it beforehand # as to not exhaust the values -def _generate_order_value_func(itr: Iterator[ET], order_value: Where, default: U | None = None) -> OrderFunc: +def _generate_order_value_func[U](itr: Iterator[ET], order_value: Where, default: U | None = None) -> OrderFunc: # TODO: add a kwarg to force lookup for every item? 
would sort of be like core.common.guess_datetime then order_by_lookup: dict[Any, OrderFunc] = {} @@ -316,7 +310,7 @@ def _generate_order_value_func(itr: Iterator[ET], order_value: Where, default: U # handles the arguments from the user, creating a order_value function # at least one of order_by, order_key or order_value must have a value -def _handle_generate_order_by( +def _handle_generate_order_by[U]( itr, *, order_by: OrderFunc | None = None, @@ -348,7 +342,7 @@ def _handle_generate_order_by( raise QueryException("Could not determine a way to order src iterable - at least one of the order args must be set") -def select( +def select[U]( src: Iterable[ET] | Callable[[], Iterable[ET]], *, where: Where | None = None, diff --git a/src/my/core/source.py b/src/my/core/source.py index 21d281cf..674810c1 100644 --- a/src/my/core/source.py +++ b/src/my/core/source.py @@ -8,30 +8,21 @@ import warnings from collections.abc import Callable, Iterable, Iterator from functools import wraps -from typing import Any, TypeVar +from typing import Any from .warnings import medium -# The factory function may produce something that has data -# similar to the shared model, but not exactly, so not -# making this a TypeVar, is just to make reading the -# type signature below a bit easier... -T = Any - -# https://mypy.readthedocs.io/en/latest/generics.html?highlight=decorators#decorator-factories -FactoryF = TypeVar("FactoryF", bound=Callable[..., Iterator[T]]) - _DEFAULT_ITR = () # tried to use decorator module but it really doesn't work well # with types and kw-arguments... 
:/ -def import_source( +def import_source[T, F: Callable[..., Iterator[Any]]]( *, default: Iterable[T] = _DEFAULT_ITR, module_name: str | None = None, help_url: str | None = None, -) -> Callable[..., Callable[..., Iterator[T]]]: +) -> Callable[[F], F]: """ doesn't really play well with types, but is used to catch ModuleNotFoundError's for when modules aren't installed in @@ -44,7 +35,7 @@ def import_source( nothing and warns instead """ - def decorator(factory_func: FactoryF) -> Callable[..., Iterator[T]]: + def decorator(factory_func: F) -> F: @wraps(factory_func) def wrapper(*args, **kwargs) -> Iterator[T]: try: @@ -80,6 +71,6 @@ class core: raise err yield from default - return wrapper + return wrapper # type: ignore[return-value] # I think not possible to make it consistent since F is dependent on T? return decorator diff --git a/src/my/core/sqlite.py b/src/my/core/sqlite.py index d95e0538..28491e7c 100644 --- a/src/my/core/sqlite.py +++ b/src/my/core/sqlite.py @@ -32,7 +32,7 @@ def test_sqlite_connect_immutable(tmp_path: Path) -> None: conn.execute('DROP TABLE testtable') -SqliteRowFactory = Callable[[sqlite3.Cursor, sqlite3.Row], Any] +type SqliteRowFactory = Callable[[sqlite3.Cursor, sqlite3.Row], Any] def dict_factory(cursor, row): @@ -40,7 +40,7 @@ def dict_factory(cursor, row): return dict(zip(fields, row, strict=True)) -Factory = SqliteRowFactory | Literal['row', 'dict'] +type Factory = SqliteRowFactory | Literal['row', 'dict'] @contextmanager @@ -68,7 +68,6 @@ def sqlite_connection( else: assert_never(row_factory) # ty: ignore[type-assertion-failure] # I think ty is confused about callable() - if _via_apsw: try: # for now, defensive, will see later how to do it properly @@ -124,6 +123,7 @@ def sqlite_copy_and_open(db: PathIsh) -> sqlite3.Connection: # and then the return type ends up as Iterator[Tuple[str, ...]], which isn't desirable :( # a bit annoying to have this copy-pasting, but hopefully not a big issue + # fmt: off @overload def select(cols: 
tuple[str ], rest: str, *, db: sqlite3.Connection) -> \ @@ -151,6 +151,7 @@ def select(cols: tuple[str, str, str, str, str, str, str, str], rest: str, *, db Iterator[tuple[Any, Any, Any, Any, Any, Any, Any, Any]]: ... # fmt: on + def select(cols, rest, *, db): # db arg is last cause that results in nicer code formatting.. return db.execute('SELECT ' + ','.join(cols) + ' ' + rest) @@ -189,7 +190,7 @@ def get_table_schema(self, name: str) -> dict[str, str]: """ schema: dict[str, str] = {} for row in self.connection.execute(f'PRAGMA table_info(`{name}`)'): - col = row[1] + col = row[1] type_ = row[2] # hmm, somewhere between 3.34.1 and 3.37.2, sqlite started normalising type names to uppercase # let's do this just in case since python < 3.10 are using the old version diff --git a/src/my/core/stats.py b/src/my/core/stats.py index 8b09a66e..b39f8982 100644 --- a/src/my/core/stats.py +++ b/src/my/core/stats.py @@ -297,7 +297,6 @@ def _sig_required_params(sig: inspect.Signature) -> Iterator[inspect.Parameter]: def test_sig_required_params() -> None: - def x() -> int: return 5 diff --git a/src/my/core/tests/denylist.py b/src/my/core/tests/denylist.py index 73c3165d..18cfe4b3 100644 --- a/src/my/core/tests/denylist.py +++ b/src/my/core/tests/denylist.py @@ -33,7 +33,7 @@ def test_denylist(tmp_path: Path) -> None: # create empty denylist (though file does not have to exist for denylist to work) tf.write_text("[]") - d = DenyList(tf) + d = DenyList[IP](tf) d.load() assert dict(d._deny_map) == {} diff --git a/src/my/core/utils/concurrent.py b/src/my/core/utils/concurrent.py index dd227a61..49bc4d43 100644 --- a/src/my/core/utils/concurrent.py +++ b/src/my/core/utils/concurrent.py @@ -2,10 +2,7 @@ from collections.abc import Callable from concurrent.futures import Executor, Future -from typing import Any, ParamSpec, TypeVar - -_P = ParamSpec('_P') -_T = TypeVar('_T') +from typing import Any # https://stackoverflow.com/a/10436851/706389 @@ -19,7 +16,7 @@ def __init__(self, 
max_workers: int | None = 1) -> None: self._shutdown = False self._max_workers = max_workers - def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: + def submit[T, **P](self, fn: Callable[P, T], /, *args: P.args, **kwargs: P.kwargs) -> Future[T]: if self._shutdown: raise RuntimeError('cannot schedule new futures after shutdown') diff --git a/src/my/core/utils/itertools.py b/src/my/core/utils/itertools.py index 521b7db2..f872d92f 100644 --- a/src/my/core/utils/itertools.py +++ b/src/my/core/utils/itertools.py @@ -8,30 +8,21 @@ import warnings from collections.abc import Callable, Hashable, Iterable, Iterator, Sized -from typing import ( - TYPE_CHECKING, - ParamSpec, - TypeVar, - cast, -) +from typing import TYPE_CHECKING, cast import more_itertools from decorator import decorator from .. import warnings as core_warnings -T = TypeVar('T') -K = TypeVar('K') -V = TypeVar('V') - -def _identity(v: T) -> V: # type: ignore[type-var] +def _identity[T, V](v: T) -> V: # type: ignore[type-var] return cast(V, v) # ugh. nothing in more_itertools? # perhaps duplicates_everseen? but it doesn't yield non-unique elements? -def ensure_unique(it: Iterable[T], *, key: Callable[[T], K]) -> Iterable[T]: +def ensure_unique[T, K](it: Iterable[T], *, key: Callable[[T], K]) -> Iterable[T]: key2item: dict[K, T] = {} for i in it: k = key(i) @@ -59,7 +50,7 @@ def test_ensure_unique() -> None: list(ensure_unique(dups, key=lambda _: object())) -def make_dict( +def make_dict[T, K, V]( it: Iterable[T], *, key: Callable[[T], K], @@ -90,12 +81,8 @@ def test_make_dict() -> None: _d3: dict[str, bool] = make_dict(it, key=lambda i: str(i), value=lambda i: i % 2 == 0) -LFP = ParamSpec('LFP') -LV = TypeVar('LV') - - @decorator -def _listify(func: Callable[LFP, Iterable[LV]], *args: LFP.args, **kwargs: LFP.kwargs) -> list[LV]: +def _listify(func, *args, **kwargs): """ Wraps a function's return value in wrapper (e.g. 
list) Useful when an algorithm can be expressed more cleanly as a generator @@ -108,7 +95,7 @@ def _listify(func: Callable[LFP, Iterable[LV]], *args: LFP.args, **kwargs: LFP.k # so seems easiest to just use specialize instantiations of decorator instead if TYPE_CHECKING: - def listify(func: Callable[LFP, Iterable[LV]]) -> Callable[LFP, list[LV]]: ... + def listify[**LFP, LV](func: Callable[LFP, Iterable[LV]]) -> Callable[LFP, list[LV]]: ... else: listify = _listify @@ -153,9 +140,8 @@ def wit(): if TYPE_CHECKING: - FF = TypeVar('FF', bound=Callable[..., Iterable]) - def warn_if_empty(func: FF) -> FF: ... + def warn_if_empty[FF: Callable[..., Iterable]](func: FF) -> FF: ... else: warn_if_empty = _warn_if_empty @@ -223,15 +209,12 @@ def bad_return_type() -> float: return 0.00 -_HT = TypeVar('_HT', bound=Hashable) - - # NOTE: ideally we'do It = TypeVar('It', bound=Iterable[_HT]), and function would be It -> It # Sadly this doesn't work in mypy, doesn't look like we can have double bound TypeVar # Not a huge deal, since this function is for unique_eversee and # we need to pass iterator to unique_everseen anyway # TODO maybe contribute to more_itertools? https://github.com/more-itertools/more-itertools/issues/898 -def check_if_hashable(iterable: Iterable[_HT]) -> Iterable[_HT]: +def check_if_hashable[HT: Hashable](iterable: Iterable[HT]) -> Iterable[HT]: """ NOTE: Despite Hashable bound, typing annotation doesn't guarantee runtime safety Consider hashable type X, and Y that inherits from X, but not hashable @@ -243,11 +226,11 @@ def check_if_hashable(iterable: Iterable[_HT]) -> Iterable[_HT]: if isinstance(iterable, Iterator): - def res() -> Iterator[_HT]: + def res() -> Iterator[HT]: for i in iterable: assert isinstance(i, Hashable), i # ugh. 
need a cast due to https://github.com/python/mypy/issues/10817 - yield cast(_HT, i) + yield cast(HT, i) return res() else: @@ -314,20 +297,16 @@ class Y(X): check_if_hashable(x7) -_UET = TypeVar('_UET') -_UEU = TypeVar('_UEU') - - # NOTE: for historic reasons, this function had to accept Callable that returns iterator # instead of just iterator # TODO maybe deprecated Callable support? not sure -def unique_everseen( - fun: Callable[[], Iterable[_UET]] | Iterable[_UET], - key: Callable[[_UET], _UEU] | None = None, -) -> Iterator[_UET]: +def unique_everseen[UET, UEU]( + fun: Callable[[], Iterable[UET]] | Iterable[UET], + key: Callable[[UET], UEU] | None = None, +) -> Iterator[UET]: import os - iterable: Iterable[_UET] + iterable: Iterable[UET] if callable(fun): iterable = fun() else: diff --git a/src/my/tests/reddit.py b/src/my/tests/reddit.py index e6e6d851..898c5a26 100644 --- a/src/my/tests/reddit.py +++ b/src/my/tests/reddit.py @@ -37,7 +37,7 @@ def test_saves() -> None: assert len(saves) > 0 # will throw if not unique - consume(ensure_unique(saves, key=lambda s: s.sid)) + consume(ensure_unique(saves, key=lambda s: s.id)) def test_preserves_extra_attr() -> None: diff --git a/src/my/time/tz/main.py b/src/my/time/tz/main.py index bdd36b12..cdbed6c6 100644 --- a/src/my/time/tz/main.py +++ b/src/my/time/tz/main.py @@ -13,4 +13,5 @@ def localize(dt: datetime, **kwargs) -> datetime_aware: # e.g. see https://github.com/karlicoss/HPI/issues/89#issuecomment-716495136 from . 
import via_location as L from .common import localize_with_policy + return localize_with_policy(L.localize, dt, **kwargs) diff --git a/src/my/time/tz/via_location.py b/src/my/time/tz/via_location.py index a82d8766..7bced162 100644 --- a/src/my/time/tz/via_location.py +++ b/src/my/time/tz/via_location.py @@ -77,7 +77,8 @@ def make_config() -> config: if TYPE_CHECKING: import my.config - user_config: TypeAlias = my.config.time.tz.via_location # noqa: PYI042 + # note: this has to be old-style TypeAlias, otherwise can't use as base class + user_config: TypeAlias = my.config.time.tz.via_location # noqa: PYI042, UP040 else: user_config = _get_user_config()