-
Notifications
You must be signed in to change notification settings - Fork 6
Release/4.0.0 #540
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Release/4.0.0 #540
Changes from all commits
9fd2dfb
cfa7438
a22e623
0a37e54
e56fbd0
1928157
d764ff1
b173b0c
c222c7e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||
|---|---|---|---|---|---|---|---|---|
|
|
@@ -9,13 +9,15 @@ | |||||||
| from podpac.core.cache.disk_cache_store import DiskCacheStore | ||||||||
| from podpac.core.cache.s3_cache_store import S3CacheStore | ||||||||
| import traitlets as tl | ||||||||
| import logging | ||||||||
|
|
||||||||
|
|
||||||||
|
|
||||||||
| _CACHE_STORES = {"ram": RamCacheStore, "disk": DiskCacheStore, "s3": S3CacheStore} | ||||||||
|
|
||||||||
| _CACHE_NAMES = {RamCacheStore: "ram", DiskCacheStore: "disk", S3CacheStore: "s3"} | ||||||||
|
|
||||||||
| _CACHE_MODES = ["ram", "disk", "network", "all"] | ||||||||
| _CACHE_MODES = ["ram", "disk", "s3", "all"] | ||||||||
|
|
||||||||
| # Error messages used in 3 or more places | ||||||||
| _INVALID_NODE = "Invalid node (must be of type Node, not '%s')" | ||||||||
|
|
@@ -24,6 +26,7 @@ | |||||||
| _INVALID_MODE = "Invalid mode (must be one of %s, not '%s')" | ||||||||
| _INVALID_ITEM_ASTERISK = "Invalid item ('*' is reserved)" | ||||||||
|
|
||||||||
| _logger = logging.getLogger(__name__) | ||||||||
|
|
||||||||
| def get_default_cache_ctrl(): | ||||||||
| """ | ||||||||
|
|
@@ -63,7 +66,15 @@ def make_cache_ctrl(names): | |||||||
| if name not in _CACHE_STORES: | ||||||||
| raise ValueError("Unknown cache store type '%s', options are %s" % (name, list(_CACHE_STORES))) | ||||||||
|
|
||||||||
| return CacheCtrl([_CACHE_STORES[name]() for name in names]) | ||||||||
| cache_stores = [] | ||||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Suggested change
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I got the approval notice before these suggestions and jumped the gun a bit. I'll get this into the branch that is still merging into main. |
||||||||
| for name in names: | ||||||||
| try: | ||||||||
| cache_store = _CACHE_STORES[name]() | ||||||||
| cache_stores.append(cache_store) | ||||||||
| except Exception as e: | ||||||||
| _logger.warning("Cannot create cache_store of type {} -- error={}".format(name, e)) | ||||||||
|
|
||||||||
| return CacheCtrl(cache_stores) | ||||||||
|
|
||||||||
|
|
||||||||
| def clear_cache(mode="all"): | ||||||||
|
|
@@ -73,10 +84,13 @@ def clear_cache(mode="all"): | |||||||
| Arguments | ||||||||
| --------- | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
| """ | ||||||||
|
|
||||||||
| cache_ctrl = get_default_cache_ctrl() | ||||||||
| if mode == "all": | ||||||||
| modes = _CACHE_STORES.keys() | ||||||||
| else: | ||||||||
| modes = [mode] | ||||||||
| cache_ctrl = make_cache_ctrl(modes) | ||||||||
| cache_ctrl.clear(mode=mode) | ||||||||
|
|
||||||||
|
|
||||||||
|
|
@@ -128,7 +142,7 @@ def _validate_args(node, item, coordinates, mode): | |||||||
| coordinates : :class:`podpac.Coordinates`, optional | ||||||||
| Coordinates for which cached object should be retrieved, for coordinate-dependent data such as evaluation output | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
| """ | ||||||||
| if not isinstance(node, podpac.Node): | ||||||||
| raise TypeError(_INVALID_NODE % type(node)) | ||||||||
|
|
@@ -159,7 +173,7 @@ def put(self, node, data, item, coordinates=None, expires=None, mode="all", upda | |||||||
| coordinates : :class:`podpac.Coordinates`, optional | ||||||||
| Coordinates for which cached object should be retrieved, for coordinate-dependent data such as evaluation output | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
| expires : float, datetime, timedelta | ||||||||
| Expiration date. If a timedelta is supplied, the expiration date will be calculated from the current time. | ||||||||
| update : bool | ||||||||
|
|
@@ -182,7 +196,7 @@ def get(self, node, item, coordinates=None, mode="all"): | |||||||
| coordinates : :class:`podpac.Coordinates`, optional | ||||||||
| Coordinates for which cached object should be retrieved, for coordinate-dependent data such as evaluation output | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
|
|
||||||||
| Returns | ||||||||
| ------- | ||||||||
|
|
@@ -213,7 +227,7 @@ def has(self, node, item, coordinates=None, mode="all"): | |||||||
| coordinates: Coordinate, optional | ||||||||
| Coordinates for which cached object should be checked | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
|
|
||||||||
| Returns | ||||||||
| ------- | ||||||||
|
|
@@ -240,7 +254,7 @@ def rem(self, node, item, coordinates=None, mode="all"): | |||||||
| coordinates : :class:`podpac.Coordinates`, str | ||||||||
| Delete only cached objects for these coordinates. Use `'*'` to match all coordinates. | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
| """ | ||||||||
|
|
||||||||
| if not isinstance(node, podpac.Node): | ||||||||
|
|
@@ -271,7 +285,7 @@ def clear(self, mode="all"): | |||||||
| Parameters | ||||||||
| ------------ | ||||||||
| mode : str | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. | ||||||||
| determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. | ||||||||
| """ | ||||||||
|
|
||||||||
| if mode not in _CACHE_MODES: | ||||||||
|
|
@@ -289,23 +303,3 @@ def cleanup(self): | |||||||
|
|
||||||||
| for c in self._cache_stores: | ||||||||
| c.cleanup() | ||||||||
|
|
||||||||
|
|
||||||||
| # --------------------------------------------------------# | ||||||||
| # Mixins | ||||||||
| # --------------------------------------------------------# | ||||||||
|
|
||||||||
|
|
||||||||
| class DiskCacheMixin(tl.HasTraits): | ||||||||
| """Mixin to add disk caching to the Node by default.""" | ||||||||
|
|
||||||||
| property_cache_ctrl = tl.Instance(CacheCtrl, allow_none=True) | ||||||||
|
|
||||||||
| @tl.default("property_cache_ctrl") | ||||||||
| def _property_cache_ctrl_default(self): | ||||||||
| # get the default cache_ctrl and add a disk cache store if necessary | ||||||||
| default_ctrl = get_default_cache_ctrl() | ||||||||
| stores = default_ctrl._cache_stores | ||||||||
| if not any(isinstance(store, DiskCacheStore) for store in default_ctrl._cache_stores): | ||||||||
| stores.append(DiskCacheStore()) | ||||||||
| return CacheCtrl(stores) | ||||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -85,7 +85,7 @@ def _default_outputs(self): | |
| # ------------------------------------------------------------------------- | ||
|
|
||
| def open_dataset(self, f): | ||
| return pd.read_csv(f, parse_dates=True, infer_datetime_format=True, header=self.header) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. What was the problem with
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. For the version of pandas that was installed when pip installing podpac, that argument no longer existed. I suspect me using python 3.11.12 for this effort may have caused me to do some unexpected "newer python" updating. This and the "zarr<3" in setup seem to both be python version upgrade-like changes. |
||
| return pd.read_csv(f, parse_dates=True, header=self.header) | ||
|
|
||
| @cached_property | ||
| def dims(self): | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -263,7 +263,7 @@ def _get_data(self, rc, rci): | |
|
|
||
| """ | ||
| # get data from data source at requested source coordinates and requested source coordinates index | ||
| data = self.get_data(rc, rci) | ||
| data = deepcopy(self.get_data(rc, rci)) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Why the deepcopy? That's rarely needed...
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This later line would get "read-only" permission errors if the original get_data was used for one of the unit tests. |
||
|
|
||
| # convert data into UnitsDataArray depending on format | ||
| # TODO: what other processing needs to happen here? | ||
|
|
||
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -12,20 +12,26 @@ | |||||||||||||||||||||||
| import requests | ||||||||||||||||||||||||
| from webob.exc import HTTPError | ||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
| # Helper utility for optional imports | ||||||||||||||||||||||||
| from lazy_import import lazy_module, lazy_class | ||||||||||||||||||||||||
| # # Helper utility for optional imports | ||||||||||||||||||||||||
| # from lazy_import import lazy_module, lazy_class | ||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
| # Internal dependencies | ||||||||||||||||||||||||
| from podpac.core import authentication | ||||||||||||||||||||||||
| from podpac.core.utils import common_doc, cached_property | ||||||||||||||||||||||||
| from podpac.core.data.datasource import COMMON_DATA_DOC, DataSource | ||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
| # Optional dependencies | ||||||||||||||||||||||||
| pydap = lazy_module("pydap") | ||||||||||||||||||||||||
| lazy_module("pydap.client") | ||||||||||||||||||||||||
| lazy_module("pydap.model") | ||||||||||||||||||||||||
| # pydap = lazy_module("pydap") | ||||||||||||||||||||||||
| # lazy_module("pydap.client") | ||||||||||||||||||||||||
| # lazy_module("pydap.model") | ||||||||||||||||||||||||
| # lazy_class("pydap.__spec__") | ||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
| # Lazy loading was conflicting with xarray access of pydap.__spec__ | ||||||||
| import pydap | ||||||||||||||||||||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I just realized we can still make pydap an optional dependency using the pattern:
Suggested change
|
||||||||||||||||||||||||
| import pydap.model | ||||||||||||||||||||||||
| import pydap.client | ||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
| _logger = logging.getLogger(__name__) | ||||||||||||||||||||||||
|
|
||||||||||||||||||||||||
Uh oh!
There was an error while loading. Please reload this page.