diff --git a/podpac/core/authentication.py b/podpac/core/authentication.py index 9c1e8998..848f439c 100644 --- a/podpac/core/authentication.py +++ b/podpac/core/authentication.py @@ -8,13 +8,15 @@ import requests import traitlets as tl -from lazy_import import lazy_module, lazy_function +from lazy_import import lazy_module from podpac.core.settings import settings from podpac.core.utils import cached_property # Optional dependencies -pydap_setup_session = lazy_function("pydap.cas.urs.setup_session") +# see pydap_source.py for import note +# pydap_setup_session = lazy_function("pydap.cas.urs.setup_session") +from pydap.cas.urs import setup_session as pydap_setup_session _log = logging.getLogger(__name__) _USERNAME_AT = "username@{}" diff --git a/podpac/core/cache/__init__.py b/podpac/core/cache/__init__.py index 2c005f8e..ec5d99e5 100644 --- a/podpac/core/cache/__init__.py +++ b/podpac/core/cache/__init__.py @@ -5,7 +5,6 @@ make_cache_ctrl, clear_cache, cache_cleanup, - DiskCacheMixin, ) from podpac.core.cache.ram_cache_store import RamCacheStore from podpac.core.cache.disk_cache_store import DiskCacheStore diff --git a/podpac/core/cache/cache_ctrl.py b/podpac/core/cache/cache_ctrl.py index b581b760..97a5cb47 100644 --- a/podpac/core/cache/cache_ctrl.py +++ b/podpac/core/cache/cache_ctrl.py @@ -9,13 +9,15 @@ from podpac.core.cache.disk_cache_store import DiskCacheStore from podpac.core.cache.s3_cache_store import S3CacheStore import traitlets as tl +import logging + _CACHE_STORES = {"ram": RamCacheStore, "disk": DiskCacheStore, "s3": S3CacheStore} _CACHE_NAMES = {RamCacheStore: "ram", DiskCacheStore: "disk", S3CacheStore: "s3"} -_CACHE_MODES = ["ram", "disk", "network", "all"] +_CACHE_MODES = ["ram", "disk", "s3", "all"] # Error messages used in 3 or more places _INVALID_NODE = "Invalid node (must be of type Node, not '%s')" @@ -24,6 +26,7 @@ _INVALID_MODE = "Invalid mode (must be one of %s, not '%s')" _INVALID_ITEM_ASTERISK = "Invalid item ('*' is reserved)" 
+_logger = logging.getLogger(__name__) def get_default_cache_ctrl(): """ @@ -63,7 +66,15 @@ def make_cache_ctrl(names): if name not in _CACHE_STORES: raise ValueError("Unknown cache store type '%s', options are %s" % (name, list(_CACHE_STORES))) - return CacheCtrl([_CACHE_STORES[name]() for name in names]) + cache_stores = [] + for name in names: + try: + cache_store = _CACHE_STORES[name]() + cache_stores.append(cache_store) + except Exception as e: + _logger.warning("Cannot create cache_store of type {} -- error={}".format(name, e)) + + return CacheCtrl(cache_stores) def clear_cache(mode="all"): @@ -73,10 +84,13 @@ def clear_cache(mode="all"): Arguments --------- mode : str - determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. """ - - cache_ctrl = get_default_cache_ctrl() + if mode == "all": + modes = _CACHE_STORES.keys() + else: + modes = [mode] + cache_ctrl = make_cache_ctrl(modes) cache_ctrl.clear(mode=mode) @@ -128,7 +142,7 @@ def _validate_args(node, item, coordinates, mode): coordinates : :class:`podpac.Coordinates`, optional Coordinates for which cached object should be retrieved, for coordinate-dependent data such as evaluation output mode : str - determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. """ if not isinstance(node, podpac.Node): raise TypeError(_INVALID_NODE % type(node)) @@ -159,7 +173,7 @@ def put(self, node, data, item, coordinates=None, expires=None, mode="all", upda coordinates : :class:`podpac.Coordinates`, optional Coordinates for which cached object should be retrieved, for coordinate-dependent data such as evaluation output mode : str - determines what types of the `CacheStore` are affected. 
Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. expires : float, datetime, timedelta Expiration date. If a timedelta is supplied, the expiration date will be calculated from the current time. update : bool @@ -182,7 +196,7 @@ def get(self, node, item, coordinates=None, mode="all"): coordinates : :class:`podpac.Coordinates`, optional Coordinates for which cached object should be retrieved, for coordinate-dependent data such as evaluation output mode : str - determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. Returns ------- @@ -213,7 +227,7 @@ def has(self, node, item, coordinates=None, mode="all"): coordinates: Coordinate, optional Coordinates for which cached object should be checked mode : str - determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. Returns ------- @@ -240,7 +254,7 @@ def rem(self, node, item, coordinates=None, mode="all"): coordinates : :class:`podpac.Coordinates`, str Delete only cached objects for these coordinates. Use `'*'` to match all coordinates. mode : str - determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 's3', 'all'. Default 'all'. """ if not isinstance(node, podpac.Node): @@ -271,7 +285,7 @@ def clear(self, mode="all"): Parameters ------------ mode : str - determines what types of the `CacheStore` are affected. Options: 'ram', 'disk', 'network', 'all'. Default 'all'. + determines what types of the `CacheStore` are affected. 
Options: 'ram', 'disk', 's3', 'all'. Default 'all'. """ if mode not in _CACHE_MODES: @@ -289,23 +303,3 @@ def cleanup(self): for c in self._cache_stores: c.cleanup() - - -# --------------------------------------------------------# -# Mixins -# --------------------------------------------------------# - - -class DiskCacheMixin(tl.HasTraits): - """Mixin to add disk caching to the Node by default.""" - - property_cache_ctrl = tl.Instance(CacheCtrl, allow_none=True) - - @tl.default("property_cache_ctrl") - def _property_cache_ctrl_default(self): - # get the default cache_ctrl and addd a disk cache store if necessary - default_ctrl = get_default_cache_ctrl() - stores = default_ctrl._cache_stores - if not any(isinstance(store, DiskCacheStore) for store in default_ctrl._cache_stores): - stores.append(DiskCacheStore()) - return CacheCtrl(stores) diff --git a/podpac/core/cache/cache_store.py b/podpac/core/cache/cache_store.py index a0ec5a57..031b6b3a 100644 --- a/podpac/core/cache/cache_store.py +++ b/podpac/core/cache/cache_store.py @@ -8,7 +8,7 @@ class CacheStore(object): - """Abstract parent class for classes representing actual data stores (e.g. RAM, local disk, network storage). + """Abstract parent class for classes representing actual data stores (e.g. RAM, local disk, s3 storage). 
Includes implementation of common hashing operations and call signature for required abstract methods: put(), get(), rem(), has() """ diff --git a/podpac/core/cache/test/test_node_caches.py b/podpac/core/cache/test/test_node_caches.py index 8f7b7690..40f284d2 100644 --- a/podpac/core/cache/test/test_node_caches.py +++ b/podpac/core/cache/test/test_node_caches.py @@ -20,8 +20,12 @@ def test_uid_no_definition(self): assert not hash_cache_node._from_cache o2 = hash_cache_node2.eval(coords) assert hash_cache_node2._from_cache - o3 = hash_cache_node3.eval(coords) - assert not hash_cache_node3._from_cache + try: + o3 = hash_cache_node3.eval(coords) + assert not hash_cache_node3._from_cache + finally: + hash_cache_node3.rem_cache("*",coordinates="*") + def test_global_ram_cache(self): my_node = SinCoords(cache_output=True) diff --git a/podpac/core/coordinates/test/test_coordinates.py b/podpac/core/coordinates/test/test_coordinates.py index 5057989d..6887eb4e 100644 --- a/podpac/core/coordinates/test/test_coordinates.py +++ b/podpac/core/coordinates/test/test_coordinates.py @@ -636,7 +636,7 @@ def test_alt_units(self): assert c.alt_units in ["us-ft", "US survey foot"] # pyproj < 3.0 # pyproj >= 3.0 def test_create_from_uniform_coords(self): - Coordinates([[clinspace("2020-01-01T09:36", "2020-01-02T15:35", 8)]], [['time']]) + Coordinates([clinspace("2020-01-01T09:36", "2020-01-02T15:35", 8)], [['time']]) class TestCoordinatesSerialization(object): def test_definition(self): diff --git a/podpac/core/coordinates/test/test_coordinates_utils.py b/podpac/core/coordinates/test/test_coordinates_utils.py index 1338396f..223c5619 100644 --- a/podpac/core/coordinates/test/test_coordinates_utils.py +++ b/podpac/core/coordinates/test/test_coordinates_utils.py @@ -621,7 +621,7 @@ def test_add_custom_dimension(): # Unstacked coords, one dimension, nearest neighbor interpolation c1 = podpac.Coordinates([[1, 2, 3]], ["mydim"]) c1_interp = podpac.Coordinates([[1.9, 2, 3]], ["mydim"]) - n1 = 
podpac.data.Array(source=[1, 2, 3], coordinates=c1) + n1 = podpac.data.Array(source=[1, 2, 3], coordinates=c1).interpolate(interpolation="nearest") data1 = n1.eval(c1_interp) assert np.array_equal(data1.data, np.array([2, 2, 3])) assert "mydim" in data1.dims @@ -629,7 +629,7 @@ def test_add_custom_dimension(): # Unstacked coords, one dimension, linear neighbor interpolation c1 = podpac.Coordinates([[1, 2, 3]], ["mydim"]) c1_interp = podpac.Coordinates([[1.9, 2, 3]], ["mydim"]) - n1 = podpac.data.Array(source=[1, 2, 3], coordinates=c1, interpolation="bilinear") + n1 = podpac.data.Array(source=[1, 2, 3], coordinates=c1).interpolate(interpolation="bilinear") data1 = n1.eval(c1_interp) assert np.array_equal(data1.data, np.array([1.9, 2, 3])) assert "mydim" in data1.dims @@ -655,7 +655,7 @@ def test_add_custom_dimension(): assert "mydim" in c3.udims assert "lat" in c3.udims c3_interp = podpac.Coordinates([[[1.9, 2, 3], [4.9, 5, 6]]], dims=["mydim_lat"]) - n3 = podpac.data.Array(source=[1, 2, 3], coordinates=c3, interpolation="nearest") + n3 = podpac.data.Array(source=[1, 2, 3], coordinates=c3).interpolate(interpolation="nearest") data3 = n3.eval(c3_interp) assert np.array_equal(data3.data, np.array([2, 2, 3])) assert "mydim_lat" in data3.dims diff --git a/podpac/core/data/csv_source.py b/podpac/core/data/csv_source.py index 65b3c6f9..36822cb0 100644 --- a/podpac/core/data/csv_source.py +++ b/podpac/core/data/csv_source.py @@ -85,7 +85,7 @@ def _default_outputs(self): # ------------------------------------------------------------------------- def open_dataset(self, f): - return pd.read_csv(f, parse_dates=True, infer_datetime_format=True, header=self.header) + return pd.read_csv(f, parse_dates=True, header=self.header) @cached_property def dims(self): diff --git a/podpac/core/data/datasource.py b/podpac/core/data/datasource.py old mode 100644 new mode 100755 index b28226aa..2c82e470 --- a/podpac/core/data/datasource.py +++ b/podpac/core/data/datasource.py @@ -263,7 +263,7 
@@ def _get_data(self, rc, rci): """ # get data from data source at requested source coordinates and requested source coordinates index - data = self.get_data(rc, rci) + data = deepcopy(self.get_data(rc, rci)) # convert data into UnitsDataArray depending on format # TODO: what other processing needs to happen here? diff --git a/podpac/core/data/h5py_source.py b/podpac/core/data/h5py_source.py index e1dda0ab..57c9995c 100644 --- a/podpac/core/data/h5py_source.py +++ b/podpac/core/data/h5py_source.py @@ -52,6 +52,8 @@ class H5PY(FileKeysMixin, BaseFileSource): file_mode = tl.Unicode(default_value="r").tag(readonly=True) array_dims = tl.List(trait=tl.Unicode()).tag(readonly=True) + coordinate_index_type = "slice" + @cached_property def dataset(self): return h5py.File(self.source, self.file_mode) diff --git a/podpac/core/data/pydap_source.py b/podpac/core/data/pydap_source.py index a64834b4..5482f343 100644 --- a/podpac/core/data/pydap_source.py +++ b/podpac/core/data/pydap_source.py @@ -12,8 +12,8 @@ import requests from webob.exc import HTTPError -# Helper utility for optional imports -from lazy_import import lazy_module, lazy_class +# # Helper utility for optional imports +# from lazy_import import lazy_module, lazy_class # Internal dependencies from podpac.core import authentication @@ -21,11 +21,17 @@ from podpac.core.data.datasource import COMMON_DATA_DOC, DataSource - # Optional dependencies -pydap = lazy_module("pydap") -lazy_module("pydap.client") -lazy_module("pydap.model") +# pydap = lazy_module("pydap") +# lazy_module("pydap.client") +# lazy_module("pydap.model") +# lazy_class("pydap.__spec__") + +# Lazy loading was conflicting with xarray access of pydap.__spec__ +import pydap +import pydap.model +import pydap.client + _logger = logging.getLogger(__name__) diff --git a/podpac/core/data/test/test_array.py b/podpac/core/data/test/test_array.py old mode 100644 new mode 100755 index 9ea2864a..9080da2b --- a/podpac/core/data/test/test_array.py +++ 
b/podpac/core/data/test/test_array.py @@ -58,7 +58,3 @@ def test_coordinates(self): node = Array(source=self.data).interpolate() with pytest.raises(tl.TraitError): node.coordinates - - def test_no_cache(self): - node = Array().interpolate() - assert len(node.source.cache_ctrl._cache_stores) == 0 diff --git a/podpac/core/data/test/test_dataset.py b/podpac/core/data/test/test_dataset.py index 4b0caccc..b2133fd9 100644 --- a/podpac/core/data/test/test_dataset.py +++ b/podpac/core/data/test/test_dataset.py @@ -105,4 +105,4 @@ def test_extra_dimension_selection(self): node = Dataset(source=self.source, data_key="data", selection={"day": 1}) assert np.all([d in ["lat", "lon"] for d in node.dims]) out = node.eval(node.coordinates) - np.testing.assert_array_equal(out, self.data[1].T) + np.testing.assert_array_equal(out, self.data[1]) diff --git a/podpac/core/interpolation/interpolation_manager.py b/podpac/core/interpolation/interpolation_manager.py index 5d63beff..fb731343 100644 --- a/podpac/core/interpolation/interpolation_manager.py +++ b/podpac/core/interpolation/interpolation_manager.py @@ -737,17 +737,13 @@ def _fix_coordinates_for_none_interp(self, eval_coordinates, source_coordinates) covered_udims = [] for k in interpolator_queue: # Keep the eval_coordinates for some dimensions - dims = ( - source_coordinates.dims - if isinstance(interpolator_queue[k], NoneInterpolator) - else eval_coordinates.dims - ) - for d in dims: + coords = source_coordinates if isinstance(interpolator_queue[k], NoneInterpolator) else eval_coordinates + for d in coords.dims: ud = d.split("_") for u in ud: if u in k: new_dims.append(d) - new_coords.append(eval_coordinates[d]) + new_coords.append(coords[d]) covered_udims.extend(ud) break new_coordinates = Coordinates(new_coords, new_dims) diff --git a/podpac/core/interpolation/test/test_interpolators.py b/podpac/core/interpolation/test/test_interpolators.py index 72c58034..2dac3696 100755 --- 
a/podpac/core/interpolation/test/test_interpolators.py +++ b/podpac/core/interpolation/test/test_interpolators.py @@ -3,6 +3,7 @@ """ + # pylint: disable=C0111,W0212,R0903 import pytest @@ -24,7 +25,7 @@ _TIMEDELTA64 = "timedelta64[h]" # Set up the PRNG with a seed to stay deterministic -_rand = np.random.default_rng(0xC * ord('r') + 0xea + ord('r') * 0xe) +_rand = np.random.default_rng(0xC * ord("r") + 0xEA + ord("r") * 0xE) class MockArrayDataSource(DataSource): @@ -369,7 +370,10 @@ def test_time_tolerance(self): coords_src = Coordinates( [np.linspace(0, 10, 5), clinspace("2018-01-01", "2018-01-09", 5)], dims=["lat", "time"] ) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", "params": {"spatial_tolerance": 1.1, "time_tolerance": np.timedelta64(1, "D")}, @@ -418,7 +422,10 @@ def test_time_space_scale_grid(self): ) coords_dst = Coordinates([5.1, "2018-01-02T11", 1], dims=["lat", "time", "alt"]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -434,7 +441,10 @@ def test_time_space_scale_grid(self): output = node.eval(coords_dst) assert output == source[2, 2, 0] - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -450,7 +460,10 @@ def test_time_space_scale_grid(self): output = node.eval(coords_dst) assert output == source[2, 1, 1] - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", 
"interpolators": [NearestNeighbor], @@ -473,14 +486,20 @@ def test_remove_nan(self): coords_src = Coordinates( [[np.linspace(0, 10, 5), clinspace("2018-01-01", "2018-01-09", 5)]], dims=[["lat", "time"]] ) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "nearest", "interpolators": [NearestNeighbor], "params": {"remove_nan": False}} ) coords_dst = Coordinates([[5.1]], dims=["lat"]) output = node.eval(coords_dst) assert np.isnan(output) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -496,14 +515,20 @@ def test_remove_nan(self): source = _rand.random(size=(5, 3)) source[2, 1] = np.nan coords_src = Coordinates([np.linspace(0, 10, 5), [1, 2, 3]], dims=["lat", "time"]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "nearest", "interpolators": [NearestNeighbor], "params": {"remove_nan": False}} ) coords_dst = Coordinates([5.1, 2.01], dims=["lat", "time"]) output = node.eval(coords_dst) assert np.isnan(output) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -517,7 +542,10 @@ def test_respect_bounds(self): source = _rand.random(size=(5,)) coords_src = Coordinates([[1, 2, 3, 4, 5]], ["alt"]) coords_dst = Coordinates([[-0.5, 1.1, 2.6]], ["alt"]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + 
).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -527,7 +555,10 @@ def test_respect_bounds(self): output = node.eval(coords_dst) np.testing.assert_array_equal(output.data, source[[0, 0, 2]]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "nearest", "interpolators": [NearestNeighbor], "params": {"respect_bounds": True}} ) output = node.eval(coords_dst) @@ -548,7 +579,10 @@ def test_2Dstacked(self): ["lat_lon", "time"], ) coords_dst = Coordinates([np.arange(4) + 0.2, np.arange(1, 4) - 0.2, [0.5]], ["lat", "lon", "time"]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -558,7 +592,11 @@ def test_2Dstacked(self): np.testing.assert_array_equal(output, source[:4, 1:, :1]) # Using 'xarray' coordinates type - node = MockArrayDataSourceXR(data=source, coordinates=coords_src, coordinate_index_type="xarray",).interpolate( + node = MockArrayDataSourceXR( + data=source, + coordinates=coords_src, + coordinate_index_type="xarray", + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -568,7 +606,11 @@ def test_2Dstacked(self): np.testing.assert_array_equal(output, source[:4, 1:, :1]) # Using 'slice' coordinates type - node = MockArrayDataSource(data=source, coordinates=coords_src, coordinate_index_type="slice",).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + coordinate_index_type="slice", + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -579,7 +621,10 @@ def test_2Dstacked(self): # Without Time source = _rand.random(size=(5, 4)) - node = MockArrayDataSource(data=source, 
coordinates=coords_src.drop("time"),).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src.drop("time"), + ).interpolate( interpolation={ "method": "nearest", "interpolators": [NearestNeighbor], @@ -590,14 +635,14 @@ def test_2Dstacked(self): def test_timedeltas_in_time_dim(self): """ - test targets when timedelta dtype is used for time coordinates and the + test targets when timedelta dtype is used for time coordinates and the eval coordinates do not have uniform steps between array elements """ source = _rand.random(size=(5, 5, 6)) timedelta_values = np.array([0, 1, 2, 3, 5, 8]).astype(_TIMEDELTA64) lat = np.array([0, 1, 2, 3, 5, 8]) - lon =np.array([0, 1, 2, 3, 5, 8]) - coords_src = Coordinates([lat,lon,timedelta_values], dims=["lat", "lon", "time"]) + lon = np.array([0, 1, 2, 3, 5, 8]) + coords_src = Coordinates([lat, lon, timedelta_values], dims=["lat", "lon", "time"]) node = MockArrayDataSource( data=source, coordinates=coords_src, @@ -612,11 +657,11 @@ def test_timedeltas_in_time_dim(self): output = node.eval(coords_dst) assert isinstance(output, UnitsDataArray) assert np.all(output.lat.values == coords_dst["lat"].coordinates) - assert np.all(np.moveaxis(output.values,0,1) == source[1:4, 1:4, 1:5]) + assert np.all(np.moveaxis(output.values, 0, 1) == source[1:4, 1:4, 1:5]) def test_times_objs_in_other_dims(self): """ - targets when datetime or timedelta dtype is used in any dimension other than `time` + targets when datetime or timedelta dtype is used in any dimension other than `time` and the eval coordinates do not have uniform steps between array elements """ @@ -624,8 +669,8 @@ def test_times_objs_in_other_dims(self): source = _rand.random(size=(5, 5, 6)) datetime_values = np.array([0, 1, 2, 3, 5, 8]).astype("datetime64[h]") lat = np.array([0, 1, 2, 3, 5, 8]) - lon =np.array([0, 1, 2, 3, 5, 8]) - coords_src = Coordinates([lat,lon,datetime_values], dims=["lat", "lon", "alt"]) + lon = np.array([0, 1, 2, 3, 5, 8]) + coords_src = 
Coordinates([lat, lon, datetime_values], dims=["lat", "lon", "alt"]) node = MockArrayDataSource( data=source, coordinates=coords_src, @@ -638,14 +683,14 @@ def test_times_objs_in_other_dims(self): output = node.eval(coords_dst) assert isinstance(output, UnitsDataArray) assert np.all(output.lat.values == coords_dst["lat"].coordinates) - assert np.all(np.moveaxis(output.values,0,1) == source[1:4, 1:4, 1:5]) - + assert np.all(np.moveaxis(output.values, 0, 1) == source[1:4, 1:4, 1:5]) + # timedelta test source = _rand.random(size=(5, 5, 6)) timedelta_values = np.array([0, 1, 2, 3, 5, 8]).astype(_TIMEDELTA64) lat = np.array([0, 1, 2, 3, 5, 8]) - lon =np.array([0, 1, 2, 3, 5, 8]) - coords_src = Coordinates([lat,lon,timedelta_values], dims=["lat", "lon", "alt"]) + lon = np.array([0, 1, 2, 3, 5, 8]) + coords_src = Coordinates([lat, lon, timedelta_values], dims=["lat", "lon", "alt"]) node = MockArrayDataSource( data=source, coordinates=coords_src, @@ -658,7 +703,8 @@ def test_times_objs_in_other_dims(self): output = node.eval(coords_dst) assert isinstance(output, UnitsDataArray) assert np.all(output.lat.values == coords_dst["lat"].coordinates) - assert np.all(np.moveaxis(output.values,0,1) == source[1:4, 1:4, 1:5]) + assert np.all(np.moveaxis(output.values, 0, 1) == source[1:4, 1:4, 1:5]) + class TestInterpolateRasterioInterpolator(TestCase): """test interpolation functions""" @@ -674,6 +720,9 @@ def test_interpolate_rasterio(self): source = np.arange(0, 15) source.resize((3, 5)) + coords_src = Coordinates([clinspace(0, 10, 3), clinspace(0, 10, 5)], dims=["lat", "lon"]) + coords_dst = Coordinates([clinspace(1, 11, 3), clinspace(1, 11, 5)], dims=["lat", "lon"]) + # try one specific rasterio case to measure output node = MockArrayDataSource( data=source, @@ -724,9 +773,8 @@ def test_interpolate_rasterio_descending(self): node = MockArrayDataSource( data=source, - coordinates=coords_src, - interpolation={"method": "nearest", "interpolators": [RasterioInterpolator]}, - ) + 
coordinates=coords_src + ).interpolate(interpolation={"method": "nearest", "interpolators": [RasterioInterpolator]}) output = node.eval(coords_dst) assert isinstance(output, UnitsDataArray) @@ -789,7 +837,10 @@ def test_interpolate_irregular_arbitrary_2dims(self): coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5), [2, 3, 5]], dims=["lat", "lon", "time"]) coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5), [2, 3, 4]], dims=["lat", "lon", "time"]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation=[{"method": "nearest", "interpolators": [ScipyGrid]}, {"method": "linear", "dims": ["time"]}] ) output = node.eval(coords_dst) @@ -827,12 +878,11 @@ def test_interpolate_looper_helper(self): node = MockArrayDataSource( data=source, - coordinates=coords_src, - interpolation=[ - {"method": "nearest", "interpolators": [ScipyGrid]}, - {"method": "linear", "dims": ["time", "alt"]}, - ], - ) + coordinates=coords_src + ).interpolate(interpolation=[ + {"method": "nearest", "interpolators": [ScipyGrid]}, + {"method": "linear", "dims": ["time", "alt"]}, + ],) output = node.eval(coords_dst) assert isinstance(output, UnitsDataArray) @@ -1003,7 +1053,10 @@ def test_interpolate_xarray_grid(self): assert output.data[1, 3] == 8.0 assert np.isnan(output.data[0, 4]) # TODO: how to handle outside bounds - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}} ) output = node.eval(coords_dst) @@ -1012,7 +1065,10 @@ def test_interpolate_xarray_grid(self): assert int(output.data[0, 0]) == 2 assert int(output.data[2, 3]) == 15 - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = 
MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "slinear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}} ) output = node.eval(coords_dst) @@ -1023,7 +1079,10 @@ def test_interpolate_xarray_grid(self): assert np.isnan(output.data[4, 4]) # Check extrapolation - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={ "method": "linear", "interpolators": [XarrayInterpolator], @@ -1103,7 +1162,10 @@ def test_interpolate_fill_nan(self): coords_dst = Coordinates([clinspace(1, 11, 5), clinspace(1, 11, 5)], dims=["lat", "lon"]) # Ensure nan present - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": False}} ) output = node.eval(coords_dst) @@ -1112,7 +1174,10 @@ def test_interpolate_fill_nan(self): assert np.all(np.isnan(output.data[1:3, 1:3])) # Ensure nan gone - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}} ) output = node.eval(coords_dst) @@ -1122,7 +1187,10 @@ def test_interpolate_fill_nan(self): # Ensure nan gone, flip lat-lon on source coords_src = Coordinates([clinspace(0, 10, 5), clinspace(0, 10, 5)], dims=["lon", "lat"]) - node = MockArrayDataSource(data=source, coordinates=coords_src,).interpolate( + node = MockArrayDataSource( + data=source, + coordinates=coords_src, + ).interpolate( interpolation={"method": "linear", "interpolators": [XarrayInterpolator], "params": {"fill_nan": True}} ) output = node.eval(coords_dst) diff --git 
a/podpac/core/test/test_node.py b/podpac/core/test/test_node.py index 971ba4e5..832c7812 100644 --- a/podpac/core/test/test_node.py +++ b/podpac/core/test/test_node.py @@ -20,7 +20,7 @@ from podpac.core.utils import ArrayTrait, NodeTrait from podpac.core.units import UnitsDataArray from podpac.core.style import Style -from podpac.core.cache import CacheCtrl, RamCacheStore, DiskCacheStore, DiskCacheMixin, clear_cache +from podpac.core.cache import CacheCtrl, RamCacheStore, DiskCacheStore, clear_cache from podpac.core.node import Node, NodeException, NodeDefinitionError _OUTPUTS = "outputs=" @@ -1450,27 +1450,6 @@ def test_output_node(self): Node.from_json(wrong_name_json) -class TestDiskCacheMixin(object): - class DiskCacheNode(DiskCacheMixin, Node): - pass - - def test_default_disk_cache(self): - with podpac.settings: - # add disk cache - podpac.settings["DEFAULT_CACHE"] = ["ram"] - node = self.DiskCacheNode() - assert len(node.cache_ctrl._cache_stores) == 2 - - # don't add if it is already there - podpac.settings["DEFAULT_CACHE"] = ["ram", "disk"] - node = self.DiskCacheNode() - assert len(node.cache_ctrl._cache_stores) == 2 - - def test_customizable(self): - node = self.DiskCacheNode().cache(cache_type=["ram"]) - assert len(node.cache_ctrl._cache_stores) == 1 - - @pytest.mark.integration def tests_node_integration(): # This is currently a placeholder test until we actually have integration tests (pytest will exit with code 5 if no tests found) diff --git a/podpac/core/test/test_units.py b/podpac/core/test/test_units.py index 6467c42a..acad09fd 100644 --- a/podpac/core/test/test_units.py +++ b/podpac/core/test/test_units.py @@ -354,9 +354,12 @@ def test_keep_attrs(self): assert "test" in (a1 * a2).attrs # Order is important - assert "test" not in (1 + a1).attrs + # As of 20260205, xarray seems to keep attributes regardless of order with constants + # old behavior with objects appears maintained. 
+ # We're keeping this test just to track if this behavior changes again + assert "test" in (1 + a1).attrs assert "test" not in (a2 + a1).attrs - assert "test" not in (1 * a1).attrs + assert "test" in (1 * a1).attrs assert "test" not in (a2 * a1).attrs @@ -553,7 +556,7 @@ def make_rot_array(self, order=1, bands=1): coordinates=c, outputs=[str(s) for s in list(range(bands))], ) - return node + return node.interpolate(interpolation="nearest") def test_to_geotiff_roundtrip_1band(self): # lat/lon order, usual @@ -564,7 +567,7 @@ def test_to_geotiff_roundtrip_1band(self): fp.write(b"a") # for some reason needed to get good comparison fp.seek(0) - rnode = Rasterio(source=fp.name, outputs=node.outputs) + rnode = Rasterio(source=fp.name, outputs=node.outputs, crs="EPSG:4326") assert rnode.coordinates == node.coordinates rout = rnode.eval(rnode.coordinates) @@ -578,7 +581,7 @@ def test_to_geotiff_roundtrip_1band(self): fp.write(b"a") # for some reason needed to get good comparison fp.seek(0) - rnode = Rasterio(source=fp.name, outputs=node.outputs) + rnode = Rasterio(source=fp.name, outputs=node.outputs, crs="EPSG:4326") assert rnode.coordinates == node.coordinates rout = rnode.eval(rnode.coordinates) @@ -593,7 +596,7 @@ def test_to_geotiff_roundtrip_2band(self): fp.write(b"a") # for some reason needed to get good comparison fp.seek(0) - rnode = Rasterio(source=fp.name, outputs=node.outputs) + rnode = Rasterio(source=fp.name, outputs=node.outputs, crs="EPSG:4326") assert rnode.coordinates == node.coordinates rout = rnode.eval(rnode.coordinates) @@ -607,7 +610,7 @@ def test_to_geotiff_roundtrip_2band(self): fp.write(b"a") # for some reason needed to get good comparison fp.seek(0) - rnode = Rasterio(source=fp.name, outputs=node.outputs) + rnode = Rasterio(source=fp.name, outputs=node.outputs, crs="EPSG:4326") assert rnode.coordinates == node.coordinates rout = rnode.eval(rnode.coordinates) @@ -615,19 +618,19 @@ def test_to_geotiff_roundtrip_2band(self): # Check single output 
fp.seek(0) - rnode = Rasterio(source=fp.name, outputs=node.outputs, output=node.outputs[1]) + rnode = Rasterio(source=fp.name, outputs=node.outputs, output=node.outputs[1], crs="EPSG:4326") rout = rnode.eval(rnode.coordinates) np.testing.assert_almost_equal(out.data[..., 1], rout.data) # Check single band 1 fp.seek(0) - rnode = Rasterio(source=fp.name, band=1) + rnode = Rasterio(source=fp.name, band=1, crs="EPSG:4326") rout = rnode.eval(rnode.coordinates) np.testing.assert_almost_equal(out.data[..., 0], rout.data) # Check single band 2 fp.seek(0) - rnode = Rasterio(source=fp.name, band=2) + rnode = Rasterio(source=fp.name, band=2, crs="EPSG:4326") rout = rnode.eval(rnode.coordinates) np.testing.assert_almost_equal(out.data[..., 1], rout.data) @@ -642,7 +645,9 @@ def test_to_geotiff_roundtrip_rotcoords(self): fp.write(b"a") # for some reason needed to get good comparison fp.seek(0) - rnode = Rasterio(source=fp.name, outputs=node.outputs, mode="r") + rnode = Rasterio(source=fp.name, outputs=node.outputs, mode="r", crs="EPSG:4326").interpolate( + interpolation="nearest" + ) assert node.coordinates == rnode.coordinates rout = rnode.eval(rnode.coordinates) diff --git a/podpac/core/test/test_utils.py b/podpac/core/test/test_utils.py old mode 100644 new mode 100755 index 46a653a8..4af659a4 --- a/podpac/core/test/test_utils.py +++ b/podpac/core/test/test_utils.py @@ -499,16 +499,16 @@ class TestNodeProber(object): another_one = AnotherOne() def test_single_prober(self): - expected = { + expected = OrderedDict({ "Array": { "active": True, "value": 1, - "units": "o", + "label": "o", "inputs": [], "name": "one_style", "node_hash": self.one.hash, } - } + }) out = probe_node(self.one, lat=1, lon=1) assert out == expected @@ -521,7 +521,7 @@ def test_serial_prober(self): "Array": { "active": True, "value": 1.0, - "units": "o", + "label": "o", "inputs": [], "name": "one_style", "node_hash": self.one.hash, @@ -529,7 +529,7 @@ def test_serial_prober(self): "Arithmetic": { "active": 
True, "value": 2.0, - "units": "", + "label": "", "inputs": ["Array"], "name": "Arithmetic", "node_hash": a.hash, @@ -537,7 +537,7 @@ def test_serial_prober(self): "Arithmetic_1": { "active": True, "value": 6.0, - "units": "m", + "label": "m", "inputs": ["Arithmetic"], "name": "six_style", "node_hash": b.hash, @@ -554,7 +554,7 @@ def test_parallel_prober(self): "Array": { "active": True, "value": 1.0, - "units": "o", + "label": "o", "inputs": [], "name": "one_style", "node_hash": self.one.hash, @@ -562,7 +562,7 @@ def test_parallel_prober(self): "Array_1": { "active": True, "value": 2.0, - "units": "t", + "label": "t", "inputs": [], "name": "two_style", "node_hash": self.two.hash, @@ -570,7 +570,7 @@ def test_parallel_prober(self): "Arithmetic": { "active": True, "value": 2.0, - "units": "", + "label": "", "inputs": ["Array", "Array_1"], "name": "Arithmetic", "node_hash": a.hash, @@ -585,7 +585,7 @@ def test_composited_prober(self): "Array": { "active": True, "value": 1.0, - "units": "o", + "label": "o", "inputs": [], "name": "one_style", "node_hash": self.one.hash, @@ -593,7 +593,7 @@ def test_composited_prober(self): "Arange": { "active": False, "value": 0.0, - "units": "", + "label": "", "inputs": [], "name": "Arange", "node_hash": self.arange.hash, @@ -601,7 +601,7 @@ def test_composited_prober(self): "OrderedCompositor": { "active": True, "value": 1.0, - "units": "", + "label": "", "inputs": ["Array", "Arange"], "name": "OrderedCompositor", "node_hash": a.hash, @@ -615,7 +615,7 @@ def test_composited_prober(self): "Array": { "active": False, "value": "nan", - "units": "", + "label": "", "inputs": [], "name": "Array", "node_hash": self.nan.hash, @@ -623,7 +623,7 @@ def test_composited_prober(self): "Array_1": { "active": True, "value": 2.0, - "units": "t", + "label": "t", "inputs": [], "name": "two_style", "node_hash": self.two.hash, @@ -631,7 +631,7 @@ def test_composited_prober(self): "OrderedCompositor": { "active": True, "value": 2.0, - "units": "", + 
"label": "", "inputs": ["Array", "Array_1"], "name": "OrderedCompositor", "node_hash": a.hash, @@ -648,7 +648,7 @@ def test_composited_prober(self): "Array": { "active": False, "value": "nan", - "units": "", + "label": "", "inputs": [], "name": "Array", "node_hash": self.nan.hash, @@ -656,7 +656,7 @@ def test_composited_prober(self): "Array_1": { "active": True, "value": 1.0, - "units": "o", + "label": "o", "inputs": [], "name": "one_style", "node_hash": self.one.hash, @@ -664,7 +664,7 @@ def test_composited_prober(self): "AnotherOne": { "active": False, "value": 1.0, - "units": "", + "label": "", "inputs": [], "name": "AnotherOne", "node_hash": self.another_one.hash, @@ -672,7 +672,7 @@ def test_composited_prober(self): "OrderedCompositor": { "active": True, "value": 1.0, - "units": "", + "label": "", "inputs": ["Array", "Array_1", "AnotherOne"], "name": "OrderedCompositor", "node_hash": a.hash, @@ -688,33 +688,36 @@ def test_composited_prober_nested(self): a = podpac.compositor.OrderedCompositor( sources=[self.one, self.arange], style=podpac.style.Style(name="composited", units="c") ) - expected = { + expected = OrderedDict({ "name": "composited", - "value": "1.0 c", + "value": "1.0", + "label": "c", "active": True, "node_id": a.hash, "params": {}, "inputs": { "inputs": [ - { + OrderedDict({ "name": "one_style", - "value": "1.0 o", + "value": "1.0", + "label": "o", "active": True, "node_id": self.one.hash, "params": {}, "inputs": {}, - }, - { + }), + OrderedDict({ "name": "Arange", "value": "0.0", + "label": "", "active": False, "node_id": self.arange.hash, "params": {}, "inputs": {}, - }, + }), ] }, - } + }) out = probe_node(a, lat=1, lon=1, nested=True) assert out == expected @@ -730,41 +733,45 @@ def test_prober_with_enumerated_legends(self): zero = podpac.data.Array(source=np.zeros((3, 3), int), coordinates=self.coords, style=enumeration_style) a = podpac.compositor.OrderedCompositor(sources=[nan, one, zero], style=enumeration_style) - expected = { + expected 
= OrderedDict({ "name": "composited", - "value": "1 (sand) my_units", + "value": "1.0", + "label": "sand", "active": True, "node_id": a.hash, "params": {}, "inputs": { "inputs": [ - { + OrderedDict({ "name": "composited", - "value": "nan (unknown) my_units", + "value": "nan", + "label": "unknown", "active": False, "node_id": nan.hash, "params": {}, "inputs": {}, - }, - { + }), + OrderedDict({ "name": "composited", - "value": "1 (sand) my_units", + "value": "1.0", + "label": "sand", "active": True, "node_id": one.hash, "params": {}, "inputs": {}, - }, - { + }), + OrderedDict({ "name": "composited", - "value": "0 (dirt) my_units", + "value": "0.0", + "label": "dirt", "active": False, "node_id": zero.hash, "params": {}, "inputs": {}, - }, + }), ] }, - } + }) out = probe_node(a, lat=1, lon=1, nested=True, add_enumeration_labels=True) assert out == expected @@ -831,7 +838,7 @@ def test_key_not_in_params_upper_not_in_params(self): class TestGetFromUrl: def test_raise_requests_error(self): mock_requests = MagicMock() - mock_requests.get.side_effect = ConnectionError("Test Connection Error") + mock_requests.get.side_effect = Exception("Test Connection Error") with patch("podpac.core.utils.requests", mock_requests): ret = _get_from_url("TEST/URL", None) @@ -842,7 +849,7 @@ def test_raise_runtime_error(self): mock_requests.get.side_effect = RuntimeError("Test Runtime Error") with patch("podpac.core.utils.requests", mock_requests): - ret = _get_from_url("TEST/URL", None) + ret = _get_from_url("TEST/URL", None) assert ret is None def test_session_is_none(self): diff --git a/podpac/core/utils.py b/podpac/core/utils.py old mode 100644 new mode 100755 index c56d735c..9add4c65 --- a/podpac/core/utils.py +++ b/podpac/core/utils.py @@ -143,7 +143,6 @@ def validate(self, obj, value): super(OrderedDictTrait, self).validate(obj, value) return value - else: OrderedDictTrait = tl.Dict @@ -341,11 +340,7 @@ def _get_from_url(url, session=None): url, r.status_code, r.text ) ) - - except 
requests.ConnectionError as e: - _log.warning("Cannot connect to {}:".format(url) + str(e)) - r = None - except RuntimeError as e: + except Exception as e: _log.warning("Cannot authenticate to {}. Check credentials. Error was as follows:".format(url) + str(e)) r = None @@ -499,6 +494,7 @@ def _partial_definition(key, definition): if k == key: return new_def + def _flatten_list(l): """Helper for probe_node(). Needed to flatten the inputs list for all the dependencies""" nl = [] @@ -510,55 +506,66 @@ def _flatten_list(l): nl.append(ll) return nl + def _get_entry(key, out, definition): """Helper for probe_node(). Needed for the nested version of the pipeline""" # We have to rearrange the outputs entry = OrderedDict() entry["name"] = out[key]["name"] entry["value"] = str(out[key]["value"]) - entry['label'] = out[key]['label'] + entry["label"] = out[key]["label"] entry["active"] = out[key]["active"] - entry['node_class'] = out[key]['node_class'] - if 'node_hash' in out[key]: + if "node_hash" in out[key]: entry["node_id"] = out[key]["node_hash"] entry["params"] = {} entry["inputs"] = {"inputs": [_get_entry(inp, out, definition) for inp in out[key]["inputs"]]} if len(entry["inputs"]["inputs"]) == 0: entry["inputs"] = {} return entry - - + + def _get_label(value, style, add_enumeration_labels): """Helper for probe_node(). Handles both enumerations and units to be given back to the label field - - If no enumeration_legend is detected in style, or the user opts out of enumeration labels - with add_enumeration_labels = False, then units are returned. - Else, an enumeration label is determined, defaulting to "unknown" in error cases + + If no enumeration_legend is detected in style, or the user opts out of enumeration labels + with add_enumeration_labels = False, then units are returned. 
+ Else, an enumeration label is determined, defaulting to "unknown" in error cases """ if not add_enumeration_labels or style.enumeration_legend is None: return style.units if isinstance(value, list): # all list returns should be 2-D - ret = '' + ret = "" for v in np.unique(value): try: new_label = style.enumeration_legend[int(v)] except ValueError: _log.warning( - 'Enumeration label lookup failed for node of name {}, returning unknown'.format(style.name) + "Enumeration label lookup failed for node of name {}, returning unknown".format(style.name) ) - new_label = 'unknown' - ret += '{}={}, '.format(v, new_label) + new_label = "unknown" + ret += "{}={}, ".format(v, new_label) return ret[:-2] else: if np.isnan(value): - return 'unknown' + return "unknown" try: return str(style.enumeration_legend[int(value)]) except ValueError: - _log.warning('Enumeration label lookup failed for node of name {}, returning unknown'.format(style.name)) - return 'unknown' - -def probe_node(node, lat=None, lon=None, time=None, alt=None, crs=None, nested=False, add_enumeration_labels=True, compute_hash=True): + _log.warning("Enumeration label lookup failed for node of name {}, returning unknown".format(style.name)) + return "unknown" + + +def probe_node( + node, + lat=None, + lon=None, + time=None, + alt=None, + crs=None, + nested=False, + add_enumeration_labels=True, + compute_hash=True, +): """Evaluates every part of a node / pipeline at a point and records which nodes are actively being used. 
@@ -610,7 +617,7 @@ def probe_node(node, lat=None, lon=None, time=None, alt=None, crs=None, nested=F n = podpac.Node.from_definition(d) o = n.eval(coords) if o.size == 1: - value = float(o) + value = float(o.data.flatten()[0]) # making robust to all shapes of size=1 else: value = o.data.tolist() inputs = _flatten_list(list(d[item].get("inputs", {}).values())) @@ -620,11 +627,10 @@ def probe_node(node, lat=None, lon=None, time=None, alt=None, crs=None, nested=F "value": value, "label": _get_label(value, n.style, add_enumeration_labels), "inputs": inputs, - "name": n.style.name if n.style.name else item, - "node_class": type(n).__name__ + "name": n.style.name if n.style.name else item } if compute_hash: - out[item]['node_hash'] = n.hash + out[item]["node_hash"] = n.hash raw_values[item] = value # Fix sources for Compositors if isinstance(n, podpac.compositor.OrderedCompositor): @@ -693,9 +699,10 @@ def get_ui_node_spec(module=None, category="default", help_as_html=False): return spec + def align_xarray_dict(inputs): """ - Overrides the coordinates of each xarray entry so that they match to avoid + Overrides the coordinates of each xarray entry so that they match to avoid floating-point issues Parameters @@ -710,6 +717,6 @@ def align_xarray_dict(inputs): """ keys = list(inputs.keys()) for k in keys[1:]: - _,b = xr.align(inputs[keys[0]],inputs[k],join='override') + _, b = xr.align(inputs[keys[0]], inputs[k], join="override") inputs[k] = b - return inputs \ No newline at end of file + return inputs diff --git a/podpac/utils.py b/podpac/utils.py index eafc21af..5fe17a6b 100644 --- a/podpac/utils.py +++ b/podpac/utils.py @@ -6,4 +6,4 @@ from podpac.core.utils import create_logfile, cached_property, NodeTrait -from podpac.core.cache import clear_cache, cache_cleanup, DiskCacheMixin +from podpac.core.cache import clear_cache, cache_cleanup diff --git a/setup.py b/setup.py index 92a1dfd2..e6a8ba33 100644 --- a/setup.py +++ b/setup.py @@ -27,6 +27,8 @@ "psutil", "affine", 
"geopy", + "webob", + "pydap>=3.3", ] if sys.version_info.major == 2: @@ -39,11 +41,10 @@ "beautifulsoup4>=4.6", "h5py>=2.9", "lxml>=4.2", - "pydap>=3.3", "rasterio>=1.0", - "zarr>=2.3", + "zarr>=2.3,<3", "owslib", - "h5netcdf" + "h5netcdf", # "intake>=0.5" Not supported in Python 3.5 ], "aws": ["awscli>=1.16", "boto3>=1.9.200", "s3fs>=0.4"],