Example #1
    def test_init_mismatching_dims(self):
        # mismatching dims
        c1 = Coordinates([[0, 1], [0, 1]], dims=['lat', 'lon'])
        c2 = Coordinates([[10, 11], [10, 11], '2018-01-01'], dims=['lat', 'lon', 'time'])

        with pytest.raises(ValueError, match='Mismatching dims'):
            GroupCoordinates([c1, c2])
    def test_iter(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        g = GroupCoordinates([c1, c2])

        for c in g:
            assert isinstance(c, Coordinates)
    def test_json(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        g = GroupCoordinates([c1, c2])

        s = g.json
        g2 = GroupCoordinates.from_json(s)
    def test_definition(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        g = GroupCoordinates([c1, c2])

        d = g.definition
        json.dumps(d, cls=podpac.core.utils.JSONEncoder)
        g2 = GroupCoordinates.from_definition(d)
Example #6
    def test_issubset_coordinates(self):
        a = ArrayCoordinates1d([3, 1], name="lat")
        c1 = Coordinates([[1, 2, 3], [10, 20, 30]], dims=["lat", "lon"])
        c2 = Coordinates([[1, 2, 4], [10, 20, 30]], dims=["lat", "lon"])
        c3 = Coordinates([[10, 20, 30]], dims=["alt"])

        assert a.issubset(c1)
        assert not a.issubset(c2)
        assert not a.issubset(c3)
    def test_issubset_coordinates(self):
        u = UniformCoordinates1d(1, 3, 1, name="lat")
        c1 = Coordinates([[1, 2, 3], [10, 20, 30]], dims=["lat", "lon"])
        c2 = Coordinates([[1, 2, 4], [10, 20, 30]], dims=["lat", "lon"])
        c3 = Coordinates([[10, 20, 30]], dims=["alt"])

        assert u.issubset(c1)
        assert not u.issubset(c2)
        assert not u.issubset(c3)
    def test_intersect(self):
        c1 = Coordinates([[0, 1, 2], [0, 1, 2]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        c3 = Coordinates([[0.5, 1.5, 2.5]], dims=["lat"])

        g = GroupCoordinates([c1, c2])

        g2 = g.intersect(c3)
        g2 = g.intersect(c3, outer=True)
        g2, I = g.intersect(c3, return_index=True)
    def test_repr(self):
        # empty
        g = GroupCoordinates([])
        repr(g)

        # nonempty
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[[10, 11], [10, 11]]], dims=["lat_lon"])
        g = GroupCoordinates([c1, c2])
        repr(g)
Example #10
    def _coordinates(self):
        if isinstance(self.coordinates, Coordinates):
            return self.coordinates
        elif isinstance(self.coordinates, Node):
            return self.coordinates.coordinates
        elif isinstance(self.coordinates, dict):
            return Coordinates.from_definition(self.coordinates)
        elif isinstance(self.coordinates, string_types):
            return Coordinates.from_json(self.coordinates)
        else:
            raise TypeError("The coordinates attribute is of the wrong type.")
    def test_properties(self):
        g = GroupCoordinates([])
        assert len(g) == 0
        assert g.udims == set()

        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[[10, 11], [10, 11]]], dims=["lat_lon"])

        g = GroupCoordinates([c1, c2])
        assert len(g) == 2
        assert g.udims == set(["lat", "lon"])
Example #12
    def _validate_coordinates(self, d):
        val = d["value"]
        if isinstance(val, Node):
            if not hasattr(val, "coordinates"):
                raise ValueError(
                    "When specifying the coordinates as a PODPAC Node, this Node must have a 'coordinates' attribute"
                )
        elif isinstance(val, dict):
            # check that the incoming value can be turned into Coordinates
            Coordinates.from_definition(val)
        elif isinstance(val, string_types):
            Coordinates.from_json(val)
        return val
Example #13
    def test_properties(self):
        g = GroupCoordinates([])
        assert len(g) == 0
        assert g.udims == set()
        # assert isinstance(g.definition, list)
        # assert isinstance(g.json, str)
        # assert isinstance(g.hash, int)

        c1 = Coordinates([[0, 1], [0, 1]], dims=['lat', 'lon'])
        c2 = Coordinates([[[10, 11], [10, 11]]], dims=['lat_lon'])

        g = GroupCoordinates([c1, c2])
        assert len(g) == 2
        assert g.udims == set(['lat', 'lon'])
    def test_hash(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        c3 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        c4 = Coordinates([[10, 12], [10, 11]], dims=["lat", "lon"])

        g1 = GroupCoordinates([c1, c2])
        g2 = GroupCoordinates([c1, c2])
        g3 = GroupCoordinates([c1, c3])
        g4 = GroupCoordinates([c1, c4])

        assert g1.hash == g1.hash
        assert g1.hash == g2.hash
        assert g1.hash == g3.hash
        assert g1.hash != g4.hash
Example #15
def test_put_and_remove_array_datasource_numpy_array():
    lat = [0, 1, 2]
    lon = [10, 20, 30, 40]
    dates = ['2018-01-01', '2018-01-02']
    native_coordinates = Coordinates([lat, lon, dates], ['lat', 'lon', 'time'])
    source = np.zeros(native_coordinates.shape)
    array_data_source = Array(source=source,
                              native_coordinates=native_coordinates)
    put_data = np.zeros(native_coordinates.shape)
    cache.put(node=array_data_source,
              data=put_data,
              key='key',
              coordinates=native_coordinates,
              mode='all',
              update=False)
    cached_data = cache.get(node=array_data_source,
                            key='key',
                            coordinates=native_coordinates,
                            mode='all')
    assert (cached_data == put_data).all()
    cache.rem(node=array_data_source,
              key='key',
              coordinates=native_coordinates,
              mode='all')
    assert not cache.has(node=array_data_source,
                         key='key',
                         coordinates=native_coordinates,
                         mode='all')
    with pytest.raises(CacheException):
        cache.get(node=array_data_source,
                  key='key',
                  coordinates=native_coordinates,
                  mode='all')
    cache.rem(node='*', key='*', coordinates='*',
              mode='all')  # clear the cache stores
Example #16
    def reprojection_coordinates(self):
        # get coordinates
        if isinstance(self.coordinates, Coordinates):
            coordinates = self.coordinates
        elif isinstance(self.coordinates, Node):
            coordinates = self.coordinates.coordinates
        elif isinstance(self.coordinates, dict):
            coordinates = Coordinates.from_definition(self.coordinates)
        elif isinstance(self.coordinates, string_types):
            coordinates = Coordinates.from_json(self.coordinates)

        # drop non-reprojection dims
        if self.reproject_dims is not None:
            coordinates = coordinates.drop(
                [dim for dim in coordinates if dim not in self.reproject_dims])

        return coordinates
Example #17
def test_put_something_new_into_existing_file():
    lat = np.random.rand(3)
    lon = np.random.rand(4)
    dummy_coords = Coordinates([lat, lon], ['lat', 'lon'])
    dummy_node = Array(source=np.random.random_sample(dummy_coords.shape),
                       native_coordinates=dummy_coords)
    dummy_node_din = np.random.rand(6, 7, 8)
    dummy_node_key = "key"
    disk_stores = [c for c in cache._cache_stores if type(c) is DiskCacheStore]
    for coord_f in coord_funcs:
        for node_f in node_funcs:
            for data_f in data_funcs:
                c1, c2 = coord_f(), coord_f()
                n1, n2 = node_f(), node_f()
                din = data_f()
                k = "key"
                assert not cache.has(
                    node=n1, key=k, coordinates=c1, mode='all')
                for store in disk_stores:
                    store.make_cache_dir(node=n1)
                    path = store.cache_path(node=n1, key=k, coordinates=c1)
                    listing = CacheListing(node=dummy_node,
                                           key=dummy_node_key,
                                           coordinates=dummy_coords,
                                           data=dummy_node_din)
                    CachePickleContainer(listings=[listing]).save(path)
                assert not cache.has(
                    node=n1, key=k, coordinates=c1, mode='all')
                cache.put(node=n1,
                          data=din,
                          key=k,
                          coordinates=c1,
                          mode='all',
                          update=False)
                assert cache.has(node=n1, key=k, coordinates=c1, mode='all')
                dout = cache.get(node=n1, key=k, coordinates=c1, mode='all')
                assert (din == dout).all()
                dout = cache.get(node=n2, key=k, coordinates=c2, mode='all')
                assert (din == dout).all()
                cache.rem(node=n1, key=k, coordinates=c1, mode='all')
                assert not cache.has(
                    node=n1, key=k, coordinates=c1, mode='all')
                for store in disk_stores:
                    path = store.cache_path(node=n1, key=k, coordinates=c1)
                    assert os.path.exists(path)
                    c = CachePickleContainer.load(path)
                    listing = CacheListing(node=dummy_node,
                                           key=dummy_node_key,
                                           coordinates=dummy_coords,
                                           data=dummy_node_din)
                    assert c.has(listing)
                cache.rem(node='*', key='*', coordinates='*', mode='all')
Example #18
    def select(self, source_coords, request_coords, index_type="numpy"):
        """Sub-selects the source_coords based on the request_coords

        Parameters
        ------------
        source_coords: :class:`podpac.Coordinates`
            The coordinates of the source data
        request_coords: :class:`podpac.Coordinates`
            The coordinates of the request (user eval)
        index_type: str, optional
            Default is 'numpy'. One of "numpy", "xarray", or "slice". The returned index will be compatible
            with "numpy" (default) or "xarray" indexing, or with any object that accepts tuples of
            slices ("slice").

        Returns
        --------
        :class:`podpac.Coordinates`:
            The sub-selected source coordinates
        tuple(indices):
            The indices that can be used to sub-select the source coordinates to produce the sub-selected coordinates.
            This is useful for indexing directly into the source data.
        """
        if source_coords.crs.lower() != request_coords.crs.lower():
            request_coords = request_coords.transform(source_coords.crs)
        coords = []
        coords_inds = []
        for coord1d in source_coords._coords.values():
            ci = self._select1d(coord1d, request_coords, index_type)
            ci = np.sort(np.unique(ci))
            if len(coord1d.shape) == 2:  # Handle case of 2D-stacked coordinates
                ncols = coord1d.shape[1]
                ci = (ci // ncols, ci % ncols)
                if index_type == "slice":
                    ci = tuple([_index2slice(cii) for cii in ci])
            elif index_type == "slice":
                ci = _index2slice(ci)
            if len(coord1d.shape) == 3:  # Handle case of 3D-stacked coordinates
                raise NotImplementedError
            c = coord1d[ci]
            coords.append(c)
            coords_inds.append(ci)
        coords = Coordinates(coords)
        if index_type == "numpy":
            coords_inds = self._merge_indices(coords_inds, source_coords.dims,
                                              request_coords.dims)
        elif index_type == "xarray":
            # unlike numpy, xarray assumes indexes are orthogonal by default, so the 1d coordinates are already correct
            # unless there are tuple coordinates (nD stacked coords) but those are handled in interpolation_manager
            pass
        return coords, tuple(coords_inds)
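The snippet below is a minimal usage sketch of the select() method above. It assumes the method lives on PODPAC's interpolation Selector class (podpac.core.interpolation.selector.Selector) and that a selector can be constructed with an interpolation method name; the class name, import path, and constructor argument are assumptions, not confirmed by the example itself.

# Hedged usage sketch; the Selector class name, import path, and constructor
# argument are assumptions, not shown in the snippet above.
import numpy as np
from podpac import Coordinates
from podpac.core.interpolation.selector import Selector  # assumed import path

source_coords = Coordinates([np.linspace(0, 10, 11), np.linspace(0, 10, 11)], dims=["lat", "lon"])
request_coords = Coordinates([[2.4, 3.6], [4.1, 5.9]], dims=["lat", "lon"])

selector = Selector(method="nearest")  # assumed constructor signature
sub_coords, index = selector.select(source_coords, request_coords, index_type="numpy")

# sub_coords: the source coordinates restricted to the requested region
# index: tuple of indices that can be used to slice the underlying source data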
    def test_append(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        c3 = Coordinates(["2018-01-01"], dims=["time"])

        g = GroupCoordinates([])
        assert len(g) == 0

        g.append(c1)
        assert len(g) == 1

        g.append(c2)
        assert len(g) == 2

        with pytest.raises(TypeError):
            g.append(10)

        with pytest.raises(ValueError):
            g.append(c3)

        assert g._items[0] is c1
        assert g._items[1] is c2
    def test_add(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        c3 = Coordinates(["2018-01-01"], dims=["time"])

        g1 = GroupCoordinates([c1])
        g2 = GroupCoordinates([c2])
        g3 = GroupCoordinates([c3])

        g = g1 + g2

        assert len(g1) == 1
        assert len(g2) == 1
        assert len(g) == 2
        assert g._items[0] is c1
        assert g._items[1] is c2

        with pytest.raises(ValueError):
            g1 + g3

        with pytest.raises(TypeError):
            g1 + c1
Example #21
    def test_iadd(self):
        c1 = Coordinates([[0, 1], [0, 1]], dims=['lat', 'lon'])
        c2 = Coordinates([[10, 11], [10, 11]], dims=['lat', 'lon'])
        c3 = Coordinates(['2018-01-01'], dims=['time'])

        g1 = GroupCoordinates([c1])
        g2 = GroupCoordinates([c2])
        g3 = GroupCoordinates([c3])

        g1 += g2

        with pytest.raises(ValueError):
            g1 += g3

        with pytest.raises(TypeError):
            g1 += c1

        assert len(g1) == 2
        assert g1._items[0] is c1
        assert g1._items[1] is c2

        assert len(g2) == 1
        assert g2._items[0] is c2
Example #22
def test_put_and_get_with_different_instances_of_same_key_objects_array_datasource_output():
    lat = [0, 1, 2]
    lon = [10, 20, 30, 40]
    dates = ['2018-01-01', '2018-01-02']

    # create data source node and coordinates for put operation
    native_coordinates_put = Coordinates([lat, lon, dates],
                                         ['lat', 'lon', 'time'])
    source_put = np.zeros(native_coordinates_put.shape)
    array_data_source_put = Array(source=source_put,
                                  native_coordinates=native_coordinates_put)
    output = array_data_source_put.eval(native_coordinates_put)

    cache.put(node=array_data_source_put,
              data=output,
              key='output',
              coordinates=native_coordinates_put,
              mode='all',
              update=False)

    # create equivalent (but new objects) data source node and coordinates for get operation
    native_coordinates_get = Coordinates([lat, lon, dates],
                                         ['lat', 'lon', 'time'])
    source_get = np.zeros(native_coordinates_get.shape)
    array_data_source_get = Array(source=source_get,
                                  native_coordinates=native_coordinates_get)

    cached_output = cache.get(node=array_data_source_get,
                              key='output',
                              coordinates=native_coordinates_get,
                              mode='all')

    assert (cached_output == output).all()
    cache.rem(node='*', key='*', coordinates='*',
              mode='all')  # clear the cache stores
Example #23
def test_two_different_nodes_put_and_one_node_removes_all():
    lat = np.random.rand(3)
    lon = np.random.rand(4)
    coords = Coordinates([lat, lon], ['lat', 'lon'])
    persistent_node = Array(source=np.random.random_sample(coords.shape),
                            native_coordinates=coords)
    persistent_node_din = np.random.rand(6, 7, 8)
    persistent_node_key = "key"
    cache.put(node=persistent_node,
              data=persistent_node_din,
              key=persistent_node_key,
              coordinates=None)
    for coord_f in coord_funcs:
        for node_f in node_funcs:
            for data_f in data_funcs:
                c1, c2 = coord_f(), coord_f()
                n1, n2 = node_f(), node_f()
                d1 = data_f()
                d2 = data_f() + np.pi
                k1 = "key"
                k2 = k1 + "2"
                cache.put(node=n1,
                          data=d1,
                          key=k1,
                          coordinates=c1,
                          mode='all',
                          update=False)
                cache.put(node=n1,
                          data=d1,
                          key=k2,
                          coordinates=c1,
                          mode='all',
                          update=False)
                assert cache.has(node=n2, key=k1, coordinates=c2, mode='all')
                assert cache.has(node=n2, key=k2, coordinates=c2, mode='all')
                cache.rem(node=n2, key='*', coordinates='*', mode='all')
                assert not cache.has(
                    node=n1, key=k1, coordinates=c1, mode='all')
                assert not cache.has(
                    node=n1, key=k2, coordinates=c1, mode='all')
                with pytest.raises(CacheException):
                    cache.get(node=n1, key=k2, coordinates=c1, mode='all')
                with pytest.raises(CacheException):
                    cache.get(node=n1, key=k1, coordinates=c1, mode='all')
                assert cache.has(node=persistent_node,
                                 key=persistent_node_key,
                                 coordinates=None)
    cache.rem(node='*', key='*', coordinates='*', mode='all')
Example #24
    def test_intersect_multi(self):
        coords = Coordinates([[55., 65., 95., 45.], [1., 2., 3., 4.]],
                             dims=['lat', 'lon'])

        # intersect correct dimension
        a = ArrayCoordinates1d([50., 60., 10.], ctype='point', name='lat')
        b = ArrayCoordinates1d([2.5, 3.5, 4.5], ctype='point', name='lon')
        c = ArrayCoordinates1d([100., 200., 300.], ctype='point', name='alt')

        ai = a.intersect(coords)
        bi = b.intersect(coords)
        ci = c.intersect(coords)

        assert_equal(ai.coordinates, [50., 60.])
        assert_equal(bi.coordinates, [2.5, 3.5])
        assert_equal(ci.coordinates, [100., 200., 300.])
Example #25
    def from_definition(cls, d):
        """
        Create a Coordinates group from a group definition.

        Arguments
        ---------
        d : list
            group definition

        Returns
        -------
        :class:`GroupCoordinates`
            Coordinates group

        See Also
        --------
        definition, from_json
        """

        return cls([Coordinates.from_definition(elem) for elem in d])
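As a quick illustration of the round trip described in the docstring, here is a minimal sketch that mirrors test_definition and test_json elsewhere in this section; the podpac.coordinates import path is an assumption.

# Minimal round-trip sketch for definition/from_definition and json/from_json;
# the import path below is an assumption.
from podpac.coordinates import Coordinates, GroupCoordinates

c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
g = GroupCoordinates([c1, c2])

d = g.definition                          # serializable list of Coordinates definitions
g2 = GroupCoordinates.from_definition(d)  # reconstructs an equivalent group

s = g.json                                # the same definition as a JSON string
g3 = GroupCoordinates.from_json(s)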
    def test_init(self):
        # empty
        g = GroupCoordinates([])

        # same dims, unstacked
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lat", "lon"])
        g = GroupCoordinates([c1, c2])

        # same dims, stacked
        c1 = Coordinates([[[0, 1], [0, 1]]], dims=["lat_lon"])
        c2 = Coordinates([[[10, 11], [10, 11]]], dims=["lat_lon"])
        g = GroupCoordinates([c1, c2])

        # different order
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[10, 11], [10, 11]], dims=["lon", "lat"])
        g = GroupCoordinates([c1, c2])

        # different stacking
        c1 = Coordinates([[0, 1], [0, 1]], dims=["lat", "lon"])
        c2 = Coordinates([[[10, 11], [10, 11]]], dims=["lat_lon"])
        g = GroupCoordinates([c1, c2])
Example #27
def test_put_and_get_array_datasource_output():
    lat = [0, 1, 2]
    lon = [10, 20, 30, 40]
    dates = ['2018-01-01', '2018-01-02']
    native_coordinates = Coordinates([lat, lon, dates], ['lat', 'lon', 'time'])
    source = np.zeros(native_coordinates.shape)
    array_data_source = Array(source=source,
                              native_coordinates=native_coordinates)
    output = array_data_source.eval(native_coordinates)
    cache.put(node=array_data_source,
              data=output,
              key='output',
              coordinates=native_coordinates,
              mode='all',
              update=False)
    cached_output = cache.get(node=array_data_source,
                              key='output',
                              coordinates=native_coordinates,
                              mode='all')
    assert (cached_output == output).all()
    cache.rem(node='*', key='*', coordinates='*',
              mode='all')  # clear the cache stores
Example #28
def make_lat_lon_time_grid_coords():
    lat = [0, 1, 2]
    lon = [10, 20, 30, 40]
    dates = ['2018-01-01', '2018-01-02']
    coords = Coordinates([lat, lon, dates], ['lat', 'lon', 'time'])
    return coords
Example #29
    def get_coordinates(self):
        """{get_coordinates}"""
        if self.infer_podpac_coords:
            return Coordinates.from_xarray(self.dataset, crs=self.crs)
        return super().get_coordinates()