Code example #1
    def _update_dataset(self, change):

        # only update the dataset if the dataset trait has already been defined
        if trait_is_defined(self, 'dataset'):
            self.dataset = self._open_dataset()

            # update native_coordinates if they have been defined
            if trait_is_defined(self, 'native_coordinates'):
                self.native_coordinates = self.get_native_coordinates()
Code example #2
 def _update_dataset(self, change):
     # TODO: update this to look like Rasterio
     if self.dataset is not None:
         self.close_dataset()
         self.dataset = self._open_dataset(change['new'])
     if trait_is_defined(self, 'native_coordinates'):
         self.native_coordinates = self.get_native_coordinates()
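
Both _update_dataset variants above receive a traitlets change dictionary; handlers like these are registered with tl.observe so that they run whenever the observed trait (here, source) changes. A minimal sketch of that wiring, using a hypothetical MySource class with stubbed _open_dataset/close_dataset methods rather than podpac's actual classes:

import traitlets as tl

class MySource(tl.HasTraits):
    source = tl.Unicode()
    dataset = tl.Any(default_value=None, allow_none=True)

    @tl.observe('source')
    def _update_dataset(self, change):
        # change behaves like a dict holding the 'old' and 'new' values of the trait
        if self.dataset is not None:
            self.close_dataset()
        self.dataset = self._open_dataset(change['new'])

    def _open_dataset(self, path):
        # hypothetical stub; a real source would open a file handle here
        return {'path': path}

    def close_dataset(self):
        # hypothetical stub
        self.dataset = None

node = MySource()
node.source = 'data.csv'  # triggers _update_dataset with change['new'] == 'data.csv'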
Code example #3
    def get_native_coordinates(self):
        """{get_native_coordinates}

        Raises
        ------
        NotImplementedError
            Raised if get_native_coordinates is not implemented by data source subclass.
        """

        if trait_is_defined(self, 'native_coordinates'):
            return self.native_coordinates
        else:
            raise NotImplementedError('{0}.native_coordinates is not defined and '
                                      '{0}.get_native_coordinates() is not implemented'.format(self.__class__.__name__))
Code example #4
 def get_native_coordinates(self):
     """{get_native_coordinates}
     
     The default implementation looks for a *_col attribute for each dimension in self.dims and reads the
     corresponding column of the dataset. Child classes may have to override this method if their coordinates
     cannot be derived from dataset columns.
     """
     coords = []
     for d in self.dims:
         if (trait_is_defined(self, d + '_col')
                 or (d + '_col' not in self.trait_names() and hasattr(self, d + '_col'))):
             i = getattr(self, '_{}_col'.format(d))
             if d == 'time':
                 c = np.array(self.dataset.iloc[:, i], np.datetime64)
             else:
                 c = np.array(self.dataset.iloc[:, i])
             coords.append(ArrayCoordinates1d(c, name=d))
     if len(coords) > 1:
         coords = [StackedCoordinates(coords)]
     return Coordinates(coords)
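
The last two statements decide whether the per-column coordinate arrays form a single stacked dimension. A hedged illustration of that step with the same podpac coordinate classes the example uses (the import path and the sample lat/lon values are assumptions for the sketch):

import numpy as np
# import path is an assumption; the example's module already has these names in scope
from podpac.core.coordinates import ArrayCoordinates1d, StackedCoordinates, Coordinates

lat = ArrayCoordinates1d(np.array([0.0, 1.0, 2.0]), name='lat')
lon = ArrayCoordinates1d(np.array([10.0, 11.0, 12.0]), name='lon')

# two 1-d arrays of the same length are paired point-by-point into one
# stacked (lat_lon) dimension instead of forming a 2-d lat x lon grid
coords = Coordinates([StackedCoordinates([lat, lon])])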
Code example #5
    def __repr__(self):
        source_name = str(self.__class__.__name__)

        rep = '{}'.format(source_name)
        if source_name != 'DataSource':
            rep += ' DataSource'

        rep += '\n\tsource: {}'.format(self.source)
        if trait_is_defined(self, 'native_coordinates'):
            rep += '\n\tnative_coordinates: '
            for c in self.native_coordinates.values():
                if isinstance(c, Coordinates1d):
                    rep += '\n\t\t%s: %s' % (c.name, c)
                elif isinstance(c, StackedCoordinates):
                    for _c in c:
                        rep += '\n\t\t%s[%s]: %s' % (c.name, _c.name, _c)

                # rep += '{}: {}'.format(c.name, c)
        rep += '\n\tinterpolation: {}'.format(self.interpolation)

        return rep
Code example #6
    def test_trait_is_defined(self):
        class MyClass(tl.HasTraits):
            a = tl.Any()
            b = tl.Any(default_value=0)
            c = tl.Any()

            @tl.default("c")
            def _default_c(self):
                return "test"

        x = MyClass(a=1, b=1, c=1)
        assert trait_is_defined(x, "a")
        assert trait_is_defined(x, "b")
        assert trait_is_defined(x, "c")
        assert not trait_is_defined(x, "other")

        x = MyClass()
        if tl.version_info[0] >= 5:
            assert not trait_is_defined(x, "a")
            assert not trait_is_defined(x, "b")
            assert not trait_is_defined(x, "c")
        else:
            assert trait_is_defined(x, "a")
            assert trait_is_defined(x, "b")
            assert not trait_is_defined(x, "c")

        x.c  # accessing the trait computes its default value, so it becomes defined
        assert trait_is_defined(x, "c")
Code example #7
 def trait_is_defined(self, name):
     return trait_is_defined(self, name)
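
None of the examples show the helper itself. A minimal sketch of what trait_is_defined might check, assuming only public traitlets methods (has_trait, and trait_has_value on traitlets >= 5) plus the internal _trait_values store as a fallback; podpac's actual implementation may differ:

def trait_is_defined(obj, name):
    # a name that is not a trait at all is never "defined"
    if not obj.has_trait(name):
        return False
    # traitlets >= 5 exposes this check directly
    if hasattr(obj, 'trait_has_value'):
        return obj.trait_has_value(name)
    # older traitlets: a value exists once it was set or its default was computed
    return name in obj._trait_values

This reading is consistent with the version check in the test above: traitlets 5 computes all defaults lazily, so no trait counts as defined on a fresh instance until it is set or accessed.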
Code example #8
    def iteroutputs(self, coordinates):
        """Summary
        
        Parameters
        ----------
        coordinates : :class:`podpac.Coordinates`
            Coordinates at which to evaluate the compositor sources
        
        Yields
        ------
        :class:`podpac.core.units.UnitsDataArray`
            Output from source node eval method
        """
        # downselect sources based on coordinates
        src_subset = self.select_sources(coordinates)

        if len(src_subset) == 0:
            yield self.create_output_array(coordinates)
            return

        # Set the interpolation properties for sources
        if self.interpolation is not None:
            for s in src_subset.ravel():
                if trait_is_defined(self, 'interpolation'):
                    s.interpolation = self.interpolation

        # Optimization: if coordinates complete and source coords is 1D,
        # set native_coordinates unless they are set already
        # WARNING: this assumes
        #              native_coords = source_coords + shared_coordinates
        #         NOT  native_coords = shared_coords + source_coords
        if self.is_source_coordinates_complete and self.source_coordinates.ndim == 1:
            coords_subset = list(
                self.source_coordinates.intersect(
                    coordinates, outer=True).coords.values())[0]
            coords_dim = list(self.source_coordinates.dims)[0]
            for s, c in zip(src_subset, coords_subset):
                nc = merge_dims([
                    Coordinates(np.atleast_1d(c), dims=[coords_dim]),
                    self.shared_coordinates
                ])

                if not trait_is_defined(s, 'native_coordinates'):
                    s.native_coordinates = nc

        if self.threaded:
            # TODO pool of pre-allocated scratch space
            # TODO: docstring?
            def f(src):
                return src.eval(coordinates)

            pool = ThreadPool(processes=self.n_threads)
            results = [pool.apply_async(f, [src]) for src in src_subset]

            for src, res in zip(src_subset, results):
                yield res.get()
                #src._output = None # free up memory

        else:
            output = None  # scratch space
            for src in src_subset:
                output = src.eval(coordinates, output)
                yield output
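
iteroutputs yields one output per selected source, so the caller decides how the results are combined. A hedged sketch of one possible consumer, assuming the yielded UnitsDataArray objects behave like xarray DataArrays; composite_first_valid is hypothetical and not podpac's actual compositing logic:

import numpy as np

def composite_first_valid(compositor, coordinates):
    # keep the first non-NaN value seen for each cell
    result = None
    for out in compositor.iteroutputs(coordinates):
        if result is None:
            result = out.copy()
        else:
            missing = np.isnan(result.data)
            result.data[missing] = out.data[missing]
        if not np.isnan(result.data).any():
            break  # every cell is filled; skip the remaining sources
    return result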