Example #1
    def __init__(self, index=None, product=None, geopolygon=None, like=None, **search_terms):
        """"Parses search terms in preparation for querying the Data Cube Index.

        Create a :class:`Query` object by passing it a set of search terms as keyword arguments.

        >>> query = Query(product='ls5_nbar_albers', time=('2001-01-01', '2002-01-01'))

        Use it by accessing :attr:`search_terms`::

        >>> query.search_terms['time']  # doctest: +NORMALIZE_WHITESPACE
        Range(begin=datetime.datetime(2001, 1, 1, 0, 0, tzinfo=<UTC>), \
        end=datetime.datetime(2002, 1, 1, 0, 0, tzinfo=<UTC>))

        If an ``index`` is passed in, the search parameters will be validated as existing on the ``product``.

        Used by :meth:`datacube.Datacube.find_datasets` and :meth:`datacube.Datacube.load`.

        :param datacube.index.Index index: An optional `index` object, if checking of field names is desired.
        :param str product: name of product
        :param geopolygon: spatial bounds of the search
        :type geopolygon: geometry.Geometry or None
        :param xarray.Dataset like: spatio-temporal bounds of `like` are used for the search
        :param search_terms:
         * `measurements` - list of measurements to retrieve
         * `latitude`, `lat`, `y`, `longitude`, `lon`, `long`, `x` - tuples (min, max) bounding spatial dimensions
         * `crs` - spatial coordinate reference system to interpret the spatial bounds
         * `group_by` - observation grouping method. One of `time`, `solar_day`. Default is `time`
        """
        self.product = product
        self.geopolygon = query_geopolygon(geopolygon=geopolygon, **search_terms)
        if 'source_filter' in search_terms and search_terms['source_filter'] is not None:
            self.source_filter = Query(**search_terms['source_filter'])
        else:
            self.source_filter = None

        remaining_keys = set(search_terms.keys()) - set(SPATIAL_KEYS + CRS_KEYS + OTHER_KEYS)
        if index:
            unknown_keys = remaining_keys - set(index.datasets.get_field_names())
            # TODO: What about keys in source filters, and what if the keys don't match up with this product?
            if unknown_keys:
                raise LookupError('Unknown arguments: ', unknown_keys)

        self.search = {}
        for key in remaining_keys:
            self.search.update(_values_to_search(**{key: search_terms[key]}))

        if like:
            assert self.geopolygon is None, "'like' with other spatial bounding parameters is not supported"
            self.geopolygon = getattr(like, 'extent', self.geopolygon)

            if 'time' not in self.search:
                time_coord = like.coords.get('time')
                if time_coord is not None:
                    self.search['time'] = _time_to_search_dims(
                        (pandas_to_datetime(time_coord.values[0]).to_pydatetime(),
                         pandas_to_datetime(time_coord.values[-1]).to_pydatetime()
                         + datetime.timedelta(milliseconds=1))  # TODO: inclusive time searches
                    )
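
For context, the sketch below shows how the keyword arguments described in the docstring map onto a Query. It is a hedged example: the import path (datacube.api.query) and the x/y extents are assumptions, and only the product and time values come from the doctest above.

# Minimal usage sketch (assumed import path and placeholder extents).
from datacube.api.query import Query

query = Query(
    product='ls5_nbar_albers',
    time=('2001-01-01', '2002-01-01'),
    x=(1516200, 1541300),        # (min, max) in the CRS below -- placeholder values
    y=(-3867375, -3841700),      # (min, max) -- placeholder values
    crs='EPSG:3577',             # CRS used to interpret the x/y bounds
    group_by='solar_day',        # instead of the default 'time'
)

print(query.search_terms['time'])    # Range(begin=..., end=...) as in the doctest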
Example #2
    def __init__(self,
                 index=None,
                 product=None,
                 geopolygon=None,
                 like=None,
                 **kwargs):
        """Parses a kwarg dict for query parameters

        :param datacube.index._api.Index index: An optional `index` object, if checking of field names is desired.
        :param str product: name of product
        :param Optional[datacube.utils.Geometry] geopolygon: spatial bounds of the search
        :param xarray.Dataset like: spatio-temporal bounds of `like` are used for the search
        :param kwargs:
         * `measurements` - list of measurements to retrieve
         * `latitude`, `lat`, `y`, `longitude`, `lon`, `long`, `x` - tuples (min, max) bounding spatial dimensions
         * `crs` - spatial coordinate reference system to interpret the spatial bounds
         * `group_by` - observation grouping method. One of 'time', 'solar_day'. Default is 'time'
        """
        self.product = product
        self.geopolygon = query_geopolygon(geopolygon=geopolygon, **kwargs)
        if 'source_filter' in kwargs and kwargs['source_filter'] is not None:
            self.source_filter = Query(**kwargs['source_filter'])
        else:
            self.source_filter = None

        remaining_keys = set(
            kwargs.keys()) - set(SPATIAL_KEYS + CRS_KEYS + OTHER_KEYS)
        if index:
            unknown_keys = remaining_keys - set(
                index.datasets.get_field_names())
            # TODO: What about keys in source filters, and what if the keys don't match up with this product?
            if unknown_keys:
                raise LookupError('Unknown arguments: ', unknown_keys)

        self.search = {}
        for key in remaining_keys:
            self.search.update(_values_to_search(**{key: kwargs[key]}))

        if like:
            assert self.geopolygon is None, "'like' with other spatial bounding parameters is not supported"
            self.geopolygon = getattr(like, 'extent', self.geopolygon)

            if 'time' not in self.search:
                time_coord = like.coords.get('time')
                if time_coord is not None:
                    self.search['time'] = _time_to_search_dims(
                        (pandas_to_datetime(
                            time_coord.values[0]).to_pydatetime(),
                         pandas_to_datetime(
                             time_coord.values[-1]).to_pydatetime() +
                         datetime.timedelta(milliseconds=1)
                         )  # TODO: inclusive time searches
                    )
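
The `like=` pathway deserves its own sketch. The snippet below is hedged: it assumes a configured datacube index is reachable, that `Datacube.load` returns an xarray.Dataset carrying the `extent` attribute this `__init__` reads via `getattr`, and that the second product name and the extents are placeholders.

# Hedged sketch of reusing spatio-temporal bounds via `like=`.
from datacube import Datacube
from datacube.api.query import Query            # assumed import path

dc = Datacube()                                 # needs a configured datacube index
existing = dc.load(product='ls5_nbar_albers',
                   x=(149.0, 149.2), y=(-35.4, -35.2))   # placeholder extents (degrees)

# The new query takes its geopolygon from existing.extent and its time range
# from existing.coords['time']; passing other spatial keywords alongside
# `like` trips the assert above.
query = Query(product='ls7_nbar_albers', like=existing)  # placeholder product name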
Example #3
import datetime

from dateutil import tz
from pandas import to_datetime as pandas_to_datetime


def _to_datetime(t):
    if isinstance(t, (float, int)):
        t = datetime.datetime.fromtimestamp(t, tz=tz.tzutc())

    if isinstance(t, tuple):
        t = datetime.datetime(*t, tzinfo=tz.tzutc())
    elif isinstance(t, str):
        try:
            t = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%fZ")
        except ValueError:
            pass
    elif isinstance(t, datetime.datetime):
        if t.tzinfo is None:
            t = t.replace(tzinfo=tz.tzutc())
        return t

    return pandas_to_datetime(t, utc=True, infer_datetime_format=True).to_pydatetime()
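
A few illustrative calls follow; they assume the helper sits in a module where the imports above are in scope (it is a private function, so no import path is shown).

# Each accepted input form, with the value it resolves to (all UTC-aware):
_to_datetime(1325376000)                        # POSIX timestamp -> 2012-01-01 00:00:00+00:00
_to_datetime((2012, 1, 1))                      # datetime component tuple
_to_datetime('2012-01-01T00:00:00.000000Z')     # ISO 8601 string matched by strptime
_to_datetime(datetime.datetime(2012, 1, 1))     # naive datetime gains tzutc()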
Example #4
import datetime

from dateutil import tz

# NOTE: `compat.integer_types` / `compat.string_types` below appear to come from
# the surrounding package's Python 2/3 compatibility shim.


def to_datetime(t):
    if isinstance(t, compat.integer_types + (float,)):
        t = datetime.datetime.fromtimestamp(t, tz=tz.tzutc())
    if isinstance(t, tuple):
        t = datetime.datetime(*t, tzinfo=tz.tzutc())
    elif isinstance(t, compat.string_types):
        try:
            t = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%fZ")
        except ValueError:
            pass
        try:
            from pandas import to_datetime as pandas_to_datetime
            return pandas_to_datetime(t, utc=True, infer_datetime_format=True).to_pydatetime()
        except ImportError:
            pass

    if isinstance(t, datetime.datetime):
        if t.tzinfo is None:
            t = t.replace(tzinfo=tz.tzutc())
        return t
    raise ValueError('Could not parse the time for {}'.format(t))
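
For completeness, a hedged sketch of the failure path: if no branch can interpret the value (or the string fallback to pandas is unavailable), the function ends in the ValueError above. It assumes the `compat` shim referenced by the function is importable in the surrounding module.

try:
    to_datetime(object())            # no branch matches an arbitrary object
except ValueError as exc:
    print(exc)                       # Could not parse the time for <object object at 0x...>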