def search(self, products=None, sat_id=None, date='acquired', place=None,
           geom=None, start_datetime=None, end_datetime=None,
           cloud_fraction=None, cloud_fraction_0=None, fill_fraction=None,
           q=None, limit=100, offset=0, fields=None, dltile=None,
           sort_field=None, sort_order="asc", randomize=None,
           continuation_token=None, **kwargs):
    """Search metadata given a spatio-temporal query. All parameters are
    optional. For accessing more than 10000 results, see :py:func:`features`.

    :param list(str) products: Product Identifier(s).
    :param list(str) sat_id: Satellite identifier(s).
    :param str date: The date field to use for search (e.g. `acquired`).
    :param str place: A slug identifier to be used as a region of interest.
    :param str geom: A GeoJSON or WKT region of interest.
    :param str start_datetime: Desired starting timestamp, in any common format.
    :param str end_datetime: Desired ending timestamp, in any common format.
    :param float cloud_fraction: Maximum cloud fraction, calculated by data provider.
    :param float cloud_fraction_0: Maximum cloud fraction, calculated by cloud mask pixels.
    :param float fill_fraction: Minimum scene fill fraction, calculated as valid/total pixels.
    :param expr q: Expression for filtering the results. See
        :py:attr:`descarteslabs.utilities.properties`.
    :param int limit: Number of items to return up to the maximum of 10000.
    :param int offset: Number of items to skip.
    :param list(str) fields: Properties to return.
    :param str dltile: A dltile key used to specify the resolution, bounds, and srs.
    :param str sort_field: Property to sort on.
    :param str sort_order: Order of sort.
    :param bool randomize: Randomize the results. You may also use an `int` or
        `str` as an explicit seed.

    :return: GeoJSON ``FeatureCollection``

    Example::

        >>> from descarteslabs.client.services import Metadata
        >>> scenes = Metadata().search(place='north-america_united-states_iowa', \
                products=['landsat:LC08:PRE:TOAR'], \
                start_datetime='2016-07-01', \
                end_datetime='2016-07-31T23:59:59')
        >>> len(scenes['features'])  # doctest: +SKIP
        2
    """
    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom='low')
        geom = json.dumps(shape['geometry'])

    if dltile is not None:
        if isinstance(dltile, string_types):
            dltile = self._raster.dltile(dltile)
        if isinstance(dltile, dict):
            geom = dltile['geometry']

    if isinstance(geom, dict):
        geom = json.dumps(geom)

    kwargs.update({'date': date, 'limit': limit})

    if offset:
        warn(OFFSET_DEPRECATION_MESSAGE, DeprecationWarning)
        kwargs['offset'] = offset

    if sat_id:
        warn(SAT_ID_DEPRECATION_MESSAGE, DeprecationWarning)
        if isinstance(sat_id, string_types):
            sat_id = [sat_id]
        kwargs['sat_id'] = sat_id

    if products:
        if isinstance(products, string_types):
            products = [products]
        kwargs['products'] = products

    if geom:
        kwargs['geom'] = geom

    if start_datetime:
        kwargs['start_datetime'] = start_datetime

    if end_datetime:
        kwargs['end_datetime'] = end_datetime

    if cloud_fraction is not None:
        kwargs['cloud_fraction'] = cloud_fraction

    if cloud_fraction_0 is not None:
        kwargs['cloud_fraction_0'] = cloud_fraction_0

    if fill_fraction is not None:
        kwargs['fill_fraction'] = fill_fraction

    if fields is not None:
        kwargs['fields'] = fields

    if q is not None:
        if not isinstance(q, list):
            q = [q]
        kwargs['query_expr'] = AndExpression(q).serialize()

    if sort_field is not None:
        kwargs['sort_field'] = sort_field

    if sort_order is not None:
        kwargs['sort_order'] = sort_order

    if randomize is not None:
        kwargs['random_seed'] = randomize

    if continuation_token is not None:
        kwargs['continuation_token'] = continuation_token

    r = self.session.post('/search', json=kwargs)

    fc = {'type': 'FeatureCollection', 'features': r.json()}

    if 'x-continuation-token' in r.headers:
        fc['properties'] = {
            'continuation_token': r.headers['x-continuation-token']
        }

    return DotDict(fc)
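
# Hedged usage sketch for ``search`` with a ``q`` filter expression. The import
# path for the property-filtering helpers is an assumption based on the
# ``properties`` reference in the docstrings; adjust it to your installed
# client version.
#
#     from descarteslabs.client.services import Metadata
#     from descarteslabs.client.services.metadata import properties as p
#
#     md = Metadata()
#     fc = md.search(products=['landsat:LC08:PRE:TOAR'],
#                    start_datetime='2016-07-01',
#                    end_datetime='2016-07-31T23:59:59',
#                    q=[p.cloud_fraction < 0.2],
#                    limit=500)
#     print(len(fc['features']))  # number of matching scenes (at most ``limit``)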
def raster(
    self,
    inputs,
    bands=None,
    scales=None,
    data_type=None,
    output_format='GTiff',
    srs=None,
    dimensions=None,
    resolution=None,
    bounds=None,
    bounds_srs=None,
    cutline=None,
    place=None,
    align_pixels=False,
    resampler=None,
    dltile=None,
    save=False,
    outfile_basename=None,
    **pass_through_params
):
    """Given a list of :class:`Metadata <descarteslabs.services.Metadata>`
    identifiers, retrieve a translated and warped mosaic as an image file.

    :param inputs: List of :class:`Metadata` identifiers.
    :param bands: List of requested bands. If the last item in the list is an
        alpha band (with data range ``[0, 1]``) it affects rastering of all
        other bands: When rastering multiple images, they are combined
        image-by-image only where each respective image's alpha band is ``1``
        (pixels where the alpha band is not ``1`` are "transparent" in the
        overlap between images). If a pixel is fully masked considering all
        combined alpha bands it will be ``0`` in all non-alpha bands.
    :param scales: List of tuples specifying the scaling to be applied to each
        band. A tuple has 4 elements in the order
        ``(src_min, src_max, out_min, out_max)``, meaning values in the source
        range ``src_min`` to ``src_max`` will be scaled to the output range
        ``out_min`` to ``out_max``. A tuple with 2 elements
        ``(src_min, src_max)`` is also allowed, in which case the output range
        defaults to ``(0, 255)`` (a useful default for the common output type
        ``Byte``). If no scaling is desired for a band, use ``None``. This
        tuple format and behaviour is identical to GDAL's scales during
        translation.

        Example argument: ``[(0, 10000, 0, 127), None, (0, 10000)]`` - the
        first band will have source values 0-10000 scaled to 0-127, the second
        band will not be scaled, the third band will have 0-10000 scaled to
        0-255.
    :param str output_format: Output format (one of ``GTiff``, ``PNG``,
        ``JPEG``).
    :param str data_type: Output data type (one of ``Byte``, ``UInt16``,
        ``Int16``, ``UInt32``, ``Int32``, ``Float32``, ``Float64``).
    :param str srs: Output spatial reference system definition understood by
        GDAL.
    :param float resolution: Desired resolution in output SRS units.
        Incompatible with `dimensions`.
    :param tuple dimensions: Desired output (width, height) in pixels.
        Incompatible with `resolution`.
    :param str cutline: A GeoJSON feature or geometry to be used as a cutline.
    :param str place: A slug identifier to be used as a cutline.
    :param tuple bounds: ``(min_x, min_y, max_x, max_y)`` in target SRS.
    :param str bounds_srs: Override the coordinate system in which bounds are
        expressed.
    :param bool align_pixels: Align pixels to the target coordinate system.
    :param str resampler: Resampling algorithm to be used during warping
        (``near``, ``bilinear``, ``cubic``, ``cubicspline``, ``lanczos``,
        ``average``, ``mode``, ``max``, ``min``, ``med``, ``q1``, ``q3``).
    :param str dltile: A dltile key used to specify the resolution, bounds,
        and srs.
    :param bool save: Write resulting files to disk. Default: False.
    :param str outfile_basename: If `save` is True, override default filename
        using this string as a base.

    :return: A dictionary with two keys, ``files`` and ``metadata``. The value
        for ``files`` is a dictionary mapping file names to binary data for
        files (at the moment there will always be only a single file with the
        appropriate file extension based on the ``output_format`` requested).
        The value for ``metadata`` is a dictionary containing details about
        the raster operation that happened. These details can be useful for
        debugging but shouldn't otherwise be relied on (there are no
        guarantees that certain keys will be present).
    """
    cutline = as_json_string(cutline)

    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom='low')
        cutline = json.dumps(shape['geometry'])

    params = {
        'keys': inputs,
        'bands': bands,
        'scales': scales,
        'ot': data_type,
        'of': output_format,
        'srs': srs,
        'resolution': resolution,
        'shape': cutline,
        'outputBounds': bounds,
        'outputBoundsSRS': bounds_srs,
        'outsize': dimensions,
        'targetAlignedPixels': align_pixels,
        'resampleAlg': resampler,
    }
    params.update(pass_through_params)

    if dltile is not None:
        if isinstance(dltile, dict):
            params['dltile'] = dltile['properties']['key']
        else:
            params['dltile'] = dltile

    r = self.session.post('/raster', json=params)

    raw = BytesIO(r.content)

    json_resp = json.loads(raw.readline().decode('utf-8').strip())
    num_files = json_resp['files']
    json_resp['files'] = {}

    for _ in range(num_files):
        file_meta = json.loads(raw.readline().decode('utf-8').strip())
        fn = file_meta['name']
        data = raw.read(file_meta['length'])
        if outfile_basename:
            outfilename = "{}.{}".format(
                outfile_basename,
                ".".join(os.path.basename(fn).split(".")[1:])
            )
        else:
            outfilename = fn
        json_resp['files'][outfilename] = data

    if save:
        for filename, data in six.iteritems(json_resp['files']):
            with open(filename, "wb") as f:
                f.write(data)

    return DotDict(json_resp)
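
# Hedged usage sketch for ``raster``: mosaic two scenes into an RGB GeoTIFF
# clipped to a DLTile and write it to disk. The image identifiers, the tile
# key, and the ``raster_client`` variable are placeholders, not real catalog
# entries.
#
#     result = raster_client.raster(
#         inputs=['product:meta_SCENE1_v1', 'product:meta_SCENE2_v1'],
#         bands=['red', 'green', 'blue', 'alpha'],
#         scales=[(0, 10000), (0, 10000), (0, 10000), None],
#         data_type='Byte',
#         output_format='GTiff',
#         dltile='128:16:960.0:15:-2:37',
#         save=True,
#         outfile_basename='mosaic')
#     result['metadata']  # details of the raster operation (for debugging only)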
def summary(self, products=None, sat_id=None, date='acquired', part=None,
            place=None, geom=None, start_datetime=None, end_datetime=None,
            cloud_fraction=None, cloud_fraction_0=None, fill_fraction=None,
            q=None, pixels=None, dltile=None, **kwargs):
    """Get a summary of the results for the specified spatio-temporal query.

    :param list(str) products: Product identifier(s).
    :param list(str) sat_id: Satellite identifier(s). *Deprecated*
    :param str date: The date field to use for search (e.g. `acquired`).
    :param str part: Part of the date to aggregate over (e.g. `day`).
    :param str place: A slug identifier to be used as a region of interest.
    :param str geom: A GeoJSON or WKT region of interest.
    :param str start_datetime: Desired starting timestamp, in any common format.
    :param str end_datetime: Desired ending timestamp, in any common format.
    :param float cloud_fraction: Maximum cloud fraction, calculated by data provider.
    :param float cloud_fraction_0: Maximum cloud fraction, calculated by cloud mask pixels.
    :param float fill_fraction: Minimum scene fill fraction, calculated as valid/total pixels.
    :param expr q: Expression for filtering the results. See
        :py:attr:`descarteslabs.utilities.properties`.
    :param bool pixels: Whether to include pixel counts in summary calculations.
    :param str dltile: A dltile key used to specify the resolution, bounds, and srs.

    Example usage::

        >>> from descarteslabs.client.services import Metadata
        >>> Metadata().summary(place='north-america_united-states_iowa', \
                products=['landsat:LC08:PRE:TOAR'], start_datetime='2016-07-06', \
                end_datetime='2016-07-07', part='hour', pixels=True)
        {
          'bytes': 93298309,
          'count': 1,
          'items': [
            {
              'bytes': 93298309,
              'count': 1,
              'date': '2016-07-06T16:00:00',
              'pixels': 250508160,
              'timestamp': 1467820800
            }
          ],
          'pixels': 250508160,
          'products': ['landsat:LC08:PRE:TOAR']
        }
    """
    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom='low')
        geom = json.dumps(shape['geometry'])

    if dltile is not None:
        if isinstance(dltile, string_types):
            dltile = self._raster.dltile(dltile)
        if isinstance(dltile, dict):
            geom = dltile['geometry']

    if isinstance(geom, dict):
        geom = json.dumps(geom)

    if sat_id:
        warn(SAT_ID_DEPRECATION_MESSAGE, DeprecationWarning)
        if isinstance(sat_id, string_types):
            sat_id = [sat_id]
        kwargs['sat_id'] = sat_id

    if products:
        if isinstance(products, string_types):
            products = [products]
        kwargs['products'] = products

    if date:
        kwargs['date'] = date

    if part:
        kwargs['part'] = part

    if geom:
        kwargs['geom'] = geom

    if start_datetime:
        kwargs['start_datetime'] = start_datetime

    if end_datetime:
        kwargs['end_datetime'] = end_datetime

    if cloud_fraction is not None:
        kwargs['cloud_fraction'] = cloud_fraction

    if cloud_fraction_0 is not None:
        kwargs['cloud_fraction_0'] = cloud_fraction_0

    if fill_fraction is not None:
        kwargs['fill_fraction'] = fill_fraction

    if q is not None:
        if not isinstance(q, list):
            q = [q]
        kwargs['query_expr'] = AndExpression(q).serialize()

    if pixels:
        kwargs['pixels'] = pixels

    r = self.session.post('/summary', json=kwargs)
    return DotDict(r.json())
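
# Hedged usage sketch for ``summary``: aggregate by day over a WKT region,
# keeping only low-cloud scenes. The product identifier is the one used in the
# docstring examples; the WKT polygon is a placeholder.
#
#     stats = Metadata().summary(
#         products=['landsat:LC08:PRE:TOAR'],
#         geom='POLYGON((-95 41, -94 41, -94 42, -95 42, -95 41))',
#         start_datetime='2016-07-01',
#         end_datetime='2016-08-01',
#         cloud_fraction=0.2,
#         part='day',
#         pixels=True)
#     stats['count'], stats['bytes']  # scene count and total stored bytes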
def paged_search(self, products=None, sat_ids=None, date="acquired",
                 place=None, geom=None, start_datetime=None, end_datetime=None,
                 cloud_fraction=None, cloud_fraction_0=None, fill_fraction=None,
                 q=None, limit=100, fields=None, dltile=None, sort_field=None,
                 sort_order="asc", randomize=None, continuation_token=None,
                 **kwargs):
    """
    Execute a metadata query in a paged manner, with up to 10,000 items per
    page.

    Most clients should use :py:func:`features` instead, which batches
    searches into smaller requests and handles the paging for you.

    :param list(str) products: Product Identifier(s).
    :param list(str) sat_ids: Satellite identifier(s).
    :param str date: The date field to use for search (default is `acquired`).
    :param str place: A slug identifier to be used as a region of interest.
    :param str geom: A GeoJSON or WKT region of interest.
    :param str start_datetime: Desired starting timestamp, in any common format.
    :param str end_datetime: Desired ending timestamp, in any common format.
    :param float cloud_fraction: Maximum cloud fraction, calculated by data provider.
    :param float cloud_fraction_0: Maximum cloud fraction, calculated by cloud mask pixels.
    :param float fill_fraction: Minimum scene fill fraction, calculated as valid/total pixels.
    :param expr q: Expression for filtering the results. See
        :py:attr:`descarteslabs.client.services.metadata.properties`.
    :param int limit: Maximum number of items per page to return.
    :param list(str) fields: Properties to return.
    :param str dltile: A dltile key used to specify the resolution, bounds, and srs.
    :param str sort_field: Property to sort on.
    :param str sort_order: Order of sort.
    :param bool randomize: Randomize the results. You may also use an `int` or
        `str` as an explicit seed.
    :param str continuation_token: None for a new query, or the
        `properties.continuation_token` value from the FeatureCollection
        returned by a previous invocation of this method, to page through a
        large result set.

    :return: GeoJSON ``FeatureCollection`` containing at most `limit` features.
    """
    check_deprecated_kwargs(
        kwargs,
        {
            "product": "products",
            "const_id": "const_ids",
            "sat_id": "sat_ids",
            "start_time": "start_datetime",
            "end_time": "end_datetime",
            "offset": None,
        },
    )

    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom="low")
        geom = json.dumps(shape["geometry"])

    if dltile is not None:
        if isinstance(dltile, string_types):
            dltile = self._raster.dltile(dltile)
        if isinstance(dltile, dict):
            geom = dltile["geometry"]

    if isinstance(geom, dict):
        geom = json.dumps(geom)

    kwargs.update({"date": date, "limit": limit})

    if sat_ids:
        if isinstance(sat_ids, string_types):
            sat_ids = [sat_ids]
        kwargs["sat_ids"] = sat_ids

    if products:
        if isinstance(products, string_types):
            products = [products]
        kwargs["products"] = products

    if geom:
        kwargs["geom"] = geom

    if start_datetime:
        kwargs["start_datetime"] = start_datetime

    if end_datetime:
        kwargs["end_datetime"] = end_datetime

    if cloud_fraction is not None:
        kwargs["cloud_fraction"] = cloud_fraction

    if cloud_fraction_0 is not None:
        kwargs["cloud_fraction_0"] = cloud_fraction_0

    if fill_fraction is not None:
        kwargs["fill_fraction"] = fill_fraction

    if fields is not None:
        kwargs["fields"] = fields

    if q is not None:
        if not isinstance(q, list):
            q = [q]
        kwargs["query_expr"] = AndExpression(q).serialize()

    if sort_field is not None:
        kwargs["sort_field"] = sort_field

    if sort_order is not None:
        kwargs["sort_order"] = sort_order

    if randomize is not None:
        kwargs["random_seed"] = randomize

    if continuation_token is not None:
        kwargs["continuation_token"] = continuation_token

    r = self.session.post("/search", json=kwargs)
    fc = {"type": "FeatureCollection", "features": r.json()}

    if "x-continuation-token" in r.headers:
        fc["properties"] = {
            "continuation_token": r.headers["x-continuation-token"]
        }

    return DotDict(fc)
def summary(self, products=None, sat_ids=None, date="acquired", interval=None,
            place=None, geom=None, start_datetime=None, end_datetime=None,
            cloud_fraction=None, cloud_fraction_0=None, fill_fraction=None,
            q=None, pixels=None, dltile=None, **kwargs):
    """Get a summary of the results for the specified spatio-temporal query.

    :param list(str) products: Product identifier(s).
    :param list(str) sat_ids: Satellite identifier(s).
    :param str date: The date field to use for search (e.g. `acquired`).
    :param str interval: Part of the date to aggregate over (e.g. `day`).
        The list of possibilities is:

        * ``year`` or ``y``
        * ``quarter``
        * ``month`` or ``M``
        * ``week`` or ``q``
        * ``day`` or ``d``
        * ``hour`` or ``h``
        * ``minute`` or ``m``
        * ``product``
    :param str place: A slug identifier to be used as a region of interest.
    :param str geom: A GeoJSON or WKT region of interest.
    :param str start_datetime: Desired starting timestamp, in any common format.
    :param str end_datetime: Desired ending timestamp, in any common format.
    :param float cloud_fraction: Maximum cloud fraction, calculated by data provider.
    :param float cloud_fraction_0: Maximum cloud fraction, calculated by cloud mask pixels.
    :param float fill_fraction: Minimum scene fill fraction, calculated as valid/total pixels.
    :param expr q: Expression for filtering the results. See
        :py:attr:`descarteslabs.client.services.metadata.properties`.
    :param bool pixels: Whether to include pixel counts in summary calculations.
    :param str dltile: A dltile key used to specify the resolution, bounds, and srs.

    Example usage::

        >>> from descarteslabs.client.services import Metadata
        >>> iowa_geom = {
        ...     "coordinates": [[
        ...         [-96.498997, 42.560832],
        ...         [-95.765645, 40.585208],
        ...         [-91.729115, 40.61364],
        ...         [-91.391613, 40.384038],
        ...         [-90.952233, 40.954047],
        ...         [-91.04589, 41.414085],
        ...         [-90.343228, 41.587833],
        ...         [-90.140613, 41.995999],
        ...         [-91.065059, 42.751338],
        ...         [-91.217706, 43.50055],
        ...         [-96.599191, 43.500456],
        ...         [-96.498997, 42.560832]
        ...     ]],
        ...     "type": "Polygon"
        ... }
        >>> Metadata().summary(geom=iowa_geom,
        ...                    products=['landsat:LC08:PRE:TOAR'],
        ...                    start_datetime='2016-07-06',
        ...                    end_datetime='2016-07-07',
        ...                    interval='hour',
        ...                    pixels=True)
        {
          'bytes': 93298309,
          'count': 1,
          'items': [
            {
              'bytes': 93298309,
              'count': 1,
              'date': '2016-07-06T16:00:00',
              'pixels': 250508160,
              'timestamp': 1467820800
            }
          ],
          'pixels': 250508160,
          'products': ['landsat:LC08:PRE:TOAR']
        }
    """
    check_deprecated_kwargs(
        kwargs,
        {
            "product": "products",
            "const_id": "const_ids",
            "sat_id": "sat_ids",
            "start_time": "start_datetime",
            "end_time": "end_datetime",
            "part": "interval",
        },
    )

    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom="low")
        geom = json.dumps(shape["geometry"])

    if dltile is not None:
        if isinstance(dltile, string_types):
            dltile = self._raster.dltile(dltile)
        if isinstance(dltile, dict):
            geom = dltile["geometry"]

    if isinstance(geom, dict):
        geom = json.dumps(geom)

    if sat_ids:
        if isinstance(sat_ids, string_types):
            sat_ids = [sat_ids]
        kwargs["sat_ids"] = sat_ids

    if products:
        if isinstance(products, string_types):
            products = [products]
        kwargs["products"] = products

    if date:
        kwargs["date"] = date

    if interval:
        kwargs["interval"] = interval

    if geom:
        kwargs["geom"] = geom

    if start_datetime:
        kwargs["start_datetime"] = start_datetime

    if end_datetime:
        kwargs["end_datetime"] = end_datetime

    if cloud_fraction is not None:
        kwargs["cloud_fraction"] = cloud_fraction

    if cloud_fraction_0 is not None:
        kwargs["cloud_fraction_0"] = cloud_fraction_0

    if fill_fraction is not None:
        kwargs["fill_fraction"] = fill_fraction

    if q is not None:
        if not isinstance(q, list):
            q = [q]
        kwargs["query_expr"] = AndExpression(q).serialize()

    if pixels:
        kwargs["pixels"] = pixels

    r = self.session.post("/summary", json=kwargs)
    return DotDict(r.json())
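
# Hedged usage sketch for the ``interval`` aliases listed above: 'M' aggregates
# by month, 'd' by day. ``iowa_geom`` is assumed to be defined as in the
# docstring example.
#
#     monthly = Metadata().summary(geom=iowa_geom,
#                                  products=['landsat:LC08:PRE:TOAR'],
#                                  start_datetime='2016-01-01',
#                                  end_datetime='2017-01-01',
#                                  interval='M')
#     [(item['date'], item['count']) for item in monthly['items']]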
def raster(
    self,
    inputs,
    bands=None,
    scales=None,
    data_type=None,
    output_format="GTiff",
    srs=None,
    dimensions=None,
    resolution=None,
    bounds=None,
    bounds_srs=None,
    cutline=None,
    place=None,
    align_pixels=False,
    resampler=None,
    dltile=None,
    processing_level=None,
    save=False,
    outfile_basename=None,
    **pass_through_params
):
    """
    Given a list of catalog image identifiers, retrieve a translated and
    warped mosaic as an image file.

    :param list(str) inputs: List of catalog image identifiers.
    :param list(str) bands: List of requested bands. If the last item in the
        list is an alpha band (with data range ``[0, 1]``) it affects
        rastering of all other bands: When rastering multiple images, they are
        combined image-by-image only where each respective image's alpha band
        is ``1`` (pixels where the alpha band is not ``1`` are "transparent"
        in the overlap between images). If a pixel is fully masked considering
        all combined alpha bands it will be ``0`` in all non-alpha bands. Not
        specifying bands returns all bands in the product.
    :param list(tuple()) scales: List of tuples specifying the scaling to be
        applied to each band. A tuple has 4 elements in the order
        ``(src_min, src_max, out_min, out_max)``, meaning values in the source
        range ``src_min`` to ``src_max`` will be scaled to the output range
        ``out_min`` to ``out_max``. A tuple with 2 elements
        ``(src_min, src_max)`` is also allowed, in which case the output range
        defaults to ``(0, 255)`` (a useful default for the common output type
        ``Byte``). If no scaling is desired for a band, use ``None``. This
        tuple format and behaviour is identical to GDAL's scales during
        translation.

        Example argument: ``[(0, 10000, 0, 127), (0, 1, 0, 1), (0, 10000)]`` -
        the first band will have source values 0-10000 scaled to 0-127, the
        second band will not be scaled, the third band will have 0-10000
        scaled to 0-255.
    :param str output_format: Output format (one of ``GTiff``, ``PNG``,
        ``JPEG``).
    :param str data_type: Output data type (one of ``Byte``, ``UInt16``,
        ``Int16``, ``UInt32``, ``Int32``, ``Float32``, ``Float64``).
    :param str srs: Output spatial reference system definition understood by
        GDAL.
    :param float resolution: Desired resolution in output SRS units.
        Incompatible with ``dimensions``.
    :param tuple dimensions: Desired output ``(width, height)`` in pixels
        within which the raster should fit; i.e. the longer side of the raster
        will be min(dimensions). Incompatible with ``resolution``.
    :param str cutline: A GeoJSON object to be used as a cutline, or WKT
        string. GeoJSON coordinates must be in WGS84 lat-lon.
    :param str place: A slug identifier to be used as a cutline.
    :param tuple bounds: ``(min_x, min_y, max_x, max_y)`` in target SRS.
    :param str bounds_srs: Override the coordinate system in which bounds are
        expressed. If not given, bounds are assumed to be expressed in the
        output SRS.
    :param bool align_pixels: Align pixels to the target coordinate system.
    :param str resampler: Resampling algorithm to be used during warping
        (``near``, ``bilinear``, ``cubic``, ``cubicspline``, ``lanczos``,
        ``average``, ``mode``, ``max``, ``min``, ``med``, ``q1``, ``q3``).
    :param str dltile: A dltile key used to specify the resolution, bounds,
        and srs.
    :param str processing_level: How the processing level of the underlying
        data should be adjusted, one of ``toa`` (top of atmosphere) and
        ``surface``. For products that support it, ``surface`` applies
        Descartes Labs' general surface reflectance algorithm to the output.
    :param bool save: Write resulting files to disk. Default: False.
    :param str outfile_basename: If ``save`` is True, override default
        filename using this string as a base.

    :return: A dictionary with two keys, ``files`` and ``metadata``.

        * ``files``: a dictionary mapping file names to binary data for files
          (at the moment there will always be only a single file with the
          appropriate file extension based on the ``output_format`` requested)
        * ``metadata``: a dictionary containing details about the raster
          operation that happened. These details can be useful for debugging
          but shouldn't otherwise be relied on (there are no guarantees that
          certain keys will be present).

    :rtype: DotDict
    """
    cutline = as_json_string(cutline)

    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom="low")
        cutline = json.dumps(shape["geometry"])

    params = {
        "ids": inputs,
        "bands": bands,
        "scales": scales,
        "ot": data_type,
        "of": output_format,
        "srs": srs,
        "resolution": resolution,
        "shape": cutline,
        "outputBounds": bounds,
        "outputBoundsSRS": bounds_srs,
        "outsize": dimensions,
        "targetAlignedPixels": align_pixels,
        "resampleAlg": resampler,
        "processing_level": processing_level,
    }
    params.update(pass_through_params)

    if dltile is not None:
        if isinstance(dltile, dict):
            params["dltile"] = dltile["properties"]["key"]
        else:
            params["dltile"] = dltile

    r = self.session.post("/raster", json=params)

    raw = BytesIO(r.content)

    json_resp = json.loads(raw.readline().decode("utf-8").strip())
    num_files = json_resp["files"]
    json_resp["files"] = {}

    for _ in range(num_files):
        file_meta = json.loads(raw.readline().decode("utf-8").strip())
        fn = file_meta["name"]
        data = raw.read(file_meta["length"])
        if outfile_basename:
            _, ext = os.path.splitext(os.path.basename(fn))
            outfilename = "{}{}".format(outfile_basename, ext)
        else:
            outfilename = fn
        json_resp["files"][outfilename] = data

    if save:
        for filename, data in six.iteritems(json_resp["files"]):
            with open(filename, "wb") as f:
                f.write(data)

    return DotDict(json_resp)
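
# Hedged usage sketch for ``raster`` with ``processing_level``: request surface
# reflectance for a product that supports it and write a PNG to disk. The image
# identifier, cutline, and ``raster_client`` variable are placeholders.
#
#     result = raster_client.raster(
#         inputs=['sentinel-2:L1C:meta_TILE_v1'],
#         bands=['red', 'green', 'blue', 'alpha'],
#         scales=[(0, 4000), (0, 4000), (0, 4000), None],
#         data_type='Byte',
#         output_format='PNG',
#         resolution=20,
#         cutline='POLYGON((-95 41, -94 41, -94 42, -95 42, -95 41))',
#         processing_level='surface',
#         save=True,
#         outfile_basename='surface_rgb')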
def _query(self, products=None, sat_ids=None, date="acquired", place=None,
           geom=None, start_datetime=None, end_datetime=None,
           cloud_fraction=None, cloud_fraction_0=None, fill_fraction=None,
           q=None, limit=100, fields=None, dltile=None, sort_field=None,
           sort_order="asc", randomize=None, continuation_token=None,
           **kwargs):
    """
    Execute a metadata query for up to 10,000 items.

    Use :py:func:`search` or :py:func:`features` instead, which batch searches
    into smaller requests and handle paging for you.
    """
    check_deprecated_kwargs(
        kwargs,
        {
            "product": "products",
            "const_id": "const_ids",
            "sat_id": "sat_ids",
            "start_time": "start_datetime",
            "end_time": "end_datetime",
            "offset": None,
        },
    )

    if place:
        places = Places()
        places.auth = self.auth
        shape = places.shape(place, geom="low")
        geom = json.dumps(shape["geometry"])

    if dltile is not None:
        if isinstance(dltile, string_types):
            dltile = self._raster.dltile(dltile)
        if isinstance(dltile, dict):
            geom = dltile["geometry"]

    if isinstance(geom, dict):
        geom = json.dumps(geom)

    kwargs.update({"date": date, "limit": limit})

    if sat_ids:
        if isinstance(sat_ids, string_types):
            sat_ids = [sat_ids]
        kwargs["sat_ids"] = sat_ids

    if products:
        if isinstance(products, string_types):
            products = [products]
        kwargs["products"] = products

    if geom:
        kwargs["geom"] = geom

    if start_datetime:
        kwargs["start_datetime"] = start_datetime

    if end_datetime:
        kwargs["end_datetime"] = end_datetime

    if cloud_fraction is not None:
        kwargs["cloud_fraction"] = cloud_fraction

    if cloud_fraction_0 is not None:
        kwargs["cloud_fraction_0"] = cloud_fraction_0

    if fill_fraction is not None:
        kwargs["fill_fraction"] = fill_fraction

    if fields is not None:
        kwargs["fields"] = fields

    if q is not None:
        if not isinstance(q, list):
            q = [q]
        kwargs["query_expr"] = AndExpression(q).serialize()

    if sort_field is not None:
        kwargs["sort_field"] = sort_field

    if sort_order is not None:
        kwargs["sort_order"] = sort_order

    if randomize is not None:
        kwargs["random_seed"] = randomize

    if continuation_token is not None:
        kwargs["continuation_token"] = continuation_token

    r = self.session.post("/search", json=kwargs)
    fc = {"type": "FeatureCollection", "features": r.json()}

    if "x-continuation-token" in r.headers:
        fc["properties"] = {
            "continuation_token": r.headers["x-continuation-token"]
        }

    return DotDict(fc)