def get_sources(self, bounds, resolution):
        bounds, bounds_crs = bounds
        zoom = get_zoom(max(resolution))

        self._log.info("Resolution: %s; equivalent zoom: %d", resolution, zoom)

        left, bottom, right, top = warp.transform_bounds(
            bounds_crs, WGS84_CRS, *bounds)

        # account for rounding errors when converting between tiles and coords
        left += 0.000001
        bottom += 0.000001
        right -= 0.000001
        top -= 0.000001

        if (self._bounds[0] <= left <= self._bounds[2]
                or self._bounds[0] <= right <= self._bounds[2]) and (
                    self._bounds[1] <= bottom <= self._bounds[3]
                    or self._bounds[1] <= top <= self._bounds[3]) and (
                        self._minzoom <= zoom <= self._maxzoom):
            tile = mercantile.bounding_tile(left, bottom, right, top)

            with requests.get(
                    self.endpoint.format(x=tile.x, y=tile.y, z=tile.z)) as rsp:
                if not rsp:
                    self._log.warning("%s failed: %s", rsp.url, rsp.text)
                    return

                for source in rsp.json():
                    yield Source(**source)

    def __init__(self, uri):
        try:
            if uri.startswith("s3://"):
                url = urlparse(uri)
                obj = S3.get_object(Bucket=url.netloc, Key=url.path[1:])
                oin_meta = json.loads(obj["Body"].read().decode("utf-8"))
            elif uri.startswith(("http://", "https://")):
                oin_meta = requests.get(uri).json()
            else:
                raise NoCatalogAvailable()
        except Exception:
            raise NoCatalogAvailable()

        self._meta = oin_meta
        self._metadata_url = uri
        self._name = oin_meta.get("title")
        self._provider = oin_meta.get("provider")
        self._source = oin_meta.get("uuid")

        with get_source(self._source) as src:
            self._bounds = warp.transform_bounds(src.crs, WGS84_CRS,
                                                 *src.bounds)
            self._resolution = get_resolution_in_meters(
                Bounds(src.bounds, src.crs), (src.height, src.width))
            approximate_zoom = get_zoom(max(self._resolution), op=math.ceil)

            if src.meta["dtype"] != "uint8":
                global_min = src.get_tag_item("TIFFTAG_MINSAMPLEVALUE")
                global_max = src.get_tag_item("TIFFTAG_MAXSAMPLEVALUE")

                for band in range(0, src.count):
                    self._meta["values"] = self._meta.get("values", {})
                    self._meta["values"][band] = {}
                    min_val = src.get_tag_item("STATISTICS_MINIMUM",
                                               bidx=band + 1)
                    max_val = src.get_tag_item("STATISTICS_MAXIMUM",
                                               bidx=band + 1)
                    mean_val = src.get_tag_item("STATISTICS_MEAN",
                                                bidx=band + 1)

                    if min_val is not None:
                        self._meta["values"][band]["min"] = float(min_val)
                    elif global_min is not None:
                        self._meta["values"][band]["min"] = float(global_min)

                    if max_val is not None:
                        self._meta["values"][band]["max"] = float(max_val)
                    elif global_max is not None:
                        self._meta["values"][band]["max"] = float(global_max)

                    if mean_val is not None:
                        self._meta["values"][band]["mean"] = float(mean_val)

        self._center = [
            (self._bounds[0] + self.bounds[2]) / 2,
            (self._bounds[1] + self.bounds[3]) / 2,
            approximate_zoom - 3,
        ]
        self._maxzoom = approximate_zoom + 3
        self._minzoom = approximate_zoom - 10
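A minimal usage sketch for the catalog above (the class name OINMetaCatalog, the metadata URL, and the WGS84_CRS constant are assumptions, not part of this listing):

    from rasterio.crs import CRS

    WGS84_CRS = CRS.from_epsg(4326)

    # get_sources() expects ((left, bottom, right, top), crs) plus an
    # (x, y) resolution in metres per pixel.
    catalog = OINMetaCatalog("https://example.com/oin/metadata.json")
    bounds = ((-122.52, 37.70, -122.35, 37.83), WGS84_CRS)
    resolution = (9.55, 9.55)  # roughly zoom 14

    for source in catalog.get_sources(bounds, resolution):
        print(source)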
Example #3
    def _candidates(self, bounds, resolution):
        cursor = self.conn.cursor()

        zoom = get_zoom(max(resolution))
        if bounds.crs == WGS84_CRS:
            left, bottom, right, top = bounds.bounds
        else:
            left, bottom, right, top = warp.transform_bounds(
                bounds.crs, WGS84_CRS, *bounds.bounds)

        # clamp non-finite bounds to the whole world
        left = left if left != -Infinity else -180
        bottom = bottom if bottom != -Infinity else -90
        right = right if right != Infinity else 180
        top = top if top != Infinity else 90

        try:
            cursor.execute(
                """
WITH bbox AS (
  SELECT SetSRID(
    GeomFromText('BOX({minx} {miny}, {maxx} {maxy})'),
    4326) geom
),
sources AS (
  SELECT
     url,
     source,
     resolution,
     coalesce(band_info, '{{}}') band_info,
     coalesce(meta, '{{}}') meta,
     coalesce(recipes, '{{}}') recipes,
     acquired_at,
     priority,
     ST_Multi(footprints.geom) geom,
     min_zoom,
     max_zoom
   FROM footprints
   JOIN bbox ON ST_Intersects(footprints.geom, bbox.geom)
   WHERE ? BETWEEN min_zoom AND max_zoom
)
SELECT
  url,
  source,
  resolution,
  band_info,
  meta,
  recipes,
  acquired_at,
  null band,
  priority
FROM sources
            """.format(minx=left, miny=bottom, maxx=right, maxy=top), (zoom, ))

            for record in cursor:
                yield Source(*record)
        except Exception as e:
            LOG.warning(e)
        finally:
            cursor.close()
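The query above relies on self.conn being a SQLite connection with the SpatiaLite extension loaded so that SetSRID, GeomFromText and ST_Intersects are available. A sketch of how such a connection might be prepared (the helper name and extension filename are assumptions; the filename varies by platform):

    import sqlite3

    def open_spatialite(path):
        # Load SpatiaLite so the spatial functions used by the
        # catalog queries are available on this connection.
        conn = sqlite3.connect(path)
        conn.enable_load_extension(True)
        conn.load_extension("mod_spatialite")
        conn.enable_load_extension(False)
        return conn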
Example #4
    def __init__(self, uri):
        rsp = requests.get(uri)

        if not rsp.ok:
            raise NoDataAvailable()

        oin_meta = rsp.json()
        self._meta = oin_meta
        self._metadata_url = uri
        self._name = oin_meta.get('title')
        self._provider = oin_meta.get('provider')
        self._source = oin_meta.get('uuid')

        with get_source(self._source) as src:
            self._bounds = warp.transform_bounds(src.crs, WGS84_CRS,
                                                 *src.bounds)
            self._resolution = get_resolution_in_meters(
                Bounds(src.bounds, src.crs), (src.height, src.width))
            approximate_zoom = get_zoom(max(self._resolution), op=math.ceil)

        self._center = [(self._bounds[0] + self.bounds[2]) / 2,
                        (self._bounds[1] + self.bounds[3]) / 2,
                        approximate_zoom - 3]
        self._maxzoom = approximate_zoom + 3
        self._minzoom = approximate_zoom - 10
Example #5
    def __init__(self, uri, rgb=None, nodata=None, linear_stretch=None, resample=None):
        self._uri = uri

        if rgb:
            self._rgb = rgb

        if nodata:
            self._nodata = nodata

        if linear_stretch:
            self._linear_stretch = linear_stretch

        try:
            # test whether provided resampling method is valid
            Resampling[resample]
            self._resample = resample
        except KeyError:
            self._resample = None

        self._meta = {}

        with get_source(self._uri) as src:
            self._bounds = warp.transform_bounds(src.crs, WGS84_CRS, *src.bounds)
            self._resolution = get_resolution_in_meters(
                Bounds(src.bounds, src.crs), (src.height, src.width)
            )
            approximate_zoom = get_zoom(max(self._resolution), op=math.ceil)

            global_min = src.get_tag_item("TIFFTAG_MINSAMPLEVALUE")
            global_max = src.get_tag_item("TIFFTAG_MAXSAMPLEVALUE")

            for band in range(0, src.count):
                self._meta["values"] = self._meta.get("values", {})
                self._meta["values"][band] = {}
                min_val = src.get_tag_item("STATISTICS_MINIMUM", bidx=band + 1)
                max_val = src.get_tag_item("STATISTICS_MAXIMUM", bidx=band + 1)
                mean_val = src.get_tag_item("STATISTICS_MEAN", bidx=band + 1)

                if min_val is not None:
                    self._meta["values"][band]["min"] = float(min_val)
                elif global_min is not None:
                    self._meta["values"][band]["min"] = float(global_min)

                if max_val is not None:
                    self._meta["values"][band]["max"] = float(max_val)
                elif global_max is not None:
                    self._meta["values"][band]["max"] = float(global_max)

                if mean_val is not None:
                    self._meta["values"][band]["mean"] = float(mean_val)

        self._center = [
            (self._bounds[0] + self.bounds[2]) / 2,
            (self._bounds[1] + self.bounds[3]) / 2,
            approximate_zoom - 3,
        ]
        self._maxzoom = approximate_zoom + 3
        self._minzoom = approximate_zoom - 10
Example #6
    def get_sources(self, bounds, resolution):
        bounds, bounds_crs = bounds
        zoom = get_zoom(max(resolution))
        left, bottom, right, top = warp.transform_bounds(
            bounds_crs, WGS84_CRS, *bounds)

        if (self._bounds[0] <= left <= self._bounds[2]
                or self._bounds[0] <= right <= self._bounds[2]) and (
                    self._bounds[1] <= bottom <= self._bounds[3]
                    or self._bounds[1] <= top <= self._bounds[3]) and (
                        self._minzoom <= zoom <= self._maxzoom):
            yield Source(self._source, self._name, self._resolution, {}, {},
                         {"imagery": True})
Example #7
    def get_sources(self, bounds, resolution):
        from shapely.geometry import box

        bounds, bounds_crs = bounds

        results = []
        zoom = get_zoom(max(resolution))
        ((left, right),
         (bottom, top)) = warp.transform(bounds_crs, WGS84_CRS, bounds[::2],
                                         bounds[1::2])
        bounds_geom = box(left, bottom, right, top)
        bounds_centroid = bounds_geom.centroid

        # Filter by zoom level and intersecting geometries
        for candidate in self._sources:
            (geom, attr) = candidate
            if attr['min_zoom'] <= zoom < attr['max_zoom'] and \
               geom.intersects(bounds_geom):
                results.append(candidate)

        # Sort by priority, resolution, and centroid distance
        results = sorted(
            results,
            key=lambda candidate: (
                candidate[1]['priority'],
                int(candidate[1]['resolution']),
                bounds_centroid.distance(candidate[0].centroid),
            ))

        # Remove duplicate URLs
        # From https://stackoverflow.com/a/480227
        seen = set()
        seen_add = seen.add
        results = [
            x for x in results
            if not (x[1]['url'] in seen or seen_add(x[1]['url']))
        ]

        # Pick only the attributes we care about
        results = [(a['url'], a['source'], a['resolution'])
                   for (_, a) in results]

        return results
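This variant filters an in-memory index rather than a database. self._sources is assumed to be an iterable of (shapely geometry, attribute dict) pairs; a hypothetical loader that builds it from a GeoJSON footprint collection (file layout and property names are assumptions based on the attributes referenced above):

    import json

    from shapely.geometry import shape

    def load_sources(geojson_path):
        # Build (geometry, attributes) pairs; attributes are expected to
        # carry url, source, resolution, priority, min_zoom and max_zoom.
        with open(geojson_path) as f:
            collection = json.load(f)

        return [(shape(feature["geometry"]), feature["properties"])
                for feature in collection["features"]]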
Example #8
    def get_sources(self, bounds, resolution):
        cursor = self.conn.cursor()

        # TODO this is becoming relatively standard catalog boilerplate
        zoom = get_zoom(max(resolution))
        if bounds.crs == WGS84_CRS:
            left, bottom, right, top = bounds.bounds
        else:
            left, bottom, right, top = warp.transform_bounds(
                bounds.crs, WGS84_CRS, *bounds.bounds)

        # clamp non-finite bounds to the whole world
        left = left if left != -Infinity else -180
        bottom = bottom if bottom != -Infinity else -90
        right = right if right != Infinity else 180
        top = top if top != Infinity else 90

        try:
            query = """
WITH bbox AS (
  SELECT SetSRID(GeomFromGeoJSON(?), 4326) geom
),
uncovered AS (
  SELECT SetSRID(GeomFromGeoJSON(?), 4326) geom
),
date_range AS (
  SELECT
  COALESCE(min(acquired_at), date('1970-01-01')) min,
  COALESCE(max(acquired_at), date('1970-01-01')) max
  FROM footprints
)
SELECT
  url,
  source,
  resolution,
  coalesce(band_info, '{{}}') band_info,
  coalesce(meta, '{{}}') meta,
  coalesce(recipes, '{{}}') recipes,
  acquired_at,
  null band, -- for Source constructor compatibility
  priority,
  ST_Area(ST_Intersection(uncovered.geom, ST_Difference(footprints.geom, footprints.mask))) /
  ST_Area(bbox.geom) coverage,
  AsGeoJSON(ST_Intersection(bbox.geom, footprints.geom)) geom,
  AsGeoJSON(ST_Intersection(footprints.mask, bbox.geom)) mask,
  AsGeoJSON(ST_Difference(uncovered.geom, ST_Difference(footprints.geom, footprints.mask))) uncovered
FROM bbox, date_range, footprints
JOIN uncovered ON ST_Intersects(footprints.geom, uncovered.geom)
WHERE footprints.source || ' - ' || footprints.url NOT IN ({id_placeholders})
  AND ? BETWEEN min_zoom AND max_zoom
ORDER BY
  10 * coalesce(footprints.priority, 0.5) *
  .1 * (1 - (strftime('%s') -
         strftime('%s', COALESCE(acquired_at, date('2000-01-01')))) /
        (strftime('%s') - strftime('%s', date_range.min))) *
  50 *
    -- de-prioritize over-zoomed sources
    CASE WHEN ? / footprints.resolution >= 1
    THEN 1
    ELSE 1 / footprints.resolution
    END *
  ST_Area(
    ST_Intersection(bbox.geom, footprints.geom)) /
    ST_Area(bbox.geom) DESC
LIMIT 1
      """

            bbox = json.dumps({
                "type":
                "Polygon",
                "coordinates": [[
                    [left, bottom],
                    [left, top],
                    [right, top],
                    [right, bottom],
                    [left, bottom],
                ]],
            })

            uncovered = bbox
            ids = set()

            while True:
                id_placeholders = ", ".join("?" * len(ids))
                cursor.execute(
                    query.format(id_placeholders=id_placeholders),
                    (bbox, uncovered) + tuple(ids) + (zoom, min(resolution)),
                )

                count = 0
                for record in cursor:
                    count += 1
                    (
                        url,
                        source,
                        res,
                        band_info,
                        meta,
                        recipes,
                        acquired_at,
                        band,
                        priority,
                        coverage,
                        _,
                        mask,
                        uncovered,
                    ) = record

                    if mask is not None:
                        mask = json.loads(mask)

                    yield Source(
                        url,
                        source,
                        res,
                        json.loads(band_info),
                        json.loads(meta),
                        json.loads(recipes),
                        acquired_at,
                        band,
                        priority,
                        coverage,
                        mask=mask,
                    )

                    ids.add(source + " - " + url)

                if count == 0 or uncovered is None:
                    break

        except Exception as e:
            LOG.exception(e)
        finally:
            cursor.close()
Example #9
    def _fill_bounds(self, bounds, resolution, include_geometries=False):
        zoom = get_zoom(max(resolution))
        query = """
            WITH RECURSIVE bbox AS (
              SELECT ST_SetSRID(
                    'BOX(%(minx)s %(miny)s, %(maxx)s %(maxy)s)'::box2d,
                    4326) geom
            ),
            date_range AS (
              SELECT
                COALESCE(min(acquired_at), '1970-01-01') min,
                COALESCE(max(acquired_at), '1970-01-01') max,
                age(COALESCE(max(acquired_at), '1970-01-01'),
                    COALESCE(min(acquired_at), '1970-01-01')) "interval"
              FROM {table}
            ),
            sources AS (
              SELECT * FROM (
                SELECT
                  1 iterations,
                  ARRAY[url] urls,
                  ARRAY[source] sources,
                  ARRAY[resolution] resolutions,
                  ARRAY[coalesce(bands, '{{}}'::jsonb)] bands,
                  ARRAY[coalesce(meta, '{{}}'::jsonb)] metas,
                  ARRAY[coalesce(recipes, '{{}}'::jsonb)] recipes,
                  ARRAY[acquired_at] acquisition_dates,
                  ARRAY[priority] priorities,
                  ARRAY[ST_Area(ST_Intersection(bbox.geom, footprints.geom)) /
                    ST_Area(bbox.geom)] coverages,
                  ARRAY[ST_Multi(footprints.geom)] geometries,
                  ST_Multi(footprints.geom) geom,
                  ST_Difference(bbox.geom, footprints.geom) uncovered
                FROM date_range, {table} footprints
                JOIN bbox ON footprints.geom && bbox.geom
                WHERE %(zoom)s BETWEEN min_zoom AND max_zoom
                  AND footprints.enabled = true
                ORDER BY
                  10 * coalesce(footprints.priority, 0.5) *
                    .1 * (1 - (extract(
                      EPOCH FROM (current_timestamp - COALESCE(
                        acquired_at, '2000-01-01'))) /
                        extract(
                          EPOCH FROM (current_timestamp - date_range.min)))) *
                    50 *
                      -- de-prioritize over-zoomed sources
                      CASE WHEN %(resolution)s / footprints.resolution >= 1
                        THEN 1
                        ELSE 1 / footprints.resolution
                      END *
                    ST_Area(
                        ST_Intersection(bbox.geom, footprints.geom)) /
                      ST_Area(bbox.geom) DESC
                LIMIT 1
              ) AS _
              UNION ALL
              SELECT * FROM (
                SELECT
                  sources.iterations + 1,
                  sources.urls || url urls,
                  sources.sources || source sources,
                  sources.resolutions || resolution resolutions,
                  sources.bands || coalesce(
                    footprints.bands, '{{}}'::jsonb) bands,
                  sources.metas || coalesce(meta, '{{}}'::jsonb) metas,
                  sources.recipes || coalesce(
                    footprints.recipes, '{{}}'::jsonb) recipes,
                  sources.acquisition_dates || footprints.acquired_at
                    acquisition_dates,
                  sources.priorities || footprints.priority priorities,
                  sources.coverages || ST_Area(
                    ST_Intersection(sources.uncovered, footprints.geom)) /
                    ST_Area(bbox.geom) coverages,
                  sources.geometries || footprints.geom,
                  ST_Collect(sources.geom, footprints.geom) geom,
                  ST_Difference(sources.uncovered, footprints.geom) uncovered
                FROM bbox, date_range, {table} footprints
                -- use proper intersection to prevent voids from irregular
                -- footprints
                JOIN sources ON ST_Intersects(
                    footprints.geom, sources.uncovered)
                WHERE NOT (footprints.url = ANY(sources.urls))
                  AND %(zoom)s BETWEEN min_zoom AND max_zoom
                  AND footprints.enabled = true
                ORDER BY
                  10 * coalesce(footprints.priority, 0.5) *
                    .1 * (1 - (extract(
                      EPOCH FROM (current_timestamp - COALESCE(
                        acquired_at, '2000-01-01'))) /
                        extract(
                          EPOCH FROM (current_timestamp - date_range.min)))) *
                    50 *
                      -- de-prioritize over-zoomed sources
                      CASE WHEN %(resolution)s / footprints.resolution >= 1
                        THEN 1
                        ELSE 1 / footprints.resolution
                      END *
                    ST_Area(
                        ST_Intersection(sources.uncovered, footprints.geom)) /
                        ST_Area(bbox.geom) DESC
                LIMIT 1
              ) AS _
            ),
            candidates AS (
                SELECT *
                FROM sources
                ORDER BY iterations DESC
                LIMIT 1
            ), candidate_rows AS (
                SELECT
                  unnest(urls) url,
                  unnest(sources) source,
                  unnest(resolutions) resolution,
                  unnest(bands) bands,
                  unnest(metas) meta,
                  unnest(recipes) recipes,
                  unnest(acquisition_dates) acquired_at,
                  unnest(priorities) priority,
                  unnest(coverages) coverage,
                  unnest(geometries) geom
                FROM candidates
            )
            SELECT
              url,
              source,
              resolution,
              bands,
              meta,
              recipes,
              acquired_at,
              null band,
              priority,
              coverage,
              CASE WHEN {include_geometries}
                  THEN ST_AsGeoJSON(geom)
                  ELSE 'null'
              END geom
            FROM candidate_rows
        """.format(table=self.table,
                   geometry_column=self.geometry_column,
                   include_geometries=bool(include_geometries))

        if bounds.crs == WGS84_CRS:
            left, bottom, right, top = bounds.bounds
        else:
            left, bottom, right, top = warp.transform_bounds(
                bounds.crs, WGS84_CRS, *bounds.bounds)

        connection = self._pool.getconn()
        try:
            with connection as conn, conn.cursor() as cur:
                cur.execute(
                    query, {
                        "minx": left if left != Infinity else -180,
                        "miny": bottom if bottom != Infinity else -90,
                        "maxx": right if right != Infinity else 180,
                        "maxy": top if top != Infinity else 90,
                        "zoom": zoom,
                        "resolution": min(resolution),
                    })

                for record in cur:
                    yield Source(*record[:-1], geom=json.loads(record[-1]))
        except Exception as e:
            self._log.error(e)
        finally:
            self._pool.putconn(connection)
Example #10
    def transform(self, pixels):
        data, (bounds, crs), _ = pixels
        (count, height, width) = data.shape

        if count != 1:
            raise Exception("Can't hillshade from multiple bands")

        (dx, dy) = get_resolution_in_meters(pixels.bounds, (height, width))
        zoom = get_zoom(max(dx, dy))
        # invert resolutions for hillshading purposes
        dy *= -1

        data = apply_latitude_adjustments(pixels).data

        resample_factor = RESAMPLING.get(zoom, 1.0)
        aff = transform.from_bounds(*bounds, width=width, height=height)

        if self.resample and resample_factor != 1.0:
            # resample data according to Tom Paterson's chart

            # create an empty target array that's the shape of the resampled
            # tile (e.g. 80% of 260x260px)
            resampled_height = int(round(height * resample_factor))
            resampled_width = int(round(width * resample_factor))
            resampled = np.empty(
                shape=(resampled_height, resampled_width), dtype=data.dtype)
            resampled_mask = np.empty(shape=(resampled.shape))

            newaff = transform.from_bounds(
                *bounds, width=resampled_width, height=resampled_height)

            # downsample using GDAL's reprojection functionality (which gives
            # us access to different resampling algorithms)
            warp.reproject(
                data,
                resampled,
                src_transform=aff,
                dst_transform=newaff,
                src_crs=crs,
                dst_crs=crs,
                resampling=Resampling.bilinear, )

            # reproject / resample the mask so that intermediate operations
            # can also use it
            if np.any(data.mask):
                warp.reproject(
                    data.mask.astype(np.uint8),
                    resampled_mask,
                    src_transform=aff,
                    dst_transform=newaff,
                    src_crs=crs,
                    dst_crs=crs,
                    resampling=Resampling.nearest, )

                resampled = np.ma.masked_array(resampled, mask=resampled_mask)
            else:
                resampled = np.ma.masked_array(resampled)

            hs = _hillshade(
                resampled,
                dx=dx,
                dy=dy,
                vert_exag=EXAGGERATION.get(zoom, 1.0), )

            if self.add_slopeshade:
                ss = slopeshade(
                    resampled,
                    dx=dx,
                    dy=dy,
                    vert_exag=EXAGGERATION.get(zoom, 1.0))

                hs *= ss

            # scale hillshade values (0.0-1.0) to integers (0-255)
            hs = (255.0 * hs).astype(np.uint8)

            # create an empty target array that's the shape of the target tile
            # + buffers (e.g. 260x260px)
            resampled_hs = np.empty(shape=data.shape, dtype=hs.dtype)

            # upsample (invert the previous reprojection)
            warp.reproject(
                hs.data,
                resampled_hs,
                src_transform=newaff,
                dst_transform=aff,
                src_crs=crs,
                dst_crs=crs,
                resampling=Resampling.bilinear, )

            hs = np.ma.masked_array(resampled_hs, mask=data.mask)
        else:
            hs = _hillshade(
                data[0],
                dx=dx,
                dy=dy,
                vert_exag=EXAGGERATION.get(zoom, 1.0), )

            if self.add_slopeshade:
                ss = slopeshade(
                    data[0],
                    dx=dx,
                    dy=dy,
                    vert_exag=EXAGGERATION.get(zoom, 1.0))

                # hs *= 0.8
                hs *= ss

            hs = np.ma.masked_array(hs[np.newaxis], mask=data.mask)

            # scale hillshade values (0.0-1.0) to integers (0-255)
            hs = (255.0 * hs).astype(np.uint8)

        hs.fill_value = 0

        return PixelCollection(hs, pixels.bounds), "raw"
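The transform above looks per-zoom factors up in module-level RESAMPLING and EXAGGERATION dicts (keyed by zoom, defaulting to 1.0). Illustrative values only, to show the expected shape of those tables; the real numbers follow Tom Paterson's chart and are defined elsewhere in the module:

    # zoom -> vertical exaggeration passed to _hillshade / slopeshade
    EXAGGERATION = {10: 2.0, 11: 1.5, 12: 1.2}

    # zoom -> fraction of the tile size to downsample to before shading
    RESAMPLING = {10: 0.8, 11: 0.9}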
Example #11
    def __init__(self, uri, rgb=None, nodata=None, linear_stretch=None, resample=None, 
        dst_max=None, dst_min=None, force_cast=None, to_vis=None):
        self._uri = uri
        self._rgb = rgb
        self._nodata = nodata
        self._linear_stretch = linear_stretch
        self._dst_min = dst_min
        self._dst_max = dst_max
        self._force_cast = force_cast
        self._to_vis = to_vis
        try:
            # test whether provided resampling method is valid
            Resampling[resample]
            self._resample = resample
        except KeyError:
            self._resample = None
        self._meta = {}
        self.src_meta = {}

        with get_source(self._uri) as src:
            self.src_meta = snake_case_to_camel_case_keys_of_dict(src.tags())
            self.src_meta["bandCount"] = src.count
            self._bounds = warp.transform_bounds(src.crs, WGS84_CRS, *src.bounds)
            self._resolution = get_resolution_in_meters(
                Bounds(src.bounds, src.crs), (src.height, src.width)
            )
            approximate_zoom = get_zoom(max(self._resolution), op=math.ceil)
            
            global_min = src.get_tag_item("TIFFTAG_MINSAMPLEVALUE")
            global_max = src.get_tag_item("TIFFTAG_MAXSAMPLEVALUE")

            band_order = src.get_tag_item("BAND_ORDER")
            if band_order is not None:
                band_order = band_order.split(',')
            if str(self._rgb).lower() == "metadata":
                if band_order is not None:
                    def get_band_from_band_order(band_order, band_name, fallback):
                        if band_name in band_order:
                            return str(band_order.index(band_name) + 1)
                        else:
                            return fallback
                    red_band = get_band_from_band_order(band_order, "RED", "1")
                    green_band = get_band_from_band_order(band_order, "GRE", "2")
                    blue_band = get_band_from_band_order(band_order, "BLU", "3")
                    self._rgb = ",".join([red_band, green_band, blue_band])
                else:
                    # Fallback
                    if src.count >= 3:
                        self._rgb = "1,2,3"
                    else:
                        self._rgb = "1,1,1"
            
            self.src_meta["bandMetadata"] = {}
            # FarmLens specific
            band_assignments = band_order
            if band_order is None:
                band_assignments = range(0, src.count)
            for band in range(0, src.count):
                self.src_meta["bandMetadata"][band_assignments[band]] = \
                    snake_case_to_camel_case_keys_of_dict(src.tags(bidx=band + 1))
                self._meta["values"] = self._meta.get("values", {})
                self._meta["values"][band] = {}
                min_val = src.get_tag_item("STATISTICS_MINIMUM", bidx=band + 1)
                max_val = src.get_tag_item("STATISTICS_MAXIMUM", bidx=band + 1)
                mean_val = src.get_tag_item("STATISTICS_MEAN", bidx=band + 1)
                stddev_val = src.get_tag_item("STATISTICS_STDDEV", bidx=band + 1)
                
                if min_val is not None:
                    self._meta["values"][band]["min"] = float(min_val)
                elif global_min is not None:
                    self._meta["values"][band]["min"] = float(global_min)

                if max_val is not None:
                    self._meta["values"][band]["max"] = float(max_val)
                elif global_max is not None:
                    self._meta["values"][band]["max"] = float(global_max)

                if mean_val is not None:
                    self._meta["values"][band]["mean"] = float(mean_val)

                if stddev_val is not None:
                    self._meta["values"][band]["stddev"] = float(stddev_val)

        self._center = [
            (self._bounds[0] + self.bounds[2]) / 2,
            (self._bounds[1] + self.bounds[3]) / 2,
            approximate_zoom - 3,
        ]
        self._maxzoom = approximate_zoom + 3
        self._minzoom = approximate_zoom - 10