Example #1
    def openGeoJson(self, check_field, filename):

        driver = ogr.GetDriverByName("GeoJSON")
        dataSource = driver.Open(filename, 0)
        layer = dataSource.GetLayer()

        wfs_result = dict()
        for feat in layer:

            #create geometry object
            geom = feat.GetGeometryRef()
            if geom is not None:
                sr = osr.SpatialReference()
                sr.ImportFromEPSG(3857)
                geom_type = geom.GetGeometryType()
                geom.TransformTo(sr)

                if geom_type == ogr.wkbLineString:
                    mercator_geom = ogr.ForceToLineString(geom)
                elif geom_type == ogr.wkbPolygon:
                    mercator_geom = ogr.ForceToPolygon(geom)
                elif geom_type == ogr.wkbPoint:
                    mercator_geom = ogr.ForceToMultiPoint(geom)
                elif geom_type == ogr.wkbMultiPolygon:
                    mercator_geom = ogr.ForceToMultiPolygon(geom)
                elif geom_type == ogr.wkbMultiPoint:
                    mercator_geom = ogr.ForceToMultiPoint(geom)
                elif geom_type == ogr.wkbMultiLineString:
                    mercator_geom = ogr.ForceToMultiLineString(geom)
                else:
                    mercator_geom = geom
            else:
                continue

            #Read broker fields

            feat_defn = layer.GetLayerDefn()
            wfs_fields = dict()

            for i in range(feat_defn.GetFieldCount()):
                field_defn = feat_defn.GetFieldDefn(i)
                #if field_defn.GetName() == 'gml_id':
                #    continue

                #Compare by one control field

                if field_defn.GetName() == check_field:
                    check_field_val = feat.GetFieldAsString(i).decode('utf-8')

                #Read fields
                if field_defn.GetType() == ogr.OFTInteger:
                    wfs_fields[field_defn.GetName()] = feat.GetFieldAsInteger(i)
                elif field_defn.GetType() == ogr.OFTReal:
                    wfs_fields[field_defn.GetName()] = feat.GetFieldAsDouble(i)
                elif field_defn.GetType() == ogr.OFTString:
                    wfs_fields[field_defn.GetName()] = feat.GetFieldAsString(i).decode('utf-8')
                else:
                    wfs_fields[field_defn.GetName()] = feat.GetFieldAsString(i).decode('utf-8')

            #Object with keys - as values of one control field
            wfs_result[check_field_val] = dict()
            wfs_result[check_field_val]['id'] = check_field_val
            wfs_result[check_field_val]['fields'] = wfs_fields
            wfs_result[check_field_val]['geom'] = mercator_geom.Clone()

        # Plain dicts do not keep insertion order on Python 2, so an OrderedDict
        # (from collections import OrderedDict) is used to return the features
        # sorted by the control field value.
        layer_result_sorted = OrderedDict()
        for key in sorted(wfs_result):
            layer_result_sorted[key] = wfs_result[key]

        return layer_result_sorted
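The snippet above assumes the GDAL/OGR Python bindings (from osgeo import ogr, osr) and Python 2: GetFieldAsString() returns bytes there, hence the .decode('utf-8') calls, which the Python 3 bindings do not need. A minimal, self-contained sketch of the normalization step inside its feature loop, using an illustrative WKT polygon in place of a GeoJSON feature, might look like this:

from osgeo import ogr

# Illustrative input: a plain polygon, as commonly found in GeoJSON features.
geom = ogr.CreateGeometryFromWkt('POLYGON((0 0,0 1,1 1,1 0,0 0))')

# ForceToMultiPolygon wraps a single polygon in a multipolygon container;
# geometries that are already multipolygons pass through unchanged.
multi = ogr.ForceToMultiPolygon(geom)
print(multi.GetGeometryName())   # MULTIPOLYGON
print(multi.GetGeometryCount())  # 1 member polygon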
Example #2
def ogr_factory_6():

    src_wkt_list = [ None,
                     'POINT EMPTY',
                     'LINESTRING EMPTY',
                     'POLYGON EMPTY',
                     'MULTIPOINT EMPTY',
                     'MULTILINESTRING EMPTY',
                     'MULTIPOLYGON EMPTY',
                     'GEOMETRYCOLLECTION EMPTY',
                     'POINT(0 0)',
                     'LINESTRING(0 0)',
                     'POLYGON((0 0))',
                     'POLYGON(EMPTY,(0 0),EMPTY,(1 1))',
                     'MULTIPOINT(EMPTY,(0 0),EMPTY,(1 1))',
                     'MULTILINESTRING(EMPTY,(0 0),EMPTY,(1 1))',
                     'MULTIPOLYGON(((0 0),EMPTY,(1 1)),EMPTY,((2 2)))',
                     'GEOMETRYCOLLECTION(POINT EMPTY)',
                     'GEOMETRYCOLLECTION(LINESTRING EMPTY)',
                     'GEOMETRYCOLLECTION(POLYGON EMPTY)',
                     'GEOMETRYCOLLECTION(MULTIPOINT EMPTY)',
                     'GEOMETRYCOLLECTION(MULTILINESTRING EMPTY)',
                     'GEOMETRYCOLLECTION(MULTIPOLYGON EMPTY)',
                     'GEOMETRYCOLLECTION(GEOMETRYCOLLECTION EMPTY)',
                     'GEOMETRYCOLLECTION(POINT(0 0))',
                     'GEOMETRYCOLLECTION(LINESTRING(0 0),LINESTRING(1 1))',
                     'GEOMETRYCOLLECTION(POLYGON((0 0),EMPTY,(2 2)), POLYGON((1 1)))',
                     'CURVEPOLYGON EMPTY',
                     'CURVEPOLYGON ((0 0,0 1,1 1,1 0,0 0))',
                     'CURVEPOLYGON (CIRCULARSTRING(0 0,1 0,0 0))',
                     'COMPOUNDCURVE EMPTY',
                     'COMPOUNDCURVE ((0 0,0 1,1 1,1 0,0 0))',
                     'COMPOUNDCURVE (CIRCULARSTRING(0 0,1 0,0 0))',
                     'CIRCULARSTRING EMPTY',
                     'CIRCULARSTRING (0 0,1 0,0 0)',
                     'MULTISURFACE EMPTY',
                     'MULTISURFACE (((0 0,0 1,1 1,1 0,0 0)))',
                     'MULTISURFACE (CURVEPOLYGON((0 0,0 1,1 1,1 0,0 0)))',
                     'MULTICURVE EMPTY',
                     'MULTICURVE ((0 0,0 1))',
                     'MULTICURVE (COMPOUNDCURVE((0 0,0 1)))',
                     'MULTICURVE (CIRCULARSTRING (0 0,1 0,0 0))',
                      ]

    for src_wkt in src_wkt_list:
        if src_wkt is None:
            src_geom = None
        else:
            src_geom = ogr.CreateGeometryFromWkt( src_wkt )

        ogr.ForceToPolygon( src_geom )
        ogr.ForceToMultiPolygon( src_geom )
        ogr.ForceToMultiPoint( src_geom )
        ogr.ForceToMultiLineString( src_geom )
        ogr.ForceToLineString( src_geom )
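        # Try every flat geometry type from wkbPoint (1) through wkbMultiSurface (12)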
        for target_type in range(ogr.wkbMultiSurface):
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            ogr.ForceTo( src_geom, 1 + target_type )
            gdal.PopErrorHandler()
        #print(src_geom.ExportToWkt(), dst_geom1.ExportToWkt(), dst_geom2.ExportToWkt(), dst_geom3.ExportToWkt(), dst_geom4.ExportToWkt())

    return 'success'
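This function comes from GDAL's autotest suite (hence the 'success' return value); it mainly checks that the ForceTo* helpers and the generic ogr.ForceTo() survive EMPTY and otherwise degenerate inputs without crashing. A small sketch of the pattern it exercises, with an illustrative geometry collection as input and the same quiet error handler around the generic conversion:

from osgeo import gdal, ogr

geom = ogr.CreateGeometryFromWkt('GEOMETRYCOLLECTION(POINT(0 0),POINT(1 1))')

# A collection that contains only points can be promoted to a multipoint.
print(ogr.ForceToMultiPoint(geom).ExportToWkt())

# The generic ForceTo() is tried for every target type in the test above;
# expected errors are silenced because many conversions are not possible.
gdal.PushErrorHandler('CPLQuietErrorHandler')
converted = ogr.ForceTo(geom, ogr.wkbMultiPolygon)
gdal.PopErrorHandler()
print(converted.ExportToWkt())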
Example #3
def ee_beamer_et(ini_path=None, overwrite_flag=False):
    """Earth Engine Beamer ET Image Download

    Args:
        ini_path (str): file path of the parameter INI file
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nEarth Engine Beamer ETg Image Download')

    # Read config file
    ini = inputs.read(ini_path)
    inputs.parse_section(ini, section='INPUTS')
    inputs.parse_section(ini, section='SPATIAL')
    inputs.parse_section(ini, section='IMAGES')
    inputs.parse_section(ini, section='BEAMER')

    ini['IMAGES']['download_bands'] = [
        'etg_mean', 'etg_lci', 'etg_uci', 'etg_lpi', 'etg_upi'
    ]
    stat_list = ['median', 'mean']
    nodata_value = -9999
    zips_folder = 'zips'
    images_folder = 'images'
    annuals_folder = 'annuals'

    # Regular expression is only used to extract year from SCENE_ID
    landsat_re = re.compile(r'L[ETC]0[4578]_\d{3}XXX_(?P<YEAR>\d{4})\d{2}\d{2}')

    # if end_doy and end_doy > 273:
    #     logging.error(
    #         '\nERROR: End DOY has to be in the same water year as start DOY')
    #     sys.exit()

    # Get ee features from shapefile
    zone_geom_list = gdc.shapefile_2_geom_list_func(
        ini['INPUTS']['zone_shp_path'],
        zone_field=ini['INPUTS']['zone_field'],
        reverse_flag=False)
    # zone_count = len(zone_geom_list)
    # output_fmt = '_{0:0%sd}.csv' % str(int(math.log10(zone_count)) + 1)

    # Check if the zone_names are unique
    # Eventually support merging common zone_names
    if len(set([z[1] for z in zone_geom_list])) != len(zone_geom_list):
        logging.error(
            '\nERROR: There appear to be duplicate zone ID/name values.'
            '\n  Currently, the values in "{}" must be unique.'
            '\n  Exiting.'.format(ini['INPUTS']['zone_field']))
        return False

    # Filter features by FID
    if ini['INPUTS']['fid_keep_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] in ini['INPUTS']['fid_keep_list']
        ]
    if ini['INPUTS']['fid_skip_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] not in ini['INPUTS']['fid_skip_list']
        ]

    # Merge geometries
    if ini['INPUTS']['merge_geom_flag']:
        merge_geom = ogr.Geometry(ogr.wkbMultiPolygon)
        for zone in zone_geom_list:
            zone_multipolygon = ogr.ForceToMultiPolygon(
                ogr.CreateGeometryFromJson(json.dumps(zone[2])))
            for zone_polygon in zone_multipolygon:
                merge_geom.AddGeometry(zone_polygon)
        # merge_json = json.loads(merge_mp.ExportToJson())
        zone_geom_list = [[
            0, ini['INPUTS']['zone_filename'],
            json.loads(merge_geom.ExportToJson())
        ]]
        ini['INPUTS']['zone_field'] = ''

    # Set all zone specific parameters into a dictionary
    zone = {}

    # Need zone_shp_path projection to build EE geometries
    zone['osr'] = gdc.feature_path_osr(ini['INPUTS']['zone_shp_path'])
    zone['proj'] = gdc.osr_wkt(zone['osr'])
    # zone['proj'] = ee.Projection(zone['proj']).wkt().getInfo()
    # zone['proj'] = zone['proj'].replace('\n', '').replace(' ', '')
    # logging.debug('  Zone Projection: {}'.format(zone['proj']))

    # Check that shapefile has matching spatial reference
    if not gdc.matching_spatref(zone['osr'], ini['SPATIAL']['osr']):
        logging.warning('  Zone OSR:\n{}\n'.format(zone['osr']))
        logging.warning('  Output OSR:\n{}\n'.format(
            ini['SPATIAL']['osr'].ExportToWkt()))
        logging.warning('  Zone Proj4:   {}'.format(
            zone['osr'].ExportToProj4()))
        logging.warning('  Output Proj4: {}'.format(
            ini['SPATIAL']['osr'].ExportToProj4()))
        logging.warning(
            '\nWARNING: \n'
            'The output and zone spatial references do not appear to match\n'
            'This will likely cause problems!')
        input('Press ENTER to continue')
    else:
        logging.debug('  Zone Projection:\n{}\n'.format(
            zone['osr'].ExportToWkt()))
        logging.debug('  Output Projection:\n{}\n'.format(
            ini['SPATIAL']['osr'].ExportToWkt()))
        logging.debug('  Output Cellsize: {}'.format(
            ini['SPATIAL']['cellsize']))

    # Initialize Earth Engine API key
    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
    utils.ee_request(ee.Number(1).getInfo())

    # Get list of path/row strings to centroid coordinates
    if ini['INPUTS']['tile_keep_list']:
        ini['INPUTS']['tile_geom'] = [
            wrs2.tile_centroids[tile]
            for tile in ini['INPUTS']['tile_keep_list']
            if tile in wrs2.tile_centroids.keys()
        ]
        ini['INPUTS']['tile_geom'] = ee.Geometry.MultiPoint(
            ini['INPUTS']['tile_geom'], 'EPSG:4326')
    else:
        ini['INPUTS']['tile_geom'] = None

    # Read in ETo and PPT data from file
    if (ini['BEAMER']['eto_source'] == 'file'
            or ini['BEAMER']['ppt_source'] == 'file'):
        data_array = np.genfromtxt(ini['BEAMER']['data_path'],
                                   delimiter=',',
                                   names=True,
                                   dtype=None)
        data_fields = data_array.dtype.names
        logging.debug('  CSV fields: {}'.format(', '.join(data_fields)))
        # DEADBEEF - Compare field names assuming all upper case
        data_fields = [f.upper() for f in data_fields]
        eto_dict = defaultdict(dict)
        ppt_dict = defaultdict(dict)
        for row in data_array:
            z = str(row[data_fields.index(ini['BEAMER']['data_zone_field'])])
            y = int(row[data_fields.index(ini['BEAMER']['data_year_field'])])
            if ini['BEAMER']['eto_source'] == 'file':
                # DEADBEEF - Compare field names assuming all upper case
                eto_dict[z][y] = row[data_fields.index(
                    ini['BEAMER']['data_eto_field'].upper())]
            if ini['BEAMER']['ppt_source'] == 'file':
                # DEADBEEF - Compare field names assuming all upper case
                ppt_dict[z][y] = row[data_fields.index(
                    ini['BEAMER']['data_ppt_field'].upper())]

    # Get filtered/merged/prepped Landsat collection
    landsat_args = {
        k: v
        for section in ['INPUTS'] for k, v in ini[section].items() if k in [
            'landsat4_flag', 'landsat5_flag', 'landsat7_flag', 'landsat8_flag',
            'fmask_flag', 'acca_flag', 'start_year', 'end_year', 'start_month',
            'end_month', 'start_doy', 'end_doy', 'scene_id_keep_list',
            'scene_id_skip_list', 'path_keep_list', 'row_keep_list',
            'tile_geom', 'adjust_method', 'mosaic_method', 'refl_sur_method'
        ]
    }
    landsat = ee_common.Landsat(landsat_args)

    # Download images for each feature separately
    for zone_fid, zone_name, zone_json in zone_geom_list:
        zone['fid'] = zone_fid
        zone['name'] = zone_name.replace(' ', '_')
        zone['json'] = zone_json
        logging.info('ZONE: {} (FID: {})'.format(zone['name'], zone['fid']))

        # Build EE geometry object for zonal stats
        zone['geom'] = ee.Geometry(geo_json=zone['json'],
                                   opt_proj=zone['proj'],
                                   opt_geodesic=False)
        # logging.debug('  Centroid: {}'.format(
        #     zone['geom'].centroid(100).getInfo()['coordinates']))

        # Use feature geometry to build extent, transform, and shape
        zone['extent'] = gdc.Extent(
            ogr.CreateGeometryFromJson(json.dumps(zone['json'])).GetEnvelope())
        # zone['extent'] = gdc.Extent(zone['geom'].GetEnvelope())
        zone['extent'] = zone['extent'].ogrenv_swap()
        zone['extent'] = zone['extent'].adjust_to_snap(
            'EXPAND', ini['SPATIAL']['snap_x'], ini['SPATIAL']['snap_y'],
            ini['SPATIAL']['cellsize'])
        zone['geo'] = zone['extent'].geo(ini['SPATIAL']['cellsize'])
        zone['transform'] = gdc.geo_2_ee_transform(zone['geo'])
        # zone['transform'] = '[' + ','.join(map(str, zone['transform'])) + ']'
        zone['shape'] = zone['extent'].shape(ini['SPATIAL']['cellsize'])
        logging.debug('  Zone Shape: {}'.format(zone['shape']))
        logging.debug('  Zone Transform: {}'.format(zone['transform']))
        logging.debug('  Zone Extent: {}'.format(zone['extent']))
        # logging.debug('  Zone Geom: {}'.format(zone['geom'].getInfo()))

        # Assume all pixels in all 14+2 images could be reduced
        zone['max_pixels'] = zone['shape'][0] * zone['shape'][1]
        logging.debug('  Max Pixels: {}'.format(zone['max_pixels']))

        # Set output spatial reference
        # Eventually allow user to manually set these
        # output_crs = zone['proj']
        logging.debug('  Image Projection: {}'.format(ini['SPATIAL']['crs']))

        # output_transform = zone['transform'][:]
        output_transform = '[' + ','.join(map(str, zone['transform'])) + ']'
        output_shape = '{1}x{0}'.format(*zone['shape'])
        logging.debug('  Image Transform: {}'.format(output_transform))
        logging.debug('  Image Shape: {}'.format(output_shape))

        zone_output_ws = os.path.join(ini['IMAGES']['output_ws'], zone_name)
        zone_zips_ws = os.path.join(zone_output_ws, zips_folder)
        zone_images_ws = os.path.join(zone_output_ws, images_folder)
        zone_annuals_ws = os.path.join(zone_output_ws, annuals_folder)
        if not os.path.isdir(zone_zips_ws):
            os.makedirs(zone_zips_ws)
        if not os.path.isdir(zone_images_ws):
            os.makedirs(zone_images_ws)
        if not os.path.isdir(zone_annuals_ws):
            os.makedirs(zone_annuals_ws)

        # Initialize the Landsat object
        # Limit Landsat products for getting SCENE IDs
        landsat.products = []
        landsat.zone_geom = zone['geom']
        landsat_coll = landsat.get_collection()
        # if ee.Image(landsat_coll.first()).getInfo() is None:
        #     logging.info('    No images, skipping')
        #     continue

        # Get the full list of scene IDs
        logging.debug('  Getting SCENE_ID list')
        scene_id_list = sorted(
            utils.ee_getinfo(landsat_coll.aggregate_histogram('SCENE_ID')))
        logging.debug('    {} scenes'.format(len(scene_id_list)))

        # Switch Landsat products for computing ETg
        landsat.products = ['evi_sur']

        # Process each image in the collection by date
        # Iterate over a copy so scenes that fail can be removed from the list
        for image_id in list(scene_id_list):
            logging.info('{}'.format(image_id))

            zip_path = os.path.join(zone_zips_ws, '{}.zip'.format(image_id))
            logging.debug('  Zip: {}'.format(zip_path))

            if os.path.isfile(zip_path) and overwrite_flag:
                logging.debug('    Output already exists, removing zip')
                os.remove(zip_path)
            elif os.path.isfile(zip_path) and not overwrite_flag:
                # Check that existing ZIP files can be opened
                try:
                    with zipfile.ZipFile(zip_path, 'r') as z:
                        pass
                    logging.debug('    Output already exists, skipping')
                    continue
                except Exception as e:
                    logging.warning('    Zip file error, removing')
                    os.remove(zip_path)

            # Getting the date directly from the SCENE_ID
            image_start_dt = datetime.datetime.strptime(
                image_id[12:], '%Y%m%d')
            image_end_dt = image_start_dt + datetime.timedelta(days=1)
            logging.debug('  {}  {}'.format(image_start_dt.date(),
                                            image_end_dt.date()))
            year = image_start_dt.year

            # Filter the GRIDMET collection
            wy_start_date = '{}-10-01'.format(year - 1)
            wy_end_date = '{}-10-01'.format(year)
            logging.debug('  WY: {} {}'.format(wy_start_date, wy_end_date))
            gridmet_coll = ee.ImageCollection('IDAHO_EPSCOR/GRIDMET') \
                .filterDate(wy_start_date, wy_end_date)

            # # PRISM collection was uploaded as an asset
            # if ini['BEAMER']['ppt_source'] == 'prism':
            #     def prism_time_start(input_image):
            #         """Set time_start property on PRISM water year PPT collection"""
            #         # Assume year is the 4th item separated by "_"
            #         water_year = ee.String(input_image.get('system:index')).split('_').get(3)
            #         date_start = ee.Date(ee.String(water_year).cat('-10-01'))
            #         return input_image.select([0], ['ppt']).set({
            #             'system:time_start': date_start.millis()
            #         })
            #     prism_coll = ee.ImageCollection('users/cgmorton/prism_800m_ppt_wy')
            #     prism_coll = prism_coll.map(prism_time_start) \
            #         .filterDate(wy_start_date, wy_end_date)

            # Get water year PPT from file
            # Convert all input data to mm to match GRIDMET data
            if ini['BEAMER']['ppt_source'] == 'file':
                wy_ppt_input = ppt_dict[zone_name][year]
                if ini['BEAMER']['data_ppt_units'] == 'mm':
                    pass
                elif ini['BEAMER']['data_ppt_units'] == 'inches':
                    wy_ppt_input *= 25.4
                elif ini['BEAMER']['data_ppt_units'] == 'feet':
                    wy_ppt_input *= (25.4 * 12)
            elif ini['BEAMER']['ppt_source'] == 'gridmet':
                # Get GRIDMET value at centroid of geometry
                wy_ppt_input = float(
                    utils.ee_getinfo(
                        ee.ImageCollection(
                            gridmet_coll.select(['pr'],
                                                ['ppt']).sum()).getRegion(
                                                    zone['geom'].centroid(1),
                                                    500))[1][4])
                # Calculate GRIDMET zonal mean of geometry
                # wy_ppt_input = float(ee.ImageCollection(
                #     gridmet_coll.select(['pr'], ['ppt'])).reduceRegion(
                #         reducer=ee.Reducer.sum(),
                #         geometry=zone['geom'],
                #         crs=ini['SPATIAL']['crs'],
                #         crsTransform=zone['transform'],
                #         bestEffort=False,
                #         tileScale=1).getInfo()['ppt']
            # elif ini['BEAMER']['ppt_source'] == 'prism':
            #     # Calculate PRISM zonal mean of geometry
            #     wy_ppt_input = float(ee.ImageCollection(
            #         prism_coll.map(ee_common.prism_ppt_func)).sum().reduceRegion(
            #             reducer=ee.Reducer.mean(),
            #             geometry=zone['geom'],
            #             crs=ini['SPATIAL']['crs'],
            #             crsTransform=zone['transform'],
            #             bestEffort=False,
            #             tileScale=1).getInfo()['ppt'])

            # Get water year ETo from file
            # Convert all input data to mm for Beamer Method
            if ini['BEAMER']['eto_source'] == 'file':
                wy_eto_input = eto_dict[zone_name][year]
                if ini['BEAMER']['data_eto_units'] == 'mm':
                    pass
                elif ini['BEAMER']['data_eto_units'] == 'inches':
                    wy_eto_input *= 25.4
                elif ini['BEAMER']['data_eto_units'] == 'feet':
                    wy_eto_input *= (25.4 * 12)
            # This assumes GRIDMET data is in millimeters
            elif ini['BEAMER']['eto_source'] == 'gridmet':
                wy_eto_input = float(
                    utils.ee_getinfo(
                        ee.ImageCollection(gridmet_coll.select(
                            ['eto']).sum()).getRegion(zone['geom'].centroid(1),
                                                      500))[1][4])
                # wy_eto_input = float(utils.ee_getinfo(ee.ImageCollection(
                #     gridmet_coll.select(['eto'])).reduceRegion(
                #         reducer=ee.Reducer.sum(),
                #         geometry=zone['geom'],
                #         crs=zone_proj,
                #         crsTransform=zone_transform,
                #         bestEffort=False,
                #         tileScale=1).getInfo()))
            logging.debug('  Input ETO: {} mm  PPT: {} mm'.format(
                wy_eto_input, wy_ppt_input))

            # Scale ETo & PPT
            wy_eto_input *= ini['BEAMER']['eto_factor']
            wy_ppt_input *= ini['BEAMER']['ppt_factor']

            # Convert output units from mm
            wy_ppt_output = wy_ppt_input
            wy_eto_output = wy_eto_input
            if ini['IMAGES']['ppt_units'] == 'mm':
                pass
            elif ini['IMAGES']['ppt_units'] == 'in':
                wy_ppt_output /= 25.4
            elif ini['IMAGES']['ppt_units'] == 'ft':
                wy_ppt_output /= (25.4 * 12)
            if ini['IMAGES']['eto_units'] == 'mm':
                pass
            elif ini['IMAGES']['eto_units'] == 'in':
                wy_eto_output /= 25.4
            elif ini['IMAGES']['eto_units'] == 'ft':
                wy_eto_output /= (25.4 * 12)
            logging.debug('  Output ETO: {} {} PPT: {} {}'.format(
                wy_eto_output, ini['IMAGES']['eto_units'], wy_ppt_output,
                ini['IMAGES']['ppt_units']))

            # Add water year ETo and PPT values to each image
            def eto_ppt_func(img):
                """"""
                return ee.Image(img).setMulti({
                    'wy_eto': wy_eto_output,
                    'wy_ppt': wy_ppt_output
                })

            # Compute EVI_SUR, add ETo and PPT, then Compute ETg
            # Set the masked values to a nodata value
            # so that the TIF can have a nodata value other than 0 set
            landsat_image = eto_ppt_func(
                landsat.get_image(landsat,
                                  image_start_dt.year,
                                  image_start_dt.strftime('%j'),
                                  path=image_id[5:8],
                                  row=None))
            etg_image = ee.Image(ee_common.beamer_func(landsat_image)) \
                .clip(zone['geom']) \
                .unmask(nodata_value, False)

            # Get the download URL
            logging.debug('  Requesting URL')
            zip_url = utils.ee_request(
                etg_image.getDownloadURL({
                    'name': image_id,
                    'crs': ini['SPATIAL']['crs'],
                    'crs_transform': output_transform,
                    'dimensions': output_shape
                }))
            del etg_image

            # Remove the scene from the scene list if it's not going to work
            if not zip_url:
                scene_id_list.remove(image_id)
                continue

            # Try downloading a few times
            logging.info('  Downloading')
            for i in range(1, 10):
                try:
                    response = urlrequest.urlopen(zip_url)
                    with open(zip_path, 'wb') as output_f:
                        shutil.copyfileobj(response, output_f)
                    break
                except Exception as e:
                    logging.info('  Resending query')
                    logging.debug('  {}'.format(e))
                    sleep(i**2)
                    if os.path.isfile(zip_path):
                        os.remove(zip_path)

            # Remove the scene from the scene list if it's not going to work
            if not os.path.isfile(zip_path):
                scene_id_list.remove(image_id)

        logging.info('\nExtracting images')
        for image_id in scene_id_list:
            logging.info('{}'.format(image_id))

            zip_path = os.path.join(zone_zips_ws, '{}.zip'.format(image_id))
            logging.debug('  Zip: {}'.format(zip_path))
            if not os.path.isfile(zip_path):
                logging.debug('    zip file does not exist, skipping')
                continue

            # Skip if all output images are present
            image_band_list = [
                os.path.join(zone_images_ws,
                             '{}.{}.tif'.format(image_id, band))
                for band in ini['IMAGES']['download_bands']
            ]
            if (not overwrite_flag
                    and all(os.path.isfile(x) for x in image_band_list)):
                logging.debug('  all images present, skipping')
                continue
            # Otherwise, remove existing images
            for image_path in image_band_list:
                for file_path in glob.glob(image_path.replace('.tif', '.*')):
                    os.remove(file_path)

            # Try extracting the files
            try:
                logging.debug('  Extracting')
                with zipfile.ZipFile(zip_path, 'r') as z:
                    z.extractall(zone_images_ws)
            except Exception as e:
                logging.warning('    Error: could not extract {}'.format(zip_path))
                logging.debug('  {}'.format(e))
                try:
                    os.remove(zip_path)
                except Exception as e:
                    pass

            # Set nodata value
            for item in os.listdir(zone_images_ws):
                if item.startswith(image_id) and item.endswith('.tif'):
                    gdc.raster_path_set_nodata(
                        os.path.join(zone_images_ws, item), nodata_value)
                    raster_statistics(os.path.join(zone_images_ws, item))

        logging.info('\nComputing annual means')
        for band in ini['IMAGES']['download_bands']:
            logging.info('  {}'.format(band))
            for year in range(ini['INPUTS']['start_year'],
                              ini['INPUTS']['end_year'] + 1):
                logging.info('  {}'.format(year))
                mean_path = os.path.join(
                    # zone_annuals_ws, 'etg_{}_{}.{}.tif'.format(
                    zone_annuals_ws,
                    '{}_{}.{}.tif'.format(zone_name.lower().replace(' ', '_'),
                                          year, band))
                logging.debug('  {}'.format(mean_path))
                # if os.path.isfile(mean_path) and not overwrite_flag:
                #     logging.debug('    file already exists, skipping')
                #     continue

                image_band_list = [
                    os.path.join(zone_images_ws, item)
                    for item in os.listdir(zone_images_ws)
                    if (item.endswith('.{}.tif'.format(band)) and (
                        int(landsat_re.match(item).group('YEAR')) == year))
                ]
                # for image_path in image_band_list:
                #     raster_path_set_nodata(image_path, nodata_value)
                if not image_band_list:
                    continue

                # Use GDAL to compute the composite
                cell_statistics(image_band_list, mean_path, 'mean')
                raster_statistics(mean_path)

        logging.info('\nComputing composite rasters from annual means')
        for stat in stat_list:
            logging.info('  Stat: {}'.format(stat))
            for band in ini['IMAGES']['download_bands']:
                logging.info('  {}'.format(band))
                image_band_list = [
                    os.path.join(zone_annuals_ws, item)
                    for item in os.listdir(zone_annuals_ws)
                    if item.endswith('.{}.tif'.format(band))
                ]
                # for image_path in image_band_list:
                #     raster_path_set_nodata(image_path, nodata_value)

                output_path = os.path.join(
                    zone_output_ws,
                    '{}_{}.{}.tif'.format(zone_name.lower().replace(' ', '_'),
                                          stat.lower(), band.lower()))
                logging.debug('  {}'.format(output_path))

                # Use GDAL to compute the composite raster
                cell_statistics(image_band_list, output_path, 'mean')
                raster_statistics(output_path)
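The only ogr.ForceToMultiPolygon() call in this script is in the geometry-merging block guarded by merge_geom_flag near the top. A stripped-down sketch of that pattern, with two illustrative GeoJSON geometries standing in for the zones read from the shapefile (the ini/gdc plumbing is left out):

import json

from osgeo import ogr

# Illustrative zone geometries; in the script these come from zone_geom_list.
zone_jsons = [
    {'type': 'Polygon',
     'coordinates': [[[0, 0], [0, 1], [1, 1], [0, 0]]]},
    {'type': 'MultiPolygon',
     'coordinates': [[[[2, 2], [2, 3], [3, 3], [2, 2]]]]},
]

merge_geom = ogr.Geometry(ogr.wkbMultiPolygon)
for zone_json in zone_jsons:
    # Force each zone to a multipolygon so plain polygons and
    # multipolygons can be merged with the same loop.
    zone_multipolygon = ogr.ForceToMultiPolygon(
        ogr.CreateGeometryFromJson(json.dumps(zone_json)))
    for zone_polygon in zone_multipolygon:
        merge_geom.AddGeometry(zone_polygon)

print(merge_geom.GetGeometryCount())  # 2 member polygons
print(merge_geom.ExportToJson())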