Example #1
import os
import sys

from osgeo import gdal, ogr


def WriteLineShp(lineList, outShp, lineFieldsList=None):
    print("Write line shapefile: %s" % outShp)
    gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "NO")  ## support for paths in Chinese
    gdal.SetConfigOption("SHAPE_ENCODING", "")  ## support for fields in Chinese
    ogr.RegisterAll()
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if driver is None:
        print "ESRI Shapefile driver not available."
        sys.exit(1)
    if os.path.exists(outShp):
        driver.DeleteDataSource(outShp)

    fieldName = []
    fieldNameIdx = 0
    if lineFieldsList is not None:
        if len(lineList) != len(lineFieldsList):
            if len(lineList) + 1 == len(lineFieldsList):
                fieldName = lineFieldsList[0]
                fieldNameIdx = 1
            else:
                print("ERROR: lineFieldsList does not match lineList in length.")
                sys.exit(1)
        else:
            fieldLength = len(lineFieldsList[0])
            for i in range(fieldLength):
                name = 'lineName' + str(i)
                fieldName.append(name)
            fieldNameIdx = 0
    else:
        fieldName = ['LineName']
        fieldNameIdx = 0
    ds = driver.CreateDataSource(outShp.rpartition(os.sep)[0])
    if ds is None:
        print "ERROR Output: Creation of output file failed."
        sys.exit(1)
    lyr = ds.CreateLayer(outShp.rpartition(os.sep)[2].split('.')[0], None, ogr.wkbLineString)
    ## create fields
    for fld in fieldName:
        nameField = ogr.FieldDefn(fld, ogr.OFTString)
        lyr.CreateField(nameField)
    # enumerate() avoids list.index(), which is O(n) and wrong when two
    # lines share the same coordinates
    for idx, l in enumerate(lineList):
        if len(l) > 1:
            line = ogr.Geometry(ogr.wkbLineString)
            for i in l:
                line.AddPoint(i[0], i[1])
            templine = ogr.CreateGeometryFromJson(line.ExportToJson())
            feature = ogr.Feature(lyr.GetLayerDefn())
            feature.SetGeometry(templine)
            for idx2, fld in enumerate(fieldName):
                if lineFieldsList is not None:
                    if fieldNameIdx == 1:
                        fieldValue = lineFieldsList[idx + 1][idx2]
                    else:
                        fieldValue = lineFieldsList[idx][idx2]
                else:
                    fieldValue = ' '
                # print fieldValue
                feature.SetField(fld, fieldValue)
            lyr.CreateFeature(feature)
            feature.Destroy()
    ds.Destroy()
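A minimal usage sketch (the coordinates, field values, and output path are illustrative, not from the original source):

lines = [[(0.0, 0.0), (1.0, 1.0), (2.0, 1.5)],
         [(0.0, 2.0), (1.0, 2.5)]]
fields = [['river'], ['road']]  # one attribute row per line
WriteLineShp(lines, '/tmp/lines.shp', fields)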
Example #2
def main(ini_path, overwrite_flag=True):
    """Earth Engine Beamer ET Zonal Stats

    Args:
        ini_path (str):
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nEarth Engine Beamer ET Zonal Stats')

    # Read config file
    ini = inputs.read(ini_path)
    inputs.parse_section(ini, section='INPUTS')
    inputs.parse_section(ini, section='SPATIAL')
    inputs.parse_section(ini, section='BEAMER')
    inputs.parse_section(ini, section='EXPORT')
    inputs.parse_section(ini, section='ZONAL_STATS')

    # Overwrite landsat products with Beamer specific values
    ini['EXPORT']['landsat_products'] = [
        'ndvi_toa', 'ndwi_toa', 'albedo_sur', 'ts', 'evi_sur', 'etstar_mean',
        'etg_mean', 'etg_lpi', 'etg_upi', 'etg_lci', 'etg_uci',
        'et_lpi', 'et_upi', 'et_lci', 'et_uci'
    ]

    # First row of the CSV is the header
    header_list = [
        'ZONE_NAME', 'ZONE_FID', 'DATE', 'SCENE_ID', 'PLATFORM', 'PATH', 'ROW',
        'YEAR', 'MONTH', 'DAY', 'DOY', 'PIXEL_COUNT', 'PIXEL_TOTAL',
        'FMASK_COUNT', 'FMASK_TOTAL', 'FMASK_PCT', 'ETSTAR_COUNT',
        'CLOUD_SCORE', 'QA', 'NDVI_TOA', 'NDWI_TOA', 'ALBEDO_SUR', 'TS',
        'EVI_SUR', 'ETSTAR_MEAN', 'ETG_MEAN', 'ETG_LPI', 'ETG_UPI', 'ETG_LCI',
        'ETG_UCI', 'ET_MEAN', 'ET_LPI', 'ET_UPI', 'ET_LCI', 'ET_UCI', 'WY_ETO',
        'WY_PPT'
    ]
    int_fields = [
        'ZONE_FID', 'PATH', 'ROW', 'YEAR', 'MONTH', 'DAY', 'DOY',
        'PIXEL_COUNT', 'PIXEL_TOTAL', 'FMASK_COUNT', 'FMASK_TOTAL',
        'ETSTAR_COUNT'
    ]
    float_fields = list(
        set(header_list) - set(int_fields) -
        set(['ZONE_NAME', 'DATE', 'SCENE_ID', 'PLATFORM']))

    # Regular expression to pull out the Landsat scene ID
    # If the RE has capturing groups, the findall call below will fail
    # to extract the ID
    landsat_re = re.compile(r'L[ETC]0[4578]_\d{3}XXX_\d{4}\d{2}\d{2}')
    # landsat_re = re.compile('L[ETC][4578]\d{3}XXX\d{4}\d{3}')
    # landsat_re = re.compile('L[ETC][4578]\d{3}\d{3}\d{4}\d{3}\D{3}\d{2}')

    # Remove the existing CSV
    output_path = os.path.join(ini['ZONAL_STATS']['output_ws'],
                               ini['BEAMER']['output_name'])
    if overwrite_flag and os.path.isfile(output_path):
        os.remove(output_path)
    # Create an empty CSV
    if not os.path.isfile(output_path):
        data_df = pd.DataFrame(columns=header_list)
        data_df[int_fields] = data_df[int_fields].astype(np.int64)
        data_df[float_fields] = data_df[float_fields].astype(np.float32)
        data_df.to_csv(output_path, index=False)

    # Get ee features from shapefile
    zone_geom_list = gdc.shapefile_2_geom_list_func(
        ini['INPUTS']['zone_shp_path'],
        zone_field=ini['INPUTS']['zone_field'],
        reverse_flag=False)
    # zone_count = len(zone_geom_list)
    # output_fmt = '_{0:0%sd}.csv' % str(int(math.log10(zone_count)) + 1)

    # Check if the zone_names are unique
    # Eventually support merging common zone_names
    if len(set([z[1] for z in zone_geom_list])) != len(zone_geom_list):
        logging.error(
            '\nERROR: There appear to be duplicate zone ID/name values.'
            '\n  Currently, the values in "{}" must be unique.'
            '\n  Exiting.'.format(ini['INPUTS']['zone_field']))
        return False

    # Filter features by FID
    if ini['INPUTS']['fid_keep_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] in ini['INPUTS']['fid_keep_list']
        ]
    if ini['INPUTS']['fid_skip_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] not in ini['INPUTS']['fid_skip_list']
        ]

    # Merge geometries
    if ini['INPUTS']['merge_geom_flag']:
        merge_geom = ogr.Geometry(ogr.wkbMultiPolygon)
        for zone in zone_geom_list:
            zone_multipolygon = ogr.ForceToMultiPolygon(
                ogr.CreateGeometryFromJson(json.dumps(zone[2])))
            for zone_polygon in zone_multipolygon:
                merge_geom.AddGeometry(zone_polygon)
        # merge_json = json.loads(merge_mp.ExportToJson())
        zone_geom_list = [[
            0, ini['INPUTS']['zone_filename'],
            json.loads(merge_geom.ExportToJson())
        ]]
        ini['INPUTS']['zone_field'] = ''

    # Set all zone specific parameters into a dictionary
    zone = {}

    # Need zone_shp_path projection to build EE geometries
    zone['osr'] = gdc.feature_path_osr(ini['INPUTS']['zone_shp_path'])
    zone['proj'] = gdc.osr_wkt(zone['osr'])
    # zone['proj'] = ee.Projection(zone['proj']).wkt().getInfo()
    # zone['proj'] = zone['proj'].replace('\n', '').replace(' ', '')
    # logging.debug('  Zone Projection: {}'.format(zone['proj']))

    # Check that shapefile has matching spatial reference
    if not gdc.matching_spatref(zone['osr'], ini['SPATIAL']['osr']):
        logging.warning('  Zone OSR:\n{}\n'.format(zone['osr']))
        logging.warning('  Output OSR:\n{}\n'.format(
            ini['SPATIAL']['osr'].ExportToWkt()))
        logging.warning('  Zone Proj4:   {}'.format(
            zone['osr'].ExportToProj4()))
        logging.warning('  Output Proj4: {}'.format(
            ini['SPATIAL']['osr'].ExportToProj4()))
        logging.warning(
            '\nWARNING: \n'
            'The output and zone spatial references do not appear to match\n'
            'This will likely cause problems!')
        input('Press ENTER to continue')
    else:
        logging.debug('  Zone Projection:\n{}\n'.format(
            zone['osr'].ExportToWkt()))
        logging.debug('  Output Projection:\n{}\n'.format(
            ini['SPATIAL']['osr'].ExportToWkt()))
        logging.debug('  Output Cellsize: {}'.format(
            ini['SPATIAL']['cellsize']))

    # Initialize Earth Engine API key
    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
    utils.ee_request(ee.Number(1).getInfo())

    # Read in ETo and PPT data from file
    if (ini['BEAMER']['eto_source'] == 'file'
            or ini['BEAMER']['ppt_source'] == 'file'):
        data_array = np.atleast_1d(
            np.genfromtxt(ini['BEAMER']['data_path'],
                          delimiter=',',
                          names=True,
                          encoding=None,
                          dtype=None))
        data_fields = data_array.dtype.names
        logging.debug('  CSV fields: {}'.format(', '.join(data_fields)))
        # DEADBEEF - Compare field names assuming all upper case
        data_fields = [f.upper() for f in data_fields]
        eto_dict = defaultdict(dict)
        ppt_dict = defaultdict(dict)
        for row in data_array:
            z = str(row[data_fields.index(ini['BEAMER']['data_zone_field'])])
            y = row[data_fields.index(ini['BEAMER']['data_year_field'])]
            if ini['BEAMER']['eto_source'] == 'file':
                # DEADBEEF - Compare field names assuming all upper case
                eto_dict[z][y] = row[data_fields.index(
                    ini['BEAMER']['data_eto_field'].upper())]
            if ini['BEAMER']['ppt_source'] == 'file':
                # DEADBEEF - Compare field names assuming all upper case
                ppt_dict[z][y] = row[data_fields.index(
                    ini['BEAMER']['data_ppt_field'].upper())]

    # Get filtered/merged/prepped Landsat collection
    landsat_args = {
        k: v
        for section in ['INPUTS'] for k, v in ini[section].items() if k in [
            'landsat4_flag', 'landsat5_flag', 'landsat7_flag', 'landsat8_flag',
            'fmask_flag', 'acca_flag', 'start_year', 'end_year', 'start_month',
            'end_month', 'start_doy', 'end_doy', 'scene_id_keep_list',
            'scene_id_skip_list', 'path_keep_list', 'row_keep_list',
            'tile_geom', 'adjust_method', 'mosaic_method', 'refl_sur_method'
        ]
    }
    landsat_args['products'] = ini['EXPORT']['landsat_products']
    landsat = ee_common.Landsat(landsat_args)

    # Calculate zonal stats for each feature separately
    for zone_fid, zone_name, zone_json in zone_geom_list:
        zone['fid'] = zone_fid
        zone['name'] = zone_name.replace(' ', '_')
        zone['json'] = zone_json
        logging.info('ZONE: {} (FID: {})'.format(zone['name'], zone['fid']))

        # zone_key used for wy_ppt and wy_eto inputs from csv file
        if ini['INPUTS']['zone_field'] == 'FID':
            zone_key = str(zone['fid'])
            print('Using FID as zone_field')
        else:
            zone_key = zone['name']
            print('Using Name as zone_field')

        # Build EE geometry object for zonal stats
        zone['geom'] = ee.Geometry(geo_json=zone['json'],
                                   opt_proj=zone['proj'],
                                   opt_geodesic=False)
        # logging.debug('  Centroid: {}'.format(
        #     zone['geom'].centroid(100).getInfo()['coordinates']))

        # Use feature geometry to build extent, transform, and shape
        zone['extent'] = gdc.Extent(
            ogr.CreateGeometryFromJson(json.dumps(zone['json'])).GetEnvelope())
        # zone['extent'] = gdc.Extent(zone['geom'].GetEnvelope())
        zone['extent'] = zone['extent'].ogrenv_swap()
        zone['extent'] = zone['extent'].adjust_to_snap(
            'EXPAND', ini['SPATIAL']['snap_x'], ini['SPATIAL']['snap_y'],
            ini['SPATIAL']['cellsize'])
        zone['geo'] = zone['extent'].geo(ini['SPATIAL']['cellsize'])
        zone['transform'] = gdc.geo_2_ee_transform(zone['geo'])
        # zone['transform'] = '[' + ','.join(map(str, zone['transform'])) + ']'
        zone['shape'] = zone['extent'].shape(ini['SPATIAL']['cellsize'])
        logging.debug('  Zone Shape: {}'.format(zone['shape']))
        logging.debug('  Zone Transform: {}'.format(zone['transform']))
        logging.debug('  Zone Extent: {}'.format(zone['extent']))
        # logging.debug('  Zone Geom: {}'.format(zone['geom'].getInfo()))

        # Assume all pixels in all 14+2 images could be reduced
        zone['max_pixels'] = zone['shape'][0] * zone['shape'][1]
        logging.debug('  Max Pixels: {}'.format(zone['max_pixels']))

        # Set output spatial reference
        # Eventually allow user to manually set these
        # output_crs = zone['proj']
        # ini['INPUTS']['transform'] = zone['transform']
        logging.debug('  Output Projection: {}'.format(ini['SPATIAL']['crs']))
        logging.debug('  Output Transform: {}'.format(zone['transform']))

        # Process date range by year
        start_dt = datetime.datetime(ini['INPUTS']['start_year'], 1, 1)
        end_dt = datetime.datetime(ini['INPUTS']['end_year'] + 1, 1,
                                   1) - datetime.timedelta(0, 1)
        iter_months = ini['BEAMER']['month_step']
        for i, iter_start_dt in enumerate(
                rrule.rrule(
                    # rrule.YEARLY, interval=interval_cnt,
                    rrule.MONTHLY,
                    interval=iter_months,
                    dtstart=start_dt,
                    until=end_dt)):
            iter_end_dt = (
                iter_start_dt +
                # relativedelta.relativedelta(years=interval_cnt) -
                relativedelta.relativedelta(months=iter_months) -
                datetime.timedelta(0, 1))
            if ((ini['INPUTS']['start_month']
                 and iter_end_dt.month < ini['INPUTS']['start_month'])
                    or (ini['INPUTS']['end_month']
                        and iter_start_dt.month > ini['INPUTS']['end_month'])):
                logging.debug('  {}  {}  skipping'.format(
                    iter_start_dt.date(), iter_end_dt.date()))
                continue
            elif (
                (ini['INPUTS']['start_doy'] and
                 int(iter_end_dt.strftime('%j')) < ini['INPUTS']['start_doy'])
                    or
                (ini['INPUTS']['end_doy'] and int(iter_start_dt.strftime('%j'))
                 > ini['INPUTS']['end_doy'])):
                logging.debug('  {}  {}  skipping'.format(
                    iter_start_dt.date(), iter_end_dt.date()))
                continue
            else:
                logging.info('  {}  {}'.format(iter_start_dt.date(),
                                               iter_end_dt.date()))
            year = iter_start_dt.year

            # Filter the GRIDMET collection
            wy_start_date = '{}-10-01'.format(year - 1)
            wy_end_date = '{}-10-01'.format(year)
            logging.debug('  WY: {} {}'.format(wy_start_date, wy_end_date))
            gridmet_coll = ee.ImageCollection('IDAHO_EPSCOR/GRIDMET') \
                .filterDate(wy_start_date, wy_end_date)

            # # PRISM collection was uploaded as an asset
            # if ini['BEAMER']['ppt_source'] == 'prism':
            #     def prism_time_start(input_image):
            #         """Set time_start property on PRISM water year PPT collection"""
            #         # Assume year is the 4th item separated by "_"
            #         wy = ee.String(
            #             input_image.get('system:index')).split('_').get(3)
            #         date_start = ee.Date(ee.String(wy).cat('-10-01'))
            #         return input_image.select([0], ['ppt']).setMulti({
            #             'system:time_start': date_start.advance(-1, 'year').millis()
            #         })
            #     prism_coll = ee.ImageCollection('users/cgmorton/prism_800m_ppt_wy')
            #     prism_coll = ee.ImageCollection(prism_coll.map(prism_time_start)) \
            #         .filterDate(wy_start_dt, wy_end_dt)
            #     # prism_coll = ee.ImageCollection(
            #     #     ee_common.MapsEngineAssets.prism_ppt_wy).filterDate(
            #     #         wy_start_dt, wy_end_dt)

            # Get water year PPT for centroid of zone or read from file
            # Convert all input data to mm to match GRIDMET data
            if ini['BEAMER']['ppt_source'] == 'file':
                wy_ppt_input = ppt_dict[zone_key][year]
                if ini['BEAMER']['data_ppt_units'] == 'mm':
                    pass
                elif ini['BEAMER']['data_ppt_units'] == 'in':
                    wy_ppt_input *= 25.4
                elif ini['BEAMER']['data_ppt_units'] == 'ft':
                    wy_ppt_input *= (25.4 * 12)
            elif ini['BEAMER']['ppt_source'] == 'gridmet':
                wy_ppt_input = float(
                    utils.ee_getinfo(
                        ee.ImageCollection(
                            gridmet_coll.select(['pr'],
                                                ['ppt']).sum()).getRegion(
                                                    zone['geom'].centroid(1),
                                                    500))[1][4])
                # Calculate GRIDMET zonal mean of geometry
                # wy_ppt_input = float(ee.ImageCollection(
                #     gridmet_coll.select(['pr'], ['ppt'])).reduceRegion(
                #         reducer=ee.Reducer.sum(),
                #         geometry=zone['geom'],
                #         crs=ini['SPATIAL']['crs'],
                #         crsTransform=zone['transform'],
                #         bestEffort=False,
                #         tileScale=1).getInfo()['ppt']
            # elif ini['BEAMER']['ppt_source'] == 'prism':
            #     # Calculate PRISM zonal mean of geometry
            #     wy_ppt_input = float(utils.ee_getinfo(ee.ImageCollection(
            #         prism_coll.map(ee_common.prism_ppt_func)).sum().reduceRegion(
            #             reducer=ee.Reducer.mean(),
            #             geometry=zone['geom'],
            #             crs=ini['SPATIAL']['crs'],
            #             crsTransform=zone['transform'],
            #             bestEffort=False,
            #             tileScale=1))['ppt'])

            # Get water year ETo for centroid of zone or read from file
            # Convert all input data to mm for Beamer Method
            # NOTE: lowercase 'file' to match the ppt_source check above
            if ini['BEAMER']['eto_source'] == 'file':
                wy_eto_input = eto_dict[zone_key][year]
                if ini['BEAMER']['data_eto_units'] == 'mm':
                    pass
                elif ini['BEAMER']['data_eto_units'] == 'in':
                    wy_eto_input *= 25.4
                elif ini['BEAMER']['data_eto_units'] == 'ft':
                    wy_eto_input *= (25.4 * 12)
            # This assumes GRIDMET data is in millimeters
            elif ini['BEAMER']['eto_source'] == 'gridmet':
                wy_eto_input = float(
                    utils.ee_getinfo(
                        ee.ImageCollection(gridmet_coll.select(
                            ['eto']).sum()).getRegion(zone['geom'].centroid(1),
                                                      500))[1][4])
                # wy_eto_input = float(ee.ImageCollection(
                #     gridmet_coll.select(['eto'])).reduceRegion(
                #         reducer=ee.Reducer.sum(),
                #         geometry=zone['geom'],
                #         crs=ini['SPATIAL']['crs'],
                #         crsTransform=zone['transform'],
                #         bestEffort=False,
                #         tileScale=1).getInfo()
            logging.debug('  Input  ETO: {} mm  PPT: {} mm'.format(
                wy_eto_input, wy_ppt_input))

            # Scale ETo & PPT
            wy_eto_input *= ini['BEAMER']['eto_factor']
            wy_ppt_input *= ini['BEAMER']['ppt_factor']

            # Convert output units from mm
            wy_eto_output = wy_eto_input
            wy_ppt_output = wy_ppt_input
            if ini['BEAMER']['ppt_units'] == 'mm':
                pass
            elif ini['BEAMER']['ppt_units'] == 'in':
                wy_ppt_output /= 25.4
            elif ini['BEAMER']['ppt_units'] == 'ft':
                wy_ppt_output /= (25.4 * 12)
            if ini['BEAMER']['eto_units'] == 'mm':
                pass
            elif ini['BEAMER']['eto_units'] == 'in':
                wy_eto_output /= 25.4
            elif ini['BEAMER']['eto_units'] == 'ft':
                wy_eto_output /= (25.4 * 12)
            logging.debug('  Output ETO: {} {}  PPT: {} {}'.format(
                wy_eto_output, ini['BEAMER']['eto_units'], wy_ppt_output,
                ini['BEAMER']['ppt_units']))

            # Initialize the Landsat object
            landsat.zone_geom = zone['geom']
            landsat.start_date = iter_start_dt.strftime('%Y-%m-%d')
            landsat.end_date = iter_end_dt.strftime('%Y-%m-%d')
            landsat_coll = landsat.get_collection()
            if ee.Image(landsat_coll.first()).getInfo() is None:
                logging.info('    No images, skipping')
                continue

            # # Print the collection SCENE_ID list
            # logging.debug('{}'.format(', '.join([
            #     f['properties']['SCENE_ID']
            #     for f in landsat_coll.getInfo()['features']])))
            # input('ENTER')

            # Add water year ETo and PPT values to each image
            def eto_ppt_func(img):
                """Set the water year ETo and PPT values as image properties"""
                return ee.Image(img).setMulti({
                    'wy_eto': wy_eto_input,
                    'wy_ppt': wy_ppt_input
                })

            landsat_coll = ee.ImageCollection(landsat_coll.map(eto_ppt_func))

            # Compute ETg
            image_coll = ee.ImageCollection(landsat_coll.map(landsat_etg_func))

            # # Get the output image URL
            # output_url = ee.Image(landsat_coll.first()) \
            #     .select(['red', 'green', 'blue']) \
            #     .visualize(min=[0, 0, 0], max=[0.4, 0.4, 0.4]) \
            #     .getThumbUrl({'format': 'png', 'size': '600'})
            # # This would load the image in your browser
            # import webbrowser
            # webbrowser.open(output_url)
            # # webbrowser.read(output_url)

            # # Show the output image
            # window = tk.Tk()
            # output_file = Image.open(io.BytesIO(urllib.urlopen(output_url).read()))
            # output_photo = ImageTk.PhotoImage(output_file)
            # label = tk.Label(window, image=output_photo)
            # label.pack()
            # window.mainloop()

            # Compute zonal stats of polygon
            def beamer_zonal_stats_func(input_image):
                """Compute zonal statistics for a single Beamer image"""
                # Beamer function adds 5 ETg and 1 ET* band
                # Landsat collection adds 3 ancillary bands
                bands = len(landsat_args['products']) + 3 + 6

                # .clip(zone['geom']) \
                input_mean = input_image \
                    .reduceRegion(
                        reducer=ee.Reducer.mean(),
                        geometry=zone['geom'],
                        crs=ini['SPATIAL']['crs'],
                        crsTransform=zone['transform'],
                        bestEffort=False,
                        tileScale=1,
                        maxPixels=zone['max_pixels'] * bands)

                fmask_img = input_image.select(['fmask'])
                input_count = fmask_img.gt(1) \
                    .addBands(fmask_img.gte(0).unmask()) \
                    .rename(['fmask', 'pixel']) \
                    .reduceRegion(
                        reducer=ee.Reducer.sum().combine(
                            ee.Reducer.count(), '', True),
                        geometry=zone['geom'],
                        crs=ini['SPATIAL']['crs'],
                        crsTransform=zone['transform'],
                        bestEffort=False,
                        tileScale=1,
                        maxPixels=zone['max_pixels'] * 3)

                etstar_count = input_image \
                    .select(['etstar_mean'], ['etstar_count']) \
                    .lte(ini['BEAMER']['etstar_threshold']) \
                    .reduceRegion(
                        reducer=ee.Reducer.sum(),
                        geometry=zone['geom'],
                        crs=ini['SPATIAL']['crs'],
                        crsTransform=zone['transform'],
                        bestEffort=False,
                        tileScale=1,
                        maxPixels=zone['max_pixels'] * 2)

                # Save as image properties
                return ee.Feature(
                    None, {
                        'scene_id': ee.String(input_image.get('SCENE_ID')),
                        'time': input_image.get('system:time_start'),
                        'row': input_mean.get('row'),
                        'pixel_count': input_count.get('pixel_sum'),
                        'pixel_total': input_count.get('pixel_count'),
                        'fmask_count': input_count.get('fmask_sum'),
                        'fmask_total': input_count.get('fmask_count'),
                        'cloud_score': input_mean.get('cloud_score'),
                        'etstar_count': etstar_count.get('etstar_count'),
                        'ndvi_toa': input_mean.get('ndvi_toa'),
                        'ndwi_toa': input_mean.get('ndwi_toa'),
                        'albedo_sur': input_mean.get('albedo_sur'),
                        'ts': input_mean.get('ts'),
                        'evi_sur': input_mean.get('evi_sur'),
                        'etstar_mean': input_mean.get('etstar_mean'),
                        'etg_mean': input_mean.get('etg_mean'),
                        'etg_lpi': input_mean.get('etg_lpi'),
                        'etg_upi': input_mean.get('etg_upi'),
                        'etg_lci': input_mean.get('etg_lci'),
                        'etg_uci': input_mean.get('etg_uci')
                    })

            # Calculate values and statistics
            stats_coll = ee.ImageCollection(
                image_coll.map(beamer_zonal_stats_func))

            # # DEADBEEF - Test the function for a single image
            # stats_info = beamer_zonal_stats_func(
            #     ee.Image(image_coll.first())).getInfo()
            # print(stats_info)
            # for k, v in sorted(stats_info['properties'].items()):
            #     logging.info('{:24s}: {}'.format(k, v))
            # input('ENTER')
            # return False

            # # DEADBEEF - Print the stats info to the screen
            # stats_info = stats_coll.getInfo()
            # import pprint
            # pp = pprint.PrettyPrinter(indent=4)
            # for ftr in stats_info['features']:
            #     pp.pprint(ftr)
            # input('ENTER')
            # # return False

            # Get the values from EE
            stats_desc = utils.ee_getinfo(stats_coll)
            if stats_desc is None:
                logging.error('  Timeout error, skipping')
                continue

            # Save data for writing
            row_list = []
            for ftr in stats_desc['features']:
                try:
                    count = int(ftr['properties']['pixel_count'])
                except (KeyError, TypeError) as e:
                    # logging.debug('  Exception: {}'.format(e))
                    continue
                if count == 0:
                    logging.info('  COUNT: 0, skipping')
                    continue

                # First get scene ID and time
                try:
                    scene_id = landsat_re.findall(
                        ftr['properties']['scene_id'])[0]
                    scene_time = datetime.datetime.utcfromtimestamp(
                        float(ftr['properties']['time']) / 1000)
                except Exception:
                    pp = pprint.PrettyPrinter(indent=4)
                    pp.pprint(ftr)
                    input('ENTER')

                # Extract and save other properties
                try:
                    row_list.append({
                        'ZONE_FID':
                        zone_fid,
                        'ZONE_NAME':
                        zone_name,
                        'SCENE_ID':
                        scene_id,
                        'PLATFORM':
                        scene_id[0:4],
                        'PATH':
                        int(scene_id[5:8]),
                        'ROW':
                        int(ftr['properties']['row']),
                        # 'ROW': int(scene_id[8:11]),
                        'DATE':
                        scene_time.date().isoformat(),
                        'YEAR':
                        int(scene_time.year),
                        'MONTH':
                        int(scene_time.month),
                        'DAY':
                        int(scene_time.day),
                        'DOY':
                        int(scene_time.strftime('%j')),
                        'PIXEL_COUNT':
                        int(ftr['properties']['pixel_count']),
                        'PIXEL_TOTAL':
                        int(ftr['properties']['pixel_total']),
                        'FMASK_COUNT':
                        int(ftr['properties']['fmask_count']),
                        'FMASK_TOTAL':
                        int(ftr['properties']['fmask_total']),
                        'CLOUD_SCORE':
                        float(ftr['properties']['cloud_score']),
                        'ETSTAR_COUNT':
                        int(ftr['properties']['etstar_count']),
                        'NDVI_TOA':
                        float(ftr['properties']['ndvi_toa']),
                        'NDWI_TOA':
                        float(ftr['properties']['ndwi_toa']),
                        'ALBEDO_SUR':
                        float(ftr['properties']['albedo_sur']),
                        'TS':
                        float(ftr['properties']['ts']),
                        'EVI_SUR':
                        float(ftr['properties']['evi_sur']),
                        'ETSTAR_MEAN':
                        float(ftr['properties']['etstar_mean']),
                        'ETG_MEAN':
                        float(ftr['properties']['etg_mean']),
                        'ETG_LPI':
                        float(ftr['properties']['etg_lpi']),
                        'ETG_UPI':
                        float(ftr['properties']['etg_upi']),
                        'ETG_LCI':
                        float(ftr['properties']['etg_lci']),
                        'ETG_UCI':
                        float(ftr['properties']['etg_uci']),
                        'WY_ETO':
                        wy_eto_output,
                        'WY_PPT':
                        wy_ppt_output
                    })
                except (KeyError, TypeError) as e:
                    logging.info('  ERROR: {}\n  SCENE_ID: {}\n  '
                                 '  There may not be an SR image to join to\n'
                                 '  {}'.format(e, scene_id, ftr['properties']))
                    # input('ENTER')

            # Save all values to the dataframe (and export)
            if row_list:
                logging.debug('  Appending')
                # DataFrame.append() was removed in pandas 2.0; concat is the
                # equivalent operation
                data_df = pd.concat([data_df, pd.DataFrame(row_list)],
                                    ignore_index=True)

                # DEADBEEF
                if data_df['QA'].isnull().any():
                    data_df.loc[data_df['QA'].isnull(), 'QA'] = 0
                fmask_mask = data_df['FMASK_TOTAL'] > 0
                if fmask_mask.any():
                    data_df.loc[fmask_mask, 'FMASK_PCT'] = 100.0 * (
                        data_df.loc[fmask_mask, 'FMASK_COUNT'] /
                        data_df.loc[fmask_mask, 'FMASK_TOTAL'])

                logging.debug('  Saving')
                data_df[int_fields] = data_df[int_fields].astype(np.int64)
                data_df[float_fields] = data_df[float_fields].astype(
                    np.float32)

                # Compute ET from ETg and PPT offline
                # (must be after float conversion above)
                data_df['ET_MEAN'] = data_df['ETG_MEAN'] + data_df['WY_PPT']
                data_df['ET_LPI'] = data_df['ETG_LPI'] + data_df['WY_PPT']
                data_df['ET_UPI'] = data_df['ETG_UPI'] + data_df['WY_PPT']
                data_df['ET_LCI'] = data_df['ETG_LCI'] + data_df['WY_PPT']
                data_df['ET_UCI'] = data_df['ETG_UCI'] + data_df['WY_PPT']

                # Convert float fields to objects, set NaN to None
                for field in data_df.columns.values:
                    if field.upper() not in float_fields:
                        continue
                    data_df[field] = data_df[field].astype(object)
                    null_mask = data_df[field].isnull()
                    data_df.loc[null_mask, field] = None
                    data_df.loc[~null_mask,
                                field] = data_df.loc[~null_mask, field].map(
                                    lambda x: '{0:10.6f}'.format(x).strip())
                    # data_df.loc[~null_mask, [field]] = data_df.loc[~null_mask, [field]].apply(
                    #     lambda x: '{0:10.6f}'.format(x[0]).strip(), axis=1)

                # data_df = data_df.reindex_axis(header_list, axis=1)
                data_df = data_df.reindex(header_list, axis=1)
                # data_df.reset_index(drop=False, inplace=True)
                data_df.sort_values(['ZONE_FID', 'DATE', 'ROW'],
                                    ascending=True,
                                    inplace=True)
                # data_df.sort(
                #     ['ZONE_NAME', 'DATE'], ascending=[True, True], inplace=True)
                data_df.to_csv(output_path, index=False)
            del row_list
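This example depends on a project-local utils.ee_getinfo() helper that is not shown. The call sites above imply it wraps getInfo() with retries and returns None on persistent failure (see the "Timeout error, skipping" branch); a hypothetical minimal sketch:

import logging
import time


def ee_getinfo(ee_obj, n=4, pause=30):
    """Hypothetical retrying getInfo() wrapper; the real helper is project-specific."""
    for i in range(n):
        try:
            return ee_obj.getInfo()
        except Exception as e:
            logging.info('  getInfo() failed, retry {}/{}: {}'.format(i + 1, n, e))
            time.sleep(pause)
    return None  # the caller treats None as a timeout and skips the period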
Example #3
def rcbl(parcel, start_date, end_date, bands, chipsize, filespath, quiet=True):
    """Get parcel raw chip images from RESTful API by location"""
    import os
    import time
    from os.path import isfile, join, normpath

    import pandas as pd
    import requests
    from osgeo import ogr, osr

    start = time.time()
    # `config` is assumed to be a project-local module providing credentials
    api_url, api_user, api_pass = config.credentials('api')

    for band in bands:
        requrl = """{}/query/rawChipByLocation?lon={}&lat={}&start_date={}&end_date={}"""
        if band is not None:
            requrl = f"{requrl}&band={band}"
        if chipsize is not None:
            requrl = f"{requrl}&chipsize={chipsize}"

        # Create a valid geometry from the returned JSON withGeometry
        geom = ogr.CreateGeometryFromJson(parcel.get('geom')[0])
        source = osr.SpatialReference()
        source.ImportFromEPSG(parcel.get('srid')[0])

        # Assign this projection to the geometry
        geom.AssignSpatialReference(source)
        target = osr.SpatialReference()
        target.ImportFromEPSG(4326)
        transform = osr.CoordinateTransformation(source, target)

        # And get the lon, lat for its centroid, so that we can center the chips
        # on the parcel
        centroid = geom.Centroid()
        centroid.Transform(transform)

        # Use pid for next request
        # pid = parcel['pid'][0]
        # cropname = parcel['cropname'][0]

        # Set up the rawChip request
        cen_x, cen_y = str(centroid.GetX()), str(centroid.GetY())

        # band and chipsize were already substituted into requrl by the
        # f-strings above, so only the five '{}' placeholders remain
        response = requests.get(requrl.format(api_url, cen_y, cen_x,
                                              start_date, end_date),
                                auth=(api_user, api_pass))
        if not quiet:
            print("Request url:",
                  requrl.format(api_url, cen_y, cen_x, start_date, end_date))
            print("Geom:", geom)
            print("Source:", source, ", Target:", target)
            print("Centroid", centroid)
            print("Response:", response)
        # Directly create a pandas DataFrame from the json response
        df = pd.read_json(response.content)
        os.makedirs(filespath, exist_ok=True)
        df_file = normpath(join(filespath, f'images_list.{band}.csv'))
        df.to_csv(df_file, index=True, header=True)
        # print(f"The response table is saved to: {df_file}")

        # Download the GeoTIFFs that were just created in the user cache
        for c in df.chips:
            url = f"{api_url}{c}"
            outf = normpath(join(filespath, c.split('/')[-1]))
            if not isfile(outf):
                res = requests.get(url, stream=True)
                if not quiet:
                    print(f"Downloading {c.split('/')[-1]}")
                with open(outf, "wb") as handle:
                    for chunk in res.iter_content(chunk_size=512):
                        if chunk:  # filter out keep-alive new chunks
                            handle.write(chunk)
        if not quiet:
            print(f"Images for band '{band}' for the selected dates "
                  "have been downloaded.")

    if not quiet:
        print("\n------Total time------")
        print(
            f"Total time required for {len(bands)} bands: {time.time() - start} seconds."
        )
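A hypothetical call, assuming parcel mirrors the RESTful parcel response used above (a dict of lists, with 'geom' holding GeoJSON text and 'srid' an EPSG code):

parcel = {'geom': ['{"type": "Point", "coordinates": [25.57, 44.16]}'],
          'srid': [4326]}
rcbl(parcel, '2020-06-01', '2020-06-30', bands=['B04', 'B08'],
     chipsize=1280, filespath='data/chips/', quiet=False)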
Example #4
def crop_by_geojson(file_name):
    # geojson = jsonify(request.get_json(force=True))
    geojson = {
        "type": "Feature",
        "properties": {
            "id": 1
        },
        "geometry": {
            "type":
            "Multipolygon",
            "coordinates": [[[[27.397511483867362, 44.161117466010516],
                              [27.393924221672666, 44.159751598403503],
                              [27.393556666460618, 44.159252063395591],
                              [27.393726740035870, 44.158373985750522],
                              [27.392040835956994, 44.157378400690988],
                              [27.390354358253163, 44.156239034941315],
                              [27.390977924658255, 44.152849194060536],
                              [27.391438333095618, 44.149298658002031],
                              [27.386781918912796, 44.147461728155896],
                              [27.384487250437232, 44.146859408403664],
                              [27.382636468741264, 44.156671855578281],
                              [27.383891699721374, 44.156645049015140],
                              [27.384649769913505, 44.157388133683327],
                              [27.385547083122507, 44.160232076255667],
                              [27.387997850095061, 44.160722084482430],
                              [27.390672446485077, 44.161638147279866],
                              [27.395361188085396, 44.163429614137918],
                              [27.396513835695238, 44.162325787855522],
                              [27.397511483867362, 44.161117466010516]]]]
        }
    }

    min_x, max_x, min_y, max_y = bbox(geojson)

    # Register Imagine driver and open file
    driver = gdal.GetDriverByName('GTiff')
    driver.Register()
    dataset = gdal.Open('{}/{}.tif'.format(image_dir, file_name))
    if dataset is None:
        print('Could not open {}.tif'.format(file_name))
        sys.exit(1)

    # Getting image dimensions
    cols = dataset.RasterXSize
    rows = dataset.RasterYSize
    bands = dataset.RasterCount

    # Getting georeference info
    transform = dataset.GetGeoTransform()
    projection = dataset.GetProjection()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = -transform[5]

    # Getting spatial reference of input raster
    srs = osr.SpatialReference()
    srs.ImportFromWkt(projection)

    # WGS84 projection reference
    OSR_WGS84_REF = osr.SpatialReference()
    OSR_WGS84_REF.ImportFromEPSG(4326)

    # OSR transformation
    wgs84_to_image_transformation = osr.CoordinateTransformation(
        OSR_WGS84_REF, srs)
    XYmin = wgs84_to_image_transformation.TransformPoint(min_x, max_y)
    XYmax = wgs84_to_image_transformation.TransformPoint(max_x, min_y)

    # Computing Point1(i1,j1), Point2(i2,j2)
    i1 = int((XYmin[0] - xOrigin) / pixelWidth)
    j1 = int((yOrigin - XYmin[1]) / pixelHeight)
    i2 = int((XYmax[0] - xOrigin) / pixelWidth)
    j2 = int((yOrigin - XYmax[1]) / pixelHeight)
    new_cols = i2 - i1 + 1
    new_rows = j2 - j1 + 1

    # New upper-left X,Y values
    new_x = xOrigin + i1 * pixelWidth
    new_y = yOrigin - j1 * pixelHeight
    new_transform = (new_x, transform[1], transform[2], new_y, transform[4],
                     transform[5])

    # json.dumps() is required here: str() of a Python dict yields single
    # quotes, which is not valid JSON
    wkt_geom = ogr.CreateGeometryFromJson(json.dumps(geojson['geometry']))
    wkt_geom.Transform(wgs84_to_image_transformation)

    target_ds = gdal.GetDriverByName('MEM').Create('', new_cols, new_rows,
                                                   bands, gdal.GDT_Byte)
    target_ds.SetGeoTransform(new_transform)
    target_ds.SetProjection(projection)

    # Create a memory layer to rasterize from.
    driver = ogr.GetDriverByName('Memory')
    memds = driver.CreateDataSource('tmpmemds')

    lyr = memds.CreateLayer('poly', geom_type=ogr.wkbMultiPolygon)
    feat = ogr.Feature(lyr.GetLayerDefn())
    feat.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom.ExportToWkt()))
    lyr.CreateFeature(feat)

    gdal.RasterizeLayer(target_ds, [1, 2, 3], lyr, burn_values=[1, 1, 1])

    # Create output file
    output_file = '{}/{}_cutted_by_geojson.tif'.format(image_dir, file_name)
    driver = gdal.GetDriverByName('GTiff')
    outds = driver.Create(output_file, new_cols, new_rows, bands,
                          gdal.GDT_Byte)

    # Read in bands and store all the data in bandList
    mask_array = []
    band_list = []
    for i in range(bands):
        band_list.append(
            dataset.GetRasterBand(i + 1).ReadAsArray(i1, j1, new_cols,
                                                     new_rows))
        mask_array.append(target_ds.GetRasterBand(i + 1).ReadAsArray())

    for j in range(bands):
        data = numpy.where(mask_array[j] == 1, band_list[j], 0)
        outds.GetRasterBand(j + 1).WriteArray(data)

    outds.SetProjection(projection)
    outds.SetGeoTransform(new_transform)

    dataset = None
    outds = None
    return send_file(output_file, as_attachment=True)
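The bbox() helper is not shown in this snippet; the unpacking above implies it returns (min_x, max_x, min_y, max_y). A hypothetical stand-in that walks the (arbitrarily nested) coordinate arrays of a single GeoJSON feature:

def bbox(feature):
    def _points(c):
        # Recurse until the innermost [x, y] pairs are reached
        if isinstance(c[0], (int, float)):
            yield c
        else:
            for sub in c:
                yield from _points(sub)
    pts = list(_points(feature['geometry']['coordinates']))
    xs, ys = zip(*((p[0], p[1]) for p in pts))
    return min(xs), max(xs), min(ys), max(ys)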
Example #5
def cut_by_geojson(input_file, output_file, shape_geojson):

    # Get coords for bounding box
    x, y = zip(*gj.utils.coords(gj.loads(shape_geojson)))
    min_x, max_x, min_y, max_y = min(x), max(x), min(y), max(y)

    # Open original data as read only
    dataset = gdal.Open(input_file, gdal.GA_ReadOnly)

    bands = dataset.RasterCount

    # Getting georeference info
    transform = dataset.GetGeoTransform()
    projection = dataset.GetProjection()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = -transform[5]

    # Getting spatial reference of input raster
    srs = osr.SpatialReference()
    srs.ImportFromWkt(projection)

    # WGS84 projection reference
    OSR_WGS84_REF = osr.SpatialReference()
    OSR_WGS84_REF.ImportFromEPSG(4326)

    # OSR transformation
    wgs84_to_image_transformation = osr.CoordinateTransformation(
        OSR_WGS84_REF, srs)
    XYmin = wgs84_to_image_transformation.TransformPoint(min_x, max_y)
    XYmax = wgs84_to_image_transformation.TransformPoint(max_x, min_y)

    # Computing Point1(i1,j1), Point2(i2,j2)
    i1 = int((XYmin[0] - xOrigin) / pixelWidth)
    j1 = int((yOrigin - XYmin[1]) / pixelHeight)
    i2 = int((XYmax[0] - xOrigin) / pixelWidth)
    j2 = int((yOrigin - XYmax[1]) / pixelHeight)
    new_cols = i2 - i1 + 1
    new_rows = j2 - j1 + 1

    # New upper-left X,Y values
    new_x = xOrigin + i1 * pixelWidth
    new_y = yOrigin - j1 * pixelHeight
    new_transform = (new_x, transform[1], transform[2], new_y, transform[4],
                     transform[5])

    wkt_geom = ogr.CreateGeometryFromJson(str(shape_geojson))
    wkt_geom.Transform(wgs84_to_image_transformation)

    target_ds = GDAL_MEMORY_DRIVER.Create('', new_cols, new_rows, 1,
                                          gdal.GDT_Byte)
    target_ds.SetGeoTransform(new_transform)
    target_ds.SetProjection(projection)

    # Create a memory layer to rasterize from.
    ogr_dataset = OGR_MEMORY_DRIVER.CreateDataSource('shapemask')
    ogr_layer = ogr_dataset.CreateLayer('shapemask', srs=srs)
    ogr_feature = ogr.Feature(ogr_layer.GetLayerDefn())
    ogr_feature.SetGeometryDirectly(ogr.Geometry(wkt=wkt_geom.ExportToWkt()))
    ogr_layer.CreateFeature(ogr_feature)

    gdal.RasterizeLayer(target_ds, [1],
                        ogr_layer,
                        burn_values=[1],
                        options=["ALL_TOUCHED=TRUE"])

    # Create output file
    driver = gdal.GetDriverByName('GTiff')
    outds = driver.Create(output_file, new_cols, new_rows, bands,
                          gdal.GDT_Float32)

    # Read in bands and store all the data in bandList
    mask_array = target_ds.GetRasterBand(1).ReadAsArray()
    band_list = []

    for i in range(bands):
        band_list.append(
            dataset.GetRasterBand(i + 1).ReadAsArray(i1, j1, new_cols,
                                                     new_rows))

    for j in range(bands):
        data = np.where(mask_array == 1, band_list[j], mask_array)
        outds.GetRasterBand(j + 1).SetNoDataValue(0)
        outds.GetRasterBand(j + 1).WriteArray(data)

    outds.SetProjection(projection)
    outds.SetGeoTransform(new_transform)

    target_ds = None
    dataset = None
    outds = None
    ogr_dataset = None
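GDAL_MEMORY_DRIVER and OGR_MEMORY_DRIVER are module-level globals that this snippet does not show; they are presumably created once at import time, along these lines (the gj and np aliases match the usage above):

import geojson as gj
import numpy as np
from osgeo import gdal, ogr, osr

GDAL_MEMORY_DRIVER = gdal.GetDriverByName('MEM')
OGR_MEMORY_DRIVER = ogr.GetDriverByName('Memory')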
Example #6
    def btn_calculate(self):

        ret = super(DlgCalculateForestFire, self).btn_calculate()
        if not ret:
            return

        self.close()

        if self.radio_landsat8.isChecked():
            prod_mode = 'L8'
        else:
            prod_mode = 'S2'

        crosses_180th, geojsons = self.aoi.bounding_box_gee_geojson()
        val = []
        n = 1

        if self.area_tab.area_fromfile.isChecked():
            for f in self.aoi.get_layer_wgs84().getFeatures():
                # Get an OGR geometry from the QGIS geometry
                geom = f.geometry()
                val.append(geom)
                n += 1

            # QgsGeometry.asJson() already returns a JSON string; the original
            # json.loads() + '{}'.format() round trip produced a Python dict
            # repr with single quotes, which is not valid JSON
            val_string = val[0].asJson()

            # create ogr geometry
            val_geom = ogr.CreateGeometryFromJson(val_string)
            # simplify polygon to tolerance of 0.003
            val_geom_simplified = val_geom.Simplify(0.003)

            # fetch coordinates from json
            coords = json.loads(
                val_geom_simplified.ExportToJson())['coordinates']
            geometries = json.dumps([{
                "coordinates": coords,
                "type": "Polygon"
            }])

        elif self.area_tab.area_fromadmin.isChecked():
            geometries = json.dumps([{
                "coordinates":
                self.get_admin_poly_geojson()['geometry']['coordinates'][0],
                "type":
                "Polygon"
            }])
        elif self.area_tab.area_frompoint.isChecked():
            point = QgsPointXY(
                float(self.area_tab.area_frompoint_point_x.text()),
                float(self.area_tab.area_frompoint_point_y.text()))
            crs_src = QgsCoordinateReferenceSystem(
                self.area_tab.canvas.mapSettings().destinationCrs().authid())
            point = QgsCoordinateTransform(
                crs_src, self.aoi.crs_dst,
                QgsProject.instance()).transform(point)
            geometries = json.dumps(
                json.loads(QgsGeometry.fromPointXY(point).asJson()))

        # area = self.aoi.get_area()/(1000 * 1000)
        # log('{0}'.format(poly))

        date_format = '{0}-{1}-{2}'
        prefire_start = date_format.format(
            self.prefire_start_btn.date().year(),
            self.prefire_start_btn.date().month(),
            self.prefire_start_btn.date().day())
        prefire_end = date_format.format(self.prefire_end_btn.date().year(),
                                         self.prefire_end_btn.date().month(),
                                         self.prefire_end_btn.date().day())
        postfire_start = date_format.format(
            self.postfire_start_btn.date().year(),
            self.postfire_start_btn.date().month(),
            self.postfire_start_btn.date().day())
        postfire_end = date_format.format(self.postfire_end_btn.date().year(),
                                          self.postfire_end_btn.date().month(),
                                          self.postfire_end_btn.date().day())
        payload = {
            'prod_mode': prod_mode,
            #    'area':area,
            'prefire_start_btn': prefire_start,
            'prefire_end_btn': prefire_end,
            'postfire_start_btn': postfire_start,
            'postfire_end_btn': postfire_end,
            'geojsons': geometries,
            # 'geojsons':json.dumps(geojsons),
            'crs': self.aoi.get_crs_dst_wkt(),
            'crosses_180th': crosses_180th,
            #    'og_simple':'{}'.format(og_simple),
            'task_name': self.options_tab.task_name.text(),
            'task_notes': self.options_tab.task_notes.toPlainText()
        }

        resp = run_script(get_script_slug('forest-fire'), payload)

        if resp:
            mb.pushMessage(
                QtWidgets.QApplication.translate("MISLAND", "Submitted"),
                QtWidgets.QApplication.translate(
                    "MISLAND",
                    "Forest Fire task submitted to Google Earth Engine."),
                level=0,
                duration=5)
        else:
            mb.pushMessage(
                QtWidgets.QApplication.translate("MISLAND", "Error"),
                QtWidgets.QApplication.translate(
                    "MISLAND",
                    "Unable to submit forest fire task to Google Earth Engine."
                ),
                level=0,
                duration=5)
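The core geometry hand-off in this method (QgsGeometry -> OGR -> simplified GeoJSON coordinates) can be exercised on its own; a standalone sketch, assuming qgs_geom is a QgsGeometry already reprojected to WGS84:

import json

from osgeo import ogr


def simplified_coords(qgs_geom, tolerance=0.003):
    # QgsGeometry.asJson() returns GeoJSON text that OGR can parse directly
    ogr_geom = ogr.CreateGeometryFromJson(qgs_geom.asJson())
    return json.loads(ogr_geom.Simplify(tolerance).ExportToJson())['coordinates']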
Example #7
def do_run(fgmj):
    startup = {
                'ffmc':          {'value': 85.0},
                'dmc':           {'value': 6.0},
                'dc':            {'value': 15.0},
                'precipitation': {'value': 0.0},
              }
    region = os.path.basename(os.path.dirname(os.path.dirname(fgmj)))
    job_name = os.path.basename(os.path.dirname(fgmj))
    job_time = job_name[job_name.rindex('_') + 1:-4]
    job_date = job_time[:8]
    fire_name = job_name[:job_name.index('_')]
    out_dir = os.path.join(ROOT_DIR, job_date, region, fire_name, job_time)
    done_already = os.path.exists(out_dir)
    if not done_already:
        common.ensure_dir(out_dir)
        with open(fgmj) as f:
            data = json.load(f)
        MSG_DEFAULT_STARTUP = 'using default startup indices'
        project = data['project']
        stn = try_read_first(project['stations'], 'stations', MSG_DEFAULT_STARTUP)
        if stn is not None:
            stream = try_read_first(stn['station'], 'streams', MSG_DEFAULT_STARTUP)
            if stream is not None:
                startup = stream['condition']['startingCodes']
        unnest_values(startup)
        logging.info("Startup indices are: {}".format(startup))
        ffmc = startup['ffmc']
        dmc = startup['dmc']
        dc = startup['dc']
        apcp_0800 = float(startup['precipitation'])
        if np.isnan(apcp_0800):
            apcp_0800 = 0
        pt = None
        ignition = try_read_first(project['ignitions'], 'ignitions', is_fatal=True)
        ign = try_read_first(ignition['ignition']['ignitions'], 'ignitions', is_fatal=True)
        perim = None
        poly = ign['polygon']
        if poly['units'] != 'LAT_LON':
            logging.fatal("Only lat/long coordinates are currently supported")
            sys.exit(-1)
        if ign['polyType'] != 'POINT':
            if ign['polyType'] == 'POLYGON_OUT':
                pts = poly['polygon']['points']
                pts = list(map(unnest_values, pts))
                pts = [list(map(lambda v: [v['x'], v['y']], pts))]
                lat = statistics.mean(list(map(lambda v: v[1], pts[0])))
                long = statistics.mean(list(map(lambda v: v[0], pts[0])))
                # print(long)
                orig_zone = 15
                orig_long = -93
                diff = long - orig_long
                # print(diff)
                ZONE_SIZE = 6
                zone_diff = round(diff / ZONE_SIZE)
                # print(zone_diff)
                meridian = orig_long + (zone_diff * ZONE_SIZE)
                # print(meridian)
                zone = orig_zone + zone_diff
                # print(pts)
                p = '''{"type": "Polygon",
                        "coordinates": ''' + str(pts) + ''',
                    }'''
                # print(p)
                g = ogr.CreateGeometryFromJson(p)
                # print(g)
                # print("Hi! I'm a %s with an Area  %s" % (g.GetGeometryName(), g.Area()))
                # print("I have inside me %s feature(s)!\n" % g.GetGeometryCount())
                # for idx, f in enumerate(g):
                    # print("I'm feature n.%s and I am a %s.\t I have an Area of %s - You can get my json repr with f.ExportToJson()" % (idx, f.GetGeometryName(),f.Area()))
                source = osr.SpatialReference()
                source.ImportFromEPSG(4269)
                target = osr.SpatialReference()
                target.ImportFromEPSG(3159)
                z = target.ExportToWkt()
                z = z[:z.rindex(",AUTHORITY")] + "]"
                z = z.replace('UTM zone 15N', 'UTM zone {}N')
                z = z.replace('"central_meridian",-93', '"central_meridian",{}')
                z = z.format(zone, meridian)
                # print(z)
                # print(target)
                target.ImportFromWkt(z)
                transform = osr.CoordinateTransformation(source, target)
                g.Transform(transform)
                #print(g)
                # logging.debug("Hi! I'm a %s with an Area  %s" % (g.GetGeometryName(), g.Area()))
                # logging.debug("I have inside me %s feature(s)!\n" % g.GetGeometryCount())
                # for idx, f in enumerate(g):
                    # logging.debug("I'm feature n.%s and I am a %s.\t I have an Area of %s - You can get my json repr with f.ExportToJson()" % (idx, f.GetGeometryName(),f.Area()))
                out_name = '{}.shp'.format(fire_name)
                out_file = os.path.join(out_dir, out_name)
                driver = ogr.GetDriverByName("Esri Shapefile")
                ds = driver.CreateDataSource(out_file)
                layr1 = ds.CreateLayer('', None, ogr.wkbPolygon)
                # create the field
                layr1.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
                # Create the feature and set values
                defn = layr1.GetLayerDefn()
                feat = ogr.Feature(defn)
                feat.SetField('id', 1)
                feat.SetGeometry(g)
                layr1.CreateFeature(feat)
                # close the shapefile
                ds.Destroy()
                target.MorphToESRI()
                with open(os.path.join(out_dir, '{}.prj'.format(fire_name)), 'w') as file:
                    file.write(target.ExportToWkt())
                YEAR = 2021
                perim = firestarr_gis.rasterize_perim(out_dir, out_file, YEAR, fire_name)[1]
            else:
                logging.fatal("Unsupported ignition type {}".format(ign['polyType']))
            if perim is None:
                sys.exit(-1)
        else:
            pt = try_read_first(poly['polygon'], 'points', is_fatal=True)
            if pt is None:
                # should have already exited but check
                logging.fatal("Ignition point not initialized")
                sys.exit(-1)
            unnest_values(pt)
            lat = pt['y']
            long = pt['x']
        logging.info("Startup coordinates are {}, {}".format(lat, long))
        scenario = try_read_first(project['scenarios'], 'scenarios', is_fatal=True)['scenario']
        start_time = scenario['startTime']['time']
        start_time = pd.to_datetime(start_time)
        logging.info("Scenario start time is: {}".format(start_time))
        hour = start_time.hour
        minute = start_time.minute
        # use the public utcoffset() instead of the private tz._minutes attribute
        tz = start_time.utcoffset().total_seconds() / 3600.0
        if math.floor(tz) != tz:
            logging.fatal("Currently not set up to deal with partial hour timezones")
            sys.exit(-1)
        tz = int(tz)
        logging.info("Timezone offset is {}".format(tz))
        date_offset = 0
        start_date = start_time.date()
        if start_date != datetime.date.today():
            date_offset = (start_date - datetime.date.today()).days
            logging.warning("Simulation does not start today - date offset set to {}".format(date_offset))
        url = r"http://wxshield:80/wxshield/getWx.php?model=geps&lat={}&long={}&dateOffset={}&tz={}&mode=daily".format(lat, long, date_offset, tz)
        logging.debug(url)
        try:
            csv = common.download(url).decode("utf-8")
        except Exception:
            logging.fatal("Unable to download weather")
            sys.exit(-3)
        data = [x.split(',') for x in csv.splitlines()]
        df = pd.DataFrame(data[1:], columns=data[0])
        # print(df)
        # supposed to be really picky about inputs
        #"Scenario,Date,APCP,TMP,RH,WS,WD,FFMC,DMC,DC,ISI,BUI,FWI";
        df = df[['MEMBER', 'DAILY', 'PREC', 'TEMP', 'RH', 'WS', 'WD']]
        df.columns = ['Scenario', 'Date', 'APCP', 'TMP', 'RH', 'WS', 'WD']
        # for some reason scenario numbers are negative right now?
        df['Scenario'] = df['Scenario'].apply(lambda x: -1 - int(x))
        df['Date'] = df['Date'].apply(lambda x: x + " 13:00:00")
        for col in ['FFMC', 'DMC', 'DC', 'ISI', 'BUI', 'FWI']:
            df[col] = 0
        df.to_csv('wx.csv', index=False)
        cmd = "./FireSTARR"
        args = "{} {} {} {} {}:{:02d} -v --wx wx.csv --ffmc {} --dmc {} --dc {} --apcp_0800 {}".format(out_dir, start_date, lat, long, hour, minute, ffmc, dmc, dc, apcp_0800)
        if perim is not None:
            args = args + " --perim {}".format(perim)
        # run generated command for parsing data
        run_what = [cmd] + shlex.split(args.replace('\\', '/'))
        logging.info("Running: " + ' '.join(run_what))
        t0 = timeit.default_timer()
        stdout, stderr = common.finish_process(common.start_process(run_what, "/FireGUARD/FireSTARR"))
        t1 = timeit.default_timer()
        logging.info("Took {}s to run simulations".format(t1 - t0))
        log_name = os.path.join(out_dir, "log.txt")
        with open(log_name, 'w') as log_file:
            log_file.write(stdout.decode('utf-8'))
        outputs = sorted(os.listdir(out_dir))
        extent = None
        probs = [x for x in outputs if x.endswith('asc') and x.startswith('wxshield')]
        if len(probs) > 0:
            prob = probs[-1]
            extent = firestarr_gis.project_raster(os.path.join(out_dir, prob), os.path.join(PROB_DIR, job_date, region, fire_name + '.tif'))
        perims = [x for x in outputs if x.endswith('tif')]
        if len(perims) > 0:
            perim = perims[0]
            firestarr_gis.project_raster(os.path.join(out_dir, perim),
                                         os.path.join(PERIM_DIR, job_date, region, fire_name + '.tif'),
                                         outputBounds=extent)
    else:
        return None
    return log_name
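# A minimal alternative sketch for the reprojection near the top of this
# example: the target UTM CRS can be built directly instead of hand-editing
# WKT (the 'UTM zone 15N' / central-meridian string replacement). This is not
# the original project's code; it assumes a NAD83 datum, as the EPSG:3159 base
# implies, and that `zone` is the UTM zone number used above.
def make_utm_srs(zone, north=True):
    """Build a NAD83 / UTM spatial reference without editing WKT strings."""
    srs = osr.SpatialReference()
    srs.SetWellKnownGeogCS('NAD83')  # hypothetical choice; matches EPSG:3159's datum
    srs.SetUTM(zone, north)
    return srs
# Usage sketch: target = make_utm_srs(zone) replaces the WKT replace/format steps.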
def ee_beamer_et(ini_path=None, overwrite_flag=False):
    """Earth Engine Beamer ET Image Download

    Args:
        ini_path (str):
        overwrite_flag (bool): if True, overwrite existing files

    Returns:
        None
    """
    logging.info('\nEarth Engine Beamer Annual Mean ETg Image Download')

    # Read config file
    ini = inputs.read(ini_path)
    inputs.parse_section(ini, section='INPUTS')
    inputs.parse_section(ini, section='SPATIAL')
    inputs.parse_section(ini, section='IMAGES')
    inputs.parse_section(ini, section='BEAMER')

    ini['IMAGES']['download_bands'] = [
        'etg_mean', 'etg_lci', 'etg_uci', 'etg_lpi', 'etg_upi'
    ]
    stat_list = ['mean', 'median']
    nodata_value = -9999
    zips_folder = 'zips'
    annuals_folder = 'annuals'

    # Get ee features from shapefile
    zone_geom_list = gdc.shapefile_2_geom_list_func(
        ini['INPUTS']['zone_shp_path'],
        zone_field=ini['INPUTS']['zone_field'],
        reverse_flag=False)
    # zone_count = len(zone_geom_list)
    # output_fmt = '_{0:0%sd}.csv' % str(int(math.log10(zone_count)) + 1)

    # Check if the zone_names are unique
    # Eventually support merging common zone_names
    if len(set([z[1] for z in zone_geom_list])) != len(zone_geom_list):
        logging.error(
            '\nERROR: There appear to be duplicate zone ID/name values.'
            '\n  Currently, the values in "{}" must be unique.'
            '\n  Exiting.'.format(ini['INPUTS']['zone_field']))
        return False

    # Filter features by FID
    if ini['INPUTS']['fid_keep_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] in ini['INPUTS']['fid_keep_list']
        ]
    if ini['INPUTS']['fid_skip_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] not in ini['INPUTS']['fid_skip_list']
        ]

    # Merge geometries
    if ini['INPUTS']['merge_geom_flag']:
        merge_geom = ogr.Geometry(ogr.wkbMultiPolygon)
        for zone in zone_geom_list:
            zone_multipolygon = ogr.ForceToMultiPolygon(
                ogr.CreateGeometryFromJson(json.dumps(zone[2])))
            for zone_polygon in zone_multipolygon:
                merge_geom.AddGeometry(zone_polygon)
        # merge_json = json.loads(merge_mp.ExportToJson())
        zone_geom_list = [[
            0, ini['INPUTS']['zone_filename'],
            json.loads(merge_geom.ExportToJson())
        ]]
        ini['INPUTS']['zone_field'] = ''

    # Set all zone specific parameters into a dictionary
    zone = {}

    # Need zone_shp_path projection to build EE geometries
    zone['osr'] = gdc.feature_path_osr(ini['INPUTS']['zone_shp_path'])
    zone['proj'] = gdc.osr_wkt(zone['osr'])
    # zone['proj'] = ee.Projection(zone['proj']).wkt().getInfo()
    # zone['proj'] = zone['proj'].replace('\n', '').replace(' ', '')
    # logging.debug('  Zone Projection: {}'.format(zone['proj']))

    # Check that shapefile has matching spatial reference
    if not gdc.matching_spatref(zone['osr'], ini['SPATIAL']['osr']):
        logging.warning('  Zone OSR:\n{}\n'.format(zone['osr']))
        logging.warning('  Output OSR:\n{}\n'.format(
            ini['SPATIAL']['osr'].ExportToWkt()))
        logging.warning('  Zone Proj4:   {}'.format(
            zone['osr'].ExportToProj4()))
        logging.warning('  Output Proj4: {}'.format(
            ini['SPATIAL']['osr'].ExportToProj4()))
        logging.warning(
            '\nWARNING: \n'
            'The output and zone spatial references do not appear to match\n'
            'This will likely cause problems!')
        input('Press ENTER to continue')
    else:
        logging.debug('  Zone Projection:\n{}\n'.format(
            zone['osr'].ExportToWkt()))
        logging.debug('  Output Projection:\n{}\n'.format(
            ini['SPATIAL']['osr'].ExportToWkt()))
        logging.debug('  Output Cellsize: {}'.format(
            ini['SPATIAL']['cellsize']))

    # Initialize Earth Engine API key
    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
    utils.ee_request(ee.Number(1).getInfo())

    # Get list of path/row strings to centroid coordinates
    if ini['INPUTS']['tile_keep_list']:
        ini['INPUTS']['tile_geom'] = [
            wrs2.tile_centroids[tile]
            for tile in ini['INPUTS']['tile_keep_list']
            if tile in wrs2.tile_centroids.keys()
        ]
        ini['INPUTS']['tile_geom'] = ee.Geometry.MultiPoint(
            ini['INPUTS']['tile_geom'], 'EPSG:4326')
    else:
        ini['INPUTS']['tile_geom'] = None

    # Read in ETo and PPT data from file
    if (ini['BEAMER']['eto_source'] == 'file'
            or ini['BEAMER']['ppt_source'] == 'file'):
        data_array = np.genfromtxt(ini['BEAMER']['data_path'],
                                   delimiter=',',
                                   names=True,
                                   dtype=None)
        data_fields = data_array.dtype.names
        logging.debug('  CSV fields: {}'.format(', '.join(data_fields)))
        # DEADBEEF - Compare fields names assuming all upper case
        data_fields = [f.upper() for f in data_fields]
        eto_dict = defaultdict(dict)
        ppt_dict = defaultdict(dict)
        for row in data_array:
            z = str(row[data_fields.index(ini['BEAMER']['data_zone_field'])])
            y = int(row[data_fields.index(ini['BEAMER']['data_year_field'])])
            if ini['BEAMER']['eto_source'] == 'file':
                # DEADBEEF - Compare fields names assuming all upper case
                eto_dict[z][y] = row[data_fields.index(
                    ini['BEAMER']['data_eto_field'].upper())]
            if ini['BEAMER']['ppt_source'] == 'file':
                # DEADBEEF - Compare fields names assuming all upper case
                ppt_dict[z][y] = row[data_fields.index(
                    ini['BEAMER']['data_ppt_field'].upper())]

    # Get filtered/merged/prepped Landsat collection
    landsat_args = {
        k: v
        for section in ['INPUTS'] for k, v in ini[section].items() if k in [
            'landsat4_flag', 'landsat5_flag', 'landsat7_flag', 'landsat8_flag',
            'fmask_flag', 'acca_flag', 'start_year', 'end_year', 'start_month',
            'end_month', 'start_doy', 'end_doy', 'scene_id_keep_list',
            'scene_id_skip_list', 'path_keep_list', 'row_keep_list',
            'tile_geom', 'adjust_method', 'mosaic_method', 'refl_sur_method'
        ]
    }
    landsat_args['products'] = ['evi_sur']
    landsat = ee_common.Landsat(landsat_args)

    # Download images for each feature separately
    for zone_fid, zone_name, zone_json in zone_geom_list:
        zone['fid'] = zone_fid
        zone['name'] = zone_name.replace(' ', '_')
        zone['json'] = zone_json
        logging.info('ZONE: {} (FID: {})'.format(zone['name'], zone['fid']))

        # Build EE geometry object for zonal stats
        zone['geom'] = ee.Geometry(geo_json=zone['json'],
                                   opt_proj=zone['proj'],
                                   opt_geodesic=False)
        # logging.debug('  Centroid: {}'.format(
        #     zone['geom'].centroid(100).getInfo()['coordinates']))

        # Use feature geometry to build extent, transform, and shape
        zone['extent'] = gdc.Extent(
            ogr.CreateGeometryFromJson(json.dumps(zone['json'])).GetEnvelope())
        # zone['extent'] = gdc.Extent(zone['geom'].GetEnvelope())
        zone['extent'] = zone['extent'].ogrenv_swap()
        zone['extent'] = zone['extent'].adjust_to_snap(
            'EXPAND', ini['SPATIAL']['snap_x'], ini['SPATIAL']['snap_y'],
            ini['SPATIAL']['cellsize'])
        zone['geo'] = zone['extent'].geo(ini['SPATIAL']['cellsize'])
        zone['transform'] = gdc.geo_2_ee_transform(zone['geo'])
        # zone['transform'] = '[' + ','.join(map(str, zone['transform'])) + ']'
        zone['shape'] = zone['extent'].shape(ini['SPATIAL']['cellsize'])
        logging.debug('  Zone Shape: {}'.format(zone['shape']))
        logging.debug('  Zone Transform: {}'.format(zone['transform']))
        logging.debug('  Zone Extent: {}'.format(zone['extent']))
        # logging.debug('  Zone Geom: {}'.format(zone['geom'].getInfo()))

        # Assume all pixels in all 14+2 images could be reduced
        zone['max_pixels'] = zone['shape'][0] * zone['shape'][1]
        logging.debug('  Max Pixels: {}'.format(zone['max_pixels']))

        # Set output spatial reference
        # Eventually allow user to manually set these
        # output_crs = zone['proj']
        logging.debug('  Image Projection: {}'.format(ini['SPATIAL']['crs']))

        # output_transform = zone['transform'][:]
        output_transform = '[' + ','.join(map(str, zone['transform'])) + ']'
        output_shape = '{1}x{0}'.format(*zone['shape'])
        logging.debug('  Image Transform: {}'.format(output_transform))
        logging.debug('  Image Shape: {}'.format(output_shape))

        zone_output_ws = os.path.join(ini['IMAGES']['output_ws'], zone_name)
        zone_zips_ws = os.path.join(zone_output_ws, zips_folder)
        zone_annuals_ws = os.path.join(zone_output_ws, annuals_folder)
        if not os.path.isdir(zone_zips_ws):
            os.makedirs(zone_zips_ws)
        if not os.path.isdir(zone_annuals_ws):
            os.makedirs(zone_annuals_ws)

        # Process date range by year
        interval_cnt = 1
        start_dt = datetime.datetime(ini['INPUTS']['start_year'], 1, 1)
        end_dt = datetime.datetime(ini['INPUTS']['end_year'] + 1, 1,
                                   1) - datetime.timedelta(0, 1)
        for i, iter_start_dt in enumerate(
                rrule.rrule(rrule.YEARLY,
                            interval=interval_cnt,
                            dtstart=start_dt,
                            until=end_dt)):
            iter_end_dt = (iter_start_dt +
                           relativedelta.relativedelta(years=interval_cnt) -
                           datetime.timedelta(0, 1))
            if ((ini['INPUTS']['start_month']
                 and iter_end_dt.month < ini['INPUTS']['start_month'])
                    or (ini['INPUTS']['end_month']
                        and iter_start_dt.month > ini['INPUTS']['end_month'])):
                logging.debug('  {}  {}  skipping'.format(
                    iter_start_dt.date(), iter_end_dt.date()))
                continue
            elif (
                (ini['INPUTS']['start_doy'] and
                 int(iter_end_dt.strftime('%j')) < ini['INPUTS']['start_doy'])
                    or
                (ini['INPUTS']['end_doy'] and int(iter_start_dt.strftime('%j'))
                 > ini['INPUTS']['end_doy'])):
                logging.debug('  {}  {}  skipping'.format(
                    iter_start_dt.date(), iter_end_dt.date()))
                continue
            else:
                logging.info('{}  {}'.format(iter_start_dt.date(),
                                             iter_end_dt.date()))
            year = iter_start_dt.year

            # image_id = 'etg_{}_{}'.format(
            image_id = '{}_{}'.format(zone_name.lower().replace(' ', '_'),
                                      year)
            zip_path = os.path.join(zone_zips_ws, image_id + '.zip')
            # median_path = os.path.join(
            #     zone_output_ws, image_id + '.img')
            logging.debug('  Zip: {}'.format(zip_path))

            if os.path.isfile(zip_path) and overwrite_flag:
                logging.debug('    Output already exists, removing zip')
                os.remove(zip_path)
            elif os.path.isfile(zip_path) and not overwrite_flag:
                # Check that existing ZIP files can be opened
                try:
                    with zipfile.ZipFile(zip_path, 'r') as z:
                        pass
                except Exception as e:
                    logging.warning('    Zip file error, removing')
                    os.remove(zip_path)

            # Filter the GRIDMET collection
            wy_start_date = '{}-10-01'.format(year - 1)
            wy_end_date = '{}-10-01'.format(year)
            logging.debug('  WY: {} {}'.format(wy_start_date, wy_end_date))
            gridmet_coll = ee.ImageCollection('IDAHO_EPSCOR/GRIDMET') \
                .filterDate(wy_start_date, wy_end_date)

            # # PRISM collection was uploaded as an asset
            # if ini['BEAMER']['ppt_source'] == 'prism':
            #     def prism_time_start(input_image):
            #         """Set time_start property on PRISM water year PPT collection"""
            #         # Assume year is the 4th item separated by "_"
            #         water_year = ee.String(input_image.get('system:index')).split('_').get(3)
            #         date_start = ee.Date(ee.String(water_year).cat('-10-01'))
            #         return input_image.select([0], ['ppt']).set({
            #             'system:time_start': date_start.millis()
            #         })
            #     prism_coll = ee.ImageCollection('users/cgmorton/prism_800m_ppt_wy')
            #     prism_coll = prism_coll.map(prism_time_start) \
            #         .filterDate(wy_start_date, wy_end_date)

            # Get water year PPT from file
            # Convert all input data to mm to match GRIDMET data
            if ini['BEAMER']['ppt_source'] == 'file':
                wy_ppt_input = ppt_dict[zone_name][year]
                if ini['BEAMER']['data_ppt_units'] == 'mm':
                    pass
                elif ini['BEAMER']['data_ppt_units'] == 'in':
                    wy_ppt_input *= 25.4
                elif ini['BEAMER']['data_ppt_units'] == 'ft':
                    wy_ppt_input *= (25.4 * 12)
            elif ini['BEAMER']['ppt_source'] == 'gridmet':
                # GET GRIDMET value at centroid of geometry
                wy_ppt_input = float(
                    utils.ee_getinfo(
                        ee.ImageCollection(
                            gridmet_coll.select(['pr'],
                                                ['ppt']).sum()).getRegion(
                                                    zone['geom'].centroid(1),
                                                    500))[1][4])
                # Calculate GRIDMET zonal mean of geometry
                # wy_ppt_input = float(ee.ImageCollection(
                #     gridmet_coll.select(['pr'], ['ppt'])).reduceRegion(
                #         reducer=ee.Reducer.sum(),
                #         geometry=zone['geom'],
                #         crs=ini['SPATIAL']['crs'],
                #         crsTransform=zone['transform'],
                #         bestEffort=False,
                #         tileScale=1).getInfo()['ppt']
            # elif ini['BEAMER']['ppt_source'] == 'prism':
            #     # Calculate PRISM zonal mean of geometry
            #     wy_ppt_input = float(ee.ImageCollection(
            #         prism_coll.map(ee_common.prism_ppt_func)).sum().reduceRegion(
            #             reducer=ee.Reducer.mean(),
            #             geometry=zone['geom'],
            #             crs=ini['SPATIAL']['crs'],
            #             crsTransform=zone['transform'],
            #             bestEffort=False,
            #             tileScale=1).getInfo()['ppt'])

            # Get water year ETo read from file
            # Convert all input data to mm for Beamer Method
            if ini['BEAMER']['eto_source'] == 'file':
                wy_eto_input = eto_dict[zone_name][year]
                if ini['BEAMER']['data_eto_units'] == 'mm':
                    pass
                elif ini['BEAMER']['data_eto_units'] == 'in':
                    wy_eto_input *= 25.4
                elif ini['BEAMER']['data_eto_units'] == 'ft':
                    wy_eto_input *= (25.4 * 12)
            # This assumes GRIDMET data is in millimeters
            elif ini['BEAMER']['eto_source'] == 'gridmet':
                wy_eto_input = float(
                    utils.ee_getinfo(
                        ee.ImageCollection(gridmet_coll.select(
                            ['eto']).sum()).getRegion(zone['geom'].centroid(1),
                                                      500))[1][4])
                # wy_eto_input = float(ee.ImageCollection(
                #     gridmet_coll.select(['eto'])).reduceRegion(
                #         reducer=ee.Reducer.sum(),
                #         geometry=zone['geom'],
                #         crs=ini['SPATIAL']['crs'],
                #         crsTransform=zone['transform'],
                #         bestEffort=False,
                #         tileScale=1).getInfo()
            logging.debug('  Input ETO: {} mm  PPT: {} mm'.format(
                wy_eto_input, wy_ppt_input))

            # Scale ETo & PPT
            wy_eto_input *= ini['BEAMER']['eto_factor']
            wy_ppt_input *= ini['BEAMER']['ppt_factor']

            # Convert output units from mm
            wy_ppt_output = wy_ppt_input
            wy_eto_output = wy_eto_input
            if ini['IMAGES']['ppt_units'] == 'mm':
                pass
            elif ini['IMAGES']['ppt_units'] == 'in':
                wy_ppt_output /= 25.4
            elif ini['IMAGES']['ppt_units'] == 'ft':
                wy_ppt_output /= (25.4 * 12)
            if ini['IMAGES']['eto_units'] == 'mm':
                pass
            elif ini['IMAGES']['eto_units'] == 'in':
                wy_eto_output /= 25.4
            elif ini['IMAGES']['eto_units'] == 'ft':
                wy_eto_output /= (25.4 * 12)
            logging.debug('  Output ETO: {} {} PPT: {} {}'.format(
                wy_eto_output, ini['IMAGES']['eto_units'], wy_ppt_output,
                ini['IMAGES']['ppt_units']))

            # Initialize the Landsat object for target zone and iteration
            landsat.zone_geom = zone['geom']
            landsat.start_date = iter_start_dt.strftime('%Y-%m-%d')
            landsat.end_date = iter_end_dt.strftime('%Y-%m-%d')
            landsat_coll = landsat.get_collection()
            # print(sorted(utils.ee_getinfo(
            #     landsat_coll.aggregate_histogram('SCENE_ID'))))
            # input('ENTER')

            # Skip if Landsat collection is empty
            if not utils.ee_getinfo(
                    landsat_coll.aggregate_histogram('SCENE_ID')):
                logging.info('  Empty Landsat collection, skipping')
                continue

            # Add water year ETo and PPT values to each image
            def eto_ppt_func(img):
                """Set the water year ETo and PPT values as image properties."""
                return ee.Image(img).setMulti({
                    'wy_eto': wy_eto_output,
                    'wy_ppt': wy_ppt_output
                })

            landsat_coll = ee.ImageCollection(landsat_coll.map(eto_ppt_func))

            # Build each collection separately then merge
            etg_coll = ee.ImageCollection(landsat_coll.map(
                    ee_common.beamer_func)) \
                .select(ini['IMAGES']['download_bands'])

            # Clip using the feature geometry
            # Set the masked values to a nodata value
            # so that the TIF can have a nodata value other than 0 set
            etg_image = ee.Image(etg_coll.mean()) \
                .clip(zone['geom']) \
                .unmask(nodata_value, False)

            if not os.path.isfile(zip_path):
                # Get the download URL
                logging.debug('  Requesting URL')
                zip_url = utils.ee_request(
                    etg_image.getDownloadURL({
                        'name': image_id,
                        'crs': ini['SPATIAL']['crs'],
                        'crs_transform': output_transform,
                        'dimensions': output_shape
                    }))

                # Try downloading a few times
                logging.info('  Downloading')
                for i in range(1, 10):
                    try:
                        response = urlrequest.urlopen(zip_url)
                        with open(zip_path, 'wb') as output_f:
                            shutil.copyfileobj(response, output_f)
                        break
                    except Exception as e:
                        logging.info('  Resending query')
                        logging.debug('  {}'.format(e))
                        sleep(i**2)
                        # Guard against the file never having been created
                        if os.path.isfile(zip_path):
                            os.remove(zip_path)

            # Try extracting the files
            try:
                logging.info('  Extracting')
                with zipfile.ZipFile(zip_path, 'r') as z:
                    z.extractall(zone_annuals_ws)
            except Exception as e:
                logging.warning('    Error: could not extract')
                logging.debug('  {}'.format(e))
                try:
                    os.remove(zip_path)
                except Exception as e:
                    pass

            # Set nodata value
            for item in os.listdir(zone_annuals_ws):
                if item.startswith(image_id) and item.endswith('.tif'):
                    gdc.raster_path_set_nodata(
                        os.path.join(zone_annuals_ws, item), nodata_value)
                    raster_statistics(os.path.join(zone_annuals_ws, item))

        logging.info('\nComputing composite rasters from annual means')
        for stat in stat_list:
            logging.info('  Stat: {}'.format(stat))
            for band in ini['IMAGES']['download_bands']:
                logging.info('  {}'.format(band))
                image_band_list = [
                    os.path.join(zone_annuals_ws, item)
                    for item in os.listdir(zone_annuals_ws)
                    if item.endswith('.{}.tif'.format(band.lower()))
                ]
                # for image_path in image_band_list:
                #     raster_path_set_nodata(image_path, nodata_value)

                output_path = os.path.join(
                    # zone_output_ws, 'etg_{}_{}.{}.tif'.format(
                    zone_output_ws,
                    '{}_{}.{}.tif'.format(zone_name.lower().replace(' ', '_'),
                                          stat.lower(), band.lower()))
                logging.debug('  {}'.format(output_path))

                # Use GDAL to compute the composite raster
                cell_statistics(image_band_list, output_path, stat.lower())
                raster_statistics(output_path)
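# The repeated mm/in/ft conversion chains above could be factored into one
# helper. A minimal sketch; the helper name and table are illustrative, not
# from the source:
MM_PER_UNIT = {'mm': 1.0, 'in': 25.4, 'ft': 25.4 * 12}

def to_mm(value, units):
    """Convert a depth value in the given units to millimeters."""
    try:
        return value * MM_PER_UNIT[units]
    except KeyError:
        raise ValueError('Unsupported units: {}'.format(units))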
Example #9
0
def get_lat_index(location):
    """Get latitude index min and max."""

    geom = ogr.CreateGeometryFromJson(json.dumps(location))
    env = geom.GetEnvelope()
    return int(math.floor(env[2] * 10)), int(math.ceil(env[3] * 10))
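# An illustrative call (polygon values made up for the example): latitudes
# spanning 43.06 to 43.12 floor/ceil to tenth-of-a-degree indices 430 and 432.
location = {"type": "Polygon",
            "coordinates": [[[-75.3, 43.06], [-75.1, 43.06],
                             [-75.1, 43.12], [-75.3, 43.12],
                             [-75.3, 43.06]]]}
print(get_lat_index(location))  # -> (430, 432)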
Example #10
0
    def _to_ogr(source, srid=None):
        geom = None
        if isinstance(source, dict):
            # GeoJSON
            if not 'coordinates' in source:
                if 'features' in source:
                    raise ValueError(
                        f'FeatureCollection {repr(source)} cannot be converted to a single OGR Geometry'
                    )

                if 'geometry' in source:
                    source = source['geometry']
                else:
                    raise ValueError(
                        f'{repr(source)} doesn\'t look like a valid GeoJSON')

            geom_str = json.dumps(source)
            geom = ogr.CreateGeometryFromJson(geom_str)
            if geom is None:
                raise ValueError(
                    f'Failed to convert {repr(source)} to OGR Geometry')

        elif isinstance(source, str):
            try:
                jsn = json.loads(source)
            except json.JSONDecodeError:
                pass
            else:
                # GeoJSON string
                return OgrWrapper._to_ogr(jsn, srid)

            wkt = source
            m = re.match(r'\s*SRID=(\d*)\s*;(.*)$', wkt)  # Extended WKT
            if m:
                srid = int(m.group(1))
                wkt = m.group(2)
            # Try WKT
            geom = ogr.CreateGeometryFromWkt(wkt)

            if geom is None:
                geom = ogr.CreateGeometryFromGML(source)
            if geom is None:
                raise ValueError(
                    f'{repr(source)} doesn\'t look like neither of GeoJSON, WKT nor GML'
                )

        elif isinstance(source, ET.Element):
            xml = ET.tostring(source, encoding='utf-8').decode()
            try:
                return OgrWrapper._to_ogr(xml)
            except ValueError:
                raise ValueError(
                    f'{repr(source)} is not representing a valid GML')

        else:
            raise TypeError(
                f'{repr(source)} cannot be converted to OGR Geometry')

        if srid is not None:
            OgrWrapper._set_srs(geom, srid)

        return geom
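# Illustrative inputs the dispatcher above accepts, assuming it is reachable
# as OgrWrapper._to_ogr (a usage sketch, not part of the original listing):
# OgrWrapper._to_ogr({'type': 'Point', 'coordinates': [1.0, 2.0]})  # GeoJSON dict
# OgrWrapper._to_ogr('POINT (1 2)')                                 # WKT string
# OgrWrapper._to_ogr('SRID=4326;POINT (1 2)')                       # extended WKT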
Example #11
0
def GeoJSONtoQgsGeomentry(_json: Union[dict, str]) -> QgsGeometry:
    if isinstance(_json, dict):
        _json = json.dumps(_json)
    ret = ogr.CreateGeometryFromJson(_json)
    return QgsGeometry.fromWkt(ret.ExportToWkt())
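# A minimal usage sketch (requires a QGIS Python environment):
# g = GeoJSONtoQgsGeomentry({'type': 'Point', 'coordinates': [100.0, 0.0]})
# g.asWkt()  # -> e.g. 'Point (100 0)'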
Example #12
0
    def _save(self, tweet, hashtag=None):

        # Obtain the event from the database.
        event = self.__session.query(DBEONet).filter(DBEONet.hashtag == hashtag).one_or_none()

        # No need to continue if the tweet is already in the database.
        db_tweet = self.__session.query(DBTweets).filter(DBTweets.tweet_id == tweet.id_str).one_or_none()
        if db_tweet is not None:
            return

        # the data fields to save.
        name = tweet.user.name
        create_date = tweet.created_at
        place = tweet.place.name if tweet.place else None
        text = tweet.text
        profile_pic = tweet.user.profile_image_url
        screen_name = tweet.user.screen_name
        try:
            tweet.extended_entities
        except AttributeError:
            media_url = None
        else:
            media_url = tweet.extended_entities['media'][0]['media_url'] if tweet.extended_entities['media'] and tweet.extended_entities['media'][0]['type'] == 'photo' else None
            
        # The coordinates of the tweet are either the specific long, lat
        # provided in the tweet, or the centroid of the place's bounding
        # box.
        # 
        # The coordinates is a list that must be saved as a string.  However
        # the list needs to be reconstituted when it's read.  The clients
        # consuming the REST API need the JSON version of the coordinates.
        if tweet.coordinates is not None:
            coordinates = PythonDBObject(tweet.coordinates)
            json_coordinates = json.dumps(tweet.coordinates)

        # Second choice is the centroid of the place
        elif tweet.place is not None:
            center = place_centroid(tweet)
            
            # Build the dictionary.
            l_coords = [center.GetX(), center.GetY()]
            d_coords = {u'type': 'Point', u'coordinates': l_coords}
            coordinates = PythonDBObject(d_coords)
            json_coordinates = json.dumps(d_coords)

        # Use the centroid of the event if we have that.
        elif hashtag is not None:
            e_json = event.json_geometries
            f_json = str(e_json)
            g_json = f_json[1:-1]
            ogr_geometry = ogr.CreateGeometryFromJson(g_json)
            geotype = ogr_geometry.GetGeometryName()  # returns e.g. 'POINT'
            if geotype == 'POINT':
                coordinates = PythonDBObject(g_json)
                json_coordinates = g_json
            else:
                center = ogr_geometry.Centroid()

                # Build the dictionary.
                l_coords = [center.GetX(), center.GetY()]
                d_coords = {u'type': 'Point', u'coordinates': l_coords}
                coordinates = PythonDBObject(d_coords)
                json_coordinates = json.dumps(d_coords)
                
        # Commit to the database.
        tweet_record = DBTweets(tweet_id=tweet.id_str,
                                eonet_id=event.eonet_id,
                                name=name,
                                createDate=create_date,
                                place=place,
                                msg=text,
                                coordinates=coordinates.encode(),
                                hashtag=hashtag,
                                media_url=media_url,
                                screen_name=screen_name,
                                profile_pic=profile_pic,
                                json_coordinates=json_coordinates)
        self.__session.add(tweet_record)
        self.__session.commit()
                
        # Write the same to standard out to monitor the progress.  The terminal
        # requires the text to be encoded as utf8.
        text = text.encode('utf8', 'replace')
        name = name.encode('utf8', 'replace')
        place = place.encode('utf8', 'replace') if place else None
        media_url = media_url.encode('utf8', 'replace') if media_url else None
        print('Written to the database: [Name: %s][Place: %s][At: %s] %s %s.' %
              (name, place, create_date, text, media_url))
Example #13
0
    def pintarCapasReferencia(self, uriFigura, value, posicion):

        consulta = self.root.findGroup('referencia')

        if consulta is None:
            raise AttributeError('Capas no detectadas')

        # Layer name according to the selected value of the layers-to-load ComboBox
        nameCapa = value

        egName = list(self.capasReferencia.keys())[list(
            self.capasReferencia.values()).index(value)]
        # Get the data
        data = self.obtenerCapasDeReferencia(egName)
        srid = 32614
        inSpatialRef = osr.SpatialReference()
        inSpatialRef.ImportFromEPSG(int(srid))
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(int(srid))
        coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
        if not bool(data):
            raise Exception('Error')

        keys = list(data['features'][0]['properties'].keys())
        properties = []
        geoms = []
        for feature in data['features']:
            geom = feature['geometry']
            property = feature['properties']
            geom = json.dumps(geom)
            geometry = ogr.CreateGeometryFromJson(geom)
            geometry.Transform(coordTrans)
            geoms.append(geometry.ExportToWkt())
            l = []
            for i in range(0, len(keys)):
                l.append(property[keys[i]])
            properties.append(l)

        fields = ""
        for k in keys:
            fields = fields + "&field=" + k + ":string(15)"

        uri = str(uriFigura) + "?crs=epsg:" + str(srid) + fields + "&index=yes"
        mem_layer = QgsVectorLayer(uri, nameCapa, 'memory')
        mem_layer.setReadOnly(True)
        prov = mem_layer.dataProvider()
        feats = [QgsFeature() for i in range(len(geoms))]
        for i, feat in enumerate(feats):
            feat.setAttributes(properties[i])
            feat.setGeometry(QgsGeometry.fromWkt(geoms[i]))

        prov.addFeatures(feats)
        settings = QgsPalLayerSettings()
        settings.fieldName = 'clave'
        settings.centroidWhole = True

        textFormat = QgsTextFormat()
        textFormat.setColor(QColor(0, 0, 0))
        textFormat.setSize(8)
        textFormat.setNamedStyle('Bold')

        settings.setFormat(textFormat)

        labeling = QgsVectorLayerSimpleLabeling(settings)
        mem_layer.setLabeling(labeling)
        mem_layer.setLabelsEnabled(True)

        QgsProject.instance().addMapLayers([mem_layer], False)

        mzaNL = QgsLayerTreeLayer(mem_layer)

        consulta.insertChildNode(0, mzaNL)
Example #14
0
    def pintarVolumenConstrucciones(self):

        mem_layer = QgsProject.instance().mapLayersByName('volumenes')

        if len(mem_layer) != 1:
            raise AttributeError
        mem_layer = mem_layer[0]

        if mem_layer is None:
            return

        data = self.obtenerVolConstrucciones()
        srid = 32614
        inSpatialRef = osr.SpatialReference()
        inSpatialRef.ImportFromEPSG(int(srid))
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(int(srid))
        coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
        if not bool(data):
            raise Exception('Error')

        if data['features'] == []:
            return
        keys = list(data['features'][0]['properties'].keys())
        properties = []
        geoms = []
        for feature in data['features']:
            geom = feature['geometry']
            property = feature['properties']
            geom = json.dumps(geom)
            geometry = ogr.CreateGeometryFromJson(geom)
            geometry.Transform(coordTrans)
            geoms.append(geometry.ExportToWkt())
            l = []
            for i in range(0, len(keys)):
                l.append(property[keys[i]])
            properties.append(l)

        prov = mem_layer.dataProvider()
        feats = [QgsFeature() for i in range(len(geoms))]
        for i, feat in enumerate(feats):
            feat.setAttributes(properties[i])
            feat.setGeometry(QgsGeometry.fromWkt(geoms[i]))

        prov.addFeatures(feats)
        settings = QgsPalLayerSettings()
        settings.fieldName = 'clave'
        settings.centroidWhole = True

        textFormat = QgsTextFormat()
        textFormat.setColor(QColor(0, 0, 255))
        textFormat.setSize(8)
        textFormat.setNamedStyle('Bold')

        settings.setFormat(textFormat)

        labeling = QgsVectorLayerSimpleLabeling(settings)
        mem_layer.setLabeling(labeling)
        mem_layer.setLabelsEnabled(True)

        mem_layer.triggerRepaint()

        mem_layer.commitChanges()
        mem_layer.triggerRepaint()
Example #15
0
    def init_xy_with_subsets(self,
                             lon_min,
                             lat_min,
                             lon_max,
                             lat_max,
                             target_cellsize_meters,
                             subset_grid_shp,
                             subset_grid_field_name=None):
        """Create & initialize x/y dimensions/coordinate vars and subset vars.

        Args:
            lon_min: Minimum longitude of domain.
            lat_min: Minimum latitude of domain.
            lon_max: Maximum longitude of domain.
            lat_max: Maximum latitude of domain.
            target_cellsize_meters: Target cell size, in meters. Actual
                calculated cell sizes will be approximations of this.
            subset_grid_shp: Path to subset grid polygon shapefile used to
                define subgrid domains.
            subset_grid_field_name: (Optional, default None) Shapefile
                field name to be stored in the index file.

        Raises: Exception when given subset grid shapefile does not exist or
            does not include any grid polygons intersecting with given extent.

        Returns:
            Instance of `RegularGrid` representing the extended generated
            grid whose extent matches the union of all intersecting subset grid
            polygons.
        """
        shp = ogr.Open(subset_grid_shp)
        layer = shp.GetLayer()

        # Create OGR Geometry from ocean model grid extent
        ring = ogr.Geometry(ogr.wkbLinearRing)
        ring.AddPoint(lon_min, lat_max)
        ring.AddPoint(lon_min, lat_min)
        ring.AddPoint(lon_max, lat_min)
        ring.AddPoint(lon_max, lat_max)
        ring.AddPoint(lon_min, lat_max)
        # Create polygon
        ofs_poly = ogr.Geometry(ogr.wkbPolygon)
        ofs_poly.AddGeometry(ring)

        # Get the EPSG value from the import shapefile and transform to WGS84
        spatial_ref = layer.GetSpatialRef()
        shp_srs = spatial_ref.GetAttrValue('AUTHORITY', 1)
        source = osr.SpatialReference()
        source.ImportFromEPSG(int(shp_srs))
        target = osr.SpatialReference()
        target.ImportFromEPSG(4326)
        transform = osr.CoordinateTransformation(source, target)
        ofs_poly.Transform(transform)

        # Find the intersection between grid polygon and ocean model grid extent
        subset_polys = {}
        fids = []
        fields = {}
        fid = 0
        for feature in layer:
            geom = feature.GetGeometryRef()
            if ofs_poly.Intersects(geom):
                subset_polys[fid] = geom.ExportToJson()
                if subset_grid_field_name is not None:
                    field_name = feature.GetField(str(subset_grid_field_name))
                    fields.update({fid: field_name})
                    fids.append(fid)
                else:
                    fids.append(fid)
            fid += 1

        if len(fids) == 0:
            raise Exception(
                'Given subset grid shapefile contains no polygons that intersect with model domain; cannot proceed.'
            )

        # Use a single subset polygon to calculate x/y cell sizes. This ensures
        # that cells do not fall on the border between two grid polygons.
        single_polygon = ogr.Geometry(ogr.wkbMultiPolygon)
        single_polygon.AddGeometry(
            ogr.CreateGeometryFromJson(subset_polys[fids[0]]))
        sp_x_min, sp_x_max, sp_y_min, sp_y_max = single_polygon.GetEnvelope()

        cellsize_x, cellsize_y = RegularGrid.calc_cellsizes(
            sp_x_min, sp_y_min, sp_x_max, sp_y_max, target_cellsize_meters)

        # Combine identified subset grid polygons into single multipolygon to
        # calculate full extent of all combined subset grids
        multipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
        for fid in fids:
            multipolygon.AddGeometry(
                ogr.CreateGeometryFromJson(subset_polys[fid]))

        (x_min, x_max, y_min, y_max) = multipolygon.GetEnvelope()
        full_reg_grid = RegularGrid(x_min, y_min, x_max, y_max, cellsize_x,
                                    cellsize_y)

        # Create NetCDF dimensions & coordinate variables using dimension sizes
        # from regular grid
        self.create_dims_coord_vars(len(full_reg_grid.y_coords),
                                    len(full_reg_grid.x_coords))
        # Populate NetCDF coordinate variables using regular grid coordinates
        self.var_x[:] = full_reg_grid.x_coords[:]
        self.var_y[:] = full_reg_grid.y_coords[:]
        self.nc_file.gridSpacingLongitude = full_reg_grid.cellsize_x
        self.nc_file.gridSpacingLatitude = full_reg_grid.cellsize_y

        # Create subgrid dimension/variables
        self.create_subgrid_dims_vars(len(subset_polys),
                                      subset_grid_field_name)
        # Calculate subgrid mask ranges, populate subgrid ID
        for subgrid_index, fid in enumerate(fids):
            self.var_subgrid_id[subgrid_index] = fid
            if subset_grid_field_name is not None:
                self.var_subgrid_name[subgrid_index] = fields[fid]

            # Convert OGR geometry to shapely geometry
            subset_poly_shape = shape(json.loads(subset_polys[fid]))
            min_x_coord = subset_poly_shape.bounds[0]
            max_x_coord = subset_poly_shape.bounds[2]
            min_y_coord = subset_poly_shape.bounds[1]
            max_y_coord = subset_poly_shape.bounds[3]

            subgrid_x_min = None
            subgrid_x_max = None
            subgrid_y_min = None
            subgrid_y_max = None

            for i, x in enumerate(self.var_x):
                if x >= min_x_coord:
                    subgrid_x_min = i
                    break
            count_x = round(
                ((max_x_coord - min_x_coord) / full_reg_grid.cellsize_x))

            for i, y in enumerate(self.var_y):
                if y >= min_y_coord:
                    subgrid_y_min = i
                    break
            count_y = round(
                ((max_y_coord - min_y_coord) / full_reg_grid.cellsize_y))

            subgrid_x_max = subgrid_x_min + count_x - 1
            subgrid_y_max = subgrid_y_min + count_y - 1

            self.var_subgrid_x_min[subgrid_index] = subgrid_x_min
            self.var_subgrid_x_max[subgrid_index] = subgrid_x_max
            self.var_subgrid_y_min[subgrid_index] = subgrid_y_min
            self.var_subgrid_y_max[subgrid_index] = subgrid_y_max

        return full_reg_grid
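# The linear scans that locate subgrid_x_min / subgrid_y_min above could use
# numpy.searchsorted instead, assuming the coordinate vectors are ascending
# (as a RegularGrid built from min/max/cellsize would produce). A sketch, not
# the module's actual code:
import numpy as np

def first_index_at_or_above(coords, value):
    """Index of the first coordinate >= value in an ascending 1-D array."""
    return int(np.searchsorted(coords, value, side='left'))
# e.g. subgrid_x_min = first_index_at_or_above(np.asarray(self.var_x[:]), min_x_coord)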
Example #16
0
def run_crophealth_app(ds, lat, lon, buffer):
    """
    Plots an interactive map of the crop health case-study area and allows
    the user to draw polygons. This returns a plot of the average NDVI value
    in the polygon area.
    Last modified: January 2020
    
    Parameters
    ----------
    ds: xarray.Dataset 
        data set containing combined, masked data
        Masked values are set to 'nan'
    lat: float
        The central latitude corresponding to the area of loaded ds
    lon: float
        The central longitude corresponding to the area of loaded ds
    buffer: float
        The buffer, in degrees, to load around the central latitude and longitude.
        For reasonable loading times, set this to `0.1` or lower.
    """
    
    # Suppress warnings
    warnings.filterwarnings('ignore')

    # Update plotting functionality through rcParams
    mpl.rcParams.update({'figure.autolayout': True})
    
    # Define polygon bounds   
    latitude = (lat - buffer, lat + buffer)
    longitude = (lon - buffer, lon + buffer)

    # Define the bounding box that will be overlayed on the interactive map
    # The bounds are hard-coded to match those from the loaded data
    geom_obj = {
        "type": "Feature",
        "properties": {
            "style": {
                "stroke": True,
                "color": 'red',
                "weight": 4,
                "opacity": 0.8,
                "fill": True,
                "fillColor": False,
                "fillOpacity": 0,
                "showArea": True,
                "clickable": True
            }
        },
        "geometry": {
            "type": "Polygon",
            "coordinates": [
                [
                    [
                        longitude[0],
                        latitude[0]
                    ],
                    [
                        longitude[1],
                        latitude[0]
                    ],
                    [
                        longitude[1],
                        latitude[1]
                    ],
                    [
                        longitude[0],
                        latitude[1]
                    ],
                    [
                        longitude[0],
                        latitude[0]
                    ]
                ]
            ]
        }
    }
    
    # Create a map geometry from the geom_obj dictionary
    # center specifies where the background map view should focus on
    # zoom specifies how zoomed in the background map should be
    loadeddata_geometry = ogr.CreateGeometryFromJson(json.dumps(geom_obj['geometry']))
    loadeddata_center = [
        loadeddata_geometry.Centroid().GetY(),
        loadeddata_geometry.Centroid().GetX()
    ]
    loadeddata_zoom = 16

    # define the study area map
    studyarea_map = Map(
        center=loadeddata_center,
        zoom=loadeddata_zoom,
        basemap=basemaps.Esri.WorldImagery
    )

    # define the drawing controls
    studyarea_drawctrl = DrawControl(
        polygon={"shapeOptions": {"fillOpacity": 0}},
        marker={},
        circle={},
        circlemarker={},
        polyline={},
    )

    # add drawing controls and data bound geometry to the map
    studyarea_map.add_control(studyarea_drawctrl)
    studyarea_map.add_layer(GeoJSON(data=geom_obj))

    # Index to count drawn polygons
    polygon_number = 0

    # Define widgets to interact with
    instruction = widgets.Output(layout={'border': '1px solid black'})
    with instruction:
        print("Draw a polygon within the red box to view a plot of "
              "average NDVI over time in that area.")

    info = widgets.Output(layout={'border': '1px solid black'})
    with info:
        print("Plot status:")

    fig_display = widgets.Output(layout=widgets.Layout(
        width="50%",  # proportion of horizontal space taken by plot
    ))

    with fig_display:
        plt.ioff()
        fig, ax = plt.subplots(figsize=(8, 6))
        ax.set_ylim([0, 1])

    colour_list = plt.rcParams['axes.prop_cycle'].by_key()['color']

    # Function to execute each time something is drawn on the map
    def handle_draw(self, action, geo_json):
        nonlocal polygon_number

        # Execute behaviour based on what the user draws
        if geo_json['geometry']['type'] == 'Polygon':

            info.clear_output(wait=True)  # wait=True reduces flicker effect
            
            # Save geojson polygon to io temporary file to be rasterized later
            jsonData = json.dumps(geo_json)
            binaryData = jsonData.encode()
            io = BytesIO(binaryData)
            io.seek(0)
            
            # Read the polygon as a geopandas dataframe
            gdf = gpd.read_file(io)
            gdf.crs = "EPSG:4326"

            # Convert the drawn geometry to pixel coordinates
            xr_poly = xr_rasterize(gdf, ds.NDVI.isel(time=0), crs='EPSG:6933')

            # Construct a mask to only select pixels within the drawn polygon
            masked_ds = ds.NDVI.where(xr_poly)
            
            masked_ds_mean = masked_ds.mean(dim=['x', 'y'], skipna=True)
            colour = colour_list[polygon_number % len(colour_list)]

            # Add a layer to the map to make the most recently drawn polygon
            # the same colour as the line on the plot
            studyarea_map.add_layer(
                GeoJSON(
                    data=geo_json,
                    style={
                        'color': colour,
                        'opacity': 1,
                        'weight': 4.5,
                        'fillOpacity': 0.0
                    }
                )
            )

            # add new data to the plot
            xr.plot.plot(
                masked_ds_mean,
                marker='*',
                color=colour,
                ax=ax
            )

            # reset titles back to custom
            ax.set_title("Average NDVI from Landsat 8")
            ax.set_xlabel("Date")
            ax.set_ylabel("NDVI")

            # refresh display
            fig_display.clear_output(wait=True)  # wait=True reduces flicker effect
            with fig_display:
                display(fig)
                
            with info:
                print("Plot status: polygon successfully added to plot.")

            # Iterate the polygon number before drawing another polygon
            polygon_number = polygon_number + 1

        else:
            info.clear_output(wait=True)
            with info:
                print("Plot status: this drawing tool is not currently "
                      "supported. Please use the polygon tool.")

    # call to say activate handle_draw function on draw
    studyarea_drawctrl.on_draw(handle_draw)

    with fig_display:
        # TODO: update with user friendly something
        display(widgets.HTML(""))

    # Construct UI:
    #  +-----------------------+
    #  | instruction           |
    #  +-----------+-----------+
    #  |  map      |  plot     |
    #  |           |           |
    #  +-----------+-----------+
    #  | info                  |
    #  +-----------------------+
    ui = widgets.VBox([instruction,
                       widgets.HBox([studyarea_map, fig_display]),
                       info])
    display(ui)
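# An illustrative call, following the docstring above (coordinates and buffer
# are made-up values; `ds` must be a matching masked xarray.Dataset):
# run_crophealth_app(ds, lat=-24.97, lon=152.43, buffer=0.05)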
Example #17
0
def export_to_shp(jobs, file_name, output_folder):
    """Exports results to a shapefile.
    :param jobs: list of jobs (a job contains the result information)
    :param file_name: the output file name
    :param output_folder: the output task folder
    """
    global exported_count
    global errors_count

    from osgeo import ogr
    from osgeo import osr
    # os.environ['GDAL_DATA'] = r'C:\voyager\server_2-1381\app\gdal'
    driver = ogr.GetDriverByName("ESRI Shapefile")
    for job in jobs:
        try:
            geometry_type = None
            if '[geo]' not in job:
                errors_count += 1
                status_writer.send_state(status.STAT_WARNING,
                                         'No Geometry field')
                status_writer.send_state(status.STAT_WARNING)
                continue
            geo_json = job['[geo]']
            if geo_json['type'].lower() == 'polygon':
                geometry_type = ogr.wkbPolygon
            elif geo_json['type'].lower() == 'geometrycollection':
                geom = ogr.CreateGeometryFromJson("{0}".format(job['[geo]']))
                if geom.GetDimension() == 0:
                    geometry_type = ogr.wkbPoint
                elif geom.GetDimension() == 1:
                    geometry_type = ogr.wkbLineString
                else:
                    geometry_type = ogr.wkbPolygon
            elif geo_json['type'].lower() == 'multipolygon':
                geometry_type = ogr.wkbMultiPolygon
            elif geo_json['type'].lower() == 'linestring':
                geometry_type = ogr.wkbLineString
            elif geo_json['type'].lower() == 'multilinestring':
                geometry_type = ogr.wkbMultiLineString
            elif geo_json['type'].lower() == 'point':
                geometry_type = ogr.wkbPoint
            elif geo_json['type'].lower() == 'multipoint':
                geometry_type = ogr.wkbMultiPoint
        except KeyError as ke:
            errors_count += 1
            status_writer.send_state(status.STAT_WARNING, 'No Geometry field')
            continue
        except TypeError as te:
            errors_count += 1
            status_writer.send_state(status.STAT_WARNING, 'No Geometry field')
            status_writer.send_state(status.STAT_WARNING)
            continue

        if os.path.exists(
                os.path.join(output_folder,
                             '{0}_{1}.shp'.format(file_name,
                                                  geo_json['type']))):
            shape_file = ogr.Open(
                os.path.join(output_folder,
                             '{0}_{1}.shp'.format(file_name,
                                                  geo_json['type'])), 1)
            layer = shape_file.GetLayer()
        else:
            shape_file = driver.CreateDataSource(
                os.path.join(output_folder,
                             '{0}_{1}.shp'.format(file_name,
                                                  geo_json['type'])))
            epsg_code = 4326
            srs = osr.SpatialReference()
            srs.ImportFromEPSG(epsg_code)
            layer = shape_file.CreateLayer(
                '{0}_{1}'.format(file_name, geo_json['type']), srs,
                geometry_type)
            for name in jobs[0].keys():
                if not name == '[geo]':
                    name = str(name)
                    if name.startswith('fu_'):
                        new_field = ogr.FieldDefn(name, ogr.OFTReal)
                    elif name.startswith('fi_'):
                        new_field = ogr.FieldDefn(name, ogr.OFTInteger)
                    elif name.startswith('fl_'):
                        new_field = ogr.FieldDefn(name, ogr.OFTInteger64)
                    elif name.startswith('fd_'):
                        new_field = ogr.FieldDefn(name, ogr.OFTDateTime)
                    else:
                        new_field = ogr.FieldDefn(name, ogr.OFTString)
                    layer.CreateField(new_field)

        try:
            layer_def = layer.GetLayerDefn()
            feature = ogr.Feature(layer_def)
            geom = ogr.CreateGeometryFromJson("{0}".format(job['[geo]']))
            if not geom:
                geom = ogr.CreateGeometryFromJson("{0}".format(
                    json.dumps(job['[geo]'])))
            feature.SetGeometry(geom)
        except KeyError:
            feature.SetGeometry(None)
        try:
            job.pop('[geo]')
        except KeyError:
            pass

        try:
            for field, value in job.items():
                field, value = str(field), str(value)
                i = feature.GetFieldIndex(field[0:10])
                feature.SetField(i, value)
            layer.CreateFeature(feature)
            exported_count += 1
            shape_file.Destroy()
        except Exception as ex:
            errors_count += 1
            status_writer.send_state(status.STAT_WARNING, 'No Geometry field')
            shape_file.Destroy()
            continue
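# The if/elif chain above that maps GeoJSON type names to OGR geometry
# constants could be table-driven. A minimal sketch with the same behavior for
# the simple types ('geometrycollection' would still need its dimension probe):
GEOJSON_TO_OGR = {
    'polygon': ogr.wkbPolygon,
    'multipolygon': ogr.wkbMultiPolygon,
    'linestring': ogr.wkbLineString,
    'multilinestring': ogr.wkbMultiLineString,
    'point': ogr.wkbPoint,
    'multipoint': ogr.wkbMultiPoint,
}
# geometry_type = GEOJSON_TO_OGR.get(geo_json['type'].lower())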
Example #18
0
from osgeo import ogr
from osgeo import osr

source = osr.SpatialReference()
source.ImportFromEPSG(4326)

target = osr.SpatialReference()
target.ImportFromEPSG(3857)

transform = osr.CoordinateTransformation(source, target)

geojson = """{ "type": "LineString", "coordinates": [ [ -75.313585, 43.069271 ], [ -75.269426, 43.114027 ], [ -75.158731, 43.114325 ] ] }"""
geom = ogr.CreateGeometryFromJson(geojson)
geom.Transform(transform)
print("Length = %d meters" % geom.Length())
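# Note: under GDAL 3+, EPSG:4326 defaults to the authority axis order
# (latitude first), so the lon-first coordinates in this GeoJSON would be
# swapped by the transform. A hedged fix for GDAL 3 environments, applied
# before building the CoordinateTransformation:
# source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)  # keep (lon, lat)
# target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)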
Example #19
0
def splitAtDateline(geom, preferredEpsg):
    """
    Given a Polygon Geometry object in lat/long, determine whether it crosses the date line, 
    and if so, split it into a multipolygon with a part on either side. 
    
    Use the given preferred EPSG to perform calculations. 
    
    Return a new Geometry in lat/long. 
    
    """
    crosses = crossesDateline(geom, preferredEpsg)
    if crosses:
        (projTr, llTr) = makeTransformations(4326, preferredEpsg)
        coords = getCoords(geom)
        (x, y) = (coords[:, 0], coords[:, 1])
        (yMin, yMax) = (y.min(), y.max())
        xMinPositive = None
        xMaxNegative = None
        xGe0 = (x >= 0)
        xLt0 = (x < 0)
        if xGe0.any() and xLt0.any():
            xMaxNegative = x[xLt0].max()
            xMinPositive = x[xGe0].min()

            # Create rectangles for the east and west hemispheres, constrained by the
            # extent of this polygon. Note that this assumes that we do not
            # cross both the date line, and also the prime (zero) meridian. This may not
            # always be true, notably when we are close to the pole.
            eastHemiRectCoords = [[xMinPositive, yMax], [xMinPositive, yMin],
                                  [180, yMin], [180, yMax],
                                  [xMinPositive, yMax]]
            eastHemiRectJson = repr({
                'type': 'Polygon',
                'coordinates': [eastHemiRectCoords]
            })
            westHemiRectCoords = [[-180, yMax], [-180, yMin],
                                  [xMaxNegative, yMin], [xMaxNegative, yMax],
                                  [-180, yMax]]
            westHemiRectJson = repr({
                'type': 'Polygon',
                'coordinates': [westHemiRectCoords]
            })
            eastHemiRect = ogr.CreateGeometryFromJson(eastHemiRectJson)
            westHemiRect = ogr.CreateGeometryFromJson(westHemiRectJson)

            geomProj = copyGeom(geom)
            geomProj.Transform(projTr)
            eastHemiRect.Transform(projTr)
            westHemiRect.Transform(projTr)

            eastHemiPart = geomProj.Intersection(eastHemiRect)
            westHemiPart = geomProj.Intersection(westHemiRect)
            eastHemiPart.Transform(llTr)
            westHemiPart.Transform(llTr)

            # Put these together as a single multipolygon
            eastPartCoords = getCoords(eastHemiPart)
            westPartCoords = getCoords(westHemiPart)
            # Discard any vertices which are still on the wrong side of the 180 line.
            # I do not understand what is going on here, but I have invested far more of
            # my valuable time than I should, and this kludge is a reasonable approximation.
            eastPartCoords = eastPartCoords[eastPartCoords[:, 0] > 0, :]
            westPartCoords = westPartCoords[westPartCoords[:, 0] < 0, :]

            # Convert to lists
            eastPartCoords = eastPartCoords.tolist()
            westPartCoords = westPartCoords.tolist()
            # Discard anything left with only 2 points
            if len(eastPartCoords) < 3:
                eastPartCoords = []
            if len(westPartCoords) < 3:
                westPartCoords = []
            # Close polygons. What a kludge.....
            if len(eastPartCoords) > 2:
                if eastPartCoords[-1][0] != eastPartCoords[0][
                        0] or eastPartCoords[-1][1] != eastPartCoords[0][1]:
                    eastPartCoords.append(eastPartCoords[0])
            if len(westPartCoords) > 2:
                if westPartCoords[-1][0] != westPartCoords[0][
                        0] or westPartCoords[-1][1] != westPartCoords[0][1]:
                    westPartCoords.append(westPartCoords[0])

            # Make a multi-polygon from the two parts
            coordsMulti = [[eastPartCoords], [westPartCoords]]
            jsonStr = repr({
                'type': 'MultiPolygon',
                'coordinates': coordsMulti
            })
            newGeom = ogr.CreateGeometryFromJson(jsonStr)
        else:
            # It didn't really cross the date line, but seems to due to rounding
            # error in crossesDateline().
            newGeom = copyGeom(geom)
    else:
        newGeom = copyGeom(geom)
    return newGeom
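
# Usage sketch for splitAtDateline(), assuming the module's own helpers
# (crossesDateline, makeTransformations, getCoords, copyGeom) are available
# as above. EPSG:3832 (PDC Mercator) is one plausible projected CRS centred
# on the Pacific; any CRS valid across the date line should work.
from osgeo import ogr

poly = ogr.CreateGeometryFromJson(repr({
    'type': 'Polygon',
    'coordinates': [[[178, -10], [-178, -10], [-178, 10], [178, 10], [178, -10]]]
}))
split = splitAtDateline(poly, preferredEpsg=3832)
print(split.GetGeometryName())  # expected: MULTIPOLYGON when it crosses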
Example #20
    def calculate_on_GEE(self):
        self.close()

        crosses_180th, geojsons = self.aoi.bounding_box_gee_geojson()
        val = []
        n = 1

        if self.area_tab.area_fromfile.isChecked():
            for f in self.aoi.get_layer_wgs84().getFeatures():
                # Get an OGR geometry from the QGIS geometry
                geom = f.geometry()
                val.append(geom)
                n += 1

            # get the GeoJSON string for the first feature
            val_string = val[0].asJson()

            # create ogr geometry
            val_geom = ogr.CreateGeometryFromJson(val_string)
            # simplify polygon to tolerance of 0.003
            val_geom_simplified = val_geom.Simplify(0.003)

            # fetch coordinates from json
            coords = json.loads(
                val_geom_simplified.ExportToJson())['coordinates']
            geometries = json.dumps([{
                "coordinates": coords,
                "type": "Polygon"
            }])

        elif self.area_tab.area_fromadmin.isChecked():
            geometries = json.dumps([{
                "coordinates":
                self.get_admin_poly_geojson()['geometry']['coordinates'][0],
                "type":
                "Polygon"
            }])
        elif self.area_tab.area_frompoint.isChecked():
            point = QgsPointXY(
                float(self.area_tab.area_frompoint_point_x.text()),
                float(self.area_tab.area_frompoint_point_y.text()))
            crs_src = QgsCoordinateReferenceSystem(
                self.area_tab.canvas.mapSettings().destinationCrs().authid())
            point = QgsCoordinateTransform(
                crs_src, self.aoi.crs_dst,
                QgsProject.instance()).transform(point)
            geometries = json.dumps(
                json.loads(QgsGeometry.fromPointXY(point).asJson()))

        payload = {
            'year_start': self.lc_setup_tab.use_esa_bl_year.date().year(),
            'year_end': self.lc_setup_tab.use_esa_tg_year.date().year(),
            'fl': self.get_fl(),
            'download_annual_lc': self.download_annual_lc.isChecked(),
            #    'geojsons':json.dumps(geojsons),
            'geojsons': geometries,
            'crs': self.aoi.get_crs_dst_wkt(),
            'crosses_180th': crosses_180th,
            'remap_matrix': self.lc_setup_tab.dlg_esa_agg.get_agg_as_list(),
            'task_name': self.options_tab.task_name.text(),
            'task_notes': self.options_tab.task_notes.toPlainText()
        }

        resp = run_script(get_script_slug('soil-organic-carbon'), payload)

        if resp:
            mb.pushMessage(
                QtWidgets.QApplication.translate("MISLAND", "Submitted"),
                QtWidgets.QApplication.translate(
                    "MISLAND",
                    "Soil organic carbon submitted to Google Earth Engine."),
                level=0,
                duration=5)
        else:
            mb.pushMessage(
                QtWidgets.QApplication.translate("MISLAND", "Error"),
                QtWidgets.QApplication.translate(
                    "MISLAND",
                    "Unable to submit soil organic carbon task to Google Earth Engine."
                ),
                level=0,
                duration=5)
Example #21
def get_stitch_cfgs(context_file):
    """Return all possible stitch interferogram configurations."""

    # get context
    with open(context_file) as f:
        context = json.load(f)

    # get args
    project = context['project']
    direction = context.get('direction', 'along')
    subswaths = [int(i) for i in context.get('subswaths', "1 2 3").split()]
    subswaths.sort()
    min_stitch_count = int(context['min_stitch_count'])
    extra_products = [i.strip() for i in context.get('extra_products', 'los.rdr.geo').split()]
    orig_query = context['query']
    logger.info("orig_query: %s" % json.dumps(orig_query, indent=2))

    # cleanse query of ids from triggered rules
    query = clean_query(orig_query)
    logger.info("clean query: %s" % json.dumps(query, indent=2))

    # log enumerator params
    logger.info("project: %s" % project)
    logger.info("direction: %s" % direction)
    logger.info("subswaths: %s" % subswaths)
    logger.info("min_stitch_count: %s" % min_stitch_count)
    logger.info("extra_products: %s" % extra_products)

    # get bbox from query
    coords = None
    bbox = [-90., 90., -180., 180.]
    if 'and' in query.get('query', {}).get('filtered', {}).get('filter', {}):
        filts = query['query']['filtered']['filter']['and']
    elif 'geo_shape' in query.get('query', {}).get('filtered', {}).get('filter', {}):
        filts = [ { "geo_shape": query['query']['filtered']['filter']['geo_shape'] } ]
    else: filts = []
    for filt in filts:
        if 'geo_shape' in filt:
            coords = filt['geo_shape']['location']['shape']['coordinates']
            roi = {
                'type': 'Polygon',
                'coordinates': coords,
            }
            logger.info("query filter ROI: %s" % json.dumps(roi))
            roi_geom = ogr.CreateGeometryFromJson(json.dumps(roi))
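            # OGR GetEnvelope() returns (minX, maxX, minY, maxY);
            # reorder below to [south, north, west, east]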
            roi_x_min, roi_x_max, roi_y_min, roi_y_max = roi_geom.GetEnvelope()
            bbox = [ roi_y_min, roi_y_max, roi_x_min, roi_x_max ]
            logger.info("query filter bbox: %s" % bbox)
            break

    # query docs
    uu = UU()
    logger.info("rest_url: {}".format(uu.rest_url))
    logger.info("dav_url: {}".format(uu.dav_url))
    logger.info("version: {}".format(uu.version))
    logger.info("grq_index_prefix: {}".format(uu.grq_index_prefix))

    # get normalized rest url
    rest_url = uu.rest_url[:-1] if uu.rest_url.endswith('/') else uu.rest_url

    # get index name and url
    url = "{}/{}/_search?search_type=scan&scroll=60&size=100".format(rest_url, uu.grq_index_prefix)
    logger.info("idx: {}".format(uu.grq_index_prefix))
    logger.info("url: {}".format(url))

    # query hits
    query.update({
        "partial_fields" : {
            "partial" : {
                "exclude" : "city",
            }
        }
    })
    #logger.info("query: {}".format(json.dumps(query, indent=2)))
    r = requests.post(url, data=json.dumps(query))
    r.raise_for_status()
    scan_result = r.json()
    count = scan_result['hits']['total']
    scroll_id = scan_result['_scroll_id']
    hits = []
    while True:
        r = requests.post('%s/_search/scroll?scroll=60m' % rest_url, data=scroll_id)
        res = r.json()
        scroll_id = res['_scroll_id']
        if len(res['hits']['hits']) == 0: break
        hits.extend(res['hits']['hits'])

    # remove partial fields and reformat metadata as expected by stitcher_utils
    #hits = remove_partials(hits)

    # extract reference ids
    ids = { h['_id']: True for h in hits }
    logger.info("ids: {}".format(json.dumps(ids, indent=2)))
    logger.info("hits count: {}".format(len(hits)))

    # dump metadata
    valid_meta_ts_out_file = "valid_meta_ts_out.json"
    with open(valid_meta_ts_out_file, 'w') as f:
        json.dump(hits, f, indent=2)

    # group frames by track and date pairs
    grouped = group_frames_by_track_date(hits)
    logger.info("grouped: %s" % json.dumps(grouped, indent=2))

    # enumerate configs
    projects = []
    directions = []
    extra_products_list = []
    filenames = []
    filename_urls = []
    ifg_ids = []
    base_products = ['filt_topophase.unw.geo', 'filt_topophase.unw.conncomp.geo', 'phsig.cor.geo']
    base_products.extend(extra_products)
    for track in sorted(grouped['grouped']):
        for dt_pair in sorted(grouped['grouped'][track]):
            stitch_count = 0

            # filter scenes without all requested subswaths
            swath_check = {}
            for swath in subswaths:
                if swath not in grouped['grouped'][track][dt_pair]:
                    raise RuntimeError("Did not find singlescene IFGs for subswath %s for track %s dates %s. Check your query results." %
                                       (swath, track, dt_pair))
                for tr, id in grouped['grouped'][track][dt_pair][swath]:
                    swath_check.setdefault(tr, {})[swath] = id
            skip_tr = {}
            for tr in sorted(swath_check):
                for swath in subswaths:
                    if swath not in swath_check[tr]: skip_tr[tr] = True
                
            furls = []
            swathnums = []
            ifg_sts = set()
            ifg_ets = set()
            fnames_tr = {}

            for swath in subswaths:
                swathnums.append(swath)
                for tr, id in grouped['grouped'][track][dt_pair][swath]:
                    if tr in skip_tr:
                        logger.warning("Skipping %s for scene %s since only subswaths %s exist." %
                                       (id, tr, sorted(swath_check[tr].keys())))
                        continue
                    bisect.insort(fnames_tr.setdefault(tr, []),
                                  os.path.join(id, 'merged', 'filt_topophase.unw.geo'))
                    for prod_file in base_products:
                        furls.append({
                            'url': "%s/merged/%s" % (grouped['hits'][id], prod_file),
                            'local_path': "%s/merged/" % id,
                        })
                        furls.append({
                            'url': "%s/merged/%s.xml" % (grouped['hits'][id], prod_file),
                            'local_path': "%s/merged/" % id,
                        })
                    furls.append({
                        'url': "%s/fine_interferogram.xml" % grouped['hits'][id],
                        'local_path': "%s/" % id,
                    })
                    furls.append({
                        'url': "%s/%s.dataset.json" % (grouped['hits'][id], id),
                        'local_path': "%s/_%s.dataset.json" % (id, id),
                    })
                    furls.append({
                        'url': "%s/%s.met.json" % (grouped['hits'][id], id),
                        'local_path': "%s/_%s.met.json" % (id, id),
                    })
                    stitch_count += 1
                    st, et = tr.split('_')
                    ifg_sts.add(st)
                    ifg_ets.add(et)
            ifg_sts = list(ifg_sts)
            ifg_sts.sort()
            ifg_ets = list(ifg_ets)
            ifg_ets.sort()

            # check minimum stitch count met
            if stitch_count < min_stitch_count:
                logger.warning("Failed to find minimum stitch count of %s for track %s date pair %s: %s" %
                               (min_stitch_count, track, dt_pair, stitch_count))
                continue

            # build job params
            projects.append(project)
            directions.append(direction)
            extra_products_list.append(extra_products)
            filenames.append([fnames_tr[tr] for tr in sorted(fnames_tr)])
            filename_urls.append(furls)
            ifg_hash = hashlib.md5(json.dumps([
                projects[-1],
                directions[-1],
                extra_products_list[-1],
                filenames[-1],
                filename_urls[-1],
            ], sort_keys=True)).hexdigest()
            ifg_ids.append(ID_TMPL.format(int(track), ifg_sts[0], ifg_ets[-1], 
                           ''.join(map(str, swathnums)), direction, ifg_hash[0:4]))
    logger.info("projects: %s" % projects)
    logger.info("directions: %s" % directions)
    logger.info("extra_products: %s" % extra_products_list)
    logger.info("filenames: %s" % json.dumps(filenames, indent=2))
    logger.info("filename_urls: %s" % json.dumps(filename_urls, indent=2))
    logger.info("ifg_ids: %s" % ifg_ids)
    return ( projects, directions, extra_products_list, filenames, filename_urls, ifg_ids )
Example #22
def ee_image_download(ini_path=None, overwrite_flag=False):
    """Earth Engine Annual Mean Image Download

    Parameters
    ----------
    ini_path : str
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).

    """
    logging.info('\nEarth Engine EDDI Image Download')

    # 12 month EDDI
    aggregation_days = 365
    export_name = 'eddi_12month'
    output_name = 'eddi.12month'

    eddi_date_list = [
        '0131', '0228', '0331', '0430', '0531', '0630', '0731', '0831', '0930',
        '1031', '1130', '1231'
    ]
    # eddi_date_list = ['0930', '1231']
    # eddi_date_list = ['{:02d}01'.format(m) for m in range(1, 13)]
    # eddi_date_list = []

    eddi_folder = 'eddi'

    # Do we need to support separate EDDI years?
    # start_year = 1984
    # end_year = 2016

    #
    climo_year_start = 1979
    climo_year_end = 2017

    # Read config file
    # ini = inputs.ini_parse(ini_path, section='IMAGE')
    ini = inputs.read(ini_path)
    inputs.parse_section(ini, section='INPUTS')
    inputs.parse_section(ini, section='SPATIAL')
    inputs.parse_section(ini, section='EXPORT')
    inputs.parse_section(ini, section='IMAGES')

    nodata_value = -9999

    # Manually set output spatial reference
    logging.info('\nHardcoding GRIDMET snap, cellsize and spatial reference')
    ini['output_x'], ini['output_y'] = -124.79299639209513, 49.41685579737572
    ini['SPATIAL']['cellsize'] = 0.041666001963701
    # ini['SPATIAL']['cellsize'] = [0.041666001963701, 0.041666001489718]
    # ini['output_x'] = -124.79166666666666666667
    # ini['output_y'] = 25.04166666666666666667
    # ini['SPATIAL']['cellsize'] = 1. / 24
    ini['SPATIAL']['osr'] = gdc.epsg_osr(4326)
    # ini['SPATIAL']['osr'] = gdc.epsg_osr(4269)
    ini['SPATIAL']['crs'] = 'EPSG:4326'
    logging.debug('  Snap: {} {}'.format(ini['output_x'], ini['output_y']))
    logging.debug('  Cellsize: {}'.format(ini['SPATIAL']['cellsize']))
    logging.debug('  OSR: {}'.format(ini['SPATIAL']['osr']))

    # Get ee features from shapefile
    zone_geom_list = gdc.shapefile_2_geom_list_func(
        ini['INPUTS']['zone_shp_path'],
        zone_field=ini['INPUTS']['zone_field'],
        reverse_flag=False)

    # Filter features by FID before merging geometries
    if ini['INPUTS']['fid_keep_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] in ini['INPUTS']['fid_keep_list']
        ]
    if ini['INPUTS']['fid_skip_list']:
        zone_geom_list = [
            zone_obj for zone_obj in zone_geom_list
            if zone_obj[0] not in ini['INPUTS']['fid_skip_list']
        ]

    # Merge geometries
    if ini['INPUTS']['merge_geom_flag']:
        merge_geom = ogr.Geometry(ogr.wkbMultiPolygon)
        for zone in zone_geom_list:
            zone_multipolygon = ogr.ForceToMultiPolygon(
                ogr.CreateGeometryFromJson(json.dumps(zone[2])))
            for zone_polygon in zone_multipolygon:
                merge_geom.AddGeometry(zone_polygon)
        # merge_json = json.loads(merge_mp.ExportToJson())
        zone_geom_list = [[
            0, ini['INPUTS']['zone_filename'],
            json.loads(merge_geom.ExportToJson())
        ]]
        ini['INPUTS']['zone_field'] = ''

    # Need zone_shp_path projection to build EE geometries
    zone_osr = gdc.feature_path_osr(ini['INPUTS']['zone_shp_path'])
    zone_proj = gdc.osr_wkt(zone_osr)
    # zone_proj = ee.Projection(zone_proj).wkt().getInfo()
    # zone_proj = zone_proj.replace('\n', '').replace(' ', '')
    logging.debug('  Zone Projection: {}'.format(zone_proj))

    # Initialize Earth Engine API key
    logging.info('\nInitializing Earth Engine')
    ee.Initialize()
    utils.ee_request(ee.Number(1).getInfo())

    # Get current running tasks
    tasks = utils.get_ee_tasks()

    # Download images for each feature separately
    for zone_fid, zone_name, zone_json in zone_geom_list:
        zone_name = zone_name.replace(' ', '_')
        logging.info('ZONE: {} (FID: {})'.format(zone_name, zone_fid))

        # Build EE geometry object for zonal stats
        zone_geom = ee.Geometry(zone_json, zone_proj, False)

        # Project the zone_geom to the GRIDMET projection
        # if zone_proj != output_proj:
        zone_geom = zone_geom.transform(ini['SPATIAL']['crs'], 0.001)

        # Get the extent from the Earth Engine geometry object?
        zone_extent = zone_geom.bounds().getInfo()['coordinates'][0]
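        # NOTE: zip() returns an iterator on Python 3, so the zip(*zone_extent)[0]
        # subscripts below are Python 2 only; use list(zip(*zone_extent)) on Python 3.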
        zone_extent = gdc.Extent([
            min(zip(*zone_extent)[0]),
            min(zip(*zone_extent)[1]),
            max(zip(*zone_extent)[0]),
            max(zip(*zone_extent)[1])
        ])
        # # Use GDAL and geometry json to build extent, transform, and shape
        # zone_extent = gdc.Extent(
        #     ogr.CreateGeometryFromJson(json.dumps(zone_json)).GetEnvelope())
        # # zone_extent = gdc.Extent(zone_geom.GetEnvelope())
        # zone_extent.ymin, zone_extent.xmax = zone_extent.xmax, zone_extent.ymin

        # Adjust extent to match raster
        zone_extent = zone_extent.adjust_to_snap('EXPAND', ini['output_x'],
                                                 ini['output_y'],
                                                 ini['SPATIAL']['cellsize'])
        zone_geo = zone_extent.geo(ini['SPATIAL']['cellsize'])
        zone_transform = gdc.geo_2_ee_transform(zone_geo)
        zone_transform = '[' + ','.join(map(str, zone_transform)) + ']'
        zone_shape = zone_extent.shape(ini['SPATIAL']['cellsize'])
        logging.debug('  Zone Shape: {}'.format(zone_shape))
        logging.debug('  Zone Transform: {}'.format(zone_transform))
        logging.debug('  Zone Extent: {}'.format(zone_extent))
        # logging.debug('  Geom: {}'.format(zone_geom.getInfo()))

        # zone_transform is already a bracketed string at this point
        output_transform = zone_transform
        output_shape = '[{1}x{0}]'.format(*zone_shape)
        logging.debug('  Output Projection: {}'.format(ini['SPATIAL']['crs']))
        logging.debug('  Output Transform: {}'.format(output_transform))
        logging.debug('  Output Shape: {}'.format(output_shape))

        zone_eddi_ws = os.path.join(ini['IMAGES']['output_ws'], zone_name,
                                    eddi_folder)
        if not os.path.isdir(zone_eddi_ws):
            os.makedirs(zone_eddi_ws)

        # GRIDMET PDSI
        # Process each image in the collection by date
        export_list = []

        export_list = list(
            date_range(datetime.datetime(ini['INPUTS']['start_year'], 1, 1),
                       datetime.datetime(ini['INPUTS']['end_year'], 12, 31),
                       skip_leap_days=True))

        # Filter the export list down to the month-end dates in eddi_date_list
        if eddi_date_list:
            export_list = [
                tgt_dt for tgt_dt in export_list
                if tgt_dt.strftime('%m%d') in eddi_date_list
            ]

        for tgt_dt in export_list:
            date_str = tgt_dt.strftime('%Y%m%d')
            logging.info('{} {}'.format(tgt_dt.strftime('%Y-%m-%d'),
                                        output_name))

            if tgt_dt >= datetime.datetime.today():
                logging.info('  Date after current date, skipping')
                continue

            # Rename to match naming style from getDownloadURL
            #     image_name.band.tif
            export_id = '{}_{}_{}'.format(ini['INPUTS']['zone_filename'],
                                          date_str, export_name.lower())
            output_id = '{}_{}'.format(date_str, output_name)

            export_path = os.path.join(ini['EXPORT']['export_ws'],
                                       export_id + '.tif')
            output_path = os.path.join(zone_eddi_ws, output_id + '.tif')
            logging.debug('  Export: {}'.format(export_path))
            logging.debug('  Output: {}'.format(output_path))

            if overwrite_flag:
                if export_id in tasks.keys():
                    logging.debug('  Task already submitted, cancelling')
                    ee.data.cancelTask(tasks[export_id])
                    del tasks[export_id]
                if os.path.isfile(export_path):
                    logging.debug('  Export image already exists, removing')
                    utils.remove_file(export_path)
                    # os.remove(export_path)
                if os.path.isfile(output_path):
                    logging.debug('  Output image already exists, removing')
                    utils.remove_file(output_path)
                    # os.remove(output_path)
            else:
                if os.path.isfile(export_path):
                    logging.debug('  Export image already exists, moving')
                    shutil.move(export_path, output_path)
                    gdc.raster_path_set_nodata(output_path, nodata_value)
                    # DEADBEEF - should raster stats be computed?
                    # gdc.raster_statistics(output_path)
                    continue
                elif os.path.isfile(output_path):
                    logging.debug('  Output image already exists, skipping')
                    continue
                elif export_id in tasks.keys():
                    logging.debug('  Task already submitted, skipping')
                    continue

            eddi_image = ee_eddi_image(tgt_dt.strftime('%Y-%m-%d'),
                                       agg_days=aggregation_days,
                                       variable='eddi',
                                       year_start=climo_year_start,
                                       year_end=climo_year_end)

            logging.debug('  Building export task')
            # if ini['EXPORT']['export_dest'] == 'gdrive':
            task = ee.batch.Export.image.toDrive(
                image=eddi_image,
                description=export_id,
                # folder=ini['EXPORT']['export_folder'],
                fileNamePrefix=export_id,
                dimensions=output_shape,
                crs=ini['SPATIAL']['crs'],
                crsTransform=output_transform)
            # elif ini['EXPORT']['export_dest'] == 'cloud':
            #     task = ee.batch.Export.image.toCloudStorage(
            #         image=eddi_image,
            #         description=export_id,
            #         bucket=ini['EXPORT']['export_folder'],
            #         fileNamePrefix=export_id,
            #         dimensions=output_shape,
            #         crs=ini['SPATIAL']['crs'],
            #         crsTransform=output_transform)

            logging.debug('  Starting export task')
            utils.ee_request(task.start())
Example #23
def kml_geom(place):
    geojson_geom = place.get('location').get('geometry')
    json_geom = ogr.CreateGeometryFromJson(json.dumps(geojson_geom))
    kml_geom = json_geom.ExportToKML()
    return kml_geom
                prop, value))
        else:
            raise

    else:
        return feature


with open(os.path.join(pwd, '..', 'raw', 'chicago_community_areas.geojson'),
          'r') as f:
    ca_geojson = json.load(f)

    ca_feature = feature_from_array(ca_geojson['features'], 'community',
                                    community_area)

    ca_geom = ogr.CreateGeometryFromJson(json.dumps(ca_feature['geometry']))

intersections = []

with open(os.path.join(pwd, '..', 'raw', 'chicago_wards.geojson'), 'r') as f:
    ward_geojson = json.load(f)

    for ward in wards.split(','):
        ward_feature = feature_from_array(ward_geojson['features'], 'ward',
                                          ward)

        ward_geom = ogr.CreateGeometryFromJson(
            json.dumps(ward_feature['geometry']))

        intersection = json.loads(
            ca_geom.Intersection(ward_geom).ExportToJson())
def checkGeos():

    point = """{"type":"Point","coordinates":[-90.02001032734194,35.127405562848075]}"""
    geom = ogr.CreateGeometryFromJson(point)
    print(geom.GetX())
    print(geom.GetY())
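
# CreateGeometryFromJson() returns None on unparsable input (or raises, if
# ogr.UseExceptions() is enabled), so a guard is worth keeping around any
# user-supplied JSON. A minimal sketch:
from osgeo import ogr

geom = ogr.CreateGeometryFromJson('not valid json')
if geom is None:
    print("invalid GeoJSON geometry")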
Example #26
def export_shapefile(vectorlayer):

    file_path = vectorlayer.file_path
    file_name = vectorlayer.file_name
    full_path = join(file_path, file_name)

    encoding = vectorlayer.encoding
    srs_wkt = vectorlayer.srs_wkt
    geom_type = vectorlayer.geom_type

    f = open(full_path, "r", encoding=encoding)
    geojson = json.loads(f.read())
    f.close()

    dst_dir = tempfile.mkdtemp()
    shapefile_name = file_name.replace(".json", ".shp")
    dst_file = join(dst_dir, shapefile_name)

    # source coordinate system: EPSG:4326 (WGS84)
    src_spatial_ref = osr.SpatialReference()
    src_spatial_ref.ImportFromEPSG(4326)
    # coordinate system of the original layer
    dst_spatial_ref = osr.SpatialReference()
    dst_spatial_ref.ImportFromWkt(srs_wkt)
    coord_transform = osr.CoordinateTransformation(src_spatial_ref,
                                                   dst_spatial_ref)

    driver = ogr.GetDriverByName("ESRI Shapefile")
    datasource = driver.CreateDataSource(dst_file)
    layer = datasource.CreateLayer(name=shapefile_name,
                                   srs=dst_spatial_ref,
                                   geom_type=geom_type)

    for i, attr in enumerate(geojson["attributes"]):
        field = ogr.FieldDefn(attr["name"], attr["type"])
        field.SetWidth(attr["width"])
        field.SetPrecision(attr["precision"])
        layer.CreateField(field)

    for i, feature in enumerate(geojson["features"]):
        geometry = feature.get("geometry", None)
        if geometry is None:
            continue
        dst_geometry = ogr.CreateGeometryFromJson(json.dumps(geometry))
        # Transform before SetGeometry: SetGeometry stores a copy of the
        # geometry, so transforming dst_geometry afterwards would leave the
        # feature's geometry untransformed.
        dst_geometry.Transform(coord_transform)
        dst_feature = ogr.Feature(layer.GetLayerDefn())
        dst_feature.SetGeometry(dst_geometry)

        properties = feature.get("properties", None)
        if properties is None:
            continue

        for field, value in properties.items():
            field = field.replace("\"", "")
            field = field.replace("'", "")
            dst_feature.SetField(field, value)

        layer.CreateFeature(dst_feature)
        dst_feature.Destroy()

    datasource.Destroy()

    zip_dst = file_name.replace(".json", "_zip")
    zip_dst = shutil.make_archive(zip_dst, 'zip', dst_dir)
    shutil.rmtree(dst_dir)
    zip_file = open(zip_dst, "rb")

    f = FileWrapper(zip_file)
    response = HttpResponse(f, content_type="application/zip")
    zip_name = file_name.replace(".json", ".zip")
    response['Content-Disposition'] = "attachment; filename=" + zip_name

    return response
                # It's known as "Cylindrical Equal-Area", but I guess it's called the other thing in EPSG because that agency was the first
                # one to add it under their own product name. </sigh>
                # https://nsidc.org/data/atlas/epsg_3410.html

                from osgeo import ogr
                from osgeo import osr

                source = osr.SpatialReference()
                source.ImportFromEPSG(4326)

                target = osr.SpatialReference()
                target.ImportFromEPSG(3410)

                transform = osr.CoordinateTransformation(source, target)

                poly = ogr.CreateGeometryFromJson(geojson.dumps(f['geometry']))
                poly.Transform(transform)

                sq_m = format(poly.GetArea(), 'f')
                props['geom:area_square_m'] = float(sq_m)

            except Exception as e:
                logging.warning(
                    "failed to calculate area in square meters, because %s" %
                    e)

        # end of osgeo stuff

        f['bbox'] = bbox

        # ensure that all properties are prefixed
def _calc_route_info(_route_data):
    link_collection = pd.DataFrame(columns=[
        'WVK_ID', 'WEGNUMMER', 'JTE_ID_BEG', 'JTE_ID_END', 'POS_TV_WOL',
        'BST_CODE', 'geometry', 'DutchRD_X', 'DutchRD_Y', 'latitude',
        'longitude', 'route_id', 'link_offset', 'distance_offset'
    ])
    route_id = _route_data['route_id']
    route_link_collection = _route_data['link_collection']
    XY = []
    latlon = []
    wvk_id = []
    link_begin_ind = []
    link_distance = []
    lnstr = ogr.Geometry(ogr.wkbLineString)
    distance_offset = 0
    for link_ind, link in enumerate(route_link_collection):
        link_info = {
            'route_id': route_id,
            'link_offset': link_ind,
            'JTE_ID_BEG': link[0],
            'JTE_ID_END': link[1],
            **link[2]
        }
        # DutchRD X,Y
        link_geo = ogr.CreateGeometryFromJson(link[2]['geometry'])
        pts = link_geo.GetPoints()
        link_info['DutchRD_X'] = np.array([p[0] for p in pts])
        link_info['DutchRD_Y'] = np.array([p[1] for p in pts])
        link_info['distance_offset'] = distance_offset
        XY += pts
        wvk_id.append(link[2]['WVK_ID'])
        link_begin_ind.append(len(latlon) + 1)
        segment_length = np.zeros(len(pts))
        p_pre = np.array(pts[0])
        for p_ind, p in enumerate(pts):
            p_wgs = DutchRDtoWGS84(*p)
            lnstr.AddPoint_2D(*p_wgs)
            latlon.append(p_wgs)
            p = np.array(p)
            segment_length[p_ind] = np.linalg.norm(p - p_pre)
            p_pre = p
        link_distance.append(np.sum(segment_length))
        link_info['segment_distances'] = segment_length
        link_info['distance_along_route'] = np.cumsum(
            segment_length) + distance_offset
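        # NOTE: DataFrame.append() was removed in pandas 2.0; on newer pandas,
        # an equivalent is:
        #     link_collection = pd.concat(
        #         [link_collection, pd.DataFrame([link_info])], ignore_index=True)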
        link_collection = link_collection.append(link_info, ignore_index=True)
        distance_offset = link_info['distance_along_route'][-1]
    route_info = {
        'id': route_id,
        'XY': XY,
        'latlon': latlon,
        'link': {
            'wvk_id': wvk_id,
            'distance': link_distance,
            'beginpointind': link_begin_ind
        },
        'geojson': {
            'type': 'Feature',
            'geometry': json.loads(lnstr.ExportToJson()),
            'properties': []
        }
    }
    return route_info, link_collection
Example #29
    def get(self, request, *args, **kwargs):
        ext_lc = kwargs['extension'].lower()
        ##
        data = None
        if settings.GEODASH_CACHE_DATA:
            client = provision_memcached_client()
            if client:
                key = self._build_key(request, *args, **kwargs)
                print "Checking cache with key ", key

                data = None
                try:
                    data = client.get(key)
                except socket_error as serr:
                    data = None
                    print "Error getting data from in-memory cache."
                    if serr.errno == errno.ECONNREFUSED:
                        print "Memcached is likely not running.  Start memcached with supervisord."
                    raise serr

                if not data:
                    print "Data not found in cache."
                    data = self._build_data(request, *args, **kwargs)
                    if ext_lc == "geodash":
                        data = [int(x) for x in data]
                    try:
                        client.set(key, data)
                    except socket_error as serr:
                        print "Error saving data to in-memory cache."
                        if serr.errno == errno.ECONNREFUSED:
                            print "Memcached is likely not running or the data exceeds memcached item size limit.  Start memcached with supervisord."
                        raise serr
                else:
                    print "Data found in cache."
            else:
                print "Could not connect to memcached client.  Bypassing..."
                data = self._build_data(request, *args, **kwargs)
        else:
            print "Not caching data (settings.geodash_CACHE_DATA set to False)."
            data = self._build_data(request, *args, **kwargs)

        #content = json.dumps(data, default=jdefault)
        #content = re.sub(
        #    settings.GEODASH_REGEX_CLIP_COORDS_PATTERN,
        #    settings.GEODASH_REGEX_CLIP_COORDS_REPL,
        #    content,
        #    flags=re.IGNORECASE)

        root = self._build_root(request, *args, **kwargs)
        attributes = self._build_attributes(request, *args, **kwargs)
        if attributes:
            data = grep(obj=data,
                        root=root,
                        attributes=attributes,
                        filters=getRequestParameters(request, "grep", None))

        if ext_lc == "json":
            return HttpResponse(json.dumps(data, default=jdefault),
                                content_type="application/json")
        elif ext_lc == "yml" or ext_lc == "yaml":
            response = yaml.safe_dump(data,
                                      encoding="utf-8",
                                      allow_unicode=True,
                                      default_flow_style=False)
            return HttpResponse(response, content_type="text/plain")
        elif ext_lc == "csv" or ext_lc == "csv":
            writer = GeoDashDictWriter("", attributes)
            writer.writeheader()
            writer.writerows(extract(root, data, []))
            response = writer.getvalue()
            return HttpResponse(response, content_type="text/csv")
        elif ext_lc == "zip":
            # See the following for how to create zipfile in memory, mostly.
            # https://newseasandbeyond.wordpress.com/2014/01/27/creating-in-memory-zip-file-with-python/
            tempDirectory = tempfile.mkdtemp()
            print "Temp Directory:", tempDirectory
            if tempDirectory:
                geometryType = self._build_geometry_type(
                    request, *args, **kwargs)
                ########### Create Files ###########
                os.environ['SHAPE_ENCODING'] = "utf-8"
                # See following for how to create shapefiles using OGR python bindings
                # https://pcjericks.github.io/py-gdalogr-cookbook/vector_layers.html#filter-and-select-input-shapefile-to-new-output-shapefile-like-ogr2ogr-cli
                basepath, out_filename, ext = parse_path(request.path)
                out_shapefile = os.path.join(tempDirectory,
                                             out_filename + ".shp")
                out_driver = ogr.GetDriverByName("ESRI Shapefile")
                if os.path.exists(out_shapefile):
                    out_driver.DeleteDataSource(out_shapefile)
                out_datasource = out_driver.CreateDataSource(out_shapefile)
                out_layer = out_datasource.CreateLayer(
                    (out_filename + ".shp").encode('utf-8'),
                    geom_type=geometryType)
                ########### Create Fields ###########
                out_layer.CreateField(ogr.FieldDefn(
                    "id", ogr.OFTInteger))  # Create ID Field
                for attribute in attributes:
                    label = attribute.get('label_shp') or attribute.get(
                        'label')
                    out_layer.CreateField(
                        ogr.FieldDefn(
                            label,
                            ATTRIBUTE_TYPE_TO_OGR.get(
                                attribute.get('type', 'string'))))
                ########### Create Features ###########
                features = extract(root, data, [])
                for i in range(len(features)):
                    feature = features[i]
                    out_feature = ogr.Feature(out_layer.GetLayerDefn())
                    geom = extract(
                        self._build_geometry(request, *args, **kwargs),
                        feature, None)
                    out_feature.SetGeometry(
                        ogr.CreateGeometryFromJson(
                            json.dumps(geom, default=jdefault)))
                    out_feature.SetField("id", i)
                    for attribute in attributes:
                        label = attribute.get('label_shp') or attribute.get(
                            'label')
                        out_value = extract(attribute.get('path'), feature,
                                            None)
                        out_feature.SetField(
                            (attribute.get('label_shp')
                             or attribute.get('label')),
                            out_value.encode('utf-8') if isinstance(
                                out_value, basestring) else out_value)
                    out_layer.CreateFeature(out_feature)
                out_datasource.Destroy()
                ########### Create Projection ###########
                spatialRef = osr.SpatialReference()
                spatialRef.ImportFromEPSG(4326)
                spatialRef.MorphToESRI()
                with open(os.path.join(tempDirectory, out_filename + ".prj"),
                          'w') as f:
                    f.write(spatialRef.ExportToWkt())
                    f.close()
                ########### Create Zipfile ###########
                buff = StringIO.StringIO()
                zippedShapefile = zipfile.ZipFile(buff, mode='w')
                #memoryFiles = []
                component_filenames = os.listdir(tempDirectory)
                #for i in range(len(componentFiles)):
                #    memoryFiles.append(StringIO.StringIO())
                for i in range(len(component_filenames)):
                    with open(
                            os.path.join(tempDirectory,
                                         component_filenames[i]), 'rb') as f:
                        contents = f.read()
                        zippedShapefile.writestr(component_filenames[i],
                                                 contents)
                zippedShapefile.close()

                print "zippedShapefile.printdir()", zippedShapefile.printdir()

                ########### Delete Temporary Directory ###########
                shutil.rmtree(tempDirectory)
                ########### Response ###########
                return HttpResponse(buff.getvalue(),
                                    content_type="application/zip")
                #for i in range(len(componentFiles)):
                #    with open(componentFiles[i], 'w') as componentFile:
                #        memoryFiles[i].write(componentFile.read())
            else:
                raise Http404(
                    "Could not acquire temporary directory for building shapefile."
                )
        elif ext_lc == "geodash":
            response = HttpResponse(content_type='application/octet-stream')
            # Need to do by bytes(bytearray(x)) to properly translate integers to 1 byte each
            # If you do bytes(data) it will give 4 bytes to each integer.
            response.write(bytes(bytearray(data)))
            return response
        else:
            raise Http404("Unknown config format.")
Example #30
    def _create_base_map(self, ):
        """
        Deal with different types way to define the AOI, if none is specified, then the image bound is used.
        """
        gdal.UseExceptions()
        ogr.UseExceptions()
        if self.aoi is not None:
            if os.path.exists(self.aoi):
                try:
                    g = gdal.Open(self.aoi)
                    # subprocess.call(['gdaltindex', '-f', 'GeoJSON', '-t_srs', 'EPSG:4326', self.toa_dir + '/AOI.json', self.aoi])
                    geojson = get_boundary(self.aoi)[0]
                    with open(self.toa_dir + "/AOI.json", "wb") as f:
                        f.write(geojson.encode())
                except Exception:
                    try:
                        gr = ogr.Open(str(self.aoi))
                        l = gr.GetLayer(0)
                        f = l.GetFeature(0)
                        g = f.GetGeometryRef()
                    except Exception:
                        raise IOError(
                            "AOI file cannot be opened by GDAL; please check it "
                            "or convert it to a format GDAL can open."
                        )
            else:
                try:
                    g = ogr.CreateGeometryFromJson(self.aoi)
                except Exception:
                    try:
                        g = ogr.CreateGeometryFromGML(self.aoi)
                    except Exception:
                        try:
                            g = ogr.CreateGeometryFromWkt(self.aoi)
                        except Exception:
                            try:
                                g = ogr.CreateGeometryFromWkb(self.aoi)
                            except Exception:
                                raise IOError(
                                    "The AOI has to be one of GeoJSON, GML, WKT or WKB."
                                )
            gjson_str = (
                """{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":%s}]}"""
                % g.ExportToJson())
            with open(self.toa_dir + "/AOI.json", "wb") as f:
                f.write(gjson_str.encode())

        ogr.DontUseExceptions()
        gdal.DontUseExceptions()
        if not os.path.exists(self.toa_dir + "/AOI.json"):
            g = gdal.Open(self.toa_bands[0])
            proj = g.GetProjection()
            if "WGS 84" in proj:
                # subprocess.call(['gdaltindex', '-f', 'GeoJSON', self.toa_dir +'/AOI.json', self.toa_bands[0]])
                geojson = get_boundary(self.toa_bands[0], to_wgs84=False)
                with open(self.toa_dir + "/AOI.json", "wb") as f:
                    f.write(geojson.encode())
            else:
                # subprocess.call(['gdaltindex', '-f', 'GeoJSON', '-t_srs', 'EPSG:4326', self.toa_dir +'/AOI.json', self.toa_bands[0]])
                geojson = get_boundary(self.toa_bands[0])[0]
                with open(self.toa_dir + "/AOI.json", "wb") as f:
                    f.write(geojson.encode())

            self.logger.warning(
                "AOI was not created; the full band extent is used")
            self.aoi = self.toa_dir + "/AOI.json"
        else:
            self.aoi = self.toa_dir + "/AOI.json"

        if self.pixel_res is None:
            self.pixel_res = abs(
                gdal.Open(self.toa_bands[0]).GetGeoTransform()[1])

        self.psf_xstd = 260 / self.pixel_res
        self.psf_ystd = 340 / self.pixel_res