def getParcelById(dsc, pid, withGeometry=False, db=1):
    """Get parcel information for the given parcel id.

    Returns a list whose first element is a tuple of column names and whose
    remaining elements are row tuples; on failure the (possibly empty) data
    list is returned.

    NOTE(review): the 'dsc' argument is immediately overwritten from the
    config ('set'/'ds_conf'), so the passed value is ignored — confirm this
    is intentional.
    """
    conn = psycopg2.connect(conn_str(db))
    cur = conn.cursor()
    data = []
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    try:
        year_cfg = values['ds_conf'][dsc]['years'][dsy]
        parcels_table = year_cfg['tables']['parcels']
        crop_names = year_cfg['columns']['crop_names']
        crop_codes = year_cfg['columns']['crop_codes']
        parcels_id = year_cfg['columns']['parcels_id']

        # Kept for parity with the original flow (the main query re-derives
        # the srid with st_srid); also validates that the table is registered
        # in geometry_columns.
        getTableSrid = f"""
            SELECT srid FROM geometry_columns
            WHERE f_table_name = '{parcels_table}'"""
        cur.execute(getTableSrid)
        srid = cur.fetchone()[0]

        geometrySql = ", st_asgeojson(wkb_geometry) as geom" if withGeometry else ""

        # The parcel id is bound as a query parameter (avoids SQL injection);
        # identifiers (table/column names) come from trusted config.
        getTableDataSql = f"""
            SELECT {parcels_id}, {crop_names} as cropname,
                {crop_codes} as cropcode,
                st_srid(wkb_geometry) as srid{geometrySql},
                st_area(wkb_geometry) as area,
                st_X(st_transform(st_centroid(wkb_geometry), 4326)) as clon,
                st_Y(st_transform(st_centroid(wkb_geometry), 4326)) as clat
            FROM {parcels_table}
            WHERE {parcels_id} = %s;
        """
        # Return a list of tuples
        cur.execute(getTableDataSql, (pid,))
        rows = cur.fetchall()

        data.append(tuple(etup.name for etup in cur.description))
        if rows:
            data.extend(tuple(r) for r in rows)
        else:
            print(f"No parcel found in {parcels_table} with id ({pid}).")
        return data
    except Exception as err:
        print(
            "2 Did not find data, please select the right database and table: ",
            err)
        # Bug fix: the original returned list.append(...)'s None; return the
        # data list itself (empty list is still falsy for callers).
        return data
    finally:
        # Always release the DB resources (the original leaked them).
        cur.close()
        conn.close()
def getParcelTimeSeries(dsc, year, pid, tstype, band=None, db=1):
    """Get the time series for the given parcel.

    Returns a list whose first element is a tuple of column names and whose
    remaining elements are row tuples; on failure the (possibly empty) data
    list is returned.

    NOTE(review): the 'dsc' and 'year' arguments are ignored — dataset and
    year are re-read from config ('set'/'ds_conf', 'set'/'ds_year'); confirm
    this is intentional.
    """
    conn = psycopg2.connect(conn_str(db))
    cur = conn.cursor()
    data = []
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    try:
        dias_catalog = values['ds_conf'][dsc]['years'][dsy]['tables'][
            'dias_catalog']
        signatures_tb = values['ds_conf'][dsc]['years'][dsy]['tables'][tstype]
        # pid/band are bound parameters (avoids SQL injection); the table
        # names come from trusted config.
        if band:
            getTableDataSql = f"""
                SELECT extract('epoch' from obstime), count,
                    mean, std, min, p25, p50, p75, max
                FROM {signatures_tb} s, {dias_catalog} d
                WHERE s.obsid = d.id and
                pid = %s and
                band = %s
                ORDER By obstime asc;
            """
            params = (pid, band)
        else:
            getTableDataSql = f"""
                SELECT extract('epoch' from obstime), band, count,
                    mean, std, min, p25, p50, p75, max
                FROM {signatures_tb} s, {dias_catalog} d
                WHERE s.obsid = d.id and
                pid = %s
                ORDER By obstime, band asc;
            """
            params = (pid,)
        # Return a list of tuples
        cur.execute(getTableDataSql, params)
        rows = cur.fetchall()

        data.append(tuple(etup.name for etup in cur.description))
        if rows:
            data.extend(tuple(r) for r in rows)
        else:
            print(f"No time series found for {pid} in {signatures_tb}")
        return data
    except Exception as err:
        print(
            "4 Did not find data, please select the right database and table: ",
            err)
        # Bug fix: the original returned list.append(...)'s None; return the
        # data list itself (empty list is still falsy for callers).
        return data
    finally:
        # Always release the DB resources (the original leaked them).
        cur.close()
        conn.close()
def getS2frames(parcel_id, start, end, db=1):
    """Get the sentinel images frames from dias catalogue for the given parcel.

    Args:
        parcel_id: parcel identifier (bound as a query parameter).
        start, end: date strings; 'end' is shifted one day forward so that
            the whole end date is included in the interval.
        db: database configuration index.

    Returns:
        List of 'reference' values ordered by observation time.
    """
    conn = psycopg2.connect(conn_str(db))
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    dias_catalog = values['ds_conf'][dsc]['years'][dsy]['tables'][
        'dias_catalog']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
    parcels_id = values['ds_conf'][dsc]['years'][dsy]['columns']['parcels_id']
    # Get the S2 frames that cover a parcel identified by parcel
    # ID from the dias_catalogue for the selected date.
    end_date = pd.to_datetime(end) + pd.DateOffset(days=1)
    # Values are bound parameters (avoids SQL injection); identifiers come
    # from trusted config.
    getS2framesSql = f"""
        SELECT reference, obstime, status
        FROM {dias_catalog}, {parcels_table}
        WHERE card = 's2'
        And footprint && st_transform(wkb_geometry, 4326)
        And {parcels_id} = %s
        And obstime between %s and %s
        ORDER by obstime asc;
    """
    # Read result set into a pandas dataframe
    df_s2frames = pd.read_sql_query(getS2framesSql, conn,
                                    params=(parcel_id, start, end_date))
    return df_s2frames['reference'].tolist()
def btn_refresh_on_click(b):
    """Reload the config file and refresh the dataset selector widgets."""
    cfg = config.read()
    current_conf = cfg['set']['ds_conf']
    current_year = cfg['set']['ds_year']
    # Repopulate the options before restoring the current selection.
    dsc.options = list(cfg['ds_conf'])
    dsy.options = [int(y) for y in cfg['ds_conf'][current_conf]['years']]
    dsc.value = current_conf
    dsy.value = int(current_year)
def pid(aoi, year, pid, geom=False):
    """Return the parcel information for 'pid' as a JSON string.

    The database result is a header tuple followed by row tuples; rows are
    transposed into per-column lists keyed by the header names.
    """
    cfg = config.read()
    db = int(cfg['ds_conf'][aoi]['db'])
    data = database.getParcelById(aoi, pid, geom, db)
    if not data:
        # No result at all.
        return json.dumps({})
    header = list(data[0])
    if len(data) == 1:
        # Header only: every column maps to an empty list.
        return json.dumps({key: [] for key in header})
    # Transpose rows into columns and pair them with the header names.
    columns = [list(col) for col in zip(*data[1:])]
    return json.dumps(dict(zip(header, columns)))
def aois_years():
    """Return a dict mapping each AOI code to its list of available years.

    Data source '0' reads AOIs/years from the RESTful API options; data
    source '1' reads them from the local dataset configuration.
    """
    values = config.read()
    years = {}
    if values['set']['data_source'] == '0':
        for desc in values['api']['options']['aois']:
            aoi = values['api']['options']['aois'][desc]
            years[aoi] = values['api']['options']['years'][aoi]
    elif values['set']['data_source'] == '1':
        # Fix: the original also built an unused 'desc' f-string here — a
        # dead config lookup; removed.
        for aoi in values['ds_conf']:
            years[aoi] = [y for y in values['ds_conf'][aoi]['years']]
    return years
def aois_options():
    """Return {(description, aoi_code): [years]} for the active data source."""
    cfg = config.read()
    options = {}
    source = cfg['set']['data_source']
    if source == '0':
        # AOIs and years provided by the RESTful API configuration.
        api_opts = cfg['api']['options']
        for desc in api_opts['aois']:
            aoi = f"{api_opts['aois'][desc]}"
            options[(desc, aoi)] = api_opts['years'][aoi]
    elif source == '1':
        # AOIs and years provided by the local dataset configuration.
        for aoi in cfg['ds_conf']:
            desc = f"{cfg['ds_conf'][aoi]['desc']}"
            confgs = cfg['ds_conf'][aoi]['years']
            options[(f'{desc} ({aoi})', aoi)] = list(confgs)
    return options
def crls(db=1):
    """Return the (host, name, user, port, pass) credentials for database 'db'.

    On any error the exception is printed and None is returned implicitly —
    preserved from the original, but note that callers unpacking the tuple
    will then fail. Fix: the original also read the unused 'sche' key, a
    dead lookup that could trip the error path needlessly; removed.
    """
    try:
        # Database
        conn_cfg = config.read()['db'][f'{db}']['conn']
        return (conn_cfg['host'], conn_cfg['name'], conn_cfg['user'],
                conn_cfg['port'], conn_cfg['pass'])
    except Exception as err:
        print(f"Could not read config file: {err}")
def api(mode=None):
    """Build and return the widget box for editing the RESTful API credentials.

    NOTE(review): the original source was corrupted by credential scrubbing
    ('...API User:'******'api']['pass']...); the wt_user/wt_pass/wb_save
    constructors below are reconstructed from the surrounding code and the
    save handler, which reads wt_pass.value — confirm against the upstream
    project.
    """
    values = config.read()
    wt_url = Text(
        value=values['api']['url'],
        placeholder='Add URL',
        description='API URL:',
        disabled=False
    )
    wt_user = Text(
        value=values['api']['user'],
        placeholder='Username',
        description='API User:',
        disabled=False
    )
    wt_pass = Password(
        value=values['api']['pass'],
        placeholder='******',
        description='API Password:',
        disabled=False
    )
    wb_save = Button(
        description='Save',
        disabled=False,
        icon='save'
    )
    progress = Output()

    def outlog(*text):
        # Echo messages inside the Output widget so they show in the UI.
        with progress:
            print(*text)

    @wb_save.on_click
    def wb_save_on_click(b):
        progress.clear_output()
        # Strip stray spaces before persisting; only overwrite the stored
        # password when the field is non-empty.
        config.set_value(['api', 'url'], str(wt_url.value).replace(' ', ''))
        config.set_value(['api', 'user'], str(wt_user.value).replace(' ', ''))
        if wt_pass.value != '':
            config.set_value(['api', 'pass'], str(
                wt_pass.value).replace(' ', ''))
        outlog("The RESTful API credentials are saved.")

    wbox = VBox([HBox([wt_url,
                       Label("Format: http://0.0.0.0/ or https://0.0.0.0/")]),
                 wt_user, wt_pass, HBox([wb_save, progress])])
    return wbox
def wb_save_on_click(b):
    """Persist the dataset form widget values into the 'ds_conf' config tree."""
    progress.clear_output()
    dscode = ds_code.value
    yr = str(ds_year.value)
    # (sub-path under ['ds_conf', dscode, 'years', yr], widget value) pairs,
    # written in the same order as the original call sequence.
    year_entries = [
        (['tables', 'dias_catalog'], tb_dc.value),
        (['tables', 'parcels'], tb_pr.value),
        (['columns', 'parcels_id'], tc_id.value),
        (['columns', 'crop_names'], tc_cn.value),
        (['columns', 'crop_codes'], tc_cc.value),
        (['tables', 's2'], tb_s2.value),
        (['tables', 'bs'], tb_bs.value),
        (['tables', 'c6'], tb_6c.value),
    ]
    for sub_path, widget_value in year_entries:
        config.update(['ds_conf', dscode, 'years', yr] + sub_path,
                      str(widget_value))
    # Dataset-level settings.
    config.update(['ds_conf', dscode, 'db'], str(ds_db.value))
    config.update(['ds_conf', dscode, 'desc'], str(ds_desc.value))
    config.update(['ds_conf', dscode, 'center'],
                  f"{map_cent_lat.value},{map_cent_lon.value}")
    config.update(['ds_conf', dscode, 'zoom'], str(map_zoom.value))
    # Make the saved dataset/year the active selection.
    config.update(['set', 'ds_conf'], str(dscode))
    config.update(['set', 'ds_year'], yr)
    # Refresh the selector widgets from the freshly written config.
    values = config.read()
    ds_c = values['set']['ds_conf']
    ds_y = values['set']['ds_year']
    dsc.options = list(values['ds_conf'])
    dsy.options = [int(y) for y in values['ds_conf'][ds_c]['years']]
    dsc.value = ds_c
    dsy.value = int(ds_y)
    outlog("The configurations are saved.")
def getSRID(dsc, db=1):
    """Return the parcels table SRID as an int (EPSG code).

    NOTE(review): the 'dsc' argument is immediately overwritten from config,
    so the passed value is ignored — same pattern as the other helpers.
    """
    conn = psycopg2.connect(conn_str(db))
    cfg = config.read()
    dsc = cfg['set']['ds_conf']
    dsy = cfg['set']['ds_year']
    parcels_table = cfg['ds_conf'][dsc]['years'][dsy]['tables']['parcels']

    # One sample row is enough: every geometry in the table shares the SRID.
    pgq_srid = f"""
        SELECT ST_SRID(wkb_geometry) FROM {parcels_table} LIMIT 1;
        """
    df_srid = pd.read_sql_query(pgq_srid, conn)
    target_EPSG = int(df_srid['st_srid'][0])
    return target_EPSG
def getPolygonCentroid(parcel_id, db=1):
    """Get the centroid and polygon of the given parcel as GeoJSON (EPSG:4326).

    Args:
        parcel_id: parcel identifier (bound as a query parameter).
        db: database configuration index.

    Returns:
        One-row pandas DataFrame with 'center' and 'polygon' GeoJSON columns.
    """
    conn = psycopg2.connect(conn_str(db))
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
    parcels_id = values['ds_conf'][dsc]['years'][dsy]['columns']['parcels_id']
    # The parcel id is bound as a query parameter (avoids SQL injection);
    # identifiers come from trusted config.
    getParcelPolygonSql = f"""
        SELECT ST_Asgeojson(ST_transform(ST_Centroid(wkb_geometry), 4326)) as center,
            ST_Asgeojson(st_transform(wkb_geometry, 4326)) as polygon
        FROM {parcels_table}
        WHERE {parcels_id} = %s
        LIMIT 1;
    """
    # Read result set into a pandas dataframe
    df_pcent = pd.read_sql_query(getParcelPolygonSql, conn,
                                 params=(parcel_id,))
    return df_pcent
def get_data(parcel):
    # Download information, time series and chip images for one parcel,
    # driven by notebook widgets from the enclosing scope (paths, aois,
    # year, pts_*, pci_*, progress, outlog, ...).
    values = config.read()  # NOTE(review): read but not used afterwards
    get_requests = data_source()
    pid = parcel['ogc_fid'][0]
    source = int(config.get_value(['set', 'data_source']))
    # Target folder depends on the selected data source (0: API, 1: local).
    if source == 0:
        datapath = f'{paths.value}{aois.value}{year.value}/parcel_{pid}/'
    elif source == 1:
        ds_conf = config.get_value(['set', 'ds_conf'])
        datapath = f'{paths.value}{ds_conf}/parcel_{pid}/'
    # Export the basic parcel information first.
    file_pinf = f"{datapath}{pid}_information"
    outlog(data_handler.export(parcel, 10, file_pinf))

    if pts_bt.value is True:
        # One time-series request per selected series type.
        outlog(f"Getting time series for parcel: '{pid}',",
               f"({pts_tstype.value} {pts_band.value}).")
        for pts in pts_tstype.value:
            ts = json.loads(get_requests.pts(aois.value, year.value, pid,
                                             pts, pts_band.value))
            # Append the band suffix only when a band is selected.
            band = ''
            if pts_band.value != '':
                band = f"_{pts_band.value}"
            file_ts = f"{datapath}{pid}_time_series_{pts}{band}"
            outlog(data_handler.export(ts, 11, file_ts))
    if pci_bt.value is True:
        # Chip images: download the GeoTIFFs, then verify via the images
        # list CSV written alongside them.
        files_pci = f"{datapath}{pid}_chip_images/"
        outlog(f"Getting '{pci_band.value}' chip images for parcel: {pid}")
        with progress:
            get_requests.rcbl(parcel, pci_start_date.value,
                              pci_end_date.value, pci_band.value,
                              pci_satellite.value, pci_chipsize.value,
                              files_pci)
        filet = f'{datapath}/{pid}_chip_images/{pid}_images_list.{pci_band.value[0]}.csv'
        # More than one line in the CSV means at least one image row beyond
        # the header, i.e. the download produced something.
        if file_len(filet) > 1:
            outlog(f"Completed, all GeoTIFFs for bands '{pci_band.value}' are ",
                   f"downloaded in the folder: '{datapath}/{pid}_chip_images'")
        else:
            outlog("No files where downloaded, please check your configurations")
def wb_save_on_click(b):
    """Persist the dataset form widget values under the 'dataset' config tree."""
    progress.clear_output()
    dscode = ds_code.value
    dskey = f'{dscode}_{str(ds_year.value)}'
    # (sub-path under ['dataset', dskey], widget value) pairs, written in
    # the same order as the original call sequence.
    entries = [
        (['tables', 'dias_catalog'], tb_dc.value),
        (['tables', 'parcels'], tb_pr.value),
        (['pcolumns', 'parcels_id'], tc_id.value),
        (['pcolumns', 'crop_names'], tc_cn.value),
        (['pcolumns', 'crop_codes'], tc_cc.value),
        (['tables', 's2'], tb_s2.value),
        (['tables', 'bs'], tb_bs.value),
        (['tables', 'c6'], tb_6c.value),
        (['db'], ds_db.value),
        (['description'], ds_desc.value),
    ]
    for sub_path, widget_value in entries:
        config.set_value(['dataset', dskey] + sub_path, str(widget_value))
    config.set_value(['dataset', dskey, 'center'],
                     f"{map_cent_lat.value},{map_cent_lon.value}")
    config.set_value(['dataset', dskey, 'zoom'], str(map_zoom.value))
    # Make the saved dataset the active selection.
    config.set_value(['set', 'dataset'], dskey)
    config.set_value(['set', 'ds_year'], str(ds_year.value))
    # Refresh the selector widget from the freshly written config.
    values = config.read()
    ds_c = values['set']['dataset']
    dsc.options = list(values['dataset'])
    dsc.value = ds_c
    outlog("The configurations are saved.")