Example 1
def getParcelsByPolygon(dsc, polygon, withGeometry=False, only_ids=True, db=1):
    """Get list of parcels within the given polygon"""
    poly = polygon.replace('_', ' ').replace('-', ',')

    conn = psycopg2.connect(conn_str(db))
    cur = conn.cursor()
    data = []
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    try:
        values = config.read()
        parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
        crop_names = values['ds_conf'][dsc]['years'][dsy]['columns']['crop_names']
        crop_codes = values['ds_conf'][dsc]['years'][dsy]['columns']['crop_codes']
        parcels_id = values['ds_conf'][dsc]['years'][dsy]['columns']['parcels_id']

        getTableSrid = f"""
            SELECT srid FROM geometry_columns
            WHERE f_table_name = '{parcels_table}'"""
        cur.execute(getTableSrid)
        srid = cur.fetchone()[0]

        if withGeometry:
            geometrySql = ", st_asgeojson(wkb_geometry) as geom"
        else:
            geometrySql = ""

        if only_ids:
            selectSql = f"{parcels_id}{geometrySql}"
        else:
            selectSql = f"""{parcels_id}, {crop_names} as cropname, {crop_codes} as cropcode,
                st_srid(wkb_geometry) as srid{geometrySql},
                st_area(wkb_geometry) as area,
                st_X(st_transform(st_centroid(wkb_geometry), 4326)) as clon,
                st_Y(st_transform(st_centroid(wkb_geometry), 4326)) as clat"""

        getTableDataSql = f"""
            SELECT {selectSql}
            FROM {parcels_table}
            WHERE st_intersects(wkb_geometry,
                  st_transform(st_geomfromtext('POLYGON(({poly}))', 4326), {srid}))
            LIMIT 100;
        """

        #  Return a list of tuples
        cur.execute(getTableDataSql)
        rows = cur.fetchall()

        data.append(tuple(etup.name for etup in cur.description))
        if len(rows) > 0:
            for r in rows:
                data.append(tuple(r))
        else:
            print(f"No parcel found in {parcels_table} that intersects with the polygon.")
        return data

    except Exception as err:
        print("3 Did not find data, please select the right database and table: ", err)
        data.append('Ended with no data')
        return data
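A minimal usage sketch (not part of the source): it relies on the same module-level config, conn_str and psycopg2 imports the function above uses, and the AOI code and coordinates are made up. Note that the polygon string encodes vertices as lon_lat pairs joined by '-', which the function converts back into WKT:

    # Hypothetical call: vertices as 'lon_lat' pairs joined with '-', first vertex repeated to close the ring.
    poly_str = "5.664_52.694-5.670_52.694-5.670_52.699-5.664_52.699-5.664_52.694"
    rows = getParcelsByPolygon("nld", poly_str, withGeometry=False, only_ids=True, db=1)
    if rows:
        header, records = rows[0], rows[1:]  # the first tuple holds the column names
        print(header)
        for rec in records[:5]:
            print(rec)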
Example 2
def getParcelByLocation(dsc, lon, lat, withGeometry=False, db=1):
    """Find the parcel under the given coordinates"""
    conn = psycopg2.connect(conn_str(db))
    cur = conn.cursor()
    data = []
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    try:
        values = config.read()
        parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
        crop_names = values['ds_conf'][dsc]['columns']['crop_names']
        crop_codes = values['ds_conf'][dsc]['columns']['crop_codes']
        parcels_id = values['ds_conf'][dsc]['columns']['parcels_id']

        getTableSrid = f"""
            SELECT srid FROM geometry_columns
            WHERE f_table_name = '{parcels_table}'"""
        cur.execute(getTableSrid)
        srid = cur.fetchone()[0]


        if withGeometry:
            geometrySql = ", st_asgeojson(wkb_geometry) as geom"
        else:
            geometrySql = ""

        getTableDataSql = f"""
            SELECT {parcels_id}, {crop_names} as cropname, {crop_codes} as cropcode,
                st_srid(wkb_geometry) as srid{geometrySql},
                st_area(wkb_geometry) as area,
                st_X(st_transform(st_centroid(wkb_geometry), 4326)) as clon,
                st_Y(st_transform(st_centroid(wkb_geometry), 4326)) as clat
            FROM {parcels_table}
            WHERE st_intersects(wkb_geometry,
                  st_transform(st_geomfromtext('POINT({lon} {lat})', 4326), {srid}));
        """

        #  Return a list of tuples
        cur.execute(getTableDataSql)
        rows = cur.fetchall()

        data.append(tuple(etup.name for etup in cur.description))
        if len(rows) > 0:
            for r in rows:
                data.append(tuple(r))
        else:
            print(f"No parcel found in {parcels_table} that intersects with point ({lon}, {lat})")
        return data

    except Exception as err:
        print("1 Did not find data, please select the right database and table: ", err)
        data.append('Ended with no data')
        return data
Example 3
def getParcelTimeSeries(dsc, year, pid, tstype, band=None, db=1):
    """Get the time series for the given parcel"""
    conn = psycopg2.connect(conn_str(db))
    cur = conn.cursor()
    data = []
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    try:
        values = config.read()
        dias_catalog = values['ds_conf'][dsc]['years'][dsy]['tables']['dias_catalog']
        signatures_tb = values['ds_conf'][dsc]['years'][dsy]['tables'][tstype]

        if band:
            getTableDataSql = f"""
                SELECT extract('epoch' from obstime), count,
                    mean, std, min, p25, p50, p75, max
                FROM {signatures_tb} s, {dias_catalog} d
                WHERE s.obsid = d.id and
                pid = {pid} and
                band = '{band}'
                ORDER By obstime asc;
            """
        else:
            getTableDataSql = f"""
                SELECT extract('epoch' from obstime), band,
                    count, mean, std, min, p25, p50, p75, max
                FROM {signatures_tb} s, {dias_catalog} d
                WHERE s.obsid = d.id and
                pid = {pid}
                ORDER By obstime, band asc;
            """
        #  Return a list of tuples
        cur.execute(getTableDataSql)

        rows = cur.fetchall()

        data.append(tuple(etup.name for etup in cur.description))
        if len(rows) > 0:
            for r in rows:
                data.append(tuple(r))
        else:
            print(f"No time series found for {pid} in {signatures_tb}")
        return data

    except Exception as err:
        print("4 Did not find data, please select the right database and table: ", err)
        data.append('Ended with no data')
        return data
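A short sketch of consuming the returned list of tuples with pandas (the AOI code, parcel ID and band are made up; pandas is assumed to be installed, as in the other examples):

    import pandas as pd

    data = getParcelTimeSeries('nld', 2019, 34296, 's2', band='B04', db=1)
    if len(data) > 1:
        df = pd.DataFrame(data[1:], columns=data[0])               # first tuple = column names
        df['date'] = pd.to_datetime(df[df.columns[0]], unit='s')   # epoch seconds -> datetime
        print(df[['date', 'mean']].head())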
Example 4
def getS2frames(parcel_id, start, end, db=1):
    """Get the sentinel images frames from dias cataloge for the given parcel"""
    conn = psycopg2.connect(conn_str(db))
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    dias_catalog = values['ds_conf'][dsc]['years'][dsy]['tables']['dias_catalog']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
    parcels_id = values['ds_conf'][dsc]['years'][dsy]['columns']['parcels_id']
    # Get the S2 frames that cover a parcel identified by parcel
    # ID from the dias_catalogue for the selected date.

    end_date = pd.to_datetime(end) + pd.DateOffset(days=1)

    getS2framesSql = f"""
        SELECT reference, obstime, status
        FROM {dias_catalog}, {parcels_table}
        WHERE card = 's2'
        And footprint && st_transform(wkb_geometry, 4326)
        And {parcels_id} = {parcel_id}
        And obstime between '{start}' and '{end_date}'
        ORDER by obstime asc;
    """

    # Read result set into a pandas dataframe
    df_s2frames = pd.read_sql_query(getS2framesSql, conn)

    return df_s2frames['reference'].tolist()
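Usage sketch (the parcel ID and date range are illustrative):

    frames = getS2frames(34296, '2019-06-01', '2019-06-30', db=1)
    print(f"{len(frames)} S2 frames cover the parcel:")
    for reference in frames:
        print(reference)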
Example 5
 def btn_refresh_on_click(b):
     values = config.read()
     ds_c = values['set']['ds_conf']
     ds_y = values['set']['ds_year']
     dsc.options = [d for d in values['ds_conf']]
     dsy.options = [int(y) for y in values['ds_conf'][ds_c]['years']]
     dsc.value = ds_c
     dsy.value = int(ds_y)
Example 6
 def bt_rec_on_click(b):
     progress.clear_output()
     if len(dsc.options) > 1:
         config.delete(['ds_conf', dsc.value])
         outlog(f"Dataset configuration '{dsc.value}' is deleted.")
         values = config.read()
         dsc.options = [d for d in values['ds_conf']]
     else:
         outlog("Can not remove last configuration.")
Example 7
 def bt_rey_on_click(b):
     progress.clear_output()
     if len(dsy.options) > 1:
         config.delete(['ds_conf', dsc.value, 'years', str(dsy.value)])
         outlog(f"Year {dsy.value} of dataset '{dsc.value}' is deleted.")
         values = config.read()
         dsy.options = [int(y) for y in values['ds_conf']
                        [str(dsc.value)]['years']]
     else:
         outlog("Can not remove last configuration.")
Example 8
def ppoly(aoi, year, polygon, geom=False, only_ids=True):
    values = config.read()
    db = int(values['ds_conf'][aoi]['db'])
    data = database.getParcelsByPolygon(aoi, polygon, geom, only_ids, db)
    if not data:
        return json.dumps({})
    elif len(data) == 1:
        return json.dumps(
            dict(zip(list(data[0]), [[] for i in range(len(data[0]))])))
    else:
        return json.dumps(
            dict(zip(list(data[0]), [list(i) for i in zip(*data[1:])])))
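The dict(zip(...)) expression above (reused in several wrappers below) turns the header tuple into keys and transposes the remaining tuples into per-column lists; a tiny self-contained illustration with made-up values:

    data = [('ogc_fid', 'cropname'), (101, 'wheat'), (102, 'maize')]
    as_dict = dict(zip(list(data[0]), [list(i) for i in zip(*data[1:])]))
    print(as_dict)  # {'ogc_fid': [101, 102], 'cropname': ['wheat', 'maize']}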
Example 9
def pid(aoi, year, pid, geom=False):
    values = config.read()
    db = int(values['ds_conf'][aoi]['db'])
    data = database.getParcelById(aoi, pid, geom, db)
    if not data:
        return json.dumps({})
    elif len(data) == 1:
        return json.dumps(
            dict(zip(list(data[0]), [[] for i in range(len(data[0]))])))
    else:
        return json.dumps(
            dict(zip(list(data[0]), [list(i) for i in zip(*data[1:])])))
Example 10
 def aois_years():
     values = config.read()
     years = {}
     if values['set']['data_source'] == '0':
         for desc in values['api']['options']['aois']:
             aoi = values['api']['options']['aois'][desc]
             years[aoi] = values['api']['options']['years'][aoi]
     elif values['set']['data_source'] == '1':
         for aoi in values['ds_conf']:
             desc = f"{values['ds_conf'][aoi]['desc']}"
             years[aoi] = [y for y in values['ds_conf'][aoi]['years']]
     return years
Example 11
def crls(db=1):
    try:
        # Database
        values = config.read()
        DB_HOST = values['db'][f'{db}']['conn']['host']
        DB_NAME = values['db'][f'{db}']['conn']['name']
        DB_SCHE = values['db'][f'{db}']['conn']['sche']
        DB_USER = values['db'][f'{db}']['conn']['user']
        DB_PORT = values['db'][f'{db}']['conn']['port']
        DB_PASS = values['db'][f'{db}']['conn']['pass']
        return DB_HOST, DB_NAME, DB_USER, DB_PORT, DB_PASS
    except Exception as err:
        print(f"Could not read config file: {err}")
Example 12
 def aois_options():
     values = config.read()
     options = {}
     if values['set']['data_source'] == '0':
         for desc in values['api']['options']['aois']:
             aoi = f"{values['api']['options']['aois'][desc]}"
             options[(desc, aoi)] = values['api']['options']['years'][aoi]
     elif values['set']['data_source'] == '1':
         for aoi in values['ds_conf']:
             desc = f"{values['ds_conf'][aoi]['desc']}"
             confgs = values['ds_conf'][aoi]['years']
             options[(f'{desc} ({aoi})', aoi)] = [y for y in confgs]
     return options
Example 13
def pts(aoi, year, pid, tstype, band=None):
    values = config.read()
    db = int(values['ds_conf'][aoi]['db'])
    data = database.getParcelTimeSeries(aoi, year, pid, tstype, band, db)
    if not data:
        return json.dumps({})
    elif len(data) == 1:
        return json.dumps(
            dict(zip(list(data[0]), [[] for i in range(len(data[0]))])))
    else:
        return json.dumps(
            dict(zip(list(data[0]), [list(i) for i in zip(*data[1:])])))

Example 14
def rest_api(mode=None):
    """"""
    values = config.read()

    wt_url = Text(
        value=values['api']['url'],
        placeholder='Add URL',
        description='API URL:',
        disabled=False
    )
    wt_user = Text(
        value=values['api']['user'],
        placeholder='Username',
        description='API User:',
        disabled=False
    )
    wt_pass = Password(
        value=values['api']['pass'],
        placeholder='******',
        description='API Password:',
        disabled=False
    )
    wb_save = Button(
        description='Save',
        disabled=False,
        icon='save'
    )

    progress = Output()

    def outlog(*text):
        with progress:
            print(*text)

    @wb_save.on_click
    def wb_save_on_click(b):
        config.update(['api', 'url'], str(wt_url.value))
        config.update(['api', 'user'], str(wt_user.value))
        if wt_pass.value != '':
            config.update(['api', 'pass'], str(wt_pass.value))
        outlog("API information is updated")

    wbox = VBox([wt_url, wt_user, wt_pass, wb_save, progress])

    return wbox
Example 15
def getSRID(dsc, db=1):
    """Get the SRID"""
    # Get parcels SRID.
    conn = psycopg2.connect(conn_str(db))
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']

    pgq_srid = f"""
        SELECT ST_SRID(wkb_geometry) FROM {parcels_table} LIMIT 1;
        """

    df_srid = pd.read_sql_query(pgq_srid, conn)
    srid = df_srid['st_srid'][0]
    target_EPSG = int(srid)

    return target_EPSG
Example 16
def centroid(area, source):
    """Get the centroid for the selected area.
    """
    values = config.read()
    if source == 1:
        center = values['ds_conf'][area]['center'].split(",")
        zoom = values['ds_conf'][area]['zoom']
    else:
        if 'nld' in area:
            center, zoom = [52.13, 5.29], 10
        elif 'nrw' in area:
            center, zoom = [51.36, 7.32], 10
        elif 'es' in area:
            center, zoom = [41.85, 0.86], 10
        else:
            center, zoom = [45, 15], 4

    return [center, zoom]
Example 17
    def get_data(parcel):
        values = config.read()
        get_requests = data_source()
        pid = parcel['ogc_fid'][0]
        source = int(config.get_value(['set', 'data_source']))
        if source == 0:
            datapath = f'{paths.value}{aois.value}{year.value}/parcel_{pid}/'
        elif source == 1:
            ds_conf = config.get_value(['set', 'ds_conf'])
            datapath = f'{paths.value}{ds_conf}/parcel_{pid}/'
        file_pinf = f"{datapath}{pid}_information"

        outlog(data_handler.export(parcel, 10, file_pinf))

        if pts_bt.value is True:
            outlog(f"Getting time series for parcel: '{pid}',",
                   f"({pts_tstype.value} {pts_band.value}).")
            for pts in pts_tstype.value:
                ts = json.loads(
                    get_requests.pts(aois.value, year.value, pid, pts,
                                     pts_band.value))
                band = ''
                if pts_band.value != '':
                    band = f"_{pts_band.value}"
                file_ts = f"{datapath}{pid}_time_series_{pts}{band}"
                outlog(data_handler.export(ts, 11, file_ts))
        if pci_bt.value is True:
            files_pci = f"{datapath}{pid}_chip_images/"
            outlog(f"Getting '{pci_band.value}' chip images for parcel: {pid}")
            with progress:
                get_requests.rcbl(parcel, pci_start_date.value,
                                  pci_end_date.value, pci_band.value,
                                  pci_satellite.value, pci_chipsize.value,
                                  files_pci)
            filet = f'{datapath}/{pid}_chip_images/{pid}_images_list.{pci_band.value[0]}.csv'
            if file_len(filet) > 1:
                outlog(
                    f"Completed, all GeoTIFFs for bands '{pci_band.value}' are ",
                    f"downloaded in the folder: '{datapath}/{pid}_chip_images'"
                )
            else:
                outlog(
                    "No files where downloaded, please check your configurations"
                )
Example 18
def getPolygonCentroid(parcel_id, db=1):
    """Get the centroid of the given polygon"""
    conn = psycopg2.connect(conn_str(db))
    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
    parcels_id = values['ds_conf'][dsc]['years'][dsy]['columns']['parcels_id']

    getParcelPolygonSql = f"""
        SELECT ST_Asgeojson(ST_transform(ST_Centroid(wkb_geometry), 4326)) as center,
          ST_Asgeojson(st_transform(wkb_geometry, 4326)) as polygon
        FROM {parcels_table} 
        WHERE {parcels_id} = {parcel_id}
        LIMIT 1;
    """

    # Read result set into a pandas dataframe
    df_pcent = pd.read_sql_query(getParcelPolygonSql, conn)
    
    return df_pcent
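A sketch of reading the centroid back out of the returned dataframe (the parcel ID is made up; json is from the standard library):

    import json

    df_pcent = getPolygonCentroid(34296, db=1)
    if not df_pcent.empty:
        lon, lat = json.loads(df_pcent['center'][0])['coordinates']
        print(f"Parcel centroid (EPSG:4326): lon={lon}, lat={lat}")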
Example 19
 def wb_save_on_click(b):
     progress.clear_output()
     dscode = ds_code.value
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'tables', 'dias_catalog'], str(tb_dc.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'tables', 'parcels'], str(tb_pr.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'columns', 'parcels_id'], str(tc_id.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'columns', 'crop_names'], str(tc_cn.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'columns', 'crop_codes'], str(tc_cc.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'tables', 's2'], str(tb_s2.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'tables', 'bs'], str(tb_bs.value))
     config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                    'tables', 'c6'], str(tb_6c.value))
     config.update(['ds_conf', dscode,
                    'db'], str(ds_db.value))
     config.update(['ds_conf', dscode,
                    'desc'], str(ds_desc.value))
     config.update(['ds_conf', dscode, 'center'],
                   f"{map_cent_lat.value},{map_cent_lon.value}")
     config.update(['ds_conf', dscode,
                    'zoom'], str(map_zoom.value))
     config.update(['set', 'ds_conf'], str(dscode))
     config.update(['set', 'ds_year'], str(ds_year.value))
     values = config.read()
     ds_c = values['set']['ds_conf']
     ds_y = values['set']['ds_year']
     dsc.options = [d for d in values['ds_conf']]
     dsy.options = [int(y) for y in values['ds_conf'][ds_c]['years']]
     dsc.value = ds_c
     dsy.value = int(ds_y)
     outlog("The configurations are saved.")
Example 20
def direct_conn(db='1'):
    values = config.read()

    info_db = Label("Database connection settings.")

    db_host = Text(value=values['db'][db]['conn']['host'],
                   placeholder='Database host',
                   description='db Host:',
                   disabled=False)
    db_port = Text(value=values['db'][db]['conn']['port'],
                   placeholder='Database port',
                   description='db Port:',
                   disabled=False)
    db_name = Text(value=values['db'][db]['conn']['name'],
                   placeholder='Database name',
                   description='db Name:',
                   disabled=False)
    db_user = Text(value=values['db'][db]['conn']['user'],
                   placeholder='Database user',
                   description='db User:',
                   disabled=False)
    db_pass = Password(value=values['db'][db]['conn']['pass'],
                       placeholder='******',
                       description='db Pass:',
                       disabled=False)

    info_os = Label("Object storage connection settings.")

    os_dias = Dropdown(
        options=['EOSC', 'CREODIAS', 'SOBLOO', 'MUNDI', 'ONDA', 'WEKEO', ''],
        value=values['obst']['osdias'],
        description='DIAS:',
        disabled=False,
    )
    os_host = Text(value=values['obst']['oshost'],
                   placeholder='Storage host',
                   description='s3 Host:',
                   disabled=False)
    os_bucket = Text(value=values['obst']['bucket'],
                     placeholder='Bucket name',
                     description='Bucket name:',
                     disabled=False)
    os_access_key = Text(value=values['obst']['access_key'],
                         placeholder='Access key',
                         description='Access Key:',
                         disabled=False)
    os_secret_key = Password(value=values['obst']['secret_key'],
                             placeholder='Secret key',
                             description='Secret Key:',
                             disabled=False)

    wb_save = Button(description='Save', disabled=False, icon='save')

    progress = Output()

    def outlog(*text):
        with progress:
            print(*text)

    @wb_save.on_click
    def wb_save_on_click(b):
        progress.clear_output()
        # Save database connection information
        config.update(['db', db, 'conn', 'host'], str(db_host.value))
        config.update(['db', db, 'conn', 'port'], str(db_port.value))
        config.update(['db', db, 'conn', 'name'], str(db_name.value))
        config.update(['db', db, 'conn', 'user'], str(db_user.value))
        if db_pass.value != '':
            config.update(['db', db, 'conn', 'pass'], str(db_pass.value))
        # Save Object storage connection information
        config.update(['obst', 'osdias'], str(os_dias.value))
        config.update(['obst', 'oshost'], str(os_host.value))
        config.update(['obst', 'bucket'], str(os_bucket.value))
        config.update(['obst', 'access_key'], str(os_access_key.value))
        if os_secret_key.value != '':
            config.update(['obst', 'secret_key'], str(os_secret_key.value))

        outlog("All changes are saved.")

    wbox = VBox([
        info_db, db_host, db_port, db_name, db_user, db_pass, info_os, os_dias,
        os_host, os_bucket, os_access_key, os_secret_key, wb_save, progress
    ])

    return wbox
Example 21
Example code:
    from src.utils import object_storage
    s3file = ''  # The file from the s3 storage # noqa
    localfile = ''  # The new name of the file on the local storage. Use only if 'to_memory' is False
    progress_bar = True  # Disable or enable the progress bar, accepts True or False (Default False).
    to_memory = True  # Download directly to memory or to a file on local storage (Default False).
    object_storage.get_file(s3file, localfile, to_memory, progress_bar)  # Download the file from s3 storage
    object_storage.listFileFromS3('card')  # List the files in the s3 bucket
"""

import boto3
from src.ipycbm.utils import config

try:
    values = config.read()
    ACCESS_KEY = values['obst']['access_key']
    SECRET_KEY = values['obst']['secret_key']
    S3HOST = values['obst']['oshost']
    BUCKET = values['obst']['bucket']
    SERVICE_PROVIDER = values['obst']['osdias']
except Exception as err:
    print(f"Could not read config file: {err}")


def connection(arg=None):
    session = boto3.session.Session(
        aws_access_key_id=ACCESS_KEY, aws_secret_access_key=SECRET_KEY)
    if arg == 'session':
        return session
    elif arg == 'resource':
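The function above is cut off; purely as an assumption, a minimal sketch of how such a helper typically continues with the credentials read earlier, using the standard boto3 resource API against the configured endpoint:

    # Hypothetical continuation sketch, not the original implementation.
    def s3_resource():
        session = boto3.session.Session(
            aws_access_key_id=ACCESS_KEY, aws_secret_access_key=SECRET_KEY)
        return session.resource('s3', endpoint_url=S3HOST)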
Example 22
def extractS1bs(startdate, enddate):
    start = time.time()
    frootpath = 'tmp'

    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    dias_catalogue = values['ds_conf'][dsc]['years'][dsy]['tables'][
        'dias_catalog']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
    results_table = values['ds_conf'][dsc]['years'][dsy]['tables']['s2']

    inconn = database.connection()
    if not inconn:
        print("No in connection established")
        sys.exit(1)

    incurs = inconn.cursor()
    srid = -1
    sridSql = "select srid from geometry_columns where f_table_name = '{}';"

    try:
        incurs.execute(sridSql.format(parcels_table))
        result = incurs.fetchone()
        if not result:
            print("{} does not exist or is not a spatial table")
        else:
            srid = result[0]
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        inconn.close()
        sys.exit(1)

    print("Parcel srid = ", srid)

    # Get the first image record that is not yet processed
    imagesql = f"""
    SELECT id, reference, obstime FROM {dias_catalogue}
    WHERE obstime between '{startdate}' And '{enddate}'
    And status ='ingested' And card = 's2'
    ORDER by obstime asc LIMIT 1
    """
    updateSql = """
    UPDATE {} SET status='{}'
    WHERE id = {} And status = '{}'
    """

    with inconn:
        with inconn.cursor() as trans_cur:
            trans_cur.execute(imagesql)
            result = trans_cur.fetchone()
            if not result:
                print(
                    "All signatures for the given dates have been extracted.")
                inconn.close()
                sys.exit(1)
            else:
                oid = result[0]
                reference = result[1]
                obstime = result[2]
            # Fails if this record is changed in the meantime
            trans_cur.execute(
                updateSql.format(dias_catalogue, 'inprogress', oid,
                                 'ingested'))
            inconn.commit()

    print(reference)
    obstime = reference.split('_')[2][0:8]
    print(obstime)
    obs_path = "{}/{}/{}".format(obstime[0:4], obstime[4:6], obstime[6:8])
    print(obs_path)

    mgrs_tile = reference.split('_')[5]
    full_tstamp = reference.split('_')[2]

    # Copy input data from S3 to local disk
    dias = values['obst']['osdias']
    if dias in ['EOSC', 'CREODIAS']:
        rootpath = 'Sentinel-1/SAR/CARD-BS'
        print(obs_path, reference)
        s3path = '{}/{}/{}/{}.data/Gamma0_VV.img'.format(
            rootpath, obs_path, reference, reference)
    elif dias == 'SOBLOO':
        s3path = '{}/GRD/{}/{}.data/Gamma0_VV.img'.format(
            reference.split('_')[0], reference, reference)

    fpath = f'{frootpath}/{reference}_VV.img'
    outsrid = -1

    if object_storage.get_file(s3path, fpath) == 0:
        print("Resource {} not available in S3 storage (FATAL)".format(s3path))
        incurs.execute(updateSql.format(
            dias_catalogue, 'No S3 VV img', oid, 'inprogress'))
        inconn.commit()
        incurs.close()
        inconn.close()
        sys.exit(1)

    s3path = s3path.replace('.img', '.hdr')
    fpath = f'{frootpath}/{reference}_VV.hdr'

    if object_storage.get_file(s3path, fpath) == 0:
        print("Resource {} not available in S3 storage (FATAL)".format(s3path))
        incurs.execute(updateSql.format(
            dias_catalogue, 'No S3 VV hdr', oid, 'inprogress'))
        inconn.commit()
        incurs.close()
        inconn.close()
        sys.exit(1)
    else:
        # Only if the header file is present can we open the image to check its projection
        with rasterio.open(fpath.replace('hdr', 'img')) as src:
            outsrid = src.crs.to_epsg()

    print('Out SRID: ', outsrid)

    if dias in ['EOSC', 'CREODIAS']:
        rootpath = 'Sentinel-1/SAR/CARD-BS'
        s3path = '{}/{}/{}/{}.data/Gamma0_VH.img'.format(
            rootpath, obs_path, reference, reference)
    elif dias == 'SOBLOO':
        s3path = '{}/GRD/{}/{}.data/Gamma0_VH.img'.format(
            reference.split('_')[0], reference, reference)
    fpath = f'{frootpath}/{reference}_VH.img'

    if object_storage.get_file(s3path, fpath) == 0:
        print("Resource {} not available in S3 storage (FATAL)".format(s3path))
        incurs.execute(updateSql.format(
            dias_catalogue, 'No S3 VH img', oid, 'inprogress'))
        inconn.commit()
        incurs.close()
        inconn.close()
        sys.exit(1)

    s3path = s3path.replace('.img', '.hdr')
    fpath = f'{frootpath}/{reference}_VH.hdr'

    if object_storage.get_file(s3path, fpath) == 0:
        print("Resource {} not available in S3 storage (FATAL)".format(s3path))
        incurs.execute(updateSql.format(
            dias_catalogue, 'No S3 VH hdr', oid, 'inprogress'))
        inconn.commit()
        incurs.close()
        inconn.close()
        sys.exit(1)

    # Open a connection to save results
    outconn = psycopg2.connect(connString)
    if not outconn:
        print("No out connection established")
        incurs.execute(updateSql.format(
            dias_catalogue, 'no_out_conn', oid, 'inprogress'))
        inconn.commit()
        incurs.close()
        inconn.close()
        sys.exit(1)

    # Get the parcel polygon in this image' footprint

    incurs.close()
    # Open a named cursor
    incurs = inconn.cursor(name='fetch_image_coverage',
                           cursor_factory=psycopg2.extras.DictCursor)

    ds_conf = config.get_value(['set', 'ds_conf'])
    ds_year = config.get_value(['set', 'ds_year'])
    pid_column = config.get_value(
        ['ds_conf', ds_conf, 'years', ds_year, 'columns', 'parcels_id'])

    parcelsql = f"""
    SELECT p.{pid_column}, ST_AsGeoJSON(st_transform(p.wkb_geometry,
        {outsrid}))::json
    FROM {parcels_table} p, {dias_catalogue} dc
    WHERE p.wkb_geometry && st_transform(dc.footprint, {srid})
    And st_area(p.wkb_geometry) > 3000.0
    And dc.id = {oid}
    -- And p.{pid_column} not in (SELECT distinct pid
    --     FROM {results_table} where obsid = {oid})
    """
    incurs.execute(parcelsql)

    sqlload = time.time() - start
    print(
        "Images loaded and nrecs[0] features selected from database in {} seconds"
        .format(sqlload))

    nrows = {}
    nrows['VV'] = 0
    nrows['VH'] = 0

    affine = {}
    array = {}

    bands = ['VV', 'VH']

    for b in bands:
        with rasterio.open(f'{frootpath}/{reference}_{b}.img') as src:
            affine[b] = src.transform
            array[b] = src.read(1)

    while True:
        rowset = incurs.fetchmany(size=2000)

        if not rowset:
            break

        features = {
            "type":
            "FeatureCollection",
            "features": [{
                "type": "feature",
                "geometry": f[1],
                "properties": {
                    "pid": int(f[0])
                }
            } for f in rowset]
        }

        for b in bands:

            zs = zonal_stats(features,
                             array[b],
                             affine=affine[b],
                             stats=[
                                 "count", "mean", "std", "min", "max",
                                 "percentile_25", "percentile_50",
                                 "percentile_75"
                             ],
                             prefix="",
                             nodata=0,
                             geojson_out=True)

            df = pd.DataFrame(zs)

            df = pd.DataFrame.from_dict(df.properties.to_dict(),
                                        orient='index')

            df['obsid'] = oid
            df['band'] = b

            df.rename(index=str,
                      columns={
                          "percentile_25": "p25",
                          "percentile_50": "p50",
                          "percentile_75": "p75"
                      },
                      inplace=True)

            nrows[b] = nrows[b] + len(df)
            # df is the dataframe
            if len(df) > 0:
                df.dropna(inplace=True)
                if len(df.values) > 0:
                    df_columns = list(df)
                    s_buf = io.StringIO()
                    df.to_csv(s_buf, header=False, index=False, sep=',')
                    s_buf.seek(0)
                    outcurs = outconn.cursor()
                    #print(tuple(df_columns))
                    try:
                        #psycopg2.extras.execute_batch(outcurs, insert_stmt, df.values)
                        outcurs.copy_from(s_buf,
                                          results_table,
                                          columns=tuple(df_columns),
                                          sep=',')
                        outconn.commit()
                    except psycopg2.IntegrityError as e:
                        print("Copy into {} hit a duplicate index: {}".format(
                            results_table, e))
                    #except Error as e:
                    #    print(e)
                    finally:
                        outcurs.close()
                else:
                    print("No valid data in block {}".format(nrows[b]))

    outconn.close()

    incurs.close()

    incurs = inconn.cursor()

    try:
        incurs.execute(updateSql.format(
            dias_catalogue, 'extracted', oid, 'inprogress'))
        inconn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        inconn.close()
        if outconn:
            outconn.close()

    incurs.close()
    inconn.close()

    fpath = f'{frootpath}/{reference}_VV.img'

    if os.path.exists(fpath):
        os.remove(fpath)
        os.remove(fpath.replace('.img', '.hdr'))

    fpath = f'{frootpath}/{reference}_VH.img'

    if os.path.exists(fpath):
        os.remove(fpath)
        os.remove(fpath.replace('.img', '.hdr'))

    print(
        "Total time required for {} features and {} bands: {} seconds".format(
            nrows['VV'], len(bands),
            time.time() - start))
Example 23
    def show_m():

        multipoly = []
        multycent = []
        geom = view_spatial.trasform_geometry(info_data)
        poly = geom['coordinates'][0][0]
        #     poly = view_spatial.swap_xy(geom['coordinates'][0])[0]
        multipoly.append(poly)
        cent = view_spatial.centroid(poly)
        multycent.append(cent)

        cent = view_spatial.centroid(multycent)
        m = Map(center=cent, zoom=16, basemap=basemaps.OpenStreetMap.Mapnik)

        polygon = Polygon(locations=multipoly,
                          name='Parcel polygon',
                          color="yellow",
                          fill_color=None)

        m.add_layer(polygon)
        basemap2 = basemap_to_tiles(basemaps.Esri.WorldImagery)

        poly_text = HTML()
        poly_text.value = f"""Parcel ID: {pid}<br>
                                    Crop name: {crop_name}<br>
                                    Area: {area:.2f} sqm<br>
                                    Coordinates: {cent}
                                    """
        poly_text.placeholder = "HTML"
        poly_text.description = ""

        # Popup with a given location on the map:
        poly_popup = Popup(child=poly_text,
                           close_button=False,
                           auto_close=False,
                           close_on_escape_key=False)
        m.add_layer(poly_popup)

        # Popup associated to a layer
        polygon.popup = poly_popup

        # Layers control
        show_poly = Checkbox(value=True,
                             description='Polygon',
                             disabled=False,
                             indent=False,
                             layout=Layout(width='140px'))
        show_sat = Checkbox(value=False,
                            description='High res basemap',
                            disabled=False,
                            indent=False,
                            layout=Layout(width='140px'))

        def polygon_changed(b):
            try:
                if show_poly.value is True:
                    m.add_layer(polygon)
                else:
                    m.remove_layer(polygon)
            except Exception:
                pass

        show_poly.observe(polygon_changed)

        def show_sat_changed(b):
            try:
                if show_sat.value is True:
                    m.add_layer(basemap2)
                else:
                    m.remove_layer(basemap2)
            except Exception:
                pass

        show_sat.observe(show_sat_changed)

        try:
            csv_list = f"{ci_path}{pid}_images_list.{ci_band.value[0]}.csv"
            df = view_images.create_df(ci_path, pid, ci_band.value)

            geotiff = f"{ci_path}{df['imgs'][0]}.{ci_band.value[0]}.tif"
            bounds = view_spatial.bounds(geotiff)

            images = {}
            for i, row in df.iterrows():
                str_date = str(row['date'].date()).replace('-', '')
                workdir = os.getcwd().split('/')[-1]
                img_tc = f"{ci_path}{('').join(ci_band.value)}_{str_date}.png"

                # Create false color image if it does not exist
                # Merge bands (images path, export image path, bands list)
                if not os.path.isfile(img_tc):
                    imgs_path = f"{ci_path}{row['imgs']}"
                    view_images.merge_bands(imgs_path, img_tc, ci_band.value)
                values = config.read()

                # Set the current environment
                if eval(values['set']['jupyterlab']) is True:
                    image_path = f'files/{workdir}/{img_tc}'
                else:
                    image_path = img_tc
                images[i] = ImageOverlay(url=image_path,
                                         name=str_date,
                                         bounds=(bounds))

            # Time slider
            slider = IntSlider(value=1,
                               min=1,
                               max=len(images),
                               step=1,
                               description=str(df['date'][0].date()),
                               disabled=False,
                               continuous_update=False,
                               orientation='horizontal',
                               readout=True,
                               readout_format='d')
            show_chip = Checkbox(value=True,
                                 description='Chip image',
                                 disabled=False,
                                 indent=False,
                                 layout=Layout(width='140px'))

            def on_ci_band_change(change):
                pass

            ci_band.observe(on_ci_band_change, 'value')

            def show_chip_changed(b):
                try:
                    if show_chip.value is True:
                        m.add_layer(images[slider.value - 1])
                    else:
                        m.remove_layer(images[slider.value - 1])
                except Exception:
                    pass

            show_chip.observe(show_chip_changed)

            # Slider control
            play = Play(value=1,
                        min=1,
                        max=len(images),
                        step=1,
                        interval=1000,
                        description="Press play",
                        disabled=False)

            def slider_changed(b):
                if show_chip.value is True:
                    try:
                        m.substitute_layer(images[b['old'] - 1],
                                           images[b['new'] - 1])
                    except Exception:
                        pass
                    slider.description = str(df['date'][slider.value -
                                                        1].date())

            slider.observe(slider_changed)
            jslink((play, 'value'), (slider, 'value'))
            time_box = HBox([slider, play])
            time_control = WidgetControl(widget=time_box,
                                         position='bottomleft')
            m.add_control(time_control)
            m.add_layer(images[0])

            map_options = VBox([show_poly, show_chip, show_sat])
        except Exception as err:
            map_options = VBox([show_poly, show_sat])
            print(err)

        layers_control = WidgetControl(widget=map_options,
                                       position='topright',
                                       max_width=150)
        m.add_control(layers_control)
        return m
Example 24
def get():
    """Get the parcel's dataset for the given location or ids"""
    info = Label(
        "1. Select the region and the year to get parcel information.")

    values = config.read()
    # Set the max number of parcels that can be downloaded at once.
    plimit = int(values['set']['plimit'])

    def aois_options():
        values = config.read()
        options = {}
        if values['set']['data_source'] == '0':
            for desc in values['api']['options']['aois']:
                aoi = f"{values['api']['options']['aois'][desc]}"
                options[(desc, aoi)] = values['api']['options']['years'][aoi]
        elif values['set']['data_source'] == '1':
            for aoi in values['ds_conf']:
                desc = f"{values['ds_conf'][aoi]['desc']}"
                confgs = values['ds_conf'][aoi]['years']
                options[(f'{desc} ({aoi})', aoi)] = [y for y in confgs]
        return options

    def aois_years():
        values = config.read()
        years = {}
        if values['set']['data_source'] == '0':
            for desc in values['api']['options']['aois']:
                aoi = values['api']['options']['aois'][desc]
                years[aoi] = values['api']['options']['years'][aoi]
        elif values['set']['data_source'] == '1':
            for aoi in values['ds_conf']:
                desc = f"{values['ds_conf'][aoi]['desc']}"
                years[aoi] = [y for y in values['ds_conf'][aoi]['years']]
        return years

    try:
        aois = Dropdown(
            options=tuple(aois_options()),
            value=values['set']['ds_conf'],
            description='AOI:',
            disabled=False,
        )
    except:
        aois = Dropdown(
            options=tuple(aois_options()),
            description='AOI:',
            disabled=False,
        )

    year = Dropdown(
        options=next(iter(aois_options().values())),
        description='Year:',
        disabled=False,
    )
    button_refresh = Button(layout=Layout(width='35px'), icon='fa-refresh')

    @button_refresh.on_click
    def button_refresh_on_click(b):
        aois.options = tuple(aois_options())
        year.options = aois_years()[aois.value]

    def table_options_change(change):
        try:
            year.options = aois_years()[change.new]
        except:
            aois.options = tuple(aois_options())
            year.options = aois_years()[aois.value]

    aois.observe(table_options_change, 'value')

    info_method = Label("2. Select a method to get the data.")

    method = ToggleButtons(
        options=[('Parcel ID', 2), ('Coordinates', 1), ('Map marker', 3),
                 ('Polygon', 4)],
        value=None,
        description='',
        disabled=False,
        button_style='info',
        tooltips=[
            'Enter lat lon', 'Enter parcel ID', 'Select a point on a map',
            'Get parcels id in a polygon'
        ],
    )

    plon = Text(value='5.664',
                placeholder='Add lon',
                description='Lon:',
                disabled=False)

    plat = Text(value='52.694',
                placeholder='Add lat',
                description='Lat:',
                disabled=False)

    wbox_lat_lot = VBox(children=[plat, plon])

    info_pid = Label(
        "Multiple parcel id codes can be added (comma ',' separated, e.g.: 11111, 22222)."
    )

    pid = Textarea(value='34296',
                   placeholder='12345, 67890',
                   description='Parcel(s) ID:',
                   disabled=False)

    wbox_pids = VBox(children=[info_pid, pid])

    bt_get_ids = Button(description="Find parcels",
                        disabled=False,
                        button_style='info',
                        tooltip='Find parcels within the polygon.',
                        icon='')

    get_ids_box = HBox(
        [bt_get_ids,
         Label("Find the parcels that are in the polygon.")])

    ppoly_out = Output()

    progress = Output()

    def outlog(*text):
        with progress:
            print(*text)

    def outlog_poly(*text):
        with ppoly_out:
            print(*text)

    @bt_get_ids.on_click
    def bt_get_ids_on_click(b):
        with ppoly_out:
            try:
                get_requests = data_source()
                ppoly_out.clear_output()
                polygon = get_maps.polygon_map.feature_collection['features'][
                    -1]['geometry']['coordinates'][0]
                polygon_str = '-'.join(
                    ['_'.join(map(str, c)) for c in polygon])
                outlog_poly(f"Geting parcel ids within the polygon...")
                polyids = json.loads(
                    get_requests.ppoly(aois.value, year.value, polygon_str,
                                       False, True))
                outlog_poly(
                    f"'{len(polyids['ogc_fid'])}' parcels where found:")
                outlog_poly(polyids['ogc_fid'])
                file = config.get_value(['files', 'pids_poly'])
                with open(file, "w") as text_file:
                    text_file.write('\n'.join(map(str, polyids['ogc_fid'])))
            except Exception as err:
                outlog("No parcel ids found:", err)

    method_out = Output(layout=Layout(border='1px solid black'))

    def method_options(obj):
        with method_out:
            method_out.clear_output()
            if obj['new'] == 1:
                display(wbox_lat_lot)
            elif obj['new'] == 2:
                display(wbox_pids)
            elif obj['new'] == 3:
                display(
                    get_maps.base_map(
                        aois.value,
                        int(config.get_value(['set', 'data_source']))))
            elif obj['new'] == 4:
                display(
                    VBox([
                        get_maps.polygon(
                            aois.value,
                            int(config.get_value(['set', 'data_source']))),
                        get_ids_box, ppoly_out
                    ]))

    method.observe(method_options, 'value')

    info_type = Label("3. Select datasets to download.")

    table_options = HBox([aois, button_refresh, year])

    # ########### Time series options #########################################
    pts_bt = ToggleButton(
        value=False,
        description='Time series',
        disabled=False,
        button_style='success',  # success
        tooltip='Get parcel information',
        icon='toggle-off',
        layout=Layout(width='50%'))

    pts_bands = data_options.pts_bands()

    pts_tstype = SelectMultiple(
        options=data_options.pts_tstype(),
        value=['s2'],
        rows=3,
        description='TS type:',
        disabled=False,
    )

    pts_band = Dropdown(
        options=list(pts_bands['s2']),
        value='',
        description='Band:',
        disabled=False,
    )

    def pts_tstype_change(change):
        if len(pts_tstype.value) <= 1:
            pts_band.disabled = False
            try:
                pts_b = change.new[0]
                pts_band.options = pts_bands[pts_b]
            except:
                pass
        else:
            pts_band.value = ''
            pts_band.disabled = True

    pts_tstype.observe(pts_tstype_change, 'value')

    pts_options = VBox(children=[pts_tstype, pts_band])

    # ########### Chip images options #########################################
    pci_bt = ToggleButton(value=False,
                          description='Chip images',
                          disabled=False,
                          button_style='success',
                          tooltip='Get parcel information',
                          icon='toggle-off',
                          layout=Layout(width='50%'))

    pci_start_date = DatePicker(value=datetime.date(2019, 6, 1),
                                description='Start Date',
                                disabled=False)

    pci_end_date = DatePicker(value=datetime.date(2019, 6, 30),
                              description='End Date',
                              disabled=False)

    pci_plevel = RadioButtons(
        options=['LEVEL2A', 'LEVEL1C'],
        value='LEVEL2A',
        description='Proces. level:',  # Processing level
        disabled=False,
        layout=Layout(width='50%'))

    pci_chipsize = IntSlider(value=640,
                             min=100,
                             max=5120,
                             step=10,
                             description='Chip size:',
                             disabled=False,
                             continuous_update=False,
                             orientation='horizontal',
                             readout=True,
                             readout_format='d')

    pci_bands = data_options.pci_bands()

    pci_satellite = RadioButtons(options=list(pci_bands),
                                 value='Sentinel 2',
                                 disabled=True,
                                 layout=Layout(width='100px'))

    pci_band = SelectMultiple(options=list(pci_bands['Sentinel 2']),
                              value=['B04'],
                              rows=11,
                              description='Band:',
                              disabled=False)

    sats_plevel = HBox([pci_satellite, pci_plevel])

    def on_sat_change(change):
        sat = change.new
        pci_band.options = pci_bands[sat]

    pci_satellite.observe(on_sat_change, 'value')

    pci_options = VBox(children=[
        pci_start_date, pci_end_date, sats_plevel, pci_chipsize, pci_band
    ])

    # ########### General options #############################################
    pts_wbox = VBox(children=[])
    pci_wbox = VBox(children=[])

    def pts_observe(button):
        if button['new']:
            pts_bt.icon = 'toggle-on'
            pts_wbox.children = [pts_options]
        else:
            pts_bt.icon = 'toggle-off'
            pts_wbox.children = []

    def pci_observe(button):
        if button['new']:
            pci_bt.icon = 'toggle-on'
            pci_wbox.children = [pci_options]
        else:
            pci_bt.icon = 'toggle-off'
            pci_wbox.children = []

    pts_bt.observe(pts_observe, names='value')
    pci_bt.observe(pci_observe, names='value')

    pts = VBox(children=[pts_bt, pts_wbox], layout=Layout(width='40%'))
    pci = VBox(children=[pci_bt, pci_wbox], layout=Layout(width='40%'))

    data_types = HBox(children=[pts, pci])

    info_get = Label("4. Download the selected data.")

    bt_get = Button(description='Download',
                    disabled=False,
                    button_style='warning',
                    tooltip='Send the request',
                    icon='download')

    path_temp = config.get_value(['paths', 'temp'])
    path_data = config.get_value(['paths', 'data'])

    info_paths = HTML("".join([
        "<style>div.c {line-height: 1.1;}</style>",
        "<div class='c';>By default data will be stored in the temp folder ",
        f"({path_temp}), you will be asked to empty the temp folder each time ",
        "you start the notebook.<br>In your personal data folder ",
        f"({path_data}) you can permanently store the data.</div>"
    ]))

    paths = RadioButtons(options=[
        (f"Temporary folder: '{path_temp}'.", path_temp),
        (f"Personal data folder: '{path_data}'.", path_data)
    ],
                         layout={'width': 'max-content'},
                         value=path_temp)

    paths_box = Box([Label(value="Select folder:"), paths])

    def file_len(fname):
        with open(fname) as f:
            for i, l in enumerate(f):
                pass
        return i + 1

    def get_data(parcel):
        values = config.read()
        get_requests = data_source()
        pid = parcel['ogc_fid'][0]
        source = int(config.get_value(['set', 'data_source']))
        if source == 0:
            datapath = f'{paths.value}{aois.value}{year.value}/parcel_{pid}/'
        elif source == 1:
            ds_conf = config.get_value(['set', 'ds_conf'])
            datapath = f'{paths.value}{ds_conf}/parcel_{pid}/'
        file_pinf = f"{datapath}{pid}_information"

        outlog(data_handler.export(parcel, 10, file_pinf))

        if pts_bt.value is True:
            outlog(f"Getting time series for parcel: '{pid}',",
                   f"({pts_tstype.value} {pts_band.value}).")
            for pts in pts_tstype.value:
                ts = json.loads(
                    get_requests.pts(aois.value, year.value, pid, pts,
                                     pts_band.value))
                band = ''
                if pts_band.value != '':
                    band = f"_{pts_band.value}"
                file_ts = f"{datapath}{pid}_time_series_{pts}{band}"
                outlog(data_handler.export(ts, 11, file_ts))
        if pci_bt.value is True:
            files_pci = f"{datapath}{pid}_chip_images/"
            outlog(f"Getting '{pci_band.value}' chip images for parcel: {pid}")
            with progress:
                get_requests.rcbl(parcel, pci_start_date.value,
                                  pci_end_date.value, pci_band.value,
                                  pci_satellite.value, pci_chipsize.value,
                                  files_pci)
            filet = f'{datapath}/{pid}_chip_images/{pid}_images_list.{pci_band.value[0]}.csv'
            if file_len(filet) > 1:
                outlog(
                    f"Completed, all GeoTIFFs for bands '{pci_band.value}' are ",
                    f"downloaded in the folder: '{datapath}/{pid}_chip_images'"
                )
            else:
                outlog(
                    "No files where downloaded, please check your configurations"
                )

    def get_from_location(lon, lat):
        get_requests = data_source()
        outlog(f"Finding parcel information for coordinates: {lon}, {lat}")
        parcel = json.loads(
            get_requests.ploc(aois.value, year.value, lon, lat, True))
        pid = parcel['ogc_fid'][0]
        outlog(f"The parcel '{pid}' was found at this location.")
        try:
            get_data(parcel)
        except Exception as err:
            print(err)

    def get_from_id(pids):
        get_requests = data_source()
        outlog(f"Getting parcels information for: '{pids}'")
        for pid in pids:
            try:
                parcel = json.loads(
                    get_requests.pid(aois.value, year.value, pid, True))
                get_data(parcel)
            except Exception as err:
                print(err)

    @bt_get.on_click
    def bt_get_on_click(b):
        progress.clear_output()
        if method.value == 1:
            try:
                with progress:
                    get_requests = data_source()
                    lon, lat = plon.value, plat.value
                    get_from_location(lon, lat)
            except Exception as err:
                outlog(
                    f"Could not get parcel information for location '{lon}', '{lat}': {err}"
                )

        elif method.value == 2:
            try:
                with progress:
                    pids = pid.value.replace(" ", "").split(",")
                    get_from_id(pids)
            except Exception as err:
                outlog(f"Could not get parcel information: {err}")

        elif method.value == 3:
            try:
                marker = get_maps.base_map.map_marker
                lon = str(round(marker.location[1], 2))
                lat = str(round(marker.location[0], 2))
                get_from_location(lon, lat)
            except Exception as err:
                outlog(f"Could not get parcel information: {err}")
        elif method.value == 4:
            try:
                file = config.get_value(['files', 'pids_poly'])
                with open(file, "r") as text_file:
                    pids = text_file.read().split('\n')
                outlog("Geting data form the parcels:")
                outlog(pids)
                if len(pids) <= plimit:
                    get_from_id(pids)
                else:
                    outlog(
                        "You exceeded the maximum amount of selected parcels ",
                        f"({plimit}) to get data. Please select smaller area.")
            except Exception as err:
                outlog("No pids file found.", err)
        else:
            outlog(f"Please select method to get parcel information.")

    return VBox([
        info, table_options, info_method, method, method_out, info_type,
        data_types, info_get, info_paths, paths_box, bt_get, progress
    ])
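In a notebook, a widget box like the one returned above is typically rendered with IPython's display helper (a usage sketch, assuming a Jupyter environment with ipywidgets enabled):

    from IPython.display import display

    display(get())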
Example 25
def widget_box():
    """Update the repository.
    Args:
        None
    Returns:
        update_widget : A widget for general settings.
    Raises:
        Error:
    Example:

    """

    # User settings
    user_info = Label("General settings.")

    values = config.read()

    user_name = Text(value=values['set']['user'],
                     placeholder='user name',
                     description='User:',
                     disabled=False)
    user_email = Text(value=values['set']['email'],
                      placeholder='[email protected]',
                      description='email:',
                      disabled=False)
    user_institution = Text(value=values['set']['institution'],
                            placeholder='EU-',
                            description='Institution:',
                            disabled=False)
    ms_list = data_options.eu_ms()
    ms = Dropdown(
        options=[(ms_list[m], m) for m in ms_list] + [('', '')],
        value=values['set']['member_state'],
        description='Member state:',
        disabled=False,
    )
    wbox_user = VBox([user_info, user_name, user_email, user_institution, ms],
                     layout=Layout(border='1px solid black'))

    # System settings
    sys_info = Label("System settings.")
    paths_info = Label(
        "Select the personal data folder and the temporary folder.")

    jupyterlab = Checkbox(
        value=eval(values['set']['jupyterlab']),
        description='Working in Jupyter Lab (uncheck for Voila and the classic Jupyter environment)',
        disabled=False,
        indent=False)

    def on_jupyterlab_change(change):
        config.update(['set', 'jupyterlab'], str(jupyterlab.value))

    jupyterlab.observe(on_jupyterlab_change, 'value')

    path_data = Text(value=values['paths']['data'], description='Data path:')

    path_temp = Text(value=values['paths']['temp'], description='Temp path:')

    files_info = Label("Select where to store the parcel IDs list file from:")

    file_pids_poly = Text(value=values['files']['pids_poly'],
                          description='Polygon:')
    file_pids_dist = Text(value=values['files']['pids_dist'],
                          description='Distance:')

    plimit_info = Label(
        "Warning: No more than 25 parcels are tested, unexpected results may occur."
    )
    plimit = BoundedIntText(value=int(values['set']['plimit']),
                            max=100_000_000,
                            min=1,
                            step=1,
                            description='Max parcels that can be downloaded:',
                            disabled=False)

    wbox_sys = VBox([
        sys_info, jupyterlab, plimit_info, plimit, paths_info, path_data,
        path_temp, files_info, file_pids_poly, file_pids_dist
    ],
                    layout=Layout(border='1px solid black'))

    # Git settings
    git_info = Label(
        "Git Settings. (To easily get the latest version of notebooks and scripts.)"
    )

    git_url, git_user, git_pass = config.credentials('git')

    git_url = Text(value=git_url, description='Url:')
    git_user = Text(value=git_user, description='User name:')
    git_pass = Password(value=git_pass,
                        placeholder='******',
                        description='Password:')
    wbox_git = VBox([git_info, git_url, git_user, git_pass],
                    layout=Layout(border='1px solid black'))

    btn_save = Button(description='Save', disabled=False, icon='save')

    progress = Output()

    def outlog(*text):
        with progress:
            print(*text)

    @btn_save.on_click
    def btn_save_on_click(b):
        progress.clear_output()
        config.update(['set', 'user'], str(user_name.value))
        config.update(['set', 'email'], str(user_email.value))
        config.update(['set', 'institution'], str(user_institution.value))
        config.update(['set', 'member_state'], str(ms.value))
        config.update(['set', 'plimit'], str(plimit.value))
        config.update(['git', 'url'], str(git_url.value))
        config.update(['git', 'user'], str(git_user.value))
        config.update(['paths', 'data'], str(path_data.value))
        config.update(['paths', 'temp'], str(path_temp.value))
        config.update(['files', 'pids_poly'], str(file_pids_poly.value))
        config.update(['files', 'pids_dist'], str(file_pids_dist.value))
        if git_pass.value != '':
            config.update(['git', 'pass'], str(git_pass.value))
        outlog("The new settings are saved.")

    wbox = VBox([
        config.clean_temp(), wbox_user, wbox_sys, wbox_git,
        HBox([btn_save, update.btn_update()]), progress
    ])

    return wbox
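# Usage sketch (assumption: a Jupyter notebook where ipywidgets render inline):
from IPython.display import display
display(widget_box())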
Ejemplo n.º 26
0
    def dsc_config(dsc_value):
        values = config.read()
        ds_db = Dropdown(
            options=["1"],
            value="1",
            description='Database:',
            disabled=False,
            layout=Layout(width='140px')
        )

        try:
            with open(f"{config.get_value(['paths','temp'])}tb_prefix", 'r') as f:
                code_value = f.read()
        except Exception:
            code_value = dsc_value

        ds_code = Combobox(
            value=code_value,
            placeholder='abc',
            options=[m for m in data_options.eu_ms()]+[''],
            description='AOI code:',
            ensure_option=False,
            disabled=False,
            layout=Layout(width='200px'),
            tooltip='Lowercase AOI code name for the dataset (max 5 characters).'
        )
        ds_year = BoundedIntText(
            value=int(dsy.value),
            min=1980,
            max=2100,
            step=1,
            description='Dataset year:',
            disabled=False,
            layout=Layout(width='180px')
        )
        ds_desc = Text(
            value=values['ds_conf'][dsc_value]['desc'],
            description='Description:',
            disabled=False
        )

        info_map_text = ["Set default map view options. ",
                         "You can get automatically the dataset ",
                         "center coordinates."]

        lat, lon = values['ds_conf'][dsc_value]['center'].split(",")
        map_cent_lat = FloatText(
            value=float(lat),
            description='Lat:',
            disabled=False,
            layout=Layout(width='160px')
        )
        map_cent_lon = FloatText(
            value=float(lon),
            description='Lon:',
            disabled=False,
            layout=Layout(width='160px')
        )
        map_zoom = BoundedIntText(
            value=values['ds_conf'][dsc_value]['zoom'],
            min=0,
            max=20,
            step=1,
            description='Zoom:',
            disabled=False,
            layout=Layout(width='140px')
        )
        bt_get_center = Button(
            layout=Layout(width='40px'),
            icon='bullseye',
            tooltip='Get center point from database.'
        )

        ds_box = HBox([ds_code, ds_year, ds_desc])
        map_box = HBox([Label("Map center: "), map_cent_lat,
                        map_cent_lon, bt_get_center, map_zoom])

        info_config = Label(
            """Change 'AOI code' value to create a new configuration set or 
            leave the same 'AOI code' value to configure the selected one.""")

        db = int(values['ds_conf'][dsc_value]['db'])

        def get_tb_list():
            tbls = database.tables(db, None, False)
            if tbls is None:
                return []
            else:
                return tbls

        tb_dc = Dropdown(
            options=get_tb_list(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['tables']['dias_catalog'],
                get_tb_list(), False),
            description='DIAS catalog:',
            disabled=False
        )
        tb_pr = Dropdown(
            options=get_tb_list(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['tables']['parcels'],
                get_tb_list(), False),
            description='Parcels:',
            disabled=False
        )

        def get_pr_columns():
            try:
                colms = database.table_columns(tb_pr.value, 1, None)
                if colms is None:
                    return []
                else:
                    return colms
            except Exception:
                return []

        tc_id = Dropdown(
            options=get_pr_columns(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['columns']['parcels_id'],
                get_pr_columns(), False),
            description='Parcels ID:',
            disabled=False,
            layout=Layout(width='180px')
        )
        tc_cn = Dropdown(
            options=get_pr_columns(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['columns']['crop_names'],
                get_pr_columns(), False),
            description='Crop names:',
            disabled=False,
            layout=Layout(width='180px')
        )
        tc_cc = Dropdown(
            options=get_pr_columns(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['columns']['crop_codes'],
                get_pr_columns(), False),
            description='Crop codes:',
            disabled=False,
            layout=Layout(width='180px')
        )

        def on_tb_pr_change(change):
            tc_id.options = get_pr_columns()
            tc_cn.options = get_pr_columns()
            tc_cc.options = get_pr_columns()
        tb_pr.observe(on_tb_pr_change, 'value')

        parcel_box = HBox([tb_pr, tc_id, tc_cn, tc_cc])

        tb_s2 = Dropdown(
            options=get_tb_list(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['tables']['s2'],
                get_tb_list(), False),
            description='S2 signatures:',
            disabled=False
        )
        tb_bs = Dropdown(
            options=get_tb_list(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['tables']['bs'],
                get_tb_list(), False),
            description='Backscattering:',
            disabled=False
        )
        tb_6c = Dropdown(
            options=get_tb_list(),
            value=config.autoselect(
                values['ds_conf'][dsc_value]['years'][
                    str(ds_year.value)]['tables']['c6'],
                get_tb_list(), False),
            description='6 day coherence:',
            disabled=False
        )

        wb_save = Button(
            description='Save',
            disabled=False,
            icon='save'
        )

        @bt_get_center.on_click
        def bt_get_center_on_click(b):
            import json
            center_json = json.loads(
                database.getTableCentroid(tb_pr.value)['center'][0])
            map_cent_lat.value = round(center_json['coordinates'][1], 2)
            map_cent_lon.value = round(center_json['coordinates'][0], 2)
            map_zoom.value = 10

        @wb_save.on_click
        def wb_save_on_click(b):
            progress.clear_output()
            dscode = ds_code.value
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'tables', 'dias_catalog'], str(tb_dc.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'tables', 'parcels'], str(tb_pr.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'columns', 'parcels_id'], str(tc_id.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'columns', 'crop_names'], str(tc_cn.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'columns', 'crop_codes'], str(tc_cc.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'tables', 's2'], str(tb_s2.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'tables', 'bs'], str(tb_bs.value))
            config.update(['ds_conf', dscode, 'years', str(ds_year.value),
                           'tables', 'c6'], str(tb_6c.value))
            config.update(['ds_conf', dscode,
                           'db'], str(ds_db.value))
            config.update(['ds_conf', dscode,
                           'desc'], str(ds_desc.value))
            config.update(['ds_conf', dscode, 'center'],
                          f"{map_cent_lat.value},{map_cent_lon.value}")
            config.update(['ds_conf', dscode,
                           'zoom'], str(map_zoom.value))
            config.update(['set', 'ds_conf'], str(dscode))
            config.update(['set', 'ds_year'], str(ds_year.value))
            values = config.read()
            ds_c = values['set']['ds_conf']
            ds_y = values['set']['ds_year']
            dsc.options = [d for d in values['ds_conf']]
            dsy.options = [int(y) for y in values['ds_conf'][ds_c]['years']]
            dsc.value = ds_c
            dsy.value = int(ds_y)
            outlog("The configurations are saved.")

        return VBox([info_config, ds_box, parcel_box,
                     tb_dc, tb_s2, tb_bs, tb_6c,
                     Label(''.join(info_map_text)), map_box, wb_save])
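# Illustrative sketch only (an assumption about the behaviour of the config helpers,
# not the project's implementation): config.update()/config.get_value() are used above
# as if the settings live in a nested dict addressed by a list of keys, e.g.
# ['ds_conf', dscode, 'years', '2019', 'tables', 'parcels'] -> parcels table name.
def _nested_set(tree, keys, value):
    for key in keys[:-1]:
        tree = tree.setdefault(key, {})
    tree[keys[-1]] = value

def _nested_get(tree, keys):
    for key in keys:
        tree = tree[key]
    return tree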
Ejemplo n.º 27
0
 def on_dsc_change(change):
     config.update(['set', 'ds_conf'], dsc.value)
     values = config.read()
     ds_c = values['set']['ds_conf']
     dsy.options = [int(y) for y in values['ds_conf'][ds_c]['years']]
Ejemplo n.º 28
0
def extractS2(startdate, enddate):
    start = time.time()

    values = config.read()
    dsc = values['set']['ds_conf']
    dsy = values['set']['ds_year']
    dias_catalogue = values['ds_conf'][dsc]['years'][dsy]['tables']['dias_catalog']
    parcels_table = values['ds_conf'][dsc]['years'][dsy]['tables']['parcels']
    results_table = values['ds_conf'][dsc]['years'][dsy]['tables']['s2']

    inconn = database.connection()
    if not inconn:
        print("No in connection established")
        sys.exit(1)

    outconn = database.connection()
    if not outconn:
        print("No out connection established")
        sys.exit(1)

    incurs = inconn.cursor()
    srid = -1
    sridSql = "SELECT srid FROM geometry_columns WHERE f_table_name = '{}';"

    try:
        incurs.execute(sridSql.format(parcels_table))
        result = incurs.fetchone()
        if not result:
            print("{} does not exist or is not a spatial table")
        else:
            srid = result[0]
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        inconn.close()
        sys.exit(1)
    # print("Parcel srid = ", srid)

    # Get the first image record that is not yet processed
    imagesql = f"""
    SELECT id, reference, obstime FROM {dias_catalogue}
    WHERE obstime between '{startdate}' And '{enddate}'
    And status ='ingested' And card = 's2'
    ORDER by obstime asc LIMIT 1
    """
    updateSql = """
    UPDATE {} SET status='{}'
    WHERE id = {} And status = '{}'
    """

    with inconn:
        with inconn.cursor() as trans_cur:
            trans_cur.execute(imagesql)
            result = trans_cur.fetchone()
            if not result:
                print("All signatures for the given dates have been extracted.")
                inconn.close()
                sys.exit(1)
            else:
                oid = result[0]
                reference = result[1]
                obstime = result[2]
            # Fails if this record is changed in the meantime
            trans_cur.execute(updateSql.format(
                dias_catalogue, 'inprogress', oid, 'ingested'))
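    # The status flip from 'ingested' to 'inprogress' above acts as an optimistic lock:
    # if another worker already claimed this record, the UPDATE simply matches no rows.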

    obstime = reference.split('_')[2][0:8]
    # print(obstime)
    obs_path = "{}/{}/{}".format(obstime[0:4], obstime[4:6], obstime[6:8])

    mgrs_tile = reference.split('_')[5]
    full_tstamp = reference.split('_')[2]

    # Due to some ESA issues with the manifest.safe at times during 2018, the GRANULE
    # directory needs to be checked to determine where the image data is located.
    dias = values['obst']['osdias']
    if dias in ['EOSC', 'CREODIAS']:
        rootpath = 'Sentinel-2/MSI/L2A'
        s3path = "{}/{}/{}/GRANULE/".format(rootpath, obs_path, reference)
    elif dias == 'SOBLOO':
        rootpath = '{}/L1C'.format(reference.split('_')[0])
        s3path = "{}/{}/{}.SAFE/GRANULE/".format(rootpath, reference,
                                                 reference.replace('MSIL1C', 'MSIL2A'))
    elif dias == 'MUNDI':
        from .utils.mundi import get_mundi_s3path
        s3path = get_mundi_s3path(reference, obs_path)

    flist = object_storage.list_files(s3path)
    if not flist:
        print("Resource {} not available in S3 storage (FATAL)".format(s3path))
        incurs.execute(updateSql.format(
            dias_catalogue, 'S2_nopath', oid, 'inprogress'))
        inconn.commit()
        incurs.close()
        inconn.close()
        sys.exit(1)

    # We want 3 image files only, e.g. to create NDVI
    # Note: SOBLOO does not produce 10 m L2A bands and provides only B8A (not B08)
    s3subdir = flist[1]['Key'].replace(s3path, '').split('/')[0]
    # print(s3path)
    # print(flist[1])
    # print(s3subdir)
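    # The jp2 file names below follow the Sentinel-2 L2A IMG_DATA naming convention
    # <MGRS tile>_<datatake timestamp>_<band>_<resolution>.jp2, stored under the
    # R10m/R20m resolution subfolders of the GRANULE directory.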

    selection = {'B4': '{}/{}_{}_{}_{}.jp2'.format('R10m', mgrs_tile, full_tstamp, 'B04', '10m'),
                 'B8': '{}/{}_{}_{}_{}.jp2'.format('R10m', mgrs_tile, full_tstamp, 'B08', '10m'),
                 'SC': '{}/{}_{}_{}_{}.jp2'.format('R20m', mgrs_tile, full_tstamp, 'SCL', '20m')
                 }

    file_set = {}

    # Copy input data from S3 to local disk
    for k in selection.keys():
        s = selection.get(k)
        fpath = f"tmp/{s.split('/')[-1]}"
        alt_s = s.replace('0m/', '0m/L2A_')

        if object_storage.get_file('{}{}/IMG_DATA/{}'.format(
                s3path, s3subdir, s), fpath) == 1:
#             print("Image {} found in bucket".format(s))
            file_set[k] = fpath
        elif object_storage.get_file('{}{}/IMG_DATA/{}'.format(
                s3path, s3subdir, alt_s), fpath) == 1:
            # LEVEL2AP has another naming convention.
#             print("Image {} found in bucket".format(alt_s))
            file_set[k] = fpath
        else:
            print("Neither Image {} nor {} found in bucket".format(s, alt_s))
            incurs.execute(updateSql.format(
                dias_catalogue, '{} notfound'.format(k), oid, 'inprogress'))
            inconn.commit()
            incurs.close()
            inconn.close()
            sys.exit(1)

    # Get the parcel polygons in this image's footprint
    print(f"Downloaded '*{file_set['B4'][4:-12]}*' images ...")

    outsrid = int('326{}'.format(mgrs_tile[1:3]))

    incurs.close()

    outconn = database.connection()
    if not outconn:
        print("No out connection established")
        sys.exit(1)

    # Open a named cursor
    incurs = inconn.cursor(name='fetch_image_coverage',
                           cursor_factory=psycopg2.extras.DictCursor)
    ds_conf = config.get_value(['set', 'ds_conf'])
    ds_year = config.get_value(['set', 'ds_year'])
    pid_column = config.get_value(['ds_conf', ds_conf, 'years', ds_year, 'columns', 'parcels_id'])
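    # Select parcels whose geometry overlaps the image footprint (the && operator is a
    # fast bounding-box test), keeping only parcels larger than 3000 map units (m2 for
    # projected data); the commented-out clause would skip parcels already extracted
    # for this image (obsid).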

    parcelsql = f"""
    SELECT p.{pid_column}, ST_AsGeoJSON(st_transform(p.wkb_geometry,
        {outsrid}))::json
    FROM {parcels_table} p, {dias_catalogue} dc
    WHERE p.wkb_geometry && st_transform(dc.footprint, {srid})
    And st_area(p.wkb_geometry) > 3000.0
    And dc.id = {oid}
    -- And p.{pid_column} not in (SELECT distinct pid
    --     FROM {results_table} where obsid = {oid})
    """
    incurs.execute(parcelsql)

    sqlload = time.time() - start
#     print(f"Images loaded and nrecs[0] features selected from database in {sqlload} seconds")

    nrows = {}
    for k in file_set.keys():
        nrows[k] = 0

    affine = {}
    array = {}

    bands = file_set.keys()

    for b in bands:
        with rasterio.open(file_set.get(b)) as src:
            affine[b] = src.transform
            array[b] = src.read(1)

    print(f"Extracting signatures for '*{file_set['B4'][4:-12]}* images ...'")
    while True:
        rowset = incurs.fetchmany(size=2000)

        if not rowset:
            break
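        # Wrap the fetched parcels into a GeoJSON FeatureCollection so that
        # rasterstats.zonal_stats can relate each geometry back to its parcel ID.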

        features = {"type": "FeatureCollection",
                    "features": [{"type": "feature", "geometry": f[1],
                                  "properties": {"pid": int(f[0])}} for f in rowset]}

        for b in bands:

            zs = zonal_stats(features, array[b], affine=affine[b], stats=[
                             "count", "mean", "std", "min", "max",
                             "percentile_25", "percentile_50",
                             "percentile_75"],
                             prefix="", nodata=0, geojson_out=True)

            df = pd.DataFrame(zs)

            df = pd.DataFrame.from_dict(
                df.properties.to_dict(), orient='index')

            df['obsid'] = oid
            df['band'] = b

            df.rename(index=str, columns={
                      "percentile_25": "p25", "percentile_50": "p50",
                      "percentile_75": "p75"}, inplace=True)

            nrows[b] = nrows[b] + len(df)
            # Write the band statistics (if any) to the results table
            if len(df) > 0:
                df.dropna(inplace=True)
                if len(df.values) > 0:
                    df_columns = list(df)
                    s_buf = io.StringIO()
                    df.to_csv(s_buf, header=False, index=False, sep=',')
                    s_buf.seek(0)
                    outcurs = outconn.cursor()
                    # print(tuple(df_columns))
                    try:
                        #psycopg2.extras.execute_batch(outcurs, insert_stmt, df.values)
                        outcurs.copy_from(s_buf, results_table,
                                          columns=tuple(df_columns), sep=',')
                        outconn.commit()
                    except psycopg2.IntegrityError as e:
                        outconn.rollback()
                        print(f"Copy into {results_table} failed with a duplicate index: {e}")
                    # except Error as e:
                    #    print(e)
                    finally:
                        outcurs.close()
                else:
                    print(f"No valid data in block {nrows[b]}")

    outconn.close()

    incurs.close()

    incurs = inconn.cursor()

    try:
        incurs.execute(updateSql.format(
            dias_catalogue, 'extracted', oid, 'inprogress'))
        inconn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        inconn.close()
        if outconn:
            outconn.close()

    incurs.close()
    inconn.close()

    print(f"Removing '*{file_set['B4'][4:-12]}*' images.")
    for f in file_set.keys():
        if os.path.exists(file_set.get(f)):
#             print("Removing {}".format(file_set.get(f)))
            os.remove(file_set.get(f))

    print("Total time required for {} features and {} bands: {} seconds".format(
        nrows.get('B8'), len(bands), time.time() - start))
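# A minimal invocation sketch (assumptions: run as a batch worker with the database,
# object storage and configuration already set up; the dates are compared against the
# DIAS catalogue 'obstime' column, so ISO dates work):
if __name__ == "__main__":
    extractS2('2019-06-01', '2019-06-30')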
Ejemplo n.º 29
0
def time_series(path):
    import matplotlib.pyplot as plt
    import matplotlib.dates as mdates
    from datetime import timedelta
    import pandas as pd
    import json
    import glob

    confvalues = config.read()
    inst = confvalues['set']['institution']
    file_info = glob.glob(f"{path}*_information.json")[0]

    with open(file_info, 'r') as f:
        info_data = json.loads(f.read())
    pid = info_data['ogc_fid'][0]
    crop_name = info_data['cropname'][0]
    area = info_data['area'][0]
    figure_dpi = 50

    def plot_ts_s2(cloud):
        file_ts = glob.glob(f"{path}*_time_series_s2.csv")[0]
        df = pd.read_csv(file_ts, index_col=0)

        df['date'] = pd.to_datetime(df['date_part'], unit='s')
        start_date = df.iloc[0]['date'].date()
        end_date = df.iloc[-1]['date'].date()
        print(f"From '{start_date}' to '{end_date}'.")

        pd.set_option('max_colwidth', 200)
        pd.set_option('display.max_columns', 20)

        # Plot settings conform to the IJRS graphics instructions
        plt.rcParams['axes.titlesize'] = 16
        plt.rcParams['axes.labelsize'] = 14
        plt.rcParams['xtick.labelsize'] = 12
        plt.rcParams['ytick.labelsize'] = 12
        plt.rcParams['legend.fontsize'] = 14

        df.set_index(['date'], inplace=True)

        dfB4 = df[df.band == 'B4'].copy()
        dfB8 = df[df.band == 'B8'].copy()
        datesFmt = mdates.DateFormatter('%-d %b %Y')
        if cloud is False:
            # Plot NDVI
            fig = plt.figure(figsize=(16.0, 10.0))
            axb = fig.add_subplot(1, 1, 1)

            axb.set_title(
                f"Parcel {pid} (crop: {crop_name}, area: {area:.2f} ha)")
            axb.set_xlabel("Date")
            axb.xaxis.set_major_formatter(datesFmt)

            axb.set_ylabel(r'DN')
            axb.plot(dfB4.index,
                     dfB4['mean'],
                     linestyle=' ',
                     marker='s',
                     markersize=10,
                     color='DarkBlue',
                     fillstyle='none',
                     label='B4')
            axb.plot(dfB8.index,
                     dfB8['mean'],
                     linestyle=' ',
                     marker='o',
                     markersize=10,
                     color='Red',
                     fillstyle='none',
                     label='B8')

            axb.set_xlim(start_date, end_date + timedelta(1))
            axb.set_ylim(0, 10000)

            axb.legend(frameon=False)  # loc=2)

            return plt.show()

        else:
            # Plot Cloud free NDVI.
            dfSC = df[df.band == 'SC'].copy()
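            # NDVI from the per-parcel band means: (B8 - B4) / (B8 + B4).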
            dfNDVI = (dfB8['mean'] - dfB4['mean']) / \
                (dfB8['mean'] + dfB4['mean'])

            # Sen2Cor scene classification (SCL): class 4 = vegetation, class 5 = not
            # vegetated; a mean SCL value in [4, 6) is treated as cloud free.
            cloudfree = ((dfSC['mean'] >= 4) & (dfSC['mean'] < 6))

            fig = plt.figure(figsize=(16.0, 10.0))
            axb = fig.add_subplot(1, 1, 1)

            axb.set_title(
                f"{inst}\nParcel {pid} (crop: {crop_name}, area: {area:.2f} sqm)"
            )

            axb.set_xlabel("Date")
            axb.xaxis.set_major_formatter(datesFmt)

            axb.set_ylabel(r'NDVI')
            axb.plot(dfNDVI.index,
                     dfNDVI,
                     linestyle=' ',
                     marker='s',
                     markersize=10,
                     color='DarkBlue',
                     fillstyle='none',
                     label='NDVI')
            axb.plot(dfNDVI[cloudfree].index,
                     dfNDVI[cloudfree],
                     linestyle=' ',
                     marker='P',
                     markersize=10,
                     color='Red',
                     fillstyle='none',
                     label='Cloud free NDVI')

            axb.set_xlim(start_date, end_date + timedelta(1))
            axb.set_ylim(0, 1.0)

            axb.legend(frameon=False)  # loc=2)

            return plt.show()

    def plot_ts_bs():
        import numpy as np
        file_ts = glob.glob(f"{path}*_time_series_bs.csv")[0]
        df = pd.read_csv(file_ts, index_col=0)

        df['date'] = pd.to_datetime(df['date_part'], unit='s')
        start_date = df.iloc[0]['date'].date()
        end_date = df.iloc[-1]['date'].date()
        print(f"From '{start_date}' to '{end_date}'.")

        pd.set_option('max_colwidth', 200)
        pd.set_option('display.max_columns', 20)

        # Plot settings conform to the IJRS graphics instructions
        plt.rcParams['axes.titlesize'] = 16
        plt.rcParams['axes.labelsize'] = 14
        plt.rcParams['xtick.labelsize'] = 12
        plt.rcParams['ytick.labelsize'] = 12
        plt.rcParams['legend.fontsize'] = 14

        df.set_index(['date'], inplace=True)
        datesFmt = mdates.DateFormatter('%-d %b %Y')
        # Plot Backscattering coefficient

        df = df[df['mean'] >= 0]  # to remove negative values

        dfVV = df[df.band == 'VV'].copy()
        dfVH = df[df.band == 'VH'].copy()
        fig = plt.figure(figsize=(16.0, 10.0))
        axb = fig.add_subplot(1, 1, 1)

        dfVV['mean'] = dfVV['mean'].map(lambda s: 10.0 * np.log10(s))
        dfVH['mean'] = dfVH['mean'].map(lambda s: 10.0 * np.log10(s))

        axb.set_title(
            f"{inst}\nParcel {pid} (crop: {crop_name}, area: {area:.2f} sqm)")
        axb.set_xlabel("Date")
        axb.xaxis.set_major_formatter(datesFmt)

        axb.set_ylabel(r'Backscattering coefficient, $\gamma\degree$ (dB)')
        axb.plot(dfVH.index,
                 dfVH['mean'],
                 linestyle=' ',
                 marker='s',
                 markersize=10,
                 color='DarkBlue',
                 fillstyle='none',
                 label='VH')
        axb.plot(dfVV.index,
                 dfVV['mean'],
                 linestyle=' ',
                 marker='o',
                 markersize=10,
                 color='Red',
                 fillstyle='none',
                 label='VV')

        axb.set_xlim(start_date, end_date + timedelta(1))
        axb.set_ylim(-25, 0)

        axb.legend(frameon=False)  # loc=2)

        return plt.show()

    def plot_ts_c6():
        file_ts = glob.glob(f"{path}*_time_series_c6.csv")[0]
        df = pd.read_csv(file_ts, index_col=0)

        df['date'] = pd.to_datetime(df['date_part'], unit='s')
        start_date = df.iloc[0]['date'].date()
        end_date = df.iloc[-1]['date'].date()
        print(f"From '{start_date}' to '{end_date}'.")

        pd.set_option('max_colwidth', 200)
        pd.set_option('display.max_columns', 20)
        datesFmt = mdates.DateFormatter('%-d %b %Y')

        # Plot settings conform to the IJRS graphics instructions
        plt.rcParams['axes.titlesize'] = 16
        plt.rcParams['axes.labelsize'] = 14
        plt.rcParams['xtick.labelsize'] = 12
        plt.rcParams['ytick.labelsize'] = 12
        plt.rcParams['legend.fontsize'] = 14

        df.set_index(['date'], inplace=True)

        # Plot Coherence

        dfVV = df[df.band == 'VV'].copy()
        dfVH = df[df.band == 'VH'].copy()
        fig = plt.figure(figsize=(16.0, 10.0))
        axb = fig.add_subplot(1, 1, 1)

        axb.set_title(
            f"{inst}\nParcel {pid} (crop: {crop_name}, area: {area:.2f} sqm)")
        axb.set_xlabel("Date")
        axb.xaxis.set_major_formatter(datesFmt)

        axb.set_ylabel(r'Coherence')
        axb.plot(dfVH.index,
                 dfVH['mean'],
                 linestyle=' ',
                 marker='s',
                 markersize=10,
                 color='DarkBlue',
                 fillstyle='none',
                 label='VH')
        axb.plot(dfVV.index,
                 dfVV['mean'],
                 linestyle=' ',
                 marker='o',
                 markersize=10,
                 color='Red',
                 fillstyle='none',
                 label='VV')

        axb.set_xlim(start_date, end_date + timedelta(1))
        axb.set_ylim(0, 1)

        axb.legend(frameon=False)  # loc=2)

        return plt.show()

    ts_cloud = Checkbox(value=True,
                        description='Cloud free',
                        disabled=False,
                        indent=False)

    ts_files = glob.glob(f"{path}*time_series*.csv")
    ts_file_types = [b.split('_')[-1].split('.')[0] for b in ts_files]
    ts_types = [t for t in data_options.pts_tstype() if t[1] in ts_file_types]

    ts_type = Dropdown(
        options=ts_types,
        description='Select type:',
        disabled=False,
    )

    btn_ts = Button(value=False,
                    description='Plot TS',
                    disabled=False,
                    button_style='info',
                    tooltip='Refresh output',
                    icon='')

    ts_out = Output()

    @btn_ts.on_click
    def btn_ts_on_click(b):
        btn_ts.description = 'Refresh'
        btn_ts.icon = 'refresh'
        with ts_out:
            ts_out.clear_output()
            if ts_type.value == 's2':
                plot_ts_s2(ts_cloud.value)
            elif ts_type.value == 'bs':
                plot_ts_bs()
            elif ts_type.value == 'c6':
                plot_ts_c6()

    def on_ts_type_change(change):
        if ts_type.value == 's2':
            wbox_ts.children = [btn_ts, ts_type, ts_cloud]
        else:
            wbox_ts.children = [btn_ts, ts_type]

    ts_type.observe(on_ts_type_change, 'value')

    wbox_ts = HBox([btn_ts, ts_type, ts_cloud])

    wbox = VBox([wbox_ts, ts_out])

    return wbox
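# Usage sketch, assuming a Jupyter notebook and a data folder already populated by the
# download widgets with '<pid>_information.json' and '*_time_series_*.csv' files;
# the folder path below is hypothetical.
from IPython.display import display
display(time_series('data/parcel_12345/'))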