Example #1
def get_references(references, data_path, stage_in=True):

    ciop = cioppy.Cioppy()

    if len(references) == 1 and stage_in:

        logging.info('Stage-in product to {}'.format(data_path))

    if len(references) > 1 and stage_in:

        logging.info('Stage-in {} products to {}'.format(
            len(references), data_path))

    retrieved = []
    identifier = []

    for index, reference in enumerate(references):

        logging.info('The input reference (#{} of {}) is: {}'.format(
            index + 1, len(references), reference))

        search_params = dict()
        search_params['do'] = 'terradue'

        search = ciop.search(end_point=reference,
                             params=search_params,
                             output_fields='enclosure,identifier',
                             model='GeoTime')

        assert search

        identifier.append(search[0]['identifier'])

        logging.info('The input reference identifier is: {}'.format(
            search[0]['identifier']))

        if stage_in:

            logging.info('Retrieve {} from {}'.format(search[0]['identifier'],
                                                      search[0]['enclosure']))

            local_path = ciop.copy(search[0]['enclosure'], data_path)

            logging.info('Staged {}'.format(local_path))

            assert local_path

            retrieved.append(local_path)
        else:
            retrieved.append('')

    return retrieved, identifier
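A minimal usage sketch, assuming a hypothetical catalogue reference and a local staging directory (both values are illustrative):

# Hypothetical inputs: one catalogue reference and a scratch directory
references = ['https://catalog.terradue.com/sentinel2/search?uid=...']
retrieved, identifiers = get_references(references, '/tmp/data', stage_in=True)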
Example #2
def get_vsi_url(input_reference, username, api_key):

    enclosure = cioppy.Cioppy().search(input_reference, [],
                                       'enclosure',
                                       'GeoTime',
                                       creds='{}:{}'.format(
                                           username, api_key))[0]['enclosure']

    parsed_url = urlparse(enclosure)

    vsi_url = '/vsicurl/{}://{}:{}@{}{}'.format(
        parsed_url.scheme, username, api_key,
        parsed_url.netloc,
        parsed_url.path)

    return vsi_url
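The returned path can be opened directly with GDAL, which streams the remote enclosure over HTTP; a sketch with placeholder reference and credentials:

from osgeo import gdal

# Placeholder catalogue reference and Terradue credentials
vsi_url = get_vsi_url('https://catalog.terradue.com/sentinel2/search?uid=...',
                      'username', 'api_key')
ds = gdal.Open(vsi_url)  # GDAL reads the remote file via the /vsicurl/ driver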
Example #3
def get_metadata(input_references, data_path):

    ciop = cioppy.Cioppy()

    if isinstance(input_references, str):

        search_params = dict()

        search_params['do'] = 'terradue'

        products = gp.GeoDataFrame(
            ciop.search(
                end_point=input_references,
                params=search_params,
                output_fields=
                'identifier,self,wkt,startdate,enddate,enclosure,orbitDirection,track,orbitNumber',
                model='EOP'))

    else:

        temp_results = []

        for reference in input_references:

            search_params = dict()

            search_params['do'] = 'terradue'

            temp_results.append(
                ciop.search(
                    end_point=reference,
                    params=search_params,
                    output_fields=
                    'identifier,self,wkt,startdate,enddate,enclosure,orbitDirection,track,orbitNumber',
                    model='EOP')[0])

        products = gp.GeoDataFrame(temp_results)

        products = products.merge(products.apply(
            lambda row: analyse(row, data_path), axis=1),
                                  left_index=True,
                                  right_index=True)

    return products
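get_metadata accepts either a single reference string or an iterable of references; a small sketch with placeholder values (note that only the iterable branch merges in the analyse() columns):

# Hypothetical references and staging directory
products = get_metadata(['https://catalog.terradue.com/sentinel1/search?uid=...'],
                        '/tmp/data')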
Example #4
def get_pipeline_results(pipeline_parameters, search_params):

    ciop = cioppy.Cioppy()

    if 'cat' not in search_params:
        # add cat out key
        search_params['cat'] = 'out'

    creds = '{}:{}'.format(pipeline_parameters['username'],
                           pipeline_parameters['api_key'])

    search = gpd.GeoDataFrame(ciop.search(end_point=pipeline_parameters['end_point'],
                                          params=search_params,
                                          output_fields='link:results',
                                          model='GeoTime',
                                          creds=creds))

    fields = 'title,identifier,self,enclosure,cat,cc,wkt,updated,startdate,vs:"tileid"'
    search_result_params = []

    temp_dfs = []

    for index, row in search.iterrows():

        end_point = row['link:results']

        temp_dfs.append(pd.DataFrame.from_dict(ciop.search(end_point=end_point,
                                                           params=search_result_params,
                                                           output_fields=fields,
                                                           model='EOP',
                                                           creds=creds)))

    # DataFrame.append is deprecated in recent pandas; concatenate the
    # per-result frames instead
    df = pd.concat(temp_dfs, ignore_index=True)

    df = df.merge(df.apply(lambda row: analyse_row(row), axis=1),
                  left_index=True,
                  right_index=True)

    return df
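A sketch of calling get_pipeline_results; the endpoint, credentials, and extra search parameter below are placeholders:

# Hypothetical pipeline parameters
pipeline_parameters = {'username': 'username',
                       'api_key': 'api_key',
                       'end_point': 'https://catalog.terradue.com/...'}

results = get_pipeline_results(pipeline_parameters, {'count': '20'})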
Example #5
def get_product_metadata(input_references, username, api_key):

    temp_searches = []

    for reference in input_references:

        search_temp = gp.GeoDataFrame(cioppy.Cioppy().search(
            end_point=reference,
            params=[],
            output_fields=
            'self,track,enclosure,identifier,wkt,startdate,enddate,platform,cc',
            model='EOP',
            creds='{}:{}'.format(username, api_key)))

        temp_searches.append(search_temp)

    search = gp.GeoDataFrame(pd.concat(temp_searches, ignore_index=True))

    search['geometry'] = search['wkt'].apply(loads)
    search['cc'] = pd.to_numeric(search['cc'])
    search['startdate'] = pd.to_datetime(search['startdate'])
    search['enddate'] = pd.to_datetime(search['enddate'])

    return search
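A usage sketch with placeholder references; the returned GeoDataFrame exposes typed columns that are convenient for filtering:

# Placeholder references and credentials
search = get_product_metadata(['https://catalog.terradue.com/sentinel2/search?uid=...'],
                              'username', 'api_key')
low_cloud = search[search['cc'] < 10]  # e.g. keep acquisitions under 10% cloud cover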
Example #6
import lxml.etree as etree
import subprocess
import tempfile
import time
#import psutil
import os
import sys
sys.path.append('/opt/anaconda/envs/env_ewf_satcen_03_01_01/snap/.snap/snap-python')

import snappy 
from snappy import GPF
import logging
import cioppy 
ciop = cioppy.Cioppy() 
logging.basicConfig(stream=sys.stderr, 
                    level=logging.INFO,
                    format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%Y-%m-%dT%H:%M:%S')

from pygments import highlight
from pygments.lexers import XmlLexer
from pygments.formatters import HtmlFormatter
import IPython
from IPython.display import HTML

def display_xml_nice(xml):
    formatter = HtmlFormatter()
    IPython.display.display(HTML(
        '<style type="text/css">{}</style>    {}'.format(
            formatter.get_style_defs('.highlight'),
            highlight(xml, XmlLexer(), formatter))))


def run_command(command, **kwargs):
    # The original snippet is truncated here; a minimal, assumed body that
    # runs the command and fails on a non-zero exit code.
    process = subprocess.Popen(command,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               **kwargs)
    stdout, stderr = process.communicate()
    if process.returncode != 0:
        raise RuntimeError(stderr.decode(errors='ignore'))
    return stdout.decode(errors='ignore')
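A usage sketch for the helper; the gpt path and graph file below are illustrative:

# Hypothetical invocation: run a SNAP GPT processing graph
run_command(['/opt/snap/bin/gpt', 'graph.xml'])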
Example #7
def main(input_reference, data_path):

    os.environ['OTB_MAX_RAM_HINT'] = '4096'

    ciop = cioppy.Cioppy()
    temp_results = []

    search_params = dict()

    for index, entry in enumerate(input_reference['value'].split(',')):

        temp_results.append(
            ciop.search(
                end_point=entry,
                params=search_params,
                output_fields=
                'identifier,self,wkt,startdate,enddate,enclosure,orbitDirection,cc',
                model='EOP')[0])

    sentinel2_search = GeoDataFrame(temp_results)

    sentinel2_search['startdate'] = pd.to_datetime(
        sentinel2_search['startdate'])
    sentinel2_search['enddate'] = pd.to_datetime(sentinel2_search['enddate'])
    sentinel2_search['wkt'] = sentinel2_search['wkt'].apply(loads)

    sentinel2_search = sentinel2_search.merge(sentinel2_search.apply(
        lambda row: analyse(row, data_path['value']), axis=1),
                                              left_index=True,
                                              right_index=True)

    composites = []

    bands = ['B12', 'B8A', 'B04']

    for index, row in sentinel2_search.iterrows():

        # cloud mask
        logging.info('Cloud mask 20%')
        mask_prb = get_mask_prob(row)

        output_name = '{}_CLOUD_MASK_20.tif'.format(row['identifier'])

        cloud_mask(mask_prb, 20, output_name)

        cog(output_name)

        metadata(output_name, 'Cloud mask 20% {}'.format(row['identifier']),
                 row)

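        # Stack the B12/B8A/B04 band paths into a single 10 m, 3-band VRT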
        vrt_bands = []

        for j, band in enumerate(bands):

            vrt_bands.append(get_band_path(row, band))

        vrt = '{0}.vrt'.format(row['identifier'])
        ds = gdal.BuildVRT(vrt,
                           vrt_bands,
                           srcNodata=0,
                           xRes=10,
                           yRes=10,
                           separate=True)

        ds.FlushCache()

        tif = '{}_ACTIVE_FIRE_UInt16.tif'.format(row['identifier'])

        logging.info('Convert {} to UInt16'.format(row['identifier']))

        metadata(tif, 'RGB UInt16 Composite {}'.format(row['identifier']), row)

        gdal.Translate(tif, vrt, outputType=gdal.GDT_UInt16)

        cog(tif)

        tif = '{0}.tif'.format(row['identifier'])

        logging.info('Convert {} to byte'.format(row['identifier']))

        gdal.Translate(tif,
                       vrt,
                       outputType=gdal.GDT_Byte,
                       scaleParams=[[0, 10000, 0, 255]])

        tif_e = '{}_ACTIVE_FIRE.tif'.format(row['identifier'])

        contrast_enhancement(tif, tif_e)

        composites.append(tif_e)
        os.remove(tif)
        os.remove(vrt)

        cog(tif_e)

        metadata(tif_e, 'RGB Composite {}'.format(row['identifier']), row)

        vrt = '{0}.vrt'.format(row['identifier'])
        ds = gdal.BuildVRT(vrt, [get_band_path(row, 'SCL')], separate=True)
        ds.FlushCache()

        scl_tif = '{0}_SCL.tif'.format(row['identifier'])

        metadata(scl_tif, 'Scene Classification {}'.format(row['identifier']),
                 row)

        gdal.Translate(scl_tif,
                       vrt,
                       xRes=10,
                       yRes=10,
                       outputType=gdal.GDT_Byte,
                       resampleAlg=gdal.GRA_Mode)

        cog(scl_tif)

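    # Second pass: build a single-band B12 stack for hot-spot detection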
    bands = ['B12']

    for index, row in sentinel2_search.iterrows():

        vrt_bands = []

        for j, band in enumerate(bands):

            vrt_bands.append(get_band_path(row, band))

        vrt = '{0}.vrt'.format(row['identifier'])
        ds = gdal.BuildVRT(vrt,
                           vrt_bands,
                           srcNodata=0,
                           xRes=10,
                           yRes=10,
                           separate=True)
        ds.FlushCache()

        tif = '{0}.tif'.format(row['identifier'])

        gdal.Translate(tif, vrt, outputType=gdal.GDT_UInt16)

        hot_spot_name = '{}_HOT_SPOT.tif'.format(row['identifier'])
        metadata(hot_spot_name, 'Hot spot {}'.format(row['identifier']), row)

        # Use this row's scene classification rather than the scl_tif left
        # over from the previous loop
        scl_tif = '{0}_SCL.tif'.format(row['identifier'])

        logging.info('Hot spot detection for {}'.format(row['identifier']))
        hot_spot(tif, scl_tif, hot_spot_name)

        cog(hot_spot_name)

        logging.info('Vectorize detected hot spots in {}'.format(
            row['identifier']))

        results_gdf = polygonize(hot_spot_name, row['startdate'],
                                 row['identifier'])

        results_gdf.to_file('{}_HOT_SPOT_VECTOR.geojson'.format(
            row['identifier']),
                            driver='GeoJSON')

        metadata('{}_HOT_SPOT_VECTOR.geojson'.format(row['identifier']),
                 'Hot spot vector {}'.format(row['identifier']), row)

        os.remove(tif)
        os.remove(vrt)
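A sketch of invoking main with the application-style parameter dictionaries it expects (all values are placeholders):

# Hypothetical runtime parameters; 'value' holds a comma-separated reference list
input_reference = {'value': 'https://catalog.terradue.com/sentinel2/search?uid=...'}
data_path = {'value': '/tmp/data'}

main(input_reference, data_path)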