Example #1
# Assumed context for this snippet: Python 2 style `urlparse` plus the
# project's own helpers (config.cache_path, check_creationtime, download_file).
import logging
import os
import urlparse

LOGGER = logging.getLogger(__name__)

def download(url, cache=False):
    """
    Downloads the given URL to the current directory (via the download_file
    helper, assumed to use the Python requests module).
    :param cache: if True, the file is stored in a cache directory instead.
    """
    filename = None  # returned as None if the download fails
    try:
        if cache:
            parsed_url = urlparse.urlparse(url)
            filename = os.path.join(config.cache_path(), parsed_url.netloc,
                                    parsed_url.path.strip('/'))
            if os.path.exists(filename):
                LOGGER.info('file already in cache: %s',
                            os.path.basename(filename))
                if check_creationtime(filename, url):
                    LOGGER.info(
                        'file in cache older than archive file, downloading: %s ',
                        os.path.basename(filename))
                    os.remove(filename)
                    filename = download_file(url, out=filename)
            else:
                if not os.path.exists(os.path.dirname(filename)):
                    os.makedirs(os.path.dirname(filename))
                LOGGER.info('downloading: %s', url)
                filename = download_file(url, out=filename)
                # make a softlink to the current dir:
                # os.symlink(filename, os.path.basename(filename))
                # filename = os.path.basename(filename)
        else:
            filename = download_file(url)
    except Exception:
        LOGGER.exception('failed to download data')
    return filename
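
A minimal usage sketch (the URL is a placeholder and the cache location comes from config.cache_path(), as above):

# first call fetches into the cache; later calls reuse the cached copy
nc_file = download('https://example.org/data/tas_day.nc', cache=True)
if nc_file is not None:
    print('stored at: %s' % nc_file)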
Example #2
# Assumed context for this snippet: Python 2 style `urlparse`, the third-party
# `wget` package, and the project helpers config.cache_path and check_creationtime.
import logging
import os
import urlparse
import wget

logger = logging.getLogger(__name__)

def download(url, cache=False):
    """
    Downloads the given URL to the current directory using the Python wget module.
    :param cache: if True, the file is stored in a cache directory instead.
    """
    filename = None  # returned as None if the download fails
    try:
        if cache:
            parsed_url = urlparse.urlparse(url)
            filename = os.path.join(config.cache_path(), parsed_url.netloc, parsed_url.path.strip("/"))
            if os.path.exists(filename):
                logger.info("file already in cache: %s", os.path.basename(filename))
                if check_creationtime(filename, url):
                    logger.info("file in cache older than archive file, downloading: %s ", os.path.basename(filename))
                    os.remove(filename)
                    filename = wget.download(url, out=filename, bar=None)
            else:
                if not os.path.exists(os.path.dirname(filename)):
                    os.makedirs(os.path.dirname(filename))
                logger.info("downloading: %s", url)
                filename = wget.download(url, out=filename, bar=None)
            # make softlink to current dir
            # os.symlink(filename, os.path.basename(filename))
            # filename = os.path.basename(filename)
        else:
            filename = wget.download(url, bar=None)
    except Exception:
        logger.exception("failed to download data")
    return filename
Example #3
    def execute(self):
        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        from os import path

        from flyingpigeon import config
        from flyingpigeon.subset import masking
        from flyingpigeon.utils import searchfile, archive, archiveextract

        resources = archiveextract(self.getInputValues(identifier='resource'))
        masks = archiveextract(self.getInputValues(identifier='mask'))
        land_area = self.land_area.getValue()

        fp_cache = config.cache_path().split('/')
        base_dir = '/'.join(fp_cache[0:-1])  # base dir for all birds

        logger.debug('base dir of directory tree: %s' % base_dir)

        ncs = []
        sftlf = []
        for nc in resources:
            try:
                basename = path.basename(nc)
                bs = basename.split('_')
                pattern = 'sftlf_' + '_'.join(bs[1:-2]) + '_fx.nc'
                # any experiment may provide the mask, so wildcard the experiment name
                for experiment in ('historical', 'rcp85', 'rcp65', 'rcp45', 'rcp26'):
                    pattern = pattern.replace(experiment, '*')
                logger.debug('searching for %s' % pattern)
                sftlf.extend(searchfile(pattern, path.curdir))
                sftlf.extend(searchfile(pattern, base_dir))
                logger.debug('length of sftlf: %s' % len(sftlf))
                if len(sftlf) >= 1:
                    if len(sftlf) > 1:
                        logger.warning(
                            'more than one sftlf file matches the pattern; taking the first one: %s'
                            % sftlf[0])
                    prefix = 'masked%s' % basename.replace('.nc', '')
                    nc_mask = masking(nc, sftlf[0], land_area=land_area, prefix=prefix)
                    ncs.extend([nc_mask])
                    logger.info('masking processed for %s' % basename)
                else:
                    logger.warning('no mask found. Please run "Download Resources" first '
                                   'to make sure the land_area file is in the cache')
            except Exception:
                logger.exception('failed to mask file: %s' % nc)
        nc_archive = archive(ncs)

        self.output_archive.setValue(nc_archive)
        i = next((i for i, x in enumerate(ncs) if x), None)
        if i is not None:
            self.output_example.setValue(ncs[i])
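
The sftlf pattern construction in this example is easiest to follow on a concrete filename; the CMIP-style name below is a made-up illustration, not taken from the original:

# hypothetical input following the <var>_<domain>_<model>_<experiment>_<ensemble>_<freq>_<dates>.nc scheme
basename = 'tas_EUR-11_MPI-ESM-LR_historical_r1i1p1_day_19700101-19991231.nc'
bs = basename.split('_')
pattern = 'sftlf_' + '_'.join(bs[1:-2]) + '_fx.nc'
for experiment in ('historical', 'rcp85', 'rcp65', 'rcp45', 'rcp26'):
    pattern = pattern.replace(experiment, '*')
print(pattern)  # -> sftlf_EUR-11_MPI-ESM-LR_*_r1i1p1_fx.nc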
Example #4
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        colorscheems = [inpt.data for inpt in request.inputs['colorscheems']]

        # The input string is ordered xmin,xmax,ymin,ymax; reorder it into
        # xmin ymin xmax ymax as used below.
        bbox = []  # order xmin ymin xmax ymax
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.warning(
                'period ends before period starts; period now set to the last 30 days from now'
            )

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type": "Polygon",
            "coordinates": [[[bbox[0], bbox[1]], [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]], [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]
        }

        footprint = geojson_to_wkt(geom)

        response.update_status("start searching tiles acording query", 15)

        products = api.query(
            footprint,
            date=(start, end),
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, cloud_cover),
            # producttype='SLC',
            # orbitdirection='ASCENDING',
        )

        LOGGER.debug('%s products found' % len(products))
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        resources = []

        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])
                file_zip = join(DIR_EO, '%s.zip' % (ID))
                DIR_tile = join(DIR_EO, '%s' % (filename))
                response.update_status("fetch file %s" % ID, 20)
                LOGGER.debug('path: %s' % DIR_tile)
                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        # the leading '***' is literal decoration in the status message
                        response.update_status(
                            "***%s successfully fetched" % ID, 20)
                        LOGGER.debug('Tile {} fetched'.format(ID))
                    except Exception as ex:
                        msg = 'failed to fetch file {}: {}'.format(
                            filename, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                if exists(DIR_tile):
                    LOGGER.debug('file %s already unzipped' % filename)
                else:
                    try:
                        with zipfile.ZipFile(file_zip, 'r') as zip_ref:
                            zip_ref.extractall(DIR_EO)
                        LOGGER.debug('Tile %s unzipped' % ID)
                    except Exception as ex:
                        msg = 'failed to extract {}: {}'.format(
                            file_zip, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                resources.append(DIR_tile)

            except Exception as ex:
                msg = 'failed to fetch {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        response.update_status("Plotting RGB graphics", 40)
        size = float(products[key]['size'].split(' ')[0])
        producttype = products[key]['producttype']
        beginposition = str(products[key]['beginposition'])


        imgs = []
        colorscheem = colorscheems[0]
        try:
            for resource in resources:
                LOGGER.debug('plot RGB image')
                img = eodata.plot_RGB(resource, colorscheem=colorscheem)
                LOGGER.debug('IMG plotted: {}'.format(img))
                imgs.append(img)
            LOGGER.debug('resources plotted')
        except Exception as ex:
            msg = 'failed to plot RGB graph: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is not None:
            response.outputs['output_plot'].file = imgs[i]
        else:
            response.outputs['output_plot'].file = "dummy.png"

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
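
Both Sentinel handlers follow the same sentinelsat pattern: build a WKT footprint, query the hub, then download and unzip per product key. A condensed, standalone sketch (credentials and bbox are placeholders, not from the original):

from sentinelsat import SentinelAPI, geojson_to_wkt

api = SentinelAPI('user', 'password')   # placeholder credentials
bbox = [14.60, 8.67, 14.79, 8.90]       # hypothetical xmin, ymin, xmax, ymax
geom = {"type": "Polygon",
        "coordinates": [[[bbox[0], bbox[1]], [bbox[2], bbox[1]],
                         [bbox[2], bbox[3]], [bbox[0], bbox[3]],
                         [bbox[0], bbox[1]]]]}
footprint = geojson_to_wkt(geom)
products = api.query(footprint, platformname='Sentinel-2',
                     cloudcoverpercentage=(0, 30))
for key in products:
    api.download(key, directory_path='.')  # one zip archive per product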
Example #5
def fetch_eodata(item_type,
                 asset,
                 token,
                 bbox,
                 period=None,
                 cloud_cover=0.5,
                 cache=True):
    """
    search for given EO data product provided by planet.
    The search and appropriate download is limited by bbox and search period

    :param item_type: product provided by planet
    :param asset: product asset, (visible, analytic, bands)
    :param token: Authentification token generated by planet Earth Obersavation Explorer
    :param bbox: latitude longitude coordinates defining a bounding box
    :param period: [start , end] datetime objects (default last 30 days)
    :param cloud_cover: threshold for cloud_cover tolerance. 0 = 0percent cloud_cover 1=100percent cloud_cover
    :param cache: if True file (default) is stored in local cache

    return list: list of pathes for fetched products
    """

    import os
    import json
    import requests
    from requests.auth import HTTPBasicAuth
    from tempfile import mkstemp
    import shutil
    import time
    from os.path import join
    from os import path, makedirs
    from flyingpigeon.config import cache_path
    #  Expected bbox order: xmin ymin xmax ymax
    #  (i.e. min_lon, min_lat, max_lon, max_lat)
    geojson_geometry = {
        "type":
        "Polygon",
        "coordinates": [[
            [bbox[0], bbox[1]],  # [14.600830078125, 8.677421123289992],
            [bbox[2], bbox[1]],  # [14.797210693359375, 8.677421123289992],
            [bbox[2], bbox[3]],  # [14.797210693359375, 8.90678000752024],
            [bbox[0], bbox[3]],  # [14.600830078125, 8.90678000752024],
            [bbox[0], bbox[1]],  # [14.600830078125, 8.677421123289992]
        ]]
    }

    LOGGER.debug("geojson_geometry: %s" % geojson_geometry)
    # get images that overlap with our AOI
    geometry_filter = {
        "type": "GeometryFilter",
        "field_name": "geometry",
        "config": geojson_geometry
    }

    if period is None:
        # default evaluated at call time: the last 30 days
        period = [dt.today() - timedelta(days=30), dt.today()]
    start = period[0]
    end = period[1]

    LOGGER.debug("Period %s to %s " % (start, end))

    # get images acquired within a date range
    date_range_filter = {
        "type": "DateRangeFilter",
        "field_name": "acquired",
        "config": {
            "gte": "%s000Z" % (start.strftime('%Y-%m-%dT%H:%M:%S.')),
            "lte": "%s000Z" % (end.strftime('%Y-%m-%dT%H:%M:%S.')),
        }
    }

    # only get images below the requested cloud-cover threshold
    cloud_cover_filter = {
        "type": "RangeFilter",
        "field_name": "cloud_cover",
        "config": {
            "lte": cloud_cover
        }
    }

    # combine our geo, date, cloud filters
    combined_filter = {
        "type": "AndFilter",
        "config": [geometry_filter, date_range_filter, cloud_cover_filter]
    }

    # API key
    PLANET_API_KEY = token  # alternatively: os.getenv('PL_API_KEY')

    # API request object (example item_type: "PSScene4Band")

    search_request = {
        "interval": "day",
        "item_types": [item_type],
        "filter": combined_filter
    }

    if cache:
        DIR_archiv = cache_path()
    else:
        DIR_archiv = '.'
    DIR = join(DIR_archiv, "EO_data", item_type, asset)

    if not os.path.exists(DIR):
        makedirs(DIR)

    # fire off the POST request
    search_result = requests.post(
        'https://api.planet.com/data/v1/quick-search',
        auth=HTTPBasicAuth(PLANET_API_KEY, ''),
        json=search_request)

    # LOGGER.info('Search result: %s ' % json.dumps(search_result.json(), indent=1))

    # extract image IDs only
    image_ids = [feature['id'] for feature in search_result.json()['features']]
    LOGGER.info("image IDs:  %s " % image_ids)

    resources = []
    resources_sleeping = []

    for image_id in image_ids:

        id0 = image_id
        if "xml" in asset:
            filename = "%s.xml" % id0
        else:
            filename = "%s.tif" % id0

        local_file = join(DIR, filename)

        if os.path.exists(local_file):
            LOGGER.info('File %s in cache' % filename)
            resources.extend([local_file])
        else:
            id0_url = 'https://api.planet.com/data/v1/item-types/{}/items/{}/assets'.format(
                item_type, id0)

            # Returns JSON metadata for assets in this ID. Learn more: planet.com/docs/reference/data-api/items-assets/#asset
            result = requests.get(id0_url,
                                  auth=HTTPBasicAuth(PLANET_API_KEY, ''))
            # List of asset types available for this particular satellite image
            keys = result.json().keys()
            LOGGER.debug("assets in file %s : %s " % (filename, keys))
            # This is "inactive" if the "visual" asset has not yet been activated; otherwise 'active'
            #  if 'analytic' in result.json().keys():
            if asset in keys:
                LOGGER.debug("downloading file %s" % filename)
                # LOGGER.debug(result.json()[asset]['status'])
                # Parse out useful links
                links = result.json()[asset]["_links"]  # u"analytic"
                self_link = links["_self"]
                activation_link = links["activate"]
                # Request activation of the 'visual' asset:
                activate_result = requests.get(activation_link,
                                               auth=HTTPBasicAuth(
                                                   PLANET_API_KEY, ''))
                # Parse out useful links
                links = result.json()[asset]["_links"]  # u"analytic"
                self_link = links["_self"]
                activation_link = links["activate"]
                # Request activation of the 'visual' asset:
                activate_result = requests.get(activation_link,
                                               auth=HTTPBasicAuth(
                                                   PLANET_API_KEY, ''))
                activation_status_result = requests.get(self_link,
                                                        auth=HTTPBasicAuth(
                                                            PLANET_API_KEY,
                                                            ''))

                try:
                    timeout = time.time() + 30  # give the activation 30 seconds
                    status = activation_status_result.json()["status"]
                    while status != 'active':
                        if time.time() > timeout and status == 'inactive':
                            LOGGER.debug(
                                "File %s is still inactive after 30 sec. Giving up"
                                % filename)
                            resources_sleeping.extend([filename])
                            break
                        else:
                            LOGGER.debug('File %s is sleeping, gently waking up' % filename)
                            LOGGER.debug(status)
                            time.sleep(30)
                            activation_status_result = requests.get(
                                self_link, auth=HTTPBasicAuth(PLANET_API_KEY, ''))
                            status = activation_status_result.json()["status"]

                    if time.time() < timeout or status == 'active':
                        LOGGER.debug('File ready to download: %s' % status)
                        # the image can be downloaded with a GET using the Planet API key
                        download_link = activation_status_result.json()["location"]
                        r = requests.get(download_link, stream=True, verify=False)
                        with open(local_file, 'wb') as fp:
                            shutil.copyfileobj(r.raw, fp)
                        resources.extend([local_file])
                except Exception:
                    LOGGER.exception("failed to download file %s " % filename)
            else:
                LOGGER.debug(
                    'Asset not found in keys, most likely no permissions for this data set: %s'
                    % filename)

    return resources_sleeping, resources
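
A hedged usage sketch for fetch_eodata (the token and bbox are placeholders; 'PSScene4Band'/'analytic' are Planet Data API names mentioned in the comments above):

from datetime import datetime as dt, timedelta

token = 'PLAK...'                   # placeholder Planet API key
bbox = [14.60, 8.67, 14.79, 8.90]   # xmin, ymin, xmax, ymax
sleeping, fetched = fetch_eodata('PSScene4Band', 'analytic', token, bbox,
                                 period=[dt.today() - timedelta(days=10), dt.today()],
                                 cloud_cover=0.3)
print('%s files fetched, %s still activating' % (len(fetched), len(sleeping)))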
Example #6
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # products = [inpt.data for inpt in request.inputs['indices']]

        indice = request.inputs['indices'][0].data

        # The input string is ordered xmin,xmax,ymin,ymax; reorder it into
        # xmin ymin xmax ymax as used below.
        bbox = []  # order xmin ymin xmax ymax
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.warning('period ends before period starts; period now set to the last 30 days from now')

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type": "Polygon",
            "coordinates": [[[bbox[0], bbox[1]],
                             [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]],
                             [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]}

        footprint = geojson_to_wkt(geom)

        response.update_status('start searching tiles according to query', 15)

        products = api.query(footprint,
                             date=(start, end),
                             platformname='Sentinel-2',
                             cloudcoverpercentage=(0, cloud_cover),
                             # producttype='SLC',
                             # orbitdirection='ASCENDING',
                             )

        LOGGER.debug('{} products found'.format(len(products.keys())))
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        # api.download_all(products)
        resources = []

        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])

                file_zip = join(DIR_EO, '{}.zip'.format(ID))
                DIR_tile = join(DIR_EO, str(filename))
                response.update_status('fetch file {}'.format(ID), 20)
                LOGGER.debug('path: {}'.format(DIR_tile))

                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        # the leading '***' is literal decoration in the status message
                        response.update_status("***%s successfully fetched" % ID, 20)
                        LOGGER.debug('Tile {} fetched'.format(ID))
                    except Exception as ex:
                        msg = 'failed to fetch file {}: {}'.format(filename, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                if exists(DIR_tile):
                    LOGGER.debug('file {} already unzipped'.format(filename))
                else:
                    try:
                        with zipfile.ZipFile(file_zip, 'r') as zip_ref:
                            zip_ref.extractall(DIR_EO)
                        LOGGER.debug('Tile {} unzipped'.format(ID))
                    except Exception as ex:
                        msg = 'failed to extract {}: {}'.format(file_zip, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                resources.append(DIR_tile)
            except Exception as ex:
                msg = 'failed to fetch {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        # TODO: Find a place for these variables or remove them
        size = float(products[key]['size'].split(' ')[0])
        producttype = products[key]['producttype']
        beginposition = str(products[key]['beginposition'])

        imgs = []
        tiles = []
        for resource in resources:
            try:
                response.update_status('Calculating {} index'.format(indice), 40)
                if indice == 'NDVI':
                    LOGGER.debug('Calculate NDVI for {}'.format(resource))
                    tile = eodata.get_ndvi(resource)
                    LOGGER.debug('resources NDVI calculated')
                elif indice == 'BAI':
                    LOGGER.debug('Calculate BAI for {}'.format(resource))
                    tile = eodata.get_bai(resource)
                    LOGGER.debug('resources BAI calculated')
                else:
                    raise ValueError('unknown index: {}'.format(indice))
                tiles.append(tile)
            except Exception as ex:
                msg = 'failed to calculate index for {}: {}'.format(resource, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        for tile in tiles:
            try:
                LOGGER.debug('Plot tile {}'.format(tile))
                img = eodata.plot_band(tile, file_extension='PNG', colorscheem=indice)
                imgs.append(img)
            except Exception as ex:
                msg = 'Failed to plot tile {}: {}'.format(tile, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is not None:
            response.outputs['output_plot'].file = imgs[i]
        else:
            response.outputs['output_plot'].file = "dummy.png"

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
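
For reference, the NDVI computed by eodata.get_ndvi is the normalised difference of near-infrared and red reflectance; a minimal numpy sketch (the band arrays are hypothetical, the helper's internals are not shown in the original):

import numpy as np

def ndvi(nir, red):
    """NDVI = (NIR - Red) / (NIR + Red), values in [-1, 1]."""
    nir = nir.astype('float32')
    red = red.astype('float32')
    return (nir - red) / np.maximum(nir + red, 1e-6)  # guard against division by zero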
Example #7
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        products = [inpt.data for inpt in request.inputs['products']]

        # The input string is ordered xmin,xmax,ymin,ymax; reorder it into
        # xmin ymin xmax ymax as used below.
        bbox = []  # order xmin ymin xmax ymax
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.warning(
                'period ends before period starts; period now set to the last 30 days from now'
            )

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type": "Polygon",
            "coordinates": [[[bbox[0], bbox[1]], [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]], [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]
        }

        footprint = geojson_to_wkt(geom)

        response.update_status("start searching tiles acording query", 15)

        products = api.query(
            footprint,
            date=(start, end),
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, cloud_cover),
            # producttype='SLC',
            # orbitdirection='ASCENDING',
        )

        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')

        if not exists(DIR_EO):
            makedirs(DIR_EO)

        # api.download_all(products)
        _, filepathes = mkstemp(dir='.', suffix='.txt')
        try:
            with open(filepathes, 'w') as fp:
                fp.write('############################################\n')
                fp.write('###     Following files are fetched      ###\n')
                fp.write('############################################\n')
                fp.write('\n')
                for key in products.keys():
                    try:

                        filename = products[key]['filename']
                        form = products[key]['format']
                        response.update_status("fetch file %s" % filename, 20)
                        ID = str(products[key]['identifier'])
                        file_zip = join(DIR_EO, '%s.zip' % (ID))
                        DIR_tile = join(DIR_EO, '%s' % (filename))

                        if exists(file_zip):
                            LOGGER.debug('file %s.zip already fetched' % ID)
                        else:
                            try:
                                api.download(key, directory_path=DIR_EO)
                                response.update_status(
                                    "***%s successfully fetched" % ID, 20)
                                LOGGER.debug('Tile %s fetched' % ID)
                            except Exception:
                                LOGGER.exception('failed to fetch file %s' %
                                                 filename)
                        if exists(DIR_tile):
                            LOGGER.debug('file %s already unzipped' % filename)
                        else:
                            try:
                                with zipfile.ZipFile(file_zip, 'r') as zip_ref:
                                    zip_ref.extractall(DIR_EO)
                                LOGGER.debug('Tile %s unzipped' % ID)
                            except Exception:
                                LOGGER.exception('failed to extract %s' %
                                                 file_zip)
                    except Exception:
                        LOGGER.exception('failed to fetch %s' % key)

                    response.update_status("write out information about files",
                                           80)
                    size = float(products[key]['size'].split(' ')[0])
                    producttype = products[key]['producttype']
                    beginposition = str(products[key]['beginposition'])
                    fp.write('%s \t %s \t %s \t %s \t %s \n' %
                             (ID, size, producttype, beginposition, key))
            response.outputs['output_txt'].file = filepathes
        except Exception:
            LOGGER.exception('failed to fetch resource')
        # response.outputs['output'].file = filepathes
        try:
            # pad the bbox by 5 degrees to give the map some margin
            extend = [
                float(bboxStr[0]) - 5,
                float(bboxStr[1]) + 5,
                float(bboxStr[2]) - 5,
                float(bboxStr[3]) + 5
            ]
            img = eodata.plot_products(products, extend=extend)
            response.outputs['output_plot'].file = img
            LOGGER.debug('location of tiles plotted to map')
        except Exception:
            LOGGER.exception("Failed to plot extents of EO data")

        response.update_status("done", 100)
        return response