Example #1
    def _handler(self, request, response):
        from eggshell.log import init_process_logger
        from flyingpigeon.utils import rename_complexinputs
        from flyingpigeon.datafetch import write_fileinfo

        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        resource = rename_complexinputs(request.inputs['resource'])

        response.outputs['output'].file = write_fileinfo(resource,
                                                         filepath=True)

        # filepaths = 'out.txt'
        # with open(filepaths, 'w') as fp:
        #     fp.write('###############################################\n')
        #     fp.write('###############################################\n')
        #     fp.write('Following files are stored to your local discs: \n')
        #     fp.write('\n')
        #     for f in resources:
        #         fp.write('%s \n' % os.path.realpath(f))

        # response.outputs['output'].file = filepaths
        response.update_status("done", 100)

        return response
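All of these snippets are `_handler` methods of PyWPS 4 `Process` subclasses: inputs arrive as lists under `request.inputs`, results are attached to `response.outputs`, and progress is reported via `response.update_status(message, percent)`. For orientation, a minimal self-contained process in the same style (a hypothetical `Echo` process, not part of flyingpigeon):

from pywps import Process, LiteralInput, LiteralOutput


class Echo(Process):
    def __init__(self):
        super(Echo, self).__init__(
            self._handler,
            identifier='echo',
            title='Echo the input text',
            inputs=[LiteralInput('text', 'Text', data_type='string')],
            outputs=[LiteralOutput('output', 'Output', data_type='string')])

    def _handler(self, request, response):
        response.update_status('started', 10)
        # inputs always arrive as lists; .data holds the decoded value
        response.outputs['output'].data = request.inputs['text'][0].data
        response.update_status('done', 100)
        return response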
Example #2
    def _handler(self, request, response):
        import uuid
        import time
        import json
        outputpath = configuration.get_config_value('server', 'outputpath')
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # -------------- #
        # Input handling #
        # -------------- #
        resource = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info("resource: %s " % resource)

        dest = archiveextract(
            resource=rename_complexinputs(request.inputs['dest']))
        LOGGER.info("dest: %s " % dest)

        method = request.inputs['method'][0].data
        LOGGER.info("method: %s " % method)

        snippet = request.inputs['snippet'][0].data
        LOGGER.info("snippet: %s " % snippet)

        # -------------------- #
        # Regridding operation #
        # -------------------- #
        d = ocgis.RequestDataset(dest)
        m = getattr(ESMF.RegridMethod, method.upper())
        LOGGER.info('Start ocgis module call function')

        # Prepare the environment
        ocgis.env.OVERWRITE = True
        prefix = str(uuid.uuid1())
        ocgis.env.PREFIX = prefix

        outputs = []
        for source in resource:
            s = ocgis.RequestDataset(source)
            ops = ocgis.OcgOperations(dataset=s,
                                      regrid_destination=d,
                                      regrid_options={'regrid_method': m},
                                      snippet=snippet,
                                      dir_output=outputpath,
                                      output_format='nc',
                                      prefix=prefix)
            outputs.append(ops.execute())

        response.outputs['output_netcdf'].file = outputs[0]

        time_str = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        output_json = "esmf_regrid_results_{}.json".format(time_str)
        with open(output_json, 'w') as f:
            f.write(json.dumps([actual_output_path(o) for o in outputs]))

        response.outputs['output'].file = output_json
        response.outputs['output'].output_format = json_format
        return response
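The `getattr(ESMF.RegridMethod, method.upper())` call above maps the user-supplied method string onto an ESMPy enum constant. A dependency-free sketch of the same dispatch pattern (the attribute names mirror common ESMPy regrid methods and are assumptions here):

class RegridMethodSketch:
    # stand-ins for the ESMPy constants
    BILINEAR = 0
    PATCH = 1
    CONSERVE = 2
    NEAREST_STOD = 3


method = 'bilinear'
m = getattr(RegridMethodSketch, method.upper())  # -> 0 (BILINEAR)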
Example #3
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        response.update_status('Start process', 0)
        from flyingpigeon import sdm

        try:
            LOGGER.info('reading the arguments')
            taxon_name = request.inputs['taxon_name'][0].data
            bbox = [-180, -90, 180, 90]
            # bbox_obj = self.BBox.getValue()
            # bbox = [bbox_obj.coords[0][0],
            #         bbox_obj.coords[0][1],
            #         bbox_obj.coords[1][0],
            #         bbox_obj.coords[1][1]]
            LOGGER.info("bbox={}".format(bbox))
            LOGGER.info("Taxon Name={}".format(taxon_name))
        except Exception as ex:
            msg = 'failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('Fetching GBIF Data', 10)
            gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)
        except Exception as ex:
            msg = 'failed to search gbif: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('write csv file', 70)
            gbifcsv = sdm.gbifdic2csv(gbifdic)
        except Exception as ex:
            msg = 'failed to write csv file: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('plot map', 80)
            from flyingpigeon.visualisation import map_gbifoccurrences
            latlon = sdm.latlon_gbifdic(gbifdic)
            occurence_map = map_gbifoccurrences(latlon)
        except Exception as ex:
            msg = 'failed to plot occurrence map: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_map'].file = occurence_map
        response.outputs['output_csv'].file = gbifcsv
        response.update_status('done', 100)
        return response
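The handlers in this section all repeat the same try/log/re-raise boilerplate for each step. A sketch of how that pattern could be factored into a reusable context manager (hypothetical helper, not part of flyingpigeon):

from contextlib import contextmanager


@contextmanager
def step(logger, description):
    """Run a block; on failure, log the exception with context and re-raise."""
    try:
        yield
    except Exception as ex:
        msg = '{}: {}'.format(description, ex)
        logger.exception(msg)
        raise Exception(msg)

# usage, equivalent to one of the try/except blocks above:
# with step(LOGGER, 'failed to search gbif'):
#     gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)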
Example #4
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            indices = [inpt.data for inpt in request.inputs['indices']]
            LOGGER.debug("indices = {}".format(indices))
            archive_format = request.inputs['archive_format'][0].data
        except Exception as ex:
            msg = 'failed to read the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)
        LOGGER.info('indices {}'.format(indices))

        #################################
        # calculate the climate indices
        #################################

        # indices calculation
        try:
            response.update_status('calculation of indices', 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except Exception as ex:
            msg = 'indice calculation failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # archive multiple output files to one archive file
        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices 3D added to tarfile')
        except Exception as ex:
            msg = 'failed adding indices to tar: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_indices'].file = archive_indices

        # index of the first non-empty result (None if all are empty)
        i = next((i for i, x in enumerate(ncs_indices) if x), None)
        if i is not None:
            response.outputs['ncout'].file = ncs_indices[i]

        response.update_status('done', 100)
        return response
Example #5
    def _handler(self, request, response):

        tic = dt.now()
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at : {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            resource = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))[0]
            fmts = [e.data for e in request.inputs['fmt']]
            title = request.inputs['title'][0].data

        except Exception as ex:
            msg = 'Failed to read input parameter {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        try:
            fig = map_spatial_analog(resource, title=title)
            output = []

            for fmt in fmts:
                output.append(fig2plot(fig, fmt))

        except Exception as ex:
            msg = "Failed to create figure: {}".format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        finally:
            plt.close()

        if len(fmts) == 1:
            output = output[0]
        else:
            output = archive(output)

        response.outputs['output_figure'].file = output
        response.update_status("done", 100)
        return response
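`fig2plot` is used here to serialize a matplotlib figure once per requested format. A plausible stand-in showing what such a helper does (assumption: the real flyingpigeon helper saves the figure to a temp file and returns the path):

import tempfile


def fig2plot_sketch(fig, fmt='png'):
    """Save a matplotlib figure in the requested format; return the path."""
    _, path = tempfile.mkstemp(suffix='.' + fmt)
    fig.savefig(path, format=fmt)
    return path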
Example #6
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncfiles = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        if 'variable' in request.inputs:
            var = request.inputs['variable'][0].data
        else:
            var = get_variable(ncfiles[0])
            #  var = ncfiles[0].split("_")[0]

        response.update_status('plotting variable {}'.format(var), 10)

        try:
            plotout_spagetti_file = vs.spaghetti(
                ncfiles,
                variable=var,
                title='Field mean of {}'.format(var),
            )
            LOGGER.info("spagetti plot done")
            response.update_status(
                'Spagetti plot for %s %s files done' % (len(ncfiles), var), 50)
            response.outputs['plotout_spagetti'].file = plotout_spagetti_file
        except Exception as e:
            raise Exception("spagetti plot failed : {}".format(e))

        try:
            plotout_uncertainty_file = vs.uncertainty(
                ncfiles,
                variable=var,
                title='Ensemble uncertainty for {}'.format(var),
            )

            response.update_status(
                'Uncertainty plot for {} {} files done'.format(
                    len(ncfiles), var), 90)
            response.outputs[
                'plotout_uncertainty'].file = plotout_uncertainty_file
            LOGGER.info("uncertainty plot done")
        except Exception as err:
            raise Exception("uncertainty plot failed: {}".format(err))

        response.update_status('visualisation done', 100)
        return response
Example #7
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ######################################
        # Process inputs
        ######################################

        try:
            resources = self._resource_input_handler(request)
            options = self._option_input_handler(request)
            extras = self._extra_input_handler(request)

        except Exception as ex:
            msg = 'Failed to read input parameter {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        ######################################
        # Call ocgis function
        ######################################
        # Mapping for multivariate functions
        if getattr(self, 'has_required_variables', None):
            extras.update({k: k for k in resources.keys()})

        output = run_op(resource=resources,
                        calc=[{
                            'func': self.identifier,
                            'name': self.identifier,
                            'kwds': extras
                        }],
                        options=options)

        response.outputs['output_netcdf'].file = output

        response.update_status('Execution completed', 100)

        return response
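`run_op` wraps ocgis, and the `calc` argument follows ocgis's computation interface: each dict names a function, the output variable name, and its keyword arguments. A stand-alone sketch of the same interface (file name and variable are hypothetical):

import ocgis

rd = ocgis.RequestDataset('tas.nc', variable='tas')  # hypothetical file
ops = ocgis.OcgOperations(
    dataset=rd,
    calc=[{'func': 'mean', 'name': 'monthly_mean', 'kwds': {}}],
    calc_grouping=['month'],  # temporal grouping the function is applied over
    output_format='nc')
path = ops.execute()  # path of the written netCDF file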
Example #8
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            period = request.inputs['period']
            period = period[0].data
            indices = [inpt.data for inpt in request.inputs['indices']]
            archive_format = request.inputs['archive_format'][0].data
            LOGGER.info(
                'all arguments read; {} files in resources'.format(
                    len(resources)))
        except Exception as ex:
            msg = 'failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            gbif_url = request.inputs['gbif'][0].data
            csv_file = download(gbif_url)
            LOGGER.info('CSV file fetched successfully: %s' % csv_file)
        except Exception as ex:
            msg = 'failed to fetch GBIF file: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('read in latlon coordinates', 10)
            latlon = sdm.latlon_gbifcsv(csv_file)
            LOGGER.info('got occurrence coordinates from %s' % csv_file)
        except Exception as ex:
            msg = 'failed to extract the latlon points from file {}: {}'.format(
                csv_file, str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('plot map', 20)
            occurence_map = map_gbifoccurrences(latlon)
            LOGGER.info('GBIF occurrence plotted')
        except Exception as ex:
            msg = 'failed to plot occurence map: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        try:
            response.update_status('start calculation of indices', 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except Exception as ex:
            msg = 'failed to calculate indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            # sort indices
            indices_dic = sdm.sort_indices(ncs_indices)
            LOGGER.info('indice files sorted in dictionary')
        except Exception as ex:
            msg = 'failed to sort indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        response.update_status('Start processing for {} datasets'.format(
            len(indices_dic.keys())))
        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                response.update_status('Start processing of {}'.format(key),
                                       status_nr)

                ncs = indices_dic[key]
                LOGGER.info('with {} files'.format(len(ncs)))

                try:
                    response.update_status('generating the PA mask', 20)
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    LOGGER.info('PA mask successfully generated')
                except Exception as ex:
                    msg = 'failed to generate the PA mask: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    response.update_status('Plotting PA mask', 25)
                    PAmask_pngs.append(map_PAmask(PAmask))
                except Exception as ex:
                    msg = 'failed to plot the PA mask: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    LOGGER.info('reference indice calculated {}'.format(
                        ncs_references))
                except Exception as ex:
                    msg = 'failed to calculate the reference: {}'.format(
                        str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    response.update_status('GAM successfully trained',
                                           status_nr + 5)
                except Exception as ex:
                    msg = 'failed to train GAM for {}: {}'.format(key, str(ex))
                    LOGGER.debug(msg)
                    raise Exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    response.update_status('prediction done', status_nr + 7)
                except Exception as ex:
                    msg = 'failed to predict tree occurrence: {}'.format(
                        str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)
                #
                # try:
                #     response.update_status('land sea mask for predicted data',  status_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     LOGGER.debug('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    LOGGER.info('Favourability written to file')
                except Exception as ex:
                    msg = 'failed to write species file: {}'.format(str(ex))
                    LOGGER.debug(msg)
                    raise Exception(msg)

            except Exception as ex:
                msg = 'failed to process SDM chain for {} : {}'.format(
                    key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices added to archive')
        except Exception as ex:
            msg = 'failed adding indices to archive: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            archive_references = archive(ncs_references, format=archive_format)
            LOGGER.info('indices reference added to archive')
        except Exception as ex:
            msg = 'failed adding reference indices to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            archive_prediction = archive(species_files, format=archive_format)
            LOGGER.info('species_files added to archive')
        except Exception as ex:
            msg = 'failed adding species_files indices to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            stat_infosconcat = pdfmerge(stat_infos)
            LOGGER.debug('pngs {}'.format(PAmask_pngs))
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            LOGGER.info('stat infos pdfs and mask pngs merged')
        except Exception as ex:
            msg = 'failed to concat images: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_gbif'].file = occurence_map
        response.outputs['output_PA'].file = PAmask_png
        response.outputs['output_indices'].file = archive_indices
        response.outputs['output_reference'].file = archive_references
        response.outputs['output_prediction'].file = archive_prediction
        response.outputs['output_info'].file = stat_infosconcat

        response.update_status('done', 100)
        return response
Example #9
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        colorscheems = [inpt.data for inpt in request.inputs['colorscheems']]

        # reorder the comma-separated BBox string (xmin,xmax,ymin,ymax)
        # into bbox = [xmin, ymin, xmax, ymax] as used by the polygon below
        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.exception(
                'period ends before period starts; period now set to the last 30 days from now'
            )

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type":
            "Polygon",
            "coordinates": [[[bbox[0], bbox[1]], [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]], [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]
        }

        footprint = geojson_to_wkt(geom)

        response.update_status("start searching tiles acording query", 15)

        products = api.query(
            footprint,
            date=(start, end),
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, cloud_cover),
            # producttype='SLC',
            # orbitdirection='ASCENDING',
        )

        LOGGER.debug('%s products found' % len(products.keys()))
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        resources = []

        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])
                file_zip = join(DIR_EO, '%s.zip' % (ID))
                DIR_tile = join(DIR_EO, '%s' % (filename))
                response.update_status("fetch file %s" % ID, 20)
                LOGGER.debug('path: %s' % DIR_tile)
                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        response.update_status(
                            '{} successfully fetched'.format(ID), 20)
                        LOGGER.debug('Tile {} fetched'.format(ID))
                    except Exception as ex:
                        msg = 'failed to fetch file {}: {}'.format(
                            filename, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                if exists(DIR_tile):
                    LOGGER.debug('file %s already unzipped' % filename)
                else:
                    try:
                        # zipfile = join(DIR_EO, '%szip' % (filename)).strip(form)
                        zip_ref = zipfile.ZipFile(file_zip, 'r')
                        zip_ref.extractall(DIR_EO)
                        zip_ref.close()
                        LOGGER.debug('Tile %s unzipped' % ID)
                    except Exception as ex:
                        msg = 'failed to extract {}: {}'.format(
                            file_zip, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                resources.append(DIR_tile)

            except Exception as ex:
                msg = 'failed to fetch {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        response.update_status("Plotting RGB graphics", 40)
        # metadata of the last fetched product (currently unused)
        size = float(products[key]['size'].split(' ')[0])
        producttype = products[key]['producttype']
        beginposition = str(products[key]['beginposition'])

        # fp.write('%s \t %s \t %s \t %s \t %s \n' % (ID, size, producttype, beginposition, key))
        # response.outputs['output_txt'].file = filepathes
        # except:
        #     LOGGER.exception('failed to fetch resource')
        # response.outputs['output'].file = filepathes

        # try:
        #     extend = [float(bboxStr[0])-5, float(bboxStr[1])+5, float(bboxStr[2])-5, float(bboxStr[3])+5]
        #     img = eodata.plot_products(products, extend=extend)
        #     response.outputs['output_plot'].file = img
        #     LOGGER.debug('location of tiles plotted to map')
        # except:
        #     LOGGER.exception("Failed to plot extents of EO data")

        imgs = []
        colorscheem = colorscheems[0]
        try:
            for resource in resources:
                # LOGGER.debug('Scale and merge RGB bands')
                # tile = eodata.get_RGB(resource)
                LOGGER.debug('plot RGB image')
                img = eodata.plot_RGB(resource, colorscheem=colorscheem)
                LOGGER.debug('IMG plotted: {}'.format(img))
                imgs.append(img)
            LOGGER.debug('resources plotted')
        except Exception as ex:
            msg = 'failed to plot RGB graph: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        # index of the first non-empty image (None if all are empty)
        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is None:
            response.outputs['output_plot'].file = 'dummy.png'
        else:
            response.outputs['output_plot'].file = imgs[i]

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
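The bbox reordering and GeoJSON polygon construction above recur in several of these handlers. An equivalent helper, sketched under the assumption that `bbox` is ordered `[xmin, ymin, xmax, ymax]` as in the code:

def bbox_to_polygon(bbox):
    """Build a closed GeoJSON polygon ring from [xmin, ymin, xmax, ymax]."""
    xmin, ymin, xmax, ymax = bbox
    ring = [[xmin, ymin], [xmax, ymin], [xmax, ymax],
            [xmin, ymax], [xmin, ymin]]
    return {"type": "Polygon", "coordinates": [ring]}

# footprint = geojson_to_wkt(bbox_to_polygon(bbox))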
Example #10
    def _handler(self, request, response):

        ocgis.env.DIR_OUTPUT = tempfile.mkdtemp(dir=os.getcwd())
        ocgis.env.OVERWRITE = True
        tic = dt.now()
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at : {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            candidate = archiveextract(
                resource=rename_complexinputs(request.inputs['candidate']))
            target = archiveextract(
                resource=rename_complexinputs(request.inputs['target']))
            location = request.inputs['location'][0].data
            indices = [el.data for el in request.inputs['indices']]
            dist = request.inputs['dist'][0].data
            dateStartCandidate = request.inputs['dateStartCandidate'][0].data
            dateEndCandidate = request.inputs['dateEndCandidate'][0].data
            dateStartTarget = request.inputs['dateStartTarget'][0].data
            dateEndTarget = request.inputs['dateEndTarget'][0].data

        except Exception as ex:
            msg = 'Failed to read input parameter {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        ######################################
        # Process inputs
        ######################################

        try:
            point = Point(*map(float, location.split(',')))
            dateStartCandidate = dt.strptime(dateStartCandidate, '%Y-%m-%d')
            dateEndCandidate = dt.strptime(dateEndCandidate, '%Y-%m-%d')
            dateStartTarget = dt.strptime(dateStartTarget, '%Y-%m-%d')
            dateEndTarget = dt.strptime(dateEndTarget, '%Y-%m-%d')

        except Exception as ex:
            msg = 'failed to process inputs {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        LOGGER.debug("init took {}".format(dt.now() - tic))
        response.update_status('Processed input parameters', 3)

        ######################################
        # Extract target time series
        ######################################
        savetarget = False
        try:
            # Using `call` creates a netCDF file in the tmp directory.
            #
            # Here we keep this stuff in memory
            if savetarget:
                prefix = 'target_ts'
                target_ts = call(resource=target,
                                 geom=point,
                                 variable=indices,
                                 time_range=[dateStartTarget, dateEndTarget],
                                 select_nearest=True,
                                 prefix=prefix)

                # target_ts = [get_values(prefix+'.nc', ind) for ind in indices]

            else:
                trd = RequestDataset(
                    target,
                    variable=indices,
                    time_range=[dateStartTarget, dateEndTarget])

                op = OcgOperations(trd,
                                   geom=point,
                                   select_nearest=True,
                                   search_radius_mult=1.75)
                out = op.execute()
                target_ts = out.get_element()

        except Exception as ex:
            msg = 'Target extraction failed {}'.format(ex)
            LOGGER.debug(msg)
            raise Exception(msg)

        response.update_status('Extracted target series', 5)

        ######################################
        # Compute dissimilarity metric
        ######################################

        response.update_status('Computing spatial analog', 6)
        try:
            output = call(
                resource=candidate,
                calc=[{
                    'func': 'dissimilarity',
                    'name': 'spatial_analog',
                    'kwds': {
                        'dist': dist,
                        'target': target_ts,
                        'candidate': indices
                    }
                }],
                time_range=[dateStartCandidate, dateEndCandidate],
            )

        except Exception as ex:
            msg = 'Spatial analog failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        add_metadata(output,
                     dist=dist,
                     indices=",".join(indices),
                     target_location=location,
                     candidate_time_range="{},{}".format(
                         dateStartCandidate, dateEndCandidate),
                     target_time_range="{},{}".format(dateStartTarget,
                                                      dateEndTarget))

        response.update_status('Computed spatial analog', 95)

        response.outputs['output_netcdf'].file = output

        response.update_status('Execution completed', 100)
        LOGGER.debug("Total execution took {}".format(dt.now() - tic))
        return response
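The `location` input above is turned into a shapely `Point` with `Point(*map(float, location.split(',')))`. A minimal illustration (the 'lon,lat' ordering is an assumption):

from shapely.geometry import Point

location = '4.35,50.85'  # hypothetical 'lon,lat' input string
point = Point(*map(float, location.split(',')))
print(point.x, point.y)  # 4.35 50.85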
Example #11
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        var = get_variable(ncs[0])
        LOGGER.info('variable to be plotted: {}'.format(var))

        # mosaic = self.mosaic.getValue()
        if 'region' in request.inputs:
            regions = [inp.data for inp in request.inputs['region']]
            try:
                png_region = vs.plot_polygons(regions)
            except Exception as ex:
                msg = 'failed to plot the polygon to world map: {}'.format(
                    str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            # clip the demanded polygons
            subsets = clipping(
                resource=ncs,
                variable=var,
                polygons=regions,
                mosaic=True,
                spatial_wrapping='wrap',
            )
        else:
            subsets = ncs
            png_region = vs.plot_extend(ncs[0])

        response.update_status('Arguments set for subset process', 0)

        try:
            tar_subsets = archive(subsets)
        except Exception as ex:
            msg = 'failed to archive subsets: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            png_uncertainty = vs.uncertainty(subsets, variable=var)
        except Exception as ex:
            msg = 'failed to generate the uncertainty plot: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            png_spaghetti = vs.spaghetti(
                subsets,
                variable=var,
            )

        except Exception as ex:
            msg = 'failed to generate the spaghetti plot: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            from flyingpigeon import robustness as ro
            signal, low_agreement_mask, high_agreement_mask, text_src = ro.signal_noise_ratio(
                resource=subsets,
                # start=None, end=None,
                # timeslice=None,
                # variable=var
            )
            title = 'signal robustness of %s' % var
            png_robustness = vs.map_robustness(
                signal,
                high_agreement_mask,
                low_agreement_mask,
                # cmap=cmap,
                #    title=title
            )
            LOGGER.info('robustness graphic generated')
        except Exception as ex:
            msg = 'failed to generate the robustness plot: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        factsheet = vs.factsheetbrewer(png_region=png_region,
                                       png_uncertainty=png_uncertainty,
                                       png_spaghetti=png_spaghetti,
                                       png_robustness=png_robustness)

        response.outputs['output_nc'].file = tar_subsets
        response.outputs['output_factsheet'].file = factsheet
        response.update_status("done", 100)
        return response
Example #12
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # products = [inpt.data for inpt in request.inputs['indices']]

        indice = request.inputs['indices'][0].data

        # reorder the comma-separated BBox string (xmin,xmax,ymin,ymax)
        # into bbox = [xmin, ymin, xmax, ymax] as used by the polygon below
        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.exception('period ends before period starts; period now set to the last 30 days from now')

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type": "Polygon",
            "coordinates": [[[bbox[0], bbox[1]],
                             [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]],
                             [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]}

        footprint = geojson_to_wkt(geom)

        response.update_status('start searching tiles according to query', 15)

        products = api.query(footprint,
                             date=(start, end),
                             platformname='Sentinel-2',
                             cloudcoverpercentage=(0, cloud_cover),
                             # producttype='SLC',
                             # orbitdirection='ASCENDING',
                             )

        LOGGER.debug('{} products found'.format(len(products.keys())))
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        # api.download_all(products)
        # try:
        # with open(filepaths, 'w') as fp:
        #     fp.write('############################################\n')
        #     fp.write('###     Following files are fetched      ###\n')
        #     fp.write('############################################\n')
        #     fp.write('\n')

        resources = []

        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])

                file_zip = join(DIR_EO, '{}.zip'.format(ID))
                DIR_tile = join(DIR_EO, str(filename))
                response.update_status('fetch file {}'.format(ID), 20)
                LOGGER.debug('path: {}'.format(DIR_tile))

                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        response.update_status('{} successfully fetched'.format(ID), 20)
                        LOGGER.debug('Tile {} fetched'.format(ID))
                    except Exception as ex:
                        msg = 'failed to fetch file {}: {}'.format(filename, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                if exists(DIR_tile):
                    LOGGER.debug('file {} already unzipped'.format(filename))

                else:
                    try:
                        # zipfile = join(DIR_EO, '%szip' % (filename)).strip(form)
                        zip_ref = zipfile.ZipFile(file_zip, 'r')
                        zip_ref.extractall(DIR_EO)
                        zip_ref.close()
                        LOGGER.debug('Tile {} unzipped'.format(ID))
                    except Exception as ex:
                        msg = 'failed to extract {}: {}'.format(file_zip, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                resources.append(DIR_tile)
            except Exception as ex:
                msg = 'failed to fetch {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        # metadata of the last fetched product (currently unused)
        size = float(products[key]['size'].split(' ')[0])
        producttype = products[key]['producttype']
        beginposition = str(products[key]['beginposition'])

        imgs = []
        tiles = []
        for resource in resources:
            try:
                response.update_status('Calculating {} indices'.format(indice), 40)
                if indice == 'NDVI':
                    LOGGER.debug('Calculate NDVI for {}'.format(resource))
                    tile = eodata.get_ndvi(resource)
                    LOGGER.debug('NDVI calculated')
                elif indice == 'BAI':
                    LOGGER.debug('Calculate BAI for {}'.format(resource))
                    tile = eodata.get_bai(resource)
                    LOGGER.debug('BAI calculated')
                tiles.append(tile)
            except Exception as ex:
                msg = 'failed to calculate indice for {}: {}'.format(resource, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        for tile in tiles:
            try:
                LOGGER.debug('Plot tile {}'.format(tile))
                img = eodata.plot_band(tile, file_extension='PNG', colorscheem=indice)
                imgs.append(img)
            except Exception as ex:
                msg = 'Failed to plot tile {}: {}'.format(tile, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        # index of the first non-empty image (None if all are empty)
        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is None:
            response.outputs['output_plot'].file = 'dummy.png'
        else:
            response.outputs['output_plot'].file = imgs[i]

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
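`eodata.get_ndvi` presumably implements the standard NDVI definition, (NIR - Red) / (NIR + Red). A numpy sketch of that formula (the band arrays are hypothetical inputs, not flyingpigeon's actual data structures):

import numpy as np


def ndvi(red, nir):
    """Normalized difference vegetation index from red and NIR bands."""
    red = red.astype('float64')
    nir = nir.astype('float64')
    with np.errstate(divide='ignore', invalid='ignore'):
        return (nir - red) / (nir + red)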
Example #13
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        process_start_time = time.time()  # measure process execution time ...

        response.update_status(
            'execution started at: {}'.format(str(dt.now())), 5)

        LOGGER.debug('starting segetalflora process execution')
        response.update_status('starting calculation of segetalflora', 5)

        ############################
        # read arguments into variables
        ############################
        try:
            resource = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            climate_type = request.inputs['climate_type'][0].data
            culture_type = request.inputs['culture_type'][0].data

            LOGGER.info('urls for {} ncs found'.format(len(resource)))
            LOGGER.info('culture type: {}'.format(culture_type))
        except Exception as ex:
            msg = 'Failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if not isinstance(climate_type, list):
                climate_type = [climate_type]
            if not isinstance(culture_type, list):
                culture_type = [culture_type]
            LOGGER.info('arguments are lists')
        except Exception as ex:
            msg = 'Failed to transform arguments to lists: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        #############################
        # get yearly mean temperature
        #############################

        nc_tasmean = sf.get_yrmean(resource)

        #######################################
        # main call for segetalflora processing
        #######################################

        nc_sf = sf.get_segetalflora(resource=nc_tasmean,
                                    culture_type=culture_type,
                                    climate_type=climate_type)

        ####################
        # tar file archiving
        ####################

        try:
            response.update_status('preparing output', 99)
            LOGGER.debug('length of sf: {}'.format(len(nc_sf)))
            if len(nc_sf) == 1:
                # TODO: fix pywps output formats OR use separate output params.
                response.outputs['out_segetalflora'].file = nc_sf[0]
                response.outputs['out_segetalflora'].format = FORMATS.NETCDF
            else:
                response.outputs['out_segetalflora'].file = archive(
                    nc_sf, format='tar', dir_output='.', mode='w')
                response.outputs['out_segetalflora'].format = Format(
                    'application/x-tar')
            if len(nc_tasmean) == 1:
                response.outputs['out_tasmean'].file = nc_tasmean[0]
                response.outputs['out_tasmean'].format = FORMATS.NETCDF
            else:
                response.outputs['out_tasmean'].file = archive(nc_tasmean,
                                                               format='tar',
                                                               dir_output='.',
                                                               mode='w')
                response.outputs['out_tasmean'].format = Format(
                    'application/x-tar')
        except Exception as ex:
            msg = 'Failed to prepare output files: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.update_status('done', 100)
        LOGGER.debug(
            "total execution took {} seconds.".format(time.time() -
                                                      process_start_time))

        return response
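`archive()` from flyingpigeon.utils bundles a list of files into a single archive and returns its path. A hypothetical equivalent for the tar case:

import os
import tarfile
import tempfile


def archive_sketch(paths, suffix='.tar'):
    """Bundle files into a tar archive and return the archive path."""
    _, out = tempfile.mkstemp(suffix=suffix)
    with tarfile.open(out, 'w') as tar:
        for p in paths:
            tar.add(p, arcname=os.path.basename(p))
    return out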
Example #14
    def _handler(self, request, response):
        response.update_status("start fetch data", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        products = [inpt.data for inpt in request.inputs['products']]

        # reorder the comma-separated BBox string (xmin,xmax,ymin,ymax)
        # into bbox = [xmin, ymin, xmax, ymax]
        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.exception(
                "period ends before period starts; period now set to the last 30 days from now"
            )

        token = request.inputs['token'][0].data
        archive_format = request.inputs['archive_format'][0].data

        resources = []
        # resources_sleeping = []
        for product in products:
            item_type, asset = product.split('__')
            LOGGER.debug('item type: {} , asset: {}'.format(item_type, asset))
            fetch_sleep, tiles = fetch_eodata(item_type,
                                              asset,
                                              token,
                                              bbox,
                                              period=[start, end],
                                              cloud_cover=0.5,
                                              cache=True)
            resources.extend(tiles)
            # resources_sleeping.extend(fetch_sleep)

            dates = set()

            for tile in resources:
                dates = dates.union([eodata.get_timestamp(tile).date()])
            dl = list(dates)

            merged_tiles = []

            for date in dl:
                try:
                    LOGGER.debug('calculating date {}'.format(date))
                    tiles_day = [
                        tile for tile in tiles
                        if eodata.get_timestamp(tile).date() == date
                    ]
                    LOGGER.debug('{} files ready for merging'.format(
                        len(tiles_day)))
                    prefix = date.strftime("%Y%m%d")
                    mosaic = eodata.merge(tiles_day, prefix=prefix)
                    merged_tiles.extend([mosaic])
                except Exception as ex:
                    msg = 'merge failed for date {}: {}'.format(date, str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)
        try:
            output_archive = archive(merged_tiles, format=archive_format)
            LOGGER.info('geotiff files added to archive')
        except Exception as ex:
            msg = 'failed to add geotiff files to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # response.outputs['output'].file = write_fileinfo(resource, filepath=True)
        response.outputs['output_archive'].file = output_archive

        # index of the first non-empty mosaic (None if all are empty)
        i = next((i for i, x in enumerate(merged_tiles) if x), None)
        if i is None:
            LOGGER.exception('failed to select example file')
            response.outputs['output_png'].file = 'dummy.png'
        else:
            plot_example = eodata.plot_truecolorcomposite(merged_tiles[i])
            response.outputs['output_png'].file = plot_example
        response.outputs['output_png'].file = plot_example

        response.update_status("done", 100)

        return response
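The merge loop above groups tiles by acquisition day before mosaicking. The same grouping expressed with itertools.groupby (assuming, as in the code, that `eodata.get_timestamp` returns a datetime for a tile):

from itertools import groupby


def tiles_by_date(tiles, get_timestamp):
    """Yield (date, tiles_of_that_day) pairs, one per acquisition day."""
    bydate = lambda t: get_timestamp(t).date()
    for date, group in groupby(sorted(tiles, key=bydate), key=bydate):
        yield date, list(group)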
Example #15
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # input files
        LOGGER.debug("url={}, mime_type={}".format(
            request.inputs['resource'][0].url,
            request.inputs['resource'][0].data_format.mime_type))
        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        # mime_type=request.inputs['resource'][0].data_format.mime_type)
        # mosaic option
        # TODO: fix defaults in pywps 4.x
        if 'mosaic' in request.inputs:
            mosaic = request.inputs['mosaic'][0].data
        else:
            mosaic = False
        # regions used for subsetting
        regions = [inp.data for inp in request.inputs['region']]

        LOGGER.info('ncs = {}'.format(ncs))
        LOGGER.info('regions = {}'.format(regions))
        LOGGER.info('mosaic = {}'.format(mosaic))

        response.update_status("Arguments set for subset process", 0)
        LOGGER.debug('starting: regions=%s, num_files=%s', len(regions),
                     len(ncs))

        try:
            results = clipping(
                resource=ncs,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                # dir_output=os.path.abspath(os.curdir),
                # dimension_map=dimension_map,
            )
            LOGGER.info('results {}'.format(results))
        except Exception as ex:
            msg = 'clipping failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('No results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            LOGGER.info('Tar file prepared')
        except Exception as ex:
            msg = 'Tar file preparation failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output'].file = tarf

        # index of the first non-empty result (None if all are empty)
        i = next((i for i, x in enumerate(results) if x), None)
        if i is not None:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
Example #16
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        products = [inpt.data for inpt in request.inputs['products']]

        # reorder the comma-separated BBox string (xmin,xmax,ymin,ymax)
        # into bbox = [xmin, ymin, xmax, ymax]
        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.exception(
                "period ends before period starts; period now set to the last 30 days from now"
            )

        token = request.inputs['token'][0].data

        resources = []
        resources_sleeping = []
        for product in products:
            item_type, asset = product.split('__')
            LOGGER.debug('item type: {} , asset: {}'.format(item_type, asset))
            fetch_sleep, fetch = fetch_eodata(item_type,
                                              asset,
                                              token,
                                              bbox,
                                              period=[start, end],
                                              cloud_cover=0.5,
                                              cache=True)
            resources.extend(fetch)
            resources_sleeping.extend(fetch_sleep)

        _, filepaths = mkstemp(dir='.', suffix='.txt')
        try:
            with open(filepaths, 'w') as fp:
                fp.write(
                    '######################################################\n')
                fp.write(
                    '### The following files are stored on the compute provider ###\n'
                )
                fp.write(
                    '######################################################\n')
                fp.write('\n')
                for f in resources:
                    fp.write('%s \n' % os.path.realpath(f))
                fp.write('\n')
                fp.write('\n')
                fp.write(
                    '######################################################\n')
                fp.write(
                    '### The following files didn\'t want to wake up   ###\n')
                fp.write(
                    '######################################################\n')
                for f in resources_sleeping:
                    fp.write('%s \n' % f)
            response.outputs['output'].file = filepaths
        except Exception as ex:
            msg = 'failed to write resources to textfile: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)
        # response.outputs['output'].file = write_fileinfo(resource, filepath=True)
        response.update_status("done", 100)

        return response
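The bounding-box and period handling above recurs in Example #18 below. The sketch that follows factors it into two standalone helpers, assuming the WPS bounding-box string really does arrive as 'xmin,xmax,ymin,ymax' (which the index reordering above implies); both helper names are hypothetical.

from datetime import datetime, timedelta

def parse_bbox(bbox_str):
    """Reorder an 'xmin,xmax,ymin,ymax' string into [xmin, ymin, xmax, ymax]."""
    xmin, xmax, ymin, ymax = (float(v) for v in bbox_str.split(','))
    return [xmin, ymin, xmax, ymax]

def default_period(start=None, end=None, days=30):
    """Fill in missing bounds; reset an inverted period to the last `days` days."""
    end = end or datetime.now()
    start = start or end - timedelta(days=days)
    if start > end:
        end = datetime.now()
        start = end - timedelta(days=days)
    return start, end

# parse_bbox('5.0,15.0,45.0,55.0') -> [5.0, 45.0, 15.0, 55.0]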
Example #17
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            indices = [inpt.data for inpt in request.inputs['indices']]
            grouping = [inpt.data for inpt in request.inputs['grouping']]

            if 'mosaic' in request.inputs:
                mosaic = request.inputs['mosaic'][0].data
            else:
                mosaic = False

            if 'region' in request.inputs:
                region = [inpt.data for inpt in request.inputs['region']]
            else:
                region = None

            LOGGER.debug('grouping: {}'.format(grouping))
            LOGGER.debug('mosaic: {}'.format(mosaic))
            LOGGER.debug('indices: {}'.format(indices))
            LOGGER.debug('region: {}'.format(region))
            LOGGER.debug('Nr of input files: {}'.format(len(resources)))
        except Exception as ex:
            msg = 'failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.update_status(
            'starting: indices={}, grouping={}, num_files={}'.format(indices, grouping, len(resources)), 2)

        from flyingpigeon.utils import sort_by_filename
        datasets = sort_by_filename(resources, historical_concatination=True)
        results = []
        try:
            group = grouping[0]  # for group in grouping:
            indice = indices[0]  # for indice in indices:
            for key in datasets.keys():
                try:
                    response.update_status('Dataset {}: {}'.format(len(results) + 1, key), 10)

                    LOGGER.debug('grouping: {}'.format(grouping))
                    LOGGER.debug('mosaic: {}'.format(mosaic))
                    LOGGER.debug('indice: {}'.format(indice))
                    LOGGER.debug('region: {}'.format(region))
                    LOGGER.debug('Nr of input files: {}'.format(len(datasets[key])))

                    result = calc_indice_simple(
                        resource=datasets[key],
                        mosaic=mosaic,
                        indice=indice,
                        polygons=region,
                        grouping=group,
                        # dir_output=path.curdir,
                    )
                    LOGGER.debug('result: {}'.format(result))
                    results.extend(result)

                except Exception as ex:
                    msg = 'failed for {}: {}'.format(key, str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

        except Exception as ex:
            msg = 'Failed to calculate indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('failed to produce results')
        response.update_status('num results {}'.format(len(results)), 90)

        tarf = archive(results)

        response.outputs['output_archive'].file = tarf

        # pick the first non-empty result; fall back to a placeholder file
        i = next((i for i, x in enumerate(results) if x), None)
        if i is None:
            response.outputs['ncout'].file = 'dummy.nc'
        else:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
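sort_by_filename is imported from flyingpigeon.utils above, but its internals are not shown in this example. As a rough illustration only, a simplified stand-in might group NetCDF files that differ only in their trailing date range, roughly like this (the real function also handles historical concatenation and other cases):

import os
from collections import defaultdict

def group_by_basename(paths):
    """Group NetCDF files whose names differ only in the trailing
    date-range token, e.g. tas_..._19710101-19801231.nc."""
    groups = defaultdict(list)
    for p in paths:
        key = os.path.basename(p).rsplit('_', 1)[0]  # drop the date range
        groups[key].append(p)
    return dict(groups)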
Example #18
    def _handler(self, request, response):
        response.update_status("start fetch data", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        products = [inpt.data for inpt in request.inputs['products']]

        # The WPS bounding-box string arrives as 'xmin,xmax,ymin,ymax';
        # reorder it into [xmin, ymin, xmax, ymax].
        xmin, xmax, ymin, ymax = (float(v) for v in
                                  request.inputs['BBox'][0].data.split(','))
        bbox = [xmin, ymin, xmax, ymax]

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.warning(
                'period ends before it starts; period reset to the last 30 days'
            )

        token = request.inputs['token'][0].data
        archive_format = request.inputs['archive_format'][0].data

        resources = []
        ndvi_tiles = []  # initialised here so the archive step below cannot raise a NameError

        # resources_sleeping = []
        for product in products:
            if product == 'PlanetScope':
                item_type = 'PSScene4Band'
                assets = ['analytic', 'analytic_xml']
                for asset in assets:
                    LOGGER.debug('item type: {}, asset: {}'.format(
                        item_type, asset))
                    fetch_sleep, tiles = fetch_eodata(item_type,
                                                      asset,
                                                      token,
                                                      bbox,
                                                      period=[start, end],
                                                      cloud_cover=0.5,
                                                      cache=True)
                    resources.extend(tiles)

                    # TODO: include merge of NDVI
                    # for tile in tiles:
                    #     dates = dates.union([eodata.get_timestamp(tile).date()])
                    # dl = list(dates)
                    #
                    # for date in dl:
                    #     print("calculating date %s" % date)
                    #     tiles_day = [tile for tile in tiles if eodata.get_timestamp(tile).date() == date]
                    #     print(tiles_day)
                    #     merged = eodata.merge(tiles_day)
                    #     print(merged)

                    # resources_sleeping.extend(fetch_sleep)
                LOGGER.debug('{} tiles fetched'.format(len(resources)))
                response.update_status("calculating NDVI ", 30)
                try:
                    LOGGER.debug('Start calculating NDVI')
                    ndvi_tiles = eodata.ndvi(resources, product)
                    # ndvi_merged = eodata.merge(ndvi_tiles)
                except Exception as ex:
                    msg = 'failed to calculate NDVI: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)
        try:
            ndvi_archive = archive(ndvi_tiles, format=archive_format)
            LOGGER.info('geotiff files added to archive')
        except Exception as ex:
            msg = 'failed to add NDVI GeoTIFFs to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['ndvi_archive'].file = ndvi_archive

        i = next((i for i, x in enumerate(ndvi_tiles) if x), None)
        if i is None:
            response.outputs['ndviexample'].file = "dummy.png"
        else:
            LOGGER.debug('start plotting test files for quick check')
            # TODO plot_ndvi does not resolve. Critical error if called.
            ndvi_plot = eodata.plot_ndvi(ndvi_tiles[i])
            LOGGER.debug('NDVI test plot {}'.format(ndvi_plot))

            response.outputs['ndviexample'].file = ndvi_plot

        response.update_status("done", 100)

        return response
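eodata.ndvi hides the actual band math in the example above. For reference, a minimal sketch of the standard NDVI computation follows; mapping it onto the PSScene4Band analytic asset's band layout is an assumption here, and the real eodata.ndvi may differ.

import numpy as np

def ndvi(nir, red, eps=1e-12):
    """NDVI = (NIR - red) / (NIR + red), computed per pixel.
    `nir` and `red` are arrays of the same shape; `eps` avoids
    division by zero over nodata pixels."""
    nir = np.asarray(nir, dtype='float64')
    red = np.asarray(red, dtype='float64')
    return (nir - red) / (nir + red + eps)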
Example #19
    def _handler(self, request, response):
        response.update_status('starting uncertainty process', 0)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            ncfiles = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            # TODO: See where 'method' should be called
            method = request.inputs['method'][0].data

            if 'start' in request.inputs:
                start = request.inputs['start'][0].data
            else:
                start = None

            if 'end' in request.inputs:
                end = request.inputs['end'][0].data
            else:
                end = None

            if 'timeslice' in request.inputs:
                timeslice = request.inputs['timeslice'][0].data
            else:
                timeslice = None

            response.update_status('arguments read', 5)
            LOGGER.info('Successfully read in the arguments')
        except Exception as ex:
            msg = 'failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_text'].file = write_fileinfo(ncfiles)

        #  LOGGER.debug('variable set to %s' % variable)
        # if method == 'signal_noise_ratio':

        signal, low_agreement_mask, high_agreement_mask, text_src = erob.signal_noise_ratio(
            resource=ncfiles,
            start=start,
            end=end,
            timeslice=timeslice,
            # variable=variable
        )

        LOGGER.debug('Robustness calculated')

        try:
            # LOGGER.info('variable to be plotted: %s' % variable)
            from flyingpigeon.visualisation import map_robustness

            # if title is None:
            title = 'signal robustness'  # , end1, end2, start1, start2

            graphic = map_robustness(
                signal,
                high_agreement_mask,
                low_agreement_mask,
                # variable=variable,
                # cmap=cmap,
                title=title)

            LOGGER.info('graphic generated')
        except Exception as ex:
            LOGGER.exception('graphic generation failed: {}'.format(str(ex)))
            # fall back to an empty placeholder image so the output is still populated
            _, graphic = mkstemp(dir='.', suffix='.png')

        response.update_status('process worker done', 95)

        response.outputs['output_signal'].file = signal
        response.outputs['output_high'].file = high_agreement_mask
        response.outputs['output_low'].file = low_agreement_mask
        response.outputs['output_graphic'].file = graphic

        response.update_status('uncertainty process done', 100)
        return response
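erob.signal_noise_ratio returns a signal field plus low- and high-agreement masks, but its exact definition is not visible in this example. The sketch below shows one common way such an ensemble-robustness measure is computed over projected changes; the thresholds and the helper name are assumptions, not flyingpigeon's implementation.

import numpy as np

def signal_noise(ensemble, high_thresh=1.0, low_thresh=0.5):
    """`ensemble`: array of shape (members, lat, lon) holding projected changes.
    signal: ensemble-mean change; noise: inter-member standard deviation.
    The masks flag where |signal|/noise is high (robust) or low (uncertain);
    both thresholds are illustrative assumptions."""
    ensemble = np.asarray(ensemble, dtype='float64')
    signal = ensemble.mean(axis=0)
    noise = ensemble.std(axis=0)
    ratio = np.divide(np.abs(signal), noise,
                      out=np.zeros_like(signal), where=noise > 0)
    high_agreement_mask = ratio >= high_thresh
    low_agreement_mask = ratio < low_thresh
    return signal, high_agreement_mask, low_agreement_mask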