Example #1
    def _handler(self, request, response):
        import uuid
        import time
        import json
        outputpath = configuration.get_config_value('server', 'outputpath')
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # -------------- #
        # Input handling #
        # -------------- #
        resource = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info("resource: %s " % resource)

        dest = archiveextract(
            resource=rename_complexinputs(request.inputs['dest']))
        LOGGER.info("dest: %s " % dest)

        method = request.inputs['method'][0].data
        LOGGER.info("method: %s " % method)

        snippet = request.inputs['snippet'][0].data
        LOGGER.info("snippet: %s " % snippet)

        # -------------------- #
        # Regridding operation #
        # -------------------- #
        d = ocgis.RequestDataset(dest)
        m = getattr(ESMF.RegridMethod, method.upper())
        LOGGER.info('Start ocgis module call function')

        # Prepare the environment
        ocgis.env.OVERWRITE = True
        prefix = str(uuid.uuid1())
        ocgis.env.PREFIX = prefix
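        # NOTE: one prefix is shared by all loop iterations below; with
        # OVERWRITE enabled, regridding several sources may overwrite earlier
        # output files. A per-source prefix would avoid that.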

        outputs = []
        for source in resource:
            s = ocgis.RequestDataset(source)
            ops = ocgis.OcgOperations(dataset=s,
                                      regrid_destination=d,
                                      regrid_options={'regrid_method': m},
                                      snippet=snippet,
                                      dir_output=outputpath,
                                      output_format='nc',
                                      prefix=prefix)
            outputs.append(ops.execute())

        response.outputs['output_netcdf'].file = outputs[0]

        time_str = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        output_json = "esmf_regrid_results_{}.json".format(time_str)
        with open(output_json, 'w') as f:
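            # actual_output_path is assumed to be a helper defined elsewhere in
            # this module that resolves each ocgis result to its file path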
            f.write(json.dumps([actual_output_path(o) for o in outputs]))

        response.outputs['output'].file = output_json
        response.outputs['output'].output_format = json_format
        return response
Example #2
    def _handler(self, request, response):
        from rpy2 import robjects
        from rpy2.robjects.packages import importr

        import os
        import datetime as dt

        tic = dt.datetime.now()
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at : {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            obs = archiveextract(
                resource=rename_complexinputs(request.inputs['obs']))
            ref = archiveextract(
                resource=rename_complexinputs(request.inputs['ref']))
            fut = archiveextract(
                resource=rename_complexinputs(request.inputs['fut']))
            # dedrizzle = request.inputs['dedrizzle'][0].data
            # norm = request.inputs['norm'][0].data
        except Exception as e:
            msg = 'Failed to read input parameter: {}'.format(e)
            msg += ' obs: {}, ref: {}, fut: {}'.format(
                request.inputs.get('obs'), request.inputs.get('ref'),
                request.inputs.get('fut'))
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        rp, ext = os.path.splitext(ref[0])
        ref_out = rp + '_kddm-bc' + ext

        fp, ext = os.path.splitext(fut[0])
        fut_out = fp + '_kddm-bc' + ext

        # Assuming all files share the same variable.
        rd = ocgis.RequestDataset(ref)
        varname = rd.variable

        # Calling the R code
        Rsrc = os.path.join(flyingpigeon.config.Rsrc_dir(), 'bc.kddm.R')
        devtools = importr("devtools")
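        # devtools is loaded into the embedded R session, presumably as a
        # dependency of the sourced bc.kddm.R script; it is not used directly here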

        rfunc = robjects.r(open(Rsrc).read())
        rfunc(varname, obs[0], ref[0], fut[0], ref_out, fut_out, False)

        response.outputs['output_netcdf_ref'].file = ref_out
        response.outputs['output_netcdf_fut'].file = fut_out

        return response
Example #3
    def execute(self):
        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        from flyingpigeon.utils import searchfile
        from flyingpigeon.subset import masking
        from flyingpigeon.utils import archive, archiveextract

        from flyingpigeon import config
        from os import path

        resources = archiveextract(self.getInputValues(identifier='resource'))
        masks = archiveextract(self.getInputValues(identifier='mask'))
        land_area = self.land_area.getValue()

        fp_cache = config.cache_path().split('/')
        base_dir = '/'.join(fp_cache[0:-1])  # base dir for all birds

        logger.debug('base dir of directory tree: %s' % base_dir)

        ncs = []
        sftlf = []
        for nc in resources:
            try:
                basename = path.basename(nc)
                bs = basename.split('_')
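                # 'sftlf' is the CMIP name for the fixed land-area-fraction
                # field; keep the model/ensemble parts of the filename and
                # wildcard the experiment names below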
                pattern = 'sftlf_' + '_'.join(bs[1:-2]) + '_fx.nc'
                for experiment in ('historical', 'rcp85', 'rcp65',
                                   'rcp45', 'rcp26'):
                    pattern = pattern.replace(experiment, '*')
                logger.debug('searching for %s ' % pattern)
                sftlf.extend(searchfile(pattern, path.curdir))
                sftlf.extend(searchfile(pattern, base_dir))
                logger.debug('length of sftlf: %s' % len(sftlf))
                if len(sftlf) >= 1:
                    if len(sftlf) > 1:
                        logger.warn(
                            'more than one sftlf file matches the pattern; taking the first: %s'
                            % sftlf[0])
                    prefix = 'masked%s' % basename.replace('.nc', '')
                    nc_mask = masking(nc, sftlf[0], land_area=land_area, prefix=prefix)
                    ncs.extend([nc_mask])
                    logger.info('masking processed for %s' % basename)
                else:
                    logger.warn('no mask found. Please perform a "Download Resources" '
                                'to make sure the land_area file is in the cache')
            except Exception:
                logger.exception('failed to mask file: %s' % basename)
        nc_archive = archive(ncs)

        self.output_archive.setValue(nc_archive)
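        # pick the index of the first non-empty result as the example output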
        i = next((i for i, x in enumerate(ncs) if x), None)
        self.output_example.setValue(ncs[i])
Example #4
    def execute(self):
        self.status.set('starting uncertainty process', 0)

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')
        from flyingpigeon.utils import archiveextract

        ncfiles = archiveextract(self.getInputValues(identifier='resource'))
        start = self.start.getValue()
        end = self.end.getValue()
        timeslice = self.timeslice.getValue()
        variable = self.variableIn.getValue()
        method = self.method.getValue()

        self.status.set('arguments read', 5)

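        # 'Method_A' is assumed to be the only robustness method wired up here;
        # 'erob' presumably refers to flyingpigeon's ensemble-robustness module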
        if method == 'Method_A':
            signal, low_agreement_mask, high_agreement_mask, graphic, text_src = erob.method_A(
                resource=ncfiles,
                start=start,
                end=end,
                timeslice=timeslice,
                variable=variable)
        else:
            raise Exception('unknown method: {}'.format(method))

        self.status.set('process worker done', 95)

        self.output_signal.setValue(signal)
        self.output_high.setValue(high_agreement_mask)
        self.output_low.setValue(low_agreement_mask)
        self.output_graphic.setValue(graphic)
        self.output_text.setValue(text_src)

        self.status.set('uncertainty process done', 100)
Example #5
    def execute(self):
        from ast import literal_eval
        from flyingpigeon.utils import archive, archiveextract

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        ncs = archiveextract(self.getInputValues(identifier='resource'))
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()
        # variable = self.variable.getValue()

        # logger.info('regions: %s' % regions)
        # dimension_map = self.dimension_map.getValue()
        # if dimension_map != None:
        #     dimension_map = literal_eval(dimension_map)

        logger.info('ncs = %s', ncs)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)
        # logger.info('dimension_map = %s', dimension_map)

        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' %
                     (len(regions), len(ncs)))
        try:
            results = clipping(
                resource=ncs,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                dir_output=os.path.abspath(os.curdir),
                # dimension_map=dimension_map,
            )
            logger.info('results %s' % results)
        except Exception as e:
            msg = 'clipping failed'
            logger.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('no results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception as e:
            msg = 'Tar file preparation failed'
            logger.exception(msg)
            raise Exception(msg)

        self.output.setValue(tarf)

        i = next((i for i, x in enumerate(results) if x), None)
        self.output_netcdf.setValue(results[i])

        self.status.set('done', 100)
Example #6
    def _resource_input_handler(self, request):
        out = OrderedDict()

        for obj in self.resource_inputs:
            key = obj.identifier
            out[key] = archiveextract(
                resource=rename_complexinputs(request.inputs[key]))
        return out
Example #7
    def execute(self):
        from flyingpigeon.utils import archive, archiveextract
        from tempfile import mkstemp

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        ncs = archiveextract(self.getInputValues(identifier='resource'))
        # mosaic = self.mosaic.getValue()
        regions = self.region.getValue()

        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(ncs)))

        try:
            from flyingpigeon.visualisation import plot_polygons
            png_country = plot_polygons(regions)
        except Exception:
            logger.exception('failed to plot the polygons to a world map')
            # fall back to an empty placeholder so png_country is defined below
            _, png_country = mkstemp(dir='.', suffix='.png')

        # clip the demanded polygons
        from flyingpigeon.subset import clipping
        subsets = clipping(resource=ncs, variable=None,
                           dimension_map=None,
                           calc=None,
                           output_format='nc',
                           calc_grouping=None,
                           time_range=None,
                           time_region=None,
                           historical_concatination=True,
                           prefix=None,
                           spatial_wrapping='wrap',
                           polygons=regions,
                           mosaic=True
                           )
        try:
            from flyingpigeon.visualisation import uncertainty
            png_uncertainty = uncertainty(subsets)
        except Exception:
            logger.exception('failed to generate the uncertainty plot')
            _, png_uncertainty = mkstemp(dir='.', suffix='.png')

        try:
            from flyingpigeon.visualisation import spaghetti
            png_spaghetti = spaghetti(subsets)
        except Exception:
            logger.exception('failed to generate the spaghetti plot')
            _, png_spaghetti = mkstemp(dir='.', suffix='.png')

        from flyingpigeon.visualisation import factsheetbrewer
        factsheet = factsheetbrewer(png_country=png_country,
                                    png_uncertainty=png_uncertainty,
                                    png_spaghetti=png_spaghetti)

        self.output_factsheet.setValue(factsheet)
        self.status.set('done', 100)
Example #8
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            indices = [inpt.data for inpt in request.inputs['indices']]
            LOGGER.debug("indices = %s", indices)
            archive_format = request.inputs['archive_format'][0].data
        except Exception:
            msg = 'failed to read the arguments.'
            LOGGER.exception(msg)
            raise Exception(msg)
        LOGGER.info('indices %s ' % indices)

        #################################
        # calculate the climate indices
        #################################

        # indices calculation
        ncs_indices = None
        datasets = sort_by_filename(resources, historical_concatination=True)
        LOGGER.debug("datasets=%s", datasets.keys())

        for ds_name in datasets:
            try:
                response.update_status('calculation of {}'.format(ds_name), 30)
                # NOTE: ncs_indices is overwritten on each pass, so only the
                # last dataset's indices reach the archive step below
                ncs_indices = sdm.get_indices(resource=datasets[ds_name],
                                              indices=indices)
            except Exception:
                msg = 'indice calculation failed for {}'.format(ds_name)
                LOGGER.exception(msg)
                raise Exception(msg)

        # archive multiple output files to one archive file
        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices 3D added to tarfile')
        except Exception:
            msg = 'failed adding indices to tar'
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_indices'].file = archive_indices

        i = next((i for i, x in enumerate(ncs_indices) if x), None)
        response.outputs['ncout'].file = ncs_indices[i]

        response.update_status('done', 100)
        return response
Example #9
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info('ncs: {}'.format(ncs))

        coords = []
        for coord in request.inputs['coords']:
            coords.append(coord.data)

        LOGGER.info('coords {}'.format(coords))
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for {}'.format(key))
                ncs = nc_exp[key]
                times = get_time(ncs)  # , format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '{}.csv'.format(key)
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point: {}'.format(p), 20)
                        # define the point:
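                        # NOTE: each coordinate string is assumed to be ordered 'lon,lat'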
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',{}-{}'.format(p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point {} {}'.format(p, e))
                response.update_status('*** all points processed for {0} ****'.format(key), 50)

                # numpy.savetxt with fmt='%s' formats every value via str(),
                # so mixed date/number columns are written correctly
                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as ex:
                LOGGER.debug('failed for {}: {}'.format(key, str(ex)))

        # set the outputs
        response.update_status('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        response.outputs['tarout'].file = tarout_file
        return response
Example #10
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        datasets = []
        # append file urls
        if 'dataset' in request.inputs:
            datasets.extend(
                archiveextract(
                    resource=rename_complexinputs(request.inputs['dataset'])))
        # append opendap urls
        if 'dataset_opendap' in request.inputs:
            for dataset in request.inputs['dataset_opendap']:
                datasets.append(dataset.data)
        # land or sea flag
        land_area_flag = request.inputs['land_or_sea'][0].data == 'land'
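        # True keeps land cells, False keeps sea cells; presumably how the
        # masking() helper interprets the land_area flag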

        masked_datasets = []
        count = 0
        max_count = len(datasets)
        for ds in datasets:
            ds_name = os.path.basename(ds)
            LOGGER.info('masking dataset: {}'.format(ds_name))
            if 'mask' in request.inputs:
                landsea_mask = request.inputs['mask'][0].data
            else:
                landsea_mask = search_landsea_mask_by_esgf(ds)

            LOGGER.info("using landsea_mask: {}".format(landsea_mask))
            prefix = 'masked_{}'.format(ds_name.replace('.nc', ''))
            try:
                new_ds = masking(ds,
                                 landsea_mask,
                                 land_area=land_area_flag,
                                 prefix=prefix)
                masked_datasets.append(new_ds)

            except Exception as ex:
                msg = 'Could not mask dataset {}: {}'.format(
                    ds_name, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)
            count = count + 1
            response.update_status(
                "masked: {:d}/{:d}".format(count, max_count),
                int(100.0 * count / max_count))

        response.outputs['output_archive'].file = archive(masked_datasets)
        response.outputs['output_example'].file = masked_datasets[0]

        response.update_status("done", 100)
        return response
Example #11
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            indices = [inpt.data for inpt in request.inputs['indices']]
            LOGGER.debug("indices = {}".format(indices))
            archive_format = request.inputs['archive_format'][0].data
        except Exception as ex:
            msg = 'failed to read the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)
        LOGGER.info('indices {}'.format(indices))

        #################################
        # calculate the climate indices
        #################################

        # indices calculation
        try:
            response.update_status('calculation of indices', 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except Exception as ex:
            msg = 'indice calculation failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # archive multiple output files to one archive file
        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices 3D added to tarfile')
        except Exception as ex:
            msg = 'failed adding indices to tar: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_indices'].file = archive_indices

        i = next((i for i, x in enumerate(ncs_indices) if x), None)
        response.outputs['ncout'].file = ncs_indices[i]

        response.update_status('done', 100)
        return response
Example #12
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info("ncs: %s " % ncs)
        coords = [inpt.data for inpt in request.inputs['coords']]
        LOGGER.info("coords %s", coords)
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for %s ' % key)
                ncs = nc_exp[key]
                times = get_time(ncs, format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '%s.csv' % key
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point : {0}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',%s-%s' % (p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point %s %s' % (p, e))
                response.update_status('*** all points processed for {0} ****'.format(key), 50)
                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as e:
                LOGGER.debug('failed for %s %s' % (key, e))

        # set the outputs
        response.update_status('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        response.outputs['tarout'].file = tarout_file
        return response
Example #13
    def _handler(self, request, response):
        from tempfile import mkstemp

        tic = dt.now()
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at : {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            resource = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))[0]
            fmts = [e.data for e in request.inputs['fmt']]
            title = request.inputs['title'][0].data

        except Exception as e:
            msg = 'Failed to read input parameter {}'.format(e)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        try:
            fig = map_spatial_analog(resource, title=title)
            output = []

            for fmt in fmts:
                output.append(fig2plot(fig, fmt))

        except Exception as e:
            msg = "Failed to create figure: {}".format(e)
            LOGGER.error(msg)
            raise Exception(msg)

        finally:
            plt.close()

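        # a single requested format is returned directly; several formats are
        # bundled into one archive below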
        if len(fmts) == 1:
            output = output[0]
        else:
            output = archive(output)

        response.outputs['output_figure'].file = output
        response.update_status("done", 100)
        return response
Example #14
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncfiles = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        if 'variable' in request.inputs:
            var = request.inputs['variable'][0].data
        else:
            var = get_variable(ncfiles[0])
            #  var = ncfiles[0].split("_")[0]

        response.update_status('plotting variable {}'.format(var), 10)

        try:
            plotout_spagetti_file = vs.spaghetti(
                ncfiles,
                variable=var,
                title='Field mean of {}'.format(var),
            )
            LOGGER.info("spaghetti plot done")
            response.update_status(
                'Spaghetti plot for %s %s files done' % (len(ncfiles), var), 50)
            response.outputs['plotout_spagetti'].file = plotout_spagetti_file
        except Exception as e:
            raise Exception("spaghetti plot failed: {}".format(e))

        try:
            plotout_uncertainty_file = vs.uncertainty(
                ncfiles,
                variable=var,
                title='Ensemble uncertainty for {}'.format(var),
            )

            response.update_status(
                'Uncertainty plot for {} {} files done'.format(
                    len(ncfiles), var), 90)
            response.outputs[
                'plotout_uncertainty'].file = plotout_uncertainty_file
            LOGGER.info("uncertainty plot done")
        except Exception as err:
            raise Exception("uncertainty plot failed: {}".format(err))

        response.update_status('visualisation done', 100)
        return response
Example #15
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncfiles = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        if 'variableIn' in request.inputs:
            var = request.inputs['variableIn'][0].data
        else:
            from flyingpigeon.utils import get_variable
            var = get_variable(ncfiles[0])

        response.update_status('plotting variable %s' % var, 10)

        try:
            plotout_spagetti_file = vs.spaghetti(ncfiles,
                                                 variable=var,
                                                 title='Field mean of %s' % var,
                                                 dir_out=None)
            LOGGER.info("spaghetti plot done")
            response.update_status(
                'Spaghetti plot for %s %s files done' % (len(ncfiles), var), 50)
        except Exception:
            LOGGER.exception("spaghetti plot failed")
            raise

        try:
            plotout_uncertainty_file = vs.uncertainty(
                ncfiles,
                variable=var,
                title='Ensemble uncertainty for %s' % var,
                dir_out=None)

            response.update_status(
                'Uncertainty plot for %s %s files done' % (len(ncfiles), var),
                90)
            LOGGER.info("uncertainty plot done")
        except Exception:
            LOGGER.exception("uncertainty plot failed")
            raise

        response.outputs['plotout_spagetti'].file = plotout_spagetti_file
        response.outputs['plotout_uncertainty'].file = plotout_uncertainty_file
        response.update_status('visualisation done', 100)
        return response
Example #16
    def execute(self):

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')
        from flyingpigeon.utils import archiveextract

        ncfiles = archiveextract(self.getInputValues(identifier='resource'))
        var = self.variableIn.getValue()

        if var is None:
            from flyingpigeon.utils import get_variable
            var = get_variable(ncfiles[0])

        self.status.set('plotting variable %s' % var, 10)

        try:
            plotout_spagetti_file = vs.spaghetti(ncfiles,
                                                 variable=var,
                                                 title='Field mean of %s' % var,
                                                 dir_out=None)
            logger.info("spaghetti plot done")
            self.status.set('Spaghetti plot for %s %s files done' % (len(ncfiles), var), 50)
        except Exception:
            logger.exception("spaghetti plot failed")
            raise

        try:
            plotout_uncertainty_file = vs.uncertainty(ncfiles,
                                                      variable=var,
                                                      title='Ensemble uncertainty for %s' % var,
                                                      dir_out=None)

            self.status.set('Uncertainty plot for %s %s files done' % (len(ncfiles), var), 90)
            logger.info("uncertainty plot done")
        except Exception:
            logger.exception("uncertainty plot failed")
            raise

        self.plotout_spagetti.setValue(plotout_spagetti_file)
        self.plotout_uncertainty.setValue(plotout_uncertainty_file)
        self.status.set('visualisation done', 100)
Example #17
    def _handler(self, request, response):
        try:
            ocgis.env.DIR_OUTPUT = tempfile.mkdtemp(dir=os.getcwd())
            ocgis.env.OVERWRITE = True
            nc_files = archiveextract(resource=rename_complexinputs(
                request.inputs['resource']))
            rd = RequestDataset(nc_files)
            rd.dimension_map.set_bounds('time', None)
            if nc_files[0].endswith('.nc'):
                out_prefix = nc_files[0][:-3] + '_merged'
            else:
                out_prefix = nc_files[0] + '_merged'
            ops = OcgOperations(dataset=rd, output_format='nc',
                                prefix=out_prefix)
            ret = ops.execute()
            response.outputs['output'].file = ret
            response.outputs['output'].output_format = \
                Format('application/x-netcdf')
            return response
        except Exception:
            raise Exception(traceback.format_exc())
Example #18
    def _handler(self, request, response):
        resource = archiveextract(resource=rename_complexinputs(
            request.inputs['resource']))[0]

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        LOGGER.debug("Rank : {}".format(ocgis.vm.rank))
        print("Rank : {}".format(ocgis.vm.rank))

        rd = ocgis.RequestDataset(resource)
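        # compute a monthly climatology: group all time steps by month and
        # average them, writing the result to netCDF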
        ops = ocgis.OcgOperations(dataset=rd,
                                  calc=[{'func': 'mean', 'name': 'mean'}],
                                  calc_grouping=['month'],
                                  output_format='nc')
        output = ops.execute()

        response.outputs['output_netcdf'].file = output

        response.update_status('Execution completed', 100)
        return response
Example #19
    def _handler(self, request, response):

        ocgis.env.DIR_OUTPUT = tempfile.mkdtemp(dir=os.getcwd())
        ocgis.env.OVERWRITE = True
        tic = dt.now()
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at : {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            candidate = archiveextract(
                resource=rename_complexinputs(request.inputs['candidate']))
            target = archiveextract(
                resource=rename_complexinputs(request.inputs['target']))
            location = request.inputs['location'][0].data
            indices = [el.data for el in request.inputs['indices']]
            dist = request.inputs['dist'][0].data
            dateStartCandidate = request.inputs['dateStartCandidate'][0].data
            dateEndCandidate = request.inputs['dateEndCandidate'][0].data
            dateStartTarget = request.inputs['dateStartTarget'][0].data
            dateEndTarget = request.inputs['dateEndTarget'][0].data

        except Exception as ex:
            msg = 'Failed to read input parameter {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        ######################################
        # Process inputs
        ######################################

        try:
            point = Point(*map(float, location.split(',')))
            dateStartCandidate = dt.strptime(dateStartCandidate, '%Y-%m-%d')
            dateEndCandidate = dt.strptime(dateEndCandidate, '%Y-%m-%d')
            dateStartTarget = dt.strptime(dateStartTarget, '%Y-%m-%d')
            dateEndTarget = dt.strptime(dateEndTarget, '%Y-%m-%d')

        except Exception as ex:
            msg = 'failed to process inputs {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        LOGGER.debug("init took {}".format(dt.now() - tic))
        response.update_status('Processed input parameters', 3)

        ######################################
        # Extract target time series
        ######################################
        savetarget = False
        try:
            # Using `call` creates a netCDF file in the tmp directory.
            #
            # Here we keep this stuff in memory
            if savetarget:
                prefix = 'target_ts'
                target_ts = call(resource=target,
                                 geom=point,
                                 variable=indices,
                                 time_range=[dateStartTarget, dateEndTarget],
                                 select_nearest=True,
                                 prefix=prefix)

                # target_ts = [get_values(prefix+'.nc', ind) for ind in indices]

            else:
                trd = RequestDataset(
                    target,
                    variable=indices,
                    time_range=[dateStartTarget, dateEndTarget])

                op = OcgOperations(trd,
                                   geom=point,
                                   select_nearest=True,
                                   search_radius_mult=1.75)
                out = op.execute()
                target_ts = out.get_element()

        except Exception as ex:
            msg = 'Target extraction failed {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Extracted target series', 5)

        ######################################
        # Compute dissimilarity metric
        ######################################

        response.update_status('Computing spatial analog', 6)
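        # 'dissimilarity' is a calc function that flyingpigeon is assumed to
        # register with OCGIS; it scores each candidate grid cell against the
        # target point series using the chosen distance metric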
        try:
            output = call(
                resource=candidate,
                calc=[{
                    'func': 'dissimilarity',
                    'name': 'spatial_analog',
                    'kwds': {
                        'dist': dist,
                        'target': target_ts,
                        'candidate': indices
                    }
                }],
                time_range=[dateStartCandidate, dateEndCandidate],
            )

        except Exception as ex:
            msg = 'Spatial analog failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        add_metadata(output,
                     dist=dist,
                     indices=",".join(indices),
                     target_location=location,
                     candidate_time_range="{},{}".format(
                         dateStartCandidate, dateEndCandidate),
                     target_time_range="{},{}".format(dateStartTarget,
                                                      dateEndTarget))

        response.update_status('Computed spatial analog', 95)

        response.outputs['output_netcdf'].file = output

        response.update_status('Execution completed', 100)
        LOGGER.debug("Total execution took {}".format(dt.now() - tic))
        return response
Example #20
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        from tempfile import mkstemp

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

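        # the variable name is read from the first file and assumed to hold
        # for all inputs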
        var = get_variable(ncs[0])
        LOGGER.info('variable to be plotted: {}'.format(var))

        # mosaic = self.mosaic.getValue()
        if 'region' in request.inputs:
            regions = [inp.data for inp in request.inputs['region']]
            try:
                png_region = vs.plot_polygons(regions)
            except Exception as ex:
                msg = 'failed to plot the polygons to a world map: {}'.format(ex)
                LOGGER.exception(msg)
                # fall back to an empty placeholder so the factsheet can still be built
                _, png_region = mkstemp(dir='.', suffix='.png')

            # clip the demanded polygons
            subsets = clipping(
                resource=ncs,
                variable=var,
                polygons=regions,
                mosaic=True,
                spatial_wrapping='wrap',
            )
        else:
            subsets = ncs
            png_region = vs.plot_extend(ncs[0])

        response.update_status('Arguments set for subset process', 0)

        try:
            tar_subsets = archive(subsets)
        except Exception as ex:
            msg = 'failed to archive subsets: {}'.format(ex)
            LOGGER.exception(msg)
            _, tar_subsets = mkstemp(dir='.', suffix='.tar')

        try:
            png_uncertainty = vs.uncertainty(subsets, variable=var)
        except Exception as ex:
            msg = 'failed to generate the uncertainty plot: {}'.format(ex)
            LOGGER.exception(msg)
            _, png_uncertainty = mkstemp(dir='.', suffix='.png')

        try:
            png_spaghetti = vs.spaghetti(subsets, variable=var)
        except Exception as ex:
            msg = 'failed to generate the spaghetti plot: {}'.format(ex)
            LOGGER.exception(msg)
            _, png_spaghetti = mkstemp(dir='.', suffix='.png')

        try:
            from flyingpigeon import robustness as ro
            signal, low_agreement_mask, high_agreement_mask, text_src = ro.signal_noise_ratio(
                resource=subsets,
                # start=None, end=None,
                # timeslice=None,
                # variable=var
            )
            title = 'signal robustness of %s' % var
            png_robustness = vs.map_robustness(
                signal,
                high_agreement_mask,
                low_agreement_mask,
                # cmap=cmap,
                #    title=title
            )
            LOGGER.info('robustness graphic generated')
        except Exception as ex:
            msg = 'failed to generate the robustness plot: {}'.format(ex)
            LOGGER.exception(msg)
            _, png_robustness = mkstemp(dir='.', suffix='.png')

        factsheet = vs.factsheetbrewer(png_region=png_region,
                                       png_uncertainty=png_uncertainty,
                                       png_spaghetti=png_spaghetti,
                                       png_robustness=png_robustness)

        response.outputs['output_nc'].file = tar_subsets
        response.outputs['output_factsheet'].file = factsheet
        response.update_status("done", 100)
        return response
Example #21
    def execute(self):
        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        from os.path import basename
        from flyingpigeon import sdm
        from flyingpigeon.utils import archive, archiveextract

        self.status.set('Start process', 0)

        try:
            logger.info('reading the arguments')
            resources = archiveextract(
                self.getInputValues(identifier='resources'))
            taxon_name = self.getInputValues(identifier='taxon_name')[0]
            bbox_obj = self.BBox.getValue()
            bbox = [
                bbox_obj.coords[0][0], bbox_obj.coords[0][1],
                bbox_obj.coords[1][0], bbox_obj.coords[1][1]
            ]
            period = self.getInputValues(identifier='period')
            period = period[0]
            indices = self.getInputValues(identifier='input_indices')
            archive_format = self.archive_format.getValue()
            logger.debug("indices = %s for %s ", indices, taxon_name)
            logger.info("bbox={0}".format(bbox))
        except Exception as e:
            msg = 'failed to read in the arguments: %s' % e
            logger.exception(msg)
            raise Exception(msg)
        logger.info('indices %s ' % indices)

        try:
            self.status.set('Fetching GBIF Data', 10)
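            # query the GBIF occurrence database for the taxon inside the bbox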
            gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)
        except Exception as e:
            msg = 'failed to search gbif.'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('write csv file', 70)
            gbifcsv = sdm.gbifdic2csv(gbifdic)
        except Exception as e:
            msg = 'failed to write csv file.'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('plot map', 80)
            from flyingpigeon.visualisation import map_gbifoccurrences
            latlon = sdm.latlon_gbifdic(gbifdic)
            occurence_map = map_gbifoccurrences(latlon)
        except Exception as e:
            msg = 'failed to plot occurrence map.'
            logger.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        ncs_indices = None
        try:
            self.status.set(
                'start calculation of climate indices for %s' % indices, 30)
            ncs_indices = sdm.get_indices(resources=resources, indices=indices)
            logger.info('indice calculation done')
        except Exception:
            msg = 'failed to calculate indices'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('get domain', 30)
            domains = set()
            for resource in ncs_indices:
                # get_domain works only if metadata are set in a correct way
                domains = domains.union([basename(resource).split('_')[1]])
            if len(domains) == 1:
                domain = list(domains)[0]
                logger.debug('Domain %s found in indices files' % domain)
            else:
                logger.error('Not a single domain in indices files %s' %
                             domains)
        except Exception as e:
            logger.exception('failed to get domains %s' % e)

        try:
            self.status.set('generating the PA mask', 20)
            PAmask = sdm.get_PAmask(coordinates=latlon, domain=domain)
            logger.info('PA mask successfully generated')
        except Exception as e:
            logger.exception('failed to generate the PA mask: %s' % e)

        try:
            self.status.set('Plotting PA mask', 25)
            from flyingpigeon.visualisation import map_PAmask
            PAmask_png = map_PAmask(PAmask)
        except Exception as e:
            logger.exception('failed to plot the PA mask: %s' % e)

        try:
            # sort indices
            indices_dic = None
            indices_dic = sdm.sort_indices(ncs_indices)
            logger.info('indice files sorted for %s Datasets' %
                        len(indices_dic.keys()))
        except Exception:
            msg = 'failed to sort indices'
            logger.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []

        for count, key in enumerate(indices_dic.keys()):
            try:
                staus_nr = 40 + count * 10
                self.status.set('Start processing of %s' % key, staus_nr)
                ncs = indices_dic[key]
                logger.info('with %s files' % len(ncs))
                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    logger.info('reference indice calculated %s ' %
                                ncs_references)
                except Exception:
                    msg = 'failed to calculate the reference'
                    logger.exception(msg)
                    raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    self.status.set('GAM successfully trained', staus_nr + 5)
                except Exception as e:
                    msg = 'failed to train GAM for %s : %s' % (key, e)
                    logger.debug(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    self.status.set('prediction done', staus_nr + 7)
                except Exception as e:
                    msg = 'failed to predict tree occurrence %s' % e
                    logger.exception(msg)
                    # raise Exception(msg)

                try:
                    self.status.set('land sea mask for predicted data',
                                    staus_nr + 8)
                    from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                    mask = invert(isnan(PAmask))
                    mask = broadcast_arrays(prediction, mask)[1]
                    prediction[~mask] = nan
                except Exception as e:
                    logger.debug('failed to mask predicted data: %s' % e)

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    logger.info('Favourability written to file')
                except Exception as e:
                    msg = 'failed to write species file %s' % e
                    logger.debug(msg)
                    # raise Exception(msg)

            except Exception as e:
                msg = 'failed to calculate reference indices. %s ' % e
                logger.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = None
            archive_indices = archive(ncs_indices, format=archive_format)
            logger.info('indices added to archive')
        except Exception:
            msg = 'failed adding indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_references = None
        try:
            archive_references = archive(ncs_references, format=archive_format)
            logger.info('indices reference added to archive')
        except Exception:
            msg = 'failed adding reference indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_prediction = None
        try:
            archive_prediction = archive(species_files, format=archive_format)
            logger.info('species_files added to archive')
        except Exception:
            msg = 'failed adding species files to archive'
            logger.exception(msg)
            raise Exception(msg)
        try:
            from flyingpigeon.visualisation import pdfmerge
            stat_infosconcat = pdfmerge(stat_infos)
            logger.info('stat infos pdfs merged')
        except Exception:
            logger.exception('failed to concat images')
            _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                                   prefix='foobar-',
                                                   dir='.')

        self.output_csv.setValue(gbifcsv)
        self.output_gbif.setValue(occurence_map)
        self.output_PA.setValue(PAmask_png)
        self.output_indices.setValue(archive_indices)
        self.output_reference.setValue(archive_references)
        self.output_prediction.setValue(archive_prediction)
        self.output_info.setValue(stat_infosconcat)

        self.status.set('done', 100)
Example #22
    def execute(self):
        from os.path import basename
        from flyingpigeon import sdm
        from flyingpigeon.utils import archive, archiveextract  # , get_domain
        from flyingpigeon.visualisation import map_PAmask

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        self.status.set('Start process', 0)

        try:
            self.status.set('reading the arguments', 5)
            resources = archiveextract(
                self.getInputValues(identifier='input_indices'))
            csv_file = self.getInputValues(identifier='gbif')[0]
            period = self.getInputValues(identifier='period')
            period = period[0]
            archive_format = self.archive_format.getValue()
        except Exception:
            msg = 'failed to read in the arguments'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('read in latlon coordinates', 10)
            latlon = sdm.latlon_gbifcsv(csv_file)
        except Exception:
            logger.exception('failed to extract the latlon points')

        try:
            self.status.set('plot map', 20)
            from flyingpigeon.visualisation import map_gbifoccurrences
            # latlon = sdm.latlon_gbifdic(gbifdic)
            occurence_map = map_gbifoccurrences(latlon)
        except Exception:
            logger.exception('failed to plot occurrence map')

        # try:
        #     self.status.set('get domain', 30)
        #     domains = set()
        #     for indice in resources:
        #         # get_domain works only if metadata are set in a correct way
        #         domains = domains.union([basename(indice).split('_')[1]])
        #     if len(domains) == 1:
        #         domain = list(domains)[0]
        #         logger.info('Domain %s found in indices files' % domain)
        #     else:
        #         logger.warn('NOT a single domain in indices files %s' % domains)
        # except:
        #     logger.exception('failed to get domains')

        try:
            # sort indices
            indices_dic = sdm.sort_indices(resources)
            logger.info('indice files sorted for %s Datasets' %
                        len(indices_dic.keys()))
        except Exception:
            msg = 'failed to sort indices'
            logger.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        self.status.set('Start processing for %s Datasets' %
                        len(indices_dic.keys()), 30)
        for count, key in enumerate(indices_dic.keys()):
            try:
                staus_nr = 40 + count * 10
                self.status.set('Start processing of %s' % key, staus_nr)
                ncs = indices_dic[key]
                logger.info('with %s files' % len(ncs))

                try:
                    self.status.set('generating the PA mask', 20)
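                    # presence/absence (PA) mask: projects the GBIF points
                    # onto the grid of the first indices file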
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    logger.info('PA mask successfully generated')
                except Exception:
                    logger.exception('failed to generate the PA mask')

                try:
                    self.status.set('Plotting PA mask', 25)
                    PAmask_pngs.extend([map_PAmask(PAmask)])
                except Exception:
                    logger.exception('failed to plot the PA mask')

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    logger.info('reference indice calculated %s ' %
                                ncs_references)
                    self.status.set('reference indice calculated',
                                    staus_nr + 2)
                except Exception:
                    msg = 'failed to calculate the reference'
                    logger.exception(msg)
                    # raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask, modelname=key)
                    stat_infos.append(gam_info)
                    self.status.set('GAM successfully trained', staus_nr + 5)
                except Exception:
                    msg = 'failed to train GAM for %s' % (key)
                    logger.exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    self.status.set('prediction done', staus_nr + 7)
                except Exception:
                    msg = 'failed to predict tree occurrence'
                    logger.exception(msg)
                    # raise Exception(msg)

                # try:
                #     self.status.set('land sea mask for predicted data', staus_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     logger.exception('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    logger.info('Favourability written to file')
                except Exception:
                    msg = 'failed to write species file'
                    logger.exception(msg)
                    # raise Exception(msg)
            except Exception:
                msg = 'failed to process SDM chain for %s ' % key
                logger.exception(msg)
                # raise Exception(msg)

        try:
            archive_references = None
            archive_references = archive(ncs_references, format=archive_format)
            logger.info('indices 2D added to archive')
        except Exception:
            msg = 'failed adding 2D indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_prediction = None
        try:
            archive_prediction = archive(species_files, format=archive_format)
            logger.info('species_files added to archive')
        except Exception:
            msg = 'failed adding species files to archive'
            logger.exception(msg)
            raise Exception(msg)

        try:
            from flyingpigeon.visualisation import pdfmerge, concat_images
            stat_infosconcat = pdfmerge(stat_infos)
            logger.debug('pngs %s' % PAmask_pngs)
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            logger.info('stat infos pdfs and mask pngs merged')
        except Exception:
            logger.exception('failed to concat images')
            _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                                   prefix='foobar-',
                                                   dir='.')
            _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                             prefix='foobar-',
                                             dir='.')

        self.output_gbif.setValue(occurence_map)
        self.output_PA.setValue(PAmask_png)
        self.output_reference.setValue(archive_references)
        self.output_prediction.setValue(archive_prediction)
        self.output_info.setValue(stat_infosconcat)
        self.status.set('done', 100)
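Note: when merging the stats PDFs or mask PNGs fails, the handler above falls back to empty placeholder files so that every output slot can still be filled. A minimal sketch of that fallback pattern (the helper name fallback_file is illustrative, not part of flyingpigeon):

import tempfile

def fallback_file(suffix):
    # Create an empty placeholder so the output slot can always be
    # set, even when the real product could not be generated.
    _, placeholder = tempfile.mkstemp(suffix=suffix, prefix='foobar-', dir='.')
    return placeholder

# e.g. after a failed concat_images() call:
# PAmask_png = fallback_file('.png')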
Example #23
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        process_start_time = time.time()  # measure process execution time ...

        response.update_status(
            'execution started at: {}'.format(str(dt.now())), 5)

        LOGGER.debug('starting segetalflora process execution')
        response.update_status('starting calculation of segetalflora', 5)

        ############################
        # read arguments into variables
        ############################
        try:
            resource = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            climate_type = request.inputs['climate_type'][0].data
            culture_type = request.inputs['culture_type'][0].data

            LOGGER.info('URLs for {} netCDF files found'.format(len(resource)))
            LOGGER.info('culture type: {}'.format(culture_type))
        except Exception as ex:
            msg = 'Failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if not isinstance(climate_type, list):
                climate_type = [climate_type]
            if not isinstance(culture_type, list):
                culture_type = [culture_type]
            LOGGER.info('arguments are lists')
        except Exception as ex:
            msg = 'Failed to transform arguments to lists: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        #############################
        # get yearly mean temperature
        #############################

        nc_tasmean = sf.get_yrmean(resource)

        #######################################
        # main call for segetalflora processing
        #######################################

        nc_sf = sf.get_segetalflora(resource=nc_tasmean,
                                    culture_type=culture_type,
                                    climate_type=climate_type)

        ####################
        # tar file archiving
        ####################

        try:
            response.update_status('preparing output', 99)
            LOGGER.debug('length of sf: {}'.format(len(nc_sf)))
            if len(nc_sf) == 1:
                # TODO: fix pywps output formats OR use separate output params.
                response.outputs['out_segetalflora'].file = nc_sf[0]
                response.outputs['out_segetalflora'].format = FORMATS.NETCDF
            else:
                response.outputs['out_segetalflora'].file = archive(
                    nc_sf, format='tar', dir_output='.', mode='w')
                response.outputs['out_segetalflora'].format = Format(
                    'application/x-tar')
            if len(nc_tasmean) == 1:
                response.outputs['out_tasmean'].file = nc_tasmean[0]
                response.outputs['out_tasmean'].format = FORMATS.NETCDF
            else:
                response.outputs['out_tasmean'].file = archive(nc_tasmean,
                                                               format='tar',
                                                               dir_output='.',
                                                               mode='w')
                response.outputs['out_tasmean'].format = Format(
                    'application/x-tar')
        except Exception as ex:
            msg = 'Failed to prepare output files: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.update_status('done', 100)
        LOGGER.debug(
            "total execution took {} seconds.".format(time.time() -
                                                      process_start_time))

        return response
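The segetalflora handler above returns a single NetCDF file directly but bundles multiple files into a tar archive. A hedged sketch of that packaging decision as a standalone helper (the function name and returned MIME strings are illustrative):

import tarfile

def package_results(paths, archive_name='output.tar'):
    # One file: hand it back as NetCDF; several files: bundle them
    # into a tar archive and return the archive path instead.
    if len(paths) == 1:
        return paths[0], 'application/x-netcdf'
    with tarfile.open(archive_name, 'w') as tar:
        for p in paths:
            tar.add(p)
    return archive_name, 'application/x-tar'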
Example #24
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # input files
        LOGGER.debug("url={}, mime_type={}".format(
            request.inputs['resource'][0].url,
            request.inputs['resource'][0].data_format.mime_type))
        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        # mime_type=request.inputs['resource'][0].data_format.mime_type)
        # mosaic option
        # TODO: fix defaults in pywps 4.x
        if 'mosaic' in request.inputs:
            mosaic = request.inputs['mosaic'][0].data
        else:
            mosaic = False
        # regions used for subsetting
        regions = [inp.data for inp in request.inputs['region']]

        LOGGER.info('ncs = {}'.format(ncs))
        LOGGER.info('regions = {}'.format(regions))
        LOGGER.info('mosaic = {}'.format(mosaic))

        response.update_status("Arguments set for subset process", 0)
        LOGGER.debug('starting: regions=%s, num_files=%s', len(regions),
                     len(ncs))

        try:
            results = clipping(
                resource=ncs,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                # dir_output=os.path.abspath(os.curdir),
                # dimension_map=dimension_map,
            )
            LOGGER.info('results {}'.format(results))
        except Exception as ex:
            msg = 'clipping failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('No results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            LOGGER.info('Tar file prepared')
        except Exception as ex:
            msg = 'Tar file preparation failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
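Several handlers in this collection pick the first non-empty entry of results for the single-file ncout output. A self-contained illustration of that next(...) idiom, using made-up file names:

results = ['', 'subset_a.nc', 'subset_b.nc']  # hypothetical clipping output

# Index of the first truthy entry, or None when every entry is empty.
i = next((idx for idx, x in enumerate(results) if x), None)
if i is None:
    raise Exception('No results produced.')
ncout = results[i]  # 'subset_a.nc'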
Example #25
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            period = request.inputs['period'][0].data
            indices = [inpt.data for inpt in request.inputs['indices']]
            archive_format = request.inputs['archive_format'][0].data
            LOGGER.info(
                "all arguments read; nr of files in resources: {}".format(
                    len(resources)))
        except Exception as ex:
            msg = 'failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            gbif_url = request.inputs['gbif'][0].data
            csv_file = download(gbif_url)
            LOGGER.info('CSV file fetched successfully: %s' % csv_file)
        except Exception as ex:
            msg = 'failed to fetch GBIF file: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('read in latlon coordinates', 10)
            latlon = sdm.latlon_gbifcsv(csv_file)
            LOGGER.info('got occurrence coordinates %s ' % csv_file)
        except Exception as ex:
            msg = 'failed to extract the latlon points from file {}: {}'.format(
                csv_file, str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('plot map', 20)
            occurence_map = map_gbifoccurrences(latlon)
            LOGGER.info('GBIF occurrence plotted')
        except Exception as ex:
            msg = 'failed to plot occurence map: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        try:
            response.update_status('start calculation of indices', 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except Exception as ex:
            msg = 'failed to calculate indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            # sort indices
            indices_dic = sdm.sort_indices(ncs_indices)
            LOGGER.info('indice files sorted in dictionary')
        except Exception as ex:
            msg = 'failed to sort indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        response.update_status('Start processing for {} datasets'.format(
            len(indices_dic.keys())))
        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                response.update_status('Start processing of {}'.format(key),
                                       status_nr)

                ncs = indices_dic[key]
                LOGGER.info('with {} files'.format(len(ncs)))

                try:
                    response.update_status('generating the PA mask', 20)
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    LOGGER.info('PA mask successfully generated')
                except Exception as ex:
                    msg = 'failed to generate the PA mask: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    response.update_status('Plotting PA mask', 25)
                    PAmask_pngs.extend([map_PAmask(PAmask)])
                except Exception as ex:
                    msg = 'failed to plot the PA mask: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    LOGGER.info('reference indice calculated {}'.format(
                        ncs_references))
                except Exception as ex:
                    msg = 'failed to calculate the reference: {}'.format(
                        str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    response.update_status('GAM successfully trained',
                                           status_nr + 5)
                except Exception as ex:
                    msg = 'failed to train GAM for {}: {}'.format(key, str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    response.update_status('prediction done', status_nr + 7)
                except Exception as ex:
                    msg = 'failed to predict tree occurrence: {}'.format(
                        str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)
                #
                # try:
                #     response.update_status('land sea mask for predicted data',  status_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     LOGGER.debug('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    LOGGER.info('Favourability written to file')
                except Exception as ex:
                    msg = 'failed to write species file: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

            except Exception as ex:
                msg = 'failed to process SDM chain for {} : {}'.format(
                    key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices added to archive')
        except Exception as ex:
            msg = 'failed adding indices to archive: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            archive_references = archive(ncs_references, format=archive_format)
            LOGGER.info('indices reference added to archive')
        except Exception as ex:
            msg = 'failed adding reference indices to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            archive_prediction = archive(species_files, format=archive_format)
            LOGGER.info('species_files added to archive')
        except Exception as ex:
            msg = 'failed adding species_files indices to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            stat_infosconcat = pdfmerge(stat_infos)
            LOGGER.debug('pngs {}'.format(PAmask_pngs))
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            LOGGER.info('stat infos pdfs and mask pngs merged')
        except Exception as ex:
            msg = 'failed to concat images: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # self.output_csv.setValue(csv_file)
        response.outputs['output_gbif'].file = occurence_map
        response.outputs['output_PA'].file = PAmask_png
        response.outputs['output_indices'].file = archive_indices
        response.outputs['output_reference'].file = archive_references
        response.outputs['output_prediction'].file = archive_prediction
        response.outputs['output_info'].file = stat_infosconcat

        response.update_status('done', 100)
        return response
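The SDM handlers advance the WPS progress bar in steps of ten per dataset plus a small per-stage offset (40 + count * 10, then +5 after GAM training, +7 after prediction). A sketch of that bookkeeping detached from pywps (the function name is hypothetical):

def stage_status(count, offset, base=40, step=10):
    # Progress value for the count-th dataset (0-based) at a given
    # stage offset, capped at 99 so only the final call reports 100.
    return min(base + count * step + offset, 99)

# second dataset (count=1), GAM trained (offset 5) -> 55
assert stage_status(1, 5) == 55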
Example #26
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            taxon_name = request.inputs['taxon_name'][0].data
            bbox = [-180, -90, 180, 90]
            # bbox_obj = self.BBox.getValue()
            # bbox = [bbox_obj.coords[0][0],
            #         bbox_obj.coords[0][1],
            #         bbox_obj.coords[1][0],
            #         bbox_obj.coords[1][1]]
            period = request.inputs['period']
            period = period[0].data
            indices = [inpt.data for inpt in request.inputs['indices']]
            archive_format = request.inputs['archive_format'][0].data
            LOGGER.exception("indices = %s for %s ", indices, taxon_name)
            LOGGER.info("bbox={0}".format(bbox))
        except:
            LOGGER.exception('failed to read in the arguments')
        LOGGER.info('indices %s ' % indices)

        try:
            response.update_status('Fetching GBIF Data', 10)
            gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)
            LOGGER.info('Fetched GBIF data')
        except:
            msg = 'failed to search gbif.'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('write csv file', 70)
            gbifcsv = sdm.gbifdic2csv(gbifdic)
            LOGGER.info('GBIF data written to file')
        except:
            msg = 'failed to write csv file.'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('plot map', 80)
            latlon = sdm.latlon_gbifdic(gbifdic)
            occurence_map = map_gbifoccurrences(latlon)
        except:
            msg = 'failed to plot occurrence map.'
            LOGGER.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        ncs_indices = None
        try:
            response.update_status(
                'start calculation of climate indices for %s' % indices, 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except:
            msg = 'failed to calculate indices'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            # sort indices
            indices_dic = sdm.sort_indices(ncs_indices)
            LOGGER.info('indice files sorted in dictionary')
        except:
            msg = 'failed to sort indices'
            LOGGER.exception(msg)
            indices_dic = {'dummy': []}

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        response.update_status('Start processing for %s Datasets' %
                               len(indices_dic.keys()))

        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                response.update_status('Start processing of %s' % key,
                                       status_nr)
                ncs = indices_dic[key]
                LOGGER.info('with %s files' % len(ncs))

                try:
                    response.update_status('generating the PA mask', 20)
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    LOGGER.info('PA mask successfully generated')
                except:
                    LOGGER.exception('failed to generate the PA mask')

                try:
                    response.update_status('Plotting PA mask', 25)
                    PAmask_pngs.extend([map_PAmask(PAmask)])
                except:
                    LOGGER.exception('failed to plot the PA mask')

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    LOGGER.info('reference indice calculated %s ' %
                                ncs_references)
                except:
                    msg = 'failed to calculate the reference'
                    LOGGER.exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    response.update_status('GAM successfully trained',
                                           status_nr + 5)
                except:
                    msg = 'failed to train GAM for %s' % (key)
                    LOGGER.exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    response.update_status('prediction done', status_nr + 7)
                except:
                    msg = 'failed to predict tree occurrence'
                    LOGGER.exception(msg)
                    # raise Exception(msg)

                # try:
                #     response.update_status('land sea mask for predicted data',  status_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     LOGGER.exception('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    LOGGER.info('Favourability written to file')
                except:
                    msg = 'failed to write species file'
                    LOGGER.exception(msg)
                    # raise Exception(msg)

            except:
                msg = 'failed to process SDM chain for %s' % key
                LOGGER.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices added to archive')
        except:
            msg = 'failed adding indices to archive'
            LOGGER.exception(msg)
            _, archive_indices = tempfile.mkstemp(suffix='.tar',
                                                  prefix='foobar-',
                                                  dir='.')

        try:
            archive_references = archive(ncs_references, format=archive_format)
            LOGGER.info('indices reference added to archive')
        except:
            msg = 'failed adding reference indices to archive'
            LOGGER.exception(msg)
            _, archive_references = tempfile.mkstemp(suffix='.tar',
                                                     prefix='foobar-',
                                                     dir='.')

        try:
            archive_prediction = archive(species_files, format=archive_format)
            LOGGER.info('species_files added to archive')
        except:
            msg = 'failed adding species_files indices to archive'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            stat_infosconcat = pdfmerge(stat_infos)
            LOGGER.debug('pngs %s' % PAmask_pngs)
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            LOGGER.info('stat infos pdfs and mask pngs merged')
        except:
            LOGGER.exception('failed to concat images')
            _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                                   prefix='foobar-',
                                                   dir='.')
            _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                             prefix='foobar-',
                                             dir='.')

        response.outputs['output_gbif'].file = occurence_map
        response.outputs['output_PA'].file = PAmask_png
        response.outputs['output_indices'].file = archive_indices
        response.outputs['output_reference'].file = archive_references
        response.outputs['output_prediction'].file = archive_prediction
        response.outputs['output_info'].file = stat_infosconcat
        response.outputs['output_csv'].file = gbifcsv

        response.update_status('done', 100)
        return response
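An aside on the archive fallbacks corrected above: tempfile.mkstemp returns a (file descriptor, path) tuple, so assigning the whole tuple to a name that is later used as a filename silently breaks the fallback. The path must be unpacked explicitly:

import tempfile

# mkstemp returns (os-level file descriptor, absolute path); keep the
# second element as the fallback archive path.
fd, tar_path = tempfile.mkstemp(suffix='.tar', prefix='foobar-', dir='.')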
Example #27
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            indices = [inpt.data for inpt in request.inputs['indices']]
            grouping = [inpt.data for inpt in request.inputs['grouping']]

            if 'mosaic' in request.inputs:
                mosaic = request.inputs['mosaic'][0].data
            else:
                mosaic = False

            if 'region' in request.inputs:
                region = [inpt.data for inpt in request.inputs['region']]
            else:
                region = None

            LOGGER.debug('grouping: {}'.format(grouping))
            LOGGER.debug('mosaic: {}'.format(mosaic))
            LOGGER.debug('indices: {}'.format(indices))
            LOGGER.debug('region: {}'.format(region))
            LOGGER.debug('Nr of input files: {}'.format(len(resources)))
        except Exception as ex:
            LOGGER.exception('failed to read in the arguments: {}'.format(str(ex)))

        response.update_status(
            'starting: indices={}, grouping={}, num_files={}'.format(indices, grouping, len(resources)), 2)

        from flyingpigeon.utils import sort_by_filename
        datasets = sort_by_filename(resources, historical_concatination=True)

        results = []
        try:
            group = grouping[0]  # for group in grouping:
            indice = indices[0]  # for indice in indices:
            for key in datasets.keys():
                try:
                    response.update_status('Dataset {}: {}'.format(len(results) + 1, key), 10)

                    LOGGER.debug('grouping: {}'.format(grouping))
                    LOGGER.debug('mosaic: {}'.format(mosaic))
                    LOGGER.debug('indice: {}'.format(indice))
                    LOGGER.debug('region: {}'.format(region))
                    LOGGER.debug('Nr of input files: {}'.format(len(datasets[key])))

                    result = calc_indice_simple(
                        resource=datasets[key],
                        mosaic=mosaic,
                        indice=indice,
                        polygons=region,
                        grouping=group,
                        # dir_output=path.curdir,
                    )
                    LOGGER.debug('result: {}'.format(result))
                    results.extend(result)

                except Exception as ex:
                    msg = 'failed for {}: {}'.format(key, str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

        except Exception as ex:
            msg = 'Failed to calculate indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # if not results:
        #     raise Exception("failed to produce results")
        # response.update_status('num results %s' % len(results), 90)

        tarf = archive(results)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        if i is None:
            response.outputs['ncout'].file = 'dummy.nc'
        else:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
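The handler above relies on flyingpigeon's sort_by_filename to group input files per dataset before calculating indices. A simplified, hypothetical stand-in that groups by filename prefix (the real helper also handles historical concatenation and CORDEX/CMIP naming):

from collections import defaultdict
from os import path

def group_by_dataset(files):
    # Group netCDF files by everything before the trailing date
    # segment, e.g. 'tas_EUR-11_MPI_2006.nc' -> key 'tas_EUR-11_MPI'.
    datasets = defaultdict(list)
    for f in files:
        base = path.splitext(path.basename(f))[0]
        key = base.rsplit('_', 1)[0]
        datasets[key].append(f)
    return dict(datasets)

# group_by_dataset(['tas_A_2006.nc', 'tas_A_2007.nc', 'pr_B_2006.nc'])
# -> {'tas_A': ['tas_A_2006.nc', 'tas_A_2007.nc'], 'pr_B': ['pr_B_2006.nc']}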
Example #28
    def _handler(self, request, response):
        response.update_status('starting uncertainty process', 0)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            ncfiles = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            start = request.inputs['start'][0].data
            end = request.inputs['end'][0].data
            timeslice = request.inputs['timeslice'][0].data
            # # variable = self.variableIn.getValue()
            method = request.inputs['method'][0].data

            response.update_status('arguments read', 5)
            LOGGER.info('Successfully read in the arguments')
        except:
            LOGGER.exception("failed to read in the arguments")
            raise

        response.outputs['output_text'].file = write_fileinfo(ncfiles)

        #  LOGGER.debug('variable set to %s' % variable)
        # if method == 'Method_A':
        signal, low_agreement_mask, high_agreement_mask, text_src = erob.method_A(
            resource=ncfiles,
            start=start,
            end=end,
            timeslice=timeslice,
            # variable=variable
        )  # graphic,

        LOGGER.debug('Robustness calculated')

        try:
            # LOGGER.info('variable to be plotted: %s' % variable)
            from flyingpigeon.visualisation import map_robustness
            # if title is None:
            title = 'signal robustness'  # , end1, end2, start1, start2

            graphic = map_robustness(
                signal,
                high_agreement_mask,
                low_agreement_mask,
                # variable=variable,
                # cmap=cmap,
                title=title)

            LOGGER.info('graphic generated')
        except:
            msg = 'graphic generation failed'
            LOGGER.exception(msg)
            _, graphic = mkstemp(dir='.', suffix='.png')

        response.update_status('process worker done', 95)

        response.outputs['output_signal'].file = signal
        response.outputs['output_high'].file = high_agreement_mask
        response.outputs['output_low'].file = low_agreement_mask
        response.outputs['output_graphic'].file = graphic

        response.update_status('uncertainty process done', 100)
        return response
Example #29
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            if 'region' in request.inputs:
                region = request.inputs['region'][0].data
            else:
                region = None

            if 'mosaic' in request.inputs:
                mosaic = request.inputs['mosaic'][0].data
            else:
                mosaic = False

            percentile = request.inputs['percentile'][0].data

            LOGGER.debug("mosaic %s " % mosaic)
            LOGGER.debug('percentile: %s' % percentile)
            LOGGER.debug('region %s' % region)
            LOGGER.debug('Nr of input files %s ' % len(resources))

        except:
            LOGGER.exception('failed to read in the arguments')

        from flyingpigeon.utils import sort_by_filename
        from flyingpigeon.ocgis_module import call

        datasets = sort_by_filename(resources, historical_concatination=True)
        results = []

        kwds = {'percentile': percentile, 'window_width': 5}
        calc = [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}]

        try:
            for key in datasets.keys():
                try:
                    if region is None:
                        result = call(
                            resource=datasets[key],
                            output_format='nc',
                            calc=calc,
                            # prefix=key,
                            # time_region={'year': [1995, 2000]}
                            # calc_grouping='year'
                        )
                        results.extend([result])
                        LOGGER.debug('percentile based indice done for %s' %
                                     result)
                    else:
                        result = clipping(
                            resource=datasets[key],
                            #  variable=None,
                            calc=calc,
                            #  calc_grouping=None,
                            #  time_range=None,
                            #  time_region=None,
                            polygons=region,
                            mosaic=mosaic)
                        results.extend(result)
                except:
                    LOGGER.exception(
                        "failed to calculate percentile based indice for %s " %
                        key)
        except:
            LOGGER.exception("failed to calculate percentile indices")

        tarf = archive(results)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        if i is None:
            response.outputs['ncout'].file = "dummy.nc"
        else:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
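The percentile handler above drives ocgis through a calc specification. An isolated look at how that spec is assembled; the values mirror the ones used in the handler (with the percentile request input wired into kwds, as fixed above):

percentile = 90    # e.g. request.inputs['percentile'][0].data
window_width = 5   # days in the moving window around each calendar day

# ocgis-style calculation spec: one 'daily_perc' function whose output
# variable is named 'dp'.
calc = [{
    'func': 'daily_perc',
    'name': 'dp',
    'kwds': {'percentile': percentile, 'window_width': window_width},
}]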
Example #30
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        process_start_time = time.time()  # measure process execution time ...

        response.update_status('execution started at : %s ' % dt.now(), 5)

        start_time = time.time()  # measure init ...

        resource = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        refSt = request.inputs['refSt'][0].data
        refEn = request.inputs['refEn'][0].data
        dateSt = request.inputs['dateSt'][0].data
        dateEn = request.inputs['dateEn'][0].data
        regrset = request.inputs['regrset'][0].data

        # fix 31 December issue
        # refSt = dt.combine(refSt,dt_time(12,0))
        # refEn = dt.combine(refEn,dt_time(12,0))
        # dateSt = dt.combine(dateSt,dt_time(12,0))
        # dateEn = dt.combine(dateEn,dt_time(12,0))

        seasonwin = request.inputs['seasonwin'][0].data
        nanalog = request.inputs['nanalog'][0].data
        # bbox = [-80, 20, 50, 70]
        # TODO: Add checking for wrong coordinates and apply defaults if necessary
        # Note: the WPS bbox string 'minlon,minlat,maxlon,maxlat' is
        # reordered here to [minlon, maxlon, minlat, maxlat].
        bboxStr = request.inputs['BBox'][0].data.split(',')
        bbox = [float(bboxStr[0]), float(bboxStr[2]),
                float(bboxStr[1]), float(bboxStr[3])]

        direction = request.inputs['direction'][0].data
        normalize = request.inputs['normalize'][0].data
        distance = request.inputs['dist'][0].data
        outformat = request.inputs['outformat'][0].data
        timewin = request.inputs['timewin'][0].data

        model_var = request.inputs['reanalyses'][0].data
        model, var = model_var.split('_')

        try:
            if direction == 're2mo':
                anaSt = dt.combine(dateSt, dt_time(0, 0))
                anaEn = dt.combine(dateEn, dt_time(0, 0))
                refSt = dt.combine(refSt, dt_time(12, 0))
                refEn = dt.combine(refEn, dt_time(12, 0))
                r_time_range = [anaSt, anaEn]
                m_time_range = [refSt, refEn]
            elif direction == 'mo2re':
                anaSt = dt.combine(dateSt, dt_time(12, 0))
                anaEn = dt.combine(dateEn, dt_time(12, 0))
                refSt = dt.combine(refSt, dt_time(0, 0))
                refEn = dt.combine(refEn, dt_time(0, 0))
                r_time_range = [refSt, refEn]
                m_time_range = [anaSt, anaEn]
            else:
                raise Exception(
                    'failed to find time periods for comparison direction')
        except:
            msg = 'failed to put simulation and reference time in order'
            LOGGER.exception(msg)
            raise Exception(msg)

        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            raise Exception('output format not valid')

        try:
            if model == 'NCEP':
                getlevel = True
                if 'z' in var:
                    level = var.strip('z')
                    variable = 'hgt'
                    # conform_units_to='hPa'
                else:
                    variable = 'slp'
                    level = None
                    # conform_units_to='hPa'
            elif '20CRV2' in model:
                getlevel = False
                if 'z' in var:
                    variable = 'hgt'
                    level = var.strip('z')
                    # conform_units_to=None
                else:
                    variable = 'prmsl'
                    level = None
                    # conform_units_to='hPa'
            else:
                raise Exception('Reanalyses model not known')
            LOGGER.info('environment set')
        except:
            msg = 'failed to set environment'
            LOGGER.exception(msg)
            raise Exception(msg)

        # LOGGER.exception("init took %s seconds.", time.time() - start_time)
        response.update_status('Read in the arguments', 6)

        #################
        # get input data
        #################
        # TODO: do not forget to select years

        start_time = time.time()  # measure get_input_data ...
        response.update_status('fetching input data', 7)
        try:
            if direction == 're2mo':
                nc_reanalyses = reanalyses(start=anaSt.year,
                                           end=anaEn.year,
                                           variable=var,
                                           dataset=model,
                                           getlevel=getlevel)
            else:
                nc_reanalyses = reanalyses(start=refSt.year,
                                           end=refEn.year,
                                           variable=var,
                                           dataset=model,
                                           getlevel=getlevel)

            if isinstance(nc_reanalyses, list):
                nc_reanalyses = sorted(
                    nc_reanalyses,
                    key=lambda i: path.splitext(path.basename(i))[0])
            else:
                nc_reanalyses = [nc_reanalyses]

            # For 20CRV2 geopotential height, the daily dataset for 100 years
            # is about 50 GB, so it makes sense to process it step by step.
            # TODO: need to create a dictionary for such datasets (for models as well)
            # TODO: benchmark the method below for NCEP z500 over 60 years; maybe use the same (!)
            # TODO: currently everything is regridded to the reanalysis

            if ('20CRV2' in model) and ('z' in var):
                tmp_total = []
                origvar = get_variable(nc_reanalyses)

                for z in nc_reanalyses:
                    b0 = call(resource=z,
                              variable=origvar,
                              level_range=[int(level), int(level)],
                              geom=bbox,
                              spatial_wrapping='wrap',
                              prefix='levdom_' + path.basename(z)[0:-3])
                    tmp_total.append(b0)

                tmp_total = sorted(
                    tmp_total,
                    key=lambda i: path.splitext(path.basename(i))[0])
                inter_subset_tmp = call(resource=tmp_total,
                                        variable=origvar,
                                        time_range=r_time_range)

                # Clean up per-level temporary files (removal currently disabled)
                # for i in tmp_total:
                #     system('rm -f %s' % i)

                # Create new variable
                ds = Dataset(inter_subset_tmp, mode='a')
                z_var = ds.variables.pop(origvar)
                dims = z_var.dimensions
                new_var = ds.createVariable('z%s' % level,
                                            z_var.dtype,
                                            dimensions=(dims[0], dims[2],
                                                        dims[3]))
                new_var[:, :, :] = squeeze(z_var[:, 0, :, :])
                # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
                ds.close()
                nc_subset = call(inter_subset_tmp, variable='z%s' % level)
            else:
                nc_subset = call(
                    resource=nc_reanalyses,
                    variable=var,
                    geom=bbox,
                    spatial_wrapping='wrap',
                    time_range=r_time_range,
                    # conform_units_to=conform_units_to
                )

            # nc_subset = call(resource=nc_reanalyses, variable=var, geom=bbox, spatial_wrapping='wrap') # XXXXXX wrap
            # LOGGER.exception("get_input_subset_model took %s seconds.", time.time() - start_time)
            response.update_status('**** Input reanalyses data fetched', 10)
        except:
            msg = 'failed to fetch or subset input files'
            LOGGER.exception(msg)
            raise Exception(msg)

        ########################
        # input data preperation
        ########################
        response.update_status('Start preparing input data', 12)

        # Filter resource:
        if isinstance(resource, list):
            resource = sorted(resource,
                              key=lambda i: path.splitext(path.basename(i))[0])
        else:
            resource = [resource]

        tmp_resource = []

        m_start = m_time_range[0]
        m_end = m_time_range[1]

        for re in resource:
            s, e = get_timerange(re)
            tmpSt = dt.strptime(s, '%Y%m%d')
            tmpEn = dt.strptime(e, '%Y%m%d')
            if ((tmpSt <= m_end) and (tmpEn >= m_start)):
                tmp_resource.append(re)
                LOGGER.debug('Selected file: %s ' % (re))
        resource = tmp_resource

        start_time = time.time()  # measure data preparation ...
        # TODO: Check the calendars! model vs reanalyses.
        # TODO: Check the units! model vs reanalyses.
        try:
            m_total = []
            modvar = get_variable(resource)
            # resource properties
            ds = Dataset(resource[0])
            m_var = ds.variables[modvar]
            dims = list(m_var.dimensions)
            dimlen = len(dims)

            try:
                model_id = ds.getncattr('model_id')
            except AttributeError:
                model_id = 'Unknown model'

            LOGGER.debug('MODEL: %s ' % (model_id))

            lev_units = 'hPa'

            if dimlen > 3:
                lev = ds.variables[dims[1]]
                # actually the level index [1] would need to be detected;
                # assuming zg(time, plev, lat, lon)
                lev_units = lev.units

                if (lev_units == 'Pa'):
                    m_level = str(int(level) * 100)
                else:
                    m_level = level
            else:
                m_level = None

            if level is None:
                level_range = None
            else:
                level_range = [int(m_level), int(m_level)]

            for z in resource:
                tmp_n = 'tmp_%s' % (uuid.uuid1())
                # select level and regrid
                b0 = call(
                    resource=z,
                    variable=modvar,
                    level_range=level_range,
                    spatial_wrapping='wrap',  #cdover='system',
                    regrid_destination=nc_reanalyses[0],
                    regrid_options='bil',
                    prefix=tmp_n)
                # select domain
                b01 = call(resource=b0,
                           geom=bbox,
                           spatial_wrapping='wrap',
                           prefix='levregr_' + path.basename(z)[0:-3])
                # remove the intermediate regridded file (removal currently disabled)
                # system('rm -f %s' % b0)
                # get full resource
                m_total.append(b01)
            ds.close()
            model_subset = call(m_total, time_range=m_time_range)
            # remove per-file intermediates (removal currently disabled)
            # for i in m_total:
            #     system('rm -f %s' % i)

            if m_level is not None:
                # Create new variable in model set
                ds = Dataset(model_subset, mode='a')
                mod_var = ds.variables.pop(modvar)
                dims = mod_var.dimensions
                new_modvar = ds.createVariable('z%s' % level,
                                               mod_var.dtype,
                                               dimensions=(dims[0], dims[2],
                                                           dims[3]))
                new_modvar[:, :, :] = squeeze(mod_var[:, 0, :, :])
                # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
                ds.close()
                mod_subset = call(model_subset, variable='z%s' % level)
            else:
                mod_subset = model_subset

#            if direction == 're2mo':
#                try:
#                    response.update_status('Preparing simulation data', 15)
#                    reanalyses_subset = call(resource=nc_subset, time_range=[anaSt, anaEn])
#                except:
#                    msg = 'failed to prepare simulation period'
#                    LOGGER.exception(msg)
#                try:
#                    response.update_status('Preparing target data', 17)
#                    var_target = get_variable(resource)
#                    # var_simulation = get_variable(simulation)

#                    model_subset_tmp = call(resource=resource, variable=var_target,
#                                            time_range=[refSt, refEn],
#                                            t_calendar='standard',
#                                            spatial_wrapping='wrap',
#                                            regrid_destination=nc_reanalyses[0],
#                                            regrid_options='bil')

#                    # model_subset = call(resource=resource, variable=var_target,
#                    #                     time_range=[refSt, refEn],
#                    #                     geom=bbox,
#                    #                     t_calendar='standard',
#                    #                     # conform_units_to=conform_units_to,
#                    #                     spatial_wrapping='wrap',
#                    #                     regrid_destination=reanalyses_subset,
#                    #                     regrid_options='bil') # XXXXXXXXXXXX ADD WRAP rem calendar

#                    model_subset = call(resource=model_subset_tmp,variable=var_target, geom=bbox, spatial_wrapping='wrap', t_calendar='standard')

#                   # ISSUE: the regrided model has white border with null! Check it.
#                   # check t_calendar!
#                except:
#                    msg = 'failed subset archive model'
#                    LOGGER.exception(msg)
#                    raise Exception(msg)
#            else:
#                try:
#                    response.update_status('Preparing target data', 15)
#                    var_target = get_variable(resource)
#                    # var_simulation = get_variable(simulation)
#                    model_subset = call(resource=resource, variable=var_target,
#                                        time_range=[refSt, refEn],
#                                        geom=bbox,
#                                        t_calendar='standard',
#                                        # conform_units_to=conform_units_to,
#                                        # spatial_wrapping='wrap',
#                                        )
#                except:
#                    msg = 'failed subset archive model'
#                    LOGGER.exception(msg)
#                    raise Exception(msg)
#                try:
#                    response.update_status('Preparing simulation data', 17)
#                    reanalyses_subset = call(resource=nc_subset,
#                                             time_range=[anaSt, anaEn],
#                                             regrid_destination=model_subset,
#                                             regrid_options='bil')
#                except:
#                    msg = 'failed to prepare simulation period'
#                    LOGGER.exception(msg)
        except:
            msg = 'failed to subset simulation or reference data'
            LOGGER.exception(msg)
            raise Exception(msg)

        # --------------------------------------------
        try:
            if direction == 'mo2re':
                simulation = mod_subset
                archive = nc_subset
                base_id = model
                sim_id = model_id
            elif direction == 're2mo':
                simulation = nc_subset
                archive = mod_subset
                base_id = model_id
                sim_id = model
            else:
                raise Exception('direction not valid: %s' % direction)
        except:
            msg = 'failed to find comparison direction'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if level is not None:
                out_var = 'z%s' % level
            else:
                var_archive = get_variable(archive)
                var_simulation = get_variable(simulation)
                if var_archive != var_simulation:
                    rename_variable(archive,
                                    oldname=var_archive,
                                    newname=var_simulation)
                    out_var = var_simulation
                    LOGGER.info('varname %s in netCDF renamed to %s' %
                                (var_archive, var_simulation))
        except:
            msg = 'failed to rename variable in target files'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if seacyc is True:
                seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                    archive, simulation, method=normalize)
            else:
                seasoncyc_base = None
                seasoncyc_sim = None
        except:
            msg = 'failed to prepare seasonal cycle reference files'
            LOGGER.exception(msg)
            raise Exception(msg)

        ip, output = mkstemp(dir='.', suffix='.txt')
        output_file = path.abspath(output)
        files = [path.abspath(archive), path.abspath(simulation), output_file]

        # LOGGER.exception("data preperation took %s seconds.", time.time() - start_time)

        ############################
        # generating the config file
        ############################

        response.update_status('writing config file', 18)
        start_time = time.time()  # measure write config ...

        try:
            config_file = analogs.get_configfile(
                files=files,
                seasoncyc_base=seasoncyc_base,
                seasoncyc_sim=seasoncyc_sim,
                base_id=base_id,
                sim_id=sim_id,
                timewin=timewin,
                varname=var,
                seacyc=seacyc,
                cycsmooth=91,
                nanalog=nanalog,
                seasonwin=seasonwin,
                distfun=distance,
                outformat=outformat,
                calccor=True,
                silent=False,
                period=[
                    dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')
                ],
                bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
        except:
            msg = 'failed to generate config file'
            LOGGER.exception(msg)
            raise Exception(msg)

        # LOGGER.exception("write_config took %s seconds.", time.time() - start_time)

        #######################
        # CASTf90 call
        #######################
        import subprocess
        import shlex

        start_time = time.time()  # measure call castf90

        response.update_status('Start CASTf90 call', 20)
        try:
            # response.update_status('execution of CASTf90', 50)
            cmd = 'analogue.out %s' % path.relpath(config_file)
            # system(cmd)
            args = shlex.split(cmd)
            output, error = subprocess.Popen(
                args, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            LOGGER.info('analogue.out info:\n %s ' % output)
            LOGGER.debug('analogue.out stderr:\n %s ' % error)
            response.update_status('**** CASTf90 succeeded', 90)
        except:
            msg = 'CASTf90 failed'
            LOGGER.exception(msg)
            raise Exception(msg)

        LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)

        response.update_status('preparing output', 91)

        # Stopper to keep twitcher results, for debug
        # dummy=dummy

        response.outputs['config'].file = config_file
        response.outputs['analogs'].file = output_file
        response.outputs['output_netcdf'].file = simulation
        response.outputs['target_netcdf'].file = archive

        ########################
        # generate analog viewer
        ########################

        formated_analogs_file = analogs.reformat_analogs(output_file)
        # response.outputs['formated_analogs'].storage = FileStorage()
        response.outputs['formated_analogs'].file = formated_analogs_file
        LOGGER.info('analogs reformatted')
        response.update_status('reformatted analog file', 95)
        viewer_html = analogs.render_viewer(
            # configfile=response.outputs['config'].get_url(),
            configfile=config_file,
            # datafile=response.outputs['formated_analogs'].get_url())
            datafile=formated_analogs_file)
        response.outputs['output'].file = viewer_html
        response.update_status('Successfully generated analogs viewer', 99)
        LOGGER.info('rendered pages: %s ', viewer_html)
        response.update_status('execution ended', 100)
        LOGGER.debug("total execution took %s seconds.",
                     time.time() - process_start_time)
        return response
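Finally, the analogs handler above shells out to the CASTf90 binary via subprocess with shlex-split arguments. A hedged, self-contained version of that invocation pattern (it assumes, like the original, that analogue.out is on PATH; the config path is illustrative):

import shlex
import subprocess

cmd = 'analogue.out %s' % 'config.txt'
args = shlex.split(cmd)

# Capture stdout and stderr separately; treat a non-zero return code
# as a CASTf90 failure instead of silently continuing.
proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = proc.communicate()
if proc.returncode != 0:
    raise Exception('CASTf90 failed: %s' % error)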