def get_yrmean(resource=None):
  """
  Calculation of annual mean temperature and clipping Europe.

  :param resource: list of netCDF tas input files

  :return list: list of output files
  """
  from flyingpigeon.subset import clipping

  # avoid a shared mutable default argument
  if resource is None:
    resource = []

  ncs = sort_by_filename(resource)
  nc_tasmean = []

  # one try per dataset: a failing dataset is logged and skipped,
  # the remaining datasets are still processed
  for key in ncs.keys():
    try:
      LOGGER.info('process %s' % (key))
      calc = [{'func': 'mean', 'name': 'tas'}]
      calc_group = calc_grouping('yr')
      # replace the frequency token (8th '_'-separated field of the
      # DRS filename) with 'yr' for the output file prefix
      prefix = key.replace(key.split('_')[7], 'yr')
      nc_tasmean.append(clipping(resource=ncs[key],
                                 variable='tas',
                                 calc=calc, calc_grouping=calc_group,
                                 prefix=prefix, polygons='Europe')[0])
      LOGGER.info('clipping and mean tas calculation done for %s' % (key))
    except Exception as e:
      LOGGER.debug('mean tas calculation failed for %s : %s ' % (key, e))
  return nc_tasmean
# --- Example 2 ---
    def execute(self):
        """Clip the input netCDF resources to the selected regions and
        publish a tar archive plus one sample netCDF file."""
        from flyingpigeon.utils import archive, archiveextract

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        ncs = archiveextract(self.getInputValues(identifier='resource'))
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()

        logger.info('ncs = %s', ncs)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)

        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' %
                     (len(regions), len(ncs)))
        try:
            results = clipping(
                resource=ncs,
                polygons=regions,
                mosaic=mosaic,
                spatial_wrapping='wrap',
                dir_output=os.path.abspath(os.curdir),
            )
            logger.info('results %s' % results)
        except Exception:
            msg = 'clipping failed'
            logger.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('no results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception:
            msg = 'Tar file preparation failed'
            logger.exception(msg)
            raise Exception(msg)

        self.output.setValue(tarf)

        # publish the first non-empty result as the sample netCDF output
        i = next((i for i, x in enumerate(results) if x), None)
        self.output_netcdf.setValue(results[i])

        self.status.set('done', 100)
    def execute(self):
        """Clip the input netCDF resources to the selected regions and
        brew a factsheet with country, uncertainty and spaghetti plots."""
        from flyingpigeon.utils import archive, archiveextract
        from tempfile import mkstemp

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        ncs = archiveextract(self.getInputValues(identifier='resource'))
        regions = self.region.getValue()

        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(ncs)))

        try:
            from flyingpigeon.visualisation import plot_polygons
            png_country = plot_polygons(regions)
        except Exception:
            logger.exception('failed to plot the polygon to world map')
            # bugfix: fall back to an empty placeholder assigned to
            # png_country (previously assigned to an unused name, leaving
            # png_country undefined and crashing factsheetbrewer below)
            _, png_country = mkstemp(dir='.', suffix='.png')

        # clip the demanded polygons
        from flyingpigeon.subset import clipping
        subsets = clipping(resource=ncs, variable=None,
                           dimension_map=None,
                           calc=None,
                           output_format='nc',
                           calc_grouping=None,
                           time_range=None,
                           time_region=None,
                           historical_concatination=True,
                           prefix=None,
                           spatial_wrapping='wrap',
                           polygons=regions,
                           mosaic=True
                           )
        try:
            from flyingpigeon.visualisation import uncertainty
            png_uncertainty = uncertainty(subsets)
        except Exception:
            logger.exception('failed to generate the uncertainty plot')
            _, png_uncertainty = mkstemp(dir='.', suffix='.png')

        try:
            from flyingpigeon.visualisation import spaghetti
            png_spaghetti = spaghetti(subsets)
        except Exception:
            logger.exception('failed to generate the spaghetti plot')
            _, png_spaghetti = mkstemp(dir='.', suffix='.png')

        from flyingpigeon.visualisation import factsheetbrewer
        factsheet = factsheetbrewer(png_country=png_country,
                                    png_uncertainty=png_uncertainty,
                                    png_spaghetti=png_spaghetti)

        self.output_factsheet.setValue(factsheet)
        self.status.set('done', 100)
    def execute(self):
        """Clip the input resource URLs to the selected regions and
        publish a tar archive plus one sample netCDF file."""
        urls = self.getInputValues(identifier='resource')
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()

        logger.info('urls = %s', urls)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)

        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(urls)))
        try:
            results = clipping(
                resource=urls,
                polygons=regions,
                mosaic=mosaic,
                spatial_wrapping='wrap',
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
            )
            logger.info('results %s' % results)
        except Exception:
            msg = 'clipping failed'
            logger.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('no results produced.')

        # prepare tar file
        try:
            from flyingpigeon.utils import archive
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception:
            msg = 'Tar file preparation failed'
            logger.exception(msg)
            raise Exception(msg)

        self.output.setValue(tarf)

        # publish the first non-empty result as the sample netCDF output
        i = next((i for i, x in enumerate(results) if x), None)
        self.output_netcdf.setValue(results[i])

        self.status.set('done', 100)
# --- Example 5 ---
    def execute(self):
        """Clip the input resource URLs to the selected regions (with an
        optional dimension map) and publish the results as a tar archive."""
        from ast import literal_eval

        urls = self.getInputValues(identifier='resource')
        mosaik = self.mosaik.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()

        dimension_map = self.dimension_map.getValue()
        if dimension_map is not None:
            # the input arrives as the string representation of a python literal
            dimension_map = literal_eval(dimension_map)

        logging.debug('urls = %s', urls)
        logging.debug('regions = %s', regions)
        logging.debug('mosaik = %s', mosaik)
        logging.debug('dimension_map = %s', dimension_map)

        self.status.set('Arguments set for subset process', 0)

        logging.debug('starting: regions=%s, num_files=%s' % (len(regions), len(urls)))

        try:
            results = clipping(
                resource=urls,
                polygons=regions,
                mosaik=mosaik,
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
                dimension_map=dimension_map,
            )
        except Exception:
            # bugfix: abort here — previously execution continued and hit a
            # NameError on the undefined `results` in the tar loop below
            logging.exception('clipping failed')
            self.status.set('clipping failed')
            raise

        # prepare tar file
        try:
            tarf = 'out.tar'
            tar = tarfile.open(tarf, "w")
            for result in results:
                tar.add(result, arcname=result.replace(os.path.abspath(os.path.curdir), ""))
            tar.close()

            logging.info('Tar file prepared')
        except Exception:
            logging.exception('Tar file preparation failed')
            raise

        self.output.setValue(tarf)
        self.status.set('done', 100)
# --- Example 6 ---
    def execute(self):
        """Clip the input resource URLs to the selected regions and
        publish the results as a tar archive."""
        urls = self.getInputValues(identifier='resource')
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()

        logger.info('urls = %s', urls)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)

        self.status.set('Arguments set for subset process', 0)

        logger.debug('starting: regions=%s, num_files=%s' %
                     (len(regions), len(urls)))

        try:
            results = clipping(
                resource=urls,
                polygons=regions,
                mosaic=mosaic,
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
            )
        except Exception:
            # bugfix: abort here — previously execution continued and hit a
            # NameError on the undefined `results` in the archive step below
            logger.exception('clipping failed')
            self.status.set('clipping failed')
            raise

        # prepare tar file
        try:
            from flyingpigeon.utils import archive
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception:
            logger.exception('Tar file preparation failed')
            raise

        self.output.setValue(tarf)
        self.status.set('done', 100)
    def execute(self):
        """Clip the input resource URLs to the selected regions (with an
        optional dimension map) and publish the results as a tar archive."""
        from ast import literal_eval

        urls = self.getInputValues(identifier='resource')
        mosaik = self.mosaik.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()

        dimension_map = self.dimension_map.getValue()
        if dimension_map is not None:
            # the input arrives as the string representation of a python literal
            dimension_map = literal_eval(dimension_map)

        logger.info('urls = %s', urls)
        logger.info('regions = %s', regions)
        logger.info('mosaik = %s', mosaik)
        logger.info('dimension_map = %s', dimension_map)

        self.status.set('Arguments set for subset process', 0)

        logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(urls)))

        try:
            results = clipping(
                resource=urls,
                polygons=regions,
                mosaik=mosaik,
                spatial_wrapping='wrap',
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
                dimension_map=dimension_map,
            )
        except Exception:
            # bugfix: abort here — previously execution continued and hit a
            # NameError on the undefined `results` in the archive step below
            logger.exception('clipping failed')
            self.status.set('clipping failed')
            raise

        # prepare tar file
        try:
            from flyingpigeon.utils import archive
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception:
            logger.exception('Tar file preparation failed')
            raise

        self.output.setValue(tarf)
        self.status.set('done', 100)
    def _handler(self, request, response):
        """Subset the requested netCDF resources by region and attach the
        tar archive plus one sample netCDF file to the response."""
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # input files
        LOGGER.debug("url={}, mime_type={}".format(
            request.inputs['resource'][0].url,
            request.inputs['resource'][0].data_format.mime_type))
        nc_files = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        # mosaic option
        # TODO: fix defaults in pywps 4.x
        mosaic = request.inputs['mosaic'][0].data if 'mosaic' in request.inputs else False

        # regions used for subsetting
        regions = [region_input.data for region_input in request.inputs['region']]

        LOGGER.info('ncs = {}'.format(nc_files))
        LOGGER.info('regions = {}'.format(regions))
        LOGGER.info('mosaic = {}'.format(mosaic))

        response.update_status("Arguments set for subset process", 0)
        LOGGER.debug('starting: regions=%s, num_files=%s', len(regions),
                     len(nc_files))

        try:
            results = clipping(
                resource=nc_files,
                polygons=regions,
                mosaic=mosaic,
                spatial_wrapping='wrap',
            )
            LOGGER.info('results {}'.format(results))
        except Exception as err:
            message = 'clipping failed: {}'.format(err)
            LOGGER.exception(message)
            raise Exception(message)

        if not results:
            raise Exception('No results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            LOGGER.info('Tar file prepared')
        except Exception as err:
            message = 'Tar file preparation failed: {}'.format(err)
            LOGGER.exception(message)
            raise Exception(message)

        response.outputs['output'].file = tarf

        # publish the first non-empty result as the sample netCDF output
        first_idx = next((idx for idx, nc in enumerate(results) if nc), None)
        response.outputs['ncout'].file = results[first_idx]

        response.update_status("done", 100)
        return response
# --- Example 9 ---
def calc_indice_simple(resource=None,
                       variable=None,
                       prefix=None,
                       indices=None,
                       polygons=None,
                       mosaic=False,
                       groupings='yr',
                       dir_output=None,
                       dimension_map=None,
                       memory_limit=None):
    """
    Calculates given simple indices for suitable files in the appropriate time grouping and polygon.

    :param resource: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the in netcdf file (default=None)
    :param indices: list of indices (default ='SU')
    :param polygons: list of polgons (default ='FRA')
    :param groupings: indices time aggregation (default='yr')
    :param mosaic: if True the polygons are merged into one subset
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)
    :param memory_limit: ocgis memory limit passed to the pr unit-conversion call

    :return: list of netcdf files with calculated indices. Files are saved into dir_output.
    """
    from os.path import exists
    from flyingpigeon import ocgis_module
    from flyingpigeon.subset import clipping
    import uuid

    # normalise scalar-or-list arguments; a shared mutable default is avoided
    if resource is None:
        resource = []
    if not isinstance(resource, list):
        resource = [resource]
    if not isinstance(indices, list):
        indices = [indices]
    if polygons is not None and not isinstance(polygons, list):
        polygons = [polygons]
    if not isinstance(groupings, list):
        groupings = [groupings]

    if dir_output is not None and not exists(dir_output):
        makedirs(dir_output)

    experiments = sort_by_filename(resource)
    outputs = []

    for key in experiments:
        if variable is None:
            variable = get_variable(experiments[key][0])
        try:
            if variable == 'pr':
                # convert precipitation from kg m-2 s-1 to mm/day before
                # computing the indices
                calc = 'pr=pr*86400'
                ncs = ocgis_module.call(
                    resource=experiments[key],
                    variable=variable,
                    dimension_map=dimension_map,
                    calc=calc,
                    memory_limit=memory_limit,
                    prefix=str(uuid.uuid4()),
                    dir_output=dir_output,
                    output_format='nc')
            else:
                ncs = experiments[key]
            for indice in indices:
                logger.info('indice: %s' % indice)
                try:
                    calc = [{'func': 'icclim_' + indice, 'name': indice}]
                    logger.info('calc: %s' % calc)
                    for grouping in groupings:
                        logger.info('grouping: %s' % grouping)
                        try:
                            calc_group = calc_grouping(grouping)
                            logger.info('calc_group: %s' % calc_group)
                            # output prefix: variable replaced by the indice
                            # name and the '_day_' frequency by the grouping
                            prefix = key.replace(variable, indice).replace(
                                '_day_', '_%s_' % grouping)
                            if polygons is None:
                                try:
                                    tmp = ocgis_module.call(
                                        resource=ncs,
                                        variable=variable,
                                        dimension_map=dimension_map,
                                        calc=calc,
                                        calc_grouping=calc_group,
                                        prefix=prefix,
                                        dir_output=dir_output,
                                        output_format='nc')
                                    outputs.append(tmp)
                                    # bugfix: log success only on success;
                                    # previously this ran after a failed
                                    # attempt with a stale or unbound tmp
                                    logger.info('indice file calculated: %s' % tmp)
                                except Exception:
                                    msg = 'could not calc indice %s for domain in %s' % (
                                        indice, key)
                                    logger.debug(msg)
                            else:
                                try:
                                    tmp = clipping(resource=ncs,
                                                   variable=variable,
                                                   dimension_map=dimension_map,
                                                   calc=calc,
                                                   calc_grouping=calc_group,
                                                   prefix=prefix,
                                                   polygons=polygons,
                                                   mosaic=mosaic,
                                                   dir_output=dir_output,
                                                   output_format='nc')
                                    outputs.append(tmp)
                                    logger.info('indice file calculated: %s' % tmp)
                                except Exception:
                                    msg = 'could not calc indice %s for domain in %s' % (
                                        indice, key)
                                    logger.debug(msg)
                        except Exception:
                            msg = 'could not calc indice %s for key %s and grouping %s' % (
                                indice, key, grouping)
                            logger.debug(msg)
                except Exception:
                    msg = 'could not calc indice %s for key %s' % (indice, key)
                    logger.debug(msg)
        except Exception:
            msg = 'could not calc key %s' % key
            logger.debug(msg)
    logger.info('indice outputs %s ' % outputs)
    return outputs
# --- Example 10 ---
def calc_indice_percentile(resources=None,
                           variable=None,
                           prefix=None,
                           indices='TG90p',
                           refperiod=None,
                           groupings='yr',
                           polygons=None,
                           percentile=90,
                           mosaic=False,
                           dir_output=None,
                           dimension_map=None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the in netcdf file (default=None)
    :param indices: list of indices (default ='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period as 'YYYYMMDD-YYYYMMDD' string (or a one-element list of it)
    :param groupings: indices time aggregation (default='yr')
    :param polygons: polygons for subsetting (default=None, no subset)
    :param percentile: percentile for the basis calculation (default=90)
    :param mosaic: if True the polygons are merged into one subset
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into dir_output.
    """
    from os.path import exists
    import uuid
    from numpy import ma
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time

    # normalise scalar-or-list arguments; a shared mutable default is avoided
    if resources is None:
        resources = []
    if not isinstance(resources, list):
        resources = [resources]
    if not isinstance(indices, list):
        indices = [indices]
    if not isinstance(groupings, list):
        groupings = [groupings]
    if isinstance(refperiod, list):
        refperiod = refperiod[0]

    if refperiod is not None:
        # 'YYYYMMDD-YYYYMMDD' -> [start, end] datetime range
        start = dt.strptime(refperiod.split('-')[0], '%Y%m%d')
        end = dt.strptime(refperiod.split('-')[1], '%Y%m%d')
        time_range = [start, end]
    else:
        time_range = None

    if dir_output is not None and not exists(dir_output):
        makedirs(dir_output)

    # ------------------------------------------------------------------
    # Compute a custom percentile basis using ICCLIM
    # ------------------------------------------------------------------
    from ocgis.contrib import library_icclim as lic
    nc_indices = []
    nc_dic = sort_by_filename(resources)

    window_width = 5  # ICCLIM window width for the percentile basis

    for grouping in groupings:
        calc_group = calc_grouping(grouping)
        for key in nc_dic.keys():
            resource = nc_dic[key]
            if variable is None:
                variable = get_variable(resource)

            # extract (and optionally clip) the reference period
            if polygons is None:
                nc_reference = call(resource=resource,
                                    prefix=str(uuid.uuid4()),
                                    time_range=time_range,
                                    output_format='nc',
                                    dir_output=dir_output)
            else:
                nc_reference = clipping(resource=resource,
                                        prefix=str(uuid.uuid4()),
                                        time_range=time_range,
                                        output_format='nc',
                                        polygons=polygons,
                                        dir_output=dir_output,
                                        mosaic=mosaic)

            arr = ma.masked_array(get_values(resource=nc_reference))
            dt_arr = ma.masked_array(get_time(resource=nc_reference))

            for indice in indices:
                name = indice.replace('_', str(percentile))
                var = indice.split('_')[0]

                operation = None
                if 'T' in var:
                    if percentile >= 50:
                        operation = 'Icclim%s90p' % var
                        func = 'icclim_%s90p' % var  # e.g. icclim_TG90p
                    else:
                        operation = 'Icclim%s10p' % var
                        func = 'icclim_%s10p' % var
                if operation is None:
                    # bugfix: previously a None operation crashed below with
                    # TypeError in the `operation in op` membership test
                    raise Exception("no percentile operator known for indice %s" % indice)

                # load the appropriate operation
                ops = [op for op in dir(lic) if operation in op]
                if len(ops) == 0:
                    raise Exception("operator does not exist %s", operation)

                # getattr replaces the former python2-only `exec` statement
                percentile_dict = getattr(lic, ops[0]).get_percentile_dict(
                    arr, dt_arr, percentile, window_width)
                calc = [{
                    'func': func,
                    'name': name,
                    'kwds': {
                        'percentile_dict': percentile_dict
                    }
                }]

                if polygons is None:
                    nc_indices.append(
                        call(resource=resource,
                             prefix=key.replace(variable, name).replace(
                                 '_day_', '_%s_' % grouping),
                             calc=calc,
                             calc_grouping=calc_group,
                             output_format='nc',
                             dir_output=dir_output))
                else:
                    nc_indices.extend(
                        clipping(
                            resource=resource,
                            prefix=key.replace(variable, name).replace(
                                '_day_', '_%s_' % grouping),
                            calc=calc,
                            calc_grouping=calc_group,
                            output_format='nc',
                            dir_output=dir_output,
                            polygons=polygons,
                            mosaic=mosaic,
                        ))
    return nc_indices


#def calc_indice_unconventional(resource=[], variable=None, prefix=None,
#indices=None, polygons=None,  groupings=None,
#dir_output=None, dimension_map = None):
#"""
#Calculates given indices for suitable files in the appropriate time grouping and polygon.

#:param resource: list of filenames in data reference syntax (DRS) convention (netcdf)
#:param variable: variable name to be selected in the in netcdf file (default=None)
#:param indices: list of indices (default ='TGx')
#:param polygons: list of polygons (default =None)
#:param grouping: indices time aggregation (default='yr')
#:param out_dir: output directory for result file (netcdf)
#:param dimension_map: optional dimension map if different to standard (default=None)

#:return: list of netcdf files with calculated indices. Files are saved into dir_output
#"""

#from os.path import join, dirname, exists
#from os import remove
#import uuid
#from flyingpigeon import ocgis_module
#from flyingpigeon.subset import get_ugid, get_geom

#if type(resource) != list:
#resource = list([resource])
#if type(indices) != list:
#indices = list([indices])
#if type(polygons) != list and polygons != None:
#polygons = list([polygons])
#elif polygons == None:
#polygons = [None]
#else:
#logger.error('Polygons not found')
#if type(groupings) != list:
#groupings = list([groupings])

#if dir_output != None:
#if not exists(dir_output):
#makedirs(dir_output)

#experiments = sort_by_filename(resource)
#outputs = []

#print('environment for calc_indice_unconventional set')
#logger.info('environment for calc_indice_unconventional set')

#for key in experiments:
#if variable == None:
#variable = get_variable(experiments[key][0])
#try:
#ncs = experiments[key]
#for indice in indices:
#logger.info('indice: %s' % indice)
#try:
#for grouping in groupings:
#logger.info('grouping: %s' % grouping)
#try:
#calc_group = calc_grouping(grouping)
#logger.info('calc_group: %s' % calc_group)
#for polygon in polygons:
#try:
#domain = key.split('_')[1].split('-')[0]
#if polygon == None:
#if prefix == None:
#prefix = key.replace(variable, indice).replace('_day_','_%s_' % grouping )
#geom = None
#ugid = None
#else:
#if prefix == None:
#prefix = key.replace(variable, indice).replace('_day_','_%s_' % grouping ).replace(domain,polygon)
#geom = get_geom(polygon=polygon)
#ugid = get_ugid(polygons=polygon, geom=geom)
#if indice == 'TGx':
#calc=[{'func': 'max', 'name': 'TGx'}]
#tmp = ocgis_module.call(resource=ncs,# conform_units_to='celcius',
#variable=variable, dimension_map=dimension_map,
#calc=calc, calc_grouping=calc_group, prefix=prefix,
#dir_output=dir_output, geom=geom, select_ugid=ugid)
#elif indice == 'TGn':
#calc=[{'func': 'min', 'name': 'TGn'}]
#tmp = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
#variable=variable, dimension_map=dimension_map,
#calc=calc, calc_grouping= calc_group, prefix=prefix,
#dir_output=dir_output, geom=geom, select_ugid = ugid)
#elif indice == 'TGx5day':
#calc = [{'func': 'moving_window', 'name': 'TGx5day', 'kwds': {'k': 5, 'operation': 'mean', 'mode': 'same' }}]
#tmp2 = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
#variable=variable, dimension_map=dimension_map,
#calc=calc, prefix=str(uuid.uuid4()),
#geom=geom, select_ugid = ugid)
#calc=[{'func': 'max', 'name': 'TGx5day'}]
#logger.info('moving window calculated : %s' % tmp2)
#tmp = ocgis_module.call(resource=tmp2,
#variable=indice, dimension_map=dimension_map,
#calc=calc, calc_grouping=calc_group, prefix=prefix,
#dir_output=dir_output)
#remove(tmp2)
#elif indice == 'TGn5day':
#calc = [{'func': 'moving_window', 'name': 'TGn5day', 'kwds': {'k': 5, 'operation': 'mean', 'mode': 'same' }}]
#tmp2 = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
#variable=variable, dimension_map=dimension_map,
#calc=calc, prefix=str(uuid.uuid4()),
#geom=geom, select_ugid = ugid)
#calc=[{'func': 'min', 'name': 'TGn5day'}]

#logger.info('moving window calculated : %s' % tmp2)

#tmp = ocgis_module.call(resource=tmp2,
#variable=indice, dimension_map=dimension_map,
#calc=calc, calc_grouping=calc_group, prefix=prefix,
#dir_output=dir_output)
#remove(tmp2)
#else:
#logger.error('Indice %s is not a known inidce' % (indice))
#outputs.append(tmp)
#logger.info('indice file calcualted %s ' % (tmp))
#except Exception as e:
#logger.debug('could not calc indice %s for key %s, polygon %s and calc_grouping %s : %s' %  (indice, key, polygon, grouping, e ))
#except Exception as e:
#logger.debug('could not calc indice %s for key %s and calc_grouping %s : %s' % ( indice, key, polygon, e ))
#except Exception as e:
#logger.debug('could not calc indice %s for key %s: %s'%  (indice, key, e ))
#except Exception as e:
#logger.debug('could not calc key %s: %s' % (key, e))
#return outputs
Exemple #11
0
def get_segetalflora(
    resource=[], dir_output=".", culture_type="fallow", climate_type=2, region=None, dimension_map=None
):
    """productive worker for segetalflora jobs
  :param resources: list of tas netCDF files. (Any time aggregation is possible)
  :param culture_type: Type of culture. Possible values are:
                       'fallow', 'intensive', 'extensive' (default:'fallow')
  :param climate_type: Type of climate: number 1 to 7 or 'all' (default: 2)
  :param region: Region for subset. If 'None' (default), the values will be calculated for Europe
  """
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import calc_grouping, sort_by_filename
    import os
    from os import remove
    from tempfile import mkstemp
    from ocgis import RequestDataset, OcgOperations

    from cdo import Cdo

    cdo = Cdo()

    if not os.path.exists(dir_output):
        os.makedirs(dir_output)

    os.chdir(dir_output)
    # outputs = []

    if region == None:
        region = "Europe"

    if not type(culture_type) == list:
        culture_type = list([culture_type])
    if not type(climate_type) == list:
        climate_type = list([climate_type])

    ncs = sort_by_filename(resource)
    print "%s experiments found" % (len(ncs))
    print "keys: %s " % (ncs.keys())

    # generate outfolder structure:

    dir_netCDF = "netCDF"
    dir_ascii = "ascii"
    dir_netCDF_tas = dir_netCDF + "/tas"
    dir_ascii_tas = dir_ascii + "/tas"

    if not os.path.exists(dir_netCDF):
        os.makedirs(dir_netCDF)
    if not os.path.exists(dir_ascii):
        os.makedirs(dir_ascii)
    if not os.path.exists(dir_netCDF_tas):
        os.makedirs(dir_netCDF_tas)
    if not os.path.exists(dir_ascii_tas):
        os.makedirs(dir_ascii_tas)

    tas_files = []

    for key in ncs.keys():
        try:
            print "process %s" % (key)
            calc = [{"func": "mean", "name": "tas"}]
            calc_group = calc_grouping("yr")
            prefix = key.replace(key.split("_")[7], "yr")
            if not os.path.exists(os.path.join(dir_netCDF_tas, prefix + ".nc")):
                nc_tas = clipping(
                    resource=ncs[key],
                    variable="tas",
                    calc=calc,
                    dimension_map=dimension_map,
                    calc_grouping=calc_group,
                    prefix=prefix,
                    polygons="Europe",
                    dir_output=dir_netCDF_tas,
                )[0]
                print "clipping done for %s" % (key)
                if os.path.exists(os.path.join(dir_netCDF_tas, prefix + ".nc")):
                    tas_files.append(prefix)
                else:
                    print "clipping failed for %s: No output file exists" % (key)
            else:
                print "netCDF file already exists %s" % (key)
                nc_tas = os.path.join(dir_netCDF_tas, prefix + ".nc")
        except Exception as e:
            print "clipping failed for %s: %s" % (key, e)
        try:
            asc_tas = os.path.join(dir_ascii_tas, prefix + ".asc")
            if not os.path.exists(asc_tas):
                f, tmp = mkstemp(dir=os.curdir, suffix=".asc")
                tmp = tmp.replace(os.path.abspath(os.curdir), ".")

                # cdo.outputtab('name,date,lon,lat,value', input = nc_tas , output = tmp)
                cmd = "cdo outputtab,name,date,lon,lat,value %s > %s" % (nc_tas, tmp)
                print cmd
                os.system(cmd)
                print ("tanslation to ascii done")
                remove_rows(tmp, asc_tas)
                remove(tmp)
                print ("rows with missing values removed")
            else:
                print ("tas ascii already exists")
            plot_ascii(asc_tas)
        except Exception as e:
            print "translation to ascii failed %s: %s" % (key, e)
            if os.path.exists(tmp):
                remove(tmp)

    tas_files = [os.path.join(dir_netCDF_tas, nc) for nc in os.listdir(dir_netCDF_tas)]
    outputs = []

    for name in tas_files:
        for cult in culture_type:
            for climat in climate_type:
                try:
                    calc = get_equation(culture_type=cult, climate_type=climat)
                    if type(calc) != None:
                        try:
                            var = "sf%s%s" % (cult, climat)
                            prefix = os.path.basename(name).replace("tas", var).strip(".nc")

                            infile = name  # os.path.join(dir_netCDF_tas,name+'.nc')
                            dir_sf = os.path.join(dir_netCDF, var)
                            if not os.path.exists(dir_sf):
                                os.makedirs(dir_sf)
                            if os.path.exists(os.path.join(dir_sf, prefix + ".nc")):
                                nc_sf = os.path.join(dir_sf, prefix + ".nc")
                                print "netCDF file already exists: %s %s " % (dir_sf, prefix)
                            else:
                                rd = RequestDataset(name, variable="tas", dimension_map=dimension_map)
                                op = OcgOperations(
                                    dataset=rd,
                                    calc=calc,
                                    prefix=prefix,
                                    output_format="nc",
                                    dir_output=dir_sf,
                                    add_auxiliary_files=False,
                                )
                                nc_sf = op.execute()
                                print "segetalflora done for %s" % (prefix)
                                outputs.append(prefix)

                            dir_ascii_sf = os.path.join(dir_ascii, var)
                            if not os.path.exists(dir_ascii_sf):
                                os.makedirs(dir_ascii_sf)
                            asc_sf = os.path.join(dir_ascii_sf, prefix + ".asc")
                            if not os.path.exists(asc_sf):
                                f, tmp = mkstemp(dir=os.curdir, suffix=".asc")
                                tmp = tmp.replace(os.path.abspath(os.curdir), ".")
                                # cdo.outputtab('name,date,lon,lat,value', input = nc_sf , output = tmp)
                                cmd = "cdo outputtab,name,date,lon,lat,value %s > %s" % (nc_sf, tmp)
                                os.system(cmd)
                                print ("translation to ascii done")
                                remove_rows(tmp, asc_sf)
                                remove(tmp)
                                print ("rows with missing values removed")
                            else:
                                print "ascii file already exists"
                            plot_ascii(asc_sf)
                        except Exception as e:
                            print "failed for ascii file: %s %s " % (name, e)
                            if os.path.exists(tmp):
                                remove(tmp)
                    else:
                        print "NO EQUATION found for %s %s " % (cult, climat)
                except Exception as e:
                    print "Segetal flora failed: %s" % (e)
    return outputs
    def _handler(self, request, response):
        """Compute a percentile-based daily indice for every dataset in the
        request's ``resource`` input, optionally clipped to a region, and
        return the results as a tar archive plus one sample netCDF file.
        """
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # defaults so a failed argument parse (logged below, execution
        # continues) does not leave these names unbound further down
        region = None
        mosaic = False
        percentile = 90

        try:
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            if 'region' in request.inputs:
                region = request.inputs['region'][0].data

            if 'mosaic' in request.inputs:
                mosaic = request.inputs['mosaic'][0].data

            percentile = request.inputs['percentile'][0].data

            LOGGER.debug("mosaic %s " % mosaic)
            LOGGER.debug('percentile: %s' % percentile)
            LOGGER.debug('region %s' % region)
            LOGGER.debug('Nr of input files %s ' % len(resources))

        except Exception:
            LOGGER.exception('failed to read in the arguments')

        from flyingpigeon.utils import sort_by_filename
        from flyingpigeon.ocgis_module import call

        datasets = sort_by_filename(resources, historical_concatination=True)
        results = []

        # BUG FIX: the requested percentile was read and logged but the
        # calculation was hard-wired to 90 — use the input value instead
        kwds = {'percentile': percentile, 'window_width': 5}
        calc = [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}]

        try:
            for key in datasets.keys():
                try:
                    if region is None:
                        # whole domain, no clipping
                        result = call(
                            resource=datasets[key],
                            output_format='nc',
                            calc=calc,
                        )
                        results.extend([result])
                        LOGGER.debug('percentile based indice done for %s' %
                                     result)
                    else:
                        # clipping returns a list of files
                        result = clipping(
                            resource=datasets[key],
                            calc=calc,
                            polygons=region,
                            mosaic=mosaic)
                        results.extend(result)
                except Exception:
                    LOGGER.exception(
                        "failed to calculate percentil based indice for %s " %
                        key)
        except Exception:
            LOGGER.exception("failed to calculate percentile indices")

        tarf = archive(results)

        response.outputs['output_archive'].file = tarf

        # first non-empty result as the sample output
        i = next((i for i, x in enumerate(results) if x), None)
        if i is None:
            # BUG FIX: the original assigned the string 'dummy.nc' to ``i``
            # and then evaluated ``results[i]``, which raises a TypeError
            response.outputs['ncout'].file = "dummy.nc"
        else:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
Exemple #13
0
def calc_indice_percentile(resources=[],
                           variable=None,
                           prefix=None,
                           indices='TG90p',
                           refperiod=None,
                           groupings='yr',
                           polygons=None,
                           percentile=90,
                           mosaic=False,
                           dir_output=None,
                           dimension_map=None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the in netcdf file (default=None)
    :param indices: list of indices (default ='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period string 'YYYYMMDD-YYYYMMDD' (default=None: whole timeseries)
    :param groupings: indices time aggregation (default='yr')
    :param polygons: polygon(s) to clip to (default=None: whole domain)
    :param percentile: percentile for the reference basis (default=90)
    :param mosaic: merge polygons into one clipping geometry (default=False)
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into dir_output.
    """
    from os.path import exists
    from os import makedirs
    import uuid
    from numpy import ma
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time

    # normalise scalar arguments to lists
    if not isinstance(resources, list):
        resources = [resources]
    if not isinstance(indices, list):
        indices = [indices]
    if not isinstance(groupings, list):
        groupings = [groupings]

    if isinstance(refperiod, list):
        refperiod = refperiod[0]

    # BUG FIX: the original parsed refperiod when it was None and skipped
    # parsing when it was given — the condition was inverted
    if refperiod is not None:
        start = dt.strptime(refperiod.split('-')[0], '%Y%m%d')
        end = dt.strptime(refperiod.split('-')[1], '%Y%m%d')
        time_range = [start, end]
    else:
        time_range = None

    # BUG FIX: same inversion for dir_output (and ``makedirs`` was never
    # imported, so the branch would have raised NameError)
    if dir_output is not None:
        if not exists(dir_output):
            makedirs(dir_output)

    ################################################
    # Compute a custom percentile basis using ICCLIM
    ################################################
    from ocgis.contrib import library_icclim as lic
    nc_indices = []
    nc_dic = sort_by_filename(resources)

    window_width = 5  # window handed to ICCLIM's percentile-basis computation

    for grouping in groupings:
        calc_group = calc_grouping(grouping)
        for key in nc_dic.keys():
            resource = nc_dic[key]
            if variable is None:
                variable = get_variable(resource)

            # reference dataset: whole domain or clipped to the polygons
            # BUG FIX: this else-branch and the percentile computation below
            # were dedented out of the per-dataset loop in the original
            if polygons is None:
                nc_reference = call(resource=resource,
                                    prefix=str(uuid.uuid4()),
                                    time_range=time_range,
                                    output_format='nc',
                                    dir_output=dir_output)
            else:
                nc_reference = clipping(resource=resource,
                                        prefix=str(uuid.uuid4()),
                                        time_range=time_range,
                                        output_format='nc',
                                        polygons=polygons,
                                        dir_output=dir_output,
                                        mosaic=mosaic)

            # values and timestamps of the reference dataset feed the basis
            arr = ma.masked_array(get_values(resource=nc_reference))
            dt_arr = ma.masked_array(get_time(resource=nc_reference))

            for indice in indices:
                name = indice.replace('_', str(percentile))
                var = indice.split('_')[0]

                operation = None
                if 'T' in var:
                    # temperature indices: pick the 90p or 10p ICCLIM operator
                    if percentile >= 50:
                        operation = 'Icclim%s90p' % var
                        func = 'icclim_%s90p' % var  # icclim_TG90p
                    else:
                        operation = 'Icclim%s10p' % var
                        func = 'icclim_%s10p' % var

                    ################################
                    # load the appropriate operation
                    ################################

                    ops = [op for op in dir(lic) if operation in op]
                    if len(ops) == 0:
                        raise Exception("operator does not exist %s", operation)

                    # resolve the operator class by name instead of building
                    # an ``exec`` string (same effect, safer)
                    percentile_dict = getattr(lic, ops[0]).get_percentile_dict(
                        arr, dt_arr, percentile, window_width)
                    calc = [{
                        'func': func,
                        'name': name,
                        'kwds': {
                            'percentile_dict': percentile_dict
                        }
                    }]

                    if polygons is None:
                        nc_indices.extend(
                            call(resource=resource,
                                 prefix=key.replace(variable, name).replace(
                                     '_day_', '_%s_' % grouping),
                                 calc=calc,
                                 calc_grouping=calc_group,
                                 output_format='nc',
                                 dir_output=dir_output))
                    else:
                        nc_indices.extend(
                            clipping(
                                resource=resource,
                                prefix=key.replace(variable, name).replace(
                                    '_day_', '_%s_' % grouping),
                                calc=calc,
                                calc_grouping=calc_group,
                                output_format='nc',
                                dir_output=dir_output,
                                polygons=polygons,
                                mosaic=mosaic,
                            ))
    if not nc_indices:
        logger.debug('No indices are calculated')
        return None
    return nc_indices
Exemple #14
0
def calc_indice_simple(resource=[], variable=None, prefix=None, indice='SU',
                       polygons=None, mosaic=False, grouping='yr', dir_output=None,
                       dimension_map=None, memory_limit=None):
    """
    Calculates given simple indices for suitable files in the appropriate time grouping and polygon.

    :param resource: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the in netcdf file (default=None)
    :param indice: indice name (default='SU')
    :param polygons: list of polygons (default=None: whole domain)
    :param mosaic: merge polygons into one clipping geometry (default=False)
    :param grouping: indices time aggregation (default='yr')
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)
    :param memory_limit: memory limit handed to the ocgis call (default=None)

    :return: list of netcdf files with calculated indices (saved into dir_output),
             or None if nothing could be calculated.
    """
    from os.path import exists
    from os import makedirs
    from flyingpigeon import ocgis_module
    from flyingpigeon.subset import clipping
    import uuid

    if not isinstance(resource, list):
        resource = [resource]
    # BUG FIX: the original wrapped ``polygons`` in a list only when it was
    # None (producing [None] and disabling the whole-domain branch below);
    # wrap it only when a scalar polygon was actually passed
    if polygons is not None and not isinstance(polygons, list):
        polygons = [polygons]

    if dir_output is not None:
        if not exists(dir_output):
            makedirs(dir_output)

    datasets = sort_by_filename(resource).keys()

    # BUG FIX: the original left ``key`` unbound for 0 or >1 datasets and
    # crashed with a NameError later; bail out / fall back explicitly
    if not datasets:
        LOGGER.debug('No dataset found in resource')
        return None
    if len(datasets) > 1:
        LOGGER.warning('more than one dataset in resource')
    key = datasets[0]

    output = None
    outputs = []

    if variable is None:
        variable = get_variable(resource)
        LOGGER.debug('Variable detected % s ' % variable)

    try:
        # icclim can't handle 'kg m2 sec'; convert precipitation to 'mm/day'
        if variable == 'pr':
            calc = 'pr=pr*86400'
            ncs = ocgis_module.call(resource=resource,
                                    variable=variable,
                                    dimension_map=dimension_map,
                                    calc=calc,
                                    memory_limit=memory_limit,
                                    prefix=str(uuid.uuid4()),
                                    dir_output=dir_output,
                                    output_format='nc')
        else:
            ncs = resource

        try:
            calc = [{'func': 'icclim_' + indice, 'name': indice}]
            LOGGER.info('calc: %s' % calc)
            try:
                calc_group = calc_grouping(grouping)
                LOGGER.info('calc_group: %s' % calc_group)
                if polygons is None:
                    # whole domain
                    try:
                        prefix = key.replace(variable, indice).replace('_day_', '_%s_' % grouping)
                        LOGGER.debug(' **** dir_output = %s ' % dir_output)
                        tmp = ocgis_module.call(resource=ncs,
                                                variable=variable,
                                                dimension_map=dimension_map,
                                                calc=calc,
                                                calc_grouping=calc_group,
                                                prefix=prefix,
                                                dir_output=dir_output,
                                                output_format='nc')
                        if tmp:
                            outputs.extend(tmp)
                        else:
                            msg = 'could not calc indice %s for domain ' % (indice)
                            LOGGER.exception(msg)
                    except Exception:
                        # BUG FIX: the original format string had two
                        # placeholders but one argument (TypeError in handler)
                        msg = 'could not calc indice %s for domain' % (indice)
                        LOGGER.exception(msg)
                else:
                    # clipped to the given polygons
                    try:
                        prefix = key.replace(variable, indice).replace('_day_', '_%s_' % grouping)
                        tmp = clipping(resource=ncs,
                                       variable=variable,
                                       dimension_map=dimension_map,
                                       calc=calc,
                                       calc_grouping=calc_group,
                                       prefix=prefix,
                                       polygons=polygons,
                                       mosaic=mosaic,
                                       dir_output=dir_output,
                                       output_format='nc')
                        if tmp:
                            outputs.extend(tmp)
                        else:
                            msg = 'could not calc clipped indice %s ' % (indice)
                            LOGGER.exception(msg)
                    except Exception:
                        msg = 'could not calc indice %s for domain' % (indice)
                        LOGGER.debug(msg)
                    LOGGER.info('indice file calculated: %s' % tmp)
            except Exception:
                # BUG FIX: three placeholders, two arguments in the original
                msg = 'could not calc indice %s for key %s and grouping %s' % (indice, key, grouping)
                LOGGER.exception(msg)
        except Exception:
            msg = 'could not calc indice %s ' % (indice)
            LOGGER.exception(msg)
    except Exception:
        msg = 'could not calculate indices'
        LOGGER.exception(msg)

    LOGGER.info('indice outputs %s ' % outputs)

    if not outputs:
        LOGGER.debug('No indices are calculated')
        return None
    return outputs
Exemple #15
0
def calc_indice_percentile(resources=[], variable=None, 
    prefix=None, indices='TG90p', refperiod=None,
    groupings='yr', polygons=None, percentile=90, mosaic = False, 
    dir_output=None, dimension_map = None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the in netcdf file (default=None)
    :param indices: list of indices (default ='TG90p')
    :param prefix: filename prefix 
    :param refperiod: reference period string 'YYYYMMDD-YYYYMMDD' (default=None: whole timeseries)
    :param groupings: indices time aggregation (default='yr')
    :param polygons: polygon(s) to clip to (default=None: whole domain)
    :param percentile: percentile threshold for the reference basis (default=90)
    :param mosaic: merge polygons into one clipping geometry (default=False)
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into out_dir.
    """
    from os.path import join, dirname, exists
    from os import remove
    import uuid
    from numpy import ma 
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time
    
    # normalise scalar arguments to lists
    if type(resources) != list: 
      resources = list([resources])
    if type(indices) != list: 
      indices = list([indices])
      
    if type(groupings) != list: 
      groupings = list([groupings])
      
    if type(refperiod) == list: 
      refperiod = refperiod[0]
      
    # refperiod is encoded as 'YYYYMMDD-YYYYMMDD'
    if refperiod != None:
      start = dt.strptime(refperiod.split('-')[0] , '%Y%m%d')
      end = dt.strptime(refperiod.split('-')[1] , '%Y%m%d')
      time_range = [start, end]
    else:  
      time_range = None
    
    if dir_output != None:
      if not exists(dir_output): 
        # NOTE(review): ``makedirs`` is not imported in this function —
        # presumably available at module level; verify
        makedirs(dir_output)
    
    ########################################################################################################################
    # Compute a custom percentile basis using ICCLIM. ######################################################################
    ########################################################################################################################

    from ocgis.contrib import library_icclim  as lic 
    nc_indices = []
    nc_dic = sort_by_filename(resources)
    
    for grouping in groupings:
      calc_group = calc_grouping(grouping)
      for key in nc_dic.keys():
        resource = nc_dic[key]
        if variable == None: 
          variable = get_variable(resource)
        # reference dataset: whole domain or clipped to the polygons
        if polygons == None:
          nc_reference = call(resource=resource, 
            prefix=str(uuid.uuid4()), 
            time_range=time_range,
            output_format='nc', 
            dir_output=dir_output)
        else:
          nc_reference = clipping(resource=resource, 
            prefix=str(uuid.uuid4()),
            time_range=time_range, 
            output_format='nc', 
            polygons=polygons,
            dir_output=dir_output, 
            mosaic = mosaic)
          
        # values and timestamps of the reference dataset feed the basis
        arr = get_values(resource=nc_reference)
        dt_arr = get_time(resource=nc_reference)
        arr = ma.masked_array(arr)
        dt_arr = ma.masked_array(dt_arr)
        percentile = percentile
        window_width = 5  # window handed to ICCLIM's percentile-basis computation
        
        for indice in indices:
          # build the output variable name from the indice and percentile
          name = indice.replace('_', str(percentile))
          var = indice.split('_')[0]

          operation = None
          if 'T' in var: 
            # temperature indices: pick the 90p or 10p ICCLIM operator
            if percentile >= 50: 
              operation = 'Icclim%s90p' % var
              func = 'icclim_%s90p' % var # icclim_TG90p
            else: 
              operation = 'Icclim%s10p' % var
              func = 'icclim_%s10p' % var
              
          ################################
          # load the appropriate operation
          ################################

          ops = [op for op in dir(lic) if operation in op]
          if len(ops) == 0:
              raise Exception("operator does not exist %s", operation)
          
          # Python-2 exec statement: resolves the ICCLIM operator class by
          # name and computes the percentile basis dictionary
          exec "percentile_dict = lic.%s.get_percentile_dict(arr, dt_arr, percentile, window_width)" % ops[0]
          calc = [{'func': func, 'name': name, 'kwds': {'percentile_dict': percentile_dict}}]
          
          if polygons == None:
            # NOTE(review): this branch uses .append while the clipping branch
            # uses .extend — looks like call() returns a single path; confirm
            nc_indices.append(call(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output))
          else: 
            nc_indices.extend(clipping(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output,
                                polygons=polygons, 
                                mosaic = mosaic,
                                ))
    return nc_indices

#def calc_indice_unconventional(resource=[], variable=None, prefix=None,
  #indices=None, polygons=None,  groupings=None, 
  #dir_output=None, dimension_map = None):
    #"""
    #Calculates given indices for suitable files in the appropriate time grouping and polygon.

    #:param resource: list of filenames in data reference syntax (DRS) convention (netcdf)
    #:param variable: variable name to be selected in the in netcdf file (default=None)
    #:param indices: list of indices (default ='TGx')
    #:param polygons: list of polygons (default =None)
    #:param grouping: indices time aggregation (default='yr')
    #:param out_dir: output directory for result file (netcdf)
    #:param dimension_map: optional dimension map if different to standard (default=None)

    #:return: list of netcdf files with calculated indices. Files are saved into dir_output
    #"""
    
    #from os.path import join, dirname, exists
    #from os import remove
    #import uuid
    #from flyingpigeon import ocgis_module
    #from flyingpigeon.subset import get_ugid, get_geom

    #if type(resource) != list: 
      #resource = list([resource])
    #if type(indices) != list: 
      #indices = list([indices])
    #if type(polygons) != list and polygons != None:
      #polygons = list([polygons])
    #elif polygons == None:
      #polygons = [None]
    #else: 
      #logger.error('Polygons not found')
    #if type(groupings) != list:
      #groupings = list([groupings])
    
    #if dir_output != None:
      #if not exists(dir_output): 
        #makedirs(dir_output)
    
    #experiments = sort_by_filename(resource)
    #outputs = []

    #print('environment for calc_indice_unconventional set')
    #logger.info('environment for calc_indice_unconventional set')
    
    #for key in experiments:
      #if variable == None:
        #variable = get_variable(experiments[key][0])
      #try: 
        #ncs = experiments[key]
        #for indice in indices:
          #logger.info('indice: %s' % indice)
          #try: 
            #for grouping in groupings:
              #logger.info('grouping: %s' % grouping)
              #try:
                #calc_group = calc_grouping(grouping)
                #logger.info('calc_group: %s' % calc_group)
                #for polygon in polygons:  
                  #try:
                    #domain = key.split('_')[1].split('-')[0]
                    #if polygon == None:
                      #if prefix == None: 
                        #prefix = key.replace(variable, indice).replace('_day_','_%s_' % grouping )
                      #geom = None
                      #ugid = None
                    #else:
                      #if prefix == None: 
                        #prefix = key.replace(variable, indice).replace('_day_','_%s_' % grouping ).replace(domain,polygon)
                      #geom = get_geom(polygon=polygon)
                      #ugid = get_ugid(polygons=polygon, geom=geom)
                    #if indice == 'TGx':
                      #calc=[{'func': 'max', 'name': 'TGx'}]
                      #tmp = ocgis_module.call(resource=ncs,# conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping=calc_group, prefix=prefix,
                                              #dir_output=dir_output, geom=geom, select_ugid=ugid)
                    #elif indice == 'TGn':
                      #calc=[{'func': 'min', 'name': 'TGn'}]
                      #tmp = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping= calc_group, prefix=prefix,
                                               #dir_output=dir_output, geom=geom, select_ugid = ugid)
                    #elif indice == 'TGx5day':
                      #calc = [{'func': 'moving_window', 'name': 'TGx5day', 'kwds': {'k': 5, 'operation': 'mean', 'mode': 'same' }}]
                      #tmp2 = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, prefix=str(uuid.uuid4()),
                                              #geom=geom, select_ugid = ugid)
                      #calc=[{'func': 'max', 'name': 'TGx5day'}]
                      #logger.info('moving window calculated : %s' % tmp2)
                      #tmp = ocgis_module.call(resource=tmp2,
                                              #variable=indice, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping=calc_group, prefix=prefix,
                                              #dir_output=dir_output)
                      #remove(tmp2)
                    #elif indice == 'TGn5day':
                      #calc = [{'func': 'moving_window', 'name': 'TGn5day', 'kwds': {'k': 5, 'operation': 'mean', 'mode': 'same' }}]
                      #tmp2 = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, prefix=str(uuid.uuid4()),
                                              #geom=geom, select_ugid = ugid)
                      #calc=[{'func': 'min', 'name': 'TGn5day'}]
                      
                      #logger.info('moving window calculated : %s' % tmp2)
                      
                      #tmp = ocgis_module.call(resource=tmp2,
                                              #variable=indice, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping=calc_group, prefix=prefix,
                                              #dir_output=dir_output)
                      #remove(tmp2)
                    #else: 
                      #logger.error('Indice %s is not a known inidce' % (indice))
                    #outputs.append(tmp)
                    #logger.info('indice file calcualted %s ' % (tmp))
                  #except Exception as e:
                    #logger.debug('could not calc indice %s for key %s, polygon %s and calc_grouping %s : %s' %  (indice, key, polygon, grouping, e ))
              #except Exception as e:
                #logger.debug('could not calc indice %s for key %s and calc_grouping %s : %s' % ( indice, key, polygon, e ))
          #except Exception as e:
            #logger.debug('could not calc indice %s for key %s: %s'%  (indice, key, e ))
      #except Exception as e:
        #logger.debug('could not calc key %s: %s' % (key, e))
    #return outputs
# Demo: subset a dataset to Cameroon ('CMR') in two ways — first via raw
# ocgis operations, then via the flyingpigeon clipping wrapper.
# NOTE(review): ``rd``, ``calc`` and ``resource`` are defined earlier in the
# original script; this snippet is not self-contained.
geom = get_geom('CMR')
ugid = get_ugid(polygons='CMR', geom=geom)

# ocgis route: explicit geometry + ugid selection, netCDF output
ops = OcgOperations(dataset=rd,
                    calc=calc,
                    geom=geom,
                    select_ugid=ugid,
                    output_format='nc',
                    prefix='polygonsubset'
                    ).execute()
print ops

from flyingpigeon import subset
# flyingpigeon route: clipping() resolves the polygon name internally
ops = subset.clipping(resource=resource,
                      variable=None,
                      # dimension_map=None,
                      calc=calc,
                      output_format='nc',
                      # calc_grouping=None,
                      # time_range=None,
                      # time_region=None,
                      # historical_concatination=True,
                      prefix="clipping_call",
                      spatial_wrapping='wrap',
                      polygons='CMR',
                      mosaic=False,
                      dir_output=None,
                      memory_limit=None)

print ops
Exemple #17
0
def calc_indice_percentile(resources=[], variable=None, 
    prefix=None, indices='TG90p', refperiod=None,
    groupings='yr', polygons=None, percentile=90, mosaik = False, 
    dir_output=None, dimension_map = None):
    """
    Calculates given percentile-based indices for suitable files in the
    appropriate time grouping and polygon.

    :param resources: list of filenames in drs convention (netcdf)
    :param variable: variable name to be selected in the netcdf file
                     (default=None: detected from the first dataset)
    :param indices: list of indices (default='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period string 'YYYYMMDD-YYYYMMDD'
                      (default=None: whole time axis is used)
    :param groupings: indices time aggregation (default='yr')
    :param polygons: polygons for spatial subsetting (default=None: no subset)
    :param percentile: percentile for the reference basis (default=90)
    :param mosaik: merge polygons into one subset (forwarded to clipping)
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into dir_output
    """
    from os.path import join, dirname, exists
    from os import remove
    import uuid
    from numpy import ma 
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time
    
    # normalise scalar arguments to lists
    if type(resources) != list: 
      resources = list([resources])
    if type(indices) != list: 
      indices = list([indices])
      
    if type(groupings) != list: 
      groupings = list([groupings])
      
    if type(refperiod) == list: 
      refperiod = refperiod[0]
      
    # parse 'YYYYMMDD-YYYYMMDD' into a [start, end] datetime pair
    if refperiod != None:
      start = dt.strptime(refperiod.split('-')[0] , '%Y%m%d')
      end = dt.strptime(refperiod.split('-')[1] , '%Y%m%d')
      time_range = [start, end]
    else:  
      time_range = None
    
    # NOTE(review): 'makedirs' is not imported in this function; presumably
    # available at module level -- confirm.
    if dir_output != None:
      if not exists(dir_output): 
        makedirs(dir_output)
    
    ########################################################################################################################
    # Compute a custom percentile basis using ICCLIM. ######################################################################
    ########################################################################################################################

    from ocgis.contrib import library_icclim  as lic 
    nc_indices = []
    # 'sort_by_filename' groups the input files into one dataset per key
    # (provided at module level)
    nc_dic = sort_by_filename(resources)
    
    for grouping in groupings:
      calc_group = calc_grouping(grouping)
      for key in nc_dic.keys():
        resource = nc_dic[key]
        # NOTE(review): once detected, 'variable' is cached and reused for all
        # later keys -- wrong if the inputs contain mixed variables; confirm.
        if variable == None: 
          variable = get_variable(resource)
        # build the reference dataset restricted to the reference period
        # (and, when requested, to the polygons)
        if polygons == None:
          nc_reference = call(resource=resource, 
            prefix=str(uuid.uuid4()), 
            time_range=time_range,
            output_format='nc', 
            dir_output=dir_output)
        else:
          nc_reference = clipping(resource=resource, 
            prefix=str(uuid.uuid4()),
            time_range=time_range, 
            output_format='nc', 
            polygons=polygons,
            dir_output=dir_output, 
            mosaik = mosaik)
          
        # values/time of the reference dataset feed the percentile basis
        arr = get_values(nc_files=nc_reference)
        dt_arr = get_time(nc_files=nc_reference)
        arr = ma.masked_array(arr)
        dt_arr = ma.masked_array(dt_arr)
        percentile = percentile  # no-op self-assignment, kept as in original
        window_width = 5  # calendar-day window for the percentile basis
        
        for indice in indices:
          # e.g. an indice like 'TG_p' becomes name 'TG90p' -- presumably the
          # '_' is a percentile placeholder; confirm expected indice spelling
          name = indice.replace('_', str(percentile))
          var = indice.split('_')[0]

          # pick the matching ICCLIM operator class / calc function.
          # NOTE(review): 'operation' stays None for non-temperature variables
          # ('T' not in var), which makes the membership test below raise
          # TypeError -- confirm only T* indices are passed in.
          operation = None
          if 'T' in var: 
            if percentile >= 50: 
              operation = 'Icclim%s90p' % var
              func = 'icclim_%s90p' % var # icclim_TG90p
            else: 
              operation = 'Icclim%s10p' % var
              func = 'icclim_%s10p' % var
              
          ################################
          # load the appropriate operation
          ################################

          ops = [op for op in dir(lic) if operation in op]
          if len(ops) == 0:
              raise Exception("operator does not exist %s", operation)
          
          # Python 2 'exec' statement: compute percentile_dict with the
          # dynamically selected ICCLIM class
          exec "percentile_dict = lic.%s.get_percentile_dict(arr, dt_arr, percentile, window_width)" % ops[0]
          calc = [{'func': func, 'name': name, 'kwds': {'percentile_dict': percentile_dict}}]
          
          # NOTE(review): the no-polygon branch append()s the call() result
          # while the polygon branch extend()s the clipping() result --
          # confirm both return types are handled as intended.
          if polygons == None:
            nc_indices.append(call(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output))
          else: 
            nc_indices.extend(clipping(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output,
                                polygons=polygons, 
                                mosaik = mosaik,
                                ))
    return nc_indices
Exemple #18
0
def calc_indice_simple(resource=[], variable=None, prefix=None,indices=None,
    polygons=None, mosaik = False, groupings='yr', dir_output=None, dimension_map = None, memory_limit=None):
    """
    Calculates given simple (ICCLIM) indices for suitable files in the
    appropriate time grouping and polygon.

    :param resource: list of filenames in drs convention (netcdf)
    :param variable: variable name to be selected in the netcdf files
                     (default=None: detected per dataset)
    :param prefix: output filename prefix; if None, a prefix is derived
                   per input file, indice and grouping
    :param indices: list of indices (e.g. 'SU')
    :param polygons: list of polygons (default=None: no spatial subset)
    :param mosaik: merge polygons into one subset (forwarded to clipping)
    :param groupings: indices time aggregation (default='yr')
    :param dir_output: output directory for result files (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)
    :param memory_limit: memory limit forwarded to the ocgis call

    :return: list of netcdf files with calculated indices. Files are saved into dir_output
    """
    from os.path import exists
    from os import makedirs  # BUGFIX: was used below without an import in scope
    from flyingpigeon import ocgis_module
    from flyingpigeon.subset import clipping
    import uuid

    # normalise scalar arguments to lists
    if type(resource) != list:
        resource = list([resource])
    if type(indices) != list:
        indices = list([indices])
    if type(polygons) != list and polygons != None:
        polygons = list([polygons])
    if type(groupings) != list:
        groupings = list([groupings])

    if dir_output != None:
        if not exists(dir_output):
            makedirs(dir_output)

    # group input files into one dataset per key (module-level helper)
    experiments = sort_by_filename(resource)
    outputs = []

    for key in experiments:
        # BUGFIX: detect the variable per dataset instead of caching the first
        # hit in 'variable' -- mixed-variable input previously reused a stale
        # variable name for all later keys.
        if variable == None:
            var = get_variable(experiments[key][0])
        else:
            var = variable
        try:
            if var == 'pr':
                # ICCLIM expects precipitation in mm/day, so rescale the flux
                calc = 'pr=pr*86400'
                ncs = ocgis_module.call(resource=experiments[key],
                                        variable=var,
                                        dimension_map=dimension_map,
                                        calc=calc,
                                        memory_limit=memory_limit,
                                        prefix=str(uuid.uuid4()),
                                        dir_output=dir_output,
                                        output_format='nc')
            else:
                ncs = experiments[key]
            for indice in indices:
                logger.info('indice: %s' % indice)
                try:
                    calc = [{'func' : 'icclim_' + indice, 'name' : indice}]
                    logger.info('calc: %s' % calc)
                    for grouping in groupings:
                        logger.info('grouping: %s' % grouping)
                        try:
                            calc_group = calc_grouping(grouping)
                            logger.info('calc_group: %s' % calc_group)
                            # BUGFIX: derive the prefix per key/indice/grouping.
                            # The previous code assigned the first derived value
                            # back to the 'prefix' argument, so every subsequent
                            # output file reused (and overwrote) the same name.
                            if prefix == None:
                                out_prefix = key.replace(var, indice).replace('_day_', '_%s_' % grouping)
                            else:
                                out_prefix = prefix
                            if polygons == None:
                                try:
                                    tmp = ocgis_module.call(resource=ncs,
                                                            variable=var,
                                                            dimension_map=dimension_map,
                                                            calc=calc,
                                                            calc_grouping=calc_group,
                                                            prefix=out_prefix,
                                                            dir_output=dir_output,
                                                            output_format='nc')
                                    outputs.append(tmp)
                                except Exception as e:
                                    msg = 'could not calc indice %s for domain in %s' % (indice, key)
                                    logger.exception(msg)
                                    raise Exception(msg)
                            else:
                                try:
                                    tmp = clipping(resource=ncs,
                                                   variable=var,
                                                   dimension_map=dimension_map,
                                                   calc=calc,
                                                   calc_grouping=calc_group,
                                                   prefix=out_prefix,
                                                   polygons=polygons,
                                                   mosaik=mosaik,
                                                   dir_output=dir_output,
                                                   output_format='nc')
                                    outputs.append(tmp)
                                except Exception as e:
                                    msg = 'could not calc indice %s for domain in %s' % (indice, key)
                                    logger.exception(msg)
                                    raise Exception(msg)
                            logger.info('indice file calculated')
                        except Exception as e:
                            msg = 'could not calc indice %s for key %s and grouping %s' % (indice, key, grouping)
                            logger.exception(msg)
                            raise Exception(msg)
                except Exception as e:
                    msg = 'could not calc indice %s for key %s' % (indice, key)
                    logger.exception(msg)
                    raise Exception(msg)
        except Exception as e:
            msg = 'could not calc key %s' % key
            logger.exception(msg)
            raise Exception(msg)
    return outputs
    def _handler(self, request, response):
        """
        Subset the input netcdf resources to the requested regions and return
        a tar archive of all results plus one sample netcdf file.

        NOTE(review): assumes a pywps Process with inputs 'resource',
        optional 'mosaic' and 'region', and outputs 'output' (tar) and
        'ncout' (netcdf) -- confirm against the process declaration.
        """
        # input files
        LOGGER.debug("url={}, mime_type={}".format(request.inputs['resource'][0].url,
                     request.inputs['resource'][0].data_format.mime_type))
        ncs = extract_archive(
            resources=[inpt.file for inpt in request.inputs['resource']],
            dir_output=self.workdir)
        # mime_type=request.inputs['resource'][0].data_format.mime_type)
        # mosaic option
        # TODO: fix defaults in pywps 4.x
        if 'mosaic' in request.inputs:
            mosaic = request.inputs['mosaic'][0].data
        else:
            mosaic = False
        # regions used for subsetting
        regions = [inp.data for inp in request.inputs['region']]

        LOGGER.info('ncs: {}'.format(ncs))
        LOGGER.info('regions: {}'.format(regions))
        LOGGER.info('mosaic: {}'.format(mosaic))
        LOGGER.info('flyingpigeon dir_output : {}'.format(abspath(self.workdir)))

        response.update_status("Arguments set for subset process", 0)
        LOGGER.debug('starting: regions={}, num_files={}'.format(len(regions), len(ncs)))

        # clip all input files to the requested polygons; any failure aborts
        # the whole request with a wrapped exception
        try:
            results = clipping(
                resource=ncs,
                polygons=regions,
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                dir_output=self.workdir,
                # dimension_map=dimension_map,
            )
            LOGGER.info('results %s' % results)

        except Exception as ex:
            msg = 'Clipping failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('No results produced.')

        # prepare tar file
        try:
            tarf = archive(results, dir_output=self.workdir)
            LOGGER.info('Tar file prepared')

        except Exception as ex:
            msg = 'Tar file preparation failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output'].file = tarf

        # first non-empty entry of the results serves as the sample netcdf
        i = next((i for i, x in enumerate(results) if x), None)
        response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
    def _handler(self, request, response):
        """
        Subset the input netcdf resources to the requested regions (if any)
        and build a factsheet from region, uncertainty, spaghetti and
        robustness graphics.

        Any failing plotting step aborts the request with a wrapped
        exception. (The former ``mkstemp`` fallback lines placed after each
        ``raise`` were unreachable dead code and have been removed; behavior
        is unchanged.)

        NOTE(review): assumes a pywps Process with inputs 'resource' and
        optional 'region', and outputs 'output_log', 'output_nc' and
        'output_factsheet' -- confirm against the process declaration.
        """
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        var = get_variable(ncs[0])
        LOGGER.info('variable to be plotted: {}'.format(var))

        if 'region' in request.inputs:
            regions = [inp.data for inp in request.inputs['region']]
            try:
                png_region = vs.plot_polygons(regions)
            except Exception as ex:
                msg = 'failed to plot the polygon to world map: {}'.format(
                    str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            # clip the demanded polygons
            subsets = clipping(
                resource=ncs,
                variable=var,
                polygons=regions,
                mosaic=True,
                spatial_wrapping='wrap',
            )
        else:
            # no regions requested: use the unclipped input and plot its extent
            subsets = ncs
            png_region = vs.plot_extend(ncs[0])

        response.update_status('Arguments set for subset process', 0)

        try:
            tar_subsets = archive(subsets)
        except Exception as ex:
            msg = 'failed to archive subsets: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            png_uncertainty = vs.uncertainty(subsets, variable=var)
        except Exception as ex:
            msg = 'failed to generate the uncertainty plot: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            png_spaghetti = vs.spaghetti(
                subsets,
                variable=var,
            )
        except Exception as ex:
            msg = 'failed to generate the spaghetti plot: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            from flyingpigeon import robustness as ro
            signal, low_agreement_mask, high_agreement_mask, text_src = ro.signal_noise_ratio(
                resource=subsets,
            )
            png_robustness = vs.map_robustness(
                signal,
                high_agreement_mask,
                low_agreement_mask,
            )
            LOGGER.info('robustness graphic generated')
        except Exception as ex:
            msg = 'failed to generate the robustness plot: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        # bundle all graphics into the factsheet document
        factsheet = vs.factsheetbrewer(png_region=png_region,
                                       png_uncertainty=png_uncertainty,
                                       png_spaghetti=png_spaghetti,
                                       png_robustness=png_robustness)

        response.outputs['output_nc'].file = tar_subsets
        response.outputs['output_factsheet'].file = factsheet
        response.update_status("done", 100)
        return response
Exemple #21
0
def calc_indice_percentile(resource=[], variable=None,
                           prefix=None, indices='TG90p', refperiod=None,
                           grouping='yr', polygons=None, percentile=90, mosaic=False,
                           dir_output=None, dimension_map=None):
    """
    Calculates given indices for suitable dataset in the appropriate time grouping and polygon.

    NOTE(review): this function is under construction and raises
    NotImplementedError unconditionally (see the ticket referenced below);
    everything after that raise is unreachable, and the final
    ``return nc_indices`` references a name that is never defined.

    :param resource: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the in netcdf file (default=None)
    :param indices: string of indice (default ='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period  = [datetime,datetime]
    :param grouping: indices time aggregation (default='yr')
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: reference_file, indice_file
    """
    from os.path import join, dirname, exists
    from os import remove
    import uuid
    from numpy import ma
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time

    # TODO: see ticket https://github.com/bird-house/flyingpigeon/issues/200
    raise NotImplementedError('Sorry! Function is under construction.')

    # --- everything below is unreachable until the raise above is removed ---
    if type(resource) != list:
        resource = list([resource])

    # if type(indices) != list:
    #     indices = list([indices])
    #
    # if type(groupings) != list:
    #     groupings = list([groupings])
    #
    # if type(refperiod) == list:
    #     refperiod = refperiod[0]
    #
    # if refperiod is not None:
    #     start = dt.strptime(refperiod.split('-')[0], '%Y%m%d')
    #     end = dt.strptime(refperiod.split('-')[1], '%Y%m%d')
    #     time_range = [start, end]
    # else:
    #     time_range = None

    ################################################
    # Compute a custom percentile basis using ICCLIM
    ################################################
    from ocgis.contrib import library_icclim as lic

    calc_group = calc_grouping(grouping)

    if variable is None:
        variable = get_variable(resource)

    # build the reference dataset restricted to the reference period
    # (and, when requested, to the polygons)
    if polygons is None:
        nc_reference = call(resource=resource,
                            prefix=str(uuid.uuid4()),
                            time_range=refperiod,
                            output_format='nc')
    else:
        nc_reference = clipping(resource=resource,
                                prefix=str(uuid.uuid4()),
                                time_range=refperiod,
                                output_format='nc',
                                polygons=polygons,
                                mosaic=mosaic)

    # arr = get_values(resource=nc_reference)
    # dt_arr = get_time(resource=nc_reference)
    # arr = ma.masked_array(arr)
    # dt_arr = ma.masked_array(dt_arr)
    # percentile = percentile
    # window_width = 5

    #     for indice in indices:
    #         name = indice.replace('_', str(percentile))
    #         var = indice.split('_')[0]
    #
    #         operation = None
    #         if 'T' in var:
    #             if percentile >= 50:
    #                 operation = 'Icclim%s90p' % var
    #                 func = 'icclim_%s90p' % var  # icclim_TG90p
    #             else:
    #                 operation = 'Icclim%s10p' % var
    #                 func = 'icclim_%s10p' % var
    #
    #             ################################
    #             # load the appropriate operation
    #             ################################
    #
    #             ops = [op for op in dir(lic) if operation in op]
    #             if len(ops) == 0:
    #                 raise Exception("operator does not exist %s", operation)
    #
    #             exec "percentile_dict = lic.%s.get_percentile_dict(arr, dt_arr, percentile, window_width)" % ops[0]
    #             calc = [{'func': func, 'name': name, 'kwds': {'percentile_dict': percentile_dict}}]
    #
    #             if polygons is None:
    #                 nc_indices.extend(call(resource=resource,
    #                                        prefix=key.replace(variable, name).replace('_day_', '_%s_' % grouping),
    #                                        calc=calc,
    #                                        calc_grouping=calc_group,
    #                                        output_format='nc'))
    #             else:
    #                 nc_indices.extend(clipping(resource=resource,
    #                                            prefix=key.replace(variable, name).replace('_day_', '_%s_' % grouping),
    #                                            calc=calc,
    #                                            calc_grouping=calc_group,
    #                                            output_format='nc',
    #                                            polygons=polygons,
    #                                            mosaic=mosaic,
    #                                            ))
    # if len(nc_indices) is 0:
    #     LOGGER.debug('No indices are calculated')
    #     return None
    # NOTE(review): 'nc_indices' is never assigned in live code -- this would
    # raise NameError if the function ever ran past the raise above.
    return nc_indices
    def _handler(self, request, response):
        """
        Clip the input netcdf resources to the requested regions (mosaic)
        and build a factsheet from country map, uncertainty, spaghetti and
        robustness graphics.

        Graphics generation is best-effort: a failing plot is logged and
        replaced by an empty temporary file so the factsheet can still be
        assembled. (Bare ``except:`` clauses were narrowed to
        ``except Exception:`` so SystemExit/KeyboardInterrupt still
        propagate; the fallback behavior is unchanged.)

        NOTE(review): assumes a pywps Process with inputs 'resource' and
        'region', and outputs 'output_log', 'output_nc' and
        'output_factsheet' -- confirm against the process declaration.
        """
        from flyingpigeon.utils import archive, archiveextract
        from tempfile import mkstemp

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        regions = [inp.data for inp in request.inputs['region']]

        response.update_status('Arguments set for subset process', 0)
        LOGGER.debug('starting: regions=%s, num_files=%s' % (len(regions), len(ncs)))

        try:
            from flyingpigeon.visualisation import plot_polygons
            png_country = plot_polygons(regions)
        except Exception:
            LOGGER.exception('failed to plot the polygon to world map')
            o1, png_country = mkstemp(dir='.', suffix='.png')

        # clip the demanded polygons
        from flyingpigeon.subset import clipping
        subsets = clipping(resource=ncs, variable=None,
                           dimension_map=None,
                           calc=None,
                           output_format='nc',
                           calc_grouping=None,
                           time_range=None,
                           time_region=None,
                           historical_concatination=True,
                           prefix=None,
                           spatial_wrapping='wrap',
                           polygons=regions,
                           mosaic=True
                           )

        try:
            tar_subsets = archive(subsets)
        except Exception:
            LOGGER.exception('failed to archive subsets')
            _, tar_subsets = mkstemp(dir='.', suffix='.tar')

        try:
            from flyingpigeon.visualisation import uncertainty
            png_uncertainty = uncertainty(subsets)
        except Exception:
            LOGGER.exception('failed to generate the uncertainty plot')
            _, png_uncertainty = mkstemp(dir='.', suffix='.png')

        try:
            from flyingpigeon.visualisation import spaghetti
            png_spaghetti = spaghetti(subsets)
        except Exception:
            LOGGER.exception('failed to generate the spaghetti plot')
            _, png_spaghetti = mkstemp(dir='.', suffix='.png')

        try:
            from flyingpigeon import robustness as erob
            from flyingpigeon.utils import get_variable
            variable = get_variable(ncs[0])

            signal, low_agreement_mask, high_agreement_mask, text_src = erob.method_A(resource=subsets,
                                                                                      # start=None, end=None,
                                                                                      # timeslice=None,
                                                                                      variable=variable
                                                                                      )
            LOGGER.info('variable to be plotted: %s' % variable)
            from flyingpigeon.visualisation import map_robustness
            # if title is None:
            title = 'signal robustness of %s ' % (variable)  # , end1, end2, start1, start2
            png_robustness = map_robustness(signal,
                                            high_agreement_mask,
                                            low_agreement_mask,
                                            # cmap=cmap,
                                            title=title)
            LOGGER.info('graphic generated')

        except Exception:
            LOGGER.exception('failed to generate the robustness plot')
            _, png_robustness = mkstemp(dir='.', suffix='.png')

        # bundle all graphics into the factsheet document
        from flyingpigeon.visualisation import factsheetbrewer
        factsheet = factsheetbrewer(png_country=png_country,
                                    png_uncertainty=png_uncertainty,
                                    png_spaghetti=png_spaghetti,
                                    png_robustness=png_robustness)

        response.outputs['output_nc'].file = tar_subsets
        response.outputs['output_factsheet'].file = factsheet
        response.update_status("done", 100)
        return response