Example #1
def test_get_values():
    values = utils.get_values(local_path(TESTDATA['cmip5_tasmax_2007_nc']))
    assert 12 == len(values)

    values = utils.get_values(local_path(TESTDATA['cordex_tasmax_2007_nc']))
    assert 12 == len(values)

    values = utils.get_values([local_path(TESTDATA['cordex_tasmax_2006_nc']),
                               local_path(TESTDATA['cordex_tasmax_2007_nc'])])
    assert 23 == len(values)
Example #2
def test_get_values():
    values = utils.get_values(local_path(TESTDATA["cmip5_tasmax_2007_nc"]))
    assert 12 == len(values)

    values = utils.get_values(local_path(TESTDATA["cordex_tasmax_2007_nc"]))
    assert 12 == len(values)

    values = utils.get_values(
        [local_path(TESTDATA["cordex_tasmax_2006_nc"]), local_path(TESTDATA["cordex_tasmax_2007_nc"])]
    )
    assert 23 == len(values)
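For context, utils.get_values reads the data values of a file's main variable; note that the two-file call above yields 23 rather than 24 steps, which suggests overlapping time steps are merged during historical concatenation. A minimal single-file sketch of such a reader (a hypothetical helper, not the flyingpigeon implementation):

from netCDF4 import Dataset

def get_values_sketch(path, variable):
    # Return the data array of `variable` from one netCDF file.
    with Dataset(path) as ds:
        return ds.variables[variable][:]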
Example #3
    def execute(self):
        from flyingpigeon.ocgis_module import call
        from flyingpigeon.utils import sort_by_filename, archive, get_values, get_time

        ncs = self.getInputValues(identifier='resource')
        logger.info("ncs: %s " % ncs)
        coords = self.getInputValues(identifier='coords')
        logger.info("coords %s", coords)
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        from numpy import savetxt, column_stack
        from shapely.geometry import Point

        for key in nc_exp.keys():
            try:
                logger.info('start calculation for %s ' % key)
                ncs = nc_exp[key]
                times = get_time(ncs, format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '%s.csv' % key
                filenames.append(filename)

                for p in coords:
                    try:
                        self.status.set('processing point : {0}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs,
                                          geom=point,
                                          select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',%s-%s' % (p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        logger.debug('failed for point %s %s' % (p, e))
                self.status.set(
                    '*** all points processed for {0} ****'.format(key), 50)
                savetxt(filename,
                        concat_vals,
                        fmt='%s',
                        delimiter=',',
                        header=header)
            except Exception as e:
                logger.debug('failed for %s %s' % (key, e))

        # set the outputs
        self.status.set('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        self.tarout.setValue(tarout_file)
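The CSV assembly in this handler is plain NumPy: the date strings form the first column and every coordinate point contributes one value column. A standalone sketch with made-up data (file name and values are illustrative only):

from numpy import column_stack, savetxt

times = ['2007-01-16_12:00:00', '2007-02-15_00:00:00']
vals_p1 = [280.1, 281.4]  # time series extracted at the first point
vals_p2 = [279.8, 280.9]  # time series extracted at the second point
concat_vals = column_stack([times, vals_p1, vals_p2])  # everything becomes str
savetxt('example.csv', concat_vals, fmt='%s', delimiter=',',
        header='date_time,10.00-52.50,11.00-53.00')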
Example #4
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info('ncs: {}'.format(ncs))

        coords = []
        for coord in request.inputs['coords']:
            coords.append(coord.data)

        LOGGER.info('coords {}'.format(coords))
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for {}'.format(key))
                ncs = nc_exp[key]
                times = get_time(ncs)  # , format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '{}.csv'.format(key)
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point: {}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',{}-{}'.format(p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point {} {}'.format(p, e))
                response.update_status('*** all points processed for {0} ****'.format(key), 50)

                # TODO: Ascertain whether this 'savetxt' is a valid command without string formatting argument: '%s'
                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as ex:
                LOGGER.debug('failed for {}: {}'.format(key, str(ex)))

        # set the outputs
        response.update_status('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        response.outputs['tarout'].file = tarout_file
        return response
Example #5
  def execute(self):
    from flyingpigeon.ocgis_module import call
    from flyingpigeon.utils import sort_by_filename, archive, get_values, get_time
        
    ncs = self.getInputValues(identifier='netcdf_file')
    logger.info("ncs: %s " % ncs) 
    coords = self.getInputValues(identifier='coords')
    logger.info("coords %s", coords)
    filenames = []    
    nc_exp = sort_by_filename(ncs, historical_concatination=True)
    
    #(fp_tar, tarout_file) = tempfile.mkstemp(dir=".", suffix='.tar')
    #tar = tarfile.open(tarout_file, "w")

    from numpy import savetxt, column_stack
    from shapely.geometry import Point
    
    for key in nc_exp.keys():
      try:
        logger.info('start calculation for %s ' % key )
        ncs = nc_exp[key]
        times = get_time(ncs)
        concat_vals = ['%s-%02d-%02d_%02d:%02d:%02d' %
                       (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
        header = 'date_time'
        filename = '%s.csv' % key
        filenames.append(filename) 
        
        for p in coords:
          try: 
            self.status.set('processing point : {0}'.format(p), 20)
            # define the point:  
            p = p.split(',')
            point = Point(float(p[0]), float(p[1]))       
            
            # get the values
            timeseries = call(resource=ncs, geom=point, select_nearest=True)
            vals = get_values(timeseries)
            
            # concatenation of values
            header = header + ',%s-%s' % (p[0], p[1])
            concat_vals = column_stack([concat_vals, vals])
          except Exception as e: 
            logger.debug('failed for point %s %s' % (p , e))
        self.status.set('*** all points processed for {0} ****'.format(key), 50)
        savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
      except Exception as e: 
        logger.debug('failed for %s %s' % (key, e))

    ### set the outputs
    self.status.set('*** creating output tar archive ****',90) 
    tarout_file = archive(filenames)
    self.tarout.setValue( tarout_file )
Example #6
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info("ncs: %s " % ncs)
        coords = [coord.data for coord in request.inputs['coords']]  # extract the literal values, as in the newer handler above
        LOGGER.info("coords %s", coords)
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for %s ' % key)
                ncs = nc_exp[key]
                times = get_time(ncs, format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '%s.csv' % key
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point : {0}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',%s-%s' % (p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point %s %s' % (p, e))
                response.update_status('*** all points processed for {0} ****'.format(key), 50)
                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as e:
                LOGGER.debug('failed for %s %s' % (key, e))

        # set the outputs
        response.update_status('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        response.outputs['tarout'].file = tarout_file
        return response
Example #7
def get_PAmask(coordinates=[], nc=None):
    """
    generates a matrix with 1/0 values over land areas. (NaN for water regions)

    :param coordinates: 2D array with lat lon coordinates representing tree observation
    :param domain: region (default='EUR-11')

    :return : PAmask
    """
    from scipy import spatial
    import numpy as np
    import numpy.ma as ma

#    from netCDF4 import Dataset
#    from flyingpigeon import config
#    DIR_MASKS = config.masks_dir()

    from flyingpigeon.utils import get_variable
    from flyingpigeon.utils import get_coordinates, get_values  # unrotate_pole,

    lats, lons = np.array(get_coordinates(nc))  # unrotate_pole(nc, write_to_file=False))
    sftlf = get_values(nc)[0, :, :]
    #
    # sftlf[sftlf.mask is True] = 0
    # sftlf[sftlf.mask is False] = np.nan

    domain = sftlf.shape

    lats1D = np.array(lats).ravel()
    lons1D = np.array(lons).ravel()
    tree = spatial.KDTree(list(zip(lats1D, lons1D)))  # list() needed under Python 3
    _, i = tree.query(coordinates)  # indices of the nearest grid cells

    PA = np.zeros(len(lats1D))
    PA[i] = 1
    #
    PAmask = PA.reshape(domain)
    PAmask[sftlf.mask] = np.nan

    return PAmask
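A hypothetical call of get_PAmask, assuming a land-sea mask file is at hand (the file name is illustrative only):

import numpy as np

observations = np.array([[48.1, 11.5], [52.5, 13.4]])  # (lat, lon) pairs
mask = get_PAmask(coordinates=observations, nc='sftlf_EUR-11.nc')
print(np.nansum(mask))  # number of grid cells marked as presence points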
Example #8
def fieldmean(resource):
    """
    calculating of a weighted field mean

    :param resource: str or list of str containing the netCDF files pathes

    :return list: averaged values
    """
    from flyingpigeon.utils import get_values, get_coordinates
    from numpy import radians, average, cos, sqrt

    data = get_values(resource)  # np.squeeze(ds.variables[variable][:])
    dim = data.shape
    if len(data.shape) == 3:
        # TODO if data.shape == 2 , 4 ...

        lats, lons = get_coordinates(resource, unrotate=True)

        if len(lats.shape) == 2:
            # TODO: calculate weighted average with 2D lats (rotated pole coordinates)
            lats, lons = get_coordinates(resource)

        if dim[0] == len(lats):
            lat_index = 0
        elif dim[1] == len(lats):
            lat_index = 1
        elif dim[2] == len(lats):
            lat_index = 2
        else:
            raise Exception(
                'length of latitude is not matching values dimensions')

        lat_w = sqrt(cos(lats * radians(1)))
        meanLon = average(data, axis=lat_index, weights=lat_w)
        meanTimeserie = average(meanLon, axis=1)
        print('fieldmean calculated')
    else:
        raise Exception('not 3D shaped data. Average cannot be calculated')
    return meanTimeserie
Example #9
def fieldmean(resource):
    """
    calculating of a weighted field mean

    :param resource: str or list of str containing the netCDF files pathes

    :return list: timeseries of the averaged values per timepstep
    """
    from flyingpigeon.utils import get_values, get_coordinates, get_index_lat
    from numpy import radians, average, cos, sqrt, array

    data = get_values(resource)  # np.squeeze(ds.variables[variable][:])
    dim = data.shape
    LOGGER.debug(data.shape)

    if len(data.shape) == 3:
        # TODO if data.shape == 2 , 4 ...
        lats, lons = get_coordinates(resource, unrotate=False)
        lats = array(lats)
        if len(lats.shape) == 2:
            lats = lats[:, 0]
        else:
            LOGGER.debug('Latitudes not reduced to 1D')
        # TODO: calculate weighted average with 2D lats (rotated pole coordinates)
        # lats, lons = get_coordinates(resource, unrotate=False)
        # if len(lats.shape) == 2:
        #     lats, lons = get_coordinates(resource)

        lat_index = get_index_lat(resource)
        LOGGER.debug('lats dimension %s ' % len(lats.shape))
        LOGGER.debug('lats index %s' % lat_index)

        lat_w = sqrt(cos(lats * radians(1)))
        meanLon = average(data, axis=lat_index, weights=lat_w)
        meanTimeserie = average(meanLon, axis=1)
        LOGGER.debug('fieldmean calculated')
    else:
        LOGGER.error('not 3D shaped data. Average cannot be calculated')
        raise Exception('Average can only be calculated for 3D data')
    return meanTimeserie
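The weighting step in isolation: both fieldmean versions apply sqrt(cos(lat)) weights along the latitude axis of a (time, lat, lon) cube and then average over longitude. A self-contained sketch with synthetic data:

import numpy as np

data = np.random.rand(4, 3, 5)                      # (time, lat, lon)
lats = np.array([40.0, 45.0, 50.0])
lat_w = np.sqrt(np.cos(np.radians(lats)))           # same weights as above
mean_lon = np.average(data, axis=1, weights=lat_w)  # -> (time, lon)
fieldmean = mean_lon.mean(axis=1)                   # -> (time,)
print(fieldmean.shape)                              # (4,)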
Example #10
def calc_indice_percentile(resources=[],
                           variable=None,
                           prefix=None,
                           indices='TG90p',
                           refperiod=None,
                           groupings='yr',
                           polygons=None,
                           percentile=90,
                           mosaic=False,
                           dir_output=None,
                           dimension_map=None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the netcdf file (default=None)
    :param indices: list of indices (default='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period tuple = (start, end)
    :param groupings: indices time aggregation (default='yr')
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into out_dir.
    """
    from os.path import join, dirname, exists
    from os import remove, makedirs
    import uuid
    from numpy import ma
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time

    if type(resources) != list:
        resources = list([resources])
    if type(indices) != list:
        indices = list([indices])

    if type(groupings) != list:
        groupings = list([groupings])

    if type(refperiod) == list:
        refperiod = refperiod[0]

    if refperiod is not None:
        start = dt.strptime(refperiod.split('-')[0], '%Y%m%d')
        end = dt.strptime(refperiod.split('-')[1], '%Y%m%d')
        time_range = [start, end]
    else:
        time_range = None

    if dir_output is not None:
        if not exists(dir_output):
            makedirs(dir_output)

    ################################################
    # Compute a custom percentile basis using ICCLIM
    ################################################
    from ocgis.contrib import library_icclim as lic
    nc_indices = []
    nc_dic = sort_by_filename(resources)

    for grouping in groupings:
        calc_group = calc_grouping(grouping)
        for key in nc_dic.keys():
            resource = nc_dic[key]
            if variable is None:
                variable = get_variable(resource)
            if polygons is None:
                nc_reference = call(resource=resource,
                                    prefix=str(uuid.uuid4()),
                                    time_range=time_range,
                                    output_format='nc',
                                    dir_output=dir_output)
            else:
                nc_reference = clipping(resource=resource,
                                        prefix=str(uuid.uuid4()),
                                        time_range=time_range,
                                        output_format='nc',
                                        polygons=polygons,
                                        dir_output=dir_output,
                                        mosaic=mosaic)

            arr = get_values(resource=nc_reference)
            dt_arr = get_time(resource=nc_reference)
            arr = ma.masked_array(arr)
            dt_arr = ma.masked_array(dt_arr)
            window_width = 5

            for indice in indices:
                name = indice.replace('_', str(percentile))
                var = indice.split('_')[0]

                operation = None
                if 'T' in var:
                    if percentile >= 50:
                        operation = 'Icclim%s90p' % var
                        func = 'icclim_%s90p' % var  # icclim_TG90p
                    else:
                        operation = 'Icclim%s10p' % var
                        func = 'icclim_%s10p' % var

                    ################################
                    # load the appropriate operation
                    ################################

                    ops = [op for op in dir(lic) if operation in op]
                    if len(ops) == 0:
                        raise Exception("operator does not exist %s" % operation)

                    # getattr replaces the original Python-2-only `exec` statement
                    percentile_dict = getattr(lic, ops[0]).get_percentile_dict(
                        arr, dt_arr, percentile, window_width)
                    calc = [{
                        'func': func,
                        'name': name,
                        'kwds': {
                            'percentile_dict': percentile_dict
                        }
                    }]

                    if polygons is None:
                        # append (not extend), matching the other versions of this function
                        nc_indices.append(
                            call(resource=resource,
                                 prefix=key.replace(variable, name).replace(
                                     '_day_', '_%s_' % grouping),
                                 calc=calc,
                                 calc_grouping=calc_group,
                                 output_format='nc',
                                 dir_output=dir_output))
                    else:
                        nc_indices.extend(
                            clipping(
                                resource=resource,
                                prefix=key.replace(variable, name).replace(
                                    '_day_', '_%s_' % grouping),
                                calc=calc,
                                calc_grouping=calc_group,
                                output_format='nc',
                                dir_output=dir_output,
                                polygons=polygons,
                                mosaic=mosaic,
                            ))
    if len(nc_indices) == 0:
        logger.debug('No indices are calculated')
        return None
    return nc_indices
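The operator lookup in these calc_indice_percentile versions was originally a Python-2-only exec string; the getattr form used here works on Python 2 and 3 alike. The pattern in isolation, with a stand-in namespace instead of ocgis.contrib.library_icclim (class and return value are dummies):

from types import SimpleNamespace

class IcclimTG90p:  # stand-in for the real ICCLIM operation class
    @staticmethod
    def get_percentile_dict(arr, dt_arr, percentile, window_width):
        return {'percentile': percentile, 'window_width': window_width}

lic = SimpleNamespace(IcclimTG90p=IcclimTG90p)    # stands in for the lic module
operation = 'IcclimTG90p'
ops = [op for op in dir(lic) if operation in op]  # find matching attribute names
percentile_dict = getattr(lic, ops[0]).get_percentile_dict(None, None, 90, 5)
print(percentile_dict)  # {'percentile': 90, 'window_width': 5}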
Example #11
def map_spatial_analog(ncfile, variable='dissimilarity', cmap='viridis', title='Spatial analog'):
    """Return a matplotlib Figure instance showing a map of the dissimilarity measure.
    """
    import netCDF4 as nc
    from flyingpigeon import utils
    from mpl_toolkits.axes_grid1 import make_axes_locatable  # axes_grid is deprecated
    import matplotlib.axes as maxes

    try:
        var = utils.get_values(ncfile, variable)
        LOGGER.info('Data loaded')

        lats, lons = utils.get_coordinates(ncfile, variable=variable, unrotate=False)

        if len(lats.shape) == 1:
            cyclic_var, cyclic_lons = add_cyclic_point(var, coord=lons)

            lons = cyclic_lons.data
            var = cyclic_var

        with nc.Dataset(ncfile) as D:
            V = D.variables[variable]
            lon, lat = map(float, V.target_location.split(','))

        LOGGER.info('Lat and lon loaded')

    except Exception as e:
        msg = 'Failed to get data for plotting: {0}\n{1}'.format(ncfile, e)
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        fig = plt.figure(facecolor='w', edgecolor='k')
        fig.subplots_adjust(top=.95, bottom=.05, left=.03, right=.95)

        ax = plt.axes(
            projection=ccrs.Robinson(central_longitude=int(np.mean(lons))))

        divider = make_axes_locatable(ax)
        cax = divider.new_horizontal("4%", pad=0.15, axes_class=maxes.Axes)
        fig.add_axes(cax)

        ax.plot(lon, lat, marker='o', mfc='#292421', ms=13, transform=ccrs.PlateCarree())
        ax.plot(lon, lat, marker='o', mfc='#ffffff', ms=7, transform=ccrs.PlateCarree())

        cs = ax.contourf(lons, lats, var, 60,
                         transform=ccrs.PlateCarree(),
                         cmap=cmap)  # contourf takes no `interpolation` kwarg

        ax.coastlines(color='k', linewidth=.8)
        ax.set_title(title)

        cb = plt.colorbar(cs, cax=cax, orientation='vertical')
        cb.set_label(u"–            Dissimilarity             +")  # ha='left', va='center')
        cb.set_ticks([])

    except Exception:
        msg = 'failed to plot graphic'
        LOGGER.exception(msg)

    LOGGER.info('Plot created and figure saved')
    return fig
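add_cyclic_point, used above for 1D longitude grids, appends a wrap-around column so filled contours close at the dateline. A minimal demonstration with synthetic data:

import numpy as np
from cartopy.util import add_cyclic_point

var = np.random.rand(3, 4)                   # (lat, lon)
lons = np.array([0.0, 90.0, 180.0, 270.0])   # evenly spaced, as required
cyclic_var, cyclic_lons = add_cyclic_point(var, coord=lons)
print(cyclic_var.shape, cyclic_lons)         # (3, 5) [  0.  90. 180. 270. 360.]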
Example #12
def map_robustness(signal, high_agreement_mask, low_agreement_mask,
                   variable=None, cmap='seismic', title=None,
                   file_extension='png'):
    """
    generates a graphic for the output of the ensembleRobustness process for a lat/long file.

    :param signal: netCDF file containing the signal difference over time
    :param highagreement:
    :param lowagreement:
    :param variable:
    :param cmap: default='seismic',
    :param title: default='Model agreement of signal'
    :returns str: path/to/file.png
    """
    from flyingpigeon import utils
    from numpy import mean, ma

    if variable is None:
        variable = utils.get_variable(signal)

    try:
        var_signal = utils.get_values(signal)
        mask_l = utils.get_values(low_agreement_mask)
        mask_h = utils.get_values(high_agreement_mask)

        # mask_l = ma.masked_where(low < 0.5, low)
        # mask_h = ma.masked_where(high < 0.5, high)
        # mask_l[mask_l == 0] = np.nan
        # mask_h[mask_h == 0] = np.nan

        LOGGER.info('data loaded')

        lats, lons = utils.get_coordinates(signal, unrotate=True)

        if len(lats.shape) == 1:
            cyclic_var, cyclic_lons = add_cyclic_point(var_signal, coord=lons)
            mask_l, cyclic_lons = add_cyclic_point(mask_l, coord=lons)
            mask_h, cyclic_lons = add_cyclic_point(mask_h, coord=lons)

            lons = cyclic_lons.data
            var_signal = cyclic_var

        LOGGER.info('lat lon loaded')

        minval = round(np.nanmin(var_signal))
        maxval = round(np.nanmax(var_signal)+.5)

        LOGGER.info('prepared data for plotting')
    except Exception:
        msg = 'failed to get data for plotting'
        LOGGER.exception(msg)
        raise Exception(msg)

    try:
        fig = plt.figure(facecolor='w', edgecolor='k')

        ax = plt.axes(projection=ccrs.Robinson(central_longitude=int(mean(lons))))
        norm = MidpointNormalize(midpoint=0)

        cs = plt.contourf(lons, lats, var_signal, 60, norm=norm, transform=ccrs.PlateCarree(),
                          cmap=cmap)  # contourf takes no `interpolation` kwarg

        cl = plt.contourf(lons, lats, mask_l, 1, transform=ccrs.PlateCarree(), colors='none', hatches=[None, '/'])
        ch = plt.contourf(lons, lats, mask_h, 1, transform=ccrs.PlateCarree(), colors='none', hatches=[None, '.'])
        # artists, labels = ch.legend_elements()
        # plt.legend(artists, labels, handleheight=2)
        # plt.clim(minval,maxval)
        ax.coastlines()
        ax.gridlines()
        # ax.set_global()

        if title is None:
            plt.title('%s with Agreement' % variable)
        else:
            plt.title(title)
        plt.colorbar(cs)

        plt.annotate('// = low model ensemble agreement', (0, 0), (0, -10),
                     xycoords='axes fraction', textcoords='offset points', va='top')
        plt.annotate('..  = high model ensemble agreement', (0, 0), (0, -20),
                     xycoords='axes fraction', textcoords='offset points', va='top')

        graphic = fig2plot(fig=fig, file_extension=file_extension)
        plt.close()

        LOGGER.info('Plot created and figure saved')
    except Exception:
        msg = 'failed to plot graphic'
        LOGGER.exception(msg)
        raise Exception(msg)

    return graphic
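MidpointNormalize is used in these plotting examples but not shown; a common implementation is the well-known matplotlib recipe below (an assumption: the project's own class may differ in detail):

import numpy as np
import matplotlib.colors as mcolors

class MidpointNormalize(mcolors.Normalize):
    # Normalization that pins a chosen midpoint (e.g. 0) to the colormap centre.
    def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
        self.midpoint = midpoint
        mcolors.Normalize.__init__(self, vmin, vmax, clip)

    def __call__(self, value, clip=None):
        # map [vmin, midpoint, vmax] onto [0, 0.5, 1]
        x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
        return np.ma.masked_array(np.interp(value, x, y))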
Example #13
def calc_indice_percentile(resources=[], variable=None, 
    prefix=None, indices='TG90p', refperiod=None,
    groupings='yr', polygons=None, percentile=90, mosaik = False, 
    dir_output=None, dimension_map = None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the netcdf file (default=None)
    :param indices: list of indices (default='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period tuple = (start, end)
    :param groupings: indices time aggregation (default='yr')
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into out_dir.
    """
    from os.path import join, dirname, exists
    from os import remove, makedirs
    import uuid
    from numpy import ma 
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time
    
    if type(resources) != list: 
      resources = list([resources])
    if type(indices) != list: 
      indices = list([indices])
      
    if type(groupings) != list: 
      groupings = list([groupings])
      
    if type(refperiod) == list: 
      refperiod = refperiod[0]
      
    if refperiod != None:
      start = dt.strptime(refperiod.split('-')[0] , '%Y%m%d')
      end = dt.strptime(refperiod.split('-')[1] , '%Y%m%d')
      time_range = [start, end]
    else:  
      time_range = None
    
    if dir_output != None:
      if not exists(dir_output): 
        makedirs(dir_output)
    
    ########################################################################################################################
    # Compute a custom percentile basis using ICCLIM. ######################################################################
    ########################################################################################################################

    from ocgis.contrib import library_icclim  as lic 
    nc_indices = []
    nc_dic = sort_by_filename(resources)
    
    for grouping in groupings:
      calc_group = calc_grouping(grouping)
      for key in nc_dic.keys():
        resource = nc_dic[key]
        if variable == None: 
          variable = get_variable(resource)
        if polygons == None:
          nc_reference = call(resource=resource, 
            prefix=str(uuid.uuid4()), 
            time_range=time_range,
            output_format='nc', 
            dir_output=dir_output)
        else:
          nc_reference = clipping(resource=resource, 
            prefix=str(uuid.uuid4()),
            time_range=time_range, 
            output_format='nc', 
            polygons=polygons,
            dir_output=dir_output, 
            mosaik = mosaik)
          
        arr = get_values(nc_files=nc_reference)
        dt_arr = get_time(nc_files=nc_reference)
        arr = ma.masked_array(arr)
        dt_arr = ma.masked_array(dt_arr)
        window_width = 5
        
        for indice in indices:
          name = indice.replace('_', str(percentile))
          var = indice.split('_')[0]

          operation = None
          if 'T' in var: 
            if percentile >= 50: 
              operation = 'Icclim%s90p' % var
              func = 'icclim_%s90p' % var # icclim_TG90p
            else: 
              operation = 'Icclim%s10p' % var
              func = 'icclim_%s10p' % var
              
          ################################
          # load the appropriate operation
          ################################

          ops = [op for op in dir(lic) if operation in op]
          if len(ops) == 0:
              raise Exception("operator does not exist %s" % operation)
          
          exec "percentile_dict = lic.%s.get_percentile_dict(arr, dt_arr, percentile, window_width)" % ops[0]
          calc = [{'func': func, 'name': name, 'kwds': {'percentile_dict': percentile_dict}}]
          
          if polygons == None:
            nc_indices.append(call(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output))
          else: 
            nc_indices.extend(clipping(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output,
                                polygons=polygons, 
                                mosaik = mosaik,
                                ))
    return nc_indices
Example #14
def map_robustness(signal,
                   high_agreement_mask,
                   low_agreement_mask,
                   cmap='seismic',
                   title=None):
    """
    generates a graphic for the output of the ensembleRobustness process for a lat/long file.

    :param signal: netCDF file containing the signal difference over time
    :param highagreement:
    :param lowagreement:
    :param variable:
    :param cmap: default='seismic',
    :param title: default='Model agreement of signal'
    :returns str: path/to/file.png
    """

    try:
        # from flyingpigeon.utils import get_values
        from cartopy.util import add_cyclic_point
        # from flyingpigeon.utils import get_coordinates

        var_signal = utils.get_values(signal)
        mask_l = utils.get_values(low_agreement_mask)
        mask_h = utils.get_values(high_agreement_mask)

        mask_l.mask = var_signal.mask
        mask_h.mask = var_signal.mask

        mask_l.mask[mask_l.data == 0] = False  # `is 0` never matches array elements
        mask_h.mask[mask_h.data == 0] = False

        LOGGER.debug('values loaded')

        lats, lons = utils.get_coordinates(signal)

        #
        # cyclic_var, cyclic_lons = add_cyclic_point(var_signal, coord=lons)
        # mask_l, cyclic_lons = add_cyclic_point(mask_l, coord=lons)
        # mask_h, cyclic_lons = add_cyclic_point(mask_h, coord=lons)
        #
        # lons = cyclic_lons.data
        # var_signal = cyclic_var
        #
        LOGGER.debug('coordinates loaded')
        #
        minval = round(np.nanmin(var_signal))
        maxval = round(np.nanmax(var_signal) + .5)

        LOGGER.info('prepared data for plotting')
    except Exception:
        msg = 'failed to get data for plotting'
        LOGGER.exception(msg)

    try:
        fig = plt.figure(facecolor='w',
                         edgecolor='k')  # figsize=(20,10), dpi=600,
        ax = plt.axes(projection=ccrs.Robinson(central_longitude=0))
        norm = MidpointNormalize(midpoint=0)

        cs = plt.contourf(lons,
                          lats,
                          var_signal,
                          60,
                          transform=ccrs.PlateCarree(),
                          norm=norm,
                          cmap=cmap)  # contourf takes no `interpolation` kwarg
        cl = plt.contourf(lons,
                          lats,
                          mask_l,
                          60,
                          transform=ccrs.PlateCarree(),
                          colors='none',
                          hatches=['//'])
        ch = plt.contourf(lons,
                          lats,
                          mask_h,
                          60,
                          transform=ccrs.PlateCarree(),
                          colors='none',
                          hatches=['.'])

        # plt.clim(minval, maxval)
        ax.coastlines()
        # ax.set_global()

        if title is None:
            plt.title('Robustness')
        else:
            plt.title(title)

        plt.colorbar(cs)

        plt.annotate('// = low model ensemble agreement', (0, 0), (0, -10),
                     xycoords='axes fraction',
                     textcoords='offset points',
                     va='top')
        plt.annotate('..  = high model ensemble agreement', (0, 0), (0, -20),
                     xycoords='axes fraction',
                     textcoords='offset points',
                     va='top')

        _, graphic = mkstemp(dir='.', suffix='.png')
        fig.savefig(graphic)
        plt.close()

        LOGGER.info('Plot created and figure saved')
    except Exception:
        msg = 'failed to plot graphic'
        LOGGER.exception(msg)
        _, graphic = mkstemp(dir='.', suffix='.png')
    return graphic
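The mask bookkeeping above is easy to get wrong: the original mask_l.data is 0 compares object identity and never matches an array element, hence the correction to == 0. The fixed behaviour in isolation:

import numpy.ma as ma

mask_l = ma.masked_array([0.0, 0.3, 0.8], mask=[True, True, False])
mask_l.mask[mask_l.data == 0] = False  # unmask cells whose value is exactly 0
print(mask_l)                          # [0.0 -- 0.8]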
Example #15
def calc_indice_percentile(resources=[], variable=None, 
    prefix=None, indices='TG90p', refperiod=None,
    groupings='yr', polygons=None, percentile=90, mosaic = False, 
    dir_output=None, dimension_map = None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the netcdf file (default=None)
    :param indices: list of indices (default='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period tuple = (start, end)
    :param groupings: indices time aggregation (default='yr')
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into out_dir.
    """
    from os.path import join, dirname, exists
    from os import remove, makedirs
    import uuid
    from numpy import ma 
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time
    
    if type(resources) != list: 
      resources = list([resources])
    if type(indices) != list: 
      indices = list([indices])
      
    if type(groupings) != list: 
      groupings = list([groupings])
      
    if type(refperiod) == list: 
      refperiod = refperiod[0]
      
    if refperiod != None:
      start = dt.strptime(refperiod.split('-')[0] , '%Y%m%d')
      end = dt.strptime(refperiod.split('-')[1] , '%Y%m%d')
      time_range = [start, end]
    else:  
      time_range = None
    
    if dir_output != None:
      if not exists(dir_output): 
        makedirs(dir_output)
    
    ########################################################################################################################
    # Compute a custom percentile basis using ICCLIM. ######################################################################
    ########################################################################################################################

    from ocgis.contrib import library_icclim  as lic 
    nc_indices = []
    nc_dic = sort_by_filename(resources)
    
    for grouping in groupings:
      calc_group = calc_grouping(grouping)
      for key in nc_dic.keys():
        resource = nc_dic[key]
        if variable == None: 
          variable = get_variable(resource)
        if polygons == None:
          nc_reference = call(resource=resource, 
            prefix=str(uuid.uuid4()), 
            time_range=time_range,
            output_format='nc', 
            dir_output=dir_output)
        else:
          nc_reference = clipping(resource=resource, 
            prefix=str(uuid.uuid4()),
            time_range=time_range, 
            output_format='nc', 
            polygons=polygons,
            dir_output=dir_output, 
            mosaic = mosaic)
          
        arr = get_values(resource=nc_reference)
        dt_arr = get_time(resource=nc_reference)
        arr = ma.masked_array(arr)
        dt_arr = ma.masked_array(dt_arr)
        window_width = 5
        
        for indice in indices:
          name = indice.replace('_', str(percentile))
          var = indice.split('_')[0]

          operation = None
          if 'T' in var: 
            if percentile >= 50: 
              operation = 'Icclim%s90p' % var
              func = 'icclim_%s90p' % var # icclim_TG90p
            else: 
              operation = 'Icclim%s10p' % var
              func = 'icclim_%s10p' % var
              
          ################################
          # load the appropriate operation
          ################################

          ops = [op for op in dir(lic) if operation in op]
          if len(ops) == 0:
              raise Exception("operator does not exist %s" % operation)
          
          exec "percentile_dict = lic.%s.get_percentile_dict(arr, dt_arr, percentile, window_width)" % ops[0]
          calc = [{'func': func, 'name': name, 'kwds': {'percentile_dict': percentile_dict}}]
          
          if polygons == None:
            nc_indices.append(call(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output))
          else: 
            nc_indices.extend(clipping(resource=resource, 
                                prefix=key.replace(variable,name).replace('_day_', '_%s_' % grouping), 
                                calc=calc, 
                                calc_grouping=calc_group, 
                                output_format='nc',
                                dir_output=dir_output,
                                polygons=polygons, 
                                mosaic = mosaic,
                                ))
    return nc_indices

#def calc_indice_unconventional(resource=[], variable=None, prefix=None,
  #indices=None, polygons=None,  groupings=None, 
  #dir_output=None, dimension_map = None):
    #"""
    #Calculates given indices for suitable files in the appropriate time grouping and polygon.

    #:param resource: list of filenames in data reference syntax (DRS) convention (netcdf)
    #:param variable: variable name to be selected in the in netcdf file (default=None)
    #:param indices: list of indices (default ='TGx')
    #:param polygons: list of polygons (default =None)
    #:param grouping: indices time aggregation (default='yr')
    #:param out_dir: output directory for result file (netcdf)
    #:param dimension_map: optional dimension map if different to standard (default=None)

    #:return: list of netcdf files with calculated indices. Files are saved into dir_output
    #"""
    
    #from os.path import join, dirname, exists
    #from os import remove
    #import uuid
    #from flyingpigeon import ocgis_module
    #from flyingpigeon.subset import get_ugid, get_geom

    #if type(resource) != list: 
      #resource = list([resource])
    #if type(indices) != list: 
      #indices = list([indices])
    #if type(polygons) != list and polygons != None:
      #polygons = list([polygons])
    #elif polygons == None:
      #polygons = [None]
    #else: 
      #logger.error('Polygons not found')
    #if type(groupings) != list:
      #groupings = list([groupings])
    
    #if dir_output != None:
      #if not exists(dir_output): 
        #makedirs(dir_output)
    
    #experiments = sort_by_filename(resource)
    #outputs = []

    #print('environment for calc_indice_unconventional set')
    #logger.info('environment for calc_indice_unconventional set')
    
    #for key in experiments:
      #if variable == None:
        #variable = get_variable(experiments[key][0])
      #try: 
        #ncs = experiments[key]
        #for indice in indices:
          #logger.info('indice: %s' % indice)
          #try: 
            #for grouping in groupings:
              #logger.info('grouping: %s' % grouping)
              #try:
                #calc_group = calc_grouping(grouping)
                #logger.info('calc_group: %s' % calc_group)
                #for polygon in polygons:  
                  #try:
                    #domain = key.split('_')[1].split('-')[0]
                    #if polygon == None:
                      #if prefix == None: 
                        #prefix = key.replace(variable, indice).replace('_day_','_%s_' % grouping )
                      #geom = None
                      #ugid = None
                    #else:
                      #if prefix == None: 
                        #prefix = key.replace(variable, indice).replace('_day_','_%s_' % grouping ).replace(domain,polygon)
                      #geom = get_geom(polygon=polygon)
                      #ugid = get_ugid(polygons=polygon, geom=geom)
                    #if indice == 'TGx':
                      #calc=[{'func': 'max', 'name': 'TGx'}]
                      #tmp = ocgis_module.call(resource=ncs,# conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping=calc_group, prefix=prefix,
                                              #dir_output=dir_output, geom=geom, select_ugid=ugid)
                    #elif indice == 'TGn':
                      #calc=[{'func': 'min', 'name': 'TGn'}]
                      #tmp = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping= calc_group, prefix=prefix,
                                               #dir_output=dir_output, geom=geom, select_ugid = ugid)
                    #elif indice == 'TGx5day':
                      #calc = [{'func': 'moving_window', 'name': 'TGx5day', 'kwds': {'k': 5, 'operation': 'mean', 'mode': 'same' }}]
                      #tmp2 = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, prefix=str(uuid.uuid4()),
                                              #geom=geom, select_ugid = ugid)
                      #calc=[{'func': 'max', 'name': 'TGx5day'}]
                      #logger.info('moving window calculated : %s' % tmp2)
                      #tmp = ocgis_module.call(resource=tmp2,
                                              #variable=indice, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping=calc_group, prefix=prefix,
                                              #dir_output=dir_output)
                      #remove(tmp2)
                    #elif indice == 'TGn5day':
                      #calc = [{'func': 'moving_window', 'name': 'TGn5day', 'kwds': {'k': 5, 'operation': 'mean', 'mode': 'same' }}]
                      #tmp2 = ocgis_module.call(resource=ncs, #conform_units_to='celcius',
                                              #variable=variable, dimension_map=dimension_map, 
                                              #calc=calc, prefix=str(uuid.uuid4()),
                                              #geom=geom, select_ugid = ugid)
                      #calc=[{'func': 'min', 'name': 'TGn5day'}]
                      
                      #logger.info('moving window calculated : %s' % tmp2)
                      
                      #tmp = ocgis_module.call(resource=tmp2,
                                              #variable=indice, dimension_map=dimension_map, 
                                              #calc=calc, calc_grouping=calc_group, prefix=prefix,
                                              #dir_output=dir_output)
                      #remove(tmp2)
                    #else: 
                      #logger.error('Indice %s is not a known inidce' % (indice))
                    #outputs.append(tmp)
                    #logger.info('indice file calcualted %s ' % (tmp))
                  #except Exception as e:
                    #logger.debug('could not calc indice %s for key %s, polygon %s and calc_grouping %s : %s' %  (indice, key, polygon, grouping, e ))
              #except Exception as e:
                #logger.debug('could not calc indice %s for key %s and calc_grouping %s : %s' % ( indice, key, polygon, e ))
          #except Exception as e:
            #logger.debug('could not calc indice %s for key %s: %s'%  (indice, key, e ))
      #except Exception as e:
        #logger.debug('could not calc key %s: %s' % (key, e))
    #return outputs
Example #16
def calc_indice_percentile(resources=[],
                           variable=None,
                           prefix=None,
                           indices='TG90p',
                           refperiod=None,
                           groupings='yr',
                           polygons=None,
                           percentile=90,
                           mosaic=False,
                           dir_output=None,
                           dimension_map=None):
    """
    Calculates given indices for suitable files in the appropriate time grouping and polygon.

    :param resources: list of filenames in data reference syntax (DRS) convention (netcdf)
    :param variable: variable name to be selected in the netcdf file (default=None)
    :param indices: list of indices (default='TG90p')
    :param prefix: filename prefix
    :param refperiod: reference period tuple = (start, end)
    :param groupings: indices time aggregation (default='yr')
    :param dir_output: output directory for result file (netcdf)
    :param dimension_map: optional dimension map if different to standard (default=None)

    :return: list of netcdf files with calculated indices. Files are saved into out_dir.
    """
    from os.path import join, dirname, exists
    from os import remove, makedirs
    import uuid
    from numpy import ma
    from datetime import datetime as dt

    from flyingpigeon.ocgis_module import call
    from flyingpigeon.subset import clipping
    from flyingpigeon.utils import get_values, get_time

    if type(resources) != list:
        resources = list([resources])
    if type(indices) != list:
        indices = list([indices])

    if type(groupings) != list:
        groupings = list([groupings])

    if type(refperiod) == list:
        refperiod = refperiod[0]

    if refperiod != None:
        start = dt.strptime(refperiod.split('-')[0], '%Y%m%d')
        end = dt.strptime(refperiod.split('-')[1], '%Y%m%d')
        time_range = [start, end]
    else:
        time_range = None

    if dir_output != None:
        if not exists(dir_output):
            makedirs(dir_output)

    ########################################################################################################################
    # Compute a custom percentile basis using ICCLIM. ######################################################################
    ########################################################################################################################

    from ocgis.contrib import library_icclim as lic
    nc_indices = []
    nc_dic = sort_by_filename(resources)

    for grouping in groupings:
        calc_group = calc_grouping(grouping)
        for key in nc_dic.keys():
            resource = nc_dic[key]
            if variable == None:
                variable = get_variable(resource)
            if polygons == None:
                nc_reference = call(resource=resource,
                                    prefix=str(uuid.uuid4()),
                                    time_range=time_range,
                                    output_format='nc',
                                    dir_output=dir_output)
            else:
                nc_reference = clipping(resource=resource,
                                        prefix=str(uuid.uuid4()),
                                        time_range=time_range,
                                        output_format='nc',
                                        polygons=polygons,
                                        dir_output=dir_output,
                                        mosaic=mosaic)

            arr = get_values(resource=nc_reference)
            dt_arr = get_time(resource=nc_reference)
            arr = ma.masked_array(arr)
            dt_arr = ma.masked_array(dt_arr)
            window_width = 5

            for indice in indices:
                name = indice.replace('_', str(percentile))
                var = indice.split('_')[0]

                operation = None
                if 'T' in var:
                    if percentile >= 50:
                        operation = 'Icclim%s90p' % var
                        func = 'icclim_%s90p' % var  # icclim_TG90p
                    else:
                        operation = 'Icclim%s10p' % var
                        func = 'icclim_%s10p' % var

                ################################
                # load the appropriate operation
                ################################

                ops = [op for op in dir(lic) if operation in op]
                if len(ops) == 0:
                    raise Exception("operator does not exist %s" % operation)

                exec "percentile_dict = lic.%s.get_percentile_dict(arr, dt_arr, percentile, window_width)" % ops[
                    0]
                calc = [{
                    'func': func,
                    'name': name,
                    'kwds': {
                        'percentile_dict': percentile_dict
                    }
                }]

                if polygons == None:
                    nc_indices.append(
                        call(resource=resource,
                             prefix=key.replace(variable, name).replace(
                                 '_day_', '_%s_' % grouping),
                             calc=calc,
                             calc_grouping=calc_group,
                             output_format='nc',
                             dir_output=dir_output))
                else:
                    nc_indices.extend(
                        clipping(
                            resource=resource,
                            prefix=key.replace(variable, name).replace(
                                '_day_', '_%s_' % grouping),
                            calc=calc,
                            calc_grouping=calc_group,
                            output_format='nc',
                            dir_output=dir_output,
                            polygons=polygons,
                            mosaic=mosaic,
                        ))
    return nc_indices

