Example #1
# Imports assumed for these test snippets; TESTDATA and local_path live in
# flyingpigeon's test suite (the exact import locations are an assumption).
import tempfile
import tarfile
import zipfile

from flyingpigeon import utils
from flyingpigeon.tests.common import TESTDATA, local_path  # assumed path

def test_archive_tar():
    result = utils.archive(
        [local_path(TESTDATA['cmip5_tasmax_2007_nc'])],
        format='tar',
        dir_output=tempfile.mkdtemp())
    tar = tarfile.open(result)
    assert len(tar.getnames()) == 1
Example #2
def test_archive_zip():
    result = utils.archive(
        [local_path(TESTDATA['cmip5_tasmax_2007_nc'])],
        format='zip',
        dir_output=tempfile.mkdtemp())
    zipf = zipfile.ZipFile(result)
    assert len(zipf.namelist()) == 1
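
Note: both tests exercise utils.archive, which bundles a list of files and
returns the path of the new archive. A minimal sketch of such a helper, using
only the standard library (an illustration of the behaviour the tests expect,
not flyingpigeon's actual implementation):

import os
import tarfile
import tempfile
import zipfile

def archive_sketch(resources, format='tar', dir_output=None):
    """Bundle files into a tar or zip archive and return the archive path."""
    dir_output = dir_output or tempfile.mkdtemp()
    fd, out = tempfile.mkstemp(suffix='.' + format, dir=dir_output)
    os.close(fd)  # mkstemp opens the file; only the path is needed here
    if format == 'tar':
        with tarfile.open(out, 'w') as tar:
            for f in resources:
                tar.add(f, arcname=os.path.basename(f))
    elif format == 'zip':
        with zipfile.ZipFile(out, 'w') as zipf:
            for f in resources:
                zipf.write(f, arcname=os.path.basename(f))
    else:
        raise ValueError('unsupported format: %s' % format)
    return out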
Example #3
    def execute(self):
        from flyingpigeon.utils import archive
        from os import path
        from numpy import squeeze

        ncs = self.getInputValues(identifier='resource')
        indices = self.indices.getValue()
        polygons = self.polygons.getValue()
        mosaic = self.mosaic.getValue()
        groupings = self.groupings.getValue()

        if polygons is None:
            self.status.set('No countries selected, entire domain will be calculated', 10)

        logger.debug('indices=%s', indices)
        logger.debug('groupings=%s', groupings)
        logger.debug('num files=%s', len(ncs))
        self.status.set('processing indices : %s' % indices, 12)

        results = squeeze(calc_indice_simple(
            resource=ncs,
            mosaic=mosaic,
            indices=indices,
            polygons=polygons,
            groupings=groupings,
            dir_output=path.curdir,
            ))
         
        results_list = results.tolist()

        self.status.set('indices calculated', 90)
        logger.debug('results type: %s', type(results_list))
        logger.debug('indices files: %s', results_list)

        try:
            archive_indices = archive(results_list)
            logger.info('archive prepared')
        except Exception as e:
            msg = "archive preparation failed"
            logger.exception(msg)
            raise Exception(msg)
        try: 
            self.output.setValue(archive_indices)
            if isinstance(results_list, list):
                i = next((i for i, x in enumerate(results_list) if x), None)
                self.output_netcdf.setValue(str(results[i]))
            elif isinstance(results_list, str):
                self.output_netcdf.setValue(results_list)
            else:
                logger.debug('results_list type %s is not extractable', type(results_list))
                self.output_netcdf.setValue(None)
        except Exception as e:
            msg = "extraction of example file failed"
            logger.exception(msg)

        self.status.set('done', 100)
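
Note: the squeeze(...) call explains the list-or-string branching above:
numpy.squeeze drops axes of length one, so a single output file collapses to a
0-d array whose tolist() returns a bare string instead of a list. For example:

import numpy as np

files = np.array([['a.nc', 'b.nc']])  # shape (1, 2)
assert np.squeeze(files).tolist() == ['a.nc', 'b.nc']

single = np.array(['c.nc'])           # shape (1,)
assert np.squeeze(single).tolist() == 'c.nc'  # 0-d array -> plain string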
Example #4
    def execute(self):
        from flyingpigeon.utils import archive, archiveextract

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        ncs = archiveextract(self.getInputValues(identifier='resource'))
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()
        # variable = self.variable.getValue()

        # logger.info('regions: %s' % regions)
        # dimension_map = self.dimension_map.getValue()
        # if dimension_map != None:
        #     dimension_map = literal_eval(dimension_map)

        logger.info('ncs = %s', ncs)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)
        # logger.info('dimension_map = %s', dimension_map)

        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' %
                     (len(regions), len(ncs)))
        try:
            results = clipping(
                resource=ncs,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                dir_output=os.path.abspath(os.curdir),
                # dimension_map=dimension_map,
            )
            logger.info('results %s' % results)
        except Exception as e:
            msg = 'clipping failed'
            logger.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('no results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception as e:
            msg = 'Tar file preparation failed'
            logger.exception(msg)
            raise Exception(msg)

        self.output.setValue(tarf)

        i = next((i for i, x in enumerate(results) if x), None)
        self.output_netcdf.setValue(results[i])

        self.status.set('done', 100)
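
Note: the next((i for i, x in enumerate(results) if x), None) line picks the
index of the first truthy (non-empty) result so one sample NetCDF file can be
exposed, and yields None when every entry is empty (file names made up):

results = ['', 'tas_subset.nc', 'pr_subset.nc']
i = next((i for i, x in enumerate(results) if x), None)
assert i == 1  # index of the first non-empty entry

i = next((i for i, x in enumerate(['', '']) if x), None)
assert i is None  # nothing truthy found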
Example #5
    def execute(self):
        from flyingpigeon.ocgis_module import call
        from flyingpigeon.utils import sort_by_filename, archive, get_values, get_time

        ncs = self.getInputValues(identifier='resource')
        logger.info("ncs: %s " % ncs)
        coords = self.getInputValues(identifier='coords')
        logger.info("coords %s", coords)
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        from numpy import savetxt, column_stack
        from shapely.geometry import Point

        for key in nc_exp.keys():
            try:
                logger.info('start calculation for %s ' % key)
                ncs = nc_exp[key]
                times = get_time(ncs, format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '%s.csv' % key
                filenames.append(filename)

                for p in coords:
                    try:
                        self.status.set('processing point : {0}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs,
                                          geom=point,
                                          select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',%s-%s' % (p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        logger.debug('failed for point %s %s' % (p, e))
                self.status.set(
                    '*** all points processed for {0} ****'.format(key), 50)
                savetxt(filename,
                        concat_vals,
                        fmt='%s',
                        delimiter=',',
                        header=header)
            except Exception as e:
                logger.debug('failed for %s %s' % (key, e))

        # set the outputs
        self.status.set('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        self.tarout.setValue(tarout_file)
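
Note: the per-point loop builds the CSV column by column: column_stack glues a
new values column onto the running table (coercing everything to strings), and
savetxt with fmt='%s' writes the mixed date/value rows. In isolation, with
made-up numbers:

from numpy import column_stack, savetxt

times = ['2007-01-01_12:00:00', '2007-01-02_12:00:00']
vals = [271.3, 272.1]

concat_vals = column_stack([times, vals])  # str + float -> string dtype
savetxt('point_ts.csv', concat_vals, fmt='%s', delimiter=',',
        header='date_time,10.0-53.5')      # header names the point's lon-lat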
Example #6
    def execute(self):
        urls = self.getInputValues(identifier='resource')
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()
        
        #logger.info('regions: %s' % regions)

        # dimension_map = self.dimension_map.getValue()
        # if dimension_map != None: 
        #     dimension_map = literal_eval(dimension_map)

        logger.info('urls = %s', urls)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)
        # logger.info('dimension_map = %s', dimension_map)
    
        self.status.set('Arguments set for subset process', 0)
        logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(urls)))
        try:
            results = clipping(
                resource=urls,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                spatial_wrapping='wrap',
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
                # dimension_map=dimension_map,
            )
            logger.info('results %s' % results)
        except Exception as e:
            msg = 'clipping failed'
            logger.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('no results produced.')
        
        # prepare tar file 
        try:
            from flyingpigeon.utils import archive
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception as e:
            msg = 'Tar file preparation failed'
            logger.exception(msg)
            raise Exception(msg)

        self.output.setValue(tarf)
        
        i = next((i for i, x in enumerate(results) if x), None)
        self.output_netcdf.setValue(results[i])
            
        self.status.set('done', 100)
Example #7
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            indices = [inpt.data for inpt in request.inputs['indices']]
            LOGGER.debug("indices = %s", indices)
            archive_format = request.inputs['archive_format'][0].data
        except:
            msg = 'failed to read the arguments.'
            LOGGER.exception(msg)
            raise Exception(msg)
        LOGGER.info('indices %s ' % indices)

        #################################
        # calculate the climate indices
        #################################

        # indices calculation: collect the files for every dataset (extend,
        # so results from earlier datasets are kept rather than overwritten)
        ncs_indices = []
        datasets = sort_by_filename(resources, historical_concatination=True)
        LOGGER.debug("datasets=%s", datasets.keys())

        for ds_name in datasets:
            try:
                response.update_status('calculation of {}'.format(ds_name), 30)
                ncs_indices.extend(sdm.get_indices(resource=datasets[ds_name],
                                                   indices=indices))
            except:
                msg = 'indice calculation failed for {}'.format(ds_name)
                LOGGER.exception(msg)
                raise Exception(msg)

        # archive multiple output files to one archive file
        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices 3D added to tarfile')
        except:
            msg = 'failed adding indices to tar'
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_indices'].file = archive_indices

        i = next((i for i, x in enumerate(ncs_indices) if x), None)
        response.outputs['ncout'].file = ncs_indices[i]

        response.update_status('done', 100)
        return response
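
Note: sort_by_filename groups the input NetCDF files into one entry per
experiment; with historical_concatination=True, historical and scenario files
of the same model run end up under a single key. Judging from how the result
is used in these examples, it is a dict mapping a dataset name to a list of
files, roughly (file names hypothetical):

from flyingpigeon.utils import sort_by_filename

datasets = sort_by_filename(
    ['tasmax_day_MPI-ESM-LR_historical_r1i1p1_19900101-20051231.nc',
     'tasmax_day_MPI-ESM-LR_rcp45_r1i1p1_20060101-21001231.nc'],
    historical_concatination=True)

for ds_name, files in datasets.items():
    print(ds_name, len(files))  # one key per experiment; values are file lists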
Example #8
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info('ncs: {}'.format(ncs))

        coords = []
        for coord in request.inputs['coords']:
            coords.append(coord.data)

        LOGGER.info('coords {}'.format(coords))
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for {}'.format(key))
                ncs = nc_exp[key]
                times = get_time(ncs)  # , format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '{}.csv'.format(key)
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point: {}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',{}-{}'.format(p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point {} {}'.format(p, e))
                response.update_status('*** all points processed for {0} ****'.format(key), 50)

                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as ex:
                LOGGER.debug('failed for {}: {}'.format(key, str(ex)))

        # set the outputs
        response.update_status('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        response.outputs['tarout'].file = tarout_file
        return response
Example #9
    def execute(self):
        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        from flyingpigeon.utils import searchfile
        from flyingpigeon.subset import masking
        from flyingpigeon.utils import archive, archiveextract

        from flyingpigeon import config
        from os import path

        resources = archiveextract(self.getInputValues(identifier='resource'))
        masks = archiveextract(self.getInputValues(identifier='mask'))
        land_area = self.land_area.getValue()

        fp_cache = config.cache_path().split('/')
        base_dir = '/'.join(fp_cache[0:-1])  # base dir for all birds

        logger.debug('base dir of directory tree: %s' % base_dir)

        ncs = []
        sftlf = []
        for nc in resources:
            try:
                basename = path.basename(nc)
                bs = basename.split('_')
                pattern = 'sftlf_' + '_'.join(bs[1:-2]) + '_fx.nc'
                # wildcard the experiment token so the fixed-field land mask
                # of any experiment matches
                for experiment in ('historical', 'rcp85', 'rcp65', 'rcp45', 'rcp26'):
                    pattern = pattern.replace(experiment, '*')
                logger.debug('searching for %s ' % pattern)
                sftlf.extend(searchfile(pattern, path.curdir))
                sftlf.extend(searchfile(pattern, base_dir))
                logger.debug('length of sftlf: %s' % len(sftlf))
                if len(sftlf) >= 1:
                    if len(sftlf) > 1:
                        logger.warning(
                            'more than one sftlf file matches the pattern; the first one will be taken: %s'
                            % sftlf[0])
                    prefix = 'masked%s' % basename.replace('.nc', '')
                    nc_mask = masking(nc, sftlf[0], land_area=land_area, prefix=prefix)
                    ncs.extend([nc_mask])
                    logger.info('masking processed for %s' % basename)
                else:
                    logger.warning('no mask found. Please perform a "Download Resources" '
                                   'to make sure the land_area file is in the cache')
            except:
                logger.exception('failed to mask file: %s' % basename)
        nc_archive = archive(ncs)

        self.output_archive.setValue(nc_archive)
        i = next((i for i, x in enumerate(ncs) if x), None)
        self.output_example.setValue(ncs[i])
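
Note: the pattern construction above can be traced with a single hypothetical
CMOR-style file name; each experiment token is wildcarded so the fixed-field
land mask of any experiment matches:

basename = 'tasmax_EUR-11_MPI-ESM-LR_rcp45_r1i1p1_day_20060101-20101231.nc'
bs = basename.split('_')
pattern = 'sftlf_' + '_'.join(bs[1:-2]) + '_fx.nc'
for experiment in ('historical', 'rcp85', 'rcp65', 'rcp45', 'rcp26'):
    pattern = pattern.replace(experiment, '*')
print(pattern)  # sftlf_EUR-11_MPI-ESM-LR_*_r1i1p1_fx.nc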
Example #10
  def execute(self):
    from flyingpigeon.ocgis_module import call
    from flyingpigeon.utils import sort_by_filename, archive, get_values, get_time
        
    ncs = self.getInputValues(identifier='netcdf_file')
    logger.info("ncs: %s " % ncs) 
    coords = self.getInputValues(identifier='coords')
    logger.info("coords %s", coords)
    filenames = []    
    nc_exp = sort_by_filename(ncs, historical_concatination=True)
    
    #(fp_tar, tarout_file) = tempfile.mkstemp(dir=".", suffix='.tar')
    #tar = tarfile.open(tarout_file, "w")

    from numpy import savetxt, column_stack
    from shapely.geometry import Point
    
    for key in nc_exp.keys():
      try:
        logger.info('start calculation for %s ' % key )
        ncs = nc_exp[key]
        times = get_time(ncs)
        concat_vals = ['%s-%02d-%02d_%02d:%02d:%02d' %
                       (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
        header = 'date_time'
        filename = '%s.csv' % key
        filenames.append(filename) 
        
        for p in coords:
          try: 
            self.status.set('processing point : {0}'.format(p), 20)
            # define the point:  
            p = p.split(',')
            point = Point(float(p[0]), float(p[1]))       
            
            # get the values
            timeseries = call(resource=ncs, geom=point, select_nearest=True)
            vals = get_values(timeseries)
            
            # concatenation of values
            header = header + ',%s-%s' % (p[0], p[1])
            concat_vals = column_stack([concat_vals, vals])
          except Exception as e: 
            logger.debug('failed for point %s %s' % (p , e))
        self.status.set('*** all points processed for {0} ****'.format(key), 50)
        savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
      except Exception as e: 
        logger.debug('failed for %s %s' % (key, e))

    # set the outputs
    self.status.set('*** creating output tar archive ****', 90)
    tarout_file = archive(filenames)
    self.tarout.setValue(tarout_file)
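
Note: the list comprehension that builds concat_vals renders each timestep
with zero-padded fields; one timestamp in isolation:

from datetime import datetime

t = datetime(2007, 1, 2, 12, 0, 0)
stamp = '%s-%02d-%02d_%02d:%02d:%02d' % (
    t.year, t.month, t.day, t.hour, t.minute, t.second)
assert stamp == '2007-01-02_12:00:00'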
Example #11
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        datasets = []
        # append file urls
        if 'dataset' in request.inputs:
            datasets.extend(
                archiveextract(
                    resource=rename_complexinputs(request.inputs['dataset'])))
        # append opendap urls
        if 'dataset_opendap' in request.inputs:
            for dataset in request.inputs['dataset_opendap']:
                datasets.append(dataset.data)
        # land or sea flag
        land_area_flag = request.inputs['land_or_sea'][0].data == 'land'

        masked_datasets = []
        count = 0
        max_count = len(datasets)
        for ds in datasets:
            ds_name = os.path.basename(ds)
            LOGGER.info('masking dataset: {}'.format(ds_name))
            if 'mask' in request.inputs:
                landsea_mask = request.inputs['mask'][0].data
            else:
                landsea_mask = search_landsea_mask_by_esgf(ds)

            LOGGER.info("using landsea_mask: {}".format(landsea_mask))
            prefix = 'masked_{}'.format(ds_name.replace('.nc', ''))
            try:
                new_ds = masking(ds,
                                 landsea_mask,
                                 land_area=land_area_flag,
                                 prefix=prefix)
                masked_datasets.append(new_ds)

            except Exception as ex:
                msg = 'Could not subset dataset {}: {}'.format(
                    ds_name, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)
            count = count + 1
            response.update_status(
                "masked: {:d}/{:d}".format(count, max_count),
                int(100.0 * count / max_count))

        response.outputs['output_archive'].file = archive(masked_datasets)
        response.outputs['output_example'].file = masked_datasets[0]

        response.update_status("done", 100)
        return response
Example #12
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            indices = [inpt.data for inpt in request.inputs['indices']]
            LOGGER.debug("indices = {}".format(indices))
            archive_format = request.inputs['archive_format'][0].data
        except Exception as ex:
            msg = 'failed to read the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)
        LOGGER.info('indices {}'.format(indices))

        #################################
        # calculate the climate indices
        #################################

        # indices calculation
        try:
            response.update_status('calculation of indices', 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except Exception as ex:
            msg = 'indice calculation failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # archive multiple output files to one archive file
        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices 3D added to tarfile')
        except Exception as ex:
            msg = 'failed adding indices to tar: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output_indices'].file = archive_indices

        i = next((i for i, x in enumerate(ncs_indices) if x), None)
        response.outputs['ncout'].file = ncs_indices[i]

        response.update_status('done', 100)
        return response
Example #13
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        LOGGER.info("ncs: %s " % ncs)
        coords = [inpt.data for inpt in request.inputs['coords']]
        LOGGER.info("coords %s", coords)
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for %s ' % key)
                ncs = nc_exp[key]
                times = get_time(ncs, format='%Y-%m-%d_%H:%M:%S')
                concat_vals = times  # ['%s-%02d-%02d_%02d:%02d:%02d' %
                # (t.year, t.month, t.day, t.hour, t.minute, t.second) for t in times]
                header = 'date_time'
                filename = '%s.csv' % key
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point : {0}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',%s-%s' % (p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point %s %s' % (p, e))
                response.update_status('*** all points processed for {0} ****'.format(key), 50)
                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as e:
                LOGGER.debug('failed for %s %s' % (key, e))

        # set the outputs
        response.update_status('*** creating output tar archive ****', 90)
        tarout_file = archive(filenames)
        response.outputs['tarout'].file = tarout_file
        return response
Example #14
    def _handler(self, request, response):
        from tempfile import mkstemp

        tic = dt.now()
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at : {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            resource = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))[0]
            fmts = [e.data for e in request.inputs['fmt']]
            title = request.inputs['title'][0].data

        except Exception as e:
            msg = 'Failed to read input parameter {}'.format(e)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        try:
            fig = map_spatial_analog(resource, title=title)
            output = []

            for fmt in fmts:
                output.append(fig2plot(fig, fmt))

        except Exception as e:
            msg = "Failed to create figure: {}".format(e)
            LOGGER.error(msg)
            raise Exception(msg)

        finally:
            plt.close()

        if len(fmts) == 1:
            output = output[0]
        else:
            output = archive(output)

        response.outputs['output_figure'].file = output
        response.update_status("done", 100)
        return response
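
Note: a single requested format is returned as the figure file itself, while
several formats are bundled into one archive. fig2plot is flyingpigeon's
helper; presumably it boils down to one savefig call per requested format,
roughly as in this sketch (file naming is an assumption):

import matplotlib
matplotlib.use('Agg')  # headless backend for server-side rendering
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])

output = []
for fmt in ('png', 'pdf'):  # the requested formats
    fname = 'spatial_analog.{}'.format(fmt)  # assumed naming scheme
    fig.savefig(fname)
    output.append(fname)
plt.close(fig)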
Example #15
    def execute(self):
        from ast import literal_eval

        urls = self.getInputValues(identifier='resource')
        mosaik = self.mosaik.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()
        
        #logger.info('regions: %s' % regions)

        dimension_map = self.dimension_map.getValue()
        if dimension_map is not None:
            dimension_map = literal_eval(dimension_map)

        logger.info('urls = %s', urls)
        logger.info('regions = %s', regions)
        logger.info('mosaik = %s', mosaik)
        logger.info('dimension_map = %s', dimension_map)
    
        self.status.set('Arguments set for subset process', 0)

        logger.debug('starting: regions=%s, num_files=%s' % (len(regions), len(urls)))

        try:
            results = clipping(
                resource=urls,
                polygons=regions,  # self.region.getValue(),
                mosaik=mosaik,
                spatial_wrapping='wrap',
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
                dimension_map=dimension_map,
            )

        except Exception as e:
            logger.exception('clipping failed')
            self.status.set('clipping failed')
            raise
        # prepare tar file 
        try: 
            from flyingpigeon.utils import archive
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception as e:
            logger.exception('Tar file preparation failed')
            raise

        self.output.setValue(tarf)
        self.status.set('done', 100)
Example #16
    def execute(self):
        urls = self.getInputValues(identifier='resource')
        mosaic = self.mosaic.getValue()
        regions = self.region.getValue()
        variable = self.variable.getValue()

        #logger.info('regions: %s' % regions)

        # dimension_map = self.dimension_map.getValue()
        # if dimension_map != None:
        #     dimension_map = literal_eval(dimension_map)

        logger.info('urls = %s', urls)
        logger.info('regions = %s', regions)
        logger.info('mosaic = %s', mosaic)
        # logger.info('dimension_map = %s', dimension_map)

        self.status.set('Arguments set for subset process', 0)

        logger.debug('starting: regions=%s, num_files=%s' %
                     (len(regions), len(urls)))

        try:
            results = clipping(
                resource=urls,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                variable=variable,
                dir_output=os.path.abspath(os.curdir),
                #  dimension_map=dimension_map,
            )

        except Exception as e:
            logger.exception('clipping failed')
            self.status.set('clipping failed')
            raise
        # prepare tar file
        try:
            from flyingpigeon.utils import archive
            tarf = archive(results)
            logger.info('Tar file prepared')
        except Exception as e:
            logger.exception('Tar file preparation failed')
            raise

        self.output.setValue(tarf)
        self.status.set('done', 100)
Example #17
    def execute(self):
        from flyingpigeon import sdm
        from flyingpigeon.utils import archive

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        self.status.set('Start process', 0)

        try:
            logger.info('reading the arguments')
            resources = self.getInputValues(identifier='resources')
            indices = self.getInputValues(identifier='input_indices')
            logger.debug("indices = %s", indices)
            archive_format = self.archive_format.getValue()
        except Exception as e:
            msg = 'failed to read in the arguments: %s' % e
            logger.exception(msg)
            raise Exception(msg)
        logger.info('indices %s' % indices)

        #################################
        # calculate the climate indices
        #################################

        # indices calculation
        ncs_indices = None
        try:
            self.status.set(
                'start calculation of climate indices for %s' % indices, 30)
            ncs_indices = sdm.get_indices(resources=resources, indices=indices)
            logger.info('indice calculation done')
        except:
            msg = 'failed to calculate indices'
            logger.exception(msg)
            raise Exception(msg)

        # archive multiple output files to one archive file
        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            logger.info('indices 3D added to tarfile')
        except:
            msg = 'failed adding indices to tar'
            logger.exception(msg)
            raise Exception(msg)

        self.output_indices.setValue(archive_indices)
        self.status.set('done', 100)
Example #18
    def execute(self):
        from os.path import basename
        from flyingpigeon import sdm
        from flyingpigeon.utils import archive, archiveextract  # , get_domain
        from flyingpigeon.visualisation import map_PAmask

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        self.status.set('Start process', 0)

        try:
            self.status.set('reading the arguments', 5)
            resources = archiveextract(
                self.getInputValues(identifier='input_indices'))
            csv_file = self.getInputValues(identifier='gbif')[0]
            period = self.getInputValues(identifier='period')
            period = period[0]
            archive_format = self.archive_format.getValue()
        except Exception:
            logger.exception('failed to read in the arguments')
            raise

        try:
            self.status.set('read in latlon coordinates', 10)
            latlon = sdm.latlon_gbifcsv(csv_file)
        except:
            logger.exception('failed to extract the latlon points')

        try:
            self.status.set('plot map', 20)
            from flyingpigeon.visualisation import map_gbifoccurrences
            # latlon = sdm.latlon_gbifdic(gbifdic)
            occurence_map = map_gbifoccurrences(latlon)
        except:
            logger.exception('failed to plot occurrence map')

        # try:
        #     self.status.set('get domain', 30)
        #     domains = set()
        #     for indice in resources:
        #         # get_domain works only if metadata are set in a correct way
        #         domains = domains.union([basename(indice).split('_')[1]])
        #     if len(domains) == 1:
        #         domain = list(domains)[0]
        #         logger.info('Domain %s found in indices files' % domain)
        #     else:
        #         logger.warn('NOT a single domain in indices files %s' % domains)
        # except:
        #     logger.exception('failed to get domains')

        try:
            # sort indices
            indices_dic = sdm.sort_indices(resources)
            logger.info('indice files sorted for %s Datasets' %
                        len(indices_dic.keys()))
        except:
            msg = 'failed to sort indices'
            logger.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        self.status.set('Start processing for %s Datasets' %
                        len(indices_dic.keys()))
        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                self.status.set('Start processing of %s' % key, status_nr)
                ncs = indices_dic[key]
                logger.info('with %s files' % len(ncs))

                try:
                    self.status.set('generating the PA mask', 20)
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    logger.info('PA mask successfully generated')
                except:
                    logger.exception('failed to generate the PA mask')

                try:
                    self.status.set('Plotting PA mask', 25)
                    PAmask_pngs.extend([map_PAmask(PAmask)])
                except:
                    logger.exception('failed to plot the PA mask')

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    logger.info('reference indice calculated %s ' %
                                ncs_references)
                    self.status.set('reference indice calculated',
                                    status_nr + 2)
                except:
                    msg = 'failed to calculate the reference'
                    logger.exception(msg)
                    # raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask, modelname=key)
                    stat_infos.append(gam_info)
                    self.status.set('GAM successfully trained', status_nr + 5)
                except:
                    msg = 'failed to train GAM for %s' % (key)
                    logger.exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    self.status.set('prediction done', status_nr + 7)
                except:
                    msg = 'failed to predict tree occurence'
                    logger.exception(msg)
                    # raise Exception(msg)

                # try:
                #     self.status.set('land sea mask for predicted data', status_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     logger.exception('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    logger.info('Favourability written to file')
                except:
                    msg = 'failed to write species file'
                    logger.exception(msg)
                    # raise Exception(msg)
            except:
                msg = 'failed to process SDM chain for %s ' % key
                logger.exception(msg)
                # raise Exception(msg)

        try:
            archive_references = None
            archive_references = archive(ncs_references, format=archive_format)
            logger.info('indices 2D added to archive')
        except:
            msg = 'failed adding 2D indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_prediction = None
        try:
            archive_prediction = archive(species_files, format=archive_format)
            logger.info('species_files added to archive')
        except:
            msg = 'failed adding species_files to archive'
            logger.exception(msg)
            raise Exception(msg)

        try:
            from flyingpigeon.visualisation import pdfmerge, concat_images
            stat_infosconcat = pdfmerge(stat_infos)
            logger.debug('pngs %s' % PAmask_pngs)
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            logger.info('stat infos pdfs and mask pngs merged')
        except:
            logger.exception('failed to concat images')
            _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                                   prefix='foobar-',
                                                   dir='.')
            _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                             prefix='foobar-',
                                             dir='.')

        self.output_gbif.setValue(occurence_map)
        self.output_PA.setValue(PAmask_png)
        self.output_reference.setValue(archive_references)
        self.output_prediction.setValue(archive_prediction)
        self.output_info.setValue(stat_infosconcat)
        self.status.set('done', 100)
Example #19
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # input files
        LOGGER.debug("url={}, mime_type={}".format(
            request.inputs['resource'][0].url,
            request.inputs['resource'][0].data_format.mime_type))
        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))
        # mime_type=request.inputs['resource'][0].data_format.mime_type)
        # mosaic option
        # TODO: fix defaults in pywps 4.x
        if 'mosaic' in request.inputs:
            mosaic = request.inputs['mosaic'][0].data
        else:
            mosaic = False
        # regions used for subsetting
        regions = [inp.data for inp in request.inputs['region']]

        LOGGER.info('ncs = {}'.format(ncs))
        LOGGER.info('regions = {}'.format(regions))
        LOGGER.info('mosaic = {}'.format(mosaic))

        response.update_status("Arguments set for subset process", 0)
        LOGGER.debug('starting: regions=%s, num_files=%s', len(regions),
                     len(ncs))

        try:
            results = clipping(
                resource=ncs,
                polygons=regions,  # self.region.getValue(),
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                # dir_output=os.path.abspath(os.curdir),
                # dimension_map=dimension_map,
            )
            LOGGER.info('results {}'.format(results))
        except Exception as ex:
            msg = 'clipping failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('No results produced.')

        # prepare tar file
        try:
            tarf = archive(results)
            LOGGER.info('Tar file prepared')
        except Exception as ex:
            msg = 'Tar file preparation failed: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
Example #20
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            period = request.inputs['period']
            period = period[0].data
            indices = [inpt.data for inpt in request.inputs['indices']]
            archive_format = request.inputs['archive_format'][0].data
            LOGGER.info(
                "all arguments read in; nr of files in resources: {}".format(
                    len(resources)))
        except Exception as ex:
            msg = 'failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            gbif_url = request.inputs['gbif'][0].data
            csv_file = download(gbif_url)
            LOGGER.info('CSV file fetched successfully: %s' % csv_file)
        except Exception as ex:
            msg = 'failed to fetch GBIF file: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('read in latlon coordinates', 10)
            latlon = sdm.latlon_gbifcsv(csv_file)
            LOGGER.info('got occurrence coordinates from %s' % csv_file)
        except Exception as ex:
            msg = 'failed to extract the latlon points from file {}: {}'.format(
                csv_file, str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('plot map', 20)
            occurence_map = map_gbifoccurrences(latlon)
            LOGGER.info('GBIF occurrence plotted')
        except Exception as ex:
            msg = 'failed to plot occurrence map: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        try:
            response.update_status('start calculation of indices', 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except Exception as ex:
            msg = 'failed to calculate indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            # sort indices
            indices_dic = sdm.sort_indices(ncs_indices)
            LOGGER.info('indice files sorted in dictionary')
        except Exception as ex:
            msg = 'failed to sort indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        response.update_status('Start processing for {} datasets'.format(
            len(indices_dic.keys())))
        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                response.update_status('Start processing of {}'.format(key),
                                       status_nr)

                ncs = indices_dic[key]
                LOGGER.info('with {} files'.format(len(ncs)))

                try:
                    response.update_status('generating the PA mask', 20)
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    LOGGER.info('PA mask successfully generated')
                except Exception as ex:
                    msg = 'failed to generate the PA mask: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    response.update_status('Plotting PA mask', 25)
                    PAmask_pngs.extend([map_PAmask(PAmask)])
                except Exception as ex:
                    msg = 'failed to plot the PA mask: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    LOGGER.info('reference indice calculated {}'.format(
                        ncs_references))
                except Exception as ex:
                    msg = 'failed to calculate the reference: {}'.format(
                        str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    response.update_status('GAM successfully trained',
                                           status_nr + 5)
                except Exception as ex:
                    msg = 'failed to train GAM for {}: {}'.format(key, str(ex))
                    LOGGER.debug(msg)
                    raise Exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    response.update_status('prediction done', status_nr + 7)
                except Exception as ex:
                    msg = 'failed to predict tree occurence: {}'.format(
                        str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)
                #
                # try:
                #     response.update_status('land sea mask for predicted data',  status_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     LOGGER.debug('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    LOGGER.info('Favourability written to file')
                except Exception as ex:
                    msg = 'failed to write species file: {}'.format(str(ex))
                    LOGGER.debug(msg)
                    raise Exception(msg)

            except Exception as ex:
                msg = 'failed to process SDM chain for {} : {}'.format(
                    key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices added to archive')
        except Exception as ex:
            msg = 'failed adding indices to archive: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            archive_references = archive(ncs_references, format=archive_format)
            LOGGER.info('indices reference added to archive')
        except Exception as ex:
            msg = 'failed adding reference indices to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            archive_prediction = archive(species_files, format=archive_format)
            LOGGER.info('species_files added to archive')
        except Exception as ex:
            msg = 'failed adding species_files to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            stat_infosconcat = pdfmerge(stat_infos)
            LOGGER.debug('pngs {}'.format(PAmask_pngs))
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            LOGGER.info('stat infos pdfs and mask pngs merged')
        except Exception as ex:
            msg = 'failed to concat images: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        # self.output_csv.setValue(csv_file)
        response.outputs['output_gbif'].file = occurence_map
        response.outputs['output_PA'].file = PAmask_png
        response.outputs['output_indices'].file = archive_indices
        response.outputs['output_reference'].file = archive_references
        response.outputs['output_prediction'].file = archive_prediction
        response.outputs['output_info'].file = stat_infosconcat

        response.update_status('done', 100)
        return response
Example #21
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        colorscheems = [inpt.data for inpt in request.inputs['colorscheems']]

        # the BBox string is assumed to arrive as 'xmin,xmax,ymin,ymax';
        # reorder it to xmin, ymin, xmax, ymax
        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.exception(
                'period ends before period starts; period now set to the last 30 days from now'
            )

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type":
            "Polygon",
            "coordinates": [[[bbox[0], bbox[1]], [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]], [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]
        }

        footprint = geojson_to_wkt(geom)

        response.update_status("start searching tiles acording query", 15)

        products = api.query(
            footprint,
            date=(start, end),
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, cloud_cover),
            # producttype='SLC',
            # orbitdirection='ASCENDING',
        )

        LOGGER.debug('%s products found' % len(products.keys()))
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        resources = []

        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])
                file_zip = join(DIR_EO, '%s.zip' % (ID))
                DIR_tile = join(DIR_EO, '%s' % (filename))
                response.update_status("fetch file %s" % ID, 20)
                LOGGER.debug('path: %s' % DIR_tile)
                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        response.update_status(
                            "%s successfully fetched" % ID, 20)
                        LOGGER.debug('Tile {} fetched'.format(ID))
                    except Exception as ex:
                        msg = 'failed to fetch file {}: {}'.format(
                            filename, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                if exists(DIR_tile):
                    LOGGER.debug('file %s already unzipped' % filename)
                else:
                    try:
                        # zipfile = join(DIR_EO, '%szip' % (filename)).strip(form)
                        zip_ref = zipfile.ZipFile(file_zip, 'r')
                        zip_ref.extractall(DIR_EO)
                        zip_ref.close()
                        LOGGER.debug('Tile %s unzipped' % ID)
                    except Exception as ex:
                        msg = 'failed to extract {}: {}'.format(
                            file_zip, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                resources.append(DIR_tile)

            except Exception as ex:
                msg = 'failed to fetch {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        response.update_status("Plotting RGB graphics", 40)
        size = float(products[key]['size'].split(' ')[0])
        producttype = products[key]['producttype']
        beginposition = str(products[key]['beginposition'])

        # fp.write('%s \t %s \t %s \t %s \t %s \n' % (ID, size, producttype, beginposition, key))
        # response.outputs['output_txt'].file = filepathes
        # except:
        #     LOGGER.exception('failed to fetch resource')
        # response.outputs['output'].file = filepathes

        # try:
        #     extend = [float(bboxStr[0])-5, float(bboxStr[1])+5, float(bboxStr[2])-5, float(bboxStr[3])+5]
        #     img = eodata.plot_products(products, extend=extend)
        #     response.outputs['output_plot'].file = img
        #     LOGGER.debug('location of tiles plotted to map')
        # except:
        #     LOGGER.exception("Failed to plot extents of EO data")

        imgs = []
        colorscheem = colorscheems[0]
        try:
            for resource in resources:
                # LOGGER.debug('Scale and merge RGB bands')
                # tile = eodata.get_RGB(resource)
                LOGGER.debug('plot RGB image')
                img = eodata.plot_RGB(resource, colorscheem=colorscheem)
                LOGGER.debug('IMG plotted: {}'.format(img))
                imgs.append(img)
            LOGGER.debug('resources plotted')
        except Exception as ex:
            msg = 'failed to plot RGB graph: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is None:
            response.outputs['output_plot'].file = "dummy.png"
        else:
            response.outputs['output_plot'].file = imgs[i]

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
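Several of these handlers pick the first non-empty result as an example output and fall back to a dummy file. A minimal sketch of that selection, assuming `outputs` is a list of file paths in which failed steps left empty entries (`pick_example` is a hypothetical helper, not part of flyingpigeon):

def pick_example(outputs, fallback='dummy.png'):
    # index of the first truthy entry, or None if every step failed
    i = next((i for i, x in enumerate(outputs) if x), None)
    return fallback if i is None else outputs[i]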
Example #22
    def _handler(self, request, response):
        response.update_status("start fetch data", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        products = [inpt.data for inpt in request.inputs['products']]

        bboxStr = request.inputs['BBox'][0].data  # 'xmin,ymin,xmax,ymax'
        bboxStr = bboxStr.split(',')
        # reorder to [xmin, xmax, ymin, ymax]
        bbox = [
            float(bboxStr[0]),
            float(bboxStr[2]),
            float(bboxStr[1]),
            float(bboxStr[3]),
        ]

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.error(
                'period ends before period starts; period now set to the last 30 days from now'
            )

        token = request.inputs['token'][0].data
        archive_format = request.inputs['archive_format'][0].data

        resources = []
        # resources_sleeping = []
        for product in products:
            item_type, asset = product.split('__')
            LOGGER.debug('item type: %s, asset: %s' % (item_type, asset))
            fetch_sleep, tiles = fetch_eodata(item_type,
                                              asset,
                                              token,
                                              bbox,
                                              period=[start, end],
                                              cloud_cover=0.5,
                                              cache=True)
            resources.extend(tiles)
            # resources_sleeping.extend(fetch_sleep)

            dates = set()

            for tile in resources:
                dates = dates.union([eodata.get_timestamp(tile).date()])
            dl = list(dates)

            merged_tiles = []

            for date in dl:
                try:
                    LOGGER.debug("calculating date %s " % date)
                    tiles_day = [
                        tile for tile in tiles
                        if eodata.get_timestamp(tile).date() == date
                    ]
                    LOGGER.debug('%s files ready for merging' % len(tiles_day))
                    prefix = date.strftime("%Y%m%d")
                    mosaic = eodata.merge(tiles_day, prefix=prefix)
                    merged_tiles.extend([mosaic])
                except Exception:
                    LOGGER.exception("merge failed for date %s " % date)
        try:
            output_archive = archive(merged_tiles, format=archive_format)
            LOGGER.info('geotiff files added to archive')
        except Exception:
            msg = 'failed adding geotiff files to archive'
            LOGGER.exception(msg)
            raise Exception(msg)

        # response.outputs['output'].file = write_fileinfo(resource, filepath=True)
        response.outputs['output_archive'].file = output_archive

        i = next((i for i, x in enumerate(merged_tiles) if x), None)
        if i is None:
            LOGGER.error('failed to select example file')
            response.outputs['output_png'].file = "dummy.png"
        else:
            plot_example = eodata.plot_truecolorcomposite(merged_tiles[i])
            response.outputs['output_png'].file = plot_example

        response.update_status("done", 100)

        return response
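The bounding-box handling above repeats across handlers: the WPS input arrives as a 'xmin,ymin,xmax,ymax' string and is reordered before being passed on. A hedged sketch of that step (`parse_bbox` is a hypothetical helper; the [xmin, xmax, ymin, ymax] target order is taken from the code above):

def parse_bbox(bbox_str):
    # 'xmin,ymin,xmax,ymax' -> [xmin, xmax, ymin, ymax]
    xmin, ymin, xmax, ymax = (float(v) for v in bbox_str.split(','))
    return [xmin, xmax, ymin, ymax]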
Example #23
    def execute(self):
        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        from os.path import basename
        from flyingpigeon import sdm
        from flyingpigeon.utils import archive, archiveextract

        self.status.set('Start process', 0)

        try:
            logger.info('reading the arguments')
            resources = archiveextract(
                self.getInputValues(identifier='resources'))
            taxon_name = self.getInputValues(identifier='taxon_name')[0]
            bbox_obj = self.BBox.getValue()
            bbox = [
                bbox_obj.coords[0][0], bbox_obj.coords[0][1],
                bbox_obj.coords[1][0], bbox_obj.coords[1][1]
            ]
            period = self.getInputValues(identifier='period')
            period = period[0]
            indices = self.getInputValues(identifier='input_indices')
            archive_format = self.archive_format.getValue()
            logger.debug("indices = %s for %s ", indices, taxon_name)
            logger.info("bbox={0}".format(bbox))
        except Exception as e:
            logger.error('failed to read in the arguments %s ' % e)
        logger.info('indices %s ' % indices)

        try:
            self.status.set('Fetching GBIF Data', 10)
            gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)
        except Exception as e:
            msg = 'failed to search gbif.'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('write csv file', 70)
            gbifcsv = sdm.gbifdic2csv(gbifdic)
        except Exception as e:
            msg = 'failed to write csv file.'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('plot map', 80)
            from flyingpigeon.visualisation import map_gbifoccurrences
            latlon = sdm.latlon_gbifdic(gbifdic)
            occurrence_map = map_gbifoccurrences(latlon)
        except Exception as e:
            msg = 'failed to plot occurrence map.'
            logger.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        ncs_indices = None
        try:
            self.status.set(
                'start calculation of climate indices for %s' % indices, 30)
            ncs_indices = sdm.get_indices(resources=resources, indices=indices)
            logger.info('indice calculation done')
        except:
            msg = 'failed to calculate indices'
            logger.exception(msg)
            raise Exception(msg)

        try:
            self.status.set('get domain', 30)
            domains = set()
            for resource in ncs_indices:
                # get_domain works only if metadata are set in a correct way
                domains = domains.union([basename(resource).split('_')[1]])
            if len(domains) == 1:
                domain = list(domains)[0]
                logger.debug('Domain %s found in indices files' % domain)
            else:
                logger.error('Not a single domain in indices files %s' %
                             domains)
        except Exception as e:
            logger.exception('failed to get domains %s' % e)

        try:
            self.status.set('generating the PA mask', 20)
            PAmask = sdm.get_PAmask(coordinates=latlon, domain=domain)
            logger.info('PA mask successfully generated')
        except Exception as e:
            logger.exception('failed to generate the PA mask: %s' % e)

        try:
            self.status.set('Plotting PA mask', 25)
            from flyingpigeon.visualisation import map_PAmask
            PAmask_png = map_PAmask(PAmask)
        except Exception as e:
            logger.exception('failed to plot the PA mask: %s' % e)

        try:
            # sort indices
            indices_dic = None
            indices_dic = sdm.sort_indices(ncs_indices)
            logger.info('indice files sorted for %s Datasets' %
                        len(indices_dic.keys()))
        except:
            msg = 'failed to sort indices'
            logger.exception(msg)
            raise Exception(msg)

        ncs_references = []
        species_files = []
        stat_infos = []

        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                self.status.set('Start processing of %s' % key, status_nr)
                ncs = indices_dic[key]
                logger.info('with %s files' % len(ncs))
                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    logger.info('reference indice calculated %s ' %
                                ncs_references)
                except:
                    msg = 'failed to calculate the reference'
                    logger.exception(msg)
                    raise Exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    self.status.set('GAM successfully trained', status_nr + 5)
                except Exception as e:
                    msg = 'failed to train GAM for %s : %s' % (key, e)
                    logger.debug(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    self.status.set('prediction done', status_nr + 7)
                except Exception as e:
                    msg = 'failed to predict tree occurrence %s' % e
                    logger.exception(msg)
                    # raise Exception(msg)

                try:
                    self.status.set('land sea mask for predicted data',
                                    status_nr + 8)
                    from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                    mask = invert(isnan(PAmask))
                    mask = broadcast_arrays(prediction, mask)[1]
                    prediction[invert(mask)] = nan
                except Exception as e:
                    logger.debug('failed to mask predicted data: %s' % e)

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    logger.info('Favourability written to file')
                except Exception as e:
                    msg = 'failed to write species file %s' % e
                    logger.debug(msg)
                    # raise Exception(msg)

            except Exception as e:
                msg = 'failed to calculate reference indices. %s ' % e
                logger.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = None
            archive_indices = archive(ncs_indices, format=archive_format)
            logger.info('indices added to archive')
        except:
            msg = 'failed adding indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_references = None
        try:
            archive_references = archive(ncs_references, format=archive_format)
            logger.info('indices reference added to archive')
        except:
            msg = 'failed adding reference indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_prediction = None
        try:
            archive_prediction = archive(species_files, format=archive_format)
            logger.info('species_files added to archive')
        except:
            msg = 'failed adding species files to archive'
            logger.exception(msg)
            raise Exception(msg)
        try:
            from flyingpigeon.visualisation import pdfmerge
            stat_infosconcat = pdfmerge(stat_infos)
            logger.info('stat infos pdfs merged')
        except:
            logger.exception('failed to concat images')
            _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                                   prefix='foobar-',
                                                   dir='.')

        self.output_csv.setValue(gbifcsv)
        self.output_gbif.setValue(occurrence_map)
        self.output_PA.setValue(PAmask_png)
        self.output_indices.setValue(archive_indices)
        self.output_reference.setValue(archive_references)
        self.output_prediction.setValue(archive_prediction)
        self.output_info.setValue(stat_infosconcat)

        self.status.set('done', 100)
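All of these examples call flyingpigeon.utils.archive with a list of files plus optional format, dir_output and mode arguments. A minimal stand-in with the behaviour these call sites rely on (a sketch, not the library implementation):

import os
import tarfile
import tempfile
import zipfile

def archive_sketch(resources, format='tar', dir_output=None, mode='w'):
    # pack a list of files into a tar or zip archive and return its path
    dir_output = dir_output or tempfile.mkdtemp()
    if format == 'tar':
        _, out = tempfile.mkstemp(suffix='.tar', dir=dir_output)
        with tarfile.open(out, mode) as tar:
            for f in resources:
                tar.add(f, arcname=os.path.basename(f))
    elif format == 'zip':
        _, out = tempfile.mkstemp(suffix='.zip', dir=dir_output)
        with zipfile.ZipFile(out, mode) as zipf:
            for f in resources:
                zipf.write(f, arcname=os.path.basename(f))
    else:
        raise ValueError('unsupported archive format: %s' % format)
    return out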
Example #24
    def execute(self):
        from os import mkdir, path, listdir
        from flyingpigeon import segetalflora as sf

        init_process_logger('log.txt')
        self.output_log.setValue('log.txt')

        logging.debug('starting segetalflora process execution')
        self.status.set('starting calculation segetalflora', 5)

        ############################
        # read arguments to variables
        ############################
        try:
            resource = self.getInputValues(identifier='resource')
            climate_type = self.climate_type.getValue()
            culture_type = self.culture_type.getValue()

            logging.info('urls for %s ncs found' % (len(resource)))
            logging.info('culture type: %s ' % (culture_type))
        except Exception as e:
            logger.debug('failed to read in the arguments: %s ' % e)

        try:
            if not isinstance(climate_type, list):
                climate_type = [climate_type]
            if not isinstance(culture_type, list):
                culture_type = [culture_type]
            logger.info('arguments are lists')
        except Exception as e:
            logger.debug('failed to transform arguments to lists: %s ' % e)

        #############################
        # get yearly mean temperature
        #############################

        nc_tasmean = sf.get_yrmean(resource)

        #######################################
        # main call for segetalflora processing
        #######################################

        nc_sf = sf.get_segetalflora(resource=nc_tasmean,
                                    culture_type=culture_type,
                                    climate_type=climate_type)

        ####################
        # tar file archiving
        ####################

        try:
            from flyingpigeon.utils import archive
            self.status.set('files to tar archives', 99)
            tar_sf = archive(nc_sf, format='tar', dir_output='.', mode='w')
            tar_tasmean = archive(nc_tasmean,
                                  format='tar',
                                  dir_output='.',
                                  mode='w')
            logging.info('Archives prepared')
        except Exception as e:
            logger.debug('failed to archive files %s' % e)

    # === set output parameter
        self.out_segetalflora.setValue(tar_sf)
        self.out_tasmean.setValue(tar_tasmean)
        self.status.set("processing done", 100)
Example #25
    def _handler(self, request, response):
        response.update_status("start fetch data", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        products = [inpt.data for inpt in request.inputs['products']]

        bboxStr = request.inputs['BBox'][0].data  # 'xmin,ymin,xmax,ymax'
        bboxStr = bboxStr.split(',')
        # reorder to [xmin, xmax, ymin, ymax]
        bbox = [
            float(bboxStr[0]),
            float(bboxStr[2]),
            float(bboxStr[1]),
            float(bboxStr[3]),
        ]

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.error(
                'period ends before period starts; period now set to the last 30 days from now'
            )

        token = request.inputs['token'][0].data
        archive_format = request.inputs['archive_format'][0].data

        resources = []

        # resources_sleeping = []
        for product in products:
            if product == 'PlanetScope':
                item_type = 'PSScene4Band'
                assets = ['analytic', 'analytic_xml']
                for asset in assets:
                    LOGGER.debug('item type: {}, asset: {}'.format(
                        item_type, asset))
                    fetch_sleep, tiles = fetch_eodata(item_type,
                                                      asset,
                                                      token,
                                                      bbox,
                                                      period=[start, end],
                                                      cloud_cover=0.5,
                                                      cache=True)
                    resources.extend(tiles)

                    # TODO: include merge of NDVI
                    # for tile in tiles:
                    #     dates = dates.union([eodata.get_timestamp(tile).date()])
                    # dl = list(dates)
                    #
                    # for date in dl:
                    #     print "calculating date %s " % date
                    #     tiles_day = [tile for tile in tiles if eodata.get_timestamp(tile).date() == date]
                    #     print(tiles_day)
                    #     archive = eodata.merge(tiles_day)
                    #     print archive

                    # resources_sleeping.extend(fetch_sleep)
                LOGGER.debug('{} tiles fetched'.format(len(resources)))
                response.update_status("calculating NDVI ", 30)
                try:
                    LOGGER.debug('Start calculating NDVI')
                    ndvi_tiles = eodata.ndvi(resources, product)
                    # ndvi_merged = eodata.merge(ndvi_tiles)
                except Exception as ex:
                    msg = 'failed to calculate NDVI: {}'.format(str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)
        try:
            ndvi_archive = archive(ndvi_tiles, format=archive_format)
            LOGGER.info('geotiff files added to archive')
        except Exception as ex:
            msg = 'failed adding NDVI files to archive: {}'.format(
                str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['ndvi_archive'].file = ndvi_archive

        i = next((i for i, x in enumerate(ndvi_tiles) if x), None)
        if i is None:
            response.outputs['ndviexample'].file = "dummy.png"
        else:
            LOGGER.debug('start plotting test files for quick check')
            # TODO plot_ndvi does not resolve. Critical error if called.
            ndvi_plot = eodata.plot_ndvi(ndvi_tiles[i])
            LOGGER.debug('NDVI test plot {}'.format(ndvi_plot))

            response.outputs['ndviexample'].file = ndvi_plot

        response.update_status("done", 100)

        return response
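eodata.ndvi is opaque here; the index it computes is presumably the standard normalized difference (NIR - Red) / (NIR + Red). A sketch assuming two float arrays `nir` and `red` of the same shape (`ndvi` here is a hypothetical stand-in, not the library function):

import numpy as np

def ndvi(nir, red):
    # suppress numpy warnings; pixels with nir + red == 0 come out as NaN/inf
    with np.errstate(divide='ignore', invalid='ignore'):
        return (nir - red) / (nir + red)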
Example #26
    def execute(self):
      from os.path import basename
      from flyingpigeon import sdm
      from flyingpigeon import spatial_analog as sa
      from flyingpigeon.utils import archive

      self.status.set('Start process', 0)
      
      try: 
        logger.info('reading the arguments')
        resources = self.getInputValues(identifier='resources')
        #taxon_name = self.getInputValues(identifier='taxon_name')[0]
        #period = self.period.getValue()
        coords = self.getInputValues(identifier='coords')[0]
        period = self.getInputValues(identifier='period')[0]
        coordinate = [float(n) for n in coords.split(',')]
        
        #indices = self.input_indices.getValue()
        indices = self.getInputValues(identifier='input_indices')
        logger.info("indices = %s ", indices)
        
        archive_format = self.archive_format.getValue()
      except Exception as e: 
        logger.error('failed to read in the arguments %s ' % e)
     
      #################################
      ### calculate the climate indices
      #################################
      
      # get the indices
      ncs_indices = None
      try:
        self.status.set('start calculation of climate indices for %s' % indices, 30)
        ncs_indices = sdm.get_indices(resources=resources, indices=indices)
        logger.info('indice calculation done')
      except:
        msg = 'failed to calculate indices'
        logger.debug(msg)
        # raise Exception(msg)

      try:
        archive_indices = archive(ncs_indices, format=archive_format)
        logger.info('indices 3D added to tarfile')
      except:
        msg = 'failed adding indices to tar'  
        logger.debug(msg)
        # raise Exception(msg)  

      indices_dic = None
      try: 
        # sort indices
        indices_dic = sdm.sort_indices(ncs_indices)
        logger.info('indice files sorted for %s datasets' % len(indices_dic.keys()))
      except:
        msg = 'failed to sort indices'
        logger.debug(msg)
        # raise Exception(msg)

      ncs_references = []
      analogs = []
      statistics_info = []

      for count, key in enumerate(indices_dic.keys()):
        try:
          self.status.set('Start processing of %s ' % key, 40 + count * 10)
          ncs = indices_dic[key]
          logger.info('with %s files' % len(ncs))

          gam_model, statistic_plot = sa.get_gam(ncs, coordinate)
          statistics_info.append(statistic_plot)
          self.status.set('GAM successfully trained', 70)
        except:
          msg = 'failed to train GAM'  
          logger.debug(msg)
          # raise Exception(msg)

        try:
          prediction = sdm.get_prediction(gam_model, ncs_indices)
          self.status.set('prediction done', 80)
        except:
          msg = 'failed to predict'   
          logger.debug(msg)
          # raise Exception(msg)
          
      #   try:
      #     from numpy import invert, isnan, nan, broadcast_arrays, array, zeros, linspace, meshgrid
      #     mask = invert(isnan(PAmask))
      #     mask = broadcast_arrays(prediction, mask)[1]
      #     prediction[mask==False] = nan
      #     self.status.set('land sea mask for predicted data', 90)
      #   except: 
      #     logger.debug('failed to mask predicted data')

        try: 
          analogs.append(sdm.write_to_file(ncs_indices[0], prediction))

          logger.info('Analog written to file')
          #tar_prediction.add(species_file, 
           #               arcname = basename(species_file))#.replace(os.path.abspath(os.path.curdir), ""))
        except:
          msg = 'failed to write species file'
          logger.debug(msg)
          # raise Exception(msg)

      from flyingpigeon.visualisation import concat_images
      statistics_infos = None
      try: 
        statistics_infos = concat_images(statistics_info, orientation='v')
        logger.info('statistic graphics concatenated')
      except:
        msg = 'failed to concat images'  
        logger.debug(msg)
        # raise Exception(msg)  

      # # archive_references = None
      # # try:
      # #   archive_references = archive(ncs_references , format=archive_format)
      # #   logger.info('indices 2D added to archive')
      # # except:
      # #   msg = 'failed adding 2D indices to archive'  
      # #   logger.debug(msg)
      # #   # raise Exception(msg) 
      # archive_analogs = None
      
      try:
        archive_analogs = archive(analogs, format=archive_format)
        logger.info('analog file added to archive')
      except:
        msg = 'failed adding analog file to archive'  
        logger.debug(msg)
        # raise Exception(msg)  

      self.output_indices.setValue(archive_indices)
      self.output_analogs.setValue(archive_analogs)
      i = next((i for i, x in enumerate(analogs) if x), None)
      if i is not None:
        self.output_example.setValue(analogs[i])
      self.output_info.setValue(statistics_infos)

      self.status.set('done', 100)
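Example #27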
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            if 'region' in request.inputs:
                region = request.inputs['region'][0].data
            else:
                region = None

            if 'mosaic' in request.inputs:
                mosaic = request.inputs['mosaic'][0].data
            else:
                mosaic = False

            percentile = request.inputs['percentile'][0].data

            LOGGER.debug("mosaic %s " % mosaic)
            LOGGER.debug('percentile: %s' % percentile)
            LOGGER.debug('region %s' % region)
            LOGGER.debug('Nr of input files %s ' % len(resources))

        except:
            LOGGER.exception('failed to read in the arguments')

        from flyingpigeon.utils import sort_by_filename
        from flyingpigeon.ocgis_module import call

        datasets = sort_by_filename(resources, historical_concatination=True)
        results = []

        kwds = {'percentile': percentile, 'window_width': 5}
        calc = [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}]

        try:
            for key in datasets.keys():
                try:
                    if region is None:
                        result = call(
                            resource=datasets[key],
                            output_format='nc',
                            calc=calc,
                            # prefix=key,
                            # time_region={'year': [1995, 2000]}
                            # calc_grouping='year'
                        )
                        results.extend([result])
                        LOGGER.debug('percentile based indice done for %s' %
                                     result)
                    else:
                        result = clipping(
                            resource=datasets[key],
                            #  variable=None,
                            calc=calc,
                            #  calc_grouping=None,
                            #  time_range=None,
                            #  time_region=None,
                            polygons=region,
                            mosaic=mosaic)
                        results.extend(result)
                except:
                    LOGGER.exception(
                        "failed to calculate percentil based indice for %s " %
                        key)
        except:
            LOGGER.exception("failed to calculate percentile indices")

        tarf = archive(results)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        if i is None:
            response.outputs['ncout'].file = 'dummy.nc'
        else:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
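The calc specification above follows the [{'func', 'name', 'kwds'}] layout that the ocgis-backed `call` expects. A sketch of wiring the user-supplied percentile into it (`build_daily_perc_calc` is a hypothetical helper):

def build_daily_perc_calc(percentile, window_width=5):
    # one ocgis-style calc entry for the daily percentile function
    kwds = {'percentile': percentile, 'window_width': window_width}
    return [{'func': 'daily_perc', 'name': 'dp', 'kwds': kwds}]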
Example #28
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        response.update_status('Start process', 0)

        try:
            LOGGER.info('reading the arguments')
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            taxon_name = request.inputs['taxon_name'][0].data
            bbox = [-180, -90, 180, 90]
            # bbox_obj = self.BBox.getValue()
            # bbox = [bbox_obj.coords[0][0],
            #         bbox_obj.coords[0][1],
            #         bbox_obj.coords[1][0],
            #         bbox_obj.coords[1][1]]
            period = request.inputs['period']
            period = period[0].data
            indices = [inpt.data for inpt in request.inputs['indices']]
            archive_format = request.inputs['archive_format'][0].data
            LOGGER.exception("indices = %s for %s ", indices, taxon_name)
            LOGGER.info("bbox={0}".format(bbox))
        except:
            LOGGER.exception('failed to read in the arguments')
        LOGGER.info('indices %s ' % indices)

        try:
            response.update_status('Fetching GBIF Data', 10)
            gbifdic = sdm.get_gbif(taxon_name, bbox=bbox)
            LOGGER.info('Fetched GBIF data')
        except:
            msg = 'failed to search gbif.'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('write csv file', 70)
            gbifcsv = sdm.gbifdic2csv(gbifdic)
            LOGGER.info('GBIF data written to file')
        except:
            msg = 'failed to write csv file.'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            response.update_status('plot map', 80)
            latlon = sdm.latlon_gbifdic(gbifdic)
            occurrence_map = map_gbifoccurrences(latlon)
        except:
            msg = 'failed to plot occurrence map.'
            LOGGER.exception(msg)
            raise Exception(msg)

        #################################
        # calculate the climate indices
        #################################

        # get the indices
        ncs_indices = None
        try:
            response.update_status(
                'start calculation of climate indices for %s' % indices, 30)
            ncs_indices = sdm.get_indices(resource=resources, indices=indices)
            LOGGER.info('indice calculation done')
        except:
            msg = 'failed to calculate indices'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            # sort indices
            indices_dic = sdm.sort_indices(ncs_indices)
            LOGGER.info('indice files sorted in dictionary')
        except:
            msg = 'failed to sort indices'
            LOGGER.exception(msg)
            indices_dic = {'dummy': []}

        ncs_references = []
        species_files = []
        stat_infos = []
        PAmask_pngs = []

        response.update_status('Start processing for %s Datasets' %
                               len(indices_dic.keys()))

        for count, key in enumerate(indices_dic.keys()):
            try:
                status_nr = 40 + count * 10
                response.update_status('Start processing of %s' % key,
                                       status_nr)
                ncs = indices_dic[key]
                LOGGER.info('with %s files' % len(ncs))

                try:
                    response.update_status('generating the PA mask', 20)
                    PAmask = sdm.get_PAmask(coordinates=latlon, nc=ncs[0])
                    LOGGER.info('PA mask successfully generated')
                except:
                    LOGGER.exception('failed to generate the PA mask')

                try:
                    response.update_status('Plotting PA mask', 25)
                    PAmask_pngs.extend([map_PAmask(PAmask)])
                except:
                    LOGGER.exception('failed to plot the PA mask')

                try:
                    ncs_reference = sdm.get_reference(ncs_indices=ncs,
                                                      period=period)
                    ncs_references.extend(ncs_reference)
                    LOGGER.info('reference indice calculated %s ' %
                                ncs_references)
                except:
                    msg = 'failed to calculate the reference'
                    LOGGER.exception(msg)

                try:
                    gam_model, predict_gam, gam_info = sdm.get_gam(
                        ncs_reference, PAmask)
                    stat_infos.append(gam_info)
                    response.update_status('GAM successfully trained',
                                           status_nr + 5)
                except:
                    msg = 'failed to train GAM for %s' % (key)
                    LOGGER.exception(msg)

                try:
                    prediction = sdm.get_prediction(gam_model, ncs)
                    response.update_status('prediction done', status_nr + 7)
                except:
                    msg = 'failed to predict tree occurrence'
                    LOGGER.exception(msg)
                    # raise Exception(msg)

                # try:
                #     response.update_status('land sea mask for predicted data',  staus_nr + 8)
                #     from numpy import invert, isnan, nan, broadcast_arrays  # , array, zeros, linspace, meshgrid
                #     mask = invert(isnan(PAmask))
                #     mask = broadcast_arrays(prediction, mask)[1]
                #     prediction[mask is False] = nan
                # except:
                #     LOGGER.exception('failed to mask predicted data')

                try:
                    species_files.append(sdm.write_to_file(ncs[0], prediction))
                    LOGGER.info('Favourability written to file')
                except:
                    msg = 'failed to write species file'
                    LOGGER.exception(msg)
                    # raise Exception(msg)

            except:
                msg = 'failed to calculate reference indices'
                LOGGER.exception(msg)
                raise Exception(msg)

        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            LOGGER.info('indices added to archive')
        except:
            msg = 'failed adding indices to archive'
            LOGGER.exception(msg)
            _, archive_indices = tempfile.mkstemp(suffix='.tar',
                                                  prefix='foobar-',
                                                  dir='.')

        try:
            archive_references = archive(ncs_references, format=archive_format)
            LOGGER.info('indices reference added to archive')
        except:
            msg = 'failed adding reference indices to archive'
            LOGGER.exception(msg)
            _, archive_references = tempfile.mkstemp(suffix='.tar',
                                                     prefix='foobar-',
                                                     dir='.')

        try:
            archive_prediction = archive(species_files, format=archive_format)
            LOGGER.info('species_files added to archive')
        except:
            msg = 'failed adding species files to archive'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            stat_infosconcat = pdfmerge(stat_infos)
            LOGGER.debug('pngs %s' % PAmask_pngs)
            PAmask_png = concat_images(PAmask_pngs, orientation='h')
            LOGGER.info('stat infos pdfs and mask pngs merged')
        except:
            LOGGER.exception('failed to concat images')
            _, stat_infosconcat = tempfile.mkstemp(suffix='.pdf',
                                                   prefix='foobar-',
                                                   dir='.')
            _, PAmask_png = tempfile.mkstemp(suffix='.png',
                                             prefix='foobar-',
                                             dir='.')

        response.outputs['output_gbif'].file = occurrence_map
        response.outputs['output_PA'].file = PAmask_png
        response.outputs['output_indices'].file = archive_indices
        response.outputs['output_reference'].file = archive_references
        response.outputs['output_prediction'].file = archive_prediction
        response.outputs['output_info'].file = stat_infosconcat
        response.outputs['output_csv'].file = gbifcsv

        response.update_status('done', 100)
        return response
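Note the fallback pattern used above: tempfile.mkstemp returns an (fd, path) tuple, so the path has to be unpacked before it can be handed to an output. A small helper sketch (`fallback_file` is hypothetical):

import tempfile

def fallback_file(suffix):
    # create an empty placeholder file in the working directory
    _, path = tempfile.mkstemp(suffix=suffix, prefix='foobar-', dir='.')
    return path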
Example #29
def test_archive_tar():
    result = utils.archive(
        [local_path(TESTDATA["cmip5_tasmax_2007_nc"])],
        format="tar",
        dir_output=tempfile.mkdtemp())
    tar = tarfile.open(result)
    assert len(tar.getnames()) == 1
Example #30
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        try:
            resources = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))

            indices = [inpt.data for inpt in request.inputs['indices']]
            grouping = [inpt.data for inpt in request.inputs['grouping']]

            if 'mosaic' in request.inputs:
                mosaic = request.inputs['mosaic'][0].data
            else:
                mosaic = False

            if 'region' in request.inputs:
                region = [inpt.data for inpt in request.inputs['region']]
            else:
                region = None

            LOGGER.debug('grouping: {}'.format(grouping))
            LOGGER.debug('mosaic: {}'.format(mosaic))
            LOGGER.debug('indices: {}'.format(indices))
            LOGGER.debug('region: {}'.format(region))
            LOGGER.debug('Nr of input files: {}'.format(len(resources)))
        except Exception as ex:
            LOGGER.exception('failed to read in the arguments: {}'.format(str(ex)))

        response.update_status(
            'starting: indices={}, grouping={}, num_files={}'.format(indices, grouping, len(resources)), 2)

        from flyingpigeon.utils import sort_by_filename
        datasets = sort_by_filename(resources, historical_concatination=True)
        results = []
        try:
            group = grouping[0]  # for group in grouping:
            indice = indices[0]  # for indice in indices:
            for key in datasets.keys():
                try:
                    response.update_status('Dataset {}: {}'.format(len(results) + 1, key), 10)

                    LOGGER.debug('grouping: {}'.format(grouping))
                    LOGGER.debug('mosaic: {}'.format(mosaic))
                    LOGGER.debug('indice: {}'.format(indice))
                    LOGGER.debug('region: {}'.format(region))
                    LOGGER.debug('Nr of input files: {}'.format(len(datasets[key])))

                    result = calc_indice_simple(
                        resource=datasets[key],
                        mosaic=mosaic,
                        indice=indice,
                        polygons=region,
                        grouping=group,
                        # dir_output=path.curdir,
                    )
                    LOGGER.debug('result: {}'.format(result))
                    results.extend(result)

                except Exception as ex:
                    msg = 'failed for {}: {}'.format(key, str(ex))
                    LOGGER.exception(msg)
                    raise Exception(msg)

        except Exception as ex:
            msg = 'Failed to calculate indices: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)


        tarf = archive(results)

        response.outputs['output_archive'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        if i is None:
            response.outputs['ncout'].file = 'dummy.nc'
        else:
            response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
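The per-dataset loop skeleton shared by these handlers can be factored out as follows (a sketch assuming `process` returns a list of output files per dataset; `process_datasets` is hypothetical):

import logging

LOGGER = logging.getLogger(__name__)

def process_datasets(datasets, process):
    # run `process` per grouped dataset, logging failures instead of aborting
    results = []
    for key, files in datasets.items():
        try:
            results.extend(process(files))
        except Exception:
            LOGGER.exception('failed for %s', key)
    return results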
Example #31
    def execute(self):
      from os.path import basename

      from flyingpigeon import sdm
      from flyingpigeon.utils import archive

      self.status.set('Start process', 0)
      
      try: 
        logger.info('reading the arguments')
        resources = self.getInputValues(identifier='resources')
        taxon_name = self.getInputValues(identifier='taxon_name')[0]
        #period = self.period.getValue()
        period = self.getInputValues(identifier='period')
        period = period[0]
        
        #indices = self.input_indices.getValue()
        indices = self.getInputValues(identifier='input_indices')
        logger.debug("indices = %s for %s ", indices, taxon_name)
        
        archive_format = self.archive_format.getValue()
      except Exception as e: 
        logger.error('failed to read in the arguments %s ' % e)
      logger.info('indices %s ' % indices)
      
      try:
        self.status.set('Fetching GBIF Data', 10)
        latlon = sdm.gbif_serach(taxon_name)
      except Exception as e: 
        logger.exception('failed to search gbif %s' % e)
        
      #try:
        #self.status.set('extract csv file with tree observations', 5)
        #csv_file = sdm.get_csv(taxon_name[0])
      #except Exception as e: 
        #logger.exception('failed to extract csv file from url.')

      #try:
        #self.status.set('read in latlon coordinates of tree observations', 10)
        #latlon = sdm.get_latlon(csv_file)
      #except Exception as e: 
        #logger.exception('failed to extract the latlon points')

      
      try:
        from flyingpigeon.visualisation import map_gbifoccurrences
        self.status.set('plotting tree presence based on coordinates', 15)
        tree_presents = map_gbifoccurrences(latlon)
      except Exception as e:
        msg = 'plotting points failed'
        logger.exception(msg)
        tree_presents = 'tree_presents.png'  # fallback placeholder file
        with open(tree_presents, 'w') as fp:
            # TODO: needs to be a png file
            fp.write(msg)
      
      try:
        self.status.set('generating the PA mask', 20)
        PAmask = sdm.get_PAmask(coordinates=latlon)
        logger.info('PA mask successfully generated')
      except Exception as e: 
        logger.exception('failed to generate the PA mask')
        
      png_PA_mask = 'PA_mask.png'
      try:
        import matplotlib.pyplot as plt
        self.status.set('Plotting PA mask', 25)
        fig = plt.figure(figsize=(20,10), dpi=300, facecolor='w', edgecolor='k')
        cs = plt.contourf(PAmask)
        fig.savefig(png_PA_mask)
        plt.close()
      except Exception as e:
        msg = 'failed to plot the PA mask'
        logger.exception(msg)
        with open(png_PA_mask, 'w') as fp:
            # TODO: needs to be a png file
            fp.write(msg)
      
      #################################
      ### calculate the climate indices
      #################################
      
      # get the indices
      ncs_indices = None
      try:
        self.status.set('start calculation of climate indices for %s' % indices, 30 )
        ncs_indices = sdm.get_indices(resources=resources, indices=indices)
        logger.info('indice calculation done')
      except:
        msg = 'failed to calculate indices'
        logger.exception(msg)
        raise Exception(msg)

      try:
        archive_indices = archive(ncs_indices, format=archive_format)
        logger.info('indices 3D added to tarfile')
      except:
        msg = 'failed adding indices to tar'  
        logger.exception(msg)
        raise Exception(msg)  

      indices_dic = None
      try: 
        # sort indices
        indices_dic = sdm.sort_indices(ncs_indices)
        logger.info('indice files sorted for %s Datasets' % len(indices_dic.keys()))
      except:
        msg = 'failed to sort indices'
        logger.exception(msg)
        raise Exception(msg)

      # try:
      #   # open tar files
      #   tar_reference = tarfile.open('reference.tar', "w")
      #   tar_indices = tarfile.open('indices.tar', "w")

      #   tar_info = tarfile.open('info.tar', "w")
      #   tar_prediction = tarfile.open('prediction.tar', "w")
        
      #   logger.info('tar files prepared')
      # except:
      #   msg = 'tar file preparation failed'
      #   logger.exception(msg)
      #   raise Exception(msg)


      ncs_references = []
      species_files = []
      statistics_info = []

      for count, key in enumerate(indices_dic.keys()):
        try:
          self.status.set('Start processing of %s ' % key, 40 + count * 10)
          
          ncs = indices_dic[key]
          
          logger.info('with %s files' % len(ncs))
            
          try: 
            ncs_references.extend(sdm.get_reference(ncs_indices=ncs, period=period))
            logger.info('reference indice calculated %s ' % ncs_references)
          except:
            msg = 'failed adding ref indices to tar'
            logger.exception(msg)
            raise Exception(msg)
          
          # for nc_reference in ncs_references:
          #   tar_reference.add(nc_reference, 
          #       arcname = basename(nc_reference))# nc_reference.replace(os.path.abspath(os.path.curdir), ""))
          
          # logger.info('reference indices added to tarfile')
          
        except:
          msg = 'failed to calculate reference indices.'
          logger.exception(msg)
          raise Exception(msg)

        try:
          gam_model, predict_gam, gam_info = sdm.get_gam(ncs_references, PAmask)
          statistics_info.append(gam_info)
          self.status.set('GAM successfully trained', 70)
        except:
          msg = 'failed to train GAM'  
          logger.exception(msg)
          raise Exception(msg)

        try:
          prediction = sdm.get_prediction(gam_model, ncs_indices)
          self.status.set('prediction done', 80)
        except:
          msg = 'failed to predict'   
          logger.exception(msg)
          raise Exception(msg)
          
        try:
          from numpy import invert, isnan, nan, broadcast_arrays, array, zeros, linspace, meshgrid
          mask = invert(isnan(PAmask))
          mask = broadcast_arrays(prediction, mask)[1]
          prediction[invert(mask)] = nan
          self.status.set('land sea mask for predicted data', 90)
        except: 
          logger.exception('failed to mask predicted data')

        try: 
          species_files.append(sdm.write_to_file(ncs_indices[0], prediction))

          logger.info('Favourability written to file')
          #tar_prediction.add(species_file, 
           #               arcname = basename(species_file))#.replace(os.path.abspath(os.path.curdir), ""))
        except:
          msg = 'failed to write species file'
          logger.exception(msg)
          raise Exception(msg)

      from flyingpigeon.visualisation import concat_images
      statistics_infos = None
      try: 
        statistics_infos = concat_images(statistics_info, orientation='v')
      except:
        msg = 'failed to concat images'  
        logger.exception(msg)
        raise Exception(msg)  

      archive_references = None
      try:
        archive_references = archive(ncs_references, format=archive_format)
        logger.info('indices 2D added to archive')
      except:
        msg = 'failed adding 2D indices to archive'  
        logger.exception(msg)
        raise Exception(msg)  

      archive_prediction = None
      try:
        archive_prediction = archive(species_files, format=archive_format)
        logger.info('species_files added to archive')
      except:
        msg = 'failed adding species files to archive'
        logger.exception(msg)
        raise Exception(msg)  

      # try:
      #   #tar_indices.close()
      #   #tar_reference.close()

      #   tar_prediction.close()
      #   #tar_info.close()
        
      #   logger.info('tar files closed')
      # except:
      #   logger.exception('tar file closing failed')
      #   raise Exception
      # self.output_csv.setValue(csv_file)
      self.output_gbif.setValue(tree_presents)
      self.output_PA.setValue(png_PA_mask)
      self.output_indices.setValue(archive_indices)
      self.output_reference.setValue(archive_references)
      self.output_prediction.setValue(archive_prediction)
      self.output_info.setValue(statistics_infos)

      self.status.set('done', 100)
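The land-sea masking step above, as a self-contained NumPy sketch (`mask_prediction` is hypothetical): broadcast the PA mask to the prediction's shape and set masked cells to NaN.

import numpy as np

def mask_prediction(prediction, pa_mask):
    # cells where the PA mask is NaN are treated as sea/invalid
    mask = ~np.isnan(pa_mask)
    mask = np.broadcast_arrays(prediction, mask)[1]
    masked = prediction.copy()
    masked[~mask] = np.nan
    return masked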
Example #32
    def execute(self):

        from os import mkdir, path, listdir

        # import tarfile
        # from tempfile import  mkstemp #, mkdtemp
        # from datetime import datetime

        from flyingpigeon import segetalflora as sf

        logging.debug('starting segetalflora process execution')
        self.status.set('starting calculation segetalflora', 5)

        ############################
        # read arguments to variables
        ############################
        try:
            resource = self.getInputValues(identifier='resource')
            climate_type = self.climate_type.getValue()
            culture_type = self.culture_type.getValue()

            logging.info('urls for %s ncs found' % (len(resource)))
            logging.info('culture type: %s ' % (culture_type))
        except Exception as e:
            logger.debug('failed to read in the arguments: %s ' % e)

        try:
            if not isinstance(climate_type, list):
                climate_type = [climate_type]
            if not isinstance(culture_type, list):
                culture_type = [culture_type]
            logger.info('arguments are lists')
        except Exception as e:
            logger.debug('failed to transform arguments to lists: %s ' % e)

        #############################
        # get yearly mean temperature
        #############################

        nc_tasmean = sf.get_yrmean(resource)

        #######################################
        # main call for segetalflora processing
        #######################################

        nc_sf = sf.get_segetalflora(resource=nc_tasmean,
                                    culture_type=culture_type,
                                    climate_type=climate_type)

        #stepps = len(culture_type) * len(climate_type)
        #for a, cult in enumerate(culture_type):
        #for b, clim in enumerate(climate_type):
        #start = (a + 1) * (b + 1)
        #per = (start / stepps) * 95
        #self.status.set('%s/%s processing for %s climate type: %s' %(start, stepps, culture_type, climate_type), per)
        #try:
        #sf_files =  sf.get_segetalflora(resource=ncs, culture_type=cult, climate_type=clim) # dir_output=dir_tas,
        #self.status.set("processing of %s segetalflora files done " % (len(sf_files)) , 95)
        #except Exception as e:
        #logging.exception('segetalflora calculation failed %s %s' % ( climate_type, culture_type))
        #raise

        ## === fieldmeans
        #from flyingpigeon import timeseries as ts
        #self.status.set('processing fieldmeans' , 97)
        #try:
        #ncs = [path.join(dir_segetalflora,nc) for nc in listdir(dir_segetalflora)]
        #ncs_fld = ts.fldmean(ncs, dir_output=dir_fieldmean)
        #logging.debug('%s fieldmeans processed' % (len(ncs_fld)))
        #except Exception as e:
        #logging.exception('fieldmeans failed')
        #raise

        ## === visualisation
        #from flyingpigeon import visualisation as vs
        #from os import rename

        #self.status.set('processing visualisation' , 98)

        ## sort files for plotting
        #try:
        #ncs = listdir(dir_segetalflora)
        #set_var = set()
        #set_contry = set()
        #for nc in ncs:
        #set_var = set_var.union([nc.split('_')[0]])
        #set_contry = set_contry.union([nc.split('_')[1]])
        #logging.debug('%s files to plots sorted' % (len(ncs)))
        #except Exception as e:
        #logging.exception('files sorting failed')
        #raise

        ## plot sorted files
        #try:
        #plots = []
        #for v in set_var:
        #for c in set_contry:
        #ncs = [path.join(dir_segetalflora,nc) for nc in listdir(dir_segetalflora) if v in nc and c in nc ]
        #p = vs.spaghetti(ncs,
        #variable=v,
        #title='Segetalflora %s in %s' % (v, c),
        #dir_out=dir_plots)
        #newname = path.dirname(p)+'/%s_%s_birdhouse_output.html' %(v,c)
        #rename(p,newname)
        #plots.append(newname)
        #logging.debug('plot created and renamed for %s %s' % (v, c ))
        #except Exception as e:
        #logging.exception('ploting failed')
        #raise

        ####################
        # tar file archiving
        ####################
        from flyingpigeon.utils import archive

        try:
            self.status.set('files to tar archives', 99)
            tar_sf = archive(nc_sf, format='tar', dir_output='.', mode='w')
            tar_tasmean = archive(nc_tasmean,
                                  format='tar',
                                  dir_output='.',
                                  mode='w')
            logging.info('Archives prepared')
        except Exception as e:
            logger.debug('failed to archive files %s' % e)


# === set output parameter
#self.out_fieldmeans.setValue( tarf_fieldmeans )
        self.out_segetalflora.setValue(tar_sf)
        self.out_tasmean.setValue(tar_tasmean)
        #self.out_plots.setValue( tarf_plots )
        self.status.set("processing done", 100)
Example #33
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        process_start_time = time.time()  # measure process execution time ...

        response.update_status(
            'execution started at: {}'.format(str(dt.now())), 5)

        LOGGER.debug('starting segetalflora process execution')
        response.update_status('starting calculation segetalflora', 5)

        ############################
        # read arguments to variables
        ############################
        try:
            resource = archiveextract(
                resource=rename_complexinputs(request.inputs['resource']))
            climate_type = request.inputs['climate_type'][0].data
            culture_type = request.inputs['culture_type'][0].data

            LOGGER.info('urls for {} ncs found'.format(len(resource)))
            LOGGER.info('culture type: {}'.format(culture_type))
        except Exception as ex:
            msg = 'Failed to read in the arguments: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if not isinstance(climate_type, list):
                climate_type = [climate_type]
            if not isinstance(culture_type, list):
                culture_type = [culture_type]
            LOGGER.info('arguments are lists')
        except Exception as ex:
            msg = 'Failed to transform arguments to lists: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        #############################
        # get yearly mean temperature
        #############################

        nc_tasmean = sf.get_yrmean(resource)

        #######################################
        # main call for segetalflora processing
        #######################################

        nc_sf = sf.get_segetalflora(resource=nc_tasmean,
                                    culture_type=culture_type,
                                    climate_type=climate_type)

        ####################
        # tar file archiving
        ####################

        try:
            response.update_status('preparing output', 99)
            LOGGER.debug('length of sf: {}'.format(len(nc_sf)))
            if len(nc_sf) == 1:
                # TODO: fix pywps output formats OR use separate output params.
                response.outputs['out_segetalflora'].file = nc_sf[0]
                response.outputs['out_segetalflora'].format = FORMATS.NETCDF
            else:
                response.outputs['out_segetalflora'].file = archive(
                    nc_sf, format='tar', dir_output='.', mode='w')
                response.outputs['out_segetalflora'].format = Format(
                    'application/x-tar')
            if len(nc_tasmean) == 1:
                response.outputs['out_tasmean'].file = nc_tasmean[0]
                response.outputs['out_segetalflora'].format = FORMATS.NETCDF
            else:
                response.outputs['out_tasmean'].file = archive(nc_tasmean,
                                                               format='tar',
                                                               dir_output='.',
                                                               mode='w')
                response.outputs['out_tasmean'].format = Format(
                    'application/x-tar')
        except Exception as ex:
            msg = 'Failed to prepare output files: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.update_status('done', 100)
        LOGGER.debug(
            "total execution took {} seconds.".format(time.time() -
                                                      process_start_time))

        return response
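Both output parameters above follow the same branching: a single netCDF file is attached directly, while several files are bundled into a tar archive first. A minimal sketch of that pattern as a reusable helper, assuming pywps' Format/FORMATS and flyingpigeon's archive utility; the helper name set_file_output is hypothetical:

from pywps import FORMATS, Format

from flyingpigeon.utils import archive


def set_file_output(output, files):
    # attach a single netCDF file directly ...
    if len(files) == 1:
        output.file = files[0]
        output.format = FORMATS.NETCDF
    # ... or bundle several files into one tar archive
    else:
        output.file = archive(files, format='tar', dir_output='.', mode='w')
        output.format = Format('application/x-tar')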
Example #35
    def _handler(self, request, response):

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        ncs = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        var = get_variable(ncs[0])
        LOGGER.info('variable to be plotted: {}'.format(var))

        # mosaic = self.mosaic.getValue()
        if 'region' in request.inputs:
            regions = [inp.data for inp in request.inputs['region']]
            try:
                png_region = vs.plot_polygons(regions)
            except Exception as ex:
                msg = 'failed to plot the polygons on a world map: {}'.format(
                    str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

            # clip the requested polygons
            subsets = clipping(
                resource=ncs,
                variable=var,
                polygons=regions,
                mosaic=True,
                spatial_wrapping='wrap',
            )
        else:
            subsets = ncs
            png_region = vs.plot_extend(ncs[0])

        response.update_status('Arguments set for subset process', 0)

        try:
            tar_subsets = archive(subsets)
        except Exception as ex:
            msg = 'failed to archive subsets: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            png_uncertainty = vs.uncertainty(subsets, variable=var)
        except Exception as ex:
            msg = 'failed to generate the uncertainty plot: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            png_spaghetti = vs.spaghetti(subsets, variable=var)
        except Exception as ex:
            msg = 'failed to generate the spaghetti plot: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            from flyingpigeon import robustness as ro
            signal, low_agreement_mask, high_agreement_mask, text_src = ro.signal_noise_ratio(
                resource=subsets,
                # start=None, end=None,
                # timeslice=None,
                # variable=var
            )
            # if title is None:
            title = 'signal robustness of %s' % var  # , end1, end2, start1, start2
            png_robustness = vs.map_robustness(
                signal,
                high_agreement_mask,
                low_agreement_mask,
                # cmap=cmap,
                #    title=title
            )
            LOGGER.info('robustness graphic generated')
        except Exception as ex:
            msg = 'failed to generate the robustness plot: {}'.format(ex)
            LOGGER.exception(msg)
            raise Exception(msg)

        factsheet = vs.factsheetbrewer(png_region=png_region,
                                       png_uncertainty=png_uncertainty,
                                       png_spaghetti=png_spaghetti,
                                       png_robustness=png_robustness)

        response.outputs['output_nc'].file = tar_subsets
        response.outputs['output_factsheet'].file = factsheet
        response.update_status("done", 100)
        return response
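Every plotting step in this handler raises on failure, which aborts the whole process. When a degraded result is preferable to an aborted one, the fallback has to sit inside the except block, because nothing placed after a raise ever executes. A minimal sketch of such a wrapper, standard library only; make_plot stands in for any of the vs.* calls and the wrapper itself is hypothetical:

import logging
from tempfile import mkstemp

LOGGER = logging.getLogger(__name__)


def plot_or_placeholder(make_plot, *args, **kwargs):
    # run the plotting callable; on failure, log the traceback and hand
    # back an empty .png so the remaining outputs can still be delivered
    try:
        return make_plot(*args, **kwargs)
    except Exception:
        LOGGER.exception('plotting failed, returning a placeholder file')
        _, png = mkstemp(dir='.', suffix='.png')
        return png

Used in place of one of the try/except blocks above, this would read png_uncertainty = plot_or_placeholder(vs.uncertainty, subsets, variable=var).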
Example #36
    def execute(self):
        from os.path import basename

        from flyingpigeon import sdm
        from flyingpigeon.utils import archive

        self.status.set('Start process', 0)

        try:
            logger.info('reading the arguments')
            resources = self.getInputValues(identifier='resources')
            gbif = self.getInputValues(identifier='gbif')
            #period = self.period.getValue()
            period = self.getInputValues(identifier='period')
            period = period[0]
            #indices = self.input_indices.getValue()
            indices = self.getInputValues(identifier='input_indices')
            logger.debug("indices = %s", indices)

            archive_format = self.archive_format.getValue()
        except Exception as e:
            logger.error('failed to read in the arguments %s ' % e)
        logger.info('indices %s ' % indices)
        try:
            self.status.set('extract csv file with tree observations', 5)
            csv_file = sdm.get_csv(gbif[0])
        except Exception as e:
            logger.exception('failed to extract csv file from url.')

        try:
            self.status.set('read in latlon coordinates of tree observations',
                            10)
            latlon = sdm.get_latlon(csv_file)
        except Exception as e:
            logger.exception('failed to extract the latlon points')

        tree_presents = 'tree_presents.png'
        try:
            self.status.set('plotting tree presence based on coordinates', 15)
            import matplotlib.pyplot as plt
            import cartopy.crs as ccrs

            fig = plt.figure(figsize=(20, 10),
                             dpi=600,
                             facecolor='w',
                             edgecolor='k')
            ax = plt.axes(projection=ccrs.Robinson(central_longitude=0))
            ax.coastlines()
            ax.set_global()
            cs = plt.scatter(latlon[:, 1],
                             latlon[:, 0],
                             transform=ccrs.PlateCarree())
            fig.savefig(tree_presents)
            plt.close()
        except Exception as e:
            msg = 'plotting points failed'
            logger.exception(msg)
            with open(tree_presents, 'w') as fp:
                # TODO: needs to be a png file
                fp.write(msg)

        try:
            self.status.set('generating the PA mask', 20)
            PAmask = sdm.get_PAmask(coordinates=latlon)
            logger.info('PA mask successfully generated')
        except Exception as e:
            logger.exception('failed to generate the PA mask')

        png_PA_mask = 'PA_mask.png'
        try:
            self.status.set('Plotting PA mask', 25)
            fig = plt.figure(figsize=(20, 10),
                             dpi=300,
                             facecolor='w',
                             edgecolor='k')
            cs = plt.contourf(PAmask)
            fig.savefig(png_PA_mask)
            plt.close()
        except Exception as e:
            msg = 'failed to plot the PA mask'
            logger.exception(msg)
            with open(png_PA_mask, 'w') as fp:
                # TODO: needs to be a png file
                fp.write(msg)

        #################################
        ### calculate the climate indices
        #################################

        # get the indices
        ncs_indices = None
        try:
            self.status.set(
                'start calculation of climate indices for %s' % indices, 30)
            ncs_indices = sdm.get_indices(resources=resources, indices=indices)
            logger.info('indice calculation done')
        except Exception:
            msg = 'failed to calculate indices'
            logger.exception(msg)
            raise Exception(msg)

        try:
            archive_indices = archive(ncs_indices, format=archive_format)
            logger.info('indices 3D added to tarfile')
        except Exception:
            msg = 'failed adding indices to tar'
            logger.exception(msg)
            raise Exception(msg)

        indices_dic = None
        try:
            # sort indices
            indices_dic = sdm.sort_indices(ncs_indices)
            logger.info('indice files sorted for %s datasets' %
                        len(indices_dic.keys()))
        except Exception:
            msg = 'failed to sort indices'
            logger.exception(msg)
            raise Exception(msg)

        # try:
        #   # open tar files
        #   tar_reference = tarfile.open('reference.tar', "w")
        #   tar_indices = tarfile.open('indices.tar', "w")

        #   tar_info = tarfile.open('info.tar', "w")
        #   tar_prediction = tarfile.open('prediction.tar', "w")

        #   logger.info('tar files prepared')
        # except:
        #   msg = 'tar file preparation failed'
        #   logger.exception(msg)
        #   raise Exception(msg)

        ncs_references = []
        species_files = []
        statistics_info = []

        for count, key in enumerate(indices_dic.keys()):
            try:
                self.status.set('Start processing of %s ' % key,
                                40 + count * 10)

                ncs = indices_dic[key]

                logger.info('with %s files' % len(ncs))

                try:
                    ncs_references.extend(
                        sdm.get_reference(ncs_indices=ncs, period=period))
                    logger.info('reference indice calculated %s ' %
                                ncs_references)
                except Exception:
                    msg = 'failed adding ref indices to tar'
                    logger.exception(msg)
                    raise Exception(msg)

                # for nc_reference in ncs_references:
                #   tar_reference.add(nc_reference,
                #       arcname = basename(nc_reference))# nc_reference.replace(os.path.abspath(os.path.curdir), ""))

                # logger.info('reference indices added to tarfile')

            except Exception:
                msg = 'failed to calculate reference indices.'
                logger.exception(msg)
                raise Exception(msg)

            try:
                gam_model, predict_gam, gam_info = sdm.get_gam(
                    ncs_references, PAmask)
                statistics_info.append(gam_info)
                self.status.set('GAM successfully trained', 70)
            except Exception:
                msg = 'failed to train GAM'
                logger.exception(msg)
                raise Exception(msg)

            try:
                prediction = sdm.get_prediction(gam_model, ncs_indices)
                self.status.set('prediction done', 80)
            except Exception:
                msg = 'failed to predict'
                logger.exception(msg)
                raise Exception(msg)

            try:
                from numpy import isnan, nan, broadcast_arrays, invert
                mask = invert(isnan(PAmask))
                mask = broadcast_arrays(prediction, mask)[1]
                prediction[invert(mask)] = nan
                self.status.set('land sea mask for predicted data', 90)
            except Exception:
                logger.exception('failed to mask predicted data')

            try:
                species_files.append(
                    sdm.write_to_file(ncs_indices[0], prediction))

                logger.info('Favourability written to file')
                #tar_prediction.add(species_file,
                #               arcname = basename(species_file))#.replace(os.path.abspath(os.path.curdir), ""))
            except Exception:
                msg = 'failed to write species file'
                logger.exception(msg)
                raise Exception(msg)

        from flyingpigeon.visualisation import concat_images
        statistics_infos = None
        try:
            statistics_infos = concat_images(statistics_info, orientation='v')
        except Exception:
            msg = 'failed to concat images'
            logger.exception(msg)
            raise Exception(msg)

        archive_references = None
        try:
            archive_references = archive(ncs_references, format=archive_format)
            logger.info('indices 2D added to archive')
        except Exception:
            msg = 'failed adding 2D indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        archive_prediction = None
        try:
            archive_prediction = archive(species_files, format=archive_format)
            logger.info('species_files added to archive')
        except Exception:
            msg = 'failed adding species_files indices to archive'
            logger.exception(msg)
            raise Exception(msg)

        # try:
        #   #tar_indices.close()
        #   #tar_reference.close()

        #   tar_prediction.close()
        #   #tar_info.close()

        #   logger.info('tar files closed')
        # except:
        #   logger.exception('tar file closing failed')
        #   raise Exception

        self.output_csv.setValue(csv_file)
        self.output_gbif.setValue(tree_presents)
        self.output_PA.setValue(png_PA_mask)
        self.output_indices.setValue(archive_indices)
        self.output_reference.setValue(archive_references)
        self.output_prediction.setValue(archive_prediction)
        self.output_info.setValue(statistics_infos)

        self.status.set('done', 100)
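The land-sea masking inside the loop above relies on numpy broadcasting: the 2D presence/absence mask is stretched along the prediction stack's leading axis, and every cell outside the mask is set to NaN. A self-contained sketch with toy arrays (shapes are illustrative only):

import numpy as np

# toy stand-ins: a (time, lat, lon) prediction stack and a (lat, lon)
# PA mask in which NaN marks cells outside the study area
prediction = np.random.rand(3, 4, 5)
PAmask = np.full((4, 5), np.nan)
PAmask[1:3, 1:4] = 1.0

mask = ~np.isnan(PAmask)                         # True inside the study area
mask = np.broadcast_arrays(prediction, mask)[1]  # stretch over the time axis
prediction[~mask] = np.nan                       # blank everything outside

assert np.isnan(prediction[0, 0, 0])
assert not np.isnan(prediction[0, 1, 1])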
Example #37
    def _handler(self, request, response):
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # products = [inpt.data for inpt in request.inputs['indices']]

        indice = request.inputs['indices'][0].data

        # reorder the comma-separated input into bbox = [xmin, ymin, xmax, ymax]
        bboxStr = request.inputs['BBox'][0].data.split(',')
        bbox = [float(bboxStr[0]), float(bboxStr[2]),
                float(bboxStr[1]), float(bboxStr[3])]

        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        if start > end:
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.error('period ends before it starts; period reset to the last 30 days')

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        geom = {
            "type": "Polygon",
            "coordinates": [[[bbox[0], bbox[1]],
                             [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]],
                             [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]}

        footprint = geojson_to_wkt(geom)

        response.update_status('start searching tiles according to query', 15)

        products = api.query(footprint,
                             date=(start, end),
                             platformname='Sentinel-2',
                             cloudcoverpercentage=(0, cloud_cover),
                             # producttype='SLC',
                             # orbitdirection='ASCENDING',
                             )

        LOGGER.debug('{} products found'.format(len(products.keys())))
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        # api.download_all(products)
        # try:
        # with open(filepaths, 'w') as fp:
        #     fp.write('############################################\n')
        #     fp.write('###     Following files are fetched      ###\n')
        #     fp.write('############################################\n')
        #     fp.write('\n')

        resources = []

        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])

                file_zip = join(DIR_EO, '{}.zip'.format(ID))
                DIR_tile = join(DIR_EO, str(filename))
                response.update_status('fetch file {}'.format(ID), 20)
                LOGGER.debug('path: {}'.format(DIR_tile))

                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        response.update_status('{} successfully fetched'.format(ID), 20)
                        LOGGER.debug('Tile {} fetched'.format(ID))
                    except Exception as ex:
                        msg = 'failed to fetch file {}: {}'.format(filename, str(ex))
                        LOGGER.exception(msg)
                        raise Exception(msg)

                if exists(DIR_tile):
                    LOGGER.debug('file {} already unzipped'.format(filename))

                else:
                    try:
                        # zipfile = join(DIR_EO, '%szip' % (filename)).strip(form)
                        zip_ref = zipfile.ZipFile(file_zip, 'r')
                        zip_ref.extractall(DIR_EO)
                        zip_ref.close()
                        LOGGER.debug('Tile {} unzipped'.format(ID))
                    except Exception as ex:
                        msg = 'failed to extract {}'.format(file_zip)
                        LOGGER.exception(msg)
                        raise Exception(msg)

                resources.append(DIR_tile)
            except Exception as ex:
                msg = 'failed to fetch {}: {}'.format(key, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        imgs = []
        tiles = []
        for resource in resources:
            try:
                response.update_status('Calculating {} indices'.format(indice), 40)
                if indice == 'NDVI':
                    LOGGER.debug('Calculate NDVI for {}'.format(resource))
                    tile = eodata.get_ndvi(resource)
                    LOGGER.debug('NDVI calculated for resource')
                elif indice == 'BAI':
                    LOGGER.debug('Calculate BAI for {}'.format(resource))
                    tile = eodata.get_bai(resource)
                    LOGGER.debug('BAI calculated for resource')
                else:
                    raise Exception('unknown indice: {}'.format(indice))
                tiles.append(tile)
            except Exception as ex:
                msg = 'failed to calculate indice for {}: {}'.format(resource, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        for tile in tiles:
            try:
                LOGGER.debug('Plot tile {}'.format(tile))
                img = eodata.plot_band(tile, file_extension='PNG', colorscheem=indice)
                imgs.append(img)
            except Exception as ex:
                msg = 'Failed to plot tile {}: {}'.format(tile, str(ex))
                LOGGER.exception(msg)
                raise Exception(msg)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        # pick the first non-empty image as the example plot output
        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is None:
            response.outputs['output_plot'].file = 'dummy.png'
        else:
            response.outputs['output_plot'].file = imgs[i]

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
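The bounding-box handling at the top of this handler condenses to a small, testable helper: reorder the comma-separated input (which the element swap above implies arrives as xmin, xmax, ymin, ymax) into xmin, ymin, xmax, ymax, build the rectangle, and convert it to WKT for SentinelAPI.query. A sketch under those assumptions; bbox_to_footprint is a hypothetical name:

from sentinelsat import geojson_to_wkt


def bbox_to_footprint(bbox_str):
    # assumed input order: 'xmin,xmax,ymin,ymax'
    xmin, xmax, ymin, ymax = (float(v) for v in bbox_str.split(','))
    geom = {
        "type": "Polygon",
        "coordinates": [[[xmin, ymin], [xmax, ymin], [xmax, ymax],
                         [xmin, ymax], [xmin, ymin]]]}
    return geojson_to_wkt(geom)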