    def _handler(self, request, response):
        # init_process_logger('log.txt')
        # response.outputs['output_log'].file = 'log.txt'
        ncs = extract_archive(
            resources=[inpt.file for inpt in request.inputs['resource']],
            dir_output=self.workdir)
        LOGGER.info('ncs: {}'.format(ncs))

        coords = []
        for coord in request.inputs['coords']:
            coords.append(coord.data)

        LOGGER.info('coords {}'.format(coords))
        filenames = []
        nc_exp = sort_by_filename(ncs, historical_concatination=True)

        for key in nc_exp.keys():
            try:
                LOGGER.info('start calculation for {}'.format(key))
                ncs = nc_exp[key]
                times = get_time(ncs)
                concat_vals = times
                header = 'date_time'
                filename = join(self.workdir, '{}.csv'.format(key))
                filenames.append(filename)

                for p in coords:
                    try:
                        response.update_status('processing point: {}'.format(p), 20)
                        # define the point:
                        p = p.split(',')
                        point = Point(float(p[0]), float(p[1]))

                        # get the values
                        timeseries = call(resource=ncs, geom=point, select_nearest=True, dir_output=self.workdir)
                        vals = get_values(timeseries)

                        # concatenation of values
                        header = header + ',{}-{}'.format(p[0], p[1])
                        concat_vals = column_stack([concat_vals, vals])
                    except Exception as e:
                        LOGGER.debug('failed for point {} {}'.format(p, e))
                response.update_status('*** all points processed for {0} ***'.format(key), 50)

                # Note: fmt='%s' is required here because concat_vals mixes date strings and numeric values,
                # so numpy's default numeric format would fail.
                savetxt(filename, concat_vals, fmt='%s', delimiter=',', header=header)
            except Exception as ex:
                LOGGER.debug('failed for {}: {}'.format(key, str(ex)))

        # set the outputs
        response.update_status('*** creating output tar archive ***', 90)
        tarout_file = archive(filenames, dir_output=self.workdir)
        response.outputs['tarout'].file = tarout_file
        return response
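A minimal sketch of the column_stack/savetxt pattern the handler above uses to build its CSV, with hypothetical dates and point values standing in for the get_time()/get_values() results:

import numpy as np

# Hypothetical stand-ins for get_time() and the per-point get_values() results.
dates = ['2006-01-01', '2006-01-02', '2006-01-03']
point_values = {'10.0-52.5': [1.2, 1.3, 1.1], '4.5-50.0': [0.9, 1.0, 0.8]}

header = 'date_time'
concat_vals = np.array(dates)                      # first column: the time axis
for label, vals in point_values.items():
    header += ',{}'.format(label)                  # one header entry per point
    concat_vals = np.column_stack([concat_vals, vals])

# fmt='%s' is needed because the array mixes date strings and numeric values.
np.savetxt('points.csv', concat_vals, fmt='%s', delimiter=',', header=header)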
    def _handler(self, request, response):

        tic = dt.now()
        # init_process_logger('log.txt')
        # response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('Execution started at: {}'.format(tic), 1)

        ######################################
        # Read inputs
        ######################################
        try:
            resource = extract_archive(
                resources=[inpt.file for inpt in request.inputs['resource']],
                dir_output=self.workdir)[0]
            fmts = [e.data for e in request.inputs['fmt']]
            title = request.inputs['title'][0].data

        except Exception as ex:
            msg = 'Failed to read input parameter: {}'.format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        response.update_status('Input parameters ingested', 2)

        try:
            fig = plot_spatial_analog(resource, title=title)
            output = []

            for fmt in fmts:
                output.append(fig2plot(fig, fmt, dir_output=self.workdir))

        except Exception as ex:
            msg = "Failed to create figure: {}".format(ex)
            LOGGER.error(msg)
            raise Exception(msg)

        finally:
            plt.close()

        if len(fmts) == 1:
            output = output[0]
        else:
            output = archive(output, dir_output=self.workdir)

        response.outputs['output_figure'].file = output
        response.update_status("done", 100)
        return response
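A hedged sketch of the per-format export step above; fig2plot is the project's own helper, assumed here to behave roughly like matplotlib's fig.savefig into the working directory (the function name and layout below are illustrative, not the project's API):

import os

def save_figure_formats(fig, fmts, dir_output='.', basename='figure'):
    # Save one matplotlib figure in each requested format and return the paths.
    paths = []
    for fmt in fmts:
        path = os.path.join(dir_output, '{}.{}'.format(basename, fmt))
        fig.savefig(path, format=fmt)
        paths.append(path)
    return paths

# Usage: fig, ax = plt.subplots(); ax.plot([0, 1]); save_figure_formats(fig, ['png', 'pdf'])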
    def _handler(self, request, response):

        # input files
        LOGGER.debug("url={}, mime_type={}".format(request.inputs['resource'][0].url,
                     request.inputs['resource'][0].data_format.mime_type))
        ncs = extract_archive(
            resources=[inpt.file for inpt in request.inputs['resource']],
            dir_output=self.workdir)
        # mime_type=request.inputs['resource'][0].data_format.mime_type)
        # mosaic option
        # TODO: fix defaults in pywps 4.x
        if 'mosaic' in request.inputs:
            mosaic = request.inputs['mosaic'][0].data
        else:
            mosaic = False
        # regions used for subsetting
        regions = [inp.data for inp in request.inputs['region']]

        LOGGER.info('ncs: {}'.format(ncs))
        LOGGER.info('regions: {}'.format(regions))
        LOGGER.info('mosaic: {}'.format(mosaic))
        LOGGER.info('flyingpigeon dir_output: {}'.format(abspath(self.workdir)))

        response.update_status("Arguments set for subset process", 0)
        LOGGER.debug('starting: regions={}, num_files={}'.format(len(regions), len(ncs)))

        try:
            results = clipping(
                resource=ncs,
                polygons=regions,
                mosaic=mosaic,
                spatial_wrapping='wrap',
                # variable=variable,
                dir_output=self.workdir,
                # dimension_map=dimension_map,
            )
            LOGGER.info('results: {}'.format(results))

        except Exception as ex:
            msg = 'Clipping failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        if not results:
            raise Exception('No results produced.')

        # prepare tar file
        try:
            tarf = archive(results, dir_output=self.workdir)
            LOGGER.info('Tar file prepared')

        except Exception as ex:
            msg = 'Tar file preparation failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            raise Exception(msg)

        response.outputs['output'].file = tarf

        i = next((i for i, x in enumerate(results) if x), None)
        response.outputs['ncout'].file = results[i]

        response.update_status("done", 100)
        return response
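The ncout output above is chosen with next(); a small standalone illustration of that "first non-empty result" idiom, using hypothetical file names:

results = ['', 'subset_region_a.nc', 'subset_region_b.nc']   # hypothetical clipping output
i = next((i for i, x in enumerate(results) if x), None)      # index of the first truthy entry
first_file = results[i] if i is not None else None           # -> 'subset_region_a.nc'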
Example #4
# Imports assumed for the test excerpts below; the project-specific helpers
# (utils, TESTDATA, local_path) come from the project's own test modules, whose
# exact import paths are not shown here.
import tarfile
import tempfile
import zipfile

import pytest


def test_archive():
    assert '.tar' in utils.archive([])
    assert '.zip' in utils.archive([], format='zip')
    with pytest.raises(Exception):
        utils.archive([], format='zip2')
Example #5
def test_extract_archive():
    files = utils.extract_archive(
        [utils.archive([]), utils.archive([], format='zip')])
    assert len(files) == 0
Example #6
def test_archive_zip():
    result = utils.archive(local_path(TESTDATA['cmip5_tasmax_2006_nc']),
                           format='zip',
                           dir_output=tempfile.mkdtemp())
    zipf = zipfile.ZipFile(result)
    assert len(zipf.namelist()) == 1
Example #7
def test_archive_tar():
    result = utils.archive(local_path(TESTDATA['cmip5_tasmax_2006_nc']),
                           format='tar',
                           dir_output=tempfile.mkdtemp())
    tar = tarfile.open(result)
    assert len(tar.getnames()) == 1
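A hedged sketch of the behaviour the archive tests above exercise; the project's real utils.archive may differ in naming and options, but the tests imply it creates a .tar or .zip in dir_output and rejects unknown formats:

import os
import tarfile
import tempfile
import zipfile

def archive_sketch(resources, format='tar', dir_output=None):
    # Pack the given files into a tar or zip archive and return the archive path.
    dir_output = dir_output or tempfile.mkdtemp()
    if format == 'tar':
        path = os.path.join(dir_output, 'archive.tar')
        with tarfile.open(path, 'w') as tar:
            for f in resources:
                tar.add(f, arcname=os.path.basename(f))
    elif format == 'zip':
        path = os.path.join(dir_output, 'archive.zip')
        with zipfile.ZipFile(path, 'w') as zf:
            for f in resources:
                zf.write(f, arcname=os.path.basename(f))
    else:
        raise Exception('archive format not supported: {}'.format(format))
    return path

# archive_sketch([]) returns a path ending in '.tar'; archive_sketch([], format='zip2') raises.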