예제 #1
0
    def _handler(self, request, response):
        """Subset every requested resource by bounding box and time range.

        Each resource is processed with ocgis and written as a NetCDF file;
        the results are collected into a MetaLink4 document. Resources that
        do not intersect the requested extent are skipped.

        :param request: WPS request carrying bbox, date range and resources
        :param response: WPS response; gets 'output' (first file) and
            'metalink' (MetaLink4 XML) outputs
        :raises Exception: when no resource intersects the requested extent
        """
        geom = self.parse_bbox(request)
        dr = self.parse_daterange(request)

        ml = MetaLink4('subset', workdir=self.workdir)

        for res in self.parse_resources(request):
            variables = self.parse_variable(request, res)
            prefix = Path(res).stem + "_bbox_subset"
            rd = ocgis.RequestDataset(res, variables)

            try:
                ops = ocgis.OcgOperations(
                    dataset=rd, geom=geom, time_range=dr,
                    output_format='nc',
                    interpolate_spatial_bounds=True,
                    prefix=prefix, dir_output=tempfile.mkdtemp(dir=self.workdir))
                out = ops.execute()

                mf = MetaFile(prefix, fmt=FORMATS.NETCDF)
                mf.file = out
                ml.append(mf)

            except ocgis.exc.ExtentError:
                # This resource has no data in the requested extent; skip it.
                continue

        # BUG FIX: previously fell through to an opaque IndexError on
        # ml.files[0] when every resource raised ExtentError.
        if not ml.files:
            raise Exception("No resource intersects the requested bbox/time range.")

        response.outputs['output'].file = ml.files[0].file
        response.outputs['metalink'].data = ml.xml
        response.update_status("Completed", 100)

        return response
예제 #2
0
    def _handler(request, response):
        """Run the ESGF preprocessor and expose produced GRIB files.

        Invokes the external ``preprocessor.ESGF`` binary with hard-coded
        dates/input path plus the user-supplied boundary-condition table,
        then lists the fixed output directory into a MetaLink4 document.

        NOTE(review): no ``self`` parameter — presumably registered as a
        staticmethod; confirm against the enclosing class.

        :param request: WPS request providing the 'bc_table' file input
        :param response: WPS response; gets 'metalink', 'stdout', 'stderr'
        :raises Exception: when listing/collecting the output files fails
        """
        LOGGER.info("Extract boundary conditions")

        bc_table = request.inputs['bc_table'][0].file

        # Hard-coded run window and input dataset for the preprocessor.
        command = [
            "../bin/preprocessor.ESGF", "2033-12-24_00:00:00",
            "2033-12-30_00:00:00", "/oceano/gmeteo/WORK/ASNA/DATA/CanESM2",
            bc_table
        ]
        bc = subprocess.run(command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
        outlog = bc.stdout.decode("utf-8")
        errlog = bc.stderr.decode("utf-8")

        # Single source of truth for the preprocessor's output directory.
        grb_dir = "/oceano/gmeteo/WORK/ASNA/projects/cordex4cds/v2/grbData"
        try:
            ml = MetaLink4('bc', workdir="grbData")
            for f in os.listdir(grb_dir):
                # os.listdir yields bare names, so f is already the basename.
                mf = MetaFile(f, fmt=FORMATS.META4)
                mf.file = os.path.join(grb_dir, f)
                ml.append(mf)
        except Exception as ex:
            msg = 'BC failed: {}'.format(str(ex))
            LOGGER.exception(msg)
            # Chain the original exception to preserve the traceback.
            raise Exception(msg) from ex

        response.outputs['metalink'].data = ml.xml
        response.outputs['stdout'].data = outlog
        response.outputs['stderr'].data = errlog
        response.update_status("Completed", 100)
        return response
예제 #3
0
    def _handler(self, request, response):
        """Generate `count` text outputs and publish them as a MetaLink4 doc.

        :param request: WPS request providing the integer 'count' input
        :param response: WPS response; 'output' receives the MetaLink4 XML
        """
        max_outputs = request.inputs['count'][0].data

        ml = MetaLink4('test-ml-1', 'MetaLink with links to text files.', workdir=self.workdir)
        for i in range(max_outputs):
            # Create a MetaFile instance, which instantiates a ComplexOutput object.
            # BUG FIX: MetaFile's keyword is `fmt`, not `format` (see pywps API
            # and every other snippet in this file); `format=` raises TypeError.
            mf = MetaFile('output_{}'.format(i), 'Test output', fmt=FORMATS.TEXT)
            mf.data = 'output: {}'.format(i)  # or mf.file = <path to file> or mf.url = <url>
            ml.append(mf)

        # The `xml` property of the Metalink4 class returns the metalink content.
        response.outputs['output'].data = ml.xml
        return response
예제 #4
0
 def _handler(self, request, response):
     """Validate the collection's characterisation and return a metalink.

     The averaging itself is not implemented yet; a dummy text MetaFile
     is returned as a placeholder.
     """
     # TODO: handle lazy load of daops
     from daops.utils import is_characterised

     collection = [dset.data for dset in request.inputs['collection']]
     pre_checked = request.inputs['pre_checked'][0].data
     if pre_checked and not is_characterised(collection, require_all=True):
         raise ProcessError('Data has not been pre-checked')
     # Metalink wrapper for the (placeholder) result.
     ml4 = MetaLink4('average-result', 'Averaging result as NetCDF files.', workdir=self.workdir)
     mf = MetaFile('Text file', 'Dummy text file', fmt=FORMATS.TEXT)
     mf.data = 'not working yet'
     ml4.append(mf)
     response.outputs['output'].data = ml4.xml
     return response
예제 #5
0
def build_metalink(identity, description, workdir, file_uris, file_type="NetCDF"):
    """Assemble a MetaLink4 document referencing *file_uris*.

    http(s) URIs are attached as remote URLs; every other URI is treated
    as a local file path. Returns the populated MetaLink4 object.
    """
    ml4 = MetaLink4(identity, description, workdir=workdir)
    file_desc = f"{file_type} file"
    fmt = file_type_map.get(file_type, file_type)

    for file_uri in file_uris:
        mf = MetaFile(file_desc, file_desc, fmt=fmt)

        # Remote vs local decided by the URI scheme.
        is_remote = urlparse(file_uri).scheme in ["http", "https"]
        if is_remote:
            mf.url = file_uri
        else:
            mf.file = file_uri

        ml4.append(mf)

    return ml4
예제 #6
0
 def _handler(self, request, response):
     """Run the submitted workflow and expose its NetCDF results."""
     try:
         runner = workflow.WorkflowRunner(output_dir=self.workdir)
         output = runner.run(request.inputs['workflow'][0].file)
     except Exception as e:
         raise ProcessError(f"{e}")

     # Metalink document listing every NetCDF file the workflow produced.
     ml4 = MetaLink4('workflow-result',
                     'Workflow result as NetCDF files.',
                     workdir=self.workdir)
     for ncfile in output:
         mf = MetaFile('NetCDF file', 'NetCDF file', fmt=FORMATS.NETCDF)
         mf.file = ncfile
         ml4.append(mf)

     response.outputs['output'].data = ml4.xml
     return response
예제 #7
0
File: utils.py — Project: bird-house/finch
def make_metalink_output(process: Process,
                         files: List[Path],
                         description: str = None) -> MetaLink4:
    """Make a metalink output from a list of files.

    Wraps each path in *files* as a NetCDF ``MetaFile`` (identity taken
    from the file stem) and collects them into one ``MetaLink4`` document
    published by "Finch".

    :param process: running PyWPS process; supplies identifier and workdir
    :param files: paths of the NetCDF files to expose
    :param description: optional human-readable description of the set
        (NOTE(review): annotation should be ``Optional[str]``; left as-is
        to avoid changing imports here)
    :return: the assembled MetaLink4 document
    """

    metalink = MetaLink4(
        identity=process.identifier,
        description=description,
        publisher="Finch",
        workdir=process.workdir,
    )

    for f in files:
        # Identity is the file stem; the actual path is attached below.
        mf = MetaFile(identity=f.stem, fmt=FORMATS.NETCDF)
        mf.file = str(f)
        metalink.append(mf)

    return metalink
예제 #8
0
    def _handler(self, request, response):
        """Generate `count` text outputs and publish them as a MetaLink4 doc.

        :param request: WPS request providing the integer 'count' input
        :param response: WPS response; 'output' receives the MetaLink4 XML
        """
        max_outputs = request.inputs['count'][0].data

        ml = MetaLink4('test-ml-1',
                       'MetaLink with links to text files.',
                       workdir=self.workdir)
        for i in range(max_outputs):
            # Create a MetaFile instance, which instantiates a ComplexOutput object.
            # BUG FIX: MetaFile's keyword is `fmt`, not `format` (see pywps API
            # and every other snippet in this file); `format=` raises TypeError.
            mf = MetaFile('output_{}'.format(i),
                          'Test output',
                          fmt=FORMATS.TEXT)
            mf.data = 'output: {}'.format(
                i)  # or mf.file = <path to file> or mf.url = <url>
            ml.append(mf)

        # The `xml` property of the Metalink4 class returns the metalink content.
        response.outputs['output'].data = ml.xml
        return response
예제 #9
0
File: subset.py — Project: Zeitsperre/finch
        def process_resource(resource):
            """Subset one resource and append the result to ``metalink``.

            Closure over ``self``, ``subset_function`` and ``metalink``
            from the enclosing handler. Writes a ``<stem>_sub`` NetCDF
            file into the working directory; resources whose subset is
            empty are logged and skipped.
            """
            ds = self.try_opendap(resource)
            out = subset_function(ds)

            # A zero-length dimension means the subset selected no data.
            if not all(out.dims.values()):
                LOGGER.warning(f"Subset is empty for dataset: {resource.url}")
                return

            # Derive the output name from the source file (or its URL).
            p = Path(resource._file or resource._build_file_name(resource.url))
            out_fn = Path(self.workdir) / (p.stem + "_sub" + p.suffix)

            out.to_netcdf(out_fn)

            mf = MetaFile(
                identity=p.stem,
                fmt=FORMATS.NETCDF,
            )
            mf.file = out_fn
            metalink.append(mf)
예제 #10
0
def build_meta_link(
    varname,
    desc,
    outfiles,
    format_name="netCDF",
    fmt=FORMATS.NETCDF,
    outdir=None,
):
    """Create meta link between output files

    A MetaLink4 object is created to contain a description of the
    process output, and a MetaFile is created for each output file to be
    appended to this link.

    Parameters:
        varname (str): Name of variable (used for MetaLink4)
        desc (str): Description of meta file
        outfiles (list): List of output files
        format_name (str): Format name of output files
        fmt (pywps.FORMATS): Format of output files
        outdir (str): Directory containing output files; defaults to the
            current working directory at call time

    Returns:
        MetaLink4.xml: xml of metalink connecting output files
    """
    # BUG FIX: `outdir=os.getcwd()` as a def-time default is evaluated once
    # at import; resolve the working directory at call time instead.
    if outdir is None:
        outdir = os.getcwd()

    # Singular/plural is the only difference between the two descriptions.
    noun = "file" if len(outfiles) == 1 else "files"
    meta_link = MetaLink4(
        "output", f"Output of {format_name} {varname} {noun}", workdir=outdir
    )

    for file in outfiles:
        # Create a MetaFile instance, which instantiates a ComplexOutput object.
        meta_file = MetaFile(f"{file}", desc, fmt=fmt)
        meta_file.file = os.path.join(outdir, file)
        meta_link.append(meta_file)

    return meta_link.xml
예제 #11
0
    def _handler(self, request, response):
        """Emit the same `count` text outputs as MetaLink v3 and v4 docs."""
        response.update_status('PyWPS Process started.', 0)

        LOGGER.info("starting ...")
        max_outputs = request.inputs['count'][0].data

        # MetaLink v3 container ...
        ml3 = MetaLink('test-ml-1',
                       'Testing MetaLink with text files.',
                       workdir=self.workdir)
        # ... and the MetaLink v4 container (recommended format).
        ml4 = MetaLink4('test-ml-1',
                        'Testing MetaLink with text files.',
                        workdir=self.workdir)

        # Both documents carry an identical set of generated text files.
        for container in (ml3, ml4):
            for idx in range(max_outputs):
                mf = MetaFile('output_{}'.format(idx),
                              'Test output',
                              fmt=FORMATS.TEXT)
                mf.data = 'output: {}'.format(idx)
                container.append(mf)

        response.outputs['output'].data = ml3.xml
        response.outputs['output_meta4'].data = ml4.xml

        response.update_status('PyWPS Process completed.', 100)
        return response
예제 #12
0
    def _handler(self, request, response):
        """Produce `count` text outputs in both MetaLink v3 and v4 form."""
        response.update_status('PyWPS Process started.', 0)

        LOGGER.info("starting ...")
        max_outputs = request.inputs['count'][0].data

        # generate MetaLink v3 output
        ml3 = MetaLink('test-ml-1', 'Testing MetaLink with text files.', workdir=self.workdir)
        for n in range(max_outputs):
            mf = MetaFile(f'output_{n}', 'Test output', fmt=FORMATS.TEXT)
            mf.data = f'output: {n}'
            ml3.append(mf)
        response.outputs['output'].data = ml3.xml

        # ... OR generate MetaLink v4 output (recommended)
        ml4 = MetaLink4('test-ml-1', 'Testing MetaLink with text files.', workdir=self.workdir)
        for n in range(max_outputs):
            mf = MetaFile(f'output_{n}', 'Test output', fmt=FORMATS.TEXT)
            mf.data = f'output: {n}'
            ml4.append(mf)
        response.outputs['output_meta4'].data = ml4.xml

        response.update_status('PyWPS Process completed.', 100)
        return response
예제 #13
0
def build_metalink(identity,
                   description,
                   workdir,
                   file_uris,
                   file_type="NetCDF"):
    """Build a MetaLink4 document over *file_uris* (remote URLs or paths)."""
    ml4 = MetaLink4(identity, description, workdir=workdir)
    file_desc = f"{file_type} file"
    fmt = file_type_map.get(file_type, file_type)

    for file_uri in file_uris:
        mf = MetaFile(file_desc, file_desc, fmt=fmt)

        # Anything without an http(s) scheme is treated as a local path.
        if urlparse(file_uri).scheme not in ["http", "https"]:
            mf.file = file_uri
        else:
            mf.url = file_uri
            # TODO: size calculation takes too long. Set size from inventory/catalog.
            mf.size = 0

        ml4.append(mf)

    return ml4
예제 #14
0
 def metafile(self):
     """Return a JSON MetaFile bound to the test working directory."""
     payload = json.dumps({'a': 1})
     mf = MetaFile('identifier', 'title', fmt=FORMATS.JSON)
     mf.data = payload
     mf._set_workdir(self.tmp_dir)
     return mf
예제 #15
0
    def _handler(self, request, response):
        """Subset a raster by a vector shape and return per-feature rasters.

        Resolves the vector (and raster, or a DEM tile when none is given),
        reprojects the raster onto the vector CRS if they differ, then runs
        ``zonal_stats`` with ``raster_out=True``. Multiple result rasters are
        published as a MetaLink4 document; a single result is returned as a
        plain file output.

        :param request: WPS request with 'shape', 'band',
            'select_all_touching' and optional 'raster' inputs
        :param response: WPS response; 'raster' gets MetaLink XML or a file
        :raises Exception: wrapping any failure during the zonal subset
        """
        shape_url = request.inputs["shape"][0].file
        band = request.inputs["band"][0].data
        touches = request.inputs["select_all_touching"][0].data

        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))

        if "raster" in request.inputs:
            raster_url = request.inputs["raster"][0].file
            rasters = [".tiff", ".tif"]
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
        else:
            raster_url = None
            # Assuming that the shape coordinate are in WGS84
            raster_file = gather_dem_tile(vector_file,
                                          self.workdir,
                                          geographic=True)

        vec_crs, ras_crs = crs_sniffer(vector_file), crs_sniffer(raster_file)

        if ras_crs != vec_crs:
            msg = f"CRS for files {vector_file} and {raster_file} are not the same. Reprojecting raster..."
            LOGGER.warning(msg)

            # Warp the raster onto the vector's CRS before sampling.
            projected = tempfile.NamedTemporaryFile(prefix="reprojected_",
                                                    suffix=".json",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_raster_warp(raster_file, projected, target_crs=vec_crs)
            raster_file = projected

        data_type = raster_datatype_sniffer(raster_file)

        try:
            stats = zonal_stats(
                vector_file,
                raster_file,
                band=band,
                all_touched=touches,
                raster_out=True,
            )

            raster_files = zonalstats_raster_file(
                stats,
                working_dir=self.workdir,
                data_type=data_type,
                crs=vec_crs or ras_crs,
            )

            if len(raster_files) > 1:
                ml = MetaLink4(
                    "test-ml-1",
                    "MetaLink with links to raster files.",
                    workdir=self.workdir,
                )
                for i, file in enumerate(raster_files):
                    # Create a MetaFile instance, which instantiates a ComplexOutput object.
                    mf = MetaFile(file.name,
                                  description="Raster file",
                                  fmt=FORMATS.GEOTIFF)
                    mf.file = (
                        file.as_posix()
                    )  # or mf.file = <path to file> or mf.url = <url>
                    ml.append(mf)

                response.outputs["raster"].data = ml.xml
            else:
                response.outputs["raster"].file = raster_files[0]

        except Exception as e:
            msg = f"Failed to perform raster subset using {shape_url}{f' and {raster_url} ' if raster_url else ''}: {e}"
            LOGGER.error(msg)
            # BUG FIX: chain the cause so the original traceback survives
            # (consistent with the land-use handler in this project).
            raise Exception(msg) from e

        return response
예제 #16
0
    def _handler(self, request, response):
        """Estimate a basin's flow-duration curve (CDP) and build a PDF report.

        Workflow (Renerfor regional procedure):
        1. Load the basin polygon into GRASS and rasterize it.
        2. Extract morpho-climatic descriptors via zonal statistics
           (elevation, area, MAP, IDFa, Fourier B1, rainfall CV, CORINE %).
        3. Estimate regional L-moments and Burr XII distribution parameters.
        4. Plot the curve and assemble a multi-page PDF report, exposed as
           MetaLink (v3, by URL) and MetaLink4 (v4, by file) outputs.

        User-facing report text is intentionally kept in Italian.
        """
        response.update_status('PyWPS Process started.', 0)
        LOGGER.info("starting ...")
        max_outputs = 1

        # Input variables
        vectorbacino=request.inputs['vectorbacino'][0].file
        nomebacino=request.inputs['namebacino'][0].data

        # Output variables (server output dir + matching public URL)
        workdir=self.workdir
        file_path = config.get('server', 'outputpath')
        file_url = config.get('server', 'outputurl')

        nome_report_PDF="Report_CDP_"+str(self.uuid)+".pdf"
        nome_grafico="CDP_"+str(self.uuid)+".png"

        file_report_PDF= os.path.join(file_path, nome_report_PDF)
        url_report_PDF = os.path.join(file_url, nome_report_PDF)
        file_grafico=os.path.join(file_path, nome_grafico)
        url_grafico = os.path.join(file_url, nome_grafico)

        # GRASS environment setup (function-scope import: GRASS only needs
        # to be importable when the process actually runs)
        import grass.script as grass
        import grass.script.setup as gsetup

        GISBASE=config.get('grass', 'gisbase')
        GISDBASE=config.get("grass", "gisdbase")
        location="EPSG32632"
        mapset="PROVA"
        gsetup.init(GISBASE,GISDBASE, location, mapset)

        gisenv=grass.parse_command('g.gisenv', flags='n')
        print("Test gisenv: %s" % gisenv)

        # NOTE(review): `list` shadows the builtin; rename on next revision.
        list=grass.parse_command('g.list', type="rast")
        print("g.list rast: %s " %list)

        ######### EXTRACTION OF BASIN DESCRIPTORS FROM GRASS #########
        print('######### ESTRAZIONE DESCRITTORI DEL BACINO DA GRASS #########')
        # Load the basin vector into GRASS
        res=grass.start_command('v.in.ogr', input=vectorbacino, output='basin', overwrite = True, min_area='0',stderr=subprocess.PIPE)
        stdoutdata, stderrdata = res.communicate()
        print("Error occured: %s" % stderrdata)

        # Configure the GRASS region on the basin extent
        grass.run_command('g.region', vector='basin')

        # Rasterize the basin vector (used as the zones map below)
        grass.run_command('v.to.rast', input='basin', output='BASIN', use='cat', type='area', overwrite = True)

        # Mean elevation and area ('piemonte_dem_r100')
        stats_dem = grass.parse_command('r.univar', flags='eg', map='piemonte_dem_r100@PROVA', zones='BASIN')
        quota_media=float(stats_dem['mean'])
        quota_max=float(stats_dem['max'])
        # cell count * 0.01 km2 per 100 m cell -> basin area in km2
        area_km=float(stats_dem['n']) * 0.01
        ipso75=float(stats_dem['first_quartile'])
        print(quota_media, quota_max, area_km, ipso75)

        # Mean annual precipitation ('piemonte_MAP_r250')
        #grass.run_command('g.region', vect='basin', res='250')
        stats_MAP = grass.parse_command('r.univar', flags='g', map='piemonte_MAP_r250@PROVA', zones='BASIN')
        MAP_media = float(stats_MAP['mean'])
        MAP_std = float(stats_MAP['stddev'])

        # Mean and std of the hourly rainfall coefficient CPP ('piemonte_IDFa_r250')
        #grass.run_command('g.region', vect='basin', res='250')
        stats_IDFa = grass.parse_command('r.univar', flags='g', map='piemonte_IDFa_r250@PROVA', zones='BASIN')
        IDFa_media = float(stats_IDFa['mean'])
        IDFa_std = float(stats_IDFa['stddev'])

        # Mean Fourier B1 rainfall-regime coefficient ('piemonte_fourierB1_r50')
        #grass.run_command('g.region', vect='basin', res='50')
        stats_fourierB1 = grass.parse_command('r.univar', flags='g', map='piemonte_fourierB1_r50@PROVA', zones='BASIN')
        fourierB1_media = float(stats_fourierB1['mean'])

        # Mean coefficient of variation of the rainfall regime ('piemonte_rp_cv_r50')
        #grass.run_command('g.region', vect='basin', res='50')
        stats_rpcv = grass.parse_command('r.univar', flags='g', map='piemonte_pioggemensili_cv_r50@PROVA',zones='BASIN')
        rpcv_media = float(stats_rpcv['mean'])

        # Percentages of the reclassified CORINE land-cover classes
        # NOTE(review): this counts digit characters in r.stats' text output,
        # which presumes every reported category is a single digit 1-5 — confirm.
        cells_CLC = grass.read_command('r.stats', flags='1n', input='italy_CLC2000_r100@PROVA')
        all_cells_CLC = cells_CLC.count('1') + cells_CLC.count('2') + cells_CLC.count('3') + cells_CLC.count('4') + cells_CLC.count('5')
        clc2_percentuale = float(cells_CLC.count('2')) / float(all_cells_CLC) * 100
        clc3_percentuale = float(cells_CLC.count('3')) / float(all_cells_CLC) * 100

        # Clean up the GRASS workspace
        grass.run_command('g.remove', flags='f', type='raster', name='MASK')
        grass.run_command('g.remove', flags='f', type='raster', name='BASIN')
        grass.run_command('g.remove', flags='f', type='vector', name='basin')

        testo =""
        testo1 = "I descrittori del bacino '%s' sono: \n" %(nomebacino)
        testo1 += "Area (km2): "+ str(round(area_km,3)) + "\n"+ "quota_media (m slm):  "+ str(round(quota_media,3)) + "\n" + "quota_massima (m slm):  " + str(round(quota_max,3)) + "\n" + "curva_ipso_75percento (m slm):  " + str(round(ipso75,3)) + "\n" + "MAP (mm):  " + str(round(MAP_media,3)) + "\n" + "IDFa (mm):  " + str(round(IDFa_media,3)) + "\n" + "IDFa_std (mm/h):  " + str(round(IDFa_std,3)) + "\n" + "fourier_B1:  " + str(round(fourierB1_media,3)) + "\n" +"CV rp:  " + str(round(rpcv_media,3)) + "\n" + "clc2_perc:  " + str(round(clc2_percentuale,3)) + "\n" + "clc3_perc:  " + str(round(clc3_percentuale,3))+"\n"

        print(testo1)
        ########## ESTIMATION OF REGIONAL L-MOMENTS AND DISTRIBUTION PARAMETERS ##########
        # Regional L-moment estimates of the flow-duration curve
        c_int=IDFa_media/MAP_media
        Y=-7.3605*10**2+1.2527*MAP_media+3.2569*10**(-1)*quota_media+5.2674*fourierB1_media-6.7185*clc2_percentuale
        LCV=-2.896*10**(-1)-2.688*10**(-3)*clc3_percentuale+9.643*10**(-5)*ipso75+1.688*10**(-4)*MAP_media+2.941*10*c_int
        LCA=4.755*quota_max**(-0.2702)*IDFa_std**0.06869*rpcv_media**0.2106
        # NOTE(review): 31536.0 presumably converts annual volume to a mean
        # discharge — confirm units against the Renerfor documentation.
        L1=Y*area_km/31536.0

        testo2 = "\n Gli L-momenti della CDP stimati, per l' area di studio sulla base delle caratteristiche geomorfologice del bacino, secondo la procedura regionale Renerfor sono: \n"
        testo2 += "L1:" + str(round(L1,3)) + "\n" + "LCV: "+str(round(LCV,3))+ "\n"+"LCA:" + str(round(LCA,3))+"\n \n"

        print(testo2)
        # Distribution parameters: functions ported from the R package
        # Hydroapps (Ganora) for RENERFOR
        d=np.array(range(1,366))
        p=1-d/366.0
        LCAinf=fun.tau3BurrXII_WeibullBound(LCV)
        LCAsup=fun.tau3BurrXII_ParetoBound(LCV)
        risultati=fun.parBurrXIIapprox(L1, LCV, LCA)
        #risultati=('BurrXII','a: 8.5; b: 1; c: 2.8', p)
        distribuzione=risultati[0]
        parametri=risultati[1]
        x=risultati[2]

        testo3 ="Gli L-momenti L-CV e L-CA della Curva di Durata delle Portate (CDP), stimati a partire dai descrittori di bacino, ricadono, come riportato nella seguente figura, nel dominio di esistenza della distribuzione: "+ str(distribuzione)+".\n"
        testo3 += "I parametri stimati della distribuzione indicata hanno valore: \n"+ str(parametri)+". \n \n"
        testo4 =" La Curva di durata delle portate in regime naturale (non influenzata da derivazioni), ottenuta dal modello regionale Renerfor, viene riportata nel presente Report."

        # Build the flow-duration-curve plot for the browser
        fun.grafico_FDC_semplice_browser(x,file_grafico)
        #fun.figura_FDC_due_assi(x) #prova
        ##########################################################################################
        ##########################################################################################
        # OUTPUT
        testo=testo1+testo2+testo3+testo4

        # Build the PDF report
        with PdfPages(file_report_PDF) as pdf:
            #plt.rc('text', usetex=False)
            figura_testo=plt.figure(figsize=(8,6))
            # Page 1: textual results
            plt.text(-0.12, 1.01,testo, ha='left',va='top', wrap=True,fontsize=10)
            plt.ylim(0, 1)
            plt.xlim(0, 1)
            plt.setp(plt.gca(), frame_on=False, xticks=(), yticks=())
            pdf.savefig(figura_testo)
            plt.close()
            # Page 2: Burr existence domain
            figura_dominio=fun.figura_dominio_burr(LCV,LCA)
            pdf.savefig(figura_dominio)
            plt.close()
            # Page 3: flow-duration curve
            figura_FDC=fun.figura_FDC_due_assi(x)
            pdf.savefig(figura_FDC)
            plt.close()

        #output = "Puoi visualizzare e scaricare il grafico della curva di durata delle portate all'url: \n %s" %('http://130.192.28.30/wpsoutputs/'+str(nome_grafico))
        #output += "\n"+"Puoi visualizzare e scaricare il Report PDF all'url: \n %s" %(url_report_PDF))

        # generate MetaLink v3 output
        # NOTE(review): FORMATS.TEXT is used for a PDF artifact — confirm intended.
        ml3 = MetaLink('Report PDF', 'MetaLink', workdir=self.workdir)
        mf = MetaFile('REPORT_PDF.pdf', 'Report PDF CDP', fmt=FORMATS.TEXT)
        mf.url=url_report_PDF
        ml3.append(mf)
        response.outputs['output'].data = ml3.xml

        # ... OR generate MetaLink v4 output (recommended)
        ml4 = MetaLink4('Report PDF', 'MetaLink4', workdir=self.workdir)
        mf = MetaFile('REPORT_PDF.pdf', 'Report PDF CDP', fmt=FORMATS.TEXT)
        mf.file=file_report_PDF
        ml4.append(mf)
        response.outputs['output_meta4'].data = ml4.xml

        response.update_status('PyWPS Process completed.', 100)
        return response
예제 #17
0
    def _handler(self, request, response):
        """Compute land-use zonal statistics for a vector shape.

        Resolves the vector and a land-cover raster (either the user-supplied
        UNFAO LCCS raster or a NALCMS tile from GeoServer), reprojects as
        needed, then runs two ``zonal_stats`` passes: one producing GeoJSON
        features with aggregated land-use categories, one producing per-zone
        raster subsets published through a MetaLink4 document.
        """
        shape_url = request.inputs["shape"][0].file
        simple_categories = request.inputs["simple_categories"][0].data
        band = request.inputs["band"][0].data
        touches = request.inputs["select_all_touching"][0].data

        vectors = [".gml", ".shp", ".gpkg", ".geojson", ".json"]
        vector_file = single_file_check(
            archive_sniffer(shape_url,
                            working_dir=self.workdir,
                            extensions=vectors))
        vec_crs = crs_sniffer(vector_file)

        response.update_status("Accessed vector", status_percentage=5)

        # For raster files using the UNFAO Land Cover Classification System (19 types)
        if "raster" in request.inputs:
            rasters = [".tiff", ".tif"]
            raster_url = request.inputs["raster"][0].file
            raster_file = single_file_check(
                archive_sniffer(raster_url,
                                working_dir=self.workdir,
                                extensions=rasters))
            ras_crs = crs_sniffer(raster_file)

            if vec_crs != ras_crs:
                msg = f"CRS for files {vector_file} and {raster_file} are not the same. Reprojecting..."
                LOGGER.warning(msg)

                # Reproject full vector to preserve feature attributes
                projected = tempfile.NamedTemporaryFile(
                    prefix="reprojected_",
                    suffix=".json",
                    delete=False,
                    dir=self.workdir,
                ).name
                generic_vector_reproject(vector_file,
                                         projected,
                                         source_crs=vec_crs,
                                         target_crs=ras_crs)
            else:
                projected = vector_file

        else:
            raster_url = None
            # using the NALCMS data from GeoServer; reproject the vector into
            # the NALCMS projection rather than warping the raster
            projected = tempfile.NamedTemporaryFile(prefix="reprojected_",
                                                    suffix=".json",
                                                    delete=False,
                                                    dir=self.workdir).name
            generic_vector_reproject(vector_file,
                                     projected,
                                     source_crs=vec_crs,
                                     target_crs=NALCMS_PROJ4)
            raster_file = gather_dem_tile(
                vector_file,
                self.workdir,
                geographic=False,
                raster="public:CEC_NALCMS_LandUse_2010",
            )

        data_type = raster_datatype_sniffer(raster_file)
        response.update_status("Accessed raster", status_percentage=10)

        # Category mapping: coarse (simple) or full (true) land-use classes.
        categories = SIMPLE_CATEGORIES if simple_categories else TRUE_CATEGORIES
        summary_stats = SUMMARY_ZONAL_STATS

        try:

            # Use zonalstats to produce a GeoJSON
            stats = zonal_stats(
                projected,
                raster_file,
                stats=summary_stats,
                band=band,
                categorical=True,
                all_touched=touches,
                geojson_out=True,
                raster_out=False,
            )

            land_use = list()
            for stat in stats:
                # Missing categories default to a count of 0.
                lu = defaultdict(lambda: 0)
                prop = stat["properties"]

                # Rename/aggregate land-use categories
                for k, v in categories.items():
                    lu[v] += prop.get(k, 0)

                prop.update(lu)
                land_use.append(lu)
                # prop['mini_raster_array'] = pickle.dumps(prop['mini_raster_array'], protocol=0).decode()

            # Use zonalstats to produce sets of raster grids
            raster_subset = zonal_stats(
                projected,
                raster_file,
                stats=summary_stats,
                band=band,
                categorical=True,
                all_touched=touches,
                geojson_out=False,
                raster_out=True,
            )

            raster_out = zonalstats_raster_file(
                raster_subset,
                working_dir=self.workdir,
                data_type=data_type,
                crs=NALCMS_PROJ4,
                zip_archive=False,
            )

            # Publish every per-zone raster through one MetaLink4 document.
            ml = MetaLink4(
                "rasters_out",
                "Metalink to series of GeoTIFF raster files",
                workdir=self.workdir,
            )
            for r in raster_out:
                mf = MetaFile(Path(r).name,
                              "Raster subset",
                              fmt=FORMATS.GEOTIFF)
                mf.file = r
                ml.append(mf)

            feature_collect = {"type": "FeatureCollection", "features": stats}
            response.outputs["features"].data = json.dumps(feature_collect)
            response.outputs["statistics"].data = json.dumps(land_use)
            response.outputs["raster"].data = ml.xml

        except Exception as e:
            msg = f"Failed to perform raster subset using {shape_url}{f' and {raster_url} ' if raster_url else ''}: {e}"
            LOGGER.error(msg)
            raise Exception(msg) from e

        return response
예제 #18
0
 def metafile(self):
     """Build a MetaFile carrying a small JSON payload for tests."""
     payload = {'a': 1}
     mf = MetaFile('identifier', 'title', fmt=FORMATS.JSON)
     mf.data = json.dumps(payload)
     mf._set_workdir(self.tmp_dir)
     return mf