def display_inspect(request):
    """Django view returning a plain-text inspection report for a dataset.

    The query string must supply a ``uri`` parameter and may supply a
    ``variable`` plus interface-overload parameters.

    :param request: incoming Django request object.
    :returns: :class:`HttpResponse` with ``text/plain`` content containing
        the :class:`Inspect` report.
    """
    ## parse the query string
    query = parse_qs(request.META['QUERY_STRING'])
    uri = helpers._get_uri_(query, scalar=True)
    variable = QueryParm(query, 'variable', scalar=True)
    interface_overload = _get_interface_overload_(query)
    io = Inspect(uri.value, variable=variable.value,
                 interface_overload=interface_overload)
    # repr() is the idiomatic spelling; calling __repr__ directly is
    # equivalent but non-idiomatic.
    report = repr(io)
    return HttpResponse(report, content_type="text/plain")
def display_inspect(request):
    """Django view returning a plain-text inspection report for a dataset.

    The query string must supply a ``uri`` parameter and may supply a
    ``variable`` plus interface-overload parameters.

    :param request: incoming Django request object.
    :returns: :class:`HttpResponse` with ``text/plain`` content containing
        the :class:`Inspect` report.
    """
    ## parse the query string
    query = parse_qs(request.META['QUERY_STRING'])
    uri = helpers._get_uri_(query, scalar=True)
    variable = QueryParm(query, 'variable', scalar=True)
    interface_overload = _get_interface_overload_(query)
    io = Inspect(uri.value, variable=variable.value,
                 interface_overload=interface_overload)
    # repr() is the idiomatic spelling; calling __repr__ directly is
    # equivalent but non-idiomatic.
    report = repr(io)
    return HttpResponse(report, content_type="text/plain")
def inspect(self):
    """Print inspection output using :class:`~ocgis.Inspect`. This is a
    convenience method wrapping the dataset's ``uri``, ``variable`` and
    ``interface`` attributes."""
    return Inspect(self.uri, variable=self.variable,
                   interface_overload=self.interface)
def inspect_as_dct(self):
    '''
    Return a dictionary representation of the target's metadata. If the
    variable is `None`. An attempt will be made to find the target dataset's
    time bounds raising a warning if none is found or the time variable is
    lacking units and/or calendar attributes.

    >>> rd = ocgis.RequestDataset('rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc','rhs')
    >>> ret = rd.inspect_as_dct()
    >>> ret.keys()
    ['dataset', 'variables', 'dimensions', 'derived']
    >>> ret['derived']
    OrderedDict([('Start Date', '2011-01-01 12:00:00'), ('End Date', '2020-12-31 12:00:00'), ('Calendar', '365_day'), ('Units', 'days since 1850-1-1'), ('Resolution (Days)', '1'), ('Count', '8192'), ('Has Bounds', 'True'), ('Spatial Reference', 'WGS84'), ('Proj4 String', '+proj=longlat +datum=WGS84 +no_defs '), ('Extent', '(-1.40625, -90.0, 358.59375, 90.0)'), ('Interface Type', 'NcPolygonDimension'), ('Resolution', '2.80091351339')])

    :rtype: :class:`collections.OrderedDict`
    '''
    # delegate metadata extraction to the Inspect helper
    inspector = Inspect(request_dataset=self)
    return inspector._as_dct_()
def inspect_as_dct(self):
    '''
    Return a dictionary representation of the target's metadata. If the
    variable is `None`. An attempt will be made to find the target dataset's
    time bounds raising a warning if none is found or the time variable is
    lacking units and/or calendar attributes.

    >>> rd = ocgis.RequestDataset('rhs_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc','rhs')
    >>> ret = rd.inspect_as_dct()
    >>> ret.keys()
    ['dataset', 'variables', 'dimensions', 'derived']
    >>> ret['derived']
    OrderedDict([('Start Date', '2011-01-01 12:00:00'), ('End Date', '2020-12-31 12:00:00'), ('Calendar', '365_day'), ('Units', 'days since 1850-1-1'), ('Resolution (Days)', '1'), ('Count', '8192'), ('Has Bounds', 'True'), ('Spatial Reference', 'WGS84'), ('Proj4 String', '+proj=longlat +datum=WGS84 +no_defs '), ('Extent', '(-1.40625, -90.0, 358.59375, 90.0)'), ('Interface Type', 'NcPolygonDimension'), ('Resolution', '2.80091351339')])

    :rtype: :class:`collections.OrderedDict`
    '''
    # delegate metadata extraction to the Inspect helper
    inspector = Inspect(request_dataset=self)
    return inspector._as_dct_()
def test_low_res(self):
    """Exercise shapefile conversion of a coarse (5-degree) grid subset by
    the Nebraska geometry for both point and polygon abstractions."""
    ocgis.env.OVERWRITE = True
    nc_spatial = NcSpatial(5.0, (-90.0, 90.0), (0.0, 360.0))
    path = self.make_data(nc_spatial)
    dataset = {'uri': path, 'variable': 'foo'}
    output_format = 'shp'
    geom = self.nebraska
    # smoke check that the generated dataset is inspectable before converting
    ip = Inspect(dataset['uri'], dataset['variable'])
    for s_abstraction in ['point', 'polygon']:
        # NOTE: the original assigned an unused ``interface`` dict here; the
        # abstraction is passed directly to the operations instead.
        ops = OcgOperations(dataset=dataset,
                            output_format=output_format,
                            geom=geom,
                            abstraction=s_abstraction)
        ret = OcgInterpreter(ops).execute()
def test_inspect(self):
    """The inspection report should render both with an explicit variable
    and with ``None`` (variable discovery) and be non-trivially long."""
    uri = self.get_dataset()['uri']
    for variable in [self.get_dataset()['variable'], None]:
        ip = Inspect(uri, variable=variable)
        # use the idiomatic repr() rather than calling __repr__ directly
        ret = repr(ip)
        self.assertTrue(len(ret) > 100)
def test_nc_conversion(self):
    """Convert the test dataset to netCDF and verify the resulting file is
    inspectable."""
    operations = OcgOperations(dataset=self.get_dataset(),
                               output_format='nc')
    ret = self.get_ret(operations)
    # inspecting the output acts as a smoke test on the written file
    ip = Inspect(ret, 'foo')
def test_inspect(self):
    """Inspection output should be produced whether or not a variable is
    named explicitly, and should be longer than a trivial report."""
    uri = self.get_dataset()['uri']
    candidate_variables = [self.get_dataset()['variable'], None]
    for target_variable in candidate_variables:
        inspector = Inspect(uri, variable=target_variable)
        report = inspector.__repr__()
        self.assertTrue(len(report) > 100)
def write(self):
    """Run the subclass conversion then emit any requested auxiliary files.

    Auxiliary outputs (each gated by a flag on the converter): the OCGIS
    metadata file, the dataset descriptor (DID) CSV, the user-geometry
    shapefile, and the source metadata report.

    :returns: the value returned by the subclass ``_write_`` when not
        ``None``, otherwise ``self.path``.
    """
    ## call subclass write method
    ocgis_lh('starting subclass write method',self._log,logging.DEBUG)
    ret = self._write_()
    ## added OCGIS metadata output if requested.
    if self.add_meta:
        ocgis_lh('adding OCGIS metadata file','conv',logging.DEBUG)
        lines = MetaConverter(self.ops).write()
        out_path = os.path.join(self.outdir,self.prefix+'_'+MetaConverter._meta_filename)
        with open(out_path,'w') as f:
            f.write(lines)
    ## add the dataset descriptor file if specified
    if self._add_did_file:
        ocgis_lh('writing dataset description (DID) file','conv',logging.DEBUG)
        from ocgis.conv.csv_ import OcgDialect
        headers = ['DID','VARIABLE','ALIAS','URI','STANDARD_NAME','UNITS','LONG_NAME']
        out_path = os.path.join(self.outdir,self.prefix+'_did.csv')
        with open(out_path,'w') as f:
            writer = csv.writer(f,dialect=OcgDialect)
            writer.writerow(headers)
            # one row per request dataset; attributes default to None when
            # absent from the source metadata
            for rd in self.ops.dataset:
                row = [rd.did,rd.variable,rd.alias,rd.uri]
                ref_variable = rd.ds.metadata['variables'][rd.variable]['attrs']
                row.append(ref_variable.get('standard_name',None))
                row.append(ref_variable.get('units',None))
                row.append(ref_variable.get('long_name',None))
                writer.writerow(row)
    ## add user-geometry
    if self._add_ugeom and self.ops.geom is not None:
        ocgis_lh('writer user-geometry shapefile','conv',logging.DEBUG)
        if self._add_ugeom_nest:
            shp_dir = os.path.join(self.outdir,'shp')
            try:
                os.mkdir(shp_dir)
            ## catch if the directory exists
            except OSError:
                if os.path.exists(shp_dir):
                    pass
                else:
                    raise
        else:
            shp_dir = self.outdir
        shp_path = os.path.join(shp_dir,self.prefix+'_ugid.shp')
        self.ops.geom.write(shp_path)
    ## add source metadata if requested
    if self._add_source_meta:
        ocgis_lh('writing source metadata file','conv',logging.DEBUG)
        out_path = os.path.join(self.outdir,self.prefix+'_source_metadata.txt')
        to_write = []
        for rd in self.ops.dataset:
            ip = Inspect(request_dataset=rd)
            to_write += ip.get_report()
        with open(out_path,'w') as f:
            f.writelines('\n'.join(to_write))
    ## return anything from the overloaded _write_ method. otherwise return
    ## the internal path.
    if ret is None:
        ret = self.path
    return(ret)
def write(self):
    """Stream collections from ``self.colls`` to the output, writing the
    user-geometry shapefile and CSV alongside when configured, then emit
    auxiliary files (metadata, DID CSV, source metadata) when OCGIS
    operations are present.

    :returns: the internal path from ``_get_return_`` unless overloaded by
        subclasses.
    """
    ocgis_lh('starting write method',self._log,logging.DEBUG)
    # geometries already written, keyed by UGID, to avoid duplicates
    unique_geometry_store = []
    # indicates if user geometries should be written to file
    write_ugeom = False
    try:
        # ``build`` is True only for the first collection: output objects
        # are constructed once, then reused for every subsequent collection.
        build = True
        for coll in iter(self.colls):
            if build:
                # write the user geometries if configured and there is one present on the incoming collection.
                if self._add_ugeom and coll.geoms.values()[0] is not None:
                    write_ugeom = True
                f = self._build_(coll)
                if write_ugeom:
                    ugid_shp_name = self.prefix + '_ugid.shp'
                    ugid_csv_name = self.prefix + '_ugid.csv'
                    if self._add_ugeom_nest:
                        fiona_path = os.path.join(self._get_or_create_shp_folder_(),ugid_shp_name)
                        csv_path = os.path.join(self._get_or_create_shp_folder_(),ugid_csv_name)
                    else:
                        fiona_path = os.path.join(self.outdir,ugid_shp_name)
                        csv_path = os.path.join(self.outdir,ugid_csv_name)
                    if coll.meta is None:
                        # convert the collection properties to fiona properties
                        from fiona_ import FionaConverter
                        fiona_properties = {}
                        for k, v in coll.properties.values()[0].iteritems():
                            fiona_properties[k] = FionaConverter.get_field_type(type(v))
                        fiona_schema = {'geometry':'MultiPolygon',
                                        'properties':fiona_properties}
                        fiona_meta = {'schema':fiona_schema,'driver':'ESRI Shapefile'}
                    else:
                        fiona_meta = coll.meta
                    ## always use the CRS from the collection. shapefile metadata
                    ## will always be WGS84, but it may be overloaded in the
                    ## operations.
                    fiona_meta['crs'] = coll.crs.value
                    ## always upper for the properties definition as this happens
                    ## for each record.
                    fiona_meta['schema']['properties'] = {k.upper():v for k,v in fiona_meta['schema']['properties'].iteritems()}
                    ## selection geometries will always come out as MultiPolygon
                    ## regardless if they began as points. points are buffered
                    ## during the subsetting process.
                    fiona_meta['schema']['geometry'] = 'MultiPolygon'
                    fiona_object = fiona.open(fiona_path,'w',**fiona_meta)
                    csv_file = open(csv_path,'w')
                    from ocgis.conv.csv_ import OcgDialect
                    csv_object = DictWriter(csv_file,fiona_meta['schema']['properties'].keys(),dialect=OcgDialect)
                    csv_object.writeheader()
                build = False
            self._write_coll_(f,coll)
            if write_ugeom:
                ## write the overview geometries to disk
                r_geom = coll.geoms.values()[0]
                if isinstance(r_geom,Polygon):
                    r_geom = MultiPolygon([r_geom])
                ## see if this geometry is in the unique geometry store
                should_append = self._get_should_append_to_unique_geometry_store_(
                    unique_geometry_store,
                    r_geom,
                    coll.properties.values()[0]['UGID'])
                if should_append:
                    unique_geometry_store.append({'geom':r_geom,
                                                  'ugid':coll.properties.values()[0]['UGID']})
                    ## if it is unique write the geometry to the output files
                    to_write = {'geometry':mapping(r_geom),
                                'properties':{k.upper():v for k,v in coll.properties.values()[0].iteritems()}}
                    fiona_object.write(to_write)
                    ## write the geometry attributes to the corresponding shapefile
                    for row in coll.properties.itervalues():
                        csv_object.writerow({k.upper():v for k,v in row.iteritems()})
    finally:
        ## errors are masked if the processing failed and file objects, etc.
        ## were not properly created. if there are UnboundLocalErrors pass
        ## them through to capture the error that lead to the objects not
        ## being created.
        try:
            try:
                self._finalize_(f)
            except UnboundLocalError:
                pass
        except Exception as e:
            ## this the exception we want to log
            ocgis_lh(exc=e,logger=self._log)
        finally:
            if write_ugeom:
                try:
                    fiona_object.close()
                except UnboundLocalError:
                    pass
                try:
                    csv_file.close()
                except UnboundLocalError:
                    pass
    ## the metadata and dataset descriptor files may only be written if
    ## OCGIS operations are present.
    if self.ops is not None and self.add_auxiliary_files == True:
        ## added OCGIS metadata output if requested.
        if self.add_meta:
            ocgis_lh('adding OCGIS metadata file','conv',logging.DEBUG)
            lines = MetaConverter(self.ops).write()
            out_path = os.path.join(self.outdir,self.prefix+'_'+MetaConverter._meta_filename)
            with open(out_path,'w') as f:
                f.write(lines)
        ## add the dataset descriptor file if specified and OCGIS operations
        ## are present.
        if self._add_did_file:
            ocgis_lh('writing dataset description (DID) file','conv',logging.DEBUG)
            from ocgis.conv.csv_ import OcgDialect
            headers = ['DID','VARIABLE','ALIAS','URI','STANDARD_NAME','UNITS','LONG_NAME']
            out_path = os.path.join(self.outdir,self.prefix+'_did.csv')
            with open(out_path,'w') as f:
                writer = csv.writer(f,dialect=OcgDialect)
                writer.writerow(headers)
                # one row per variable/alias pair within each request dataset
                for rd in self.ops.dataset.itervalues():
                    for d in rd:
                        row = [rd.did,d['variable'],d['alias'],rd.uri]
                        ref_variable = rd.source_metadata['variables'][d['variable']]['attrs']
                        row.append(ref_variable.get('standard_name',None))
                        row.append(ref_variable.get('units',None))
                        row.append(ref_variable.get('long_name',None))
                        writer.writerow(row)
        ## add source metadata if requested
        if self._add_source_meta:
            ocgis_lh('writing source metadata file','conv',logging.DEBUG)
            out_path = os.path.join(self.outdir,self.prefix+'_source_metadata.txt')
            to_write = []
            for rd in self.ops.dataset.itervalues():
                ip = Inspect(meta=rd.source_metadata, uri=rd.uri)
                to_write += ip.get_report_no_variable()
            with open(out_path,'w') as f:
                f.writelines('\n'.join(to_write))
    ## return the internal path unless overloaded by subclasses.
    ret = self._get_return_()
    return(ret)
def write(self):
    """Run the subclass conversion then emit any requested auxiliary files.

    Auxiliary outputs (each gated by a flag on the converter): the OCGIS
    metadata file, the dataset descriptor (DID) CSV, the user-geometry
    shapefile, and the source metadata report.

    :returns: the value returned by the subclass ``_write_`` when not
        ``None``, otherwise ``self.path``.
    """
    ## call subclass write method
    ocgis_lh('starting subclass write method', self._log, logging.DEBUG)
    ret = self._write_()
    ## added OCGIS metadata output if requested.
    if self.add_meta:
        ocgis_lh('adding OCGIS metadata file', 'conv', logging.DEBUG)
        lines = MetaConverter(self.ops).write()
        out_path = os.path.join(
            self.outdir, self.prefix + '_' + MetaConverter._meta_filename)
        with open(out_path, 'w') as f:
            f.write(lines)
    ## add the dataset descriptor file if specified
    if self._add_did_file:
        ocgis_lh('writing dataset description (DID) file', 'conv',
                 logging.DEBUG)
        from ocgis.conv.csv_ import OcgDialect
        headers = [
            'DID', 'VARIABLE', 'ALIAS', 'URI', 'STANDARD_NAME', 'UNITS',
            'LONG_NAME'
        ]
        out_path = os.path.join(self.outdir, self.prefix + '_did.csv')
        with open(out_path, 'w') as f:
            writer = csv.writer(f, dialect=OcgDialect)
            writer.writerow(headers)
            # one row per request dataset; attributes default to None when
            # absent from the source metadata
            for rd in self.ops.dataset:
                row = [rd.did, rd.variable, rd.alias, rd.uri]
                ref_variable = rd.ds.metadata['variables'][
                    rd.variable]['attrs']
                row.append(ref_variable.get('standard_name', None))
                row.append(ref_variable.get('units', None))
                row.append(ref_variable.get('long_name', None))
                writer.writerow(row)
    ## add user-geometry
    if self._add_ugeom and self.ops.geom is not None:
        ocgis_lh('writer user-geometry shapefile', 'conv', logging.DEBUG)
        if self._add_ugeom_nest:
            shp_dir = os.path.join(self.outdir, 'shp')
            try:
                os.mkdir(shp_dir)
            ## catch if the directory exists
            except OSError:
                if os.path.exists(shp_dir):
                    pass
                else:
                    raise
        else:
            shp_dir = self.outdir
        shp_path = os.path.join(shp_dir, self.prefix + '_ugid.shp')
        self.ops.geom.write(shp_path)
    ## add source metadata if requested
    if self._add_source_meta:
        ocgis_lh('writing source metadata file', 'conv', logging.DEBUG)
        out_path = os.path.join(self.outdir,
                                self.prefix + '_source_metadata.txt')
        to_write = []
        for rd in self.ops.dataset:
            ip = Inspect(request_dataset=rd)
            to_write += ip.get_report()
        with open(out_path, 'w') as f:
            f.writelines('\n'.join(to_write))
    ## return anything from the overloaded _write_ method. otherwise return
    ## the internal path.
    if ret is None:
        ret = self.path
    return (ret)
def write(self):
    """Stream collections from ``self.colls`` to the output, writing the
    user-geometry shapefile alongside when configured, then emit auxiliary
    files (metadata, DID CSV, source metadata) when OCGIS operations are
    present.

    :returns: the internal path from ``_get_return_`` unless overloaded by
        subclasses.
    """
    ocgis_lh('starting write method', self._log, logging.DEBUG)

    # geometries already written, keyed by UGID, to avoid duplicates
    unique_geometry_store = []

    # indicates if user geometries should be written to file
    write_ugeom = False

    try:
        # ``build`` is True only for the first collection: output objects
        # are constructed once, then reused for every subsequent collection.
        build = True
        for coll in iter(self.colls):
            if build:
                # write the user geometries if configured and there is one present on the incoming collection.
                if self._add_ugeom and coll.geoms.values()[0] is not None:
                    write_ugeom = True
                f = self._build_(coll)
                if write_ugeom:
                    ugid_shp_name = self.prefix + '_ugid.shp'
                    ugid_csv_name = self.prefix + '_ugid.csv'
                    if self._add_ugeom_nest:
                        fiona_path = os.path.join(self._get_or_create_shp_folder_(), ugid_shp_name)
                    else:
                        fiona_path = os.path.join(self.outdir, ugid_shp_name)
                    if coll.meta is None:
                        # convert the collection properties to fiona properties
                        from fiona_ import AbstractFionaConverter

                        fiona_properties = get_schema_from_numpy_dtype(coll.properties.values()[0].dtype)

                        fiona_schema = {'geometry': 'MultiPolygon',
                                        'properties': fiona_properties}
                        fiona_meta = {'schema': fiona_schema, 'driver': 'ESRI Shapefile'}
                    else:
                        fiona_meta = coll.meta

                    # always use the CRS from the collection. shapefile metadata will always be WGS84, but it may be
                    # overloaded in the operations.
                    fiona_meta['crs'] = coll.crs.value

                    # selection geometries will always come out as MultiPolygon regardless if they began as points.
                    # points are buffered during the subsetting process.
                    fiona_meta['schema']['geometry'] = 'MultiPolygon'

                    fiona_object = fiona.open(fiona_path, 'w', **fiona_meta)

                build = False
            self._write_coll_(f, coll)
            if write_ugeom:
                # write the overview geometries to disk
                r_geom = coll.geoms.iteritems().next()
                uid_value = r_geom[0]
                r_geom = r_geom[1]
                if isinstance(r_geom, Polygon):
                    r_geom = MultiPolygon([r_geom])

                # see if this geometry is in the unique geometry store
                should_append = self._get_should_append_to_unique_geometry_store_(unique_geometry_store,
                                                                                  r_geom,
                                                                                  uid_value)
                if should_append:
                    unique_geometry_store.append({'geom': r_geom,
                                                  'ugid': uid_value})

                    # if it is unique write the geometry to the output files
                    coll.write_ugeom(fobject=fiona_object)
    finally:

        # errors are masked if the processing failed and file objects, etc. were not properly created. if there are
        # UnboundLocalErrors pass them through to capture the error that lead to the objects not being created.

        try:
            try:
                self._finalize_(f)
            except UnboundLocalError:
                pass
        except Exception as e:
            # this the exception we want to log
            ocgis_lh(exc=e, logger=self._log)
        finally:
            if write_ugeom:
                try:
                    fiona_object.close()
                except UnboundLocalError:
                    pass

    # the metadata and dataset descriptor files may only be written if OCGIS operations are present.
    if self.ops is not None and self.add_auxiliary_files == True:
        # added OCGIS metadata output if requested.
        if self.add_meta:
            ocgis_lh('adding OCGIS metadata file', 'conv', logging.DEBUG)
            from ocgis.conv.meta import MetaOCGISConverter

            lines = MetaOCGISConverter(self.ops).write()
            out_path = os.path.join(self.outdir, self.prefix + '_' + MetaOCGISConverter._meta_filename)
            with open(out_path, 'w') as f:
                f.write(lines)

        # add the dataset descriptor file if requested
        if self._add_did_file:
            ocgis_lh('writing dataset description (DID) file', 'conv', logging.DEBUG)
            from ocgis.conv.csv_ import OcgDialect

            headers = ['DID', 'VARIABLE', 'ALIAS', 'URI', 'STANDARD_NAME', 'UNITS', 'LONG_NAME']
            out_path = os.path.join(self.outdir, self.prefix + '_did.csv')
            with open(out_path, 'w') as f:
                writer = csv.writer(f, dialect=OcgDialect)
                writer.writerow(headers)
                for rd in self.ops.dataset.itervalues():
                    try:
                        for d in rd:
                            row = [rd.did, d['variable'], d['alias'], rd.uri]
                            try:
                                ref_variable = rd.source_metadata['variables'][d['variable']]['attrs']
                            except KeyError:
                                # for vector drivers the variable attrs may
                                # not be present in metadata
                                if isinstance(rd.driver, DriverVector):
                                    ref_variable = {}
                                else:
                                    raise
                            row.append(ref_variable.get('standard_name', None))
                            row.append(ref_variable.get('units', None))
                            row.append(ref_variable.get('long_name', None))
                            writer.writerow(row)
                    except NotImplementedError:
                        # Field objects do not support the request-dataset
                        # iteration protocol; pull rows from their variables.
                        if isinstance(rd, Field):
                            for variable in rd.variables.itervalues():
                                row = [rd.uid, variable.name, variable.alias, None,
                                       variable.attrs.get('standard_name'), variable.units,
                                       variable.attrs.get('long_name')]
                                writer.writerow(row)
                        else:
                            raise

        # add source metadata if requested
        if self._add_source_meta:
            ocgis_lh('writing source metadata file', 'conv', logging.DEBUG)
            out_path = os.path.join(self.outdir, self.prefix + '_source_metadata.txt')
            to_write = []
            for rd in self.ops.dataset.iter_request_datasets():
                ip = Inspect(request_dataset=rd)
                to_write += ip.get_report_possible()
            with open(out_path, 'w') as f:
                f.writelines(Inspect.newline.join(to_write))

    # return the internal path unless overloaded by subclasses.
    ret = self._get_return_()
    return ret
def inspect(self):
    """Print inspection output using :class:`~ocgis.Inspect`. This is a
    convenience method wrapping this request dataset."""
    return Inspect(request_dataset=self)