def write_regridded_data_to_shapefile(dstfield):
    """
    :param dstfield: The ESMF field object containing a mesh to write to shapefile.
    :type dstfield: :class:`ESMF.api.field.Field`
    :returns: Path to the output shapefile.
    :rtype: str
    """
    # turn the shapefile into an OCGIS field and get the spatial information
    ofield = ocgis.RequestDataset(PATH_SHP).get()
    # get the time dimension from the original netCDF file
    otime = ocgis.RequestDataset(PATH_PR).get().temporal
    # create an OCGIS variable from the regridded data values
    pr = ocgis.Variable(name='pr',
                        value=np.array(dstfield.reshape(1, otime.shape[0], 1, 1, ofield.shape[-1])))
    # this holds our variables
    vc = ocgis.VariableCollection([pr])
    # we want to maintain the original shapefile data, but it needs to be reshaped to account for
    # the new time dimension.
    for var in ofield.variables.itervalues():
        newvalue = np.zeros(pr.shape, dtype=var.dtype)
        newvalue[:] = var.value
        newvar = ocgis.Variable(name=var.name, value=newvalue)
        vc[newvar.name] = newvar
    # combine the spatial data with time and the regridded values
    ofield2 = ocgis.Field(temporal=otime, spatial=ofield.spatial, variables=vc)
    # write this to shapefile
    path_out_shp = ocgis.OcgOperations(dataset=ofield2, output_format='shp',
                                       prefix='pr_catchments',
                                       add_auxiliary_files=False).execute()
    return path_out_shp
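# A minimal usage sketch, not part of the original script: it assumes PATH_SHP and PATH_PR are the
# module-level constants referenced above and that ``dstfield`` has already been filled by an ESMF
# regridding step (for example with ESMF.Regrid). ``example_write_regridded`` is a hypothetical
# wrapper added only for illustration.
def example_write_regridded(dstfield):
    # ``dstfield`` is the destination ESMF.Field holding the regridded precipitation values.
    out_path = write_regridded_data_to_shapefile(dstfield)
    print('wrote regridded shapefile to {}'.format(out_path))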
def apply_by_spatial_chunk(src_filename, dst_filename, nchunks, chunk_idx, **kwargs):
    """
    Create a spatial chunk from source and destination CF-Grid NetCDF files. Each source and
    destination chunk is converted to a :class:`xarray.Dataset`. See
    :class:`~ocgis.spatial.grid_chunker.GridChunker` for more documentation on the spatial
    chunking. Returns `0` if the chunking is successful.

    :param str src_filename: Path to source NetCDF file.
    :param str dst_filename: Path to destination NetCDF file.
    :param nchunks: The chunking decomposition for the destination grid. See
        :class:`~ocgis.spatial.grid_chunker.GridChunker`.
    :type nchunks: tuple(int, ...)
    :param int chunk_idx: The target chunk index.
    :param kwargs: Extra keyword arguments to :class:`~ocgis.spatial.grid_chunker.GridChunker`
        initialization.
    :rtype: int
    """
    rc = 1
    rd_src = ocgis.RequestDataset(src_filename)
    rd_dst = ocgis.RequestDataset(dst_filename)
    gc = GridChunker(rd_src, rd_dst, nchunks_dst=nchunks, **kwargs)
    for ctr, (src_grid, src_slice, dst_grid, dst_slice) in enumerate(
            gc.iter_src_grid_subsets(yield_dst=True, yield_idx=chunk_idx)):
        xsrc = src_grid.parent.to_xarray(decode_cf=False)
        xdst = dst_grid.parent.to_xarray(decode_cf=False)
        rc = 0
        assert ctr == 0  # Ensure we only have a single loop
    return rc
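# A hedged usage sketch, not from the original module: chunk a destination grid into a 5 x 5
# decomposition and process only the first chunk. The file names below are placeholders for
# CF-Grid NetCDF files, not files from the original example.
if __name__ == '__main__':
    return_code = apply_by_spatial_chunk('src_cf_grid.nc', 'dst_cf_grid.nc',
                                         nchunks=(5, 5), chunk_idx=0)
    assert return_code == 0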
def main():
    ## create request datasets
    tas = ocgis.RequestDataset(uri='tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',
                               variable='tasmax')
    rhs = ocgis.RequestDataset(uri='rhsmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',
                               variable='rhsmax')
    ## other operations arguments
    calc = [{'func': 'heat_index', 'name': 'heat_index',
             'kwds': {'tas': 'tasmax', 'rhs': 'rhsmax', 'units': 'k'}}]
    calc_grouping = ['month', 'year']
    snippet = False
    select_ugid = [668, 705, 743, 597, 634, 783, 599, 785, 600, 748, 786, 675, 676, 603, 827, 679,
                   680, 792, 682, 719, 794, 610, 760, 797, 835, 688, 692, 693, 694, 658, 695, 698,
                   735, 775, 666]
    geom = 'urban_areas_2000'
    aggregate = True
    spatial_operation = 'clip'
    output_format = 'shp'
    dir_output = '/home/local/WX/ben.koziol/Dropbox/nesii/conference/FOSS4G_2013/figures/heat_index'
    prefix = 'minneapolis_heat_index'
    agg_selection = True
    ## construct operations
    ops = ocgis.OcgOperations(dataset=[tas, rhs], calc_grouping=calc_grouping, snippet=snippet,
                              geom=geom, select_ugid=select_ugid, aggregate=aggregate,
                              spatial_operation=spatial_operation, output_format=output_format,
                              calc=calc, dir_output=dir_output, prefix=prefix,
                              agg_selection=agg_selection)
    ## return the data
    ret = ops.execute()
def test1d(self):
    p1 = self.write_field_data('v1', ncol=1, nrow=1)
    p3 = self.write_field_data('v1', dir='b')

    ref_range = [dt.datetime(2000, 3, 1), dt.datetime(2000, 3, 31)]
    reference = ocgis.RequestDataset(p1, time_range=ref_range).get()

    cand_range = [dt.datetime(2000, 8, 1), dt.datetime(2000, 8, 31)]
    candidate = ocgis.RequestDataset(p3, time_range=cand_range)

    calc = [{'func': 'dissimilarity',
             'name': 'output_1d',
             'kwds': {'target': reference,
                      'candidate': ('v1',)}}]

    ops = OcgOperations(dataset=candidate, calc=calc)
    ret = ops.execute()
    actual_field = ret.get_element()
    actual_variables = get_variable_names(actual_field.data_variables)
    self.assertEqual(actual_variables[0], 'dissimilarity')
    dist = actual_field['dissimilarity']
    self.assertEqual(dist.shape, (1, 1, 2, 2))
def _handler(self, request, response):
    import uuid
    import time
    import json

    outputpath = configuration.get_config_value('server', 'outputpath')

    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    # -------------- #
    # Input handling #
    # -------------- #
    resource = archiveextract(
        resource=rename_complexinputs(request.inputs['resource']))
    LOGGER.info("resource: %s " % resource)

    dest = archiveextract(
        resource=rename_complexinputs(request.inputs['dest']))
    LOGGER.info("dest: %s " % dest)

    method = request.inputs['method'][0].data
    LOGGER.info("method: %s " % method)

    snippet = request.inputs['snippet'][0].data
    LOGGER.info("snippet: %s " % snippet)

    # -------------------- #
    # Regridding operation #
    # -------------------- #
    d = ocgis.RequestDataset(dest)
    m = getattr(ESMF.RegridMethod, method.upper())
    LOGGER.info('Start ocgis module call function')

    # Prepare the environment
    ocgis.env.OVERWRITE = True

    prefix = str(uuid.uuid1())
    ocgis.env.PREFIX = prefix

    outputs = []
    for source in resource:
        s = ocgis.RequestDataset(source)
        ops = ocgis.OcgOperations(dataset=s, regrid_destination=d,
                                  regrid_options={'regrid_method': m}, snippet=snippet,
                                  dir_output=outputpath, output_format='nc', prefix=prefix)
        outputs.append(ops.execute())

    response.outputs['output_netcdf'].file = outputs[0]

    time_str = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
    output_json = "esmf_regrid_results_{}.json".format(time_str)
    with open(output_json, 'w') as f:
        f.write(json.dumps([actual_output_path(o) for o in outputs]))

    response.outputs['output'].file = output_json
    response.outputs['output'].output_format = json_format

    return response
def test_dissimilarity_op():
    """Test with a real file."""
    import datetime as dt

    lon, lat = -72, 46
    g = Point(lon, lat)

    cfn = local_path(TESTDATA['indicators_small.nc'])
    tfn = local_path(TESTDATA['indicators_medium.nc'])
    indices = ['meantemp', 'totalpr']

    # Candidate fields
    candidate = ocgis.RequestDataset(cfn, variable=indices,
                                     time_range=[dt.datetime(1970, 1, 1),
                                                 dt.datetime(2000, 1, 1)])

    # The indicators_small dataset is just a subset of the indicators_medium dataset. Below is
    # the code to create the small dataset. Running the test with the full file takes about
    # 2 minutes, so we'll crop the data to 4 grid cells.
    """
    op = ocgis.OcgOperations(dataset=crd, geom=g, select_nearest=False, search_radius_mult=1.75,
                             output_format='nc',
                             output_format_options={'data_model': 'NETCDF4'},
                             dir_output='/tmp', prefix='indicators_small')
    res = op.execute()
    """

    # Target fields
    # Extract values from one grid cell
    trd = ocgis.RequestDataset(tfn, variable=indices,
                               time_range=[dt.datetime(1970, 1, 1),
                                           dt.datetime(2000, 1, 1)])
    op = ocgis.OcgOperations(dataset=trd, geom=g, search_radius_mult=1.75, select_nearest=True)
    target = op.execute().get_element()

    ops = ocgis.OcgOperations(
        calc=[{'func': 'dissimilarity',
               'name': 'spatial_analog',
               'kwds': {'dist': 'seuclidean', 'target': target, 'candidate': indices}}],
        dataset=candidate)
    res = ops.execute()
    out = res.get_element()
    val = out['dissimilarity'].get_value()
    i = np.argmin(np.abs(out['lon'].get_value() - lon))
    j = np.argmin(np.abs(out['lat'].get_value() - lat))
    np.testing.assert_almost_equal(val[j, i], 0, 6)
    np.testing.assert_array_equal(val > 0, True)
def setUp(self):
    ocgis.env.DIR_DATA = '/usr/local/climate_data/narccap'
    self.polar_stereographic = ocgis.RequestDataset(uri='pr_CRCM_ccsm_1981010103.nc',
                                                    variable='pr')
    self.oblique_mercator = ocgis.RequestDataset(uri='pr_RCM3_gfdl_1981010103.nc',
                                                 variable='pr')
    self.rotated_pole = ocgis.RequestDataset(uri='pr_HRM3_gfdl_1981010103.nc',
                                             variable='pr')
    self.ecp2 = ocgis.RequestDataset(uri='pr_ECP2_ncep_1981010103.nc', variable='pr')
def test_maurer_concatenated_tasmax_region(self):
    ocgis.env.DIR_DATA = '/usr/local/climate_data/maurer/2010-concatenated'
    filename = 'Maurer02new_OBS_tasmax_daily.1971-2000.nc'
    variable = 'tasmax'
    # ocgis.env.VERBOSE = True

    rd = ocgis.RequestDataset(filename, variable)
    ops = ocgis.OcgOperations(dataset=rd, geom='us_counties', select_ugid=[2778],
                              output_format='numpy')
    ret = ops.execute()
    ref = ret[2778].variables['tasmax']
    years = np.array([dt.year for dt in ret[2778].variables['tasmax'].temporal.value_datetime])
    months = np.array([dt.month for dt in ret[2778].variables['tasmax'].temporal.value_datetime])
    select = np.array([dt.month in (6, 7, 8) and
                       dt.year in (1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999)
                       for dt in ret[2778].variables['tasmax'].temporal.value_datetime])
    time_subset = ret[2778].variables['tasmax'].value[select, :, :, :]
    time_values = ref.temporal.value[select]

    rd = ocgis.RequestDataset(
        filename, variable,
        time_region={'month': [6, 7, 8],
                     'year': [1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999]})
    ops = ocgis.OcgOperations(dataset=rd, geom='us_counties', select_ugid=[2778],
                              output_format='numpy')
    ret2 = ops.execute()
    ref2 = ret2[2778].variables['tasmax']

    self.assertEqual(time_values.shape, ref2.temporal.shape)
    self.assertEqual(time_subset.shape, ref2.value.shape)
    self.assertNumpyAll(time_subset, ref2.value)
    self.assertFalse(np.any(ref2.value < 0))
def create_grid_splitter(src_path, dst_path):
    """Create grid splitter object from a source and destination path."""
    src_filename = os.path.split(src_path)[1]
    dst_filename = os.path.split(dst_path)[1]

    grid_splitter_paths = {'wd': WD}
    grid_abstraction = ocgis.constants.GridAbstraction.POINT
    src_grid = ocgis.RequestDataset(uri=src_path, driver='netcdf-ugrid',
                                    grid_abstraction=grid_abstraction).get().grid
    dst_grid = create_scrip_grid(dst_path)

    nsplits_dst = GS_META[dst_filename]['nsplits_dst']
    src_grid_resolution = GS_META[src_filename]['spatial_resolution']
    dst_grid_resolution = GS_META[dst_filename]['spatial_resolution']
    buffer_value = GS_META[dst_filename]['buffer_value']

    if dst_filename == 'SCRIPgrid_ne16np4_nomask_c110512.nc':
        idest = iter_dst2
    else:
        idest = iter_dst

    gs = ocgis.GridSplitter(src_grid, dst_grid, (nsplits_dst,), paths=grid_splitter_paths,
                            src_grid_resolution=src_grid_resolution, check_contains=False,
                            dst_grid_resolution=dst_grid_resolution, iter_dst=idest,
                            buffer_value=buffer_value, redistribute=True)
    return gs
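# A hedged usage sketch, not from the original script: build a grid splitter for a UGRID source
# mesh and a SCRIP destination grid. Both paths are placeholders, and their file names must have
# entries in the module-level GS_META dictionary used by the function above (WD, iter_dst and
# iter_dst2 are likewise assumed to be defined at module level).
splitter = create_grid_splitter('UGRID_source_mesh.nc', 'SCRIPgrid_ne16np4_nomask_c110512.nc')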
def main():
    ocgis.env.DIR_DATA = '/usr/local/climate_data/CanCM4'
    ocgis.env.DIR_OUTPUT = '.../presentation/2013_nws_gis_workshop'

    rd = ocgis.RequestDataset(uri='tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',
                              variable='tasmax')
    calc = [{'func': 'mean', 'name': 'mean'}, {'func': 'std', 'name': 'std'}]
    ops = ocgis.OcgOperations(dataset=rd, geom='climate_divisions', spatial_operation='clip',
                              aggregate=True, calc=calc, calc_grouping=['month'],
                              output_format='csv')
    ret = ops.execute()

    ## operations
    # select_ugid = [2001,2002,2003,2004,2005,2006,2007,2008,2009,2010]
    select_ugid = None
    calc_grouping = ['month']
    snippet = False
    geom = 'climate_divisions'
    output_format = 'csv'
    ops = ocgis.OcgOperations(dataset=rd, select_ugid=select_ugid, snippet=snippet,
                              output_format=output_format, geom=geom, calc=calc,
                              calc_grouping=calc_grouping, spatial_operation='clip',
                              aggregate=True)
    ret = ops.execute()
    print(ret)
def test_qed_maurer_concatenated(self):
    raise SkipTest('dev')

    calc = [{'func': 'freq_duration',
             'name': 'freq_duration',
             'kwds': {'operation': 'gt', 'threshold': 15}}]
    ocgis.env.DIR_DATA = '/usr/local/climate_data/maurer/2010-concatenated'
    filename = 'Maurer02new_OBS_tasmax_daily.1971-2000.nc'
    variable = 'tasmax'
    rd = ocgis.RequestDataset(filename, variable)
    ocgis.env.VERBOSE = True
    ops = ocgis.OcgOperations(dataset=rd, geom='gg_city_centroids', select_ugid=None, calc=calc,
                              calc_grouping=['month', 'year'], output_format='csv+')
    ret = ops.execute()
    webbrowser.open(ret)

    import ipdb
    ipdb.set_trace()
def test_QED_2013(self):
    variable = 'rx1dayamina'
    uri = '/home/local/WX/ben.koziol/climate_data/QED-2013/maurer02v2_min_rx1dayamina_annual_1971-2000.nc'
    rd = ocgis.RequestDataset(uri, variable,
                              time_region={'year': [1991], 'month': [5]},
                              time_range=[datetime.datetime(1971, 1, 1, 0, 0),
                                          datetime.datetime(2001, 1, 1, 0, 0)])
    ops = ocgis.OcgOperations(dataset=rd)
    ret = ops.execute()
    ref = ret[1].variables['rx1dayamina']

    ds = nc.Dataset(uri)
    try:
        ref2 = ds.variables['rx1dayamina'][:]
        self.assertNumpyAll(ref.value, ref2)
    finally:
        ds.close()
def run():
    tic = time.time()
    rd = ocgis.RequestDataset(TESTDATA['cmip5_tasmax_2006_nc'][6:])
    ops = ocgis.OcgOperations(dataset=rd,
                              calc=[{'func': 'mean', 'name': 'mean'}],
                              calc_grouping=['month'])
    ops.execute()
    tac = time.time()
    print("Completion time [#{}]: {} seconds".format(ocgis.vm.rank, tac - tic))
def _handler(self, request, response):
    # Get the NetCDF file
    # Note that all input parameters require index access
    dataset_input = request.inputs['dataset'][0]

    # Subset geometry
    geometry_zip_input = request.inputs['shapefile'][0]
    geometry_zip = ZipFile(geometry_zip_input.file)

    # Extract to subdirectory, pick out the SHP file
    geometry_dir = os.path.join(self.workdir, '_geometry')
    geometry_zip.extractall(path=geometry_dir)
    geometry_shp_file = glob(os.path.join(geometry_dir, '*.shp'))[0]

    # Name of the variable to subset
    data_variable = request.inputs['variable'][0].data

    # Use the given NetCDF file
    rd = ocgis.RequestDataset(dataset_input.file, data_variable)

    # Execute subset
    result = ocgis.OcgOperations(
        dataset=rd,
        geom=geometry_shp_file,
        output_format='nc',  # Outputs only NetCDF
        dir_output=self.workdir,
    ).execute()

    # Finish up by providing the path to the subsetted file
    response.outputs['output'].file = result
    return response
def parse_narccap_filenames(folder):
    ## parse data directory into request datasets
    rds = []
    filenames = os.listdir(folder)
    pieces = np.empty((len(filenames), 4), dtype=object)
    ## split filenames into parts and collect those into groups.
    for ii, filename in enumerate(filenames):
        pieces[ii, 0:3] = filename.split('_')[:-1]
        pieces[ii, -1] = filename
    for variable in np.unique(pieces[:, 0]).flat:
        for gcm in np.unique(pieces[:, 1]).flat:
            for rcm in np.unique(pieces[:, 2]).flat:
                idx = np.all(pieces[:, 0:3] == [variable, gcm, rcm], axis=1)
                if not idx.any():
                    continue
                ## there are multiple filepaths for each request dataset taking
                ## advantage of time dimension concatenation.
                uris = pieces[idx, -1].tolist()
                alias = variable + '_' + gcm + '_' + rcm
                ## this gcm-rcm combination does not have false_easting and
                ## false_northing attributes correctly filled.
                if gcm == 'ECP2' and rcm == 'ncep':
                    s_proj = PolarStereographic(60.0, 90.0, 263.0, 4700000.0, 8400000.0)
                else:
                    s_proj = None
                rd = ocgis.RequestDataset(uri=uris, alias=alias, variable=variable,
                                          s_proj=s_proj, meta={'gcm': gcm, 'rcm': rcm})
                rds.append(rd)
    return rds
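# A hedged usage sketch, not from the original script: build request datasets for every NARCCAP
# variable/GCM/RCM combination found in a data directory and write a one-timestep snippet of each
# to shapefile. The directory path and output prefix are placeholders.
narccap_rds = parse_narccap_filenames('/usr/local/climate_data/narccap')
ops = ocgis.OcgOperations(dataset=narccap_rds, snippet=True, output_format='shp',
                          prefix='narccap_grids')
ops.execute()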
def single_year():
    rd = ocgis.RequestDataset(uri='gridded_obs.tasmax.OBS_125deg.daily.1999.nc',
                              variable='tasmax')
    calc = [{'func': 'freq_perc',
             'name': 'perc_95',
             'kwds': {'perc': 0.95, 'round_method': 'ceil'}},
            # {'func': 'mean', 'name': 'mean'}
            ]
    calc_grouping = ['month', 'year']
    snippet = False
    select_ugid = None
    # select_ugid = [32]
    geom = None
    # geom = 'state_boundaries'
    ops = ocgis.OcgOperations(dataset=rd, snippet=snippet, geom=geom, select_ugid=select_ugid,
                              aggregate=False, spatial_operation='intersects',
                              output_format='nc', calc=calc, calc_grouping=calc_grouping)
    ret = ops.execute()
    return ret
def test_domain_overlap(self):
    """Test all points from the source UGRID coordinates are found in the source data coverage."""
    path_source = PATH_SYNTHETIC_SOURCE_DATA
    path_source = '/home/benkoziol/Dropbox/Share/Transfer/pmesh/single-element-failure-20151204/precipitation_synthetic.nc'

    rd = ocgis.RequestDataset(uri=path_source)
    ops = ocgis.OcgOperations(dataset=rd, output_format='shp', dir_output='/tmp',
                              prefix='precipitation_synthetic', snippet=True)
    ops.execute()

    field = rd.get()
    # polygons = field.spatial.geom.polygon.value
    extent_polygon = field.spatial.grid.extent_polygon

    ugrid_path = PATH_UGRID_NC
    ugrid_path = '/home/benkoziol/Dropbox/Share/Transfer/pmesh/single-element-failure-20151204/catchment_ugrid.nc'
    with self.nc_scope(ugrid_path) as ds:
        lat = ds.variables['mesh_node_lat'][:]
        lon = ds.variables['mesh_node_lon'][:]

    out_pt = '/tmp/out_points.shp'
    schema = {'geometry': 'Point', 'properties': {}}
    with fiona.open(out_pt, 'w', schema=schema, driver='ESRI Shapefile') as sink:
        for idx in range(lat.shape[0]):
            print('{} of {}'.format(idx + 1, lat.shape[0]))
            pt = Point(lon[idx], lat[idx])
            # print pt
            record = {'geometry': mapping(pt), 'properties': {}}
            sink.write(record)
            self.assertTrue(extent_polygon.contains(pt))
def values(self):
    path = '/usr/local/climate_data/maurer/2010-concatenated'
    filenames = ['Maurer02new_OBS_pr_daily.1971-2000.nc',
                 'Maurer02new_OBS_tasmax_daily.1971-2000.nc',
                 'Maurer02new_OBS_tasmin_daily.1971-2000.nc',
                 'Maurer02new_OBS_tas_daily.1971-2000.nc']
    time_range = [None, [datetime.datetime(2001, 3, 1), datetime.datetime(2001, 3, 31, 23)]]
    time_region = [None, {'month': [6, 7], 'year': [2006, 2007]}]
    for filename in filenames:
        variable = filename.split('_')[2]
        for time in [None, time_range, time_region]:
            if time is None:
                trange = None
                tregion = None
            elif isinstance(time, list):
                trange = time
                tregion = None
            else:
                trange = None
                tregion = time
            rd = ocgis.RequestDataset(os.path.join(path, filename), variable,
                                      time_range=trange, time_region=tregion)
            yield rd
def test_compute_small(self):
    rd = self.test_data.get_rd('cancm4_tas')

    # use a smaller netCDF as target
    ops = ocgis.OcgOperations(dataset=rd, geom='state_boundaries',
                              select_ugid=[2, 9, 12, 23, 25], output_format='nc', prefix='sub',
                              add_auxiliary_files=False, agg_selection=True)
    sub = ops.execute()

    # use the compute function
    rd_sub = ocgis.RequestDataset(sub, 'tas')
    ops = ocgis.OcgOperations(dataset=rd_sub, calc=[{'func': 'mean', 'name': 'mean'}],
                              calc_grouping=['month'], output_format='nc',
                              add_auxiliary_files=False)
    ret_compute = compute(ops, 5, verbose=False)

    # now just run normally and ensure the answers are the same!
    ops.prefix = 'ocgis_compare'
    ops.add_auxiliary_files = False
    ret_ocgis = ops.execute()
    self.assertNcEqual(ret_compute, ret_ocgis, check_fill_value=False, check_types=False,
                       ignore_attributes={'global': ['history'], 'mean': ['_FillValue']})
def create_scrip_grid(path):
    """Create an OCGIS grid from a SCRIP file.

    :param str path: Path to source NetCDF file.
    """
    rfield = ocgis.RequestDataset(path).create_raw_field()
    pgc = ocgis.PointGC(x=rfield['grid_center_lon'], y=rfield['grid_center_lat'],
                        crs=ocgis.crs.Spherical())
    return ocgis.GridUnstruct(geoms=pgc)
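# A hedged usage sketch, not from the original script: read a SCRIP grid definition into an
# unstructured OCGIS point grid. The file name is a placeholder; SCRIP files carry the cell
# centers in the 'grid_center_lon'/'grid_center_lat' variables that the function above reads.
scrip_grid = create_scrip_grid('SCRIPgrid_example.nc')
print(scrip_grid)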
def test_full(self):
    """Compute the dissimilarity with all metrics."""
    from flyingpigeon import dissimilarity
    from matplotlib import pyplot as plt

    p1 = self.write_field_data('v1', ncol=1, nrow=1)
    p2 = self.write_field_data('v2', ncol=1, nrow=1)
    p3 = self.write_field_data('v1', ncol=11, nrow=10, dir='c')
    p4 = self.write_field_data('v2', ncol=11, nrow=10, dir='c')

    ref_range = [dt.datetime(2000, 3, 1), dt.datetime(2000, 3, 31)]
    ref = [ocgis.RequestDataset(p, time_range=ref_range) for p in [p1, p2]]
    reference = ocgis.MultiRequestDataset(ref)
    reference = reference.get()

    cand_range = [dt.datetime(2000, 8, 1), dt.datetime(2000, 8, 31)]
    can = [ocgis.RequestDataset(p, time_range=cand_range) for p in [p3, p4]]
    candidate = ocgis.MultiRequestDataset(can)

    fig, axes = plt.subplots(2, 3)
    for i, dist in enumerate(dissimilarity.__all__):
        calc = [{'func': 'dissimilarity',
                 'name': 'output_mfpf',
                 'kwds': {'target': reference,
                          'candidate': ('v1', 'v2'),
                          'dist': dist}}]

        ops = OcgOperations(dataset=candidate, calc=calc)
        ret = ops.execute()
        out_field = ret.get_element()
        var_name = get_variable_names(out_field.data_variables)[0]
        out = out_field[var_name].get_value()[0, 0]
        axes.flat[i].imshow(out)
        axes.flat[i].set_title(dist)

    path = os.path.join(test_output_path, 'test_spatial_analog_metrics.png')
    plt.savefig(path)
    plt.close()
def test_climatology(self):
    # http://cf-pcmdi.llnl.gov/documents/cf-conventions/1.6/cf-conventions.html#idp5996336
    path = os.path.join(self.current_dir_output, 'climatology.nc')
    ds = nc.Dataset(path, 'w')
    try:
        dim_time = ds.createDimension('time', size=None)
        dim_bounds = ds.createDimension('bounds', size=2)
        dim_lat = ds.createDimension('lat', size=2)
        dim_lon = ds.createDimension('lon', size=2)

        var_lat = ds.createVariable('lat', float, dimensions=(dim_lat._name,))
        var_lat[:] = [43, 42]
        var_lon = ds.createVariable('lon', float, dimensions=(dim_lon._name,))
        var_lon[:] = [-109, -108]

        dts = [datetime(2000, 6, 16), datetime(2000, 7, 16), datetime(2000, 8, 16)]
        dts_bounds = [[datetime(2000, 6, 1, 6), datetime(2000, 7, 1, 6)],
                      [datetime(2000, 7, 1, 6), datetime(2000, 8, 1, 6)],
                      [datetime(2000, 8, 1, 6), datetime(2000, 9, 1, 6)]]
        units = 'hours since 0001-01-01 00:00:00'
        calendar = 'standard'
        var_time = ds.createVariable('time', float, dimensions=(dim_time._name,))
        var_time.units = units
        var_time.calendar = calendar
        var_time.climatology = 'climatology_bounds'
        var_time[:] = nc.date2num(dts, units, calendar=calendar)
        var_cbounds = ds.createVariable('climatology_bounds', float,
                                        dimensions=(dim_time._name, dim_bounds._name))
        var_cbounds[:] = nc.date2num(dts_bounds, units, calendar=calendar)

        var_tas = ds.createVariable('tas', float,
                                    dimensions=(dim_time._name, dim_lat._name, dim_lon._name))
        var_tas[:] = np.random.rand(3, 2, 2)
    finally:
        ds.close()

    rd = ocgis.RequestDataset(path, 'tas')
    ods = rd.get()
    self.assertNotEqual(ods.temporal.bounds, None)

    rd = ocgis.RequestDataset(path, 'tas', time_region={'month': [8]})
    ret = ocgis.OcgOperations(dataset=rd).execute()
    field = ret.get_element()
    # field = ret[1]['tas']
    self.assertEqual(field.temporal.bounds.shape, (1, 2))
    self.assertEqual(field.temporal.get_value().shape, (1,))
def test_get_field_write_target(self):
    p1 = 'Polygon ((-116.94238466549290933 52.12861711455555991, -82.00526805089285176 61.59075286434307372, ' \
         '-59.92695130138864101 31.0207758265680269, -107.72286778108455962 22.0438778075388484, ' \
         '-122.76523743459291893 37.08624746104720771, -116.94238466549290933 52.12861711455555991))'
    p2 = 'Polygon ((-63.08099655131782413 21.31602121140134898, -42.70101185946779765 9.42769680782217279, ' \
         '-65.99242293586783603 9.912934538580501, -63.08099655131782413 21.31602121140134898))'
    p1 = wkt.loads(p1)
    p2 = wkt.loads(p2)

    mp1 = MultiPolygon([p1, p2])
    mp2 = mp1.buffer(0.1)
    geoms = [mp1, mp2]
    gvar = GeometryVariable(name='gc', value=geoms, dimensions='elementCount')
    gc = gvar.convert_to(node_dim_name='n_node')

    field = gc.parent
    self.assertEqual(field.grid.node_dim.name, 'n_node')
    actual = DriverESMFUnstruct._get_field_write_target_(field)
    self.assertEqual(field.grid.node_dim.name, 'n_node')
    self.assertNotEqual(id(field), id(actual))
    self.assertEqual(actual['numElementConn'].dtype, np.int32)
    self.assertEqual(actual['elementConn'].dtype, np.int32)
    self.assertNotIn(field.grid.cindex.name, actual)
    self.assertEqual(actual['nodeCoords'].dimensions[0].name, 'nodeCount')

    path = self.get_temporary_file_path('foo.nc')
    actual.write(path)

    # Optional test for loading the mesh file if ESMF is available.
    try:
        import ESMF
    except ImportError:
        pass
    else:
        _ = ESMF.Mesh(filename=path, filetype=ESMF.FileFormat.ESMFMESH)

    path2 = self.get_temporary_file_path('foo2.nc')
    driver = DriverKey.NETCDF_ESMF_UNSTRUCT
    field.write(path2, driver=driver)

    # Test the polygons are equivalent when read from the ESMF unstructured file.
    rd = ocgis.RequestDataset(path2, driver=driver)
    self.assertEqual(rd.driver.key, driver)
    efield = rd.get()
    self.assertEqual(efield.driver.key, driver)
    grid_actual = efield.grid
    self.assertEqual(efield.driver.key, driver)
    self.assertEqual(grid_actual.parent.driver.key, driver)
    self.assertEqual(grid_actual.x.ndim, 1)

    for g in grid_actual.archetype.iter_geometries():
        self.assertPolygonSimilar(g[1], geoms[g[0]])

    ngv = grid_actual.archetype.convert_to()
    self.assertIsInstance(ngv, GeometryVariable)
def overlay():
    dir_output = '/home/local/WX/ben.koziol/Dropbox/nesii/conference/FOSS4G_2013/figures/cmip_overlay'
    prefix = 'cmip_grid'
    ## write cmip grid
    tas = ocgis.RequestDataset(uri='tasmax_day_CanCM4_decadal2010_r2i1p1_20110101-20201231.nc',
                               variable='tasmax')
    ops = ocgis.OcgOperations(dataset=tas, snippet=True, output_format='shp',
                              dir_output=dir_output, prefix=prefix)
    ret = ops.execute()
def _handler(self, request, response):
    from rpy2 import robjects
    from rpy2.robjects.packages import importr
    import os
    import datetime as dt

    tic = dt.datetime.now()
    init_process_logger('log.txt')
    response.outputs['output_log'].file = 'log.txt'

    LOGGER.info('Start process')
    response.update_status('Execution started at : {}'.format(tic), 1)

    ######################################
    # Read inputs
    ######################################
    try:
        obs = archiveextract(
            resource=rename_complexinputs(request.inputs['obs']))
        ref = archiveextract(
            resource=rename_complexinputs(request.inputs['ref']))
        fut = archiveextract(
            resource=rename_complexinputs(request.inputs['fut']))
        # dedrizzle = request.inputs['dedrizzle'][0].data
        # norm = request.inputs['norm'][0].data
    except Exception as e:
        msg = 'Failed to read input parameter {}'.format(e)
        msg += "obs: " + request.inputs['obs']
        msg += "ref: " + request.inputs['ref']
        msg += "fut: " + request.inputs['fut']
        LOGGER.error(msg)
        raise Exception(msg)

    response.update_status('Input parameters ingested', 2)

    rp, ext = os.path.splitext(ref[0])
    ref_out = rp + '_kddm-bc' + ext

    fp, ext = os.path.splitext(fut[0])
    fut_out = fp + '_kddm-bc' + ext

    # Assuming all files share the same variable.
    rd = ocgis.RequestDataset(ref)
    varname = rd.variable

    # Calling the R code
    Rsrc = os.path.join(flyingpigeon.config.Rsrc_dir(), 'bc.kddm.R')
    devtools = importr("devtools")

    rfunc = robjects.r(open(Rsrc).read())
    rfunc(varname, obs[0], ref[0], fut[0], ref_out, fut_out, False)

    response.outputs['output_netcdf_ref'].file = ref_out
    response.outputs['output_netcdf_fut'].file = fut_out

    return response
def test_months_in_units_convert_to_netcdf(self):
    uri = self.test_data.get_uri('clt_month_units')
    variable = 'clt'
    rd = ocgis.RequestDataset(uri=uri, variable=variable)
    # subset the clt dataset by the state of nevada and write to netcdf
    ops = ocgis.OcgOperations(dataset=rd, output_format='nc', geom='state_boundaries',
                              select_ugid=[23])
    ret = ops.execute()
    rd2 = ocgis.RequestDataset(uri=ret, variable=variable)
    field = rd.get()
    field2 = rd2.get()
    # confirm raw values and datetime values are equivalent
    self.assertNumpyAll(field.temporal.value_datetime, field2.temporal.value_datetime)
    self.assertNumpyAll(field.temporal.get_value(), field2.temporal.get_value())
def monthly_clim(ncfile):
    import ocgis
    rd = ocgis.RequestDataset(ncfile)
    ops = ocgis.OcgOperations(rd,
                              calc=[{'func': 'mean', 'name': 'monthly_mean'}],
                              calc_grouping=['month'])
    sc = ops.execute()
    return sc.get_element()['monthly_mean'].get_value()
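# A hedged usage sketch, not from the original snippet: compute per-month mean values for a
# placeholder NetCDF file. The result is the raw array of means grouped by month, as returned by
# the function above.
clim = monthly_clim('tas_example.nc')
print(clim.shape)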
def extract_glacier_by_ugid(glacier, ugid, uri, shape_file, variable, metadata, epsg=None):
    """
    Extract glacier using OCGIS
    """
    ocgis.env.OVERWRITE = True

    output_dir = metadata["output_dir"]
    output_format = metadata["output_format"]
    output_format_options = metadata["output_format_options"]
    prefix = metadata["prefix_string"]
    time_range = metadata["time_range"]

    logger.info("Extracting glacier {} with UGID {}".format(glacier, ugid))
    if epsg:
        crs = ocgis.variable.crs.CoordinateReferenceSystem(epsg=epsg)
        rd = ocgis.RequestDataset(
            uri=uri,
            variable=variable,
            crs=crs,
        )
    else:
        rd = ocgis.RequestDataset(uri=uri, variable=variable)
    ops = ocgis.OcgOperations(
        dataset=rd,
        time_range=time_range,
        geom=shape_file,
        snippet=False,
        select_ugid=[ugid],
        output_format=output_format,
        output_format_options=output_format_options,
        prefix=prefix,
        dir_output=output_dir,
    )
    ret = ops.execute()
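# A hedged usage sketch showing the metadata keys the function above expects. All values below are
# placeholders added for illustration, not settings from the original workflow.
example_metadata = {
    "output_dir": "./output",
    "output_format": "nc",
    "output_format_options": None,
    "prefix_string": "glacier_subset",
    "time_range": None,
}
extract_glacier_by_ugid("ExampleGlacier", 1, "example_input.nc", "glaciers.shp", "thk",
                        example_metadata, epsg=3413)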
def calc_mean(dataset):
    tic = time.time()
    rd = ocgis.RequestDataset(dataset)
    ops = ocgis.OcgOperations(
        dataset=rd,
        calc=[{'func': 'mean', 'name': 'mean'}],
        calc_grouping=['month'],
        prefix='mean',
        dir_output='.',
        output_format='nc')
    output = ops.execute()
    # notify about completion
    tac = time.time()
    msg = "Completion time [#{}/{}]: {} seconds\n".format(ocgis.vm.rank, ocgis.vm.size, tac - tic)
    print(msg)
    with open(os.path.join(MODULE_PATH, 'mean.log'), 'a') as fp:
        fp.write(msg)
        if ocgis.vm.rank == 0:
            tasmax = ocgis.RequestDataset(output).get_field()['mean']
            fp.write('Number of values: {}\n'.format(len(tasmax.get_value())))
    return output
def get_ref(month, year):
    rd = ocgis.RequestDataset(uri=os.path.join(env.DIR_OUTPUT, self.fn),
                              variable=self.var,
                              time_region={'month': month, 'year': year})
    ops = ocgis.OcgOperations(dataset=rd)
    ret = ops.execute()
    ref = ret[1].variables['foo'].temporal.value
    return ref