def make_rtree(self):
    p = rtree.index.Property()
    p.overwrite = True
    p.storage = rtree.index.RT_Disk
    p.dimension = 2  # Property attribute is lowercase 'dimension'

    with self.dataset() as nc:
        sg = from_nc_dataset(nc)

        class FastRtree(rtree.Rtree):
            def dumps(self, obj):
                # Use the faster C pickler on Python 2; fall back to the
                # default implementation elsewhere.
                try:
                    import cPickle
                    return cPickle.dumps(obj, -1)
                except ImportError:
                    return super(FastRtree, self).dumps(obj)

        def rtree_generator_function():
            c = 0
            for i, axis in enumerate(sg.centers):
                for j, (x, y) in enumerate(axis):
                    # Use a running counter as the id; (i + j) collides for
                    # different (i, j) pairs.
                    yield (c, (x, y, x, y), (i, j))
                    c += 1

        logger.info("Building Faces (centers) Rtree Topology Cache for {0}".format(self.name))
        _, temp_file = tempfile.mkstemp(suffix='.face')
        start = time.time()
        FastRtree(temp_file,
                  rtree_generator_function(),
                  properties=p,
                  overwrite=True,
                  interleaved=True)
        logger.info("Built Faces (centers) Rtree Topology Cache in {0} seconds.".format(time.time() - start))
        shutil.move('{}.dat'.format(temp_file), self.face_tree_data_file)
        shutil.move('{}.idx'.format(temp_file), self.face_tree_index_file)
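# Example (assumption, not part of the source): once the '.dat'/'.idx' pair
# has been moved into place, the cache can be opened read-only and queried
# for the grid cell nearest a point. `tree_basename` is assumed to be the
# cache path *without* its '.dat'/'.idx' extension, and the index must have
# been built with (i, j) grid indices as the stored objects, as above.
import rtree

def nearest_grid_cell(tree_basename, x, y):
    idx = rtree.index.Index(tree_basename)
    # objects='raw' yields the stored payloads directly: the (i, j) tuples.
    hits = list(idx.nearest((x, y, x, y), 1, objects='raw'))
    idx.close()
    return hits[0] if hits else None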
def make_rtree(self):
    with self.dataset() as nc:
        with self.topology() as topo:
            lon = topo.get_variables_by_attributes(standard_name="longitude")[0]
            lat = topo.get_variables_by_attributes(standard_name="latitude")[0]

            def rtree_generator_function():
                c = -1
                for row in range(lon.shape[0]):
                    for col in range(lon.shape[1]):
                        coord = (lon[row, col], lat[row, col], lon[row, col], lat[row, col])
                        c += 1
                        yield (c, coord, (col, row))

            logger.info("Building Faces (centers) Rtree Topology Cache for {0}".format(self.name))
            _, temp_file = tempfile.mkstemp(suffix=".face")
            start = time.time()
            p = index.Property()
            p.filename = str(temp_file)
            p.overwrite = True
            p.storage = index.RT_Disk
            p.dimension = 2
            # p.filename is already a str on Python 3; calling .decode() on
            # it would raise AttributeError.
            idx = index.Index(p.filename,
                              rtree_generator_function(),
                              properties=p,
                              overwrite=True,
                              interleaved=True)
            # Close to flush the index to disk before moving the files.
            idx.close()
            logger.info("Built Faces Rtree Topology Cache in {0} seconds.".format(time.time() - start))
            shutil.move("{}.dat".format(temp_file), self.face_tree_data_file)
            shutil.move("{}.idx".format(temp_file), self.face_tree_index_file)
def make_rtree(self):
    with self.dataset() as nc:
        sg = from_nc_dataset(nc)

        def rtree_generator_function():
            c = 0
            for i, axis in enumerate(sg.centers):
                for j, (x, y) in enumerate(axis):
                    c += 1
                    yield (c, (x, y, x, y), (i, j))

        logger.info("Building Faces (centers) Rtree Topology Cache for {0}".format(self.name))
        _, temp_file = tempfile.mkstemp(suffix='.face')
        start = time.time()
        p = index.Property()
        p.filename = str(temp_file)
        p.overwrite = True
        p.storage = index.RT_Disk
        p.dimension = 2
        # p.filename is already a str; no .decode() needed on Python 3.
        idx = index.Index(p.filename,
                          rtree_generator_function(),
                          properties=p,
                          overwrite=True,
                          interleaved=True)
        # Close to flush the index to disk before moving the files.
        idx.close()
        logger.info("Built Faces (centers) Rtree Topology Cache in {0} seconds.".format(time.time() - start))
        shutil.move('{}.dat'.format(temp_file), self.face_tree_data_file)
        shutil.move('{}.idx'.format(temp_file), self.face_tree_index_file)
def ready(self):
    # Initialize signals
    import wms.signals

    Dataset = self.get_model('Dataset')
    if settings.TESTING or settings.DEBUG:
        logger.info("Not updating datasets due to TESTING or DEBUG setting being True")
    else:
        try:
            for d in Dataset.objects.all():
                try:
                    # Update first so the 'successful' log is accurate.
                    if not d.has_cache():
                        d.update_cache()
                        logger.info('Creating {} successful'.format(d.name))
                    else:
                        d.update_cache()
                        logger.info('Updating {} successful'.format(d.name))
                except NotImplementedError:
                    logger.info('Updating {} failed. Dataset type not implemented.'.format(d.name))
                except BaseException as e:
                    logger.info('Updating {} failed. {}.'.format(d.name, str(e)))
        except (ProgrammingError, OperationalError):
            pass
def update_time_cache(self):
    with self.dataset() as nc:
        if nc is None:
            logger.error("Failed update_time_cache, could not load dataset "
                         "as a netCDF4 object")
            return

        time_cache = {}
        layer_cache = {}
        time_vars = nc.get_variables_by_attributes(standard_name='time')
        for time_var in time_vars:
            time_cache[time_var.name] = nc4.num2date(
                time_var[:],
                time_var.units,
                getattr(time_var, 'calendar', 'standard')
            )

        for ly in self.all_layers():
            try:
                layer_cache[ly.access_name] = find_appropriate_time(nc.variables[ly.access_name], time_vars)
            except ValueError:
                layer_cache[ly.access_name] = None

        full_cache = {'times': time_cache, 'layers': layer_cache}
        logger.info("Built time cache for {0}".format(self.name))
        caches['time'].set(self.time_cache_file, full_cache, None)
        return full_cache
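# Example (assumption, not part of the source): the cache above is written
# through Django's cache framework with a `None` timeout, so it never
# expires. A minimal read-back sketch; `lookup_layer_times` is an
# illustrative helper name, and it assumes find_appropriate_time() returned
# the *name* of the matching time variable.
from django.core.cache import caches

def lookup_layer_times(dataset, access_name):
    full_cache = caches['time'].get(dataset.time_cache_file)
    if full_cache is None:
        # Cache miss: rebuild it.
        full_cache = dataset.update_time_cache()
    time_var_name = full_cache['layers'].get(access_name)
    if time_var_name is None:
        return None
    return full_cache['times'].get(time_var_name)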
# Inner wrapper of a simple timing decorator; the enclosing function (its
# name here is assumed) supplies `f`, the callable being timed.
def timer(f):
    def timed(*args, **kw):
        ts = time.time()
        result = f(*args, **kw)
        te = time.time()
        logger.info('func:{} took: {} sec'.format(f.__name__, te - ts))
        return result
    return timed
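# Hypothetical usage of the decorator above: any decorated callable logs its
# wall-clock runtime. `build_caches` is an illustrative name.
@timer
def build_caches(dataset):
    dataset.update_cache()
# build_caches(d) now logs e.g. "func:build_caches took: 12.3 sec"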
def test_sgrid_gfi_single_variable_csv(self):
    params = copy(self.gfi_params)
    r = self.do_test(params, fmt='csv')
    df = pd.read_csv(r, index_col='time')
    logger.info(df)
    # assert df['time'][0] == datetime(2015, 4, 30, 0, 0, 0)
    assert df['x'][0] == -71.6979
    assert df['y'][0] == 40.9888
    assert df['u'][0] == -0.0315
def test_sgrid_gfi_single_variable_csv_4326(self):
    params = copy(self.gfi_params)
    params['srs'] = 'EPSG:4326'
    params['bbox'] = '-73.125,39.90973623,-71.71875,40.97989807'
    r = self.do_test(params, fmt='csv')
    df = pd.read_csv(r, index_col='time')
    logger.info(df)
    # assert df['time'][0] == datetime(2015, 4, 30)
    assert df['x'][0] == -71.6979
    assert df['y'][0] == 40.9888
    assert df['u'][0] == -0.0315
def make_rtree(self):
    with self.dataset() as nc:
        ug = UGrid.from_nc_dataset(nc=nc)

        def rtree_faces_generator_function():
            for face_idx, node_list in enumerate(ug.faces):
                nodes = ug.nodes[node_list]
                xmin, ymin = np.min(nodes, 0)
                xmax, ymax = np.max(nodes, 0)
                yield (face_idx, (xmin, ymin, xmax, ymax), face_idx)

        logger.info("Building Faces Rtree Topology Cache for {0}".format(self.name))
        start = time.time()
        _, face_temp_file = tempfile.mkstemp(suffix='.face')
        pf = index.Property()
        pf.filename = str(face_temp_file)
        pf.overwrite = True
        pf.storage = index.RT_Disk
        pf.dimension = 2
        idx = index.Index(pf.filename,
                          rtree_faces_generator_function(),
                          properties=pf,
                          interleaved=True,
                          overwrite=True)
        idx.close()
        logger.info("Built Faces Rtree Topology Cache in {0} seconds.".format(time.time() - start))
        shutil.move('{}.dat'.format(face_temp_file), self.face_tree_data_file)
        shutil.move('{}.idx'.format(face_temp_file), self.face_tree_index_file)

        def rtree_nodes_generator_function():
            for node_index, (x, y) in enumerate(ug.nodes):
                yield (node_index, (x, y, x, y), node_index)

        logger.info("Building Nodes Rtree Topology Cache for {0}".format(self.name))
        start = time.time()
        _, node_temp_file = tempfile.mkstemp(suffix='.node')
        pn = index.Property()
        pn.filename = str(node_temp_file)
        pn.overwrite = True
        pn.storage = index.RT_Disk
        pn.dimension = 2
        idx = index.Index(pn.filename,
                          rtree_nodes_generator_function(),
                          properties=pn,
                          interleaved=True,
                          overwrite=True)
        idx.close()
        logger.info("Built Nodes Rtree Topology Cache in {0} seconds.".format(time.time() - start))
        shutil.move('{}.dat'.format(node_temp_file), self.node_tree_data_file)
        shutil.move('{}.idx'.format(node_temp_file), self.node_tree_index_file)
def make_rtree(self):
    p = rtree.index.Property()
    p.overwrite = True
    p.storage = rtree.index.RT_Disk
    p.dimension = 2  # Property attribute is lowercase 'dimension'

    with self.dataset() as nc:
        ug = UGrid.from_nc_dataset(nc=nc)

        class FastRtree(rtree.Rtree):
            def dumps(self, obj):
                # Use the faster C pickler on Python 2; fall back to the
                # default implementation elsewhere.
                try:
                    import cPickle
                    return cPickle.dumps(obj, -1)
                except ImportError:
                    return super(FastRtree, self).dumps(obj)

        def rtree_faces_generator_function():
            for face_idx, node_list in enumerate(ug.faces):
                nodes = ug.nodes[node_list]
                xmin, ymin = np.min(nodes, 0)
                xmax, ymax = np.max(nodes, 0)
                yield (face_idx, (xmin, ymin, xmax, ymax), face_idx)

        logger.info("Building Faces Rtree Topology Cache for {0}".format(self.name))
        _, face_temp_file = tempfile.mkstemp(suffix='.face')
        start = time.time()
        FastRtree(face_temp_file,
                  rtree_faces_generator_function(),
                  properties=p,
                  overwrite=True,
                  interleaved=True)
        logger.info("Built Faces Rtree Topology Cache in {0} seconds.".format(time.time() - start))
        shutil.move('{}.dat'.format(face_temp_file), self.face_tree_data_file)
        shutil.move('{}.idx'.format(face_temp_file), self.face_tree_index_file)

        def rtree_nodes_generator_function():
            for node_index, (x, y) in enumerate(ug.nodes):
                yield (node_index, (x, y, x, y), node_index)

        logger.info("Building Nodes Rtree Topology Cache for {0}".format(self.name))
        _, node_temp_file = tempfile.mkstemp(suffix='.node')
        start = time.time()
        FastRtree(node_temp_file,
                  rtree_nodes_generator_function(),
                  properties=p,
                  overwrite=True,
                  interleaved=True)
        logger.info("Built Nodes Rtree Topology Cache in {0} seconds.".format(time.time() - start))
        shutil.move('{}.dat'.format(node_temp_file), self.node_tree_data_file)
        shutil.move('{}.idx'.format(node_temp_file), self.node_tree_index_file)
def getfeatureinfo(self, layer, request):
    with self.dataset() as nc:
        with self.topology() as topo:
            data_obj = nc.variables[layer.access_name]
            data_location = data_obj.location
            # mesh_name = data_obj.mesh
            # Use local topology for pulling bounds data
            # ug = UGrid.from_ncfile(self.topology_file, mesh_name=mesh_name)

            geo_index, closest_x, closest_y, start_time_index, end_time_index, return_dates = self.setup_getfeatureinfo(topo, data_obj, request, location=data_location)
            logger.info("Start index: {}".format(start_time_index))
            logger.info("End index: {}".format(end_time_index))
            logger.info("Geo index: {}".format(geo_index))

            return_arrays = []
            z_value = None
            if isinstance(layer, Layer):
                if len(data_obj.shape) == 3:
                    z_index, z_value = self.nearest_z(layer, request.GET['elevation'])
                    data = data_obj[start_time_index:end_time_index, z_index, geo_index]
                elif len(data_obj.shape) == 2:
                    data = data_obj[start_time_index:end_time_index, geo_index]
                elif len(data_obj.shape) == 1:
                    data = data_obj[geo_index]
                else:
                    raise ValueError("Dimension Mismatch: data_obj.shape == {0} and time indexes = {1} to {2}".format(data_obj.shape, start_time_index, end_time_index))
                return_arrays.append((layer.var_name, data))

            elif isinstance(layer, VirtualLayer):
                # Data needs to be [var1, var2] where each var is 1D
                # (nodes only; elevation and time are already handled)
                for l in layer.layers:
                    data_obj = nc.variables[l.var_name]
                    if len(data_obj.shape) == 3:
                        z_index, z_value = self.nearest_z(layer, request.GET['elevation'])
                        data = data_obj[start_time_index:end_time_index, z_index, geo_index]
                    elif len(data_obj.shape) == 2:
                        data = data_obj[start_time_index:end_time_index, geo_index]
                    elif len(data_obj.shape) == 1:
                        data = data_obj[geo_index]
                    else:
                        raise ValueError("Dimension Mismatch: data_obj.shape == {0} and time indexes = {1} to {2}".format(data_obj.shape, start_time_index, end_time_index))
                    return_arrays.append((l.var_name, data))

            # Data is now in the return_arrays list, as a list of numpy arrays.
            # We need to add time and depth to them to create a single Pandas DataFrame.
            if len(data_obj.shape) == 3:
                df = pd.DataFrame({'time': return_dates, 'x': closest_x, 'y': closest_y, 'z': z_value})
            elif len(data_obj.shape) == 2:
                df = pd.DataFrame({'time': return_dates, 'x': closest_x, 'y': closest_y})
            elif len(data_obj.shape) == 1:
                df = pd.DataFrame({'x': closest_x, 'y': closest_y})
            else:
                df = pd.DataFrame()

            # Now add a column for each member of the return_arrays list
            for (var_name, np_array) in return_arrays:
                df.loc[:, var_name] = pd.Series(np_array, index=df.index)

            return gfi_handler.from_dataframe(request, df)
def ready(self):
    # Initialize signals
    import wms.signals

    Dataset = self.get_model('Dataset')
    if settings.TESTING or settings.DEBUG:
        logger.info("Not updating datasets due to TESTING or DEBUG setting being True")
    else:
        try:
            # Skip datasets whose caches were refreshed within the last
            # minute. timedelta takes 'minutes', not 'minute'.
            update_delta = timedelta(minutes=1)
            for d in Dataset.objects.all():
                try:
                    now = datetime.utcnow().replace(tzinfo=pytz.utc)
                    if not d.has_cache():
                        d.update_cache()
                        logger.info('Creating {} successful'.format(d.name))
                    elif d.cache_last_updated and (now - d.cache_last_updated) < update_delta:
                        logger.info('Updating {} skipped. It was just done!'.format(d.name))
                    else:
                        d.update_cache()
                        logger.info('Updating {} successful'.format(d.name))
                except NotImplementedError:
                    logger.info('Updating {} failed. Dataset type not implemented.'.format(d.name))
                except BaseException as e:
                    logger.info('Updating {} failed. {}.'.format(d.name, str(e)))
        except (ProgrammingError, OperationalError):
            pass
def update_cache(self, force=False):
    with self.dataset() as nc:
        ug = UGrid.from_nc_dataset(nc)
        ug.save_as_netcdf(self.topology_file)

        if not os.path.exists(self.topology_file):
            logger.error("Failed to create topology_file cache for Dataset '{}'".format(self.dataset))
            return

        uamp = nc.get_variables_by_attributes(standard_name='eastward_sea_water_velocity_amplitude')[0]
        vamp = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity_amplitude')[0]
        uphase = nc.get_variables_by_attributes(standard_name='eastward_sea_water_velocity_phase')[0]
        vphase = nc.get_variables_by_attributes(standard_name='northward_sea_water_velocity_phase')[0]
        tnames = nc.get_variables_by_attributes(standard_name='tide_constituent')[0]
        tfreqs = nc.get_variables_by_attributes(standard_name='tide_frequency')[0]

        with netCDF4.Dataset(self.topology_file, mode='a') as cnc:

            ntides = uamp.shape[uamp.dimensions.index('ntides')]
            nlocs = uamp.shape[uamp.dimensions.index(uamp.location)]
            cnc.createDimension('ntides', ntides)
            cnc.createDimension('maxStrlen64', 64)

            vdims = ('ntides', '{}_num_{}'.format(uamp.mesh, uamp.location))

            # Swap ntides to always be the first dimension... it can be the
            # second in the source files!
            transpose = False
            if uamp.shape[0] > uamp.shape[1]:
                logger.info("Found flipped dimensions in source file... fixing in local cache.")
                transpose = True

            # We are changing the variable names to 'u' and 'v' from 'u_amp'
            # and 'v_amp' so the layer.access_method can find the variable
            # from the virtual layer 'u,v'. chunksizes must be integers, so
            # use floor division (nlocs / 4 is a float on Python 3).
            chunks = [1, nlocs // 4]

            ua = cnc.createVariable('u', uamp.dtype, vdims, zlib=True, fill_value=uamp._FillValue, chunksizes=chunks)
            for x in uamp.ncattrs():
                if x != '_FillValue':
                    ua.setncattr(x, uamp.getncattr(x))

            va = cnc.createVariable('v', vamp.dtype, vdims, zlib=True, fill_value=vamp._FillValue, chunksizes=chunks)
            for x in vamp.ncattrs():
                if x != '_FillValue':
                    va.setncattr(x, vamp.getncattr(x))

            up = cnc.createVariable('u_phase', uphase.dtype, vdims, zlib=True, fill_value=uphase._FillValue, chunksizes=chunks)
            for x in uphase.ncattrs():
                if x != '_FillValue':
                    up.setncattr(x, uphase.getncattr(x))

            vp = cnc.createVariable('v_phase', vphase.dtype, vdims, zlib=True, fill_value=vphase._FillValue, chunksizes=chunks)
            for x in vphase.ncattrs():
                if x != '_FillValue':
                    vp.setncattr(x, vphase.getncattr(x))

            tc = cnc.createVariable('tidenames', tnames.dtype, tnames.dimensions)
            tc[:] = tnames[:]
            for x in tnames.ncattrs():
                if x != '_FillValue':
                    tc.setncattr(x, tnames.getncattr(x))

            tf = cnc.createVariable('tidefreqs', tfreqs.dtype, ('ntides',))
            tf[:] = tfreqs[:]
            for x in tfreqs.ncattrs():
                if x != '_FillValue':
                    tf.setncattr(x, tfreqs.getncattr(x))

            for r in range(ntides):
                logger.info("Saving ntide {} into cache".format(r))
                if transpose is True:
                    ua[r, :] = uamp[:, r].T
                    va[r, :] = vamp[:, r].T
                    up[r, :] = uphase[:, r].T
                    vp[r, :] = vphase[:, r].T
                else:
                    ua[r, :] = uamp[r, :]
                    va[r, :] = vamp[r, :]
                    up[r, :] = uphase[r, :]
                    vp[r, :] = vphase[r, :]

    # Now do the RTree index
    self.make_rtree()

    self.cache_last_updated = datetime.utcnow().replace(tzinfo=pytz.utc)
    self.save()
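# Example (assumption, not part of the source): the cached amplitude/phase
# pairs are enough to reconstruct a velocity series with the standard
# harmonic form u(t) = sum_k A_k * cos(w_k * t - phi_k). The phase-sign
# convention and units (rad/s frequencies, degree phases) are assumptions
# that must be matched to the source model.
import numpy as np

def harmonic_series(t_seconds, amps, freqs, phases_deg):
    phases = np.deg2rad(phases_deg)
    # Sum one cosine per tidal constituent.
    return sum(a * np.cos(w * t_seconds - p)
               for a, w, p in zip(amps, freqs, phases))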
def getmap(self, layer, request):
    time_index, time_value = self.nearest_time(layer, request.GET['time'])
    wgs84_bbox = request.GET['wgs84_bbox']

    with self.dataset() as nc:
        data_obj = nc.variables[layer.access_name]
        data_location = data_obj.location
        mesh_name = data_obj.mesh

        ug = UGrid.from_ncfile(self.topology_file, mesh_name=mesh_name)
        coords = np.empty(0)
        if data_location == 'node':
            coords = ug.nodes
        elif data_location == 'face':
            coords = ug.face_coordinates
        elif data_location == 'edge':
            coords = ug.edge_coordinates

        lon = coords[:, 0]
        lat = coords[:, 1]

        # Calculate any vector padding if we need to
        padding = None
        vector_step = request.GET['vectorstep']
        if request.GET['image_type'] == 'vectors':
            padding_factor = calc_safety_factor(request.GET['vectorscale'])
            padding = calc_lon_lat_padding(lon, lat, padding_factor) * vector_step

        # Calculate the boolean spatial mask to slice with
        bool_spatial_idx = data_handler.ugrid_lat_lon_subset_idx(lon, lat, bbox=wgs84_bbox.bbox, padding=padding)

        # Randomize vectors to subset if we need to
        if request.GET['image_type'] == 'vectors' and vector_step > 1:
            num_vec = int(bool_spatial_idx.size / vector_step)
            step = int(bool_spatial_idx.size / num_vec)
            # Index back into the original array; chained fancy indexing
            # (arr[np.where(...)][0::step] = ...) assigns into a copy and
            # silently does nothing.
            true_idx = np.where(bool_spatial_idx)[0]
            bool_spatial_idx[true_idx[0::step]] = False

        # If no triangles intersect the field of view, return a transparent tile
        if not np.any(bool_spatial_idx):
            logger.info("No triangles in field of view, returning empty tile.")
            return self.empty_response(layer, request)

        if isinstance(layer, Layer):
            if len(data_obj.shape) == 3:
                z_index, z_value = self.nearest_z(layer, request.GET['elevation'])
                data = data_obj[time_index, z_index, :]
            elif len(data_obj.shape) == 2:
                data = data_obj[time_index, :]
            elif len(data_obj.shape) == 1:
                data = data_obj[:]
            else:
                logger.debug("Dimension Mismatch: data_obj.shape == {0} and time = {1}".format(data_obj.shape, time_value))
                return self.empty_response(layer, request)

            if request.GET['image_type'] in ['pcolor', 'contours', 'filledcontours']:
                # Avoid triangles with nan values
                bool_spatial_idx[np.isnan(data)] = False

                # Get the faces to plot
                faces = ug.faces[:]
                face_idx = data_handler.face_idx_from_node_idx(faces, bool_spatial_idx)
                faces_subset = faces[face_idx]
                tri_subset = Tri.Triangulation(lon, lat, triangles=faces_subset)

                if request.GET['image_type'] == 'pcolor':
                    return mpl_handler.tripcolor_response(tri_subset, data, request, data_location=data_location)
                else:
                    return mpl_handler.tricontouring_response(tri_subset, data, request)
            elif request.GET['image_type'] in ['filledhatches', 'hatches']:
                raise NotImplementedError('matplotlib does not support hatching on triangular grids... sorry!')
            else:
                raise NotImplementedError('Image type "{}" is not supported.'.format(request.GET['image_type']))

        elif isinstance(layer, VirtualLayer):
            # Data needs to be [var1, var2] where each var is 1D
            # (nodes only; elevation and time are already handled)
            data = []
            for l in layer.layers:
                data_obj = nc.variables[l.var_name]
                if len(data_obj.shape) == 3:
                    z_index, z_value = self.nearest_z(layer, request.GET['elevation'])
                    data.append(data_obj[time_index, z_index, bool_spatial_idx])
                elif len(data_obj.shape) == 2:
                    data.append(data_obj[time_index, bool_spatial_idx])
                elif len(data_obj.shape) == 1:
                    data.append(data_obj[bool_spatial_idx])
                else:
                    logger.debug("Dimension Mismatch: data_obj.shape == {0} and time = {1}".format(data_obj.shape, time_value))
                    return self.empty_response(layer, request)

            if request.GET['image_type'] == 'vectors':
                return mpl_handler.quiver_response(lon[bool_spatial_idx], lat[bool_spatial_idx], data[0], data[1], request)
            else:
                raise NotImplementedError('Image type "{}" is not supported.'.format(request.GET['image_type']))