import os


def concatenate_and_write(datasets, out_dir, group_name, request_id=None):
    # keep track of data not dimensioned along obs, accumulated across
    # every input dataset (13025 AC2)
    non_obs_data = set()
    for ds in datasets:
        non_obs_data.update(var for var in ds.data_vars if 'obs' not in ds[var].dims)

    # compile_datasets concatenates all data along the obs dimension
    ds = compile_datasets(datasets)

    # remove the obs dimension from the non-obs data: keep the first copy of
    # each duplicated variable along with its attributes (13025 AC2)
    for non_obs in non_obs_data:
        ds[non_obs] = (ds[non_obs].dims[1:], ds[non_obs].values[0], ds[non_obs].attrs)

    add_dynamic_attributes(ds)
    write_netcdf(ds, os.path.join(out_dir, get_name(ds, group_name)))
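
# A minimal sketch (toy data; variable and dimension names assumed, not from
# the source) of why the dims[1:] / values[0] trick above works: concatenating
# along a new 'obs' dimension gives every variable a leading obs axis, so a
# per-deployment constant ends up duplicated once per input dataset. Stripping
# index 0 restores its original shape while keeping its attributes.
import numpy as np
import xarray as xr

a = xr.Dataset({'cal_coeff': (('coeff',), np.array([1.0, 2.0]))})
b = xr.Dataset({'cal_coeff': (('coeff',), np.array([1.0, 2.0]))})

combined = xr.concat([a, b], dim='obs')
assert combined['cal_coeff'].dims == ('obs', 'coeff')

var = combined['cal_coeff']
combined['cal_coeff'] = (var.dims[1:], var.values[0], var.attrs)
assert combined['cal_coeff'].dims == ('coeff',)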
def _create_files(self, base_path):
    file_paths = []
    for stream_key, stream_dataset in self.stream_request.datasets.iteritems():
        for deployment, ds in stream_dataset.datasets.iteritems():
            add_dynamic_attributes(ds)
            start = ds.attrs['time_coverage_start'].translate(None, '-:')
            end = ds.attrs['time_coverage_end'].translate(None, '-:')

            # provenance types will be written to JSON files
            prov_fname = 'deployment%04d_%s_provenance_%s-%s.json' % (
                deployment, stream_key.as_dashed_refdes(), start, end)
            prov_json = os.path.join(base_path, prov_fname)
            file_paths.append(prov_json)
            stream_dataset.provenance_metadata.dump_json(prov_json)

            file_name = 'deployment%04d_%s_%s-%s.nc' % (
                deployment, stream_key.as_dashed_refdes(), start, end)
            file_path = os.path.join(base_path, file_name)
            ds = rename_glider_lat_lon(stream_key, ds)
            write_netcdf(ds, file_path, classic=self.classic)
            file_paths.append(file_path)
    return file_paths
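
# A hedged illustration (Python 2, matching the str.translate(None, ...) usage
# above; the dashed reference designator string is made up) of the file name
# _create_files builds for deployment 4:
start = '2016-01-01T00:00:00.000Z'.translate(None, '-:')
end = '2016-02-01T00:00:00.000Z'.translate(None, '-:')
refdes = 'CE05MOAS-GL319-01-PARADM000-telemetered-parad_m_glider_instrument'  # assumed
fname = 'deployment%04d_%s_%s-%s.nc' % (4, refdes, start, end)
assert fname == ('deployment0004_CE05MOAS-GL319-01-PARADM000-telemetered-'
                 'parad_m_glider_instrument_20160101T000000.000Z-'
                 '20160201T000000.000Z.nc')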
def concatenate_and_write(datasets, out_dir, group_name, request_id=None):
    ds = compile_datasets(datasets)
    add_dynamic_attributes(ds)
    write_netcdf(ds, os.path.join(out_dir, get_name(ds, group_name)))
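
# Hypothetical usage sketch (the output path, group name and per-deployment
# dataset list are assumed): everything in the list is concatenated along obs
# and written as a single NetCDF file whose name comes from get_name().
concatenate_and_write(per_deployment_datasets, '/tmp/agg_out', 'some_group')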
def _create_files(self, base_path):
    file_paths = []

    # annotation data will be written to a JSON file
    if self.stream_request.include_annotations:
        time_range_string = str(self.stream_request.time_range).replace(' ', '')
        anno_fname = 'annotations_%s.json' % time_range_string
        anno_json = os.path.join(base_path, anno_fname)
        file_paths.append(anno_json)
        self.stream_request.annotation_store.dump_json(anno_json)

    for stream_key, stream_dataset in self.stream_request.datasets.iteritems():
        for deployment, ds in stream_dataset.datasets.iteritems():
            add_dynamic_attributes(ds)
            start = ds.attrs['time_coverage_start'].translate(None, '-:')
            end = ds.attrs['time_coverage_end'].translate(None, '-:')

            # provenance types will be written to JSON files
            if self.stream_request.include_provenance:
                prov_fname = 'deployment%04d_%s_provenance_%s-%s.json' % (
                    deployment, stream_key.as_dashed_refdes(), start, end)
                prov_json = os.path.join(base_path, prov_fname)
                file_paths.append(prov_json)
                stream_dataset.provenance_metadata.dump_json(prov_json)

            file_name = 'deployment%04d_%s_%s-%s.nc' % (
                deployment, stream_key.as_dashed_refdes(), start, end)
            file_path = os.path.join(base_path, file_name)
            ds = rename_glider_lat_lon(stream_key, ds)

            # include all directly requested parameters
            params_to_include = [p.name for p in self.stream_request.requested_parameters]

            # also include any indirectly derived pressure parameter (9328)
            pressure_params = [(sk, param) for sk in self.stream_request.external_includes
                               for param in self.stream_request.external_includes[sk]
                               if param.data_product_identifier == PRESSURE_DPI]
            if pressure_params:
                params_to_include.append(INT_PRESSURE_NAME)

            # include all external parameters associated with the directly
            # requested parameters (12886)
            for external_stream_key in self.stream_request.external_includes:
                for parameter in self.stream_request.external_includes[external_stream_key]:
                    params_to_include.append(parameter.name)
                    long_parameter_name = external_stream_key.stream_name + '-' + parameter.name
                    if long_parameter_name in ds:
                        # rename the parameter without the stream_name prefix (12544 AC1)
                        ds = ds.rename({long_parameter_name: parameter.name})
                        # record the instrument and stream (12544 AC2)
                        ds[parameter.name].attrs['instrument'] = \
                            external_stream_key.as_three_part_refdes()
                        ds[parameter.name].attrs['stream'] = external_stream_key.stream_name

            # associate variables with their contributors (12544 AC3)
            for requested_parameter in self.stream_request.requested_parameters:
                if requested_parameter.needs and requested_parameter.name in ds:
                    for k, need_list in requested_parameter.needs:
                        for need in need_list:
                            if need.name in params_to_include:
                                attrs = ds[requested_parameter.name].attrs
                                if 'ancillary_variables' in attrs:
                                    attrs['ancillary_variables'] += ',' + need.name
                                else:
                                    attrs['ancillary_variables'] = need.name
                                break

            # set up coordinate variables (10745)
            ds = self._setup_coordinate_variables(ds)

            if params_to_include:
                ds = self._filter_params(ds, params_to_include)

            write_netcdf(ds, file_path, classic=self.classic)
            file_paths.append(file_path)

    return file_paths
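
# A minimal sketch (toy names; the refdes and stream are invented) of the
# external-parameter handling above: a variable stored under the long
# '<stream_name>-<parameter>' key is renamed to the bare parameter name
# (12544 AC1) and tagged with its originating instrument and stream (12544 AC2).
import numpy as np
import xarray as xr

ds = xr.Dataset({'ctdpf_ckl_wfp_instrument-int_ctd_pressure': (('obs',), np.zeros(3))})
ds = ds.rename({'ctdpf_ckl_wfp_instrument-int_ctd_pressure': 'int_ctd_pressure'})
ds['int_ctd_pressure'].attrs['instrument'] = 'CE09OSPM-WFP01-03-CTDPFK000'  # assumed
ds['int_ctd_pressure'].attrs['stream'] = 'ctdpf_ckl_wfp_instrument'
assert 'int_ctd_pressure' in ds.data_vars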