def add_granule(self, stream_id, rdt):
    '''
    Appends the granule's data to the coverage and persists it.

    :param stream_id: identifier of the stream the granule arrived on
    :param rdt: record-dictionary object holding the granule's parameter values
    :raises CorruptionError: if the backing coverage cannot be opened
    '''
    if stream_id in self._bad_coverages:
        log.info('Message attempting to be inserted into bad coverage: %s',
                 DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))

    #--------------------------------------------------------------------------------
    # Coverage determination and appending
    #--------------------------------------------------------------------------------
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error('No dataset could be determined on this stream: %s', stream_id)
        return

    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        # str(e) instead of the deprecated (and Py3-removed) e.message attribute
        raise CorruptionError(str(e))

    if not coverage:
        log.error('Could not persist coverage from granule, coverage is None')
        return

    #--------------------------------------------------------------------------------
    # Actual persistence
    #--------------------------------------------------------------------------------
    if rdt[rdt.temporal_parameter] is None:
        # No temporal axis means there are no samples to persist
        log.warning("Empty granule received")
        return

    # Parse the RDT and set the values in the coverage
    self.insert_values(coverage, rdt, stream_id)

    # Force the data to be flushed
    DatasetManagementService._save_coverage(coverage)

    self.update_metadata(dataset_id, rdt)

    # First and last timestamps bound the window of newly-written data;
    # fall back to None when the temporal array is empty or not indexable.
    try:
        window = rdt[rdt.temporal_parameter][[0, -1]]
        window = window.tolist()
    except (ValueError, IndexError):
        window = None
    self.dataset_changed(dataset_id, window)
def add_granule(self, stream_id, rdt):
    """Append the granule's data to the coverage and persist it."""
    if stream_id in self._bad_coverages:
        bad_path = DatasetManagementService._get_coverage_path(self.get_dataset(stream_id))
        log.info('Message attempting to be inserted into bad coverage: %s', bad_path)

    # Resolve which dataset this stream feeds; nothing to do if unknown.
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error('No dataset could be determined on this stream: %s', stream_id)
        return

    # Open the coverage; an I/O failure is treated as corruption.
    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        raise CorruptionError(e.message)

    if not coverage:
        log.error('Could not persist coverage from granule, coverage is None')
        return

    # A granule without a temporal axis carries nothing persistable.
    if rdt[rdt.temporal_parameter] is None:
        log.warning("Empty granule received")
        return

    # Write the values, flush the coverage, then refresh metadata.
    self.insert_values(coverage, rdt, stream_id)
    DatasetManagementService._save_coverage(coverage)
    self.update_metadata(dataset_id, rdt)

    # Report the [first, last] timestamp window of the new data, if any.
    try:
        window = rdt[rdt.temporal_parameter][[0, -1]].tolist()
    except (ValueError, IndexError):
        window = None
    self.dataset_changed(dataset_id, window)
def add_granule(self, stream_id, rdt):
    '''
    Appends the granule's data to the coverage and persists it.

    Runs gap analysis on the connection bookkeeping, expands the coverage
    by the granule's element count, writes the values, then notifies
    listeners and updates the connection index.

    :param stream_id: identifier of the stream the granule arrived on
    :param rdt: record-dictionary object holding the granule's parameter values
    :raises CorruptionError: if the backing coverage cannot be opened
    '''
    debugging = log.isEnabledFor(DEBUG)
    timer = Timer() if debugging else None
    if stream_id in self._bad_coverages:
        log.info('Message attempting to be inserted into bad coverage: %s',
                 DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))

    #--------------------------------------------------------------------------------
    # Gap Analysis
    #--------------------------------------------------------------------------------
    gap_found = self.has_gap(rdt.connection_id, rdt.connection_index)
    if gap_found:
        log.error('Gap Found! New connection: (%s,%s)\tOld Connection: (%s,%s)',
                  rdt.connection_id, rdt.connection_index,
                  self.connection_id, self.connection_index)
        self.gap_coverage(stream_id)

    #--------------------------------------------------------------------------------
    # Coverage determination and appending
    #--------------------------------------------------------------------------------
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error('No dataset could be determined on this stream: %s', stream_id)
        return

    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        # str(e) instead of the deprecated (and Py3-removed) e.message attribute
        raise CorruptionError(str(e))

    if debugging:
        path = DatasetManagementService._get_coverage_path(dataset_id)
        log.debug('%s: add_granule stream %s dataset %s coverage %r file %s',
                  self._id, stream_id, dataset_id, coverage, path)

    if not coverage:
        log.error('Could not persist coverage from granule, coverage is None')
        return

    #--------------------------------------------------------------------------------
    # Actual persistence
    #--------------------------------------------------------------------------------
    elements = len(rdt)

    # Sparse (constant-valued) parameters are written before expansion
    self.insert_sparse_values(coverage, rdt, stream_id)
    if debugging:
        timer.complete_step('checks')  # lightweight ops, should be zero
    self.expand_coverage(coverage, elements, stream_id)
    if debugging:
        timer.complete_step('insert')
    self.insert_values(coverage, rdt, stream_id)
    if debugging:
        timer.complete_step('keys')
    DatasetManagementService._save_coverage(coverage)
    if debugging:
        timer.complete_step('save')
    start_index = coverage.num_timesteps - elements
    self.dataset_changed(dataset_id, coverage.num_timesteps,
                         (start_index, start_index + elements))
    # A detected gap requires splicing the replacement coverage back in
    if gap_found:
        self.splice_coverage(dataset_id, coverage)
    self.evaluate_qc(rdt, dataset_id)
    if debugging:
        timer.complete_step('notify')
        self._add_timing_stats(timer)
    self.update_connection_index(rdt.connection_id, rdt.connection_index)
def add_granule(self, stream_id, granule):
    """
    Appends the granule's data to the coverage and persists it.

    :param stream_id: identifier of the stream the granule arrived on
    :param granule: record-dictionary object holding the granule's parameter values
    :raises CorruptionError: if the coverage cannot be opened or written;
        the coverage is closed and the stream marked bad before raising
    """
    if stream_id in self._bad_coverages:
        # Lazy %-args instead of eager string interpolation
        log.info("Message attempting to be inserted into bad coverage: %s",
                 DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))

    # --------------------------------------------------------------------------------
    # Coverage determination and appending
    # --------------------------------------------------------------------------------
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error("No dataset could be determined on this stream: %s", stream_id)
        return

    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        # str(e) instead of the deprecated (and Py3-removed) e.message attribute
        raise CorruptionError(str(e))

    if not coverage:
        log.error("Could not persist coverage from granule, coverage is None")
        return

    # --------------------------------------------------------------------------------
    # Actual persistence
    # --------------------------------------------------------------------------------
    rdt = granule
    elements = len(rdt)
    if not elements:
        # Nothing to write for an empty granule
        return

    try:
        coverage.insert_timesteps(elements, oob=False)
    except IOError as e:
        log.error("Couldn't insert time steps for coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        log.exception("IOError")
        # Mark the stream bad even if close() itself fails
        try:
            coverage.close()
        finally:
            self._bad_coverages[stream_id] = 1
        raise CorruptionError(str(e))

    start_index = coverage.num_timesteps - elements
    # Loop-invariant: every parameter is written into the same tail slice
    slice_ = slice(start_index, None)
    for k, v in rdt.iteritems():
        try:
            coverage.set_parameter_values(param_name=k, tdoa=slice_, value=v)
        except IOError as e:
            log.error("Couldn't insert values for coverage: %s",
                      DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
            log.exception("IOError")
            try:
                coverage.close()
            finally:
                self._bad_coverages[stream_id] = 1
            raise CorruptionError(str(e))

    DatasetManagementService._save_coverage(coverage)  # coverage.flush()
    self.dataset_changed(dataset_id, coverage.num_timesteps,
                         (start_index, start_index + elements))
def add_granule(self, stream_id, rdt):
    '''
    Appends the granule's data to the coverage and persists it.

    Optionally runs gap analysis (unless self.ignore_gaps), expands the
    coverage, writes the values, then notifies listeners and updates the
    connection index.

    :param stream_id: identifier of the stream the granule arrived on
    :param rdt: record-dictionary object holding the granule's parameter values
    :raises CorruptionError: if the backing coverage cannot be opened
    '''
    debugging = log.isEnabledFor(DEBUG)
    timer = Timer() if debugging else None
    if stream_id in self._bad_coverages:
        log.info('Message attempting to be inserted into bad coverage: %s',
                 DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))

    #--------------------------------------------------------------------------------
    # Gap Analysis
    #--------------------------------------------------------------------------------
    # Explicit default: previously gap_found was only assigned when gap
    # analysis ran, and the later read relied on short-circuit evaluation.
    gap_found = False
    if not self.ignore_gaps:
        gap_found = self.has_gap(rdt.connection_id, rdt.connection_index)
        if gap_found:
            log.error('Gap Found!   New connection: (%s,%s)\tOld Connection: (%s,%s)',
                      rdt.connection_id, rdt.connection_index,
                      self.connection_id, self.connection_index)
            self.gap_coverage(stream_id)

    #--------------------------------------------------------------------------------
    # Coverage determination and appending
    #--------------------------------------------------------------------------------
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error('No dataset could be determined on this stream: %s', stream_id)
        return

    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        # str(e) instead of the deprecated (and Py3-removed) e.message attribute
        raise CorruptionError(str(e))

    if debugging:
        path = DatasetManagementService._get_coverage_path(dataset_id)
        log.debug('%s: add_granule stream %s dataset %s coverage %r file %s',
                  self._id, stream_id, dataset_id, coverage, path)

    if not coverage:
        log.error('Could not persist coverage from granule, coverage is None')
        return

    #--------------------------------------------------------------------------------
    # Actual persistence
    #--------------------------------------------------------------------------------
    elements = len(rdt)
    # No temporal axis means there are no timesteps to add
    if rdt[rdt.temporal_parameter] is None:
        elements = 0

    # Sparse (constant-valued) parameters are written before expansion
    self.insert_sparse_values(coverage, rdt, stream_id)
    if debugging:
        timer.complete_step('checks')  # lightweight ops, should be zero
    self.expand_coverage(coverage, elements, stream_id)
    if debugging:
        timer.complete_step('insert')
    self.insert_values(coverage, rdt, stream_id)
    if debugging:
        timer.complete_step('keys')
    DatasetManagementService._save_coverage(coverage)
    if debugging:
        timer.complete_step('save')
    start_index = coverage.num_timesteps - elements
    self.dataset_changed(dataset_id, coverage.num_timesteps,
                         (start_index, start_index + elements))
    # A detected gap requires splicing the replacement coverage back in
    if not self.ignore_gaps and gap_found:
        self.splice_coverage(dataset_id, coverage)
    self.evaluate_qc(rdt, dataset_id)
    if debugging:
        timer.complete_step('notify')
        self._add_timing_stats(timer)
    self.update_connection_index(rdt.connection_id, rdt.connection_index)
def add_granule(self, stream_id, rdt):
    '''
    Appends the granule's data to the coverage and persists it.

    Expands the coverage by the granule's element count, writes each
    parameter into the new tail slice, stamps an ingestion timestamp if
    the coverage defines one, then flushes and notifies listeners.

    :param stream_id: identifier of the stream the granule arrived on
    :param rdt: record-dictionary object holding the granule's parameter values
    :raises CorruptionError: if the coverage cannot be opened or written;
        the coverage is closed and the stream marked bad before raising
    '''
    debugging = log.isEnabledFor(DEBUG)
    timer = Timer() if debugging else None
    if stream_id in self._bad_coverages:
        log.info('Message attempting to be inserted into bad coverage: %s',
                 DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))

    #--------------------------------------------------------------------------------
    # Coverage determination and appending
    #--------------------------------------------------------------------------------
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error('No dataset could be determined on this stream: %s', stream_id)
        return

    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        # str(e) instead of the deprecated (and Py3-removed) e.message attribute
        raise CorruptionError(str(e))

    if debugging:
        path = DatasetManagementService._get_coverage_path(dataset_id)
        log.debug('%s: add_granule stream %s dataset %s coverage %r file %s',
                  self._id, stream_id, dataset_id, coverage, path)

    if not coverage:
        log.error('Could not persist coverage from granule, coverage is None')
        return

    #--------------------------------------------------------------------------------
    # Actual persistence
    #--------------------------------------------------------------------------------
    elements = len(rdt)
    if debugging:
        timer.complete_step('checks')  # lightweight ops, should be zero
    try:
        coverage.insert_timesteps(elements, oob=False)
    except IOError as e:
        log.error("Couldn't insert time steps for coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)),
                  exc_info=True)
        # Mark the stream bad even if close() itself fails
        try:
            coverage.close()
        finally:
            self._bad_coverages[stream_id] = 1
        raise CorruptionError(str(e))

    if debugging:
        timer.complete_step('insert')

    start_index = coverage.num_timesteps - elements
    # Defined before the loop: previously slice_ was only assigned inside the
    # loop body, so an empty RDT raised NameError at the ingestion_timestamp
    # write below. It is also loop-invariant.
    slice_ = slice(start_index, None)
    for k, v in rdt.iteritems():
        try:
            coverage.set_parameter_values(param_name=k, tdoa=slice_, value=v)
        except IOError as e:
            log.error("Couldn't insert values for coverage: %s",
                      DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)),
                      exc_info=True)
            try:
                coverage.close()
            finally:
                self._bad_coverages[stream_id] = 1
            raise CorruptionError(str(e))

    # Stamp the wall-clock ingestion time when the coverage tracks it
    if 'ingestion_timestamp' in coverage.list_parameters():
        t_now = time.time()
        ntp_time = TimeUtils.ts_to_units(
            coverage.get_parameter_context('ingestion_timestamp').uom, t_now)
        coverage.set_parameter_values(param_name='ingestion_timestamp',
                                      tdoa=slice_, value=ntp_time)

    if debugging:
        timer.complete_step('keys')
    DatasetManagementService._save_coverage(coverage)
    if debugging:
        timer.complete_step('save')
    self.dataset_changed(dataset_id, coverage.num_timesteps,
                         (start_index, start_index + elements))
    if debugging:
        timer.complete_step('notify')
        self._add_timing_stats(timer)
def add_granule(self, stream_id, rdt):
    '''
    Appends the granule's data to the coverage and persists it.

    Expands the coverage by the granule's element count, writes each
    parameter into the new tail slice, stamps an ingestion timestamp if
    the coverage defines one, then flushes and notifies listeners.

    :param stream_id: identifier of the stream the granule arrived on
    :param rdt: record-dictionary object holding the granule's parameter values
    :raises CorruptionError: if the coverage cannot be opened or written;
        the coverage is closed and the stream marked bad before raising
    '''
    debugging = log.isEnabledFor(DEBUG)
    timer = Timer() if debugging else None
    if stream_id in self._bad_coverages:
        log.info('Message attempting to be inserted into bad coverage: %s',
                 DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))

    #--------------------------------------------------------------------------------
    # Coverage determination and appending
    #--------------------------------------------------------------------------------
    dataset_id = self.get_dataset(stream_id)
    if not dataset_id:
        log.error('No dataset could be determined on this stream: %s', stream_id)
        return

    try:
        coverage = self.get_coverage(stream_id)
    except IOError as e:
        log.error("Couldn't open coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)))
        # str(e) instead of the deprecated (and Py3-removed) e.message attribute
        raise CorruptionError(str(e))

    if debugging:
        path = DatasetManagementService._get_coverage_path(dataset_id)
        log.debug('%s: add_granule stream %s dataset %s coverage %r file %s',
                  self._id, stream_id, dataset_id, coverage, path)

    if not coverage:
        log.error('Could not persist coverage from granule, coverage is None')
        return

    #--------------------------------------------------------------------------------
    # Actual persistence
    #--------------------------------------------------------------------------------
    elements = len(rdt)
    if debugging:
        timer.complete_step('checks')  # lightweight ops, should be zero
    try:
        coverage.insert_timesteps(elements, oob=False)
    except IOError as e:
        log.error("Couldn't insert time steps for coverage: %s",
                  DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)),
                  exc_info=True)
        # Mark the stream bad even if close() itself fails
        try:
            coverage.close()
        finally:
            self._bad_coverages[stream_id] = 1
        raise CorruptionError(str(e))

    if debugging:
        timer.complete_step('insert')

    start_index = coverage.num_timesteps - elements
    # Defined before the loop: previously slice_ was only assigned inside the
    # loop body, so an empty RDT raised NameError at the ingestion_timestamp
    # write below. It is also loop-invariant.
    slice_ = slice(start_index, None)
    for k, v in rdt.iteritems():
        try:
            coverage.set_parameter_values(param_name=k, tdoa=slice_, value=v)
        except IOError as e:
            log.error("Couldn't insert values for coverage: %s",
                      DatasetManagementService._get_coverage_path(self.get_dataset(stream_id)),
                      exc_info=True)
            try:
                coverage.close()
            finally:
                self._bad_coverages[stream_id] = 1
            raise CorruptionError(str(e))

    # Stamp the wall-clock ingestion time when the coverage tracks it
    if 'ingestion_timestamp' in coverage.list_parameters():
        t_now = time.time()
        ntp_time = TimeUtils.ts_to_units(
            coverage.get_parameter_context('ingestion_timestamp').uom, t_now)
        coverage.set_parameter_values(param_name='ingestion_timestamp',
                                      tdoa=slice_, value=ntp_time)

    if debugging:
        timer.complete_step('keys')
    DatasetManagementService._save_coverage(coverage)
    if debugging:
        timer.complete_step('save')
    self.dataset_changed(dataset_id, coverage.num_timesteps,
                         (start_index, start_index + elements))
    if debugging:
        timer.complete_step('notify')
        self._add_timing_stats(timer)