def __init__(self, config, stream_handle, exception_callback):
    """
    Construct the glider engineering parser, resolving particle classes
    from the configuration.

    @param config Parser configuration dictionary; must contain
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT mapping
        EngineeringClassKey entries to particle class name strings.
    @param stream_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the class dict is missing, is missing
        a required key, or names a class that does not exist.
    """
    # set the class types from the config
    particle_class_dict = config.get(
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)

    if particle_class_dict is not None:
        try:
            # get the particle module
            module = __import__(
                'mi.dataset.parser.glider',
                fromlist=[
                    particle_class_dict[EngineeringClassKey.METADATA],
                    particle_class_dict[EngineeringClassKey.DATA],
                    particle_class_dict[EngineeringClassKey.SCIENCE]
                ])

            # get the class from the string name of the class
            self._metadata_class = getattr(
                module, particle_class_dict[EngineeringClassKey.METADATA])
            self._particle_class = getattr(
                module, particle_class_dict[EngineeringClassKey.DATA])
            self._science_class = getattr(
                module, particle_class_dict[EngineeringClassKey.SCIENCE])
            self._gps_class = getattr(
                module, particle_class_dict[EngineeringClassKey.GPS])
        except (AttributeError, KeyError):
            # AttributeError: the named class is absent from the module.
            # KeyError: a required EngineeringClassKey entry is missing
            # from the class dict (previously escaped as a raw KeyError
            # instead of a ConfigurationException).
            raise ConfigurationException(
                'Config provided a class which does not exist %s' % config)
    else:
        raise ConfigurationException(
            'Missing particle_classes_dict in config')

    # No metadata particle has been emitted yet.
    self._metadata_sent = False

    super(GliderEngineeringParser, self).__init__(config, stream_handle,
                                                  exception_callback)
def __init__(self, config, file_handle, exception_callback):
    """
    Construct a FuelCellEngDclParser.

    @param config Parser configuration dictionary
    @param file_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle classes dict, or the
        engineering data particle class entry inside it, is missing.
    """
    self._file_handle = file_handle

    # Fail fast when the particle classes dictionary is absent.
    if DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT not in config:
        log.error('Configuration missing particle classes dict')
        raise ConfigurationException('Configuration missing particle classes dict')

    particle_classes_dict = config.get(DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)

    # Fail fast when the engineering data particle class entry is absent.
    if FuelCellEngDclParticleClassKey.ENGINEERING_DATA_PARTICLE_CLASS not in particle_classes_dict:
        message = 'Configuration missing engineering data particle class key in particle classes dict'
        log.error(message)
        raise ConfigurationException(message)

    # Remember the data particle class for later record generation.
    self._fuelcell_data_class = particle_classes_dict[
        FuelCellEngDclParticleClassKey.ENGINEERING_DATA_PARTICLE_CLASS]

    super(FuelCellEngDclParser, self).__init__(config, file_handle, exception_callback)
def _build_parser(self, parser_state, file_handle, data_key=None):
    """
    Build and return the specified parser as indicated by the data_key.

    @param parser_state Previous parser state to initialize the parser with
    @param file_handle Handle of the open file to pass to the parser
    @param data_key Key selecting which parser type to build
    @raise ConfigurationException if data_key is not a recognized key
    """
    #
    # If the key is VEL3D_L_WFP, build the WFP parser.
    #
    if data_key == DataTypeKey.VEL3D_L_WFP:
        config = self._parser_config[data_key]
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.vel3d_l_wfp',
            DataSetDriverConfigKeys.PARTICLE_CLASS: [
                'Vel3dKWfpInstrumentRecoveredParticle',
                'Vel3dKWfpMetadataRecoveredParticle'
            ]
        })
        parser = Vel3dLWfpParser(
            config, parser_state, file_handle,
            lambda state, ingested: self._save_parser_state(
                state, data_key, ingested),
            self._data_callback, self._sample_exception_callback)

    #
    # If the key is VEL3D_L_WFP_SIO_MULE, build the WFP SIO Mule parser.
    #
    elif data_key == DataTypeKey.VEL3D_L_WFP_SIO_MULE:
        config = self._parser_config[data_key]
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.vel3d_l_wfp',
            DataSetDriverConfigKeys.PARTICLE_CLASS: [
                'Vel3dKWfpInstrumentParticle',
                'Vel3dLWfpSioMuleMetadataParticle'
            ]
        })
        parser = Vel3dLWfpSioMuleParser(
            config, parser_state, file_handle,
            lambda state: self._save_parser_state(state, data_key),
            self._data_callback, self._sample_exception_callback)

    #
    # If the key is one that we're not expecting, don't build any parser.
    #
    else:
        # Fixed: data_key is now interpolated into the message; it used
        # to be passed as a stray exception argument. The unreachable
        # "if parser is None" checks after each constructor (which raise
        # on failure rather than returning None) were removed.
        raise ConfigurationException(
            'Vel3d_L Parser configuration key incorrect %s' % data_key)

    return parser
def __init__(self, config, stream_handle, exception_callback):
    """
    This method is a constructor that will instantiate an
    DbgPdbgCsppParser object.

    @param config The configuration for this DbgPdbgCsppParser parser
    @param stream_handle The handle to the data stream containing the cspp data
    @param exception_callback The function to call to report exceptions
    @raise ConfigurationException if the particle classes dict or any
        required particle class key is missing.
    """
    # Build up the header state dictionary using the default header key
    # list; all values start out as None.
    self._header_state = dict.fromkeys(DEFAULT_HEADER_KEY_LIST)

    # Fail fast when the particle classes dictionary is absent.
    if DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT not in config:
        log.warning('Configuration missing particle classes dict')
        raise ConfigurationException(
            'Configuration missing particle classes dict')

    particle_classes_dict = config.get(
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)

    required_keys = (METADATA_PARTICLE_CLASS_KEY,
                     BATTERY_STATUS_CLASS_KEY,
                     GPS_ADJUSTMENT_CLASS_KEY)
    if not all(key in particle_classes_dict for key in required_keys):
        log.warning(
            'Configuration missing metadata or data particle class key in particle classes dict'
        )
        raise ConfigurationException(
            'Configuration missing metadata or data particle class key in particle classes dict'
        )

    # Set the metadata and data particle classes to be used later.
    self._metadata_particle_class = particle_classes_dict.get(
        METADATA_PARTICLE_CLASS_KEY)
    self._battery_status_class = particle_classes_dict.get(
        BATTERY_STATUS_CLASS_KEY)
    self._gps_adjustment_class = particle_classes_dict.get(
        GPS_ADJUSTMENT_CLASS_KEY)

    # Parsed records awaiting retrieval.
    self._record_buffer = []

    # Call the superclass constructor.
    super(DbgPdbgCsppParser, self).__init__(
        config, stream_handle, None,
        partial(StringChunker.regex_sieve_function,
                regex_list=[SIEVE_MATCHER]),
        lambda state, ingested: None,
        lambda data: None,
        exception_callback)

    self._metadata_extracted = False
def __init__(self, config, file_handle, exception_callback):
    """
    Construct a VelptAbDclParser.

    @param config Parser configuration dictionary
    @param file_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle classes dict or any
        required particle class key is missing.
    """
    # Parsing state: buffered records, checksum accumulator, and the
    # flags/counters used while walking velocity and diagnostics records.
    self._record_buffer = []
    self._calculated_checksum = 0
    self._current_record = ''
    self._velocity_data = False
    self._diagnostic_header = False
    self._diagnostic_header_published = False
    self._diagnostic_data = False
    self._end_of_file = False
    self._sending_diagnostics = False
    self._bad_diagnostic_header = False
    self._first_diagnostics_record = False
    self._diagnostics_count = 0
    self._total_diagnostic_records = 0
    self._velocity_data_dict = {}
    self._diagnostics_header_dict = {}
    self._diagnostics_data_dict = {}
    self._diagnostics_header_record = ''
    self._file_handle = file_handle

    # Fail fast when the particle classes dictionary is absent.
    if DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT not in config:
        log.error('Configuration missing particle classes dict')
        raise ConfigurationException(
            'Configuration missing particle classes dict')

    particle_classes_dict = config.get(
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)

    required_keys = (VelptAbDclParticleClassKey.METADATA_PARTICLE_CLASS,
                     VelptAbDclParticleClassKey.DIAGNOSTICS_PARTICLE_CLASS,
                     VelptAbDclParticleClassKey.INSTRUMENT_PARTICLE_CLASS)
    if not all(key in particle_classes_dict for key in required_keys):
        log.error(
            'Configuration missing metadata or data particle class key in particle classes dict'
        )
        raise ConfigurationException(
            'Configuration missing metadata or data particle class key in particle classes dict'
        )

    # Set the metadata and data particle classes to be used later.
    self._metadata_class = particle_classes_dict[
        VelptAbDclParticleClassKey.METADATA_PARTICLE_CLASS]
    self._diagnostics_class = particle_classes_dict[
        VelptAbDclParticleClassKey.DIAGNOSTICS_PARTICLE_CLASS]
    self._velocity_data_class = particle_classes_dict[
        VelptAbDclParticleClassKey.INSTRUMENT_PARTICLE_CLASS]

    super(VelptAbDclParser, self).__init__(config, file_handle,
                                           exception_callback)
def _build_parser(self, parser_state, infile, data_key=None):
    """
    Build and return the parser.

    @param parser_state Previous parser state to initialize the parser with
    @param infile Handle of the open file to pass to the parser
    @param data_key Key selecting which parser (flntu or flcdr) to build
    @raise ConfigurationException if data_key is not a recognized key
    """
    if data_key == DataParticleType.FLNTU_X_MMP_CDS_INSTRUMENT:
        config = self._parser_config.get(
            DataParticleType.FLNTU_X_MMP_CDS_INSTRUMENT)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.flntu_x_mmp_cds',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
                'FlntuXMmpCdsParserDataParticle'
        })
        log.debug("My Config: %s", config)
        parser = FlntuXMmpCdsParser(
            config, parser_state, infile,
            lambda state, ingested: self._save_parser_state(
                state, data_key, ingested),
            self._data_callback, self._sample_exception_callback)
    elif data_key == DataParticleType.FLCDR_X_MMP_CDS_INSTRUMENT:
        config = self._parser_config.get(
            DataParticleType.FLCDR_X_MMP_CDS_INSTRUMENT)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.flcdr_x_mmp_cds',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
                'FlcdrXMmpCdsParserDataParticle'
        })
        log.debug("My Config: %s", config)
        parser = FlcdrXMmpCdsParser(
            config, parser_state, infile,
            lambda state, ingested: self._save_parser_state(
                state, data_key, ingested),
            self._data_callback, self._sample_exception_callback)
    else:
        raise ConfigurationException(
            'flntu/flcdr parser not built due to missing key')

    # Note: the parser constructors raise on failure rather than returning
    # None, so the former trailing "parser is None" check was unreachable
    # and has been removed.
    return parser
def __init__(self, config, state, stream_handle, state_callback,
             publish_callback, exception_callback):
    """
    Construct an AdcpsJlnStcParser.

    @param config Parser configuration dictionary; must contain
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT with the adcps jln
        stc metadata and instrument particle class entries.
    @param state Previous parser state to resume from, or None/falsy
    @param stream_handle Handle of the open file being parsed
    @param state_callback Function called when parser state changes
    @param publish_callback Function called to publish particles
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle class entries cannot be
        read from the config.
    """
    # No header particle has been produced yet.
    self._saved_header = None
    # Byte position within the file, used for restartability.
    self._read_state = {StateKey.POSITION: 0}
    super(AdcpsJlnStcParser, self).__init__(config,
                                            stream_handle,
                                            state,
                                            self.sieve_function,
                                            state_callback,
                                            publish_callback,
                                            exception_callback)
    try:
        self._metadata_class = config[
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT][
            AdcpsJlnStcParticleClassKey.METADATA_PARTICLE_CLASS]
        self._instrument_class = config[
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT][
            AdcpsJlnStcParticleClassKey.INSTRUMENT_PARTICLE_CLASS]
    except KeyError:
        message = "Unable to access adcps jln stc data particle class types in config dictionary"
        log.warn(message)
        raise ConfigurationException(message)

    if state:
        self.set_state(state)
        # NOTE(review): nesting reconstructed from whitespace-mangled
        # source — the header appears to be re-parsed only when resuming
        # from the very start of the file; confirm against upstream.
        if state[StateKey.POSITION] == 0:
            self._parse_header()
    else:
        # Fresh start: always parse the file header.
        self._parse_header()
def __init__(self, config, state, stream_handle, state_callback,
             publish_callback, exception_callback, *args, **kwargs):
    """
    Construct a PresfAbcDclParser.

    @param config Parser configuration dictionary; must contain
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT with the tide and
        wave particle class entries.
    @param state Previous parser state to resume from
    @param stream_handle Handle of the open file being parsed
    @param state_callback Function called when parser state changes
    @param publish_callback Function called to publish particles
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle classes dict or the
        tide/wave particle class keys are missing.
    """
    super(PresfAbcDclParser, self).__init__(config,
                                            stream_handle,
                                            state,
                                            self.sieve_function,
                                            state_callback,
                                            publish_callback,
                                            exception_callback,
                                            *args, **kwargs)

    self.input_file = stream_handle

    # Obtain the particle classes dictionary from the config data
    if DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT in config:
        particle_classes_dict = config.get(
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)

        # Set the wave and tide particle classes to be used later
        if TIDE_PARTICLE_CLASS_KEY in particle_classes_dict and \
                WAVE_PARTICLE_CLASS_KEY in particle_classes_dict:
            self._wave_particle_class = particle_classes_dict.get(
                WAVE_PARTICLE_CLASS_KEY)
            self._tide_particle_class = particle_classes_dict.get(
                TIDE_PARTICLE_CLASS_KEY)
        else:
            log.warning(
                'Configuration missing metadata or data particle class key in particle classes dict'
            )
            raise ConfigurationException(
                'Configuration missing metadata or data particle class key in particle classes dict'
            )
    else:
        # BUG FIX: a missing particle classes dict was previously ignored
        # silently, leaving the particle class attributes undefined and
        # deferring the failure to first use. Every sibling parser in
        # this file raises ConfigurationException here instead.
        log.warning('Configuration missing particle classes dict')
        raise ConfigurationException(
            'Configuration missing particle classes dict')
def _verify_config(self):
    """
    Verify we have good configurations for the parser and harvester.
    @raise: ConfigurationException if configuration is invalid
    """
    errors = []
    log.debug("Driver Config: %s", self._config)

    harvester_config = self._config.get(DataSourceConfigKey.HARVESTER)
    if harvester_config:
        if not harvester_config.get('directory'):
            # Fixed unbalanced quote in the error message.
            errors.append("harvester config missing 'directory'")
        if not harvester_config.get('pattern'):
            errors.append("harvester config missing 'pattern'")
    else:
        errors.append("missing 'harvester' config")

    if errors:
        log.error("Driver configuration error: %r", errors)
        # Fixed: the error list is now interpolated into the message; it
        # used to be passed as a stray exception argument.
        raise ConfigurationException("driver configuration errors: %r" % errors)

    self._harvester_config = harvester_config
    self._parser_config = self._config.get(DataSourceConfigKey.PARSER)


def _nextfile_callback(self):
    # No-op callback. It was previously a dead nested function defined
    # inside _verify_config; hoisted to a proper method.
    pass
def __init__(self, config, stream_handle, exception_callback):
    """
    Construct a PhsenAbcdefImodemParser.

    @param config Parser configuration dictionary; must contain
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT with instrument,
        control and metadata particle class entries.
    @param stream_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle class entries cannot be
        read from the config.
    """
    super(PhsenAbcdefImodemParser, self).__init__(config,
                                                  stream_handle,
                                                  exception_callback)

    try:
        self._instrument_particle_class = config[
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT][
            PhsenAbcdefImodemParticleClassKey.INSTRUMENT_PARTICLE_CLASS]
        self._control_particle_class = config[
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT][
            PhsenAbcdefImodemParticleClassKey.CONTROL_PARTICLE_CLASS]
        self._metadata_particle_class = config[
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT][
            PhsenAbcdefImodemParticleClassKey.METADATA_PARTICLE_CLASS]
    except KeyError:
        # BUG FIX: was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit; only missing dict keys are
        # expected here.
        raise ConfigurationException(
            "Error configuring PhsenAbcdefImodemParser")

    # Construct the dictionary to save off the metadata record matches
    self._metadata_matches_dict = {
        MetadataMatchKey.FILE_TIME_MATCH: None,
        MetadataMatchKey.INSTRUMENT_ID_MATCH: None,
        MetadataMatchKey.SERIAL_NUMBER_MATCH: None,
        MetadataMatchKey.VOLTAGE_FLT32_MATCH: None,
        MetadataMatchKey.RECORD_LENGTH_MATCH: None,
        MetadataMatchKey.NUM_EVENTS_MATCH: None,
        MetadataMatchKey.NUM_SAMPLES_MATCH: None,
    }

    self._metadata_sample_generated = False
def _build_parser(self, parser_state, stream_in, data_key=None):
    """
    Build and return the dosta_abcdjm_sio parser matching data_key.

    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    config = self._parser_config.get(data_key)

    # Recovered: build the WFP parser.
    if data_key == DataTypeKey.DOSTA_ABCDJM_SIO_RECOVERED:
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.dostad',
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
                METADATA_PARTICLE_CLASS_KEY:
                    DostadParserRecoveredMetadataDataParticle,
                DATA_PARTICLE_CLASS_KEY:
                    DostadParserRecoveredDataParticle
            }
        })
        return DostadRecoveredParser(
            config, parser_state, stream_in,
            lambda state, ingested: self._save_parser_state(
                state, data_key, ingested),
            self._data_callback,
            self._sample_exception_callback)

    # Telemetered: build the WFP SIO Mule parser.
    if data_key == DataTypeKey.DOSTA_ABCDJM_SIO_TELEMETERED:
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.dostad',
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
                METADATA_PARTICLE_CLASS_KEY:
                    DostadParserTelemeteredMetadataDataParticle,
                DATA_PARTICLE_CLASS_KEY:
                    DostadParserTelemeteredDataParticle
            }
        })
        return DostadParser(
            config, parser_state, stream_in,
            lambda state: self._save_parser_state(state, data_key),
            self._data_callback,
            self._sample_exception_callback)

    # Any other key: don't build any parser.
    raise ConfigurationException(
        "Invalid data_key supplied to build parser")
def __init__(self, config, stream_handle, exception_callback):
    """
    This the constructor which instantiates the NutnrJCsppParser.

    @param config Parser configuration dictionary
    @param stream_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle classes dict or any
        required particle class key is missing.
    """
    # Build the header state dictionary from the default header key
    # list; all values start out as None.
    self._header_state = dict.fromkeys(DEFAULT_HEADER_KEY_LIST)

    # Initialize the metadata flag
    self._metadata_extracted = False

    # Fail fast when the particle classes dictionary is absent.
    if DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT not in config:
        log.warning('Configuration missing particle classes dict')
        raise ConfigurationException('Configuration missing particle classes dict')

    particle_classes_dict = config.get(DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT)

    required_keys = (METADATA_PARTICLE_CLASS_KEY,
                     LIGHT_PARTICLE_CLASS_KEY,
                     DARK_PARTICLE_CLASS_KEY)
    if not all(key in particle_classes_dict for key in required_keys):
        log.warning(
            'Configuration missing metadata or data particle class key in particle classes dict')
        raise ConfigurationException(
            'Configuration missing metadata or data particle class key in particle classes dict')

    # Set the metadata and data particle classes to be used later.
    self._light_particle_class = particle_classes_dict.get(LIGHT_PARTICLE_CLASS_KEY)
    self._dark_particle_class = particle_classes_dict.get(DARK_PARTICLE_CLASS_KEY)
    self._metadata_particle_class = particle_classes_dict.get(METADATA_PARTICLE_CLASS_KEY)

    # call the superclass constructor
    super(NutnrJCsppParser, self).__init__(config,
                                           stream_handle,
                                           exception_callback)
def _build_parser(self, parser_state, infile, data_key=None):
    """
    Build and return the parad_j_cspp parser for the given data_key.

    @param parser_state previous parser state to initialize parser with
    @param infile Handle of open file to pass to parser
    @param data_key Key selecting recovered vs telemetered particles
    @raise ConfigurationException if data_key is not recognized
    """
    config = self._parser_config.get(data_key)

    # Select the particle classes that match the requested stream.
    if data_key == DataTypeKey.PARAD_J_CSPP_RECOVERED:
        particle_classes = {
            METADATA_PARTICLE_CLASS_KEY:
                ParadJCsppMetadataRecoveredDataParticle,
            DATA_PARTICLE_CLASS_KEY:
                ParadJCsppInstrumentRecoveredDataParticle
        }
    elif data_key == DataTypeKey.PARAD_J_CSPP_TELEMETERED:
        particle_classes = {
            METADATA_PARTICLE_CLASS_KEY:
                ParadJCsppMetadataTelemeteredDataParticle,
            DATA_PARTICLE_CLASS_KEY:
                ParadJCsppInstrumentTelemeteredDataParticle
        }
    else:
        raise ConfigurationException(
            "Invalid data_key (%s) supplied to build parser" % data_key)

    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.parad_j_cspp',
        DataSetDriverConfigKeys.PARTICLE_CLASS: None,
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: particle_classes
    })

    return ParadJCsppParser(
        config, parser_state, infile,
        lambda state, ingested: self._save_parser_state(
            state, data_key, ingested),
        self._data_callback,
        self._sample_exception_callback)
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build the requested parser based on the data key
    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    # Dispatch table mapping each known data source key to its builder.
    builder_by_key = {
        DataSourceKey.PHSEN_ABCDEF_SIO_MULE: self._build_telemetered_parser,
        DataSourceKey.PHSEN_ABCDEF: self._build_recovered_parser,
    }

    if data_key not in builder_by_key:
        raise ConfigurationException('Tried to build parser for unknown data source key %s' % data_key)

    return builder_by_key[data_key](parser_state, stream_in)
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build the requested parser based on the data key.

    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    # Select the parser class; both variants share the same config.
    if data_key == DataTypeKey.DOSTA_ABCDJM_RECOVERED:
        parser_class = DostaAbcdjmDclRecoveredParser
    elif data_key == DataTypeKey.DOSTA_ABCDJM_TELEMETERED:
        parser_class = DostaAbcdjmDclTelemeteredParser
    else:
        # Fixed: data_key is now interpolated into the message; it used
        # to be passed as a stray exception argument.
        raise ConfigurationException(
            'Dosta_abcdjm Parser configuration incorrect %s' % data_key)

    config = self._parser_config[data_key]
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.dosta_abcdjm_dcl',
        DataSetDriverConfigKeys.PARTICLE_CLASS: None
    })

    return parser_class(
        config, stream_in, parser_state,
        lambda state, ingested: self._save_parser_state(
            state, data_key, ingested),
        self._data_callback,
        self._sample_exception_callback)
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build the requested parser based on the data key
    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    # Dispatch table mapping each known data source key to its builder.
    builder_by_key = {
        DataSourceKey.DOSTA_LN_WFP_SIO_MULE: self._build_telemetered_parser,
        DataSourceKey.DOSTA_LN_WFP: self._build_recovered_parser,
    }

    if data_key not in builder_by_key:
        raise ConfigurationException("Invalid data source key %s" % data_key)

    return builder_by_key[data_key](parser_state, stream_in)
def _build_parser(self, parser_state, infile, data_key=None):
    """
    Build and return the specified parser as indicated by the data_key.

    @param parser_state previous parser state to initialize parser with
    @param infile Handle of open file to pass to parser
    @param data_key Key selecting telemetered vs recovered parser
    @raise ConfigurationException if data_key is not recognized
    """
    # Select the particle class and parser class; the rest of the
    # configuration is identical for both variants.
    if data_key == DataTypeKey.PARAD_K_STC:
        particle_class = 'Parad_k_stc_imodemDataParticle'
        parser_class = Parad_k_stc_imodemParser
    elif data_key == DataTypeKey.PARAD_K_STC_RECOVERED:
        particle_class = 'Parad_k_stc_imodemRecoveredDataParticle'
        parser_class = Parad_k_stc_imodemRecoveredParser
    else:
        # Fixed: data_key is now interpolated into the message; it used
        # to be passed as a stray exception argument.
        raise ConfigurationException(
            'Parser configuration incorrect %s' % data_key)

    config = self._parser_config[data_key]
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.parad_k_stc_imodem',
        DataSetDriverConfigKeys.PARTICLE_CLASS: particle_class
    })
    log.debug("My Config: %s", config)

    return parser_class(
        config, parser_state, infile,
        lambda state, ingested: self._save_parser_state(
            state, data_key, ingested),
        self._data_callback,
        self._sample_exception_callback)
def build_single_harvester(self, driver_state, key):
    """
    Build and return the harvester.

    @param driver_state driver state dictionary, indexed by key
    @param key harvester config key to build a harvester for
    @raise ConfigurationException if no harvester config exists for key
    """
    if key not in self._harvester_config:
        # Fixed: the dead "harvester = None" store before this raise was
        # removed; the assignment could never be observed.
        raise ConfigurationException(
            'FLORT KN recovered harvester not built because missing config'
        )

    return SingleDirectoryHarvester(
        self._harvester_config.get(key),
        driver_state[key],
        lambda filename: self._new_file_callback(filename, key),
        lambda modified: self._modified_file_callback(modified, key),
        self._exception_callback)
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build the requested parser based on the data key
    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    if data_key == DataSourceKey.FLORT_DJ_SIO_TELEMETERED:
        config = self._parser_config.get(
            DataSourceKey.FLORT_DJ_SIO_TELEMETERED)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.flortd',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
                'FlortdParserDataParticle'
        })
        # The telemetered parser saves state without an ingested flag.
        return FlortdParser(
            config, parser_state, stream_in,
            lambda state: self._save_parser_state(
                state, DataSourceKey.FLORT_DJ_SIO_TELEMETERED),
            self._data_callback,
            self._sample_exception_callback)

    if data_key == DataSourceKey.FLORT_DJ_SIO_RECOVERED:
        config = self._parser_config.get(
            DataSourceKey.FLORT_DJ_SIO_RECOVERED)
        config.update({
            DataSetDriverConfigKeys.PARTICLE_MODULE:
                'mi.dataset.parser.flortd',
            DataSetDriverConfigKeys.PARTICLE_CLASS:
                'FlortdRecoveredParserDataParticle'
        })
        # The recovered parser saves state with an ingested flag.
        return FlortdRecoveredParser(
            config, parser_state, stream_in,
            lambda state, ingested: self._save_parser_state(
                state, DataSourceKey.FLORT_DJ_SIO_RECOVERED, ingested),
            self._data_callback,
            self._sample_exception_callback)

    raise ConfigurationException(
        'Tried to build parser for unknown data source key %s' % data_key)
def __init__(self, config, stream_handle, exception_callback):
    """
    Construct an AdcpsJlnStcParser.

    @param config Parser configuration dictionary
    @param stream_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if the particle class entries cannot be
        read from the config.
    """
    super(AdcpsJlnStcParser, self).__init__(config,
                                            stream_handle,
                                            exception_callback)

    try:
        # Both particle classes live in the particle classes dict;
        # a missing dict or entry surfaces as KeyError below.
        class_dict = config[DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT]
        self._metadata_class = class_dict[
            AdcpsJlnStcParticleClassKey.METADATA_PARTICLE_CLASS]
        self._instrument_class = class_dict[
            AdcpsJlnStcParticleClassKey.INSTRUMENT_PARTICLE_CLASS]
    except KeyError:
        message = "Unable to access adcps jln stc data particle class types in config dictionary"
        log.warn(message)
        raise ConfigurationException(message)
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build the requested parser based on the data key.

    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    # Select the parser class; both variants share the same config.
    if data_key == DataTypeKey.OPTAA_DJ_RECOVERED:
        parser_class = OptaaDjDclRecoveredParser
    elif data_key == DataTypeKey.OPTAA_DJ_TELEMETERED:
        parser_class = OptaaDjDclTelemeteredParser
    else:
        # Fixed: data_key is now interpolated into the message; it used
        # to be passed as a stray exception argument.
        raise ConfigurationException(
            'Optaa_dj Parser configuration incorrect %s' % data_key)

    config = self._parser_config[data_key]
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE: MODULE_NAME,
        DataSetDriverConfigKeys.PARTICLE_CLASS: None
    })

    # Note that the Optaa_Dj parsers need the name of the file being parsed.
    return parser_class(
        config, stream_in, parser_state,
        lambda state, ingested: self._save_parser_state(
            state, data_key, ingested),
        self._data_callback,
        self._sample_exception_callback,
        self._file_in_process[data_key])
def _build_parser(self, parser_state, infile, data_key=None):
    """
    Build and return the velpt_j_cspp parser for the given data_key.

    @param parser_state previous parser state to initialize parser with
    @param infile Handle of open file to pass to parser
    @param data_key Key selecting recovered vs telemetered particles
    @raise ConfigurationException if data_key is not recognized
    """
    # Select the particle classes that match the requested stream.
    if data_key == DataTypeKey.VELPT_J_CSPP_RECOVERED:
        particle_classes = {
            METADATA_PARTICLE_CLASS_KEY:
                VelptJCsppMetadataRecoveredDataParticle,
            DATA_PARTICLE_CLASS_KEY:
                VelptJCsppInstrumentRecoveredDataParticle
        }
    elif data_key == DataTypeKey.VELPT_J_CSPP_TELEMETERED:
        particle_classes = {
            METADATA_PARTICLE_CLASS_KEY:
                VelptJCsppMetadataTelemeteredDataParticle,
            DATA_PARTICLE_CLASS_KEY:
                VelptJCsppInstrumentTelemeteredDataParticle
        }
    else:
        raise ConfigurationException(
            'Parser not built due to missing particle type')

    config = self._parser_config.get(data_key)
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.velpt_j_cspp',
        DataSetDriverConfigKeys.PARTICLE_CLASS: None,
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: particle_classes
    })

    return VelptJCsppParser(
        config, parser_state, infile,
        lambda state, ingested: self._save_parser_state(
            state, data_key, ingested),
        self._data_callback,
        self._sample_exception_callback)
def _build_parser(self, parser_state, infile, data_key):
    """
    Build and return the specified parser as indicated by the data_key.
    @param parser_state previous parser state to initialize parser with
    @param data_key harvester / parser key
    @param infile file name
    @raise ConfigurationException if data_key is not recognized
    """
    if data_key == DataTypeKey.CTDGV_TELEMETERED:
        return self._build_ctdgv_telemetered_parser(parser_state, infile,
                                                    data_key)

    if data_key == DataTypeKey.CTDGV_RECOVERED:
        return self._build_ctdgv_recovered_parser(parser_state, infile,
                                                  data_key)

    raise ConfigurationException(
        "Parser Configuration incorrect, invalid key: %s" % data_key)
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build the telemetered or the recovered parser according to which
    data source is appropriate.

    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    if data_key == DataSourceKey.SIO_ENG_SIO_MULE_TELEMETERED:
        parser = self._build_telemetered_parser(parser_state, stream_in)
        log.debug("_build_parser:::: BUILT TELEMETERED PARSER, %s",
                  type(parser))
    elif data_key == DataSourceKey.SIO_ENG_SIO_MULE_RECOVERED:
        parser = self._build_recovered_parser(parser_state, stream_in)
        # Fixed the "BIULDING" typo and made the tense match the
        # telemetered branch above.
        log.debug("_build_parser:::: BUILT RECOVERED PARSER, %s",
                  type(parser))
    else:
        raise ConfigurationException("Bad data key: %s" % data_key)

    return parser
def _build_parser(self, parser_state, stream_in, data_key):
    """
    Build and return a parser for the data_key type parser
    @param parser_state starting parser state to pass to parser
    @param stream_in Handle of open file to pass to parser
    @param data_key Key to determine which parser type is built
    @raise ConfigurationException if data_key is not recognized
    """
    # Map each data source key to the particle classes its parser uses.
    particle_classes_by_key = {
        DataSourceKey.NUTNR_J_CSPP_TELEMETERED: {
            METADATA_PARTICLE_CLASS_KEY:
                NutnrJCsppMetadataTelemeteredDataParticle,
            DATA_PARTICLE_CLASS_KEY:
                NutnrJCsppTelemeteredDataParticle
        },
        DataSourceKey.NUTNR_J_CSPP_RECOVERED: {
            METADATA_PARTICLE_CLASS_KEY:
                NutnrJCsppMetadataRecoveredDataParticle,
            DATA_PARTICLE_CLASS_KEY:
                NutnrJCsppRecoveredDataParticle
        },
    }

    if data_key not in particle_classes_by_key:
        log.warn("Cannot build parser for unknown data source key %s",
                 data_key)
        raise ConfigurationException("Cannot build parser for unknown data source key %s" % \
                                     data_key)

    config = self._parser_config.get(data_key)
    config.update({
        DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT:
            particle_classes_by_key[data_key]
    })

    return NutnrJCsppParser(
        config, parser_state, stream_in,
        lambda state, ingested: self._save_parser_state(state, data_key,
                                                        ingested),
        self._data_callback,
        self._sample_exception_callback
    )
def _build_parser(self, parser_state, infile):
    """
    Build and return the parser.

    @param parser_state previous parser state to initialize the parser with
    @param infile Handle of open file to pass to parser
    """
    config = self._parser_config
    config.update({
        DataSetDriverConfigKeys.PARTICLE_MODULE:
            'mi.dataset.parser.vel3d_a_mmp_cds',
        DataSetDriverConfigKeys.PARTICLE_CLASS:
            'Vel3dAMmpCdsParserDataParticle'
    })
    log.debug("My Config: %s", config)

    # The constructor either returns a parser instance or raises; the
    # former post-construction "is None" check was unreachable and has
    # been removed.
    return Vel3dAMmpCdsParser(config, parser_state, infile,
                              self._save_parser_state,
                              self._data_callback,
                              self._sample_exception_callback)
def _verify_config(self):
    """
    Verify we have good configurations for the parser.
    @raise: ConfigurationException if configuration is invalid
    """
    errors = []
    log.debug("Driver Config: %s", self._config)

    self._parser_config = self._config.get(DataSourceConfigKey.PARSER)
    if not self._parser_config:
        errors.append("missing 'parser' config")
    else:
        # BUG FIX: these membership tests previously ran even when the
        # parser config was missing/None, raising TypeError instead of
        # reporting a configuration error.
        if ParserConfigKey.ORBNAME not in self._parser_config:
            errors.append("parser config missing 'orbname'")
        if ParserConfigKey.SELECT not in self._parser_config:
            errors.append("parser config missing 'select'")
        if ParserConfigKey.REJECT not in self._parser_config:
            errors.append("parser config missing 'reject'")

    if errors:
        log.error("Driver configuration error: %r", errors)
        # Fixed: the error list is now interpolated into the message; it
        # used to be passed as a stray exception argument.
        raise ConfigurationException("driver configuration errors: %r" % errors)
def _build_parser(self, parser_state, infile, data_key):
    """
    Build and return the specified parser as indicated by the data_key.
    @param parser_state previous parser state to initialize parser with
    @param data_key harvester / parser key
    @param infile file name
    @raise ConfigurationException if data_key is not recognized
    """
    log.trace("DRIVER._build_parser(): data_key= %s", data_key)

    if data_key == DataTypeKey.ENG_TELEMETERED:
        log.trace("EngineeringDataSetDriver._build_parser(): using a TELEMETERED Parser")
        return self._build_eng_telemetered_parser(parser_state, infile,
                                                  data_key)

    if data_key == DataTypeKey.ENG_RECOVERED:
        log.trace("EngineeringDataSetDriver._build_parser(): using a RECOVERED Parser")
        return self._build_eng_recovered_parser(parser_state, infile,
                                                data_key)

    raise ConfigurationException(
        "Parser Configuration incorrect, key invalid: %s" % data_key)
def __init__(self, config, stream_handle, exception_callback):
    """
    Construct a GliderParser.

    @param config Parser configuration dictionary
    @param stream_handle Handle of the open file being parsed
    @param exception_callback Function called to report exceptions
    @raise ConfigurationException if no particle class was configured.
    """
    # Parsed records awaiting retrieval, held as (record, state) tuples.
    self._record_buffer = []
    # Values parsed from the ASCII file header.
    self._header_dict = {}

    # A subclass may have chosen a particle class already; only default
    # it to None when it has not been set yet.
    self._particle_class = getattr(self, '_particle_class', None)
    self.num_columns = None

    super(GliderParser, self).__init__(config,
                                       stream_handle,
                                       exception_callback)

    if self._particle_class is None:
        msg = 'particle_class was not defined in configuration %s' % config
        log.warn(msg)
        raise ConfigurationException(msg)

    # Read and store the configuration found in the 14 line header
    self._read_file_definition()

    # Read and store the information found in the 3 lines of column labels
    self._read_column_labels()
def _verify_config(self):
    """
    Verify we have good configurations for the parser and harvester.
    @raise: ConfigurationException if configuration is invalid
    """
    errors = []
    log.debug("Driver Config: %s", self._config)

    self._harvester_config = self._config.get(DataSourceConfigKey.HARVESTER)
    if self._harvester_config:
        if not self._harvester_config.get(DataSetDriverConfigKeys.DIRECTORY):
            # Fixed unbalanced quote in the error message.
            errors.append("harvester config missing 'directory'")
        if not self._harvester_config.get(DataSetDriverConfigKeys.PATTERN):
            errors.append("harvester config missing 'pattern'")
    else:
        errors.append("missing 'harvester' config")

    if errors:
        log.error("Driver configuration error: %r", errors)
        # Fixed: the error list is now interpolated into the message; it
        # used to be passed as a stray exception argument. The
        # commented-out storage_directory check was dead code and has
        # been removed.
        raise ConfigurationException("driver configuration errors: %r" % errors)

    self._parser_config = self._config.get(DataSourceConfigKey.PARSER)