def test_wait_until_has_timeout():
    """wait_until with a timeout still observes the worker-driven transition."""
    ps = ProcessingState()
    background = Worker(ps)
    background.start()
    ps.wait_until(State.STEADY, 4)
    assert ps.has_value(State.STEADY)
    background.join()
def __init__(self, name, generation_id, channel_dict, global_config):
    """
    :type name: :obj:`str`
    :arg name: Name of channel corresponding to this task manager
    :type generation_id: :obj:`int`
    :arg generation_id: Task Manager generation id provided by caller
    :type channel_dict: :obj:`dict`
    :arg channel_dict: channel configuration
    :type global_config: :obj:`dict`
    :arg global_config: global configuration
    """
    self.name = name
    self.id = str(uuid.uuid4()).upper()
    self.dataspace = dataspace.DataSpace(global_config)
    # my current data block (dataspace and id must be set first)
    self.data_block_t0 = datablock.DataBlock(
        self.dataspace, name, self.id, generation_id)
    self.channel = Channel(channel_dict)
    self.state = ProcessingState()
    self.loglevel = multiprocessing.Value('i', logging.WARNING)
    self.lock = threading.Lock()
    # Temporary: the remainder goes away once the source-proxy
    # has been reimplemented.
    for source in self.channel.sources.values():
        source.worker.post_create(global_config)
def test_wait_until_list():
    """wait_until accepts a tuple of acceptable states."""
    ps = ProcessingState()
    background = Worker(ps)
    background.start()
    ps.wait_until((State.STEADY, State.IDLE))
    assert ps.has_value(State.STEADY)
    background.join()
def test_wait_while():
    """wait_while blocks until the worker leaves the BOOT state."""
    ps = ProcessingState()
    background = Worker(ps)
    background.start()
    ps.wait_while(State.BOOT)
    assert ps.has_value(State.STEADY)
    background.join()
def __init__(self, name):
    """
    :type name: :obj:`str`
    :arg name: Name of source corresponding to this source manager
    """
    # Shared processing state and a process-safe log level for this source.
    self.state = ProcessingState()
    self.loglevel = multiprocessing.Value("i", logging.WARNING)
    self.name = name
def __init__(self, config):
    """
    Validate the dataspace section of the configuration and set up the reaper.

    :type config: :obj:`dict`
    :arg config: Configuration dictionary

    :raises dataspace.DataSpaceConfigurationError: if the ``dataspace``
        section is missing, is not a dictionary, or lacks required keys.
    """
    self.logger = structlog.getLogger(LOGGERNAME)
    self.logger = self.logger.bind(module=__name__.split(".")[-1],
                                   channel=DELOGGER_CHANNEL_NAME)
    self.logger.debug("Initializing a reaper")
    # Private backing fields; assignments to the public names below go
    # through validating property setters, so seed the backing store with
    # the minimum legal values first.
    self.__retention_interval = self.MIN_RETENTION_INTERVAL_DAYS
    self.__seconds_between_runs = self.MIN_SECONDS_BETWEEN_RUNS
    dataspace_config = config.get("dataspace")
    if not dataspace_config:
        # No exception is in flight here, so use error(), not exception()
        # (exception() outside a handler logs a spurious "NoneType: None").
        self.logger.error("Error in initializing Reaper!")
        raise dataspace.DataSpaceConfigurationError(
            "Invalid dataspace configuration: "
            "dataspace key not found in dictionary")
    elif not isinstance(dataspace_config, dict):
        self.logger.error("Error in initializing Reaper!")
        raise dataspace.DataSpaceConfigurationError(
            "Invalid dataspace configuration: "
            "dataspace key must correspond to a dictionary")
    try:
        db_driver_name = dataspace_config["datasource"]["name"]
        db_driver_module = dataspace_config["datasource"]["module"]
        db_driver_config = dataspace_config["datasource"]["config"]
        self.retention_interval = dataspace_config[
            "retention_interval_in_days"]
        # Default: run the reaper once a day.
        self.seconds_between_runs = dataspace_config.get(
            "reaper_run_interval", 24 * 60 * 60)
    except KeyError as exc:
        self.logger.exception("Error in initializing Reaper!")
        # Chain the cause so the missing key is visible to callers.
        raise dataspace.DataSpaceConfigurationError(
            "Invalid dataspace configuration") from exc
    self.datasource = dataspace.DataSourceLoader().create_datasource(
        db_driver_module, db_driver_name, db_driver_config)
    self.thread = None
    self.state = ProcessingState()
def __init__(self, config):
    """
    Validate the dataspace section of the configuration and set up the reaper.

    :type config: :obj:`dict`
    :arg config: Configuration dictionary

    :raises dataspace.DataSpaceConfigurationError: if the ``dataspace``
        section is missing, is not a dictionary, or lacks required keys.
    """
    self.logger = logging.getLogger()
    self.logger.debug('Initializing a reaper')
    # Private backing fields; assignments to the public names below go
    # through validating property setters, so seed the backing store with
    # the minimum legal values first.
    self.__retention_interval = self.MIN_RETENTION_INTERVAL_DAYS
    self.__seconds_between_runs = self.MIN_SECONDS_BETWEEN_RUNS
    dataspace_config = config.get('dataspace')
    if not dataspace_config:
        # No exception is in flight here, so use error(), not exception()
        # (exception() outside a handler logs a spurious "NoneType: None").
        self.logger.error("Error in initializing Reaper!")
        raise dataspace.DataSpaceConfigurationError('Invalid dataspace configuration: '
                                                    'dataspace key not found in dictionary')
    elif not isinstance(dataspace_config, dict):
        self.logger.error("Error in initializing Reaper!")
        raise dataspace.DataSpaceConfigurationError('Invalid dataspace configuration: '
                                                    'dataspace key must correspond to a dictionary')
    try:
        db_driver_name = dataspace_config['datasource']['name']
        db_driver_module = dataspace_config['datasource']['module']
        db_driver_config = dataspace_config['datasource']['config']
        self.retention_interval = dataspace_config['retention_interval_in_days']
        # Default: run the reaper once a day.
        self.seconds_between_runs = dataspace_config.get('reaper_run_interval', 24 * 60 * 60)
    except KeyError as exc:
        self.logger.exception("Error in initializing Reaper!")
        # Chain the cause so the missing key is visible to callers.
        raise dataspace.DataSpaceConfigurationError('Invalid dataspace configuration') from exc
    self.datasource = dataspace.DataSourceLoader().create_datasource(db_driver_module,
                                                                     db_driver_name,
                                                                     db_driver_config)
    self.thread = None
    self.state = ProcessingState()
    # NOTE(review): removed a redundant trailing re-assignment of
    # self.logger = logging.getLogger() — identical to the one above.
def __init__(self, name, workers, dataspace, expected_products, exchange,
             broker_url, queue_info):
    """
    :type name: :obj:`str`
    :arg name: Name of channel corresponding to this task manager
    :type workers: :obj:`dict`
    :arg workers: worker objects keyed by "sources", "transforms",
        "logic_engine" and "publishers"
    :arg dataspace: dataspace backing this channel's data block
    :arg expected_products: products the source-product cache waits for
    :arg exchange: message exchange used for channel communication
    :type broker_url: :obj:`str`
    :arg broker_url: URL of the message broker
    :arg queue_info: sequence of queue entries; the second element of each
        entry is used as a routing key
    """
    # NOTE(review): docstring previously described a different signature
    # (generation_id/channel_dict/global_config); rewritten to match.
    self.name = name
    self.state = ProcessingState()
    self.loglevel = multiprocessing.Value("i", logging.WARNING)
    self.id = str(uuid.uuid4()).upper()
    # my current data block; generation id is fixed at 1 here
    self.data_block_t0 = datablock.DataBlock(dataspace, name, self.id, 1)
    self.logger = structlog.getLogger(CHANNELLOGGERNAME)
    self.logger = self.logger.bind(module=__name__.split(".")[-1],
                                   channel=self.name)

    # The DE owns the sources
    self.source_workers = workers["sources"]
    self.transform_workers = workers["transforms"]
    self.logic_engine = workers["logic_engine"]
    self.publisher_workers = workers["publishers"]

    self.exchange = exchange
    self.broker_url = broker_url
    self.connection = Connection(self.broker_url)
    self.source_product_cache = SourceProductCache(expected_products,
                                                   self.logger)
    self.queue_info = queue_info
    self.routing_keys = [info[1] for info in self.queue_info]
def test_wrong_value_has_value_list():
    """has_value rejects a tuple containing a non-State member."""
    ps = ProcessingState()
    with pytest.raises(RuntimeError):
        ps.has_value((State.STEADY, 'jkl'))
def test_wrong_value_on_assignment():
    """set() rejects values that are not State members."""
    ps = ProcessingState()
    with pytest.raises(Exception, match='Supplied value is not a State variable'):
        ps.set(1.3)
def test_wrong_value_on_creation():
    """The constructor rejects values that are not State members."""
    with pytest.raises(Exception, match='1\\.3 is not a valid State'):
        ProcessingState(1.3)
def test_shared_state_construction():
    """Default construction yields BOOT; an explicit state is honored."""
    ps = ProcessingState()
    assert ps.has_value(State.BOOT)
    ps = ProcessingState(State.STEADY)
    assert ps.has_value(State.STEADY)
def test_probably_running():
    """Every running condition makes probably_running() true."""
    ps = ProcessingState()
    for candidate in RUNNING_CONDITIONS:
        ps.set(candidate)
        assert ps.probably_running()
def test_should_stop():
    """Every stopping condition makes should_stop() true."""
    ps = ProcessingState()
    for candidate in STOPPING_CONDITIONS:
        ps.set(candidate)
        assert ps.should_stop()
def test_inactive():
    """Every inactive condition makes inactive() true."""
    ps = ProcessingState()
    for candidate in INACTIVE_CONDITIONS:
        ps.set(candidate)
        assert ps.inactive()
def test_has_lock_and_can_context():
    """The state exposes a lock usable as a context manager."""
    ps = ProcessingState()
    with ps.lock:
        return True
def test_wrong_value_has_value():
    """has_value rejects a plain string argument."""
    ps = ProcessingState()
    with pytest.raises(RuntimeError):
        ps.has_value('asdf')
def test_cannot_redefine_lock():
    """The lock attribute cannot be overwritten after construction."""
    ps = ProcessingState()
    with pytest.raises(ValueError):
        ps.lock = 'bad_string'