def test_is_data_model_consistent_2(self):
    """A BATCH parent feeding a STREAMING child must be flagged as inconsistent."""
    parent = DummyUtil.dummy_operator(operator_name='test_operator_1')
    child = DummyUtil.dummy_operator(operator_name='test_operator_2')
    parent.set_data_model(model=DataModelType.BATCH)
    child.set_data_model(model=DataModelType.STREAMING)
    parent.add_child(child_node=child)
    self.assertFalse(child.is_data_model_consistent())
def test_execution_1(self):
    """An operator already in SUCCEEDED state keeps that status after execute()."""
    parent = DummyUtil.dummy_operator(operator_name='test_operator_1')
    child = DummyUtil.dummy_operator(operator_name='test_operator_2')
    parent.add_child(child_node=child)
    parent.set_status(status=Status.SUCCEEDED)
    parent.execute()
    snapshot = parent.get_operator_snapshot()
    self.assertEqual(snapshot.status, Status.SUCCEEDED)
def test_is_status_consistent_4(self):
    """A FAILED child under a SUCCEEDED parent still counts as status-consistent."""
    parent = DummyUtil.dummy_operator(operator_name='test_operator_1')
    child = DummyUtil.dummy_operator(operator_name='test_operator_2')
    parent.set_status(status=Status.SUCCEEDED)
    child.set_status(status=Status.FAILED)
    parent.add_child(child_node=child)
    self.assertTrue(child.is_status_consistent())
def test_is_status_consistent_1(self):
    """A RUNNING child under an IDLE parent is status-inconsistent."""
    parent = DummyUtil.dummy_operator(operator_name='test_operator_1')
    child = DummyUtil.dummy_operator(operator_name='test_operator_2')
    parent.set_status(status=Status.IDLE)
    child.set_status(status=Status.RUNNING)
    parent.add_child(child_node=child)
    self.assertFalse(child.is_status_consistent())
def test_execution_3(self):
    """A FAILED head operator propagates FAILED to every downstream operator."""
    head = DummyUtil.dummy_batch_operator(operator_name='test_operator_1')
    middle = DummyUtil.dummy_batch_operator(operator_name='test_operator_2')
    tail = DummyUtil.dummy_batch_operator(operator_name='test_operator_3')
    head.add_child(child_node=middle)
    middle.add_child(child_node=tail)
    head.set_status(status=Status.FAILED)
    middle.execute()
    tail.execute()
    self.assertEqual(middle.get_operator_snapshot().status, Status.FAILED)
    self.assertEqual(tail.get_operator_snapshot().status, Status.FAILED)
def __init__(self, client_name, server_url):
    """Create a client bound to *server_url*.

    The URL scheme is stripped so only host:port (and path, if any) is kept;
    the channel is created lazily elsewhere.
    """
    super().__init__()
    self._client_name = client_name
    self._logger = DummyUtil.dummy_logging()
    # Drop any scheme prefix; matches the original replace-based stripping.
    for scheme in ('http://', 'https://'):
        server_url = server_url.replace(scheme, '')
    self._server_url = server_url
    self._channel = None
def test_take_snapshot_1(self):
    """Snapshot captures operator name, children, data model, status and class path."""
    parent = DummyUtil.dummy_operator(operator_name='test_operator_1')
    child = DummyUtil.dummy_operator(operator_name='test_operator_2')
    parent.set_status(status=Status.SUCCEEDED)
    child.set_status(status=Status.FAILED)
    parent.add_child(child_node=child)

    node_snapshot = NodeSnapshot()
    node_snapshot.node_name = 'test_operator_1'
    node_snapshot.children_names.extend(['test_operator_2'])

    expected = OperatorSnapshot()
    expected.operator_name = 'test_operator_1'
    expected.data_model = DataModelType.DEFAULT
    expected.status = Status.SUCCEEDED
    expected.class_name = 'pslx.util.dummy_util.DummyOperator'
    expected.node_snapshot.CopyFrom(node_snapshot)

    self.assertEqual(parent.get_operator_snapshot(), expected)
def start(self):
    """Wire a TTL cleaner op into the container ahead of a dummy sink, then run."""
    cleaner = TTLCleanerOp()
    sink = DummyUtil.dummy_batch_operator(
        operator_name=self.get_class_name() + '_dummy')
    self.add_operator_edge(from_operator=cleaner, to_operator=sink)
    self.initialize()
    self.execute()
def listening_to_log():
    """Build and run the dedicated frontend logging container."""
    logging_op = FrontendDedicatedLoggingOp()
    container = FrontendDedicatedLoggingContainer()
    container.add_operator_edge(
        from_operator=logging_op,
        to_operator=DummyUtil.dummy_streaming_operator()
    )
    container.initialize()
    container.execute()
def __init__(self, logger=None):
    """Initialize reader/writer state with an optional logger.

    Args:
        logger: Logger-like object; when None a DummyUtil logging stub is used.
    """
    super().__init__()
    # Identity check against None: a caller-supplied logger that happens to
    # be falsy (e.g. a stub overriding __bool__) must not be silently replaced.
    self._logger = DummyUtil.dummy_logging() if logger is None else logger
    self._reader_status = Status.IDLE
    self._writer_status = Status.IDLE
def __init__(self, partitioner, logger=None,
             delay=TimeSleepObj.ONE_TENTH_SECOND, timeout=-1):
    """Watch a partitioner with a polling delay and optional timeout.

    Args:
        partitioner: Partitioner storage to watch.
        logger: Optional logger; a fresh dummy logger is created when omitted.
        delay: Seconds to sleep between polls.
        timeout: Give-up threshold; -1 means wait forever.
    """
    super().__init__()
    self._partitioner = partitioner
    self._timeout = timeout
    self._delay = delay
    # NOTE: the previous default `logger=DummyUtil.dummy_logger()` was
    # evaluated once at class-definition time and shared by every instance;
    # the None sentinel creates a logger per instance instead.
    self._logger = logger if logger is not None else DummyUtil.dummy_logger()
def __init__(self, connection_str, logger=None):
    """Open a blocking AMQP connection and declare an exclusive temp queue.

    Args:
        connection_str: AMQP URL understood by pika.URLParameters.
        logger: Optional logger; a fresh dummy logger is created when omitted.
    """
    super().__init__()
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logger()
    self._connection_str = connection_str
    self._connection = pika.BlockingConnection(
        pika.URLParameters(connection_str))
    self._channel = self._connection.channel()
    # Exclusive, server-named queue: deleted when the connection closes.
    tmp_result = self._channel.queue_declare(queue='', exclusive=True)
    self._tmp_queue_name = tmp_result.method.queue
    self._topic_names_to_types = {}
    self._op = None
def __init__(self, ws_url, params, logger=None):
    """Prepare a websocket client and grab (or create) an asyncio event loop.

    Args:
        ws_url: Websocket endpoint URL.
        params: Connection parameters forwarded on connect.
        logger: Optional logger; a fresh dummy logger is created when omitted.
    """
    super().__init__()
    self._ws_url = ws_url
    self._params = params
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logger()
    self._ws_connection = None
    self._op = None
    try:
        self._loop = asyncio.get_event_loop()
    except Exception as err:
        # No usable loop in this thread (e.g. non-main thread): make one.
        self._logger.warning("Getting the loop with error: " + str(err) + '.')
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)
def __init__(self, container_name, logger=None):
    """Set up container state and its snapshot folder under PSLX_SNAPSHOT_DIR.

    Args:
        container_name: Unique name for this container.
        logger: Optional logger; a fresh dummy logger is created when omitted.
    """
    super().__init__()
    self._container_name = container_name
    self._is_initialized = False
    self._snapshot_file_folder = FileUtil.join_paths_to_dir(
        EnvUtil.get_pslx_env_variable(var='PSLX_SNAPSHOT_DIR'),
        self._container_name)
    self._start_time = None
    self._end_time = None
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logger()
    self._upstream_ops = []
    self._backend = None
    self._status = Status.IDLE
    self._counter = defaultdict(int)
def __init__(self, service_name, rpc_storage=None):
    """Create the RPC service, optionally backed by a partitioner request log.

    Args:
        service_name: Name of this service.
        rpc_storage: Optional partitioner storage used to log requests.
    """
    Base.__init__(self)
    self._logger = DummyUtil.dummy_logger()
    self._service_name = service_name
    if rpc_storage:
        # Request logging only works on partitioner-type storage.
        assert rpc_storage.get_storage_type() == StorageType.PARTITIONER_STORAGE
        if 'ttl' not in rpc_storage.get_dir_name():
            self._SYS_LOGGER.warning("Warning. Please ttl the request log table.")
        rpc_storage.set_underlying_storage(storage=ProtoTableStorage())
        rpc_storage.set_max_capacity(
            max_capacity=EnvUtil.get_pslx_env_variable('PSLX_INTERNAL_CACHE'))
    self._rpc_storage = rpc_storage
    self._request_timestamp = collections.deque()
    self._request_response_pair = {}
def __init__(self, exchange_name, topic_name, connection_str, logger=None):
    """Open an AMQP connection and declare a direct exchange for publishing.

    Args:
        exchange_name: Exchange to publish into.
        topic_name: Routing topic for emitted messages.
        connection_str: AMQP URL understood by pika.URLParameters.
        logger: Optional logger; a fresh dummy logger is created when omitted.
    """
    super().__init__()
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logger()
    self._connection_str = connection_str
    self._topic_name = topic_name
    self._exchange_name = exchange_name
    self._connection = pika.BlockingConnection(
        pika.URLParameters(connection_str)
    )
    self._channel = self._connection.channel()
    self._channel.exchange_declare(
        exchange=self._exchange_name,
        exchange_type='direct'
    )
    self._logger.info("Start publisher with topic name [" + self._topic_name +
                      '] in exchange [' + self._exchange_name + '].')
    # Serialize publishes: pika channels are not thread-safe.
    self._emit_lock = threading.Lock()
def __init__(self, exchange, queue_name, connection_str, logger=None):
    """Open an AMQP connection and start auto-ack consumption on *queue_name*.

    Args:
        exchange: Exchange this consumer is associated with.
        queue_name: Queue to consume from.
        connection_str: AMQP URL understood by pika.URLParameters.
        logger: Optional logger; a fresh dummy logging stub is created when omitted.
    """
    super().__init__()
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logging()
    self._connection = pika.BlockingConnection(
        pika.URLParameters(connection_str))
    self._channel = self._connection.channel()
    self._queue_name = queue_name
    self._exchange = exchange
    self._channel.confirm_delivery()
    self._channel.basic_consume(queue=queue_name,
                                on_message_callback=self.on_response,
                                auto_ack=True)
    self._corr_id = None
    self._response = None
def __init__(self, queue_name, connection_str, logger=None):
    """Open an AMQP connection and set up an exclusive callback queue.

    Args:
        queue_name: Target queue for outgoing requests.
        connection_str: AMQP URL understood by pika.URLParameters.
        logger: Optional logger; a fresh dummy logger is created when omitted.
    """
    super().__init__()
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logger()
    self._connection = pika.BlockingConnection(
        pika.URLParameters(connection_str))
    self._channel = self._connection.channel()
    self._queue_name = queue_name
    # Exclusive, server-named queue used to receive replies.
    result = self._channel.queue_declare(queue='', exclusive=True)
    self._callback_queue = result.method.queue
    self._logger.info("Callback queue is [" + self._callback_queue + '].')
    self._channel.basic_consume(queue=self._callback_queue,
                                on_message_callback=self.on_response,
                                auto_ack=True)
    self._corr_id = None
    self._response = None
def __init__(self, container_name, logger=None, ttl=-1):
    """Set up container state and a TTL-managed snapshot folder.

    Args:
        container_name: Unique name for this container.
        logger: Optional logger; a fresh dummy logging stub is created when omitted.
        ttl: Time-to-live for the snapshot folder; -1 means keep forever.
    """
    super().__init__()
    self._container_name = container_name
    self._is_initialized = False
    self._snapshot_file_folder = FileUtil.join_paths_to_dir_with_mode(
        root_dir=FileUtil.join_paths_to_dir(
            root_dir=EnvUtil.get_pslx_env_variable(var='PSLX_DATABASE'),
            base_name='snapshots'
        ),
        base_name=self.get_class_name() + '__' + container_name,
        ttl=ttl
    )
    self._start_time = None
    self._end_time = None
    # None sentinel instead of a default evaluated at class-definition time,
    # which would be shared across all instances.
    self._logger = logger if logger is not None else DummyUtil.dummy_logging()
    self._upstream_ops = []
    self._backend = None
    self._status = Status.IDLE
    self._counter = defaultdict(int)
subscriber = Subscriber( connection_str='amqp://*****:*****@localhost:5672' ) subscriber.bind_to_op(self) subscriber.subscribe( exchange_name='test_exchange_1', topic_name='test1', message_type=HealthCheckerRequest ) subscriber.subscribe( exchange_name='test_exchange_2', topic_name='test2', message_type=HealthCheckerRequest ) subscriber.start() class SubscriberExampleContainer(DefaultStreamingContainer): def __init__(self): super().__init__(container_name='subscriber_example_container', ttl=7) if __name__ == "__main__": op = SubscriberExampleOp() container = SubscriberExampleContainer() container.add_operator_edge(from_operator=op, to_operator=DummyUtil.dummy_streaming_operator()) container.initialize() container.execute()
def test_set_status(self):
    """The status written by set_status() is reflected by get_status()."""
    op = DummyUtil.dummy_operator(operator_name='test_operator')
    op.set_status(status=Status.FAILED)
    self.assertEqual(op.get_status(), Status.FAILED)
def test_is_done(self):
    """A FAILED operator does not count as done."""
    op = DummyUtil.dummy_operator(operator_name='test_operator')
    op.set_status(status=Status.FAILED)
    self.assertFalse(op.is_done())
def __init__(self, logger=None):
    """Initialize with an optional logger.

    Args:
        logger: Logger-like object; when None a DummyUtil dummy logger is used.
    """
    super().__init__()
    # Identity check against None: a caller-supplied logger that happens to
    # be falsy (e.g. a stub overriding __bool__) must not be silently replaced.
    self._logger = DummyUtil.dummy_logger() if logger is None else logger
def test_mark_as_done(self):
    """mark_as_done() flips the operator into the done state."""
    op = DummyUtil.dummy_operator(operator_name='test_operator')
    op.mark_as_done()
    self.assertTrue(op.is_done())
def test_unset_model(self):
    """unset_data_model() resets the data model to DEFAULT."""
    op = DummyUtil.dummy_operator(operator_name='test_operator')
    op.unset_data_model()
    self.assertEqual(op.get_data_model(), DataModelType.DEFAULT)
def test_set_data_model(self):
    """The model written by set_data_model() is reflected by get_data_model()."""
    op = DummyUtil.dummy_operator(operator_name='test_operator')
    op.set_data_model(model=DataModelType.BATCH)
    self.assertEqual(op.get_data_model(), DataModelType.BATCH)
container1 = HelloWorldContainer() container1.bind_backend( server_url="localhost:11443" ) container1.add_operator_edge(from_operator=op1, to_operator=op3) container1.add_operator_edge(from_operator=op1, to_operator=op4) container1.add_operator_edge(from_operator=op2, to_operator=op3) container1.add_operator_edge(from_operator=op2, to_operator=op4) container1.initialize() container1.execute() container2 = HelloWorldContainer(container_name='hello_world_container_2') container2.bind_backend( server_url="localhost:11443" ) dummy_op = DummyUtil.dummy_batch_operator(operator_name='dummy') op1.set_config( config={ 'save_snapshot': True, } ) op1.unset_dependency() op1.unset_status() op2.unset_dependency() op2.unset_status() op3.unset_dependency() op3.unset_status() op4.unset_dependency() op4.unset_status()
def test_unset_status(self):
    """unset_status() resets the operator status back to IDLE."""
    op = DummyUtil.dummy_operator(operator_name='test_operator')
    op.unset_status()
    self.assertEqual(op.get_status(), Status.IDLE)
op2 = HelloWorldOp(operator_name='hello_world_op2') op3 = HelloWorldOp(operator_name='hello_world_op3') op4 = HelloWorldOp(operator_name='hello_world_op4') container1 = HelloWorldContainer() container1.bind_backend(server_url="localhost:11443") container1.add_operator_edge(from_operator=op1, to_operator=op3) container1.add_operator_edge(from_operator=op1, to_operator=op4) container1.add_operator_edge(from_operator=op2, to_operator=op3) container1.add_operator_edge(from_operator=op2, to_operator=op4) container1.initialize() container1.execute() op1.unset_dependency() op1.unset_status() op2.unset_dependency() op2.unset_status() op3.unset_dependency() op3.unset_status() op4.unset_dependency() op4.unset_status() container2 = HelloWorldContainer(container_name='hello_world_container_2') container2.bind_backend(server_url="localhost:11443") dummy_op = DummyUtil.dummy_streaming_operator(operator_name='dummy') container2.add_operator_edge(from_operator=op1, to_operator=dummy_op) container2.add_operator_edge(from_operator=op2, to_operator=dummy_op) container2.add_operator_edge(from_operator=op3, to_operator=dummy_op) container2.add_operator_edge(from_operator=op4, to_operator=dummy_op) container2.initialize() container2.execute()
def test_wait_for_upstream_status(self):
    """With a SUCCEEDED upstream there is nothing left to wait for."""
    parent = DummyUtil.dummy_operator(operator_name='test_operator_1')
    child = DummyUtil.dummy_operator(operator_name='test_operator_2')
    parent.add_child(child_node=child)
    parent.set_status(status=Status.SUCCEEDED)
    self.assertListEqual(child.wait_for_upstream_status(), [])