def main_run(self):
    # # # # Initialize Executor object
    __executor = SimpleExecutor(mode=RunningMode.Asynchronous, executors=self.__Executor_Number)

    # # # # Running the Executor
    # # # # Asynchronous version of generally running
    __executor.run(
        function=self.__example.async_target_function,
        # function=self.__example.target_function,
        args=("index_1", "index_2.2"))

    # # # # Asynchronous version of generally running which will raise an exception
    # __executor.run(
    #     function=self.__example.async_target_fail_function,
    #     args=("index_1", "index_2.2"))

    # # # # Map running which will raise an exception
    # __executor.map(
    #     function=self.__example.async_target_function,
    #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

    # # # # Function version of map running which will raise an exception
    # __executor.map_with_function(
    #     functions=[self.__example.async_target_function, self.__example.async_target_function],
    #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

    # # # # Get result
    __result = __executor.result()
    print("Result: ", __result)
def main_run(self):
    # Initialize RLock object
    __rlock = RLockFactory()

    # # # # Initialize Executor object
    __executor = SimpleExecutor(mode=RunningMode.Parallel, executors=self.__Executor_Number)
    # __executor = SimpleExecutor(mode=RunningMode.Concurrent, executors=self.__Executor_Number)
    # __executor = SimpleExecutor(mode=RunningMode.GreenThread, executors=self.__Executor_Number)

    # # # # Running the Executor
    # # # # Generally running
    __executor.run(
        function=self.__example.target_function,
        args=("index_1", "index_2.2"),
        features=__rlock)

    # # # # Map running which will raise an exception
    # __executor.map(
    #     function=self.__example.target_function,
    #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)],
    #     features=__rlock)

    # # # # Function version of map running which will raise an exception
    # __executor.map_with_function(
    #     functions=[self.__example.target_function, self.__example.target_function],
    #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)],
    #     features=__rlock)

    # # # # Get result
    __result = __executor.result()
    print("Result: ", __result)
def test_kill(self, executor_as_thread: SimpleExecutor):
    try:
        executor_as_thread.kill()
    except Exception as e:
        assert False, f"It should work fine without any issue, but it raised an exception: {e}"
    else:
        assert True, "It works fine without any issue."
def main_run(self):
    test_dao = TestDao(db_driver="mysql", use_pool=False)
    # test_dao = AsyncTestDao(db_driver="mysql", use_pool=False)

    # # # # Initialize and instantiate feature objects: Queue Task, Lock and Bounded Semaphore
    _queue_task = self.__init_queue()
    _features = self.__init_features()

    # Record the start time for the timing log at the end
    start_time = time.time()

    _executor = SimpleExecutor(
        mode=RunningMode.Parallel,
        # mode=RunningMode.Concurrent,
        # mode=RunningMode.GreenThread,
        # mode=RunningMode.Asynchronous,
        executors=self.__Worker_Number)
    _executor.run(
        function=test_dao.get_test_data,
        queue_tasks=_queue_task,
        features=_features)
    result = _executor.result()

    for r in result:
        print(f"+============ {r.worker_ident} =============+")
        print("Result.pid: ", r.pid)
        print("Result.worker_id: ", r.worker_ident)
        print("Result.worker_name: ", r.worker_name)
        print("Result.state: ", r.state)
        print("Result.data: ", r.data)
        print("Result.exception: ", r.exception)
        print("+====================================+\n")

    self.__save_to_files(result=result)

    end_time = time.time()
    self.__logger.info(f"Total time taken: {end_time - start_time} seconds")
def test_map(self, instantiate_adapter_executor: SimpleExecutor):
    TestSimpleExecutor._initial()

    instantiate_adapter_executor.map(function=target_function_for_map, args_iter=Test_Function_Args)
    _results = instantiate_adapter_executor.result()
    assert len(_results) == len(Test_Function_Args), "The amount of results should be the same as the amount of arguments."

    TestSimpleExecutor._chk_run_record(expected_size=len(Test_Function_Args))
def test_run(self, instantiate_adapter_executor: SimpleExecutor):
    TestSimpleExecutor._initial()

    instantiate_adapter_executor.run(function=target_function)
    _results = instantiate_adapter_executor.result()
    assert len(_results) == _Worker_Size, "The amount of results should be the same as the executors size."

    TestSimpleExecutor._chk_run_record(expected_size=_Worker_Size)
def main_run(cls):
    # Initialize Condition object
    __condition = ConditionFactory()

    # Initialize Queue object
    __task = QueueTask()
    __task.name = "test_queue"
    # __task.queue_type = Process_Queue()
    __task.queue_type = Thread_Queue()
    # __task.queue_type = Async_Queue()
    __task.value = []

    # Initialize and run the SimpleExecutor
    # __exe = SimpleExecutor(mode=RunningMode.Parallel, executors=cls.__Executor_Number)
    __exe = SimpleExecutor(mode=RunningMode.Concurrent, executors=cls.__Executor_Number)
    # __exe = SimpleExecutor(mode=RunningMode.GreenThread, executors=cls.__Executor_Number)
    # __exe = SimpleExecutor(mode=RunningMode.Asynchronous, executors=cls.__Executor_Number)

    # # # # Run without arguments
    __exe.map_with_function(
        functions=[cls.__producer_p.send_process, cls.__consumer_p.receive_process],
        queue_tasks=__task,
        features=__condition)
def test_run(self, executor_as_thread: SimpleExecutor):
    TestSimpleExecutor._initial()

    def _target(*args, **kwargs):
        global Running_Count, Running_Thread_IDs, Running_PPIDs, Running_Current_Threads, Running_Finish_Timestamp

        with _Thread_Lock:
            Running_Count += 1

            _pid = os.getpid()
            _ppid = os.getppid()
            _ident = threading.get_ident()
            # _time = str(datetime.datetime.now())
            _time = int(time.time())

            Running_Thread_IDs.append(_ident)
            Running_PPIDs.append(_ppid)
            Running_Current_Threads.append(str(threading.current_thread()))
            Running_Finish_Timestamp.append(_time)

        time.sleep(Test_Function_Sleep_Time)
        return f"result_{threading.current_thread()}"

    executor_as_thread.run(function=_target)

    # Do some checking
    # 1. The amount of workers should be the same as the value of option *executors*.
    # 2. The amount of thread IDs should be the same as the value of option *executors*.
    # 3. The done-timestamps should be very close to each other.
    TestSimpleExecutor._chk_run_record()
def test_start_new_worker(self, instantiate_adapter_executor: SimpleExecutor):
    TestSimpleExecutor._initial()

    _workers = [instantiate_adapter_executor.start_new_worker(target=target_function) for _ in range(_Worker_Size)]
    instantiate_adapter_executor.close(workers=_workers)
    _results = instantiate_adapter_executor.result()
    assert len(_results) == _Worker_Size, "The amount of results should be the same as the amount of workers."

    TestSimpleExecutor._chk_run_record(expected_size=_Worker_Size)
def test_initial_running_strategy_with_parallel(self, executor_as_process: SimpleExecutor):
    executor_as_process._initial_running_strategy()

    from multirunnable.executor import General_Runnable_Strategy
    assert General_Runnable_Strategy is not None, "It should be assigned a running-strategy instance."
    assert isinstance(General_Runnable_Strategy, ProcessStrategy), "It should be a sub-instance of 'ProcessStrategy'."
def test_initial_running_strategy_with_coroutine(self, executor_as_green_thread: SimpleExecutor):
    executor_as_green_thread._initial_running_strategy()

    from multirunnable.executor import General_Runnable_Strategy
    assert General_Runnable_Strategy is not None, "It should be assigned a running-strategy instance."
    assert isinstance(General_Runnable_Strategy, GreenThreadStrategy), "It should be a sub-instance of 'GreenThreadStrategy'."
def test_initial_running_strategy_with_asynchronous(self, executor_as_asynchronous: SimpleExecutor):
    executor_as_asynchronous._initial_running_strategy()

    from multirunnable.executor import General_Runnable_Strategy
    assert General_Runnable_Strategy is not None, "It should be assigned a running-strategy instance."
    assert isinstance(General_Runnable_Strategy, AsynchronousStrategy), "It should be a sub-instance of 'AsynchronousStrategy'."
def main_run(self):
    # # # # Initialize Executor object
    __executor = SimpleExecutor(mode=RunningMode.Asynchronous, executors=self.__Executor_Number)

    # # # # Running the Executor
    # # # # Generally running
    __executor.run(function=self.__example.crawl_process)

    # # # # Get result
    __result = __executor.result()
    print("Result: ", __result)
def main_run(self):
    # # # # Initialize Executor object
    __executor = SimpleExecutor(mode=RunningMode.Parallel, executors=self.__Executor_Number)
    # __executor = SimpleExecutor(mode=RunningMode.Concurrent, executors=self.__Executor_Number)
    # __executor = SimpleExecutor(mode=RunningMode.GreenThread, executors=self.__Executor_Number)

    # # # # Running the Executor
    # # # # Generally running
    __executor.run(function=self.__example.crawl_process)

    # # # # Get result
    __result = __executor.result()
    print("Result: ", __result)
def main_run(cls):
    # Initialize Event object
    __event = EventFactory()

    # # # # Initialize and run the SimpleExecutor
    # __exe = SimpleExecutor(mode=RunningMode.Parallel, executors=cls.__Executor_Number)
    __exe = SimpleExecutor(mode=RunningMode.Concurrent, executors=cls.__Executor_Number)
    # __exe = SimpleExecutor(mode=RunningMode.GreenThread, executors=cls.__Executor_Number)
    # __exe = SimpleExecutor(mode=RunningMode.Asynchronous, executors=cls.__Executor_Number)

    # # # # Run without arguments
    __exe.map_with_function(
        functions=[cls.__wakeup_p.wake_other_process, cls.__sleep_p.go_sleep],
        features=__event)
def main_run(cls):
    # Initialize Event object
    __event = EventFactory()

    # # # # Initialize and run the SimpleExecutor
    __exe = SimpleExecutor(mode=RunningMode.Asynchronous, executors=cls.__Executor_Number)

    # # # # Run without arguments
    # # # # Asynchronous version of running without arguments
    __exe.map_with_function(
        functions=[cls.__wakeup_p.async_wake_other_process, cls.__sleep_p.async_go_sleep],
        features=__event)
def test_result(self, instantiate_adapter_executor: SimpleExecutor):
    def _target(*args, **kwargs):
        _current_worker = threading.current_thread()
        return f"result_{_current_worker}"

    instantiate_adapter_executor.run(function=_target)
    _results = instantiate_adapter_executor.result()
    assert len(_results) == _Worker_Size, "The amount of results should be the same as the executors size."
    for _r in _results:
        assert "result_" in _r.data, "The data attribute should be the return value of the target function."
        assert _r.worker_name, "The worker_name attribute should not be empty."
        assert _r.worker_ident, "The worker_ident attribute should not be empty."
        assert _r.state, "The state attribute should not be empty."
        assert _r.pid, "The pid attribute should not be empty."
        assert _r.exception is None, "The exception attribute should be None."
def test_initial_running_strategy(self, instantiate_executor: SimpleExecutor):
    instantiate_executor._initial_running_strategy()

    from multirunnable.executor import General_Runnable_Strategy
    assert General_Runnable_Strategy is not None, "It should be assigned a running-strategy instance."

    _rmode = get_current_mode(force=True)
    if _rmode is RunningMode.Parallel:
        assert isinstance(General_Runnable_Strategy, ProcessStrategy), "It should be a sub-instance of 'ProcessStrategy'."
    elif _rmode is RunningMode.Concurrent:
        assert isinstance(General_Runnable_Strategy, ThreadStrategy), "It should be a sub-instance of 'ThreadStrategy'."
    elif _rmode is RunningMode.GreenThread:
        assert isinstance(General_Runnable_Strategy, GreenThreadStrategy), "It should be a sub-instance of 'GreenThreadStrategy'."
    elif _rmode is RunningMode.Asynchronous:
        assert isinstance(General_Runnable_Strategy, AsynchronousStrategy), "It should be a sub-instance of 'AsynchronousStrategy'."
    else:
        raise ValueError("The RunningMode has an unexpected mode.")
def test_map(self, executor_as_thread: SimpleExecutor):
    TestSimpleExecutor._initial()

    # _args = ("index_1", "index_2", "index_3", "index_4", "index_5")    # Bug 1.
    _args = [("index_1",), ("index_2",), ("index_3",), ("index_4",), ("index_5",)]

    def _target(*args, **kwargs):
        global Running_Count, Running_Thread_IDs, Running_PPIDs, Running_Current_Threads, Running_Finish_Timestamp

        with _Thread_Lock:
            Running_Count += 1

            if args:
                if len(args) == 1:
                    assert {args} <= set(_args), "The argument *args* should be one of the elements of the outside input."
                else:
                    assert set(args) <= set(_args), "The argument *args* should be one of the elements of the outside input."
                if len(args) > 1:
                    assert args == _args, "The argument *args* should be the same as the outside variable '_args'."
            if kwargs:
                assert kwargs is None or kwargs == {}, "The argument *kwargs* should be empty or None."

            _pid = os.getpid()
            _ppid = os.getppid()
            _ident = threading.get_ident()
            # _time = str(datetime.datetime.now())
            _time = int(time.time())

            Running_Thread_IDs.append(_ident)
            Running_PPIDs.append(_ppid)
            Running_Current_Threads.append(str(threading.current_thread()))
            Running_Finish_Timestamp.append(_time)

        time.sleep(Test_Function_Sleep_Time)
        return f"result_{threading.current_thread()}"

    executor_as_thread.map(function=_target, args_iter=_args)

    # Do some checking
    # 1. The amount of workers should be the same as the amount of parameters.
    # 2. The amount of thread IDs should be the same as the amount of parameters.
    # 3. The done-timestamps should be very close to each other.
    TestSimpleExecutor._chk_map_record(len(_args))
def main_run(cls):
    # Initialize Condition object
    __condition = ConditionFactory()

    # Initialize Queue object
    __task = QueueTask()
    __task.name = "test_queue"
    __task.queue_type = Async_Queue()
    __task.value = []

    # Initialize and run the SimpleExecutor
    __exe = SimpleExecutor(mode=RunningMode.Asynchronous, executors=cls.__Executor_Number)

    # # # # Run without arguments
    # # # # Asynchronous version of running without arguments
    __exe.map_with_function(
        functions=[cls.__producer_p.async_send_process, cls.__consumer_p.async_receive_process],
        queue_tasks=__task,
        features=__condition)
def test_map_with_function(self, instantiate_executor: SimpleExecutor):
    TestSimpleExecutor._initial()
    initial_rlock()

    _function_a_flag = Global_Manager.Value(int, 0)
    _function_b_flag = Global_Manager.Value(int, 0)
    _workers_ids = Global_Manager.list()
    _current_workers = Global_Manager.list()
    _done_timestamp = Global_Manager.list()

    def _target_a(*args):
        nonlocal _function_a_flag, _workers_ids, _current_workers, _done_timestamp

        _function_a_flag.value += 1
        _ident = _get_worker_id()
        _current_worker = _get_current_worker()
        _workers_ids.append(_ident)
        _current_workers.append(str(_current_worker))
        _done_timestamp.append(int(time.time()))
        _sleep_time()

    def _target_b(*args):
        nonlocal _function_b_flag, _workers_ids, _current_workers, _done_timestamp

        _function_b_flag.value += 1
        _ident = _get_worker_id()
        _current_worker = _get_current_worker()
        _workers_ids.append(_ident)
        _current_workers.append(str(_current_worker))
        _done_timestamp.append(int(time.time()))
        _sleep_time()

    _functions = [_target_a, _target_b]
    instantiate_executor.map_with_function(functions=_functions)
    _results = instantiate_executor.result()
    assert len(_results) == len(_functions), "The amount of results should be the same as the amount of functions."

    TestSimpleExecutor._chk_map_with_function(_functions, _function_a_flag, _function_b_flag, _workers_ids, _current_workers, _done_timestamp)
def test_map_with_function(self, executor_as_thread: SimpleExecutor):
    TestSimpleExecutor._initial()

    _function_a_flag = 0
    _function_b_flag = 0
    _thread_ids = []
    _threads = []
    _done_timestamp = []

    def _target_a():
        # with _Thread_RLock:
        nonlocal _function_a_flag
        _function_a_flag += 1
        _thread_ids.append(threading.get_ident())
        _threads.append(threading.current_thread())
        _done_timestamp.append(int(time.time()))
        time.sleep(Test_Function_Sleep_Time)

    def _target_b():
        # with _Thread_RLock:
        nonlocal _function_b_flag
        _function_b_flag += 1
        _thread_ids.append(threading.get_ident())
        _threads.append(threading.current_thread())
        _done_timestamp.append(int(time.time()))
        time.sleep(Test_Function_Sleep_Time)

    _functions = [_target_a, _target_b]
    executor_as_thread.map_with_function(functions=_functions)

    # Do some checking
    # 1. The amount of workers should be the same as the amount of functions.
    # 2. The amount of thread IDs should be the same as the amount of functions.
    # 3. The done-timestamps should be very close to each other.
    TestSimpleExecutor._chk_map_with_function(_functions, _function_a_flag, _function_b_flag, _thread_ids, _threads, _done_timestamp)
def main_run(self):
    # # # # Initialize Executor object
    # __executor = SimpleExecutor(mode=RunningMode.Parallel, executors=self.__Executor_Number)
    # __executor = SimpleExecutor(mode=RunningMode.Concurrent, executors=self.__Executor_Number)
    __executor = SimpleExecutor(mode=RunningMode.GreenThread, executors=self.__Executor_Number)

    # # # # Generally running with 'start_new_worker'
    _args = ("index_1", "index_2.2")
    _kwargs = {"param_1": "index_1", "param_2": "index_2.2"}
    _workers_list = []
    for _ in range(5):
        _worker = __executor.start_new_worker(target=self.__example.target_function)
        # _worker = __executor.start_new_worker(target=self.__example.target_function, args=_args)
        # _worker = __executor.start_new_worker(self.__example.target_function, _args)
        # _worker = __executor.start_new_worker(self.__example.target_function, kwargs=_kwargs)
        _workers_list.append(_worker)
    __executor.close(_workers_list)

    # # # # Running the Executor
    # # # # Generally running
    # __executor.run(
    #     function=self.__example.target_function,
    #     args=("index_1", "index_2.2"))

    # # # # Generally running which will raise an exception
    # __executor.run(
    #     function=self.__example.target_fail_function,
    #     args=("index_1", "index_2.2"))

    # # # # Map running which will raise an exception
    # __executor.map(
    #     function=self.__example.target_function,
    #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

    # # # # Function version of map running which will raise an exception
    # __executor.map_with_function(
    #     functions=[self.__example.target_function, self.__example.target_function],
    #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

    # # # # Get result
    __result = __executor.result()
    print("Result: ", __result)
# Import package multirunnable
import pathlib
import random
import time
import sys

package_path = str(pathlib.Path(__file__).parent.parent.parent.absolute())
sys.path.append(package_path)

from multirunnable import SimpleExecutor, RunningMode


def function(index):
    print(f"This is function with index {index}")
    time.sleep(3)
    return "Return Value"


executor = SimpleExecutor(mode=RunningMode.Parallel, executors=3)
executor.run(function=function, args={"index": f"test_{random.randrange(1, 10)}"})
result = executor.result()
print(f"This is final result: {result}")
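# A minimal follow-up sketch, not part of the original quickstart: it dispatches
# the same `function` once per argument tuple via `map`, whose
# `map(function=..., args_iter=...)` signature is taken from the other examples
# in this section. The executor name below is illustrative only.
map_executor = SimpleExecutor(mode=RunningMode.Parallel, executors=3)
map_executor.map(function=function, args_iter=[("test_1",), ("test_2",), ("test_3",)])
print(f"This is the map result: {map_executor.result()}")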
def executor_as_asynchronous():
    return SimpleExecutor(mode=RunningMode.Asynchronous, executors=_Worker_Size)

def executor_as_green_thread():
    return SimpleExecutor(mode=RunningMode.GreenThread, executors=_Worker_Size)

def executor_as_thread():
    return SimpleExecutor(mode=RunningMode.Concurrent, executors=_Worker_Size)

def executor_as_process():
    return SimpleExecutor(mode=RunningMode.Parallel, executors=_Worker_Size)
def test_result(self, executor_as_thread: SimpleExecutor):
    executor_as_thread.result()
_Worker_Size = 7
_Semaphore_Value = 2


def target_function_with_bsmp() -> str:
    return retry_success_with_bsmp()


@retry.function
@RunWith.Bounded_Semaphore
def retry_success_with_bsmp() -> str:
    print("Running function ...")
    _worker_name = context.get_current_worker_name()
    return f"{_worker_name} Running Result"


if __name__ == '__main__':

    set_mode(RunningMode.Parallel)

    _bsmp_factory = BoundedSemaphoreFactory(value=_Semaphore_Value)
    _sexor = SimpleExecutor(executors=_Worker_Size)
    _sexor.run(function=target_function_with_bsmp, features=_bsmp_factory)

    _result = _sexor.result()
    for _r in _result:
        print("++++++++++++++++++++++++++++")
        print(f"worker_name: {_r.worker_name}")
        print(f"data: {_r.data}")
        print(f"exception: {_r.exception}")
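    # A variant sketch (an assumption, not from the original example): since the
    # running mode above is configured globally via `set_mode` and the executor
    # is created without an explicit `mode` argument, the same code should be
    # re-targetable at threads by changing only that one line, e.g.:
    #
    # set_mode(RunningMode.Concurrent)
    # _sexor = SimpleExecutor(executors=_Worker_Size)
    # _sexor.run(function=target_function_with_bsmp, features=_bsmp_factory)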