    def main_run(self):
        # Initialize RLock factory object
        __rlock = RLockFactory()

        # # # # Initialize Executor object
        __executor = SimpleExecutor(mode=RunningMode.Parallel,
                                    executors=self.__Executor_Number)
        # __executor = SimpleExecutor(mode=RunningMode.Concurrent, executors=self.__Executor_Number)
        # __executor = SimpleExecutor(mode=RunningMode.GreenThread, executors=self.__Executor_Number)

        # # # # Running the Executor
        # # # # Generally running
        __executor.run(function=self.__example.target_function,
                       args=("index_1", "index_2.2"),
                       features=__rlock)

        # # # # Map running which will raise exception
        # __executor.map(
        #     function=self.__example.target_function,
        #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)],
        #     features=__rlock)

        # # # # Function version of map running which will raise exception
        # __executor.map_with_function(
        #     functions=[self.__example.target_function, self.__example.target_function],
        #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)],
        #     features=__rlock)

        # # # # Get result
        __result = __executor.result()
        print("Result: ", __result)
    def main_run(self):
        start_time = time.time()    # mark the start time for the elapsed-time log at the end of this method

        test_dao = TestDao(db_driver="mysql", use_pool=False)
        # test_dao = AsyncTestDao(db_driver="mysql", use_pool=False)

        # # # # Initialize and instantiate feature objects: Queue Task, Lock and Bounded Semaphore
        _queue_task = self.__init_queue()
        _features = self.__init_features()

        _executor = SimpleExecutor(
            mode=RunningMode.Parallel,
            # mode=RunningMode.Concurrent,
            # mode=RunningMode.GreenThread,
            # mode=RunningMode.Asynchronous,
            executors=self.__Worker_Number)

        _executor.run(function=test_dao.get_test_data,
                      queue_tasks=_queue_task,
                      features=_features)
        result = _executor.result()

        for r in result:
            print(f"+============ {r.worker_ident} =============+")
            print("Result.pid: ", r.pid)
            print("Result.worker_id: ", r.worker_ident)
            print("Result.worker_name: ", r.worker_name)
            print("Result.state: ", r.state)
            print("Result.data: ", r.data)
            print("Result.exception: ", r.exception)
            print("+====================================+\n")

        self.__save_to_files(result=result)

        end_time = time.time()
        self.__logger.info(
            f"Total time taken: {end_time - start_time} seconds")
Example #3
    def main_run(self):
        # # # # Initialize Executor object
        __executor = SimpleExecutor(mode=RunningMode.Asynchronous, executors=self.__Executor_Number)

        # # # # Running the Executor
        # # # # Asynchronous version of generally running
        __executor.run(
            function=self.__example.async_target_function,
            # function=self.__example.target_function,
            args=("index_1", "index_2.2"))

        # # # # Asynchronous version of generally running which will raise exception
        # __executor.run(
        #     function=self.__example.async_target_fail_function,
        #     args=("index_1", "index_2.2"))

        # # # # Map running which will raise exception
        # __executor.map(
        #     function=self.__example.async_target_function,
        #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

        # # # # Function version of map running which will raise exception
        # __executor.map_with_function(
        #     functions=[self.__example.async_target_function, self.__example.async_target_function],
        #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

        # # # # Get result
        __result = __executor.result()
        print("Result: ", __result)
Example #4
    def test_map(self, instantiate_adapter_executor: SimpleExecutor):
        TestSimpleExecutor._initial()

        instantiate_adapter_executor.map(function=target_function_for_map, args_iter=Test_Function_Args)
        _results = instantiate_adapter_executor.result()
        assert len(_results) == len(Test_Function_Args), ""
        TestSimpleExecutor._chk_run_record(expected_size=len(Test_Function_Args))
Example #5
    def test_run(self, instantiate_adapter_executor: SimpleExecutor):
        TestSimpleExecutor._initial()

        instantiate_adapter_executor.run(function=target_function)
        _results = instantiate_adapter_executor.result()
        assert len(_results) == _Worker_Size, ""
        TestSimpleExecutor._chk_run_record(expected_size=_Worker_Size)
Example #6
    def test_start_new_worker(self, instantiate_adapter_executor: SimpleExecutor):
        TestSimpleExecutor._initial()

        _workers = [instantiate_adapter_executor.start_new_worker(target=target_function) for _ in range(_Worker_Size)]
        instantiate_adapter_executor.close(workers=_workers)
        _results = instantiate_adapter_executor.result()
        assert len(_results) == _Worker_Size, ""
        TestSimpleExecutor._chk_run_record(expected_size=_Worker_Size)
    def main_run(self):
        # # # # Initialize Executor object
        __executor = SimpleExecutor(mode=RunningMode.Asynchronous, executors=self.__Executor_Number)

        # # # # Running the Executor
        # # # # Generally running
        __executor.run(function=self.__example.crawl_process)

        # # # # Get result
        __result = __executor.result()
        print("Result: ", __result)
    def main_run(self):
        # # # # Initialize Executor object
        __executor = SimpleExecutor(mode=RunningMode.Parallel, executors=self.__Executor_Number)
        # __executor = SimpleExecutor(mode=RunningMode.Concurrent, executors=self.__Executor_Number)
        # __executor = SimpleExecutor(mode=RunningMode.GreenThread, executors=self.__Executor_Number)

        # # # # Running the Executor
        # # # # Generally running
        __executor.run(function=self.__example.crawl_process)

        # # # # Get result
        __result = __executor.result()
        print("Result: ", __result)
Example #9
    def test_result(self, instantiate_adapter_executor: SimpleExecutor):

        def _target(*args, **kwargs):
            _current_worker = threading.current_thread()
            return f"result_{_current_worker}"

        instantiate_adapter_executor.run(function=_target)
        _results = instantiate_adapter_executor.result()
        assert len(_results) == _Worker_Size, ""
        for _r in _results:
            assert "result_" in _r.data, ""
            assert _r.worker_name, ""
            assert _r.worker_ident, ""
            assert _r.state, ""
            assert _r.pid, ""
            assert _r.exception is None, ""
    def main_run(self):
        # # # # Initialize Executor object
        # __executor = SimpleExecutor(mode=RunningMode.Parallel, executors=self.__Executor_Number)
        # __executor = SimpleExecutor(mode=RunningMode.Concurrent, executors=self.__Executor_Number)
        __executor = SimpleExecutor(mode=RunningMode.GreenThread,
                                    executors=self.__Executor_Number)

        # # # # Generally running with 'start_new_worker'
        _args = ("index_1", "index_2.2")
        _kwargs = {"param_1": "index_1", "param_2": "index_2.2"}
        _workers_list = []
        for _ in range(5):
            _worker = __executor.start_new_worker(
                target=self.__example.target_function)
            # _worker = __executor.start_new_worker(target=self.__example.target_function, args=_args)
            # _worker = __executor.start_new_worker(self.__example.target_function, _args)
            # _worker = __executor.start_new_worker(self.__example.target_function, kwargs=_kwargs)
            _workers_list.append(_worker)
        __executor.close(_workers_list)

        # # # # Running the Executor
        # # # # Generally running
        # __executor.run(
        #     function=self.__example.target_function,
        #     args=("index_1", "index_2.2"))

        # # # # Generally running which will raise exception
        # __executor.run(
        #     function=self.__example.target_fail_function,
        #     args=("index_1", "index_2.2"))

        # # # # Map running which will raise exception
        # __executor.map(
        #     function=self.__example.target_function,
        #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

        # # # # Function version of map running which will raise exception
        # __executor.map_with_function(
        #     functions=[self.__example.target_function, self.__example.target_function],
        #     args_iter=[("index_1", "index_2.2"), ("index_3",), (1, 2, 3)])

        # # # # Get result
        __result = __executor.result()
        print("Result: ", __result)
Example #11
    def test_map_with_function(self, instantiate_executor: SimpleExecutor):

        TestSimpleExecutor._initial()
        initial_rlock()

        _function_a_flag = Global_Manager.Value(int, 0)
        _function_b_flag = Global_Manager.Value(int, 0)

        _workers_ids = Global_Manager.list()
        _current_workers = Global_Manager.list()
        _done_timestamp = Global_Manager.list()

        def _target_a(*args):
            nonlocal _function_a_flag, _workers_ids, _current_workers, _done_timestamp

            _function_a_flag.value += 1
            _ident = _get_worker_id()
            _current_worker = _get_current_worker()

            _workers_ids.append(_ident)
            _current_workers.append(str(_current_worker))
            _done_timestamp.append(int(time.time()))

            _sleep_time()

        def _target_b(*args):
            nonlocal _function_b_flag, _workers_ids, _current_workers, _done_timestamp

            _function_b_flag.value += 1
            _ident = _get_worker_id()
            _current_worker = _get_current_worker()

            _workers_ids.append(_ident)
            _current_workers.append(str(_current_worker))
            _done_timestamp.append(int(time.time()))

            _sleep_time()

        _functions = [_target_a, _target_b]
        instantiate_executor.map_with_function(functions=_functions)
        _results = instantiate_executor.result()
        assert len(_results) == len(_functions), ""
        TestSimpleExecutor._chk_map_with_function(_functions, _function_a_flag, _function_b_flag, _workers_ids, _current_workers, _done_timestamp)
Example #12
    def test_result(self, executor_as_thread: SimpleExecutor):
        executor_as_thread.result()
# Import package multirunnable
import pathlib
import random
import time
import sys

package_path = str(pathlib.Path(__file__).parent.parent.parent.absolute())
sys.path.append(package_path)

from multirunnable import SimpleExecutor, RunningMode


def function(index):
    print(f"This isfunction with index {index}")
    time.sleep(3)
    return "Return Value"


executor = SimpleExecutor(mode=RunningMode.Parallel, executors=3)
executor.run(function=function,
             args={"index": f"test_{random.randrange(1, 10)}"})
result = executor.result()
print(f"This is final result: {result}")
Example #14
_Worker_Size = 7
_Semaphore_Value = 2


def target_function_with_bsmp() -> str:
    return retry_success_with_bsmp()


@retry.function
@RunWith.Bounded_Semaphore
def retry_success_with_bsmp() -> str:
    print("Running function ...")
    _worker_name = context.get_current_worker_name()
    return f"{_worker_name} Running Result"


if __name__ == '__main__':

    set_mode(RunningMode.Parallel)

    _bsmp_factory = BoundedSemaphoreFactory(value=_Semaphore_Value)
    _sexor = SimpleExecutor(executors=_Worker_Size)
    _sexor.run(function=target_function_with_bsmp, features=_bsmp_factory)
    _result = _sexor.result()

    for _r in _result:
        print("++++++++++++++++++++++++++++")
        print(f"worker_name: {_r.worker_name}")
        print(f"data: {_r.data}")
        print(f"exception: {_r.exception}")