Code example #1
    def submit(self, func, *args, **kwargs):
        ''' Submits work to the outgoing_q; an external process listens on this
        queue for new work. This method is a simple pass-through and behaves like a
        submit call as described in the `Python docs <https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor>`_

        Args:
            - func (callable) : Callable function
            - *args (list) : List of arbitrary positional arguments.

        Kwargs:
            - **kwargs (dict) : A dictionary of arbitrary keyword args for func.

        Returns:
              Future
        '''
        task_id = uuid.uuid4()

        logger.debug("Before pushing to queue : func:%s func_args:%s", func,
                     args)

        self.tasks[task_id] = Future()

        fn_buf = pack_apply_message(func,
                                    args,
                                    kwargs,
                                    buffer_threshold=1024 * 1024,
                                    item_threshold=1024)

        msg = {"task_id": task_id, "buffer": fn_buf}

        # Post task to the outgoing queue
        self.outgoing_q.put(msg)

        # Return the future
        return self.tasks[task_id]
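
A minimal usage sketch for this submit() API (hedged: `executor` stands in for an instance of the class above, and `double` is an illustrative function, not part of parsl):

def double(x):
    return 2 * x

future = executor.submit(double, 21)   # returns a Future immediately
result = future.result(timeout=60)     # blocks until a worker fills in the result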
Code example #2
File: executor.py  Project: ravihansa3000/parsl
    def _serialize_function(self, fn_path, parsl_fn, parsl_fn_args,
                            parsl_fn_kwargs):
        """Takes the function application parsl_fn(*parsl_fn_args, **parsl_fn_kwargs)
        and serializes it to the file fn_path."""

        # Either build a dictionary with the source of the function, or pickle
        # the function directly:
        if self.source:
            function_info = {
                "source code": inspect.getsource(parsl_fn),
                "name": parsl_fn.__name__,
                "args": parsl_fn_args,
                "kwargs": parsl_fn_kwargs
            }
        else:
            function_info = {
                "byte code":
                pack_apply_message(parsl_fn,
                                   parsl_fn_args,
                                   parsl_fn_kwargs,
                                   buffer_threshold=1024 * 1024,
                                   item_threshold=1024)
            }
        with open(fn_path, "wb") as f_out:
            pickle.dump(function_info, f_out)
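
The worker-side counterpart is not shown here. A minimal sketch of how the file written by _serialize_function might be loaded back, assuming the same "source code"/"byte code" dictionary layout and that unpack_apply_message is importable (e.g. from ipyparallel.serialize):

import pickle

def load_function(fn_path, source):
    # Sketch: reverse of _serialize_function for the two layouts above
    with open(fn_path, "rb") as f_in:
        function_info = pickle.load(f_in)

    if source:
        # Re-create the function from its source text in a fresh namespace
        ns = {}
        exec(function_info["source code"], ns)
        fn = ns[function_info["name"]]
        return fn, function_info["args"], function_info["kwargs"]
    else:
        from ipyparallel.serialize import unpack_apply_message
        return unpack_apply_message(function_info["byte code"], copy=False)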
Code example #3
File: executor.py  Project: xinyixiang/parsl
def simple_executor(f_all, args_all, kwargs_all, num_tasks):
    serialization_times = []
    exec_times = []
    results = []

    for i in range(num_tasks):
        task_id = i
        start_time = time.time()
        buf = pack_apply_message(f=next(f_all),
                                 args=next(args_all),
                                 kwargs=next(kwargs_all),
                                 buffer_threshold=1024 * 1024,
                                 item_threshold=1024)
        serialization_times.append(time.time() - start_time)

        start_time = time.time()
        user_ns = locals()
        user_ns.update({'__builtins__': __builtins__})
        f, args, kwargs = unpack_apply_message(buf, user_ns, copy=False)
        result = execute_task(f, args, kwargs, user_ns)
        exec_times.append(time.time() - start_time)

        results.append(result)

    # Convert average times to microseconds
    avg_serialization_time = sum(serialization_times) / len(
        serialization_times) * 10**6
    avg_execution_time = sum(exec_times) / len(exec_times) * 10**6

    return {
        "avg_serialization_time": avg_serialization_time,
        "avg_execution_time": avg_execution_time,
        "results": results
    }
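
Since simple_executor pulls work with next(), a driver can feed it plain iterators. A small sketch with a toy workload (add and the task count are illustrative):

import itertools

def add(a, b):
    return a + b

stats = simple_executor(f_all=itertools.repeat(add),
                        args_all=itertools.repeat((1, 2)),
                        kwargs_all=itertools.repeat({}),
                        num_tasks=100)
print(stats["avg_serialization_time"], "us to serialize on average")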
Code example #4
File: executor.py  Project: ravihansa3000/parsl
    def submit(self, func, resource_specification, *args, **kwargs):
        """ TODO: docstring """
        if resource_specification:
            logger.error(
                "Ignoring the resource specification. "
                "Parsl resource specification is not supported in LowLatency Executor. "
                "Please check WorkQueueExecutor if resource specification is needed."
            )
            raise UnsupportedFeatureError('resource specification',
                                          'LowLatency Executor',
                                          'WorkQueue Executor')

        if self.bad_state_is_set:
            raise self.executor_exception

        self._task_counter += 1
        task_id = self._task_counter

        logger.debug("Pushing function {} to queue with args {}".format(
            func, args))

        self.tasks[task_id] = Future()

        fn_buf = pack_apply_message(func,
                                    args,
                                    kwargs,
                                    buffer_threshold=1024 * 1024,
                                    item_threshold=1024)

        # Post task to the outgoing queue
        self.outgoing_q.put(task_id, fn_buf)

        # Return the future
        return self.tasks[task_id]
Code example #5
    def submit(self, func, resource_specification, *args, **kwargs):
        """Submits work to the the outgoing_q.

        The outgoing_q is an external process listens on this
        queue for new work. This method behaves like a
        submit call as described here `Python docs: <https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor>`_

        Args:
            - func (callable) : Callable function
            - *args (list) : List of arbitrary positional arguments.

        Kwargs:
            - **kwargs (dict) : A dictionary of arbitrary keyword args for func.

        Returns:
              Future
        """
        if resource_specification:
            logger.error("Ignoring the resource specification. "
                         "Parsl resource specification is not supported in HighThroughput Executor. "
                         "Please check WorkQueueExecutor if resource specification is needed.")
            raise UnsupportedFeatureError('resource specification', 'HighThroughput Executor', 'WorkQueue Executor')

        if self.bad_state_is_set:
            raise self.executor_exception

        self._task_counter += 1
        task_id = self._task_counter

        # Handle large argument blobs gracefully: only build truncated reprs
        # when debug logging is actually enabled
        if logger.isEnabledFor(logging.DEBUG):
            args_to_print = tuple([arg if len(repr(arg)) < 100 else (repr(arg)[:100] + '...') for arg in args])
            logger.debug("Pushing function {} to queue with args {}".format(func, args_to_print))

        self.tasks[task_id] = Future()

        try:
            fn_buf = pack_apply_message(func, args, kwargs,
                                        buffer_threshold=1024 * 1024,
                                        item_threshold=1024)
        except TypeError:
            raise SerializationError(func.__name__)

        msg = {"task_id": task_id,
               "buffer": fn_buf}

        # Post task to the outgoing queue
        self.outgoing_q.put(msg)

        # Return the future
        return self.tasks[task_id]
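
Because this variant wraps pack_apply_message and converts a TypeError into SerializationError, callers can guard submission. A sketch (executor, logger, my_func, and my_args are placeholders from the caller's scope):

try:
    future = executor.submit(my_func, None, *my_args)  # None: no resource specification
except SerializationError:
    logger.exception("Function %s could not be serialized", my_func.__name__)
    raise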
Code example #6
File: executor.py  Project: Sprinterzzj/parsl
    def submit(self, func, *args, **kwargs):
        """Submits work to the the outgoing_q.

        The outgoing_q is an external process listens on this
        queue for new work. This method behaves like a
        submit call as described here `Python docs: <https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor>`_

        Args:
            - func (callable) : Callable function
            - *args (list) : List of arbitrary positional arguments.

        Kwargs:
            - **kwargs (dict) : A dictionary of arbitrary keyword args for func.

        Returns:
              Future
        """
        if self._executor_bad_state.is_set():
            raise self._executor_exception

        self._task_counter += 1
        task_id = self._task_counter

        # Handle large argument blobs gracefully: only build truncated reprs
        # when debug logging is actually enabled
        if logger.isEnabledFor(logging.DEBUG):
            args_to_print = tuple([
                arg if len(repr(arg)) < 100 else (repr(arg)[:100] + '...')
                for arg in args
            ])
            logger.debug("Pushing function {} to queue with args {}".format(
                func, args_to_print))

        self.tasks[task_id] = Future()

        fn_buf = pack_apply_message(func,
                                    args,
                                    kwargs,
                                    buffer_threshold=1024 * 1024,
                                    item_threshold=1024)

        msg = {"task_id": task_id, "buffer": fn_buf}

        # Post task to the outgoing queue
        self.outgoing_q.put(msg)

        # Return the future
        return self.tasks[task_id]
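
The args_to_print logic above truncates huge argument reprs so they do not flood the log. The same idea as a standalone helper (illustrative sketch, not part of parsl):

def short_repr(obj, limit=100):
    # Truncate repr(obj) to at most `limit` characters for log output
    r = repr(obj)
    return r if len(r) < limit else r[:limit] + '...'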
Code example #7
File: executor.py  Project: timarmstrong/parsl
    def submit(self, func, *args, **kwargs):
        """ TODO: docstring """
        self._task_counter += 1
        task_id = self._task_counter

        logger.debug("Pushing function {} to queue with args {}".format(
            func, args))

        self.tasks[task_id] = Future()

        fn_buf = pack_apply_message(func,
                                    args,
                                    kwargs,
                                    buffer_threshold=1024 * 1024,
                                    item_threshold=1024)

        # Post task to the outgoing queue
        self.outgoing_q.put(task_id, fn_buf)

        # Return the future
        return self.tasks[task_id]
Code example #8
    def submit(self, func, *args, **kwargs):
        """Processes the Parsl app by its arguments and submits the function
        information to the task queue, to be executed using the Work Queue
        system. The args and kwargs are processed for input and output files to
        the Parsl app, so that the files are appropriately specified for the Work
        Queue task.

        Parameters
        ----------

        func : function
            Parsl app to be submitted to the Work Queue system
        args : list
            Arguments to the Parsl app
        kwargs : dict
            Keyword arguments to the Parsl app
        """
        self.task_counter += 1
        task_id = self.task_counter

        input_files = []
        output_files = []
        std_files = []

        # Add input files from the "inputs" keyword argument
        func_inputs = kwargs.get("inputs", [])
        for inp in func_inputs:
            if isinstance(inp, File):
                input_files.append(self.create_name_tuple(inp, "in"))

        for kwarg, inp in kwargs.items():
            # Add appropriate input and output files from "stdout" and "stderr" keyword arguments
            if kwarg == "stdout" or kwarg == "stderr":
                if (isinstance(inp, tuple) and len(inp) > 1
                        and isinstance(inp[0], str)
                        and isinstance(inp[1], str)) or isinstance(inp, str):
                    if isinstance(inp, tuple):
                        inp = inp[0]
                    if not os.path.exists(
                            os.path.join(".",
                                         os.path.split(inp)[0])):
                        continue
                    # Create "std" files instead of input or output files
                    if inp in self.registered_files:
                        input_files.append(
                            (inp, os.path.basename(inp) + "-1", False, "std"))
                        output_files.append(
                            (inp, os.path.basename(inp), False, "std"))
                    else:
                        output_files.append(
                            (inp, os.path.basename(inp), False, "std"))
                        self.registered_files.add(inp)
            # Add to input file if passed-in argument is a File object
            elif isinstance(inp, File):
                input_files.append(self.create_name_tuple(inp, "in"))

        # Add to input file if passed-in argument is a File object
        for inp in args:
            if isinstance(inp, File):
                input_files.append(self.create_name_tuple(inp, "in"))

        # Add output files from the "outputs" keyword argument
        func_outputs = kwargs.get("outputs", [])
        for output in func_outputs:
            if isinstance(output, File):
                output_files.append(self.create_name_tuple(output, "out"))

        if not self.submit_process.is_alive():
            raise ExecutorError(self, "Workqueue Submit Process is not alive")

        # Create a Future object and have it be mapped from the task ID in the tasks dictionary
        fu = Future()
        with self.tasks_lock:
            self.tasks[str(task_id)] = fu

        logger.debug("Creating task {} for function {} with args {}".format(
            task_id, func, args))

        # Paths for the serialized function data and the eventual result
        function_data_file = os.path.join(
            self.function_data_dir, "task_" + str(task_id) + "_function_data")
        function_result_file = os.path.join(
            self.function_data_dir,
            "task_" + str(task_id) + "_function_result")

        logger.debug("Creating Task {} with executable at: {}".format(
            task_id, function_data_file))
        logger.debug("Creating Task {} with result to be found at: {}".format(
            task_id, function_result_file))

        # Obtain function information and put into dictionary
        if self.source:
            source_code = inspect.getsource(func)
            name = func.__name__
            function_info = {
                "source code": source_code,
                "name": name,
                "args": args,
                "kwargs": kwargs
            }

            # Pack the function data into a file
            with open(function_data_file, "wb") as f:
                pickle.dump(function_info, f)
        else:
            # Serialize function information
            function_info = pack_apply_message(func,
                                               args,
                                               kwargs,
                                               buffer_threshold=1024 * 1024,
                                               item_threshold=1024)

            # Pack the function data into a file
            with open(function_data_file, "wb") as f:
                pickle.dump(function_info, f)

        # Create message to put into the message queue
        logger.debug("Placing task {} on message queue".format(task_id))
        msg = {
            "task_id": task_id,
            "data_loc": function_data_file,
            "result_loc": function_result_file,
            "input_files": input_files,
            "output_files": output_files,
            "std_files": std_files
        }
        self.task_queue.put_nowait(msg)

        return fu
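
As the docstring notes, args and kwargs are scanned for File objects. A hedged sketch of a submission with staged files (File as in parsl.data_provider.files; the app and paths are illustrative):

from parsl.data_provider.files import File

future = executor.submit(
    my_app,                         # a Parsl app (placeholder)
    File("input.txt"),              # positional File -> registered as an input
    outputs=[File("results.csv")],  # "outputs" kwarg -> registered as outputs
    stdout="logs/app.out",          # routed through the std-files handling above
)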
Code example #9
File: executor.py  Project: AndrewLitteken/parsl
    def submit(self, func, *args, **kwargs):
        """Submit.

        We haven't yet decided on what the args to this can be,
        whether it should just be func, args, kwargs or be the partially evaluated
        fn
        """
        self.task_counter += 1
        task_id = self.task_counter

        input_files = []
        output_files = []
        std_files = []

        func_inputs = kwargs.get("inputs", [])
        for inp in func_inputs:
            if isinstance(inp, File):
                input_files.append(self.create_name_tuple(inp, "in"))

        for kwarg, inp in kwargs.items():
            if kwarg == "stdout" or kwarg == "stderr":
                if (isinstance(inp, tuple) and len(inp) > 1
                        and isinstance(inp[0], str)
                        and isinstance(inp[1], str)) or isinstance(inp, str):
                    if isinstance(inp, tuple):
                        inp = inp[0]
                    if not os.path.exists(
                            os.path.join(".",
                                         os.path.split(inp)[0])):
                        continue
                    if inp in self.registered_files:
                        input_files.append(
                            (inp, os.path.basename(inp) + "-1", False, "std"))
                        output_files.append(
                            (inp, os.path.basename(inp), False, "std"))
                    else:
                        output_files.append(
                            (inp, os.path.basename(inp), False, "std"))
                        self.registered_files.add(inp)
            elif isinstance(inp, File):
                input_files.append(self.create_name_tuple(inp, "in"))

        for inp in args:
            if isinstance(inp, File):
                input_files.append(self.create_name_tuple(inp, "in"))

        func_outputs = kwargs.get("outputs", [])
        for output in func_outputs:
            if isinstance(output, File):
                output_files.append(self.create_name_tuple(output, "out"))

        if not self.submit_process.is_alive():
            raise ExecutorError(self, "Workqueue Submit Process is not alive")

        fu = Future()
        with self.tasks_lock:
            self.tasks[str(task_id)] = fu

        logger.debug("Creating task {} for function {} with args {}".format(
            task_id, func, args))

        # Paths for the serialized function data and the eventual result
        # TODO Try/Except Block
        function_data_file = os.path.join(
            self.function_data_dir, "task_" + str(task_id) + "_function_data")
        function_result_file = os.path.join(
            self.function_data_dir,
            "task_" + str(task_id) + "_function_result")

        logger.debug("Creating Task {} with executable at: {}".format(
            task_id, function_data_file))
        logger.debug("Creating Task {} with result to be found at: {}".format(
            task_id, function_result_file))

        f = open(function_data_file, "wb")
        fn_buf = pack_apply_message(func,
                                    args,
                                    kwargs,
                                    buffer_threshold=1024 * 1024,
                                    item_threshold=1024)
        pickle.dump(fn_buf, f)
        f.close()

        logger.debug("Placing task {} on message queue".format(task_id))
        msg = {
            "task_id": task_id,
            "data_loc": function_data_file,
            "result_loc": function_result_file,
            "input_files": input_files,
            "output_files": output_files,
            "std_files": std_files
        }

        self.task_queue.put_nowait(msg)

        return fu
Code example #10
File: executor.py  Project: xinyixiang/parsl
def dealer_executor(f_all,
                    args_all,
                    kwargs_all,
                    num_tasks,
                    return_dict,
                    port=5559,
                    interchange=True,
                    warmup=10):
    label = "DEALER-INTERCHANGE-REP" if interchange else "DEALER-REP"
    logger.info("Starting executor:{}".format(label))

    serialization_times = []
    deserialization_times = []
    send_times = {}
    exec_times = {}
    results = []

    context = zmq.Context()
    dealer = context.socket(zmq.DEALER)
    dealer.bind("tcp://*:{}".format(port))

    poller = zmq.Poller()
    poller.register(dealer, zmq.POLLIN)

    num_send = 0
    num_recv = 0

    while True:
        socks = dict(poller.poll(1))
        if num_send < num_tasks:
            task_id = num_send
            task_id_bytes = task_id.to_bytes(4, "little")
            start_time = time.time()
            buf = pack_apply_message(f=next(f_all),
                                     args=next(args_all),
                                     kwargs=next(kwargs_all),
                                     buffer_threshold=1024 * 1024,
                                     item_threshold=1024)
            serialization_times.append(time.time() - start_time)

            logger.debug("Manager sending task {}".format(task_id))
            send_times[task_id] = time.time()
            dealer.send_multipart([b"", task_id_bytes] + buf)
            num_send += 1

        if dealer in socks and socks[dealer] == zmq.POLLIN:
            buf = dealer.recv_multipart()
            recv_time = time.time()

            start_time = time.time()
            msg = deserialize_object(buf[2:])[0]
            deserialization_times.append(time.time() - start_time)

            logger.debug("Got message {}".format(msg))
            task_id = int.from_bytes(buf[1], "little")
            results.append(msg["result"])

            if num_recv >= warmup:
                # Ignore the first `warmup` tasks
                exec_times[task_id] = recv_time - send_times[task_id]

            num_recv += 1
            logger.debug("Dealer received result {}".format(task_id))
            if num_recv == num_tasks:
                break

    # Convert average times to microseconds
    avg_serialization_time = sum(serialization_times) / len(
        serialization_times) * 10**6
    avg_execution_time = sum(exec_times.values()) / len(exec_times) * 10**6

    return_dict["avg_serialization_time"] = avg_serialization_time
    return_dict["avg_execution_time"] = avg_execution_time
    return_dict["results"] = results
Code example #11

def add2nums(num1, num2):
    return num1 + num2


for req in range(10):

    # time.sleep(4)
    print("Sending request {}".format(req))
    task_id = str(uuid.uuid4())

    args = {"num1": 2, "num2": 2}
    kwargs = {}

    obj = pack_apply_message(add2nums, args, kwargs)
    worker.send_multipart([
        pickle.dumps(task_id),
        pickle.dumps(obj),
        pickle.dumps("INVOKE"), b'B'
    ])
    worker.send_multipart([
        pickle.dumps(task_id),
        pickle.dumps(obj),
        pickle.dumps("INVOKE"), b'A'
    ])

total = 0
while True:

    print("Listening for new result")