Example #1
import os
import pickle
import traceback

from portage.util._async.ForkProcess import ForkProcess
from portage.util._async.PipeReader import PipeReader


class AsyncFunction(ForkProcess):
    """
	Execute a function call in a fork, and retrieve the function
	return value via pickling/unpickling, accessible as the
	"result" attribute after the forked process has exited.
	"""

    # NOTE: This class overrides the meaning of the SpawnProcess 'args'
    # attribute, and uses it to hold the positional arguments for the
    # 'target' function.
    __slots__ = ('kwargs', 'result', 'target', '_async_func_reader',
                 '_async_func_reader_pw')

    def _start(self):
        pr, pw = os.pipe()
        self.fd_pipes = {}
        # Map the write end onto itself so the forked child inherits
        # it at the same fd number.
        self.fd_pipes[pw] = pw
        self._async_func_reader_pw = pw
        self._async_func_reader = PipeReader(input_files={"input": pr},
                                             scheduler=self.scheduler)
        self._async_func_reader.addExitListener(self._async_func_reader_exit)
        self._async_func_reader.start()
        ForkProcess._start(self)
        # Close the parent's copy of the write end so the PipeReader
        # sees EOF once the child's copy is closed on exit.
        os.close(pw)

    def _run(self):
        try:
            result = self.target(*(self.args or []), **(self.kwargs or {}))
            # NOTE: a single os.write call assumes the pickled result is
            # small enough to be written without a partial write; larger
            # payloads would need a write loop like FileDigester._run below.
            os.write(self._async_func_reader_pw, pickle.dumps(result))
        except Exception:
            traceback.print_exc()
            return 1

        return os.EX_OK

    def _pipe_logger_exit(self, pipe_logger):
        # Ignore this event, since we want to ensure that we exit
        # only after _async_func_reader has reached EOF.
        self._pipe_logger = None

    def _async_func_reader_exit(self, pipe_reader):
        try:
            self.result = pickle.loads(pipe_reader.getvalue())
        except Exception:
            # The child process will have printed a traceback in this case,
            # and returned an unsuccessful returncode.
            pass
        self._async_func_reader = None
        self._unregister()
        self.wait()

    def _unregister(self):
        ForkProcess._unregister(self)

        pipe_reader = self._async_func_reader
        if pipe_reader is not None:
            self._async_func_reader = None
            pipe_reader.removeExitListener(self._async_func_reader_exit)
            pipe_reader.cancel()
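
A minimal usage sketch for AsyncFunction, assuming the ForkProcess base class provides the usual start()/wait() interface and that a portage event loop is available as the scheduler; the target function and values below are illustrative, not part of the example:

from portage.util._eventloop.global_event_loop import global_event_loop

def _add(a, b):
    # Hypothetical target function; any picklable return value works.
    return a + b

loop = global_event_loop()
proc = AsyncFunction(scheduler=loop, target=_add, args=(2, 3))
proc.start()   # fork the child and start reading its pipe
proc.wait()    # drive the event loop until the child has exited
print(proc.result)  # -> 5, unpickled from the pipe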
Example #2
from portage.util._async.PipeReader import PipeReader


class _Reader(object):
    """
    Deliver the contents of a pipe through a future: the future's
    result is set to all bytes read from input_file once the
    PipeReader reaches EOF, and cancelling the future cancels the
    underlying PipeReader.
    """

    def __init__(self, future, input_file, loop):
        self._future = future
        self._pipe_reader = PipeReader(input_files={'input_file': input_file},
                                       scheduler=loop)

        self._future.add_done_callback(self._cancel_callback)
        self._pipe_reader.addExitListener(self._eof)
        self._pipe_reader.start()

    def _cancel_callback(self, future):
        if future.cancelled():
            self._cancel()

    def _eof(self, pipe_reader):
        self._pipe_reader = None
        self._future.set_result(pipe_reader.getvalue())

    def _cancel(self):
        if self._pipe_reader is not None and self._pipe_reader.poll() is None:
            self._pipe_reader.removeExitListener(self._eof)
            self._pipe_reader.cancel()
            self._pipe_reader = None
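
This helper lends itself to a small wrapper that just returns the future; a hedged sketch of such a wrapper, assuming the loop object offers an asyncio-style create_future() (the wrapper name is illustrative):

def read_pipe(input_file, loop):
    # Return a future whose result will be all bytes read from
    # input_file; cancelling the future cancels the PipeReader.
    future = loop.create_future()
    _Reader(future, input_file, loop)
    return future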
Example #3
import os

from portage.checksum import perform_multiple_checksums
from portage.util._async.ForkProcess import ForkProcess
from portage.util._async.PipeReader import PipeReader


class FileDigester(ForkProcess):
    """
	Asynchronously generate file digests. Pass in file_path and
	hash_names, and after successful execution, the digests
	attribute will be a dict containing all of the requested
	digests.
	"""

    __slots__ = ('file_path', 'digests', 'hash_names', '_digest_pipe_reader',
                 '_digest_pw')

    def _start(self):
        pr, pw = os.pipe()
        self.fd_pipes = {}
        self.fd_pipes[pw] = pw
        self._digest_pw = pw
        self._digest_pipe_reader = PipeReader(input_files={"input": pr},
                                              scheduler=self.scheduler)
        self._digest_pipe_reader.addExitListener(self._digest_pipe_reader_exit)
        self._digest_pipe_reader.start()
        ForkProcess._start(self)
        os.close(pw)

    def _run(self):
        digests = perform_multiple_checksums(self.file_path,
                                             hashes=self.hash_names)

        buf = "".join("%s=%s\n" % item
                      for item in digests.items()).encode('utf_8')

        # os.write may do a partial write, so loop until the whole
        # buffer has been flushed into the pipe.
        while buf:
            buf = buf[os.write(self._digest_pw, buf):]

        return os.EX_OK

    def _parse_digests(self, data):

        digests = {}
        for line in data.decode('utf_8').splitlines():
            parts = line.split('=', 1)
            if len(parts) == 2:
                digests[parts[0]] = parts[1]

        self.digests = digests

    def _pipe_logger_exit(self, pipe_logger):
        # Ignore this event, since we want to ensure that we
        # exit only after _digest_pipe_reader has reached EOF.
        self._pipe_logger = None

    def _digest_pipe_reader_exit(self, pipe_reader):
        self._parse_digests(pipe_reader.getvalue())
        self._digest_pipe_reader = None
        self._unregister()
        self.wait()

    def _unregister(self):
        ForkProcess._unregister(self)

        pipe_reader = self._digest_pipe_reader
        if pipe_reader is not None:
            self._digest_pipe_reader = None
            pipe_reader.removeExitListener(self._digest_pipe_reader_exit)
            pipe_reader.cancel()
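
A hedged usage sketch for FileDigester, assuming the same start()/wait() interface as above and a portage event loop as the scheduler; the path and hash names are illustrative:

from portage.util._eventloop.global_event_loop import global_event_loop

loop = global_event_loop()
digester = FileDigester(file_path="/path/to/some/file",
                        hash_names=("SHA256", "SIZE"),
                        scheduler=loop)
digester.start()
digester.wait()
if digester.returncode == os.EX_OK:
    print(digester.digests)  # e.g. {'SHA256': '...', 'SIZE': '...'}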
Example #4
class AsyncFunction(ForkProcess):
	"""
	Execute a function call in a fork, and retrieve the function
	return value via pickling/unpickling, accessible as the
	"result" attribute after the forked process has exited.
	"""

	# NOTE: This class overrides the meaning of the SpawnProcess 'args'
	# attribute, and uses it to hold the positional arguments for the
	# 'target' function.
	__slots__ = ('kwargs', 'result', 'target',
		'_async_func_reader', '_async_func_reader_pw')

	def _start(self):
		pr, pw = os.pipe()
		self.fd_pipes = {}
		self.fd_pipes[pw] = pw
		self._async_func_reader_pw = pw
		self._async_func_reader = PipeReader(
			input_files={"input":pr},
			scheduler=self.scheduler)
		self._async_func_reader.addExitListener(self._async_func_reader_exit)
		self._async_func_reader.start()
		ForkProcess._start(self)
		os.close(pw)

	def _run(self):
		try:
			result = self.target(*(self.args or []), **(self.kwargs or {}))
			os.write(self._async_func_reader_pw, pickle.dumps(result))
		except Exception:
			traceback.print_exc()
			return 1

		return os.EX_OK

	def _pipe_logger_exit(self, pipe_logger):
		# Ignore this event, since we want to ensure that we exit
		# only after _async_func_reader has reached EOF.
		self._pipe_logger = None

	def _async_func_reader_exit(self, pipe_reader):
		try:
			self.result = pickle.loads(pipe_reader.getvalue())
		except Exception:
			# The child process will have printed a traceback in this case,
			# and returned an unsuccessful returncode.
			pass
		self._async_func_reader = None
		# Unlike Example #1, completion is deferred until the child's
		# exit status has also been collected, waiting for it
		# asynchronously if it has not been reaped yet.
		if self.returncode is None:
			self._async_waitpid()
		else:
			self._unregister()
			self._async_wait()

	def _unregister(self):
		ForkProcess._unregister(self)

		pipe_reader = self._async_func_reader
		if pipe_reader is not None:
			self._async_func_reader = None
			pipe_reader.removeExitListener(self._async_func_reader_exit)
			pipe_reader.cancel()
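
This variant signals completion through _async_wait() instead of a blocking self.wait(), so it can be driven entirely from the event loop. A hedged sketch, assuming the AsynchronousTask base class exposes an async_wait() coroutine and that the portage event loop provides run_until_complete(), as in recent portage releases:

from portage.util._eventloop.global_event_loop import global_event_loop

loop = global_event_loop()
proc = AsyncFunction(scheduler=loop, target=sorted, args=([3, 1, 2],))
proc.start()
# async_wait() is assumed to return a future that completes once the
# pickled result has been read and the child has been reaped.
loop.run_until_complete(proc.async_wait())
print(proc.result)  # -> [1, 2, 3]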