Example #1
    def run(self):
        from tblib import pickling_support

        pickling_support.install()
        while True:
            try:
                index = self.queue.get_nowait()
            except Queue.Empty:
                break
            except ConnectionRefusedError:
                time.sleep(.1)
                continue
            item = self.session.items[index]
            try:
                run_test(self.session, item, None)
            except BaseException:
                import pickle
                import sys

                self.errors.put((self.name, pickle.dumps(sys.exc_info())))
            finally:
                try:
                    self.queue.task_done()
                except ConnectionRefusedError:
                    pass
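
The worker above only produces pickled sys.exc_info() tuples; the driver process is expected to drain the errors queue and re-raise them. A minimal consumer-side sketch, assuming an errors queue filled by workers like the one above (the helper name is illustrative, not part of the project):

import pickle

def reraise_first_worker_error(errors):
    # Drain the errors queue and re-raise the first worker failure with the
    # traceback that tblib made picklable (tblib must be importable here too).
    while not errors.empty():
        worker_name, payload = errors.get()
        etype, exc, tb = pickle.loads(payload)
        raise exc.with_traceback(tb)  # etype/worker_name remain available for logging
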
Example #2
def main(**kwargs):
    set_start_method('fork')  # PyInstaller supports only fork start method

    pickling_support.install()

    init_satellite_dir()

    try:
        config = configure(**{
            name: value
            for name, value in kwargs.items() if value is not None
        })
    except InvalidConfigError as exc:
        raise click.ClickException(f'Invalid config: {exc}') from exc

    satellite_logging.configure(log_path=config.log_path, silent=config.silent)

    db.configure(config.db_path)
    try:
        db.init()
    except db.DBVersionMismatch as exc:
        raise click.ClickException(exc) from exc

    deleted_aliases = AliasStore.cleanup()
    logger = logging.getLogger()
    logger.info(f'Deleted {deleted_aliases} expired aliases.')

    app = WebApplication(config)
    app.start()
Example #3
    def run(self):
        from tblib import pickling_support

        pickling_support.install()
        original_pytest_configure(allure_conf)
        while True:
            try:
                index = self.queue.get_nowait()
            except Queue.Empty:
                break
            except ConnectionRefusedError:
                time.sleep(.1)
                continue
            item = self.session.items[index]
            try:
                unsafe = item.get_closest_marker(name='unsafe') is not None
                if not (self.run_unsafe ^ unsafe):
                    run_test(self.session, item, None)
            except BaseException:
                import pickle
                import sys

                self.errors.put((self.name, pickle.dumps(sys.exc_info())))
            finally:
                try:
                    self.queue.task_done()
                except ConnectionRefusedError:
                    pass
Example #4
    def main(cls, name, loglevel, setup, setupargs, setupkwargs, queue,
             teardown, teardownargs, teardownkwargs):
        from tblib import pickling_support

        cls.name = name
        cls.working = True

        setupargs = setupargs or []
        setupkwargs = setupkwargs or {}

        teardownargs = teardownargs or []
        teardownkwargs = teardownkwargs or {}

        if getattr(signal, 'SIGTERM', None):
            signal.signal(signal.SIGTERM, cls.handler)
        if getattr(signal, 'SIGINT', None):
            signal.signal(signal.SIGINT, cls.handler)

        logger = logging.getLogger()
        for handler in logger.handlers:
            logger.removeHandler(handler)
        logger.addHandler(cls.LogHandler(queue))
        logger.setLevel(loglevel)

        cls.queue = queue
        queue.send(_State(_State.STARTING))

        with OutputCapture.ReplaceSysStream(
                'stderr', cls.Stream(_Print.stderr,
                                     queue)), OutputCapture.ReplaceSysStream(
                                         'stdout',
                                         cls.Stream(_Print.stdout, queue)):
            try:
                pickling_support.install()
                if setup:
                    setup(*setupargs, **setupkwargs)

                while cls.working:
                    task = queue.receive()
                    if not task:
                        break
                    queue.send(_Result(value=task(None), id=task.id))

            except BaseException:
                typ, exception, traceback = sys.exc_info()
                queue.send(
                    _ChildException(exc_info=(
                        typ,
                        typ('{} (from {})'.format(str(exception), name)),
                        traceback,
                    )))

            finally:
                if teardown:
                    teardown(*teardownargs, **teardownkwargs)
                sys.stdout.flush()
                sys.stderr.flush()
                queue.send(_State(_State.STOPPING))
                cls.queue.close()
                cls.queue = None
Example #5
    def start_process(self):
        self.bundle_engine.logline("Starting {}".format(self.service.name))
        self.bundle_engine.logline("Directory: {}".format(self.service.directory))
        self.bundle_engine.logline("Command: {}".format(' '.join(self.service.command)))
        os.chdir(self.service.directory)

        try:
            self.process = subprocess.Popen(
                self.service.command,
                bufsize=0,                  # Ensures that all stdout/err is pushed to us immediately.
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                stdin=subprocess.PIPE,
                env=self.service.env_vars,
                preexec_fn=os.setpgrp       # Ctrl-C signal is not passed on to the process.
            )
            self.service.pid = self.process.pid
            self.process_started = True
        except Exception as e:
            pickling_support.install()
            self.bundle_engine.messages_to_driver.put(sys.exc_info())
            return

        self.bundle_engine.service_bundle.hitch_dir.save_pgid(self.service.name.lower(), os.getpgid(self.process.pid))

        self.stdout_pipe = pyuv.Pipe(self.bundle_engine.loop)
        self.stdout_pipe.open(self.process.stdout.fileno())

        self.stderr_pipe = pyuv.Pipe(self.bundle_engine.loop)
        self.stderr_pipe.open(self.process.stderr.fileno())
Example #6
def run_setup():
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    signal.signal(signal.SIGTERM, signal.SIG_IGN)
    try:
        sys.stdout = open(self.bundle_engine.service_bundle.hitch_dir.setup_out(self.service.name), "ab", 0)
        sys.stderr = open(self.bundle_engine.service_bundle.hitch_dir.setup_err(self.service.name), "ab", 0)
        self.service.setup()
    except Exception as e:
        pickling_support.install()
        self.bundle_engine.messages_to_driver.put(sys.exc_info())
Example #7
def error_handler(conn, func, *args, **kwargs):
    try:
        func(*args, **kwargs)
    except Exception as e:
        pickling_support.install()
        etype, exp, tb = exc_info()
        conn.send((etype, str(exp), tb))
        exit(1)
    finally:
        conn.close()
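
error_handler above only covers the child side; the parent end of the pipe receives the (type, message, traceback) triple and re-raises it. A hedged parent-side sketch, assuming error_handler runs in a multiprocessing.Process connected by a Pipe; run_child and func are illustrative names, and tblib only needs to be importable in the parent for unpickling:

import multiprocessing

def run_child(func, *args, **kwargs):
    parent_conn, child_conn = multiprocessing.Pipe()
    proc = multiprocessing.Process(target=error_handler,
                                   args=(child_conn, func) + args,
                                   kwargs=kwargs)
    proc.start()
    child_conn.close()  # so recv() below sees EOF once the child closes its end
    try:
        # Blocks until the child reports an error or closes the pipe.
        etype, message, tb = parent_conn.recv()
    except EOFError:
        proc.join()  # child finished without reporting an error
    else:
        proc.join()
        # tblib made the traceback picklable, so it can be re-attached here.
        raise etype(message).with_traceback(tb)
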
Example #8
async def _process_tile_in_worker(serialized_input_tile: str):
    try:
        input_tile = nexusproto.NexusTile.FromString(serialized_input_tile)
        processed_tile = _recurse(_worker_processor_list, _worker_dataset, input_tile)

        if processed_tile:
            await _worker_data_store.save_data(processed_tile)
            await _worker_metadata_store.save_metadata(processed_tile)
    except Exception as e:
        pickling_support.install(e)
        _shared_memory.error = pickle.dumps(e)
        raise
Example #9
    def __init__(self, player: str, server: str, conn, accuracy: int):
        """Initialize process and retrieve required data"""
        if not is_tesseract_installed():
            raise RuntimeError("Tesseract is not installed")

        pickling_support.install()
        Process.__init__(self)

        self.conn, self._f = conn, max(accuracy, 1)
        config = gui.get_player_config(player, server)["Settings"]
        self.colors = self.convert_colors(config["ChatColors"])
        ui = gui.GUIParser(config["GUI_Current_Profile"], {"ChatPanel_1": (None, None)})
        self.box = ui.get_box_coordinates("ChatPanel_1")
Example #10
def test_30():
    pickling_support.install()

    try:
        raise ValueError
    except ValueError:
        s = pickle.dumps(sys.exc_info())

    f = None
    try:
        six.reraise(*pickle.loads(s))
    except ValueError:
        f = Failure()

    assert f is not None
Example #11
    def main(cls, name, setup, queue, teardown):
        from tblib import pickling_support

        cls.name = name
        cls.working = True

        if getattr(signal, 'SIGTERM', None):
            signal.signal(signal.SIGTERM, cls.handler)

        logger = logging.getLogger()
        for handler in logger.handlers:
            logger.removeHandler(handler)
        logger.addHandler(cls.LogHandler(queue))

        queue.send(State(State.STARTING))

        with OutputCapture.ReplaceSysStream(
                'stderr', cls.Stream(Print.stderr,
                                     queue)), OutputCapture.ReplaceSysStream(
                                         'stdout',
                                         cls.Stream(Print.stdout, queue)):
            try:
                pickling_support.install()
                if setup:
                    setup()

                while cls.working:
                    task = queue.receive()
                    if not task:
                        break
                    queue.send(Result(value=task(None), id=task.id))

            except BaseException:
                typ, exception, traceback = sys.exc_info()
                queue.send(
                    ChildException(exc_info=(
                        typ,
                        typ('{} (from {})'.format(str(exception), name)),
                        traceback,
                    )))

            finally:
                if teardown:
                    teardown()
                sys.stdout.flush()
                sys.stderr.flush()
                queue.send(State(State.STOPPING))
Example #12
    def launcher():
        # multiprocessing doesn't offer a good way to detach from the parent
        # process, allowing the child to exist without being cleaned up at
        # parent close. So given
        #
        # 1. parent process (which invoked run_in_process)
        # 2. runner process (executing target function)
        #
        # we fork (2), creating (3) then continue executing in (3) and forcibly
        # exit (2).
        #
        # The downside of this approach is that any exceptions from the
        # process after detaching will not be propagated to the caller
        # (and Windows incompatibility).
        def detach(result=None):
            # Indicate no exception.
            child_pipe.send(False)
            child_pipe.send(result)
            pid = os.fork()
            if pid:
                # Ensure we don't return to caller within the subprocess.
                os._exit(0)

        new_args = list(args)
        if allow_detach:
            new_args.insert(0, detach)
        try:
            result = target(*new_args, **kwargs)
        except:
            child_pipe.send(True)
            from tblib import pickling_support

            pickling_support.install()
            child_pipe.send(sys.exc_info())
            # Wait for signal from parent process to avoid exit/read race
            # condition.
            child_pipe.recv()
            # We don't really want the exception traced by multiprocessing
            # so exit like Python would.
            sys.exit(1)
        else:
            child_pipe.send(False)
            child_pipe.send(result)
            child_pipe.recv()
Example #13
def manage_generic_exception(exception, info, sender='unspecified'):
    """This function is to save the details of an unmanaged exception in a pickle file for troubleshooting.
    :param exception: the exception triggered
    :param info: exception context obtained using .format(sender, type(exception).__name__, exception.args, file_name)
    :param sender: the name of the sender. Use this to make the output message more clear to read
    """

    from tblib import pickling_support
    pickling_support.install()
    import pickle
    import os

    exec_info = {'info': info, 'exception': exception}

    file_suffix = 0
    while True:
        try:
            file_name = 'error_details.dat' if file_suffix == 0 else "error_details ({}).dat".format(
                file_suffix)
            while os.path.isfile(file_name):
                file_suffix += 1
                file_name = 'error_details.dat' if file_suffix == 0 else "error_details ({}).dat".format(
                    file_suffix)

            error_file = open(file_name, 'wb')

            error_file.write(pickle.dumps(exec_info))
            error_file.flush()

            template = "Unhandled exeption detected in {0} process of type {1} occurred.\n" \
                       "Arguments: {2!r}!\n" \
                       "Details of this errors have been saved in file {3}: please send it to the developer.\n"\

            print(
                template.format(sender,
                                type(exception).__name__, exception.args,
                                file_name))

            break
        except PermissionError:
            file_suffix += 1
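
The file written above can later be loaded back for troubleshooting. A minimal sketch, assuming a file produced by manage_generic_exception (the path is illustrative):

import pickle

with open('error_details.dat', 'rb') as error_file:
    exec_info = pickle.load(error_file)

print(exec_info['info'])             # context string supplied by the caller
print(repr(exec_info['exception']))  # the original exception instance
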
Example #14
def main(**kwargs):
    set_start_method('fork')  # PyInstaller supports only fork start method

    pickling_support.install()

    init_satellite_dir()

    try:
        config = configure(**{
            name: value
            for name, value in kwargs.items() if value is not None
        })
    except InvalidConfigError as exc:
        raise click.ClickException(f'Invalid config: {exc}') from exc

    satellite_logging.configure(log_path=config.log_path, silent=config.silent)
    logger = logging.getLogger()

    db.configure(config.db_path)
    try:
        db.init()
    except db.DBVersionMismatch as exc:
        raise click.ClickException(exc) from exc

    if config.routes_path:
        with open(config.routes_path, 'r') as stream:
            try:
                loaded_routes_count = load_from_yaml(stream)
            except LoadError as exc:
                raise click.ClickException(
                    f'Unable to load routes from file: {exc}') from exc
        logger.info(
            f'Loaded {loaded_routes_count} routes from routes config file.')

    deleted_aliases = AliasStore.cleanup()
    logger.info(f'Deleted {deleted_aliases} expired aliases.')

    app = WebApplication(config)
    app.start()
Example #15
def main(**kwargs):
    set_start_method('fork')  # PyInstaller supports only fork start method

    pickling_support.install()

    init_satellite_dir()

    try:
        config = configure(**{
            name: value
            for name, value in kwargs.items() if value is not None
        })
    except InvalidConfigError as exc:
        raise click.ClickException(f'Invalid config: {exc}') from exc

    logging.configure(log_path=config.log_path, silent=config.silent)

    db.configure(config.db_path)
    db.init()

    app = WebApplication(config)
    app.start()
Example #16
    def run(self):
        pickling_support.install()
        self.session.config.hook.pytest_testthreadready()

        max_fail = self.session.config.getvalue("maxfail")
        while True:
            try:
                index = self.queue.get()
                if index == 'stop':
                    self.queue.task_done()
                    break
            except ConnectionRefusedError:
                time.sleep(.1)
                continue
            item = self.session.items[index]
            try:
                run_test(self.session, item, None)
            except BaseException:
                import pickle
                import sys

                self.errors.put((self.name, pickle.dumps(sys.exc_info())))
            finally:
                try:
                    self.queue.task_done()
                except ConnectionRefusedError:
                    pass

            if 0 < max_fail <= self.errors.qsize():
                try:
                    index = self.queue.get()
                    if index == 'stop':
                        self.queue.task_done()
                        break
                    self.queue.task_done()
                except ConnectionRefusedError:
                    time.sleep(.1)
                    continue
Example #17
tracker and can be accessed from different processes without any need to worry
about synchronization.
'''

# TODO: need to have mark_dirty send that key's value using the "set" command
#    or something, and test it.
# TODO: add the dirty command and other commands recently added to the POD
# TODO: add the init constructor argument

from functools import wraps
from contextlib import contextmanager
import sys
import multiprocessing
import tastypy
from tblib import pickling_support
pickling_support.install()

ITERITEMS_CHUNK_SIZE = 1000
ITERKEYS_CHUNK_SIZE = 1000


def _requires_lock(lock):
    def decorator(f):
        @wraps(f)
        def f_with_lock(*args, **kwargs):
            with lock:
                return_val = f(*args, **kwargs)
                return return_val

        return f_with_lock
Example #18
def run_in_process(
    target, name=None, args=(), kwargs=None, allow_detach=False, timeout=None
):
    """Run provided target in a multiprocessing.Process.

    This function does not require that the `target` and arguments
    are picklable. Only the return value of `target` must be.

    Args:
        target: same as multiprocessing.Process
        name: same as multiprocessing.Process
        args: same as multiprocessing.Process
        kwargs: same as multiprocessing.Process
        allow_detach: passes a callback as the first argument to the function
            that, when invoked, detaches from the parent by forking.
        timeout: seconds after which processing will be aborted and
            the child process killed

    Returns:
        The return value of `target`

    Raises:
        *: Any exception raised by `target`.
        TimeoutError: If a timeout occurs.
    """
    if not kwargs:
        kwargs = {}

    def launcher():
        # multiprocessing doesn't offer a good way to detach from the parent
        # process, allowing the child to exist without being cleaned up at
        # parent close. So given
        #
        # 1. parent process (which invoked run_in_process)
        # 2. runner process (executing target function)
        #
        # we fork (2), creating (3) then continue executing in (3) and forcibly
        # exit (2).
        #
        # The downside of this approach is that any exceptions from the
        # process after detaching will not be propagated to the caller
        # (and Windows incompatibility).
        def detach(result=None):
            # Indicate no exception.
            child_pipe.send(False)
            child_pipe.send(result)
            pid = os.fork()
            if pid:
                # Ensure we don't return to caller within the subprocess.
                os._exit(0)

        new_args = list(args)
        if allow_detach:
            new_args.insert(0, detach)
        try:
            result = target(*new_args, **kwargs)
        except:
            child_pipe.send(True)
            from tblib import pickling_support

            pickling_support.install()
            child_pipe.send(sys.exc_info())
            # Wait for signal from parent process to avoid exit/read race
            # condition.
            child_pipe.recv()
            # We don't really want the exception traced by multiprocessing
            # so exit like Python would.
            sys.exit(1)
        else:
            child_pipe.send(False)
            child_pipe.send(result)
            child_pipe.recv()

    ctx = multiprocessing.get_context("fork")

    child_pipe, parent_pipe = ctx.Pipe()
    p = ctx.Process(target=launcher, name=name)
    p.start()

    ready = multiprocessing_connection.wait([p.sentinel, parent_pipe], timeout=timeout)

    # Timeout
    if not ready:
        p.kill()
        raise TimeoutError("Timeout running function.")

    exc = None
    result = None
    if parent_pipe in ready:
        error = parent_pipe.recv()
        if error:
            from tblib import pickling_support

            pickling_support.install()
            _, exception, tb = parent_pipe.recv()
            exc = exception.with_traceback(tb)
        else:
            result = parent_pipe.recv()

    if p.sentinel in ready:
        # This can happen if the child process closes file descriptors, but we
        # do not handle it.
        assert p.exitcode is not None, "Exit code must exist"
        if p.exitcode:
            if not exc:
                exc = RuntimeError(f"Process died with return code {p.exitcode}")

    else:
        # Indicate OK to continue.
        parent_pipe.send(True)
        p.join()

    if exc:
        raise exc
    return result
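
A brief usage sketch for run_in_process, following its docstring; the worker function, arguments, and timeout are illustrative:

def compute(base, exponent=2):
    # Any exception raised here is pickled together with its traceback via
    # tblib and re-raised in the caller of run_in_process.
    return base ** exponent

result = run_in_process(compute, args=(3,), kwargs={'exponent': 4}, timeout=30)
assert result == 81
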
Example #19
File: logger.py Project: diffeo/dblogger
.. This software is released under an MIT/X11 open source license.
   Copyright 2013-2014 Diffeo, Inc.

'''
from __future__ import absolute_import

import time
import logging
import cPickle as pickle
import sys
import traceback

try:
    from tblib import pickling_support
    pickling_support.install()  # register traceback smarts with pickle
except Exception, exc:
    ## tblib does not work in python2.6...
    ## log something?
    pass

from dblogger.utils import gen_uuid
import kvlayer
import yakonfig


class DatabaseLogHandler(logging.Handler):
    '''Log handler that stores log messages in a database.

    This uses :mod:`kvlayer` to store the actual log messages.
    When the log handler is created, the caller needs to pass
Example #20
File: epyccel.py Project: CKehl/pyccel
def epyccel(python_function_or_module, **kwargs):
    """
    Accelerate Python function or module using Pyccel in "embedded" mode.

    Parameters
    ----------
    python_function_or_module : function | module
        Python function or module to be accelerated.

    verbose : bool
        Print additional information (default: False).

    language : {'fortran', 'c', 'python'}
        Language of generated code (default: 'fortran').

    accelerator : str, optional
        Parallel multi-threading acceleration strategy
        (currently supported: 'openmp', 'openacc').

    Options for parallel mode
    -------------------------
    comm : mpi4py.MPI.Comm, optional
        MPI communicator for calling Pyccel in parallel mode (default: None).

    root : int, optional
        MPI rank of process in charge of accelerating code (default: 0).

    bcast : {True, False}
        If False, only root process loads accelerated function/module (default: True).

    Other options
    -------------
    compiler : str, optional
        User-defined command for compiling generated source code.

    mpi_compiler : str, optional
        Compiler for MPI parallel code.

    Returns
    -------
    res : object
        Accelerated function or module.

    Examples
    --------
    >>> def one(): return 1
    >>> from pyccel.epyccel import epyccel
    >>> one_f = epyccel(one, language='fortran')
    >>> one_c = epyccel(one, language='c')

    """
    assert isinstance(python_function_or_module, (FunctionType, ModuleType))

    comm = kwargs.pop('comm', None)
    root = kwargs.pop('root', 0)
    bcast = kwargs.pop('bcast', True)

    # Parallel version
    if comm is not None:

        from mpi4py import MPI
        # [YG, 27.10.2020] We use tblib to pickle tracebacks, which allows
        # mpi4py to broadcast exceptions
        from tblib import pickling_support
        pickling_support.install()
        assert isinstance(comm, MPI.Comm)
        assert isinstance(root, int)

        # TODO [YG, 25.02.2020] Get default MPI compiler from somewhere else
        kwargs.setdefault('mpi_compiler', 'mpif90')

        # Master process calls epyccel
        if comm.rank == root:
            try:
                mod, fun = epyccel_seq(python_function_or_module, **kwargs)
                mod_path = os.path.abspath(mod.__file__)
                mod_name = mod.__name__
                fun_name = python_function_or_module.__name__ if fun else None
                success = True
            # error handling carried out after broadcast to prevent deadlocks
            except:  # pylint: disable=bare-except
                exc_info = sys.exc_info()
                success = False

        # Non-master processes initialize empty variables
        else:
            mod, fun = None, None
            mod_path = None
            mod_name = None
            fun_name = None
            exc_info = None
            success = None

        # Broadcast success state, and raise exception if needed
        if not comm.bcast(success, root=root):
            raise comm.bcast(exc_info, root=root)

        if bcast:
            # Broadcast Fortran module path/name and function name to all processes
            mod_path = comm.bcast(mod_path, root=root)
            mod_name = comm.bcast(mod_name, root=root)
            fun_name = comm.bcast(fun_name, root=root)

            # Non-master processes import Fortran module directly from its path
            # and extract function if its name is given
            if comm.rank != root:
                folder = os.path.split(mod_path)[0]
                sys.path.insert(0, folder)
                mod = importlib.import_module(mod_name)
                sys.path.remove(folder)
                fun = getattr(mod, fun_name) if fun_name else None

    # Serial version
    else:
        mod, fun = epyccel_seq(python_function_or_module, **kwargs)

    # Return Fortran function (if any), otherwise module
    return fun or mod
Example #21
import psutil
from multiprocess import Process, Queue
from six import reraise
from six.moves import cPickle as pickle
from six.moves import range
from six.moves.queue import Empty as EmptyQueue
from tblib import pickling_support

from . import CommandSequence, MPLogger
from .BrowserManager import Browser
from .DataAggregator import LocalAggregator, S3Aggregator
from .Errors import CommandExecutionError
from .SocketInterface import clientsocket
from .utilities.platform_utils import get_configuration_string, get_version

pickling_support.install()

SLEEP_CONS = 0.1  # command sleep constant (in seconds)
BROWSER_MEMORY_LIMIT = 1500  # in MB

AGGREGATOR_QUEUE_LIMIT = 10000  # number of records in the queue


def load_default_params(num_browsers=1):
    """
    Loads num_browsers copies of the default browser_params dictionary.
    Also loads a single copy of the default TaskManager params dictionary.
    """
    fp = open(os.path.join(os.path.dirname(__file__),
                           'default_browser_params.json'))
    preferences = json.load(fp)