Example #1
0
import asyncio
from multiprocessing.util import register_after_fork
from queue import Queue
from threading import (Barrier, BoundedSemaphore, Condition, Event, Lock,
                       RLock, Semaphore)
from multiprocessing.managers import (SyncManager, MakeProxyType, BarrierProxy,
                                      EventProxy, ConditionProxy,
                                      AcquirerProxy)

from aioprocessing.locks import _ContextManager
from .executor import _ExecutorMixin

# Proxy type exposing the queue API so calls can be forwarded to the
# managed queue living in the manager process.
AioBaseQueueProxy = MakeProxyType(
    'AioQueueProxy',
    (
        'task_done',
        'get',
        'qsize',
        'put',
        'put_nowait',
        'get_nowait',
        'empty',
        'join',
        '_qsize',
        'full',
    ),
)


class _AioProxyMixin(_ExecutorMixin):
    """Mixin that wraps BaseProxy._callmethod so proxy method calls can be
    awaited: the blocking call runs in the mixin's executor and is wrapped
    in an asyncio future.
    """

    # Referent placeholder used by the proxy machinery.
    _obj = None

    def _async_call(self, method, *args, loop=None, **kwargs):
        """Schedule ``self._callmethod(method, args, kwargs)`` on the
        executor and return an asyncio future for the result.

        :param method: name of the proxied method to invoke
        :param loop: event loop to schedule on (executor decides default)
        :returns: an ``asyncio.Future``/``Task`` wrapping the call
        """
        # BUGFIX: ``asyncio.async`` was deprecated in 3.4.4 and ``async``
        # became a keyword in 3.7, so ``asyncio. async (...)`` is a
        # SyntaxError on modern Python.  ``ensure_future`` is the
        # supported equivalent (matches the other copy of this mixin
        # later in this file).
        return asyncio.ensure_future(
            self.run_in_executor(
                self._callmethod,
                method,
                args,
                kwargs,
                loop=loop,
            )
        )


class ProxyCoroBuilder(type):
    """ Build coroutines to proxy functions. """
    def __new__(cls, clsname, bases, dct):
Example #2
0
import queue
from multiprocessing.managers import SyncManager
from multiprocessing import Process, RLock
from bs4 import BeautifulSoup
from urllib import request
from urllib.parse import urlparse, urljoin
from multiprocessing.managers import MakeProxyType

# Proxy exposing the full ``set`` interface; every call is forwarded to
# the referent set held by the manager process.
BaseSetProxy = MakeProxyType(
    'BaseSetProxy',
    (
        # binary / reflected / in-place operator protocol
        '__and__', '__contains__', '__iand__', '__ior__', '__isub__',
        '__ixor__', '__len__', '__or__', '__rand__', '__ror__',
        '__rsub__', '__rxor__', '__sub__', '__xor__',
        # plain set methods
        'add', 'clear', 'copy', 'difference', 'difference_update',
        'discard', 'intersection', 'intersection_update', 'isdisjoint',
        'issubset', 'issuperset', 'pop', 'remove',
        'symmetric_difference', 'symmetric_difference_update',
        'union', 'update',
    ),
)


class SetProxy(BaseSetProxy):
    # in-place hooks need to return `self`, specify these manually
    #
    # The methods generated by MakeProxyType return whatever
    # ``_callmethod`` returns.  Python's augmented-assignment protocol
    # (``s &= other`` etc.) rebinds the left-hand name to the dunder's
    # return value, so the in-place dunders must return the proxy itself
    # or the caller's variable would stop referring to the proxy.
    def __iand__(self, value):
        # Apply ``&=`` on the referent set, then hand back this proxy.
        self._callmethod('__iand__', (value, ))
        return self

    def __ior__(self, value):
        # Apply ``|=`` on the referent set, then hand back this proxy.
        self._callmethod('__ior__', (value, ))
        return self

    def __isub__(self, value):
        # Apply ``-=`` on the referent set, then hand back this proxy.
        self._callmethod('__isub__', (value, ))
        return self
Example #3
0
    ConditionProxy,
    AcquirerProxy,
)

from aioprocessing.locks import _ContextManager
from .executor import _ExecutorMixin


# Proxy type exposing the queue API; calls are forwarded to the managed
# queue living in the manager process.
AioBaseQueueProxy = MakeProxyType(
    "AioQueueProxy",
    ("task_done", "get", "qsize", "put", "put_nowait",
     "get_nowait", "empty", "join", "_qsize", "full"),
)


class _AioProxyMixin(_ExecutorMixin):
    _obj = None

    def _async_call(self, method, *args, loop=None, **kwargs):
        return asyncio.ensure_future(
            self.run_in_executor(
Example #4
0

def _localhost():
    try:
        return socket.gethostbyname(socket.gethostname())
    except Exception:
        print(">>> Cannot get ip address of host.  Return 127.0.0.1 (localhost)")
        return "127.0.0.1"


# Create proxies for Queue types to be accessible from remote clients
# NOTE: multiprocessing module has a quirk/bug where passing proxies to another separate client (e.g., on
# another machine) causes the proxy to be rebuilt with a random authkey on the other side.  Unless
# we override the constructor to force an authkey value, we will hit AuthenticationError's

# Both queue flavours expose the same five methods to remote clients.
_QUEUE_EXPOSED = ["put", "get", "task_done", "join", "close"]
JoinableQueueProxyBase = MakeProxyType("JoinableQueueProxy", exposed=_QUEUE_EXPOSED)
QueueProxyBase = MakeProxyType("QueueProxy", exposed=_QUEUE_EXPOSED)


class JoinableQueueProxy(JoinableQueueProxyBase):
    """JoinableQueue proxy that pins the connection authkey to AUTHKEY.

    multiprocessing rebuilds proxies received by a remote client with a
    fresh random authkey; forcing AUTHKEY here avoids
    AuthenticationError on the remote side.
    """

    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        # The caller-supplied ``authkey`` is deliberately ignored.
        super().__init__(token, serializer, manager=manager, authkey=AUTHKEY,
                         exposed=exposed, incref=incref,
                         manager_owned=manager_owned)


class QueueProxy(QueueProxyBase):
    """Queue proxy that pins the connection authkey to AUTHKEY.

    multiprocessing rebuilds proxies received by a remote client with a
    fresh random authkey; forcing AUTHKEY here avoids
    AuthenticationError on the remote side.
    """

    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        # The caller-supplied ``authkey`` is deliberately ignored.
        super().__init__(token, serializer, manager=manager, authkey=AUTHKEY,
                         exposed=exposed, incref=incref,
                         manager_owned=manager_owned)
Example #5
0
from multiprocessing.managers import MakeProxyType, SyncManager

from pytest_mproc import find_free_port, AUTHKEY
from pytest_mproc.data import TestExecutionConstraint, TestBatch
from pytest_mproc.fixtures import Node
from pytest_mproc.main import Orchestrator
from pytest_mproc.utils import BasicReporter
from pytest_mproc.worker import WorkerSession

# Create proxies for Queue types to be accessible from remote clients
# NOTE: multiprocessing module has a quirk/bug where passing proxies to another separate client (e.g., on
# another machine) causes the proxy to be rebuilt with a random authkey on the other side.  Unless
# we override the constructor to force an authkey value, we will hit AuthenticationError's

# Remote-facing surface of the coordinator: lifecycle plus fixture hand-off.
CoordinatorProxyBase = MakeProxyType(
    "CoordinatorProxy",
    exposed=[
        "start",
        "put_fixture",
        "join",
        "kill",
        "is_local",
    ],
)


class CoordinatorProxy(CoordinatorProxyBase):
    """Coordinator proxy that pins the connection authkey to AUTHKEY.

    multiprocessing rebuilds proxies received by a remote client with a
    fresh random authkey; forcing AUTHKEY here avoids
    AuthenticationError on the remote side.
    """

    def __init__(self,
                 token,
                 serializer,
                 manager=None,
                 authkey=None,
                 exposed=None,
                 incref=True,
                 manager_owned=False):
        # The caller-supplied ``authkey`` is deliberately ignored.
        super().__init__(token, serializer, manager=manager, authkey=AUTHKEY,
                         exposed=exposed, incref=incref,
                         manager_owned=manager_owned)
Example #6
0
from multiprocessing.managers import SyncManager

from directord import datastores

# Proxy exposing a dict-like datastore document to manager clients.
BaseDictProxy = MakeProxyType(
    "BaseDictProxy",
    (
        # mapping dunder protocol
        "__contains__", "__delitem__", "__getitem__", "__iter__",
        "__len__", "__setitem__",
        # dict-style API plus datastore extras (``prune``/``set``)
        "clear", "copy", "get", "items", "keys", "pop", "popitem",
        "prune", "set", "setdefault", "update", "values",
    ),
)
# ``__iter__`` must return a proxy to the remote iterator rather than
# trying to pickle the iterator itself; "Iterator" is the typeid the
# manager uses for iterator results.
BaseDictProxy._method_to_typeid_ = {
    "__iter__": "Iterator",
}
# Expose datastore documents through SyncManager under the "document"
# typeid, served via the proxy type defined above.
SyncManager.register("document", datastores.BaseDocument, BaseDictProxy)
Example #7
0
    def remove(self, value):
        """Remove the first occurrence of *value* by rebuilding the
        backing shared array without that element."""
        idx = self.index(value)
        remaining = np.concatenate(
            [self.array[:idx], self.array[idx + 1:]])
        rebuilt = SharedNDArray.copy(remaining)
        self.allocated_size = -1  # TODO: overallocate more than needed
        self.replace_held_shared_array(rebuilt)

    def reverse(self):
        """Reverse the held sequence in place, writing through the
        existing buffer."""
        flipped = self.array[::-1]
        self.array[:] = flipped

    def sort(self):
        # Sort the held array in place via its own ``sort`` method
        # (ascending order; no key/comparator is supported here).
        self.array.sort()


# Read-only sequence operations, safe to forward through a plain proxy.
BaseSharedSequenceProxy = MakeProxyType(
    'BaseSharedSequenceProxy',
    (
        '__contains__',
        '__getitem__',
        '__len__',
        'count',
        'index',
    ),
)

# These operations must be performed by process with ownership.
BaseSharedListProxy = MakeProxyType(
    'BaseSharedListProxy',
    ('__setitem__', 'reverse', 'sort'),
)


class SharedListProxy(BaseSharedListProxy):
    # Really no point in deriving from BaseSharedSequenceProxy because most
    # of those methods can be performed better in the local process rather
    # than asking the process with ownership to have to perform them all.

    _shared_memory_proxy = True

    _exposed_ = (