Example #1
import atexit
import queue
import threading
import weakref

# Workers are created as daemon threads. This is done to allow the interpreter
# to exit when there are still idle threads in a ThreadPoolExecutor's thread
# pool (i.e. shutdown() was not called). However, allowing workers to die with
# the interpreter has two undesirable properties:
#   - The workers would still be running during interpreter shutdown,
#     meaning that they would fail in unpredictable ways.
#   - The workers could be killed while evaluating a work item, which could
#     be bad if the callable being evaluated has external side-effects e.g.
#     writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads finish.

_threads_queues: weakref.WeakKeyDictionary[
    threading.Thread, queue.Queue] = weakref.WeakKeyDictionary()
_shutdown = False


def _python_exit():
    global _shutdown
    _shutdown = True
    items = list(_threads_queues.items())
    for t, q in items:
        q.put(None)
    for t, q in items:
        t.join()


atexit.register(_python_exit)
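
For context, a condensed sketch of the worker loop this exit handler pairs with (the real concurrent.futures worker also manages futures and a weak executor reference):

def _worker(work_queue):
    while True:
        work_item = work_queue.get(block=True)
        if work_item is not None:
            work_item.run()  # execute the task
            continue
        # A None sentinel was enqueued by _python_exit(): re-queue it so any
        # sibling worker also wakes up, then let this thread exit.
        if _shutdown:
            work_queue.put(None)
            return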
Example #2
File: layout.py Project: jean/pymux
 def __init__(self, pymux):
     self.pymux = pymux
     self._bodies_for_clis = weakref.WeakKeyDictionary()  # Maps CLI to (hash, Container)
Example #3
class Callback(param.Parameterized):
    """
    A Callback defines some callback to be triggered when a property
    changes on the source object. A Callback can execute arbitrary
    Javascript code and will make all objects referenced in the args
    available in the JS namespace.
    """

    args = param.Dict(default={},
                      allow_None=True,
                      doc="""
        A mapping of names to Python objects. These objects are made
        available to the callback's code snippet as the values of
        named parameters to the callback.""")

    code = param.Dict(default=None,
                      doc="""
        A dictionary mapping from a source specification to a JS code
        snippet to be executed if the source property changes.""")

    # Mapping from a source object to a list of Link instances
    registry = weakref.WeakKeyDictionary()

    # Mapping to define callbacks by backend and Link type.
    # e.g. Callback._callbacks[Link] = Callback
    _callbacks = {}

    # Whether the link requires a target
    _requires_target = False

    def __init__(self, source, target=None, **params):
        if source is None:
            raise ValueError('%s must define a source' % type(self).__name__)
        # Source is stored as a weakref to allow it to be garbage collected;
        # the None case is already excluded by the check above.
        self._source = weakref.ref(source)
        super(Callback, self).__init__(**params)
        self.init()

    def init(self):
        """
        Registers the Callback
        """
        if self.source in self.registry:
            links = self.registry[self.source]
            params = {
                k: v
                for k, v in self.param.get_param_values() if k != 'name'
            }
            for link in links:
                link_params = {
                    k: v
                    for k, v in link.param.get_param_values() if k != 'name'
                }
                if not hasattr(link, 'target'):
                    pass
                elif (type(link) is type(self) and link.source is self.source
                      and link.target is self.target
                      and params == link_params):
                    return
            self.registry[self.source].append(self)
        else:
            self.registry[self.source] = [self]

    @classmethod
    def register_callback(cls, callback):
        """
        Register a LinkCallback providing the implementation for
        the Link for a particular backend.
        """
        cls._callbacks[cls] = callback

    @property
    def source(self):
        return self._source() if self._source else None

    @classmethod
    def _process_callbacks(cls, root_view, root_model):
        if not root_model:
            return

        linkable = root_view.select(Viewable)
        linkable += root_model.select({'type': BkModel})

        if not linkable:
            return

        found = [(link, src, getattr(link, 'target', None)) for src in linkable
                 for link in cls.registry.get(src, [])
                 if not link._requires_target or link.target in linkable]

        arg_overrides = {}
        if 'holoviews' in sys.modules:
            from .pane.holoviews import HoloViews, generate_panel_bokeh_map

            hv_views = root_view.select(HoloViews)
            map_hve_bk = generate_panel_bokeh_map(root_model, hv_views)
            for src in linkable:
                for link in cls.registry.get(src, []):
                    if hasattr(link, 'target'):
                        for tgt in map_hve_bk.get(link.target, []):
                            found.append((link, src, tgt))
                    arg_overrides[id(link)] = {}
                    for k, v in link.args.items():
                        # Not all args are hashable
                        try:
                            hv_objs = map_hve_bk.get(v, [])
                        except Exception:
                            continue
                        for tgt in hv_objs:
                            arg_overrides[id(link)][k] = tgt

        ref = root_model.ref['id']
        callbacks = []
        for link, src, tgt in found:
            cb = cls._callbacks[type(link)]
            if ((src is None or ref not in getattr(src, '_models', [ref])) or
                (getattr(link, '_requires_target', False) and tgt is None)
                    or (tgt is not None
                        and ref not in getattr(tgt, '_models', [ref]))):
                continue
            overrides = arg_overrides.get(id(link), {})
            callbacks.append(
                cb(root_model, link, src, tgt, arg_overrides=overrides))
        return callbacks
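
A quick standalone illustration (not part of the panel codebase) of why registry is a WeakKeyDictionary: an entry vanishes once its source object is garbage collected, so registering a link never keeps a source alive.

import gc
import weakref

class Source:
    pass

registry = weakref.WeakKeyDictionary()
src = Source()
registry[src] = ['some-link']
assert len(registry) == 1
del src       # drop the only strong reference to the key
gc.collect()  # make collection deterministic on non-refcounting runtimes
assert len(registry) == 0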
Example #4
import logging
import weakref
from typing import Any, Mapping, TYPE_CHECKING

import tool_shed.repository_types.util as rt_util
from galaxy import util
from galaxy.model.orm.now import now
from galaxy.security.validate_user_input import validate_password_str
from galaxy.util import unique_id
from galaxy.util.bunch import Bunch
from galaxy.util.dictifiable import Dictifiable
from galaxy.util.hash_util import new_secure_hash
from tool_shed.dependencies.repository import relation_builder
from tool_shed.util import (hg_util, metadata_util)
from tool_shed.util.hgweb_config import hgweb_config_manager

log = logging.getLogger(__name__)

WEAK_HG_REPO_CACHE: Mapping['Repository', Any] = weakref.WeakKeyDictionary()

if TYPE_CHECKING:
    from sqlalchemy.schema import Table

    class _HasTable:
        table: Table
else:
    _HasTable = object


class APIKeys(_HasTable):
    pass


class User(Dictifiable, _HasTable):
Example #5
 def make_weak_keyed_dict(self):
     dict = weakref.WeakKeyDictionary()
     # Materialize the map() iterator into a list: the list holds the only
     # strong references that keep the weakly-keyed entries alive.
     objects = list(map(Object, range(self.COUNT)))
     for o in objects:
         dict[o] = o.arg
     return dict, objects
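
A usage sketch under the test-class assumptions above (an Object type and a COUNT attribute):

# d, objs = self.make_weak_keyed_dict()
# assert len(d) == self.COUNT
# del objs[:]          # drop the strong references held by the list
# gc.collect()
# assert len(d) == 0   # the weakly-keyed entries are gone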
Example #6
class CloudPickler(Pickler):
    dispatch = Pickler.dispatch.copy()

    def __init__(self, file, protocol=None):
        if protocol is None:
            protocol = DEFAULT_PROTOCOL
        Pickler.__init__(self, file, protocol=protocol)
        # set of modules to unpickle
        self.modules = set()
        # map ids to dictionary. used to ensure that functions can share global env
        self.globals_ref = {}

    def dump(self, obj):
        self.inject_addons()
        try:
            return Pickler.dump(self, obj)
        except RuntimeError as e:
            if 'recursion' in e.args[0]:
                msg = """Could not pickle object as excessively deep recursion required."""
                raise pickle.PicklingError(msg)
            raise  # don't silently swallow unrelated RuntimeErrors

    def save_memoryview(self, obj):
        self.save(obj.tobytes())

    dispatch[memoryview] = save_memoryview

    if not PY3:

        def save_buffer(self, obj):
            self.save(str(obj))

        dispatch[buffer] = save_buffer

    def save_unsupported(self, obj):
        raise pickle.PicklingError("Cannot pickle objects of type %s" %
                                   type(obj))

    dispatch[types.GeneratorType] = save_unsupported

    # itertools objects do not pickle!
    for v in itertools.__dict__.values():
        if type(v) is type:
            dispatch[v] = save_unsupported

    def save_module(self, obj):
        """
        Save a module as an import
        """
        mod_name = obj.__name__
        # If module is successfully found then it is not a dynamically created module
        if hasattr(obj, '__file__'):
            is_dynamic = False
        else:
            try:
                _find_module(mod_name)
                is_dynamic = False
            except ImportError:
                is_dynamic = True

        self.modules.add(obj)
        if is_dynamic:
            self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)),
                             obj=obj)
        else:
            self.save_reduce(subimport, (obj.__name__, ), obj=obj)

    dispatch[types.ModuleType] = save_module

    def save_codeobject(self, obj):
        """
        Save a code object
        """
        if PY3:
            args = (obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
                    obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
                    obj.co_names, obj.co_varnames, obj.co_filename,
                    obj.co_name, obj.co_firstlineno, obj.co_lnotab,
                    obj.co_freevars, obj.co_cellvars)
        else:
            args = (obj.co_argcount, obj.co_nlocals, obj.co_stacksize,
                    obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                    obj.co_varnames, obj.co_filename, obj.co_name,
                    obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
                    obj.co_cellvars)
        self.save_reduce(types.CodeType, args, obj=obj)

    dispatch[types.CodeType] = save_codeobject

    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.
        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.
        """
        if obj in _BUILTIN_TYPE_CONSTRUCTORS:
            # We keep a special-cased cache of built-in type constructors at
            # global scope, because these functions are structured very
            # differently in different python versions and implementations (for
            # example, they're instances of types.BuiltinFunctionType in
            # CPython, but they're ordinary types.FunctionType instances in
            # PyPy).
            #
            # If the function we've received is in that cache, we just
            # serialize it as a lookup into the cache.
            return self.save_reduce(_BUILTIN_TYPE_CONSTRUCTORS[obj], (),
                                    obj=obj)

        write = self.write

        if name is None:
            name = obj.__name__
        try:
            # whichmodule() could fail, see
            # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
            modname = pickle.whichmodule(obj, name)
        except Exception:
            modname = None
        # print('which gives %s %s %s' % (modname, obj, name))
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'

        if modname == '__main__':
            themodule = None

        try:
            lookedup_by_name = getattr(themodule, name, None)
        except Exception:
            lookedup_by_name = None

        if themodule:
            self.modules.add(themodule)
            if lookedup_by_name is obj:
                return self.save_global(obj, name)

        # a builtin_function_or_method which comes in as an attribute of some
        # object (e.g., itertools.chain.from_iterable) will end
        # up with modname "__main__" and so end up here. But these functions
        # have no __code__ attribute in CPython, so the handling for
        # user-defined functions below will fail.
        # So we pickle them here using save_reduce; have to do it differently
        # for different python versions.
        if not hasattr(obj, '__code__'):
            if PY3:
                rv = obj.__reduce_ex__(self.proto)
            else:
                if hasattr(obj, '__self__'):
                    rv = (getattr, (obj.__self__, name))
                else:
                    raise pickle.PicklingError("Can't pickle %r" % obj)
            return self.save_reduce(obj=obj, *rv)

        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if (islambda(obj)
                or getattr(obj.__code__, 'co_filename', None) == '<stdin>'
                or themodule is None):
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            if lookedup_by_name is None or lookedup_by_name is not obj:
                self.save_function_tuple(obj)
                return

        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)

    dispatch[types.FunctionType] = save_function

    def _save_subimports(self, code, top_level_dependencies):
        """
        Ensure de-pickler imports any package child-modules that
        are needed by the function
        """
        # check if any known dependency is an imported package
        for x in top_level_dependencies:
            if isinstance(x, types.ModuleType) and hasattr(
                    x, '__package__') and x.__package__:
                # check if the package has any currently loaded sub-imports
                prefix = x.__name__ + '.'
                for name, module in sys.modules.items():
                    # Older versions of pytest will add a "None" module to sys.modules.
                    if name is not None and name.startswith(prefix):
                        # check whether the function can address the sub-module
                        tokens = set(name[len(prefix):].split('.'))
                        if not tokens - set(code.co_names):
                            # ensure unpickler executes this import
                            self.save(module)
                            # then discards the reference to it
                            self.write(pickle.POP)

    def save_dynamic_class(self, obj):
        """
        Save a class that can't be stored as module global.
        This method is used to serialize classes that are defined inside
        functions, or that otherwise can't be serialized as attribute lookups
        from global modules.
        """
        clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
        clsdict.pop('__weakref__', None)

        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
        # the initial skeleton class.  This is safe because we know that the
        # doc can't participate in a cycle with the original class.
        type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

        # If type overrides __dict__ as a property, include it in the type kwargs.
        # In Python 2, we can't set this attribute after construction.
        __dict__ = clsdict.pop('__dict__', None)
        if isinstance(__dict__, property):
            type_kwargs['__dict__'] = __dict__

        save = self.save
        write = self.write

        # We write pickle instructions explicitly here to handle the
        # possibility that the type object participates in a cycle with its own
        # __dict__. We first write an empty "skeleton" version of the class and
        # memoize it before writing the class' __dict__ itself. We then write
        # instructions to "rehydrate" the skeleton class by restoring the
        # attributes from the __dict__.
        #
        # A type can appear in a cycle with its __dict__ if an instance of the
        # type appears in the type's __dict__ (which happens for the stdlib
        # Enum class), or if the type defines methods that close over the name
        # of the type (which is common for Python 2-style super() calls).

        # Push the rehydration function.
        save(_rehydrate_skeleton_class)

        # Mark the start of the args tuple for the rehydration function.
        write(pickle.MARK)

        # Create and memoize a skeleton class with obj's name and bases.
        tp = type(obj)
        self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs),
                         obj=obj)

        # Now save the rest of obj's __dict__. Any references to obj
        # encountered while saving will point to the skeleton class.
        save(clsdict)

        # Write a tuple of (skeleton_class, clsdict).
        write(pickle.TUPLE)

        # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
        write(pickle.REDUCE)

    def save_function_tuple(self, func):
        """  Pickles an actual func object.
        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        if is_tornado_coroutine(func):
            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__, ),
                             obj=func)
            return

        save = self.save
        write = self.write

        code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(
            func)

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)  # beginning of tuple that _fill_function expects

        self._save_subimports(
            code,
            itertools.chain(f_globals.values(), closure_values or ()),
        )

        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((
            code,
            len(closure_values) if closure_values is not None else -1,
            base_globals,
        ))
        write(pickle.REDUCE)
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        state = {
            'globals': f_globals,
            'defaults': defaults,
            'dict': dct,
            'module': func.__module__,
            'closure_values': closure_values,
        }
        if hasattr(func, '__qualname__'):
            state['qualname'] = func.__qualname__
        save(state)
        write(pickle.TUPLE)
        write(pickle.REDUCE)  # applies _fill_function on the tuple

    _extract_code_globals_cache = (weakref.WeakKeyDictionary()
                                   if not hasattr(sys, "pypy_version_info")
                                   else {})
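    # The cache above is weakly keyed by code object, so an entry is dropped
    # as soon as its code object is garbage collected. The plain-dict fallback
    # exists because PyPy's code objects could not be weakly referenced (an
    # assumption based on the pypy_version_info check).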

    @classmethod
    def extract_code_globals(cls, co):
        """
        Find all global names read or written by the code object co
        """
        out_names = cls._extract_code_globals_cache.get(co)
        if out_names is None:
            try:
                names = co.co_names
            except AttributeError:
                # PyPy "builtin-code" object
                out_names = set()
            else:
                out_names = set(names[oparg]
                                for op, oparg in _walk_global_ops(co))

                # see if nested functions have any global refs
                if co.co_consts:
                    for const in co.co_consts:
                        if type(const) is types.CodeType:
                            out_names |= cls.extract_code_globals(const)

            cls._extract_code_globals_cache[co] = out_names

        return out_names

    def extract_func_data(self, func):
        """
        Turn the function into a tuple of data necessary to recreate it:
            code, globals, defaults, closure_values, dict
        """
        code = func.__code__

        # extract all global ref's
        func_global_refs = self.extract_code_globals(code)

        # process all variables referenced by global environment
        f_globals = {}
        for var in func_global_refs:
            if var in func.__globals__:
                f_globals[var] = func.__globals__[var]

        # defaults requires no processing
        defaults = func.__defaults__

        # process closure
        closure = (list(map(_get_cell_contents, func.__closure__))
                   if func.__closure__ is not None else None)

        # save the dict
        dct = func.__dict__

        base_globals = self.globals_ref.get(id(func.__globals__), {})
        self.globals_ref[id(func.__globals__)] = base_globals

        return (code, f_globals, defaults, closure, dct, base_globals)

    def save_builtin_function(self, obj):
        if obj.__module__ == "__builtin__":
            return self.save_global(obj)
        return self.save_function(obj)

    dispatch[types.BuiltinFunctionType] = save_builtin_function

    def save_global(self, obj, name=None, pack=struct.pack):
        """
        Save a "global".
        The name of this method is somewhat misleading: all types get
        dispatched here.
        """
        if obj.__module__ == "__main__":
            return self.save_dynamic_class(obj)

        try:
            return Pickler.save_global(self, obj, name=name)
        except Exception:
            if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
                if obj in _BUILTIN_TYPE_NAMES:
                    return self.save_reduce(_builtin_type,
                                            (_BUILTIN_TYPE_NAMES[obj], ),
                                            obj=obj)

            typ = type(obj)
            if typ is not obj and isinstance(obj, (type, types.ClassType)):
                return self.save_dynamic_class(obj)

            raise

    dispatch[type] = save_global
    dispatch[types.ClassType] = save_global

    def save_instancemethod(self, obj):
        # Memoization is rarely useful here: Python creates a new bound
        # method object on each attribute access, so the same method object
        # is unlikely to be seen twice.
        if obj.__self__ is None:
            self.save_reduce(getattr, (obj.im_class, obj.__name__))
        else:
            if PY3:
                self.save_reduce(types.MethodType,
                                 (obj.__func__, obj.__self__),
                                 obj=obj)
            else:
                self.save_reduce(
                    types.MethodType,
                    (obj.__func__, obj.__self__, obj.__self__.__class__),
                    obj=obj)

    dispatch[types.MethodType] = save_instancemethod

    def save_inst(self, obj):
        """Inner logic to save instance. Based off pickle.save_inst"""
        cls = obj.__class__

        # Try the dispatch table (pickle module doesn't do it)
        f = self.dispatch.get(cls)
        if f:
            f(self, obj)  # Call unbound method with explicit self
            return

        memo = self.memo
        write = self.write
        save = self.save

        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args)  # XXX Assert it's a sequence
            pickle._keep_alive(args, memo)
        else:
            args = ()

        write(pickle.MARK)

        if self.bin:
            save(cls)
            for arg in args:
                save(arg)
            write(pickle.OBJ)
        else:
            for arg in args:
                save(arg)
            write(pickle.INST + cls.__module__ + '\n' + cls.__name__ + '\n')

        self.memoize(obj)

        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
        else:
            stuff = getstate()
            pickle._keep_alive(stuff, memo)
        save(stuff)
        write(pickle.BUILD)

    if not PY3:
        dispatch[types.InstanceType] = save_inst

    def save_property(self, obj):
        # properties are not saved correctly by the default pickler
        self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__),
                         obj=obj)

    dispatch[property] = save_property

    def save_classmethod(self, obj):
        orig_func = obj.__func__
        self.save_reduce(type(obj), (orig_func, ), obj=obj)

    dispatch[classmethod] = save_classmethod
    dispatch[staticmethod] = save_classmethod

    def save_itemgetter(self, obj):
        """itemgetter serializer (needed for namedtuple support)"""
        class Dummy:
            def __getitem__(self, item):
                return item

        items = obj(Dummy())
        if not isinstance(items, tuple):
            items = (items, )
        return self.save_reduce(operator.itemgetter, items)

    if type(operator.itemgetter) is type:
        dispatch[operator.itemgetter] = save_itemgetter

    def save_attrgetter(self, obj):
        """attrgetter serializer"""
        class Dummy(object):
            def __init__(self, attrs, index=None):
                self.attrs = attrs
                self.index = index

            def __getattribute__(self, item):
                attrs = object.__getattribute__(self, "attrs")
                index = object.__getattribute__(self, "index")
                if index is None:
                    index = len(attrs)
                    attrs.append(item)
                else:
                    attrs[index] = ".".join([attrs[index], item])
                return type(self)(attrs, index)

        attrs = []
        obj(Dummy(attrs))
        return self.save_reduce(operator.attrgetter, tuple(attrs))

    if type(operator.attrgetter) is type:
        dispatch[operator.attrgetter] = save_attrgetter

    def save_file(self, obj):
        """Save a file"""
        try:
            import StringIO as pystringIO  # we can't use cStringIO as it lacks the name attribute
        except ImportError:
            import io as pystringIO

        if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
            raise pickle.PicklingError(
                "Cannot pickle files that do not map to an actual file")
        if obj is sys.stdout:
            return self.save_reduce(getattr, (sys, 'stdout'), obj=obj)
        if obj is sys.stderr:
            return self.save_reduce(getattr, (sys, 'stderr'), obj=obj)
        if obj is sys.stdin:
            raise pickle.PicklingError("Cannot pickle standard input")
        if obj.closed:
            raise pickle.PicklingError("Cannot pickle closed files")
        if hasattr(obj, 'isatty') and obj.isatty():
            raise pickle.PicklingError(
                "Cannot pickle files that map to tty objects")
        if 'r' not in obj.mode and '+' not in obj.mode:
            raise pickle.PicklingError(
                "Cannot pickle files that are not opened for reading: %s" %
                obj.mode)

        name = obj.name

        retval = pystringIO.StringIO()

        try:
            # Read the whole file
            curloc = obj.tell()
            obj.seek(0)
            contents = obj.read()
            obj.seek(curloc)
        except IOError:
            raise pickle.PicklingError(
                "Cannot pickle file %s as it cannot be read" % name)
        retval.write(contents)
        retval.seek(curloc)

        retval.name = name
        self.save(retval)
        self.memoize(obj)

    def save_ellipsis(self, obj):
        self.save_reduce(_gen_ellipsis, ())

    def save_not_implemented(self, obj):
        self.save_reduce(_gen_not_implemented, ())

    if PY3:
        dispatch[io.TextIOWrapper] = save_file
    else:
        dispatch[file] = save_file

    dispatch[type(Ellipsis)] = save_ellipsis
    dispatch[type(NotImplemented)] = save_not_implemented

    def save_weakset(self, obj):
        self.save_reduce(weakref.WeakSet, (list(obj), ))

    dispatch[weakref.WeakSet] = save_weakset

    def save_logger(self, obj):
        self.save_reduce(logging.getLogger, (obj.name, ), obj=obj)

    dispatch[logging.Logger] = save_logger
    """Special functions for Add-on libraries"""

    def inject_addons(self):
        """Plug in system. Register additional pickling functions if modules already loaded"""
        pass
Example #7
    struct timespec st_mtim;
    ...;
};

int fstat(int fd, struct stat *buf);

int futimens(int fd, const struct timespec times[2]);
''')
lib = ffi.verify('''
    #include <sys/types.h>
    #include <sys/stat.h>
    #include <unistd.h>
    ''',
                 ext_package='bedup')

_stat_ownership = weakref.WeakKeyDictionary()
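# Maps a nested timespec view (st_atim / st_mtim) to the owning 'struct stat'
# buffer: the weak key lets an entry disappear with its view, while the strong
# value keeps the underlying buffer alive for as long as any view is in use.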


def fstat_ns(fd):
    stat = ffi.new('struct stat *')
    if lib.fstat(fd, stat) != 0:
        raise IOError(ffi.errno, os.strerror(ffi.errno), fd)
    # The nested structs seem to be recreated at every member access.
    atime, mtime = stat.st_atim, stat.st_mtim
    assert 0 <= atime.tv_nsec < 1e9
    assert 0 <= mtime.tv_nsec < 1e9
    _stat_ownership[atime] = _stat_ownership[mtime] = stat
    return atime, mtime


def futimens(fd, ns):
Example #8
class CloudPickler(Pickler):

    dispatch = Pickler.dispatch.copy()

    def __init__(self, file, protocol=None):
        Pickler.__init__(self, file, protocol)
        # set of modules to unpickle
        self.modules = set()
        # map ids to dictionary. used to ensure that functions can share global env
        self.globals_ref = {}

    def dump(self, obj):
        self.inject_addons()
        try:
            return Pickler.dump(self, obj)
        except RuntimeError as e:
            if 'recursion' in e.args[0]:
                msg = """Could not pickle object as excessively deep recursion required."""
                raise pickle.PicklingError(msg)
            raise  # don't silently swallow unrelated RuntimeErrors
        except pickle.PickleError:
            raise
        except Exception as e:
            if "'i' format requires" in e.message:
                msg = "Object too large to serialize: " + e.message
            else:
                msg = "Could not serialize object: " + e.__class__.__name__ + ": " + e.message
            print_exec(sys.stderr)
            raise pickle.PicklingError(msg)

    def save_memoryview(self, obj):
        """Fallback to save_string"""
        Pickler.save_string(self, str(obj))

    def save_buffer(self, obj):
        """Fallback to save_string"""
        Pickler.save_string(self, str(obj))

    if PY3:
        dispatch[memoryview] = save_memoryview
    else:
        dispatch[buffer] = save_buffer

    def save_unsupported(self, obj):
        raise pickle.PicklingError("Cannot pickle objects of type %s" %
                                   type(obj))

    dispatch[types.GeneratorType] = save_unsupported

    # itertools objects do not pickle!
    for v in itertools.__dict__.values():
        if type(v) is type:
            dispatch[v] = save_unsupported

    def save_module(self, obj):
        """
        Save a module as an import
        """
        self.modules.add(obj)
        self.save_reduce(subimport, (obj.__name__, ), obj=obj)

    dispatch[types.ModuleType] = save_module

    def save_codeobject(self, obj):
        """
        Save a code object
        """
        if PY3:
            args = (obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
                    obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
                    obj.co_names, obj.co_varnames, obj.co_filename,
                    obj.co_name, obj.co_firstlineno, obj.co_lnotab,
                    obj.co_freevars, obj.co_cellvars)
        else:
            args = (obj.co_argcount, obj.co_nlocals, obj.co_stacksize,
                    obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                    obj.co_varnames, obj.co_filename, obj.co_name,
                    obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
                    obj.co_cellvars)
        self.save_reduce(types.CodeType, args, obj=obj)

    dispatch[types.CodeType] = save_codeobject

    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.

        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.
        """
        write = self.write

        if name is None:
            name = obj.__name__
        try:
            # whichmodule() could fail, see
            # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
            modname = pickle.whichmodule(obj, name)
        except Exception:
            modname = None
        # print('which gives %s %s %s' % (modname, obj, name))
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'

        if modname == '__main__':
            themodule = None

        if themodule:
            self.modules.add(themodule)
            if getattr(themodule, name, None) is obj:
                return self.save_global(obj, name)

        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if (islambda(obj)
                or obj.__code__.co_filename == '<stdin>'
                or themodule is None):
            #print("save global", islambda(obj), obj.__code__.co_filename, modname, themodule)
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            klass = getattr(themodule, name, None)
            if klass is None or klass is not obj:
                self.save_function_tuple(obj)
                return

        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)

    dispatch[types.FunctionType] = save_function

    def save_function_tuple(self, func):
        """  Pickles an actual func object.

        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        save = self.save
        write = self.write

        code, f_globals, defaults, closure, dct, base_globals = self.extract_func_data(
            func)

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)  # beginning of tuple that _fill_function expects

        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((code, closure, base_globals))
        write(pickle.REDUCE)
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        save(f_globals)
        save(defaults)
        save(dct)
        save(func.__module__)
        write(pickle.TUPLE)
        write(pickle.REDUCE)  # applies _fill_function on the tuple

    _extract_code_globals_cache = (
        weakref.WeakKeyDictionary()
        if sys.version_info >= (2, 7) and not hasattr(sys, "pypy_version_info")
        else {})

    @classmethod
    def extract_code_globals(cls, co):
        """
        Find all global names read or written by the code object co
        """
        out_names = cls._extract_code_globals_cache.get(co)
        if out_names is None:
            try:
                names = co.co_names
            except AttributeError:
                # PyPy "builtin-code" object
                out_names = set()
            else:
                out_names = set(names[oparg]
                                for op, oparg in _walk_global_ops(co))

                # see if nested functions have any global refs
                if co.co_consts:
                    for const in co.co_consts:
                        if type(const) is types.CodeType:
                            out_names |= cls.extract_code_globals(const)

            cls._extract_code_globals_cache[co] = out_names

        return out_names

    def extract_func_data(self, func):
        """
        Turn the function into a tuple of data necessary to recreate it:
            code, globals, defaults, closure, dict
        """
        code = func.__code__

        # extract all global ref's
        func_global_refs = self.extract_code_globals(code)

        # process all variables referenced by global environment
        f_globals = {}
        for var in func_global_refs:
            if var in func.__globals__:
                f_globals[var] = func.__globals__[var]

        # defaults requires no processing
        defaults = func.__defaults__

        # process closure
        closure = [c.cell_contents
                   for c in func.__closure__] if func.__closure__ else []

        # save the dict
        dct = func.__dict__

        base_globals = self.globals_ref.get(id(func.__globals__), {})
        self.globals_ref[id(func.__globals__)] = base_globals

        return (code, f_globals, defaults, closure, dct, base_globals)

    def save_builtin_function(self, obj):
        if obj.__module__ == "__builtin__":
            return self.save_global(obj)
        return self.save_function(obj)

    dispatch[types.BuiltinFunctionType] = save_builtin_function

    def save_global(self, obj, name=None, pack=struct.pack):
        if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
            if obj in _BUILTIN_TYPE_NAMES:
                return self.save_reduce(_builtin_type,
                                        (_BUILTIN_TYPE_NAMES[obj], ),
                                        obj=obj)

        if name is None:
            name = obj.__name__

        modname = getattr(obj, "__module__", None)
        if modname is None:
            try:
                # whichmodule() could fail, see
                # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
                modname = pickle.whichmodule(obj, name)
            except Exception:
                modname = '__main__'

        if modname == '__main__':
            themodule = None
        else:
            __import__(modname)
            themodule = sys.modules[modname]
            self.modules.add(themodule)

        if hasattr(themodule, name) and getattr(themodule, name) is obj:
            return Pickler.save_global(self, obj, name)

        typ = type(obj)
        if typ is not obj and isinstance(obj, (type, types.ClassType)):
            d = dict(obj.__dict__)  # copy dict proxy to a dict
            if not isinstance(d.get('__dict__', None), property):
                # don't extract dict that are properties
                d.pop('__dict__', None)
            d.pop('__weakref__', None)

            # hack as __new__ is stored differently in the __dict__
            new_override = d.get('__new__', None)
            if new_override:
                d['__new__'] = obj.__new__

            # workaround for namedtuple (hijacked by PySpark)
            if getattr(obj, '_is_namedtuple_', False):
                self.save_reduce(_load_namedtuple, (obj.__name__, obj._fields))
                return

            self.save(_load_class)
            self.save_reduce(typ, (obj.__name__, obj.__bases__, {
                "__doc__": obj.__doc__
            }),
                             obj=obj)
            d.pop('__doc__', None)
            # handle property and staticmethod
            dd = {}
            for k, v in d.items():
                if isinstance(v, property):
                    k = ('property', k)
                    v = (v.fget, v.fset, v.fdel, v.__doc__)
                elif isinstance(v, staticmethod) and hasattr(v, '__func__'):
                    k = ('staticmethod', k)
                    v = v.__func__
                elif isinstance(v, classmethod) and hasattr(v, '__func__'):
                    k = ('classmethod', k)
                    v = v.__func__
                dd[k] = v
            self.save(dd)
            self.write(pickle.TUPLE2)
            self.write(pickle.REDUCE)

        else:
            raise pickle.PicklingError("Can't pickle %r" % obj)

    dispatch[type] = save_global
    dispatch[types.ClassType] = save_global

    def save_instancemethod(self, obj):
        # Memoization is rarely useful here: Python creates a new bound
        # method object on each attribute access, so the same method object
        # is unlikely to be seen twice.
        if PY3:
            self.save_reduce(types.MethodType, (obj.__func__, obj.__self__),
                             obj=obj)
        else:
            self.save_reduce(
                types.MethodType,
                (obj.__func__, obj.__self__, obj.__self__.__class__),
                obj=obj)

    dispatch[types.MethodType] = save_instancemethod

    def save_inst(self, obj):
        """Inner logic to save instance. Based off pickle.save_inst
        Supports __transient__"""
        cls = obj.__class__

        memo = self.memo
        write = self.write
        save = self.save

        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args)  # XXX Assert it's a sequence
            pickle._keep_alive(args, memo)
        else:
            args = ()

        write(pickle.MARK)

        if self.bin:
            save(cls)
            for arg in args:
                save(arg)
            write(pickle.OBJ)
        else:
            for arg in args:
                save(arg)
            write(pickle.INST + cls.__module__ + '\n' + cls.__name__ + '\n')

        self.memoize(obj)

        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
            #remove items if transient
            if hasattr(obj, '__transient__'):
                transient = obj.__transient__
                stuff = stuff.copy()
                for k in list(stuff.keys()):
                    if k in transient:
                        del stuff[k]
        else:
            stuff = getstate()
            pickle._keep_alive(stuff, memo)
        save(stuff)
        write(pickle.BUILD)

    if not PY3:
        dispatch[types.InstanceType] = save_inst

    def save_property(self, obj):
        # properties are not saved correctly by the default pickler
        self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__),
                         obj=obj)

    dispatch[property] = save_property

    def save_itemgetter(self, obj):
        """itemgetter serializer (needed for namedtuple support)"""
        class Dummy:
            def __getitem__(self, item):
                return item

        items = obj(Dummy())
        if not isinstance(items, tuple):
            items = (items, )
        return self.save_reduce(operator.itemgetter, items)

    if type(operator.itemgetter) is type:
        dispatch[operator.itemgetter] = save_itemgetter

    def save_attrgetter(self, obj):
        """attrgetter serializer"""
        class Dummy(object):
            def __init__(self, attrs, index=None):
                self.attrs = attrs
                self.index = index

            def __getattribute__(self, item):
                attrs = object.__getattribute__(self, "attrs")
                index = object.__getattribute__(self, "index")
                if index is None:
                    index = len(attrs)
                    attrs.append(item)
                else:
                    attrs[index] = ".".join([attrs[index], item])
                return type(self)(attrs, index)

        attrs = []
        obj(Dummy(attrs))
        return self.save_reduce(operator.attrgetter, tuple(attrs))

    if type(operator.attrgetter) is type:
        dispatch[operator.attrgetter] = save_attrgetter

    def save_reduce(self,
                    func,
                    args,
                    state=None,
                    listitems=None,
                    dictitems=None,
                    obj=None):
        """Modified to support __transient__ on new objects
        Change only affects protocol level 2 (which is always used by PiCloud"""
        # Assert that args is a tuple or None
        if not isinstance(args, tuple):
            raise pickle.PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not hasattr(func, '__call__'):
            raise pickle.PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            #Added fix to allow transient
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            args = args[1:]
            save(cls)

            #Don't pickle transient entries
            if hasattr(obj, '__transient__'):
                transient = obj.__transient__
                state = state.copy()

                for k in list(state.keys()):
                    if k in transient:
                        del state[k]

            save(args)
            write(pickle.NEWOBJ)
        else:
            save(func)
            save(args)
            write(pickle.REDUCE)

        if obj is not None:
            self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(pickle.BUILD)

    def save_partial(self, obj):
        """Partial objects do not serialize correctly in python2.x -- this fixes the bugs"""
        self.save_reduce(_genpartial, (obj.func, obj.args, obj.keywords))

    if sys.version_info < (2, 7):  # 2.7 supports partial pickling
        dispatch[partial] = save_partial

    def save_file(self, obj):
        """Save a file"""
        try:
            import StringIO as pystringIO  #we can't use cStringIO as it lacks the name attribute
        except ImportError:
            import io as pystringIO

        if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
            raise pickle.PicklingError(
                "Cannot pickle files that do not map to an actual file")
        if obj is sys.stdout:
            return self.save_reduce(getattr, (sys, 'stdout'), obj=obj)
        if obj is sys.stderr:
            return self.save_reduce(getattr, (sys, 'stderr'), obj=obj)
        if obj is sys.stdin:
            raise pickle.PicklingError("Cannot pickle standard input")
        if hasattr(obj, 'isatty') and obj.isatty():
            raise pickle.PicklingError(
                "Cannot pickle files that map to tty objects")
        if 'r' not in obj.mode:
            raise pickle.PicklingError(
                "Cannot pickle files that are not opened for reading")
        name = obj.name
        try:
            fsize = os.stat(name).st_size
        except OSError:
            raise pickle.PicklingError(
                "Cannot pickle file %s as it cannot be stat" % name)

        if obj.closed:
            #create an empty closed string io
            retval = pystringIO.StringIO("")
            retval.close()
        elif not fsize:  #empty file
            retval = pystringIO.StringIO("")
            try:
                tmpfile = file(name)
                tst = tmpfile.read(1)
            except IOError:
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it cannot be read" % name)
            tmpfile.close()
            if tst != '':
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it does not appear to map to a physical, real file"
                    % name)
        else:
            try:
                tmpfile = file(name)
                contents = tmpfile.read()
                tmpfile.close()
            except IOError:
                raise pickle.PicklingError(
                    "Cannot pickle file %s as it cannot be read" % name)
            retval = pystringIO.StringIO(contents)
            curloc = obj.tell()
            retval.seek(curloc)

        retval.name = name
        self.save(retval)
        self.memoize(obj)

    if PY3:
        dispatch[io.TextIOWrapper] = save_file
    else:
        dispatch[file] = save_file
    """Special functions for Add-on libraries"""

    def inject_numpy(self):
        numpy = sys.modules.get('numpy')
        if not numpy or not hasattr(numpy, 'ufunc'):
            return
        self.dispatch[numpy.ufunc] = self.__class__.save_ufunc

    def save_ufunc(self, obj):
        """Hack function for saving numpy ufunc objects"""
        name = obj.__name__
        numpy_tst_mods = ['numpy', 'scipy.special']
        for tst_mod_name in numpy_tst_mods:
            tst_mod = sys.modules.get(tst_mod_name, None)
            if tst_mod and name in tst_mod.__dict__:
                return self.save_reduce(_getobject, (tst_mod_name, name))
        raise pickle.PicklingError(
            'cannot save %s. Cannot resolve what module it is defined in' %
            str(obj))

    def inject_addons(self):
        """Plug in system. Register additional pickling functions if modules already loaded"""
        self.inject_numpy()
Example #9
    def __init__(self,
                 container_strategy,
                 tpu_cluster_resolver=None,
                 steps_per_run=None,
                 device_assignment=None):
        super(TPUExtended, self).__init__(container_strategy)

        if tpu_cluster_resolver is None:
            tpu_cluster_resolver = TPUClusterResolver("")

        if steps_per_run is None:
            # TODO(frankchn): Warn when we are being used by DS/Keras and this is
            # not specified.
            steps_per_run = 1

        self._tpu_function_cache = weakref.WeakKeyDictionary()
        self._tpu_cluster_resolver = tpu_cluster_resolver
        self._tpu_metadata = get_tpu_system_metadata(
            self._tpu_cluster_resolver)
        self._device_assignment = device_assignment

        # Device assignment is currently only supported for 1 core case.
        if self._device_assignment:
            assert isinstance(self._device_assignment,
                              device_assignment_lib.DeviceAssignment)
            if self._device_assignment.num_replicas != 1:
                raise ValueError(
                    "Device assignment is only supported for a single "
                    "core single replica case currently.")
            if self._device_assignment.num_cores_per_replica != 1:
                raise ValueError(
                    "Device assignment is only supported for a single "
                    "core single replica case currently.")
            if not all(self._device_assignment.core_assignment[0][0] ==
                       [0, 0, 0]):
                raise ValueError(
                    "Device assignment is only supported for a single "
                    "core single replica case currently.")

        # TODO(jhseu): Switch to DeviceAssignment to support pods and model
        # parallelism.
        self._tpu_devices = [
            d.name for d in self._tpu_metadata.devices
            if "device:TPU:" in d.name
        ]

        self._host_device = device_util.get_host_for_device(
            self._tpu_devices[0])

        # Only create variables for the number of replicas we're running.
        self._tpu_devices = self._tpu_devices[:self._num_replicas_in_sync]
        self._device_map = values.ReplicaDeviceMap(self._tpu_devices)

        # Preload the data onto the TPUs.
        input_worker_devices = collections.OrderedDict()
        for tpu_device in self._tpu_devices:
            host_device = device_util.get_host_for_device(tpu_device)
            input_worker_devices.setdefault(host_device, [])
            input_worker_devices[host_device].append(tpu_device)
        self._input_workers = input_lib.InputWorkers(
            self._device_map, tuple(input_worker_devices.items()))

        # TODO(sourabhbajaj): Remove this once performance of running one step
        # at a time is comparable to multiple steps.
        self.steps_per_run = steps_per_run
        self._require_static_shapes = True

        self.experimental_enable_get_next_as_optional = True
Example #10
#coding:utf-8

import logging
import time

from scrapy.dupefilters import BaseDupeFilter
from scrapy.utils.request import request_fingerprint
from scrapy.utils.python import to_bytes
from . import defaults
from .connection import get_redis_from_settings
from .BloomFilter import BloomFilter
from scrapy.utils.httpobj import urlparse_cached
try:
    from urlparse import urlparse, parse_qs  # Python 2
except ImportError:  # Python 3
    from urllib.parse import urlparse, parse_qs
from hashlib import sha1
import weakref
_fingerprint_cache = weakref.WeakKeyDictionary()
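# Fingerprints are cached per Request object; the weak keys let cache entries
# be reclaimed together with the request objects themselves.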

logger = logging.getLogger(__name__)


# TODO: Rename class to RedisDupeFilter.
class RFPDupeFilter(BaseDupeFilter):
    """Redis-based request duplicates filter.
    This class can also be used with Scrapy's default scheduler.
    """

    logger = logger

    def __init__(self, server, key, debug=False):
        """Initialize the duplicates filter.
Example #11
 def __init__(self):
     self.weak_key_dict = weakref.WeakKeyDictionary()
Example #12
 def __init__(self, constraint):
     self.constraint = constraint
     self.data = weakref.WeakKeyDictionary()
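
A minimal sketch of the data-descriptor pattern this constructor suggests (the Constrained and Point names here are hypothetical, not from the original project):

import weakref

class Constrained:
    """Descriptor keeping per-instance values in a WeakKeyDictionary so the
    owning instances can still be garbage collected."""

    def __init__(self, constraint):
        self.constraint = constraint
        self.data = weakref.WeakKeyDictionary()

    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        return self.data.get(instance)

    def __set__(self, instance, value):
        if not self.constraint(value):
            raise ValueError('constraint violated: %r' % (value, ))
        self.data[instance] = value

class Point:
    x = Constrained(lambda v: v >= 0)

p = Point()
p.x = 3   # passes the constraint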
Example #13
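 # self._tls is presumably a threading.local(): each thread lazily gets its
 # own WeakKeyDictionary, so cached objects never leak across threads.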
 def _object_cache(self):
     try:
         return self._tls.object_cache
     except AttributeError:
         self._tls.object_cache = weakref.WeakKeyDictionary()
         return self._tls.object_cache
Example #14
    def __init__(self,
                 parent=None,
                 name=None,
                 labels=None,
                 title=None,
                 viewBox=None,
                 axisItems=None,
                 enableMenu=True,
                 **kargs):
        """
        Create a new PlotItem. All arguments are optional.
        Any extra keyword arguments are passed to :func:`PlotItem.plot() <pyqtgraph.PlotItem.plot>`.
        
        ==============  ==========================================================================================
        **Arguments:**
        *title*         Title to display at the top of the item. Html is allowed.
        *labels*        A dictionary specifying the axis labels to display::
                   
                            {'left': (args), 'bottom': (args), ...}
                     
                        The name of each axis and the corresponding arguments are passed to 
                        :func:`PlotItem.setLabel() <pyqtgraph.PlotItem.setLabel>`
                        Optionally, PlotItem may also be initialized with the keyword arguments left,
                        right, top, or bottom to achieve the same effect.
        *name*          Registers a name for this view so that others may link to it
        *viewBox*       If specified, the PlotItem will be constructed with this as its ViewBox.
        *axisItems*     Optional dictionary instructing the PlotItem to use pre-constructed items
                        for its axes. The dict keys must be axis names ('left', 'bottom', 'right', 'top')
                        and the values must be instances of AxisItem (or at least compatible with AxisItem).
        ==============  ==========================================================================================
        """

        GraphicsWidget.__init__(self, parent)

        self.setSizePolicy(QtGui.QSizePolicy.Expanding,
                           QtGui.QSizePolicy.Expanding)

        ## Set up control buttons
        path = os.path.dirname(__file__)
        self.autoBtn = ButtonItem(pixmaps.getPixmap('auto'), 14, self)
        self.autoBtn.mode = 'auto'
        self.autoBtn.clicked.connect(self.autoBtnClicked)
        self.buttonsHidden = False  ## whether the user has requested buttons to be hidden
        self.mouseHovering = False

        self.layout = QtGui.QGraphicsGridLayout()
        self.layout.setContentsMargins(1, 1, 1, 1)
        self.setLayout(self.layout)
        self.layout.setHorizontalSpacing(0)
        self.layout.setVerticalSpacing(0)

        if viewBox is None:
            viewBox = ViewBox(parent=self)
        self.vb = viewBox
        self.vb.sigStateChanged.connect(self.viewStateChanged)
        self.setMenuEnabled(
            enableMenu, enableMenu)  ## en/disable plotitem and viewbox menus

        if name is not None:
            self.vb.register(name)
        self.vb.sigRangeChanged.connect(self.sigRangeChanged)
        self.vb.sigXRangeChanged.connect(self.sigXRangeChanged)
        self.vb.sigYRangeChanged.connect(self.sigYRangeChanged)

        self.layout.addItem(self.vb, 2, 1)
        self.alpha = 1.0
        self.autoAlpha = True
        self.spectrumMode = False

        self.legend = None

        # Initialize axis items
        self.axes = {}
        self.setAxisItems(axisItems)

        self.titleLabel = LabelItem('', size='11pt', parent=self)
        self.layout.addItem(self.titleLabel, 0, 1)
        self.setTitle(None)  ## hide

        for i in range(4):
            self.layout.setRowPreferredHeight(i, 0)
            self.layout.setRowMinimumHeight(i, 0)
            self.layout.setRowSpacing(i, 0)
            self.layout.setRowStretchFactor(i, 1)

        for i in range(3):
            self.layout.setColumnPreferredWidth(i, 0)
            self.layout.setColumnMinimumWidth(i, 0)
            self.layout.setColumnSpacing(i, 0)
            self.layout.setColumnStretchFactor(i, 1)
        self.layout.setRowStretchFactor(2, 100)
        self.layout.setColumnStretchFactor(1, 100)

        self.items = []
        self.curves = []
        self.itemMeta = weakref.WeakKeyDictionary()
        self.dataItems = []
        self.paramList = {}
        self.avgCurves = {}

        ### Set up context menu

        w = QtGui.QWidget()
        self.ctrl = c = Ui_Form()
        c.setupUi(w)
        dv = QtGui.QDoubleValidator(self)

        menuItems = [
            ('Transforms', c.transformGroup),
            ('Downsample', c.decimateGroup),
            ('Average', c.averageGroup),
            ('Alpha', c.alphaGroup),
            ('Grid', c.gridGroup),
            ('Points', c.pointsGroup),
        ]

        self.ctrlMenu = QtGui.QMenu()

        self.ctrlMenu.setTitle('Plot Options')
        self.subMenus = []
        for name, grp in menuItems:
            sm = QtGui.QMenu(name)
            act = QtGui.QWidgetAction(self)
            act.setDefaultWidget(grp)
            sm.addAction(act)
            self.subMenus.append(sm)
            self.ctrlMenu.addMenu(sm)

        self.stateGroup = WidgetGroup()
        for name, w in menuItems:
            self.stateGroup.autoAdd(w)

        self.fileDialog = None

        c.alphaGroup.toggled.connect(self.updateAlpha)
        c.alphaSlider.valueChanged.connect(self.updateAlpha)
        c.autoAlphaCheck.toggled.connect(self.updateAlpha)

        c.xGridCheck.toggled.connect(self.updateGrid)
        c.yGridCheck.toggled.connect(self.updateGrid)
        c.gridAlphaSlider.valueChanged.connect(self.updateGrid)

        c.fftCheck.toggled.connect(self.updateSpectrumMode)
        c.logXCheck.toggled.connect(self.updateLogMode)
        c.logYCheck.toggled.connect(self.updateLogMode)
        c.derivativeCheck.toggled.connect(self.updateDerivativeMode)
        c.phasemapCheck.toggled.connect(self.updatePhasemapMode)

        c.downsampleSpin.valueChanged.connect(self.updateDownsampling)
        c.downsampleCheck.toggled.connect(self.updateDownsampling)
        c.autoDownsampleCheck.toggled.connect(self.updateDownsampling)
        c.subsampleRadio.toggled.connect(self.updateDownsampling)
        c.meanRadio.toggled.connect(self.updateDownsampling)
        c.clipToViewCheck.toggled.connect(self.updateDownsampling)

        self.ctrl.avgParamList.itemClicked.connect(self.avgParamListClicked)
        self.ctrl.averageGroup.toggled.connect(self.avgToggled)

        self.ctrl.maxTracesCheck.toggled.connect(self.updateDecimation)
        self.ctrl.maxTracesSpin.valueChanged.connect(self.updateDecimation)

        if labels is None:
            labels = {}
        for label in list(self.axes.keys()):
            if label in kargs:
                labels[label] = kargs[label]
                del kargs[label]
        for k in labels:
            if isinstance(labels[k], basestring):
                labels[k] = (labels[k], )
            self.setLabel(k, *labels[k])

        if title is not None:
            self.setTitle(title)

        if len(kargs) > 0:
            self.plot(**kargs)
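# A hypothetical usage sketch of the constructor above (assumes pyqtgraph is
# installed; label tuples are forwarded to setLabel(), and axis-named keyword
# arguments like bottom= are folded into labels, as the docstring describes):
import pyqtgraph as pg

app = pg.mkQApp()
item = pg.PlotItem(
    title="Demo <i>plot</i>",           # HTML is allowed in the title
    labels={'left': ('Voltage', 'V')},  # passed to setLabel('left', ...)
    bottom='Time',                      # same effect as labels={'bottom': ...}
    name='demo',                        # registers the ViewBox for linking
)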
Example #15
0
 def __init__(self):
     self.proxy_refs = weakref.WeakKeyDictionary()
     self.testing_engines = collections.defaultdict(set)
     self.dbapi_connections = set()
Example #16
0
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
# See http://www.gnu.org/licenses/ for more information.


"""
Manages locking access (across threads) to Poppler.Document instances.
"""

import threading
import weakref

_locks = weakref.WeakKeyDictionary()
_lock = threading.RLock()


def lock(document):
    """Returns a threading.RLock instance for the given Poppler.Document.

    Use:

    with lock(document):
        do_something

    """
    with _lock:
        try:
            return _locks[document]
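        # The excerpt ends inside the try block above; the conventional
        # completion of this create-on-miss pattern (a sketch, not necessarily
        # this project's exact code) allocates a fresh RLock under the module
        # lock so concurrent first requests cannot race:
        except KeyError:
            new_lock = _locks[document] = threading.RLock()
            return new_lock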
Example #17
0
"""Find/replace widget."""
import re
import tkinter as tk
import weakref

import porcupine
from porcupine import utils, tabs

find_widgets = weakref.WeakKeyDictionary()


class Finder(tk.Frame):
    """A widget for finding and replacing text.

    Use the pack geometry manager with this widget.
    """
    def __init__(self, parent, textwidget, **kwargs):
        super().__init__(parent, **kwargs)

        self._last_pattern = None
        self._matches = None

        self.grid_columnconfigure(1, weight=1)
        self._textwidget = textwidget

        entrygrid = tk.Frame(self)
        entrygrid.grid(row=0, column=0)
        self._find_entry = self._add_entry(entrygrid, 0, "Find:", self.find)
        self._replace_entry = self._add_entry(entrygrid, 1, "Replace with:")

        buttonframe = tk.Frame(self)
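# find_widgets above maps each tab to its Finder so the widget is built once
# per tab; a hedged sketch of that create-once lookup (get_finder and the
# tab.textwidget attribute are illustrative, not necessarily porcupine's API):
def get_finder(tab):
    try:
        return find_widgets[tab]
    except KeyError:
        finder = find_widgets[tab] = Finder(tab, tab.textwidget)
        return finder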
Example #18
0
    def __init__(self,
                 http_port=None,
                 distrib_port=None,
                 allowForce=None,
                 public_html="public_html",
                 site=None,
                 numbuilds=20,
                 num_events=200,
                 num_events_max=None,
                 auth=None,
                 order_console_by_time=False,
                 changecommentlink=None,
                 revlink=None,
                 projects=None,
                 repositories=None,
                 authz=None,
                 logRotateLength=None,
                 maxRotatedFiles=None,
                 change_hook_dialects={},
                 provide_feeds=None):
        """Run a web server that provides Buildbot status.

        @type  http_port: int or L{twisted.application.strports} string
        @param http_port: a strports specification describing which port the
                          buildbot should use for its web server, with the
                          Waterfall display as the root page. For backwards
                          compatibility this can also be an int. Use
                          'tcp:8000' to listen on that port, or
                          'tcp:12345:interface=127.0.0.1' if you only want
                          local processes to connect to it (perhaps because
                          you are using an HTTP reverse proxy to make the
                          buildbot available to the outside world, and do not
                          want to make the raw port visible).

        @type  distrib_port: int or L{twisted.application.strports} string
        @param distrib_port: Use this if you want to publish the Waterfall
                             page using web.distrib instead. The most common
                             case is to provide a string that is an absolute
                             pathname to the unix socket on which the
                             publisher should listen
                             (C{os.path.expanduser(~/.twistd-web-pb)} will
                             match the default settings of a standard
                             twisted.web 'personal web server'). Another
                             possibility is to pass an integer, which means
                             the publisher should listen on a TCP socket,
                             allowing the web server to be on a different
                             machine entirely. Both forms are provided for
                             backwards compatibility; the preferred form is a
                             strports specification like
                             'unix:/home/buildbot/.twistd-web-pb'. Providing
                             a non-absolute pathname will probably confuse
                             the strports parser.

        @param allowForce: deprecated; use authz instead
        @param auth: deprecated; use with authz

        @param authz: a buildbot.status.web.authz.Authz instance giving the authorization
                           parameters for this view

        @param public_html: the path to the public_html directory for this display,
                            either absolute or relative to the basedir.  The default
                            is 'public_html', which selects BASEDIR/public_html.

        @type site: None or L{twisted.web.server.Site}
        @param site: Use this if you want to define your own object instead of
                     using the default.

        @type numbuilds: int
        @param numbuilds: Default number of entries in lists at the /one_line_per_build
        and /builders/FOO URLs.  This default can be overridden both programmatically ---
        by passing the equally named argument to constructors of OneLinePerBuildOneBuilder
        and OneLinePerBuild --- and via the UI, by tacking ?numbuilds=xy onto the URL.

        @type num_events: int
        @param num_events: Default number of events to show in the waterfall.

        @type num_events_max: int
        @param num_events_max: The maximum number of events that are allowed to be
        shown in the waterfall.  The default value of C{None} will disable this
        check.

        @type auth: a L{status.web.auth.IAuth} or C{None}
        @param auth: an object that performs authentication to restrict access
                     to the C{allowForce} features. Ignored if C{allowForce}
                     is not C{True}. If C{auth} is C{None}, people can force or
                     stop builds without auth.

        @type order_console_by_time: bool
        @param order_console_by_time: Whether to order changes (commits) in the console
                     view according to the time they were created (for VCS like Git) or
                     according to their integer revision numbers (for VCS like SVN).

        @type changecommentlink: callable, dict, tuple (2 or 3 strings) or C{None}
        @param changecommentlink: adds links to ticket/bug ids in change comments,
            see buildbot.status.web.base.changecommentlink for details

        @type revlink: callable, dict, string or C{None}
        @param revlink: decorates revision ids with links to a web-view,
            see buildbot.status.web.base.revlink for details

        @type projects: callable, dict or C{None}
        @param projects: maps project identifiers to URLs, so that any project listed
            is automatically decorated with a link to its front page.
            see buildbot.status.web.base.dictlink for details

        @type repositories: callable, dict or C{None}
        @param repositories: maps repository identifiers to URLs, so that any repository listed
            is automatically decorated with a link to its web view.
            see buildbot.status.web.base.dictlink for details

        @type logRotateLength: None or int
        @param logRotateLength: file size at which the http.log is rotated/reset.
            If not set, the value set in the buildbot.tac will be used,
            falling back to the BuildMaster's default value (1 MB).

        @type maxRotatedFiles: None or int
        @param maxRotatedFiles: number of old http.log files to keep during log rotation.
            If not set, the value set in the buildbot.tac will be used,
            falling back to the BuildMaster's default value (10 files).

        @type  change_hook_dialects: None or dict
        @param change_hook_dialects: If empty, disables change_hook support, otherwise
                                     whitelists valid dialects. In the format of
                                     {"dialect1": "Option1", "dialect2": None}
                                     where the values are options that will be passed
                                     to the dialect.

                                     To enable the DEFAULT handler, use a key of DEFAULT.

        @type  provide_feeds: None or list
        @param provide_feeds: If empty, provides atom, json, and rss feeds.
                              Otherwise, a list of strings naming
                              the types of feeds provided.  Current
                              possibilities are "atom", "json", and "rss"
        """

        service.MultiService.__init__(self)
        if type(http_port) is int:
            http_port = "tcp:%d" % http_port
        self.http_port = http_port
        if distrib_port is not None:
            if type(distrib_port) is int:
                distrib_port = "tcp:%d" % distrib_port
            if distrib_port[0] in "/~.":  # pathnames
                distrib_port = "unix:%s" % distrib_port
        self.distrib_port = distrib_port
        self.num_events = num_events
        if num_events_max:
            assert num_events_max >= num_events
            self.num_events_max = num_events_max
        self.public_html = public_html

        # make up an authz if allowForce was given
        if authz:
            if allowForce is not None:
                raise ValueError(
                    "cannot use both allowForce and authz parameters")
            if auth:
                raise ValueError(
                    "cannot use both auth and authz parameters (pass "
                    "auth as an Authz parameter)")
        else:
            # invent an authz
            if allowForce and auth:
                authz = Authz(auth=auth, default_action="auth")
            elif allowForce:
                authz = Authz(default_action=True)
            else:
                if auth:
                    log.msg(
                        "Warning: Ignoring authentication. Search for 'authorization'"
                        " in the manual")
                authz = Authz()  # no authorization for anything

        self.authz = authz

        self.orderConsoleByTime = order_console_by_time

        # If we were given a site object, go ahead and use it. (if not, we add one later)
        self.site = site

        # store the log settings until we create the site object
        self.logRotateLength = logRotateLength
        self.maxRotatedFiles = maxRotatedFiles

        # create the web site page structure
        self.childrenToBeAdded = {}
        self.setupUsualPages(numbuilds=numbuilds,
                             num_events=num_events,
                             num_events_max=num_events_max)

        # Set up the jinja templating engine.
        self.templates = createJinjaEnv(revlink, changecommentlink,
                                        repositories, projects)

        # keep track of cached connections so we can break them when we shut
        # down. See ticket #102 for more details.
        self.channels = weakref.WeakKeyDictionary()

        # do we want to allow change_hook
        self.change_hook_dialects = {}
        if change_hook_dialects:
            self.change_hook_dialects = change_hook_dialects
            self.putChild(
                "change_hook",
                ChangeHookResource(dialects=self.change_hook_dialects))

        # Set default feeds
        if provide_feeds is None:
            self.provide_feeds = ["atom", "json", "rss"]
        else:
            self.provide_feeds = provide_feeds
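# self.channels above holds live HTTP channels weakly, so shutdown can break
# cached connections without itself keeping them alive. A hedged, generic
# sketch of that track-and-close pattern:
import weakref

class ConnectionTracker(object):
    def __init__(self):
        # channel -> None; only the weakly-held key set matters.
        self.channels = weakref.WeakKeyDictionary()

    def register(self, channel):
        self.channels[channel] = None

    def shutdown(self):
        # list() snapshots the keys, since entries may vanish mid-iteration.
        for channel in list(self.channels):
            channel.close()  # assumes channels expose a close() method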
Example #19
0
if sys.version_info[:2] > (3, 7):
    from typing import Final
else:
    from typing_extensions import Final

LockType: Type
try:
    import _thread
    LockType = _thread.LockType
except ImportError:
    import _dummy_thread
    LockType = _dummy_thread.LockType

# Wrapper functions that can call either of 2 functions depending on a boolean
# argument
boolean_dispatched: 'weakref.WeakKeyDictionary[Callable, Dict[str, Callable]]' = weakref.WeakKeyDictionary(
)  # noqa: T484


def createResolutionCallbackFromEnv(lookup_base):
    """
    Creates a resolution callback that will look up qualified names in an
    environment, starting with `lookup_base` for the base of any qualified
    names, then proceeding down the lookup chain with the resolved object.

    You should not use this directly, it should only be used from the other
    createResolutionCallbackFrom* functions.
    """
    def lookupInModule(qualified_name, module):
        if '.' in qualified_name:
            parts = qualified_name.split('.')
            base = parts[0]
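# boolean_dispatched above maps each generated wrapper to its two underlying
# implementations. A hedged sketch of such a dispatcher (the dict layout and
# names are illustrative, not necessarily PyTorch's exact internals):
import weakref

def boolean_dispatch(arg_name, if_true, if_false):
    def dispatched(*args, **kwargs):
        if kwargs.pop(arg_name, False):
            return if_true(*args, **kwargs)
        return if_false(*args, **kwargs)

    # Record both branches so tooling can recover them from the wrapper; the
    # entry dies with the wrapper because the dictionary holds it weakly.
    boolean_dispatched[dispatched] = {'if_true': if_true, 'if_false': if_false}
    return dispatched

div = boolean_dispatch('exact', if_true=lambda a, b: a / b,
                       if_false=lambda a, b: a // b)
assert div(7, 2) == 3
assert div(7, 2, exact=True) == 3.5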
Example #20
0
@convert.register(ufl.WithMapping)
def convert_withmapping(element, **kwargs):
    return _create_element(element.wrapee, **kwargs)


@convert.register(ufl.RestrictedElement)
def convert_restrictedelement(element, **kwargs):
    finat_elem, deps = _create_element(element._element, **kwargs)
    return finat.RestrictedElement(finat_elem,
                                   element.restriction_domain()), deps


hexahedron_tpc = ufl.TensorProductCell(ufl.quadrilateral, ufl.interval)
quadrilateral_tpc = ufl.TensorProductCell(ufl.interval, ufl.interval)
_cache = weakref.WeakKeyDictionary()


def create_element(ufl_element,
                   shape_innermost=True,
                   shift_axes=0,
                   restriction=None):
    """Create a FInAT element (suitable for tabulating with) given a UFL element.

    :arg ufl_element: The UFL element to create a FInAT element from.
    :arg shape_innermost: Vector/tensor indices come after basis function indices
    :arg restriction: cell restriction in interior facet integrals
                      (only for runtime tabulated elements)
    """
    finat_element, deps = _create_element(ufl_element,
                                          shape_innermost=shape_innermost,
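# _cache above is keyed weakly by the UFL element. A hedged, generic sketch of
# that two-level memoization (weak outer key, kwargs-derived inner key;
# cached_create and build are hypothetical names, not this project's API):
import weakref

_element_cache = weakref.WeakKeyDictionary()

def cached_create(element, build, **kwargs):
    key = tuple(sorted(kwargs.items()))
    per_element = _element_cache.setdefault(element, {})
    try:
        return per_element[key]
    except KeyError:
        result = per_element[key] = build(element, **kwargs)
        return result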
Example #21
0
def create_metadata_env():
    return weakref.WeakKeyDictionary()
Example #22
0
File: tiles.py Project: yyaaa1/mars
        if not to_tile.is_coarse():
            return to_tile
        dispatched = self.dispatch(to_tile.op)
        self._assign_to([d.data for d in dispatched], to_tile.op.outputs)
        return to_tile

    @classmethod
    def tiles(cls, to_tile):
        to_tile.build_graph(tiled=True, compose=False)
        return get_tiled(to_tile)


handler = OperandTilesHandler()
register = OperandTilesHandler.register

_tileable_data_to_tiled = weakref.WeakKeyDictionary()
_op_to_copied = weakref.WeakKeyDictionary()


@enter_mode(build=True)
def get_tiled(tileable, mapping=None, raise_err_if_not_tiled=True):
    tileable_data = tileable.data if hasattr(tileable, 'data') else tileable
    if mapping:
        tileable_data = mapping.get(tileable_data, tileable_data)
    if raise_err_if_not_tiled:
        return _tileable_data_to_tiled[tileable_data]
    else:
        return _tileable_data_to_tiled.get(tileable_data)


class ChunkGraphBuilder(GraphBuilder):
Example #23
0
__author__ = 'Brian Quinlan ([email protected])'

import os
from concurrent.futures import _base
import queue
import multiprocessing as mp
import multiprocessing.connection
from multiprocessing.queues import Queue
import threading
import weakref
from functools import partial
import itertools
import sys
import traceback

_threads_wakeups = weakref.WeakKeyDictionary()
_global_shutdown = False


class _ThreadWakeup:
    def __init__(self):
        self._closed = False
        self._reader, self._writer = mp.Pipe(duplex=False)

    def close(self):
        if not self._closed:
            self._closed = True
            self._writer.close()
            self._reader.close()

    def wakeup(self):
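        # The excerpt cuts off at the header above. In CPython the body simply
        # writes a byte into the pipe so the management thread's poll() wakes
        # up; a sketch consistent with the fields defined in __init__:
        if not self._closed:
            self._writer.send_bytes(b"")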
Example #24
0
def singledispatch(func):
    """Single-dispatch generic function decorator.

    Transforms a function into a generic function, which can have different
    behaviours depending upon the type of its first argument. The decorated
    function acts as the default implementation, and additional
    implementations can be registered using the register() attribute of the
    generic function.
    """
    # There are many programs that use functools without singledispatch, so we
    # trade-off making singledispatch marginally slower for the benefit of
    # making start-up of such applications slightly faster.
    import types, weakref

    registry = {}
    dispatch_cache = weakref.WeakKeyDictionary()
    cache_token = None

    def dispatch(cls):
        """generic_func.dispatch(cls) -> <function implementation>

        Runs the dispatch algorithm to return the best available implementation
        for the given *cls* registered on *generic_func*.

        """
        nonlocal cache_token
        if cache_token is not None:
            current_token = get_cache_token()
            if cache_token != current_token:
                dispatch_cache.clear()
                cache_token = current_token
        try:
            impl = dispatch_cache[cls]
        except KeyError:
            try:
                impl = registry[cls]
            except KeyError:
                impl = _find_impl(cls, registry)
            dispatch_cache[cls] = impl
        return impl

    def _is_union_type(cls):
        from typing import get_origin, Union
        return get_origin(cls) in {Union, types.UnionType}

    def _is_valid_dispatch_type(cls):
        if isinstance(cls, type) and not isinstance(cls, GenericAlias):
            return True
        from typing import get_args
        return (_is_union_type(cls) and all(
            isinstance(arg, type) and not isinstance(arg, GenericAlias)
            for arg in get_args(cls)))

    def register(cls, func=None):
        """generic_func.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a *generic_func*.

        """
        nonlocal cache_token
        if _is_valid_dispatch_type(cls):
            if func is None:
                return lambda f: register(cls, f)
        else:
            if func is not None:
                raise TypeError(f"Invalid first argument to `register()`. "
                                f"{cls!r} is not a class or union type.")
            ann = getattr(cls, '__annotations__', {})
            if not ann:
                raise TypeError(
                    f"Invalid first argument to `register()`: {cls!r}. "
                    f"Use either `@register(some_class)` or plain `@register` "
                    f"on an annotated function.")
            func = cls

            # only import typing if annotation parsing is necessary
            from typing import get_type_hints
            argname, cls = next(iter(get_type_hints(func).items()))
            if not _is_valid_dispatch_type(cls):
                if _is_union_type(cls):
                    raise TypeError(f"Invalid annotation for {argname!r}. "
                                    f"{cls!r} not all arguments are classes.")
                else:
                    raise TypeError(f"Invalid annotation for {argname!r}. "
                                    f"{cls!r} is not a class.")

        if _is_union_type(cls):
            from typing import get_args

            for arg in get_args(cls):
                registry[arg] = func
        else:
            registry[cls] = func
        if cache_token is None and hasattr(cls, '__abstractmethods__'):
            cache_token = get_cache_token()
        dispatch_cache.clear()
        return func

    def wrapper(*args, **kw):
        if not args:
            raise TypeError(f'{funcname} requires at least '
                            '1 positional argument')

        return dispatch(args[0].__class__)(*args, **kw)

    funcname = getattr(func, '__name__', 'singledispatch function')
    registry[object] = func
    wrapper.register = register
    wrapper.dispatch = dispatch
    wrapper.registry = types.MappingProxyType(registry)
    wrapper._clear_cache = dispatch_cache.clear
    update_wrapper(wrapper, func)
    return wrapper
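# A usage sketch of the decorator defined above. Note that dispatch_cache is a
# WeakKeyDictionary keyed by the classes it has dispatched on, so cache
# entries disappear when those classes are garbage collected:
@singledispatch
def describe(obj):
    return 'object: %r' % (obj,)

@describe.register(int)
def _(obj):
    return 'int: %d' % obj

@describe.register(list)
def _(obj):
    return 'list of %d items' % len(obj)

assert describe(3) == 'int: 3'
assert describe([1, 2]) == 'list of 2 items'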
Example #25
0
class AsyncAuth(object):
    '''
    Set up an Async object to maintain authentication with the salt master
    '''
    # This class is only a singleton per minion/master pair
    # mapping of io_loop -> {key -> auth}
    instance_map = weakref.WeakKeyDictionary()

    # mapping of key -> creds
    creds_map = {}

    def __new__(cls, opts, io_loop=None):
        '''
        Only create one instance of SAuth per __key()
        '''
        # do we have any mapping for this io_loop
        io_loop = io_loop or tornado.ioloop.IOLoop.current()
        if io_loop not in AsyncAuth.instance_map:
            AsyncAuth.instance_map[io_loop] = weakref.WeakValueDictionary()
        loop_instance_map = AsyncAuth.instance_map[io_loop]

        key = cls.__key(opts)
        if key not in loop_instance_map:
            log.debug('Initializing new SAuth for {0}'.format(key))
            # we need to make a local variable for this, as we are going to store
            # it in a WeakValueDictionary-- which will remove the item if no one
            # references it-- this forces a reference while we return to the caller
            new_auth = object.__new__(cls)
            new_auth.__singleton_init__(opts, io_loop=io_loop)
            loop_instance_map[key] = new_auth
        else:
            log.debug('Re-using SAuth for {0}'.format(key))
        return loop_instance_map[key]

    @classmethod
    def __key(cls, opts, io_loop=None):
        return (opts['pki_dir'],     # where the keys are stored
                opts['id'],          # minion ID
                opts['master_uri'],  # master URI
                )

    # has to remain empty for singletons, since __init__ will *always* be called
    def __init__(self, opts, io_loop=None):
        pass

    # an init for the singleton instance to call
    def __singleton_init__(self, opts, io_loop=None):
        '''
        Init an Auth instance

        :param dict opts: Options for this server
        :return: Auth instance
        :rtype: Auth
        '''
        self.opts = opts
        self.token = Crypticle.generate_key_string()
        self.serial = salt.payload.Serial(self.opts)
        self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
        self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
        if 'syndic_master' in self.opts:
            self.mpub = 'syndic_master.pub'
        elif 'alert_master' in self.opts:
            self.mpub = 'monitor_master.pub'
        else:
            self.mpub = 'minion_master.pub'
        if not os.path.isfile(self.pub_path):
            self.get_keys()

        self.io_loop = io_loop or tornado.ioloop.IOLoop.current()

        salt.utils.reinit_crypto()
        key = self.__key(self.opts)
        # TODO: if we already have creds for this key, lets just re-use
        if key in AsyncAuth.creds_map:
            creds = AsyncAuth.creds_map[key]
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future = tornado.concurrent.Future()
            self._authenticate_future.set_result(True)
        else:
            self.authenticate()

    def __deepcopy__(self, memo):
        cls = self.__class__
        result = cls.__new__(cls, copy.deepcopy(self.opts, memo), io_loop=None)
        memo[id(self)] = result
        for key in self.__dict__:
            if key in ('io_loop',):
                # The io_loop has a thread Lock which will fail to be deep
                # copied. Skip it because it will just be recreated on the
                # new copy.
                continue
            setattr(result, key, copy.deepcopy(self.__dict__[key], memo))
        return result

    @property
    def creds(self):
        return self._creds

    @property
    def crypticle(self):
        return self._crypticle

    @property
    def authenticated(self):
        return hasattr(self, '_authenticate_future') and \
               self._authenticate_future.done() and \
               self._authenticate_future.exception() is None

    def invalidate(self):
        if self.authenticated:
            del self._authenticate_future
            key = self.__key(self.opts)
            if key in AsyncAuth.creds_map:
                del AsyncAuth.creds_map[key]

    def authenticate(self, callback=None):
        '''
        Ask for this client to reconnect to the origin

        This function will de-dupe all calls here and return a *single* future
        for the sign-in; this way callers can all assume there aren't others
        '''
        # if an auth is in flight-- and not done-- just pass that back as the future to wait on
        if hasattr(self, '_authenticate_future') and not self._authenticate_future.done():
            future = self._authenticate_future
        else:
            future = tornado.concurrent.Future()
            self._authenticate_future = future
            self.io_loop.add_callback(self._authenticate)

        if callback is not None:
            def handle_future(future):
                response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)

        return future

    @tornado.gen.coroutine
    def _authenticate(self):
        '''
        Authenticate with the master. This method breaks the functional
        paradigm: it will update the master information from a fresh sign
        in, and signing in can occur as often as needed to keep up with the
        revolving master AES key.

        :rtype: Crypticle
        :returns: A crypticle used for encryption operations
        '''
        acceptance_wait_time = self.opts['acceptance_wait_time']
        acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
        if not acceptance_wait_time_max:
            acceptance_wait_time_max = acceptance_wait_time
        creds = None
        while True:
            try:
                creds = yield self.sign_in()
            except SaltClientError:
                break
            if creds == 'retry':
                if self.opts.get('caller'):
                    print('Minion failed to authenticate with the master, '
                          'has the minion key been accepted?')
                    sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
                    yield tornado.gen.sleep(acceptance_wait_time)
                if acceptance_wait_time < acceptance_wait_time_max:
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
                continue
            break
        if not isinstance(creds, dict) or 'aes' not in creds:
            try:
                del AsyncAuth.creds_map[self.__key(self.opts)]
            except KeyError:
                pass
            self._authenticate_future.set_exception(
                SaltClientError('Attempt to authenticate with the salt master failed')
            )
        else:
            AsyncAuth.creds_map[self.__key(self.opts)] = creds
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future.set_result(True)  # mark the sign-in as complete

    @tornado.gen.coroutine
    def sign_in(self, timeout=60, safe=True, tries=1):
        '''
        Send a sign-in request to the master, set the key information, and
        return a dict containing the master publish interface to bind to
        and the decrypted AES key for transport decryption.

        :param int timeout: Number of seconds to wait before timing out the sign-in request
        :param bool safe: If True, do not raise an exception on timeout. Retry instead.
        :param int tries: The number of times to try to authenticate before giving up.

        :raises SaltReqTimeoutError: If the sign-in request has timed out and ``safe`` is not set

        :return: Return a string on failure indicating the reason for failure. On success, return a dictionary
        with the publication port and the shared AES key.

        '''
        auth = {}

        auth_timeout = self.opts.get('auth_timeout', None)
        if auth_timeout is not None:
            timeout = auth_timeout
        auth_safemode = self.opts.get('auth_safemode', None)
        if auth_safemode is not None:
            safe = auth_safemode
        auth_tries = self.opts.get('auth_tries', None)
        if auth_tries is not None:
            tries = auth_tries

        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)

        auth['master_uri'] = self.opts['master_uri']

        channel = salt.transport.client.AsyncReqChannel.factory(self.opts,
                                                                crypt='clear',
                                                                io_loop=self.io_loop)

        try:
            payload = yield channel.send(
                self.minion_sign_in_payload(),
                tries=tries,
                timeout=timeout
            )
        except SaltReqTimeoutError as e:
            if safe:
                log.warning('SaltReqTimeoutError: {0}'.format(e))
                raise tornado.gen.Return('retry')
            raise SaltClientError('Attempt to authenticate with the salt master failed with timeout error')
        if 'load' in payload:
            if 'ret' in payload['load']:
                if not payload['load']['ret']:
                    if self.opts['rejected_retry']:
                        log.error(
                            'The Salt Master has rejected this minion\'s public '
                            'key.\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to re-authenticate.'
                        )
                        raise tornado.gen.Return('retry')
                    else:
                        log.critical(
                            'The Salt Master has rejected this minion\'s public '
                            'key!\nTo repair this issue, delete the public key '
                            'for this minion on the Salt Master and restart this '
                            'minion.\nOr restart the Salt Master in open mode to '
                            'clean out the keys. The Salt Minion will now exit.'
                        )
                        sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that it's maxed out with minions?
                elif payload['load']['ret'] == 'full':
                    raise tornado.gen.Return('full')
                else:
                    log.error(
                        'The Salt Master has cached the public key for this '
                        'node, this salt minion will wait for {0} seconds '
                        'before attempting to re-authenticate'.format(
                            self.opts['acceptance_wait_time']
                        )
                    )
                    raise tornado.gen.Return('retry')
        auth['aes'] = self.verify_master(payload)
        if not auth['aes']:
            log.critical(
                'The Salt Master server\'s public key did not authenticate!\n'
                'The master may need to be updated if it is a version of Salt '
                'lower than {0}, or\n'
                'If you are confident that you are connecting to a valid Salt '
                'Master, then remove the master public key and restart the '
                'Salt Minion.\nThe master public key can be found '
                'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
            )
            raise SaltSystemExit('Invalid master key')
        if self.opts.get('syndic_master', False):  # Is syndic
            syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
            if syndic_finger:
                if salt.utils.pem_finger(m_pub_fn) != syndic_finger:
                    self._finger_fail(syndic_finger, m_pub_fn)
        else:
            if self.opts.get('master_finger', False):
                if salt.utils.pem_finger(m_pub_fn) != self.opts['master_finger']:
                    self._finger_fail(self.opts['master_finger'], m_pub_fn)
        auth['publish_port'] = payload['publish_port']
        raise tornado.gen.Return(auth)

    def get_keys(self):
        '''
        Return keypair object for the minion.

        :rtype: Crypto.PublicKey.RSA._RSAobj
        :return: The RSA keypair
        '''
        # Make sure all key parent directories are accessible
        user = self.opts.get('user', 'root')
        salt.utils.verify.check_path_traversal(self.opts['pki_dir'], user)

        if os.path.exists(self.rsa_path):
            with salt.utils.fopen(self.rsa_path) as f:
                key = RSA.importKey(f.read())
            log.debug('Loaded minion key: {0}'.format(self.rsa_path))
        else:
            log.info('Generating keys: {0}'.format(self.opts['pki_dir']))
            gen_keys(self.opts['pki_dir'],
                     'minion',
                     self.opts['keysize'],
                     self.opts.get('user'))
            with salt.utils.fopen(self.rsa_path) as f:
                key = RSA.importKey(f.read())
        return key

    def gen_token(self, clear_tok):
        '''
        Encrypt a string with the minion private key to verify identity
        with the master.

        :param str clear_tok: A plaintext token to encrypt
        :return: Encrypted token
        :rtype: str
        '''
        return private_encrypt(self.get_keys(), clear_tok)

    def minion_sign_in_payload(self):
        '''
        Generates the payload used to authenticate with the master
        server. This payload consists of the passed in id_ and the ssh
        public key to encrypt the AES key sent back from the master.

        :return: Payload dictionary
        :rtype: dict
        '''
        payload = {}
        payload['cmd'] = '_auth'
        payload['id'] = self.opts['id']
        try:
            pubkey_path = os.path.join(self.opts['pki_dir'], self.mpub)
            with salt.utils.fopen(pubkey_path) as f:
                pub = RSA.importKey(f.read())
            cipher = PKCS1_OAEP.new(pub)
            payload['token'] = cipher.encrypt(self.token)
        except Exception:
            pass
        with salt.utils.fopen(self.pub_path) as f:
            payload['pub'] = f.read()
        return payload

    def decrypt_aes(self, payload, master_pub=True):
        '''
        This function is used to decrypt the AES seed phrase returned from
        the master server. The seed phrase is decrypted with the SSH RSA
        host key.

        Pass in the encrypted AES key.
        Returns the decrypted AES seed key, a string

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
            'aes': The shared AES key
            'enc': The format of the message. ('clear', 'pub', etc)
            'sig': The message signature
            'publish_port': The TCP port which published the message
            'token': The encrypted token used to verify the message.
            'pub_key': The public key of the sender.

        :rtype: tuple
        :return: The decrypted AES seed key and the decrypted token that was
            provided (empty strings on failure)
        '''
        if self.opts.get('auth_trb', False):
            log.warning(
                    'Auth Called: {0}'.format(
                        ''.join(traceback.format_stack())
                        )
                    )
        else:
            log.debug('Decrypting the current master AES key')
        key = self.get_keys()
        cipher = PKCS1_OAEP.new(key)
        key_str = cipher.decrypt(payload['aes'])
        if 'sig' in payload:
            m_path = os.path.join(self.opts['pki_dir'], self.mpub)
            if os.path.exists(m_path):
                try:
                    with salt.utils.fopen(m_path) as f:
                        mkey = RSA.importKey(f.read())
                except Exception:
                    return '', ''
                digest = hashlib.sha256(key_str).hexdigest()
                m_digest = public_decrypt(mkey.publickey(), payload['sig'])
                if m_digest != digest:
                    return '', ''
        else:
            return '', ''
        if '_|-' in key_str:
            return key_str.split('_|-')
        else:
            if 'token' in payload:
                token = cipher.decrypt(payload['token'])
                return key_str, token
            elif not master_pub:
                return key_str, ''
        return '', ''

    def verify_pubkey_sig(self, message, sig):
        '''
        Wraps the verify_signature method so we have
        additional checks.

        :rtype: bool
        :return: Success or failure of public key verification
        '''
        if self.opts['master_sign_key_name']:
            path = os.path.join(self.opts['pki_dir'],
                                self.opts['master_sign_key_name'] + '.pub')

            if os.path.isfile(path):
                res = verify_signature(path,
                                       message,
                                       binascii.a2b_base64(sig))
            else:
                log.error('Verification public key {0} does not exist. You '
                          'need to copy it from the master to the minion\'s '
                          'pki directory'.format(os.path.basename(path)))
                return False
            if res:
                log.debug('Successfully verified signature of master '
                          'public key with verification public key '
                          '{0}'.format(self.opts['master_sign_key_name'] + '.pub'))
                return True
            else:
                log.debug('Failed to verify signature of public key')
                return False
        else:
            log.error('Failed to verify the signature of the message because '
                      'the verification key-pair\'s name is not defined. Please '
                      'make sure that master_sign_key_name is defined.')
            return False

    def verify_signing_master(self, payload):
        try:
            if self.verify_pubkey_sig(payload['pub_key'],
                                      payload['pub_sig']):
                log.info('Received signed and verified master pubkey '
                         'from master {0}'.format(self.opts['master']))
                m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
                uid = salt.utils.get_uid(self.opts.get('user', None))
                with salt.utils.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
                    wfh.write(payload['pub_key'])
                return True
            else:
                log.error('Received signed public-key from master {0} '
                          'but signature verification failed!'.format(self.opts['master']))
                return False
        except Exception as sign_exc:
            log.error('There was an error while verifying the master\'s public-key signature')
            raise Exception(sign_exc)

    def check_auth_deps(self, payload):
        '''
        Checks that master and minion agree: either the master signs and the
        minion verifies, or neither side does. If only one side does, it should fail.

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
            'aes': The shared AES key
            'enc': The format of the message. ('clear', 'pub', 'aes')
            'publish_port': The TCP port which published the message
            'token': The encrypted token used to verify the message.
            'pub_key': The RSA public key of the sender.
        '''
        # master and minion sign and verify
        if 'pub_sig' in payload and self.opts['verify_master_pubkey_sign']:
            return True
        # master and minion do NOT sign and do NOT verify
        elif 'pub_sig' not in payload and not self.opts['verify_master_pubkey_sign']:
            return True

        # master signs, but minion does NOT verify
        elif 'pub_sig' in payload and not self.opts['verify_master_pubkey_sign']:
            log.error('The master sent its public-key signature, but signature '
                      'verification is not enabled on the minion. Either enable '
                      'signature verification on the minion or disable signing '
                      'the public key on the master!')
            return False
        # master does NOT sign but minion wants to verify
        elif 'pub_sig' not in payload and self.opts['verify_master_pubkey_sign']:
            log.error('The master did not send its public-key signature, but '
                      'signature verification is enabled on the minion. Either '
                      'disable signature verification on the minion or enable '
                      'signing the public key on the master!')
            return False

    def extract_aes(self, payload, master_pub=True):
        '''
        Return the AES key received from the master after the minion has been
        successfully authenticated.

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
            'aes': The shared AES key
            'enc': The format of the message. ('clear', 'pub', etc)
            'publish_port': The TCP port which published the message
            'token': The encrypted token used to verify the message.
            'pub_key': The RSA public key of the sender.

        :rtype: str
        :return: The shared AES key received from the master.
        '''
        if master_pub:
            try:
                aes, token = self.decrypt_aes(payload, master_pub)
                if token != self.token:
                    log.error(
                        'The master failed to decrypt the random minion token'
                    )
                    return ''
            except Exception:
                log.error(
                    'The master failed to decrypt the random minion token'
                )
                return ''
            return aes
        else:
            aes, token = self.decrypt_aes(payload, master_pub)
            return aes

    def verify_master(self, payload):
        '''
        Verify that the master is the same one that was previously accepted.

        :param dict payload: The incoming payload. This is a dictionary which may have the following keys:
            'aes': The shared AES key
            'enc': The format of the message. ('clear', 'pub', etc)
            'publish_port': The TCP port which published the message
            'token': The encrypted token used to verify the message.
            'pub_key': The RSA public key of the sender.

        :rtype: str
        :return: An empty string on verification failure. On success, the decrypted AES message in the payload.
        '''
        m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
        if os.path.isfile(m_pub_fn) and not self.opts['open_mode']:
            local_master_pub = salt.utils.fopen(m_pub_fn).read()

            if payload['pub_key'].replace('\n', '').replace('\r', '') != \
                    local_master_pub.replace('\n', '').replace('\r', ''):
                if not self.check_auth_deps(payload):
                    return ''

                if self.opts['verify_master_pubkey_sign']:
                    if self.verify_signing_master(payload):
                        return self.extract_aes(payload, master_pub=False)
                    else:
                        return ''
                else:
                    # This is not the last master we connected to
                    log.error('The master key has changed, the salt master could '
                              'have been subverted, verify salt master\'s public '
                              'key')
                    return ''

            else:
                if not self.check_auth_deps(payload):
                    return ''
                # verify the signature of the pubkey even if it has
                # not changed compared with the one we already have
                if self.opts['always_verify_signature']:
                    if self.verify_signing_master(payload):
                        return self.extract_aes(payload)
                    else:
                        log.error('The master\'s public key could not be verified. Is the '
                                  'verification pubkey {0} up to date?'
                                  ''.format(self.opts['master_sign_key_name'] + '.pub'))
                        return ''

                else:
                    return self.extract_aes(payload)
        else:
            if not self.check_auth_deps(payload):
                return ''

            # verify the master's pubkey signature if the minion
            # has not received any master pubkey before
            if self.opts['verify_master_pubkey_sign']:
                if self.verify_signing_master(payload):
                    return self.extract_aes(payload, master_pub=False)
                else:
                    return ''
            # the minion has not received any masters pubkey yet, write
            # the newly received pubkey to minion_master.pub
            else:
                salt.utils.fopen(m_pub_fn, 'wb+').write(payload['pub_key'])
                return self.extract_aes(payload, master_pub=False)
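# The two-level weak mapping AsyncAuth uses above generalizes to any
# "one instance per (loop, key)" singleton. A minimal hedged sketch, with a
# plain object standing in for the io_loop:
import weakref

class PerLoopSingleton(object):
    # io_loop -> {key -> instance}: the outer mapping dies with the loop, the
    # inner one drops an instance once nobody else references it.
    instance_map = weakref.WeakKeyDictionary()

    def __new__(cls, io_loop, key):
        loop_map = cls.instance_map.setdefault(io_loop,
                                               weakref.WeakValueDictionary())
        try:
            return loop_map[key]
        except KeyError:
            # Bind a local strong reference before returning, otherwise the
            # WeakValueDictionary could drop the instance immediately.
            instance = object.__new__(cls)
            loop_map[key] = instance
            return instance

    def __init__(self, io_loop, key):
        # Deliberately empty: __init__ runs even when __new__ returned an
        # existing instance (same reason AsyncAuth.__init__ is a no-op).
        pass

class FakeLoop(object):
    pass

loop = FakeLoop()
assert PerLoopSingleton(loop, 'a') is PerLoopSingleton(loop, 'a')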
Example #26
0
 def __init__(self):
     self._registry = weakref.WeakKeyDictionary()
Example #27
0
    def get(self, request):
        params = self.get_params(request)

        if params['aware']:
            context = self.get_context(request)
        else:
            context = DataContext()

        # Get all published app/model pairs to produce counts for.
        model_names = DataField.objects.published()\
            .values_list('app_name', 'model_name')\
            .order_by('model_name').distinct()

        results = []
        data = []
        models = set()

        QueryProcessor = pipeline.query_processors[params['processor']]

        # Workaround for a Python bug for versions 2.7.5 and below
        # http://bugs.python.org/issue10015
        if not hasattr(threading.current_thread(), '_children'):
            threading.current_thread()._children = weakref.WeakKeyDictionary()

        # Pool of threads to execute the counts in parallel
        pool = ThreadPool()

        for app_name, model_name in model_names:
            # DataField used here to resolve foreign key-based fields.
            model = DataField(app_name=app_name, model_name=model_name).model

            # No redundant counts
            if model in models:
                continue

            models.add(model)

            opts = model._meta

            # Format is called to resolve Django's internal proxy wrapper.
            verbose_name = opts.verbose_name.format()
            verbose_name_plural = opts.verbose_name_plural.format()

            # Assume no custom verbose_name has been set in the Meta class, so
            # apply a minimal title-case.
            if verbose_name.islower():
                verbose_name = verbose_name.title()

            if verbose_name_plural.islower():
                verbose_name_plural = verbose_name_plural.title()

            # Placeholder with the model name. The count will be replaced if
            # successful.
            data.append({
                'count': None,
                'app_name': app_name,
                'model_name': model_name,
                'verbose_name': verbose_name,
                'verbose_name_plural': verbose_name_plural,
            })

            # Asynchronously execute the count
            result = pool.apply_async(get_count,
                                      args=(request, model, params['refresh'],
                                            QueryProcessor, context))

            results.append(result)

        pool.close()

        for i, r in enumerate(results):
            try:
                count = r.get(timeout=serrano_settings.STATS_COUNT_TIMEOUT)
                data[i]['count'] = count
            except Exception:
                pass

        return data
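# The two-line workaround above (for http://bugs.python.org/issue10015) can be
# wrapped in a small helper so every caller that builds a pool applies it
# first; a hedged sketch:
import threading
import weakref
from multiprocessing.pool import ThreadPool

def make_thread_pool():
    current = threading.current_thread()
    if not hasattr(current, '_children'):
        # Dummy threads on affected Python versions lack _children, which
        # multiprocessing.pool expects to find on the current thread.
        current._children = weakref.WeakKeyDictionary()
    return ThreadPool()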
Example #28
0
    def initializeGL(self, gls):
        # Build trace vertex VBO and associated vertex data
        dtype = [("vertex", numpy.float32, 2), ("ptid", numpy.uint32)]
        self.working_array = numpy.zeros(NUM_ENDCAP_SEGMENTS * 2 + 2,
                                         dtype=dtype)
        self.trace_vbo = VBO(self.working_array, GL.GL_DYNAMIC_DRAW)

        # Generate geometry for trace and endcaps
        # ptid is a variable with value 0 or 1 that indicates which endpoint the geometry is associated with
        self.__build_trace()

        self.__attribute_shader_vao = VAO()
        self.__attribute_shader = gls.shader_cache.get(
            "line_vertex_shader", "frag1", defines={"INPUT_TYPE": "in"})

        # Now we build an index buffer that allows us to render filled geometry from the same
        # VBO.
        arr = []
        for i in range(NUM_ENDCAP_SEGMENTS - 1):
            arr.append(0)
            arr.append(i + 2)
            arr.append(i + 3)

        for i in range(NUM_ENDCAP_SEGMENTS - 1):
            arr.append(1)
            arr.append(i + NUM_ENDCAP_SEGMENTS + 2)
            arr.append(i + NUM_ENDCAP_SEGMENTS + 3)

        arr.append(2)
        arr.append(2 + NUM_ENDCAP_SEGMENTS - 1)
        arr.append(2 + NUM_ENDCAP_SEGMENTS)
        arr.append(2 + NUM_ENDCAP_SEGMENTS)
        arr.append(2 + NUM_ENDCAP_SEGMENTS * 2 - 1)
        arr.append(2)

        arr = numpy.array(arr, dtype=numpy.uint32)
        self.index_vbo = VBO(arr, target=GL.GL_ELEMENT_ARRAY_BUFFER)

        self.instance_dtype = numpy.dtype([
            ("pos_a", numpy.float32, 2),
            ("pos_b", numpy.float32, 2),
            ("thickness", numpy.float32, 1),
            #("color", numpy.float32, 4)
        ])

        # Use a zero-length placeholder array so the VBO can be created
        # and bound before any real instance data exists.
        instance_array = numpy.ndarray(0, dtype=self.instance_dtype)
        self.instance_vbo = VBO(instance_array)

        with self.__attribute_shader_vao, self.trace_vbo:
            vbobind(self.__attribute_shader, self.trace_vbo.dtype,
                    "vertex").assign()
            vbobind(self.__attribute_shader, self.trace_vbo.dtype,
                    "ptid").assign()

        with self.__attribute_shader_vao, self.instance_vbo:
            self.__bind_pos_a = vbobind(self.__attribute_shader,
                                        self.instance_dtype,
                                        "pos_a",
                                        div=1)
            self.__bind_pos_b = vbobind(self.__attribute_shader,
                                        self.instance_dtype,
                                        "pos_b",
                                        div=1)
            self.__bind_thickness = vbobind(self.__attribute_shader,
                                            self.instance_dtype,
                                            "thickness",
                                            div=1)
            #vbobind(self.__attribute_shader, self.instance_dtype, "color", div=1).assign()
            self.__base_rebind(0)

            self.index_vbo.bind()

        self.__initialize_uniform(gls)

        self.__last_prepared = weakref.WeakKeyDictionary()
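
The __last_prepared cache created on the final line is a WeakKeyDictionary, so per-object GL state is dropped automatically once the keyed object is garbage collected. A minimal stdlib-only sketch of that lifecycle, with Key as a hypothetical stand-in for the object being cached against:

import gc
import weakref

class Key:
    # Plain objects are weak-referenceable, so they can key the cache.
    pass

cache = weakref.WeakKeyDictionary()
k = Key()
cache[k] = 'prepared state'
assert len(cache) == 1

del k          # drop the last strong reference to the key
gc.collect()   # CPython usually collects immediately; this makes it explicit
assert len(cache) == 0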
Example #29
0
                lines = [max_content_width(context, child)]
        # The first text line goes on the current line
        current_line += lines[0]
        if len(lines) > 1:
            # Forced line break
            yield current_line
            if len(lines) > 2:
                for line in lines[1:-1]:
                    yield line
            current_line = lines[-1]
        is_line_start = lines[-1] == 0
        skip_stack = None
    yield current_line


TABLE_CACHE = weakref.WeakKeyDictionary()


def _percentage_contribution(box):
    """Return the percentage contribution of a cell, column or column group.

    http://dbaron.org/css/intrinsic/#pct-contrib

    """
    min_width = (
        box.style.min_width.value
        if box.style.min_width != 'auto' and box.style.min_width.unit == '%'
        else 0)
    max_width = (
        box.style.max_width.value
        if box.style.max_width != 'auto' and box.style.max_width.unit == '%'
        else float('inf'))
    width = (
        box.style.width.value
        if box.style.width != 'auto' and box.style.width.unit == '%'
        else 0)
    return max(min_width, min(width, max_width))
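
A quick worked check of the clamping rule, max(min_width, min(width, max_width)): with min-width 20%, width 10% and max-width 50%, the contribution is max(20, min(10, 50)) = 20. The stub below is hypothetical and only mimics the attributes _percentage_contribution reads; real WeasyPrint boxes are more involved.

from types import SimpleNamespace

def pct(value):
    # Hypothetical stand-in for a percentage dimension (value plus unit).
    return SimpleNamespace(value=value, unit='%')

box = SimpleNamespace(style=SimpleNamespace(
    min_width=pct(20), width=pct(10), max_width=pct(50)))

assert _percentage_contribution(box) == 20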
Example #30
0
# The function below constructs instances positionally and updates them
# with attr.evolve, so this is assumed to be an attrs-decorated class.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _PossiblyDisallowedChildren:
    """A set of possibly disallowed types contained within a type.

    These kinds of types may be banned in one or more contexts, so
    `_possibly_disallowed_children` and its cache record which of these
    type kinds appears inside a type, allowing for quick well-formedness
    checks.
    """
    federated: Optional[Type]
    function: Optional[Type]
    sequence: Optional[Type]

# Manual cache used rather than `cachetools.cached` due to incompatibility
# with `WeakKeyDictionary`. We want to use a `WeakKeyDictionary` so that
# cache entries are destroyed once the types they index no longer exist.
_possibly_disallowed_children_cache = weakref.WeakKeyDictionary()

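Because the class is frozen, the traversal in the function below never mutates an instance in place; it swaps it for a copy via attr.evolve, which rebuilds an attrs instance with the named fields overridden. A tiny sketch of that pattern, with Flags as a hypothetical class:

import attr

@attr.s(auto_attribs=True, frozen=True)
class Flags:
    federated: object = None
    function: object = None

f1 = Flags()
f2 = attr.evolve(f1, federated='int32@CLIENTS')
assert f1.federated is None
assert f2.federated == 'int32@CLIENTS'
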

def _possibly_disallowed_children(
        type_signature: Type) -> _PossiblyDisallowedChildren:
    """Returns possibly disallowed child types appearing in `type_signature`."""
    cached = _possibly_disallowed_children_cache.get(type_signature, None)
    if cached is not None:
        return cached
    disallowed = _PossiblyDisallowedChildren(None, None, None)
    for child_type in type_signature.children():
        if child_type.is_federated():
            disallowed = attr.evolve(disallowed, federated=child_type)
        elif child_type.is_function():
            disallowed = attr.evolve(disallowed, function=child_type)
        elif child_type.is_sequence():