Example #1

# =============================================================================
# >> GLOBAL VARIABLES
# =============================================================================
# Get a list of projectiles for the game
_projectile_weapons = [weapon.name for weapon in WeaponClassIter('grenade')]

# Get a dictionary to store the delays
_entity_delays = defaultdict(set)

# Get a dictionary to store the repeats
_entity_repeats = defaultdict(set)

# Get a set to store the registered entity classes
_entity_classes = WeakSet()


# =============================================================================
# >> CLASSES
# =============================================================================
class _EntityCaching(BoostPythonClass):
    """Metaclass used to cache entity instances."""

    def __init__(cls, classname, bases, attributes):
        """Initializes the class."""
        # New instances of this class will be cached in that dictionary
        cls._cache = {}

        # Set whether or not this class is caching its instances by default
        try:
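
The snippet is cut off at the try block above. As a rough, self-contained sketch of the instance-caching-metaclass pattern it illustrates (the class and variable names below are assumptions for illustration, not the actual Source.Python implementation):

from weakref import WeakSet

_caching_classes = WeakSet()  # registry of classes that cache their instances


class CachingMeta(type):
    """Metaclass that caches one instance per (class, key)."""

    def __init__(cls, name, bases, attributes):
        super().__init__(name, bases, attributes)
        cls._cache = {}
        _caching_classes.add(cls)

    def __call__(cls, key, *args, **kwargs):
        # Return the cached instance for this key, creating it on first use.
        try:
            return cls._cache[key]
        except KeyError:
            instance = super().__call__(key, *args, **kwargs)
            cls._cache[key] = instance
            return instance


class Entity(metaclass=CachingMeta):
    def __init__(self, index):
        self.index = index


assert Entity(1) is Entity(1)  # the same key yields the same cached instance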
Example #2
 def union(self, other):
     return WeakSet(set(self).union(other))
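
A brief usage sketch of union, shown here with the standard weakref.WeakSet (which behaves like the override above); once an element is garbage collected it simply disappears from the resulting set:

from weakref import WeakSet


class Item:
    def __init__(self, name):
        self.name = name


a, b, c = Item('a'), Item('b'), Item('c')
merged = WeakSet([a, b]).union(WeakSet([c]))   # new WeakSet holding a, b and c
assert {item.name for item in merged} == {'a', 'b', 'c'}
del c                                          # once collected, 'c' drops out of merged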
Example #3
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import NoReverseMatch, reverse
from django.utils.functional import LazyObject
from django.utils.module_loading import import_string
from django.utils.text import capfirst
from django.utils.translation import gettext as _, gettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.i18n import JavaScriptCatalog

all_sites = WeakSet()


class AlreadyRegistered(Exception):
    pass


class NotRegistered(Exception):
    pass


class AdminSite:
    """
    An AdminSite object encapsulates an instance of the Django admin application, ready
    to be hooked in to your URLconf. Models are registered with the AdminSite using the
    register() method, and the get_urls() method can then be used to access Django view
Example #4
 def test_ne(self):
     self.assertTrue(self.s != set(self.items))
     s1 = WeakSet()
     s2 = WeakSet()
     self.assertFalse(s1 != s2)
Example #5
The id() of the main-process object is used to identify loadscreens.
"""
from weakref import WeakSet
from abc import abstractmethod
import contextlib
import multiprocessing

from loadScreen_daemon import run_screen as _splash_daemon
from BEE2_config import GEN_OPTS
import utils

from typing import Set, Tuple

# Keep a reference to all loading screens, so we can close them globally.
_ALL_SCREENS = WeakSet()  # type: Set[LoadScreen]

# For each loadscreen ID, record if the cancel button was pressed. We then raise
# Cancelled upon the next interaction with it to stop operation.
_SCREEN_CANCEL_FLAG = {}

# Pairs of pipe ends we use to send data to the daemon and vice versa.
# DAEMON is sent over to the other process.
_PIPE_MAIN_REC, _PIPE_DAEMON_SEND = multiprocessing.Pipe(duplex=False)
_PIPE_DAEMON_REC, _PIPE_MAIN_SEND = multiprocessing.Pipe(duplex=False)


class Cancelled(SystemExit):
    """Raised when the user cancels the loadscreen."""

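The comment above notes that _ALL_SCREENS exists so every loading screen can be closed globally. A hedged sketch of that close-all idea (the close() method name is hypothetical here, not necessarily the real BEE2 API):

def close_all_screens():
    """Close every loading screen that is still alive."""
    # Iterate over a snapshot, since closing a screen may remove it from the set.
    for screen in list(_ALL_SCREENS):
        screen.close()
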
Example #6
 def test_lt(self):
     self.assertTrue(self.ab_weakset < self.abcde_weakset)
     self.assertFalse(self.abcde_weakset < self.def_weakset)
     self.assertFalse(self.ab_weakset < self.ab_weakset)
     self.assertFalse(WeakSet() < WeakSet())
Example #7
 def test_constructor_identity(self):
     s = WeakSet(self.items)
     t = WeakSet(s)
     self.assertNotEqual(id(s), id(t))
Example #8
class Presets:
    """
    Manager for device preset positions.

    This provides methods for adding new presets, checking which presets are
    active, and related utilities.

    It will install the ``mv_presetname`` and ``wm_presetname`` methods onto
    the associated device, and the ``add_preset`` and ``add_preset_here``
    methods onto itself.

    Parameters
    ----------
    device: ``Device``
        The device to manage saved preset positions for. It must implement the
        `FltMvInterface`.

    Attributes
    ----------
    positions: ``SimpleNamespace``
        A namespace that contains all of the active presets as `PresetPosition`
        objects.
    """
    _registry = WeakSet()
    _paths = {}

    def __init__(self, device):
        self._device = device
        self._methods = []
        self._fd = None
        self._registry.add(self)
        self.name = device.name + '_presets'
        self.sync()

    def _path(self, preset_type):
        """
        Utility function to get the preset file ``Path``.
        """
        path = self._paths[preset_type] / (self._device.name + '.yml')
        logger.debug('select presets path %s', path)
        return path

    def _read(self, preset_type):
        """
        Utility function to get a particular preset's datum dictionary.
        """
        logger.debug('read presets for %s', self._device.name)
        with self._file_open_rlock(preset_type) as f:
            f.seek(0)
            return yaml.load(f) or {}

    def _write(self, preset_type, data):
        """
        Utility function to overwrite a particular preset's datum dictionary.
        """
        logger.debug('write presets for %s', self._device.name)
        with self._file_open_rlock(preset_type) as f:
            f.seek(0)
            yaml.dump(data, f, default_flow_style=False)
            f.truncate()

    @contextmanager
    def _file_open_rlock(self, preset_type, timeout=1.0):
        """
        File locking context manager for this object.

        Works like threading.RLock in that you can acquire it multiple times
        safely.

        Parameters
        ----------
        preset_type: ``str``
            The preset type whose file should be locked.

        timeout: ``float``, optional
            Number of seconds to wait for the file lock before giving up.

        Raises
        ------
        BlockingIOError:
            If we cannot acquire the file lock.
        """
        if self._fd is None:
            path = self._path(preset_type)
            with open(path, 'r+') as fd:
                # Set up file lock timeout with a raising handler
                # We will need this handler due to PEP 475
                def interrupt(signum, frame):
                    raise InterruptedError()

                old_handler = signal.signal(signal.SIGALRM, interrupt)
                try:
                    signal.setitimer(signal.ITIMER_REAL, timeout)
                    fcntl.flock(fd, fcntl.LOCK_EX)
                except InterruptedError:
                    # Ignore interrupted and proceed to cleanup
                    pass
                finally:
                    # Clean up file lock timeout
                    signal.setitimer(signal.ITIMER_REAL, 0)
                    signal.signal(signal.SIGALRM, old_handler)
                # Error now if we still can't get the lock.
                # Getting lock twice is safe.
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                logger.debug('acquired lock for %s', path)
                self._fd = fd
                yield fd
                fcntl.flock(fd, fcntl.LOCK_UN)
                logger.debug('released lock for %s', path)
            self._fd = None
        else:
            logger.debug('using already open file descriptor')
            yield self._fd

    def _update(self, preset_type, name, value=None, comment=None,
                active=True):
        """
        Utility function to update a preset position.

        Reads the existing preset's datum, updates the value, the comment, and
        the active state, and then writes the datum back to the file, updating
        the history accordingly.
        """
        logger.debug(('call %s presets._update(%s, %s, value=%s, comment=%s, '
                      'active=%s)'), self._device.name, preset_type, name,
                     value, comment, active)
        if not isinstance(name, str):
            raise TypeError('name must be of type <str>, not type '
                            '{}'.format(type(name)))
        if value is not None and not isinstance(value, numbers.Real):
            raise TypeError('value must be a real numeric type, not type '
                            '{}'.format(type(value)))
        try:
            path = self._path(preset_type)
            if not path.exists():
                path.touch()
                path.chmod(0o666)
            with self._file_open_rlock(preset_type):
                data = self._read(preset_type)
                if value is None and comment is not None:
                    value = data[name]['value']
                if value is not None:
                    if name not in data:
                        data[name] = {}
                    ts = time.strftime('%d %b %Y %H:%M:%S')
                    data[name]['value'] = value
                    history = data[name].get('history', {})
                    if comment:
                        comment = ' ' + comment
                    else:
                        comment = ''
                    history[ts] = '{:10.4f}{}'.format(value, comment)
                    data[name]['history'] = history
                if active:
                    data[name]['active'] = True
                else:
                    data[name]['active'] = False
                self._write(preset_type, data)
        except BlockingIOError:
            self._log_flock_error()

    def sync(self):
        """
        Synchronize the presets with the database.
        """
        logger.debug('call %s presets.sync()', self._device.name)
        self._remove_methods()
        self._cache = {}
        logger.debug('filling %s cache', self.name)
        for preset_type in self._paths.keys():
            path = self._path(preset_type)
            if path.exists():
                try:
                    self._cache[preset_type] = self._read(preset_type)
                except BlockingIOError:
                    self._log_flock_error()
            else:
                logger.debug('No %s preset file for %s',
                             preset_type, self._device.name)
        self._create_methods()

    def _log_flock_error(self):
        logger.error(('Unable to acquire file lock for %s. '
                      'File may be being edited by another user.'), self.name)
        logger.debug('', exc_info=True)

    def _create_methods(self):
        """
        Create the dynamic methods based on the configured paths.

        Add methods to this object for adding presets of each type, add
        methods to the associated device to move and check each preset, and
        add `PresetPosition` instances to ``self.positions`` for each preset
        name.
        """
        logger.debug('call %s presets._create_methods()', self._device.name)
        for preset_type in self._paths.keys():
            add, add_here = self._make_add(preset_type)
            self._register_method(self, 'add_' + preset_type, add)
            self._register_method(self, 'add_here_' + preset_type, add_here)
        for preset_type, data in self._cache.items():
            for name, info in data.items():
                if info['active']:
                    mv, umv = self._make_mv_pre(preset_type, name)
                    wm = self._make_wm_pre(preset_type, name)
                    self._register_method(self._device, 'mv_' + name, mv)
                    self._register_method(self._device, 'umv_' + name, umv)
                    self._register_method(self._device, 'wm_' + name, wm)
                    setattr(self.positions, name,
                            PresetPosition(self, preset_type, name))

    def _register_method(self, obj, method_name, method):
        """
        Utility function for managing dynamic methods.

        Adds a method to the ``_methods`` list and binds the method to an
        object.
        """
        logger.debug('register method %s to %s', method_name, obj.name)
        self._methods.append((obj, method_name))
        setattr(obj, method_name, MethodType(method, obj))

    def _make_add(self, preset_type):
        """
        Create the functions that add preset positions.

        Creates suitable versions of ``add`` and ``add_here`` for a particular
        preset type, e.g. ``add_preset_type`` and ``add_here_preset_type``.
        """
        def add(self, name, value, comment=None):
            """
            Add a preset position of type "{}".

            Parameters
            ----------
            name: ``str``
                The name of the new preset position.

            value: ``float``
                The value of the new preset_position.

            comment: ``str``, optional
                A comment to associate with the preset position.
            """
            self._update(preset_type, name, value=value,
                         comment=comment)
            self.sync()

        def add_here(self, name, comment=None):
            """
            Add a preset of the current position of type "{}".

            Parameters
            ----------
            name: ``str``
                The name of the new preset position.

            comment: ``str``, optional
                A comment to associate with the preset position.
            """
            add(self, name, self._device.wm(), comment=comment)

        add.__doc__ = add.__doc__.format(preset_type)
        add_here.__doc__ = add_here.__doc__.format(preset_type)
        return add, add_here

    def _make_mv_pre(self, preset_type, name):
        """
        Create the functions that move to preset positions.

        Creates suitable versions of ``mv`` and ``umv`` for a particular
        preset type and name, e.g. ``mv_sample``.
        """
        def mv_pre(self, timeout=None, wait=False):
            """
            Move to the {} preset position.

            Parameters
            ----------
            timeout: ``float``, optional
                If provided, the mover will throw an error if motion takes
                longer than timeout to complete. If omitted, the mover's
                default timeout will be used.

            wait: ``bool``, optional
                If ``True``, wait for motion completion before returning.
                Defaults to ``False``.
            """
            pos = self.presets._cache[preset_type][name]['value']
            self.mv(pos, timeout=timeout, wait=wait)

        def umv_pre(self, timeout=None):
            """
            Update move to the {} preset position.

            Parameters
            ----------
            timeout: ``float``, optional
                If provided, the mover will throw an error if motion takes
                longer than timeout to complete. If omitted, the mover's
                default timeout will be used.
            """
            pos = self.presets._cache[preset_type][name]['value']
            self.umv(pos, timeout=timeout)

        mv_pre.__doc__ = mv_pre.__doc__.format(name)
        umv_pre.__doc__ = umv_pre.__doc__.format(name)
        return mv_pre, umv_pre

    def _make_wm_pre(self, preset_type, name):
        """
        Create a method to get the offset from a preset position.

        Creates a suitable version of ``wm`` for a particular preset type and
        name e.g. ``wm_sample``.
        """
        def wm_pre(self):
            """
            Check the offset from the {} preset position.

            Returns
            -------
            offset: ``float``
                How far we are from the preset position. If this is near zero,
                we are at the position. If this is positive, the preset position
                is in the positive direction from us.
            """
            pos = self.presets._cache[preset_type][name]['value']
            return pos - self.wm()

        wm_pre.__doc__ = wm_pre.__doc__.format(name)
        return wm_pre

    def _remove_methods(self):
        """
        Remove all methods created in the last call to _create_methods.
        """
        logger.debug('call %s presets._remove_methods()', self._device.name)
        for obj, method_name in self._methods:
            try:
                delattr(obj, method_name)
            except AttributeError:
                pass
        self._methods = []
        self.positions = SimpleNamespace()
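
The _register_method helper above attaches the generated functions to the device at runtime by wrapping them in MethodType. A minimal, self-contained sketch of that mechanism (the motor namespace and the mv_sample name are illustrative assumptions, not the pcdsdevices API):

from types import MethodType, SimpleNamespace

device = SimpleNamespace(name='motor')


def mv_sample(self):
    return 'moving {} to the sample preset'.format(self.name)


# Bind the plain function onto the instance, as _register_method does above.
device.mv_sample = MethodType(mv_sample, device)
print(device.mv_sample())  # -> moving motor to the sample preset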
Example #9
def make_napari_viewer(qtbot, request: 'FixtureRequest',
                       napari_plugin_manager):
    """A fixture function that creates a napari viewer for use in testing.

    Use this fixture as a function in your tests:

        viewer = make_napari_viewer()

    It accepts all the same arguments as napari.Viewer, plus the following
    test-related parameters:

    ViewerClass : Type[napari.Viewer], optional
        Override the viewer class being used.  By default, will
        use napari.viewer.Viewer
    strict_qt : bool or str, optional
        If True, a check will be performed after test cleanup to make sure that
        no top level widgets were created and *not* cleaned up during the
        test.  If the string "raise" is provided, an AssertionError will be
        raised.  Otherwise a warning is emitted.
        By default, this is False unless the test is being performed within
        the napari package.
        This can be made globally true by setting the 'NAPARI_STRICT_QT'
        environment variable.
    block_plugin_discovery : bool, optional
        Block discovery of non-builtin plugins.  Note: plugins can still be
        manually registered by using the 'napari_plugin_manager' fixture and
        the `napari_plugin_manager.register()` method. By default, True.

    Examples
    --------
    >>> def test_adding_shapes(make_napari_viewer):
    ...     viewer = make_napari_viewer()
    ...     viewer.add_shapes()
    ...     assert len(viewer.layers) == 1

    >>> def test_something_with_plugins(make_napari_viewer):
    ...     viewer = make_napari_viewer(block_plugin_discovery=False)

    >>> def test_something_with_strict_qt_tests(make_napari_viewer):
    ...     viewer = make_napari_viewer(strict_qt=True)
    """
    from qtpy.QtWidgets import QApplication

    from napari import Viewer
    from napari._qt.qt_viewer import QtViewer
    from napari.settings import get_settings

    gc.collect()

    _do_not_inline_below = len(QtViewer._instances)
    # do not inline to avoid pytest trying to compute repr of the expression;
    # it fails if the C++ object is gone but the Python object is not.
    if request.config.getoption(_SAVE_GRAPH_OPNAME):
        fail_obj_graph(QtViewer)
    QtViewer._instances.clear()
    assert _do_not_inline_below == 0, (
        "Some instance of QtViewer was not properly cleaned up in one of the previous tests. For easier debugging "
        f"one may use the {_SAVE_GRAPH_OPNAME} flag for pytest to get a graph of leaked objects. If you use qtbot "
        "(from pytest-qt) to clean Qt objects after a test, you may need to switch to manual cleanup using "
        "`deleteLater()` and `qtbot.wait(50)`.")

    settings = get_settings()
    settings.reset()

    viewers: WeakSet[Viewer] = WeakSet()

    # may be overridden by using `make_napari_viewer(strict_qt=True)`
    _strict = False

    initial = QApplication.topLevelWidgets()
    prior_exception = getattr(sys, 'last_value', None)
    is_internal_test = request.module.__name__.startswith("napari.")

    def actual_factory(
        *model_args,
        ViewerClass=Viewer,
        strict_qt=is_internal_test or os.getenv("NAPARI_STRICT_QT"),
        block_plugin_discovery=True,
        **model_kwargs,
    ):
        nonlocal _strict
        _strict = strict_qt

        if not block_plugin_discovery:
            napari_plugin_manager.discovery_blocker.stop()

        should_show = request.config.getoption("--show-napari-viewer")
        model_kwargs['show'] = model_kwargs.pop('show', should_show)
        viewer = ViewerClass(*model_args, **model_kwargs)
        viewers.add(viewer)

        return viewer

    yield actual_factory

    # Some tests might have the viewer closed, so this call will not be able
    # to access the window.
    with suppress(AttributeError):
        get_settings().reset()

    # close viewers, but don't save window settings while closing
    for viewer in viewers:
        if hasattr(viewer.window, '_qt_window'):
            with patch.object(viewer.window._qt_window,
                              '_save_current_window_settings'):
                viewer.close()
        else:
            viewer.close()

    gc.collect()

    if request.config.getoption(_SAVE_GRAPH_OPNAME):
        fail_obj_graph(QtViewer)

    _do_not_inline_below = len(QtViewer._instances)
    # do not inline to avoid pytest trying to compute repr of expression.
    # it fails if C++ object gone but not Python object.
    assert _do_not_inline_below == 0

    # only check for leaked widgets in "strict" mode, and only if the test did
    # not raise a new exception.
    if _strict and getattr(sys, 'last_value', None) is prior_exception:
        QApplication.processEvents()
        leak = set(QApplication.topLevelWidgets()).difference(initial)
        # still not sure how to clean up some of the remaining
        # vispy.app.backends._qt.CanvasBackendDesktop widgets...
        if any([n.__class__.__name__ != 'CanvasBackendDesktop' for n in leak]):
            # just a warning... but this can be converted to test errors
            # in pytest with `-W error`
            msg = f"""The following Widgets leaked!: {leak}.

            Note: If other tests are failing it is likely that widgets will leak
            as they will be (indirectly) attached to the tracebacks of previous failures.
            Please only consider this an error if all other tests are passing.
            """
            # Explanatory note on the above: while we are indeed looking at the
            # difference between the sets of widgets before and after, new
            # objects can still be kept from garbage collection by earlier
            # failures. In particular with VisPyCanvas, it looks like if a
            # traceback contains the type, then instances stay attached to the
            # type. I'm not too sure why this is the case though.
            if _strict == 'raise':
                raise AssertionError(msg)
            else:
                warnings.warn(msg)
Example #10
 def __init__(self):
     self.load_sprites()
     self.last_t = None
     self.objects = WeakSet()
Example #11
from typing import (Callable, Dict, Generic, Iterator, List, Mapping, Optional, Sequence, Set,
                    Tuple, Type, TypeVar, Union, cast, overload)
from weakref import WeakKeyDictionary, WeakValueDictionary, WeakSet

from redis import Redis
from redis.client import Script # pylint: disable=unused-import; typing
from redis.exceptions import ResponseError
from typing_extensions import Literal

T = TypeVar('T')
U = TypeVar('U')

ExpectFunc = Callable[[object], T]

_SCRIPT_CACHE = WeakKeyDictionary() # type: WeakKeyDictionary[Redis, Dict[str, Script]]
_BZPOPTIMED_CACHE = WeakSet() # type: WeakSet[Redis]

class JSONRedis(Generic[T]):
    """Extended :class:`Redis` client for convenient use with JSON objects.

    Objects are stored as JSON-encoded strings in the Redis database and en-/decoding is handled
    transparently.

    The translation from an arbitrary object to a JSON-serializable form is carried out by a given
    ``encode(object)`` function. A JSON-serializable object is one that only consists of the types
    given in https://docs.python.org/3/library/json.html#py-to-json-table . *encode* is passed as
    *default* argument to :func:`json.dumps()`.

    The reverse translation is done by a given ``decode(json)`` function. *decode* is passed as
    *object_hook* argument to :func:`json.loads()`.
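
The docstring explains that encode is handed to json.dumps() as *default* and decode to json.loads() as *object_hook*. A minimal sketch of that round trip with plain json and no Redis involved (the Cat class is an illustrative assumption):

import json


class Cat:
    def __init__(self, name):
        self.name = name


def encode(obj):
    # Translate an arbitrary object into a JSON-serializable form.
    if isinstance(obj, Cat):
        return {'__type__': 'Cat', 'name': obj.name}
    raise TypeError('Cannot encode {!r}'.format(obj))


def decode(json_obj):
    # Reverse translation, applied to every decoded JSON object.
    if json_obj.get('__type__') == 'Cat':
        return Cat(json_obj['name'])
    return json_obj


data = json.dumps(Cat('Happy'), default=encode)  # encode passed as *default*
cat = json.loads(data, object_hook=decode)       # decode passed as *object_hook*
assert isinstance(cat, Cat) and cat.name == 'Happy'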
Example #12
class CalibreDB():
    _init = False
    engine = None
    config = None
    session_factory = None
    # This is a WeakSet so that references here don't keep other CalibreDB
    # instances alive once they reach the end of their respective scopes
    instances = WeakSet()

    def __init__(self, expire_on_commit=True):
        """ Initialize a new CalibreDB session
        """
        self.session = None
        if self._init:
            self.initSession(expire_on_commit)

        self.instances.add(self)

    def initSession(self, expire_on_commit=True):
        self.session = self.session_factory()
        self.session.expire_on_commit = expire_on_commit
        self.update_title_sort(self.config)

    @classmethod
    def setup_db_cc_classes(self, cc):
        cc_ids = []
        books_custom_column_links = {}
        for row in cc:
            if row.datatype not in cc_exceptions:
                if row.datatype == 'series':
                    dicttable = {
                        '__tablename__':
                        'books_custom_column_' + str(row.id) + '_link',
                        'id':
                        Column(Integer, primary_key=True),
                        'book':
                        Column(Integer,
                               ForeignKey('books.id'),
                               primary_key=True),
                        'map_value':
                        Column('value',
                               Integer,
                               ForeignKey('custom_column_' + str(row.id) +
                                          '.id'),
                               primary_key=True),
                        'extra':
                        Column(Float),
                        'asoc':
                        relationship('custom_column_' + str(row.id),
                                     uselist=False),
                        'value':
                        association_proxy('asoc', 'value')
                    }
                    books_custom_column_links[row.id] = type(
                        str('books_custom_column_' + str(row.id) + '_link'),
                        (Base, ), dicttable)
                else:
                    books_custom_column_links[row.id] = Table(
                        'books_custom_column_' + str(row.id) + '_link',
                        Base.metadata,
                        Column('book',
                               Integer,
                               ForeignKey('books.id'),
                               primary_key=True),
                        Column('value',
                               Integer,
                               ForeignKey('custom_column_' + str(row.id) +
                                          '.id'),
                               primary_key=True))
                cc_ids.append([row.id, row.datatype])

                ccdict = {
                    '__tablename__': 'custom_column_' + str(row.id),
                    'id': Column(Integer, primary_key=True)
                }
                if row.datatype == 'float':
                    ccdict['value'] = Column(Float)
                elif row.datatype == 'int':
                    ccdict['value'] = Column(Integer)
                elif row.datatype == 'datetime':
                    ccdict['value'] = Column(TIMESTAMP)
                elif row.datatype == 'bool':
                    ccdict['value'] = Column(Boolean)
                else:
                    ccdict['value'] = Column(String)
                if row.datatype in ['float', 'int', 'bool', 'datetime']:
                    ccdict['book'] = Column(Integer, ForeignKey('books.id'))
                cc_classes[row.id] = type(str('custom_column_' + str(row.id)),
                                          (Base, ), ccdict)

        for cc_id in cc_ids:
            if cc_id[1] in ['bool', 'int', 'float', 'datetime']:
                setattr(
                    Books, 'custom_column_' + str(cc_id[0]),
                    relationship(
                        cc_classes[cc_id[0]],
                        primaryjoin=(Books.id == cc_classes[cc_id[0]].book),
                        backref='books'))
            elif cc_id[1] == 'series':
                setattr(
                    Books, 'custom_column_' + str(cc_id[0]),
                    relationship(books_custom_column_links[cc_id[0]],
                                 backref='books'))
            else:
                setattr(
                    Books, 'custom_column_' + str(cc_id[0]),
                    relationship(cc_classes[cc_id[0]],
                                 secondary=books_custom_column_links[cc_id[0]],
                                 backref='books'))

        return cc_classes

    @classmethod
    def setup_db(cls, config, app_db_path):
        cls.config = config
        cls.dispose()

        # ToDo: if db changed -> delete shelves, delete downloaded books, delete read books, kobo sync??

        if not config.config_calibre_dir:
            config.invalidate()
            return False

        dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
        if not os.path.exists(dbpath):
            config.invalidate()
            return False

        try:
            cls.engine = create_engine(
                'sqlite://',
                echo=False,
                isolation_level="SERIALIZABLE",
                connect_args={'check_same_thread': False},
                poolclass=StaticPool)
            with cls.engine.begin() as connection:
                connection.execute(
                    text("attach database '{}' as calibre;".format(dbpath)))
                connection.execute(
                    text("attach database '{}' as app_settings;".format(
                        app_db_path)))

            conn = cls.engine.connect()
            # conn.text_factory = lambda b: b.decode(errors = 'ignore') possible fix for #1302
        except Exception as ex:
            config.invalidate(ex)
            return False

        config.db_configured = True

        if not cc_classes:
            try:
                cc = conn.execute("SELECT id, datatype FROM custom_columns")
                cls.setup_db_cc_classes(cc)
            except OperationalError as e:
                log.debug_or_exception(e)

        cls.session_factory = scoped_session(
            sessionmaker(autocommit=False, autoflush=True, bind=cls.engine))
        for inst in cls.instances:
            inst.initSession()

        cls._init = True
        return True

    def get_book(self, book_id):
        return self.session.query(Books).filter(Books.id == book_id).first()

    def get_filtered_book(self, book_id, allow_show_archived=False):
        return self.session.query(Books).filter(Books.id == book_id). \
            filter(self.common_filters(allow_show_archived)).first()

    def get_book_by_uuid(self, book_uuid):
        return self.session.query(Books).filter(
            Books.uuid == book_uuid).first()

    def get_book_format(self, book_id, file_format):
        return self.session.query(Data).filter(Data.book == book_id).filter(
            Data.format == file_format).first()

    # Language and content filters for displaying in the UI
    def common_filters(self, allow_show_archived=False):
        if not allow_show_archived:
            archived_books = (ub.session.query(ub.ArchivedBook).filter(
                ub.ArchivedBook.user_id == int(current_user.id)).filter(
                    ub.ArchivedBook.is_archived == True).all())
            archived_book_ids = [
                archived_book.book_id for archived_book in archived_books
            ]
            archived_filter = Books.id.notin_(archived_book_ids)
        else:
            archived_filter = true()

        if current_user.filter_language() != "all":
            lang_filter = Books.languages.any(
                Languages.lang_code == current_user.filter_language())
        else:
            lang_filter = true()
        negtags_list = current_user.list_denied_tags()
        postags_list = current_user.list_allowed_tags()
        neg_content_tags_filter = false() if negtags_list == [
            ''
        ] else Books.tags.any(Tags.name.in_(negtags_list))
        pos_content_tags_filter = true() if postags_list == [
            ''
        ] else Books.tags.any(Tags.name.in_(postags_list))
        if self.config.config_restricted_column:
            try:
                pos_cc_list = current_user.allowed_column_value.split(',')
                pos_content_cc_filter = true() if pos_cc_list == [''] else \
                    getattr(Books, 'custom_column_' + str(self.config.config_restricted_column)). \
                        any(cc_classes[self.config.config_restricted_column].value.in_(pos_cc_list))
                neg_cc_list = current_user.denied_column_value.split(',')
                neg_content_cc_filter = false() if neg_cc_list == [''] else \
                    getattr(Books, 'custom_column_' + str(self.config.config_restricted_column)). \
                        any(cc_classes[self.config.config_restricted_column].value.in_(neg_cc_list))
            except (KeyError, AttributeError):
                pos_content_cc_filter = false()
                neg_content_cc_filter = true()
                log.error(
                    u"Custom Column No.%d is not existing in calibre database",
                    self.config.config_restricted_column)
                flash(_(
                    "Custom Column No.%(column)d is not existing in calibre database",
                    column=self.config.config_restricted_column),
                      category="error")

        else:
            pos_content_cc_filter = true()
            neg_content_cc_filter = false()
        return and_(lang_filter, pos_content_tags_filter,
                    ~neg_content_tags_filter, pos_content_cc_filter,
                    ~neg_content_cc_filter, archived_filter)

    @staticmethod
    def get_checkbox_sorted(inputlist, state, offset, limit, order):
        outcome = list()
        elementlist = {ele.id: ele for ele in inputlist}
        for entry in state:
            try:
                outcome.append(elementlist[entry])
                del elementlist[entry]
            except KeyError:
                pass
        for entry in elementlist:
            outcome.append(elementlist[entry])
        if order == "asc":
            outcome.reverse()
        return outcome[offset:offset + limit]

    # Fill indexpage with all requested data from database
    def fill_indexpage(self, page, pagesize, database, db_filter, order,
                       *join):
        return self.fill_indexpage_with_archived_books(page, pagesize,
                                                       database, db_filter,
                                                       order, False, *join)

    def fill_indexpage_with_archived_books(self, page, pagesize, database,
                                           db_filter, order,
                                           allow_show_archived, *join):
        pagesize = pagesize or self.config.config_books_per_page
        if current_user.show_detail_random():
            randm = self.session.query(Books) \
                .filter(self.common_filters(allow_show_archived)) \
                .order_by(func.random()) \
                .limit(self.config.config_random_books).all()
        else:
            randm = false()
        off = int(int(pagesize) * (page - 1))
        query = self.session.query(database)
        if len(join) == 3:
            query = query.outerjoin(join[0], join[1]).outerjoin(join[2])
        elif len(join) == 2:
            query = query.outerjoin(join[0], join[1])
        elif len(join) == 1:
            query = query.outerjoin(join[0])
        query = query.filter(db_filter)\
            .filter(self.common_filters(allow_show_archived))
        entries = list()
        pagination = list()
        try:
            pagination = Pagination(page, pagesize, len(query.all()))
            entries = query.order_by(*order).offset(off).limit(pagesize).all()
        except Exception as ex:
            log.debug_or_exception(ex)
        #for book in entries:
        #    book = self.order_authors(book)
        return entries, randm, pagination

    # Orders all Authors in the list according to authors sort
    def order_authors(self, entry):
        sort_authors = entry.author_sort.split('&')
        authors_ordered = list()
        error = False
        ids = [a.id for a in entry.authors]
        for auth in sort_authors:
            results = self.session.query(Authors).filter(
                Authors.sort == auth.lstrip().strip()).all()
            # ToDo: How to handle author names that are not found
            if not len(results):
                error = True
                break
            for r in results:
                if r.id in ids:
                    authors_ordered.append(r)
        if not error:
            entry.authors = authors_ordered
        return entry

    def get_typeahead(self,
                      database,
                      query,
                      replace=('', ''),
                      tag_filter=true()):
        query = query or ''
        self.session.connection().connection.connection.create_function(
            "lower", 1, lcase)
        entries = self.session.query(database).filter(tag_filter). \
            filter(func.lower(database.name).ilike("%" + query + "%")).all()
        json_dumps = json.dumps(
            [dict(name=r.name.replace(*replace)) for r in entries])
        return json_dumps

    def check_exists_book(self, authr, title):
        self.session.connection().connection.connection.create_function(
            "lower", 1, lcase)
        q = list()
        authorterms = re.split(r'\s*&\s*', authr)
        for authorterm in authorterms:
            q.append(
                Books.authors.any(
                    func.lower(Authors.name).ilike("%" + authorterm + "%")))

        return self.session.query(Books) \
            .filter(and_(Books.authors.any(and_(*q)), func.lower(Books.title).ilike("%" + title + "%"))).first()

    def search_query(self, term, *join):
        term = term.strip().lower()
        self.session.connection().connection.connection.create_function(
            "lower", 1, lcase)
        q = list()
        authorterms = re.split("[, ]+", term)
        for authorterm in authorterms:
            q.append(
                Books.authors.any(
                    func.lower(Authors.name).ilike("%" + authorterm + "%")))
        query = self.session.query(Books)
        if len(join) == 3:
            query = query.outerjoin(join[0], join[1]).outerjoin(join[2])
        elif len(join) == 2:
            query = query.outerjoin(join[0], join[1])
        elif len(join) == 1:
            query = query.outerjoin(join[0])
        return query.filter(self.common_filters(True)).filter(
            or_(
                Books.tags.any(func.lower(Tags.name).ilike("%" + term + "%")),
                Books.series.any(
                    func.lower(Series.name).ilike("%" + term + "%")),
                Books.authors.any(and_(*q)),
                Books.publishers.any(
                    func.lower(Publishers.name).ilike("%" + term + "%")),
                func.lower(Books.title).ilike("%" + term + "%")))

    # Read search results from the calibre database and return them (used for feed and simple search)
    def get_search_results(self,
                           term,
                           offset=None,
                           order=None,
                           limit=None,
                           *join):
        order = order or [Books.sort]
        pagination = None
        result = self.search_query(term, *join).order_by(*order).all()
        result_count = len(result)
        if offset is not None and limit is not None:
            offset = int(offset)
            limit_all = offset + int(limit)
            pagination = Pagination((offset / (int(limit)) + 1), limit,
                                    result_count)
        else:
            offset = 0
            limit_all = result_count

        ub.store_ids(result)
        return result[offset:limit_all], result_count, pagination

    # Create a translated display name for each stored language, for use in the UI
    def speaking_language(self, languages=None):
        from . import get_locale

        if not languages:
            languages = self.session.query(Languages) \
                .join(books_languages_link) \
                .join(Books) \
                .filter(self.common_filters()) \
                .group_by(text('books_languages_link.lang_code')).all()
        for lang in languages:
            try:
                cur_l = LC.parse(lang.lang_code)
                lang.name = cur_l.get_language_name(get_locale())
            except UnknownLocaleError:
                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
        return languages

    def update_title_sort(self, config, conn=None):
        # user defined sort function for calibre databases (Series, etc.)
        def _title_sort(title):
            # calibre sort stuff
            title_pat = re.compile(config.config_title_regex, re.IGNORECASE)
            match = title_pat.search(title)
            if match:
                prep = match.group(1)
                title = title[len(prep):] + ', ' + prep
            return title.strip()

        conn = conn or self.session.connection().connection.connection
        conn.create_function("title_sort", 1, _title_sort)

    @classmethod
    def dispose(cls):
        # global session

        for inst in cls.instances:
            old_session = inst.session
            inst.session = None
            if old_session:
                try:
                    old_session.close()
                except Exception:
                    pass
                if old_session.bind:
                    try:
                        old_session.bind.dispose()
                    except Exception:
                        pass

        for attr in list(Books.__dict__.keys()):
            if attr.startswith("custom_column_"):
                setattr(Books, attr, None)

        for db_class in cc_classes.values():
            Base.metadata.remove(db_class.__table__)
        cc_classes.clear()

        for table in reversed(Base.metadata.sorted_tables):
            name = table.key
            if name.startswith("custom_column_") or name.startswith(
                    "books_custom_column_"):
                if table is not None:
                    Base.metadata.remove(table)

    def reconnect_db(self, config, app_db_path):
        self.dispose()
        self.engine.dispose()
        self.setup_db(config, app_db_path)
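
The comment on instances above explains why a WeakSet is used: classmethods such as dispose() and setup_db() can reach every live CalibreDB without keeping any of them alive. A self-contained sketch of that pattern:

import gc
from weakref import WeakSet


class Tracked:
    # Class-level registry: sees every live instance, keeps none of them alive.
    instances = WeakSet()

    def __init__(self, name):
        self.name = name
        self.instances.add(self)

    @classmethod
    def names(cls):
        return sorted(obj.name for obj in cls.instances)


a = Tracked('a')
b = Tracked('b')
assert Tracked.names() == ['a', 'b']

del b
gc.collect()                     # make collection deterministic off CPython too
assert Tracked.names() == ['a']  # the dead instance dropped out on its own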
Example #13
def _deserialize_Task_WeakSet_Mapping(mapping, tasks):
    return {
        name: WeakSet(tasks[i] for i in ids if i in tasks)
        for name, ids in items(mapping or {})
    }
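
A hedged usage sketch of the helper above, assuming items() behaves like dict.items() and using a stand-in Task class; ids that no longer appear in the tasks mapping are silently skipped:

from weakref import WeakSet


class Task:
    def __init__(self, task_id):
        self.id = task_id


tasks = {i: Task(i) for i in (1, 2, 3)}  # id -> live task object
mapping = {'group-a': [1, 2, 99]}        # 99 is unknown, so it is skipped

restored = {
    name: WeakSet(tasks[i] for i in ids if i in tasks)
    for name, ids in mapping.items()
}
assert {t.id for t in restored['group-a']} == {1, 2}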
Example #14
 def _add_pending_task_child(self, task):
     try:
         ch = self._tasks_to_resolve[task.parent_id]
     except KeyError:
         ch = self._tasks_to_resolve[task.parent_id] = WeakSet()
     ch.add(task)
Example #15
    def __init__(self, parent, trackSelection=True, vp = None, vpVoxSize = None):
        attriblist = [wx.glcanvas.WX_GL_RGBA,wx.glcanvas.WX_GL_STENCIL_SIZE,8, wx.glcanvas.WX_GL_DOUBLEBUFFER, 16]
        GLCanvas.__init__(self, parent,-1, attribList = attriblist)
        #GLCanvas.__init__(self, parent,-1)
        wx.EVT_PAINT(self, self.OnPaint)
        wx.EVT_SIZE(self, self.OnSize)
        wx.EVT_MOUSEWHEEL(self, self.OnWheel)
        wx.EVT_LEFT_DOWN(self, self.OnLeftDown)
        wx.EVT_LEFT_UP(self, self.OnLeftUp)
        wx.EVT_LEFT_DCLICK(self, self.OnLeftDClick)
        if trackSelection:
            wx.EVT_MOTION(self, self.OnMouseMove)
        #wx.EVT_ERASE_BACKGROUND(self, self.OnEraseBackground)
        #wx.EVT_IDLE(self, self.OnIdle)
        
        self.gl_context = wx.glcanvas.GLContext(self)
        #self.gl_context.SetCurrent()

        self.init = False
        self.nVertices = 0
        self.IScale = [1.0, 1.0, 1.0]
        self.zeroPt = [0, 1.0/3, 2.0/3]
        self.cmap = cm_hot
        self.clim = [0,1]

        self.parent = parent

        self.vp = vp
        self.vpVoxSize = vpVoxSize

        self.pointSize=5 #default point size = 5nm

        self.pixelsize = 10

        self.xmin =0
        self.xmax = self.pixelsize*self.Size[0]
        self.ymin = 0
        self.ymax = self.pixelsize*self.Size[1]

        self.scaleBarLength = 200

        self.scaleBarOffset = (20.0, 20.0) #pixels from corner
        self.scaleBarDepth = 10.0 #pixels
        self.scaleBarColour = [1,1,0]

        self.crosshairColour = [0,1,1]
        self.centreCross=False

        self.numBlurSamples = 1
        self.blurSigma = 0.0

        self.LUTDraw = True
        self.mode = 'triang'

        self.backgroundImage = False

        self.colouring = 'area'

        self.drawModes = {'triang':GL_TRIANGLES, 'quads':GL_QUADS, 'edges':GL_LINES, 'points':GL_POINTS, 'tracks': GL_LINE_STRIP}

        self.c = numpy.array([1,1,1])
        self.zmin = -1
        self.zmax = 1
        self.ang = 0

        self.selectionDragging = False
        self.selectionStart = (0,0)
        self.selectionFinish = (0,0)
        self.selection = False

        self.wantViewChangeNotification = WeakSet()
        self.pointSelectionCallbacks = []
Example #16
 def __init__(self, val: T_co, deadline: float) -> None:
     self._v = val
     self._ch: Chan[None] = broadcast(None)
     self._deadline = deadline
     self._children: MutableSet[Context] = WeakSet()
Example #17
            else:
                self._state = 0
            self._cond.notify_all()

        
######################################## Thread section start ########################################
get_ident = _thread.get_ident
get_native_id = _thread.get_native_id
_counter = _count().__next__
_counter()  # Consume 0 so first non-main thread has id 1.
_newname = lambda: "Thread-{}".format(_counter())

_active_limbo_lock = Lock()
_active = {}    # maps thread id to Thread object
_limbo = {}
_dangling = WeakSet()  # Set of all Thread objects that have ever been created

# Set of Thread._tstate_lock locks of non-daemon threads, used by _shutdown()
# to wait until all Python thread states get deleted:
# see Thread._set_tstate_lock().
_shutdown_locks_lock = Lock()
_shutdown_locks = set()

_threading_atexits = []
_SHUTTING_DOWN = False

class Thread:

    _initialized = False

    def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None):
        """
        This constructor should always be called with keyword arguments.
Example #18
 def __init__(self):
     self._calls = WeakSet()
Example #19
 def test_gt(self):
     self.assertTrue(self.abcde_weakset > self.ab_weakset)
     self.assertFalse(self.abcde_weakset > self.def_weakset)
     self.assertFalse(self.ab_weakset > self.ab_weakset)
     self.assertFalse(WeakSet() > WeakSet())
Example #20
 def __init__(self):
     self._functions = WeakSet()
     self._methods = WeakKeyDictionary()
Example #21
 def test_clear(self):
     self.s.clear()
     self.assertEqual(self.s, WeakSet([]))
     self.assertEqual(len(self.s), 0)
Example #22
     ExceptionReport, DebugWaiting, DebugComplete, InteractionReply, \
     ServerExited, ImageSetSuccess, ImageSetSuccessWithDictionary, \
     SharedDictionaryRequest, Ack, UpstreamExit, ANNOUNCE_DONE,  \
     OmeroLoginRequest, OmeroLoginReply
import cellprofiler.utilities.jutil as J
from cellprofiler.utilities.rpdb import Rpdb
from cellprofiler.utilities.run_loop import enter_run_loop, stop_run_loop
#
# CellProfiler expects NaN as a result during calculation
#
import numpy as np
np.seterr(all='ignore')

# to guarantee closing of measurements, we store all of them in a WeakSet, and
# close them on exit.
all_measurements = WeakSet()

NOTIFY_ADDR = "inproc://notify"
NOTIFY_STOP = "STOP"

the_zmq_context = zmq.Context.instance()
stdin_monitor_lock = threading.Lock()
stdin_monitor_cv = threading.Condition(stdin_monitor_lock)
stdin_monitor_started = False


def main():
    #
    # For Windows build with Ilastik, look for site-packages
    # in order to find Ilastik sources.
    #
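
The comment above says all measurements are kept in a WeakSet and closed on exit. A hedged sketch of that close-on-exit idea (close_all_measurements and the atexit registration are illustrative, not the actual CellProfiler code):

import atexit


def close_all_measurements():
    # Iterate over a snapshot, since closing may remove entries from the WeakSet.
    for measurements in list(all_measurements):
        try:
            measurements.close()
        except Exception:
            pass  # never let cleanup errors mask the real exit reason


atexit.register(close_all_measurements)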
Example #23
 def __init__(self, name):
     self.name = name
     self._task = WeakSet()
     self._event_storage = Storage()
Example #24
 def __init__(self):
     self.envs = WeakSet()  # weak set of environments
     self.cache = Cache()  # cache for all records
     self.todo = {}  # recomputations {field: [records]}
     self.mode = False  # flag for draft/onchange
     self.recompute = True
Example #25
File: api.py Project: ecoreos/hz
 def __init__(self):
     self.envs = WeakSet()  # weak set of environments
     self.todo = {}  # recomputations {field: [records]}
     self.mode = False  # flag for draft/onchange
     self.recompute = True
     self.recompute_old = []  # list of old api compute fields to recompute
Example #26
 def __init__(cls, *args, **kwargs):
     """Instantiate a class"""
     from weakref import WeakSet
     super().__init__(*args, **kwargs)
     cls._instances = WeakSet()
Example #27
 def __init__(self, *args, **kwargs):
     self.futures = WeakSet()
Example #28
 def __init__(self, sender):
     self.__sender = ref(sender)
     self.__callbacks = set()
     self.__weak_callbacks = WeakSet()
     self.__lock = Lock()
Example #29
 def test_isdisjoint(self):
     self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
     self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))
Example #30
import atexit
import logging
import asyncio
import struct  # used by to_unsigned() below
from weakref import WeakSet
from contextlib import contextmanager
from concurrent.futures import ProcessPoolExecutor
from multiprocessing import Pipe

from tornado.ioloop import IOLoop
from tornado.autoreload import add_reload_hook

import hwdecrypt
from .synthetic import stack_card

G_ATEXIT_REGISTERED = False
G_ACTIVE_PROCESSPOOLS = WeakSet()


def shutdown_all_pools():
    for p in G_ACTIVE_PROCESSPOOLS:
        print(p)
        p.shutdown(wait=True)


add_reload_hook(shutdown_all_pools)


def to_unsigned(i):
    return struct.unpack("<I", struct.pack("<i", i))[0]
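
For shutdown_all_pools() above to reach a pool, each pool has to be added to G_ACTIVE_PROCESSPOOLS when it is created. A hedged sketch of that registration step (make_card_pool is an illustrative name, not part of the original module):

def make_card_pool(workers=2):
    pool = ProcessPoolExecutor(max_workers=workers)
    G_ACTIVE_PROCESSPOOLS.add(pool)  # weakly tracked; dead pools drop out automatically
    return pool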