Example #1
def __init__(self, table_name):
    self.table_name = table_name
    self.table_kwargs = {'readonly': False, 'ack': False}
    self.table = pt.table(table_name, **self.table_kwargs)
    self.lock = Lock()
    self.readlocks = 0
    self.writelocks = 0
    self.write = False
    self.writeable = self.table.iswritable()
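
A brief sketch of how such a proxy's lock might serialise access to the underlying CASA table; the getcol method and the column argument below are illustrative assumptions, not part of the original:

def getcol(self, column):
    # python-casacore table objects are not thread-safe, so hold the
    # proxy's lock for the duration of the read
    with self.lock:
        return self.table.getcol(column)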
Example #2
class SingletonMixin(object):
    __singleton_lock = Lock()
    __singleton_instance = None

    @classmethod
    def instance(cls):
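        # Double-checked locking: test outside the lock to avoid
        # contention on the common path, then re-test while holding the
        # lock so only one thread ever constructs the instance.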
        if not cls.__singleton_instance:
            with cls.__singleton_lock:
                if not cls.__singleton_instance:
                    cls.__singleton_instance = cls()

        return cls.__singleton_instance
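
A minimal usage sketch; the Config subclass below is a hypothetical example, not part of the original:

class Config(SingletonMixin):
    def __init__(self):
        self.settings = {}

a = Config.instance()
b = Config.instance()
assert a is b  # every caller receives the same instance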
Example #3
import logging

try:
    from dask.utils import SerializableLock as Lock
except ImportError:
    from threading import Lock


# Create a logger object, but don't add any handlers. Leave that to user code.
logger = logging.getLogger(__name__)


NONE_VAR_NAME = '__values__'


# dask.utils.SerializableLock if available, otherwise just a threading.Lock
GLOBAL_LOCK = Lock()


def _encode_variable_name(name):
    if name is None:
        name = NONE_VAR_NAME
    return name


def _decode_variable_name(name):
    if name == NONE_VAR_NAME:
        name = None
    return name
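
For example, the two helpers round-trip a missing variable name through the sentinel:

# None is mapped to the sentinel name and back again
assert _encode_variable_name(None) == NONE_VAR_NAME
assert _decode_variable_name(NONE_VAR_NAME) is None
# ordinary names pass through unchanged
assert _encode_variable_name('temperature') == 'temperature'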


def find_root(ds):
Example #4
            raise InstallCubException("Extracting %s failed\n%s" %
                                      (archive, str(e)))
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)

        log.info("NVIDIA cub archive unzipped into '%s'" % _cub_dir)

    # Final check on installation
    there, reason = is_cub_installed(_cub_readme, _cub_header,
                                     _cub_version_str)

    if not there:
        raise InstallCubException(reason)


_cub_install_lock = Lock()

with _cub_install_lock:
    _cub_installed, _ = is_cub_installed(_cub_readme, _cub_header,
                                         _cub_version_str)


def cub_dir():
    global _cub_installed

    if _cub_installed is False:
        with _cub_install_lock:
            # Double-locking pattern
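            # (the flag is re-checked while holding the lock so that
            # only one thread runs the one-time install)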
            if _cub_installed is False:
                _install_cub()
                _cub_installed = True
Example #5
import os
from collections import OrderedDict
import numpy as np

from .. import DataArray
from ..core.utils import is_scalar
from ..core import indexing
from .common import BackendArray
try:
    from dask.utils import SerializableLock as Lock
except ImportError:
    from threading import Lock

RASTERIO_LOCK = Lock()

_ERROR_MSG = ('The kind of indexing operation you are trying to do is not '
              'valid on rasterio files. Try to load your data with '
              'ds.load() first.')
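
A sketch of how such a module-level lock is typically used around the non-thread-safe GDAL handle; rasterio.open and the file path below are illustrative assumptions:

import rasterio

with RASTERIO_LOCK:
    # open the dataset while holding the shared lock
    src = rasterio.open('example.tif')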


class RasterioArrayWrapper(BackendArray):
    """A wrapper around rasterio dataset objects"""
    def __init__(self, rasterio_ds):
        self.rasterio_ds = rasterio_ds
        self._shape = (rasterio_ds.count, rasterio_ds.height,
                       rasterio_ds.width)
        self._ndims = len(self.shape)

    @property
    def dtype(self):
        dtypes = self.rasterio_ds.dtypes
Example #6
def __init__(self, key_fn):
    self._key_fn = key_fn
    self._lock = Lock()
    self._cache = {}
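
A hedged sketch of how such a keyed cache might be completed; the get method and its factory argument are assumptions for illustration, not part of the original:

def get(self, obj, factory):
    # Compute the key once, then create the value at most once while
    # protecting the dict from concurrent mutation.
    key = self._key_fn(obj)
    with self._lock:
        try:
            return self._cache[key]
        except KeyError:
            value = self._cache[key] = factory(obj)
            return value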
Example #7
# -*- coding: utf-8 -*-

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

try:
    from dask.utils import SerializableLock as Lock
except ImportError:
    from threading import Lock

__run_marker = {'in_pytest': False}
__run_marker_lock = Lock()

# Tag indicating that missing packages should generate an
# exception, regardless of the 'in_pytest' marker
# Used for testing exception raising behaviour
force_missing_pkg_exception = object()


def in_pytest():
    """ Return True if we're marked as executing inside pytest """
    with __run_marker_lock:
        return __run_marker['in_pytest']


def mark_in_pytest(in_pytest=True):
    """ Mark if we're in a pytest run """
    if not isinstance(in_pytest, bool):
        raise TypeError('in_pytest %s is not a boolean' % in_pytest)
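
A minimal usage sketch from a hypothetical conftest.py, assuming mark_in_pytest records the flag under __run_marker_lock (its body is truncated above):

# conftest.py -- import mark_in_pytest from wherever this module lives
import pytest

@pytest.fixture(autouse=True, scope='session')
def _mark_pytest_run():
    mark_in_pytest(True)
    yield
    mark_in_pytest(False)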
Example #8
        except Exception as e:
            raise InstallTroveException("Extracting %s failed\n%s" %
                                        (archive, str(e)))
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)

        log.info("NVIDIA trove archive unzipped into '%s'" % _trove_dir)

    # Final check on installation
    there, reason = is_trove_installed(_trove_readme)

    if not there:
        raise InstallTroveException(reason)


_trove_install_lock = Lock()

with _trove_install_lock:
    _trove_installed, _ = is_trove_installed(_trove_readme)


def trove_dir():
    global _trove_installed

    if _trove_installed is False:
        with _trove_install_lock:
            # Double-locking pattern
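            # (re-checking under the lock ensures only one thread
            # performs the one-time install)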
            if _trove_installed is False:
                _install_trove()
                _trove_installed = True
Example #9
class TableExecutor(object):
    """
    Singleton class providing CASA Table IO operations
    isolated within a single thread per table
    """
    __cache = {}
    __refcounts = defaultdict(lambda: 0)
    __cache_lock = Lock()

    @classmethod
    def register(cls, table_name):
        """
        Registers a table with the Executor table cache.
        """
        with cls.__cache_lock:
            # Create a new executor or bump the reference count
            # on the existing one
            try:
                executor = cls.__cache[table_name]
            except KeyError:
                cls.__cache[table_name] = executor = ThreadPoolExecutor(1)
                cls.__refcounts[table_name] = 1
                return executor.submit(_table_create, table_name)
            else:
                cls.__refcounts[table_name] += 1
                return executor.submit(lambda: True)

    @classmethod
    def deregister(cls, table_name):
        """
        Deregisters a table with the Executor table cache.
        """
        with cls.__cache_lock:
            try:
                executor = cls.__cache[table_name]
            except KeyError:
                raise KeyError("Table '%s' not registered with the executor"
                               % table_name)
            else:
                cls.__refcounts[table_name] -= 1

                if cls.__refcounts[table_name] == 0:
                    f = executor.submit(_table_close, table_name)
                    executor.shutdown(wait=False)
                    del cls.__cache[table_name]
                    del cls.__refcounts[table_name]
                    return f
                elif cls.__refcounts[table_name] < 0:
                    raise ValueError("Invalid condition")
                else:
                    return executor.submit(lambda: True)

    @classmethod
    def close(cls, wait=False):
        """ Closes the pool and associated table cache """
        with cls.__cache_lock:
            for table_name, executor in cls.__cache.items():
                executor.submit(_table_close, table_name)
                executor.shutdown(wait=wait)

            cls.__cache.clear()
            cls.__refcounts.clear()

    @classmethod
    def getcol(cls, table_name, *args, **kwargs):
        """ Returns a future calling a getcol on the table """
        with cls.__cache_lock:
            try:
                executor = cls.__cache[table_name]
            except KeyError:
                raise ValueError("Table '%s' not registered" % table_name)

            return executor.submit(_getcol, *args, **kwargs)

    @classmethod
    def getcolnp(cls, table_name, *args, **kwargs):
        """ Returns a future calling a getcolnp on the table """
        with cls.__cache_lock:
            try:
                executor = cls.__cache[table_name]
            except KeyError:
                raise ValueError("Table '%s' not registered" % table_name)

            return executor.submit(_getcolnp, *args, **kwargs)

    @classmethod
    def putcol(cls, table_name, *args, **kwargs):
        """ Returns a future calling a putcol on the table """
        with cls.__cache_lock:
            try:
                executor = cls.__cache[table_name]
            except KeyError:
                raise ValueError("Table '%s' not registered" % table_name)

            return executor.submit(_putcol, *args, **kwargs)
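
A hedged usage sketch; the table name and column below are placeholders, and the signature of the underlying _getcol helper is not shown in the fragment above:

# Register the table, which creates its dedicated single-threaded executor
TableExecutor.register('observation.ms').result()

# All I/O for the table is funnelled through that one thread
times = TableExecutor.getcol('observation.ms', 'TIME').result()

# Drop the reference; the table is closed once the count reaches zero
TableExecutor.deregister('observation.ms').result()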