Example No. 1
    def __init__(self,
                 path=None,
                 symmetry=None,
                 hydrogens=False,
                 pdbfix=False,
                 vdw_radii=None,
                 **kwargs):
        self._kwargs = kwargs.copy()
        GeneProvider.__init__(self, **kwargs)
        self.path = path
        self.symmetry = symmetry
        self.hydrogens = hydrogens
        self.pdbfix = pdbfix
        self.vdw_radii = vdw_radii
        try:
            self.catalog = self._CATALOG[self.name]
        except KeyError:
            self.catalog = self._CATALOG[self.name] = tuple(
                self._compile_catalog())
        self._compounds_cache = self._cache.setdefault(
            self.name + '_compounds', LRU(300))
        self._atomlookup_cache = self._cache.setdefault(
            self.name + '_atomlookup', LRU(300))
        self._residuelookup_cache = self._cache.setdefault(
            self.name + '_residuelookup', LRU(300))
        self.allele = random.choice(self.catalog)

        # An optimization for similarity methods: xform coords are
        # cached here after all genes have expressed. See Individual.express.
        self._expressed_coordinates = None
Example No. 2
    def __init__(self, cache_location=None, pickle_cache=False,
                 memory_cache_size=None):
        if memory_cache_size:
            self._cache = LRU(max_size=memory_cache_size)
        else:
            self._cache = {}
        self.cache_location = cache_location
        self.pickle_cache = pickle_cache
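Example No. 3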
    def __init__(self, max_size: int, ttl_millis: int, from_remote):
        '''
        :param max_size: max size of the cache, using LRU eviction
        :param ttl_millis: max time to live for cached objects, in milliseconds
        :param from_remote: function to load data when a key is missing
        '''
        self.cache = LRU(max_size=max_size)
        self.ttl = ttl_millis
        self.from_remote = from_remote
        self._lock = RLock()
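Example No. 4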
    def __init__(self, name, max_size=128, from_remote=None):
        '''
        :param max_size: maximum capacity
        :param from_remote: function to query data; its return value must be a dict
        :param ttl: time to live, in seconds
        '''
        self.local_cache = LRU(max_size=max_size)
        self.from_remote = from_remote
        self.empty = {}
        self.name = name
Example No. 5
def test_lru():
    lru = LRU(max_size=1)
    lru['hi'] = 0
    lru['bye'] = 1
    assert len(lru) == 1
    lru['bye']
    assert lru.get('hi') is None

    del lru['bye']
    assert 'bye' not in lru
    assert len(lru) == 0
    assert not lru
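Example No. 6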
class CacheModule:
    def __init__(self, max_size: int, ttl_millis: int, from_remote):
        '''
        :param max_size: max size of the cache, using LRU eviction
        :param ttl_millis: max time to live for cached objects, in milliseconds
        :param from_remote: function to load data when a key is missing
        '''
        self.cache = LRU(max_size=max_size)
        self.ttl = ttl_millis
        self.from_remote = from_remote
        self._lock = RLock()

    def multi_get(self, keys) -> dict:
        result, missing = {}, []

        now = int(round(time.time() * 1000))
        for key in keys:
            value: dict = self.cache.get(key)
            # cache miss
            if not value:
                missing.append(key)
            # entry expired
            elif now >= value['max_time']:
                missing.append(key)
            # entry present and not the empty default
            elif value['data']:
                result[key] = value['data']

        if len(missing) > 0:
            loads_d: dict = self.from_remote(missing)
            dft = {
                'max_time': int(round(time.time() * 1000)) + self.ttl,
                'data': {}
            }
            for key in missing:
                loads_d.setdefault(key, dft)
            self.multi_set(loads_d)
            # include the freshly loaded values in the returned dict
            for key in missing:
                if loads_d[key]['data']:
                    result[key] = loads_d[key]['data']
        return result

    def multi_set(self, data: dict):
        with self._lock:
            for k, v in data.items():
                # assign directly so refreshed entries replace expired ones
                self.cache[k] = v

    def remove(self, keys):
        with self._lock:
            for key in keys:
                self.cache.pop(key, None)
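A hypothetical usage sketch for the TTL-based CacheModule above (it assumes the class's own imports, time, threading.RLock and boltons' LRU, are in scope). Note that from_remote is expected to return the same {'max_time': ..., 'data': ...} envelopes that multi_get reads back; load_users below is a made-up loader, not part of the original code.

import time

def load_users(keys):
    # pretend remote lookup, wrapping each value in the expected envelope
    deadline = int(round(time.time() * 1000)) + 60000
    return {k: {'max_time': deadline, 'data': {'name': k.upper()}} for k in keys}

users = CacheModule(max_size=128, ttl_millis=60000, from_remote=load_users)
print(users.multi_get(['alice', 'bob']))  # first call goes through load_users
print(users.multi_get(['alice', 'bob']))  # second call is served from the LRU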
Example No. 7
    def __init__(self, residues=None, library='Dunbrack', avoid_replacement=False,
                 mutations=None, ligation=False, hydrogens=False, **kwargs):
        GeneProvider.__init__(self, **kwargs)
        self._kwargs = kwargs
        self._residues = residues
        self.library = library
        self.mutations = mutations if mutations is not None else []
        self.ligation = ligation
        self.hydrogens = hydrogens
        self.avoid_replacement = avoid_replacement
        self.allele = []
        # set caches
        try:
            self.residues = self._cache[self.name + '_residues']
        except KeyError:
            self.residues = self._cache[self.name + '_residues'] = OrderedDict()
            
        try:
            self.rotamers = self._cache[self.name + '_rotamers']
        except KeyError:
            cache_size = len(residues) * (1 + 0.5 * len(self.mutations))
            self.rotamers = self._cache[self.name + '_rotamers'] = LRU(int(cache_size))

        if self.ligation:
            self.random_number = random.random()
        else:
            self.random_number = None

        # Avoid unnecessary calls to expensive get_rotamers if residue is known
        # to not have any rotamers
        self._residues_without_rotamers = ['ALA', 'GLY']
Example No. 8
    def __setstate__(self, state):
        if '_cache' not in state:
            if state.get('memory_cache_size'):
                state['_cache'] = LRU(max_size=state['memory_cache_size'])
            else:
                state['_cache'] = {}
        self.__dict__ = dict(state)
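A minimal round-trip sketch of the __setstate__ pattern above, assuming the matching __getstate__ drops '_cache' before pickling (as the full Cache class later in this listing does); TinyCache is a hypothetical stand-in, not the original class.

import pickle
from boltons.cacheutils import LRU

class TinyCache:
    def __init__(self, memory_cache_size=None):
        self.memory_cache_size = memory_cache_size
        self._cache = LRU(max_size=memory_cache_size) if memory_cache_size else {}

    def __getstate__(self):
        state = dict(self.__dict__)
        state.pop('_cache')  # never pickle the cache contents
        return state

    def __setstate__(self, state):
        if '_cache' not in state:
            if state.get('memory_cache_size'):
                state['_cache'] = LRU(max_size=state['memory_cache_size'])
            else:
                state['_cache'] = {}
        self.__dict__ = dict(state)

c = TinyCache(memory_cache_size=2)
c._cache['k'] = 1
restored = pickle.loads(pickle.dumps(c))
assert 'k' not in restored._cache  # contents dropped, an empty LRU is rebuilt
assert restored._cache.max_size == 2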
Example No. 9
    def __init__(self, stimuli, fixations, max_fixations_in_cache=500*1000*1000):
        self.stimuli = stimuli
        self.fixations = fixations
        cache_size = int(max_fixations_in_cache / len(self.fixations.x))
        self.cache = LRU(cache_size)
        self.nonfixations_for_image = cached(self.cache)(self._nonfixations_for_image)
        self.widths = np.asarray([s[1] for s in stimuli.sizes]).astype(float)
        self.heights = np.asarray([s[0] for s in stimuli.sizes]).astype(float)
Example No. 10
class CacheModule:
    def __init__(self, name, max_size=128, from_remote=None):
        '''
        :param max_size: maximum capacity
        :param from_remote: function to query data; its return value must be a dict
        :param ttl: time to live, in seconds
        '''
        self.local_cache = LRU(max_size=max_size)
        self.from_remote = from_remote
        self.empty = {}
        self.name = name

    def get_all(self, keys: list) -> dict:
        result, missing_keys = {}, []

        for key in keys:
            val = self.local_cache.get(key)
            if val is not None:
                if val != self.empty:
                    result[key] = val
            else:
                missing_keys.append(key)
        # cache misses: fall back to the remote loader
        if len(missing_keys) > 0:
            missing_dict = self.reload(missing_keys)
            result = {**result, **missing_dict}

        return result

    def remove(self, keys: list):
        for key in keys:
            del self.local_cache[key]

    def statistics(self):
        total = self.local_cache.hit_count + self.local_cache.miss_count
        # report the hit rate as a percentage; guard against division by zero
        hit_rate = 100.0 * self.local_cache.hit_count / total if total else 0.0
        return {
            "name": self.name,
            "hit_count": self.local_cache.hit_count,
            "miss_count": self.local_cache.miss_count,
            "size": len(self.local_cache),
            "hitRateAsString": '%.3f%%' % hit_rate,
        }

    def reload(self, keys: list) -> dict:
        d = {}
        missing_vals = self.from_remote(keys)
        # keys missing from the remote are cached as the empty sentinel
        if missing_vals:
            for key, val in missing_vals.items():
                self.local_cache.setdefault(key, val)
                d[key] = val
        for missing_key in keys:
            self.local_cache.setdefault(missing_key, self.empty)
        return d
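A hypothetical usage sketch for the CacheModule above; fetch_rows stands in for from_remote and returns a plain dict, as the docstring requires. Keys unknown to the remote are cached as the empty sentinel, so repeat lookups do not hit the remote again (negative caching).

def fetch_rows(keys):
    table = {'a': 1, 'b': 2}
    return {k: table[k] for k in keys if k in table}

rows = CacheModule('demo', max_size=64, from_remote=fetch_rows)
print(rows.get_all(['a', 'b', 'z']))  # {'a': 1, 'b': 2}; 'z' is cached as empty
print(rows.get_all(['a', 'z']))       # {'a': 1}; both keys answered locally
print(rows.statistics())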
Example No. 11
def test_cachedmethod():
    class Car(object):
        def __init__(self, cache=None):
            self.h_cache = LRI() if cache is None else cache
            self.door_count = 0
            self.hook_count = 0
            self.hand_count = 0

        @cachedmethod('h_cache')
        def hand(self, *a, **kw):
            self.hand_count += 1

        @cachedmethod(lambda obj: obj.h_cache)
        def hook(self, *a, **kw):
            self.hook_count += 1

        @cachedmethod('h_cache', scoped=False)
        def door(self, *a, **kw):
            self.door_count += 1

    car = Car()

    # attribute name-style
    assert car.hand_count == 0
    car.hand('h', a='nd')
    assert car.hand_count == 1
    car.hand('h', a='nd')
    assert car.hand_count == 1

    # callable-style
    assert car.hook_count == 0
    car.hook()
    assert car.hook_count == 1
    car.hook()
    assert car.hook_count == 1

    # Ensure that non-selfish caches share the cache nicely
    lru = LRU()
    car_one = Car(cache=lru)
    assert car_one.door_count == 0
    car_one.door('bob')
    assert car_one.door_count == 1
    car_one.door('bob')
    assert car_one.door_count == 1

    car_two = Car(cache=lru)
    assert car_two.door_count == 0
    car_two.door('bob')
    assert car_two.door_count == 0

    # try unbound for kicks
    Car.door(Car(), 'bob')

    # always check the repr
    print(repr(car_two.door))
    print(repr(Car.door))
    return
Example No. 12
    def __init__(self, username, password, uri_base, dry_run=False):
        self.log = logging.getLogger('confl-copier')
        self._dry_run = dry_run
        self._client = ConfluenceAPIDryRunProxy(username=username,
                                                password=password,
                                                uri_base=uri_base,
                                                dry_run=dry_run)

        self._cache = LRU()
Example No. 13
def test_lru_with_dupes_2():
    "From Issue #55, h/t github.com/mt"
    SIZE = 3
    lru = LRU(max_size=SIZE)
    keys = ['A', 'A', 'B', 'A', 'C', 'B', 'D', 'E']
    for i, k in enumerate(keys):
        lru[k] = 'HIT'
        assert _test_linkage(lru._anchor, SIZE + 1), 'linked list invalid'

    return
Example No. 14
    def __init__(self,
                 parent_model,
                 stimuli,
                 resized_predictions_cache_size=5000,
                 compute_size=(500, 500),
                 **kwargs):
        super(ShuffledBaselineModel, self).__init__(**kwargs)
        self.parent_model = parent_model
        self.stimuli = stimuli
        self.compute_size = compute_size
        self.resized_predictions_cache = LRU(
            max_size=resized_predictions_cache_size, on_miss=self._cache_miss)
Example No. 15
def test_cached_dec():
    lru = LRU()
    inner_func = CountingCallable()
    func = cached(lru)(inner_func)

    assert inner_func.call_count == 0
    func()
    assert inner_func.call_count == 1
    func()
    assert inner_func.call_count == 1
    func('man door hand hook car door')
    assert inner_func.call_count == 2
    return
Example No. 16
def create_devices(netbox_api, devices, role_id, site_id, threads=10):
    device_types_mapper = NetboxMapper(netbox_api, "dcim", "device-types")
    platforms_mapper = NetboxMapper(netbox_api, "dcim", "platforms")
    caches = {
        "device_types": LRU(on_miss=lambda slug: next(device_types_mapper.get(slug=slug))),
        "platforms": LRU(on_miss=lambda slug: next(platforms_mapper.get(slug=slug))),
    }
    device_mapper = NetboxMapper(netbox_api, "dcim", "devices")

    with ThreadPoolExecutor(max_workers=threads) as executor:
        futures = []
        for name, props in devices.items():
            future = executor.submit(_thread_push_device, device_mapper,
                                     caches, name, props, role_id, site_id)
            futures.append(future)

        try:
            for future in concurrent.futures.as_completed(futures):
                future.result()
        except requests.exceptions.HTTPError as e:
            print(e.response.text)
            raise
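A minimal sketch of the on_miss hook used for the caches above: when a key is absent, boltons' LRU calls on_miss(key), stores its return value, and returns it, so repeated lookups trigger only one load.

from boltons.cacheutils import LRU

calls = []
slugs = LRU(max_size=8, on_miss=lambda slug: calls.append(slug) or slug.upper())

print(slugs['web-01'])  # miss: on_miss runs and its result is cached
print(slugs['web-01'])  # hit: served from the cache, on_miss is not called
print(calls)            # ['web-01']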
Example No. 17
def test_callable_cached_dec():
    lru = LRU()
    get_lru = lambda: lru

    inner_func = CountingCallable()
    func = cached(get_lru)(inner_func)

    assert inner_func.call_count == 0
    func()
    assert inner_func.call_count == 1
    func()
    assert inner_func.call_count == 1

    lru.clear()

    func()
    assert inner_func.call_count == 2
    func()
    assert inner_func.call_count == 2

    print(repr(func))

    return
Example No. 19
def test_unscoped_cached_dec():
    lru = LRU()
    inner_func = CountingCallable()
    func = cached(lru)(inner_func)

    other_inner_func = CountingCallable()
    other_func = cached(lru)(other_inner_func)

    assert inner_func.call_count == 0
    func('a')
    assert inner_func.call_count == 1
    func('a')

    other_func('a')
    assert other_inner_func.call_count == 0
    return
Example No. 20
    def __init__(self,
                 parent_model,
                 stimuli,
                 resized_predictions_cache_size=5000,
                 compute_size=(500, 500),
                 library='torch',
                 **kwargs):
        super(ShuffledBaselineModel, self).__init__(**kwargs)
        self.parent_model = parent_model
        self.stimuli = stimuli
        self.compute_size = compute_size
        self.resized_predictions_cache = LRU(
            max_size=resized_predictions_cache_size, on_miss=self._cache_miss)
        if library not in ['torch', 'tensorflow', 'numpy']:
            raise ValueError(library)
        self.library = library
Example No. 21
def encodeSwoopTensorInFormat(tensor,
                              descriptor,
                              tensor_shape=None,
                              cache_size=32):
    codec = Codec(tuple(descriptor), [True] * len(descriptor))

    # get output dict based on rank names
    rank_names = tensor.getRankIds()
    # print("encode tensor: rank names {}, descriptor {}".format(rank_names, descriptor))
    # TODO: move output dict generation into codec
    output = codec.get_output_dict(rank_names)
    # print("output dict {}".format(output))
    output_tensor = []
    for i in range(0, len(descriptor) + 1):
        output_tensor.append(list())

    # print("encode, output {}".format(output_tensor))
    codec.encode(-1,
                 tensor.getRoot(),
                 tensor.getRankIds(),
                 output,
                 output_tensor,
                 shape=tensor_shape)

    # name the fibers in order from left to right per-rank
    rank_idx = 0
    rank_names = ["root"] + tensor.getRankIds()
    # tensor_cache = dict()

    tensor_cache = LRU(max_size=cache_size)
    for rank in output_tensor:
        fiber_idx = 0
        for fiber in rank:
            fiber_name = "_".join(
                [tensor.getName(), rank_names[rank_idx],
                 str(fiber_idx)])
            fiber.setName(fiber_name)
            # fiber.printFiber()
            fiber.cache = tensor_cache
            fiber_idx += 1
        rank_idx += 1
    return output_tensor
Example No. 22
    def __init__(self,
                 method='prody',
                 target=None,
                 modes=None,
                 n_samples=10000,
                 rmsd=1.0,
                 group_by=None,
                 group_lambda=None,
                 path=None,
                 write_modes=False,
                 **kwargs):
        # Fire up!
        GeneProvider.__init__(self, **kwargs)
        self.method = method
        self.target = target
        self.modes = modes if modes is not None else range(12)
        self.max_modes = max(self.modes) + 1
        self.n_samples = n_samples
        self.rmsd = rmsd
        self.group_by = None
        self.group_by_options = None
        self.path = None
        self.write_modes = write_modes
        if method == 'prody':
            if path is None:
                self.normal_modes_function = self.calculate_prody_normal_modes
                self.group_by = group_by
                self.group_by_options = {} if group_lambda is None else {
                    'n': group_lambda
                }
            else:
                self.path = path
                self.normal_modes_function = self.read_prody_normal_modes
        else:  # gaussian
            self.normal_modes_function = self.read_gaussian_normal_modes
            if path is None:
                raise ValueError('Path is required if method == gaussian')
            self.path = path

        if self.name not in self._cache:
            self._cache[self.name] = LRU(300)
Example No. 23
from boltons.cacheutils import LRU
from flask_sqlalchemy import BaseQuery, Pagination, SQLAlchemy
from sqlalchemy import exc as sqlalchemy_err, text
from sqlalchemy.engine.result import ResultProxy, RowProxy
from sqlalchemy.exc import DatabaseError, IntegrityError
from sqlalchemy.orm import Query, Session
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.sql.schema import Table

from .err_msg import mysql_msg
from .exceptions import DBDuplicateKeyError, DBError, FuncArgsError, HttpError
from .utils import gen_class_name, verify_message

__all__ = ("DBClient", "DialectDriver")

_lru_cache = LRU()


class DialectDriver(object):
    """
    数据库方言驱动
    """
    #  postgresql
    pg_default = "postgresql+psycopg2"  # default
    pg_pg8000 = "postgresql+pg8000"
    # mysql
    mysql_default = "mysql+mysqldb"  # default
    mysql_pymysql = "mysql+pymysql"
    # oracle
    oracle_cx = "oracle+cx_oracle"  # default
    # SQL Server
Example No. 24
def test_lru_basic():
    lru = LRU(max_size=1)
    repr(lru)  # sanity

    lru['hi'] = 0
    lru['bye'] = 1
    assert len(lru) == 1
    lru['bye']
    assert lru.get('hi') is None

    del lru['bye']
    assert 'bye' not in lru
    assert len(lru) == 0
    assert not lru

    try:
        lru.pop('bye')
    except KeyError:
        pass
    else:
        assert False

    default = object()
    assert lru.pop('bye', default) is default

    try:
        lru.popitem()
    except KeyError:
        pass
    else:
        assert False

    lru['another'] = 1
    assert lru.popitem() == ('another', 1)

    lru['yet_another'] = 2
    assert lru.pop('yet_another') == 2

    lru['yet_another'] = 3
    assert lru.pop('yet_another', default) == 3

    lru['yet_another'] = 4
    lru.clear()
    assert not lru

    lru['yet_another'] = 5
    second_lru = LRU(max_size=1)
    assert lru.copy() == lru

    second_lru['yet_another'] = 5
    assert second_lru == lru
    assert lru == second_lru

    lru.update(LRU(max_size=2, values=[('a', 1), ('b', 2)]))
    assert len(lru) == 1
    assert 'yet_another' not in lru

    lru.setdefault('x', 2)
    assert dict(lru) == {'x': 2}
    lru.setdefault('x', 3)
    assert dict(lru) == {'x': 2}

    assert lru != second_lru
    assert second_lru != lru
Example No. 25
    def __init__(self, study_path='study.json'):

        self.gepResult = None
        with open(study_path) as json_file:
            config = json.load(json_file, object_pairs_hook=OrderedDict)

        # save the config with the output
        filename = config['parameters']['output_path'] + study_path
        # make sure the output_path folder exists
        if not os.path.exists(config['parameters']['output_path']):
            os.makedirs(config['parameters']['output_path'])
        pretty = json.dumps(config, indent=2, separators=(',', ':'))
        with open(filename, 'w') as outfile:
            outfile.write(pretty)

        # print(json.dumps(config['ontology'], indent=2))
        self.parameters = config['parameters']
        super().__init__(self.parameters['seed'])
        self.reproduction_report = self.reproduction_report()
        self.blackboard = config['blackboard']
        self.ontology = config['ontology']
        self.registry = registry
        self.emergent_functions = OrderedDict()
        self.emergent_functions_arity = OrderedDict()
        self.emergent_functions_call_number = 0
        self.stochastic_pattern = re.compile(r'_stochastic\d+')
        self.prefix_pattern = re.compile(r'^f\d+_')

        pickle_config_path = config['parameters']['output_path'] + 'pickles/index.p'

        if pickle_config_path and os.path.exists(pickle_config_path):
            with open(pickle_config_path, 'rb') as cachehandle:
                pickle_config = pickle.load(cachehandle)
        else:
            pickle_config = OrderedDict([("count", 0),
                                         ("pickles", OrderedDict())])

        # 'count' holds the next number for the pickle file
        self.pickle_count = pickle_config['count']
        self.pickles = pickle_config['pickles']

        self.resultTuple = ()
        # self.cache = LRU(max_size = 512)
        self.cache = LRU()

        # Buyers gather offers by ranking sellers whose asking price overlaps
        # theirs. choose_partners is called several times so that every part of
        # the supply chain has a chance to be settled over multiple trades and
        # offer networks have a chance to be filled. In step, each agent can put
        # out a new message given the purchases made in the previous round.

        stage_list = ['step', 'gather_offers'] + ['choose_partners'] * 20

        self.schedule = StagedActivation(self,
                                         stage_list=stage_list,
                                         shuffle=True,
                                         shuffle_between_stages=True)

        # Create agents

        # first initial agents then random agents
        initial_blackboard = copy.deepcopy(self.blackboard)
        self.blackboard = []
        agent_count = 0
        for i, message in enumerate(initial_blackboard):
            if message['type'] in self.parameters['agent_parameters']:
                agent_parameters = self.parameters['agent_parameters'][
                    message['type']]
            else:
                agent_parameters = None
            for _ in range(self.parameters['blackboard_agents'][i]):
                a = globals()[message['type']](agent_count, self, message,
                                               agent_parameters)
                self.schedule.add(a)
                agent_count += 1

        for agent_type, n in self.parameters['random_agents'].items():
            if agent_type in self.parameters['agent_parameters']:
                agent_parameters = self.parameters['agent_parameters'][
                    agent_type]
            else:
                agent_parameters = None
            for i in range(n):
                a = globals()[agent_type](agent_count, self, None,
                                          agent_parameters)
                self.schedule.add(a)
                agent_count += 1

        print("Final line of snet sim init")
Example No. 26
def test_lru_add():
    cache = LRU(max_size=3)
    for i in range(4):
        cache[i] = i
    assert len(cache) == 3
    assert 0 not in cache
Example No. 27
from typing import Any
from typing import Dict

import asyncpg
import ujson
from boltons.cacheutils import LRU

from app.core.config import settings
from app.postgres.db.session import db

CACHE = LRU(max_size=settings.CACHE_ITEMS_MAX_SIZE)


class User(db.Model):
    __USER_CACHE_KEY = "USERS:ID:{user_id}"
    __tablename__ = "users"

    id = db.Column(db.BigInteger(), primary_key=True)
    nickname = db.Column(db.Unicode(), default="unnamed")

    @classmethod
    async def get_user_by_id(cls, user_id: int) -> "User":
        cache_key: str = cls.__get_cache_key(user_id)
        if cache_key not in CACHE:
            print(f"User {user_id} cache MISS.")
            fetched_user: User = await cls.get_or_404(user_id)
            CACHE[cache_key] = fetched_user
        return CACHE[cache_key]

    @classmethod
    def db_event(cls, con_ref: asyncpg.Connection, pid: int, channel: str,
Example No. 28
    def load(self):
        self._config = convert_config(self._file_config)
        self._word_cache = LRU(self.cache_size)
Example No. 30
class Cache(MutableMapping):
    """Cache that supports saving the items to files

    Set `cache_location` to save all newly set
    items to .npy files in cache_location.

    .. warning ::
        Items that have been set before setting `cache_location` won't
        be saved to files!

    """
    def __init__(self, cache_location=None, pickle_cache=False,
                 memory_cache_size=None):
        # remember the size so __setstate__ can rebuild an equivalent cache
        self.memory_cache_size = memory_cache_size
        if memory_cache_size:
            self._cache = LRU(max_size=memory_cache_size)
        else:
            self._cache = {}
        self.cache_location = cache_location
        self.pickle_cache = pickle_cache

    def clear(self):
        """Clear the memory cache without discarding its configuration"""
        self._cache.clear()

    def filename(self, key):
        return os.path.join(self.cache_location, '{}.npy'.format(key))

    def __getitem__(self, key):
        if key not in self._cache:
            if self.cache_location is not None:
                filename = self.filename(key)
                if os.path.exists(filename):
                    value = np.load(filename)
                    self._cache[key] = value
                else:
                    raise KeyError('Key {} neither in cache nor on disk'.format(key))
        return self._cache[key]

    def __setitem__(self, key, value):
        if not isinstance(key, str):
            raise TypeError('Only string keys are supported right now!')
        if self.cache_location is not None:
            if not os.path.exists(self.cache_location):
                os.makedirs(self.cache_location)
            filename = self.filename(key)
            np.save(filename, value)
        self._cache[key] = value

    def __delitem__(self, key):
        if self.cache_location is not None:
            filename = self.filename(key)
            if os.path.exists(filename):
                os.remove(filename)
        del self._cache[key]

    def __iter__(self):
        if self.cache_location is not None:
            filenames = iglob(self.filename('*'))
            keys = map(lambda f: os.path.splitext(os.path.basename(f))[0], filenames)
            new_keys = filterfalse(lambda key: key in self._cache.keys(), keys)
            return chain(iterkeys(self._cache), new_keys)
        else:
            return iterkeys(self._cache)

    def __len__(self):
        i = iter(self)
        return len(list(i))

    def __getstate__(self):
        # we don't want to save the cache
        state = dict(self.__dict__)
        if not self.pickle_cache:
            state.pop('_cache')
        return state

    def __setstate__(self, state):
        if '_cache' not in state:
            if state.get('memory_cache_size'):
                state['_cache'] = LRU(max_size=state['memory_cache_size'])
            else:
                state['_cache'] = {}
        self.__dict__ = dict(state)
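A hedged usage sketch for the Cache class above, assuming its elided imports (os, numpy as np, and friends) are in place: with cache_location set, every assignment is mirrored to a .npy file, so a fresh instance pointed at the same directory reads the value back from disk.

import tempfile
import numpy as np

cache_dir = tempfile.mkdtemp()
cache = Cache(cache_location=cache_dir)
cache['weights'] = np.arange(4)

fresh = Cache(cache_location=cache_dir)  # empty memory cache, same directory
print(fresh['weights'])                  # loaded from weights.npy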
Example No. 31
def test_lru_with_dupes():
    SIZE = 2
    lru = LRU(max_size=SIZE)
    for i in [0, 0, 1, 1, 2, 2]:
        lru[i] = i
        assert _test_linkage(lru._anchor, SIZE + 1), 'linked list invalid'
Example No. 32
def test_popitem_should_return_a_tuple():
    cache = LRU()
    cache['t'] = 42
    assert cache.popitem() == ('t', 42)