Example #1
 def lru(self, model):
     ormcache = model._ormcache
     try:
         d = ormcache[self.method]
     except KeyError:
         d = ormcache[self.method] = lru.LRU(self.size)
     return d
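
Across these examples, lru.LRU comes from the lru-dict package: a dict-like mapping with a fixed capacity that evicts the least recently used entry on overflow. A minimal sketch of the behaviour the snippets rely on:

import lru

cache = lru.LRU(2)        # capacity: 2 entries
cache["a"] = 1
cache["b"] = 2
_ = cache["a"]            # reading "a" marks it most recently used
cache["c"] = 3            # over capacity: evicts "b", the least recently used
assert "a" in cache and "b" not in cache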
Example #2
async def test_sessions_are_purged_on_max_number_of_sessions():
    aiohttp_client._sessions = lru.LRU(1,
                                       callback=aiohttp_client.session_purged)

    resp = await aiohttp_client.get('https://o.onna.io')
    assert resp.status == 200

    sess = aiohttp_client.get_session('https://o.onna.io')
    resp = await aiohttp_client.get('https://www.google.com')
    assert resp.status == 200

    assert sess.closed

    # restore original
    aiohttp_client._sessions = lru.LRU(aiohttp_client._max_number_sessions,
                                       callback=aiohttp_client.session_purged)
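
The callback argument used above is invoked by lru-dict with the evicted key and value whenever an entry falls out of the cache; a standalone sketch of that contract:

import lru

def on_evict(key, value):
    # lru-dict calls this with the (key, value) pair being evicted
    print("evicted:", key)

sessions = lru.LRU(1, callback=on_evict)
sessions["first"] = "session-1"
sessions["second"] = "session-2"   # capacity 1: "first" is evicted, on_evict fires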
Example #3
 def __init__(
     self,
     model,
     maximizable=False,
     tokenizer=None,
     use_cache=True,
     query_budget=float("inf"),
     model_batch_size=32,
     model_cache_size=2**18,
 ):
     validators.validate_model_goal_function_compatibility(
         self.__class__, model.__class__)
     self.model = model
     self.maximizable = maximizable
     self.tokenizer = tokenizer
     if not self.tokenizer:
         if hasattr(self.model, "tokenizer"):
             self.tokenizer = self.model.tokenizer
         else:
             raise NameError(
                 "Cannot instantiate goal function without tokenizer")
     if not hasattr(self.tokenizer, "encode"):
         raise TypeError("Tokenizer must contain `encode()` method")
     self.use_cache = use_cache
     self.query_budget = query_budget
     self.model_batch_size = model_batch_size
     if self.use_cache:
         self._call_model_cache = lru.LRU(model_cache_size)
     else:
         self._call_model_cache = None
Example #4
    def __init__(self):
        self._log = logging.getLogger("com.fussyware.edproxy")
        
        self._db_version = 1

        edproxy_db_dir = edutils.get_database_dir()
        self._edproxy_db_filename = os.path.join(edproxy_db_dir, "edsm.db")
        
        self._first_time_install = False
        self._dbconn = None
        
        self._update_event = threading.Event()
        self._background_update_running = False
        
        self._lru = lru.LRU(25)

        if not os.path.exists(edproxy_db_dir):
            os.makedirs(edproxy_db_dir)

        if not os.path.exists(self._edproxy_db_filename):
            self.__do_create_db(self._edproxy_db_filename)
        else:
            self._dbconn = sqlite3.connect(self._edproxy_db_filename, check_same_thread=False)
            
        self.__db_upgrade(self.__get_db_version())
        self._dbconn.close()
        self._dbconn = None
Example #5
    def new(cls, max_size, impl='auto'):
        """
        Creates an LRU dictionary instance, but uses the efficient c-backend
        if that is available.

        Args:
            max_size (int):
            impl (str, default='auto'): which implementation to use

        Example:

        """
        try:
            import lru
        except Exception as ex:
            lru = None
            import warnings
            warnings.warn('Optional lru-dict c-implementation is unavailable.'
                          ' `pip install lru-dict` to suppress this warning.'
                          ' Falling back to pure python. ex={!r}'.format(ex))

        if impl == 'auto':
            impl = 'py' if lru is None else 'c'

        if impl == 'py':
            self = cls(max_size)
        elif impl == 'c':
            self = lru.LRU(max_size)
        else:
            raise KeyError(impl)
        return self
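
A self-contained sketch of the auto-selection pattern above, using a trivial pure-python fallback (FallbackDict is illustrative, not the original class):

import warnings

class FallbackDict(dict):
    """Stand-in pure-python mapping; capacity is tracked but not enforced."""

    def __init__(self, max_size):
        super().__init__()
        self.max_size = max_size

    @classmethod
    def new(cls, max_size, impl='auto'):
        try:
            import lru
        except ImportError:
            lru = None
            warnings.warn('lru-dict unavailable; falling back to pure python')
        if impl == 'auto':
            impl = 'py' if lru is None else 'c'
        return cls(max_size) if impl == 'py' else lru.LRU(max_size)

cache = FallbackDict.new(128)   # lru.LRU(128) when the C backend is installed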
Example #6
    def __init__(
        self,
        tagger_type="nltk",
        tagset="universal",
        allow_verb_noun_swap=True,
        compare_against_original=True,
        language_nltk="eng",
        language_stanza="en",
    ):
        super().__init__(compare_against_original)
        self.tagger_type = tagger_type
        self.tagset = tagset
        self.allow_verb_noun_swap = allow_verb_noun_swap
        self.language_nltk = language_nltk
        self.language_stanza = language_stanza

        self._pos_tag_cache = lru.LRU(2 ** 14)
        if tagger_type == "flair":
            if tagset == "universal":
                self._flair_pos_tagger = SequenceTagger.load("upos-fast")
            else:
                self._flair_pos_tagger = SequenceTagger.load("pos-fast")

        if tagger_type == "stanza":
            self._stanza_pos_tagger = stanza.Pipeline(
                lang=self.language_stanza,
                processors="tokenize, pos",
                tokenize_pretokenized=True,
            )
Example #7
 def __init__(self,
              chain_db: ChainDatabaseAPI,
              header_cache: HeaderCache,
              epoch_length: int) -> None:
     self._snapshots = lru.LRU(IN_MEMORY_SNAPSHOTS)
     self._header_cache = header_cache
     self._chain_db = chain_db
     self._epoch_length = epoch_length
Example #8
 def __init__(self, model, use_cache=True):
     validators.validate_model_goal_function_compatibility(
         self.__class__, model.__class__)
     self.model = model
     self.use_cache = use_cache
     self.num_queries = 0
     if self.use_cache:
         self._call_model_cache = lru.LRU(utils.config('MODEL_CACHE_SIZE'))
     else:
         self._call_model_cache = None
Example #9
 def lru(self, self2):
     try:
         ormcache = getattr(self2, '_ormcache')
     except AttributeError:
         ormcache = self2._ormcache = {}
     try:
         d = ormcache[self.method]
     except KeyError:
         d = ormcache[self.method] = lru.LRU(self.size)
     return d
Example #10
    def __init__(
        self,
        graph_db: graph_tuple_database.Database,
        cache_size: Optional[int] = None,
    ):
        self.graph_db = graph_db

        # Maintain a mapping from IR IDs to encoded sequences to amortize the
        # cost of encoding.
        cache_size = cache_size or FLAGS.graph2seq_cache_entries
        self.ir_id_to_encoded: Dict[int, np.ndarray] = lru.LRU(cache_size)
Example #11
    def __init__(
        self,
        goal_function=None,
        constraints=[],
        transformation=None,
        search_method=None,
        constraint_cache_size=2 ** 20,
    ):
        """Initialize an attack object.

        Attacks can be run multiple times.
        """
        self.goal_function = goal_function
        if not self.goal_function:
            raise NameError(
                "Cannot instantiate attack without self.goal_function for predictions"
            )
        self.search_method = search_method
        if not self.search_method:
            raise NameError("Cannot instantiate attack without search method")
        self.transformation = transformation
        if not self.transformation:
            raise NameError("Cannot instantiate attack without transformation")
        self.is_black_box = getattr(transformation, "is_black_box", True)

        if not self.search_method.check_transformation_compatibility(
            self.transformation
        ):
            raise ValueError(
                f"SearchMethod {self.search_method} incompatible with transformation {self.transformation}"
            )

        self.constraints = []
        self.pre_transformation_constraints = []
        for constraint in constraints:
            if isinstance(
                constraint, textattack.constraints.PreTransformationConstraint,
            ):
                self.pre_transformation_constraints.append(constraint)
            else:
                self.constraints.append(constraint)

        self.constraint_cache_size = constraint_cache_size
        self.constraints_cache = lru.LRU(constraint_cache_size)

        # Give search method access to functions for getting transformations and evaluating them
        self.search_method.get_transformations = self.get_transformations
        # The search method only needs access to the first argument. The second is only used
        # by the attack class when checking whether to skip the sample
        self.search_method.get_goal_results = lambda attacked_text_list: self.goal_function.get_results(
            attacked_text_list
        )
        self.search_method.filter_transformations = self.filter_transformations
Example #12
 def __init__(self, _host, _port, _file):
     """ We set class params, plus we bing socket (interface and port)
     """
     try:
         self.lines_lru = lru.LRU(200)
         self.lines_lru_lock = threading.Lock()
         self.file = _file
         self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         self.sock.bind((_host, _port))
         print("Binding to {}:{}".format(_host, _port))
     except Exception as e:
         print(e)
Example #13
    def __init__(self,
                 tagger_type="nltk",
                 tagset="universal",
                 allow_verb_noun_swap=True):
        self.tagger_type = tagger_type
        self.tagset = tagset
        self.allow_verb_noun_swap = allow_verb_noun_swap

        self._pos_tag_cache = lru.LRU(2**14)
        if tagger_type == "flair":
            if tagset == "universal":
                self._flair_pos_tagger = SequenceTagger.load("upos-fast")
            else:
                self._flair_pos_tagger = SequenceTagger.load("pos-fast")
Example #14
    def __init__(self, bot, config, database, redis):
        self.bot = bot
        self.database = database
        self.database.hold()
        self.redis = redis
        self.redis.hold()

        self._recent_pins = lru.LRU(128)

        self.batch_presence_updates = []
        self.batch_name_updates = []
        self._batch_name_curr_updates = []
        self._batch_presence_curr_updates = []
        self.batch_presence_task = utils.create_task(self.batch_presence())
        self.batch_name_task = utils.create_task(self.batch_name())
Example #15
    def __init__(
        self,
        goal_function=None,
        constraints=[],
        transformation=None,
        search_method=None,
        constraint_cache_size=2 ** 18,
    ):
        """ Initialize an attack object. Attacks can be run multiple times. """
        self.goal_function = goal_function
        if not self.goal_function:
            raise NameError(
                "Cannot instantiate attack without self.goal_function for predictions"
            )
        self.search_method = search_method
        if not self.search_method:
            raise NameError("Cannot instantiate attack without search method")
        self.transformation = transformation
        if not self.transformation:
            raise NameError("Cannot instantiate attack without transformation")
        self.is_black_box = getattr(transformation, "is_black_box", True)

        if not self.search_method.check_transformation_compatibility(
            self.transformation
        ):
            raise ValueError(
                f"SearchMethod {self.search_method} incompatible with transformation {self.transformation}"
            )

        self.constraints = []
        self.pre_transformation_constraints = []
        for constraint in constraints:
            if isinstance(
                constraint,
                textattack.constraints.pre_transformation.PreTransformationConstraint,
            ):
                self.pre_transformation_constraints.append(constraint)
            else:
                self.constraints.append(constraint)

        self.constraint_cache_size = constraint_cache_size
        self.constraints_cache = lru.LRU(constraint_cache_size)

        # Give search method access to functions for getting transformations and evaluating them
        self.search_method.get_transformations = self.get_transformations
        self.search_method.get_goal_results = self.goal_function.get_results
        self.search_method.filter_transformations = self.filter_transformations
Example #16
    def __init__(
        self,
        tagger_type="nltk",
        tagset="universal",
        allow_verb_noun_swap=True,
        compare_against_original=True,
    ):
        super().__init__(compare_against_original)
        self.tagger_type = tagger_type
        self.tagset = tagset
        self.allow_verb_noun_swap = allow_verb_noun_swap

        self._pos_tag_cache = lru.LRU(2**14)
        if tagger_type == "flair":
            if tagset == "universal":
                self._flair_pos_tagger = load_flair_upos_fast()
            else:
                self._flair_pos_tagger = SequenceTagger.load("pos-fast")
Example #17
    def __init__(
        self,
        max_diff,
        model_name="textattack/bert-base-uncased-CoLA",
        compare_against_original=True,
    ):
        super().__init__(compare_against_original)
        if not isinstance(max_diff, float) and not isinstance(max_diff, int):
            raise TypeError("max_diff must be a float or int")
        if max_diff < 0.0:
            raise ValueError("max_diff must be a value greater or equal to than 0.0")

        self.max_diff = max_diff
        self.model_name = model_name
        self._reference_score_cache = lru.LRU(2 ** 10)
        model = AutoModelForSequenceClassification.from_pretrained(model_name)
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = HuggingFaceModelWrapper(model, tokenizer)
Example #18
 def __init__(
         self,
         model,
         maximizable=False,
         use_cache=True,
         query_budget=float("inf"),
         model_cache_size=2**20,
 ):
     validators.validate_model_goal_function_compatibility(
         self.__class__, model.__class__)
     self.model = model
     self.maximizable = maximizable
     self.use_cache = use_cache
     self.query_budget = query_budget
     if self.use_cache:
         self._call_model_cache = lru.LRU(model_cache_size)
     else:
         self._call_model_cache = None
Example #19
    def __init__(self):
        lm_folder = utils.download_if_needed(GoogLMHelper.CACHE_PATH)
        self.PBTXT_PATH = os.path.join(lm_folder, "graph-2016-09-10-gpu.pbtxt")
        self.CKPT_PATH = os.path.join(lm_folder, "ckpt-*")
        self.VOCAB_PATH = os.path.join(lm_folder, "vocab-2016-09-10.txt")

        self.BATCH_SIZE = 1
        self.NUM_TIMESTEPS = 1
        self.MAX_WORD_LEN = 50

        self.vocab = lm_data_utils.CharsVocabulary(self.VOCAB_PATH, self.MAX_WORD_LEN)
        with tf.device("/gpu:1"):
            self.graph = tf.Graph()
            self.sess = tf.compat.v1.Session(graph=self.graph)
        with self.graph.as_default():
            self.t = lm_utils.LoadModel(
                self.sess, self.graph, self.PBTXT_PATH, self.CKPT_PATH
            )

        self.lm_cache = lru.LRU(2 ** 18)
Example #20
 def __init__(self,
              goal_function,
              transformation,
              constraints=[],
              is_black_box=True):
     """ Initialize an attack object. Attacks can be run multiple times.
     """
     self.goal_function = goal_function
     if not self.goal_function:
         raise NameError(
             'Cannot instantiate attack without self.goal_function for predictions'
         )
     if not hasattr(self, 'tokenizer'):
         if hasattr(self.goal_function.model, 'tokenizer'):
             self.tokenizer = self.goal_function.model.tokenizer
         else:
             raise NameError('Cannot instantiate attack without tokenizer')
     self.transformation = transformation
     self.constraints = constraints
     self.is_black_box = is_black_box
     self.constraints_cache = lru.LRU(utils.config('CONSTRAINT_CACHE_SIZE'))
Example #21
 def __init__(
     self, model, tokenizer=None, use_cache=True, query_budget=float("inf")
 ):
     validators.validate_model_goal_function_compatibility(
         self.__class__, model.__class__
     )
     self.model = model
     self.tokenizer = tokenizer
     if not self.tokenizer:
         if hasattr(self.model, "tokenizer"):
             self.tokenizer = self.model.tokenizer
         else:
             raise NameError("Cannot instantiate goal function without tokenizer")
     if not hasattr(self.tokenizer, "encode"):
         raise TypeError("Tokenizer must contain `encode()` method")
     self.use_cache = use_cache
     self.num_queries = 0
     self.query_budget = query_budget
     if self.use_cache:
         self._call_model_cache = lru.LRU(utils.config("MODEL_CACHE_SIZE"))
     else:
         self._call_model_cache = None
Example #22
    def __init__(self,
                 goal_function=None,
                 constraints=[],
                 transformation=None,
                 search_method=None):
        """ Initialize an attack object. Attacks can be run multiple times. """
        self.search_method = search_method
        self.goal_function = goal_function
        if not self.goal_function:
            raise NameError(
                'Cannot instantiate attack without self.goal_function for predictions'
            )
        if not hasattr(self, 'tokenizer'):
            if hasattr(self.goal_function.model, 'tokenizer'):
                self.tokenizer = self.goal_function.model.tokenizer
            else:
                raise NameError('Cannot instantiate attack without tokenizer')
        self.transformation = transformation
        self.is_black_box = getattr(transformation, 'is_black_box', True)

        if not self.search_method.check_transformation_compatibility(
                self.transformation):
            raise ValueError(
                f'SearchMethod {self.search_method} incompatible with transformation {self.transformation}'
            )

        self.constraints = []
        self.pre_transformation_constraints = []
        for constraint in constraints:
            if isinstance(constraint, PreTransformationConstraint):
                self.pre_transformation_constraints.append(constraint)
            else:
                self.constraints.append(constraint)

        self.constraints_cache = lru.LRU(utils.config('CONSTRAINT_CACHE_SIZE'))

        # Give search method access to functions for getting transformations and evaluating them
        self.search_method.get_transformations = self.get_transformations
        self.search_method.get_goal_results = self.goal_function.get_results
Example #23
    def check(self, host_port, key):
        """
        判断key是否存在于lru中,不存在则加入

        Parameters:
            host_port - host + "_" + str(port) 组成的字符串,指定查找的LRU
            key - 在LRU中查找的key
        
        Raises:
            KeyError - key不存在于LRU中
        """
        try:
            target_lru = self.lru_dict[host_port]
        except KeyError:
            self.lru_dict[host_port] = lru.LRU(self.max_size)
            self.lru_dict[host_port][key] = None
            raise KeyError

        try:
            target_lru[key]  # touch the key to refresh its recency
        except KeyError:
            target_lru[key] = None
            raise KeyError
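
check() signals a miss by raising KeyError after recording the key, so callers treat the exception as a first sighting. A self-contained sketch of the pattern (the names _seen and check here are illustrative, not from the original class):

import lru

_seen = {}  # host_port -> per-endpoint LRU, mirroring self.lru_dict above

def check(host_port, key, max_size=1024):
    target = _seen.setdefault(host_port, lru.LRU(max_size))
    if key in target:
        target[key]          # hit: touch the key to refresh recency
        return
    target[key] = None
    raise KeyError           # miss: key is now recorded, caller is notified

try:
    check("example.com_80", "/index.html")
except KeyError:
    print("first sighting; key recorded")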
Example #24
import requests
import lru

from utils.caching import generate_cache_key


def _remove_session(key, session):
    session.close()


_session_cache = lru.LRU(8, callback=_remove_session)


def _get_session(*args, **kwargs):
    cache_key = generate_cache_key((args, kwargs))
    if cache_key not in _session_cache:
        _session_cache[cache_key] = requests.Session()
    return _session_cache[cache_key]


def make_post_request(endpoint_uri, data, *args, **kwargs):
    kwargs.setdefault('timeout', 10)
    session = _get_session(endpoint_uri)
    response = session.post(endpoint_uri, data=data, *args, **kwargs)
    response.raise_for_status()

    return response.content
Example #25
 def __init__(self, extractor: IFeatureExtractor, size: int):
     self._extractor = extractor
     self._cache = lru.LRU(size)
Example #26
 def __setstate__(self, state: Tuple[IFeatureExtractor, int]):
     extractor, size = state
     self._extractor = extractor
     self._cache = lru.LRU(size)
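
A matching __getstate__ is implied but not shown; a hedged sketch, assuming the lru.LRU cache itself is not picklable and only the extractor and capacity need to survive:

 def __getstate__(self) -> Tuple[IFeatureExtractor, int]:
     # Hypothetical counterpart: persist the extractor and the cache capacity,
     # letting __setstate__ rebuild an empty LRU of the same size.
     return self._extractor, self._cache.get_size()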
Example #27
    def __init__(
        self,
        goal_function: GoalFunction,
        constraints: List[Union[Constraint, PreTransformationConstraint]],
        transformation: Transformation,
        search_method: SearchMethod,
        transformation_cache_size=2**15,
        constraint_cache_size=2**15,
    ):
        """Initialize an attack object.

        Attacks can be run multiple times.
        """
        assert isinstance(
            goal_function, GoalFunction
        ), f"`goal_function` must be of type `textattack.goal_functions.GoalFunction`, but got type `{type(goal_function)}`."
        assert isinstance(
            constraints, list
        ), "`constraints` must be a list of `textattack.constraints.Constraint` or `textattack.constraints.PreTransformationConstraint`."
        for c in constraints:
            assert isinstance(
                c, (Constraint, PreTransformationConstraint)
            ), "`constraints` must be a list of `textattack.constraints.Constraint` or `textattack.constraints.PreTransformationConstraint`."
        assert isinstance(
            transformation, Transformation
        ), f"`transformation` must be of type `textattack.transformations.Transformation`, but got type `{type(transformation)}`."
        assert isinstance(
            search_method, SearchMethod
        ), f"`search_method` must be of type `textattack.search_methods.SearchMethod`, but got type `{type(search_method)}`."

        self.goal_function = goal_function
        self.search_method = search_method
        self.transformation = transformation
        self.is_black_box = (getattr(transformation, "is_black_box", True)
                             and search_method.is_black_box)

        if not self.search_method.check_transformation_compatibility(
                self.transformation):
            raise ValueError(
                f"SearchMethod {self.search_method} incompatible with transformation {self.transformation}"
            )

        self.constraints = []
        self.pre_transformation_constraints = []
        for constraint in constraints:
            if isinstance(
                    constraint,
                    textattack.constraints.PreTransformationConstraint,
            ):
                self.pre_transformation_constraints.append(constraint)
            else:
                self.constraints.append(constraint)

        # Check if we can use transformation cache for our transformation.
        if not self.transformation.deterministic:
            self.use_transformation_cache = False
        elif isinstance(self.transformation, CompositeTransformation):
            self.use_transformation_cache = True
            for t in self.transformation.transformations:
                if not t.deterministic:
                    self.use_transformation_cache = False
                    break
        else:
            self.use_transformation_cache = True
        self.transformation_cache_size = transformation_cache_size
        self.transformation_cache = lru.LRU(transformation_cache_size)

        self.constraint_cache_size = constraint_cache_size
        self.constraints_cache = lru.LRU(constraint_cache_size)

        # Give search method access to functions for getting transformations and evaluating them
        self.search_method.get_transformations = self.get_transformations
        # Give search method access to self.goal_function for model query count, etc.
        self.search_method.goal_function = self.goal_function
        # The search method only needs access to the first argument. The second is only used
        # by the attack class when checking whether to skip the sample
        self.search_method.get_goal_results = self.goal_function.get_results

        self.search_method.filter_transformations = self.filter_transformations
Example #28
 def __setstate__(self, state):
     self.__dict__ = state
     self.transformation_cache = lru.LRU(self.transformation_cache_size)
     self.constraints_cache = lru.LRU(self.constraint_cache_size)
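
The __setstate__ above implies a __getstate__ that strips the caches before pickling; a hedged sketch:

 def __getstate__(self):
     # Hypothetical counterpart: copy the instance dict and drop the LRU
     # caches, which __setstate__ rebuilds from the stored sizes.
     state = self.__dict__.copy()
     state["transformation_cache"] = None
     state["constraints_cache"] = None
     return state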
Example #29
import lru
import requests

from web3.utils.caching import generate_cache_key

_session_cache = lru.LRU(8)


def _get_session(*args, **kwargs):
    cache_key = generate_cache_key((args, kwargs))
    if cache_key not in _session_cache:
        _session_cache[cache_key] = requests.Session()
    return _session_cache[cache_key]


def make_post_request(endpoint_uri, data, *args, **kwargs):
    kwargs.setdefault('timeout', 10)
    session = _get_session(endpoint_uri)
    response = session.post(endpoint_uri, data=data, *args, **kwargs)
    response.raise_for_status()

    return response.content
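
Unlike Example #24, this cache passes no eviction callback, so a requests.Session that falls out of the cache is garbage-collected rather than explicitly closed. A hedged variant that closes evicted sessions:

def _close_evicted(key, session):
    session.close()   # release the evicted Session's pooled connections

_session_cache = lru.LRU(8, callback=_close_evicted)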
Example #30
import gevent
from gevent import (  # noqa: F401
    getcurrent,
    subprocess,
    socket,
    threading,
)

import lru

from geventhttpclient import HTTPClient

from web3.utils.six import urlparse


_client_cache = lru.LRU(8)


sleep = gevent.sleep
spawn = gevent.spawn
GreenletThread = gevent.Greenlet


class ClassicThread(object):
    def __init__(self, threadid):
        self.ident = threadid


def get_current_thread():
    threadid = id(getcurrent())
    return ClassicThread(threadid)