Ejemplo n.º 1
0
class Cache:
    """Chunked file cache backed by memcached.

    Splits a file (or an in-memory string payload) into fixed-size chunks
    and stores them under "<path>:<index>" keys, with an MD5 digest under
    "<path>:hash" used for integrity checking on retrieval.
    """

    client = None

    def __init__(self, chunk_size=1000000):
        # NOTE(review): assumes a memcached server on localhost:11211.
        self.client = Client(('localhost', 11211))
        self.__chunk_size = chunk_size
        # Bug fix: these were mutable class attributes, so every instance
        # shared (and clobbered) the same path/hash/chunk list.
        self.__path = ""
        self.__md5 = ""
        self.__chunks = []

    def readFile(self, path, data=None):
        """Chunk and cache *path* (or the given *data* string); return the chunks."""
        self.__path = path
        if data is not None:
            data = data.encode()
        self.__md5 = self.__getMd5(data)
        self.__getChunks(data)
        self.__cache()
        return self.__chunks

    def getFile(self, name):
        """Reassemble the cached file *name* and verify its MD5 checksum."""
        parts = []
        i = 0
        while True:
            part = self.client.get("{}:{}".format(name, i))
            if not part:
                break
            parts.append(part)
            i += 1

        # join() instead of repeated += (resolves the original TODO).
        content = bytearray(b"".join(parts))
        checksum = self.client.get("{}:hash".format(name))
        new_md5 = hashlib.md5(content).digest()
        assert checksum == new_md5, 'data corrupted'
        return content

    def __cache(self):
        """Write the hash and every chunk of the current file to memcached."""
        self.client.set("{}:hash".format(self.__path), self.__md5)
        for i, chunk in enumerate(self.__chunks):
            item_key = "{}:{}".format(self.__path, i)
            self.client.set(item_key, chunk)

    def __getMd5(self, data=None):
        """MD5 digest of *data*, or of the file at the current path."""
        if data is not None:
            return hashlib.md5(data).digest()
        # Bug fix: use a context manager so the handle is always closed.
        with open(self.__path, 'rb') as file:
            return hashlib.md5(file.read()).digest()

    def __getChunks(self, data=None):
        """Populate self.__chunks from *data* or from the file at the path."""
        self.__chunks = []
        if data is not None:
            # Bug fix: the original returned an MD5 digest here and never
            # chunked in-memory data, so readFile(path, data) cached nothing.
            for i in range(0, len(data), self.__chunk_size):
                self.__chunks.append(data[i:i + self.__chunk_size])
            return
        with open(self.__path, 'rb') as file:
            while True:
                piece = file.read(self.__chunk_size)
                if not piece:
                    break
                self.__chunks.append(piece)
Ejemplo n.º 2
0
def plugin(srv, item):
    """Build animated gifs from cached camera frames and save them locally.

    Target config supplies (channel_id, size_x, size_y, timespan,
    dir_to_save).  Frames are read from memcached keys (curno-timespan)..curno,
    resized, and written as both an original-size and a resized gif under a
    YYYYMM subdirectory.  Returns False on any configuration/processing error.

    NOTE(review): the comment below mentions uploading to a slack channel,
    but no upload code is visible in this block -- confirm upstream.
    """
    srv.logging.debug("*** MODULE=%s: service=%s, target=%s", __file__, item.service, item.target)

    if not HAVE_SLACK:
        srv.logging.error("slacker module missing")
        return False

    token = item.config.get("token")
    if token is None:
        srv.logging.error("No token found for slack")
        return False

    try:
        channel_id, size_x, size_y, timespan, dir_to_save = item.addrs
    except (TypeError, ValueError):
        # Bug fix: the bare except also swallowed SystemExit/KeyboardInterrupt.
        srv.logging.error("Incorrect target configuration")
        return False

    # make animated gif, save to local disk, upload to slack channel

    client = Client(("127.0.0.1", 11211))
    images_original = []
    images_resized = []
    size = size_x, size_y

    cur_month = datetime.datetime.now().strftime("%Y%m")
    cur_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")

    cur_dir = dir_to_save + "/" + cur_month
    if not os.path.exists(cur_dir):
        os.makedirs(cur_dir)

    cur_img_original = cur_dir + "/" + cur_time + ".gif"
    cur_img_resized = cur_dir + "/" + cur_time + "_s.gif"

    # make a gif file
    try:
        result = client.get("curno")
        if result:
            curno = int(result)
            for i in range((curno - timespan), curno):
                data = client.get(str(i))
                if data:
                    im = Image.open(StringIO.StringIO(data))
                    images_original.append(im)
                    imresized = ImageOps.fit(im, size, Image.ANTIALIAS)
                    images_resized.append(imresized)

            if len(images_original) > 0:
                images2gif.writeGif(cur_img_original, images_original, duration=0.2)
                images2gif.writeGif(cur_img_resized, images_resized, duration=0.2)

    except Exception as e:
        # Bug fix: "except Exception, e" is Python 2 only syntax.
        srv.logging.warning("Cannot make a gif: %s" % (str(e)))
        return False
Ejemplo n.º 3
0
Archivo: app.py Proyecto: Puhnatiy/e6
 def get_from_cache(n):
     """Return the cached integer stored under key *n*, or None.

     None is returned on a cache miss or when the cached bytes do not
     parse as an int.
     """
     client = Client(('my_memc', 11211))
     # Bug fix: the original called client.get() twice (two network
     # round-trips, and racy if the key expires between them).
     value = client.get(str(n))
     if value is None:
         return None
     try:
         return int(value.decode("utf-8"))
     except ValueError:
         return None
Ejemplo n.º 4
0
 def memcclient(self, host):
     """Yield a memcached client while holding a global 'lock' key.

     Bug fix: the original get/sleep/set sequence was not atomic, so two
     processes could both observe the lock absent and proceed.  add()
     stores only when the key does not already exist, which makes the
     acquisition atomic on the server side.
     """
     c = Client(host)
     while not c.add('lock', 'true', noreply=False):
         sleep(.25)
     try:
         yield c
     finally:
         c.delete('lock')
Ejemplo n.º 5
0
class Cache():
    """Thin memcached wrapper that stores and retrieves integer values."""

    def __init__(self, hostname=cache_server_host, port=cache_server_port):
        self.server = Client((hostname, port))

    def set(self, key, value, expiry=0):
        """Store *value* under str(key); expiry of 0 means never expire."""
        self.server.set(str(key), value, expiry)

    def get(self, key):
        """Return the cached value for *key* as int, or None on a miss."""
        # Bug fix: the original issued two get() calls (double round-trip,
        # racy if the key expired between them).
        value = self.server.get(str(key))
        if value:
            return int(value.decode("utf-8"))

    def delete(self, key):
        """Remove *key* from the cache."""
        self.server.delete(str(key))
Ejemplo n.º 6
0
class Reader(BaseReader):
    """Settings reader that fetches values from a memcached server.

    Uses the pymemcache library for the actual protocol work.
    """

    _default_conf = {
        'host': 'localhost',
        'port': 11211
    }

    def __init__(self, conf):
        super(Reader, self).__init__(conf)
        server = (self.conf['host'], self.conf['port'])
        self.client = Client(server)

    def _get(self, key):
        """Fetch *key*, decoding byte payloads to text."""
        raw = self.client.get(key)
        if isinstance(raw, six.binary_type):
            return raw.decode('utf-8')
        return raw

    def _set(self, key, value):
        """Store *key* synchronously (wait for the server's reply)."""
        self.client.set(key, value, noreply=False)
def startClient(id):
    """Worker thread: hammer the local cache server with 500 set/get pairs.

    Tracks the number of concurrently running workers in the shared
    active_threads / max_active_threads globals (guarded by `lock`).
    """
    global active_threads
    global max_active_threads

    try:
        client = Client(('localhost', 8888))
        print('Client ' + str(id) + ' connected')
        start = time.time()

        # Bug fix: acquire/release was not exception-safe; `with` releases
        # the lock even if the counter update raises.
        with lock:
            active_threads += 1
            max_active_threads = max(max_active_threads, active_threads)

        for _ in range(500):
            key, value = randomString(5), randomString(5)
            client.set(key, value)
            client.get(key)  # round-trip read; result intentionally unused

        with lock:
            active_threads -= 1

        end = time.time()
        # Bug fix: removed the duplicate `end = time.time()` after the print.
        print('Client ' + str(id) + ' exiting. Time spent: ' +
              str(end - start))
    except BaseException as e:
        # Broad on purpose: this is a thread entry point and must not leak.
        print('Exception' + str(e))
        return
Ejemplo n.º 8
0
def main():
    """Exercise basic set/get/get_multi/delete operations against localhost."""
    client = Client(('localhost', 11211))

    # Seed a handful of keys with the same payload.
    for key in ('some_key', 'some_key1', 'some_key2', 'some_key3', 'some_key4'):
        client.set(key, 'some_value')

    result = client.get_multi(['some_key2', 'some_key3'])
    print(result)
    print(client.delete('some_key'))
    print(client.get('some_key'))

    # Store a batch of assorted demo values.
    extra = {
        'new': 'unknown value',
        'test1': 'tesing unknow reasonable text',
        'test2': 'although i know it will be hash value',
        'test3': 'show me the value',
        'test4': 'whatever it takes',
        'test5': 'something at works',
    }
    for key, value in extra.items():
        client.set(key, value)
    return True
Ejemplo n.º 9
0
class MyMemcache(object):
    """Memcached wrapper that transparently JSON-serializes values.

    Plain strings are stored as-is (flag 1); everything else is stored as
    a JSON document (flag 2).
    """

    def __init__(self, ip='localhost', port=11211):
        # Serializer: flag 1 = raw str, flag 2 = JSON payload.
        def json_serializer(key, value):
            if type(value) == str:
                return value, 1
            return json.dumps(value), 2

        # Deserializer (python3): reverse of the flags above.
        def json_deserializer(key, value, flags):
            if flags == 1:
                return value.decode('utf-8')
            if flags == 2:
                return json.loads(value.decode('utf-8'))
            raise Exception("Unknown serialization format")

        self.ip = ip
        self.port = port
        self.client = Client((ip, port),
                             serializer=json_serializer,
                             deserializer=json_deserializer,
                             key_prefix='',
                             encoding='utf8',
                             allow_unicode_keys=True)

    # set_task_result
    @catch_exception
    def set_value(self, key, value):
        """Store *value* under *key* with a ~1.2 day expiry."""
        # set(key, value, expire=0, noreply=None, flags=None)
        expire_second = int(60 * 60 * 24 * 1.2)  # expire must be an int
        self.client.set(key=key, value=value, expire=expire_second)

    # get_task_result
    @catch_exception
    def get_value(self, key, default=''):
        """Fetch *key*, returning *default* on a cache miss."""
        result = self.client.get(key=key, default=default)
        debug_p('[get_value]', key, result)
        return result

    def client_close(self):
        """Close the connection, ignoring any error (best-effort)."""
        try:
            self.client.close()
        except Exception:
            # Bug fix: the bound-but-unused `as e` variable is dropped.
            pass
Ejemplo n.º 10
0
def test_incr_decr(client_class, host, port, socket_module):
    # Integration test for incr/decr against a live memcached server.
    # NOTE(review): `client_class` is unused here -- presumably the fixture
    # signature is shared across the test module; confirm.
    client = Client((host, port), socket_module=socket_module)
    client.flush_all()

    # incr on a missing key is a no-op and returns None.
    result = client.incr(b'key', 1, noreply=False)
    assert result is None

    result = client.set(b'key', b'0', noreply=False)
    assert result is True
    result = client.incr(b'key', 1, noreply=False)
    assert result == 1

    def _bad_int():
        client.incr(b'key', b'foobar')

    # A non-numeric delta is rejected by the server as a client error.
    with pytest.raises(MemcacheClientError):
        _bad_int()

    # decr on a missing key also returns None.
    result = client.decr(b'key1', 1, noreply=False)
    assert result is None

    # 1 - 1 == 0; the stored value round-trips as bytes.
    result = client.decr(b'key', 1, noreply=False)
    assert result == 0
    result = client.get(b'key')
    assert result == b'0'
Ejemplo n.º 11
0
def UpdateClassifier():
    """Retrain the discussion-tag classifier and cache the model in memcached.

    Extracts E19 features from all reviewed discussions, rebalances with
    SMOTETomek, selects the top 20% features by chi2, fits a logistic
    regression, then pickles (clf, feature index, selector) into the
    'model' key and round-trips it to verify the pickle loads.
    """
    print("Update Classifier")

    clf = LogisticRegression(
        random_state=0, solver='lbfgs', multi_class='multinomial')
    smt = SMOTETomek(random_state=42)

    discussions = list(Discussion.objects.filter(reviewed=True))
    globalFeaturesIndex = ef.GetGlobalFeaturesIndex(
        discussions, list(range(0, len(discussions))), ef.E19)

    # Feature matrix / label vector, one row per reviewed discussion.
    X = [ef.ExtractFeatureFromCorpus(globalFeaturesIndex, d.content, ef.E19)
         for d in discussions]
    y = [d.tag.tag_id for d in discussions]

    X, y = smt.fit_sample(X, y)
    selector = GenericUnivariateSelect(chi2, 'percentile', param=20)
    X = selector.fit_transform(X, y)

    try:
        clf.fit(X, y)
        print("fit done")
        client = Client(('localhost', 11211))
        model = (clf, globalFeaturesIndex, selector)
        client.set('model', pickle.dumps(model))
        # Round-trip to verify the cached pickle actually loads.
        model_in_bytes = client.get('model')
        model_from_cache = pickle.loads(model_in_bytes)
        print(len(model_from_cache))
    except Exception:
        # Bug fix: the bare except also swallowed SystemExit/KeyboardInterrupt.
        print("clf failed to cache...")
Ejemplo n.º 12
0
def test_incr_decr(client_class, host, port, socket_module):
    # Integration test for incr/decr against a live memcached server.
    # NOTE(review): `client_class` is unused -- likely a shared fixture
    # signature across the test module; confirm.
    client = Client((host, port), socket_module=socket_module)
    client.flush_all()

    # incr on a key that does not exist returns None.
    result = client.incr(b'key', 1, noreply=False)
    assert result is None

    result = client.set(b'key', b'0', noreply=False)
    assert result is True
    result = client.incr(b'key', 1, noreply=False)
    assert result == 1

    def _bad_int():
        client.incr(b'key', b'foobar')

    # The server rejects non-numeric increment deltas.
    with pytest.raises(MemcacheClientError):
        _bad_int()

    # decr on a missing key returns None as well.
    result = client.decr(b'key1', 1, noreply=False)
    assert result is None

    # Decrement back to zero and check the stored bytes.
    result = client.decr(b'key', 1, noreply=False)
    assert result == 0
    result = client.get(b'key')
    assert result == b'0'
Ejemplo n.º 13
0
class MemcachedWrapper(object):
    '''Class to allow readonly access to underlying memcached connection'''

    def __init__(self, counter, host, port=11211, socket_connect_timeout=1):
        if not host:
            raise ConfigurationError('Memcached wrapper improperly configured. Valid memcached host is required!')

        self.__con = Client((host, port))
        self._counter = counter('')

    def __del__(self):
        # Bug fix: if __init__ raised before __con was assigned, __del__
        # itself raised AttributeError during interpreter cleanup.
        try:
            self.__con.quit()
        except AttributeError:
            pass

    def get(self, key):
        """Return the raw cached value for *key*."""
        return self.__con.get(key)

    def json(self, key):
        """Return the cached value for *key* decoded as JSON."""
        return pyjson.loads(self.get(key))

    def stats(self, extra_keys=()):
        """Snapshot server stats: rate counters, value keys, plus *extra_keys*.

        Bug fix: the mutable default argument [] is replaced with a tuple
        (it is only used for membership tests).
        """
        data = self.__con.stats()
        ret = {}
        for key in data:
            if key in COUNTER_KEYS:
                ret['{}_per_sec'.format(key.replace('total_', ''))] = \
                    round(self._counter.key(key).per_second(data.get(key, 0)), 2)
            elif key in VALUE_KEYS:
                ret[key] = data[key]
            elif key in extra_keys:
                ret[key] = data[key]
        return ret
Ejemplo n.º 14
0
class MemcacheCli(object):
    """Best-effort memcached client: every operation swallows errors and
    returns a failure sentinel (False for writes, None for reads)."""

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.client = Client((self.host, self.port))

    def set(self, key, value, expire):
        """Store *key*; returns False on any error."""
        try:
            return self.client.set(key, value, expire)
        except Exception:
            # Bug fix: dropped the bound-but-unused `as e` variables below.
            return False

    def get(self, key):
        """Fetch *key*; returns None on a miss or any error."""
        try:
            return self.client.get(key, default=None)
        except Exception:
            return None

    def mset(self, values, expire):
        """Store a mapping of keys; returns False on any error."""
        try:
            return self.client.set_many(values, expire)
        except Exception:
            return False

    def mget(self, keys):
        """Fetch many keys; returns None on any error."""
        try:
            return self.client.get_many(keys)
        except Exception:
            return None
class MemcachedWrapper():
    """Memcached client wrapper configured from a 'host:port' location string."""

    MEMCACHED_CONNECT_TIMEOUT = 5  # 5 seconds
    MEMCACHED_TIMEOUT = 5  # 5 seconds

    def __init__(self, memcached_location):
        """Parse *memcached_location* ('host:port') and connect.

        Raises ValueError when the location is not exactly 'host:port'.
        """
        memcached_location_portions = memcached_location.split(":")
        if len(memcached_location_portions) != 2:
            raise ValueError(
                f"Found incorrectly formatted parameter memcached location: {memcached_location}"
            )

        memcached_host = memcached_location_portions[0]
        memcached_port = int(memcached_location_portions[1])

        self.memcached_client = Client(
            server=(memcached_host, memcached_port),
            serializer=serde.python_memcache_serializer,
            deserializer=serde.python_memcache_deserializer,
            connect_timeout=MemcachedWrapper.MEMCACHED_CONNECT_TIMEOUT,
            timeout=MemcachedWrapper.MEMCACHED_TIMEOUT)

    def get(self, key):
        """Return the cached (deserialized) value for *key*."""
        return self.memcached_client.get(key)

    def set(self, key, value, timeout=None):
        """Store *key* with a TTL of *timeout* seconds (None/0 = no expiry).

        Bug fix: the original passed timeout (possibly None) straight
        through as the expire argument; pymemcache requires an int.
        """
        self.memcached_client.set(key, value, expire=timeout or 0)

    def delete(self, key):
        """Remove *key* from the cache."""
        self.memcached_client.delete(key)
Ejemplo n.º 16
0
class Client:
    """Memcached client wrapper that JSON-serializes non-string values.

    Flag 1 marks values stored as plain strings, flag 2 marks JSON
    documents; the deserializer reverses the mapping.
    """

    def __init__(self, config):
        self.config = config
        self.memcache_client = PymemcacheClient(server=(self.config["ip"], self.config["port"]), serializer=self.json_serializer, deserializer=self.json_deserializer, connect_timeout=self.config["connect_timeout"], timeout=self.config["timeout"])

    def json_serializer(self, key, value):
        """Pass strings through (flag 1); JSON-encode everything else (flag 2)."""
        if type(value) == str:
            return value, 1
        return json.dumps(value), 2

    def json_deserializer(self, key, value, flags):
        """Reverse json_serializer based on the stored flag."""
        if flags == 1:
            # Bug fix: memcached returns bytes, but flag 1 values were
            # stored as str -- decode so the round-trip yields str again.
            return value.decode('utf-8')
        if flags == 2:
            return json.loads(value)
        raise Exception("Unknown serialization format")

    def write(self, key, message):
        """Store *message* under *key* with the configured expiry/noreply."""
        logger.info("Writing to cache: {}".format(key))
        self.memcache_client.set(key,
                                 message,
                                 expire=self.config["key_expiration"],
                                 noreply=self.config["noreply_flag"])

    def read(self, key):
        """Fetch the (deserialized) value stored under *key*."""
        logger.info("Reading from cache: {}".format(key))
        return self.memcache_client.get(key)
Ejemplo n.º 17
0
class NoSQLCache:
    """Singleton read-through memcached cache in front of a SqliteDb."""

    __instance = None
    timeout = 5  # seconds a populated entry lives in memcached

    @staticmethod
    def inst():
        """Return the lazily created singleton instance."""
        if NoSQLCache.__instance is None:
            # Bug fix: identity comparison (is None) instead of == None.
            NoSQLCache.__instance = NoSQLCache(SqliteDb.inst())
        return NoSQLCache.__instance

    def __init__(self, sql_db_object):
        self.client = Client(('localhost', 11211))
        self.sql_db = sql_db_object

    def get(self, key):
        """Return the value for *key*, populating the cache from SQL on a miss."""
        # Memcached keys may not contain spaces; substitute '$'.
        # (str.replace does the same job as re.sub(r' ', '$', key).)
        key_corrector = key.replace(' ', '$')
        value = self.client.get(key_corrector)
        if value:
            # SECURITY: eval() of cached bytes executes arbitrary code if the
            # cache is attacker-writable -- prefer json or ast.literal_eval.
            return eval(value.decode('utf-8'))
        value = self.sql_db.get(key)
        self.set(key_corrector, value, self.timeout)
        return value

    def set(self, key, value, timeout):
        """Store *value* under *key* for *timeout* seconds."""
        self.client.set(key, value, expire=timeout)
Ejemplo n.º 18
0
class MemcachedWrapper(object):
    '''Class to allow readonly access to underlying memcached connection'''
    def __init__(self, counter, host, port=11211, socket_connect_timeout=1):
        if not host:
            raise ConfigurationError(
                'Memcached wrapper improperly configured. Valid memcached host is required!'
            )

        self.__con = Client((host, port))
        self._counter = counter('')

    def __del__(self):
        # Bug fix: guard against __init__ having raised before __con existed,
        # which made __del__ raise AttributeError at interpreter shutdown.
        try:
            self.__con.quit()
        except AttributeError:
            pass

    def get(self, key):
        """Return the raw cached value for *key*."""
        return self.__con.get(key)

    def json(self, key):
        """Return the cached value for *key* decoded as JSON."""
        return pyjson.loads(self.get(key))

    def stats(self, extra_keys=()):
        """Snapshot server stats; *extra_keys* selects additional raw keys.

        Bug fix: mutable default [] replaced with a tuple (membership only).
        """
        data = self.__con.stats()
        ret = {}
        for key in data:
            if key in COUNTER_KEYS:
                ret['{}_per_sec'.format(key.replace('total_', ''))] = \
                    round(self._counter.key(key).per_second(data.get(key, 0)), 2)
            elif key in VALUE_KEYS:
                ret[key] = data[key]
            elif key in extra_keys:
                ret[key] = data[key]
        return ret
Ejemplo n.º 19
0
def main():
	"""Self-test for line intersection, version comparison, and the cache."""
	line1 = Line(1, 5)
	line2 = Line(2, 6)
	line3 = Line(6, 8)
	line4 = Line(7, 9)

	assert do_intersect(line1, line2) == True
	assert do_intersect(line1, line3) == False

	assert do_intersect(line2, line1) == True
	assert do_intersect(line3, line1) == False

	assert do_intersect(line3, line4) == True

	assert compare_versions("1.1", "1.2") == -1
	assert compare_versions("1.2.1", "1.2") == 1
	assert compare_versions("1.2.1", "1.2.1a") == -1
	assert compare_versions("1.2.1b", "1.2.1a") == 1
	assert compare_versions("1.2.1", "1.21") == -1
	assert compare_versions("2.1.1", "1.2.1") == 1
	assert compare_versions("1.2.1", "1.2.1") == 0
	assert compare_versions("1.", "1") == 0
	assert compare_versions("a1", "1a") == 1
	assert compare_versions("1.2.1", "1.19.0") == -1

	# Bug fix: pymemcache's Client takes a single (host, port) tuple; the
	# original passed the port as a second positional argument.
	# NOTE(review): without a deserializer get() returns bytes, so the
	# equality below may need b'some_value' -- confirm the client config.
	client = Client(('127.0.0.1', 11211))
	client.set('some_key', 'some_value', expire = 30)
	assert client.get('some_key') == 'some_value'
	print("OK")
def execqueryWithMem(name, c1, c2):
    """Run an Earthquake query (250x, for timing) with a memcached row count.

    The row count is keyed by a SHA-224 of the query text; on a cache hit
    the database is skipped entirely.  Returns an HTML fragment with the
    elapsed time and row count, or an error string.
    """
    try:
        mysql_conn = mysql.connector.connect(host=host, user=dbusername, password=dbpassword, database=dbname,
                                             port=port)
        mc = Client(('cloudassgn.hw0lsb.0001.use2.cache.amazonaws.com', 11211))

        # SECURITY: query is built by string concatenation from caller input;
        # use parameterized queries if c1/c2 can come from untrusted users.
        query = 'select ' + name + ' from Earthquake where ' + c1 + ' and ' + c2
        # Bug fix: hashlib requires bytes in Python 3.
        hashVal = hashlib.sha224(query.encode()).hexdigest()

        starttime = int(round(time.time() * 1000))

        data = mc.get(hashVal)
        count = 0

        if not data:
            # Deliberately executes the query 250 times to get a measurable
            # timing difference versus the cached path.
            for i in range(0, 250):
                cursor = mysql_conn.cursor()
                cursor.execute(query)
                row = cursor.fetchall()
                count = cursor.rowcount
                cursor.close()
            mc.set(hashVal, count)

        endtime = int(round(time.time() * 1000))
        totalexectime = endtime - starttime

        mysql_conn.close()

        resultStr = '<div style="font-size:14px;margin-top: 30px;"><div> Last Name : Manakan </div><div> Last 4 digit ID : 6131 </div><div> Class Section : 10:30 AM </div></div>'
        resultStr = resultStr + '<br> Time taken : ' + str(totalexectime) + ' msecs'
        resultStr = resultStr + '<br> Rows effected : ' + str(count)
        return resultStr
    except Exception as e:
        # Py3 fix: "print e" is Python 2 syntax.
        print(e)
        return 'Error ' + str(e)
Ejemplo n.º 21
0
class Memcached(object):
    """Cache engine backed by a memcached server.

    Very light on memory, but a little slower than local database caching
    and much slower than in-process memory caching; the trade-off is that
    the cache can be shared between multiple users.
    """

    LRU = LeastRecentlyUsed = 0

    def __init__(self, url='127.0.0.1', mode=LRU, ttl=None):
        """Connect to the memcached server at *url*.

        Parameters:
            url (str): Memcached server to connect to.
            mode (int): Purge policy; accepted only for signature parity
                with the other engines (memcached is always LRU).
            ttl (int): Lifetime of cached entries; None or 0 = infinite.
        """
        try:
            from pymemcache.client.base import Client
        except ImportError:
            from memcache import Client
            self.client = Client([url], socket_timeout=3)
        else:
            self.client = Client(url, timeout=3, connect_timeout=3)
        self.ttl = ttl

        # Pickled payloads may not be compatible across Python versions,
        # so namespace every key by "major.minor." to keep caches separate.
        self._key_prefix = '{}.{}.'.format(sys.version_info.major,
                                           sys.version_info.minor)

    def get(self, key):
        """Return the cached value for *key*.

        Raises CacheNotFound when the entry is missing or expired.
        """
        raw = self.client.get(self._key_prefix + key)
        if raw is None:
            raise exceptions.CacheNotFound(key)
        return _decode(raw)

    def put(self, key, value, ttl=None):
        """Store *value* under *key*, replacing any existing entry.

        Falls back to the engine-wide ttl when none is given.
        """
        expire = self.ttl if ttl is None else ttl
        self.client.set(self._key_prefix + key,
                        _encode(value),
                        expire or 0,
                        noreply=True)

    def delete(self, key):
        """Delete an item of cache if it exists."""
        return self.client.delete(self._key_prefix + key, noreply=False)
Ejemplo n.º 22
0
def get_memcached(k):
    """Fetch key *k* from memcached and decode it as JSON.

    Returns False on any failure (connection error, missing key, invalid
    JSON), printing the exception for diagnostics.
    """
    try:
        client = Client(('172.20.10.7', 11211))
        # Bug fix: json.load() expects a file-like object; client.get()
        # returns bytes, so json.loads() is the correct call.
        return json.loads(client.get(k))
    except Exception as e:
        print(e)
        return False
Ejemplo n.º 23
0
class MemcacheTool(object):
    """Minimal convenience wrapper around a single pymemcache connection."""

    def __init__(self, host, port):
        server = (host, port)
        self.client = Client(server)

    def set(self, key, value):
        """Store *value* under *key* (no expiry)."""
        self.client.set(key, value)

    def get(self, key):
        """Return the raw cached value for *key*."""
        return self.client.get(key)
Ejemplo n.º 24
0
def test_memcached(mc_host, mc_port):
    """Round-trip a random key/value pair to verify memcached connectivity.

    Raises RuntimeError when the value does not come back intact.
    """
    from pymemcache.client.base import Client
    mc_client = Client((mc_host, mc_port))
    my_id = str(uuid4())
    my_val = str(uuid4())
    mc_client.set(my_id, my_val, 1000)
    fetched = mc_client.get(my_id)
    # Robustness fix: get() returns None on a miss; the original would have
    # raised AttributeError on .decode() instead of the intended error.
    if fetched is None or fetched.decode('utf8') != my_val:
        raise RuntimeError('Unable to communicate with Memcached')
Ejemplo n.º 25
0
class MemClient():
    """JSON-serializing wrapper around a local memcached instance."""

    def __init__(self):
        self.client = Client(('localhost', 11211))

    def get(self, key):
        """Return the JSON-decoded value for *key*, or None on a miss."""
        value = self.client.get(key)
        if value is None:
            # Bug fix: json.loads(None) raised TypeError on a cache miss.
            return None
        return json.loads(value)

    def set(self, key, value):
        """Store *value* as JSON with a 5-hour expiry."""
        self.client.set(key, json.dumps(value), expire=18000)
Ejemplo n.º 26
0
class MyLib:
    """Stores files in memcached as numbered ~1 MB chunks.

    A file "name" occupies keys "name" (chunk count) and "name_0".."name_N"
    (the chunk payloads).  Files over 50 MB are rejected.
    """

    def __init__(self):
        MEMCACHED_SERVER = os.getenv('MEMCACHED_SERVER', '127.0.0.1')
        # Bug fix: environment variables are strings; convert to int so the
        # port matches the type of the 11211 fallback.
        MEMCACHED_PORT = int(os.getenv('MEMCACHED_PORT', 11211))
        self.client = Client((MEMCACHED_SERVER, MEMCACHED_PORT))
        self.chunk_size = 1000000.0  # float so get_counts divides exactly

    def get_counts(self, file_size):
        """Number of chunks needed for a file of *file_size* bytes."""
        return int(math.ceil(file_size / self.chunk_size))

    def set_file(self, name, obj_file):
        """Store *obj_file* under *name*; reject duplicates and files >50 MB."""
        file_size = len(obj_file.read())
        if self.client.get("%s%s%s" % (name, "_", str(0))) is not None:
            return "File already exists."

        if file_size > 50000000:
            return "Oversized, rejected."

        obj_file.seek(0)
        counts = self.get_counts(file_size)
        self.client.set(name, counts)
        for i in range(counts):
            data = obj_file.read(int(self.chunk_size))
            self.client.set("%s%s%s" % (name, "_", str(i)), data)
        return "Done."

    def get_file(self, name):
        """Reassemble *name* from the cache, write it to disk, return the name."""
        if self.client.get("%s%s%s" % (name, "_", str(0))) is None:
            return "File doest not exists."
        try:
            counts = int(self.client.get(name))
            # join() instead of quadratic bytes concatenation.
            parts = [self.client.get("%s%s%s" % (name, "_", str(i)))
                     for i in range(counts)]
            # Bug fix: context manager guarantees the file handle is closed.
            with open(name, 'wb') as out:
                out.write(b''.join(parts))
            return name
        except Exception:
            # Best-effort: a missing chunk or I/O error yields None,
            # matching the original bare-except behavior (but narrower).
            return None
Ejemplo n.º 27
0
 def delMemcache(self, configkey, key, **kwargs):
     """Delete *key* from every memcached server configured under *configkey*.

     Logs the outcome of the last server touched; errors are logged, not
     raised.
     """
     # Bug fix: `result` was unbound (UnboundLocalError in the log line)
     # when the configuration yielded no clients.
     result = None
     try:
         clients = self.getMemcacheConfig(configkey)
         for client_conf in clients:
             client = Client(client_conf)
             result = client.get(key=key)
             if result is not None:
                 result = client.delete(key=key)
         logging.info("删除memcache的key={0},操作结果={1}".format(key, result))
     except Exception as e:
         logging.error(e)
Ejemplo n.º 28
0
def fib_handler(num):
    """Return fib(num) as a string, memoizing results in memcached."""
    # Bug fix: the port was the string '11211'; pymemcache expects an int
    # port in the (host, port) server tuple.
    client = Client((MEMCACHED_HOST, 11211),
                    serializer=json_serializer,
                    deserializer=json_deserializer)
    fib_result = client.get(str(num))
    if fib_result is None:
        result = fib(num)
        client.set(str(num), result)
    else:
        result = fib_result
    print('fib_result=', fib_result)
    return str(result)
Ejemplo n.º 29
0
def fib_handler(k):
    """Return fib(k) as a string, memoizing results in memcached."""
    # Bug fix: the port was the string '11211'; pymemcache expects an int
    # port in the (host, port) server tuple.
    client = Client((MEMCACHED_HOST, 11211)
        , serializer=json_serializer
        , deserializer=json_deserializer)  # caching client
    fib_result = client.get(str(k))  # Fibonacci number from the cache
    if fib_result is None:
        result = fib(k)
        client.set(str(k), result)  # cache the Fibonacci number
    else:
        result = fib_result
    print('fib_result=', fib_result)
    return str(result)
Ejemplo n.º 30
0
def get_test(request):
    """Return the cached 'INDEX_ONLINE' payload as a JSON response."""
    # Connect over the memcached unix domain socket.
    client = Client(('/opt/www/memcached/memcached.sock'),
                    serializer=json_serializer,
                    deserializer=json_deserializer)

    payload = client.get('INDEX_ONLINE')
    if payload is None:
        # Cache miss: report a not-found status instead of failing.
        payload = {'status': 'NotFound'}

    print(payload)
    return JsonResponse(payload)
Ejemplo n.º 31
0
class MemcachedCache(object):
    """ a memcached cache which stores the states as pickled objects """

    def __init__(self, master):
        # A fresh object created via copy() is always a non-master clone
        # that shares the master's connection parameters.
        self.is_master = False
        self.wait_on_insert = master.wait_on_insert
        self.memc_params = master.memc_params
        self.val_ttl = master.val_ttl

    def __del__(self):
        self.teardown()

    def setup(self):
        """Open the memcached connection; returns self for chaining."""
        self.client = Client((self.memc_params['host'], self.memc_params['port']),
                             default_noreply=not self.wait_on_insert)
        return self

    def teardown(self):
        """No-op hook for symmetry with setup(); returns self."""
        return self

    def fetch(self, routingkey, key):
        """Return the unpickled object stored under routingkey_key, or None."""
        cache_key = str(routingkey) + '_' + str(key)
        obj_pickle = self.client.get(cache_key)
        if obj_pickle is None:
            return None
        try:
            obj = pickle.loads(obj_pickle)
        except Exception:
            # Bug fix: narrowed the bare except (it also caught SystemExit).
            # logger.debug("Error decoding pickle")
            obj = None
        return obj

    def insert(self, routingkey, key, obj):
        """Pickle *obj* and store it under routingkey_key.

        With wait_on_insert the set() is synchronous; otherwise it is fired
        from a background thread.
        """
        # Consistency fix: fetch() builds its key with str(key); the original
        # insert concatenated the raw key and broke for non-string keys.
        cache_key = str(routingkey) + '_' + str(key)
        obj_pickle = pickle.dumps(obj)
        if self.wait_on_insert:
            self.client.set(key=cache_key, value=obj_pickle, expire=self.val_ttl)
        else:
            threading.Thread(target=self.client.set,
                             kwargs={'key': cache_key, 'value': obj_pickle, 'expire': self.val_ttl},
                             daemon=False).start()

    def get_client(self):
        return self.__copy__()

    def __copy__(self):
        # Only the master may hand out clones.
        if not self.is_master:
            raise Exception('not allowed; already a slave')
        return MemcachedCache(self)

    def copy(self):
        return self.__copy__()
Ejemplo n.º 32
0
def set_api_throttle():
	"""Increment and return the daily request counter kept in memcached."""

	client = Client((CACHE_CLIENT_ENDPOINT, CACHE_PORT))
	cached = client.get('daily_requests')

	# NOTE(review): this read-modify-write is not atomic, so concurrent
	# callers can lose increments -- confirm whether client.incr applies.
	count = int(cached) + 1 if cached else 1

	client.set('daily_requests', count, expire=CACHE_EXPIRE)

	return count
Ejemplo n.º 33
0
def get_api_throttle():
	"""Return 1 when the daily request counter has reached API_LIMIT, else 0."""

	# memcached holds the shared counter written by set_api_throttle().
	client = Client((CACHE_CLIENT_ENDPOINT, CACHE_PORT))
	cached = client.get('daily_requests')

	if cached and int(cached) >= API_LIMIT:
		return 1
	return 0
Ejemplo n.º 34
0
class MemcachedCache(CachualCache):
    """A cache backed by `Memcached <https://memcached.org/>`_.

    The same caveats apply to keys and values as for Redis: store only
    strings (using the packing/unpacking functions).  See the Keys and
    Values documentation for :class:`pymemcache.client.base.Client`.

    :type host: string
    :param host: The Memcached host to use for the cache.

    :type port: integer
    :param port: The port to use for the Memcached server.

    :type kwargs: dict
    :param kwargs: Any additional args to pass to the :class:`CachualCache`
                   constructor.
    """

    def __init__(self, host='localhost', port=11211, **kwargs):
        super(MemcachedCache, self).__init__(**kwargs)
        self.client = MemcachedClient((host, port))

    def get(self, key):
        """Look up *key* in the cache.

        :type key: string
        :param key: The cache key to get the value for.

        :returns: The cached value, or None on a cache miss.
        """
        return self.client.get(key)

    def put(self, key, value, ttl=None):
        """Store *value* under *key*.

        For constraints on keys and values, see
        :class:`pymemcache.client.base.Client`.

        :type key: string
        :param key: The cache key to use for the value.

        :param value: The value to store in the cache.

        :type ttl: integer
        :param ttl: Seconds until the key expires; None means no expiry.
        """
        # A missing ttl maps to memcached's "never expire" value of 0.
        self.client.set(key, value, expire=ttl or 0)
Ejemplo n.º 35
0
def test_serialization_deserialization(host, port, socket_module):
    # Integration test: a dict survives a set/get round-trip when the
    # client is configured with matching JSON serializer/deserializer hooks.

    # Serializer: JSON-encode everything, tagging the payload with flag 1.
    def _ser(key, value):
        return json.dumps(value).encode('ascii'), 1

    # Deserializer: decode flag-1 payloads; pass anything else through raw.
    def _des(key, value, flags):
        if flags == 1:
            return json.loads(value.decode('ascii'))
        return value

    client = Client((host, port), serializer=_ser, deserializer=_des,
                    socket_module=socket_module)
    client.flush_all()

    value = {'a': 'b', 'c': ['d']}
    client.set(b'key', value)
    result = client.get(b'key')
    assert result == value
Ejemplo n.º 36
0
    # check current input source no
    if handleCommand("47"):
        time.sleep(4)
        m, n = getscreenimage()
        return m, n

#------------------------
#logging.basicConfig(
#    format='# %(levelname)s: %(message)s',
#    level=logging.DEBUG,
#)

client = Client(('127.0.0.1', 11211))

try:
    result = client.get('appletv')
    if result:
        # The 'appletv' flag is still cached: rate-limit and bail out.
        sys.exit("time is not passed")
    else:
        print("appletv is on")
        # get current channel and change input to HDMI2
        # tv should be on
        m, n = getCHandHDMI2()
        if m != n:
            changeinout(m, n)
        else:
            handleCommand("20")
        # Remember the state for 60 seconds to throttle re-triggers.
        client.set('appletv', 'true', 60)

except Exception as e:
    # Py3 fix: "except Exception, e" and "print x" are Python 2 syntax.
    print(e.__doc__)
import sys
import StringIO
from PIL import Image, ImageOps
import images2gif 
from pymemcache.client.base import Client

# http://stackoverflow.com/questions/19149643/error-in-images2gif-py-with-globalpalette
# change palettes.append( getheader(im)[1] ) to palettes.append(im.palette.getdata()[1]) in images2gif.py

import io  # local import: io.BytesIO replaces Python 2's StringIO for binary data

client = Client(('127.0.0.1', 11211))
images = []
size = 256, 192
timespace = 10

try:
    # 'curno' is the index of the most recently cached frame; frames are
    # stored under stringified integer keys.
    result = client.get('curno')
    if result:
        curno = int(result)
        # Collect the last `timespace` frames, skipping any missing ones.
        for i in range((curno - timespace), curno):
            data = client.get(str(i))
            if data:
                im = Image.open(io.BytesIO(data))
                # NOTE: Image.ANTIALIAS is removed in Pillow >= 10; use
                # Image.LANCZOS there.
                imresized = ImageOps.fit(im, size, Image.ANTIALIAS)
                images.append(imresized)

        filename = "my_gif.GIF"
        images2gif.writeGif(filename, images, duration=0.2)

except Exception as e:
    # Python 3 syntax; e.message no longer exists, print the exception itself.
    print(e.__doc__)
    print(e)
Ejemplo n.º 38
0
def gao_memcache():
    """Smoke-test memcache connectivity using the app's configured host/port.

    Stores a short-lived key (10 s expiry), reads it back, and prints the
    result.
    """
    client = Client((app.config['MEMCACHE_HOST'], app.config['MEMCACHE_PORT']))
    client.set('some_key', 'some_value', expire=10)
    result = client.get('some_key')
    # print() call form: `print result` is a syntax error under Python 3.
    print(result)
Ejemplo n.º 39
0
class TwitterLimits(object):
    """
    Checks against either the twitter API or memcached twitter API results for
    twitter API requests limits
    """
    def __init__(self, timeline_rate_reserve=5, multi_proc_logger=None):
        """
        The reserve arguments are how many requests to hold back on to leave
        some form of buffer in place with regards to API limits

        :param timeline_rate_reserve: number of timeline requests to keep in
            reserve so the rate-limit window is never fully drained.
        :param multi_proc_logger: optional logger; when ``None`` a
            ``utils.MultiProcessCheckingLogger`` wrapping this module's
            logger is created.
        """

        if multi_proc_logger:
            self.logger = multi_proc_logger
        else:
            self.logger = utils.MultiProcessCheckingLogger(module_logger)

        # Shared memcache holds rate-limit figures (presumably published by
        # other processes), letting us avoid extra twitter API calls.
        self.memcacheClient = MemCacheClient(
            (interns_settings.memcache_host, interns_settings.memcache_port)
        )

        self.timeline_rate_reserve = timeline_rate_reserve

        # Defaults come from settings; update_limits() may overwrite them
        # from memcache or from the twitter API itself.
        self.tl_total_reqs = interns_settings.twitter_timeline_requests
        self.tl_reqs_left = interns_settings.twitter_timeline_req_left
        self.tl_reqs_reset_time = interns_settings.twitter_timeline_reset_time

        # Prime the limit values at construction time (may touch memcache
        # and/or the twitter API).
        self.update_limits()

    def update_limits(self):
        """
        Update the limits associated with the twitter api

        First attempts to check memcache and then checks directly with the
        twitter API
        """
        self.logger.debug(__name__, 'Updating twitter limits')
        # Cached values arrive as strings; a missing key (falsy get result)
        # leaves the current attribute value untouched.
        cache_total_tl_reqs = self.memcacheClient.get('timeline_limit')
        if cache_total_tl_reqs:
            self.tl_total_reqs = int(cache_total_tl_reqs)

        cache_tl_reqs_left = self.memcacheClient.get('timeline_remaining')
        if cache_tl_reqs_left:
            self.tl_reqs_left = int(cache_tl_reqs_left)

        cache_tl_reqs_reset_time = self.memcacheClient.get(
            'timeline_reset'
        )
        if cache_tl_reqs_reset_time:
            self.tl_reqs_reset_time = int(cache_tl_reqs_reset_time)
            # NOTE(review): the reset time is treated as seconds relative to
            # `epoch_time` (defined elsewhere in this module) — confirm it is
            # a UTC epoch timestamp as the twitter API documents.
            utc_now = datetime.utcnow()
            utc_secs = (utc_now - epoch_time).total_seconds()
            secs_until_reset = self.tl_reqs_reset_time - utc_secs
            if secs_until_reset <= 0:
                # Force getting rates from twitter
                self.tl_reqs_reset_time = None

        # `is not None` lets a legitimate value of zero remaining requests
        # count as valid; the other two fields need only be truthy.
        update_values_valid = (
            self.tl_total_reqs and
            (self.tl_reqs_left is not None) and
            self.tl_reqs_reset_time
        )

        if not update_values_valid:
            # Fall back to asking the twitter API for the current limits.
            self.logger.debug(__name__, 'Making twitter API limits request')
            update_vals = (
                twitterClient.get_user_timeline_rate_limit()
            )
            self.tl_total_reqs = update_vals.limit
            self.tl_reqs_left = update_vals.remaining
            self.tl_reqs_reset_time = update_vals.reset
        else:
            self.logger.debug(
                __name__,
                'Using twitter API limits from memcache'
            )
        self.logger.debug(
            __name__,
            'Total twitter timeline requests allowed is: {0}'.format(
                self.tl_total_reqs
            )
        )
        self.logger.debug(
            __name__,
            'Number of twitter timeline requests left is: {0}'.format(
                self.tl_reqs_left
            )
        )
        self.logger.debug(
            __name__,
            'Twitter timeline request reset time is: {0}'.format(
                self.tl_reqs_reset_time
            )
        )

    def get_sleep_between_jobs(self):
        """
        Calculate the sleep time between jobs as to not run into the twitter
        API limits

        :returns: seconds to sleep so that the buffered remaining requests
            are spread evenly across the time left in the current window.
        """
        self.update_limits()
        self.logger.debug(__name__, 'Updating sleep time between jobs')
        utc_now = datetime.utcnow()
        utc_secs = (utc_now - epoch_time).total_seconds()
        self.logger.debug(__name__, 'UTC in seconds {0}'.format(utc_secs))
        secs_until_reset = self.tl_reqs_reset_time - utc_secs
        self.logger.debug(
            __name__,
            'Seconds until API reset {0}'.format(secs_until_reset)
        )
        # Hold back `timeline_rate_reserve` requests as a safety buffer.
        buffered_tl_reqs_left = (
            self.tl_reqs_left - self.timeline_rate_reserve
        )
        self.logger.debug(
            __name__,
            'Buffered timeline requests left {0}'.format(
                buffered_tl_reqs_left
            )
        )
        if buffered_tl_reqs_left <= 0:
            # Reserve exhausted: sleep out the remainder of the window.
            sleep_time = secs_until_reset
        else:
            # Spread the remaining requests evenly over the window.
            sleep_time = secs_until_reset / buffered_tl_reqs_left
        self.logger.debug(__name__, 'Sleep time {0}'.format(sleep_time))
        return sleep_time