Example No. 1
 def configure(self, env):
     print('!!!! redis_slave configure start !!!!')
     import params
     params.redis_slave = True
     env.set_params(params)
     redis()
     print('!!!! redis_slave configure end !!!!')
Example No. 2
def sendPartition(iter):
    # ConnectionPool is a static, lazily initialized pool of connections
    #connection = ConnectionPool.get_connection()
    #my_server = redis.StrictRedis(host ='ec2-52-37-251-31.us-west-2.compute.amazonaws.com', port = 6379)
    #print "connection = ", connection
    # Build one client per partition from the shared pool (redis.Redis, not redis.redis)
    my_server = redis.Redis(connection_pool=POOL)
    for record in iter:
        my_server.set("Word:" + record, 1)
Example No. 3
def save_matrix_redis(name, data):
    client = redis(host='127.0.0.1', port=6379, db=0)
    name = str(name)
    dtype = str(data.dtype)
    shape = str(data.shape)
    key = '{0}|{1}|{2}'.format(name, dtype, shape)
    client.set(key, data.ravel().tostring())
    return key
Example No. 4
def clear_matrix_redis():
    for server in redis_addresses:
        host = server[0]
        port = server[1]
        client = redis(host=host, port=port, db=0)
        try:
            client.flushdb()
        except Exception as error:
            continue
Example No. 5
def load_matrix_redis(key):
    data = None
    for server in redis_addresses:
        host = server[0]
        port = server[1]
        client = redis(host=host, port=port, db=0)
        try:
            entry = client.get(key)
            if entry is not None:
                dtype_str = key.split('|')[1]
                shape_str = key.split('|')[2]
                shape = []
                for s in shape_str[1:-1].split(','):
                    shape.append(int(s))
                data = np.fromstring(entry, dtype=dtype_str).reshape(tuple(shape))
                break
        except Exception as error:
            continue
    return data
Example No. 6
def load_from_redis(key):
    """Retrieve a dataset from redis

    Retrieve a cached dataset that was stored in redis
    with the input key.

    Parameters
    ----------
    key : str
        The key of the dataset that was stored in redis.

    Returns
    -------
    M : numpy.ndarray
        The retrieved dataset in array format.
    """

    try:
        from redis import StrictRedis as redis
    except ImportError:
        print(
            "Error! Redis does not appear to be installed in your system.",
            file=sys.stderr,
        )
        exit(1)

    database = redis(host="localhost", port=6379, db=0)

    # redis-py's get() returns None rather than raising when the key is missing
    M = database.get(key)
    if M is None:
        print(
            "Error! No dataset was found with the supplied key.",
            file=sys.stderr,
        )
        exit(1)

    array_dtype, n, m = key.split("|")[1].split("#")

    M = np.fromstring(M, dtype=array_dtype).reshape(int(n), int(m))
    return M
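A hedged usage sketch for load_from_redis; the key string below is purely illustrative and would normally come from an earlier load_into_redis call:

# Hypothetical key of the form "<timestamp>|<dtype>#<rows>#<cols>"
key = "1600000000|float64#1000#10"
M = load_from_redis(key)
print(M.shape, M.dtype)   # expected: (1000, 10) float64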
Example No. 7
def load_into_redis(filename):
    """Load a file into redis

    Load a matrix file and store it in memory with redis.
    Useful to pass around huge datasets from script to
    script and load them only once.

    Inspired from https://gist.github.com/alexland/ce02d6ae5c8b63413843

    Parameters
    ----------
    filename : str, file or pathlib.Path
        The file of the matrix to load.

    Returns
    -------
    key : str
        The key of the dataset needed to retrieve it from redis.
    """
    try:
        from redis import StrictRedis as redis
        import time
    except ImportError:
        print(
            "Error! Redis does not appear to be installed in your system.",
            file=sys.stderr,
        )
        exit(1)

    M = np.genfromtxt(filename, dtype=None)
    array_dtype = str(M.dtype)
    m, n = M.shape
    M = M.ravel().tostring()
    database = redis(host="localhost", port=6379, db=0)
    key = "{0}|{1}#{2}#{3}".format(int(time.time()), array_dtype, m, n)

    database.set(key, M)

    return key
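A hedged round-trip sketch combining the two helpers above; the file name is a placeholder and a local redis server on the default port is assumed:

import numpy as np

# Placeholder input: any whitespace-delimited numeric matrix readable by np.genfromtxt
np.savetxt("matrix.txt", np.random.randn(100, 5))
key = load_into_redis("matrix.txt")
M = load_from_redis(key)
print(key, M.shape)   # expected shape: (100, 5)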
Example No. 8
def connect_db():
    return redis(db=REDIS_DB, host=REDIS_HOST, port=PORT)
Example No. 9
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Author :braior
# File   :redisdel.py
# Time   :2018-12-20 15:30

import redis

# Choose which database to connect to
db = input('Enter the database number: ')
r = redis.Redis(host='127.0.0.1', port=6379, db=int(db))

# Enter the pattern to match against key names
id = input('Enter the field to match: ')
arg = '*' + id + '*'

n = r.keys(arg)

# Show the matched keys
for i in n:
    print(i.decode('utf-8'))

# Confirm the key to delete
delid = input('Enter the key to delete: ')
r.delete(delid)

print('Cache %s cleared successfully' % delid)
Example No. 10
import re
import bs4
import sys
import datetime
import copy
import json
import qrcode
from io import BytesIO
import os
import threading
import queue
# import gevent
import multiprocessing
from logger import logger
from redis import Redis
myredis = Redis()


class zju():
    def __init__(self, username=None, password=None):
        if username:
            self._username = username
            self._password = password
            self._stuid = self._username
            self._grade = int(self._stuid[2])
            self._semester_num = 9 - self._grade

        self._cc98_config = {}
        with open('./cc98.config', 'r', encoding='utf-8') as f:
            self._cc98_config = eval(f.read())
Example No. 11
"""
尽管redis-py中使用了连接池,但每次在执行请求时都会创建和断开一次连接操作(连接池申请连接,归还连接池),
如果想要在一次请求中执行多个命令,则可以使用 pipline 实现一次请求执行多个命令.
redis-py默认在一次pipeline中的操作是原子的,要改变这种方式,可以传入transaction=False
"""
import redis

pool = redis.ConnectionPool(host='10.211.55.4', port=6379)
r = redis.redis(connection_pool=pool)
# pipe = r.pipeline(transaction=False)

pipe = r.pipeline(transaction=True)

r.set('name', 'nick')
r.set('age', '18')

pipe.execute()
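As the note above says, passing transaction=False yields a non-atomic pipeline (no MULTI/EXEC); a minimal sketch reusing the same pool, with illustrative key names:

pipe = r.pipeline(transaction=False)
pipe.set('city', 'beijing')    # illustrative keys, not from the original
pipe.incr('visits')
print(pipe.execute())          # commands sent in one round trip, e.g. [True, 1]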
Example No. 12
 def __init__(self, redis_url, namespace='whoosh'):
     self.folder = namespace
     self.redis = redis(redis_url)
     self.locks = {}
Example No. 13
import csv
import codecs 
from collections import defaultdict
from dateutil import parser
import redis
from twython import Twython

tokenizer = None 
tagger = None 

def normalize(s):
	if type(s) == unicode:
		return s.encode('utf8', 'ignore')
	else:
		return str(s)

r_server = redis.Redis("localhost")


# write to a csv for classification 
# ofile = open('/Users/Michael/git/CompEcon/BigData/corpus.csv', mode='w', encoding ='uft-8', errors='replace')

# writer = csv.writer(ofile)

# create twython creds
app_key = 'wZTYfz4PqHSVNgljYpcA'
app_key_secret = 'oleEwE1L4MaKGOTZPO1GhK0BmbW4Tg6ocYarNofDkw'
access_token = '265679542-Tfk6oCfq259Smu8PD557qkdIOgVJCSxugKMouDnj'
access_token_secret = 'W9lYBystbtl8ILhn85oZYH5wGnSq4q6ClTFP4nKrcWoNL'

twit = Twython(app_key, app_key_secret)
Example No. 14
def keyword_filter(tagme_response_df, cache_cred, path_to_category_lookup,
                   subject, update_corpus, filter_score_val, num_keywords):
    print("subject:", subject)
    cache_status = False
    try:
        for i in ['port', 'host', 'password']:
            assert i in list(cache_cred.keys())
        cache_status = True
    except AssertionError:
        try:
            r = redis.Redis(host=cache_cred['host'], port=cache_cred['port'])
            r.set("test", "test")
            cache_cred["password"] = ""
            cache_status = True
        except IOError:
            print("Unable to establish connection with redis cache.")

    keyword_df = pd.DataFrame({'keyword': [], 'dbpedia_score': []})
    if cache_status:
        for ind in range(len(tagme_response_df)):
            keyword = tagme_response_df['spot'][ind]
            score = getRediskey(subject + "." + keyword, cache_cred['host'],
                                cache_cred['port'], cache_cred['password'])
            if score:
                score_df = pd.DataFrame({
                    'keyword': [keyword],
                    'dbpedia_score': [score]
                })
            else:
                with open(path_to_category_lookup, 'r') as stream:
                    subject_ls = yaml.safe_load(stream)[subject]
                dbpedia_categories = tagme_response_df['dbpedia_categories'][
                    ind]
                count = 0
                try:
                    for cat in dbpedia_categories:
                        dbpedia_prefix_cat = getTaxonomy(cat)
                        status = checkSubject(dbpedia_prefix_cat, subject_ls)
                        count += status
                    if len(dbpedia_categories) > 0:
                        relatedness = float(count) / float(
                            len(dbpedia_categories))
                    else:
                        relatedness = 0
                except BaseException:
                    relatedness = 0
                score_df = pd.DataFrame({
                    'keyword': [keyword],
                    'dbpedia_score': [relatedness]
                })
            keyword_df = pd.concat([keyword_df, score_df], ignore_index=True)

    # preprocessing
    keyword_df['keyword'] = [
        str(x).lower() for x in list((keyword_df['keyword']))
        if str(x) != 'nan'
    ]
    if update_corpus:
        corpus_update_df = keyword_df.drop_duplicates('keyword')
        corpus_update_df = corpus_update_df.dropna()
        for ind, val in corpus_update_df.iterrows():
            setRediskey(subject + "." + val['keyword'], val['dbpedia_score'],
                        cache_cred['host'], cache_cred['port'],
                        cache_cred['password'])
    if filter_score_val:
        try:
            keyword_df = keyword_df[keyword_df['dbpedia_score'] >= float(
                filter_score_val)]  ### from yaml#filtered_keyword_df
        except BaseException:
            print("Error: Invalid filter_score_val. Unable to filter. ")
    if num_keywords:
        try:
            keyword_df = keyword_df.sort_values(
                'dbpedia_score', ascending=[False]).iloc[0:int(num_keywords)]
        except BaseException:
            print("Error: Invalid num_keywords. Unable to filter. ")
    # keyword_relatedness_df.iloc[0:4]['KEYWORDS'].to_csv(Path_to_keywords + "KEYWORDS.csv")
    return keyword_df
Example No. 15
 def __init__(self, db=redis_db):
     self.r = redis(db=db)
Example No. 16
def connect_db():
    return redis(db=REDIS_DB, host=REDIS_HOST, port=PORT)
Example No. 17
from time import time
import numpy as NP
from redis import StrictRedis as redis
A = 10 * NP.random.randn(10000).reshape(1000, 10)

# flatten the 2D NumPy array and save it as a binary string
array_dtype = A.dtype
l, w = A.shape
As = A.ravel().tostring()

# create a key as a UNIX timestamp w/ array shape appended to end of key delimited by '|'
db = redis(db=0)
key = '{0}|{1}#{2}|{3}'.format(int(time()), l, w, A.dtype)

# store the binary string in redis
db.set(key, As)
 
# retrieve the proto-array from redis
As = db.get(key)
 
# deserialize it 
l, w = key.split('|')[1].split('#')
atype = key.split('|')[2]

A2 = NP.fromstring(As, dtype=atype).reshape(int(l), int(w))
print(A == A2)
print(A2)
Example No. 18
#!/usr/local/bin/python2.7
# encoding: utf-8


import os
import sys
import numpy as NP
from redis import StrictRedis as redis


r0 = redis(db=0)

def timeseries_to_redis(fname, game, end_time, time_step=86400000):
    with open(fname, "r") as f:
        data = [ row.strip().split(',')[-1] for row in f.readlines() 
                if not row.startswith('#')][1:]
    this_time = end_time
    for datapoint in data:
        key = '{0}:{1}'.format(game, this_time)
        r0.hset('game:backyardMonsters', key, datapoint)
        this_time -= time_step


ddir = '/Users/doug/Dropbox/DataArchive/competitor intelligence/raw data appdata'
dfile = 'BackYard_Monsters_to29Jun11.csv'

fname = os.path.join(ddir, dfile)

timeseries_to_redis(fname, 'BackyardMonsters', 1309392000000)

Example No. 19
def connect_db():
    return redis(db=DATABASE, host=REDIS_HOST, port=PORT)
Example No. 20
#!/bin/env python

import json
from config import REDIS_INSTANCES
from redis import redis
from zabbix import push_to_zabbix
import re

instance_data = []
slave_data = []
keyspace_data = []

for conf in REDIS_INSTANCES:
    r = redis(conf)
    instance_data.append({'{#PORT}': r.port})
    for key in r.info.Replication.keys():
        if re.match(r'slave\d', key):
            slave_data.append({
                '{#PORT}':
                r.port,
                '{#SLAVE}':
                '{ip}:{port}'.format(ip=r.info.Replication[key]['ip'],
                                     port=r.info.Replication[key]['port'])
            })

    for key in r.info.Keyspace.keys():
        if key.startswith('db'):
            keyspace_data.append({'{#PORT}': r.port, '{#KEYSPACE}': key})

print(json.dumps({'data': instance_data}))
Example No. 21
parser.add_argument("-r", "--redis", help="Redis IP", type=str)
args = parser.parse_args()
if args.cascades:
    cascades = pickle.load(
        open(
            "tests/test_resources/wsdm_cup_features/wsdm_training_cascades.pk",
            "rb"))
else:
    cascades = None

if args.redis is None:
    redis_ip = "127.0.0.1"
else:
    redis_ip = args.redis

db = redis(host=redis_ip)


def get_row_to_merge_features_uf(key):
    global num_queries
    num_queries += 1
    redis_key = str(0) + "_" + str(int(key))
    ser_value = db.get(redis_key)
    value = pickle.loads(ser_value)
    return value


def get_row_to_merge_features_sf(key):
    global num_queries
    num_queries += 1
    redis_key = str(1) + "_" + str(int(key))
Example No. 22
 def configure(self, env):
     import params
     env.set_params(params)
     redis()
Example No. 23
 def configure(self, env):
     import params
     params.redis_slave = True
     env.set_params(params)
     redis()
Example No. 24
 def configure(self, env):
     print('!!!! redis configure start !!!!')
     import params
     env.set_params(params)
     redis()
     print('!!!! redis configure end !!!!')
Example No. 25
 def __init__(self, redis_url, namespace='whoosh'):
     self.folder = namespace
     self.redis = redis(redis_url)
     self.locks = {}
Example No. 26
 def configure(self, env):
   import params
   env.set_params(params)
   redis()
Example No. 27
def connect_db():
	return redis(db=DATABASE, host=REDIS_HOST, port=PORT)
 def configure(self, env):
   import params
   params.redis_slave=True
   env.set_params(params)
   redis()