Example #1
def index():

    contacts = [] if user.is_anonymous() else redis.lrange(user.username +"_phonebook",0,-1)

    if(user.is_anonymous()):
        messages = []
    else:
        messages = redis.lrange(user.username +"_Messages",0,-1)
            
    return render_template('index.html', contacts=contacts, messages=reversed(messages))
Example #2
def send_text():
    
    anonymous = None
    error = None

    if(user.is_anonymous()):
        contacts = []
    else:
        contacts = redis.lrange(user.username +"_phonebook",0,-1)
    
    
    if request.method == 'POST':

        if 'Anonymous_checkbox' in request.form:
           anonymous = True
      

        if (request.form['number'] == "" or request.form['message'] == ""):
            error = "Please fill in the above fields"
        else:
            number = request.form['number']
            message = request.form['message']
            return redirect(url_for('send_message', number=number, message=message,anonymous=anonymous, source = "/sendtext"))


    return render_template('send.html', error=error, contacts=contacts)
Example #3
def index():
    """Show the size of whatever URL the app is setup to check."""
    context = {"url": "'%s'" % APP_URL.rsplit("://")[1]}

    # Get data from redis for our visualization.
    context["past_sizes"] = [json.loads(size) for size in redis.lrange("fatmirror-%s" % APP_URL, 0, -1)]

    return render_template("index.html", **context)
Example #4
def get_route():
    '''Gets the current trace'''
    val = redis.lrange(trace_list, 0, 0)
    if val:
        res = val[0]
    else:
        res = ''
    return res
Example #5
 def disks_usage(self):
     disks = {}
     disks_for_host = redis.keys('%s:disk_usage:*' % self.host)
     for disk in disks_for_host:
         disk_name = disk.split(':')[-2]
         usage = map(clean_timeseries_array, redis.lrange(disk, -200, -1))
         disks[disk_name] = usage
     return disks
Example #6
def render_list(key):
    strategies = {
                '::': render
              , ':#': render
              , ':-': lambda x: '\n'.join([ render(each) for each in redis.keys(x + ':*') ])
              , '': str
            }
    return '\n'.join ([ definite_strategy (strategies, key[0:2], key, '')
            for key in redis.lrange ( key, 0, -1 ) ])
Example #7
def test_list(redis, keys, length, retry):
    print "generating data..."
    for i in range(length):
        redis.lpush("key", ''.join([random.choice(string.ascii_letters + string.digits) for i in range(8)]))
    for i in range(keys - 1):
        redis.lpush(''.join([random.choice(string.ascii_letters + string.digits) for i in range(8)]),
                    ''.join([random.choice(string.ascii_letters + string.digits) for i in range(8)]))
    delays = []
    print "testing..."
    for i in range(retry):
        t1 = datetime.datetime.now()
        redis.lrange("key", 0, -1)
        t2 = datetime.datetime.now()
        td = t2 - t1
        delays.append(td.days * 24 * 3600 * 1000 + td.seconds * 1000 + td.microseconds / 1000.0)

    result = pd.Series(delays)
    result.to_csv("list_%d_%d.csv" % (length, retry))
    print result.describe()
Example #8
def show_key(key):
    return json.dumps ( definite_strategy (
        {
              'string': lambda x: redis.get(x)
            , 'hash': lambda x: redis.hgetall(x)
            , 'list': lambda x: redis.lrange(x,0,-1)
        }
        , redis.type(key)
        , key
        ) )
Example #9
def repr_list(key):
    strategies = {
                '::': repr
              , ':#': repr
              , ':!': repr
              , ':-': lambda x: [ repr(each) for each in redis.keys(x + ':*') ]
              , '': str
            }
    return [ definite_strategy (strategies, key[0:2], key, '')
            for key in redis.lrange ( key, 0, -1 ) ]
Example #10
def fetch_all(type_name, hashcode):
    namehash = "%s:%s" % (type_name, hashcode)
    metahash = "%s:linked_fields:%s" % (type_name, hashcode)
    try:
        return redis.get(namehash)
    except Exception, e:
        try:
            current_level_fields = redis.hgetall(namehash)
            linked_fields = redis.hgetall(metahash)
            for key in linked_fields:
                obj = linked_fields[key]
                if obj == "0": current_level_fields[key] = []
                else: current_level_fields[key] = fetch_all(type_name, obj)
        except Exception, e:
            current_level_fields = redis.lrange(namehash, 0, -1)
            linked_fields = redis.lrange(metahash, 0, -1)
            for index_item in linked_fields:
                i = int(index_item)
                list_item_hash = current_level_fields[i]
            current_level_fields[i] = fetch_all(type_name, list_item_hash)
        return current_level_fields
Example #11
def hello():
#    to_store = read_tags()
#    to_store.append('test tag not on MER')
#    redis.delete('allTags')
#    for tag in to_store:
#        redis.rpush('allTags', tag)
    hint = redis.get('hint').decode('utf-8')
    sol = redis.get('sol').decode('utf-8')
    stored_tags = redis.lrange('allTags', 0, -1)
    MER_tags = read_tags()
    not_on_MER = [item for item in stored_tags if item not in MER_tags]
    return render_template("communicate.html",
                           myhint=hint, mysol=sol, tags=MER_tags, tags_not_on_MER=not_on_MER)
Example #12
def communicate_post():
    if request.form['submit'] == 'preview':
        hint = request.form['inputHint']
        sol = request.form['inputSol']
        stored_tags = redis.lrange('allTags', 0, -1)
        MER_tags = read_tags()
        not_on_MER = [item for item in stored_tags if item not in MER_tags]
        return render_template("communicate.html",
                               myhint=hint, mysol=sol, tags=MER_tags, tags_not_on_MER=not_on_MER)
    
    elif request.form['submit'] == 'submit':
        hint = request.form['inputHint']
        sol = request.form['inputSol']
        redis.set('hint', hint)
        redis.set('sol', sol)
        stored_tags = redis.lrange('allTags', 0, -1)
        MER_tags = read_tags()
        not_on_MER = [item for item in stored_tags if item not in MER_tags]
        return render_template("communicate.html",
                               myhint=hint, mysol=sol, tags=MER_tags, tags_not_on_MER=not_on_MER)
    else:
        return "Something went wrong"
Example #13
def update_since_id(redis, key):
    """"""
    stored_since_id = redis.get(key + ":last_since_id")
 
    try:
        last_tweet_id = str(redis.lrange(key, 0, 0)[0])
    except IndexError:
        last_tweet_id = None
 
 
    if last_tweet_id and (last_tweet_id != stored_since_id):
        redis.set(key + ":last_since_id", last_tweet_id)
 
    return True
Example #14
def clear_sub_nodes(type_name, hashcode):
    namehash = "%s:%s" % (type_name, hashcode)
    metahash = "%s:linked_fields:%s" % (type_name, hashcode)
    m_type = redis.type(metahash)
    if m_type == "list":
        for index in redis.lrange(metahash, 0, -1):
            i = int(index)
            clear_sub_nodes(type_name, redis.lindex(namehash, i))
    elif m_type == "hash":
        meta_dictionary = redis.hgetall(namehash)
        for key in meta_dictionary:
            linked_node_hash = meta_dictionary[key]
            clear_sub_nodes(type_name, linked_node_hash)
    redis.delete(namehash)
    redis.delete(metahash)
Example #15
def get_log(username):
    """
    Return a list of page views.

    Each item is a dict with `datetime`, `method`, `path` and `code` keys.
    """
    redis = get_redis_client()
    log_key = 'log:{}'.format(username)
    raw_log = redis.lrange(log_key, 0, -1)
    log = []
    for raw_item in raw_log:
        item = json.loads(raw_item.decode())
        item['datetime'] = convert_timestamp(item.pop('time'))
        log.append(item)
    return log
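get_log() above expects each stored record to be a JSON-encoded dict carrying a time timestamp plus method, path and code. As a companion, here is a minimal sketch of the write side that would produce records in that shape; the function name, the LPUSH/LTRIM pattern and the 1000-entry cap are assumptions, only the key layout and field names are taken from the reader above.

import json
import time

def log_page_view(redis, username, method, path, code, max_entries=1000):
    """Hypothetical writer producing records in the shape get_log() expects."""
    entry = json.dumps({'time': time.time(), 'method': method, 'path': path, 'code': code})
    log_key = 'log:{}'.format(username)
    redis.lpush(log_key, entry)               # newest entries first
    redis.ltrim(log_key, 0, max_entries - 1)  # keep the per-user log bounded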
Example #16
 def __getitem__(self, name):
     try:
         item = redis.hget(self.root_node[0], name)  #strings
         if not item:
             item = redis.hget(self.root_node[1], name)  #objects
             return RedisObject(self.type_name, root=item)
         else:
             return RedisPrimitive(self.type_name, self.hashcode, name)
     except Exception, e:
         try:
             index = int(name)
             item = redis.lrange(self.root_node[0], index, index)[0]
             if item.isdigit(): return int(item)
             return RedisObject(self.type_name, root=item)
         except Exception, e2:
             raise e2
Example #17
def main():
    top_story_ids = requests.get(
        "{}/topstories.json".format(API_PREFIX)).json()
    pool = Pool(50)
    rust_stories = list(
        filter(lambda story: "Rust" in story.get("title", ""),
               pool.imap(fetch_story, top_story_ids)))[:MAX_COUNT]
    stories_length = len(rust_stories)
    if stories_length < MAX_COUNT:
        existed_story_ids = set(map(int, redis.lrange(REDIS_KEY, 0, -1)))
        existed_story_ids -= set(item["id"] for item in rust_stories)
        rust_stories.extend(
            pool.imap(fetch_story,
                      list(existed_story_ids)[:MAX_COUNT - stories_length]))
    redis.lpush(REDIS_KEY, *[item["id"] for item in rust_stories])
    redis.ltrim(REDIS_KEY, 0, MAX_COUNT - 1)
    render(rust_stories)
Example #18
def queued_job_info():
    """Provides metadata for all known jobs.

    Returns a list of dictionaries:
        [ {job_id, request_url, submitted, page_title, status},
          ...,
        ]"""
    jobs = []
    # Show the ten most recent jobs
    for job_id in redis.lrange(joblist, 0, 9):
        job = rqueue.fetch_job(job_id)
        if job is None:
            continue # don't bother showing the 'deleted' jobs
        job_details = redis.hgetall(jobkey(job_id))
        job_details['submitted'] = nicetimedelta(job_details['submitted'])
        job_details['status'] = job.get_status()
        jobs.append(job_details)
    return jobs
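For orientation, a rough sketch of the submit-side bookkeeping that queued_job_info() relies on; the joblist name, the jobkey() layout and the retention cap are assumptions here, only the stored field names come from the docstring and reader above (the rq enqueue itself is omitted).

import time

joblist = 'jobs'                        # assumed name of the recent-jobs list

def jobkey(job_id):
    return 'job:{}'.format(job_id)      # assumed per-job hash key layout

def record_job(redis, job_id, request_url, page_title):
    """Hypothetical bookkeeping matching what queued_job_info() reads back."""
    redis.hmset(jobkey(job_id), {        # hmset is deprecated in newer redis-py; hset(..., mapping=...) also works
        'job_id': job_id,
        'request_url': request_url,
        'submitted': time.time(),
        'page_title': page_title,
    })
    redis.lpush(joblist, job_id)         # newest first, so lrange(joblist, 0, 9) yields the ten most recent
    redis.ltrim(joblist, 0, 99)          # assumed cap on retained job ids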
Example #19
def tweet_and_shout(api_session, redis, key, timeout=600):
    """"""
    for tweet_id in redis.lrange("%s:%s" % (LOLCOIFFEURS_LIST, key), 0, -1):
        tweet_dict = redis.hgetall("%s:tweet:%s" % (LOLCOIFFEURS_LIST, tweet_id))
 
        # Tracking answered tweets in a brand new set, and posting
        # a reply to it
        print "replying tweet : %s" % (tweet_id)
        redis.sadd((LOLCOIFFEURS_LIST + ":%s:answered" % (key)), tweet_id)
#        api_session.PostUpdate("@%s %s" % (tweet_dict["username"], RESPONSE), in_reply_to_status_id=tweet_id)
        # Popping out element from the left of the list
        # as we answer it
        redis.rpop("%s:%s" % (LOLCOIFFEURS_LIST, key))
 
        # Wait timeout before replying again
        sleep(timeout)
 
    return
Example #20
					dbprint('models load time: %d:%02d' % (tdiff//60, tdiff%60))
				msglist = []
				labellist = []
				dbprint('model names: %s' % models.keys())
				for mname,model in models.items():
					output_name = REDIS_OUTPUT_PREFIX + mname
					dbprint('Start %s' % bname)
					label,imgdata = detect_image_label(model, ftp_h, fpath)
					if label == NO_LABEL:
						queue_pfx = NO_LABEL_QUEUE_PREFIX + mname
						redis.rpush(queue_pfx, bname)
						redis.ltrim(queue_pfx, max(0, redis.llen(queue_pfx) - 100), -1)
					elif label in ('open', 'close'):
						redis.set('gate', json.dumps({'label': label, 'ts': time.time()}))
					if label != NO_LABEL:
						last_rec = redis.lrange(output_name, -1, -1)
						if last_rec:
							last_rec = json.loads(last_rec[0])
							if last_rec['ts'] < ts and last_rec['label'] != label:
								msg = '%s changed at %s from %s to %s (diff=%d), %s' % (mname, dt.strftime('%d/%m %H:%M:%S'), last_rec['label'], label, ts - last_rec['ts'], bname)
								dbprint('%s %s' % (bname, msg))
								msglist.append(msg)
								labellist.append((mname, label))
						else:
							msg = 'Initial at %s %s' % (dt.strftime('%d/%m %H:%M:%S'), label)
							dbprint('%s %s' % (bname, msg))
							msglist.append(msg)
							labellist.append((mname, label))
						dbprint(bname)
						redis.rpush(output_name, json.dumps({'label': label, 'ts': ts, 'name': fpath}))
						redis.ltrim(output_name, max(0, redis.llen(output_name) - 100), -1)
Example #21
#     pipe.hmset(key,update)
# pipe.execute()

###############################################################
### fish field table update
###  added 1 field: get_rate
###   2018-01-03
###############################################################
update = {'get_rate':''}
for key in redis.keys(FISH_ROOM_TABLE%('*')):
     if 'set' in key or 'desc' in key:
         continue
     print 'set fish_table[%s]'%(key)
     pipe.hmset(key,update)

goods_lists = redis.lrange(FISH_REWARD_ON_SHOP_LIST,0,-1)
for goods in goods_lists:
     goods_type = redis.hget(FISH_REWARD_TABLE%(goods),'reward_type')
     pipe.lpush(FISH_REWARD_ON_SHOP_TYPE_LIST%(goods_type),goods)
     print 'set goods id index success....[%s]'%(goods_type)
pipe.execute()

###############################################################
### exchange_table update
###  added 1 field: exchange_type
###   2018-01-03
###############################################################
#total = redis.llen(FISH_EXCHANGE_LIST)
#exchange_ids = redis.lrange(FISH_EXCHANGE_LIST,0,-1)

#exchange_id_keys = [FISH_EXCHANGE_TABLE%(exchange_id) for exchange_id in exchange_ids]
Example #22
 def get_winner_pids(self):
     pids = redis.lrange(self.key(':winners'), 0, -1)
     return [int(pid) for pid in pids]
Example #23
 def get_gids():
     gids = redis.lrange('games', 0, -1)
     return [int(gid) for gid in gids]
Example #24
 def memory_usage(self):
     base_key = '%s:memory_usage:system:' % self.host
     free = redis.lrange(base_key + 'free', -200, -1)
     used = redis.lrange(base_key + 'used', -200, -1)
     usage = redis.lrange(base_key + 'usage', -200, -1)
     return map(clean_timeseries_array, usage)
Example #25
 def f(key):
     return ( definite_strategy (
         { 'list': lambda x: reduce(lambda x,y: x+y, [ f(a) if a[0:2] == '::' or a[0:2] == ':#' else [a] for a in redis.lrange(x, 0, -1) ])
               , 'hash': lambda x: [redis.hgetall(x)]
               , 'none': lambda x: []
               , 'string': lambda x: [ f(a) if a[0:2] == '::' else a for a in [redis.get(x)] ]
               }
               , redis.type(key)
               , key
               , default_strategy = 'hash'
               )
             )
Example #26
 def check_pulse(self, signum, _):
     ekg = redis.lrange('EKG', 0, -1) 
     redis.delete('active_feeds')
     for i in ekg:
         redis.sadd('active_feeds', i)
Example #27
def get_processed_files(worker):
    key = "processed-file-%s" % worker
    data = redis.lrange(key, 0, -1)
    data = [json.loads(d) for d in data]
    data = sorted(data, key=lambda x: x["date"])
    return data
Example #28
def get_worker_actions(worker):
    key = "worker-action-%s" % worker
    data = redis.lrange(key, 0, -1)
    data = [json.loads(d) for d in data]
    data = sorted(data, key=lambda x: x["date"])
    return data
Example #29
import json
import time
import util
import redis
import sys
# print(sys.argv)
redis = redis.Redis(host="127.0.0.1", port=6379, db=2, decode_responses=True)
limitDay = 30  # only look at the most recent 30 days
# print(redis.keys())
lis = []
for key in redis.keys("0*"):  # 0*代表基金号,排除其他
    try:
        allData = redis.lrange(key, 0, -1)
    except Exception as e:
        print(key)
        print(str(e))
    lis.append(util.judgeFund(allData, limitDay))

print("sort rate: ****************************************")
lis.sort(key=lambda x: float(x["rate"]), reverse=True)
for i in lis[:int(sys.argv[1])]:
    print(i)
print("sort diffRate: **********************************")
lis.sort(key=lambda x: float(x["diffRate"]), reverse=True)
for i in lis[:int(sys.argv[1])]:
    print(i)
print("sort winPercent: **********************************")
lis.sort(key=lambda x: float(x["winPercent"]), reverse=True)
queryNum = int(sys.argv[1])
num = 0
for i in lis:
Example #30

def handle(msg):
    global last, last_text
    if mf(msg):
        text = msg['message'][
            args.key] if args.key is not None else msg['message']
        eqv = all([
            not args.nocompress, last is not None
            and len(last['tree']) == len(msg['tree'])
            and all([x == y for (x, y) in zip(last['tree'], msg['tree'])]),
            last_text is not None
            and sim(last_text, text) > similarity_threshold
        ])
        last = msg
        last_text = text
        if eqv:
            print('\033[F', end='')
        print(
            _fmt(msg['tree'], msg['timestamp'], text, msg['filename'],
                 msg['lineno'], msg['block'], msg['linetxt']))


cached = (json.loads(msg.decode()) for msg in redis.lrange(config.KEY, 0, -1))
for message in list(cached)[::-1]:
    handle(message)

while not False:
    message = json.loads(socket.recv().decode('ascii'))
    handle(message)
Example #31
def fetch_rewards():
    rews = redis.lrange(REWARDS_KEY, 0, -1)
    return [struct.unpack('d', r) for r in rews]
Example #32
#     pipe.hmset(key,update)
#
# goods_lists = redis.lrange(FISH_REWARD_ON_SHOP_LIST,0,-1)
# for goods in goods_lists:
#     goods_type = redis.hget(FISH_REWARD_TABLE%(goods),'reward_type')
#     pipe.lpush(FISH_REWARD_ON_SHOP_TYPE_LIST%(goods_type),goods)
#     print 'set goods id index success....[%s]'%(goods_type)
# pipe.execute()

###############################################################
### exchange_table update
###  added 1 field: exchange_type
###   2018-01-03
###############################################################
total = redis.llen(FISH_EXCHANGE_LIST)
exchange_ids = redis.lrange(FISH_EXCHANGE_LIST, 0, -1)

exchange_id_keys = [
    FISH_EXCHANGE_TABLE % (exchange_id) for exchange_id in exchange_ids
]
exchange_details = [
    exchange_detail for exchange_detail in redis.mget(exchange_id_keys)
]
exchange_info = []
for exchange_detail in exchange_details:
    exchange_detail = eval(exchange_detail)
    exchange_detail['exchange_type'] = redis.hget(
        FISH_REWARD_TABLE % (exchange_detail['exchange_reward_id']),
        'reward_type')
    pipe.set(FISH_EXCHANGE_TABLE % (exchange_detail['exchange_id']),
             exchange_detail)
Example #33
 def load_alternatives(experiment_name, redis=None):
     key = _key("e:{0}:alternatives".format(experiment_name))
     return redis.lrange(key, 0, -1)
Example #34
    system_contain_bro,agent_contain_bro,fish_system_bro = list(system_contain_bro),list(agent_contain_bro),list(fish_system_bro)
    system_contain_bro.extend(agent_contain_bro)
    system_contain_bro.extend(fish_system_bro)

    for bro in system_contain_bro:
        if bro in play_set:
            redis.srem(HALL_BRO_PLAY_SET,bro)
            print '[try do_clearContainBrocast] broadId[%s] is remove..'%(bro)

try:
    print '[%s] broadManager is running..'%(datetime.strftime(datetime.now(),"%Y-%m-%d %H:%M:%S"))
    do_clearContainBrocast()
    while True:
        nowTime = time.time()
        nowDate = datetime.now()
        borads = redis.lrange(HALL_BRO_LIST,0,-1)
        borads_fish = redis.lrange(FISH_BRO_LIST,0,-1)
        borads,borads_fish = list(borads),list(borads_fish)
        borads.extend(borads_fish)
        print '[%s][try getBrocast] borads[%s]'%(datetime.strftime(nowDate,"%Y-%m-%d %H:%M:%S"),borads)
        out_set = redis.smembers(HALL_BRO_OUT_SET)
        play_set = redis.smembers(HALL_BRO_PLAY_SET)
        for borad in borads:
            if borad in out_set:
                continue
            if borad in play_set:
                print '[%s][try Brocast] borads[%s] is boradding....'%(datetime.strftime(nowDate,"%Y-%m-%d %H:%M:%S"),borad)
            broad_table = HALL_BRO_TABLE%(borad)
            boradInfo = redis.hgetall(broad_table)
            start_date = datetime.strptime(boradInfo['start_date'],'%Y-%m-%d %H:%M')
            end_date = None
Example #35
def fiximages():
    keys = redis.keys('H*')
    for key in keys:
        list = redis.lrange(key, 0, -1)
        print list
Example #36
from musicautils import *

YOUTUBE_DL = os.path.join(os.getenv("HOME"), ".local/bin/youtube-dl")

if not os.path.isdir(DATA_DIR):
	os.mkdir(DATA_DIR)

redis = redis.Redis()

# refresh the loading queue

while redis.lpop("musicaload") is not None:
	pass

for ent in redis.lrange("musicaqueue", 0, -1):
	redis.rpush("musicaload", json.loads(ent.decode())["ytid"])

def gen_cmdline(ytid, for_title=False):
	return [YOUTUBE_DL, "--no-playlist", "--id", "--no-progress", "--format", "mp4"] + (["--get-title"] if for_title else []) + ["--", sanitize(ytid)]

def get_title(ytid):
	return subprocess.check_output(gen_cmdline(ytid, for_title=True))

# "mplayer -fs"

while True:
	_, to_load = redis.blpop("musicaload")
	try:
		to_load = to_load.decode()
		if redis.get("musicatitle." + to_load) is None or redis.get("musicatitle." + to_load).startswith(b"Could not load video "):
Example #37
def get_processed_files(worker):
    key = "processed-file-%s" % worker
    data = redis.lrange(key, 0, -1)
    data = [json.loads(d) for d in data]
    data = sorted(data, key=lambda x: x["date"])
    return data
Example #38
redis = getInst(1)

# process execution heartbeat
SLEEP_SECS = 5.0
# countdown to settlement
DISCOUNT_TIME = 60 * 3

AGENT_IDS = ['113388']

try:
    print 'check is monitor...'
    while True:
        nowTime = time.time()
        nowDate = datetime.now()
        for agentId in AGENT_IDS:
            memberIds = redis.lrange(JOIN_GROUP_LIST % (agentId), 0, -1)
            pipe = redis.pipeline()
            for memberId in memberIds:
                if int(memberId) <= 0:
                    continue
                print memberId
                status = redis.get(JOIN_GROUP_RESULT %
                                   (memberId)).split(':')[1]
                if int(status) == 0:
                    status = 1
                    pipe.set(JOIN_GROUP_RESULT % (memberId),
                             "%s:%s" % (agentId, status))
                    pipe.sadd(FORMAT_ADMIN_ACCOUNT_MEMBER_TABLE % (agentId),
                              memberId)
                    pipe.hset(FORMAT_USER_TABLE % (memberId), 'parentAg',
                              agentId)
Example #39
def get_worker_actions(worker):
    key = "worker-action-%s" % worker
    data = redis.lrange(key, 0, -1)
    data = [json.loads(d) for d in data]
    data = sorted(data, key=lambda x: x["date"])
    return data
Example #40
 def get_hand(self):
     return redis.lrange(self.key(':hand'), 0, -1)
Example #41
 def elements_as_json(self):
     """Return all elements as a JSON object"""	
     all_elements = redis.lrange(self.key, 0, -1) or [ ]
     all_elements_as_json = json.dumps(all_elements)
     return all_elements_as_json
Example #42
 def get_usernames():
     return redis.lrange('users', 0, -1)
Example #43
def get_events():
    events = {}
    for e in redis.lrange("events", 0, -1):
        e = json.loads(e)
        events.setdefault(e['name'], []).append(e)
    return events
Example #44
def data():
    data = redis.lrange('data', 0, -1)
    redis.delete('data')
    return json.dumps(data)
Example #45
def lrange(key):
    return [loads(item) for item in r.lrange(key, 0, -1)]
Example #46
	def find(self, uuid):
		found = [ent for ent in redis.lrange("musicaqueue", 0, -1) if json.loads(ent.decode())["uuid"] == uuid]
		assert len(found) <= 1
		return found[0] if found else None
Example #47
-RPOP <mylist>  Ex. list 1 2 3 -> get 3 and delete 3 from the list
if the list is empty, wait for a value before popping:
-BLPOP, BRPOP <mylist> .. <timeout>
pop from one list and push onto another:
-RPOPLPUSH <source> <destination>  Ex. list1 a b c > RPOPLPUSH list1 list2 > list1 a b, list2 c
edit a list value:
-LSET <mylist> <index> <listvalue>
delete values:
-LREM <mylist> -n [from tail to head] +n [from head to tail]  Ex. RPUSH list1 A B C A A > LREM list1 -1 "A" > LRANGE list1 > A B C A
'''
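# The notes above also mention RPOPLPUSH, LSET and LREM; as a quick hedged sketch
# (redis-py 3.x argument order assumed for lrem), they map onto the client like this:
redis.rpush("list1", "a", "b", "c")
print(redis.rpoplpush("list1", "list2"))   # moves "c": list1 -> a b, list2 -> c
redis.lset("list1", 0, "A")                # list1 -> A b
redis.lrem("list1", -1, "A")               # remove one "A", scanning from the tail
print(redis.lrange("list1", 0, -1))
print(redis.lrange("list2", 0, -1))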
#LPUSH
print("Lpush")
redis.lpush("mylist","a","b","c")
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
print(lrange)
#RPUSH
print("Rpush")
redis.lpush("mylist","a","b","c")
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
print(lrange)
#LPOP
print("Lpop")
lpop=redis.lpop("mylist")
print(lpop)
#lrange
print("lrange")
lrange=redis.lrange("mylist",0,-1)
Example #48
 def elems(self):
     return [
         json.loads(ent.decode())
         for ent in redis.lrange("musicaqueue", 0, -1)
     ]
Example #49
#     print key
#     redis.delete(key)

# for key in redis.keys(FORMAT_USER_UNREAD_MESSAGE%('*')):
#     print key
#     redis.delete(key)

# for key in redis.keys(FORMAT_MGR_SEND_MESSAGE_LIST%('*')):
#     print key
#     redis.delete(key)

# for key in redis.keys(FORMAT_MSG_READ_SET%('*')):
#     print key
#     redis.delete(key)

# for key in redis.keys(FORMAT_GAMEHALL_NOTIC_TABLE%('*')):
#     print key
#     redis.delete(key)

# for key in redis.keys(FORMAT_USER_MESSAGE_LIST%('*')):
#     print key
#     redis.delete(key)

# script to delete announcements
listss = redis.lrange(FORMAT_GAMEHALL_NOTIC_LIST_TABLE, 0, -1)
for lists in listss:
    ahInfo = redis.hgetall(FORMAT_GAMEHALL_NOTIC_TABLE % (lists))
    if ahInfo['status'] == '0':
        print 'delete.......[%s]' % (lists)
        redis.lrem(FORMAT_GAMEHALL_NOTIC_LIST_TABLE, lists)
Example #50
    job_id = sys.argv[2]

    data = redis.hgetall(job_id)
    print('Data for job ident {}:'.format(job_id))
    print('{}'.format(
        json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))))
    exit(0)

elif sys.argv[1] == 'get_all_pending_queues':
    for name in redis.scan_iter('pending:*'):
        print('{}'.format(name))

    exit(0)

elif sys.argv[1] == 'dump_working_queue':
    pprint('{}'.format(redis.lrange('working', 0, -1)))
    exit(0)

elif sys.argv[1] == 'reset_job_counters':
    if len(sys.argv) < 3:
        print('Invalid arguments')
        exit(1)

    job_id = sys.argv[2]

    # fetch required job parameter; bail if not found
    data = redis.hgetall(job_id)
    if 'log_key' not in data:
        print(
            'Redis does not have a log key for that job (maybe it does not exist)'
        )
Example #51
def pross():

    params = dict()
    params["logging_level"] = 3
    params["output_resolution"] = "-1x-1"
    params["net_resolution"] = "-1x368"
    params["model_pose"] = "BODY_25"
    params["alpha_pose"] = 0.6
    params["scale_gap"] = 0.3
    params["scale_number"] = 1
    params["render_threshold"] = 0.05
    # If GPU version is built, and multiple GPUs are available, set the ID here
    params["num_gpu_start"] = 0
    params["disable_blending"] = False
    # Ensure you point to the correct path where models are located
    params[
        "default_model_folder"] = "/woody/software/source/openpose2019/models/"
    # Construct OpenPose object allocates GPU memory

    openpose = OpenPose(params)

    tracker = Tracker(link=50, match=0.3)
    #video_path = "rtsp://172.16.3.26/test"
    #video_path = "/woody/software/source/openpose/examples/media/video.avi"
    video_path = "/home/woody/tmp/openpose/test.mp4"
    #video_path = "/home/woody/tmp/openpose/video/4804_exit_overvie.mp4"
    video = cv2.VideoCapture()

    if not video.open(video_path):
        logger.info("can not open the video")
        exit(1)

    #fps = video.get(cv2.CAP_PROP_FPS)
    #size = (int(video.get(cv2.CAP_PROP_FRAME_WIDTH)), int(video.get(cv2.CAP_PROP_FRAME_HEIGHT)))

    #fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    #w_video = cv2.VideoWriter("/home/woody/tmp/openpose//aaa.mp4", fourcc, fps, size,True)
    index = 1
    img_index = 1
    count = 0
    f_count = 1
    imageArray = {}
    images = []
    start = time.time()
    tmp = []
    while True:
        _, frame = video.read()
        if frame is None:
            break
        #w_video.write(frame)
        if f_count % 15 == 0:
            st = time.time()
            output_image = frame
            keypoints, scores = openpose.forward(frame, False)
            logger.info("openpose>>>" + str(time.time() - st))
            #gene service
            kp = keypoints.reshape(-1, 75)
            st = time.time()
            data_flow, output_image = tracker.wrapped_track(
                output_image, kp.tolist(), scores, 'test', img_index)
            img_index += 1
            tmp.append(keypoints)
            logger.info("wrapped_track>>>" + str(time.time() - st))
            if data_flow is None:
                continue
            if output_image is None:
                continue
            if len(data_flow) < 1:
                continue

            image = output_image
            continuity = False
            key = "image_info_" + str(index)
            st = time.time()
            data = {}
            img_encode = cv2.imencode('.jpg', output_image)[1]
            data_encode = np.array(img_encode)
            str_encode = data_encode.tostring()
            data["image"] = str_encode
            data["data_flow"] = data_flow
            redis.set(key, pickle.dumps(data))
            people_data = []
            user_midHip_data = {}
            for keypointdata in data_flow:
                flag = False
                new_pid = str(keypointdata['new_pid'])
                keypoint = keypointdata['box_pose_pos']
                x1, x2, y1, y2 = keypointdata["box_pos"]
                print("box_pos" + str(x1) + "," + str(x2) + "," + str(y1) +
                      "," + str(y2))
                cv2.putText(image, new_pid, (x1 + 10, y1 - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
                userBuleData = redis.get("user_bule_" + new_pid)
                if userBuleData is None:
                    userBuleData = pickle.dumps(False)

                user_bule_flag = pickle.loads(userBuleData)
                #logger.info(str(new_pid) + ">>>>,keypoint>>>>" + str(keypoint))
                try:
                    if user_bule_flag == False:
                        bule_flag, bule_totail = calcBuleRate(
                            image, [keypoint[12][0], keypoint[12][1]],
                            [keypoint[9][0], keypoint[9][1]],
                            [keypoint[2][0], keypoint[2][1]],
                            [keypoint[5][0], keypoint[5][1]])
                        bule_list = redis.lrange("bule_list_" + new_pid, 0, -1)
                        if bule_list is not None:
                            bule_count = 0
                            for bule_score_data in bule_list:
                                if float(bule_score_data) >= APM_BULE_SCORE:
                                    if bule_count >= APM_BULE_COUNT:
                                        user_bule_flag = True
                                        break
                                    bule_count = bule_count + 1
                        redis.lpush("bule_list_" + new_pid, bule_totail)
                        if user_bule_flag:
                            redis.set("user_bule_" + new_pid,
                                      pickle.dumps(user_bule_flag))
                except Exception as err:
                    logger.info(err)
                logger.info(key + " >>> bulu_list >>>>>" + new_pid + ">>>>" +
                            str(user_bule_flag))
                if user_bule_flag == False:
                    continue
                x1 = keypoint[2][0]
                y1 = keypoint[2][1]
                x2 = keypoint[9][0]
                y2 = keypoint[9][1]
                x3 = keypoint[10][0]
                y3 = keypoint[10][1]
                r_result, r_flag = calcHipAngle(x1, y1, x2, y2, x3, y3)
                x1 = keypoint[5][0]
                y1 = keypoint[5][1]
                x2 = keypoint[12][0]
                y2 = keypoint[12][1]
                x3 = keypoint[13][0]
                y3 = keypoint[13][1]
                l_result, l_flag = calcHipAngle(x1, y1, x2, y2, x3, y3)

                x1 = keypoint[9][0]
                y1 = keypoint[9][1]
                x2 = keypoint[10][0]
                y2 = keypoint[10][1]
                x3 = keypoint[11][0]
                y3 = keypoint[11][1]
                ra_result, ra_flag = calcKneeAngle(x1, y1, x2, y2, x3, y3)
                ra_len_result, ra_len_flag = calcLenRate(
                    x1, y1, x2, y2, x3, y3)
                x1 = keypoint[12][0]
                y1 = keypoint[12][1]
                x2 = keypoint[13][0]
                y2 = keypoint[13][1]
                x3 = keypoint[14][0]
                y3 = keypoint[14][1]
                la_result, la_flag = calcKneeAngle(x1, y1, x2, y2, x3, y3)
                la_len_result, la_len_flag = calcLenRate(
                    x1, y1, x2, y2, x3, y3)

                if ra_flag and la_flag:
                    flag = True
                if ra_len_flag and la_len_flag:
                    flag = True
                if (r_flag or l_flag) and (abs(r_result - l_result) <=
                                           30) and (ra_result or la_result):
                    flag = True
                if la_result >= 170 or ra_result >= 170:
                    flag = False

                if (la_len_result >= 0.9 and la_len_result <= 1.09) or (
                        ra_len_result >= 0.9 and ra_len_result <= 1.09):
                    flag = False

                try:
                    sat_data_info = {}
                    sat_data_info["flag"] = flag
                    sat_data_info["image"] = key
                    redis.lpush("user_sat_" + new_pid,
                                pickle.dumps(sat_data_info))
                except Exception as err:
                    logger.info(err)
                logger.info(key + " >>> sat_list >>>>>" + new_pid + ">>>>" +
                            str(flag))
                people_data.append(new_pid)
                if flag and user_bule_flag:
                    if "user_midHip_" + new_pid in user_midHip_data.keys():
                        midipData_info = user_midHip_data["user_midHip_" +
                                                          new_pid]
                        if midipData_info[0] == 0 or midipData_info[1] == 0:
                            continuity = True
                        midHip2 = (midipData_info[0] - keypoint[8][0]) * (
                            midipData_info[0] - keypoint[8][0]) + (
                                midipData_info[1] - keypoint[8][1]) * (
                                    midipData_info[1] - keypoint[8][1])
                        midHip = cmath.sqrt(midHip2)
                        f_midHip = cmath.sqrt(
                            0.02 * (midipData_info[0] + midipData_info[1]) *
                            (midipData_info[0] + midipData_info[1]))
                        if midHip.real < f_midHip.real:
                            continuity = True
                        user_midHip_data["user_midHip_" +
                                         new_pid] = keypoint[8]
                    else:
                        continuity = True
                        user_midHip_data["user_midHip_" +
                                         new_pid] = keypoint[8]
            if continuity:
                save_path = "{}/{:>03s}.jpg".format(
                    "/home/woody/tmp/openpose/hh", str(key))
                cv2.imwrite(save_path, image)
                if 'index' in imageArray.keys():
                    if imageArray['start'] >= count - E_TOTAIL:
                        if imageArray['index'] == count - 1:
                            imageArray['start'] = count
                            imageArray['count'] = imageArray['count'] + 1
                            if imageArray['count'] >= APM_TOTAIL:
                                if imageArray['apm']:
                                    imageArray['apm'] = True
                                    save_path = "{}/{:>03s}.jpg".format(
                                        "/home/woody/tmp/openpose/test",
                                        str(imageArray['key']))
                                    cv2.imwrite(save_path, imageArray["image"])
                                    redis.hset(
                                        "key", str(count),
                                        pickle.dumps(imageArray["image"]))
                                    image_data = {}
                                    image_data["key"] = key
                                    image_data["image"] = image
                                    images.append(image_data)
                                    for image_info in images:
                                        save_path = "{}/{:>03s}.jpg".format(
                                            "/home/woody/tmp/openpose/li",
                                            str(image_info['key']))
                                        cv2.imwrite(save_path,
                                                    image_info["image"])
                                    images = []
                        logger.info("imageArray['count']  >>> " +
                                    str(imageArray['count']))
                        imageArray['index'] = count
                        if imageArray['apm'] == False:
                            image_data = {}
                            image_data["key"] = key
                            image_data["image"] = image
                            images.append(image_data)
                    else:
                        save_path = "{}/{:>03s}.jpg".format(
                            "/home/woody/tmp/openpose/test",
                            str(imageArray['key']))
                        if imageArray['apm'] == False:
                            if imageArray['count'] > E_COUNT:
                                cv2.imwrite(save_path, imageArray["image"])
                                redis.hset("key", str(count),
                                           pickle.dumps(imageArray["image"]))
                                image_data = {}
                                image_data["key"] = key
                                image_data["image"] = image
                                images.append(image_data)
                                for image_info in images:
                                    save_path = "{}/{:>03s}.jpg".format(
                                        "/home/woody/tmp/openpose/li",
                                        str(image_info['key']))
                                    cv2.imwrite(save_path, image_info["image"])
                                images = []
                        imageArray = {}
                        images = []
                else:
                    imageArray['index'] = count
                    imageArray['start'] = count
                    imageArray['count'] = 0
                    imageArray['apm'] = False
                    imageArray['image'] = output_image
                    imageArray['key'] = key
            else:
                save_path = "{}/{:>03s}.jpg".format(
                    "/home/woody/tmp/openpose/wu", str(key))
                cv2.imwrite(save_path, image)
                if 'index' in imageArray.keys():
                    if imageArray['start'] < count - E_TOTAIL:
                        save_path = "{}/{:>03s}.jpg".format(
                            "/home/woody/tmp/openpose/test",
                            str(imageArray['key']))
                        if imageArray['apm'] == False:
                            if imageArray['count'] > E_COUNT:
                                cv2.imwrite(save_path, imageArray["image"])
                                redis.hset("key", str(count),
                                           pickle.dumps(imageArray["image"]))
                                #image_data = {}
                                #image_data["key"] = key
                                #image_data["image"] = image
                                #images.append(image_data)
                                for image_info in images:
                                    save_path = "{}/{:>03s}.jpg".format(
                                        "/home/woody/tmp/openpose/li",
                                        str(image_info['key']))
                                    cv2.imwrite(save_path, image_info["image"])
                                images = []
                        imageArray = {}
                        images = []

            logger.info("buletotail>>>" + str(time.time() - st))
            count = count + 1
            logger.info("end ===============" + key)
            logger.info(str(count) + ",totail>>>" + str(time.time() - start))
            index += 1
        f_count = f_count + 1
    if 'index' in imageArray.keys():
        save_path = "{}/{:>03s}.jpg".format("/home/woody/tmp/openpose/test",
                                            str(imageArray['key']))
        if imageArray['apm'] == False:
            if imageArray['count'] > E_COUNT:
                cv2.imwrite(save_path, imageArray["image"])
                redis.hset("key", str(count),
                           pickle.dumps(imageArray["image"]))
                #image_data = {}
                #image_data["key"] = key
                #image_data["image"] = image
                #images.append(image_data)
                for image_info in images:
                    save_path = "{}/{:>03s}.jpg".format(
                        "/home/woody/tmp/openpose/li", str(image_info['key']))
                    cv2.imwrite(save_path, image_info["image"])
                images = []
    logger.info(">>>" + str(time.time() - start))
    video.release()
    logger.info("Totally save {:d} pics".format(index - 1))
Example #52
sensor_data_col = [
    'baro', 'tof', 'h', 'pitch', 'roll', 'yaw', 'vgx', 'vgy', 'vgz', 'agx',
    'agy', 'agz', 'runtime'
]
sensor_data_types = {
    'baro': float,
    'tof': int,
    'h': int,
    'pitch': int,
    'roll': int,
    'yaw': int,
    'vgx': int,
    'vgy': int,
    'vgz': int,
    'agx': float,
    'agy': float,
    'agz': float,
    'runtime': int
}
max_data_len = 2500

redis = redis.StrictRedis(host='localhost', port=6379, db=0)
keys = redis.keys('*')
data = {}
for i in range(len(keys)):
    keys[i] = keys[i].decode('ASCII')
    data[keys[i]] = np.char.decode(redis.lrange(keys[i], 0, max_data_len))
dataframe = pandas.DataFrame(data)
dataframe = dataframe.astype(sensor_data_types)
dataframe[sensor_data_col].to_csv(export_file)
# dataframe[sensor_data_col].info()
Example #53
 def elements(self):
     """Return all elements as a Python list"""
     all_elements = redis.lrange(self.key, 0, -1) or [ ]
     return all_elements
Example #54
 dup = DuplicateImageService(uowm, event_logger, reddit, config=config)
 monitor = SubMonitor(dup,
                      uowm,
                      reddit_manager,
                      response_builder,
                      ResponseHandler(reddit_manager,
                                      uowm,
                                      event_logger,
                                      source='submonitor',
                                      live_response=config.live_responses),
                      event_logger=event_logger,
                      config=config)
 redis = get_redis_client(config)
 while True:
     while True:
         queued_items = redis.lrange('submonitor', 0, 20000)
         if len(queued_items) == 0:
             log.info('Sub monitor queue empty.  Starting over')
             break
         log.info('Sub monitor queue still has %s tasks', len(queued_items))
         time.sleep(60)
     with uowm.start() as uow:
         monitored_subs = uow.monitored_sub.get_all()
         for monitored_sub in monitored_subs:
             if not monitored_sub.active:
                 continue
             log.info('Checking sub %s', monitored_sub.name)
             if not monitored_sub.active:
                 log.debug('Sub %s is disabled', monitored_sub.name)
                 continue
             if not monitored_sub.check_all_submissions:
Example #55
def data():
    data = redis.lrange('data', 0, -1)
    redis.delete('data')
    return json.dumps(data)
Example #56
 def memory_usage(self):
     base_key = '%s:memory_usage:system:' % self.host
     free = redis.lrange(base_key + 'free', -200, -1)
     used = redis.lrange(base_key + 'used', -200, -1)
     usage = redis.lrange(base_key + 'usage', -200, -1)
     return map(clean_timeseries_array, usage)
Example #57
import requests
import json
import re
import time
import datetime
import random
import redis
redis=redis.Redis(host="127.0.0.1",port=6379,db=2,decode_responses=True)
url="http://fundgz.1234567.com.cn/js/{}.js?rt=1589463125600"
daihao=redis.lrange("errorFundCode",0,-1)
print(time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time())))
print("number of errorFundCode: ",len(daihao))
num=0
redis.delete("errorFundCode")
for i in daihao:
    try:
        r=requests.get(url.format(i),timeout=3)
        # print(url.format(i))
        text = re.findall('\((.*?)\)', r.text)[0]
        dic=json.loads(text)
        print("[{}] ".format(num),text)
    except Exception as e:
        # print("wrong daihao: ",i)
        redis.lpush("errorFundCode",i)
        continue
    latest=redis.lindex(i,-1)
    if latest==None:
        redis.rpush(i,text)
        num+=1
        print("[{}] new add: ".format(num),i)
    else:
Example #58
def get_module_actions(module):
    key = "module-action-%s" % module
    data = redis.lrange(key, 0, -1)
    data = [json.loads(d) for d in data]
    data = sorted(data, key=lambda x: x["date"])
    return data
Example #59
def get_module_actions(module):
    key = "module-action-%s" % module
    data = redis.lrange(key, 0, -1)
    data = [json.loads(d) for d in data]
    data = sorted(data, key=lambda x: x["date"])
    return data