Example #1
0
def resubmit_jobs():
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    log.debug('_create_or_update_package')
    harvest_object_pending = redis.keys('harvest_object_id:*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        if (datetime.datetime.now() - date_of_key).seconds > 180: # 3 minutes for fetch and import max
            redis.rpush('harvest_object_id',
                json.dumps({'harvest_object_id': key.split(':')[-1]})
            )
            redis.delete(key)

    harvest_jobs_pending = redis.keys('harvest_job_id:*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        if (datetime.datetime.now() - date_of_key).seconds > 7200: # 3 hours for a gather
            redis.rpush('harvest_job_id',
                json.dumps({'harvest_job_id': key.split(':')[-1]})
            )
            redis.delete(key)
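A caveat on the age check used in this example: timedelta.seconds is only the seconds component of the difference (0-86399) and ignores whole days, so an object stuck for a day and a few seconds looks fresh again. A minimal sketch of the safer comparison, reusing the names and imports from the snippet above:

def is_stale(redis, key, max_age_seconds):
    # Compare the full elapsed time; timedelta.seconds alone wraps every 24 hours.
    date_of_key = datetime.datetime.strptime(redis.get(key),
                                             "%Y-%m-%d %H:%M:%S.%f")
    age = datetime.datetime.now() - date_of_key
    return age.total_seconds() > max_age_seconds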
Example #2
0
def prepare_sms_object():

	sms_keys = redis.keys('sms_*')
	no_of_sms = len(sms_keys)
	sms_text = []
	sms_time = []
	sms_from = []
	sms_to = []
	sms_key = []
	sms_replied = []

	for key in sms_keys:
		sms_replied.append(redis.hget(key, 'Replied'))
		sms_text.append(redis.hget(key, 'Text'))
		sms_from.append(redis.hget(key, 'From'))
		sms_to.append(redis.hget(key, 'Plivo DID'))
		sms_time.append(redis.hget(key, 'Time Received'))
		sms_key.append(key)

	sms_object = {'sms_from': sms_from, 'sms_to': sms_to, 'sms_text': sms_text,
					'sms_time': sms_time, 'sms_key': sms_key, 'sms_replied': sms_replied}

	response = make_response(render_template("sms_dashboard.html", no_of_sms=no_of_sms - 1,
				sms_object=sms_object))
	response.headers['Content-type'] = 'text/html'
	return response
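The loop above pays five HGET round trips per key. A minimal sketch of the same collection step done with one HGETALL per key (collect_sms is a hypothetical helper; the field names are taken from the hashes above):

def collect_sms(redis):
	# hypothetical helper: one HGETALL per key instead of five HGETs
	rows = []
	for key in redis.keys('sms_*'):
		sms = redis.hgetall(key)  # all fields of the hash in a single round trip
		rows.append({'sms_key': key,
					'sms_replied': sms.get('Replied'),
					'sms_text': sms.get('Text'),
					'sms_from': sms.get('From'),
					'sms_to': sms.get('Plivo DID'),
					'sms_time': sms.get('Time Received')})
	return rows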
Example #3
0
def resubmit_jobs():
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    log.debug('_create_or_update_package')
    harvest_object_pending = redis.keys('harvest_object_id:*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        if (datetime.datetime.now() - date_of_key
            ).seconds > 180:  # 3 minutes for fetch and import max
            redis.rpush('harvest_object_id',
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    harvest_jobs_pending = redis.keys('harvest_job_id:*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        if (datetime.datetime.now() -
                date_of_key).seconds > 7200:  # 3 hours for a gather
            redis.rpush('harvest_job_id',
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
Example #4
0
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously old.
    These are removed from the queues and placed back on them afresh, to ensure
    the fetch & gather consumers are triggered to process them.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).seconds > 180:
            redis.rpush(get_fetch_routing_key(),
                json.dumps({'harvest_object_id': key.split(':')[-1]})
            )
            redis.delete(key)

    # gather queue
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 3 hours for a gather
        if (datetime.datetime.now() - date_of_key).seconds > 7200:
            redis.rpush(get_gather_routing_key(),
                json.dumps({'harvest_job_id': key.split(':')[-1]})
            )
            redis.delete(key)
Example #6
0
def add_custom_commute_score_to_all(walk_weight=1,
                                    bike_weight=1,
                                    transit_weight=1,
                                    drive_weight=1):
    listings = redis.keys('house-search:listings/*')
    if walk_weight < 0:
        walk_weight = 1
        log.info("Reset negative walk weight to 1")

    if bike_weight < 0:
        bike_weight = 1
        log.info("Reset negative bike weight to 1")

    if transit_weight < 0:
        transit_weight = 1
        log.info("Reset negative transit weight to 1")

    if drive_weight < 0:
        drive_weight = 1
        log.info("Reset negative drive weight to 1")

    weighted_sum = walk_weight + bike_weight + transit_weight + drive_weight
    if weighted_sum <= 0:
        # Someone entered weird values
        log.info("Resetting weights due to weighted_sum being <=0")
        weighted_sum = 4
        walk_weight = bike_weight = transit_weight = drive_weight = 1

    for i in listings:
        k = i.decode()
        if "latitude" in k or "longitude" in k or "/downtown" in k or "/poi/" in k or "score" in k:
            # these keys do not represent a listing
            pass
        else:
            try:
                walk_score = bike_score = transit_score = drive_score = 0
                pois = redis.keys(k + '/poi/*')
                if len(pois) > 0:
                    for place in pois:
                        str_place = place.decode()
                        data = ast.literal_eval(redis.get(str_place).decode())
                        walk_score = walk_score + calculate_walk_score(
                            data['walk_time'])
                        bike_score = bike_score + calculate_bike_score(
                            data['bike_time'])
                        drive_score = drive_score + calculate_drive_score(
                            data['drive_time'])
                        transit_score = transit_score + calculate_transit_score(
                            data['transit_time'],
                            data['transit_route']['routes'][0])
                    score = (walk_score * walk_weight + bike_score *
                             bike_weight + transit_score * transit_weight +
                             drive_score * drive_weight) / (weighted_sum *
                                                            len(pois))
                    redis.set(k + "/custom_commute_score", score)
            except Exception:
                # log.exception() already appends the traceback
                log.exception("Failed to compute commute score for %s", k)
Example #7
0
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously old.
    These are removed from the queues and placed back on them afresh, to ensure
    the fetch & gather consumers are triggered to process them.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        redis_value = redis.get(key)
        if redis_value is None:
            log.info(
                'Fetch Queue: Redis cannot get value for key {}'.format(key))
            continue
        date_of_key = datetime.datetime.strptime(redis_value,
                                                 "%Y-%m-%d %H:%M:%S.%f")
        log.debug('[Fetch queue]: Check key {} with value {}'.format(
            key, date_of_key))
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).seconds > 180:
            log.debug(
                '[Fetch queue]: Re-new harvest object with KEY {} in redis'.
                format(key))
            redis.rpush(get_fetch_routing_key(),
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    # gather queue
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        redis_value = redis.get(key)
        if redis_value is None:
            log.info(
                'Gather Queue: Redis cannot get value for key {}'.format(key))
            continue
        date_of_key = datetime.datetime.strptime(redis_value,
                                                 "%Y-%m-%d %H:%M:%S.%f")
        log.debug('[Gather queue]: Check key {} with value {}'.format(
            key, date_of_key))
        # 3 hours for a gather
        if (datetime.datetime.now() - date_of_key).seconds > 7200:
            log.debug(
                '[Gather queue]: Re-new harvest job with KEY {} in redis'.
                format(key))
            redis.rpush(get_gather_routing_key(),
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
Example #8
0
def add_custom_commute_score_to_one(location,
                                    walk_weight=1,
                                    bike_weight=1,
                                    transit_weight=1,
                                    drive_weight=1):
    listings = redis.keys('house-search:listings/*')
    if walk_weight < 0:
        walk_weight = 1
        log.info("Reset negative walk weight to 1")

    if bike_weight < 0:
        bike_weight = 1
        log.info("Reset negative bike weight to 1")

    if transit_weight < 0:
        transit_weight = 1
        log.info("Reset negative transit weight to 1")

    if drive_weight < 0:
        drive_weight = 1
        log.info("Reset negative drive weight to 1")

    weighted_sum = walk_weight + bike_weight + transit_weight + drive_weight
    if weighted_sum <= 0:
        # Someone entered weird values
        log.info("Resetting weights due to weighted_sum being <=0")
        weighted_sum = 4
        walk_weight = bike_weight = transit_weight = drive_weight = 1

    pois = redis.keys(location.listing_key + '/poi/*')
    if len(pois) > 0:
        try:
            walk_score = bike_score = transit_score = drive_score = 0
            for place in pois:
                str_place = place.decode()
                data = ast.literal_eval(redis.get(str_place).decode())
                walk_score = walk_score + calculate_walk_score(
                    data['walk_time'])
                bike_score = bike_score + calculate_bike_score(
                    data['bike_time'])
                drive_score = drive_score + calculate_drive_score(
                    data['drive_time'])
                transit_score = transit_score + calculate_transit_score(
                    data['transit_time'], data['transit_route']['routes'][0])
            score = (walk_score * walk_weight + bike_score * bike_weight +
                     transit_score * transit_weight +
                     drive_score * drive_weight) / (weighted_sum * len(pois))
            redis.set(location.listing_key + "/custom_commute_score", score)
        except Exception:
            # log.exception() already appends the traceback
            log.exception("Failed to compute commute score for %s",
                          location.listing_key)
Example #9
0
	def GET(self):
		header = render.header()
		nav = render.nav('master')
		playlist_keys = redis.keys('playlist:*')
		playlists = []
		for i in playlist_keys:
			playlists.append(i.split(':')[1])
		playlists = sorted(playlists)
		cue_keys = redis.keys("cue:*")
		cues = []
		for i in cue_keys:
			cues.append(i.split(':')[1])
		cues = sorted(cues)
		return render.master(header, nav, playlists, cues)
Example #10
0
def inject_availability_message():
    if len(redis.keys('AVAILABILITY_MESSAGE')) == 1:
        return {
            "availability_message":
            redis.get('AVAILABILITY_MESSAGE').decode('utf-8')
        }
    return {}
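KEYS walks the whole keyspace server-side even when the pattern is an exact name. For a single known key, a plain GET, which returns None when the key is absent, does the same job in O(1). A minimal sketch of the same handler:

def inject_availability_message():
    message = redis.get('AVAILABILITY_MESSAGE')  # None when the key is absent
    if message is not None:
        return {"availability_message": message.decode('utf-8')}
    return {}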
Example #11
0
def do_create_total(redis):
    count = 0
    for key in redis.keys(FORMAT_LOGIN_DATE_TABLE4FISH%('*')):
        count+=convert_util.to_int(redis.scard(key))

    print 'do_create_total  total[%s]'%(count)
    redis.set("fish:login:per:day:total",count)
Example #12
0
def getYearData(year):
    '''
    '''
    if len(year) != 4:
        return {"error": "Invalid parameter"}
    redis = brconfig.getRedis()
    #redis=getRedis()
    pipe = redis.pipeline(transaction=True)
    keyList = redis.keys("ids:date:%s????" % year)
    monthList = []
    for key in keyList:
        monthList.append(key[9:15])  #like:201206

    monthList = list(set(monthList))  #remove duplicate
    yearData = {}
    for month in monthList:
        monthData = getMonthData(month)
        errorCount = 0
        liveCount = 0
        for item in monthData:
            errorCount += monthData[item]["error"]
            liveCount += monthData[item]["live"]
        yearData[month[4:6]] = {
            "error": errorCount,
            "live": liveCount,
            "link": "summary?month=%s" % month
        }
    return yearData
Example #13
0
	def POST(self):
		playlists = []
		playlist_keys = redis.keys('playlist:*')
		for i in playlist_keys:
			playlists.append(i.split(':')[1])
		print playlists
		return json.dumps(playlists)
Example #14
0
def export_data_to_csv():
    try:
        records = []
        for key in redis.keys():
            if key not in [
                    'last_block', 'last_block_time_seconds',
                    'last_usage_total_sent'
            ]:
                accounts = list(set([key[:-12] for key in redis.hkeys(key)]))
                for account in accounts:
                    cpu_usage_us = redis.hget(key, f'{account}-cpu-archive')
                    net_usage_words = redis.hget(key, f'{account}-net-archive')
                    record = {
                        'date': key,
                        'account': account,
                        'cpu_usage_us': cpu_usage_us,
                        'net_usage_words': net_usage_words
                    }
                    records.append(record)
        with open('/data/accounts-usage.csv', 'w', encoding='utf8',
                  newline='') as output_file:
            fc = csv.DictWriter(output_file, fieldnames=records[0].keys())
            fc.writeheader()
            fc.writerows(records)
        logger.info('Exported DB to CSV!')
    except Exception as e:
        logger.info('Could not export data!')
        logger.info(traceback.format_exc())
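Note that redis-py returns bytes from keys() and hkeys() unless the client is created with decode_responses=True, so the membership test and the f-strings above assume str keys. A minimal sketch of a client setup under which the snippet works as written (connection parameters are placeholders):

import redis as redis_lib

# decode_responses=True makes keys(), hkeys() and hget() return str instead of
# bytes, so slicing and f-string formatting behave as intended
redis = redis_lib.Redis(host='127.0.0.1', port=6379, db=0, decode_responses=True)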
Example #15
0
def list_users():
    users = {}
    for key in redis.keys():
        if key.startswith(redis_prefix):
            name = key[len(redis_prefix):]
            users[name] = load(name)
    return users
Example #16
0
def getYearData(year):
    '''
    '''
    if len(year) != 4:
        return {"error": "Invalid parameter"}
    redis = brconfig.getRedis()
    #redis=getRedis()
    pipe = redis.pipeline(transaction=True)
    keyList = redis.keys("ids:date:%s????" % year)
    monthList = []
    for key in keyList:
        monthList.append(key[9:15])  # like: 201206

    monthList = list(set(monthList))  # remove duplicates
    yearData = {}
    for month in monthList:
        monthData = getMonthData(month)
        errorCount = 0
        liveCount = 0
        for item in monthData:
            errorCount += monthData[item]["error"]
            liveCount += monthData[item]["live"]
        yearData[month[4:6]] = {
            "error": errorCount,
            "live": liveCount,
            "link": "summary?month=%s" % month
        }
    return yearData
Example #17
0
 def print_all(redis, name):
     """
     Function to print all the entries in our database
     :param redis: Redis connection
     :param name: task name
     """
     if name is None:
         # printing names of all the task lists in db
         tasks = redis.keys()
         if tasks:
             stri = "Here are the names of all the task lists:\n"
             for task in tasks:
                 stri += task + '\n'
             return stri
         else:
             logging.warning('Oops... something went wrong. List is empty')
             return 'Nothing to print.'
     else:
         # printing all the tasks in the task list
         stri = ''
         tasks = List.pull_from_redis(redis, name, False)
         if tasks:  # pull_from_redis may return None or an empty result
             for key, value in tasks.iteritems():
                 stri += 'Task ' + key + ': ' + '\n'
                 for k, v in value.iteritems():
                     stri += k + ': ' + v + '\n'
                 stri += '--------------\n'
             return stri
         else:
             logging.warning('Oops... something went wrong. List is empty')
             return 'Nothing to print.'
Example #18
0
	def GET(self):
		header = render.header()
		nav = render.nav('clips')
		cue_keys = redis.keys("cue:*")
		cues = []
		for i in cue_keys:
			cues.append(i.split(':')[1])
		return render.clips(header, nav, cues)
Example #19
0
def injectGlobals(bot, fromSession):
    # Get the list of keys stored in this session
    q = "{}.{}.*.*".format(GLOBALS_KEY, fromSession)
    keys = redis.keys(q)
    for k in keys:
        keyname = k.split('.')[-2]
        value = retrieveVariable(k)
        bot.__dict__[keyname] = value
Example #20
0
def cull_playlists(item):
	playlist_keys = redis.keys('playlist:*')
	for i in playlist_keys:
		playlist_name = i.split(':')[1]
		playlist = json.loads(redis.get(i))
		cull_playlist(playlist, item)
		redis.set('playlist:' + playlist_name, json.dumps(playlist))
		redis.save()
Example #21
0
 def disks_usage(self):
     disks = {}
     disks_for_host = redis.keys('%s:disk_usage:*' % self.host)
     for disk in disks_for_host:
         disk_name = disk.split(':')[-2]
         usage = map(clean_timeseries_array, redis.lrange(disk, -200, -1))
         disks[disk_name] = usage
     return disks
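map() returns a list only on Python 2; on Python 3 it is a lazy one-shot iterator, so reading a usage series twice would come up empty. A sketch of the same method with an eager list comprehension:

 def disks_usage(self):
     disks = {}
     for disk in redis.keys('%s:disk_usage:*' % self.host):
         disk_name = disk.split(':')[-2]
         # a list comprehension is evaluated eagerly on Python 2 and 3 alike
         disks[disk_name] = [clean_timeseries_array(sample)
                             for sample in redis.lrange(disk, -200, -1)]
     return disks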
Example #23
0
def do_create_recharge_total(redis):
    """ Tally the grand total of all recharges """
    count = 0
    for key in redis.keys(FISH_SYSTEM_DATE_RECHARGE_TOTAL%('*')):
        print key
        count+=convert_util.to_int(redis.hget(key,'recharge_coin_total'))

    print 'do_create_recharge_total  total[%s]'%(count)
    redis.set(FISH_SYSTEM_RECHARGE_TOTAL,count)
Example #24
0
	def GET(self):
		header = render.header()
		nav = render.nav('scheduling')
		playlist_keys = redis.keys("playlist:*")
		playlists = []
		for i in playlist_keys:
			playlists.append(i.split(':')[1])
		playlists = sorted(playlists)
		return render.scheduling(header, nav, playlists)
Example #25
0
def render_list(key):
    strategies = {
                '::': render
              , ':#': render
              , ':-': lambda x: '\n'.join([ render(each) for each in redis.keys(x + ':*') ])
              , '': str
            }
    return '\n'.join ([ definite_strategy (strategies, key[0:2], key, '')
            for key in redis.lrange ( key, 0, -1 ) ])
Example #26
0
def top():
    keys = sorted(redis.keys())
    res = ''
    for key in keys:
        s = '<p>%s %s</p>' % (key.decode('utf-8'),
                              redis.get(key).decode('utf-8'))
        res += s
    print(res)  #debug
    return res
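Fetching each value with a separate GET costs one round trip per key; MGET retrieves them all at once. A minimal sketch of the same report built on it:

def top():
    keys = sorted(redis.keys())
    if not keys:
        return ''
    values = redis.mget(keys)  # one round trip for every value
    return ''.join('<p>%s %s</p>' % (k.decode('utf-8'), v.decode('utf-8'))
                   for k, v in zip(keys, values)
                   if v is not None)  # a key may expire between KEYS and MGET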
Example #27
0
def strings_redis():
    import redis
    #charset="utf-8", decode_responses=True => avoid b' in redis python
    redis = redis.Redis(host='127.0.0.1',
                        port=6379,
                        db=0,
                        charset="utf-8",
                        decode_responses=True)
    print("-------------")
    print("STRINGS")
    print("-------------")

    #info()
    print(redis.info())
    print("-------------")

    #monitor()
    print(redis.monitor())
    print("-------------")

    #set()
    redis.set("name", "javier")
    redis.set("name", "jaime")
    print("key: ", redis.get("name"))
    print("-------------")
    print("all keys: ", redis.keys())
    print("keys with a 'name...': ", redis.keys("name*"))
    print("keys with a 'e': ", redis.keys("*e*"))
    print("-------------")

    #setnx(name, value)
    redis.set("name", "javier")

    #mset(mapping) - duplicate keys in a dict literal collapse, so use two distinct keys
    redis.mset({"name": "peter", "name2": "david"})
    print("names: ", redis.mget("name", "name2"))
    print("-------------")

    #getrange(name, start, end) - substrings of the value
    print("range : ", redis.getrange("name", 0, 3))

    #delete all keys matching a prefix
    for key in redis.scan_iter("prefix:*"):
        redis.delete(key)
Example #28
0
def index():
  tweets = [redis.get(k) for k in redis.keys('tweet:*')]
  return """
  <form action="/tweet" method="post">
  <textarea name="message"></textarea><br />
  <input type="submit" name="submit" value="Enviar">
  </form>
  <br />
  %s
  """ % '<br /><br />'.join(tweets) 
Example #29
0
	def POST(self):
		print "GETTING CLIPS"
		cues = {}
		cue_keys = redis.keys('cue:*')
		print cue_keys
		for i in cue_keys:
			cueName = i.split(':')[1]
			cues[cueName] = redis.get(i)
		print cues
		return json.dumps(cues)
Example #30
0
def repr_list(key):
    strategies = {
                '::': repr
              , ':#': repr
              , ':!': repr
              , ':-': lambda x: [ repr(each) for each in redis.keys(x + ':*') ]
              , '': str
            }
    return [ definite_strategy (strategies, key[0:2], key, '')
            for key in redis.lrange ( key, 0, -1 ) ]
Example #31
0
def prune_data():
    try:
        for key in redis.keys():
            if key not in ['last_block', 'last_block_time_seconds', 'last_usage_total_sent']:
                if datetime.strptime(key, '%Y-%m-%d') < datetime.utcnow() - timedelta(days=8):
                    redis.delete(key)
                    logger.info(f'Deleted old data from DB: {key}')
    except Exception as e:
        logger.info('Could not prune data!')
        logger.info(traceback.format_exc())
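When the retention window is fixed, Redis can do the pruning itself: giving each day's key a TTL at write time makes the scan-and-compare pass unnecessary. A minimal sketch, assuming a write helper of this shape exists somewhere upstream:

def store_usage(redis, day_key, mapping):
    # hypothetical write helper: save the day's usage hash, then let Redis
    # expire it after the same 8-day window the pruner above enforces
    redis.hset(day_key, mapping=mapping)
    redis.expire(day_key, 8 * 24 * 3600)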
Example #32
0
def edit_playlists(old_name, new_name):
	print "fixing playlists"
	playlist_keys = redis.keys('playlist:*')
	for i in playlist_keys:
		playlist_name = i.split(':')[1]
		playlist = json.loads(redis.get(i))
		#print playlist
		edit_playlist(playlist, old_name, new_name)
		redis.set('playlist:' + playlist_name, json.dumps(playlist))
		redis.save()
Example #33
0
def dump_all(redis=r):
    keys = redis.keys('*')
    pairs = {}
    for key in keys:
        type = redis.type(key)
        val = redis.get(key)
        try:
            pairs[key] = eval(val)
        except:
            pass
    return pairs
Example #34
0
def route_post_initialize():
    redis = get_redis()

    for key in redis.keys('isu4:*'):
        redis.delete(key)

    shutil.rmtree(get_dir('log'))

    response = make_response('OK')
    response.headers['Content-Type'] = 'text/plain'
    return response
Example #35
0
def dump_all(redis=r):
    keys = redis.keys('*')
    pairs = {}
    for key in keys:
        type = redis.type(key)
        val = redis.get(key)
        try:
            pairs[key] = eval(val)
        except Exception as e:
            print pairs, key, val, e
    return pairs
Example #37
0
def _get_like_list(ad_id):
    likes = redis.keys('user:like:*:%s' % ad_id)
    like_list = []

    for like in likes:
        l_user = redis.get(like)
        l_date = redis.get('date:%s' % ":".join(like.split(':')[1:]))

        like_list.append({'user': l_user, 'date': l_date})

    return like_list
Example #38
0
 def pull_redis_queue(self, host="localhost", port=6379, **kwargs):  # pull the queues from the given redis instance
     if not redis_enable:
         raise RedisImportException
     conn = redis.Redis(host=host, port=port, **kwargs)  # a local named `redis` would shadow the module
     for key in conn.keys():
         if key[:11] == "redis_queue":
             self.queue_dict[key] = RedisQ(key, **kwargs)
             if "redis_queue" in self.queue_name_counter:
                 self.queue_name_counter["redis_queue"] += 1
             else:
                 self.queue_name_counter["redis_queue"] = 1  # first queue found
Example #39
0
 def process_queues(cls):
     with utils.get_redis_for_cache() as redis:
         LOG.info("smart strict workflow loop start")
         for queue_name in redis.keys("strict-merge-queues~*"):
             queue = cls.from_queue_name(redis, queue_name)
             try:
                 queue.process()
             except exceptions.MergifyNotInstalled:
                 queue.delete()
             except Exception:
                 queue.log.error("Fail to process merge queue", exc_info=True)
         LOG.info("smart strict workflow loop end")
Example #40
0
def calc_per_recharge():
    if not redis.exists(FISH_SYSTEM_RECHARGE_TOTAL):
        do_create_recharge_total(redis)
    else:

        already_create_day = len(redis.keys(FISH_SYSTEM_DATE_RECHARGE_TOTAL%('*')))
        already_recharge_total = convert_util.to_int(redis.get(FISH_SYSTEM_RECHARGE_TOTAL))
        result = already_recharge_total/already_create_day
        print 'already_create_day[%s] already_recharge[%s] result[%s]'%(already_create_day,already_recharge_total,result)
        redis.set("fish:per:recharge:rate",result)
    # delete the day's recharge user-count stats
    redis.delete(FISH_RECHARGE_USER_DAY_IDS)
Example #41
0
	def _get_scheduled_items(self, params):
		date = params.date
		date_key = 'scheduledItem:' + date + ':*'
		scheduled_keys = redis.keys(date_key)
		scheduled_items = {}
		for i in scheduled_keys:
			print "s item" + i
			split_key = i.split(':')
			hour = split_key[2]
			minute = split_key[3]
			playlist = redis.get(i)
			scheduled_items[hour + ':' + minute] = playlist
		return json.dumps(scheduled_items)
Example #42
0
def calc_per_login():
    if not redis.exists("fish:login:per:day:total"):
        do_create_total(redis)
    else:
        today = date.today()

        do_add_yesterday_data(redis,today-timeDelt)

    already_create_day = len(redis.keys(FORMAT_LOGIN_DATE_TABLE4FISH%('*')))
    already_login_total = convert_util.to_int(redis.get("fish:login:per:day:total"))
    result = already_login_total/already_create_day
    print 'already_create_day[%s] already_login_total[%s] result[%s]'%(already_create_day,already_login_total,result)
    redis.set("fish:per:login:rate",result)
Example #43
0
def get_recipes(search):
    # returns all recipes with the search string in the name or description
    results = []
    for r in [redis.get(k) for k in redis.keys('*recipe*')]:
        r = loads(r)
        score = compute_match(search, r)
        if score > 0:
            results.append((r, score))
    sorted_r = sorted(results, key=operator.itemgetter(1))
    sorted_r.reverse()
    print sorted_r
    i = min(len(sorted_r), 10)
    return sorted_r[:i] 
Example #44
0
 def process_queues(cls):
     # NOTE(sileht): Don't use the celery retry mechanism here, the
     # periodic tasks already retries. This ensure a repo can't block
     # another one.
     redis = utils.get_redis_for_cache()
     LOG.info("smart strict workflow loop start")
     for queue_name in redis.keys("strict-merge-queues~*"):
         queue = cls.from_queue_name(redis, queue_name)
         queue.log.info("handling queue")
         try:
             queue.process()
         except Exception:
             queue.log.error("Fail to process merge queue", exc_info=True)
     LOG.info("smart strict workflow loop end")
Example #45
0
 def pull_redis_queue(self,
                      host="localhost",
                      port=6379,
                      **kwargs):  # pull the queues from the given redis instance
     if not redis_enable:
         raise RedisImportException
     conn = redis.Redis(host=host, port=port, **kwargs)  # avoid shadowing the redis module
     for key in conn.keys():
         if key[:11] == "redis_queue":
             self.queue_dict[key] = RedisQ(key, **kwargs)
             if "redis_queue" in self.queue_name_counter:
                 self.queue_name_counter["redis_queue"] += 1
             else:
                 self.queue_name_counter["redis_queue"] = 1  # first queue found
Example #46
0
	def GET(self):
		header = render.header()
		nav = render.nav('playlists')
		playlists = []
		playlist_keys = redis.keys('playlist:*')
		cues = []
		cue_keys = redis.keys('cue:*')
		for i in cue_keys:
			cues.append(i.split(':')[1])
		for i in playlist_keys:
			playlist_name = i.split(':')[1]
			playlist = json.loads(redis.get(i))
			#new_item = {'name':playlist_name, 'val':playlist}
			playlists.append({'name':playlist_name, 'val': playlist})
			#playlists[playlist_name] = playlist
			#playlists = sorted(playlists)
		playlists = sorted(playlists, key=lambda k: k['name']) 
		playlist_keys = []
		for i in playlists:
			playlist_keys.append(i['name'])
		print "returning plists"
		print playlists
		return render.playlists(header, nav, playlists, cues, playlist_keys)
Example #47
0
def merge():
    topic_count = defaultdict(int)

    f = "word2count.txt"

    keys = redis.keys("*")
    for pos, key in enumerate(keys):
        l = redis.hgetall(key)
        print "1",pos, key
        for k,v in l.iteritems():
            topic_count[int(k)]+=int(v)

    #word_topic_freq = defaultdict(list)

    with open("word_tf.txt", "w") as word_freq:
        for pos, word in enumerate(keys):
            tf = []
            l = redis.hgetall(word)
            for topic, freq in l.iteritems():
                topic = int(topic)
                count = topic_count[topic]
                if count < 10000:
                    continue
                freq = int(freq)*500000/count
                if freq > 0:
                    tf.append((topic, freq))

            fcount = sum(i[1] for i in tf)

            tf = dict(tf)
            id = NAME2ID.get(name_tidy(word), 0)
            if id:
                t = tf.get(id,0)
                diff = fcount - t
                tf[id] = fcount
                fcount += diff

            if not fcount:
                continue

            t = []
            for topic, f in tf.iteritems():
                rank = int(f*10000/fcount)
                if rank:
                    t.append((topic, rank))
            if t:
                word_freq.write(
                    dumps([word, t])+"\n"
                )
Example #48
0
def retrieve_mail(rcpt):
  output = ""
  try:
    for key in redis.keys():
      if key != 'mail:id':
        if redis.hget(key, 'To:') == rcpt:
          mail = redis.hgetall(key)
          output += 'Date: ' + mail['Date:'] + '\n'
          output += 'From: ' + mail['From:'] + '\n'
          output += 'Message: \n' + mail['Message:'] + '\n'
          output += "-------------------------------------\n"
  except:
    print 'Unexpected error:', sys.exc_info()[0]
    exit(1)
  return output
Example #49
0
def stations():
    try:
        keys = [
            key.decode().split(':').pop()
            for key in redis.keys('services:*')
        ]

        keys.sort()

        key = 'services:{}'.format(keys.pop())

        return redis.get(key)
    except:
        subprocess.Popen(['scrapy', 'crawl', 'bikesampa'])

        return json.dumps([])
Example #50
0
def getInvitationData(redis):
    returnData = []
    for invitation in redis.keys():
        data = json.loads(redis.get(invitation))
        data['user'] = invitation.decode()
        data['url'] = getRegistrationUrl(invitation.decode(), data['token'])
        del data['token']
        returnData.append(data)

    if len(returnData) == 1:  # add an empty row so json2table converts consistently
        emptyDict = data.copy()
        for key in emptyDict.keys():
            emptyDict[key] = ''
        returnData.append(emptyDict)
    return returnData
Example #51
0
def merge():
    topic_count = defaultdict(int)

    f = "word2count.txt"

    keys = redis.keys("*")
    for pos, key in enumerate(keys):
        l = redis.hgetall(key)
        print "1", pos, key
        for k, v in l.iteritems():
            topic_count[int(k)] += int(v)

    #word_topic_freq = defaultdict(list)

    with open("word_tf.txt", "w") as word_freq:
        for pos, word in enumerate(keys):
            tf = []
            l = redis.hgetall(word)
            for topic, freq in l.iteritems():
                topic = int(topic)
                count = topic_count[topic]
                if count < 10000:
                    continue
                freq = int(freq) * 500000 / count
                if freq > 0:
                    tf.append((topic, freq))

            fcount = sum(i[1] for i in tf)

            tf = dict(tf)
            id = NAME2ID.get(name_tidy(word), 0)
            if id:
                t = tf.get(id, 0)
                diff = fcount - t
                tf[id] = fcount
                fcount += diff

            if not fcount:
                continue

            t = []
            for topic, f in tf.iteritems():
                rank = int(f * 10000 / fcount)
                if rank:
                    t.append((topic, rank))
            if t:
                word_freq.write(dumps([word, t]) + "\n")
Example #52
0
def _is_reached_limit_like(user):
    hour_ago = datetime.now() - timedelta(hours=1)
    like_counter = 0
    limit = 5

    likes = redis.keys('user:like:*:%s' % 'ad:*')
    like_list = []

    for like in likes:
        l_user = redis.get(like)
        l_date = redis.get('date:%s' % ":".join(like.split(':')[1:]))

        if user == l_user and hour_ago < datetime.strptime(
                l_date, '%Y-%m-%d %H:%M'):
            like_counter += 1

    return like_counter >= limit
Example #53
0
def setStuInfo2Class():

    stu = Student.instance()
    redis = stu.getDB()
    sks = redis.keys('student:20*')
    for sk in sks:
        stuid = sk[8:]
        try:
            si = stu.getStuInfo(stuid)
        except Exception,e:
            continue
        if si is None:
            print "student None:",stuid
            continue
        cid = si['class']
        ct = si['teacher']
        try:
            ci = stu.getClassInfo(cid)
        except Exception,e:
            print e
            continue
Example #54
0
   #  Constructing IO Devices
   #  a remote has to be attached to a controller
   #  Multiple controllers can interface to udp server but not to same controller
   #

   cf.add_udp_io_sever(name="main_remote", ip="192.168.1.82", redis_key="MODBUS_STATISTICS:127.0.0.1", remote_type="UDP", port=5005)
   cf.add_rtu_interface(name="rtu_2", protocol="modify_modbus", baud_rate=38400)
   cf.add_remote(name="satellite_1", modbus_address=100, irrigation_station_number=44, card_dict={"open": "Remote 1 Open Wire", "short": "Remote 1 Shorted Selenoid", "connectivity": "Remote 1 Connectivity"})
   cf.add_remote(name="satellite_2", modbus_address=125, irrigation_station_number=22, card_dict={"open": "Remote 2 Open Wire", "short": "Remote 2 Shorted Selenoid", "connectivity": "Remote 2 Connectivity"})
   cf.add_remote(name="satellite_3", modbus_address=170, irrigation_station_number=22, card_dict={"open": "Remote 3 Open Wire", "short": "Remote 3 Shorted Selenoid", "connectivity": "Remote 3 Connectivity"})
   cf.end_rtu_interface()
   cf.end_udp_io_server()
   cf.end_controller()
   cf.end_site()
   cf.end_system()
   keys = redis.keys("*")
   
   for i in keys:
      print "+++++++++++++:"
      print i
      temp = i.split(common.sep)
      print len(temp)
      print redis.hgetall(i)
      print "----------------"
   print "length", len(keys)
   print "testing query functions"
   
   print qc.match_labels("CONTROLLER")  # match a single item
   temp = qc.match_labels("REMOTE")  # match a single item
   print len(temp), temp
Example #55
0
import redis
import chardet

from logicalblocks import getblocks
from logicalblocks import getdistance
from clusters import makeclusters

redis = redis.Redis(host='localhost', port=6379, db=0)

# DEBUG levels
# extractor 
# distance
# cluster 
# generic
DEBUG = 'clusters'
#DEBUG = 'distance'

filter = "*2015/02/13/7058423*"
keys = redis.keys(filter)
id = 0
for key in keys:
    id = id + 1
    type = redis.type(key)

    value = ''
    if type == 'string':
        val = redis.get(key)
        html = val 

        result = chardet.detect(html)
        charset = result['encoding']
        if charset == 'utf-8':
            data = html
        else:
Example #56
0
def find(redis, key_namespace, predicate, action):
	keys = redis.keys(key_namespace + "*")
	for key in keys:
		value = redis.get(key)
		if predicate(key, value):
			action(key, value)
Example #57
0
def delete_all(redis, prefix):
    for key in redis.keys(prefix + "*"):  # use the prefix argument rather than a global
        redis.delete(key)
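KEYS blocks the server while it scans the whole keyspace, so for bulk deletes the incremental SCAN family is generally preferred. A minimal sketch of the same helper built on scan_iter:

def delete_all(redis, prefix):
    for key in redis.scan_iter(prefix + "*"):  # cursor-based, does not block the server
        redis.unlink(key)  # non-blocking delete; needs Redis 4+, otherwise use delete()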
Example #58
0
def del_all(redis=r):
    keys = redis.keys('*')
    for k in keys:
        print 'Deleting:', k, 'result is', redis.delete(k)
Example #59
0
       for i in keys:
          self.redis.delete(i)


if __name__ == "__main__":
   # test driver
   redis = redis.StrictRedis(host="127.0.0.1", port=6379, db=11)
   common = Redis_Graph_Common(redis)
   redis_key, new_namespace = common.construct_node([], "", "head", "head")
   print redis_key, new_namespace
   print redis.hgetall(redis_key)

   redis_key, new_namespace = common.construct_node(new_namespace, "relation 1", "level_one", "h1")
   print redis_key, new_namespace
   print redis.hgetall(redis_key)
   redis_key, new_namespace = common.construct_node(new_namespace, "relation 2", "level_two", "h2")
   print redis_key, new_namespace
   print redis.hgetall(redis_key)

   print "simple match"
   print common.match("relation 2", "level_two", "h2")
   print "starting match"
   print common.match("*", "level_two", "h2", [["", "head", "head"]])

   print "all the keys"
   print redis.keys("*")
   print "none of the keys"
   common.delete_all()
   print redis.keys("*")