def test_cleanup(self):
    """ Ensure no side effects are left in the db after a delete. """
    redis = connect(None)

    class RomTestCleanupA(Model):
        foo = Text()
        blist = OneToMany('RomTestCleanupB')

    class RomTestCleanupB(Model):
        bar = Text()
        a = ManyToOne('RomTestCleanupA')

    a = RomTestCleanupA(foo='foo')
    a.save()
    b = RomTestCleanupB(bar='foo', a=a)
    b.save()
    b.delete()
    self.assertFalse(redis.hkeys('RomTestCleanupB:%d' % b.id))
    a.delete()
    self.assertFalse(redis.hkeys('RomTestCleanupA:%d' % a.id))

    # Test delete() where a column value does not change. This affects
    # the writer logic which checks for deltas as a means to determine
    # what keys should be removed from the redis hash bucket.
    a = RomTestCleanupA(foo='foo')
    a.save()
    b = RomTestCleanupB(bar='foo', a=a)
    b.save()
    a.delete()  # Nullify FK on b.
    self.assertFalse(redis.hkeys('RomTestCleanupA:%d' % a.id))
    session.rollback()  # XXX purge session cache
    b = RomTestCleanupB.get(b.id)
    b.delete()  # Nullify FK on b.
    self.assertFalse(redis.hkeys('RomTestCleanupB:%d' % b.id))
def get_all_url_from_redis_set(self):
    urls = redis.hkeys("url_title")
    for i in urls:
        if len(redis.hget("url_title", i)) != 2:
            redis.hset("can_use_urls", i.decode("utf8"),
                       redis.hget("url_title", i))
            print("set {} ok!".format(i.decode("utf8")))
def export_data_to_csv():
    try:
        records = []
        # NOTE: these string comparisons assume a client created with
        # decode_responses=True; a default client returns bytes keys.
        for key in redis.keys():
            if key not in [
                    'last_block', 'last_block_time_seconds',
                    'last_usage_total_sent'
            ]:
                accounts = list(set([field[:-12] for field in redis.hkeys(key)]))
                for account in accounts:
                    cpu_usage_us = redis.hget(key, f'{account}-cpu-archive')
                    net_usage_words = redis.hget(key, f'{account}-net-archive')
                    record = {
                        'date': key,
                        'account': account,
                        'cpu_usage_us': cpu_usage_us,
                        'net_usage_words': net_usage_words
                    }
                    records.append(record)
        # records[0] assumes at least one record was collected; an empty DB
        # raises IndexError and falls through to the except below.
        with open('/data/accounts-usage.csv', 'w', encoding='utf8',
                  newline='') as output_file:
            fc = csv.DictWriter(output_file, fieldnames=records[0].keys())
            fc.writeheader()
            fc.writerows(records)
        logger.info('Exported DB to CSV!')
    except Exception as e:
        logger.info('Could not export data!')
        logger.info(traceback.format_exc())
def edit(request, room):
    was_private = room.is_private
    if request.method == 'POST':
        form = RoomForm(request.POST, instance=room)
        if form.is_valid():
            form.save(commit=False)
            room.save()
            if room.is_private and not was_private:
                redis = create_redis_connection()
                redis.publish('web_channel', 'room_private:' + str(room.id))
                try:
                    redis.hdel(redis_room_key(room.id),
                               *redis.hkeys(redis_room_key(room.id)))
                except:
                    pass
            return HttpResponseRedirect(request.get_full_path())
    else:
        form = RoomForm(instance=room)
    response_data = {'form': form}
    if room.is_private:
        response_data['invited_users'] = room.invited.order_by('username').all()
        response_data['users'] = User.objects.exclude(pk=room.owner.id).exclude(
            rooms__pk=room.id).order_by('username').all()
    return response_data
def test_cleanup(self):
    """ Ensure no side effects are left in the db after a delete. """
    redis = connect(None)

    class RomTestCleanupA(Model):
        foo = Text()
        blist = OneToMany("RomTestCleanupB", "no action")

    class RomTestCleanupB(Model):
        bar = Text()
        a = ManyToOne("RomTestCleanupA")

    a = RomTestCleanupA(foo="foo")
    a.save()
    b = RomTestCleanupB(bar="foo", a=a)
    b.save()
    b.delete()
    self.assertFalse(redis.hkeys("RomTestCleanupB:%d" % b.id))
    a.delete()
    self.assertFalse(redis.hkeys("RomTestCleanupA:%d" % a.id))

    # Test delete() where a column value does not change. This affects
    # the writer logic which checks for deltas as a means to determine
    # what keys should be removed from the redis hash bucket.
    a = RomTestCleanupA(foo="foo")
    a.save()
    b = RomTestCleanupB(bar="foo", a=a)
    b.save()
    aid = a.id
    apk = a._pk
    self.assertTrue(b.a)
    a.delete()  # Nullify FK on b.
    self.assertFalse(redis.hkeys("RomTestCleanupA:%d" % a.id))
    # verify removal from the session object
    self.assertFalse(RomTestCleanupA.get(aid))
    self.assertFalse(apk in session.known)
    self.assertFalse(apk in session.wknown)
    session.rollback()  # XXX purge session cache
    b = RomTestCleanupB.get(b.id)
    self.assertFalse(b.a)
    b.delete()  # Nullify FK on b.
    self.assertFalse(redis.hkeys("RomTestCleanupB:%d" % b.id))
import bisect
import time

from redis.exceptions import WatchError

# Number of samples to keep per counter (per the docstring below).
SAMPLE_COUNT = 120


def clean_counter(redis):
    """
    Clean up counters, keeping only the most recent 120 samples.
    (Assumes a client created with decode_responses=True so zrange/hkeys
    return str rather than bytes.)
    :param redis:
    :return:
    """
    # To treat counters with different update frequencies fairly, keep track
    # of how many cleanup passes have been performed.
    passes = 0
    while True:
        p = redis.pipeline(True)
        start = time.time()
        # Incrementally walk through all known counters.
        index = 0
        while index < redis.zcard('test:known:'):
            # Fetch the data for the counter being inspected.
            hash = redis.zrange('test:known:', index, index)
            index += 1
            if not hash:
                break
            hash = hash[0]
            prec = int(hash.partition(":")[0])
            # The cleaner loops every 60 seconds, so use the counter's update
            # frequency to decide whether it really needs cleaning this pass.
            bprec = int(prec // 60) or 1
            # If this counter does not need cleaning on this pass, check the
            # next one.
            if passes % bprec:
                continue
            hkey = 'test:count:' + hash
            # Given the precision and the number of samples to keep, work out
            # the timestamp before which samples can be discarded.
            cutoff = time.time() - SAMPLE_COUNT * prec
            samples = sorted(map(int, redis.hkeys(hkey)))
            remove = bisect.bisect_right(samples, cutoff)
            if remove:
                print("===========================================")
                print(hkey, samples[:remove])
                redis.hdel(hkey, *samples[:remove])
                if remove == len(samples):
                    try:
                        # Watch the counter hash before trying to modify it.
                        p.watch(hkey)
                        if not p.hlen(hkey):
                            p.multi()
                            p.zrem('test:known:', hash)
                            p.execute()
                            # When a counter was deleted, the next iteration
                            # can reuse the same index.
                            index -= 1
                        else:
                            p.unwatch()
                    except WatchError:
                        pass
        passes += 1
        duration = min(int(time.time() - start) + 1, 60)
        print("sleep")
        time.sleep(max(60 - duration, 1))
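# --- Hedged illustration (not from the original source): the trimming step in
# clean_counter relies on bisect_right over sorted sample timestamps. The toy
# helper below, with made-up names and data, shows exactly which samples a
# given cutoff discards.
import bisect


def samples_to_drop(sample_times, cutoff):
    """Return the leading samples whose timestamp is <= cutoff."""
    sample_times = sorted(sample_times)
    remove = bisect.bisect_right(sample_times, cutoff)
    return sample_times[:remove]


# Samples at 100s and 160s are older than the 200s cutoff and get dropped:
assert samples_to_drop([100, 160, 220, 280], cutoff=200) == [100, 160]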
def redisKey(request):
    if request.method == 'GET':
        keyword = request.GET.get('keyword', '')
    elif request.method == 'POST':
        keyword = request.POST.get('keyword', '')
    # NOTE: hkeys() returns bytes with a default redis-py client, so this
    # membership test assumes a connection with decode_responses=True.
    keys = redis.hkeys(KEYWORDS)
    if keyword in keys:
        redis.hincrby(KEYWORDS, keyword, amount=1)
    else:
        redis.hset(KEYWORDS, keyword, 2)
    return HttpResponse()
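# --- Hedged sketch (names and connection settings are assumptions, not part
# of the original view): HINCRBY creates missing fields, so the hkeys()/hset()
# branching above can collapse into round trips on the hash itself while
# preserving the "new keywords start at 2" behaviour.
import redis as redis_lib

r = redis_lib.Redis(decode_responses=True)  # assumed connection settings


def count_keyword(hash_key, keyword):
    new_value = r.hincrby(hash_key, keyword, 1)  # missing fields start at 0
    if new_value == 1:
        # Field did not exist before; seed it at 2 like the hset(..., 2) branch.
        new_value = r.hincrby(hash_key, keyword, 1)
    return new_value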
def delete(request, room):
    redis = create_redis_connection()
    redis.publish('web_channel', 'room_deleted:' + str(room.id))
    try:
        redis.hdel(redis_room_key(room.id),
                   *redis.hkeys(redis_room_key(room.id)))
    except:
        pass
    room.delete()
    return HttpResponseRedirect(reverse('rooms.views.my_list'))
def clearPagingCache():
    key = 'paging_token'
    redis = getRedis()
    pipe = redis.pipeline(transaction=True)
    now = time.time()
    tokens = redis.hkeys(key)
    for token in tokens:
        expireTime = int(redis.hget(key, token))
        if expireTime < now:
            pipe.hdel(key, token)
            pipe.delete("tmp_paging_%s" % token)
    pipe.execute()
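# --- Alternative sketch (not from the original code; same assumed key layout,
# and assumes getRedis() returns a decode_responses=True client): hscan_iter
# streams field/value pairs, so expired tokens can be found without a separate
# hget per token.
def clearPagingCacheScan():
    key = 'paging_token'
    redis = getRedis()
    pipe = redis.pipeline(transaction=True)
    now = time.time()
    for token, expireTime in redis.hscan_iter(key):
        if int(expireTime) < now:
            pipe.hdel(key, token)
            pipe.delete("tmp_paging_%s" % token)
    pipe.execute()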
def show_cart():
    user_id = g.current_user.id
    redis = client_redis()
    goods = redis.hkeys(user_id)
    goods_list = []  # renamed from `list` to avoid shadowing the builtin
    total = 0
    for i in goods:
        count = redis.hget(user_id, i).decode("utf-8")
        good = json.loads(i)
        good["amount"] = count
        total += good.get("goods_price") * int(count)
        goods_list.append(good)
    return jsonify({"status": "true", "goods_list": goods_list,
                    "totalCost": total})
def get_matches_for_champion(players, champ, begin_time=None, cache_ignore=False):
    # NOTE: the original used a datetime.utcnow()-based default argument,
    # which is evaluated once at import time; a None sentinel avoids that.
    if begin_time is None:
        begin_time = datetime.utcnow() - timedelta(weeks=1)
    if not cache_ignore:
        # NOTE: assumes a decode_responses=True client, so hkeys() yields str.
        keys = redis.hkeys('player_matches')
        if keys:
            champ_keys = [x for x in keys if "_{}".format(champ['id']) in x]
            if len(champ_keys) > 10:
                matches = []
                for k, v in redis.hscan_iter('player_matches',
                                             '*_{}'.format(champ['id'])):
                    if v:
                        matches += json.loads(v)
                return matches
    matches = []
    last_week = int(time.mktime(begin_time.timetuple())) * 1000
    for region in players.keys():
        for player in players[region]:
            if not cache_ignore:
                this_player = redis.hget('player_matches',
                                         "{}_{}".format(player['id'], champ['id']))
                if this_player:
                    print(u"CACHE HIT - {}'s {} matches".format(
                        player['name'], champ['name']))
                    # NOTE: the original did `matches += this_player`, which
                    # extended the list with the characters of the cached JSON
                    # string; it has to be decoded first.
                    matches += json.loads(this_player)
                    continue
            print(u"NETWORK - {}'s {} matches".format(player['name'], champ['name']))
            this_player = []
            page = riot.get_match_list(player['id'], region=region,
                                       champion_ids=champ['id'],
                                       ranked_queues='TEAM_BUILDER_DRAFT_RANKED_5x5',
                                       begin_time=last_week)
            while 'matches' in page.keys() and page['matches']:
                for m in page['matches']:
                    if m['champion'] != champ['id'] or m['queue'] == 'CUSTOM':
                        continue
                    this_player.append({'lane': m['lane'], 'matchId': m['matchId'],
                                        'region': m['region'], 'role': m['role']})
                if len(this_player) == page['totalGames']:
                    break
                time.sleep(1)
                print(u"NETWORK INNER - {}'s {} matches".format(
                    player['name'], champ['name']))
                page = riot.get_match_list(player['id'], region=region,
                                           champion_ids=champ['id'],
                                           ranked_queues='TEAM_BUILDER_DRAFT_RANKED_5x5',
                                           begin_time=last_week,
                                           begin_index=page['endIndex'])
            if this_player:
                redis.hset('player_matches',
                           "{}_{}".format(player['id'], champ['id']),
                           json.dumps(this_player))
                matches += this_player
            time.sleep(2)
    return matches
def mycreate(addr, user):
    user_id = user.id
    redis = client_redis()
    goods = redis.hkeys(user_id)
    total = 0
    # Instantiate the order. (`order_tiem` is kept as-is; it matches the
    # attribute name defined on the model.)
    b1 = BuyerOrder()
    b1.user_id = user_id
    b1.order_code = shop_pid()
    b1.order_address = addr
    b1.order_tiem = datetime.now()
    for i in goods:
        count = redis.hget(user_id, i).decode("utf-8")
        # Goods info.
        good = json.loads(i)
        # Order quantity.
        good["amount"] = count
        # Order price.
        total += good.get("goods_price") * int(count)
        good_obj = DishesInfoModel.query.filter_by(
            goods_id=good["goods_id"]).first()
        # Attach the order details and the ordered goods.
        b1.shop_id = good_obj.cate.shop.id
        b1.order_price = total
        b1.goods.append(
            OrderGoodsModel(goods_id=good_obj.id,
                            goods_name=good["goods_name"],
                            goods_img=good["goods_img"],
                            goods_price=good["goods_price"],
                            amount=int(good['amount'])))
    db.session.add(b1)
    db.session.commit()
    return b1
def get_all_split_url_to_redis(self):
    all_page_num = 0
    for i in redis.hkeys("can_use_urls"):
        all_page_num += 1
        head_url = i.decode('utf8')
        print(head_url)
        base_url = head_url[:len(head_url) - len('Index.aspx')]
        modol_url = base_url + "Index_{}" + ".aspx"
        response = requests.get(head_url, timeout=5)
        time.sleep(0.5)
        html = response.text
        page = etree.HTML(html)
        url_details = page.xpath('//span[@class="disabled"]/text()')
        if not url_details:
            continue
        # The scraped pager text reads "/共N页" ("N pages total").
        max_page = re.search("/共(.*?)页", str(url_details)).group(1)
        urls = [head_url]
        for page_index in range(2, int(max_page) + 1):
            urls.append(modol_url.format(page_index))
            all_page_num += 1
        redis.hset("all_urls", head_url, str(urls))
    print("all page :{}".format(all_page_num))
def AddbatMessage(batname, scopetype=1):
    # get batname and expfilename from redis
    if redis.hget('batdo', batname) is not None:
        expFileName = str(redis.hget('batdo', batname).decode('utf-8'))
        expFilepath = str(redis.hget('batfilepath', batname).decode('utf-8'))
        if scopetype == 1:
            # get center db userid pwd
            dbname = str(redis.hget('dbname', '10').decode('utf-8'))
            dbip = str(redis.hget('dbip', '10').decode('utf-8'))
            dbuser = str(redis.hget('dbuser', '10').decode('utf-8'))
            dbpwd = str(redis.hget('dbpwd', '10').decode('utf-8'))
            messagebody = getMessagebody(batname, dbname,
                                         expFilepath + '\\' + expFileName,
                                         dbip, dbuser, dbpwd)
            channel.basic_publish(exchange='', routing_key='cps1',
                                  body=messagebody)
            print(' [x] process %s' % (messagebody, ))
        else:
            for key in redis.hkeys('dbip'):
                if key.decode('utf-8') != '10':  # exclude 010 db
                    skey = str(key.decode('utf-8'))
                    dbname = str(redis.hget('dbname', skey).decode('utf-8'))
                    dbip = str(redis.hget('dbip', skey).decode('utf-8'))
                    dbuser = str(redis.hget('dbuser', skey).decode('utf-8'))
                    dbpwd = str(redis.hget('dbpwd', skey).decode('utf-8'))
                    messagebody = getMessagebody(
                        batname, dbname,
                        expFilepath + '\\' + expFileName + skey,
                        dbip, dbuser, dbpwd)
                    # Print the body actually being published; the original
                    # recomputed it without the expFilepath prefix.
                    print('[x] process %s' % (messagebody, ))
                    channel.basic_publish(exchange='', routing_key='cps1',
                                          body=messagebody)
    else:
        print('%s does not exist' % (batname, ))
def get_all_pag_url_to_redis(self):
    values = redis.hkeys("all_urls")
    urls = set()
    page_num = 0
    urls_num = 0
    for url in values:
        url = url.decode("utf8")
        split_urls = redis.hget("all_urls", url).decode("utf8")
        # NOTE: eval() of a stored string is unsafe; ast.literal_eval would be
        # a safer way to parse the list written by get_all_split_url_to_redis.
        for i in eval(split_urls):
            try:
                response = requests.get(i, timeout=5)
                time.sleep(0.5)
                html = response.text
                page = etree.HTML(html)
                page_urls = page.xpath("//li/a[contains(@href,'Item')]/@href")
                for page_url in page_urls:
                    urls.add(page_url)
                    print("{} add over".format(page_url))
                    urls_num += 1
                print("{} already get all url".format(i))
            except Exception as e:
                print(e)
                print(i)
                print(url)
                continue
            page_num += 1
            print("{} page get!".format(page_num))
    print("{} url get!".format(urls_num))
    url_s = ''
    for i in urls:
        url_s += ',' + i
        print(i)
    redis.hset('all_splite_url', str(urls), url_s)
def hash_hset():
    redis.hset('hash1', 'k1', 'v1')
    redis.hset('hash1', 'k2', 'v2')
    print(redis.hkeys('hash1'))
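# --- Quick hedged demo of the snippet above, assuming a local Redis on the
# default port: field names come back as bytes unless the client is created
# with decode_responses=True.
import redis as redis_lib

raw = redis_lib.Redis()
decoded = redis_lib.Redis(decode_responses=True)
raw.hset('hash1', 'k1', 'v1')
print(raw.hkeys('hash1'))      # [b'k1', ...]
print(decoded.hkeys('hash1'))  # ['k1', ...]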
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import redis

print(redis.__file__)

# Connect; a different database index can be selected here.
redis = redis.Redis(host='localhost', port=6379, db=0)

info = redis.info()
print(info)

keys = redis.keys("*")
print(keys)

keys = redis.hkeys("keys")
print(keys)
print(len(keys))

key__value = redis.hget("key", 'hashkey')
print(key__value)
def get_rated_films(user_id):
    if not user_id:
        return set()
    user_key = "user:%s:ratings" % user_id
    return set(int(id) for id in redis.hkeys(user_key))
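# --- Illustrative usage (the helper and the sample data are assumptions, not
# part of the original): ratings live in one hash per user, film id -> rating.
def rate_film(user_id, film_id, rating):
    redis.hset("user:%s:ratings" % user_id, film_id, rating)

# rate_film(42, 7, 5)
# rate_film(42, 13, 3)
# get_rated_films(42)  -> {7, 13}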
def aggregate_period_data(period_start):
    logger.info(
        f'Aggregating data for period {seconds_to_time_string(period_start)}')
    # NOTE: the suffix comparison assumes a decode_responses=True client.
    period_accounts = sorted([
        key[:-4]
        for key in redis.hkeys('AGGREGATION_DATA_' + str(period_start))
        if key[-4:] == '-cpu'
    ])
    total_cpu_usage_us = 0
    total_net_usage_words = 0
    usage_datasets = [[]]
    usage_dataset_hashes = []
    if len(period_accounts) > 0:
        for i in range(0, len(period_accounts), DATASET_BATCH_SIZE):
            individual_usage_data = []
            individual_usage_hash_string = ''
            accounts = period_accounts[i:i + DATASET_BATCH_SIZE]
            if len(accounts) > 0:
                for account in accounts:
                    cpu_usage = int(
                        redis.hget('AGGREGATION_DATA_' + str(period_start),
                                   f'{account}-cpu'))
                    net_usage = int(
                        redis.hget('AGGREGATION_DATA_' + str(period_start),
                                   f'{account}-net'))
                    individual_usage_data.append({
                        'a': account,
                        'u': cpu_usage
                    })
                    individual_usage_hash_string += account + str(cpu_usage)
                    total_cpu_usage_us += cpu_usage
                    total_net_usage_words += net_usage
            else:
                pass  # finished
            usage_datasets.append(individual_usage_data)
            usage_dataset_hashes.append(
                hashlib.sha256(
                    individual_usage_hash_string.encode("utf8")).hexdigest())
    total_usage_hash = hashlib.sha256(
        (str(total_cpu_usage_us) + '-' +
         str(total_net_usage_words)).encode("utf8")).hexdigest()
    usage_dataset_hashes = [total_usage_hash] + usage_dataset_hashes
    all_data_hash = hashlib.sha256(
        ('-'.join(usage_dataset_hashes)).encode("utf8")).hexdigest()
    data = {
        'total_cpu_usage_us': total_cpu_usage_us,
        'total_net_usage_words': total_net_usage_words,
        'total_usage_hash': total_usage_hash,
        'all_data_hash': all_data_hash,
        'usage_datasets': usage_datasets
    }
    logger.info(
        f'Total CPU: {total_cpu_usage_us}, Total NET: {total_net_usage_words}, '
        f'Totals Hash: {total_usage_hash}, All Data hash: {all_data_hash}')
    # remove from AGGREGATION_DATA and add to SUBMISSION_DATA
    p = redis.pipeline()
    p.set('SUBMISSION_DATA_' + str(period_start), json.dumps(data))
    # NOTE: the original queued one DEL of this same key per account; a single
    # DEL of the aggregation hash has the identical effect.
    p.delete('AGGREGATION_DATA_' + str(period_start))
    p.execute()
# make redis dictionary "SYS:FILES"
# store json_object to redis data "global_sensors"
import os
from os import listdir
from os.path import isfile, join
import base64
import json

import redis

redis = redis.StrictRedis(host="127.1.1.1", port=6379, db=0)

app_files = "/home/pi/new_python/app_data_files/"
sys_files = "/home/pi/new_python/system_data_files/"

app_list = redis.hkeys("APP_FILES")
sys_list = redis.hkeys("SYS_FILES")


class APP_FILES:
    def __init__(self, redis):
        self.path = app_files
        self.key = "FILES:APP"
        self.redis = redis

    def file_directory(self):
        return self.redis.hkeys(self.key)

    def delete_file(self, name):
        self.redis.hdel(self.key, name)
    model_value, trim:{trim_name:trim_value,}
    }
"""
# redis = redis.StrictRedis()
# ret = redis.hmset('params:make10100005:model10100027', {'haha': 'heihei'})
# print(ret)
# rep = redis.hgetall('params:make10100005:model10100027')
# print(rep)
#
# ret = redis.hmset('params:makeValue', {'haha1': 'heihei1'})
# print(ret)
# rep = redis.hgetall('params:makeValue')
# print(rep)

# redis = redis.StrictRedis()
# redis.hmset('a', {'haha': 'heihei'})
# ret = redis.hgetall('a')
# print(ret)
# redis.delete('a')
# redis.hmset('a', {'haha2': 'heihei2'})
# ret = redis.hgetall('a')
# print(ret)

redis = redis.StrictRedis(decode_responses=True)
# ret = redis.hget('make', 'Audi').decode()
# print(ret)
# ret = redis.hget('make', 'Audi222')
# print(ret)
ret = redis.hkeys('make')
print(ret)
import redis
import os
import sys

rootpath = 'g:/mig/data/'
redis = redis.Redis(host='10.96.142.109', port=6380, db=2)

# get all batfile
filelist = redis.hkeys('batdo')
for item in filelist:
    filename = str(item.decode('utf-8'))
    if redis.hget('batfilepath', filename) is not None:
        batpath = rootpath + str(
            redis.hget('batfilepath', filename).decode('utf-8'))
        if not os.path.exists(batpath):
            os.makedirs(batpath)
        scope = redis.hget('batscope', filename)
        if scope is not None:
            if str(scope.decode('utf-8')) == 'seg':
                batcmdA = 'copy '
                batcmdB = 'copy '
                expfile = str(redis.hget('batdo', filename).decode('utf-8'))
                batcmdFile = batpath + '/' + expfile + '.bat'
                for segitem in redis.smembers('segment'):
                    segcode = str(segitem.decode('utf-8'))
                    iSegcode = int(segcode[0])
                    if iSegcode > 3:
                        batcmdB = batcmdB + expfile + segcode + '.csv+'
                    else:
                        batcmdA = batcmdA + expfile + segcode + '.csv+'
                    newfile = batpath + '/' + expfile + segcode + '.csv'
                    if not os.path.exists(newfile):
                        f = open(newfile, 'w')
def save_matches_info(matches, champ):
    for m in matches:
        # NOTE: this membership test assumes a decode_responses=True client.
        if "{}_{}_{}".format(m['region'], m['matchId'], champ['id']) in redis.hkeys('match_details'):
            continue
        match = redis.hget('match_infos', '{}_{}'.format(m['region'], m['matchId']))
        if match:
            match = json.loads(match)
        else:
            match = riot.get_match(m['matchId'], region=m['region'].lower(),
                                   include_timeline=True)
            redis.hset('match_infos', '{}_{}'.format(m['region'], m['matchId']),
                       json.dumps(match))
        participantId = None
        participant = None
        for p in match['participants']:
            if p['championId'] == champ['id']:
                participant = p
                participantId = p['participantId']
                break
        team = None
        for t in match['teams']:
            if t['teamId'] == participant['teamId']:
                team = t
                break
        won = team['winner']
        stats = participant['stats']
        kills, deaths, assists = stats['kills'], stats['deaths'], stats['assists']
        first_blood = stats['firstBloodKill']
        first_blood_assist = stats['firstBloodAssist']
        tanks_friendly_team = 0
        tanks_enemy_team = 0
        lane_partner = None
        for p in match['participants']:
            friendly = p['teamId'] == participant['teamId']
            tags = champion_data_by_id[p['championId']]
            if 'Tank' in tags:
                if friendly:
                    tanks_friendly_team += 1
                else:
                    tanks_enemy_team += 1
            if friendly:
                continue
            lane, role = None, None
            for t in p['timeline']:
                if 'lane' not in t or 'role' not in t:
                    continue
                lane = t['lane']
                role = t['role']
                break
            if not (lane and role):
                continue
            # NOTE: the original raised ValueError('{}, {}'.format(lane, role))
            # here, which made the rest of the loop unreachable; it looks like
            # leftover debugging and has been removed.
            if lane == m['lane'] and role == m['role']:
                lane_partner = p
                break
        if lane_partner:
            lane_partner_champ = champion_data_by_id[lane_partner['championId']]
            # NOTE: the original compared against lane_partner['info']['magic'],
            # but participants carry no 'info' key; the champion data was
            # almost certainly intended.
            lane_partner_ad = (lane_partner_champ['info']['attack'] >
                               lane_partner_champ['info']['magic'])
        else:
            lane_partner_ad = False
        items = []
        for e in match['timeline']['frames']:
            if 'events' not in e.keys():
                continue
            for ev in e['events']:
                if ev['eventType'] == 'ITEM_PURCHASED' and ev['participantId'] == participantId:
                    # I don't care about biscuits or health potions or wards or trinkets
                    if ev['itemId'] in (2003, 2010, 2043, 3340, 3341, 3361, 3362, 3363, 3364):
                        continue
                    item_info = item_data[ev['itemId']]
                    # Don't care about base items, only upgrades
                    if 'depth' not in item_info:
                        continue
                    # Don't care about level 3 boot upgrades
                    if 'group' in item_info and 'boots' in item_info['group'].lower():
                        continue
                    items.append(ev['itemId'])
        trimmed_items = []
        for i, item_id in enumerate(items):
            item = item_data[item_id]
            prev_items = trimmed_items[:]
            if prev_items and 'from' in item:
                # if the last few items all build into this item, but they're
                # different parts of the tree (i.e. they don't upgrade into
                # each other) we shouldn't have the entire build path in the
                # item list
                from_items = item['from']
                last_item = prev_items.pop()
                while str(last_item) in from_items:
                    if 'from' in item_data[last_item]:
                        from_items += item_data[last_item]['from']
                    trimmed_items.remove(last_item)
                    if not prev_items:
                        break
                    last_item = prev_items.pop()
            prev_items = trimmed_items[:]
            if prev_items and 'from' in item:
                # if the N-1th or N-2nd item is something that upgrades into this, skip it
                last_item = prev_items.pop()
                last_last_item = prev_items.pop() if prev_items else None
                if str(last_item) in item['from']:
                    trimmed_items.remove(last_item)
                if str(last_last_item) in item['from']:
                    trimmed_items.remove(last_last_item)
            trimmed_items.append(item_id)
        items = trimmed_items
        details = {
            'championId': champ['id'],
            'won': won,
            'duration': match['matchDuration'],
            'kills': kills,
            'deaths': deaths,
            'assists': assists,
            'first_blood_kill': first_blood,
            'first_blood_assist': first_blood_assist,
            'lane_enemy_ad': lane_partner_ad,
            'purchases': items
        }
        redis.hset('match_details',
                   "{}_{}_{}".format(m['region'], m['matchId'], champ['id']),
                   json.dumps(details))
        print("Parsed and saved match {}\n".format(m['matchId']))
        pprint(details)
        print("-" * 30)
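# --- Standalone hedged sketch of the purchase-trimming idea above: drop an
# item from the build list when a later purchase lists it (directly or
# transitively) among its components. ITEM_COMPONENTS is illustrative data,
# not the Riot item catalogue, and the helper names are made up.
ITEM_COMPONENTS = {
    3078: [3057, 3077],  # finished item built from two components
    3077: [1036],
    3057: [1027, 1036],
}


def trim_build_path(purchases):
    def components(item_id):
        # Transitive closure of an item's build tree.
        out = set()
        for c in ITEM_COMPONENTS.get(item_id, []):
            out.add(c)
            out |= components(c)
        return out

    trimmed = []
    for item_id in purchases:
        comps = components(item_id)
        trimmed = [i for i in trimmed if i not in comps]
        trimmed.append(item_id)
    return trimmed


# Every earlier purchase builds into 3078, so only the finished item remains:
assert trim_build_path([1036, 3077, 1027, 3057, 3078]) == [3078]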
def submit_resource_usage():
    try:
        response = {}
        t = datetime.utcnow()
        current_date_start = datetime(t.year, t.month, t.day, tzinfo=None)
        last_block_time = datetime.utcfromtimestamp(
            int(redis.get('last_block_time_seconds')))
        previous_date_start = current_date_start - timedelta(days=1)
        previous_date_string = previous_date_start.strftime("%Y-%m-%d")
        previous_date_accounts = [
            key[:-12] for key in redis.hkeys(previous_date_string)
            if key[-12:] == '-cpu-current'
        ]

        if last_block_time >= current_date_start:
            current_date_string = current_date_start.strftime("%Y-%m-%d")
            current_date_accounts = [
                key[:-12] for key in redis.hkeys(current_date_string)
                if key[-12:] == '-cpu-current'
            ]
            logger.info(
                f"Collating today's records... {len(current_date_accounts)} accounts so far."
            )

            if len(previous_date_accounts) > 0:
                # if totals for previous date haven't been sent, calculate and send them now
                if redis.get('last_usage_total_sent') != previous_date_string:
                    total_cpu_usage_us = 0
                    total_net_usage_words = 0
                    for account in previous_date_accounts:
                        total_cpu_usage_us += int(
                            redis.hget(previous_date_string,
                                       f'{account}-cpu-current'))
                        total_net_usage_words += int(
                            redis.hget(previous_date_string,
                                       f'{account}-net-current'))
                    action = {
                        "account": CONTRACT_ACCOUNT,
                        "name": "settotal",
                        "authorization": [{
                            "actor": SUBMISSION_ACCOUNT,
                            "permission": SUBMISSION_PERMISSION,
                        }],
                        "data": {
                            "source": SUBMISSION_ACCOUNT,
                            "total_cpu_quantity": total_cpu_usage_us,
                            "total_net_quantity": total_net_usage_words,
                            "time": int(previous_date_start.timestamp())
                        }
                    }
                    logger.info(
                        f'Submitting resource usage totals for {previous_date_string}...'
                    )
                    tx = {'actions': [action]}
                    logger.info(tx)
                    # response = requests.post('http://eosjsserver:3000/push_transaction', json=tx, timeout=10).json()
                    # NOTE: with the POST above disabled, logging
                    # response["transaction_id"] would raise KeyError, so the
                    # success log stays commented out alongside it.
                    # logger.info(f'Transaction {response["transaction_id"]} successfully submitted!')
                    redis.set('last_usage_total_sent', previous_date_string)
                    time.sleep(5)

                # send unsubmitted data
                actions = []
                for account in previous_date_accounts[:MAX_ACCOUNTS_PER_SUBMISSION]:
                    cpu_usage_us = redis.hget(previous_date_string,
                                              f'{account}-cpu-current')
                    net_usage_words = redis.hget(previous_date_string,
                                                 f'{account}-net-current')
                    action = {
                        "account": CONTRACT_ACCOUNT,
                        "name": "adddistrib",
                        "authorization": [{
                            "actor": SUBMISSION_ACCOUNT,
                            "permission": SUBMISSION_PERMISSION,
                        }],
                        "data": {
                            "source": SUBMISSION_ACCOUNT,
                            "account": account,
                            "cpu_quantity": cpu_usage_us,
                            "net_quantity": net_usage_words,
                            "time": int(previous_date_start.timestamp())
                        }
                    }
                    actions.append(action)
                logger.info(
                    f'Submitting resource usage stats for {previous_date_string}...'
                )
                tx = {'actions': actions}
                logger.info(tx)
                # response = requests.post('http://eosjsserver:3000/push_transaction', json=tx, timeout=10).json()
                # logger.info(f'Transaction {response["transaction_id"]} successfully submitted!')

                # remove data from -current once successfully sent
                for account in previous_date_accounts[:MAX_ACCOUNTS_PER_SUBMISSION]:
                    redis.hdel(previous_date_string, f'{account}-cpu-current')
                    redis.hdel(previous_date_string, f'{account}-net-current')

                # todo - handle if tx doesn't get included in immutable block?

        # if last block was yesterday, then aggregation is not finished, so don't submit
        if last_block_time < current_date_start:
            if len(previous_date_accounts) > 0:
                logger.info(
                    f"Collating yesterday's records... {len(previous_date_accounts)} accounts so far."
                )
    except Exception as e:
        logger.error('Could not submit tx!')
        logger.error(response.get('error', traceback.format_exc()))
def sendName_rss():
    user = request.form['user']
    channels = redis.hkeys(user)
    print(channels)
    return jsonify(res='ok')