def user_forget_task(email, active_code):
    """Store a password-reset code for *email* and e-mail the reset link.

    The code is kept in redis for 120 seconds. Returns the task status
    code 1 on success.
    """
    # Single atomic SET with expiry: the old SET-then-EXPIRE pair could
    # leave a never-expiring code if the process died in between.
    redis.set(email, active_code, ex=120)
    body = 'http://localhost:8000/accounts/change-password/{}/{}/'.format(
        signer.sign(email), signer.sign(active_code))
    send_mail('فعالسازی حساب', body, settings.EMAIL_HOST_USER, [email])
    return 1
def callback():
    """OAuth 1.0a callback: verify the VATSIM SSO response, upsert the user.

    Expects ``oauth_token`` and ``oauth_verifier`` query parameters; the
    token secret was stashed in redis by the login step.
    """
    oauth_token = request.args.get('oauth_token')
    oauth_verifier = request.args.get('oauth_verifier')
    # One-shot secret: fetch then delete so the token cannot be replayed.
    oauth_token_secret = redis.get(oauth_token)
    redis.delete(oauth_token)
    response = vatsim.get_user_details(oauth_token, oauth_token_secret,
                                       oauth_verifier)
    if response['request']['result'] != 'success':
        return json.dumps(response['request']['message']), 401
    vatsim_user = response['user']
    user_doc = current_app.data.driver.db['users'].find_one(
        {'vatsim_identity.id': vatsim_user['id']})
    if not user_doc:
        # First login: create the user document on the fly.
        now = datetime.utcnow()
        user_doc = {
            '_created': now,
            '_updated': now,
            'vatsim_identity': vatsim_user
        }
        user_doc['_id'] = current_app.data.driver.db['users'].insert_one(
            user_doc).inserted_id
    # Map verifier -> user id for 24 h so later requests can authenticate.
    redis.set(oauth_verifier, user_doc['_id'])
    redis.expire(oauth_verifier, 24 * 60 * 60)
    return JSONRenderer().render(user_doc), 200
def acquire_lock_with_timeout(redis, lockname, acquire_timeout=10, lock_timeout=30):
    """Acquire a distributed lock that auto-expires.

    A random 128-bit UUID is stored as the lock value so only the owner can
    later identify (and release) the lock.

    :param redis: redis client
    :param lockname: lock name (namespaced under ``lock:``)
    :param acquire_timeout: seconds to keep retrying the acquisition
    :param lock_timeout: seconds before the lock auto-expires
    :return: the identifier string on success, ``None`` on timeout
    """
    identifier = str(uuid.uuid4())
    lockname = 'lock:' + lockname
    end = time.time() + acquire_timeout
    # EXPIRE/EX only accept whole seconds.
    lock_timeout = int(math.ceil(lock_timeout))
    while time.time() < end:
        # Bug fix: SET NX+EX is a single atomic command, so the lock can
        # never be created without an expiry. The old SETNX-then-EXPIRE
        # pair could leave a permanent (deadlocking) lock if the client
        # crashed between the two calls.
        if redis.set(lockname, identifier, ex=lock_timeout, nx=True):
            return identifier
        elif not redis.ttl(lockname):
            # Defensive: re-arm the expiry on a lock that somehow lost it.
            redis.expire(lockname, lock_timeout)
        time.sleep(0.001)
    return None
def file_add():
    """Render the upload page with the current file count and an upload token."""
    # Keep the session key alive for another 50 seconds.
    redis.expire(session['current_user'], time=50)
    username = redis.get(session['current_user']).decode()
    user_path = app.upload_path.joinpath(username).resolve()
    # Count regular files anywhere under the user's directory.
    file_count = sum(1 for entry in user_path.glob('**/*') if entry.is_file())
    token = creating_token("allow", 240).decode('utf-8')
    return render_template('upload.html', files_len=file_count, token=token)
def cachingToRedis():
    """Cache the serialized BTC scrape result in redis for 60 seconds.

    Returns the pyarrow serialization context, which callers need in order
    to deserialize the cached payload later.
    """
    context = pa.default_serialization_context()
    payload = context.serialize(scrapes_BTC_Data()).to_buffer().to_pybytes()
    # Atomic SET with TTL: the key can never be left behind without expiry.
    redis.set('key', payload, ex=60)
    return context
def storage():
    """Render the storage page listing the user's local files and miniatures."""
    # Refresh the session TTL (5 minutes).
    redis.expire(session['current_user'], time=300)
    user_path = app.upload_path.joinpath(redis.get(session['current_user']).decode('utf-8')).resolve()
    user = redis.get(session['current_user']).decode('utf-8')
    # Ask the file server for its view of the user's files and miniatures.
    cont = requests.get(app.file_server + "list/" + user, verify=False).content.decode()
    miniature = requests.get(app.file_server + "miniatures/" + user, verify=False).content.decode()
    files = json.loads(cont)['list']
    miniatures = []
    miniatures = json.loads(miniature)['list']
    # NOTE(review): the server-provided ``files`` list above is immediately
    # discarded and rebuilt from the local directory below — confirm whether
    # the remote "list/" fetch is still needed at all.
    files = []
    for filename in os.listdir(str(user_path)):
        data = []
        data.append(filename)
        data.append(str(os.stat(str(user_path) + "/" + filename).st_size) + "B")
        data.append("/slyko/dl/download/" + filename)
        data.append("/slyko/dl/delete/" + filename)
        data.append("/slyko/static/miniatures/" + filename)
        miniatures.append(filename)
        files.append(data)
    tokens = {}
    #for f in files:
    #    tokens[f]=creating_token(f,240).decode('utf-8')
    return render_template(
        'storage.html', user=redis.get(session['current_user']).decode('utf-8'),
        files_len=len(files),
        files=files,
        tokens=tokens,
        miniatures=miniatures)
def user_resend_code_task(email):
    """Generate a fresh 4-digit activation code, store it, and e-mail it.

    The code lives in redis for 120 seconds. Returns 1 on success.
    """
    active_code = randint(1000, 9999)
    # Atomic SET with TTL instead of SET followed by EXPIRE.
    redis.set(email, active_code, ex=120)
    body = 'http://localhost:8000/accounts/verify/{}/{}/'.format(
        signer.sign(email), active_code)
    send_mail('فعالسازی حساب', body, settings.EMAIL_HOST_USER, [email])
    return 1
def reset_value(self, key, value, type="", expire_tm=None):
    """Overwrite *key* with *value*, optionally giving it an expiry.

    :param key: logical key name (namespaced by the internal name builder)
    :param value: value to store (serialized internally)
    :param type: key namespace passed to the internal name builder
    :param expire_tm: TTL in seconds; applied only when it is an ``int``
    """
    self.conn()
    redis = self.redis
    value = self.__set_value(value)
    key = self.__get_name(key, type)
    if isinstance(expire_tm, int):
        # Single atomic SET+EX instead of SET followed by EXPIRE.
        redis.set(key, value, ex=expire_tm)
    else:
        redis.set(key, value)
def remove_worker(self, client_id):
    """Remove *client_id* from the shared worker set and the local cache."""
    app.logger.debug("Removing client: {}".format(client_id))
    # Drop the client from the shared redis set...
    redis.srem(XROOTD_CLIENT, client_id)
    # ...and let any state keyed directly on the id lapse after 30 s.
    redis.expire(client_id, 30)
    # NOTE(review): raises KeyError when the id is not cached locally —
    # confirm callers only pass known ids.
    del self.clients[client_id]
def remove_server(self, server_id):
    """Remove *server_id* from the shared server set and the local cache."""
    app.logger.debug("Removing server: {}".format(server_id))
    # Drop the server from the shared redis set...
    redis.srem(XROOTD_SERVER, server_id)
    # ...and let any state keyed directly on the id lapse after 30 s.
    redis.expire(server_id, 30)
    # NOTE(review): raises KeyError when the id is not cached locally —
    # confirm callers only pass known ids.
    del self.servers[server_id]
def set_active_code(user):
    """Store a random 4-digit activation code for *user* with a 120 s TTL.

    Returns True when the code was stored, False otherwise.
    """
    # SET+EX is atomic. The old SET-then-EXPIRE pair could leave a code
    # without an expiry, and it expired the key even when SET had failed.
    result = redis.set(user, randint(1000, 9999), ex=120)
    return bool(result)
def get_gold_rank(redis, groupid, account):
    """Build the gold (wealth) and weekly-win leaderboards for *account*.

    Returns a dict with ``gold_rank`` and ``win_rank`` lists (top 10 plus the
    caller's own entry) and writes it to the redis cache for 5 minutes.
    """
    sortby = 'week'
    prredis = getPrivateRedisInst(redis, MASTER_GAMEID)
    today = datetime.now().strftime("%Y-%m-%d")
    # if redis.exists(GOLD_RANK_CACHE % account):
    #     return json.loads(redis.get(GOLD_RANK_CACHE % account))
    res = {}
    res['gold_rank'] = []
    res['win_rank'] = []
    my_user_info = get_user_info(redis, account)
    # Wealth leaderboard: top 10 by score, descending.
    rank = 0
    for _account, value in prredis.zrevrange(GOLD_MONEY_RANK_WITH_AGENT_ZSET % groupid, 0, 10 - 1, True):
        rank += 1
        value = int(value)
        user_info = get_user_info(redis, _account)
        if not user_info:
            # Skip accounts whose profile can no longer be resolved.
            continue
        res['gold_rank'].append({'rank': rank, 'nickname': user_info['nickname'], 'value': value, 'account': _account, 'headImgUrl': user_info['headImgUrl']})
    # Append the caller's own row (ranked if present in the zset).
    myrank = prredis.zrevrank(GOLD_MONEY_RANK_WITH_AGENT_ZSET % groupid, account)
    myvalue = prredis.zscore(GOLD_MONEY_RANK_WITH_AGENT_ZSET % groupid, account)
    if my_user_info and myrank != None:
        res['gold_rank'].append({'rank': int(myrank)+1, 'nickname': my_user_info['nickname'], 'value': myvalue, 'account': account, 'headImgUrl': my_user_info['headImgUrl'], 'self': '1'})
    else:
        # NOTE(review): this branch reads my_user_info even when it is falsy
        # — would raise if get_user_info returned None for the caller;
        # confirm that cannot happen here.
        res['gold_rank'].append({'nickname': my_user_info['nickname'], 'account': account, 'headImgUrl': my_user_info['headImgUrl'], 'self': '1'})
    # Weekly-win leaderboard.
    rank = 0
    for _account, value in get_gold_week_win_rank(prredis, groupid):
        rank += 1
        value = int(value)
        user_info = get_user_info(redis, _account)
        if not user_info:
            continue
        res['win_rank'].append({'rank': rank, 'nickname': user_info['nickname'], 'value': value, 'desc': '本周胜局', 'account': _account, 'headImgUrl': user_info['headImgUrl']})
    myrank = prredis.zrevrank('gold:win:rank:%s:thisweek:zset' % groupid, account)
    myvalue = prredis.zscore('gold:win:rank:%s:thisweek:zset' % groupid, account)
    if my_user_info and myrank != None:
        res['win_rank'].append({'rank': int(myrank)+1, 'nickname': my_user_info['nickname'], 'value': myvalue, 'desc': '本周胜局', 'account': account, 'headImgUrl': my_user_info['headImgUrl'], 'self': '1'})
    else:
        res['win_rank'].append({'nickname': my_user_info['nickname'], 'account': account, 'headImgUrl': my_user_info['headImgUrl'], 'self': '1'})
    # Cache the assembled result for 5 minutes.
    redis.set(GOLD_RANK_CACHE % account, json.dumps(res))
    redis.expire(GOLD_RANK_CACHE % account, 300)
    return res
def get_token(username, length=20, timeout=20):
    """
    Obtain an access token that can be passed to a websocket client.

    The token maps back to *username* in redis and expires after *timeout*
    seconds.
    """
    redis = get_redis_client()
    token = get_random_string(length)
    token_key = 'token:{}'.format(token)
    # Atomic SET with expiry — avoids a token that never expires if the
    # process dies between SET and EXPIRE.
    redis.set(token_key, username, ex=timeout)
    return token
def user_forget_task(email, active_code):
    """Store a password-reset code (120 s TTL) and e-mail the reset link.

    Returns 1 on success, 4 when the user does not exist.
    """
    try:
        # Atomic SET+EX instead of SET followed by EXPIRE.
        redis.set(email, active_code, ex=120)
        body = 'http://localhost:8000/accounts/change-password/{}/{}/'.format(
            signer.sign(email), signer.sign(active_code))
        send_mail('فراموشی رمز عبور', body, settings.EMAIL_HOST_USER, [email])
        return 1
    except User.DoesNotExist:
        return 4
def acquire_lock(lockname, identifier, wait_time=20, timeout=15):
    """Try to acquire *lockname* for up to *wait_time* seconds.

    :param identifier: value stored as the lock owner token
    :param timeout: lock TTL in seconds
    :return: *identifier* on success, False on timeout
    """
    end = time.time() + wait_time
    while end > time.time():
        # Atomic SET NX+EX: the lock can never exist without an expiry
        # (the old SETNX-then-EXPIRE pair was racy).
        if CONN.set(lockname, identifier, ex=timeout, nx=True):
            return identifier
        elif not CONN.ttl(lockname):
            # Bug fix: this branch previously used a different client
            # (``redis``) than the one holding the lock (``CONN``).
            # Re-arm the expiry on a lock that lost it.
            CONN.expire(lockname, timeout)
        time.sleep(0.001)  # wait until the lock expires or is released
    return False
def checkRate(ip):
    """Rate-limit *ip*: allow at most 5 hits per 60-second window.

    Returns True when the request is allowed, False when the limit is hit.
    NOTE: LLEN/LPUSH here are not atomic as a pair, so concurrent requests
    can slightly exceed the limit.
    """
    if redis.llen(ip) == 0:
        # First hit from this ip: start a fresh 60-second window.
        print("First encounter")
        redis.lpush(ip, 1)
        redis.expire(ip, 60)
    else:
        if redis.llen(ip) == 5:
            return False
        else:
            # Record the hit; the key keeps its original window TTL.
            redis.lpush(ip, 1)
    return True
def login():
    """Start the VATSIM SSO handshake: stash the token secret and redirect."""
    oauth_token = vatsim.get_oauth_token(
        redirect=url_for('callback', _external=True))
    redirect_uri = (
        app.config['VATSIM_SSO_SERVER'] +
        '/auth/pre_login/?oauth_token=%s') % oauth_token['oauth_token']
    # Atomic SET+EX: the secret is only valid for 30 minutes.
    redis.set(oauth_token['oauth_token'],
              oauth_token['oauth_token_secret'], ex=30 * 60)
    return redirect(redirect_uri, code=302)
def api_create_link(stash_id):
    """Attach a submitted link to an existing stash and redirect to it.

    404s when the stash does not exist; the link key lives for 10 seconds.
    """
    # ``is None`` instead of ``== None`` (identity check for None).
    if redis.get('stash::' + stash_id) is None:
        abort(404)
    link = request.form['link']
    print("got link" + link)
    key = "stash::" + stash_id + "::link::" + shortuuid.uuid(name=link)
    # Atomic SET with TTL instead of SET followed by EXPIRE.
    redis.set(key, link, ex=10)
    url = url_for('api_stash_display', stash_id=stash_id)
    return redirect(url)
def user_register_task(username, email, password):
    """Register a new user and e-mail a 4-digit activation code.

    Returns 2 when the e-mail is already taken, 1 on success.
    """
    check_user = User.objects.filter(email=email)
    if check_user.exists():
        return 2
    User.objects.create_user(username=username, email=email,
                             password=password)
    active_code = randint(1000, 9999)
    # Atomic SET+EX: the code expires after 120 seconds.
    redis.set(email, active_code, ex=120)
    body = 'http://localhost:8000/accounts/verify/{}/{}/'.format(
        signer.sign(email), active_code)
    send_mail('فعالسازی حساب', body, settings.EMAIL_HOST_USER, [email])
    return 1
def message_location(event):
    """Handle a LINE location message: remember the spot, prompt for a taste."""
    lat = event.message.latitude
    long = event.message.longitude
    uid = event.source.user_id
    station = get_station(lat, long)
    token = event.reply_token
    # Guard clause: bail out with an error reply when no station was found.
    if not station:
        send_message(token, "エラーが発生しました。やり直して下さい。")
        return
    # Remember the user's location for 30 minutes.
    redis.hset(uid, 'lat', lat)
    redis.hset(uid, 'long', long)
    redis.expire(uid, 1800)
    send_message(token, '{}駅周辺のラーメン屋をお探しします!\nあなたの今の気分を教えて下さい\n(例)あっさりした醬油ラーメン'.format(station))
def user_login_task(email):
    """Classify a login attempt by e-mail.

    Returns 1 for an active user, 2 after re-sending an activation code to
    an inactive user, 3 when the e-mail is unknown.
    """
    check_user = User.objects.filter(email=email)
    if not check_user.exists():
        return 3
    if check_user.filter(is_active=True).exists():
        # user = authenticate(request, email=email, password=password)
        return 1
    # Known but inactive: issue a fresh activation code (120 s TTL, atomic).
    active_code = randint(1000, 9999)
    redis.set(email, active_code, ex=120)
    body = 'http://localhost:8000/accounts/verify/{}/{}/'.format(
        signer.sign(email), active_code)
    send_mail('فعالسازی حساب', body, settings.EMAIL_HOST_USER, [email])
    return 2
def acquire_lock(redis, lockname, acquire_timeout=10, lock_timeout=30):
    """Acquire a namespaced lock, retrying until *acquire_timeout* elapses.

    The lock value is a random 128-bit UUID so the owner can be identified
    later. Returns the UUID string on success, ``None`` on timeout.
    """
    token = str(uuid.uuid4())
    lockname = 'lock:' + lockname
    deadline = time.time() + acquire_timeout
    # EXPIRE/EX only take whole seconds.
    lock_timeout = int(math.ceil(lock_timeout))
    while time.time() < deadline:
        # Atomic SET NX+EX: only one client can create the key, and it is
        # created together with its expiry in a single command.
        if redis.set(lockname, token, ex=lock_timeout, nx=True):
            return token
        if not redis.ttl(lockname):
            # A lock without an expiry would deadlock everyone; re-arm it.
            redis.expire(lockname, lock_timeout)
        time.sleep(0.001)
    return None
def enqueue():
    """Validate a submitted Youtube URL and enqueue a download job.

    Returns (json, status): 400 when 'input_url' is missing, 403 when it
    does not look like a Youtube URL, 201 with the job id on success.
    """
    data = json.loads(request.data.decode())
    if 'input_url' not in data:
        response = {
            'error': "The Youtube URL to download must be provided as 'input_url'",
        }
        logger.warn("Rejecting /api/enqueue request missing 'input_url'")
        return json.dumps(response), 400  # bad request
    clean_url = util.validate_url(data['input_url'])
    if clean_url is None:
        response = {
            'error': "I'm sorry, that doesn't really look like a Youtube URL. :-(",
            'info': "Please try again using a link starting with 'https://www.youtube.com'.",
        }
        logger.warn("Rejecting /api/enqueue request for %s" % data['input_url'])
        return json.dumps(response), 403  # forbidden
    logger.info("Accepting /api/enqueue request for %s" % clean_url)
    job = rqueue.enqueue_call(
        func=util.download,
        args=(clean_url, ),
        result_ttl=900  # 15 minutes
    )
    job_id = job.get_id()
    # Keep only the 10 most recent job ids in the public job list.
    redis.lpush(joblist, job_id)
    redis.ltrim(joblist, 0, 9)
    job_details = {
        'job_id': job_id,
        'request_url': clean_url,
        'submitted': time.time(),
        'page_title': '...',  # just a placeholder to keep it pretty
    }
    redis.hmset(jobkey(job_id), job_details)
    redis.expire(jobkey(job_id), 86400)  # 24 hours
    response = {
        'job_id': job_id,
    }
    return json.dumps(response), 201  # created
def authenticate_user(json):
    """Check e-mail/password against redis and open a session on success.

    Returns ``{'logged_in': True, 'token': ...}`` or ``{'logged_in': False}``.
    NOTE(review): the parameter shadows the ``json`` module in this scope.
    """
    email = json['email']
    password = json["password"]
    user_hashed_pw = hashpw(password)
    db_hashed_pw = redis.get(redis_auth_key_from_email(email))
    # SECURITY NOTE(review): this prints password hashes to stdout.
    print user_hashed_pw, db_hashed_pw
    # SECURITY NOTE(review): '==' is not constant-time; consider
    # hmac.compare_digest to avoid a timing side channel here.
    if user_hashed_pw == db_hashed_pw:
        print "Successful login, building session."
        token = uuid.uuid1()
        ip = get_remote_ip()
        session_key = redis_session_key(ip=ip, token=token)
        # Session maps (ip, token) -> email and auto-expires.
        redis.set(session_key, email)
        redis.expire(session_key, SESSION_EXPIRY_SECONDS)
        json = {'logged_in': True, 'token': token}
        return json
    else:
        print "Failed login"
        return {'logged_in': False}
def list():
    """Render the listing page for the current user's uploaded files."""
    # Keep the session alive for another 5 minutes.
    redis.expire(session['current_user'], time=300)
    username = redis.get(session['current_user']).decode('utf-8')
    user_path = app.upload_path.joinpath(username).resolve()
    files = []
    for filename in os.listdir(str(user_path)):
        # [name, download url, delete url, static url] per file.
        files.append([
            filename,
            "/download/" + filename,
            "/delete/" + filename,
            "/static/uploads/" + username + "/" + filename,
        ])
    tokens = {}
    return render_template('list.html', user=username, files=files,
                           tokens=tokens)
def acquire_lock_with_timeout2(redis, lockname, acquire_timeout=10, lock_timeout=30):
    """Take a namespaced expiring lock via a single atomic SET (NX + EX).

    The stored value is a random 128-bit UUID, which doubles as the owner
    token returned to the caller.

    :param redis: redis client
    :param lockname: lock name (prefixed with ``lock:``)
    :param acquire_timeout: seconds to keep retrying
    :param lock_timeout: lock TTL in seconds
    :return: owner token on success, ``None`` if the lock was never obtained
    """
    owner_token = str(uuid.uuid4())
    key = 'lock:' + lockname
    stop_at = time.time() + acquire_timeout
    ttl_seconds = int(math.ceil(lock_timeout))  # EX wants whole seconds
    while time.time() < stop_at:
        acquired = redis.set(key, owner_token, ex=ttl_seconds, nx=True)
        if acquired:
            return owner_token
        if not redis.ttl(key):
            # Repair a lock that somehow has no expiry so it cannot deadlock.
            redis.expire(key, ttl_seconds)
        time.sleep(0.001)
    return None
def loadmaskdata():
    """Load Taiwan mask-availability data, geocoded, with a 1 h redis cache."""
    if redis.exists('mask:tw'):
        ret = json.loads(redis.get('mask:tw'))
    else:
        md = pd.read_csv(url, encoding='utf-8')
        md_cols = ['id', 'name', 'address', 'tel', 'adult', 'child', 'lastsync']
        md.columns = md_cols
        # Seconds elapsed since the reference time ``orig``.
        md['now'] = int((datetime.now()-orig).total_seconds())
        # Prefer the precomputed coordinate map; geocode the address otherwise.
        md['pos'] = md.apply(lambda x: ds_map[x.id] if (x.id in ds_map) else geolatlng(x.address), axis=1)
        # Round-trip through a temp CSV, then rebuild plain dicts from it.
        md.to_csv('/tmp/md.csv', encoding='utf-8')
        ret = []
        with open('/tmp/md.csv') as f:
            rows = csv.reader(f)
            for r in rows:
                if r[0]:
                    # presumably r[0] is the pandas index column, empty on the
                    # header row, so this skips the header — TODO confirm.
                    ret.append({
                        'id': r[1], 'name': r[2], 'address': r[3], 'tel': r[4],
                        'adult': r[5], 'child': r[6], 'lastsync': r[7],
                        'now': r[8], 'pos': r[9]})
        # Cache the assembled list for one hour.
        redis.set('mask:tw', json.dumps(ret))
        redis.expire('mask:tw', 3600)
    return ret
def output(sums):
    """Write *sums* to the configured sinks (redis and/or a pickle file).

    Falls back to an interactive session when no sink is configured.
    """
    wrote_any = False
    if config['out']['redis']:
        expire = config['out']['expire'] * 24 * 60 * 60  # days -> seconds
        key = config['out']['prefix'] + ':' + config['in']['master']
        redis.set(key, pickle.dumps(sums))
        redis.expire(key, expire)
        logger.info('Output written to rediskey %s' % key)
        wrote_any = True
    if config['out']['file']:
        fname = config['in']['master'].split('/')[-1]
        path = config['out']['path']
        concat = path + fname + '.pickle'
        with open(concat, 'wb') as handle:
            pickle.dump(sums, handle)
        logger.info('Output written to file %s' % concat)
        wrote_any = True
    if not wrote_any:
        # Bug fix: the old ``else`` was attached to the *file* check only,
        # so a redis-only configuration still warned "No output format
        # specified" and dropped into an interactive shell.
        logger.warning('No output format specified.')
        logger.warning('Dropping into interactive session.')
        from code import interact
        interact(local=locals())
def enqueue():
    """Validate a submitted Youtube URL and enqueue a download job.

    Returns (json, status): 400 when 'input_url' is missing, 403 when it
    does not look like a Youtube URL, 201 with the job id on success.
    """
    data = json.loads(request.data.decode())
    if 'input_url' not in data:
        response = {
            'error': "The Youtube URL to download must be provided as 'input_url'",
        }
        logger.warn("Rejecting /api/enqueue request missing 'input_url'")
        return json.dumps(response), 400  # bad request
    clean_url = util.validate_url(data['input_url'])
    if clean_url is None:
        response = {
            'error': "I'm sorry, that doesn't really look like a Youtube URL. :-(",
            'info': "Please try again using a link starting with 'https://www.youtube.com'.",
        }
        logger.warn("Rejecting /api/enqueue request for %s" % data['input_url'])
        return json.dumps(response), 403  # forbidden
    logger.info("Accepting /api/enqueue request for %s" % clean_url)
    job = rqueue.enqueue_call(
        func=util.download,
        args=(clean_url,),
        result_ttl=900  # 15 minutes
    )
    job_id = job.get_id()
    # Keep only the 10 most recent job ids in the public job list.
    redis.lpush(joblist, job_id)
    redis.ltrim(joblist, 0, 9)
    job_details = {
        'job_id': job_id,
        'request_url': clean_url,
        'submitted': time.time(),
        'page_title': '...',  # just a placeholder to keep it pretty
    }
    redis.hmset(jobkey(job_id), job_details)
    redis.expire(jobkey(job_id), 86400)  # 24 hours
    response = {
        'job_id': job_id,
    }
    return json.dumps(response), 201  # created
def EmailActivation(emailaddress, value):
    """E-mail a 6-character verification code to *emailaddress*.

    The code is stored in redis (code -> address) for 120 seconds.
    :param value: recipient display name substituted into the template
    """
    import smtplib
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText
    msg = MIMEMultipart('alternative')
    msg['Subject'] = "کد تایید هویت"
    msg['From'] = "*****@*****.**"
    msg['To'] = emailaddress
    # Context manager: the old bare open() leaked the file handle.
    with open("templates/validateemail.html", "r") as template_file:
        html = template_file.read()
    code = generateid(6)
    html = html.replace("{Code}", code).replace("{Name}", value)
    # Atomic SET+EX: the code expires after 120 seconds.
    redis.set(code, emailaddress, ex=120)
    emailpart = MIMEText(html, 'html')
    msg.attach(emailpart)
    server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
    server.ehlo()
    # SECURITY: credentials are hard-coded in source — move them to
    # configuration / secrets management.
    server.login("*****@*****.**", "09382138446m")
    server.sendmail("*****@*****.**", emailaddress, msg.as_string())
    server.quit()
def store(redis, key, buffer_image):
    """Store an in-memory image buffer under *key* with a 3-hour TTL.

    :param redis: redis client
    :param buffer_image: file-like object exposing ``getvalue()``
    :return: the redis SET result (truthy on success)
    """
    three_hours = 60 * 60 * 3
    # Single atomic SET+EX instead of SET followed by EXPIRE.
    return redis.set(key, buffer_image.getvalue(), ex=three_hours)
request_region = requests.get("https://public-crest.eveonline.com/regions/{:d}/".format(region['id'])) logger.debug(request_region) reg = request_region.json()['marketSellOrders'] logger.debug(result) r_url = reg['href'] t_url = "?type=https://public-crest.eveonline.com/types/{:d}/".format(result['typeID']) request_url = r_url + t_url logger.debug(request_url) marketdata_req = requests.get(request_url) logger.debug(marketdata_req) highest = 0 marketdata = marketdata_req.json() logger.debug(marketdata) if marketdata['totalCount'] == 0: exempt.append(region['id']) continue market_items = marketdata['items'] logger.debug(market_items) for item in market_items: logger.debug(type(item)) if item['price'] > highest: highest = item['price'] redis_dict = {region['id_str']:highest} logger.debug("redis_dict: {dict}".format(dict = redis_dict)) redis.hmset(i_type, redis_dict) redis.expire(i_type, 600) logger.info("Finished caching for {i_type} in {region}".format(i_type = i_type, region = region['id'])) logger.info("Finished caching for all regions for the i_type: {:d}".format(i_type)) pipe.execute() logger.info("Finished Caching")
def __multi_set_ttl(self, conn, flattened, ttl):
    """Set every key in *flattened* and give each the same TTL, as one pipeline."""
    # Batch everything into a single round trip.
    pipe = conn.pipeline()
    pipe.mset(flattened)
    for key in flattened:
        pipe.expire(key, ttl)
    pipe.execute()
def create_dss_subscription(self, vertex_list, view_port):
    """PUT a subscription into the DSS for the given view and cache flight URLs.

    :param vertex_list: polygon vertices of the subscription footprint
    :param view_port: view identifier stored alongside the cached URLs
    :return: dict with ``created``, ``subscription_id``,
        ``notification_index``; ``created`` stays 0 on any failure
    """
    subscription_response = {
        "created": 0,
        "subscription_id": 0,
        "notification_index": 0
    }
    my_authorization_helper = AuthorityCredentialsGetter()
    audience = env.get("DSS_SELF_AUDIENCE", 0)
    # NOTE: assert-based control flow is stripped under ``python -O``;
    # kept here to preserve existing behavior.
    try:
        assert audience
    except AssertionError:
        current_app.logger.error(
            "Error in getting Authority Access Token DSS_SELF_AUDIENCE is not set in the environment"
        )
        return subscription_response
    try:
        auth_token = my_authorization_helper.get_cached_credentials(audience)
    except Exception as e:
        current_app.logger.error(
            "Error in getting Authority Access Token %s " % e)
        return subscription_response
    error = auth_token.get("error")
    try:
        assert error is None
    except AssertionError:
        return subscription_response
    current_app.logger.info("Successfully received Token")
    # A token from the authority was received; create the subscription.
    new_subscription_id = str(uuid.uuid4())
    dss_subscription_url = self.dss_base_url + '/dss/subscriptions/' + new_subscription_id
    callback_url = env.get("SUBSCRIPTION_CALLBACK_URL", "/isa_callback")
    now = datetime.now()
    current_time = now.isoformat()
    one_hour_from_now = (now + timedelta(hours=1)).isoformat()
    headers = {
        'content-type': 'application/json',
        'Authorization': 'Bearer ' + auth_token
    }
    volume_object = {
        "spatial_volume": {
            "footprint": {
                "vertices": vertex_list
            },
            "altitude_lo": 0.5,
            "altitude_hi": 400
        },
        "time_start": current_time,
        "time_end": one_hour_from_now
    }
    payload = {
        "extents": volume_object,
        "callbacks": {
            "identification_service_area_url": callback_url
        }
    }
    try:
        dss_r = requests.post(dss_subscription_url,
                              data=json.dumps(payload),
                              headers=headers)
    except Exception as post_err:
        current_app.logger.error(
            "Error in posting to subscription URL %s " % post_err)
        return subscription_response
    try:
        assert dss_r.status_code == 200
        subscription_response["created"] = 1
    except AssertionError:
        current_app.logger.error(
            "Error in creating subscription in the DSS %s" % dss_r.text)
        return subscription_response
    dss_response = dss_r.json()
    service_areas = dss_response['service_areas']
    subscription = dss_response['subscription']
    subscription_id = subscription['id']
    notification_index = subscription['notification_index']
    subscription_response['notification_index'] = notification_index
    subscription_response['subscription_id'] = subscription_id
    # Collect the flights URL from each service area so they can be polled.
    flights_url_list = []
    for service_area in service_areas:
        flights_url_list.append(service_area['flights_url'])
    flights_dict = {
        'subscription_id': subscription_id,
        'all_flights_url': flights_url_list,
        'notification_index': notification_index,
        'view': view_port,
        'expire_at': one_hour_from_now
    }
    # Bug fix: the original did ``redis = redis.Redis()`` which raises
    # UnboundLocalError (the local assignment shadows the module on the
    # right-hand side of its own initialization).
    redis_client = redis.Redis()
    hash_name = "all_uss_flights"
    redis_client.hmset(hash_name, flights_dict)
    # Expire the cached hash after one hour.
    redis_client.expire(name=hash_name, time=timedelta(minutes=60))
    return subscription_response
def mark_films_as_seen(user, film_ids):
    """Record *film_ids* in today's per-user "seen" set (auto-expiring)."""
    if not film_ids:
        # Robustness: SADD with zero members is a redis command error.
        return
    user_id = str(user.id or user.username)
    key = 'user:%s:seen:%s' % (user_id, datetime.date.today())
    redis.sadd(key, *film_ids)
    redis.expire(key, SEEN_EXPIRES_IN_DAYS * 24 * 3600)
async def qrzLookup(origcall, config):
    '''Look up *origcall* at QRZ (redis-cached), pretty-print the station
    details, and return a summary dict (callsign, e-mail, coordinates,
    headings). Returns a minimal dict with ``callsign=None`` when the call
    cannot be resolved.'''
    my_lookuplib = LookupLib(lookuptype="qrz",
                             username=config['qrz.com']['username'],
                             pwd=config['qrz.com']['password'])
    cic = Callinfo(my_lookuplib)
    origcall = origcall.upper()
    try:
        # Resolve the base callsign, then try the redis cache first.
        call = cic.get_homecall(origcall)
        lookup = await qrzRedisLookup(call)
    except ValueError:
        callsign = None
        lookup = {}  #dict()
        print("Not Found")
        return {'origcallsign': origcall, 'callsign': callsign}
    if lookup is False:
        # Cache miss: query QRZ directly and cache the result.
        try:
            lookup = cic.get_all(call)
            callsign = lookup['callsign']
            redis.set('qrz' + call.upper(), json.dumps(lookup, default=str))
            # NOTE(review): 2629743000 seconds is ~83 years — this looks
            # like a milliseconds value (~1 month) passed to EXPIRE, which
            # takes seconds; confirm the intended TTL.
            redis.expire('qrz' + call.upper(), 2629743000)
            redis.sadd('qrzCALLS', call.upper())
            calls.append(call.upper())
        except ValueError:
            callsign = None
            lookup = {}  #dict()
            print("Not Found")
            return {'origcallsign': origcall, 'callsign': callsign}
        except KeyError:
            callsign = call
            lookup = {}  #dict()
            print("Not Found")
            return {'origcallsign': origcall, 'callsign': callsign}
    else:
        callsign = lookup['callsign']
    # Pretty-print the header line (with aliases when available).
    if callsign and 'aliases' in lookup:
        print(
            fg('blue') + '-=' + fg('turquoise_4') + attr('bold') + callsign +
            attr('reset') + fg('blue') + '=-' + attr('reset') + " (" +
            ','.join(lookup['aliases']) + ')')
    else:
        print(
            fg('blue') + '-=' + fg('turquoise_4') + attr('bold') + callsign +
            fg('blue') + '=-')
    # Print the station details field by field.
    print(fg('#884444') + attr('bold') + 'QTH: ', end="")
    await dictLookupAndPrint(lookup, '#a4a24f', 'fname', False)
    await dictLookupAndPrint(lookup, '#a4a24f', 'name', False, ", ")
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'addr1', False, ", ")
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'zipcode', False)
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'addr2', False, ", ")
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'country')
    print(fg('#884444') + attr('bold') + 'Grid square: ', end="")
    await dictLookupAndPrint(lookup, 'dark_sea_green_3b', 'locator', False)
    print(fg('#884444') + attr('bold') + 'Latitude: ', end="")
    latitude = await dictLookupAndPrint(lookup, 'dark_sea_green_3b',
                                        'latitude', False)
    print(fg('#884444') + attr('bold') + 'Longitude: ', end="")
    longitude = await dictLookupAndPrint(lookup, 'dark_sea_green_3b',
                                         'longitude')
    print(fg('#884444') + attr('bold') + 'CCode: ', end="")
    await dictLookupAndPrint(lookup, 'dark_sea_green_3b', 'ccode', False)
    print(fg('#884444') + attr('bold') + 'CQZone: ', end="")
    await dictLookupAndPrint(lookup, 'dark_sea_green_3b', 'cqz', False)
    print(fg('#884444') + attr('bold') + 'ITUZone: ', end="")
    await dictLookupAndPrint(lookup, 'dark_sea_green_3b', 'ituz')
    print(fg('#884444') + attr('bold') + 'QSL: ', end="")
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'qslmgr', False)
    print(fg('#884444') + attr('bold') + 'eQSL: ', end="")
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'eqsl', False)
    print(fg('#884444') + attr('bold') + 'lotw: ', end="")
    await dictLookupAndPrint(lookup, 'navajo_white_3', 'lotw')
    print(fg('#884444') + attr('bold') + 'E-Mail: ', end="")
    email = await dictLookupAndPrint(lookup, 'navajo_white_3', 'email', True)
    # Compute bearings from the configured QTH to the station.
    locator1 = latlong_to_locator(cfg['qth']['latitude'],
                                  cfg['qth']['longitude'])
    locator2 = latlong_to_locator(latitude, longitude)
    heading = calculate_heading(locator1, locator2)
    longpath = calculate_heading_longpath(locator1, locator2)
    print(fg('#884444') + attr('bold') + 'Heading: ', end="")
    print(fg('navajo_white_3') + "%.1f°" % heading, end="")
    print(fg('#884444') + attr('bold') + ' Longpath: ', end="")
    print(fg('navajo_white_3') + "%.1f°" % longpath, end="")
    print(attr('reset'))
    return {
        'origcallsign': origcall,
        'callsign': callsign,
        'email': email,
        'latitude': latitude,
        'longitude': longitude,
        'heading': heading,
        'longpath': longpath
    }
def __iter__(self):
    """Yield StreamCluster objects for this query, newest first.

    Unions the per-object sorted sets in redis (briefly cached under a
    content-addressed key), then deserializes each cluster and resolves its
    context values in bulk.
    """
    redis = get_redis_connection()
    postfix = ""
    if self.event_type is not None:
        postfix += ":%s" % (self.event_type.slug)
    lookup_keys = [
        obj.lookup_key() + postfix
        for obj in self.objs
    ]
    if len(lookup_keys) >= 2:
        # Multiple sources: union them under a key derived from the inputs.
        s = hashlib.sha1()
        for lookup_key in lookup_keys:
            s.update(lookup_key)
        key = s.hexdigest()
        redis.zunionstore(key, lookup_keys, aggregate="MIN")
        # Expire it in 5 minutes, enough that paginating shouldn't require
        # a recompute, but short enough to not clutter the place up.
        redis.expire(key, 60 * 5)
    elif len(lookup_keys) == 1:
        key = lookup_keys[0]
    else:
        assert not self.event_type
        key = "ALL_EVENTS"
    statuses = defaultdict(lambda: Status(0, 0))
    items = list(redis.zrevrange(key, self.offset, self.limit,
                                 withscores=True))
    parsed_items = []
    context_items = {}
    # First pass: parse clusters, tally add/remove counts per item, and
    # gather every context value so it can be deserialized in bulk.
    for cluster, score in items:
        data = json.loads(cluster)
        parsed_items.append((data, score))
        for o in data["items"]:
            status_key = self._status_key(data["slug"], o)
            status = statuses[status_key]
            if o["remove"]:
                statuses[status_key] = status._replace(removes=status.removes+1)
            else:
                statuses[status_key] = status._replace(adds=status.adds+1)
            for key, val in o["context"].iteritems():
                field = EventType.registry[data["slug"]].context_shape[key]
                key = field.unique_key()
                if key not in context_items:
                    context_items[key] = RawResults(field, set())
                context_items[key].vals.add(val)
    final_context_items = {}
    for key, (field, vals) in context_items.iteritems():
        final_context_items[key] = field.deserialize_bulk(vals)
    # Second pass: materialize clusters, skipping items the converter drops.
    for data, score in parsed_items:
        cluster_items = []
        timestamp = datetime.fromtimestamp(score)
        for o in data["items"]:
            item = self._convert_item(
                data["slug"], o, timestamp, statuses, final_context_items,
                data["cluster_id"]
            )
            if item is not None:
                cluster_items.append(item)
        if cluster_items:
            clustered_on = None
            if data["clustered_on"] is not None:
                clustered_on = cluster_items[0].context[data["clustered_on"]]
            yield StreamCluster(
                data["slug"],
                timestamp,
                cluster_items,
                clustered_on,
                data["cluster_id"]
            )
#Setting the key to be the data instead of something more creative. #There could be some really interesting data crunching that we could #do here, but for now, let's try capturing the rate of fatal encounters #across the last seven days. #Sets the data in Redis as the key, so that this can disappear within seven #days. #Converts date that was previously in a string format to a Python date #object so that we can easily manipulate it to extract only the month #and year. real_date = datetime.strptime(data["date"], '%B %d, %Y') #Gets the date that was seven days ago as we're calculating the average #across fourteen days. So, there's no point even adding datapoints that are #older for this specific exercise. This is more of a precautionary measure #as the initial run gets all the data, which spans 15 years. This simply #filters it down to data relevant in the last week or so. date_seven_days_ago = datetime.now() - timedelta(days=14) #Only add fatalities that occurred within the last week. if real_date >= date_seven_days_ago: print(data) redis.set(data, 1) #Delete keys that are older than fourteen days in the Redis-verse. #We have to specify the key in seconds; hence, the multiplication. redis.expire(data, 14*24*60*60) except JSONDecodeError: pass time.sleep(120)