def xaui_unpack(self, bram_dmp, hdr_index, pkt_len, skip_indices, redis):
    pkt_64bit_hdr = struct.unpack(
        "Q",
        bram_dmp["msb_data"][(4 * hdr_index):(4 * hdr_index) + 4]
        + bram_dmp["lsb_data"][(4 * hdr_index):(4 * hdr_index) + 4],
    )[0]
    pkt_mcnt = pkt_64bit_hdr >> 16
    pkt_ant = pkt_64bit_hdr & 0xFFFF
    pkt_freq = pkt_mcnt % 2048  # mcnt % nchans
    raw_xaui_data = ""
    for pkt_index in range(1, pkt_len):
        abs_index = hdr_index + pkt_index
        if skip_indices.count(abs_index) > 0:
            continue
        raw_xaui_data += (
            bram_dmp["msb_data"][(4 * abs_index):(4 * abs_index) + 4]
            + bram_dmp["lsb_data"][(4 * abs_index):(4 * abs_index) + 4]
        )
    if len(raw_xaui_data) == 256:
        print("writing Ant%d, Chan%d into redis." % (pkt_ant, pkt_freq))
        try:
            redis.set(
                "px%d:snap_xaui_raw:%d:%d" % (self.xeng[0] + 1, pkt_ant % 4, pkt_freq),
                raw_xaui_data,
            )
        except Exception as e:
            print("REDIS ERROR (xaui unpack)", e)
def start_test(self, repo, ref):
    base_repo = redis.get("source:" + repo).decode("utf-8")
    l = redis.lock(base_repo, timeout=60 * 60)
    log_key = "log:" + repo + "/" + ref
    log_url = "https://rosie-ci.ngrok.io/log/" + repo + "/" + ref
    print("grabbing lock " + base_repo)
    # Retry the task in 30 seconds if the lock can't be grabbed.
    if not l.acquire(blocking=False):
        if self.request.retries == 24:
            set_status(repo, ref, "error", log_url,
                       "Hit max retries. Please ping the owner.")
        raise self.retry(countdown=30, max_retries=25)
    print("Lock grabbed " + base_repo)
    redis.set("owner-" + base_repo, log_url)
    set_status(repo, ref, "pending", log_url, "Commencing Rosie test.")
    repo_path = cwd + "/repos/" + base_repo
    os.chdir(repo_path)
    try:
        redis.append(log_key, git.checkout(ref))
    except sh.ErrorReturnCode_128 as e:
        print("error 128")
        redis.append(log_key, e.full_cmd + "\n"
                     + e.stdout.decode('utf-8') + "\n"
                     + e.stderr.decode('utf-8'))
        final_status(repo, ref, "error", "Git error in Rosie.")
    except sh.ErrorReturnCode_1 as e:
        print("error 1")
        redis.append(log_key, e.full_cmd + "\n"
                     + e.stdout.decode('utf-8') + "\n"
                     + e.stderr.decode('utf-8'))
        final_status(repo, ref, "error", "Git checkout error in Rosie.")
    print("test started " + log_url)
    return l.local.token.decode("utf-8")
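# The token returned above is presumably handed to a later task so it can
# release the lock. A minimal sketch of that hand-off with redis-py's Lock,
# assuming a module-level client; connection details and key names are
# illustrative, not from the original.
from redis import Redis

r = Redis()
lock = r.lock("some-repo", timeout=60 * 60)
if lock.acquire(blocking=False):
    token = lock.local.token  # bytes; safe to pass around as utf-8

    # Later, possibly in another process: reattach the token and release.
    lock2 = r.lock("some-repo", timeout=60 * 60)
    lock2.local.token = token
    lock2.release()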
def do_create_total(redis):
    count = 0
    for key in redis.keys(FORMAT_LOGIN_DATE_TABLE4FISH % ('*')):
        count += convert_util.to_int(redis.scard(key))
    print('do_create_total total[%s]' % count)
    redis.set("fish:login:per:day:total", count)
def create(cls, data, listeners):
    if 'id' not in data:
        data['id'] = redis.incr('global:%s' % cls._namespace())
    _id = cls._id(data['id'])
    redis.set(_id, data)
    redis.lpush(cls._namespace(), _id)
    return data, listeners
def odom_cb(msg):
    global px, py, pz, pose, moving, prev_moving, id_num
    pose = msg.pose.pose
    m = msg.twist.twist
    o = [round(x, 3) for x in euler_from_quaternion([
        pose.orientation.x, pose.orientation.y,
        pose.orientation.z, pose.orientation.w])]
    l = round(m.linear.x, 3)
    a = round(m.angular.z, 3)
    lo = [round(x, 3) for x in [pose.position.x, pose.position.y, pose.position.z]]
    package = json.dumps(OrderedDict([
        ("odom_id", id_num),
        ("real_odom_id", msg.header.seq),
        ("location", lo),
        ("orientation", o),
        ("linearvelocity", l),
        ("angularvelocity", a),
        ("time", msg.header.stamp.secs),
    ]))
    id_num += 1
    changes = [abs(i - j) for i, j in zip([px, py, pz], [lo[0], lo[1], o[2]])]
    moving = l != 0 or a != 0
    # Only post updates if enough has changed since the last send, or if the
    # robot is transitioning between motionless and in motion (or vice versa).
    if sum(changes) > send_thresh or moving != prev_moving:
        redis.set(redis_key, str(package))
        px, py, pz = lo[0], lo[1], o[2]  # update previous values
        bt.save_json_file("odomdump.json", json.loads(package))
        prev_moving = moving
def update_sms_status(recipient_id, phone_number):
    status = sinch.check_status(phone_number)
    if status == 'Successful':
        redis.set('confirmation:{}'.format(recipient_id), 'successful')
        # 'sset' is not a Redis command; SADD adds the member to the set.
        redis.sadd('customers_confirmed', recipient_id)
    else:
        # Celery's apply_async expects the task args as a tuple.
        update_sms_status.apply_async((recipient_id, phone_number), countdown=1)
def insert_baidu_user(user_info, url):
    if is_exist_uid(user_info['uid']):
        return "redis uid exist uid=" + user_info['uid'] + " url=" + url
    now = int(time.time())
    data = (
        str(user_info['uid']), str(user_info['username']), str(user_info['user_avatar']),
        str(user_info['share_num']), str(user_info['special_num']), str(user_info['sign_num']),
        str(user_info['fans_num']), str(now), '0',
    )
    sql = "insert into so_baidu_user (`uid`,`username`,`user_avatar`,`share_num`,`special_num`," \
          "`sign_num`,`fans_num`,`create_time`,`status`) values (%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    cursor.execute(sql, data)
    result = db.commit()
    if result is not None:
        db.rollback()
        send_email("mysql:error rollback uid=")
        log = "mysql:error rollback uid=" + user_info['uid'] + " url=" + url
    else:
        redis.set(user_info['uid'], 1)
        log = "mysql:ok result=" + str(result) + " url=" + url
    return log
def search_request(message):
    # Create request
    request = {
        "id": message.id,
        "type": "comment" if message.was_comment else "message",
        "author": str(message.author),
        "link": f"https://www.reddit.com{message.context}" if message.was_comment
                else f"https://www.reddit.com/message/messages/{message.id}",
        "retries": 0,
    }
    # Check for duplicates
    util.already_processed_check(redis, request)
    lock = util.get_lock(request['id'])
    if redis.exists(lock):
        raise CurrentlyProcessing(request['link'])
    # Lock request to avoid duplicates
    redis.set(lock, "")
    request_json = json.dumps(request)
    # Enqueue for filtering
    redis.sadd(config['REDIS_REQUESTS_FILTER'], request_json)
    message.mark_read()
    logging.info(f"Found new request {request['id']} : {request['link']}.")
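# The exists()/set() pair above is not atomic: two workers can both pass the
# check before either sets the lock. SET with nx=True (and an expiry, so a
# crashed worker cannot wedge the lock) does the guard in one step. A sketch
# reusing the snippet's `redis`, lock key, and exception; the 600 s TTL is an
# assumption, not from the original.
def acquire_request_lock(redis, lock, link):
    # SET key value NX EX: succeeds only if the key does not already exist.
    if not redis.set(lock, "", nx=True, ex=600):
        raise CurrentlyProcessing(link)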
def callback():
    oauth_token = request.args.get('oauth_token')
    oauth_verifier = request.args.get('oauth_verifier')
    oauth_token_secret = redis.get(oauth_token)
    redis.delete(oauth_token)
    response = vatsim.get_user_details(oauth_token, oauth_token_secret, oauth_verifier)
    if response['request']['result'] != 'success':
        return json.dumps(response['request']['message']), 401
    vatsim_user = response['user']
    user_doc = current_app.data.driver.db['users'].find_one(
        {'vatsim_identity.id': vatsim_user['id']})
    if not user_doc:
        now = datetime.utcnow()
        user_doc = {
            '_created': now,
            '_updated': now,
            'vatsim_identity': vatsim_user,
        }
        user_doc['_id'] = current_app.data.driver.db['users'].insert_one(
            user_doc).inserted_id
    redis.set(oauth_verifier, user_doc['_id'])
    redis.expire(oauth_verifier, 24 * 60 * 60)
    return JSONRenderer().render(user_doc), 200
def post(self):
    ip = self.get_body_argument('ip', default=None, strip=False)
    port = self.get_body_argument('port', default='3244', strip=False)
    name = self.get_body_argument('name', default=None, strip=False)
    key = self.get_body_argument('key', default=None, strip=False)
    sign = self.get_body_argument('sign', default=None, strip=False)
    logger.info('----get_proxy_ip: ip:{}, port:{}, name:{}, key:{}, sign:{}'
                .format(ip, port, name, key, sign))
    if sign == get_sign(key) and ip:
        proxy = ip + ':' + port
        redis.set('proxy_' + name, proxy)
        redis.sadd('use_ips', ip)
        response = {
            'message': 'SUCCESS',
            'reason': '',
        }
    elif sign != get_sign(key):
        response = {
            'message': 'FALSE',
            'reason': 'Wrong Token',
        }
    else:
        response = {
            'message': 'FALSE',
            'reason': 'No Client ip',
        }
    self.write(json.dumps(response, ensure_ascii=False))
def save(proxies):
    for proxy in proxies:
        redis.set(Proxy._get_redis_key(str(proxy)), json.dumps(proxy._proxy_info))
        redis.sadd(Proxy._get_set_key(), str(proxy))
        redis.zadd(Proxy._get_sorted_set_key(),
                   {str(proxy): int(datetime.now().timestamp())})
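# Storing the insertion timestamp as the sorted-set score makes age queries
# cheap later, e.g. pulling proxies older than some cutoff with ZRANGEBYSCORE.
# A sketch; the one-hour cutoff is an assumption.
from datetime import datetime, timedelta

cutoff = int((datetime.now() - timedelta(hours=1)).timestamp())
stale = redis.zrangebyscore(Proxy._get_sorted_set_key(), 0, cutoff)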
def route_post_ad(slot):
    if not advertiser_id():
        return '', 404
    asset = request.files['asset']
    id = next_ad_id()
    key = ad_key(slot, id)
    type = fetch(request.form, 'type')
    if not type:
        type = asset.mimetype
    if not type:
        type = 'video/mp4'
    redis = get_redis()
    redis.hmset(key, {
        'slot': slot,
        'id': id,
        'title': fetch(request.form, 'title'),
        'type': type,
        'advertiser': advertiser_id(),
        'destination': fetch(request.form, 'destination'),
        'impressions': 0,
    })
    redis.set(asset_key(slot, id), asset.read())
    redis.rpush(slot_key(slot), id)
    redis.sadd(advertiser_key(advertiser_id()), key)
    return jsonify(get_ad(slot, id))
def SetLastDispatchApo(redis, app):
    try:
        redis.set("APO_DISPATCH_LAST_APP", json.dumps(app))
        return True
    except Exception:
        traceback.print_exc()
        return None
def check_pm(m):
    if m.chat.type == 'supergroup':
        redis.sadd(m.content_type, m.message_id)
        if redis.sismember('groups', m.chat.id):
            if not redis.get('expire' + str(m.chat.id)):
                # "The group's subscription has expired"
                bot.reply_to(m, '🔴 اعتبار گروه به پایان رسیده است 🔴')
                redis.srem('groups', m.chat.id)
                bot.leave_chat(m.chat.id)
            else:
                check(m)
    if m.chat.type == 'private':
        # Guard against messages that are not replies at all.
        if m.reply_to_message and m.reply_to_message.forward_from:
            if is_sudo(m.from_user.id):
                id = m.reply_to_message.forward_from.id
                if m.text == 'مسدود':  # "Block"
                    redis.set('ban' + str(id), True)
                    # "This user can no longer use the bot's private chat"
                    bot.reply_to(m, 'این کاربر دیگر قادر به استفاده از پیوی ربات نیست ❌')
                elif m.text:
                    bot.send_message(id, m.text)
                    bot.reply_to(m, 'رفت براش')  # "Sent to them"
                elif m.sticker:
                    bot.send_sticker(id, m.sticker.file_id)
                    bot.reply_to(m, 'رفت براش')
                elif m.photo:
                    bot.send_photo(id, m.photo[-1].file_id)
                    bot.reply_to(m, 'رفت براش')
def user_forget_task(email, active_code):
    redis.set(email, active_code)
    redis.expire(email, 120)  # code is valid for 120 seconds
    body = 'http://localhost:8000/accounts/change-password/{}/{}/'.format(
        signer.sign(email), signer.sign(active_code))
    # Subject: "Account activation"
    send_mail('فعالسازی حساب', body, settings.EMAIL_HOST_USER, [email])
    return 1
def _set_day(self, _date, sch):
    for key in sch.keys():
        if key in ("date", "mode", "action"):
            continue
        print("setting - " + _date)
        redis.set("scheduledItem:" + _date + ":" + key, sch[key])
    redis.save()
def runScript():
    mysqlConn = getMySQLConnection()
    cursor = mysqlConn.cursor()
    redis = getRedisConnection()
    while True:
        tags = getAllTags(mysqlConn)
        for tag in tags:
            start_time = datetime.now()
            cursor.execute(QUERY_TEMPLATE % (tag, 0, LIMIT * NUM_PAGES_FOR_TAG))
            all_rows = cursor.fetchall()
            for i in range(NUM_PAGES_FOR_TAG):
                offset = i * LIMIT
                redis_query_key = tag + ":" + str(offset) + ":" + str(LIMIT)
                rows_part = all_rows[i * LIMIT:(i + 1) * LIMIT]
                posts = []
                for row in rows_part:
                    posts.append(convertRowToPost(row))
                redis.set(redis_query_key, json.dumps(posts))
                print("Query '" + redis_query_key + "' was updated.")
            end_time = datetime.now()
            delta = end_time - start_time
            output_str = "Total time elapsed for updating: '" + tag + "' "
            output_str += str(delta.total_seconds()) + " seconds"
            print(output_str)
        print("Pause between updates is started")
        time.sleep(UPDATE_ITERATION_PAUSE)
        print("Pause between updates is finished")
def register_mallu_vote(name, vote):
    KEY = "mallu_votes"
    mallu_votes = None
    try:
        mallu_votes = redis.get(KEY)
    except Exception:
        print("Could not fetch key %s: %s" % (KEY, sys.exc_info()[0]))
    if not mallu_votes:
        mallu_votes = {}
    else:
        mallu_votes = json.loads(mallu_votes)
    current_count = 0
    try:
        current_count = mallu_votes[name]
    except KeyError:
        pass
    mallu_votes[name] = current_count + vote
    s_val = json.dumps(mallu_votes)
    try:
        redis.set(KEY, s_val)
    except Exception:
        print("Could not register vote %s with details %s for: %s"
              % (name + ":" + str(vote), s_val, sys.exc_info()[0]))
        raise
def dial():
    try:
        From = request.args['From']
        CallUUID = request.args['CallUUID']
        redis.set(From, CallUUID)
    except KeyError:
        From = None
        CallUUID = None
    r = plivo.Response()
    speak_parameters = {
        'language': 'en-US',
        'loop': '1',
        'voice': 'WOMAN',
    }
    r.addSpeak('Welcome to Plivo demo test.', **speak_parameters)
    play_parameters = {
        'loop': '50',
    }
    r.addPlay('http://s3.amazonaws.com/plivocloud/music.mp3', **play_parameters)
    response = make_response(r.to_xml())
    response.headers['Content-Type'] = 'text/xml'
    return response
def conductor(redis):
    # list + map raises TypeError on Python 3; materialize the map first.
    groups = list(StaticGroups.objects.all()) + list(
        map(lambda x: x.group_info, Groups.objects.filter(behavior=1)))
    for group in groups:
        if redis.get(group.group_id):
            day = datetime.utcnow()
            data_delta = timedelta(hours=24)
            if (not group.impression
                    and int(redis.get(group.group_id)) >= group.limit_impressions
                    and day - data_delta >= group.end_limit.replace(tzinfo=None)):
                redis.set(group.group_id, '0')
                group.limit_impressions = random_generate()
                group.save()
                continue
            if (not group.impression
                    and int(redis.get(group.group_id)) >= group.limit_impressions):
                group.impression = True
                group.save()
                redis.set(group.group_id, '0')
                continue
            if group.impression:
                delta = timedelta(hours=WAITING)
                if day - delta >= group.end_limit.replace(tzinfo=None):
                    group.impression = False
                    group.limit_impressions = random_generate()
                    group.save()
                else:
                    continue
def should_process(transcript, video_id):
    MIN_WORDS = 1000
    MIN_UNIQUE_WORDS = 500
    # Transcript entries look like:
    # {'text': 'is that a million yeah oh this is a', 'start': 0.06, 'duration': 5.04}
    unique_words = set()
    num_words = 0
    num_of_chars = 0
    for doc in transcript:
        line = doc["text"]
        words = line.split()
        num_words += len(words)
        num_of_chars += sum(len(word) for word in words)
        unique_words.update(words)
    avg_unique_word_size = sum(len(word) for word in unique_words) / len(unique_words)
    avg_word_size = num_of_chars / num_words
    quality_score = len(unique_words) / num_words * 100
    redis.set(name=video_id, value=avg_unique_word_size)
    print("https://youtube.com/watch?v=" + video_id)
    if num_words < MIN_WORDS:
        print("< MIN_WORDS")
        return False
    if len(unique_words) < MIN_UNIQUE_WORDS:
        print("< MIN_UNIQUE_WORDS")
        return False
    if avg_unique_word_size < 6.1:
        print("< avg_unique_word_size 6.1: {}".format(avg_unique_word_size))
        return False
    return True
def random(self):
    youtube_ids = redis.srandmember("musicacommonset", 30)
    if not youtube_ids:
        return {"success": False}
    nonrecent = []
    total = 0
    for youtube_id in youtube_ids:
        youtube_id = youtube_id.decode()
        ltime = redis.get("musicatime.%s" % youtube_id)
        if ltime is None or time.time() - (float(ltime.decode()) or 0) >= 3600:
            # Weight candidates by play count so common tracks surface more often.
            for i in range(int(redis.get("musicacommon.%s" % youtube_id).decode()) or 1):
                nonrecent.append(youtube_id)
    # Guard before random.choice: an empty pool means nothing is eligible.
    if not nonrecent:
        return {"success": False}
    youtube_id = query_search(random.choice(nonrecent), search=False)
    if not youtube_id:
        return {"success": False}
    youtube_id = youtube_id[0]
    redis.rpush("musicaqueue", json.dumps({
        "ytid": youtube_id,
        "uuid": str(uuid.uuid4()),
    }))
    redis.rpush("musicaload", youtube_id)
    redis.set("musicatime.%s" % youtube_id, time.time())
    return {"success": True, "ytid": youtube_id}
def create_url():
    data = request.get_json()
    full_url = data['full_url']
    try:
        url_life = data['url_life']  # lifetime in days
    except KeyError:
        url_life = 90
    if not full_url.startswith('http://') and not full_url.startswith('https://'):
        return make_response('<h2>Invalid URL format</h2>', 400)
    result = redis.get(full_url)
    if result is not None:
        return f"Shortened URL for {full_url} already exists: /{result}\n\n"
    short_url = ''.join(random.choice(alphabet) for i in range(url_len))
    while redis.get(short_url):
        short_url = ''.join(random.choice(alphabet) for i in range(url_len))
    if url_life <= 0:
        url_life = 90 * 24 * 60 * 60  # 90 days
    else:
        url_life = url_life * 24 * 60 * 60  # days -> seconds
    # Store the mapping in both directions with the same expiry.
    redis.set(full_url, short_url, ex=url_life)
    redis.set(short_url, full_url, ex=url_life)
    return f"Shortened URL for {full_url} is: /{short_url}\n\n"
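# Since the mapping is stored in both directions, resolving a short id needs
# only one GET. A hypothetical companion route (the route name, status code,
# and `redirect` import from flask are assumptions, not from the original):
@app.route('/<short_url>')
def resolve_url(short_url):
    # Look up the long URL stored under the short id.
    full_url = redis.get(short_url)
    if full_url is None:
        return make_response('<h2>Unknown or expired URL</h2>', 404)
    return redirect(full_url, code=302)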
def get_current_episode(file_path_b64=None):
    try:
        redis = redis_connect()
        if file_path_b64 is None:
            current_tv_show_name_b64 = redis_circular_list.current(
                redis, "STATE.USB_STORAGE.LIBRARY.TV_SHOWS")
            current_tv_show_name = utils.base64_decode(current_tv_show_name_b64)
            file_path_b64 = redis_circular_list.current(
                redis,
                f"STATE.USB_STORAGE.LIBRARY.TV_SHOWS.{current_tv_show_name_b64}")
        file_path = utils.base64_decode(file_path_b64)
        meta_data_key = f"STATE.USB_STORAGE.LIBRARY.META_DATA.{file_path_b64}"
        meta_data = redis.get(meta_data_key)
        if meta_data is None:
            meta_data = {
                "current_time": 0,
                "duration": 0,
                "last_watched_time": 0,
                "last_completed_time": 0,
                "file_path": file_path,
            }
            redis.set(meta_data_key, json.dumps(meta_data))
        else:
            meta_data = json.loads(meta_data)
        return meta_data
    except Exception as e:
        print(e)
        return False
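# `redis_circular_list` is an external helper. One plausible implementation of
# its current/rotate behavior on a plain Redis list, sketched here as an
# assumption (not necessarily the real module): LRANGE peeks at the head and
# RPOPLPUSH on the same key rotates the tail to the front.
def circular_current(redis, key):
    # Peek at the head of the list without rotating it.
    items = redis.lrange(key, 0, 0)
    return items[0] if items else None

def circular_rotate(redis, key):
    # Move the tail to the head, advancing which item is "current".
    return redis.rpoplpush(key, key)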
def main(argv):
    params = getDefaults()
    options = [p + "=" for p in params.keys()]
    options.append("help")
    try:
        opts, args = getopt.getopt(argv[1:], '', options)
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for o, a in opts:
        o = o[2:]  # chop off leading dashes
        if o == 'help':
            usage()
            sys.exit(0)
        params[o] = castParam(o, a)
    rd = redis.Redis(host=params['redis-server'],
                     port=params['redis-port'],
                     db=params['redis-db'])
    for img in args:
        # Read the image as bytes and store it under its filename.
        with open(img, 'rb') as img_file:
            rd.set(img, img_file.read())
    try:
        urllib.request.urlopen(params['triggerurl'] + "/" + ",".join(args))
    except Exception:
        print("hm, that didn't work")
def store_message(self, mailfrom, target, rcpttos, data):
    msg = email.message_from_string(data)
    headers = {
        "From": msg.get("From"),
        "To": msg.get("To"),
        "Subject": msg.get("Subject"),
        "Date": time.ctime(time.time()),
    }
    text_parts = []
    html_parts = []
    for part in msg.walk():
        if part.get_content_type() == "text/plain":
            text_parts.append(part.get_payload())
        elif part.get_content_type() == "text/html":
            html_parts.append(self.clean_html(part.get_payload()))
    simple_msg = {"headers": headers, "text_parts": text_parts, "html_parts": html_parts}
    simple_msg_json = json.dumps(simple_msg)
    timestamp = time.time()
    msgid = redis.get("msgid_counter")
    if msgid:
        msgid = redis.incr("msgid_counter")
    else:
        redis.set("msgid_counter", 1)
        msgid = 1
    msgkey = "message:" + str(msgid)
    redis.set(msgkey, simple_msg_json)  # storing the msg once
    # Modern redis-py zadd takes a {member: score} mapping.
    redis.zadd("messages:" + target, {msgkey: timestamp})  # all messages to me
    redis.zadd("messages_from:" + target + ":" + mailfrom,
               {msgkey: timestamp})  # all messages from you to me
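# With timestamps as scores, the inbox can be read back newest-first via
# ZREVRANGE and each message fetched by its key. A minimal sketch reusing the
# key scheme above; the function name is illustrative.
def recent_messages(redis, target, count=10):
    # Newest message keys first, ordered by timestamp score.
    msgkeys = redis.zrevrange("messages:" + target, 0, count - 1)
    return [json.loads(redis.get(k)) for k in msgkeys]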
def mag_listredis():
    if request.method == 'POST':
        input1 = int(request.form['fromdepth'])
        input2 = int(request.form['todepth'])
        input3 = int(request.form['iternum'])
        res = []
        cache = "sanjeev"
        start_t = time.time()
        for i in range(0, int(input3)):
            ran_num1 = "{:.3f}".format(random.uniform(input1, input2))
            ran_num2 = "{:.3f}".format(random.uniform(input1, input2))
            if r.exists(cache + str(ran_num1) + str(ran_num2)):
                rows = pickle.loads(r.get(cache + str(ran_num1) + str(ran_num2)))
            else:
                query = ('select count(*) from Earthquake where "depthError" BETWEEN '
                         + str(ran_num1) + ' AND ' + str(ran_num2) + ' ')
                con = sql.connect("database.db")
                cur = con.cursor()
                cur.execute(query)
                rows = cur.fetchall()
                r.set(cache + str(ran_num1) + str(ran_num2), pickle.dumps(rows))
                con.close()
        end_t = time.time() - start_t
        return render_template("index.html", e=end_t)
def save_model(model, redis):
    json_string = model.to_json()
    redis.set("{}_model".format(datetime.date.today()), json_string)
    model.save_weights("mnist_mlp.h5")
    with open("mnist_mlp.h5", "rb") as f:
        redis.set("{}_weights".format(datetime.date.today()), f.read())
    os.remove("mnist_mlp.h5")
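# The inverse operation would rebuild the model from the cached JSON and
# weights. A sketch assuming a tf.keras model and the same temp-file round
# trip for the HDF5 weights; function and key names follow save_model above.
import datetime
import os
from tensorflow.keras.models import model_from_json

def load_model(redis):
    today = datetime.date.today()
    model = model_from_json(redis.get("{}_model".format(today)).decode("utf-8"))
    # Keras loads weights from a file, so write the cached bytes out first.
    with open("mnist_mlp.h5", "wb") as f:
        f.write(redis.get("{}_weights".format(today)))
    model.load_weights("mnist_mlp.h5")
    os.remove("mnist_mlp.h5")
    return model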
def smsPost():
    global numbers
    global restList
    # setup response
    response = twiml.Response()
    # pull basic data from every message
    body = request.form['Body'].lower()
    num = request.form['From']
    # cookie data
    lastRecIndex = session.get('lastrec', -1)
    if num not in numbers:
        response.sms('Welcome to RootRec! Your number has been added to the list. '
                     'Reply with "Stop" at any time to be removed from this service')
        numbers.append(num)
        json_data = json.dumps(numbers)
        redis.set("nums", json_data)
    elif "yes" in body:
        # handle follow-up response
        if lastRecIndex == -1:
            response.sms("Sorry, something went wrong")
        else:
            rest = restList[lastRecIndex]
            response.sms("Great choice! {} is at {}, you can call them at {}".format(
                rest["name"], rest["addr"], rest["phone"]))
    else:
        rest = random.choice(restList)
        index = restList.index(rest)
        session['lastrec'] = index
        randNum = random.randrange(1, 3)
        opt = "opt" + str(randNum)
        optPrice = "opt" + str(randNum) + "price"
        response.sms('Hello, here is a healthy option nearby: you could go to "{}" and '
                     'get "{}" for {}. Reply with "next" for another option, or "yes" to '
                     'get the address.'.format(rest["name"], rest[opt], rest[optPrice]))
    return str(response)
def check_printers():
    app.logger.debug("Checking known printers...")
    for raw_printer in printers.get_printers():
        printer = drivers.get_printer_instance(raw_printer)
        printer.is_alive()
        if printer.client.connected:
            webcam = printer.webcam()
            try:
                if "stream" in webcam:
                    redis.set("webcam_%s" % (printer.ip,), webcam["stream"])
                else:
                    redis.delete("webcam_%s" % (printer.ip,))
            except Exception as e:
                app.logger.error(
                    "Cannot save webcam proxy information into cache: %s", e)
        printers.update_printer(
            name=printer.name,
            hostname=printer.hostname,
            ip=printer.ip,
            client=printer.client_name(),
            client_props={
                "version": printer.client.version,
                "connected": printer.client.connected,
                "read_only": printer.client.read_only,
            },
        )
def run(self):
    # update redis
    ping_redis(redis)
    redis.set("exabgp_seen_bgp_update", "1", ex=MON_TIMEOUT_LAST_BGP_UPDATE)
    autoconf_running = self.shared_memory_manager_dict["autoconf_running"]
    if not autoconf_running:
        log.info("setting up autoconf updater process...")
        with Connection(RABBITMQ_URI) as connection:
            self.autoconf_updater = AutoconfUpdater(
                connection, self.shared_memory_manager_dict)
            shared_memory_locks["autoconf_updates"].acquire()
            self.shared_memory_manager_dict["autoconf_running"] = True
            shared_memory_locks["autoconf_updates"].release()
            mp.Process(target=self.autoconf_updater.run).start()
        log.info("autoconf updater set up")
    # start host processes
    host_processes = []
    for host in self.hosts:
        host_process = mp.Process(target=self.run_host_sio_process, args=(host,))
        host_processes.append(host_process)
        host_process.start()
    while True:
        if not self.shared_memory_manager_dict["data_worker_should_run"]:
            for host_process in host_processes:
                host_process.terminate()
            break
        time.sleep(1)
def update(self):
    data = json.loads(redis.get('activity_data'))
    # urlbase = 'http://care.floorinmotion.com/api/' + 'monitoring/I4.A.'
    urlbase = 'http://front.recipe.fim-team.net/api/monitoring/room/FMDEV.500.'
    eventactif = ('BEDROOM', 'BATHROOM', 'FALL')
    evenement = ('BEDROOM', 'BATHROOM', 'FALL', 'ABSENCE', 'PRESENCE')
    cookies = {
        'JSESSIONID': '484886F867C4463491FDD873208123DC',
        'AWSELB': '9913C50D10591FEE0CB0FFE69B89039701A79A2DE3E111BBCD0B4DFBAD1D8FCBE394CBFC2A759087B985EF5DAA1553D995017A7A1171AF03E432638AB9F7D019635067608A737F9545C2E17DE5B43AEAF0B54BC5FD',
        '_gat': '1 ',
        '_ga': 'GA1.4.1335402241.1496105804',
        '_gid': 'GA1.4.730554077.1496142416',
    }
    # Build the URLs used to poll the data
    rs = (grequests.get(urlbase + str(key["n"]), cookies=cookies) for key in data)
    # Issue the requests and collect the responses as
    # answer = (response1, response2, ..., responseN)
    answer = grequests.map(rs)
    print(answer)
    # For each room in the list
    for i, room in enumerate(data):
        ro_n = json.loads(answer[i].text)
        # Update the last event for each room
        # print(ro_n['room']['lastEvent'])
        room['lastEvent'] = ro_n['room']['lastEvent']
        if room['lastEvent'] in eventactif:
            room['acti'] += '1'
        else:
            room['acti'] += '0'
        if '00000' in room['acti'] or '1' not in room['acti']:
            room['tmc'] = 0
            room['acti'] = ''
        else:
            room['tmc'] = int(len(room['acti']) / 5) * 5
        # update data
        # print(data)
    # Sort the rooms by activity
    data = sorted(data, key=lambda room: room['tmc'])[::-1]
    redis.set('activity_data', json.dumps(data))
    # app.logger.info(u'Inserting message: {}'.format(message))
    # redis.publish(REDIS_CHAN, message)
    return True
def get_stations():
    """Setup function to populate stations table if no data exists.
    Should only be run once."""
    resp = requests.get(bike_data_url).text
    # sha256 needs bytes, not str.
    hashed = hashlib.sha256(resp.encode('utf-8')).hexdigest()
    redis.set('station_data', hashed)
    redis.set('bike_data', hashed)
    stations_data = json.loads(resp)
    for s in stations_data:
        latitude, longitude = s['location']
        station = Station(id=s['id'],
                          station_status=s['station_stocking_status'],
                          name=s['name'],
                          description=s['description'],
                          has_kiosk=s['has_kiosk'],
                          has_ccreader=s['has_ccreader'],
                          station_type=s['type'],
                          latitude=latitude,
                          longitude=longitude)
        db.session.add(station)
        soup = BeautifulSoup(s['popup'], 'html.parser')
        bikes = int(soup.select('span.station-bikes b')[0].string)
        docks = int(soup.select('span.station-docks b')[0].string)
        redis.hmset(s['name'], {'bikes': bikes, 'docks': docks})
        # GEOADD takes longitude before latitude.
        redis.geoadd('stations', longitude, latitude, s['name'])
    db.session.commit()
def dev_from_null_hyp(grid, use_relative_deviation=False):
    """Calc deviation from null hypothesis"""
    hashkey = "hypothesis grid6" + str(xBins)
    # null_hyp_grid = cache.get(hashkey)
    null_hyp_grid = None  # avoid a NameError when the cache is empty
    serialized = redis.get(hashkey)
    if serialized:
        null_hyp_grid = pickle.loads(serialized)
    if isinstance(null_hyp_grid, np.ndarray):
        # Found in cache
        # print("null hypothesis grid loaded from cache")
        pass
    else:
        # Not found in cache
        print("null hypothesis not found in cache")
        lons, lats = get_enough_data()
        null_hyp_grid = gen_grid(lats, lons)
        # cache.set(hashkey, null_hyp_grid, timeout=60*60*24*31*99999)
        redis.set(hashkey, pickle.dumps(null_hyp_grid))
    if use_relative_deviation:
        quotent = np.divide(grid - null_hyp_grid, null_hyp_grid)
        NaNs = np.isnan(quotent)
        quotent[NaNs] = 0
        Infs = np.isinf(quotent)
        quotent[Infs] = 0
        maxerr = quotent.max()
        quotent = quotent + maxerr
    else:
        # Use absolute deviation plus max element
        # to remove entries < 0 (best try so far)
        quotent = grid - null_hyp_grid + null_hyp_grid.max()
    return quotent, null_hyp_grid
def insertdb():
    pprint(request.get_json())
    email = getHash(request.get_json()['email'])
    fn = getHash(request.get_json()['firstname'])
    ln = getHash(request.get_json()['lastname'])
    sex = getHash(request.get_json()['sex'])
    age = getHash(request.get_json()['age'])
    u = User(firstname=fn,
             lastname=ln,
             email=email,
             sex=sex,
             age=age,
             ssn=request.get_json()['ssn'],
             phone=request.get_json()['phone'],
             url=request.get_json()['url'],
             password=request.get_json()['password'],
             passwordStrngth=request.get_json()['passwordStrngth'],
             dob=request.get_json()['dob'],
             dobWithTime=request.get_json()['dobWithTime'],
             creditcardnumber=request.get_json()['creditcardnumber'])
    # Insert user data into the database
    db.session.add(u)
    db.session.commit()
    # Get the generated id for the user record
    userId = str(u.id)
    pprint('User saved with userID ' + userId)
    retrievedUser = User.query.filter_by(id=userId).first()
    # Convert the user record retrieved from the database to JSON
    userJson = json.dumps(retrievedUser.as_dict())
    # Store this JSON object with the generated id as key in the Redis cache
    redis.set(userId, userJson)
    pprint('User cached in redis with key ' + userId)
    pprint('Value = ' + userJson)
    return 'row inserted!!!'
def init(queue):
    count = 0
    while count < 5000:
        count += 1
        try:
            header = {'User-Agent': 'Mozilla/5.0'}
            url = queue.pop()
            # print(count)
            if not redis.exists("%s:%s" % (key, url)):
                # print("crawling %s ... " % url)
                req = urllib.request.Request(url, headers=header)
                page = urllib.request.urlopen(req)
                soup = BeautifulSoup(page, 'html.parser')
                links = soup.find_all("a")
                ret_links = get_in_links(url, links)
                queue = ret_links + queue
                redis.set("%s:%s" % (key, url), 1)
                redis.sadd(key, url)
                dict = parse(url, soup)
                if dict:
                    print(dict)
            else:
                # print("%s already crawled ....." % url)
                pass
        except Exception:
            print("error .........")
def sign_up():
    data = request.get_json(force=True)
    hashed_email = data.get("hashed_email")
    email = data.get("email")
    redis.set(hashed_email, email)
    return jsonify(status=True, message='Sign up complete!'), 200
def hongbao():
    """
    Periodically tally users who sent a passphrase and received a red packet.
    Rule: a user sends the passphrase to Paipai and receives a red packet.
    :return:
    """
    from datetime import datetime, timedelta
    from bson import ObjectId
    import re

    redis = _redis_client()
    # Users who have already been sent a red packet
    processed_users = set(json.loads(
        redis.get('viae/viae.provisional.hongbao/processed_users') or '[]'))
    # Timestamp of how far red-packet processing has progressed
    utc_tz = timezone('UTC')
    processed_since = redis.get('viae/viae.provisional.hongbao/processed_ts')
    logger.info('Processing from %s' % processed_since)
    processed_since = datetime.strptime(processed_since, '%Y-%m-%d %H:%M:%S').replace(tzinfo=utc_tz)
    dummy_id = ObjectId.from_datetime(processed_since)
    # Find which users have sent a red-packet passphrase
    pattern = re.compile(u'(体验旅行派APP领现金红包|新用户口令|领新用户红包|从微信过来领红包|下单送北京大房免费住)', re.IGNORECASE)
    sender_list = mongo_hedy.Message.distinct(
        'senderId', {'_id': {'$gt': dummy_id}, 'receiverId': 10000, 'contents': pattern})
    # These users must not be in the already-processed list and must be recently registered
    final_senders = {}
    user_dummy_id = ObjectId.from_datetime(processed_since - timedelta(days=7))
    for s in filter(lambda v: v not in processed_users, sender_list):
        u = mongo_yunkai.UserInfo.find_one({'userId': s, '_id': {'$gt': user_dummy_id}},
                                           {'userId': 1, 'nickName': 1})
        if not u:
            continue
        final_senders[u['userId']] = u
    if final_senders:
        # Prepare the report
        sections = []
        for uid, user in sorted(final_senders.items(), key=lambda v: v[0]):
            messages = mongo_hedy.Message.find({'senderId': uid, 'receiverId': 10000},
                                               {'contents': 1})
            c = '\n'.join([tmp['contents'] for tmp in messages])
            sections.append(u'%d: %s\n%s\n\n' % (uid, user['nickName'], c))
            processed_users.add(uid)
        email_contents = ''.join(sections).strip()
        from viae.job import send_email_to_group, send_email
        logger.info('Sending hongbao stats')
        # Subject: "Red packet request statistics"
        send_email_to_group(groups='MARKETPLACE', subject=u'红包申请统计', body=email_contents)
    # Expire after 7 days by default
    expire = 7 * 24 * 3600
    redis.set('viae/viae.provisional.hongbao/processed_users',
              json.dumps(list(processed_users)), expire)
    redis.set('viae/viae.provisional.hongbao/processed_ts',
              (datetime.utcnow() - timedelta(minutes=20)).replace(tzinfo=utc_tz)
              .strftime('%Y-%m-%d %H:%M:%S'), expire)
def status():
    hits = redis.get("hits")
    if hits is None:
        hits = '0'
    hits = int(hits)
    redis.set('hits', hits + 1)
    return "OK"
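# The GET/SET pair above is a read-modify-write, so two concurrent requests
# can record a single hit. Redis's INCR does the update atomically and treats
# a missing key as 0; a sketch of the same endpoint on top of it (the function
# name is illustrative):
def status_atomic():
    redis.incr('hits')  # atomic; no separate GET needed
    return "OK"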
def shorten():
    url = request.form['url']
    if validators.url(url):
        # crc32 wants bytes; mask to get an unsigned 32-bit key.
        key = zlib.crc32(url.encode('utf-8')) & 0xffffffff
        redis.set(key, url)
        return str(key)
    else:
        return 'Bad URL'
def add_route():
    '''Add a new trace to the back of the queue'''
    data = json.loads(request.data)
    trace = data['trace']
    target = data['target']
    redis.rpush(trace_list, json.dumps(trace))
    redis.set(last_ip, target)
    return ''
def struct_to_redis(redis, struct, prefix='CONTROL:'):
    for entry in struct.entries:
        if isinstance(entry, control.UnpackableStruct):
            # Recurse into nested structs, namespacing keys by struct name.
            struct_to_redis(redis, entry, prefix=prefix + entry.varname + ':')
        else:
            if entry.varname != 'dummy':
                redis.set(prefix + entry.varname, entry.val)
                print('Writing to redis: Key', prefix + entry.varname, 'val:', entry.val)
def cull_playlists(item):
    playlist_keys = redis.keys('playlist:*')
    for i in playlist_keys:
        playlist_name = i.split(':')[1]
        playlist = json.loads(redis.get(i))
        cull_playlist(playlist, item)
        redis.set('playlist:' + playlist_name, json.dumps(playlist))
    redis.save()
def set_voice_recording():
    play_url = request.form.get('play_url', '')
    redis.set('play_url', play_url)
    redis.save()
    response = make_response(render_template(
        "welcome.html", playfile_success_response="Play file details saved!"))
    response.headers['Content-type'] = 'text/html'
    return response
def shorten(url):
    short_id = redis.get('reverse-url:' + url)
    if short_id is not None:
        return short_id
    url_num = redis.incr('last-url-id')
    short_id = b62_encode(url_num)
    redis.set('url-target:' + short_id, url)
    redis.set('reverse-url:' + url, short_id)
    return short_id
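# b62_encode is not shown in this snippet. A typical base-62 encoder for the
# counter value might look like this sketch (the alphabet ordering is an
# assumption, not the original helper):
import string

B62_ALPHABET = string.digits + string.ascii_lowercase + string.ascii_uppercase

def b62_encode(num):
    # Build the id right-to-left from remainders mod 62.
    if num == 0:
        return B62_ALPHABET[0]
    digits = []
    while num > 0:
        num, rem = divmod(num, 62)
        digits.append(B62_ALPHABET[rem])
    return ''.join(reversed(digits))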
def index():
    count = 1
    bcount = redis.get('count')
    if bcount:
        count = int(bcount.decode('utf8')) + 1
    redis.set('count', str(count))
    return render_template('index.html', count=count)
def push_to_redis(redis, name, tasks):
    """
    Helper function to push data to Redis

    :param redis: Redis connection
    :param name: task name
    :param tasks: data to push
    """
    tasks_json = List.encode_json(tasks)
    redis.set(name, tasks_json)
def upload():
    id = id_generator(32)
    with open(os.path.join(app.config['UPLOAD_FOLDER'], id + '.gif'), 'wb') as file:
        file.write(request.get_data())
    redis.set(id, 'generating')
    redis.rpush('video', id)
    return id + '.gif'
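# id_generator is assumed to produce a random, filesystem-safe id. A plausible
# standard-library sketch (not necessarily the original helper):
import random
import string

def id_generator(size, chars=string.ascii_letters + string.digits):
    # Random alphanumeric id of the requested length.
    return ''.join(random.choice(chars) for _ in range(size))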
def get_token(username, length=20, timeout=20):
    """
    Obtain an access token that can be passed to a websocket client.
    """
    redis = get_redis_client()
    token = get_random_string(length)
    token_key = 'token:{}'.format(token)
    redis.set(token_key, username)
    redis.expire(token_key, timeout)
    return token
def set_sms_response():
    autoreply = request.form.get('autoreply', '')
    print(autoreply)
    redis.set('auto_reply', autoreply)
    redis.save()
    response = make_response(render_template(
        "welcome.html", success_response="SMS Auto Response saved!"))
    response.headers['Content-type'] = 'text/html'
    return response
def joinLobby(self, u):
    if u.id not in self.members:
        self.members.append(u.id)
        self.save()
        self.sendAction({
            "type": "join",
            "member": u.format(),
            "msg": "%s joined the lobby" % u.username,
        })
    redis.set("user:%s:lobby:%s:ping" % (u.id, self.id), time.time())
def newfunc(*args, **kwargs):
    # func.__name__ replaces the Python 2-only func.func_name;
    # md5 needs bytes, so encode the stringified arguments.
    key = '%s:%s' % (func.__name__,
                     hashlib.md5((str(args) + str(kwargs)).encode('utf-8')).hexdigest())
    value = redis.get(key)
    if value:
        return pickle.loads(value)
    value = func(*args, **kwargs)
    redis.set(key, pickle.dumps(value))
    return value
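# newfunc reads like the inner function of a memoizing decorator. The
# enclosing wrapper would look roughly like this (the decorator name is
# illustrative, and a module-level `redis` client is assumed):
import functools
import hashlib
import pickle

def redis_memoize(func):
    @functools.wraps(func)
    def newfunc(*args, **kwargs):
        key = '%s:%s' % (func.__name__,
                         hashlib.md5((str(args) + str(kwargs)).encode('utf-8')).hexdigest())
        value = redis.get(key)
        if value:
            return pickle.loads(value)
        value = func(*args, **kwargs)
        redis.set(key, pickle.dumps(value))
        return value
    return newfunc

@redis_memoize
def expensive_lookup(x):
    return x * x  # stands in for a slow computation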
def index():
    visitors = redis.get('visitors')
    os = redis.get('osdata')  # unused here; note it shadows the os module
    num = 0 if visitors is None else int(visitors)
    num += 1
    redis.set('visitors', num)
    user_agent = request.user_agent
    return render_template('index.html', number=num, user_agent=user_agent)