def store_event(event_name, data):
    event = dict(
        name=event_name,
        timestamp=datetime.utcnow().isoformat(),
        data=data,
    )
    redis.rpush(EVENTS_KEY, json.dumps(event))
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously
    old. These are removed from the queues and placed back on them afresh,
    to ensure the fetch & gather consumers are triggered to process them.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).total_seconds() > 180:
            redis.rpush(get_fetch_routing_key(),
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    # gather queue
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            redis.rpush(get_gather_routing_key(),
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def parse(self, response):
    redis = self.redis
    re_url = re.compile(r'https://www.tianyancha.com/company/\d+')
    code = response.meta['code']
    url = re_url.findall(response.text)
    if len(url) > 0:
        url = url[0]
        redis.lpush('lawcourt', code)
        province = response.xpath(
            '/html/body/div[2]/div/div[1]/div[2]/div[2]/div[1]/div/span/text()'
        ).extract_first()
        score = response.xpath(
            '/html/body/div[2]/div/div[1]/div[2]/div[2]/div[1]/div/div[4]/span[1]/text()'
        ).extract_first()
        yield scrapy.Request(url,
                             callback=self.parse_second,
                             meta={
                                 'url_link': url,
                                 'code': code,
                                 'province': province,
                                 'score': score
                             })
    else:
        redis.rpush('base', code)
    code = redis.lpop('base')
    print(self.redis.llen('base'))
    url = 'https://www.tianyancha.com/search?key={}'.format(code)
    yield scrapy.Request(url,
                         callback=self.parse,
                         meta={'code': code},
                         dont_filter=True)
def resubmit_jobs():
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()
    log.debug('_create_or_update_package')
    harvest_object_pending = redis.keys('harvest_object_id:*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).total_seconds() > 180:
            redis.rpush('harvest_object_id',
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    harvest_jobs_pending = redis.keys('harvest_job_id:*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            redis.rpush('harvest_job_id',
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def start_service(host, port, redis):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind((host, port))
    sock.listen(1)
    while True:
        conn, addr = sock.accept()
        while True:
            size = 0
            DATA_SIZE = 55
            accumulated_data = ''
            # accumulate exactly DATA_SIZE bytes per message; ask only for
            # the remainder so we never read into the next message
            while size < DATA_SIZE:
                data = conn.recv(DATA_SIZE - size).decode('utf-8')
                if len(data) == 0:
                    break
                accumulated_data += data
                size += len(data)
            if size < DATA_SIZE:
                break  # client disconnected mid-message
            json_obj = json.loads(accumulated_data)
            party_number = json_obj["party_number"]
            value = json_obj["value"]
            try:
                redis.rpush(str(int(party_number, 16)), int(value, 16))
            except Exception as e:
                print(e)
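# A matching client for the socket service above -- a minimal sketch, assuming
# each message is a single fixed-size 55-byte UTF-8 JSON payload carrying
# hex-encoded "party_number" and "value" fields. The space padding is an
# assumption (the original only shows the server side); host/port are examples.
import json
import socket

def send_value(host, port, party_number_hex, value_hex, data_size=55):
    payload = json.dumps({"party_number": party_number_hex, "value": value_hex})
    padded = payload.ljust(data_size)  # json.loads tolerates trailing whitespace
    if len(padded.encode('utf-8')) != data_size:
        raise ValueError("payload does not fit the fixed message size")
    with socket.create_connection((host, port)) as conn:
        conn.sendall(padded.encode('utf-8'))

send_value('localhost', 9000, '0xff', '0x2a')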
def process_links(project, url, spider):
    if redis.sismember("seen", json.dumps([project, url, spider])):
        print("Skipping %s; it has already been processed (For %s)" % (url, project))
    else:
        print("Processing %s for urls (For %s)" % (url, project))
        resp = session.get(url, timeout=15)
        resp.raise_for_status()
        html = lxml.html.document_fromstring(resp.content)
        if spider:
            for link in itertools.chain(html.find_rel_links("download"),
                                        html.find_rel_links("homepage")):
                try:
                    link.make_links_absolute(url)
                except ValueError:
                    continue
                if "href" in link.attrib and not installable(project, link.attrib["href"]):
                    parsed = urllib.parse.urlparse(link.attrib["href"])
                    if parsed.scheme.lower() in ["http", "https"]:
                        redis.rpush("queue",
                                    json.dumps([project, link.attrib["href"], False]))
        # Process all links in html for installable items
        for link in html.xpath("//a"):
            try:
                link.make_links_absolute(url)
            except ValueError:
                continue
            if "href" in link.attrib and installable(project, link.attrib["href"]):
                redis.rpush("results", json.dumps([project, url, link.attrib["href"]]))
        redis.sadd("seen", json.dumps([project, url, spider]))
def push(self, object):
    try:
        hashcode = serialize(self.type_name, object)
        redis.rpush(self.root_node[1],
                    redis.rpush(self.root_node[0], hashcode) - 1)
    except Exception:
        # fall back to storing the raw object if serialization fails
        redis.rpush(self.root_node[0], object)
def log_cb(msg):
    global special_log_keys
    levels = {1: "DEBUG", 2: "INFO", 4: "WARN", 8: "ERROR", 16: "FATAL"}
    # for reference: whitelist levels are 0: banned, 1: able to send to /Log,
    # 2: able to send to /Log and /Feedback
    sender = msg.name
    # if the whitelist is empty, publish everything; if the whitelist is
    # populated, only publish from approved nodes
    if (not whitelist) or whitelist.get(sender, 0) > 0:
        package = json.dumps(OrderedDict([("level", levels[msg.level]),
                                          ("from", sender),
                                          ("message", msg.msg)]))
        # if this is a tagged feedback message and the sending node has
        # feedback privilege, then send to the feedback key
        if (whitelist.get(sender, 0) > 1 and msg.msg.startswith("[")
                and len(msg.msg.split(']')) == 2):
            parts = msg.msg.lstrip('[ ').split(']')
            info = parts[0]
            message = parts[1]
            key, code = info.split()
            package = json.dumps(OrderedDict([("level", levels[msg.level]),
                                              ("from", sender),
                                              ("code", code.upper()),
                                              ("message", message)]))
            # turns "key_name" into "Key_Name"
            key = bt.namespace_key('_'.join([x.capitalize()
                                             for x in key.split('_')]))
            redis.rpush(key, str(package))
            special_log_keys.add(key)
            bt.save_json_file("special_log_keys.json", list(special_log_keys))
        # otherwise, just send to the standard log key
        else:
            redis.rpush(redis_key, str(package))
def random(self):
    youtube_ids = redis.srandmember("musicacommonset", 30)
    if not youtube_ids:
        return {"success": False}
    nonrecent = []
    for youtube_id in youtube_ids:
        youtube_id = youtube_id.decode()
        ltime = redis.get("musicatime.%s" % youtube_id)
        if ltime is None or time.time() - (float(ltime.decode()) or 0) >= 3600:
            # weight each candidate by its play count so common tracks
            # are picked more often
            for i in range(int(redis.get("musicacommon.%s" % youtube_id).decode()) or 1):
                nonrecent.append(youtube_id)
    if not nonrecent:
        return {"success": False}
    youtube_id = query_search(random.choice(nonrecent), search=False)
    if not youtube_id:
        return {"success": False}
    youtube_id = youtube_id[0]
    redis.rpush("musicaqueue",
                json.dumps({"ytid": youtube_id, "uuid": str(uuid.uuid4())}))
    redis.rpush("musicaload", youtube_id)
    redis.set("musicatime.%s" % youtube_id, time.time())
    return {"success": True, "ytid": youtube_id}
def run(self):
    redis = open_connection_redis(self.schema_conv_output_option.host,
                                  self.schema_conv_output_option.username,
                                  self.schema_conv_output_option.password,
                                  self.schema_conv_output_option.dbname)
    mongoConnection = open_connection_mongodb(self.schema_conv_init_option)
    # TODO: refactor this into a single schema check, then loop over the data.
    for collection_name in mongoConnection.list_collection_names():
        # iterate documents; each carries its data plus a schema descriptor
        for dataAndSchema in mongoConnection[collection_name].find():
            data = dataAndSchema["data"]
            collection_type = dataAndSchema["schema"]["collection"]
            if collection_type == "string":
                for item in data:
                    redis.set(item["key"], item["value"])
            elif collection_type == "list":
                for item in data:
                    redis.rpush(item["key"], *item["value"])
            elif collection_type == "set":
                for item in data:
                    redis.sadd(item["key"], *item["value"])
            elif collection_type == "hash":
                for item in data:
                    redis.hset(item["key"], mapping=item["value"])
            elif collection_type == "sortedSet":
                for item in data:
                    redis.zadd(item["key"], item["value"])
            else:
                # TODO: bitmap, hyperloglog
                for item in data:
                    redis.set(collection_type + "_" + item["key"], item["value"])
def handle_package(package, redis):
    print('{} Package: {}'.format(Consts.LOG_PREFIX, package.get('packageid')))
    package_id = package.get('packageid')
    billing_type = package.get('billingtype')
    allow_cross_region_trading_and_gifting = str2bool(
        package.get('extended', {}).get('allowcrossregiontradingandgifting'))
    allow_purchase_from_restricted_countries = str2bool(
        package.get('extended', {}).get('allowpurchasefromrestrictedcountries'))
    purchase_restricted_countries = package.get(
        'extended', {}).get('purchaserestrictedcountries')
    only_allow_run_in_countries = package.get(
        'extended', {}).get('onlyallowrunincountries')
    package_json = {
        "package_id": package_id,
        "billing_type": billing_type,
        "allow_cross_region_trading_and_gifting": allow_cross_region_trading_and_gifting,
        "allow_purchase_from_restricted_countries": allow_purchase_from_restricted_countries,
        "purchase_restricted_countries": purchase_restricted_countries,
        "only_allow_run_in_countries": only_allow_run_in_countries,
    }
    for app_id in package.get('appids', {}).values():
        app = {"app_id": app_id, "package": package_json}
        redis.rpush('apps-queue', json.dumps(app))
def route_post_ad(slot):
    if not advertiser_id():
        return '', 404
    asset = request.files['asset']
    id = next_ad_id()
    key = ad_key(slot, id)
    type = fetch(request.form, 'type')
    if not type:
        type = asset.mimetype
    if not type:
        type = 'video/mp4'
    redis = get_redis()
    redis.hmset(key, {
        'slot': slot,
        'id': id,
        'title': fetch(request.form, 'title'),
        'type': type,
        'advertiser': advertiser_id(),
        'destination': fetch(request.form, 'destination'),
        'impressions': 0
    })
    redis.set(asset_key(slot, id), asset.read())
    redis.rpush(slot_key(slot), id)
    redis.sadd(advertiser_key(advertiser_id()), key)
    return jsonify(get_ad(slot, id))
def group_invite():
    client, db = connect_mongo()
    if not client or not db:
        print('connect mongo failed')
        return False
    redis = connect_redis()
    if not redis:
        print('connect redis failed')
        return False
    count = db.task.count({'status': 1})
    start, step = 0, 10
    while start < count:
        this_loop_records = db.task.find({'status': 1}).limit(step).skip(start)
        for record in this_loop_records:
            if (record.get('key') and record.get('trigger_time') > 0
                    and record.get('trigger_time') < int(time.time())):
                print(record)
                record['_id'] = str(record['_id'])
                redis.rpush('weixin_robot_admin_command', json.dumps(record))
                db.task.find_one_and_update({'_id': ObjectId(record['_id'])},
                                            {'$set': {'status': 2}})
        start += step
    client.close()
    return True
def request_config_mfc(ip, data=None):
    if data is None:
        data = """<mfc-request><header><type>GET</type></header>
        <data>running-config mfc-cluster mfc</data></mfc-request>"""
    mfc_con = MfcHttpConnection(ip)
    resp = mfc_con.send_request(data)
    # serialize the (ip, response) pair; rpush expects string-like values
    redis.rpush(config.get('constants', 'REDIS_CONFIG_XML_QUEUE_KEY'),
                json.dumps([ip, resp]))
    return resp
def manage():
    if request.method == 'POST':
        redis.rpush(user.username + "_phonebook",
                    request.form['contact_name'] + " - "
                    + request.form['contact_number'])
    return render_template('manage.html')
def execute_later(redis, queue, name, args, delay=0):
    identifier = str(uuid.uuid4())
    item = json.dumps([identifier, queue, name, args])
    if delay > 0:
        redis.zadd('delayed:queue:', {item: time.time() + delay})
    else:
        redis.rpush('queue:' + queue, item)
    return identifier
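# The delayed half of execute_later() needs a companion poller that moves due
# items from the 'delayed:queue:' zset onto their destination list. The
# original does not include one; this is a minimal sketch, assuming a client
# created with decode_responses=True. ZREM acts as the claim: only the poller
# that removes the item gets to enqueue it, so two pollers cannot double-send.
import json
import time

def poll_queue(redis):
    while True:
        # peek at the earliest-scheduled item
        item = redis.zrange('delayed:queue:', 0, 0, withscores=True)
        if not item or item[0][1] > time.time():
            time.sleep(0.05)  # nothing due yet
            continue
        payload = item[0][0]
        _identifier, queue, _name, _args = json.loads(payload)
        if redis.zrem('delayed:queue:', payload):
            redis.rpush('queue:' + queue, payload)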
def add_route():
    '''Add a new trace to the back of the queue'''
    data = json.loads(request.data)
    trace = data['trace']
    target = data['target']
    redis.rpush(trace_list, json.dumps(trace))
    redis.set(last_ip, target)
    return ''
def create_player(self, user):
    pid = redis.incr(self.key(':players:next'))
    player = Player(self.gid, pid)
    player.set_username(user.username)
    redis.rpush(self.key(':players'), pid)
    event_data = json.dumps({'action': 'join', 'player': pid})
    redis.publish(self.key(':players_channel'), event_data)
    return player
def create(owner, name):
    gid = redis.incr('games:next')
    redis.rpush('games', gid)
    game = Game(gid)
    redis.hset(game.key(), 'owner', owner.username)
    redis.hset(game.key(), 'name', name)
    redis.hset(game.key(), 'state', 'setup')
    return game
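# Hypothetical read-side counterpart to the two create() functions above (the
# name list_games and the 'games:<gid>' key shape are assumptions, not from
# the original): walk the id list written by create(), then fetch each hash.
def list_games(redis):
    games = []
    for gid in redis.lrange('games', 0, -1):  # assumes decode_responses=True
        fields = redis.hgetall('games:%s' % gid)  # assumed Game.key() format
        fields['gid'] = gid
        games.append(fields)
    return games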
def delete(self, uuid):
    found = self.find(uuid)
    while found is not None:
        redis.lrem("musicaqueue", 0, found)
        redis.rpush("musicaudit",
                    "removed entry for %s at %s because of deletion request"
                    % (found, time.ctime()))
        found = self.find(uuid)
def create(username, password):
    user = User(username)
    pw_hash = generate_password_hash(password)
    ok = redis.hsetnx(user.key(), 'password_hash', pw_hash)
    if not ok:
        return False
    redis.hset('users:{}'.format(username), 'score', 0)
    redis.rpush('users', username)
    return user
def enqueue_job(job_id, date_range, start_date, end_date):
    redis = get_redis_connection()
    job_definition = {
        "id": job_id,
        "date_range": date_range,
        "start_date": start_date,
        "end_date": end_date
    }
    redis.rpush("jobs", json.dumps(job_definition))
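# The consuming side of the "jobs" list is not shown; a minimal worker sketch
# (the handle_job callback and the 5-second wake-up are assumptions). BLPOP
# blocks until an item arrives, so the worker does not busy-wait.
import json

def run_worker(redis, handle_job):
    while True:
        popped = redis.blpop("jobs", timeout=5)
        if popped is None:
            continue  # timed out; loop again (e.g. to check a shutdown flag)
        _key, raw = popped
        handle_job(json.loads(raw))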
def scrape_ids_from(url):
    r = requests.get(url)
    ids = regex_ids(r.text)
    count = 0
    for id in ids:
        if id not in bloom:
            redis.rpush(queue, id)
            count += 1
            bloom.add(id)
    return count
def main():
    # Grab a list of projects from PyPI
    projects = xmlrpc.client.Server("http://pypi.python.org/pypi").list_packages()
    # Add some urls to our queue
    for project in projects:
        redis.rpush("queue",
                    json.dumps([project,
                                "https://pypi.python.org/simple/" + project + "/",
                                True]))
    workers = [gevent.spawn(worker) for _ in range(WORKERS)]
    gevent.joinall(workers)
def upload():
    id = id_generator(32)
    with open(os.path.join(app.config['UPLOAD_FOLDER'], id + '.gif'), 'wb') as file:
        file.write(request.get_data())
    redis.set(id, 'generating')
    redis.rpush('video', id)
    return id + '.gif'
def qq6():
    # enumerate every 6-digit QQ mail address (leading digit 1-9)
    for i1 in range(1, 10):
        for i2 in range(0, 10):
            for i3 in range(0, 10):
                for i4 in range(0, 10):
                    for i5 in range(0, 10):
                        for i6 in range(0, 10):
                            addr = '{}{}{}{}{}{}@qq.com'.format(
                                i1, i2, i3, i4, i5, i6)
                            redis.rpush('qq6', addr)
    redis.rpush('qq6', '*****@*****.**')
def _(event):
    ''' update frequency/call '''
    radios.current_value = radios.values[radios._selected_index][0]
    tunedata = radios.current_value.split(sep=" ", maxsplit=3)
    globalvars['lastcall'] = tunedata[2]
    if qrz.checked is True:
        redis.rpush('qrzLookupQueue', tunedata[2])
    frequency.content = FormattedTextControl(
        HTML('<b fg="#884444">Freq.:</b> ' + (tunedata[1] + " kHz").rjust(15)))
    dx.content = FormattedTextControl(
        HTML('<b fg="#884444">Call:</b> ' + tunedata[2].rjust(12)))
    event.app.invalidate()
def reset_queue(redis, key, *fields):
    """ reset for queue-based keys """
    while redis.llen(key) > 0:
        redis.lpop(key)
    d = {f: None for f in fields}
    redis.rpush(key, json.dumps(d))
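# Illustrative usage of reset_queue (the key and field names here are made
# up): the list is drained, then re-seeded with one null-field JSON record.
reset_queue(redis, "worker:status", "job_id", "state")
print(redis.lrange("worker:status", 0, -1))
# with decode_responses=True: ['{"job_id": null, "state": null}']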
def enqueue(self, youtube_id):
    youtube_ids = query_search(youtube_id) if youtube_id else None
    if not youtube_ids:
        return {"success": False}
    for youtube_id in youtube_ids:
        redis.rpush("musicaqueue",
                    json.dumps({"ytid": youtube_id, "uuid": str(uuid.uuid4())}))
        redis.rpush("musicaload", youtube_id)
        redis.incr("musicacommon.%s" % youtube_id)
        redis.sadd("musicacommonset", youtube_id)
        redis.set("musicatime.%s" % youtube_id, time.time())
    return {"success": True}
def enqueue(self, youtube_id):
    youtube_id = query_search(youtube_id) if youtube_id else None
    if not youtube_id:
        return {"success": False}
    redis.rpush("musicaqueue",
                json.dumps({"ytid": youtube_id, "uuid": str(uuid.uuid4())}))
    redis.rpush("musicaload", youtube_id)
    return {"success": True}
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously
    old. These are removed from the queues and placed back on them afresh,
    to ensure the fetch & gather consumers are triggered to process them.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        redis_value = redis.get(key)
        if redis_value is None:
            log.info('Fetch Queue: Redis cannot get value for key {}'.format(key))
            continue
        date_of_key = datetime.datetime.strptime(redis_value,
                                                 "%Y-%m-%d %H:%M:%S.%f")
        log.debug('[Fetch queue]: Check key {} with value {}'.format(key, date_of_key))
        # 3 minutes for fetch and import max
        if (datetime.datetime.now() - date_of_key).total_seconds() > 180:
            log.debug('[Fetch queue]: Renew harvest object with KEY {} in redis'.format(key))
            redis.rpush(get_fetch_routing_key(),
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    # gather queue
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        redis_value = redis.get(key)
        if redis_value is None:
            log.info('Gather Queue: Redis cannot get value for key {}'.format(key))
            continue
        date_of_key = datetime.datetime.strptime(redis_value,
                                                 "%Y-%m-%d %H:%M:%S.%f")
        log.debug('[Gather queue]: Check key {} with value {}'.format(key, date_of_key))
        # 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            log.debug('[Gather queue]: Renew harvest job with KEY {} in redis'.format(key))
            redis.rpush(get_gather_routing_key(),
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def set_data(request):
    """
    Forward the data posted from the front-end UI to the MQTT server and
    store it in the Redis database.
    :param request:
    :return:
    """
    if request.method == 'POST':
        for i in request.POST:
            redis.rpush("set_value", i)
            redis.rpush("set_value", request.POST[i])
            print("set_value", i)
        print(redis.llen('set_value'))
        return JsonResponse({'success': True})
def send():
    content = request.args.get("msg", "Hello World")
    timestamp = datetime.utcnow().strftime("%Y/%m/%d.%H:%M:%S.%f")
    message = {
        "command": "send_message",
        "timestamp": timestamp,
        "args": {
            "content": content
        }
    }
    commandStr = json.dumps(message)
    redis.rpush('messages', commandStr)
    return 'Sent: {0}!'.format(escape(content))
def finish(pid_file, filepath, operate, user_id, list_id, import_id):
    if operate == 'run':
        redis.hincrby(COMMON_VAR_COUNT_HASH, 'edm_web_mail_import_couont', -1)
        # address-pool statistics
        redis.rpush(EDM_WEB_USER_MAIL_IMPORT_COUNT_QUEUE,
                    '{}_{}'.format(user_id, list_id))
    close_file()
    close_connect()
    if filepath and os.path.isfile(filepath):
        os.unlink(filepath)
    if os.path.exists(pid_file):
        os.unlink(pid_file)
def readInputs(gpio):
    for io_id in gpio:
        io = gpio[io_id]
        if io.getMode() == "input":
            state = io.getState()
            # logging.debug("Reading state " + str(state) + " from " + str(io_id))
            if state == "NULL":
                pass
            else:
                mask = 255
                back = mask - state
                if back > 0 and back != io.getRecordedState():
                    io.setRecordedState(back)
                    active = str(io.getAddress()) + "," + io.getBank() + "," + str(back)
                    logging.debug("sensors " + active)
                    redis.rpush("sensors", active)
                if state == 255:
                    io.setRecordedState(255)
async def asynchronous():
    start = time.time()
    async with aiohttp.ClientSession() as session:
        futures = [fetch_async(i, session) for i in range(1, MAX_THREADS + 1)]
        for i, future in enumerate(asyncio.as_completed(futures)):
            result = await future
            # Here we can make calculations
            if isinstance(result, list):
                pos = random.randint(1, 10)
                data = result[pos].get('id')
            else:
                data = result.get('message')
            print('{} {}'.format(">>" * (i + 1), data))
            # Insert in REDIS LIST
            redis.rpush('dates', data)
    print("The process took: {:.2f} seconds".format(time.time() - start))
def ta_orchestrator():
    global market_list
    redis = ExchangeDataHandler().getInstance().getRedisDb()
    starttime = 0
    diftime = 0
    while True:
        try:
            if redis.llen("ta_markets") == 0:
                if starttime > 0:
                    diftime = time.time() - starttime
                    print("completed ta cycle in {} seconds".format(diftime))
                for market in market_list.split(" "):
                    redis.rpush("ta_markets", market)
                starttime = time.time()
            else:
                time.sleep(5)
        except Exception as ex:
            print("push_ta_queue exception thrown: {}".format(ex))
def mux_demux():
    yt_url = request.args.get('yt_url', None)
    if yt_url is None:
        abort(400)
    ytid = get_ytid_from_url(yt_url)
    redis.rpush('yturls', yt_url)
    with open('{output_dir}/{ytid}.status.json'.format(output_dir=output_dir,
                                                       ytid=ytid), 'w') as f:
        f.write('{"status": "processing"}')
    return json.dumps({
        'video_url': '{video_url_prefix}/{ytid}.mp4'.format(
            video_url_prefix=video_url_prefix, ytid=ytid),
        'status_url': '{video_url_prefix}/{ytid}.status.json'.format(
            video_url_prefix=video_url_prefix, ytid=ytid)
    })
def hello():
    voter_id = request.cookies.get('voter_id')
    if not voter_id:
        voter_id = hex(random.getrandbits(64))[2:]
    vote = None
    if request.method == 'POST':
        vote = request.form['vote']
        data = json.dumps({'voter_id': voter_id, 'vote': vote})
        redis.rpush('votes', data)
    resp = make_response(render_template(
        'index.html',
        option_a=option_a,
        option_b=option_b,
        hostname=hostname,
        vote=vote,
    ))
    resp.set_cookie('voter_id', voter_id)
    return resp
def update_search_stack(api_session, tweet_stack, keyword):
    """Searches for a specific term on twitter public timeline"""
    # Storing last fetched id in order to make fewer requests
    since_id = get_since_id(redis, "%s:%s" % (LOLCOIFFEURS_LIST, keyword))
    search_tweet = api_session.GetSearch(term=keyword, since_id=since_id)
    for t in search_tweet:
        computed_tweet = {
            "keyword": keyword,
            "username": t.user.screen_name,
            "created_at": t.created_at,
            "text": t.text,
        }
        sys.stdout.write("adding tweet with id %s by user %s to database\n"
                         % (str(t.id), str(t.user.screen_name)))
        if computed_tweet["username"] not in BLACKLISTED_USERS:
            redis.rpush(LOLCOIFFEURS_LIST + ":%s" % keyword, t.id)
            redis.hmset("%s:tweet:%s" % (LOLCOIFFEURS_LIST, t.id), computed_tweet)
    print("Last since_id in effect for this keyword: %s" % since_id)
def __setitem__(self, key, value):
    self.has_changed = True
    self.__delitem__(key)
    try:
        value.__iter__  # raises AttributeError if value is not iterable
        if isinstance(key, int):
            redis.lset(self.root_node[0], key, serialize(self.type_name, value))
            redis.rpush(self.root_node[1], key)
        else:
            redis.hset(self.root_node[1], key, serialize(self.type_name, value))
    except Exception:
        # non-iterable values are stored directly
        if isinstance(key, str):
            redis.hset(self.root_node[0], key, value)
        else:
            redis.lset(self.root_node[0], key, value)
def add_key_f(key):
    try:
        # Python 2 tuple-unpacking lambdas rewritten to take the
        # (key, value) pair explicitly
        strategies = {
            'string': lambda kv: redis.set(kv[0], kv[1]),
            'hash': lambda kv: redis.hmset(kv[0], kv[1]),
            'list': lambda kv: [redis.rpush(kv[0], x) for x in kv[1]],
        }
        print(strategies)
        print(flask.request.json)
        return json.dumps(definite_strategy(strategies,
                                            flask.request.json['kind'],
                                            (key, flask.request.json['value'])),
                          indent=2)
    except Exception as e:
        return str(e)
def serialize_array(type_name, array):
    hashcode = str(uuid4())
    namehash = "%s:%s" % (type_name, hashcode)
    metahash = "%s:linked_fields:%s" % (type_name, hashcode)
    for index, item in enumerate(array):
        try:
            item.__iter__  # iterable items are serialized recursively
            redis.rpush(metahash, index)
            redis.rpush(namehash, serialize(type_name, item))
        except Exception:
            redis.rpush(namehash, item)
MONGO_HOST = os.getenv('MONGODB_HOST', 'localhost')
MONGO_PORT = int(os.getenv('MONGODB_PORT', 27017))
REDIS_HOST = os.getenv('REDIS_HOST', 'localhost')  # assumed, mirroring the Mongo settings
REDIS_PORT = int(os.getenv('REDIS_PORT', 6379))    # assumed, mirroring the Mongo settings

mongo = pymongo.MongoClient(MONGO_HOST, MONGO_PORT)
redis = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT)

# clear cnames and frontends
for key in redis.keys('frontend:*') + redis.keys('cname:*'):
    redis.delete(key)

for app in mongo.tsuru.apps.find():
    backends = [app['name']]
    for container in mongo.tsuru.docker_containers.find({'appname': app['name']}):
        if container['appname'] == app['name']:
            backends.append("http://%s:%s" % (container['hostaddr'],
                                              container['hostport']))
    print("frontend:%s" % app['ip'])
    for backend in backends:
        print("  %s" % backend)
        redis.rpush("frontend:%s" % app['ip'], backend)
    if 'cname' in app and isinstance(app['cname'], list) and len(app['cname']) > 0:
        for cname in app['cname']:
            print("frontend:%s" % cname)
            for backend in backends:
                print("  %s" % backend)
                redis.rpush("frontend:%s" % cname, backend)
        print("cname:%s" % app['name'])
        for cname in app['cname']:
            print("  %s" % cname)
            redis.rpush("cname:%s" % app['name'], cname)
import redis

redis = redis.Redis(host='localhost', port=6379, db=0)

# Data Type : String Value
redis.set("name", "zedo")
print(redis.get("name"))

# Data Type : Integer Value
redis.set("counter", 1)
print(redis.get("counter"))  # 1
redis.incr("counter")
print(redis.get("counter"))  # 2
redis.decr("counter")
print(redis.get("counter"))  # 1

# List : possible to duplicate values
redis.rpush("members", "r1")
redis.rpush("members", "r2")
redis.lpush("members", "l1")
redis.lpush("members", "l2")
print(redis.lrange("members", 0, 0))
print(redis.lrange("members", 0, 1))
print(redis.lrange("members", 0, 2))
print(redis.llen("members"))
print(redis.lrange("members", 0, redis.llen("members") - 1))
print(redis.lindex("members", 3))
print(redis.rpop("members"))
print(redis.lpop("members"))
print(redis.llen("members"))
print(redis.lrange("members", 0, redis.llen("members") - 1))
redis.delete("members")
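# Two small addenda to the list walkthrough above, for reference: RPUSH
# accepts several values in one call, and LRANGE end indexes are inclusive
# and may be negative, so 0..-1 returns the whole list without calling LLEN.
redis.rpush("members", "r1", "r2")
print(redis.lrange("members", 0, -1))  # [b'r1', b'r2']
redis.delete("members")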
redis.hset("AI_rabbitmq","server", "xxxxx") redis.hset("AI_rabbitmq","queue" , 'alert_status_queue' ) redis.hset("Master_Web","crt_file", 'xxxxxxx') redis.hset("Master_Web","key_file",'xxxxxx') redis.hset("Master_Web","SECRET_KEY",'xxxxxx') redis.hset("Master_Web","DEBUG",False) redis.hset("Master_Web","RealmDigestDB",'xxxxxx') redis.hset("Master_Web","users",json.dumps([{ "user":"******","password":"******" } ])) # # # Proxy Web Server Setups # # redis.delete("vhosts") #LaCima Site redis.rpush("vhosts","xxxx") redis.hset("LaCima","crt_file", 'xxxxx') redis.hset("LaCima","key_file",'xxxxx') redis.hset("LaCima","SECRET_KEY",'xxxxx') redis.hset("LaCima","DEBUG",False) redis.hset("LaCima","RealmDigestDB",'xxxx') redis.hset("LaCima","users",json.dumps([{ "user":"******","password":"******" } ])) redis.hset("LaCima","web_port",'1025') redis.hset("LaCima","rabbit_username",'xxxxx') redis.hset("LaCima","rabbit_password",'xxxxx') redis.hset("LaCima","rabbit_port",5671 ) redis.hset("LaCima","rabbit_server",'xxxxxx') redis.hset("LaCima","rabbit_queue",'xxxxx')
def redis_set_directory(redis, relpath, sub_file):
    # the Python 2 original wrapped these in unicode(); decode bytes explicitly
    if isinstance(relpath, bytes):
        relpath = relpath.decode("utf-8")
    key = relpath + CacheManager.POST_FIX_LIST_DIR
    redis.rpush(key, str(sub_file))
import redis
import json

host = 'localhost'
port = 6379
db = 0
redis = redis.StrictRedis(host=host, port=port, db=db)

start_command = {'op': 'start', 'target': 'project', 'target_id': 'douban'}
stop_command = {'op': 'stop', 'target': 'project', 'target_id': 'douban'}

redis.rpush('command_q', json.dumps(start_command))
# redis.rpush('command_q', json.dumps(stop_command))

exit_command = {'op': 'exit'}
# redis.rpush('command_q', json.dumps(exit_command))
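# The receiving side of command_q is not shown; a minimal dispatcher sketch
# (the handlers mapping is an assumption: op name -> callable taking the
# command dict). It drains the list and stops on the 'exit' op.
def command_loop(redis_conn, handlers):
    while True:
        _key, raw = redis_conn.blpop('command_q')
        command = json.loads(raw)
        if command['op'] == 'exit':
            break
        handlers[command['op']](command)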
def push_front(self, element):
    """Push an element onto the deque.

    Note: RPUSH appends at the list's right end, which this class treats
    as the front of the deque.
    """
    key = self.key
    redis.rpush(key, element)
    log.debug('Pushed ** %s ** for key ** %s **' % (element, self.key))
#!/usr/bin/python
from pymongo import MongoClient
import redis

mongo = MongoClient()
db = mongo.mysticpaste
collection = db.pastes

redis = redis.Redis()

for paste in collection.find({"privateFlag": False,
                              "abuseCount": {"$lt": 2}}).sort("pasteIndex", -1):
    redis.rpush('pasteHistory', paste['pasteIndex'])
datagen, headers = multipart_encode({
    "user_id": user_id,
    "image": img_fp
})
try:
    request = urllib2.Request(
        "http://www.huaban123.com/Action/WeixinMpApi.aspx?action=uploadImg",
        datagen, headers)
    res_json = urllib2.urlopen(request).read()
except Exception, data:
    print "post exception: ", data
    if try_count <= 5:
        continue
    else:
        # give up on this attempt and re-queue the image
        redis.rpush("image_to_upload", image_info)
        break
res = json.loads(res_json)
if res["type"] == "success":
    print image_info + " upload ok!"
    time.sleep(1)
    break
else:  # upload fail
    if try_count <= 5:
        print image_info + " upload fail but still trying, count " + str(try_count)
        try_count = try_count + 1
        time.sleep(backoff_time)
        backoff_time = 2 * backoff_time
    else:
        # push the image_info back
        redis.rpush("image_to_upload", image_info)
        break
def post_receive_hook():
    """Listen for GitHub hooks and check the app's size again."""
    sizes = size_of_url(os.environ.get("URL"))
    redis.rpush("fatmirror-%s" % APP_URL, json.dumps(sizes))
    return json.dumps(sizes)
def logMessage(number, message):
    try:
        redis.rpush(user.username + "_Messages", number + " " + message)
        print("LOGGED")
    except Exception as e:
        print(e)
def get_realtime_data():
    """Force a realtime data update via GET; for debugging."""
    sizes = size_of_url(os.environ.get("URL"))
    redis.rpush("fatmirror-%s" % APP_URL, json.dumps(sizes))
    return json.dumps(sizes)