def deletelocation(locationkey):
    """Delete location L-<locationkey> and trim its now-unused autocomplete
    prefix entries from the 'locationfragments' sorted set.

    Returns a JSON {'result': bool} payload; False when the key is unknown.
    """
    locations = querylocationkeys('L-' + str(locationkey))
    if (len(locations)) <= 0:
        return jsonify({'result': False})
    else:
        # Full indexed member has the shape "<acname>%L-<id>%".
        locationfullname = locations[0]['acname'] + '%L-' + str(
            locationkey) + '%'
        start = redis.zrank('locationfragments', locationfullname)
        previous = start - 1
        locationfragment = locationfullname
        # Member lexically after ours; prefixes it still needs must survive.
        commonfragment = redis.zrange('locationfragments', start + 1, start + 1)
        # Walk backwards over prefix entries until we hit another full entry
        # (ends with '%') or a prefix shared with the next member.
        # NOTE(review): assumes zrank found the member (start is not None)
        # and that locationfragment[0] exists at each step — TODO confirm.
        while (len(locationfragment) > 0):
            locationfragment = redis.zrange('locationfragments', previous, previous)
            if (locationfragment[0][-1] == '%' or
                    (len(commonfragment) > 0 and
                     locationfragment[0] == commonfragment[0][0:-1])):
                break
            else:
                previous = previous - 1
        # Delete the dead prefixes plus the full entry itself, then the hash.
        redis.zremrangebyrank('locationfragments', previous + 1, start)
        redis.delete('L-' + str(locationkey))
        return jsonify({'result': True})
def resubmit_jobs():
    '''
    Examines the fetch and gather queues for items that are suspiciously
    old. These are removed from the queues and placed back on them afresh,
    to ensure the fetch & gather consumers are triggered to process them.
    '''
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()

    # fetch queue: requeue harvest objects pending for over 3 minutes
    harvest_object_pending = redis.keys(get_fetch_routing_key() + ':*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # total_seconds() rather than .seconds: .seconds wraps every 24h,
        # so an item stuck for exactly a day would look fresh again.
        if (datetime.datetime.now() - date_of_key).total_seconds() > 180:
            redis.rpush(get_fetch_routing_key(),
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    # gather queue: requeue harvest jobs pending for over 2 hours (7200s)
    harvest_jobs_pending = redis.keys(get_gather_routing_key() + ':*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            redis.rpush(get_gather_routing_key(),
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def callback():
    """OAuth1 callback: exchange the verifier for VATSIM user details,
    upsert the user document, and cache a 24h verifier->user_id mapping."""
    oauth_token = request.args.get('oauth_token')
    oauth_verifier = request.args.get('oauth_verifier')
    # The request-token secret was stashed in Redis when the flow started;
    # it is single-use, so it is removed immediately after retrieval.
    oauth_token_secret = redis.get(oauth_token)
    redis.delete(oauth_token)
    response = vatsim.get_user_details(oauth_token, oauth_token_secret, oauth_verifier)
    if response['request']['result'] != 'success':
        return json.dumps(response['request']['message']), 401
    vatsim_user = response['user']
    user_doc = current_app.data.driver.db['users'].find_one(
        {'vatsim_identity.id': vatsim_user['id']})
    if not user_doc:
        # First login: create a minimal user document with Eve-style
        # _created/_updated timestamps.
        now = datetime.utcnow()
        user_doc = {
            '_created': now,
            '_updated': now,
            'vatsim_identity': vatsim_user
        }
        user_doc['_id'] = current_app.data.driver.db['users'].insert_one(
            user_doc).inserted_id
    # Map the verifier to the user id for 24 hours (session-style token).
    redis.set(oauth_verifier, user_doc['_id'])
    redis.expire(oauth_verifier, 24 * 60 * 60)
    return JSONRenderer().render(user_doc), 200
def lane_start_worker(lane_id):
    """Start the bid runtime for *lane_id* under a short-lived Redis lock.

    SET NX EX acts as a 10-second mutex so only one worker starts the
    lane at a time; the lock is always released on the way out.
    """
    lock_key = redis_lane_start_lock_key % (lane_id, )
    acquired = redis.set(lock_key, 1, ex=10, nx=True)
    if not acquired:
        return
    try:
        bid_runtime_service.start(lane_id)
    finally:
        redis.delete(lock_key)
def submitCode(request):
    """Verify an emailed activation code; on success activate the
    WebsiteUser, mirror them as a Helper, and log the session in.

    Returns a JSON status payload ("ok" / "code") for POSTs; other
    methods fall through to None, as before.
    """
    if request.method != "POST":
        return None
    code = request.POST.get("code")
    email = request.session.get("email")
    # The code must exist in Redis and be bound to this session's email.
    if not (redis.exists(code) and redis.get(code).decode("utf-8") == email):
        return HttpResponse(json.dumps({"status": "code"}))
    customer = DBModel.WebsiteUsers.objects(Email=email).get()
    customer.isActive = True
    customer.save()
    # Mirror the activated customer into the Helpers collection.
    helper = DBModel.Helpers()
    helper.Name = customer.Fname + " " + customer.Lname
    helper.Email = customer.Email
    helper.Password = customer.Password
    helper.isActive = True
    helper.save()
    request.session["islogin"] = True
    redis.delete(code)
    return HttpResponse(json.dumps({"status": "ok"}))
def generateSet(products,tmpSetName): ''' Generate a temporary union set for the given products list. ''' print "generateSet()" if len(products)==0: print "Parameter(products) is empty!" return None redis = brconfig.getRedis() pipe = redis.pipeline(transaction=True) sets=[] for p in products: sets.append("ids:i:android.os.Build.PRODUCT:%s"%p) pipe.sunionstore(tmpSetName,sets) ret=pipe.execute() if ret[0]==0: redis.delete(tmpSetName) print "Generated an empty set!" return None else: return tmpSetName
def resubmit_jobs():
    """Requeue harvest objects/jobs whose pending marker has gone stale."""
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()
    log.debug('_create_or_update_package')

    harvest_object_pending = redis.keys('harvest_object_id:*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # total_seconds() rather than .seconds: .seconds wraps every 24h,
        # so items stuck for over a day could be missed.
        if (datetime.datetime.now() - date_of_key).total_seconds() > 180:
            # 3 minutes for fetch and import max
            redis.rpush('harvest_object_id',
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    harvest_jobs_pending = redis.keys('harvest_job_id:*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 7200s = 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            redis.rpush('harvest_job_id',
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def createlocation():
    """Create a location record and index every prefix of its autocomplete
    name in the 'locationfragments' sorted set. Returns (JSON, 201)."""
    payload = request.json
    if not payload or 'displayname' not in payload or 'id' not in payload:
        abort(400)
    location = {
        'id': payload['id'],
        'displayname': payload['displayname'],
        'acname': payload['acname'],
        'icon': payload.get('icon', ''),
        'latitude': payload.get('latitude', 0),
        'longitude': payload.get('longitude', 0)
    }
    # Index every proper prefix of the autocomplete name, then the full
    # "<acname>%L-<id>%" member that carries the record's key.
    acname = location['acname']
    for end in range(1, len(acname)):
        redis.zadd('locationfragments', 0, acname[0:end])
    redis.zadd('locationfragments', 0, acname + '%L-' + str(location['id']) + '%')
    # Rebuild the record list from scratch under its L-<id> key.
    locationkey = 'L-' + str(location['id'])
    redis.delete(locationkey)
    for field in ('id', 'displayname', 'acname', 'icon', 'latitude', 'longitude'):
        redis.rpush(locationkey, location[field])
    return jsonify({'location': location}), 201
def delete(self):
    """Remove this player from their game and announce the departure."""
    game = Game(self.gid)
    # Drop the player from the game roster, then their per-player keys.
    redis.lrem(game.key(':players'), 0, self.pid)
    for suffix in (':cards', ':hand'):
        redis.delete(self.key(suffix))
    # Notify everyone listening on the game's player channel.
    payload = json.dumps({'action': 'leave', 'player': self.pid})
    redis.publish(game.key(':players_channel'), payload)
def createhotel():
    """Create a hotel record, index its name prefixes for autocomplete,
    and register its coordinates in the 'hotels' geo set."""
    payload = request.json
    if not payload or 'displayname' not in payload or 'id' not in payload:
        abort(400)
    hotel = {
        'id': payload['id'],
        'displayname': payload['displayname'],
        'acname': payload['acname'],
        'image': payload.get('image', ''),
        'latitude': payload.get('latitude', 0),
        'longitude': payload.get('longitude', 0),
        'thirdpartyrating': payload.get('thirdpartyrating', 0)
    }
    # Index every proper prefix of the autocomplete name, plus the full
    # "<acname>%H-<id>%" member carrying the record's key.
    acname = hotel['acname']
    for end in range(1, len(acname)):
        redis.zadd('hotelfragments', 0, acname[0:end])
    redis.zadd('hotelfragments', 0, acname + '%H-' + str(hotel['id']) + '%')
    hotelkey = 'H-' + str(hotel['id'])
    # Geo index first, then rebuild the record list from scratch.
    redis.execute_command('geoadd', 'hotels', '%f' % hotel['longitude'],
                          '%f' % hotel['latitude'], hotelkey)
    redis.delete(hotelkey)
    for field in ('id', 'displayname', 'acname', 'image',
                  'latitude', 'longitude', 'thirdpartyrating'):
        redis.rpush(hotelkey, hotel[field])
    return jsonify({'hotel': hotel}), 201
def check_printers():
    """Poll every known printer, refresh its webcam-proxy cache entry,
    and persist the freshly observed client state."""
    app.logger.debug("Checking known printers...")
    for raw_printer in printers.get_printers():
        printer = drivers.get_printer_instance(raw_printer)
        # is_alive() refreshes printer.client state as a side effect;
        # its return value is intentionally unused here.
        printer.is_alive()
        if printer.client.connected:
            webcam = printer.webcam()
            try:
                if "stream" in webcam:
                    # Cache the stream URL keyed by printer IP.
                    redis.set("webcam_%s" % (printer.ip, ), webcam["stream"])
                else:
                    redis.delete("webcam_%s" % (printer.ip, ))
            except Exception as e:
                app.logger.error(
                    "Cannot save webcam proxy information into cache: %s", e)
        # NOTE(review): original indentation was lost — update_printer is
        # reconstructed at loop level (runs for every printer, recording
        # its connected flag either way); confirm against upstream.
        printers.update_printer(
            name=printer.name,
            hostname=printer.hostname,
            ip=printer.ip,
            client=printer.client_name(),
            client_props={
                "version": printer.client.version,
                "connected": printer.client.connected,
                "read_only": printer.client.read_only,
            },
        )
def DELETE(self, name):
    """Delete the cue entry for *name*, force a Redis save to disk, and
    purge the clip from all playlists."""
    redis.delete("cue:" + name)
    redis.save()
    cull_playlists(name)
def deltestdata_api():
    # Remove the canned test account's cached entries (book + auth keys).
    # NOTE(review): the email literal appears redacted ('*****@*****.**')
    # in this copy of the source — confirm the real address upstream.
    print 'deleting test data'
    for email2key in [redis_book_key_from_email, redis_auth_key_from_email]:
        print email2key('*****@*****.**')
        redis.delete(email2key('*****@*****.**'))
    return jsonify({'ip': get_remote_ip()}), 200
def resubmit_jobs():
    """Requeue harvest objects/jobs whose pending marker has gone stale.

    Only runs when the harvest message queue is backed by Redis.
    """
    if config.get('ckan.harvest.mq.type') != 'redis':
        return
    redis = get_connection()
    log.debug('_create_or_update_package')

    harvest_object_pending = redis.keys('harvest_object_id:*')
    for key in harvest_object_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # total_seconds(), not .seconds: the latter wraps every 24 hours,
        # hiding items stuck for a day or more.
        if (datetime.datetime.now() - date_of_key
                ).total_seconds() > 180:  # 3 minutes for fetch and import max
            redis.rpush('harvest_object_id',
                        json.dumps({'harvest_object_id': key.split(':')[-1]}))
            redis.delete(key)

    harvest_jobs_pending = redis.keys('harvest_job_id:*')
    for key in harvest_jobs_pending:
        date_of_key = datetime.datetime.strptime(redis.get(key),
                                                 "%Y-%m-%d %H:%M:%S.%f")
        # 7200s = 2 hours for a gather
        if (datetime.datetime.now() - date_of_key).total_seconds() > 7200:
            redis.rpush('harvest_job_id',
                        json.dumps({'harvest_job_id': key.split(':')[-1]}))
            redis.delete(key)
def diagnose_cb():
    # Twilio gather callback: record the caller's yes/no answer for the
    # current symptom, recompute disease probabilities, and either
    # diagnose, give up, or ask another question.
    user_session = get_session(redis,request.values.get('CallSid'))
    symptom = urllib.unquote(request.args.get('symptom'))
    print symptom
    resp = twilio.twiml.Response()
    digit_pressed = request.values.get('Digits', None)
    # "1" = caller has the symptom (whitelist); anything else blacklists it.
    if digit_pressed == "1":
        user_session['symptom_whitelist'].append(symptom)
        set_session(redis,request.values.get('CallSid'), user_session)
    else:
        user_session['symptom_blacklist'].append(symptom)
        set_session(redis,request.values.get('CallSid'), user_session)
    diseases = symptomelimination.calculate_probability_for_disease(user_session['location'], user_session['symptom_whitelist'])
    # its for the case that no user input is made (so people will not have a disease without symptom)
    if len(user_session['symptom_whitelist']) == 0:
        for disease in diseases:
            disease['probability'] = 0
    # Python 2 cmp-style sort: highest probability first.
    diseases = sorted(diseases, cmp=lambda x, y: cmp(y['probability'],x['probability']))
    for disease in diseases:
        print helpers.get_name_for_disease(disease['disease']), disease['probability']
    if user_session['question_count'] >= 3 and diseases[0]['probability'] > 0.8:
        # Confident diagnosis: announce it and drop the session.
        resp.say("We have determined there is a high probability you have {0}".format(helpers.get_name_for_disease(diseases[0]['disease'])),**default_ops)
        resp.say("We suggest you seek medical attention as soon as possible",**default_ops)
        resp.hangup()
        redis.delete(request.values.get('CallSid'))
    elif user_session['question_count'] >= 7:
        # Give up after 7 questions and route to emergency info.
        resp.say("Sorry, we are unable to determine what you are sick with.",**default_ops)
        resp.redirect('/ems')
    else:
        resp.redirect('/diagnose')
    return str(resp)
def hashes_redis():
    """Demo of Redis hash commands (hmset/hget/hgetall/hset/type) against a
    local server, followed by a pattern-based key sweep.

    Requires a Redis server on 127.0.0.1:6379.
    """
    # Alias the module so the client instance no longer shadows it
    # (the original did `import redis; redis = redis.Redis(...)`).
    import redis as redis_lib
    r = redis_lib.Redis(host='127.0.0.1', port=6379, db=0, charset="utf-8",
                        decode_responses=True)
    print("-------------")
    print("HASH")
    print("-------------")
    #hmset(name, mapping) hget(name, key) hgetall(name)
    r.hmset("user.1", {"name": "peter", "email": "*****@*****.**"})
    print("map.1: ", r.hgetall("user.1"))
    print("name.1:", r.hget("user.1", "name"))
    print("email.1:", r.hget("user.1", "email"))
    print("-------------")
    #hset(key, field, value) hget()
    r.hset("user.2", "name1", "peter")
    print("map.2: ", r.hgetall("user.2"))
    print("type map.2: ", r.type("user.2"))
    print("name.2:", r.hget("user.2", "name1"))
    print("-------------")
    # Delete keys matching the pattern.
    # NOTE(review): 'prefix:*' looks like a leftover placeholder — it does
    # not match the 'user.*' keys created above; confirm intent.
    for key in r.scan_iter("prefix:*"):
        r.delete(key)
def generateSet(products, tmpSetName): ''' Generate a temporary union set for the given products list. ''' print "generateSet()" if len(products) == 0: print "Parameter(products) is empty!" return None redis = brconfig.getRedis() pipe = redis.pipeline(transaction=True) sets = [] for p in products: sets.append("ids:i:android.os.Build.PRODUCT:%s" % p) pipe.sunionstore(tmpSetName, sets) ret = pipe.execute() if ret[0] == 0: redis.delete(tmpSetName) print "Generated an empty set!" return None else: return tmpSetName
def _handle_status(recieved_status):
    """What to do when the status changes.

    Nested pubsub callback: reads `event`, `status`, `initial_status`,
    `callback` and `self` from the enclosing scope. Sets `event` when the
    awaited status (or an error transition) arrives, then unsubscribes.
    """
    if event.is_set():
        # Already resolved by an earlier notification; ignore.
        pass
    elif status == recieved_status:
        event.set()
        log.log(
            5, "Received a '{0}' notification for '{1}', triggered event."
            .format(recieved_status, self.name))
        if callback is not None:
            callback()
    elif initial_status != recieved_status:
        # Unexpected transition: capture and consume the stored error.
        _handle_status._error = redis.get(
            redis_key_name(self) + ":error")
        redis.delete(redis_key_name(self) + ":error")
        log.log(
            5, "Received a '{0}' notification for '{1}': {2}".format(
                recieved_status, self.name, _handle_status._error))
        event.set()
    else:
        log.log(
            5, "Received a '{0}' notification for '{1}', ignored.".format(
                recieved_status, self.name))
    if event.is_set():
        # Done either way: stop listening for keyspace notifications.
        with self.service.pubsub() as pubsub:
            pubsub.punsubscribe("__keyspace@*__:" + redis_status_key(self))
def check_code(user, code):
    """Return True and consume the stored code when it matches *code*.

    Fix: redis.get() returns None for an unknown user, and the original
    crashed with AttributeError calling .decode() on it; a missing key now
    simply yields False.
    """
    stored = redis.get(user)
    if stored is not None and stored.decode('utf-8') == code:
        # One-shot code: delete on successful match.
        redis.delete(user)
        return True
    return False
def upload():
    """Dump every cached 'product:*' JSON blob to *outputName*, one object
    per line, deleting each key after it has been written.

    Fix: dropped the explicit outfile.close() — the with-block already
    closes the file, so the extra call was dead code at best (and would
    have aborted the loop early if it sat inside it).
    """
    with open(outputName, 'w', encoding='utf-8') as outfile:
        for key in redis.scan_iter(match='product:*'):
            data = redis.get(key).decode('utf-8')
            # Round-trip through json to write compact, valid JSON lines.
            json.dump(json.loads(data), outfile)
            outfile.write('\n')
            redis.delete(key)
def wrapped_f(*args, **kwargs):
    """Invoke the wrapped function, then invalidate its cache entry.

    Closure over `fn`, `redis` and `key` from the decorator. The cache
    key is read from the keyword argument named by `key`; an exception in
    `fn` propagates and leaves the cache untouched.
    """
    result = fn(*args, **kwargs)
    redis.delete(kwargs[key])
    return result
def delete(self, line):
    """Handle the interactive `delete <key>` shell command.

    Returns True when a key argument was supplied and deleted,
    False (after printing usage) otherwise.
    """
    parts = line.split()
    if len(parts) < 2:
        print("Express error,such as: delete key")
        return False
    self.redis.delete(parts[1])
    return True
def reset_matches():
    """Clear the per-player level/skill keys for everyone on 'players'.

    Fix: decode bytes returned by lrange properly instead of mangling
    repr() output (str(b'x') -> "b'x'", which the original patched with
    .replace("b'","")); non-bytes entries still go through str().
    """
    players = redis.lrange('players', 0, -1)
    for player in players:
        pname = player.decode() if isinstance(player, bytes) else str(player)
        # Only players that have a level key also get their skill cleared.
        if redis.exists(pname + '-level'):
            redis.delete(pname + '-level')
            redis.delete(pname + '-skill')
    return f"{head}{css} Matches Reseted!{foot}"
def mainpage():
    """Index view: POST stores a new item keyed by its (url-decoded) text,
    DELETE removes one; always re-renders the index template."""
    if request.method == 'POST':
        text = urllib.parse.unquote(request.args.get('text')).strip()
        if text != '':
            # NOTE(review): storing None as a value — recent redis-py
            # versions raise DataError for None; presumably only key
            # existence matters here. Confirm against the client version.
            redis.set(text, None)
    if request.method == 'DELETE':
        redis.delete(urllib.parse.unquote(request.args.get('text')))
    return render_template('index.html')
def remove_member(number, sourceId):
    """Detach *sourceId* from room set *number*.

    When it was the last member, the room's companion '_member' key is
    dropped too; otherwise the member's own hash is reset to idle state.
    """
    was_last = redis.scard(number) == 1
    redis.srem(number, sourceId)
    if was_last:
        redis.delete(number + '_member')
    else:
        redis.hset(sourceId, 'current', '-')
        redis.hset(sourceId, 'voted', 'N')
        redis.hdel(sourceId, 'status')
def send(self, client, data):
    """Relay *data* to *client* when it is in the matching room; on any
    failure (bad JSON, unknown client, dead socket) evict the client."""
    try:
        parsed = json.loads(data)
        roomnum = parsed.get("roomnum", 0)
        if self.clients[client]["roomnum"] != roomnum:
            return
        client.send(json.dumps(parsed))
    except Exception:
        self.delete_client(client)
        redis.delete("client")
def prune_data():
    """Best-effort sweep: delete per-day keys older than 8 days.

    Keys named like '%Y-%m-%d' are date buckets; a small set of
    bookkeeping keys is always preserved. Any failure is logged and
    swallowed, matching the original best-effort contract.
    """
    protected = ('last_block', 'last_block_time_seconds',
                 'last_usage_total_sent')
    try:
        # Hoisted out of the loop; also `key not in` over `not key in`.
        cutoff = datetime.utcnow() - timedelta(days=8)
        for key in redis.keys():
            if key not in protected:
                if datetime.strptime(key, '%Y-%m-%d') < cutoff:
                    redis.delete(key)
                    logger.info(f'Deleted old data from DB: {key}')
    except Exception:
        logger.info('Could not prune data!')
        logger.info(traceback.format_exc())
def remove(sid, realm):
    """Drop the cached session entry and its database row.

    Returns True when a matching Session row existed and was deleted,
    False when none was found for this sid/realm pair.
    """
    redis = get_redis()
    redis.delete("session-%s" % sid)
    try:
        row = Session.select().join(User).where(
            (Session.id == sid) & (User.realm == realm)).get()
        row.delete_instance()
        return True
    except Session.DoesNotExist:
        return False
def _clear(self, pipe=None): """Helper for clear operations. :param pipe: Redis pipe in case update is performed as a part of transaction. :type pipe: :class:`redis.client.StrictPipeline` or :class:`redis.client.StrictRedis` """ redis = pipe if pipe is not None else self.redis redis.delete(self.key)
def rget(redis,key):
    # Fetch *key* and unpickle it; on any unpickle failure the stale entry
    # is deleted and False is returned (also for a missing/empty key).
    str = redis.get(key)  # NOTE(review): shadows the builtin `str`
    if str:
        try:
            object = Pickle.loads(str)  # shadows builtin `object` too
            return object
        except:
            # Bare except keeps corrupt entries from crashing callers.
            # (Message is missing a space after "removing".)
            print "fail removing" + key
            redis.delete(key)
    return False
def ems():
    # Final step of the call flow: read back the caller's local emergency
    # number, then drop the session so the next call starts fresh.
    print "Showing number to user"
    user_session = get_session(redis,request.values.get('CallSid'))
    number = helpers.get_phone_for_country(user_session['location'])
    print number
    resp = twilio.twiml.Response()
    # Digits are spoken one by one (" ".join over the stringified number).
    resp.say("To get emergency medical attention in your country hang up and dial {0}".format(" ".join(list(str(number)))),**default_ops)
    resp.hangup()
    redis.delete(request.values.get('CallSid'))
    return str(resp)
def get_accessible_ids(token, ids=None):
    """Return the set of ids accessible to *token*, or None when empty.

    Builds a uniquely-named temporary Redis set via getAccessibleSet,
    reads its members, and removes the temp key afterwards.
    """
    tmpSetName = 'tmp_%s' % str(uuid.uuid4())
    tmpSetName = getAccessibleSet(token, tmpSetName, ids=ids)
    # Identity test (`is None`) instead of the original `== None`.
    if tmpSetName is None:
        return None
    redis = brconfig.getRedis()
    ret_ids = redis.smembers(tmpSetName)
    redis.delete(tmpSetName)
    return ret_ids
def get_accessible_ids(token, ids=None):
    """Return the set of ids accessible to *token*, or None when empty.

    Duplicate of the spaced variant elsewhere in this file: builds a
    uniquely-named temp set, reads its members, then cleans it up.
    """
    tmpSetName = 'tmp_%s' % str(uuid.uuid4())
    tmpSetName = getAccessibleSet(token, tmpSetName, ids=ids)
    # Identity test (`is None`) instead of the original `== None`.
    if tmpSetName is None:
        return None
    redis = brconfig.getRedis()
    ret_ids = redis.smembers(tmpSetName)
    redis.delete(tmpSetName)
    return ret_ids
def get_file(self, fid, stop_event, driver=None, restart=False):
    """Transfers a file from a Driver to another.

    When *driver* is given a fresh transfer is registered in Redis so it
    can be resumed later; otherwise the saved transfer state (source
    driver and byte offset) is recovered from Redis. Chunks are pulled
    over a ZMQ DEALER socket and fed to upload_chunk; on completion the
    transfer bookkeeping is removed.
    """
    redis = self.plug.redis
    metadata = Metadata.get_by_id(self.plug, fid)
    filename = metadata.filename
    transfer_key = 'drivers:{}:transfers:{}'.format(self.plug.name, fid)
    if driver:
        # Fresh transfer: record it so a crash can be resumed from offset 0.
        redis.sadd('drivers:{}:transfers'.format(self.plug.name), fid)
        redis.hmset(transfer_key, {'from': driver, 'offset': 0})
        offset = 0
        self.logger.info("Starting to get '{}' from {}", filename, driver)
    else:
        # Resume: recover source driver and offset from the saved state.
        transfer = redis.hgetall(transfer_key)
        driver = transfer['from']
        offset = int(transfer['offset'])
        self.logger.info("Restarting transfer of '{}' from {}",
                         filename, driver)
    dealer = self.context.socket(zmq.DEALER)
    port = redis.get('drivers:{}:router'.format(driver))
    dealer.connect('tcp://localhost:{}'.format(port))
    end = metadata.size
    chunk_size = self.plug.options.get('chunk_size', 1 * 1024 * 1024)
    if not restart:
        self._call('start_upload', metadata)
    while offset < end:
        if stop_event.is_set():
            # another transaction for the same file has
            # probably started
            self.logger.info("Aborting transfer of '{}' from {}",
                             filename, driver)
            return
        dealer.send_multipart((filename, str(offset), str(chunk_size)))
        chunk = dealer.recv()
        self.logger.debug("Received chunk of size {} from {} for '{}'",
                          len(chunk), driver, filename)
        self._call('upload_chunk', filename, offset, chunk)
        # hincrby both persists progress and returns the new offset.
        offset = redis.hincrby(transfer_key, 'offset', len(chunk))
    self._call('end_upload', metadata)
    # Transfer finished: drop the resume bookkeeping.
    redis.delete(transfer_key)
    redis.srem('drivers:{}:transfers'.format(self.plug.name), fid)
    self.logger.info("Transfer of '{}' from {} successful", filename, driver)
def edit_map(map_name):
    """Edit the name of a map and save it.

    Fix: only delete the old key when the name actually changed. The
    original deleted `map_name` unconditionally, which destroyed the
    entry it had just written whenever the submitted name was unchanged.
    """
    global current_map
    current_map.name = request.form.get("name")
    # redis.rename() is supposed to implicitly do this, but wasn't working
    redis.set(current_map.name, pickle.dumps(current_map))
    if map_name != current_map.name:
        redis.delete(map_name)
    print(current_map.name)
    return redirect(url_for('view_map', map_name=current_map.name))
def flush_dead_mappers(redis, mappers_key, ping_key):
    """Remove mappers whose last ping is older than TIMEOUT seconds.

    A mapper with no ping entry at all is left alone (it may not have
    started yet); stale mappers are dropped from the set and their ping
    key is deleted.
    """
    for mapper in redis.smembers(mappers_key):
        last_ping = redis.get(ping_key % mapper)
        if not last_ping:
            continue
        last_ping = datetime.strptime(last_ping, DATETIME_FORMAT)
        # total_seconds(), not .seconds: .seconds wraps every 24h, so a
        # mapper silent for a whole day would look "fresh" again.
        if (datetime.now() - last_ping).total_seconds() > TIMEOUT:
            logging.warning('MAPPER %s found to be inactive after %d seconds of not pinging back' % (mapper, TIMEOUT))
            redis.srem(mappers_key, mapper)
            redis.delete(ping_key % mapper)
def reset():
    """Zero the client counter and wipe both language sets."""
    the_time = datetime.now().strftime("%A, %d %b %Y %l:%M %p")
    redis.set("clients", 0)
    print("PRINTING CLIENTS", redis.get("clients"))
    for stale_key in ("languages", "langs"):
        redis.delete(stale_key)
    print("PRINTING CLIENTS", redis.smembers("languages"))
    # NOTE(review): "HELLO" has no {time} placeholder, so format() is a
    # no-op and the response is always "HELLO" — kept as-is.
    return "HELLO".format(time=the_time)
def route_post_initialize():
    """Reset benchmark state: purge all isu4 keys, wipe the log directory,
    and reply with a plain-text OK."""
    store = get_redis()
    for stale_key in store.keys('isu4:*'):
        store.delete(stale_key)
    shutil.rmtree(get_dir('log'))
    response = make_response('OK')
    response.headers['Content-Type'] = 'text/plain'
    return response
def clean():
    """Drop every checklist item's pending-request zset and cached-item
    list, then report completion."""
    for item in checklist:
        request_queue = item + ":requests"
        if redis.zcard(request_queue) > 0:
            redis.delete(request_queue)
        item_cache = item + ":items"
        if redis.llen(item_cache) > 0:
            redis.delete(item_cache)
    print("Clean all queued requests successfully!")
def hangup():
    """Clear the caller's active-call marker when their call ends.

    The marker is only removed when the reported CallUUID matches the one
    stored for the caller; missing request args are silently ignored.
    """
    try:
        caller = request.args['From']
        call_uuid = request.args['CallUUID']
        if call_uuid == redis.get(caller):
            redis.delete(caller)
    except KeyError:
        pass
    return "Done"
def calc_per_recharge():
    # Compute the average recharge amount per day and cache it under
    # "fish:per:recharge:rate"; bootstrap the total on first run.
    if not redis.exists(FISH_SYSTEM_RECHARGE_TOTAL):
        do_create_recharge_total(redis)
    else:
        already_create_day = len(redis.keys(FISH_SYSTEM_DATE_RECHARGE_TOTAL%('*')))
        already_recharge_total = convert_util.to_int(redis.get(FISH_SYSTEM_RECHARGE_TOTAL))
        # NOTE(review): Python 2 integer division (truncates), and raises
        # ZeroDivisionError if no per-day keys exist yet — confirm intent.
        result = already_recharge_total/already_create_day
        print 'already_create_day[%s] already_recharge[%s] result[%s]'%(already_create_day,already_recharge_total,result)
        redis.set("fish:per:recharge:rate",result)
    # Delete today's recharge-user-count statistics.
    # (Original comment was in Chinese: "删除当天充值人数统计".)
    redis.delete(FISH_RECHARGE_USER_DAY_IDS)
# NOTE(review): this block is corrupted in the source — the middle of the
# function (the key literal and the flood-count logic) has been redacted
# ("******"), so it does not parse as valid Python. Preserved verbatim;
# recover the original from upstream before editing.
def antiflood(m): if not is_sudo(m.from_user.id): _hash = "anti_flood:user:"******"شما به دلیل ارسال پشت سرهم پیغام، 60 ثانیه مسدود شدید 🚫") redis.delete(_hash) redis.setex('ban' + str(m.from_user.id), 60, True)
def remove_building(building_identifier):
    """Delete a building row and invalidate its cache entries.

    Returns True on success; on any failure the transaction is rolled
    back, the error is logged, and False is returned.
    """
    try:
        row = Building.query.filter_by(
            building_identifier=building_identifier).all()[0]
        db.session.delete(row)
        db.session.commit()
        if config.buildings_cache_enabled == True:
            # Drop both the single-building entry and the collection cache.
            redis.delete(building_identifier)
            redis.delete('buildings')
        return True
    except Exception as ex:
        db.session.rollback()
        log.debug('remove_building(): error: ' + str(ex))
        return False
def clear_sub_nodes(type_name, hashcode):
    """Recursively delete a node and everything reachable through its
    linked_fields metadata, walking list- or hash-shaped link tables."""
    namehash = "%s:%s" % (type_name, hashcode)
    metahash = "%s:linked_fields:%s" % (type_name, hashcode)
    m_type = redis.type(metahash)
    # NOTE(review): comparing redis.type() to "list"/"hash" assumes the
    # client decodes responses to str; with raw bytes replies these
    # comparisons would never match — confirm client configuration.
    if m_type == "list":
        # Link table is a list of indices into the node's own list.
        for index in redis.lrange(metahash, 0, -1):
            i = int(index)
            clear_sub_nodes(type_name, redis.lindex(namehash, i))
    elif m_type == "hash":
        # Link table is a field -> child-hashcode mapping.
        meta_dictionary = redis.hgetall(namehash)
        for key in meta_dictionary:
            linked_node_hash = meta_dictionary[key]
            clear_sub_nodes(type_name, linked_node_hash)
    # Children handled; remove this node and its link table.
    redis.delete(namehash)
    redis.delete(metahash)
def _clean_up_entries_from_shard(self, object_ids, task_ids, shard_index): redis = self.state.redis_clients[shard_index] # Clean up (in the future, save) entries for non-empty objects. object_ids_locs = set() object_ids_infos = set() for object_id in object_ids: # OL. obj_loc = redis.zrange(OBJECT_LOCATION_PREFIX + object_id, 0, -1) if obj_loc: object_ids_locs.add(object_id) # OI. obj_info = redis.hgetall(OBJECT_INFO_PREFIX + object_id) if obj_info: object_ids_infos.add(object_id) # Form the redis keys to delete. keys = [TASK_TABLE_PREFIX + k for k in task_ids] keys.extend([OBJECT_LOCATION_PREFIX + k for k in object_ids_locs]) keys.extend([OBJECT_INFO_PREFIX + k for k in object_ids_infos]) if not keys: return # Remove with best effort. num_deleted = redis.delete(*keys) log.info( "Removed {} dead redis entries of the driver from redis shard {}.". format(num_deleted, shard_index)) if num_deleted != len(keys): log.warning( "Failed to remove {} relevant redis entries" " from redis shard {}.".format(len(keys) - num_deleted))
def send_ui_reply():
    # Handle a reply submitted from the web UI: either forward an SMS
    # auto-response for the stored message, or delete a stored message
    # when the action is "del".
    sms_id = request.form.get('sms_id')
    sms_reply = request.form.get('action')
    print "Here I am: SMS Id - %s" % sms_id
    if ((sms_reply != "") and (not sms_id.startswith("del"))):
        # Normal reply: look up the original sender and DID, mark the
        # message replied, persist, and fire the auto-response.
        src = redis.hget(sms_id,'From')
        plivo_did = redis.hget(sms_id,'Plivo DID')
        redis.hset(sms_id,"Replied",'y')
        redis.save()
        send_auto_response(src, plivo_did, sms_reply, 'True')
    elif ((sms_reply != "") and (sms_reply == "del")):
        # NOTE(review): sms_id[4:] strips a "del-"-style prefix; this
        # branch is only reachable when the action itself is "del" and
        # sms_id starts with "del" — confirm the UI contract.
        redis.delete(sms_id[4:])
        redis.save()
    return "OK"
def _xray_clean_up_entries_for_driver(self, driver_id):
    """Remove this driver's object/task entries from redis.

    Removes control-state entries of all tasks and task return
    objects belonging to the driver.

    Args:
        driver_id: The driver id.
    """
    xray_task_table_prefix = (
        ray.gcs_utils.TablePrefix_RAYLET_TASK_string.encode("ascii"))
    xray_object_table_prefix = (
        ray.gcs_utils.TablePrefix_OBJECT_string.encode("ascii"))
    task_table_objects = self.state.task_table()
    driver_id_hex = binary_to_hex(driver_id)
    driver_task_id_bins = set()
    # Collect the binary ids of every task spawned by this driver.
    for task_id_hex, task_info in task_table_objects.items():
        task_table_object = task_info["TaskSpec"]
        task_driver_id_hex = task_table_object["DriverID"]
        if driver_id_hex != task_driver_id_hex:
            # Ignore tasks that aren't from this driver.
            continue
        driver_task_id_bins.add(hex_to_binary(task_id_hex))
    # Get objects associated with the driver.
    object_table_objects = self.state.object_table()
    driver_object_id_bins = set()
    for object_id, _ in object_table_objects.items():
        task_id_bin = ray.raylet.compute_task_id(object_id).id()
        if task_id_bin in driver_task_id_bins:
            driver_object_id_bins.add(object_id.id())

    def to_shard_index(id_bin):
        # Each entry lives on the shard determined by its id hash.
        return binary_to_object_id(id_bin).redis_shard_hash() % len(
            self.state.redis_clients)

    # Form the redis keys to delete, bucketed per shard.
    sharded_keys = [[] for _ in range(len(self.state.redis_clients))]
    for task_id_bin in driver_task_id_bins:
        sharded_keys[to_shard_index(task_id_bin)].append(
            xray_task_table_prefix + task_id_bin)
    for object_id_bin in driver_object_id_bins:
        sharded_keys[to_shard_index(object_id_bin)].append(
            xray_object_table_prefix + object_id_bin)
    # Remove with best effort.
    for shard_index in range(len(sharded_keys)):
        keys = sharded_keys[shard_index]
        if len(keys) == 0:
            continue
        redis = self.state.redis_clients[shard_index]
        num_deleted = redis.delete(*keys)
        logger.info("Removed {} dead redis entries of the driver from"
                    " redis shard {}.".format(num_deleted, shard_index))
        if num_deleted != len(keys):
            # Partial failure is logged but not retried (best effort).
            logger.warning("Failed to remove {} relevant redis entries"
                           " from redis shard {}.".format(
                               len(keys) - num_deleted, shard_index))
def update_building(building):
    """Apply *building*'s fields to its DB row and refresh the cache.

    Returns True on success; any failure rolls the transaction back,
    logs the error, and returns False.
    """
    try:
        row = Building.query.filter_by(
            building_identifier=building['building_identifier']).all()[0]
        for field, value in building.items():
            setattr(row, field, value)
        db.session.add(row)
        db.session.commit()
        if config.buildings_cache_enabled == True:
            redis.set(building['building_identifier'], json.dumps(building),
                      ex=config.buildings_cache_ttl)
            redis.delete('buildings')  # invalidate the buildings collection cache entry
        return True
    except Exception as ex:
        db.session.rollback()
        log.debug('update_building(): error: ' + str(ex))
        return False
def _process_message(args, redis, message): log = logging.getLogger("_process_message") message_text = message["data"].decode("utf-8") redis_key, expected_count_str = message_text.split() expected_count = int(expected_count_str) members = redis.smembers(redis_key) if len(members) == expected_count: log.info("received key {0} with {1} set members".format(redis_key, len(members))) else: log.error("received key {0} with {1} set members expected {2}".format( redis_key, len(members), expected_count)) # we don't need this key anymore redis.delete(redis_key) for member in members: file_name = member.decode("utf-8") path = os.path.join(args.watch_path, file_name) log.info("removing {0}".format(path)) os.unlink(path)
def _handle_status(recieved_status):
    """What to do when the status changes.

    Nested pubsub callback closing over `event`, `status`,
    `initial_status`, `callback` and `self`. Sets `event` on the awaited
    status or on an error transition, then unsubscribes.
    """
    if event.is_set():
        # Already resolved by a previous notification; ignore.
        pass
    elif status == recieved_status:
        event.set()
        log.log(5, "Received a '{0}' notification for '{1}', triggered event.".format(recieved_status, self.name))
        if callback is not None:
            callback()
    elif initial_status != recieved_status:
        # Unexpected transition: capture and consume the stored error.
        _handle_status._error = redis.get(redis_key_name(self)+":error")
        redis.delete(redis_key_name(self)+":error")
        log.log(5, "Received a '{0}' notification for '{1}': {2}".format(recieved_status, self.name, _handle_status._error))
        event.set()
    else:
        log.log(5, "Received a '{0}' notification for '{1}', ignored.".format(recieved_status, self.name))
    if event.is_set():
        # Done either way: stop listening for keyspace notifications.
        with self.service.pubsub() as pubsub:
            pubsub.punsubscribe("__keyspace@*__:"+redis_status_key(self))
def _pass_turn(self, player_name, guesser=None, score=None):
    """Advance the game to the next player's turn, optionally crediting a
    guess, and handle the 10-point win condition."""
    # Get the next player
    next_player = redis.zrange(self.turns_key, 1, 1)
    # Are we playing with ourself?
    if next_player:
        next_player = next_player[0]
    else:
        next_player = player_name
    # Set the new word
    word = redis.set(self.word_key, get_next_word().text)
    # Clear the skip key
    redis.delete(self.skip_key)
    # Tell everyone who's turn it is
    msg = Message('PASSED', player_name=next_player)
    msg.end_time = time.time() + 120
    redis.set(self.end_key, msg.end_time)
    if score is not None:
        msg.guesser = guesser
        msg.score = score
    self.send(msg)
    # Ten points to win the game
    # NOTE(review): original indentation was lost — with score=None this
    # comparison is False under Python 2 but raises TypeError under
    # Python 3; the check may belong inside the `score is not None`
    # branch above. Confirm against upstream.
    if score >= 10:
        # Send the won message
        self.send(Message('WON', player_name=guesser))
        # Clear all scores
        players = redis.zrange(self.players_key, 0, -1)
        scores = dict((x, 0) for x in players)
        redis.zadd(self.players_key, **scores)
    # Move the old player to the end of the turn list
    redis.zadd(self.turns_key, player_name, time.time())
def delete(redis, name, task_id):
    """
    Function to delete entries (tasks or whole task lists) from Redis
    :param redis: Redis connection
    :param name: task name
    :param task_id: task id
    """
    wipe_all = task_id == 'all'
    if wipe_all and name is None:
        # db clear
        redis.flushdb()
        logging.info("All the database was successfully cleared")
    elif wipe_all and name:
        # deleting a single task list
        redis.delete(name)
        logging.info("Task list was successfully deleted")
    else:
        # deleting a single task from concrete list
        tasks = List.pull_from_redis(redis, name)
        del tasks[str(task_id)]
        List.push_to_redis(redis, name, tasks)
        logging.info("The task was successfully deleted")
    return 0
def load():
    # Pull the full sleep time-series from the Fitbit API and cache each
    # day's record (as a JSON string) in the 'fitbit' Redis set.
    # see: http://python-fitbit.readthedocs.org/en/latest/#fitbit-api
    fb = fitbit.Fitbit(
        os.getenv('CONSUMER_KEY'), os.getenv('CONSUMER_SECRET'),
        user_key=os.getenv('USER_KEY'), user_secret=os.getenv('USER_SECRET'))
    # Rebuild the cache from scratch.
    redis.delete('fitbit')
    # NOTE(review): `if True:` looks like a leftover debugging toggle.
    if True:
        sleepData = dict();
        # One parallel series per sleep metric, aligned by index.
        sl1 = fb.time_series('sleep/startTime', period='max')['sleep-startTime']
        sl2 = fb.time_series('sleep/timeInBed', period='max')['sleep-timeInBed']
        sl3 = fb.time_series('sleep/minutesAsleep', period='max')['sleep-minutesAsleep']
        sl4 = fb.time_series('sleep/minutesAwake', period='max')['sleep-minutesAwake']
        sl5 = fb.time_series('sleep/minutesToFallAsleep', period='max')['sleep-minutesToFallAsleep']
        sl6 = fb.time_series('sleep/minutesAfterWakeup', period='max')['sleep-minutesAfterWakeup']
        sl7 = fb.time_series('sleep/efficiency', period='max')['sleep-efficiency']
        for sl in range(len(sl1)):
            # Skip days with no recorded start time.
            if sl1[sl]['value'] != '':
                sleepData['date'] = sl1[sl]['dateTime']
                sleepData['startTime'] = sl1[sl]['value']
                sleepData['timeInBed'] = sl2[sl]['value']
                sleepData['minutesAsleep'] = sl3[sl]['value']
                sleepData['minutesAwake'] = sl4[sl]['value']
                sleepData['minutesToFallAsleep'] = sl5[sl]['value']
                sleepData['minutesAfterWakeup'] = sl6[sl]['value']
                sleepData['efficiency'] = sl7[sl]['value']
                # NOTE(review): profile fetched per day — could be hoisted.
                sleepData['timezone'] = fb.user_profile_get()['user']['timezone']
                sleepData['offsetFromUTCMillis'] = fb.user_profile_get()['user']['offsetFromUTCMillis']
                s = json.dumps(sleepData)
                redis.sadd('fitbit', s)
                print s
def check_pulse(self, signum, _):
    """Signal handler: rebuild the 'active_feeds' set from the EKG list."""
    heartbeat = redis.lrange('EKG', 0, -1)
    redis.delete('active_feeds')
    for feed in heartbeat:
        redis.sadd('active_feeds', feed)
def clearhistory():
    """Wipe the current user's message history and bounce to the index."""
    history_key = user.username + "_Messages"
    redis.delete(history_key)
    return redirect(url_for('index'))
def reset_subscriptions(chat_id):
    """Remove *chat_id* from the subscriber set, drop its per-chat
    subscription key, and reset its conversation state machine.

    Cleanup: the srem return value was bound to an unused `users` local.
    """
    redis.srem("optimusprice.subscriptions.users", str(chat_id))
    redis.delete("optimusprice.subscriptions." + str(chat_id))
    fsm_reset(chat_id)
import redis
import ast

# One-off maintenance script: rewrite a stored talk entry so its 't' field
# uses single quotes instead of double quotes.
redis = redis.StrictRedis(host='localhost', port=6379, db=0)
key = "28499950-7085-11e4-9890-7071bcbc887a"
talk = ast.literal_eval(redis.get(key))
talk['t'] = talk['t'].replace('"', "'")
# Capture the remaining TTL before delete/setnx wipes it.
exp = redis.ttl(key)
redis.delete(key)
redis.setnx(key, talk)
if exp and exp > 0:
    # Fix: `exp` was previously read but never used, so the rewritten key
    # silently lost its expiry; restore the original TTL.
    redis.expire(key, exp)