def __init__(self, pickle_protocol=pickle.DEFAULT_PROTOCOL, namespace=None, **kwargs):
    """Initialize Redis DB

    Args:
        pickle_protocol (int, optional): https://docs.python.org/3.7/library/pickle.html#data-stream-format
        namespace (str, optional): If set, key names in the Redis DB will be preceded by "<namespace>:".
        **kwargs (optional): All these will be sent to redis.Redis().
            See https://github.com/andymccurdy/redis-py for more info.
    """
    from redis import Redis

    super().__init__()
    self._pickle_protocol = pickle_protocol
    self._namespace = namespace

    # Quick check so the number of databases is sufficient
    if "db" in kwargs and kwargs["db"] > 0:
        redis_kwargs = kwargs.copy()
        redis_kwargs["db"] = 0
        redis = Redis(**redis_kwargs)
        dbs = redis.config_get("databases")
        if int(dbs["databases"]) <= kwargs["db"]:
            redis.close()
            raise ValueError(
                f"Tried to open Redis DB #{kwargs['db']}, but there are only {dbs['databases']} databases"
            )
        redis.close()

    self._redis = Redis(**kwargs)
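# Hedged, self-contained sketch of the same "does the server have enough
# databases?" probe used in the constructor above, written against redis-py
# directly. The function name and the host/port defaults are assumptions.
from redis import Redis

def has_database(db_index: int, **kwargs) -> bool:
    # Connect to db 0 only to read the server's "databases" config value.
    probe = Redis(db=0, **kwargs)
    try:
        databases = int(probe.config_get("databases")["databases"])
        return db_index < databases
    finally:
        probe.close()

# Example: has_database(5, host="localhost", port=6379)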
def create_payment_request(payment_request=None):  # noqa: E501
    """createPaymentRequest

    Creates a payment request for a user. API for: ACMESky # noqa: E501

    :param payment_request:
    :type payment_request: dict | bytes

    :rtype: PaymentCreationResponse
    """
    if connexion.request.is_json:
        payment_request = PaymentRequest.from_dict(connexion.request.get_json())  # noqa: E501

    # This line simulates a network slowdown (comment it out if not needed).
    time.sleep(5)

    # Generate a unique transaction id and associate it with the payment request.
    transaction_id = str(uuid.uuid1())

    redis_connection = Redis(host="payment_provider_redis", port=6379, db=0)
    redis_connection.set(transaction_id, json.dumps(payment_request.to_dict()))
    redis_connection.close()

    # Return the URL where the user can pay. The transaction id will be used
    # to retrieve the information saved in Redis.
    frontend_url = environ.get("PAYMENT_PROVIDER_FRONTEND", "http://0.0.0.0:4002")
    redirect_page = f'{frontend_url}/?transaction_id={transaction_id}'

    return PaymentCreationResponse(redirect_page=redirect_page, transaction_id=transaction_id)
def image_search(self, request, pk=None):
    redis_connection = Redis()
    course = self.get_object()
    search_result = redis_connection.get(hash(course))

    if not search_result:
        search_string = str(course)
        # Extract the DuckDuckGo "vqd" token required by the image search endpoint.
        search_token = re.findall(
            r'vqd=\'(.*)\';',
            requests.post("https://duckduckgo.com/", data={
                'q': search_string
            }).text)[0]
        search_result = requests.get(
            "https://duckduckgo.com/i.js?l=it-it&o=json&vqd=%s&q=%s&f=,,,&p=2"
            % (search_token, search_string),
            headers={
                'Dnt': '1',
                'X-Requested-With': 'XMLHttpRequest',
                'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36',
                'Accept': 'application/json, text/javascript, */*; q=0.01',
                'Referer': 'https://duckduckgo.com/',
                'Authority': 'duckduckgo.com',
                'Host': 'duckduckgo.com',
                'Connection': 'keep-alive',
                'Cache-Control': 'no-cache',
            }).text
        # Cache the raw JSON response under the course hash.
        redis_connection.append(hash(course), search_result)

    redis_connection.close()
    return Response(json.loads(search_result)['results'][0])
class RedisMessageSubscriber(implements(MessageSubscriber)):

    def __init__(self):
        self.__client = Redis(connection_pool=settings.REDIS_CONNECTION_POOL)
        self.__stop = False

    async def subscribe_message(self, topic, message_handler):
        p = self.__client.pubsub()
        p.subscribe(topic)

        while self.stop is False:
            await asyncio.sleep(1)
            message = p.get_message(ignore_subscribe_messages=True)
            if message is None:
                continue
            try:
                message_handler.handle(pickle.loads(message['data']))
            except Exception as ex:
                # TODO: refactor it
                # TODO: replace print() with logger
                print(''.join(
                    traceback.format_exception(None, ex, ex.__traceback__)))

    @property
    def stop(self):
        return self.__stop

    def close(self):
        self.__stop = True
        self.__client.close()
def send_payment_information(payment_information=None):  # noqa: E501
    """sendPaymentInformation

    Sends the information received by the user for verification purposes. API for: Payment Provider # noqa: E501

    :param payment_information:
    :type payment_information: dict | bytes

    :rtype: None
    """
    if connexion.request.is_json:
        payment_information = PaymentInformation.from_dict(connexion.request.get_json())  # noqa: E501

    # Open the connection to Redis to get the right process instance id in the
    # Camunda engine (saved by a worker).
    redis_connection = Redis(host="acmesky_redis", port=6379, db=0)
    process_instance_id = redis_connection.get(payment_information.transaction_id).decode("utf-8")
    redis_connection.close()

    # Send a correlate message to Camunda with the information about the payment
    # status. The message is addressed to a specific process instance, identified
    # by process_instance_id.
    r = send_string_as_correlate_message(
        "payment_status",
        [("payment_status", json.dumps(payment_information.to_dict()))],
        process_instance_id)

    if r.status_code >= 300:
        logging.error(f"Failed to send message to Camunda. Response: {r.text}")

    return None, r.status_code
def main_currency():
    """Main function: engine of the currency update."""
    r = Redis(host='redis', username='******', password='******', port=6379, db=0)
    subscribers = r.get('subscribers')
    logger.info(subscribers)

    if subscribers:
        response = requests.get(url=MAIN_URL)
        data = response.json()['rates']
        text = f'<u>LAST UPDATE: {datetime.now(tz=timezone("Europe/Kiev")):%Y-%m-%d -- %H:%M:%S}</u>\n\n'
        for i in sorted(data, key=lambda key: key['pair']):  # sorted alphabetically by pair
            if i['pair'] in [
                    'BTC_ETH', 'ETH_BTC', 'BTC_USDT', 'ETH_UAH', 'USDT_UAH',
                    'ETH_USDT', 'BTC_UAH'
            ]:
                text += f"<b>{i['pair']}</b> -- {i['price']}\n"
        list_to_send = subscribers.decode("utf-8").split()
        logger.info(list_to_send)
        loop.run_until_complete(send(r, list_to_send, text))  # run the async send() coroutine
    else:
        pass

    r.close()
def update_charts(signal, instance, **kwargs):
    channel_layer = layers.get_channel_layer()
    now = localtime()
    user = instance.user.name
    value = str(instance.customer_balance)

    db = Redis(host='localhost')
    redis_data = get_or_create_redis(user, db)
    redis_time = get_or_create_redis(str(user) + 'time', db)

    result = PushList(redis_data)
    time = PushList(redis_time, type=str)
    result.add(value)
    time.add(now.tm_hour)

    db.delete(user)
    db.delete(user + 'time')
    db.rpush(user, *result.data)
    db.rpush(user + 'time', *time.data)
    db.close()

    async_to_sync(channel_layer.group_send)(user, {
        'type': 'chat_message',
        'text': {
            'time': time.data,
            'data': result.data
        }
    })
def redis_reset_startup(redis_con: redis.Redis, process_name: str, key_run: str,
                        key_time: str, key_time_ete: str, key_exception: str,
                        key_current: str, key_total: str) -> None:
    """Check whether the Redis "running" key is already set; abort if it is, otherwise (re)set the keys stored in the Redis database.

    :param redis_con: an instance of Redis connection
    :param process_name: the name of the action that will be started or not
    :param key_run: Redis key for run indication
    :param key_time: Redis key to store time information (start time or runtime)
    :param key_time_ete: Redis key to store start time of time calculation
    :param key_exception: Redis key where exception information is stored
    :param key_current: Redis key containing progressed items count
    :param key_total: Redis key containing total to progress item count
    """
    if int(redis_con.get(key_run) or 0) == 1:
        redis_con.close()
        flask.abort(400, process_name + " already running")

    # (re)set Redis keys
    redis_con.set(key_run, 1)
    redis_con.set(key_time, int(datetime.timestamp(datetime.now())))
    redis_con.delete(key_time_ete)
    redis_con.delete(key_exception)
    redis_con.set(key_current, 0)
    redis_con.set(key_total, 0)
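# Hedged usage sketch for redis_reset_startup() above. The Flask route, the
# Redis connection details, and all key names are illustrative assumptions.
import flask
import redis

app = flask.Flask(__name__)

@app.route("/import/start", methods=["POST"])
def start_import():
    redis_con = redis.Redis(host="localhost", port=6379, db=0)
    try:
        redis_reset_startup(
            redis_con, "import",
            key_run="import:run", key_time="import:time",
            key_time_ete="import:time_ete", key_exception="import:exception",
            key_current="import:current", key_total="import:total")
    finally:
        redis_con.close()
    return {"status": "started"}, 202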
class TestInvertedIndex:

    def setup_class(self):
        self.client = Redis(decode_responses=True)
        self.laptops = laptops = InvertedIndex(self.client)
        laptops.add_index("MacBook Pro", "Apple", "MacOS", "13inch")
        laptops.add_index("MacBook Air", "Apple", "MacOS", "13inch")
        laptops.add_index("X1 Carbon", "ThinkPad", "Windows", "13inch")
        laptops.add_index("T450", "ThinkPad", "Windows", "14inch")
        laptops.add_index("XPS", "DELL", "Windows", "13inch")

    def test_get_keywords(self):
        assert {"Apple", "MacOS", "13inch"} == self.laptops.get_keywords("MacBook Pro")

    def test_get_items(self):
        assert self.laptops.get_items("13inch") == {
            "MacBook Pro", "MacBook Air", "X1 Carbon", "XPS"
        }
        assert self.laptops.get_items("13inch", "Apple") == {"MacBook Pro", "MacBook Air"}

    def teardown_class(self):
        self.client.close()
class RedisZ(object):

    def __init__(self):
        self.redis_conn = Redis(connection_pool=conn_pool)

    def __enter__(self):
        return self.redis_conn

    def __exit__(self, exc, value, traceback):
        self.redis_conn.close()
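# Hedged usage sketch for the RedisZ context manager above; the module-level
# conn_pool it relies on is shown here as an assumed redis-py ConnectionPool.
from redis import ConnectionPool

conn_pool = ConnectionPool(host="localhost", port=6379, db=0)

with RedisZ() as r:
    r.set("greeting", "hello")
    print(r.get("greeting"))
# The connection is released automatically when the with-block exits.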
def redis_available():
    params = dict(host=os.getenv('REDIS_HOST') or '127.0.0.1',
                  port=int(os.getenv('REDIS_PORT') or 6379))
    conn = Redis(**params)
    try:
        conn.keys('*')
        conn.close()
        return True
    except Exception:
        return False
def set_record(self, key, value):
    """
    Set a record in the database.
    It has nothing to do with states; it exists for backend compatibility.
    """
    connection = Redis(connection_pool=self.redis)
    connection.set(self.prefix + str(key), json.dumps(value))
    connection.close()
    return True
def delete_record(self, key):
    """
    Delete a record from the database.
    It has nothing to do with states; it exists for backend compatibility.
    """
    connection = Redis(connection_pool=self.redis)
    connection.delete(self.prefix + str(key))
    connection.close()
    return True
def get_record(self, key):
    """
    Get a record from the database.
    It has nothing to do with states; it exists for backend compatibility.
    """
    connection = Redis(connection_pool=self.redis)
    result = connection.get(self.prefix + str(key))
    connection.close()
    if result:
        return json.loads(result)
    return
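# Hedged sketch of how the three record helpers above might be wired into a
# storage class. The class name, the self.redis connection pool, and the
# self.prefix attribute are assumptions inferred from the methods themselves.
from redis import ConnectionPool

class RedisRecordStorage:
    def __init__(self, host="localhost", port=6379, db=0, prefix="fsm:"):
        self.redis = ConnectionPool(host=host, port=port, db=db)
        self.prefix = prefix

    # set_record, get_record and delete_record from above would live here.

# storage = RedisRecordStorage()
# storage.set_record("user:1", {"state": "start"})
# storage.get_record("user:1")    # -> {"state": "start"}
# storage.delete_record("user:1")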
class RedisPlieLine:
    conn = None

    def open_spider(self, spider):
        self.conn = Redis(host='127.0.0.1', port=6379)

    def close_spider(self, spider):
        self.conn.close()

    def process_item(self, item, spider):
        self.conn.lpush(jdsn.Jdssspider().key, str(item))
        return item
def start_requests(self):
    redis = Redis(host='www.daoyang.top', port=6379, db=0)
    while redis.llen(REDIS_DS_KEY) > 0:
        self.logger.debug("REDIS POP BEFORE")
        email = redis.lpop(REDIS_DS_KEY)
        # redis-py reconnects on the next command, so the client remains usable
        # after close(); the check in the while condition still works.
        redis.close()
        self.logger.debug("REDIS POP AFTER")
        if email:
            yield scrapy.Request(bytes.decode(email), callback=self.parse)
            self.logger.debug("REDIS YIELD YIELD YIELD YIELD")
        else:
            break
class ZhongwenPlieLine:
    conn = None

    def open_spider(self, spider):
        self.conn = Redis(host='127.0.0.1', port=6379)

    def close_spider(self, spider):
        self.conn.close()

    def process_item(self, item, spider):
        # self.conn.lpush(zhongwentwo.FenghuangSpider().Key, str(item))
        self.conn.lpush(f'<<{item["title"]}>>{item["author"]}', str(item))
        return item
class RedisClient:

    def __init__(self, options):
        self.connection_options = options

    def __enter__(self):
        try:
            self.connection = Redis(**self.connection_options)
            return self.connection
        except Exception as e:
            raise Exception(f'ERROR: Cannot connect to Redis: {str(e)}')

    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.close()
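# Hedged usage sketch for the RedisClient context manager above; the host,
# port, and key used here are illustrative assumptions.
options = {"host": "localhost", "port": 6379, "db": 0}

with RedisClient(options) as conn:
    conn.set("status", "ok")
    print(conn.get("status"))
# __exit__ closes the underlying connection when the block ends.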
class PttSpiderPipeline(object):

    def open_spider(self, spider):
        # init connections to MongoDB and Redis
        self.client = MongoClient(MONGO_HOST)
        self.db = self.client[PTT_DB]
        self.r = Redis(host=REDIS_HOST)

    def close_spider(self, spider):
        # remove job_id from Redis, since the job is done
        self.r.delete(spider.job_id)
        # close connections while closing the spider
        self.client.close()
        self.r.close()

    def process_item(self, item, spider):
        # check that the needed fields exist, otherwise drop the item
        needed_fields = {
            'authorId', "authorName", "title", "publishedTime", "content",
            "canonicalUrl"
        }
        missing_fields = needed_fields - set(item.keys())
        if missing_fields != set():
            raise DropItem(missing_fields)

        # init collections
        article_col = self.db['article']
        comment_col = self.db['comment']

        # use authorId and publishedTime as the unique ID for the document
        author_and_time = {
            "authorId": item.pop('authorId'),
            "publishedTime": item.pop('publishedTime')
        }

        # extract comments from the article and attach the unique ID to them
        comments = item.pop('comments', None)
        article = dict(item)
        now = dt.now()
        article['updateTime'] = now

        # upsert the article and comments into MongoDB
        article_col.update_one(author_and_time, {
            "$set": article,
            "$setOnInsert": {
                'createdTime': now
            }
        }, upsert=True)
        if comments is not None:
            comments = [{**c, **author_and_time} for c in comments]
            comment_col.remove(author_and_time)
            comment_col.insert_many(comments)

        # record the url in Redis
        self.r.sadd(spider.job_id, article['canonicalUrl'])
        return item
def get_payment_details(transaction_id):  # noqa: E501
    """Your GET endpoint

    Gets the information for the payment request for a user. API for: User # noqa: E501

    :param transaction_id: ID of transaction
    :type transaction_id:

    :rtype: PaymentRequest
    """
    redis_connection = Redis(host="payment_provider_redis", port=6379, db=0)
    payment_request = json.loads(redis_connection.get(transaction_id))
    redis_connection.close()

    return PaymentRequest.from_dict(payment_request)
def restore_redis():
    from redis import Redis

    r = Redis(host='0.0.0.0', db=5)
    updated_conf = {
        "HOST": "0.0.0.0",
        "PORT": "6000",
        "USER": "******",
        "PASSWORD": "******",
        "NAME": "postgres"
    }
    r.set('tenant_1_innovaccer_xyz_incare_orm_config_django_orm_default',
          json.dumps(updated_conf))
    r.close()
class SimpleRedisDispatcher(AbstractRedisDispatcher):
    redis_args = None
    redis_kwargs = None

    def __init__(self, *redis_args, **redis_kwargs):
        self.redis_args = redis_args
        self.redis_kwargs = redis_kwargs
        AbstractRedisDispatcher.__init__(self)
        self.redis = Redis(*self.redis_args, **self.redis_kwargs)

    def get_redis(self):
        return self.redis

    def close(self):
        self.redis.close()
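# Hedged usage sketch for SimpleRedisDispatcher above; the constructor simply
# forwards its arguments to redis.Redis(), so the host/port shown here are
# illustrative assumptions.
dispatcher = SimpleRedisDispatcher(host="localhost", port=6379, db=0)
try:
    dispatcher.get_redis().ping()
finally:
    dispatcher.close()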
def is_backend_running() -> bool:
    try:
        conn = Redis(host=get_redis_host(), port=int(get_redis_port()),
                     db=int(get_redis_dbnum()), password=get_redis_password())
        conn.client_list()  # Must perform an operation to check the connection.
    except ConnectionError as e:
        print("Failed to connect to Redis instance at %s" % get_redis_host())
        print(repr(e))
        return False
    conn.close()  # type: ignore
    return True
class RedisRpcServer(implements(RpcServer)):

    def __init__(self):
        self.__client = Redis(connection_pool=settings.REDIS_CONNECTION_POOL)
        self.__stop = False

    async def register_handler(self, topic, request_handler):
        while self.stop is False:
            request = None
            try:
                if self.__client.llen(name=topic) < 1:
                    await asyncio.sleep(1)
                    continue
                request = self.__client.rpop(topic)
                request = pickle.loads(request)

                # When the request is successfully unmarshalled, process it
                # with request_handler.
                result = request_handler.handle(request.params)

                # Send the result back to the client.
                self.__client.rpush(
                    request.id,
                    pickle.dumps(RpcResponse(result=result, id=request.id)))
                self.__client.expire(request.id, time=15)
            except CancelledError:
                self.close()
                break
            except Exception as ex:
                # TODO: refactor it
                # TODO: replace print() with logger
                print(''.join(
                    traceback.format_exception(None, ex, ex.__traceback__)))
                self.__client.rpush(
                    request.id,
                    pickle.dumps(RpcResponse(result=ex, id=request.id)))
                self.__client.expire(request.id, time=15)

    @property
    def stop(self):
        return self.__stop

    def close(self):
        self.__stop = True
        self.__client.close()
class TestAutoComplete:

    def setup_class(self):
        self.client = Redis(decode_responses=True)

    def test_auto_complete(self):
        auto_complete = AutoComplete(self.client)
        auto_complete.feed("黄晓明", 3)
        auto_complete.feed("黄建宏", 2)
        auto_complete.feed("黄晓军", 1)
        assert auto_complete.hint("黄") == ["黄晓明", "黄建宏", "黄晓军"]
        assert auto_complete.hint("黄晓") == ["黄晓明", "黄晓军"]

    def teardown_class(self):
        keys = [k for k in self.client.keys()]
        self.client.delete(*keys)
        self.client.close()
def update_redis(*args, **kwargs):
    from redis import Redis

    r = Redis(host='0.0.0.0', db=5)
    updated_conf = {
        "HOST": "0.0.0.0",
        "PORT": "6001",
        "USER": "******",
        "PASSWORD": "******",
        "NAME": "postgres"
    }
    r.set('tenant_1_innovaccer_xyz_incare_orm_config_django_orm_default',
          json.dumps(updated_conf))
    r.close()
    expected_result.clear()
    print("new expected_result is ", expected_result)
def insert():
    import json
    from redis import Redis
    from urllib import parse

    # Get a Redis connection object.
    redis = Redis(host="114.116.126.177", password="******")
    # redis = Redis()
    with open("kind.json", mode="r", encoding="utf-8") as f:
        datas = json.loads(f.read())
        for data in datas:
            print(data)
            redis.lpush("hy:start_urls",
                        f'http://so.huangye88.com/?kw={data}&type=company&')
    # Close the Redis connection.
    redis.close()
def insert():
    import json
    from redis import Redis
    from urllib import parse

    # Get a Redis connection object.
    redis = Redis(host="114.116.126.177", password="******")
    # redis = Redis()
    with open("kind.json", mode="r", encoding="utf-8") as f:
        datas = json.loads(f.read())
        for data in datas:
            print(data)
            redis.lpush("lbw:start_urls",
                        f'http://b2b.liebiao.com/sou-gs/{data}/?pn=1')
    # Close the Redis connection.
    redis.close()
def test_data_retrieval_within_the_minute():
    """Check that an update doesn't occur within the one-minute window:
    delete the cached data and assert that no data is retrievable.
    """
    redis_conn = Redis(app.config["REDIS_URL"])

    # first call
    requests.post(url=UPDATE_URL)
    res = requests.get(url=GET_MOVIES_URL)
    assert res.status_code == 200

    redis_conn.delete("ghibli")

    # second call
    res = requests.get(url=GET_MOVIES_URL)
    assert res.status_code != 200

    redis_conn.close()
class GatherMetricToStorage(object):

    def __init__(self, duration, recsqant):
        """
        Initialize storage connection
        """
        echo(" * Initializing metric fetch system...")
        self.baseobjc = Redis(host="127.0.0.1", port=6379)
        self.duration = duration
        self.recsqant = recsqant

    def jsonify_system_live_updating_metrics(self):
        """
        Convert metric data to a JSON-friendly format
        """
        timestmp = str(time()).split(".")[0]
        hashiden = sha256(timestmp.encode()).hexdigest()
        keyvalpr = {
            timestmp: json.dumps({
                "hashiden": hashiden,
                "liveupdt": LiveUpdatingElements().return_live_data()
            })
        }
        return keyvalpr

    def continuously_store_data(self):
        """
        Periodically push passive metrics to Redis store
        """
        self.baseobjc.flushall()
        try:
            while True:
                if self.baseobjc.dbsize() == self.recsqant:
                    # Evict the oldest record once the store reaches its quota.
                    keys = sorted(self.baseobjc.keys())
                    self.baseobjc.delete(keys[0])
                self.baseobjc.mset(self.jsonify_system_live_updating_metrics())
                # echo(" * [" + ctime() + "] Stored system metrics now...")
                sleep(self.duration)
        except KeyboardInterrupt as expt:
            self.baseobjc.close()
            echo("\n" + " * Closing storage connection...")
            exit()