def queue_cmd(conn: Redis, job_queue: str, cmd: str, email: Optional[str] = None) -> str:
    """Queue command CMD on JOB_QUEUE with an initial status of 'queued'.

    The following status codes are supported for a task:
        queued:  unprocessed; still in the queue
        running: still running
        success: successful completion
        error:   erroneous completion

    Args:
        conn: an open redis connection.
        job_queue: name of the redis list used as the job queue.
        cmd: the command to queue.
        email: optional e-mail address stored alongside the task.

    Returns:
        The name of the specific redis hash for the specific task.

    Raises:
        RedisConnectionError: if the redis server does not answer a PING.
    """
    if not conn.ping():
        raise RedisConnectionError
    # Timestamp prefix keeps ids roughly sortable by creation time; uuid4
    # guarantees uniqueness.  (The old format duplicated %M%S and omitted
    # the hour separator: '%Y-%m-%d%H-%M%S-%M%S-'.)
    unique_id = ("cmd::"
                 f"{datetime.now().strftime('%Y-%m-%d-%H-%M-%S-')}"
                 f"{uuid4()}")
    # Populate the task hash BEFORE publishing the id on the queue, so a
    # worker can never pop an id whose metadata does not exist yet.
    for key, value in {"cmd": cmd, "result": "", "status": "queued"}.items():
        conn.hset(name=unique_id, key=key, value=value)
    if email:
        conn.hset(name=unique_id, key="email", value=email)
    conn.rpush(job_queue, unique_id)
    return unique_id
def POST(self):
    """Handle a shorten-URL form post.

    Validates the submitted form, derives a 6-character key from the SHA-1
    of the scheme-stripped URL, stores the mapping (plus a zero hit count
    and, for authenticated users, an ownership log entry), then redirects
    to the key's stats page.
    """
    import re  # function-local: no shared import block is visible here

    r = Redis(connection_pool=redis_pool)
    user_id = check_token(r)
    form = url_form()
    if not form.validates():
        return render_template('add.html',
                               form=form,
                               user_id=user_id,
                               is_add=True,
                               )
    url = form['url'].value
    # BUG FIX: the original used str.replace('http(s)?://', ''), which is a
    # literal replacement — that exact text never occurs in a URL, so the
    # scheme was never stripped.  Use the regex the pattern clearly intended.
    normalized = re.sub(r'http(s)?://', '', url).strip()
    token = hashlib.sha1()
    token.update(normalized.encode('utf-8'))  # sha1 requires bytes on py3
    key = token.hexdigest()[:6]
    # (Removed a leftover debug `print key + url` — py2 syntax, dead output.)
    if not r.hget(URL_HASH_NAME, key):
        r.hset(URL_HASH_NAME, key, url)
        r.hset(COUNT_HASH_NAME, key, 0)
        if user_id[1]:
            # Record which token created this key.
            r.hset(LOG_HASH_NAME, key,
                   r.hget(TOKEN_HASH_NAME, web.input().token))
    # web.py signals redirects by raising.
    if user_id[1] == '':
        raise web.seeother('/%s/+' % key)
    else:
        raise web.seeother('/%s/+?token=%s' % (key, user_id[1]))
def queue_cmd(cmd: str, conn: Redis) -> str:
    """Queue command CMD on the 'GN2::job-queue' redis list with an initial
    status of 'queued'.

    The following status codes are supported for a task:
        queued:  unprocessed; still in the queue
        running: still running
        success: successful completion
        error:   erroneous completion

    Returns:
        The name of the redis hash created for this task.

    Raises:
        RedisConnectionError: if the redis server does not answer a PING.
    """
    if not conn.ping():
        raise RedisConnectionError
    # Timestamp prefix keeps ids roughly sortable; uuid4 guarantees
    # uniqueness.  (The old format duplicated %M%S: '%Y-%m-%d%H-%M%S-%M%S-'.)
    unique_id = ("cmd::"
                 f"{datetime.now().strftime('%Y-%m-%d-%H-%M-%S-')}"
                 f"{uuid4()}")
    # BUG FIX: arguments were conn.hset(key, value, unique_id) — redis-py's
    # signature is hset(name, key, value), so every task wrote its fields
    # into three shared hashes named 'cmd'/'result'/'status' instead of
    # into the task's own hash.
    for key, value in {"cmd": cmd, "result": "", "status": "queued"}.items():
        conn.hset(unique_id, key, value)
    conn.rpush("GN2::job-queue", unique_id)
    return unique_id
class SportPipeline(object):
    """Scrapy item pipeline that persists crawled sport items into redis,
    one hash per item['key'] with item['seq'] as the field name."""

    collection_name = 'sport_items'

    def __init__(self, redis_host, redis_port):
        # Only the connection parameters are stored here; the client itself
        # is created lazily in open_spider so the pipeline can be
        # constructed without a reachable redis server.
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.client = None

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Build the pipeline from the crawler's REDIS_HOST / REDIS_PORT
        settings (Scrapy's standard construction hook)."""
        return cls(redis_host=crawler.settings.get('REDIS_HOST'),
                   redis_port=crawler.settings.get('REDIS_PORT'))

    def open_spider(self, spider):
        # BUG FIX: host/port were passed to Redis() alongside a default
        # BlockingConnectionPool(); redis-py ignores host/port whenever a
        # connection_pool is supplied, so the configured server was silently
        # dropped and the pool always targeted localhost.  The pool itself
        # must receive the connection parameters.
        self.client = Redis(connection_pool=BlockingConnectionPool(
            host=self.redis_host, port=self.redis_port))

    def close_spider(self, spider):
        # Release the pooled connections instead of merely dropping the
        # reference (the old code leaked the pool's open sockets).
        if self.client is not None:
            self.client.close()
            self.client = None

    def process_item(self, item, spider):
        # NOTE(review): the raw item object is stored as the hash value;
        # redis only accepts bytes/str/int/float, so this presumably relies
        # on the item's string conversion — confirm, or serialize (e.g.
        # json/ItemAdapter) explicitly before hset.
        self.client.hset(item['key'], item['seq'], item)
        return item
# Wait for redis connection while True: try: db.ping() except ConnectionError: logging.info("Waiting for connection to redis database...") sleep(1) else: logging.info("SUCCESS: Connected to Redis database.") break # Kick dead sessions (cause socket.io reloaded) out of rooms for room_token, room in db.hgetall('rooms').items(): # For all rooms in db room = json.loads(room) # Deserialize room data room['count'] = 0 # Set connections to 0 because no body can be connected db.hset('rooms', room_token, json.dumps(room)) logging.info("Kick dead sessions out of room {}".format(room_token)) # Create cleanup deamon, that deletes abandoned rooms from the database # Get params cleanup_interval = int(os.getenv('CLEANUP_INTERVAL', 60 * 15)) cleanup_room_expire_time = int(os.getenv('CLEANUP_ROOM_EXPIRE_TIME', 60 * 60)) cleanup_max_room_life_time = int( os.getenv('CLEANUP_MAX_ROOM_LIFE_TIME', 60 * 60 * 24)) # Define function that cleans abandoned rooms in an interval def room_cleanup(): while True: for room_token, room in db.hgetall( 'rooms').items(): # For all rooms in db