async def push_job(self, job_type, job_info):
    """Push a job dict onto the redis list named after *job_type*.

    Jobs carrying a 'url' are deduplicated through ``self.url_filter``:
    a URL seen before is dropped with a warning instead of re-queued.
    Jobs without a 'url' are always pushed.

    :param job_type: list key in redis (converted with ``str``).
    :param job_info: JSON-serializable dict describing the job.
    """
    # Lazily create the connection pool on first use.
    if not self._pool:
        await self.init_pool()
    url = job_info.get('url', '')
    if url:
        if url in self.url_filter:
            # Duplicate URL: skip instead of queueing it again.
            # (logging.warn is deprecated; use warning)
            LOGGER.warning("%s job filtered. %s" % (job_type, str(job_info)))
            return
        # Only remember real URLs; the original also added '' for
        # url-less jobs, which pointlessly grew the filter.
        self.url_filter.add(url)
    with await self._pool as conn:
        await conn.execute('lpush', str(job_type), json.dumps(job_info))
    LOGGER.info("push %s job into redis: %s" % (job_type, str(job_info)))
def fetch_cookies(cls):
    """Return the cookies (parsed JSON) of the first rotated user that
    has an entry in the 'account' hash, or ``None`` if no user does.

    Rotates users by popping from the 'users' set and immediately adding
    the member back, so successive calls cycle through accounts.

    NOTE(review): the original looped forever (``while True``) when no
    user had cookies, and crashed on ``sadd(None)`` once 'users' was
    empty; this version tries each user at most once and then gives up.
    """
    r = redis.Redis(connection_pool=cls.redis_pool)
    # Try at most as many users as currently exist in the set.
    for _ in range(r.scard('users')):
        user = r.spop('users')
        if user is None:
            break  # set emptied concurrently
        # Put the member back so users keep rotating round-robin.
        r.sadd('users', user)
        c = r.hget('account', user)
        if c:
            return json.loads(c.decode('utf-8'))
    # logging.warn is deprecated; use warning.
    LOGGER.warning('cookies not get')
    return None
def push_job(cls, job_type, job_info):
    """Push a job dict onto the redis list named after *job_type*.

    Jobs carrying a 'url' are deduplicated through ``cls.url_filter``:
    a URL seen before is dropped with a warning; a fresh URL is recorded
    and the job is pushed. Jobs without a 'url' are always pushed.

    :param job_type: list key in redis (converted with ``str``).
    :param job_info: JSON-serializable dict describing the job.
    """
    url = job_info.get('url')
    # Guard clause removes the duplicated push/log code the original
    # carried in both branches.
    if url is not None and url in cls.url_filter:
        # logging.warn is deprecated; use warning.
        LOGGER.warning("%s job filtered. %s" % (job_type, str(job_info)))
        return
    if url is not None:
        cls.url_filter.add(url)
    r = redis.Redis(connection_pool=cls.redis_pool)
    r.lpush(str(job_type), json.dumps(job_info))
    LOGGER.info("push %s job into redis: %s" % (job_type, str(job_info)))
def sig_handler(sig, frame):
    """Signal handler: log the signal, then schedule ``shutdown`` on the
    tornado IOLoop.

    ``add_callback`` is used because it is the IOLoop method that is safe
    to call from a signal handler context.

    :param sig: signal number received.
    :param frame: current stack frame (unused, required by signal API).
    """
    # logging.warn is deprecated; use warning. Lazy %-args keep
    # formatting out of the hot path.
    LOGGER.warning('Caught signal: %s', sig)
    tornado.ioloop.IOLoop.instance().add_callback(shutdown)