def get_available_proxies(self, conn):
    """Return usable proxies from redis, decoded to str.

    Three sorted sets are queried in one pipeline round trip and
    intersected; when the strict intersection is too small the
    criteria are loosened in two steps (drop score filter, then
    union recency with score).
    """
    stale_cutoff = int(time.time()) - self.ttl_validated_resource * 60
    pipe = conn.pipeline(False)
    pipe.zrevrangebyscore(self.score_queue, '+inf', self.lowest_score)
    pipe.zrevrangebyscore(self.ttl_queue, '+inf', stale_cutoff)
    pipe.zrangebyscore(self.speed_queue, 0, 1000 * self.longest_response_time)
    raw_scored, raw_ttl, raw_speed = pipe.execute()
    by_score = set(raw_scored)
    by_ttl = set(raw_ttl)
    by_speed = set(raw_speed)
    threshold = self.min_pool_size * 2
    candidates = by_score & by_ttl & by_speed
    if not candidates or len(candidates) < threshold:
        # too strict: ignore the score requirement
        candidates = by_ttl & by_speed
    if not candidates or len(candidates) < threshold:
        # loosest fallback: anything recent or well-scored
        candidates = by_ttl | by_score
    return decode_all(candidates)
def get_available_proxies(self, conn):
    """Return usable proxies from redis, decoded to str.

    Pipeline-queries three sorted sets (score, recency, speed) and
    intersects them; widens the selection in two fallback steps when
    fewer than 2 * LOWEST_TOTAL_PROXIES candidates survive.
    """
    recency_floor = int(time.time()) - TTL_VALIDATED_RESOURCE * 60
    pipe = conn.pipeline(False)
    pipe.zrevrangebyscore(self.score_queue, '+inf', LOWEST_SCORE)
    pipe.zrevrangebyscore(self.ttl_queue, '+inf', recency_floor)
    pipe.zrangebyscore(self.speed_queue, 0, 1000 * LONGEST_RESPONSE_TIME)
    results = pipe.execute()
    well_scored, recent, fast = (set(members) for members in results)
    minimum = LOWEST_TOTAL_PROXIES * 2
    chosen = well_scored & recent & fast
    if not chosen or len(chosen) < minimum:
        # relax: drop the score constraint
        chosen = recent & fast
    if not chosen or len(chosen) < minimum:
        # last resort: union of recent and well-scored
        chosen = recent | well_scored
    return decode_all(chosen)
def get_available_proxies(self, conn):
    """Return usable proxies from redis, decoded to str.

    Fetches three sorted sets in a single pipeline and intersects
    them. If fewer than 2 * LOWEST_TOTAL_PROXIES remain, it first
    re-admits all well-scored proxies alongside the recent-and-fast
    ones, then falls back to the union of recent and well-scored.
    """
    freshness_floor = int(time.time()) - TTL_VALIDATED_RESOURCE * 60
    pipe = conn.pipeline(False)
    pipe.zrevrangebyscore(self.score_queue, '+inf', LOWEST_SCORE)
    pipe.zrevrangebyscore(self.ttl_queue, '+inf', freshness_floor)
    pipe.zrangebyscore(self.speed_queue, 0, 1000 * LONGEST_RESPONSE_TIME)
    good, fresh, quick = map(set, pipe.execute())
    floor = LOWEST_TOTAL_PROXIES * 2
    selection = good & fresh & quick
    if not selection or len(selection) < floor:
        # widen: keep fresh-and-quick, plus every well-scored proxy
        selection = (fresh & quick) | good
    if not selection or len(selection) < floor:
        # widest: anything fresh or well-scored
        selection = fresh | good
    return decode_all(selection)
def get_proxies(self):
    """Refill self.pool with usable proxies fetched from redis.

    Queries three sorted sets (score, recency, speed) in one pipeline
    round trip, intersects them, and loosens the criteria in two
    fallback steps when fewer than 2 * self.length candidates remain.

    Bug fix: the previous version combined the raw result lists with
    boolean `and`/`or`. `a and b and c` simply evaluates to `c` when
    all operands are non-empty, and `a or b` to the first non-empty
    operand — so no intersection/filtering ever happened. Real set
    operations (as used by the sibling get_available_proxies
    implementations) are applied instead.
    """
    start_time = int(time.time()) - TTL_VALIDATED_RESOURCE * 60
    pipe = self.conn.pipeline(False)
    pipe.zrevrangebyscore(self.score_queue, '+inf', LOWEST_SCORE)
    pipe.zrevrangebyscore(self.ttl_queue, '+inf', start_time)
    pipe.zrangebyscore(self.speed_queue, 0, 1000 * LONGEST_RESPONSE_TIME)
    scored_proxies, ttl_proxies, speed_proxies = pipe.execute()
    scored_proxies = set(scored_proxies)
    ttl_proxies = set(ttl_proxies)
    speed_proxies = set(speed_proxies)
    # genuine set intersection, not boolean `and`
    proxies = scored_proxies & ttl_proxies & speed_proxies
    if not proxies or len(proxies) < self.length * 2:
        # relax: fresh-and-fast proxies plus all well-scored ones
        proxies = (ttl_proxies & speed_proxies) | scored_proxies
    if not proxies or len(proxies) < self.length * 2:
        # last resort: anything fresh or well-scored
        proxies = ttl_proxies | scored_proxies
    proxies = decode_all(proxies)
    print('{} proxies have been fetched'.format(len(proxies)))
    self.pool.extend(proxies)
def update_conf(self):
    """Rewrite the squid config with currently usable proxies and reload squid.

    Fetches candidate proxies from redis (score, recency and speed
    sorted sets), renders one cache_peer line per proxy from the
    template, writes the new config, and asks squid to reconfigure.

    Bug fix: the previous version combined the redis result lists with
    boolean `and` (`a and b and c` just returns `c` when all are
    non-empty), so no real intersection was performed; proper set
    operations are used now, matching get_available_proxies.
    """
    conn = get_redis_conn()
    start_time = int(time.time()) - TTL_VALIDATED_RESOURCE * 60
    pipe = conn.pipeline(False)
    pipe.zrevrangebyscore(self.score_queue, '+inf', LOWEST_SCORE)
    pipe.zrevrangebyscore(self.ttl_queue, '+inf', start_time)
    pipe.zrangebyscore(self.speed_queue, 0, 1000 * LONGEST_RESPONSE_TIME)
    scored_proxies, ttl_proxies, speed_proxies = pipe.execute()
    scored_proxies = set(scored_proxies)
    ttl_proxies = set(ttl_proxies)
    speed_proxies = set(speed_proxies)
    # genuine set intersection, not boolean `and`
    proxies = scored_proxies & ttl_proxies & speed_proxies
    if not proxies:
        # relax: drop the speed constraint
        proxies = scored_proxies & ttl_proxies
    if not proxies:
        # last resort: every recently validated proxy
        proxies = ttl_proxies
    proxies = decode_all(proxies)
    conts = list()
    with open(self.template_path, 'r') as fr, open(self.conf_path, 'w') as fw:
        original_conf = fr.read()
        if not proxies:
            # keep the template untouched so squid stays functional
            fw.write(original_conf)
            client_logger.info('no proxies got at this turn')
        else:
            conts.append(original_conf)
            # if two proxies use the same ip and different ports and no name
            # is assigned, a cache_peer error will be raised — the index is
            # formatted into each entry to keep peer names unique
            for index, proxy in enumerate(proxies):
                _, ip_port = proxy.split('://')
                ip, port = ip_port.split(':')
                conts.append(
                    self.default_conf_detail.format(ip, port, index))
            conts.extend(self.other_confs)
            conf = '\n'.join(conts)
            fw.write(conf)
    # in docker, execute with shell will fail — use an argv list, shell=False
    subprocess.call([self.squid_path, '-k', 'reconfigure'], shell=False)
    client_logger.info('update squid conf successfully')