def game() -> None:
    """Run a full bingo game.

    Generates the participants and their cards, then draws 80 numbers
    (popped from the CARDS set).  Each drawn number is announced, and every
    player whose card contains it gains one point.  The first player(s) to
    reach 15 points are announced as winners and the game ends; if nobody
    reaches 15 points after all draws, "No winner" is reported.
    """
    generate_cards(CARDS)
    generate_participants()
    print('Start of the game!!')
    # Draw all 80 numbers for the game up front, in random order.
    numbers = redis.spop(CARDS, 80)
    for index, number in enumerate(numbers):
        number = int(number)
        print(f'Round {index+1}: {number}')
        round_winners = ''
        # Players are numbered 1..50; player i owns card key CARD + str(i).
        for i in map(str, range(1, 51)):
            if redis.sismember(CARD + i, number):
                redis.zincrby(SCORES, 1, SCORE + i)
                # BUG FIX: the score was read from the hard-coded key
                # "scores" instead of the SCORES constant used by every
                # other call in this function.
                round_winners += f'player {i} - score {redis.zscore(SCORES, SCORE + i)} | '
        print('Round winners: ' + round_winners)
        # A player can gain at most one point per round (one card each),
        # so a winner's score is exactly 15 when first detected.
        winners = redis.zrangebyscore(SCORES, 15, 15)
        if winners:
            # Keep only the digits of each member name (e.g. b'score7' -> '7').
            winners = [re.sub(r'\D', '', str(s)) for s in winners]
            print('Winning participant(s): ' + str(winners))
            print('End of the game')
            return
    print('No winner...')
def work():
    """Worker loop: drain the shared Redis URL queue, crawling each entry."""
    worker_name = threading.current_thread().name
    while not domain_run.UrlsEnmpy():
        # spop removes one arbitrary member of the queue set.
        url = redis.spop(domain_run.redis_queue)
        skip = url is None or url == ""
        if not skip:
            spider.crawl_page(worker_name, url)
def get_animal():
    """Pop a random animal name from the 'animals' set, refilling it when empty."""
    picked = redis.spop('animals')
    if picked:
        return picked
    # Set exhausted -- reseed with the full menagerie and draw again.
    redis.sadd('animals', 'human', 'bird', 'dog', 'deer', 'mouse',
               'fish', 'snake', 'bear', 'cat', 'duck')
    return redis.spop('animals')
def main():
    """Pop one pending reply request from Redis and process it.

    InvalidRequest / AlreadyProcessed just release the request's lock;
    commenting failures and unexpected errors hand the request back for
    retry handling; a non-whitelisted-user Reddit error additionally
    records the author in the not-whitelisted set.
    """
    request_json = redis.spop(current_set)
    if not request_json:
        # Nothing queued at the moment.
        return
    request = json.loads(request_json.decode('utf-8'))
    try:
        reply_to_request(request)
    except InvalidRequest as ir:
        util.open_lock(redis, request['id'])
        logging.info(ir)
    except AlreadyProcessed as ap:
        util.open_lock(redis, request['id'])
        logging.error(ap)
    except CommentingFailed as cf:
        util.handle_failed_request(redis, request, current_set, cf)
        logging.error(cf)
    except RedditAPIException as rae:
        if "NOT_WHITELISTED_BY_USER_MESSAGE" in str(rae):
            # The author must whitelist the bot before it may message them.
            redis.sadd(config['REDIS_NOT_WHITELISTED_USERS'], request['author'])
            util.open_lock(redis, request['id'])
            logging.error(
                f"User {request['author']} needs to whitelist me. Adding to {config['REDIS_NOT_WHITELISTED_USERS']}"
            )
        else:
            util.handle_failed_request(redis, request, current_set, rae)
            logging.error(
                f"{type(rae).__name__} occurred while replying to request {request['id']} : {request['link']} : {rae}."
            )
    except Exception as e:
        util.handle_failed_request(redis, request, current_set, e)
        logging.error(
            f"{type(e).__name__} occurred while replying to request {request['id']} : {request['link']} : {e}."
        )
def create_player(redis, i):
    """Create hash 'player:<i>' with a name, a card drawn from 'available_cards', and a zero score."""
    player_key = "player:" + str(i)
    redis.hset(player_key, "name", "player" + str(i))
    # Draw a random card number for this player; the card leaves the pool.
    drawn = redis.spop("available_cards")
    redis.hset(player_key, "card", "card:" + str(int(drawn)))
    redis.hset(player_key, "score", "score:" + str(0))
    # Print the freshly created player's stored fields.
    print("Imprimindo os dados do jogador player:" + str(i))
    print(redis.hvals(player_key))
def dequeue_item(queue_name):
    """Pop one random entry from the Redis set *queue_name*.

    The raw bytes are decoded as UTF-8; payloads wrapped in braces are
    treated as JSON documents and parsed.  Returns None when the set is
    empty.
    """
    item = redis.spop(queue_name)
    if item:
        item = item.decode('utf-8')
        looks_like_json = item.startswith("{") and item.endswith("}")
        if looks_like_json:
            item = json.loads(item)
    logging.debug(u"Dequeuing item from {}:\t{}".format(queue_name, item))
    return item
def make_request(url, return_soup=True):
    """Global URL request/response handler.

    Fetches *url* (optionally through a proxy), tracking a per-URL retry
    count in Redis and recursing on request failure.  Returns
    (BeautifulSoup, html_text) when return_soup is True, (response,
    response) otherwise, or (None, None) when the URL is skipped,
    over-retried, or gets a non-200 status.
    """
    url = format_url(url)
    log("make_request: " + url)
    # The retry count is stored as a member of a Redis set keyed by the URL
    # itself; spop removes it, so a successful fetch clears the counter.
    trying_times = redis.spop(url)
    trying_times = int(trying_times.decode()) if trying_times else 0
    log("tring_times: {}".format(trying_times))
    if trying_times > settings.max_retrying_times:
        log("Tring too many times")
        return None, None  # exceeded the retry limit
    if "picassoRedirect" in url:
        return None, None  # skip redirect URLs
    global num_requests
    if num_requests >= settings.max_requests:
        raise Exception("Reached the max number of requests: {}".format(
            settings.max_requests))
    # NOTE(review): settings.headers is mutated in place here (shared dict) --
    # every call overwrites its User-Agent. Confirm this is intended.
    headers = settings.headers
    headers["User-Agent"] = random.choice(settings.agents)
    proxy_dict = get_proxy()
    # Shorter timeout for page-parsing requests, longer for raw downloads.
    timeout = 20 if return_soup else 60
    try:
        if proxy_dict:
            r = requests.get(url, headers=headers, proxies=proxy_dict,
                             timeout=timeout)
        else:
            r = requests.get(url, headers=headers, timeout=timeout)
    except RequestException as e:
        log("WARNING: Request for {} failed. Retrying.....{} times".format(
            url, trying_times + 1))
        # Record the bumped retry count, then retry recursively.
        redis.sadd(url, str(trying_times + 1))
        return make_request(url, return_soup)
    num_requests += 1
    if r.status_code != 200:
        # Audible alert (macOS 'say') plus a log entry for bad statuses.
        os.system('say "Got non-200 Response"')
        log("WARNING: Got a {} status code for URL: {}".format(
            r.status_code, url))
        return None, None
    if return_soup:
        return BeautifulSoup(r.text, "lxml"), r.text
    return r, r
def main():
    """Pop one pending upload request from Redis and process it, routing
    each failure class to the appropriate unlock / retry handling."""
    request_json = redis.spop(current_set)
    if not request_json:
        # Nothing queued at the moment.
        return
    request = json.loads(request_json.decode('utf-8'))
    try:
        upload_request(request)
    except InvalidRequest as ie:
        util.open_lock(redis, request['id'])
        logging.info(f"Invalid upload request {request['id']} : {request['link']}: {ie}")
    except AlreadyProcessed as ape:
        util.open_lock(redis, request['id'])
        logging.error(ape)
    except Exception as e:
        util.handle_failed_request(redis, request, current_set, e)
        logging.error(
            f"{type(e).__name__} occurred while uploading request {request['id']} : {request['link']} : {e}")
def crawling(self, homepage):
    """Depth-bounded crawl driver.

    Repeatedly drains the shared URL queue: each popped URL is marked
    (by md5) in the crawled set, its page is fetched for hyperlinks, and
    the discovered links are enqueued for later visits.  Stops once
    self.current_deepth exceeds self.crawl_deepth.
    """
    # Loop condition: crawl depth must not exceed crawl_deepth.
    while self.current_deepth <= self.crawl_deepth:
        print(self.UrlsEnmpy())
        print(int(redis.scard(deepspider.redis_queue)))
        # Loop condition: there are still URLs waiting to be crawled.
        while self.UrlsEnmpy():
            # Pop one URL from the head of the queue.
            visitUrl = redis.spop(deepspider.redis_queue)
            if visitUrl is None or visitUrl == "":
                continue
            # Record the visited URL (by md5) in the crawled set.
            redis.sadd(deepspider.redis_cralw, com.md5_url(visitUrl))
            print(visitUrl)
            print(self.current_deepth)
            # Extract the page's hyperlinks.
            links = self.getHyperLinks(visitUrl, homepage)
            # BUG FIX: newly discovered links must be enqueued on the
            # pending queue (redis_queue); the original added them to the
            # crawled set (redis_cralw), so they were never visited.
            for link in links:
                redis.sadd(deepspider.redis_queue, link)
        self.current_deepth += 1
def draw_number(redis):
    """Pop and return one random member of the 'set:numbers' Redis set."""
    drawn = redis.spop("set:numbers")
    return drawn
def deq_redis(stack_name):
    """Remove and return one random member of the Redis set *stack_name*."""
    popped = redis.spop(stack_name)
    return popped
def dequeue_url():
    """Pop one random URL from the 'listing_url_queue' Redis set.

    Returns the URL decoded as UTF-8, or None when the queue is empty.
    (The original called .decode() unconditionally and raised
    AttributeError once the queue drained.)
    """
    raw = redis.spop("listing_url_queue")
    if raw is None:
        # Queue drained -- signal the caller instead of crashing.
        return None
    return raw.decode("utf-8")
def dequeue():
    """Remove and return one random member of the global 'nodes' set."""
    global redis
    node = redis.spop('nodes')
    return node
mongo = pymongo.MongoClient() collection = mongo.twitter.tweets user_collection = mongo.twitter.users collection.create_index([('id', pymongo.ASCENDING)], unique=True) user_collection.create_index([('id', pymongo.ASCENDING)], unique=True) initialUsers = [ 27260086, ] redis.sadd('users', *initialUsers) while redis.scard('users') > 0: redis.delete('user_connections') uid = redis.spop('users') try: user = api.get_user(user_id=uid) except tweepy.TweepError as e: print(e) print('Error when looking up user {}'.format(uid)) continue if user.protected: print('Skipping protected user {} (@{})'.format( user.name, user.screen_name)) redis.sadd('users_done', uid) continue print('Downloading tweets for user {} (@{})'.format(
def dequeue_images_url():
    """Pop one entry from 'images_queue' and split it on '::::'.

    Returns the first two fields as a pair, or (None, None) when the
    queue is empty.
    """
    raw = redis.spop("images_queue")
    if not raw:
        return None, None
    fields = raw.decode().split("::::")
    return fields[0], fields[1]
def dequeue_items_url():
    """Pop one random URL from the Redis set 'items_queue'.

    Returns the UTF-8 decoded URL, or the falsy pop result (None) when
    the set is empty.
    """
    url = redis.spop("items_queue")
    return url.decode() if url else url
def dequeue_categories_url():
    """Pop one random URL from the Redis set 'categories_queue'.

    Returns the UTF-8 decoded URL, or the falsy pop result (None) when
    the set is empty.
    """
    url = redis.spop("categories_queue")
    return url.decode() if url else url
def dequeue_url():
    """Pop one random entry from 'listing_url_queue' and return it as-is
    (whatever the Redis client yields; None when the set is empty)."""
    entry = redis.spop("listing_url_queue")
    return entry
def dequeue_url():
    """Pop one *random* URL off the listing_url_queue set and return it."""
    popped = redis.spop("listing_url_queue")
    return popped
def CreateNewStudentcode(batchcode,spycode,learningcentercode,totalnumber):
    """Generate sequential student codes for every TCP of a batch / learning centre.

    For each TCP row matching *batchcode*/*spycode* a code template is built
    (batch digits + centre prefix + student type + professional level) and ten
    numbered codes are allocated from the hsStudentCode counter dict.  A code
    that does not exist yet is assigned to a student popped from the Redis
    'newstudents' set; an existing code has its student re-pointed at the new
    batch/TCP/centre/class (after deleting the student's elective rows).

    NOTE(review): *totalnumber* is unused -- the inner loop is hard-coded to
    10 iterations; presumably it should drive that range.  Confirm.
    """
    for tcprow in session.query(tcp).filter_by(batchcode=batchcode).\
        filter_by(spycode = spycode):
        #for every tcp and of every learningcenter add totalnumber new items
        codeTemple = batchcode[2:4]+learningcentercode[:3]+tcprow.studenttype+tcprow.professionallevel
        #find class
        classcode=''
        studentcategory=tcprow.studenttype+tcprow.professionallevel
        curclass= session.query(classinfo).filter_by(batchcode=batchcode).\
            filter_by(learningcentercode=learningcentercode).\
            filter_by(spycode=spycode).\
            filter_by(studentcategory=studentcategory).first()
        if curclass is None:
            print('%s %s %s class not exists'%(batchcode,learningcentercode,spycode))
        else:
            #print('classname %s'%(curclass.classname))
            classcode=curclass.classcode
        # Allocate ten sequential numbers for this code template.
        for i in range(0,10):
            #if not exists dict.key then add new
            if codeTemple in hsStudentCode:
                stunum=hsStudentCode[codeTemple]+1
            else:
                hsStudentCode[codeTemple]=1
                stunum=1
            hsStudentCode[codeTemple]=stunum
            # Zero-pad the sequence number to five digits.
            newStudentCode=str(codeTemple)+(5-len(str(stunum)))*'0'+str(stunum)
            #print('tcpTemple %s ,tcpcode: %s learningcentercode %s studentcode:%s'%(codeTemple,tcprow.tcpcode,learningcentercode,newStudentCode))
            thisstudent=session.query(student).filter_by(studentcode=newStudentCode).first()
            # search one by newstudentcode if exists then change this
            #
            if thisstudent is None:
                #print(' %s is not exists'%(newStudentCode,))
                #find a student from redis
                # NOTE(review): spop may return None when 'newstudents' is
                # exhausted, which would crash the .decode() below -- confirm
                # the set is always pre-loaded with enough members.
                oldstudentcode=redis.spop('newstudents')
                oldstudent=session.query(student).filter_by(studentcode=oldstudentcode.decode('utf-8')).first()
                if oldstudent is None:
                    print(' %s is not exists'%(oldstudentcode.decode('utf-8'),))
                else:
                    print('studentcode[%s] is change to new '%(oldstudentcode.decode('utf-8'),))
                    # Re-point the popped student at the newly generated code.
                    oldstudent.studentcode=newStudentCode
                    oldstudent.batchcode = batchcode
                    oldstudent.tcpcode = tcprow.tcpcode
                    oldstudent.learningcentercode=learningcentercode
                    oldstudent.classcode=classcode
                    oldstudent.spycode = spycode
                    oldstudent.professionallevel=tcprow.professionallevel
                    oldstudent.studenttype=tcprow.studenttype
                    oldstudent.studentcategory=str(tcprow.studenttype)+str(tcprow.professionallevel)
                    oldstudent.enrollmentstatus='1'
                    oldstudent.createtime=func.now()
                    # Keep the basic-info record's code in sync, when present.
                    oldstudentbasic=session.query(studentinfo).filter_by(studentid=oldstudent.studentid).first()
                    if oldstudentbasic is not None:
                        oldstudentbasic.studentcode=newStudentCode
                        oldstudentbasic.createtime=func.now()
                    session.flush()
                    session.commit()
            else:
                print('studentcode[%s] is exists'%(thisstudent.studentcode,))
                studentid = thisstudent.studentid
                print('delete elc %s'%(thisstudent.studentcode,))
                # Remove the student's elective (elc) rows via the second session.
                for rowelc in session2.query(studentelc).filter_by(studentcode=thisstudent.studentcode):
                    session2.delete(rowelc)
                session2.flush()
                session2.commit()
                # Re-point the existing student at the new batch/TCP/centre/class.
                thisstudent.batchcode = batchcode
                thisstudent.tcpcode = tcprow.tcpcode
                thisstudent.learningcentercode=learningcentercode
                thisstudent.classcode=classcode
                thisstudent.spycode = spycode
                thisstudent.professionallevel=tcprow.professionallevel
                thisstudent.studenttype=tcprow.studenttype
                thisstudent.studentcategory=str(tcprow.studenttype)+str(tcprow.professionallevel)
                thisstudent.enrollmentstatus='1'
                thisstudent.createtime=func.now()
                thisstudentbasic=session.query(studentinfo).filter_by(studentid=studentid).first()
                if thisstudentbasic is not None:
                    thisstudentbasic.studentcode=thisstudent.studentcode
                    thisstudentbasic.createtime=func.now()
                session.flush()
                session.commit()
uid = 1 ''' # 限制频繁请求 check_act = redis.incr('check_act'+uid) if check_act>1: redis.ttl('check_act'+uid, 1) return '请求过于频繁,请稍后再试' # 查询该用户是否抢购成功过 check = redis.incr('check_'+uid) if check and check>=1: return '你已经抢购过了' goods_id= redis.spop(goods_set) if goods_id: # 抢购成功 ''' business 抢购成功业务处理 ''' if business: # 业务处理成功: # 标记用户已经购买过 redis.incr('check_'+uid) else: # 将抢购的商品归还 redis.sadd(goods_set, goods_id) else: return '商品不足'
import redis
import pika
#get message from redis then send to rabbitmq
# Bridge script: drain one student's study-status messages from a Redis set
# and publish each one to the local RabbitMQ 'StudyStatus' queue.
mqconn = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = mqconn.channel()
channel.queue_declare(queue='StudyStatus')
# NOTE: this rebinds the name 'redis' from the module to a client instance.
redis = redis.Redis(host='10.100.134.160',port=6380,db=1)
redisKey = "student.6510000"
# spop removes one random member per iteration until the set is empty.
while (redis.scard(redisKey)>0):
    messagebody = redis.spop(redisKey)
    channel.basic_publish(exchange='',routing_key='StudyStatus',\
        body=messagebody)
mqconn.close()
def dequeue_url():
    """Pop one 'url,category_code,mode' entry from listing_url_queue.

    Returns a (url, category_code, mode) triple with mode converted to int.
    NOTE(review): split(',') on the raw pop result assumes the client
    returns str (decode_responses) -- confirm against the Redis setup.
    """
    entry = redis.spop("listing_url_queue")
    listing_url, category_code, mode = entry.split(',')
    return listing_url, category_code, int(mode)
import redis
import pika
#get message from redis then send to rabbitmq
# Bridge script: drain one student's study-status messages from a Redis set
# and publish each one to the local RabbitMQ 'StudyStatus' queue.
mqconn = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = mqconn.channel()
channel.queue_declare(queue='StudyStatus')
# NOTE: this rebinds the name 'redis' from the module to a client instance.
redis = redis.Redis(host='10.100.134.160', port=6380, db=1)
redisKey = "student.6510000"
# spop removes one random member per iteration until the set is empty.
while (redis.scard(redisKey) > 0):
    messagebody = redis.spop(redisKey)
    channel.basic_publish(exchange='',routing_key='StudyStatus',\
        body=messagebody)
mqconn.close()