def push_third_party_vulnerabilities(self, vulnerabilities_id):
    """
    Push a result to a third-party vulnerability management platform.
    :param vulnerabilities_id:
    :return:
    """
    try:
        status = Config('third_party_vulnerabilities', 'status').value
        if int(status):
            q = Queue(self.project_name,
                      self.third_party_vulnerabilities_name,
                      self.third_party_vulnerabilities_type,
                      self.file_path,
                      self.line_number,
                      self.code_content,
                      vulnerabilities_id)
            q.push()
    except Exception as e:
        traceback.print_exc()
        self.log('critical', e.message)
def queue():
    from utils.queue import Queue
    """
    Push results to a third-party vulnerability management platform.
    Start the queue worker first:
        celery -A daemon worker --loglevel=info
    :return:
    """
    # Project ID and rule ID are required
    project_id = request.json.get('project_id')
    rule_id = request.json.get('rule_id')
    if project_id is None or rule_id is None:
        return jsonify(code=1002, result="Project ID and Rule ID can't be empty!")
    # Project info
    project_info = CobraProjects.query.filter_by(id=project_id).first()
    # Unpushed vulnerability and rule information
    result_all = db.session().query(CobraRules, CobraResults).join(
        CobraResults, CobraResults.rule_id == CobraRules.id
    ).filter(
        CobraResults.project_id == project_id,
        CobraResults.status == 0,
        CobraResults.rule_id == rule_id
    ).all()
    if len(result_all) == 0:
        return jsonify(code=1001, result="There are no unpushed vulnerabilities")
    # Push each vulnerability
    for index, (rule, result) in enumerate(result_all):
        try:
            # Query the vulnerability type information
            vul_info = CobraVuls.query.filter(CobraVuls.id == rule.vul_id).first()
            # Push to the third-party vulnerability management platform
            q = Queue(project_info.name, vul_info.name, vul_info.third_v_id,
                      result.file, result.line, result.code, result.id)
            q.push()
        except Exception:
            traceback.print_exc()
    return jsonify(code=1001,
                   result="Successfully pushed {0} vulnerabilities to a third-party "
                          "vulnerability management platform".format(len(result_all)))
def __init__(self, **kwargs):
    if not hasattr(Init_App, '_has_init'):
        if 'env' in kwargs.keys():
            env = kwargs['env']
        else:
            raise Exception('Missing environment configuration parameter "env"')
        # Load configuration
        self.app.config.from_object(configs[env])
        # Enable CSRF protection
        # CSRFProtect(self.app)
        # Set up logging
        self.set_log(self.app.config.get('LOGGER_LEVAL'))
        # Database connection instance
        self.db = db.init_app(self.app)
        # Redis connection instance
        self.redis_conn = redis.StrictRedis(
            host=self.app.config.get('REDIS_HOST'),
            port=self.app.config.get('REDIS_PORT'),
            db=self.app.config.get('REDIS_SELECT'),
            password=self.app.config.get('REDIS_PWD'),
            charset=self.app.config.get('REDIS_CHARSET'))
        global redis_conn
        redis_conn = self.redis_conn
        self.app.task_queue = Queue('task', redis_conn)
        Session(self.app)
        # Register the custom route converter
        self.app.url_map.converters['re'] = RegxConverter
        # Register blueprint routes on the app
        from apps.api_1_0 import api, html_blueprint
        self.app.register_blueprint(api)
        self.app.register_blueprint(html_blueprint)
        # Initialization complete; mark the class as initialized
        Init_App._has_init = True
async def create(self, players: dict, maps: dict, team_names: dict):
    """ Creates match.
    https://github.com/ModuleLIFT/API/blob/master/docs/modules.md#createself-players-dict-maps-dict-team_names-dict
    """
    queue = Queue(
        current_league=self.current_league,
        current_match=self,
        players=players,
        maps=maps,
        team_names=team_names,
    )

    validation = await queue.validate()
    if validation.error:
        return validation

    # Working out player selection type.
    if queue.player_type.random:
        assign_random = queue.captain.random()
        if assign_random.error:
            return assign_random
    elif queue.player_type.elo:
        assign_elo = await queue.captain.elo()
        if assign_elo.error:
            return assign_elo
    else:
        assign_given = queue.captain.given()
        if assign_given:
            return assign_given

    # Working out map selection type.
    if queue.map_type.given:
        queue.map.given()
    elif queue.map_type.random:
        queue.map.random()
    else:
        queue.map.veto()

    # Creating the match.
    return await queue.create()
def _build_jump_nodes(self):
    """
    Build a list of jump nodes for the tree.

    Designate the macro leaves (the leaves of the macro tree) as jump nodes.
    Sort the list in linear time.
    """
    self._jump_nodes = []
    Q = Queue()
    Q.enqueue(self._tree.root())
    while not Q.is_empty():
        p = Q.dequeue()
        if self._tree.height(p) > self._block_size:
            for ch in self._tree.children(p):
                if self._tree.height(ch) <= self._block_size:
                    if p not in self._jump_nodes:
                        self._jump_nodes.append(p)
                else:
                    Q.enqueue(ch)
    # Bucket sort should be used for sorting!
    self._jump_nodes.sort(key=lambda p: self._tree.depth(p), reverse=True)
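# A minimal sketch of the linear-time bucket sort hinted at by the comment above,
# assuming node depth is bounded by the height of the root and using only the
# tree methods already referenced (root, height, depth). The helper name
# `_sort_jump_nodes_by_depth` is hypothetical, not part of the original code.
def _sort_jump_nodes_by_depth(self):
    max_depth = self._tree.height(self._tree.root())
    buckets = [[] for _ in range(max_depth + 1)]
    for node in self._jump_nodes:
        buckets[self._tree.depth(node)].append(node)
    # Deepest nodes first, matching reverse=True in the comparison sort above.
    self._jump_nodes = [node
                        for depth in range(max_depth, -1, -1)
                        for node in buckets[depth]]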
def getAiPath(self, start, goal):
    bfsQueue = Queue()
    used = set()
    currentState = (start, [], 1)
    bfsQueue.push(currentState)
    while (not bfsQueue.isEmpty() and not self.isGoalState(currentState[0], goal)):
        currentState = bfsQueue.pop()
        if (currentState[0] not in used and not self.isGoalState(currentState[0], goal)):
            used.add(currentState[0])
            for state in self.getDirections(currentState):
                newState = (state[0], [], 1)
                for step in currentState[1]:
                    newState[1].append(step)
                newState[1].append(state[1])
                bfsQueue.push(newState)
    print start, goal, currentState[1]
    return currentState[1]
# coding:utf-8
__author__ = 'admin'
# --------------------------------
# Created by admin on 2016/8/25.
# ---------------------------------
from utils.queue import PriorityQueue as Queue
from threading import Thread
import urllib2
import time
import datetime
from utils.encrypt import md5
import urlparse
from pybloom import BloomFilter, ScalableBloomFilter
import traceback

_queue = Queue()
_size = 0
_handle = None


def spider_init(pool_size, handle=lambda url, content: None):
    '''Initialize the spider.

    pool_size  size of the thread pool
    handle     callback for crawl results; it must take two arguments:
               the first is the url, the second is the crawled content
    '''
    print datetime.datetime.now(), "[Spider]:init...."
    global _size, _queue, _url_max_num, _proxy_list, _handle
    if _size == 0 and _handle is None:
        _size = pool_size
        _handle = handle
def analyse(self):
    if self.directory is None:
        logging.critical("Please set directory")
        sys.exit()
    logging.info('Start code static analyse...')

    d = directory.Directory(self.directory)
    files = d.collect_files(self.task_id)
    logging.info('Scan Files: {0}, Total Time: {1}s'.format(files['file_nums'], files['collect_time']))

    ext_language = {
        # Image
        '.jpg': 'image',
        '.png': 'image',
        '.bmp': 'image',
        '.gif': 'image',
        '.ico': 'image',
        '.cur': 'image',
        # Font
        '.eot': 'font',
        '.otf': 'font',
        '.svg': 'font',
        '.ttf': 'font',
        '.woff': 'font',
        # CSS
        '.css': 'css',
        '.less': 'css',
        '.scss': 'css',
        '.styl': 'css',
        # Media
        '.mp3': 'media',
        '.swf': 'media',
        # Execute
        '.exe': 'execute',
        '.sh': 'execute',
        '.dll': 'execute',
        '.so': 'execute',
        '.bat': 'execute',
        '.pl': 'execute',
        # Edit
        '.swp': 'tmp',
        # Cert
        '.crt': 'cert',
        # Text
        '.txt': 'text',
        '.csv': 'text',
        '.md': 'markdown',
        # Backup
        '.zip': 'backup',
        '.bak': 'backup',
        '.tar': 'backup',
        '.rar': 'backup',
        '.tar.gz': 'backup',
        '.db': 'backup',
        # Config
        '.xml': 'config',
        '.yml': 'config',
        '.spf': 'config',
        '.iml': 'config',
        '.manifest': 'config',
        # Source
        '.psd': 'source',
        '.as': 'source',
        # Log
        '.log': 'log',
        # Template
        '.template': 'template',
        '.tpl': 'template',
    }
    for ext in files:
        if ext in ext_language:
            logging.info('{0} - {1}'.format(ext, files[ext]))
            continue
        else:
            logging.info(ext)

    languages = CobraLanguages.query.all()
    rules = CobraRules.query.filter_by(status=1).all()
    extensions = None

    # `grep` (`ggrep` on Mac)
    grep = '/bin/grep'
    # `find` (`gfind` on Mac)
    find = '/bin/find'
    if 'darwin' == sys.platform:
        ggrep = ''
        gfind = ''
        for root, dir_names, file_names in os.walk('/usr/local/Cellar/grep'):
            for filename in file_names:
                if 'ggrep' == filename or 'grep' == filename:
                    ggrep = os.path.join(root, filename)
        for root, dir_names, file_names in os.walk('/usr/local/Cellar/findutils'):
            for filename in file_names:
                if 'gfind' == filename:
                    gfind = os.path.join(root, filename)
        if ggrep == '':
            logging.critical("brew install ggrep please!")
            sys.exit(0)
        else:
            grep = ggrep
        if gfind == '':
            logging.critical("brew install findutils please!")
            sys.exit(0)
        else:
            find = gfind

    """
    all vulnerabilities
    vulnerabilities_all[vuln_id] = {'name': 'vuln_name', 'third_v_id': 'third_v_id'}
    """
    vulnerabilities_all = {}
    vulnerabilities = CobraVuls.query.all()
    for v in vulnerabilities:
        vulnerabilities_all[v.id] = {
            'name': v.name,
            'third_v_id': v.third_v_id
        }

    for rule in rules:
        rule.regex_location = rule.regex_location.strip()
        rule.regex_repair = rule.regex_repair.strip()
        logging.info('Scan rule id: {0} {1} {2}'.format(self.project_id, rule.id, rule.description))

        # Filters
        for language in languages:
            if language.id == rule.language:
                extensions = language.extensions.split('|')
        if extensions is None:
            logging.critical("Rule Language Error")
            sys.exit(0)

        # White list
        white_list = []
        ws = CobraWhiteList.query.filter_by(project_id=self.project_id, rule_id=rule.id, status=1).all()
        if ws is not None:
            for w in ws:
                white_list.append(w.path)

        try:
            if rule.regex_location == "":
                filters = []
                for index, e in enumerate(extensions):
                    if index > 1:
                        filters.append('-o')
                    filters.append('-name')
                    filters.append('*' + e)
                # Find Special Ext Files
                param = [find, self.directory, "-type", "f"] + filters
            else:
                filters = []
                for e in extensions:
                    filters.append('--include=*' + e)
                # explode dirs
                explode_dirs = ['.svn', '.cvs', '.hg', '.git', '.bzr']
                for explode_dir in explode_dirs:
                    filters.append('--exclude-dir={0}'.format(explode_dir))
                # -n Show Line number / -r Recursive / -P Perl regular expression
                param = [grep, "-n", "-r", "-P"] + filters + [rule.regex_location, self.directory]

            # logging.info(' '.join(param))
            p = subprocess.Popen(param, stdout=subprocess.PIPE)
            result = p.communicate()

            # Exists result
            if len(result[0]):
                lines = str(result[0]).strip().split("\n")
                for line in lines:
                    line = line.strip()
                    if line == '':
                        continue
                    if rule.regex_location == '':
                        # Find (special file)
                        file_path = line.strip().replace(self.directory, '')
                        logging.debug('File: {0}'.format(file_path))
                        exist_result = CobraResults.query.filter_by(project_id=self.project_id, rule_id=rule.id, file=file_path).first()
                        if exist_result is not None:
                            # push queue
                            if exist_result.status == 0:
                                try:
                                    q = Queue(self.project_name,
                                              vulnerabilities_all[rule.vul_id]['name'],
                                              vulnerabilities_all[rule.vul_id]['third_v_id'],
                                              file_path, 0, 0, exist_result.id)
                                    q.push()
                                except Exception as e:
                                    traceback.print_exc()
                                    logging.critical(e.message)
                            logging.warning("Exists Result")
                        else:
                            vul = CobraResults(self.task_id, self.project_id, rule.id, file_path, 0, '', 0)
                            db.session.add(vul)
                            try:
                                # push queue
                                q = Queue(self.project_name,
                                          vulnerabilities_all[rule.vul_id]['name'],
                                          vulnerabilities_all[rule.vul_id]['third_v_id'],
                                          file_path, 0, 0, vul.id)
                                q.push()
                            except Exception as e:
                                traceback.print_exc()
                                logging.critical(e.message)
                    else:
                        # Grep
                        line_split = line.split(':', 1)
                        file_path = line_split[0].strip()
                        if len(line_split) < 2:
                            logging.info("Line len < 2 {0}".format(line))
                            continue
                        code_content = line_split[1].split(':', 1)[1].strip()
                        line_number = line_split[1].split(':', 1)[0].strip()
                        if file_path in white_list or ".min.js" in file_path:
                            logging.info("In white list or min.js")
                        else:
                            only_match = rule.regex_location[:1] == '(' and rule.regex_location[-1] == ')'
                            """
                            annotation (comment filtering)
                            # // /* *
                            Exclude:
                            - (rule_location)
                            - When the location rule is wrapped in parentheses, comment lines
                              are not filtered, e.g. hard-coded passwords
                            """
                            match_result = re.match("(#)?(//)?(\*)?(/\*)?", code_content)
                            if match_result.group(0) is not None and match_result.group(0) != "" and only_match is not True:
                                logging.info("In Annotation")
                            else:
                                param_value = None
                                # parse file function structure
                                if only_match:
                                    found_vul = True
                                else:
                                    if file_path[-3:] == 'php' and rule.regex_repair.strip() != '':
                                        try:
                                            parse_instance = parse.Parse(rule.regex_location, file_path, line_number, code_content)
                                            if parse_instance.is_controllable_param():
                                                if parse_instance.is_repair(rule.regex_repair, rule.block_repair):
                                                    logging.info("Static: repaired")
                                                    continue
                                                else:
                                                    if parse_instance.param_value is not None:
                                                        param_value = parse_instance.param_value
                                                    found_vul = True
                                            else:
                                                logging.info("Static: uncontrollable param")
                                                continue
                                        except Exception:
                                            traceback.print_exc()
                                            found_vul = False
                                    else:
                                        found_vul = True
                                file_path = file_path.replace(self.directory, '')
                                if found_vul:
                                    logging.info('In Insert')
                                    exist_result = CobraResults.query.filter_by(project_id=self.project_id, rule_id=rule.id, file=file_path, line=line_number).first()
                                    if exist_result is not None:
                                        logging.info("Exists Result")
                                        # push queue
                                        if exist_result.status == 0:
                                            try:
                                                q = Queue(self.project_name,
                                                          vulnerabilities_all[rule.vul_id]['name'],
                                                          vulnerabilities_all[rule.vul_id]['third_v_id'],
                                                          file_path, line_number, code_content, exist_result.id)
                                                q.push()
                                            except Exception as e:
                                                traceback.print_exc()
                                                logging.critical(e.message)
                                    else:
                                        code_content = code_content.encode('unicode_escape')
                                        if len(code_content) > 512:
                                            code_content = code_content[:500] + '...'
                                        code_content = '# Trigger\r' + code_content
                                        if param_value is not None:
                                            code_content = '# Param\r' + param_value + '\r//\r// ------ Continue... ------\r//\r' + code_content
                                        logging.debug('File: {0}:{1} {2}'.format(file_path, line_number, code_content))
                                        vul = CobraResults(self.task_id, self.project_id, rule.id, file_path, line_number, code_content, 0)
                                        db.session.add(vul)
                                        db.session.commit()
                                        try:
                                            q = Queue(self.project_name,
                                                      vulnerabilities_all[rule.vul_id]['name'],
                                                      vulnerabilities_all[rule.vul_id]['third_v_id'],
                                                      file_path, line_number, code_content, vul.id)
                                            q.push()
                                        except Exception as e:
                                            traceback.print_exc()
                                            logging.critical(e.message)
                                        logging.info('Insert Results Success')
            else:
                logging.info('Not Found')
        except Exception as e:
            traceback.print_exc()
            logging.critical('Error calling grep: ' + str(e))

    # Set End Time For Task
    t = CobraTaskInfo.query.filter_by(id=self.task_id).first()
    t.status = 2
    t.file_count = files['file_nums']
    t.time_end = int(time.time())
    t.time_consume = t.time_end - t.time_start
    t.updated_at = time.strftime('%Y-%m-%d %X', time.localtime())
    try:
        db.session.add(t)
        db.session.commit()
    except Exception as e:
        logging.critical("Set end time failed: " + e.message)
    logging.info("Scan Done")
# "/media/georgen/LOCAL DISK/George_Ndirangu/Learning/py/music-cleaner/test-res/tinker"
]
for i, arg in enumerate(sys.argv):
    if i == 0:
        continue
    directories.append(arg)

list_of_paths = []
completed = []
failed = []

for directory in directories:
    list_of_paths += dloader.load_file_paths(directory)

processing_queue = Queue()
Queue.populate_queue(list_of_paths, processing_queue)

for item in processing_queue.get_items():
    item.update_status(QueueItemStatus.PROCESSING)
    result: Optional[MusicFile] = accoustid.search(item.music_file)
    if isinstance(result, MusicFile):
        item.update_status(QueueItemStatus.SUCCESS)
        on_success(completed, item.music_file)
        processing_queue.remove(item)
    elif isinstance(result, acoustid.WebServiceError):
        logging.warning(
            f"Network error encountered. Will try to handle this another day. For now moving on\n{result}"
        )
    else:
def _run_uploader(input_queue, saver_list, saver_config):
    uploader = ImageUploader(input_queue, saver_list, saver_config)
    uploader.run()


def start_uploader(input_queue):
    assert isinstance(input_queue, Queue), "Start Saver error: queue is invalid"
    saver_list = SAVER_LIST
    saver_config = SAVER_CONFIG
    process_num = SAVER_CONFIG["process"]
    for _ in range(process_num):
        p = Process(target=_run_uploader, args=(input_queue, saver_list, saver_config))
        p.start()


if __name__ == "__main__":
    task_queue = Queue(size=1000)
    picture_queue = Queue(size=1000)
    upload_queue = Queue(size=1000)
    tasks = [TaskInfo("phrasepicture"), TaskInfo("wordpicture"), TaskInfo("iconpicture"), TaskInfo("ninepicture")]
    for task in tasks:
        task.set_num(100000)
        task_queue.put(task)
    start_builder(task_queue, picture_queue)
    start_transformer(picture_queue, upload_queue)
    start_uploader(upload_queue)
class Downloader(object):
    def __init__(self, connection):
        """constructor"""
        self.download_queue = Queue()
        self.current_download = None
        self.download_status = 'not_ready'
        self.connection = connection
        self.dcc_connection = None

    def start(self):
        """start downloader"""
        self.download_status = 'ready'

    def stop(self):
        """stop downloader"""
        self.download_status = 'not_ready'

    def is_ready(self):
        """is it ready for download?"""
        return (self.download_status == 'ready')

    def download(self, manga):
        """add manga to download_queue"""
        print "Adding %s to download queue" % manga.title
        self.download_queue.push(manga)

    def check(self):
        """check for download status and determine action to take"""
        if self.is_ready():
            self.download_next()
        elif (self.download_status == 'requesting_download'
              and self.current_download.expired_request_time < time.time()):
            self.finish_download()

    def download_next(self):
        """pop an item from download_queue and request download"""
        assert(self.is_ready())
        if self.current_download is not None and not self.current_download.isDownloaded():
            # restart download
            manga = self.current_download.manga
        else:
            manga = self.download_queue.pop()
        if (manga is None):
            return
        print "Requesting download of %s" % manga.title
        self.current_download = DownloadItem(manga)
        self.connection.privmsg(manga.bot, "XDCC SEND #%d" % manga.pack_id)
        self.download_status = 'requesting_download'

    def initiate_download(self, file_name, file_size, dcc_connection):
        """initiate a manga download connection"""
        print "Downloading %s" % self.current_download.manga.title
        self.download_status = 'downloading'
        self.dcc_connection = dcc_connection
        self.current_download.initiateDownload(file_name, file_size)

    def receive_data(self, data):
        """receive download data"""
        self.current_download.appendData(data)
        self.dcc_connection.privmsg(struct.pack("!I", self.current_download.received_bytes))

    def finish_download(self):
        """clean up dcc connection and its state"""
        if (self.current_download.isDownloaded()):
            print "Finished downloading %s\n" % self.current_download.manga.title
        else:
            print "Failed downloading %s" % self.current_download.manga.title
        self.current_download.finishDownload()
        self.download_status = 'ready'
        self.dcc_connection = None
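# Illustrative polling loop for the Downloader above; `run_downloader`, the
# one-second interval, and the shape of the `mangas` iterable (records with
# title / bot / pack_id attributes) are assumptions for the sketch, not part of
# the original project.
def run_downloader(connection, mangas):
    downloader = Downloader(connection)
    downloader.start()
    for manga in mangas:
        downloader.download(manga)
    while True:
        # check() either requests the next queued item via XDCC,
        # or times out a stale request so it can be retried
        downloader.check()
        time.sleep(1)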
class Nautilus:
    def __init__(self, url, max_depth):
        self.seed_url = url
        self.result_urls = []
        self.queue = Queue()
        self.max_depth = int(max_depth)
        self.cur_depth = 1
        self.db = DBHelper()
        self.logger = get_logger(__name__)

    def is_file_link(self, url):
        file_url_test = re.compile(r'^.*?\.(pdf|docx|doc|rtf|mobi|azw3|epub)$')
        if file_url_test.match(url.lower()):
            return True
        else:
            return False

    def insert_links(self, file_url, parent_url):
        with requests.get(parent_url, timeout=TIMEOUT) as html_response:
            response_parse = BeautifulSoup(html_response.text, 'html.parser')
            filename = unquote(file_url).split('/').pop()
            [s.extract() for s in response_parse('script')]
            [s.extract() for s in response_parse('style')]
            content = response_parse.body.get_text() \
                .replace(' ', '') \
                .replace('\n', '') \
                .replace('\r', '')
            timestamp = str(int(round(time.time()) * 1000))
            values = (file_url, filename, content, timestamp)
            self.db.insert_item(values)
            self.logger.info('GET: ' + filename + ' AT ' + file_url)

    def resolve_links(self, item, parent_url):
        link = item.get('href')
        url_resolver = Urls(link)
        if url_resolver.check_if_url() \
                and url_resolver.inner_url(self.seed_url):
            prefixed_url = url_resolver.prefix_url(self.seed_url, link)
            if prefixed_url not in self.result_urls:
                if self.is_file_link(prefixed_url):
                    self.insert_links(prefixed_url, parent_url)
                else:
                    self.result_urls.append(prefixed_url)
                    self.queue.enqueue(prefixed_url)
                    self.logger.info('FETCH: ' + prefixed_url)

    def get_url(self):
        for i in range(0, len(self.queue.get())):
            current_url = self.queue.dequeue()
            time.sleep(1.5)
            with requests.get(current_url, timeout=TIMEOUT) as html_response:
                soup = BeautifulSoup(html_response.text, 'html.parser')
                links = soup.find_all('a', {'href': True})
                for item in links:
                    time.sleep(1.5)
                    self.resolve_links(item, current_url)

    def bfs_traverse(self):
        self.queue.enqueue(self.seed_url)
        while self.cur_depth < self.max_depth:
            try:
                self.get_url()
                self.cur_depth += 1
            except Exception as e:
                self.logger.error(str(e))
                pass

    def run(self):
        self.logger.info('START BFS FROM: ' + self.seed_url)
        time.sleep(1)
        self.bfs_traverse()
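# Example entry point for the crawler above, assuming DBHelper, Urls, get_logger
# and TIMEOUT are importable from this project; the seed URL and depth are
# placeholders for illustration only.
if __name__ == '__main__':
    crawler = Nautilus('https://example.com', max_depth=3)
    crawler.run()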
class Session:
    def __init__(self, bot: Client, config: SubRedis, cog: Cog, voice: VoiceChannel, *,
                 log: TextChannel = None, run_forever: bool = False, **session_config):
        """
        Args:
            voice_channel :class:`discord.VoiceChannel`:
                The voice channel the session should start playing in.

        Kwargs:
            run_forever :class:`bool`:
                Determines whether the session should run forever.
            log_channel :class:`discord.TextChannel`:
                Specifies a channel to log playback history.
        """
        self.bot = bot
        self.config = config
        self.cog = cog
        self.voice_channel = voice
        self.log_channel = log
        self.session_config = session_config
        self.queue_config = self.session_config.get('queue')

        self.skip_requests = list()
        self.repeat_requests = list()

        self.voice = None
        self.current_track = None

        if run_forever:
            self.queue = Radio(config, self.queue_config)
        else:
            self.queue = Queue(config, self.queue_config)

        self.volume = self.session_config.get('default_volume') or float(
            self.config.hget("config:defaults", "volume"))

        self.is_playing = True
        self.play_next_song = Event()

        # self.bot.loop.create_task(self.session_task())

    @property
    def listeners(self) -> Generator[Member, None, None]:
        """Members listening to this session.

        A member is classified as a listener if:
            - They are not a bot account
            - They are not deafened

        Returns:
            `generator` of `discord.Member`:
                A generator consisting of members listening to this session.
        """
        for member in self.voice.channel.members:
            if not member.bot and not (member.voice.deaf or member.voice.self_deaf):
                yield member

    def user_has_permission(self, user: Member) -> bool:
        """Checks if a user has permission to interact with this session."""
        if self.session_config.get("requires_role") is not None:
            return self.session_config.get("requires_role") in user.roles
        return True

    def change_volume(self, volume: float) -> None:
        """Changes this session's volume."""
        self.volume = volume
        self.current_track.volume = self.volume

    def toggle_next(self, error=None) -> None:
        """Sets the next track to start playing."""
        if error:
            pass
        self.skip_requests.clear()
        # self.repeat_requests.clear()
        self.bot.loop.call_soon_threadsafe(self.play_next_song.set)

    async def play_track(self):
        """Plays the next track in the queue."""
        if self.log_channel:
            await self.log_channel.send(**self.current_track.playing_message)
        self.voice.play(self.current_track, after=self.toggle_next)

    def stop(self) -> None:
        """Stops this session."""
        self.is_playing = False
        self.voice.stop()

    def check_listeners(self) -> None:
        """Checks if there is anyone listening and pauses / resumes accordingly."""
        if list(self.listeners):
            if self.voice.is_paused():
                self.voice.resume()
        elif self.voice.is_playing():
            self.voice.pause()

    async def session_task(self):
        self.voice: VoiceClient = await self.voice_channel.connect()
        self.voice.session = self

        while self.is_playing:
            self.play_next_song.clear()

            # If there are no more tracks in the queue, exit
            self.current_track = self.queue.next_track()
            if self.current_track is None:
                self.stop()
                break

            # Set volume and play new track
            self.current_track.volume = self.volume
            await self.play_track()
            self.check_listeners()

            # Wait for track to finish before playing next track
            await self.play_next_song.wait()
class DraftInterface(ABC):
    pick_type: t.Type[Pick]
    passing_to: DraftInterface

    class ConnectionException(Exception):
        pass

    def __init__(self, drafter: Drafter, draft: Draft, draft_seat: DraftSeat):
        super().__init__()
        self._drafter = drafter
        self._draft = draft
        self._draft_seat = draft_seat

        self._pool = Cube()

        self._messages: t.List[t.Mapping[str, t.Any]] = []

        self._pick_counter = 0

        self._booster_queue = Queue()
        self._pick_queue = Queue()
        self._out_queue = Queue()

        self._current_booster: t.Optional[DraftBooster] = None

        self._terminating = threading.Event()
        self._booster_pusher = threading.Thread(target=self._draft_loop)

        self._connect_lock = threading.Lock()
        self._consumer: t.Optional[WebsocketConsumer] = None

    @property
    def messages(self) -> t.List[t.Mapping[str, t.Any]]:
        return self._messages

    @property
    def pool(self) -> Cube:
        return self._pool

    def connect(self, consumer: WebsocketConsumer) -> None:
        with self._connect_lock:
            if self._consumer is not None:
                raise self.ConnectionException('already connected')
            self._consumer = consumer

    def disconnect(self) -> None:
        with self._connect_lock:
            if self._consumer is None:
                raise self.ConnectionException('no consumer connected')
            self._consumer = None

    def send_message(self, message_type: str, **kwargs) -> None:
        self.out_queue.put(
            {
                'type': message_type,
                **kwargs,
            }
        )

    def send_error(self, error_type: str, **kwargs):
        self.send_message('error', error_type=error_type, **kwargs)

    @property
    def booster_queue(self) -> Queue[DraftBooster]:
        return self._booster_queue

    @property
    def booster_amount(self) -> int:
        return self._booster_queue.qsize() + (1 if self._current_booster else 0)

    def give_booster(self, booster: DraftBooster) -> None:
        self._booster_queue.put(booster)
        self._draft.broadcast_message(
            'booster_amount_update',
            drafter=self._drafter.user.pk,
            queue_size=self.booster_amount,
        )

    @property
    def pick_queue(self) -> Queue[Cubeable]:
        return self._pick_queue

    @property
    def out_queue(self):
        return self._out_queue

    def receive_message(self, message: t.Any) -> None:
        message_type = message.get('type')

        if message_type == 'pick':
            pick = message.get('pick')
            if pick is None:
                self.send_error('empty_pick')
                return

            try:
                pick = RawStrategy(db).deserialize(self.pick_type, pick)
            except SerializationException:
                self.send_error('misconstrued_pick')
                return

            self._pick_queue.put(pick)

        else:
            self.send_error('unknown_message_type', message_type=message_type)

    def start(self) -> None:
        self.send_message(
            'started',
            **self._draft.serialize(),
        )
        self._booster_pusher.start()

    def stop(self) -> None:
        self._terminating.set()

    @abstractmethod
    def perform_pick(self, pick: Pick) -> bool:
        pass

    def _draft_loop(self) -> None:
        while not self._terminating.is_set():
            try:
                booster = self._booster_queue.get(timeout=2)
            except Empty:
                continue

            self._current_booster = booster
            self.send_message('booster', booster=RawStrategy.serialize(self._current_booster))

            while not self._terminating.is_set():
                try:
                    pick = self._pick_queue.get(timeout=2)
                except Empty:
                    continue

                if not self.perform_pick(pick):
                    self.send_error(
                        'invalid_pick',
                        pick=pick.serialize(),
                    )
                    continue

                self._pick_counter += 1

                self.send_message(
                    'pick',
                    pick=pick.serialize(),
                    booster=RawStrategy.serialize(self._current_booster),
                    pick_number=self._pick_counter,
                )

                DraftPick.objects.create(
                    seat=self._draft_seat,
                    pack_number=self._draft.pack_counter,
                    pick_number=self._current_booster.pick_number,
                    global_pick_number=self._pick_counter - 1,
                    pack=self._current_booster,
                    pick=pick,
                )

                self._current_booster.pick_number += 1

                if self._current_booster.cubeables:
                    self.passing_to.give_booster(self._current_booster)
                else:
                    self._draft.booster_empty(self._current_booster)

                self._current_booster = None

                self._draft.broadcast_message(
                    'booster_amount_update',
                    drafter=self._drafter.user.pk,
                    queue_size=self.booster_amount,
                )

                break
class EgyptianRatScrew(Game):
    # this sets up the environment for the game
    def setEnv(self):
        self.declareGame("Egyptian Rat Screw")
        self.declareRules(ERSRules().getAllRules())
        self.createDeck(Deck(Deck.getDefaultDeck()['ranks'], Deck.getDefaultDeck()['suits']))
        self.initializeMachine(ERSMachine(self))
        self.slapQueue = Queue()

    # this creates the players
    def setPlayers(self, playersStr):
        self.createPlayers(playersStr)

    def begin(self):
        self.setCurrentPlayer()
        self.dealAllCards()

    def play(self):
        self.cardPlayed = self.currentPlayer.playTopCard()
        self.machine.currentState.check(self.cardPlayed)

    def serviceSlap(self):
        print("")
        print("")
        print(self.slapQueue.peek().getName() + " has slapped the pile!")
        print(self.displayMessage)
        # take all cards from the pile and put them in the player's hand
        self.slapQueue.peek().getHand().addCards(self.pile.removeAll())
        print(self.slapQueue.peek().getName() + " has " + str(self.slapQueue.peek().getHand().size()) + " cards in hand.")
        print("")
        print("")
        # remove all elements from the slap queue
        while (self.slapQueue.notEmpty()):
            self.slapQueue.dequeue()

    def build(self, game):
        print("____Welcome to Egyptian Rats Crew____")
        print("")
        print("")
        names = ["Hanna", "Deepthi"]
        game.setPlayers(names)
        game.setEnv()
        game.begin()
        while (not isinstance(game.machine.currentState, self.machine.Win)):
            game.play()
            if (isinstance(self.machine.currentState, self.machine.Slappable)):
                slapFreq = random.randint(1, 10)
                if (slapFreq > 2):
                    slap = random.randint(0, 1)
                    if (slap == 0):
                        game.slapQueue.enqueue(game.players[0])
                    else:
                        game.slapQueue.enqueue(game.players[1])
            if (game.slapQueue.notEmpty()):
                game.serviceSlap()