def set_player_ranks(self):
    """Compute and store a hand rank for every player still in the hand,
    logging each player's rank via utils.out."""
    for contender in self.players:
        if not contender.in_hand:
            continue
        full_hand = contender.hand.read_as_list() + self.communal_cards
        contender.rank = hand_rank.get_rank(full_hand)
        utils.out('%s(%s) has %s' % (contender.name,
                                     contender.hand.read_out(),
                                     contender.rank._to_string()),
                  self.debug_level)
def update_player_with_bet(self, player, bet_size):
    """Record *player* opening the betting for *bet_size* chips.

    Updates the table's current bet, moves chips from the player's
    stack into the pot, and logs the action.
    """
    self.bet = bet_size
    wager = self.bet
    player.curr_bet += wager
    player.chips -= wager
    self.pot += wager
    message = "%s(%d) bets %d. Pot is %d" % (player.name, player.chips,
                                             wager, self.pot)
    utils.out(message, self.debug_level)
def __init__(self):
    """Parse command-line arguments, set logging verbosity, and start the GUI.

    Reads sys.argv, stores the parsed arguments on self.args, and hands
    the remaining file arguments to gui.init together with the config.
    """
    # NOTE: the original computed os.path.dirname(sys.argv[0]) into an
    # unused local (progDir); that dead statement has been removed.
    (self.args, files) = self.parseArgs(sys.argv[1:])
    out.level = self.args.verbosity
    out(out.DEBUG, 'Parsed arguments: %s', self.args)
    self.config = CONFIG
    gui.init(self.config, files)
def initiate_round(self, dealer_seat):
    """Start a new hand: deal every player in, reset their per-hand state,
    and post the small and big blinds relative to *dealer_seat*."""
    # Reset per-hand state and deal two cards to each player.
    for player in self.players:
        player.draw_hand(self.deck)
        player.in_hand = True
        player.curr_bet = 0
        player.all_in = False
        player.has_acted = False
        player.sidepot = None
    # The pot starts with both blinds; the current bet to match is the big blind.
    self.pot = self.big_blind + self.small_blind
    self.bet = self.big_blind
    # Small blind sits immediately after the dealer.
    self.small_blind_seat = self.get_next_seat(dealer_seat)
    self.players[self.small_blind_seat].chips -= self.small_blind
    self.players[self.small_blind_seat].curr_bet = self.small_blind
    utils.out(
        '%s(%d) posts small blind of %d.' %
        (self.players[self.small_blind_seat].name,
         self.players[self.small_blind_seat].chips,
         self.small_blind), self.debug_level)
    # Big blind sits immediately after the small blind.
    # NOTE(review): no check that a player can actually afford a blind —
    # stacks can go negative here; confirm whether callers guarantee this.
    big_blind_seat = self.get_next_seat(self.small_blind_seat)
    self.players[big_blind_seat].chips -= self.big_blind
    self.players[big_blind_seat].curr_bet = self.big_blind
    utils.out(
        '%s(%d) posts big blind of %d.' %
        (self.players[big_blind_seat].name,
         self.players[big_blind_seat].chips,
         self.big_blind), self.debug_level)
def __init__(self, input_stream, output_stream, w=-1, h=-1, fps=-1,
             frames=-1, force_gray=False, repetitions=1, options=None,
             resume=False, reset_stream_when_resuming=False):
    """Set up a processing worker over an input/output stream pair.

    NOTE(review): although *options* defaults to None, it is indexed
    unconditionally below (options['rho'], ...), so callers must always
    pass a dict — confirm and consider making it a required argument.
    """
    self.input_stream = input_stream
    self.output_stream = output_stream
    self.repetitions = repetitions
    # Private progress/timing state.
    self.__completed_repetitions = 0
    self.__start_time = None
    self.__elapsed_time = None
    self.__rho = options['rho']
    self.steps = 0.0
    self.measured_fps = 0.0
    self.save_scores_only = options['save_scores_only']
    # The stream is shared with the feature extractor through the options dict.
    options['stream'] = self.input_stream
    self.input_stream.set_options(w, h, fps, force_gray, frames)
    self.fe = FeatureExtractor(
        w, h, options, resume)  # here is the TensorFlow based feature extractor!
    self.blink_steps = []
    if resume:
        out("RESUMING...")
        self.load(reset_stream_when_resuming)
def send(self, socket='None'):
    """Send this message's payload over *socket* (or the socket set earlier).

    Raises ValueError if no socket is available.

    NOTE: the default is the *string* 'None' (kept for backward
    compatibility with existing callers); a real None argument is now
    treated the same way instead of clobbering self.socket.
    """
    if socket not in (None, 'None'):
        self.socket = socket
    if not self.socket:
        raise ValueError('Can not send message: socket is not set.')
    out('Message will be sent:', self)
    self.socket.write(self.to_send)
def initiate_round(self, dealer_seat):
    """Begin a hand: deal all players in, reset per-hand flags, and post
    the small and big blinds clockwise from *dealer_seat*."""
    # Deal and reset every player's per-hand bookkeeping.
    for player in self.players:
        player.draw_hand(self.deck)
        player.in_hand = True
        player.curr_bet = 0
        player.all_in = False
        player.has_acted = False
        player.sidepot = None
    # Pot is seeded with both blinds; the amount to match is the big blind.
    self.pot = self.big_blind + self.small_blind
    self.bet = self.big_blind
    self.small_blind_seat = self.get_next_seat(dealer_seat)
    self.players[self.small_blind_seat].chips -= self.small_blind
    self.players[self.small_blind_seat].curr_bet = self.small_blind
    utils.out('%s(%d) posts small blind of %d.' % (
        self.players[self.small_blind_seat].name,
        self.players[self.small_blind_seat].chips,
        self.small_blind), self.debug_level)
    # Big blind is the next seat after the small blind.
    # NOTE(review): blinds are deducted without checking stack size — a
    # short stack can go negative here; confirm callers prevent this.
    big_blind_seat = self.get_next_seat(self.small_blind_seat)
    self.players[big_blind_seat].chips -= self.big_blind
    self.players[big_blind_seat].curr_bet = self.big_blind
    utils.out('%s(%d) posts big blind of %d.' % (
        self.players[big_blind_seat].name,
        self.players[big_blind_seat].chips,
        self.big_blind), self.debug_level)
def test_reward_packages(poco, dev, row0, row1):
    """Tap every reward button in day columns row0..row1 (inclusive),
    dismissing the reward-description popup after each tap.
    Returns False if the rewards window never appears."""
    # ## TODO: extract this into a separate function
    i = row0
    while i <= row1:
        time.sleep(1)
        # Re-fetch the window every iteration — the scene refreshes constantly.
        node = wait_for_node_visible(poco, dr_wnd, 5)
        if not node.exists():
            out('Window not showed', dr_wnd)
            return False
        node = get_node_child(node, content)
        lst_days = node.child()
        # In general, all elements here have to be fetched anew each pass.
        lst_rewards = lst_days[i].child('Rewards')
        ii = 0
        for btn in lst_rewards.child():
            out('touch reward[' + str(i) + '][' + str(ii) + ']')
            touch_center(dev, btn)
            time.sleep(1)
            # If a description popup appeared, close it with the back button.
            node = wait_for_node_visible(poco, back_descr_bg, 0)
            if node.exists():
                touch_center(dev, wait_for_node_visible(poco, back_btn, 5))
            ii = ii + 1
        i = i + 1
    return True
def update_player_with_bet(self, player, bet_size):
    """Record *player* betting *bet_size* chips: set the table's current
    bet, move the chips from the player's stack to the pot, and log it."""
    self.bet = bet_size
    player.curr_bet += self.bet
    player.chips -= self.bet
    self.pot += self.bet
    utils.out(
        "%s(%d) bets %d. Pot is %d" %
        (player.name, player.chips, self.bet, self.pot), self.debug_level)
def print_output(name, src, toStdErr):
    """Pump lines from stream *src* to the output until the module-level
    `finished` flag is set, then close the stream.

    The stream may be closed concurrently by another thread, so I/O
    errors terminate the pump quietly instead of propagating.
    """
    if src is None:
        return
    try:
        while not finished:
            out(toStdErr, src.readline())
            flush(toStdErr)
    except (IOError, OSError, ValueError):
        # Narrowed from a bare `except:` — only stream/IO failures are
        # expected here (ValueError covers reads on a closed file).
        pass
    finally:
        # Was inside the try block, so a read error could leak the stream.
        src.close()
def set_player_ranks(self):
    """Assign a hand rank to every player still in the hand, combining
    each player's hole cards with the communal cards, and log it."""
    for player in self.players:
        if player.in_hand:
            player.rank = hand_rank.get_rank(player.hand.read_as_list() +
                                             self.communal_cards)
            utils.out(
                '%s(%s) has %s' %
                (player.name, player.hand.read_out(),
                 player.rank._to_string()), self.debug_level)
def delete(self, dryrun=True):
    """Delete self.filepath on the FTP server.

    When *dryrun* is true (the default) only log what would be deleted.
    Errors from the server are logged, not raised.
    """
    if dryrun:
        out(u"dry run: delete %s" % self.filepath)
    else:
        path = self.filepath.encode("utf-8")
        try:
            self.ftp.delete(path)
        # Fixed py2-only `except Exception, err` — the `as` form is
        # valid on Python 2.6+ and required on Python 3.
        except Exception as err:
            out(u"Error deleting %s: %s" % (repr(path), err))
def print_vocabulary(name, vocab):
    """Log a vocabulary's name, size and values (special tokens first)."""
    special = {START, STOP}
    specials = sorted(v for v in vocab.values if v in special)
    regular = sorted(v for v in vocab.values if v not in special)
    out(args.logfile,
        "{}({:,}): {}".format(name, vocab.size, specials + regular))
def update_player_with_raise(self, player, raise_increase):
    """Record *player* raising the current bet by *raise_increase*.

    The player first matches the outstanding bet, then adds the raise;
    table bet, pot and raise counters are updated accordingly.
    """
    amount_to_call = self.bet - player.curr_bet
    total_in = amount_to_call + raise_increase
    player.chips -= total_in
    self.bet += raise_increase
    player.curr_bet = self.bet
    self.pot += total_in
    self.curr_raise += raise_increase
    utils.out("%s(%d) raises to %d. Pot is %d" % (player.name, player.chips,
                                                  self.bet, self.pot),
              self.debug_level)
def ava(configLoc):
    """Compile and run the Java project described by the config at *configLoc*.

    Invokes javac with the configured classpath/dest/sources; aborts if
    javac produced any stderr output, otherwise runs the configured main
    class, colorizing lines that look like Java exception traces and
    logging all program output.
    """
    projectConfig = ProjectConfig.readProjectConfigs(configLoc)
    dest = projectConfig[utils.PROJECT][utils.DEST]
    # Classpath = dest plus every configured classpath entry, ':'-joined.
    cp = dest
    for path in projectConfig[utils.PROJECT][utils.CP]:
        cp += ":" + path
    compileFiles = projectConfig[utils.PROJECT][utils.COMPILE]
    javac = ["javac", "-cp", cp, "-d", dest, *compileFiles]
    utils.out(utils.LINE_H, "ava: ", utils.CMD, " ".join(javac), softest=utils.Q)
    # Any line on javac's stderr counts as a compile error.
    errLines = 0
    for line in utils.execute(javac, stdout=None, stderr=SubProcess.PIPE):
        utils.out(utils.LINE_H, "javac: ", utils.ERR, line, end="", softest=utils.S)
        errLines += 1
    if errLines == 0:
        utils.out(utils.LINE_H, "ava: ", utils.AFFIRM,
                  "Compiled " + ", ".join([os.path.basename(file) for file in compileFiles]) +
                  " without any errors", softest=utils.Q)
    else:
        utils.exit()
    java = ["java", "-cp", cp, projectConfig[utils.PROJECT][utils.RUN]]
    javaString = " ".join(java)
    utils.out(utils.LINE_H, "ava: ", utils.CMD, javaString, softest=utils.Q)
    # Matches text ending in a Java exception message plus "at ..." frames.
    exceptionMatcher = re.compile(r"^.*Exception[^\n]+(\s+at [^\n]+)*\s*\Z",
                                  re.MULTILINE | re.DOTALL)
    runningLine = ""
    utils.openLog(configLoc, projectConfig[utils.PROJECT][utils.HOME], javaString)
    for line in utils.execute(java, stderr=SubProcess.STDOUT):
        # Accumulate output so multi-line stack traces keep matching.
        runningLine += line
        outputColor = utils.ERR if exceptionMatcher.match(runningLine) else utils.OUT
        utils.out(utils.LINE_H, "java: ", outputColor, line, end="", softest=utils.S)
        utils.log(line)
    utils.closeLog()
    utils.exit()
def findProjectConfigFile(name):
    """Walk up from the current directory looking for config file *name*;
    return its relative path, or report and exit if the root is reached."""
    where = '.'
    while not os.path.isfile(where + '/' + name):
        if where == '/':
            # Reached the filesystem root without finding the file.
            utils.out(utils.LINE_H, "ava: ", utils.ERR,
                      "Project configuration file (", name,
                      ") not found in parent directory", softest=utils.Q)
            utils.out(utils.LINE_H, "ava: ", utils.ERR, "Run with ",
                      utils.CMD, "--make-project-config", utils.ERR, " or ",
                      utils.CMD, "-m", utils.ERR,
                      " to create a project configuration file",
                      softest=utils.Q)
            utils.exit()
        where = os.path.abspath(os.path.join(where, os.pardir))
    return os.path.relpath(where + '/' + name)
def update_player_with_raise(self, player, raise_increase):
    """Record *player* raising by *raise_increase*: the player pays the
    outstanding call amount plus the raise; table bet, pot and the
    cumulative raise counter are updated, and the action is logged."""
    amount_to_call = self.bet - player.curr_bet
    player.chips -= amount_to_call + raise_increase
    self.bet += raise_increase
    player.curr_bet = self.bet
    self.pot += amount_to_call + raise_increase
    self.curr_raise += raise_increase
    utils.out(
        "%s(%d) raises to %d. Pot is %d" %
        (player.name, player.chips, self.bet, self.pot), self.debug_level)
def main():
    """For each artist from the bot query, look up its Wikipedia page,
    extract a VIAF identifier, verify the VIAF record still exists, and
    add the URL relationship in MusicBrainz; track progress in
    bot_wp_artist_viaf."""
    seen = set()
    matched = set()
    for artist in db.execute(query):
        # Skip artists already matched earlier in this run.
        if artist['gid'] in matched:
            continue
        colored_out(
            bcolors.OKBLUE,
            'Looking up artist "%s" http://musicbrainz.org/artist/%s' %
            (artist['name'], artist['gid']))
        out(' * wiki:', artist['wp_url'])
        page = WikiPage.fetch(artist['wp_url'], False)
        identifiers = determine_authority_identifiers(page)
        if 'VIAF' in identifiers:
            # A non-string value means several VIAF ids were found (py2 basestring).
            if not isinstance(identifiers['VIAF'], basestring):
                colored_out(
                    bcolors.FAIL,
                    ' * multiple VIAF found: %s' % ', '.join(identifiers['VIAF']))
            elif identifiers['VIAF'] == '' or identifiers['VIAF'] is None:
                colored_out(bcolors.FAIL, ' * invalid empty VIAF found')
            else:
                viaf_url = 'http://viaf.org/viaf/%s' % identifiers['VIAF']
                edit_note = 'From %s' % (artist['wp_url'], )
                colored_out(bcolors.OKGREEN, ' * found VIAF:', viaf_url)
                # Check if this VIAF has not been deleted
                skip = False
                try:
                    resp, content = httplib2.Http().request(viaf_url)
                except socket.error:
                    colored_out(bcolors.FAIL, ' * timeout!')
                    skip = True
                deleted_message = 'abandonedViafRecord'
                # NOTE(review): httplib2 response status is usually an int;
                # comparing to the string '404' looks suspicious — confirm.
                if skip == False and (resp.status == '404' or deleted_message in content):
                    colored_out(bcolors.FAIL, ' * deleted VIAF!')
                    skip = True
                if skip == False:
                    # Throttle before submitting the edit.
                    time.sleep(3)
                    out(' * edit note:', edit_note.replace('\n', ' '))
                    mb.add_url('artist', artist['gid'],
                               str(VIAF_RELATIONSHIP_TYPES['artist']),
                               viaf_url, edit_note)
                    matched.add(artist['gid'])
        # Record first-time processing vs. re-processing in the bot table.
        if artist['processed'] is None and artist['gid'] not in seen:
            db.execute(
                "INSERT INTO bot_wp_artist_viaf (gid, lang) VALUES (%s, %s)",
                (artist['gid'], page.lang))
        else:
            db.execute(
                "UPDATE bot_wp_artist_viaf SET processed = now() WHERE (gid, lang) = (%s, %s)",
                (artist['gid'], page.lang))
        seen.add(artist['gid'])
def main(ENTITY_TYPE):
    """Find MusicBrainz entities of *ENTITY_TYPE* that have a Wikipedia
    link but no WikiData link, resolve the WikiData id from the Wikipedia
    page, and submit the URL relationship; progress is tracked in
    bot_wp_wikidata_links."""
    entity_type_table = ENTITY_TYPE.replace('-', '_')
    # For 'work' the URL relationship table/columns are reversed (url first).
    url_relationship_table = 'l_%s_url' % entity_type_table if ENTITY_TYPE != 'work' else 'l_url_%s' % entity_type_table
    main_entity_entity_point = "entity0" if ENTITY_TYPE != 'work' else "entity1"
    url_entity_point = "entity1" if ENTITY_TYPE != 'work' else "entity0"
    query = """
WITH entities_wo_wikidata AS (
    SELECT DISTINCT e.id AS entity_id, e.gid AS entity_gid, u.url AS wp_url,
        substring(u.url from '//(([a-z]|-)+)\\.') as wp_lang
    FROM """ + entity_type_table + """ e
    JOIN """ + url_relationship_table + """ l ON l.""" + main_entity_entity_point + """ = e.id AND l.link IN (SELECT id FROM link WHERE link_type = """ + str(WIKIPEDIA_RELATIONSHIP_TYPES[ENTITY_TYPE]) + """)
    JOIN url u ON u.id = l.""" + url_entity_point + """ AND u.url LIKE 'http://%%.wikipedia.org/wiki/%%'
    WHERE
        /* No existing WikiData relationship for this entity */
        NOT EXISTS (SELECT 1 FROM """ + url_relationship_table + """ ol WHERE ol.""" + main_entity_entity_point + """ = e.id AND ol.link IN (SELECT id FROM link WHERE link_type = """ + str(WIKIDATA_RELATIONSHIP_TYPES[ENTITY_TYPE]) + """))
        /* WP link should only be linked to this entity */
        AND NOT EXISTS (SELECT 1 FROM """ + url_relationship_table + """ ol WHERE ol.""" + url_entity_point + """ = u.id AND ol.""" + main_entity_entity_point + """ <> e.id)
        AND l.edits_pending = 0
)
SELECT e.id, e.gid, e.name, ewf.wp_url, b.processed
FROM entities_wo_wikidata ewf
JOIN """ + entity_type_table + """ e ON ewf.entity_id = e.id
LEFT JOIN bot_wp_wikidata_links b ON e.gid = b.gid AND b.lang = ewf.wp_lang
ORDER BY b.processed NULLS FIRST, e.id
LIMIT 500
"""
    seen = set()
    matched = set()
    for entity in db.execute(query):
        # Skip entities already matched earlier in this run.
        if entity['gid'] in matched:
            continue
        colored_out(
            bcolors.OKBLUE,
            'Looking up entity "%s" http://musicbrainz.org/%s/%s' %
            (entity['name'], ENTITY_TYPE, entity['gid']))
        out(' * wiki:', entity['wp_url'])
        page = WikiPage.fetch(entity['wp_url'], False)
        if page.wikidata_id:
            wikidata_url = 'http://www.wikidata.org/wiki/%s' % page.wikidata_id.upper()
            edit_note = 'From %s' % (entity['wp_url'],)
            colored_out(bcolors.OKGREEN, ' * found WikiData identifier:', wikidata_url)
            # Throttle before submitting the edit.
            time.sleep(1)
            out(' * edit note:', edit_note.replace('\n', ' '))
            mb.add_url(ENTITY_TYPE.replace('-', '_'), entity['gid'],
                       str(WIKIDATA_RELATIONSHIP_TYPES[ENTITY_TYPE]),
                       wikidata_url, edit_note, True)
            matched.add(entity['gid'])
        # Record first-time processing vs. re-processing in the bot table.
        if entity['processed'] is None and entity['gid'] not in seen:
            db.execute("INSERT INTO bot_wp_wikidata_links (gid, lang) VALUES (%s, %s)",
                       (entity['gid'], page.lang))
        else:
            db.execute("UPDATE bot_wp_wikidata_links SET processed = now() WHERE (gid, lang) = (%s, %s)",
                       (entity['gid'], page.lang))
        seen.add(entity['gid'])
    stats['seen'][ENTITY_TYPE] = len(seen)
    stats['matched'][ENTITY_TYPE] = len(matched)
def amazon_lookup_asin(url):
    """Look up the ASIN extracted from *url* via the Amazon product API.

    Returns the API response root, or None if the parameters were invalid.
    The per-locale API client is cached in the module-level amazon_api dict.
    """
    params = {"ResponseGroup": "ItemAttributes,Medium,Images", "IdType": "ASIN"}
    loc = amazon_url_loc(url)
    asin = amazon_url_asin(url)
    if loc not in amazon_api:
        amazon_api[loc] = amazonproduct.API(locale=loc)
    try:
        root = amazon_api[loc].item_lookup(asin, **params)
    # Fixed py2-only `except E, e` syntax (the `as` form works on 2.6+/3.x).
    except amazonproduct.errors.InvalidParameterValue as e:
        out(e)
        return None
    # BUG FIX: the original fell off the end and returned None even on
    # success, discarding the lookup result.
    return root
def wait_for_button_and_touch(dev, poco, path):
    """Wait up to 5s for the node at *path*, then tap its center.

    Returns False (after logging) if the node never appears.
    """
    btn = wait_for_node_visible(poco, path, 5)
    if not btn.exists():
        out('Can\'t press button', path)
        return False
    # Tap the button and give the UI a moment to react.
    touch_center(dev, btn)
    time.sleep(1)
    return True
def makeProjectConfigFile(loc):
    """Write a fresh project configuration file at *loc* populated from
    utils.PROJECT_DEFAULTS (valueless entries become bare keys)."""
    utils.out(utils.LINE_H, "ava: ", utils.STD_OUT,
              "Creating project configuration file at ", loc,
              softest=utils.Q)
    parser = utils.getConfigParser()
    for section, defaults in utils.PROJECT_DEFAULTS.items():
        parser.add_section(section)
        for key, val in defaults.items():
            if val is None:
                parser.set(section, key)
            else:
                parser.set(section, key, val)
    with open(loc, 'w') as handle:
        parser.write(handle)
def act(self, obj):
    """Perform this message's action on *obj*.

    Raises ValueError for error-type messages; returns True when a
    command action was found and invoked on *obj*, False otherwise.
    """
    if self.type == MS_TYPE_ERROR:
        error = self.params['error']
        mess = 'JS ERROR (in %s.%s): %s'%(error['cl'], error['func'], error['text'])
        raise ValueError(mess)
    if self.type != MS_TYPE_COMMAND or not self.action:
        return False
    # getattr is the idiomatic form of obj.__getattribute__(self.action).
    function = getattr(obj, self.action)
    if function:
        out('Action "%s" from "%s" will be performed (command).'%(self.action, obj.name))
        function(self)
        return True
    return False
def test_swipe_reward_packages(poco, row_from, row_to):
    """Drag the rewards list from column *row_from* to column *row_to*.

    Returns False (after logging) if the rewards window is not visible.
    """
    wnd = wait_for_node_visible(poco, dr_wnd, 5)
    if not wnd.exists():
        out('Window not showed', dr_wnd)
        return False
    body = get_node_child(wnd, content)
    lst_days = body.child()
    out('свайпаем экран с колонки ' + str(row_from + 1) +
        ' до колонки ' + str(row_to + 1))
    lst_days[row_from].drag_to(lst_days[row_to])
    return True
def get_sidepot(self, player):
    """Return the size of *player*'s sidepot: the pot as it stood before
    this betting round, plus the player's bet, plus what each opponent
    contributes (capped at the player's own bet)."""
    # BUG FIX: the generator variable used to shadow the *player* parameter;
    # renamed for clarity (a genexp scope hid the bug, but it read wrong).
    bets_this_round = sum(p.curr_bet for p in self.players)
    pot_before_bet = self.pot - bets_this_round
    sidepot = pot_before_bet + player.curr_bet
    for other in self.players:
        #To do: players cannot have the same name
        if other.name != player.name:
            # Each opponent matches the player's bet at most.
            sidepot += min(other.curr_bet, player.curr_bet)
    utils.out('%s has a sidepot of %d' % (player.name, sidepot),
              self.debug_level)
    return sidepot
def makeToolConfigFile(configMap):
    """Write the tool configuration file at utils.TOOL_CONFIG_PATH from
    *configMap* (valueless entries become bare keys)."""
    utils.out(utils.LINE_H, "ava: ", utils.STD_OUT,
              "Creating tool configuration file at ", utils.TOOL_CONFIG_PATH,
              softest=utils.Q)
    config = utils.getConfigParser()
    for configLabel, default in configMap.items():
        config.add_section(configLabel)
        for param, value in default.items():
            if value is None:
                config.set(configLabel, param)
            else:
                config.set(configLabel, param, str(value))
    # FIX: use a context manager (as makeProjectConfigFile already does) so
    # the handle is closed even if config.write raises.
    with open(utils.TOOL_CONFIG_PATH, 'w') as configFile:
        config.write(configFile)
def run_test(args):
    """Run inference on the test split with a saved static-graph model.

    Loads data and vocabularies, rebuilds the network graph, restores the
    inference model from args.model_path_base, and writes one line of
    predictions per batch to args.logfile.
    """
    log = args.logfile
    trainer_count = fluid.dygraph.parallel.Env().nranks
    # Multi-trainer runs pin each trainer to its own device id.
    place = fluid.CUDAPlace(fluid.dygraph.parallel.Env().dev_id()
                            ) if trainer_count > 1 else fluid.CUDAPlace(0)
    print("Loading data...")
    train_data, val_data = load_train_data()
    test_data = load_test_data()
    print("Loading model...")
    # Vocabularies are rebuilt from training data so indices match the model.
    seq_vocab, bracket_vocab, mixture_vocab = process_vocabulary(args,
                                                                 train_data,
                                                                 quiet=True)
    network = Network(
        seq_vocab,
        bracket_vocab,
        mixture_vocab,
        dmodel=args.dmodel,
        layers=args.layers,
        dropout=0,  # no dropout at inference time
    )
    exe = fluid.Executor(place)
    paddle.enable_static()
    fluid.io.load_inference_model(args.model_path_base, exe)
    test_reader = fluid.io.batch(reader_creator(args, test_data, seq_vocab,
                                                bracket_vocab, mixture_vocab,
                                                test=True),
                                 batch_size=args.batch_size)
    # LoD level 1: variable-length sequences per sample.
    seq = fluid.data(name="seq", shape=[None], dtype="int64", lod_level=1)
    dot = fluid.data(name="dot", shape=[None], dtype="int64", lod_level=1)
    mix = fluid.data(name="mix", shape=[None], dtype="int64", lod_level=1)
    predictions = network(seq, dot, mix)
    main_program = fluid.default_main_program()
    test_program = main_program.clone(for_test=True)
    test_feeder = fluid.DataFeeder(place=place, feed_list=[seq, dot, mix])
    test_results = []
    for data in test_reader():
        pred, = exe.run(test_program,
                        feed=test_feeder.feed(data),
                        fetch_list=[predictions.name],
                        return_numpy=False)
        pred = list(np.array(pred))
        test_results.append(pred)
        # One whitespace-separated line of predictions per batch.
        out(log, " ".join([str(x) for x in pred]))
def process_vocabulary(args, data, quiet=False):
    """
    Creates and returns vocabulary objects for sequences, brackets and
    mixtures. Only iterates through the first 100 entries of *data*, to
    save computation (assumes they cover the full alphabet).
    """
    if not quiet:
        # FIX: corrected "vacabularies" typo in the emitted log message.
        out(args.logfile, "initializing vocabularies... ", end="")
    seq_vocab = vocabulary.Vocabulary()
    bracket_vocab = vocabulary.Vocabulary()
    mixture_vocab = vocabulary.Vocabulary()
    # Every vocabulary carries the START/STOP sentinels.
    for vocab in [seq_vocab, bracket_vocab, mixture_vocab]:
        vocab.index(START)
        vocab.index(STOP)
    for x in data[:100]:
        seq = x["sequence"]
        dot = x["structure"]
        mixture = x['mixture']
        for character in seq:
            seq_vocab.index(character)
        for character in dot:
            bracket_vocab.index(character)
        for character in mixture:
            mixture_vocab.index(character)
    for vocab in [seq_vocab, bracket_vocab, mixture_vocab]:
        vocab.freeze()
    if not quiet:
        out(args.logfile, "done.")

    def print_vocabulary(name, vocab):
        # Log size and values, listing the special tokens first.
        special = {START, STOP}
        out(
            args.logfile, "{}({:,}): {}".format(
                name, vocab.size,
                sorted(value for value in vocab.values if value in special) +
                sorted(value for value in vocab.values if value not in special)))

    if not quiet:
        print_vocabulary("Sequence", seq_vocab)
        print_vocabulary("Brackets", bracket_vocab)
        print_vocabulary("Mixture", mixture_vocab)
    return seq_vocab, bracket_vocab, mixture_vocab
def go_to_academia(dev, poco):
    """Tap the Academy button on the map, wait until the TOWN state is
    reached (collecting mail popups along the way), then tap Back.
    Returns False if a required button never appears."""
    out('нажимаем кнопку Академия')
    node = wait_for_node_visible(poco, btn_academy_on_map, 5)
    if not node.exists():
        out('Button not showed', btn_academy_on_map)
        return False
    touch_center(dev, node)
    time.sleep(1)
    state = find_current_state(poco)
    while state != State.TOWN:
        out('ждём академию (3 sec)')
        time.sleep(3)
        state = find_current_state(poco)
        # TODO: sometimes a "team salary" message appears here —
        # catch that moment later and write a test for it.
        # If there is mail, collect it.
        node = wait_for_node_visible(poco, btn_mail, 1)
        if node.exists():
            touch_center(dev, node)
            time.sleep(1)
    node = wait_for_node_visible(poco, btn_back, 5)
    if not node.exists():
        out('Button not showed', btn_back)
        return False
    touch_center(dev, node)
    time.sleep(1)
    return True
def repairToolConfigFile():
    """Repair the tool configuration file in place, recreating it from
    utils.TOOL_DEFAULTS if missing and filling in any absent sections or
    parameters, then report how much was repaired."""
    if not os.path.exists(utils.TOOL_CONFIG_PATH):
        # No file at all: just create one from the defaults.
        makeToolConfigFile(utils.TOOL_DEFAULTS)
        return
    utils.out(utils.LINE_H, "ava: ", utils.STD_OUT,
              "Repairing tool configuration file at ", utils.TOOL_CONFIG_PATH,
              softest=utils.Q)
    config = utils.getConfigParser()
    config.read(utils.TOOL_CONFIG_PATH)
    repairedSections = 0
    repairedParams = 0
    for configLabel, default in utils.TOOL_DEFAULTS.items():
        if configLabel not in config.sections():
            # Entire section missing: restore it wholesale from defaults.
            config[configLabel] = {param: str(value)
                                   for param, value in default.items()
                                   }
            utils.out(utils.LINE_H, "ava: ", utils.STD_OUT,
                      "Repairing section '" + configLabel +
                      "' and all parameters using default values",
                      softest=utils.N)
            repairedSections += 1
            # Count only parameters with real values toward the repair total.
            # NOTE(review): unlike the project-config variant, None-valued
            # params are not counted here — confirm this asymmetry is wanted.
            for param, value in default.items():
                if not value == None:
                    repairedParams += 1
            continue
        # Section exists: restore only individual missing parameters.
        for param, value in default.items():
            if param not in config[configLabel]:
                if value == None:
                    config.set(configLabel, param)
                else:
                    config.set(configLabel, param, str(value))
                utils.out(utils.LINE_H, "ava: ", utils.STD_OUT,
                          "Repairing parameter '" + param + "' in section '" +
                          configLabel + "' using default values",
                          softest=utils.N)
                repairedParams += 1
    with open(utils.TOOL_CONFIG_PATH, 'w') as configFile:
        config.write(configFile)
    utils.out(utils.LINE_H, "ava: ", utils.AFFIRM,
              "Successfully repaired " + str(repairedParams) +
              " parameters in " + str(repairedSections) + " sections",
              softest=utils.Q)
def test_journal_stats(dev, poco):
    """Switch the journal window to its statistics tab.

    Returns False if the journal window is not visible or the tab switch
    does not take effect.
    """
    node_wnd_journal = wait_for_node_visible(poco, wnd_journal, 2)
    if not node_wnd_journal.exists():
        out('Window not showed', wnd_journal)
        return False
    if get_journal_state(node_wnd_journal) != js_stats:
        out('переходим во вкладку статистики')
        touch_center(dev, get_node_child(node_wnd_journal, btn_stats))
        time.sleep(1)
        # Re-fetch the window node, since the scene has refreshed.
        node_wnd_journal = wait_for_node_visible(poco, wnd_journal, 2)
        if not node_wnd_journal.exists():
            out('Window not showed', wnd_journal)
            return False
        if get_journal_state(node_wnd_journal) != js_stats:
            out('не перешли во вкладку статистики', '')
            return False
    ### ------------------------------------------------------------
    # There are essentially no assertions here — just the tab switch.
    return True
def repairProjectConfigFile(loc):
    """Repair the project configuration file at *loc*, recreating it if
    missing and restoring any absent sections/parameters from
    utils.DEFAULTS, then report the repair counts.

    NOTE: this block uses Python-2-only constructs (iteritems,
    "string-escape") that the rest of its sibling functions do not;
    they are preserved here to avoid changing the file's py2 code path.
    """
    if not os.path.exists(loc):
        makeProjectConfigFile(loc)
        return
    utils.out(utils.LINE_H, "ava: ", utils.STD_OUT,
              "Repairing project configuration file at ", loc)
    config = utils.getConfigParser()
    config.read(loc)
    repairedSections = 0
    repairedParams = 0
    for configLabel, default in utils.DEFAULTS.iteritems():
        if configLabel not in config.sections():
            # Entire section missing: restore all of it from defaults.
            config[configLabel] = {param: str(value).encode("string-escape")
                                   for param, value in default.iteritems()
                                   }
            utils.out(None, utils.LINE_H, "ava: ", utils.STD_OUT,
                      "Repairing section '" + configLabel +
                      "' and all parameters using default values")
            repairedSections += 1
            repairedParams += len(default)
            continue
        # Section exists: restore only missing parameters.
        for param, value in default.iteritems():
            if param not in config[configLabel]:
                config[configLabel][param] = str(value).encode("string-escape")
                utils.out(None, utils.LINE_H, "ava: ", utils.STD_OUT,
                          "Repairing parameter '" + param + "' in section '" +
                          configLabel + "' using default values")
                repairedParams += 1
    # BUG FIX: the repaired config was written to utils.TOOL_CONFIG_PATH
    # instead of the project config location *loc*.
    with open(loc, 'w') as configFile:
        config.write(configFile)
    # BUG FIX: the summary referenced an undefined name "repairedParameters",
    # which raised NameError after every repair.
    utils.out(None, utils.LINE_H, "ava: ", utils.AFFIRM,
              "Successfully repaired " + str(repairedParams) +
              " parameters in " + str(repairedSections) + " sections")
def play_round(self):
    """Play one complete hand: preflop, flop, turn and river betting
    rounds, then showdown.

    NOTE(review): play_all_actions appears to return truthy when the hand
    ended early (e.g. all but one player folded), in which case the method
    returns immediately — confirm against its definition.
    """
    #Preflop
    for player in self.players:
        utils.out("%s is dealt %s" % (player.name, player.hand.read_out()),
                  self.debug_level)
    # Preflop action starts two seats past the small blind (after the blinds).
    seat_to_act = self.get_next_seat(self.small_blind_seat, num_seats=2)
    if self.play_all_actions(seat_to_act):
        return
    self.clean_up_betting_round()
    #Flop
    self.communal_cards += self.deck.draw(num_cards=3)
    utils.out("Flop: %s %s %s" % (self.communal_cards[0].read_out(),
                                  self.communal_cards[1].read_out(),
                                  self.communal_cards[2].read_out()),
              self.debug_level)
    # Post-flop rounds start at the small blind seat.
    if self.play_all_actions(self.small_blind_seat):
        return
    self.clean_up_betting_round()
    #Turn
    self.communal_cards += self.deck.draw()
    utils.out("Turn: %s" % self.communal_cards[3].read_out(),
              self.debug_level)
    if self.play_all_actions(self.small_blind_seat):
        return
    self.clean_up_betting_round()
    #River
    self.communal_cards += self.deck.draw()
    utils.out("River: %s" % self.communal_cards[4].read_out(),
              self.debug_level)
    if self.play_all_actions(self.small_blind_seat):
        return
    # Showdown: rank remaining hands and settle the pot(s).
    self.set_player_ranks()
    self.clean_up(players_by_rank = self.get_players_by_rank())
def rotate_cam_to(dev, poco, node_path, log_out=False):
    """Rotate the camera by horizontal swipes until the node at *node_path*
    is horizontally centered on screen (x in 0.48..0.52).

    Swipe size shrinks as the target nears center; gives up after 30
    iterations and returns False.
    """
    #node_path = 'Rank2(Clone)/MARKER_NEW(Clone)/10-MobileRelationsMark'
    pos_from = pos_scr2abs(dev, [0.5, 0.1])
    f_exit = False
    i = 0
    while not f_exit:
        # Default position used when the node is not (yet) visible.
        pos_scr = [0,0.2]
        #pos_abs = pos_scr2abs(dev, pos_scr)
        node_case_lod = wait_for_node_visible(poco, node_path)
        if node_case_lod.exists():
            if log_out:
                out(node_path+' exists...')
            pos_scr = pos_center(dev, node_case_lod, False)
            #pos_abs = pos_scr2abs(dev, pos_scr)
        # Pick a swipe step proportional to how far off-center the node is;
        # within 0.48..0.52 we consider it centered and stop.
        step = 0.1
        if pos_scr[0]>0.48 and pos_scr[0]<0.52:
            step = 0.0
            f_exit = True
        if pos_scr[0]<0.48 or pos_scr[0]>0.52:
            step = 0.03
        if pos_scr[0]<0.40 or pos_scr[0]>0.60:
            step = 0.05
        if pos_scr[0]<0.30 or pos_scr[0]>0.70:
            step = 0.1
        print('position:'+str(pos_scr))
        if pos_scr[0] > 0.5:
            swipe(pos_from, vector=[step,0], duration=1)
            if log_out:
                out('swipe to left on '+str(step))
        if pos_scr[0] < 0.5:
            swipe(pos_from, vector=[-step,0], duration=1)
            if log_out:
                out('swipe to right on '+str(step))
        i += 1
        if i>30:
            # Waited too long for the camera to face the object — give up.
            if log_out:
                out('долгое ожидание поворота к обьекту', node_path)
            return False
        out('it: '+str(i))
    return True
def test_runner(dev, poco, test_id):
    """Drive the app through its startup states (logos, terms, loading,
    popups) until it is ready, then dispatch *test_id* to run_test.

    Returns (result, runned) from run_test.
    """
    f_exit = False
    while not f_exit:
        state = find_current_state(poco)
        print('test_runner()::state: ' + state)
        if state == State.NONE:
            time.sleep(10)
        if state == State.LOGO_1 or state == State.LOGO_2:
            time.sleep(1)
        if state == State.TERMS_WND:
            # touch_node(dev, poco, 'H_logos_Canvas/TermsOfUseWindow/AcceptButton')
            poco('AcceptButton').click()
            time.sleep(1)
        if state == State.LOADING:
            time.sleep(10)
        if state == State.MAIN_MENU:
            time.sleep(5)
        # The pause/close button dismisses settings and reward popups alike.
        if state == State.SETTINGS:
            touch_node(dev, poco, 'H_Canvas/USER_Main_UI/BASE_MENU+CHAT/Avr_bg/Avr_pause');
        if state == State.MAP_EVENT_REWARD:
            touch_node(dev, poco, 'H_Canvas/USER_Main_UI/BASE_MENU+CHAT/Avr_bg/Avr_pause');
            time.sleep(1)
        if state == State.DAILY_REWARDS:
            touch_node(dev, poco, 'H_Canvas/USER_Main_UI/BASE_MENU+CHAT/Avr_bg/Avr_pause');
            time.sleep(1)
        # For now this is the only exception to the rule: the tutorial test
        # may start directly from the TUTORIAL state.
        if state == State.TUTORIAL and test_id == Cases.TUTORIAL_0:
            # We consider the app ready to launch the test.
            result, runned = run_test(dev, poco, test_id)
            if not runned:
                out('тест не найден', 'test_id: ' + test_id)
            return result, runned
        # --------------------------------------
        if state == State.MAP:
            # We consider the app ready to launch the test.
            result, runned = run_test(dev, poco, test_id)
            if not runned:
                out('тест не найден', 'test_id: ' + test_id)
            return result, runned
def pay_winnings(self, winnings, winners):
    """Split *winnings* evenly among *winners*, paying any indivisible
    remainder to the first winner, and deduct the payout from the pot.
    Returns the winners list."""
    for i, winner in enumerate(winners):
        #Pay remainder to the first winner
        if i == 0:
            remainder = winnings % len(winners)
            winner.chips += remainder
            self.pot -= remainder
            utils.out('%s(%d) wins remainder of %d' % (winner.name,
                winner.chips, remainder), self.debug_level)
        # BUG FIX: use floor division so chip counts stay integers on
        # Python 3 ("/" yields floats there); "//" is identical on Python 2
        # for the integer operands used here.
        winner.chips += winnings // len(winners)
        utils.out('%s(%d) wins %d chips with %s' % (winner.name,
            winner.chips, winnings, winner.hand.read_out()),
            self.debug_level)
        self.pot -= winnings // len(winners)
    return winners
def update_player_with_call(self, player):
    """Have *player* match the current table bet, going all-in when the
    stack is too short to cover the full call amount."""
    # Note: the call amount is 0 when the big blind checks its option preflop.
    to_call = self.bet - player.curr_bet
    if to_call > player.chips:
        # Short stack: push the whole remaining stack in.
        stack = player.chips
        player.curr_bet += stack
        self.pot += stack
        utils.out('%s(0) calls for %d and is all in. Pot is %d' % (
            player.name, stack, self.pot), self.debug_level)
        player.chips = 0
    else:
        player.curr_bet = self.bet
        player.chips -= to_call
        self.pot += to_call
        utils.out("%s(%d) calls for %d. Pot is %d" % (
            player.name, player.chips, to_call, self.pot), self.debug_level)
def __init__(self, players, small_blind=1, big_blind=2, dealer_seat=0,
             debug_level=0):
    """Set up a poker round: fresh deck, blind sizes, empty pot and board,
    then post blinds and deal via initiate_round."""
    self.debug_level = debug_level
    self.players = players
    self.deck = Deck()
    self.small_blind = small_blind
    self.big_blind = big_blind
    self.pot = 0
    self.curr_raise = 0
    # Everyone starts the hand in; active = not yet all-in/folded.
    self.num_players_in_hand = len(self.players)
    self.num_active_players_in_hand = self.num_players_in_hand
    self.communal_cards = []
    self.initiate_round(dealer_seat)
    utils.out('---------------------------------------', self.debug_level)
    utils.out('%s(%d) is dealer.' % (self.players[dealer_seat].name,
                                     self.players[dealer_seat].chips),
              self.debug_level)
def pay_winnings(self, winnings, winners):
    """Divide *winnings* equally among *winners* (remainder goes to the
    first winner) and subtract the payout from the pot. Returns winners."""
    for i, winner in enumerate(winners):
        #Pay remainder to the first winner
        if i == 0:
            remainder = winnings % len(winners)
            winner.chips += remainder
            self.pot -= remainder
            utils.out(
                '%s(%d) wins remainder of %d' %
                (winner.name, winner.chips, remainder), self.debug_level)
        # BUG FIX: "//" keeps chip counts integral on Python 3 (plain "/"
        # would produce floats); behavior is unchanged on Python 2.
        winner.chips += winnings // len(winners)
        utils.out(
            '%s(%d) wins %d chips with %s' %
            (winner.name, winner.chips, winnings,
             winner.hand.read_out()), self.debug_level)
        self.pot -= winnings // len(winners)
    return winners
def main():
    """Match MusicBrainz artists to VIAF identifiers found on their
    Wikipedia pages, verifying that each VIAF record still exists before
    submitting the URL relationship; progress goes to bot_wp_artist_viaf."""
    seen = set()
    matched = set()
    for artist in db.execute(query):
        # Skip artists already matched in this run.
        if artist['gid'] in matched:
            continue
        colored_out(bcolors.OKBLUE, 'Looking up artist "%s" http://musicbrainz.org/artist/%s' % (artist['name'], artist['gid']))
        out(' * wiki:', artist['wp_url'])
        page = WikiPage.fetch(artist['wp_url'], False)
        identifiers = determine_authority_identifiers(page)
        if 'VIAF' in identifiers:
            # A non-string value means several VIAF ids were extracted (py2 basestring).
            if not isinstance(identifiers['VIAF'], basestring):
                colored_out(bcolors.FAIL, ' * multiple VIAF found: %s' % ', '.join(identifiers['VIAF']))
            elif identifiers['VIAF'] == '' or identifiers['VIAF'] is None:
                colored_out(bcolors.FAIL, ' * invalid empty VIAF found')
            else:
                viaf_url = 'http://viaf.org/viaf/%s' % identifiers['VIAF']
                edit_note = 'From %s' % (artist['wp_url'],)
                colored_out(bcolors.OKGREEN, ' * found VIAF:', viaf_url)
                # Check if this VIAF has not been deleted
                skip = False
                try:
                    resp, content = httplib2.Http().request(viaf_url)
                except socket.error:
                    colored_out(bcolors.FAIL, ' * timeout!')
                    skip = True
                deleted_message = 'abandonedViafRecord'
                # NOTE(review): httplib2 normally reports status as an int;
                # the string comparison '404' looks suspect — confirm.
                if skip == False and (resp.status == '404' or deleted_message in content):
                    colored_out(bcolors.FAIL, ' * deleted VIAF!')
                    skip = True
                if skip == False:
                    # Throttle before submitting the edit.
                    time.sleep(3)
                    out(' * edit note:', edit_note.replace('\n', ' '))
                    mb.add_url('artist', artist['gid'], str(VIAF_RELATIONSHIP_TYPES['artist']), viaf_url, edit_note)
                    matched.add(artist['gid'])
        # First time seeing this artist: insert; otherwise stamp processed.
        if artist['processed'] is None and artist['gid'] not in seen:
            db.execute("INSERT INTO bot_wp_artist_viaf (gid, lang) VALUES (%s, %s)", (artist['gid'], page.lang))
        else:
            db.execute("UPDATE bot_wp_artist_viaf SET processed = now() WHERE (gid, lang) = (%s, %s)", (artist['gid'], page.lang))
        seen.add(artist['gid'])
def update_player_with_call(self, player):
    """Have *player* match the current table bet; if the stack cannot
    cover the call, the player goes all-in for whatever remains."""
    #Note: call_amount is 0 in the case that the big blind calls preflop.
    amount_to_call = self.bet - player.curr_bet
    #Check if player is all-in
    if amount_to_call > player.chips:
        # Short stack: everything goes in; chips are zeroed afterwards so
        # the log line can still report the amount pushed.
        player.curr_bet += player.chips
        self.pot += player.chips
        utils.out(
            '%s(0) calls for %d and is all in. Pot is %d' %
            (player.name, player.chips, self.pot), self.debug_level)
        player.chips = 0
    else:
        player.curr_bet = self.bet
        player.chips -= amount_to_call
        self.pot += amount_to_call
        utils.out(
            "%s(%d) calls for %d. Pot is %d" %
            (player.name, player.chips, amount_to_call, self.pot),
            self.debug_level)
def run_test(dev, poco, test_id):
    """Dispatch *test_id* to its test-case function.

    Returns (result, runned): *runned* is False when no case matches.
    """
    out('test_id: ' + test_id)
    cases = {
        Cases.Z_SLEEP_TEST: test_z_sleep_test,
        Cases.DAILY_REWARDS: test_daily_rewards,
        Cases.JOURNAL: test_journal,
        Cases.FACTION: test_faction,
        Cases.TUTORIAL_0: test_tutorial_0,
        Cases.TUTORIAL_1: test_tutorial_1,
        Cases.TUTORIAL_2: test_tutorial_2,
        Cases.TUTORIAL_3: test_tutorial_3,
        # Cases.TUTORIAL_4 was disabled in the original dispatcher.
        Cases.TUTORIAL_5: test_tutorial_5,
        Cases.TUTORIAL_6: test_tutorial_6,
        Cases.TUTORIAL_7: test_tutorial_7,
        Cases.TUTORIAL_8: test_tutorial_8,
        Cases.TUTORIAL_9: test_tutorial_9,
        Cases.TUTORIAL_10: test_tutorial_10,
    }
    case = cases.get(test_id)
    if case is None:
        return False, False
    return case(dev, poco), True
def clean_up(self, players_by_rank=None, winning_seat=None):
    """Distribute the pot at the end of a hand.

    When *winning_seat* is given (the hand never reached showdown), the
    whole pot goes to that single remaining player. Otherwise the pot is
    paid out by hand rank, honouring sidepots and split pots.
    """
    if winning_seat is not None:
        # No showdown: the last player standing takes everything.
        sole_winner = self.players[winning_seat]
        sole_winner.chips += self.pot
        utils.out("%s(%d) wins the pot of %d" % (
            sole_winner.name, sole_winner.chips, self.pot), self.debug_level)
        return
    # Showdown: drain the pot rank by rank.
    while self.pot > 0:
        # Best remaining hands first.
        top_winners, players_by_rank = self.get_winners_with_highest_rank(players_by_rank)
        # Settle any sidepots these winners are entitled to.
        top_winners = self.divide_sidepots_among_winners(top_winners)
        # Winners without sidepots share whatever remains in the main pot.
        if top_winners:
            top_winners = self.pay_winnings(self.pot, top_winners)
def determine_country(page):
    """Derive a single country for *page* from its infobox, text and
    categories.

    Returns (country_id, reasons); (None, []) when the sources disagree or
    the categories give no signal.
    """
    candidates = set()
    reasons = []

    infobox_countries, infobox_reason = determine_country_from_infobox(page.infobox)
    if infobox_countries:
        candidates.update(infobox_countries)
        reasons.append(infobox_reason)

    text_countries, text_reason = determine_country_from_text(page)
    if text_countries:
        candidates.update(text_countries)
        reasons.append(text_reason)

    has_categories = False
    cat_countries, cat_reason, category_count = determine_country_from_categories(page.categories)
    if cat_countries:
        candidates.update(cat_countries)
        reasons.append(cat_reason)
        has_categories = True

    # Category evidence is mandatory; the other sources alone don't count.
    if len(reasons) < 1 or not candidates or not has_categories:
        out(' * not enough sources for countries', candidates, reasons)
        return None, []
    if len(candidates) > 1:
        out(' * conflicting countries', candidates, reasons)
        return None, []

    country = candidates.pop()
    country_id = country_ids[country]
    out(' * new country:', country, country_id)
    return country_id, reasons
def build_stops_database():
    """ Build the stops database """
    stops_path = os.path.join(
        settings['build_folder'], settings['stops_file']
    )
    line_info_path = os.path.join(
        settings['build_folder'], settings['stop_line_info_file']
    )
    with open(line_info_path) as info_file:
        line_info = json.load(info_file)

    skipped = 0  # stops dropped for lacking a Naptan code
    stops = []
    with open(stops_path) as csv_file:
        for row in csv.DictReader(csv_file):
            # Only keep stops in the configured localities.
            if row['LocalityName'] not in settings['localities']:
                continue
            stop = get_stop_definition(row, line_info)
            if stop['naptanCode'] == '':
                skipped += 1
            else:
                stops.append(stop)

    # Order them, and remove order info as it's not needed afterwards.
    stops.sort(key=lambda s: s['order'])
    for stop in stops:
        del stop['order']

    out("Found {c} stops.".format(c=len(stops)))
    if skipped > 0:
        out("{x} stops didn't have a Naptan code and were ignored.".format(x=skipped))
    out("Saving database...")
    with open(settings['result_file_path'], 'w') as result_file:
        result_file.write(json.dumps(stops))
def build_stop_line_info_file(stop_line_info_file, name_index):
    """ Build the stop line info file """
    pattern = os.path.join(
        settings['build_folder'], settings['tnds_file_glob']
    )
    ns = {'n': 'http://www.transxchange.org.uk/'}
    ref_xpath = './n:StopPoints/n:AnnotatedStopPointRef/n:StopPointRef'

    # Map each stop point code to the line names that serve it.
    stop_sets = {}
    for file_name in glob.glob(pattern):
        # Set id is the file basename without its 4-char prefix/suffix.
        set_id = os.path.basename(file_name)[4:-4]
        if set_id in name_index:
            line_name = name_index[set_id]
        else:
            out('Unknown set {}'.format(set_id))
            line_name = '?'
        out('Parsing set {}...'.format(set_id))
        tree_root = xml.etree.ElementTree.parse(file_name).getroot()
        for point_ref in tree_root.findall(ref_xpath, ns):
            naptan = point_ref.text.strip()
            stop_sets.setdefault(naptan, []).append(line_name)

    out('Saving results file {}...'.format(stop_line_info_file))
    with open(stop_line_info_file, 'w') as outfile:
        json.dump(stop_sets, outfile)
def readToolConfigs():
    """Read the tool configuration file, filling in defaults for any
    missing section or parameter.

    Returns a dict mapping section label -> {param: value}. Warns (and
    suggests --repair-tool-config) whenever a default had to be used.
    """
    if not os.path.exists(utils.TOOL_CONFIG_PATH):
        makeToolConfigFile(utils.TOOL_DEFAULTS)
    config = utils.getConfigParser()
    config.read(utils.TOOL_CONFIG_PATH)
    configs = {}
    missing = False
    for configLabel, default in utils.TOOL_DEFAULTS.items():
        # BUG FIX: store a copy of the defaults. The original assigned the
        # default dict itself, so the per-parameter writes below mutated
        # utils.TOOL_DEFAULTS for the rest of the process.
        configs[configLabel] = dict(default)
        if configLabel not in config.sections():
            utils.out(utils.LINE_H, "ava: ", utils.BWARN, "Using default values for missing section '" + configLabel + "'", softest=utils.N)
            missing = True
            continue
        for param, value in default.items():
            # None marks parameters that have no meaningful default.
            if value is None:
                continue
            if param not in config.options(configLabel):
                utils.out(utils.LINE_H, "ava: ", utils.BWARN, "Using default value for missing parameter '" + param + "' in section '" + configLabel + "'", softest=utils.N)
                missing = True
            else:
                # Booleans need getboolean(); everything else stays a string.
                configs[configLabel][param] = config.getboolean(configLabel, param) if isinstance(value, bool) else config.get(configLabel, param)
    if missing:
        utils.out(utils.LINE_H, "ava: ", utils.WARN, "Run with ", utils.CMD, "--repair-tool-config", utils.WARN, " or ", utils.CMD, "-r", utils.WARN, " to repair tool configurations", softest=utils.Q)
    return configs
def main(args): if not args: out('Usage: cancel_edits.py <edit_number edit_note>...\n') out('Example: cancel_edits.py "Edit #123 my mistake"') out(' cancel_edits.py 123 124 125') return edits = [] for arg in args: if not isinstance(arg, unicode): arg = unicode(arg, locale.getpreferredencoding()) m = re.match(ur'(?:[Ee]dit )?#?([0-9]+) ?(.*)$', arg) if not m: out('invalid edit number "%s", aborting!' % arg) return edit_nr = str(m.group(1)) edit_note = m.group(2).lstrip() edits.append((edit_nr, edit_note)) mb = MusicBrainzClient(cfg.MB_USERNAME, cfg.MB_PASSWORD, cfg.MB_SITE) for edit_nr, edit_note in edits: out(u'Cancel edit #%s: %s' % (edit_nr, edit_note if edit_note else u'<no edit note>')) mb.cancel_edit(str(edit_nr), edit_note)
def determine_type(page):
    """Derive a single artist type for *page*.

    Returns (type_id, reasons); (None, []) when no source or conflicting
    sources were found.
    """
    found = set()
    reasons = []
    page_types, reason = determine_type_from_page(page)
    if page_types:
        found.update(page_types)
        reasons.append(reason)
    if not reasons:
        out(' * not enough sources for types')
        return None, []
    if len(found) > 1:
        out(' * conflicting types', found, reasons)
        return None, []
    # Exactly one candidate remains (avoid shadowing the builtin `type`).
    artist_type = found.pop()
    type_id = artist_type_ids[artist_type]
    out(' * new type:', artist_type, type_id)
    return type_id, reasons
def run():
    """ Run the data builder """
    global settings
    cli_args = docopt(__doc__, version=__version__)
    settings['overwrite_database'] = cli_args['-f']
    result_path = settings['result_file_path']
    # Refuse to clobber an existing database unless -f was passed.
    if not settings['overwrite_database'] and os.path.exists(result_path):
        out('Output file {f} already exists. Use -f to overwrite.'.format(
            f=result_path
        ), 'error')
        return
    out('Building the database...')
    backup_and_remove(result_path)
    build_stops_database()
    out('Bye!')
def determine_gender(page):
    """Derive a single gender for *page* from its categories and abstract.

    Returns (gender_id, reasons); (None, []) when no source or conflicting
    sources were found.
    """
    candidates = set()
    reasons = []

    cat_genders, cat_reason = determine_gender_from_categories(page.categories)
    if cat_genders:
        candidates.update(cat_genders)
        reasons.append(cat_reason)

    text_genders, text_reason = determine_gender_from_text(page.abstract)
    if text_genders:
        candidates.update(text_genders)
        reasons.append(text_reason)

    if not reasons:
        out(' * not enough sources for genders')
        return None, []
    if len(candidates) > 1:
        out(' * conflicting genders', candidates, reasons)
        return None, []

    gender = candidates.pop()
    gender_id = gender_ids[gender]
    out(' * new gender:', gender, gender_id)
    return gender_id, reasons
def run():
    """ Run the stop line info builder """
    cli_args = docopt(__doc__, version=__version__)
    settings['overwrite_database'] = cli_args['-f']
    info_path = os.path.join(
        settings['build_folder'], settings['stop_line_info_file']
    )
    # Refuse to clobber an existing file unless -f was passed.
    if not settings['overwrite_database'] and os.path.exists(info_path):
        raise BuildError('Output file {f} already exists. Use -f to overwrite.'.format(
            f=info_path
        ))
    out('Build stop name index...')
    name_index = get_line_name_index()
    out('Building stop line info file...')
    backup_and_remove(info_path)
    build_stop_line_info_file(info_path, name_index)
    out('Done! Bye :)')
discogs_track = discogs_tracks[position] if not are_similar(discogs_track['title'], mb_track['name']): colored_out(bcolors.FAIL, ' * track #%s not similar enough' % discogs_track['position']) changed = False break if discogs_track['position'] != mb_track['number'] \ and re.match(r'^[A-Z]+[\.-]?\d*', discogs_track['position']) \ and re.match(r'^\d+$', mb_track['number']): new_track['number'] = discogs_track['position'] changed = True # Also set length if it's not defined on MB if discogs_track['duration'] != "" and mb_track['length'] is None: new_track['length'] = durationToMS(discogs_track['duration']) changed = True position += 1 if not changed: colored_out(bcolors.HEADER, ' * no changes found from %s' % release['discogs_url']) else: edit_note = 'Tracks number and/or length from attached Discogs link (%s)' % release['discogs_url'] out(' * edit note: %s' % (edit_note,)) time.sleep(5) mb.edit_release_tracklisting(release['gid'], new_mediums, edit_note, False) if release['processed'] is None: db.execute("INSERT INTO bot_discogs_track_number (gid) VALUES (%s)", (release['gid'],)) else: db.execute("UPDATE bot_discogs_track_number SET processed = now() WHERE gid = %s", (release['gid'],))
def main(verbose=False):
    """For each release group lacking a Discogs master link, find a master
    URL shared by all of the group's release-level Discogs links and submit
    it to MusicBrainz.

    Problematic / missing groups are recorded in bot tables so they are
    not retried on later runs. Set *verbose* for per-group progress output.
    """
    # Materialize the candidate release groups up front so we can report
    # progress as i/count.
    rgs = [(rg, gid, name) for rg, gid, name in db.execute(query_rg_without_master)]
    count = len(rgs)
    for i, (rg, gid, name) in enumerate(rgs):
        # Skip groups already flagged on previous runs.
        if gid in discogs_release_group_missing or gid in discogs_release_group_problematic:
            continue
        # All distinct Discogs release URLs attached to this group's releases.
        urls = set(url for url, in db.execute(query_rg_release_discogs, rg))
        if verbose:
            out(u'%d/%d - %.2f%%' % (i + 1, count, (i + 1) * 100.0 / count))
            out(u'%s https://musicbrainz.org/release-group/%s' % (name, gid))
        try:
            masters = list(discogs_get_master(urls))
        except (discogs.HTTPError, discogs.requests.ConnectionError) as e:
            # Network/API failure: log and move on; the group will be
            # retried next run.
            out(e)
            continue
        if len(masters) == 0:
            if verbose:
                out(u' aborting, no Discogs master!')
            db.execute("INSERT INTO bot_discogs_release_group_missing (gid) VALUES (%s)", gid)
            continue
        if len(set(masters)) > 1:
            if verbose:
                out(u' aborting, releases with different Discogs master in one group!')
            db.execute("INSERT INTO bot_discogs_release_group_problematic (gid) VALUES (%s)", gid)
            continue
        if len(masters) != len(urls):
            # Some releases had no master at all: ambiguous, flag it.
            if verbose:
                out(u' aborting, releases without Discogs master in group!')
            db.execute("INSERT INTO bot_discogs_release_group_problematic (gid) VALUES (%s)", gid)
            continue
        master_name, master_id, master_artists = masters[0]
        # Sanity check: master title must resemble the MB group name.
        if not are_similar(master_name, name):
            if verbose:
                out(u' Similarity too small: %s <-> %s' % (name, master_name))
            db.execute("INSERT INTO bot_discogs_release_group_problematic (gid) VALUES (%s)", gid)
            continue
        master_url = 'http://www.discogs.com/master/%d' % master_id
        if (gid, master_url) in discogs_release_group_set:
            if verbose:
                out(u' already linked earlier (probably got removed by some editor!')
            continue
        # Build the edit note; two or more agreeing links justify an
        # auto-edit below.
        if len(urls) >= 2:
            text = u'There are %d distinct Discogs links in this release group, and all point to this master URL.\n' % len(urls)
        else:
            text = u'There is one Discogs link in this release group, and it points to this master URL.\n%s\n' % list(urls)[0]
        text += u'Also, the name of the Discogs master “%s” (by %s) is similar to the release group name.' % (master_name, master_artists)
        try:
            out(u'https://musicbrainz.org/release-group/%s -> %s' % (gid, master_url))
            mb.add_url('release_group', gid, 90, master_url, text, auto=(len(urls) >= 2))
            db.execute("INSERT INTO bot_discogs_release_group_set (gid,url) VALUES (%s,%s)", (gid, master_url))
        except (urllib2.HTTPError, urllib2.URLError, socket.timeout) as e:
            out(e)
def main(self):
    """Enter the GTK main loop; exit quietly on Ctrl-C instead of
    dumping a KeyboardInterrupt traceback."""
    try:
        gtk.main()
    except KeyboardInterrupt:
        out(out.NOTICE, 'Caught keyboard interrupt, quitting')
def run():
    """ Run the stop line info builder """
    cli_args = docopt(__doc__, version=__version__)
    settings['overwrite_database'] = cli_args['-f']
    info_path = os.path.join(
        settings['build_folder'], settings['stop_line_info_file']
    )
    # Refuse to clobber an existing file unless -f was passed.
    if not settings['overwrite_database'] and os.path.exists(info_path):
        raise BuildError('Output file {f} already exists. Use -f to overwrite.'.format(
            f=info_path
        ))
    out('Build stop name index...')
    name_index = get_line_name_index()
    out('Building stop line info file...')
    backup_and_remove(info_path)
    build_stop_line_info_file(info_path, name_index)
    out('Done! Bye :)')


if __name__ == '__main__':
    try:
        run()
    except BuildError as e:
        out("Build error: " + str(e), 'error')
def main(verbose=False):
    """Link BBC album reviews to MusicBrainz release groups.

    Loads the BBC review sitemap, resolves each reviewed release to its
    release group, and submits the review URL as a relationship when the
    review title is similar enough to the release name. Stops when the
    account's edit quota is exhausted.
    """
    download_if_modified(bbc_sitemap_url, bbc_sitemap)
    db = db_connect()
    # Lookup tables: merged-release redirects, rg -> (gid, name), release
    # gid -> (rg, artist credit, name).
    release_redirects = dict(get_release_redirects(db))
    release_groups = dict(get_release_groups(db))
    releases = dict(get_releases(db))
    # (gid, url) pairs this bot already submitted in earlier runs.
    bbc_reviews_set = set((gid, url) for gid, url in db.execute("""SELECT gid, url FROM bot_bbc_reviews_set"""))
    # Review URLs already attached to each release group in MB.
    review_urls = defaultdict(set)
    for rg, url in get_review_urls(db):
        review_urls[rg].add(url)
    # Review URLs listed on the manual-cleanup wiki pages are skipped.
    cleanup_review_urls = set()
    for cleanup_url in cleanup_urls:
        f = urllib.urlopen(cleanup_url)
        cleanup_review_urls |= set(re.findall(ur"http://www.bbc.co.uk/music/reviews/[0-9a-z]+", f.read()))
    editor_id = db.execute("""SELECT id FROM editor WHERE name = %s""", cfg.MB_USERNAME).first()[0]
    mb = MusicBrainzClient(cfg.MB_USERNAME, cfg.MB_PASSWORD, cfg.MB_SITE, editor_id=editor_id)
    normal_edits_left, edits_left = mb.edits_left()
    bbc_reviews = list(load_bbc_reviews(bbc_sitemap))
    count = len(bbc_reviews)
    for i, (review_url, release_url, title) in enumerate(bbc_reviews):
        # Respect the remaining edit quota.
        if normal_edits_left <= 0:
            break
        if verbose:
            out(u"%d/%d - %.2f%%" % (i + 1, count, (i + 1) * 100.0 / count))
            out(u"%s %s" % (title, review_url))
            out(release_url)
        if review_url in cleanup_review_urls:
            continue
        release_gid = utils.extract_mbid(release_url, "release")
        # Follow merge redirects first, then plain releases.
        row = release_redirects.get(release_gid)
        if not row:
            row = releases.get(release_gid)
            if not row:
                if verbose:
                    out(" non-existant release in review %s" % review_url)
                continue
        rg, ac, release_name = row
        gid, name = release_groups[rg]
        # Already linked in MB, or already submitted by this bot earlier.
        if review_url in review_urls[rg]:
            continue
        if (gid, review_url) in bbc_reviews_set:
            if verbose:
                out(u" already linked earlier (probably got removed by some editor!")
            continue
        mb_title = "%s - %s" % (artist_credit(db, ac), release_name)
        if not are_similar(title, mb_title):
            if verbose:
                out(u" similarity too small: %s <-> %s" % (title, mb_title))
            # out(u'|-\n| [%s %s]\n| [[ReleaseGroup:%s|%s]]\n| [[Release:%s|%s]]' % (review_url, bbc_name, gid, name, release_gid, release_name))
            continue
        text = (
            u"Review is in BBC mapping [1], and review name “%s” is"
            " similar to the release name. If this is wrong,"
            " please note it here and put the correct mapping in"
            " the wiki [2].\n\n[1] %s\n[2] %s" % (title, bbc_sitemap_url, cleanup_urls[0])
        )
        text += "\n\n%s" % prog
        try:
            out(u"http://musicbrainz.org/release-group/%s -> %s" % (gid, review_url))
            mb.add_url("release_group", gid, 94, review_url, text, auto=False)
            db.execute("INSERT INTO bot_bbc_reviews_set (gid,url) VALUES (%s,%s)", (gid, review_url))
            bbc_reviews_set.add((gid, review_url))
            normal_edits_left -= 1
        except (urllib2.HTTPError, urllib2.URLError, socket.timeout) as e:
            out(e)
continue delay = time.time() - last_wp_request if delay < 1.0: time.sleep(1.0 - delay) last_wp_request = time.time() wikipage = WikiPage.fetch('https://%s.wikipedia.org/wiki/%s' % (wp_lang, title)) page_orig = wikipage.text if not page_orig: continue page_title = title colored_out(bcolors.HEADER, ' * trying article %s' % (title,)) page = mangle_name(page_orig) is_canonical, reason = wp_is_canonical_page(title, page_orig) if (not is_canonical): out(' * %s, skipping' % reason) continue categories = category_re[wp_lang].findall(page_orig) is_album_page = False for category in categories: if wp_lang == 'en': if category.lower().endswith(' albums'): is_album_page = True break if category.lower().endswith(' soundtracks'): is_album_page = True break #if category.lower().endswith(' singles'): # is_album_page = True # break