def get_stage(year):
    # Walk the six World Cup stage buttons; assumes module-level Selenium
    # `browser` and `wait` objects plus the project module `match`.
    for stage in range(0, 6):
        clicks = browser.find_elements(
            By.CSS_SELECTOR, '#SubSelectDiv td[class^="cupmatch_rw2"]')
        if len(clicks) != 6:
            print("Unexpected number of stage buttons on the page: " + str(len(clicks)))
            sys.exit()
        clicks[stage].click()
        check = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#mainTableDiv')))
        time.sleep(random.randint(3, 8))
        if stage == 0:
            get_stage1(year, "小组赛")  # group stage
        elif stage == 1:
            match.get_match(browser, year, "世界杯", "16强")  # World Cup, round of 16
        elif stage == 2:
            match.get_match(browser, year, "世界杯", "8强")  # quarter-finals
        elif stage == 3:
            match.get_match(browser, year, "世界杯", "4强")  # semi-finals
        elif stage == 4:
            match.get_match(browser, year, "世界杯", "季军赛")  # third-place match
        elif stage == 5:
            match.get_match(browser, year, "世界杯", "决赛")  # final

def get_stage1(year, stage):
    # Group stage: click through the eight group buttons and scrape each one.
    print("stage1")
    for group in range(0, 8):
        clicks = browser.find_elements(By.CSS_SELECTOR, '#showRound tr .lsm2')
        if len(clicks) != 9:
            print("Unexpected number of group buttons on the page")
            sys.exit()
        clicks[group].click()
        check = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#mainTableDiv')))
        print(group)
        time.sleep(random.randint(3, 8))
        match.get_match(browser, year, "世界杯", stage)

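# A minimal setup sketch for the two World Cup scrapers above, assuming the
# module-level `browser`, `wait`, and `match` they rely on. The Chrome driver,
# the 10-second timeout, the URL, and the year are placeholders, not values
# taken from the original code.
import random
import sys
import time

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

import match  # project module providing get_match()

browser = webdriver.Chrome()
wait = WebDriverWait(browser, 10)
browser.get("http://example.com/worldcup")  # placeholder URL
get_stage(2018)  # placeholder year
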
def get_match(match_id):
    # Fetch one match page from oddsportal, enrich it with the competition
    # breadcrumb from soccer24, and append the result to data/reget.json.
    # Assumes a module-level requests session `s` and the helper module `match_dl`.
    f = open("data/reget.json", "a+")
    r = s.get('http://www.oddsportal.com/a/b/c/d-%s/' % match_id)
    tree = html.fromstring(r.text)
    try:
        match = {'match_id': match_id}
        print(match_id)
        match = match_dl.get_match(match)
        name = tree.xpath(
            '//div[@id="col-content"]/h1')[0].text_content().split(' - ')
        match['home'] = name[0]
        match['away'] = name[1]
        match['event'] = get_league_info(r.url)[1:]
        event_request = requests.get(
            'http://www.soccer24.com/match/' + match['match_id'])
        event_tree = html.fromstring(event_request.text)
        phrases = event_tree.xpath(
            '//table[@class="detail"]//a/text()')[0].split(' - ')[1:]
        match['event'] += phrases[::-1]
        f.write(json.dumps(match) + '\n')
    except Exception:
        # Remember failed ids so they can be re-fetched later.
        fail = open("to_reget.dat", 'a+')
        fail.write(match_id + '\n')
        fail.close()
    f.close()

def get_match(match_id):
    # Variant of get_match() above: the event path now combines the league
    # info, the season derived from the URL, and the soccer24 breadcrumb.
    f = open("data/reget.json", "a+")
    r = s.get('http://www.oddsportal.com/a/b/c/d-%s/' % match_id)
    tree = html.fromstring(r.text)
    try:
        match = {'match_id': match_id}
        print(match_id)
        match = match_dl.get_match(match)
        name = tree.xpath(
            '//div[@id="col-content"]/h1')[0].text_content().split(' - ')
        match['home'] = name[0]
        match['away'] = name[1]
        league = get_league_info(r.url)
        # match['event'] = get_league_info(r.url)[1:]
        event_request = requests.get(
            'http://www.soccer24.com/match/' + match['match_id'])
        event_tree = html.fromstring(event_request.text)
        phrases = event_tree.xpath(
            '//table[@class="detail"]//a/text()')[0].split(' - ')[1:]
        # match['event'] += phrases[::-1]
        match['event'] = league[1:-1] + \
            [get_season(league, r.url)] + \
            phrases[::-1]
        f.write(json.dumps(match) + '\n')
        # print(json.dumps(match) + '\n')
    except Exception:
        # Remember failed ids so they can be re-fetched later.
        fail = open("to_reget.dat", 'a+')
        fail.write(match_id + '\n')
        fail.close()
    f.close()

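# A hedged driver sketch for the get_match() variants above. The helpers
# match_dl, get_league_info, and get_season come from the surrounding project
# and are not defined here; reading ids back out of to_reget.dat for a retry
# pass is an assumption based only on where failures are written.
import json

import requests
from lxml import html

s = requests.Session()

with open("to_reget.dat") as ids:
    for line in ids:
        match_id = line.strip()
        if match_id:
            get_match(match_id)
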
def new_sheet_request():
    form = request.form
    tid = form.get("tid").lstrip("0")
    mid = form.get("mid").strip().upper()
    alliance = form.get("alliance")
    if not validate_match(mid):
        # Match id does not match the regex
        raise WebException("Invalid match id.")
    _match = match.get_match(mid=mid).first()
    if _match is None:
        # Match does not exist yet, so create one
        match.add_match(mid)
    _team = team.get_team(tid=tid).first()
    if _team is None:
        # Team does not exist yet, so create one
        team.add_team(tid)
    sheet = get_sheet(tid=tid, mid=mid, alliance=alliance).first()
    if sheet is not None:
        # Duplicate sheet exists, so alert the user
        raise WebException("Sheet already exists.")
    # Create a new sheet
    new_sheet(tid, mid, alliance)
    return {"success": 1, "message": "Sheet created."}

def main(argv):
    # Declare audio file 1 (eventually artist audio?)
    true_audio = ""
    # Declare audio file 2 (eventually candidate audio?)
    suspect_audio = ""
    # Parse the audio files from the arguments
    true_audio, suspect_audio = parse_args(argv)
    # Validate that these audio files are legit
    if not (read_audio.validate_input(true_audio)
            and read_audio.validate_input(suspect_audio)):
        sys.exit(2)
    dstore = datastore.Datastore()
    true_audio = read_audio.create_file_array(true_audio)
    suspect_audio = read_audio.create_file_array(suspect_audio)
    # Fingerprint every suspect file into the datastore, then match each
    # true file against it and print the result.
    for suspect_audio_path in suspect_audio:
        dstore.add_fingerprints(suspect_audio_path)
    for true_audio_path in true_audio:
        samples = read_audio.get_mono(true_audio_path)
        hashes = fingerprinting.get_fingerprints(samples)
        match_data = match.get_match(hashes, dstore, true_audio_path)
        match.print_match(true_audio_path, match_data)

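# A small entry-point sketch for main() above. It assumes the script is run
# directly and that parse_args() expects the argv tail; that calling convention
# is a guess based on the call signature, not on code shown here.
import sys

if __name__ == "__main__":
    main(sys.argv[1:])
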
def get_sub_stage(browser, wait, year, league, parent_stage):
    clicks = browser.find_elements(By.CSS_SELECTOR, '#showRound tr .lsm2')
    stage_count = len(clicks)
    if stage_count == 0:
        # No round buttons: the page holds a single flat fixture list.
        ret = match.get_match(browser, year, league['league'], parent_stage)
    else:
        for stage in range(0, stage_count):
            clicks = browser.find_elements(By.CSS_SELECTOR,
                                           '#showRound tr .lsm2')
            clicks[stage].click()
            check = wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, '#Table3')))
            time.sleep(random.randint(3, 5))
            if parent_stage is None:
                stage_str = str(stage + 1)
            else:
                stage_str = parent_stage
            ret = match.get_match(browser, year, league['league'], stage_str)
            if ret == 'no_start':
                break
    return ret

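# A hedged call sketch for get_sub_stage() above. The league dict layout and the
# year are assumptions inferred from the league['league'] lookup; passing None as
# parent_stage makes the function number the rounds itself, and it stops early
# when match.get_match() reports 'no_start'.
league = {'league': '法甲'}  # Ligue 1, as in the scraper below
ret = get_sub_stage(browser, wait, 2019, league, None)
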
def get_stage(year):
    # Ligue 1 ("法甲") has 38 rounds; click through and scrape each one.
    for stage in range(0, 38):
        clicks = browser.find_elements(By.CSS_SELECTOR, '#showRound tr .lsm2')
        if len(clicks) != 38:
            print("Unexpected number of round buttons on the page: " + str(len(clicks)))
            sys.exit()
        clicks[stage].click()
        check = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, '#Table3')))
        time.sleep(random.randint(3, 8))
        ret = match.get_match(browser, year, "法甲", str(stage + 1))
        if ret == 'no_start':
            break

def build_match(tr):
    # Build a match record from one row of the oddsportal results table and
    # enrich it with the competition breadcrumb from soccer24.
    match = {'match_id': tr.attrib['xeid']}
    match = match_dl.get_match(match)
    date = tr.xpath('td[1]')[0]
    match['date'] = re.findall(r' t([^-]+)', date.attrib['class'])[0]
    name = tr.xpath('.//a[not(@id)]')[0].text_content().split(' - ')
    match['home'] = name[0]
    match['away'] = name[1]
    # match['event'] = leagues[i][1:-1]
    event_request = requests.get('http://www.soccer24.com/match/' +
                                 match['match_id'])
    event_tree = html.fromstring(event_request.text)
    phrases = event_tree.xpath(
        '//table[@class="detail"]//a/text()')[0].split(' - ')[1:]
    match['event'] = phrases[::-1]
    return match

def update_sheet_request():
    form = request.form
    sid = form.get("sid")
    mid = form.get("mid").strip().upper()
    tid = form.get("tid").lstrip("0")
    alliance = form.get("alliance")
    if not validate_match(mid):
        # Match id does not match the regex
        raise WebException("Invalid match id.")
    _match = match.get_match(mid=mid).first()
    if _match is None:
        # Match does not exist yet, so create one
        match.add_match(mid)
    _team = team.get_team(tid=tid).first()
    if _team is None:
        # Team does not exist yet, so create one
        team.add_team(tid)
    update_sheet(sid, mid, tid, alliance)
    return {"success": 1, "message": "Sheet updated."}

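# A hedged wiring sketch for the two sheet handlers above. They read request.form
# in Flask style and return plain dicts, so routes like the ones below would
# exercise them; the URL paths, the app object, and jsonify-ing the return value
# are assumptions, and WebException handling is left to the surrounding project.
from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route("/sheets/new", methods=["POST"])
def new_sheet_route():
    return jsonify(new_sheet_request())

@app.route("/sheets/update", methods=["POST"])
def update_sheet_route():
    return jsonify(update_sheet_request())
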
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Day of the practice rounds or competition.
    parser.add_argument("--day")
    parser.add_argument("--teams", nargs="+")
    args = parser.parse_args()
    (day, orig_teams) = (args.day, args.teams)
    config = DAYS[day]
    # Loop through each graph for this day/competition round.
    for (graph, num_teams, num_nodes) in config:
        teams = [x for x in orig_teams]
        # If this is a one-on-one with a TA team.
        if isinstance(num_teams, str):
            ta_team = num_teams
            for team in teams:
                do_main(graph, [team, ta_team], "majority_colored")
        # Otherwise match just the student teams.
        else:
            # If there aren't enough teams to play on this graph, create
            # random filler teams.
            if len(teams) < num_teams:
                for i in range(num_teams - len(teams)):
                    create_random_team(graph, num_nodes, "filler" + str(i + 1))
                    teams.append("filler" + str(i + 1))
            matches = get_match(num_teams, teams)
            for match in matches:
                do_main(graph, match, "majority_colored")

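# A hedged sketch of the DAYS configuration the scheduler above expects: each day
# maps to a list of (graph, num_teams, num_nodes) tuples, and a string in the
# num_teams slot names a TA team for one-on-one matches. The concrete paths,
# names, and numbers below are illustrative, not taken from the original project.
DAYS = {
    "practice1": [
        ("graphs/small.json", 2, 50),            # pair up student teams
        ("graphs/medium.json", "ta_team", 100),  # everyone plays the TA team
    ],
}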