def parse_bwin(url):
    """
    Return the odds available on a bwin url.

    Walks the rendered page with BeautifulSoup and returns a dict mapping
    "TeamA - TeamB" to {"odds": {"bwin": [floats]}, "date": datetime-like}.

    :param url: bwin competition or sport page url
    :return: dict of match -> odds/date (empty if nothing was parsed)
    :raises sportsbetting.AbortException: if the global abort flag is set
    """
    selenium_init.DRIVER["bwin"].maximize_window()
    selenium_init.DRIVER["bwin"].get(url)
    match_odds_hash = {}
    match = None
    date_time = None
    # Handball pages show the winner market in the second column by default.
    index_column_result_odds = 1 if "handball" in url else 0
    is_sport_page = "/0" in url
    reversed_odds = False
    # FIX: the previous expression `EC.presence_of_all_elements_located(...)
    # or sportsbetting.ABORT` never consulted ABORT, because the EC object is
    # always truthy. The lambda makes an abort request end the wait at once.
    WebDriverWait(selenium_init.DRIVER["bwin"], 15).until(
        lambda driver: sportsbetting.ABORT or
        EC.presence_of_all_elements_located(
            (By.CLASS_NAME, "participants-pair-game"))(driver))
    if sportsbetting.ABORT:
        raise sportsbetting.AbortException
    if is_sport_page:
        # Sport-level pages lazy-load events; scroll to force rendering.
        scroll(selenium_init.DRIVER["bwin"], "bwin", "grid-event-detail", 3,
               'getElementById("main-view")')
    for _ in range(10):  # retry until at least one match has been parsed
        inner_html = selenium_init.DRIVER["bwin"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        for line in soup.findAll():
            if "class" in line.attrs and "grid-group" in line["class"]:
                strings = list(line.stripped_strings)
                if "Pari sur le vainqueur" in strings:
                    # The winner market column can move; locate it by header.
                    index_column_result_odds = strings.index(
                        "Pari sur le vainqueur")
            if "class" in line.attrs and "participants-pair-game" in line[
                    "class"]:
                match = " - ".join(list(line.stripped_strings))
                # "@" means the teams are displayed away-first (US style).
                reversed_odds = "@" in match
                match = format_bwin_names(match)
            if "class" in line.attrs and "starting-time" in line["class"]:
                date_time = format_bwin_time(line.text)
            if "class" in line.attrs and "grid-group-container" in line[
                    "class"]:
                if line.findChildren(attrs={"class": "grid-option-group"}
                                     ) and "Pariez maintenant !" not in list(
                                         line.stripped_strings):
                    odds_line = line.findChildren(
                        attrs={"class": "grid-option-group"
                               })[index_column_result_odds]
                    odds = []
                    for odd in list(odds_line.stripped_strings):
                        try:
                            odds.append(float(odd))
                        except ValueError:
                            # Non-numeric cell: stop collecting this market.
                            break
                    if match:
                        if reversed_odds:
                            match, odds = reverse_match_odds(match, odds)
                        match_odds_hash[match] = {}
                        match_odds_hash[match]['odds'] = {"bwin": odds}
                        match_odds_hash[match]['date'] = date_time
                        match = None
                        date_time = "undefined"
        if match_odds_hash:
            return match_odds_hash
    return match_odds_hash
def parse_betclic(url):
    """
    Gets the odds available on a Betclic url.

    :param url: Betclic competition or sport page url
    :return: dict mapping "TeamA - TeamB" to
             {"odds": {"betclic": [floats]}, "date": datetime}
    :raises sb.UnavailableCompetitionException: competition no longer exists
    :raises sb.AbortException: if the global abort flag is set
    """
    selenium_init.DRIVER["betclic"].get(url)
    is_sport_page = len([x for x in url.split("/") if x]) == 3
    match_odds_hash = {}
    match = None
    date_time = None
    today = datetime.datetime.today().strftime("%d/%m/%Y")
    tomorrow = (datetime.datetime.today() +
                datetime.timedelta(days=1)).strftime("%d/%m/%Y")
    # Betclic redirects to the home page when the competition is unknown.
    if (selenium_init.DRIVER["betclic"].current_url ==
            "https://www.betclic.fr/"):
        raise sb.UnavailableCompetitionException
    # FIX: the previous expression `EC.invisibility_of_element_located(...)
    # or sb.ABORT` was dead code — the EC object is always truthy — so an
    # abort request could not interrupt the wait. The lambda fixes that.
    WebDriverWait(selenium_init.DRIVER["betclic"], 15).until(
        lambda driver: sb.ABORT or EC.invisibility_of_element_located(
            (By.TAG_NAME, "app-preloader"))(driver))
    if sb.ABORT:
        raise sb.AbortException
    if is_sport_page:
        # Sport-level pages lazy-load matches; scroll to force rendering.
        scroll(selenium_init.DRIVER["betclic"], "betclic", "betBox_match", 10)
    for _ in range(10):  # retry until at least one match has been parsed
        inner_html = selenium_init.DRIVER["betclic"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        if "Désolé, cette compétition n'est plus disponible." in str(soup):
            raise sb.UnavailableCompetitionException
        for line in soup.findAll():
            if "class" in line.attrs and "betBox_matchName" in line["class"]:
                match = " - ".join(list(line.stripped_strings))
            if line.name == "app-date":
                # Normalize relative day labels before parsing the date.
                string = " ".join(
                    line.text.replace("Aujourd'hui",
                                      today).replace("Demain",
                                                     tomorrow).split())
                date_time = datetime.datetime.strptime(string,
                                                       "%d/%m/%Y %H:%M")
            if "class" in line.attrs and "betBox_odds" in line["class"]:
                try:
                    odds = list(
                        map(
                            lambda x: float(x.text.replace(",", ".")),
                            list(
                                line.findChildren("span",
                                                  {"class": "oddValue"}))))
                    if match:
                        match_odds_hash[match] = {}
                        match_odds_hash[match]['odds'] = {"betclic": odds}
                        match_odds_hash[match]['date'] = date_time
                        match = None
                except ValueError:
                    # Odd displayed as a placeholder (e.g. locked): skip it.
                    pass
        if match_odds_hash:
            return match_odds_hash
    return match_odds_hash
def parse_unibet(url):
    """
    Returns the odds available on a unibet url.

    :param url: unibet competition or sport page url
    :return: dict mapping "TeamA - TeamB" to
             {"odds": {"unibet": [floats]}, "date": datetime}
    :raises sb.UnavailableCompetitionException: page/markets no longer exist
    :raises sb.AbortException: if the global abort flag is set
    """
    selenium_init.DRIVER["unibet"].get(url)
    match_odds_hash = {}
    is_sport_page = len([x for x in url.split("/") if x]) == 4
    match = ""
    # Midnight today, used to detect dates that belong to next year.
    today = datetime.datetime.today()
    today = datetime.datetime(today.year, today.month, today.day)
    date_time = None
    # FIX: the previous expression `EC.invisibility_of_element_located(...)
    # or sb.ABORT` never consulted ABORT (the EC object is always truthy);
    # the lambda lets an abort request end the wait immediately.
    WebDriverWait(selenium_init.DRIVER["unibet"], 30).until(
        lambda driver: sb.ABORT or EC.invisibility_of_element_located(
            (By.CLASS_NAME, "ui-spinner"))(driver))
    if sb.ABORT:
        raise sb.AbortException
    if is_sport_page:
        # Sport-level pages lazy-load events; scroll to force rendering.
        scroll(selenium_init.DRIVER["unibet"], "unibet", "calendar-event", 1)
    for _ in range(10):  # retry until at least one match has been parsed
        inner_html = selenium_init.DRIVER["unibet"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        if any(x in str(soup) for x in [
                "La page à laquelle vous souhaitez accéder n'existe plus.",
                "Aucun marché trouvé."
        ]):
            raise sb.UnavailableCompetitionException
        for line in soup.findAll():
            if "class" in line.attrs and "cell-event" in line["class"]:
                match = line.text.strip()
                if match.count(" - ") > 1:
                    # A team name itself contains " - ": rebuild the match
                    # name from the individual opponent labels instead.
                    opponents = list(
                        line.find_parent(attrs={
                            "class": "calendar-event"
                        }).findChildren(attrs={"class": "odd-longlabel"}))
                    match = opponents[0].text.replace(
                        " - ", "-") + " - " + opponents[-1].text.replace(
                            " - ", "-")
                if "-" not in match:
                    match = None
                    break
                # Matches a live tennis score suffix such as "(6-4, 3-2)".
                reg_exp = re.compile(
                    r'\(\s?[0-7]-[0-7]\s?(,\s?[0-7]-[0-7]\s?)*([1-9]*[0-9]\/[1-9]*[0-9])*\)'
                    r'|\([0-7]\-[0-7](\s[0-7]\-[0-7])*\)')
                if list(re.finditer(reg_exp, match)):  # match tennis live
                    match = match.split("(")[0].strip()
                if " - " not in match:
                    match = match.replace("-", " - ")
            if "class" in line.attrs and "datetime" in line["class"]:
                date_time = datetime.datetime.strptime(line.text,
                                                       "%d/%m/%Y %H:%M")
                # The page omits the year: a past date means next year.
                if date_time < today:
                    date_time = date_time.replace(year=date_time.year + 1)
            if "class" in line.attrs and "oddsbox" in line["class"]:
                odds = [
                    float(child.text)
                    for child in line.findChildren("span", {"class": "price"})
                    if child.text
                ]
                if match:
                    match_odds_hash[match] = {}
                    match_odds_hash[match]['odds'] = {"unibet": odds}
                    match_odds_hash[match]['date'] = date_time
                    match = None
        if match_odds_hash:
            return match_odds_hash
    return match_odds_hash
def parse_parionssport(url=""):
    """
    Returns the odds available on ParionsSport
    """
    # Default to a known competition page when no url is supplied.
    if not url:
        url = "https://www.enligne.parionssport.fdj.fr/paris-football/france/ligue-1-conforama"
    is_sport_page = "paris-" in url.split("/")[-1] and "?" not in url
    # Basket pages are handled match-by-match; disabled for now.
    is_basket = False  # "basket" in url
    selenium_init.DRIVER["parionssport"].get(url)
    if "maintenance technique" in selenium_init.DRIVER[
            "parionssport"].execute_script("return document.body.innerHTML"):
        raise sb.UnavailableSiteException
    # Redirect to the home page means the whole site is unavailable;
    # redirect to the sport root means the competition is gone.
    if (selenium_init.DRIVER["parionssport"].current_url ==
            "https://www.enligne.parionssport.fdj.fr/"):
        raise sb.UnavailableSiteException
    elif (not is_sport_page
          ) and selenium_init.DRIVER["parionssport"].current_url == "/".join(
              url.split("?")[0].split("/")[:4]):
        raise sb.UnavailableCompetitionException
    if is_sport_page:
        # Sport-level pages lazy-load matches; scroll to force rendering.
        scroll(selenium_init.DRIVER["parionssport"], "parionssport",
               "wpsel-desc", 5)
    match_odds_hash = {}
    urls_basket = []
    # Midnight today, used to detect dates that belong to next year.
    today = datetime.datetime.today()
    today = datetime.datetime(today.year, today.month, today.day)
    year = " " + str(today.year)
    date = ""
    match = ""
    date_time = None
    live = False
    for _ in range(10):  # retry until at least one match has been parsed
        inner_html = selenium_init.DRIVER["parionssport"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        for line in soup.findAll():
            if is_basket:
                # Collect per-match urls to parse individually afterwards.
                if ("href" in line.attrs and list(line.stripped_strings)
                        and "+" in list(line.stripped_strings)[0]):
                    urls_basket.append(
                        "https://www.enligne.parionssport.fdj.fr" +
                        line["href"])
            else:
                # NOTE(review): membership test on a Tag iterates its direct
                # children, not its full text — presumably `line.text` was
                # intended; confirm before changing, as it affects when the
                # exception fires.
                if "Nous vous prions de bien vouloir nous en excuser" in line:
                    raise sb.UnavailableCompetitionException
                if "class" in line.attrs and "wpsel-titleRubric" in line[
                        "class"]:
                    # Section header carries the day; the hour comes later
                    # from the per-match timer label.
                    if line.text.strip() == "aujourd'hui":
                        date = datetime.date.today().strftime("%A %d %B %Y")
                    else:
                        date = line.text.strip().lower() + year
                if "class" in line.attrs and "wpsel-timerLabel" in line[
                        "class"]:
                    try:
                        # Locale-dependent format, e.g. "samedi 01 août 2020 À 21h00";
                        # assumes a French locale is active — TODO confirm.
                        date_time = datetime.datetime.strptime(
                            date + " " + line.text, "%A %d %B %Y À %Hh%M")
                        # The page omits the year: a past date means next year.
                        if date_time < today:
                            date_time = date_time.replace(
                                year=date_time.year + 1)
                    except ValueError:
                        date_time = "undefined"
                if "class" in line.attrs and "wpsel-desc" in line["class"]:
                    match = line.text.split(" À")[0].strip().replace(" ", " ")
                if "class" in line.attrs and "tag__stateLive" in line["class"]:
                    live = True
                if "class" in line.attrs and "buttonLine" in line["class"]:
                    # Skip live matches: their button line follows the live tag.
                    if live:
                        live = False
                        continue
                    try:
                        odds = list(
                            map(lambda x: float(x.replace(",", ".")),
                                list(line.stripped_strings)))
                        match_odds_hash[match] = {}
                        match_odds_hash[match]['odds'] = {"parionssport": odds}
                        match_odds_hash[match]['date'] = date_time
                    except ValueError:
                        # Non-numeric button content: not an odds line.
                        pass
        if match_odds_hash:
            return match_odds_hash
        elif urls_basket:
            # Basket: merge the odds parsed from each individual match page.
            list_odds = []
            for match_url in urls_basket:
                if sb.ABORT:
                    break
                list_odds.append(parse_match_nba_parionssport(match_url))
            return merge_dicts(list_odds)
    return match_odds_hash
def parse_unibet(url):
    """
    Returns the odds available on a unibet url.

    :param url: unibet competition or sport page url
    :return: dict mapping "TeamA - TeamB" to
             {"odds": {"unibet": [floats]}, "date": datetime}
    :raises sportsbetting.UnavailableCompetitionException: page/markets gone
    :raises sportsbetting.AbortException: if the global abort flag is set
    """
    selenium_init.DRIVER["unibet"].get(url)
    match_odds_hash = {}
    is_sport_page = len([x for x in url.split("/") if x]) == 4
    match = ""
    # Midnight today, used to detect dates that belong to next year.
    today = datetime.datetime.today()
    today = datetime.datetime(today.year, today.month, today.day)
    date_time = None
    # Team names that legitimately contain a hyphen: unibet renders them as
    # " - ", which would otherwise be mistaken for the team separator.
    # Applied in order; same pairs as the original replace() chain.
    hyphenated_names = [
        ("Bordeaux - Bègles", "Bordeaux-Bègles"),
        ("Flensburg - Handewitt", "Flensburg-Handewitt"),
        ("TSV Hannovre - Burgdorf", "TSV Hannovre-Burgdorf"),
        ("Tremblay - en - France", "Tremblay-en-France"),
        ("FC Vion Zlate Moravce - Vrable", "FC Vion Zlate Moravce-Vrable"),
        ("Toulon St - Cyr Var (F)", "Toulon St-Cyr Var (F)"),
        ("Châlons - Reims", "Châlons-Reims"),
        ("Colo - Colo", "Colo-Colo"),
        ("Bourg - en - Bresse", "Bourg-en-Bresse"),
        ("Grande - Bretagne", "Grande-Bretagne"),
        ("Rostov - Don (F)", "Rostov-Don (F)"),
        ("CS Hammam - Lif", "CS Hammam-Lif"),
    ]
    # FIX: the previous expression `EC.invisibility_of_element_located(...)
    # or sportsbetting.ABORT` never consulted ABORT (the EC object is always
    # truthy); the lambda lets an abort request end the wait immediately.
    WebDriverWait(selenium_init.DRIVER["unibet"], 30).until(
        lambda driver: sportsbetting.ABORT or
        EC.invisibility_of_element_located(
            (By.CLASS_NAME, "ui-spinner"))(driver))
    if sportsbetting.ABORT:
        raise sportsbetting.AbortException
    if is_sport_page:
        # Sport-level pages lazy-load events; scroll to force rendering.
        scroll(selenium_init.DRIVER["unibet"], "unibet", "calendar-event", 1)
    for _ in range(10):  # retry until at least one match has been parsed
        inner_html = selenium_init.DRIVER["unibet"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        if any(x in str(soup) for x in [
                "La page à laquelle vous souhaitez accéder n'existe plus.",
                "Aucun marché trouvé."
        ]):
            raise sportsbetting.UnavailableCompetitionException
        for line in soup.findAll():
            if "class" in line.attrs and "cell-event" in line["class"]:
                match = line.text.strip()
                for displayed, canonical in hyphenated_names:
                    match = match.replace(displayed, canonical)
                if match.count(" - ") > 1:
                    # Still ambiguous: ask the operator (interactive runs only).
                    if not sportsbetting.TEST:
                        print(match)
                        match = input("Réentrez le nom du match :")
                if "-" not in match:
                    break
                # Matches a live tennis score suffix such as "(6-4, 3-2)".
                reg_exp = r'\(\s?[0-7]-[0-7]\s?(,\s?[0-7]-[0-7]\s?)*([1-9]*[0-9]\/[1-9]*[0-9])*\)|\([0-7]\-[0-7](\s[0-7]\-[0-7])*\)'
                if list(re.finditer(reg_exp, match)):  # match tennis live
                    match = match.split("(")[0].strip()
                if " - " not in match:
                    match = match.replace("-", " - ")
            if "class" in line.attrs and "datetime" in line["class"]:
                date_time = datetime.datetime.strptime(line.text,
                                                       "%d/%m/%Y %H:%M")
                # The page omits the year: a past date means next year.
                if date_time < today:
                    date_time = date_time.replace(year=date_time.year + 1)
            if "class" in line.attrs and "oddsbox" in line["class"]:
                odds = list(
                    map(lambda x: float(x.text),
                        list(line.findChildren("span", {"class": "price"}))))
                if match:
                    match_odds_hash[match] = {}
                    match_odds_hash[match]['odds'] = {"unibet": odds}
                    match_odds_hash[match]['date'] = date_time
                    match = None
        if match_odds_hash:
            return match_odds_hash
    return match_odds_hash
def parse_bwin(url):
    """
    Returns the odds available on a bwin url.

    :param url: bwin competition or sport page url
    :return: dict mapping "TeamA - TeamB" to
             {"odds": {"bwin": [floats]}, "date": datetime-like}
    :raises sb.AbortException: if the global abort flag is set
    """
    selenium_init.DRIVER["bwin"].maximize_window()
    selenium_init.DRIVER["bwin"].get(url)
    match_odds_hash = {}
    match = None
    date_time = None
    # Handball pages show the winner market in the second column by default.
    index_column_result_odds = 1 if "handball" in url else 0
    is_sport_page = "/0" in url
    reversed_odds = False
    live = False
    # FIX: the previous expression `EC.presence_of_all_elements_located(...)
    # or sb.ABORT` never consulted ABORT (the EC object is always truthy);
    # the lambda lets an abort request end the wait immediately.
    WebDriverWait(selenium_init.DRIVER["bwin"], 15).until(
        lambda driver: sb.ABORT or EC.presence_of_all_elements_located(
            (By.CLASS_NAME, "participants-pair-game"))(driver))
    if sb.ABORT:
        raise sb.AbortException
    if is_sport_page:
        # Sport-level pages lazy-load events; scroll to force rendering.
        scroll(selenium_init.DRIVER["bwin"], "bwin", "grid-event-detail", 3,
               'getElementById("main-view")')
    for _ in range(10):  # retry until at least one match has been parsed
        inner_html = selenium_init.DRIVER["bwin"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        for line in soup.findAll():
            if "class" in line.attrs and "grid-group" in line["class"]:
                strings = list(line.stripped_strings)
                if "Pari sur le vainqueur" in strings:
                    # The winner market column can move; locate it by header.
                    index_column_result_odds = strings.index(
                        "Pari sur le vainqueur")
            if "class" in line.attrs and "participants-pair-game" in line[
                    "class"]:
                teams = []
                if line.findChildren(attrs={"class": "participant-container"}):
                    names_and_countries = list(
                        line.findChildren(
                            attrs={"class": "participant-container"}))
                    for name_and_country in names_and_countries:
                        strings = list(name_and_country.stripped_strings)
                        # A second string (not "@") is the participant country.
                        if len(strings) == 2 and strings[1] != '@':
                            teams.append(strings[0] + " (" + strings[1] + ")")
                        else:
                            teams.append(strings[0])
                match = " - ".join(teams)
                # The away indicator means teams are displayed away-first.
                reversed_odds = bool(
                    line.findChildren(attrs={"class": "away-indicator"}))
            if "class" in line.attrs and "starting-time" in line["class"]:
                date_time = format_bwin_time(line.text)
            if "class" in line.attrs and "live-icon" in line["class"]:
                live = True
            if "class" in line.attrs and "grid-group-container" in line[
                    "class"]:
                if (line.findChildren(attrs={"class": "grid-option-group"})
                        and "Pariez maintenant !" not in list(
                            line.stripped_strings)):
                    odds_line = line.findChildren(
                        attrs={"class": "grid-option-group"
                               })[index_column_result_odds]
                    odds = []
                    for odd in list(odds_line.stripped_strings):
                        try:
                            odds.append(float(odd))
                        except ValueError:
                            # Non-numeric cell: stop collecting this market.
                            break
                    if match:
                        if reversed_odds:
                            match, odds = reverse_match_odds(match, odds)
                        if not live:
                            match_odds_hash[match] = {}
                            match_odds_hash[match]['odds'] = {"bwin": odds}
                            match_odds_hash[match]['date'] = date_time
                        else:
                            # Live matches are skipped; reset for the next one.
                            live = False
                        match = None
                        date_time = "undefined"
        if match_odds_hash:
            return match_odds_hash
    return match_odds_hash