def parse_betclic_api(id_league):
    """
    Get match-winner odds from the Betclic API for one competition.

    :param id_league: Betclic numeric competition identifier (inserted in the URL)
    :return: dict mapping "Team A - Team B" to {"date", "odds", "id"}

    NOTE(review): this module redefines parse_betclic_api further down (that
    version also records the competition name); the later definition shadows
    this one at import time.
    """
    url = (
        "https://offer.cdn.betclic.fr/api/pub/v2/competitions/{}?application=2&countrycode=fr"
        "&fetchMultipleDefaultMarkets=true&language=fr&sitecode=frfr".format(
            id_league))
    content = urllib.request.urlopen(url).read()
    parsed = json.loads(content)
    odds_match = {}
    if (not parsed) or "unifiedEvents" not in parsed:
        return odds_match
    for match in parsed["unifiedEvents"]:
        if match["isLive"]:
            continue
        contestants = match.get("contestants")
        if not contestants:
            continue
        name = " - ".join(contestant["name"] for contestant in contestants)
        # +2h: presumably converts UTC to Paris summer time — TODO confirm
        date = dateutil.parser.isoparse(
            match["date"]) + datetime.timedelta(hours=2)
        markets = match["markets"]
        if not markets:
            continue
        # Only read odds from a match-winner market (same filter as the later
        # revision of this function); markets[0] can otherwise be an unrelated
        # market, which would yield wrong odds.
        if markets[0]["name"].strip() not in [
                "Vainqueur du match", "Résultat du match", "Vainqueur Match",
                "Résultat", "Résultat du match (90 min.)",
                'Résultat du match (hors prolong.)'
        ]:
            continue
        odds = [selection["odds"] for selection in markets[0]["selections"]]
        odds_match[name] = {
            "date": truncate_datetime(date),
            "odds": {"betclic": odds},
            "id": {"betclic": match["id"]}
        }
    return odds_match
def parse_betway(url):
    """
    Scrape pre-match odds from a Betway competition page.

    A short URL (fewer than 6 path segments) is treated as a sport name and
    delegated to parse_sport_betway.
    """
    if url.count("/") < 5:
        return parse_sport_betway(url)
    page = str(requests.get(url).content)
    if "prematch_event_list:" not in page or "params:{}}," not in page:
        raise sb.UnavailableCompetitionException
    snippet = page.split("prematch_event_list:")[-1]
    snippet = snippet.split("params:{}},")[0] + "params:{}}"
    # Replace bare identifier tokens left in the embedded JS object with a
    # 1.01 placeholder so the snippet becomes decodable — presumably these
    # stand for unavailable odds (TODO confirm against a live page).
    for pattern, placeholder in ((r"[A-Za-z_$]{1,2}[0-9]?,", '1.01,'),
                                 (r"[A-Za-z_$]{1,2}[0-9]?\]", '1.01]'),
                                 (r"[A-Za-z_$]{1,2}[0-9]?\}", '1.01}')):
        snippet = re.sub(pattern, placeholder, snippet)
    events = demjson.decode(snippet)["data"]
    odds_match = {}
    for event in events:
        event_id = str(event["id"])
        start = truncate_datetime(dateutil.parser.isoparse(event["start"]))
        event_odds = [choice["odd"] for choice in event["choices"]]
        # Labels escape "/" as \u002F; use "-" instead.
        label = event["label"].replace("\\u002F", "-")
        odds_match[label] = {
            "date": start,
            "odds": {
                "betway": event_odds
            },
            "id": {
                "betway": event_id
            }
        }
    return odds_match
def parse_pinnacle(id_league):
    """
    Get odds from the Pinnacle guest API for one league.

    :param id_league: numeric league id as a string; a non-numeric value is
        treated as a sport name and delegated to parse_sport_pinnacle
    :return: dict mapping "Player A - Player B" to
        {"odds", "date", "id", "competition"}
    """
    if not id_league.isnumeric():
        return parse_sport_pinnacle(id_league)
    token = get_pinnacle_token()
    url_straight = "https://guest.api.arcadia.pinnacle.com/0.1/leagues/{}/markets/straight".format(
        id_league)
    url_matchup = "https://guest.api.arcadia.pinnacle.com/0.1/leagues/{}/matchups".format(
        id_league)
    headers = {'x-api-key': token}
    req_straight = urllib.request.Request(url_straight, headers=headers)
    req_matchup = urllib.request.Request(url_matchup, headers=headers)
    all_odds = json.loads(urllib.request.urlopen(req_straight).read())
    matches = json.loads(urllib.request.urlopen(req_matchup).read())
    odds_match = {}
    # Pinnacle sport name -> internal sport key used by the translation table.
    sports = {
        "Soccer": "football",
        "Tennis": "tennis",
        "Basketball": "basketball",
        "Rugby Union": "rugby",
        "Hockey": "hockey-sur-glace",
        "Handball": "handball"
    }
    for match in matches:
        if match["isLive"] or "participants" not in match or "id" not in match:
            continue
        sport = sports[match["league"]["sport"]["name"]]
        competition = match["league"]["name"]
        id_match = match["id"]
        # Translate participant names to the canonical local names when known.
        match_name = " - ".join(sb.TRANSLATION[sport].get(
            match["participants"][x]["name"],
            match["participants"][x]["name"]) for x in [0, 1])
        if "5 Sets" in match_name:
            continue
        # +2h: presumably converts UTC to Paris summer time — TODO confirm
        date_time = truncate_datetime(
            dateutil.parser.isoparse(match["startTime"]) +
            datetime.timedelta(hours=2))
        odds = get_pinnacle_odds_from_match_id(id_match, all_odds)
        if odds:
            odds_match[match_name] = {
                "odds": {
                    "pinnacle": odds
                },
                "date": date_time,
                "id": {
                    "pinnacle": str(id_match)
                },
                "competition": competition
            }
    return odds_match
def format_bwin_time(string):
    """
    Convert a time string scraped from bwin HTML into a datetime.

    Handles the relative forms "Aujourd'hui/", "Demain/", "Commence dans
    N min" and "Commence maintenant"; anything else is parsed as an absolute
    "%d/%m/%Y %H:%M" timestamp.
    """
    day_fmt = "%d/%m/%Y "
    normalized = string.replace("Aujourd'hui/",
                                datetime.datetime.today().strftime(day_fmt))
    normalized = normalized.replace(
        "Demain/",
        (datetime.datetime.today() +
         datetime.timedelta(days=1)).strftime(day_fmt))
    # Collapse any run of whitespace into single spaces.
    normalized = " ".join(normalized.split())
    if "Commence dans" in normalized:
        minutes_left = int(normalized.split("dans ")[1].split("min")[0])
        return (truncate_datetime(datetime.datetime.today()) +
                datetime.timedelta(minutes=minutes_left + 1))
    if "Commence maintenant" in normalized:
        return truncate_datetime(datetime.datetime.today())
    return datetime.datetime.strptime(normalized, "%d/%m/%Y %H:%M")
def parse_pmu_html(soup):
    """
    Extract the available odds from a PMU sports-betting HTML page.

    NOTE(review): this module defines parse_pmu_html a second time further
    down (with live/rugby-XIII handling and match ids); that later definition
    shadows this one at import time, so this version is dead code.

    :param soup: parsed competition page (BeautifulSoup tree)
    :return: dict mapping match name to {'odds': {'pmu': [...]}, 'date': ...}
    :raises sb.UnavailableSiteException: when the maintenance banner is shown
    :raises sb.UnavailableCompetitionException: when no match was collected
    """
    match_odds_hash = {}
    match = ""
    date_time = "undefined"  # sentinel kept when no parsable time was seen
    live = False
    handicap = False
    date = ""
    for line in soup.find_all():
        if "n'est pas accessible pour le moment !" in line.text:
            raise sb.UnavailableSiteException
        if "data-date" in line.attrs and "shadow" in line["class"]:
            # Day header row: remember the date for the following matches.
            date = line["data-date"]
        elif "class" in line.attrs and "trow--live--remaining-time" in line[
                "class"]:
            hour = line.text
            if "'" in hour:
                # "12'" style countdown: match starts within minutes.
                date_time = datetime.datetime.today() + datetime.timedelta(
                    minutes=int(hour.strip().strip("'")) + 1)
                date_time = truncate_datetime(date_time)
                continue
            try:
                date_time = datetime.datetime.strptime(
                    date + " " + hour, "%Y-%m-%d %Hh%M")
            except ValueError:
                date_time = "undefined"
        elif "class" in line.attrs and "trow--event--name" in line["class"]:
            string = "".join(list(line.stripped_strings))
            if "//" in string:
                # Match rows render the name as "Team A // Team B".
                live = line.find_parent(
                    "a")["data-name"] == "sportif.clic.paris_live.details"
                is_rugby_13 = line.find_parent(
                    "a")["data-sport_id"] == "rugby_a_xiii"
                if not (live or is_rugby_13):
                    handicap = False
                    if "+" in string or "Egalité" in string:
                        # Handicap market: odds must be fetched from the
                        # dedicated match page.
                        handicap = True
                        match, odds = parse_page_match_pmu(
                            "https://paris-sportifs.pmu.fr" +
                            line.parent["href"])
                    else:
                        match = string.replace(" - ", "-")
                        match = match.replace(" // ", " - ")
                        match = match.replace("//", " - ")
        elif "class" in line.attrs and "event-list-odds-list" in line["class"]:
            if not live:
                if not handicap:
                    # Non-handicap odds are read directly from the list row.
                    odds = list(
                        map(lambda x: float(x.replace(",", ".")),
                            list(line.stripped_strings)))
                match_odds_hash[match] = {}
                match_odds_hash[match]['odds'] = {"pmu": odds}
                match_odds_hash[match]['date'] = date_time
    if not match_odds_hash:
        raise sb.UnavailableCompetitionException
    return match_odds_hash
def parse_bwin_api(parameter):
    """
    Get Bwin odds from the bettingoffer fixtures API.

    :param parameter: query-string fragment selecting the competition/sport
    :return: dict mapping "Team A - Team B" to {"date", "odds", "id"};
        empty dict when no access token could be obtained
    """
    token = get_bwin_token()
    if not token:
        return {}
    url = (
        "https://cds-api.bwin.fr/bettingoffer/fixtures?x-bwin-accessid={}&lang=fr&country=FR&userCountry=FR"
        "&fixtureTypes=Standard&state=Latest&offerMapping=Filtered&offerCategories=Gridable&fixtureCategories=Gridable"
        "&{}&skip=0&take=1000&sortBy=Tags".format(token, parameter))
    parsed = json.loads(urllib.request.urlopen(url).read())
    odds_match = {}
    # Market names corresponding to a match-winner (1X2 / moneyline) market.
    winner_markets = [
        "1 X 2", "Pari sur le vainqueur (US)", "1X2 (temps réglementaire)",
        "Vainqueur 1 2"
    ]
    for fixture in parsed["fixtures"]:
        if fixture["stage"] == "Live":
            continue
        # " chez " in the fixture name: teams are displayed away-first,
        # so name and odds are swapped back below.
        reversed_odds = " chez " in fixture["name"]["value"]
        odds = []
        name = " - ".join(
            participant["name"]["value"]
            for participant in fixture["participants"])
        fixture_id = str(fixture["id"])  # renamed: `id` shadowed the builtin
        for game in fixture["games"]:
            if game["name"]["value"] not in winner_markets:
                continue
            for result in game["results"]:
                odds.append(result["odds"])
            break  # keep only the first winner market found
        # +2h: presumably converts UTC to Paris summer time — TODO confirm
        date = truncate_datetime(dateutil.parser.isoparse(
            fixture["startDate"])) + datetime.timedelta(hours=2)
        if reversed_odds:
            name, odds = reverse_match_odds(name, odds)
        odds_match[name] = {
            "date": date,
            "odds": {
                "bwin": odds
            },
            "id": {
                "bwin": fixture_id
            }
        }
    return odds_match
def parse_betclic_api(id_league):
    """
    Fetch match-winner odds for one Betclic competition via the public API.

    Returns a dict mapping "Team A - Team B" to
    {"date", "odds", "id", "competition"}; empty when the API returns no
    usable payload.
    """
    url = (
        "https://offer.cdn.betclic.fr/api/pub/v2/competitions/{}?application=2&countrycode=fr"
        "&fetchMultipleDefaultMarkets=true&language=fr&sitecode=frfr".format(
            id_league))
    payload = requests.get(url).json()
    results = {}
    if (not payload) or "unifiedEvents" not in payload:
        return results
    competition = payload["name"]
    # Only these market names carry match-winner (1X2) odds.
    winner_markets = [
        "Vainqueur du match", "Résultat du match", "Vainqueur Match",
        "Résultat", "Résultat du match (90 min.)",
        'Résultat du match (hors prolong.)'
    ]
    for event in payload["unifiedEvents"]:
        if event["isLive"]:
            continue
        if "contestants" not in event:
            continue
        contestants = event["contestants"]
        if not contestants:
            continue
        match_name = " - ".join(team["name"] for team in contestants)
        kickoff = dateutil.parser.isoparse(
            event["date"]) + datetime.timedelta(hours=2)
        markets = event["markets"]
        if not markets:
            continue
        if markets[0]["name"].strip() not in winner_markets:
            continue
        selections = markets[0]["selections"]
        results[match_name] = {
            "date": truncate_datetime(kickoff),
            "odds": {"betclic": [sel["odds"] for sel in selections]},
            "id": {"betclic": event["id"]},
            "competition": competition
        }
    return results
def parse_netbet(url):
    """
    Return the odds available on netbet.

    :param url: either a full competition URL, or a bare sport name
        ("football", "tennis", ...) which switches to the top-bets page
        filtered to that sport
    :return: dict mapping match name to {'odds': {'netbet': [...]}, 'date': ...}
    :raises sb.UnavailableSiteException: after 3 failed fetch attempts
    :raises sb.UnavailableCompetitionException: when the page is empty or
        redirects to the home page
    """
    sport = None
    if url in [
            "football", "tennis", "basketball", "hockey-glace", "rugby",
            "handball"
    ]:
        sport = url
        url = "https://www.netbet.fr/top-paris"
    # NOTE(review): these adjacent literals concatenate without separators,
    # yielding ")AppleWebKit..." — an unusual User-Agent, kept as-is.
    headers = {
        "User-Agent":
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4)"
            "AppleWebKit/537.36 (KHTML, like Gecko)"
            "Chrome/83.0.4103.97"
            "Safari/537.36"
    }
    # Retry up to 3 times, rotating the User-Agent after each failure.
    for _ in range(3):
        try:
            request = urllib.request.Request(url, None, headers)
            response = urllib.request.urlopen(request, timeout=5)
            soup = BeautifulSoup(response, features="lxml")
            break
        except http.client.IncompleteRead:
            headers = {"User-Agent": fake_useragent.UserAgent().random}
            print("User agent change")
        except urllib.error.HTTPError:
            headers = {"User-Agent": fake_useragent.UserAgent().random}
            print("User agent change (403)")
        except urllib.error.URLError:
            headers = {"User-Agent": fake_useragent.UserAgent().random}
            print("User agent change (Timeout)")
    else:
        # All 3 attempts failed.
        raise sb.UnavailableSiteException
    if soup.find(attrs={"class": "none"}):
        raise sb.UnavailableCompetitionException
    if response.geturl() == "https://www.netbet.fr/":
        # Redirect to the home page means the competition does not exist.
        raise sb.UnavailableCompetitionException
    match_odds_hash = {}
    today = datetime.datetime.today()
    today = datetime.datetime(today.year, today.month, today.day)
    date = ""
    year = " " + str(today.year)
    match = ""
    date_time = None
    valid_match = True
    for line in soup.find_all():
        if sport and "class" in line.attrs and "nb-link-event" in line[
                "class"] and "href" in line.attrs:
            # On the top-bets page, keep only events of the requested sport.
            valid_match = sport + "/" in line["href"]
        if "class" in line.attrs and "nb-event_datestart" in line["class"]:
            # Displayed dates lack a year; append the current one.
            date = list(line.stripped_strings)[0] + year
            if "Auj." in date:
                date = datetime.datetime.today().strftime("%d/%m %Y")
        elif "class" in line.attrs and "nb-event_timestart" in line["class"]:
            hour = line.text
            if " min" in hour:
                # "N min" countdown: match starts within minutes.
                date_time = datetime.datetime.today() + datetime.timedelta(
                    minutes=int(hour.strip(" min")))
                date_time = truncate_datetime(date_time)
                continue
            try:
                date_time = datetime.datetime.strptime(
                    date + " " + hour, "%d/%m %Y %H:%M")
                if date_time < today:
                    # A past date must belong to next year (year was guessed).
                    date_time = date_time.replace(year=date_time.year + 1)
            except ValueError:
                date_time = "undefined"
        elif "class" in line.attrs and "nb-event_actors" in line["class"]:
            match = " - ".join(
                list(
                    map(lambda x: x.replace(" - ", "-"),
                        line.stripped_strings)))
            reg_exp = \
                r'\[[0-7]\/[0-7]\s?([0-7]\/[0-7]\s?)*\]|\[[0-7]\-[0-7]\s?([0-7]\-[0-7]\s?)*\]'
            if list(re.finditer(reg_exp, match)):  # live tennis match
                # Strip the in-progress score suffix like "[6/4 2/1]".
                match = match.split("[")[0].strip()
        elif "class" in line.attrs and "nb-event_odds_wrapper" in line["class"]:
            try:
                odds = list(
                    map(lambda x: float(x.replace(",", ".")),
                        list(line.stripped_strings)[1::2]))
                if valid_match and match and match not in match_odds_hash and date_time:
                    match_odds_hash[match] = {}
                    match_odds_hash[match]['odds'] = {"netbet": odds}
                    match_odds_hash[match]['date'] = date_time
            except ValueError:  # live match (odds not available)
                pass
    return match_odds_hash
def parse_pmu_html(soup):
    """
    Return the odds available on a PMU HTML page.

    :param soup: parsed competition page (BeautifulSoup tree)
    :return: dict mapping match name to
        {'odds': {'pmu': [...]}, 'date': ..., 'id': {'pmu': ...}}
    :raises sb.UnavailableSiteException: when the maintenance banner is shown
    :raises sb.UnavailableCompetitionException: when no match was collected
    """
    match_odds_hash = {}
    match = ""
    date_time = "undefined"  # sentinel kept when no parsable time was seen
    live = False
    handicap = False
    # FIX: initialize is_rugby_13 — it is read in the odds-list branch and
    # previously raised NameError if an odds list appeared before the first
    # event-name row.
    is_rugby_13 = False
    date = ""
    match_id = None
    for line in soup.find_all():
        if "n'est pas accessible pour le moment !" in line.text:
            raise sb.UnavailableSiteException
        if "data-date" in line.attrs and "shadow" in line["class"]:
            # Day header row: remember the date for the following matches.
            date = line["data-date"]
        elif "class" in line.attrs and "trow--live--remaining-time" in line["class"]:
            hour = line.text
            if "'" in hour:
                # "12'" style countdown: match starts within minutes.
                date_time = datetime.datetime.today() + datetime.timedelta(
                    minutes=int(hour.strip().strip("'")) - 1)
                date_time = truncate_datetime(date_time)
                continue
            try:
                date_time = datetime.datetime.strptime(
                    date + " " + hour, "%Y-%m-%d %Hh%M")
            except ValueError:
                date_time = "undefined"
        elif "class" in line.attrs and "trow--live--logo-active" in line["class"]:
            # Live badge precedes the event row: skip the upcoming match.
            live = True
        elif "class" in line.attrs and "trow--event--name" in line["class"]:
            string = "".join(list(line.stripped_strings))
            if "//" in string:
                # Match rows render the name as "Team A // Team B".
                try:
                    is_rugby_13 = line.find_parent("a")["data-sport_id"] == "rugby_a_xiii"
                except TypeError:
                    is_rugby_13 = False
                if is_rugby_13 or live:
                    continue
                handicap = False
                if "+" in string or "Egalité" in string:
                    # Handicap market: odds must be fetched from the match page.
                    handicap = True
                    match, odds = parse_page_match_pmu(
                        "https://paris-sportifs.pmu.fr" + line.parent["href"])
                else:
                    match = string.replace(" - ", "-")
                    match = match.replace(" // ", " - ")
                    match = match.replace("//", " - ")
        elif "class" in line.attrs and "event-list-odds-list" in line["class"]:
            if live or is_rugby_13:
                # Consume the odds row of a skipped match and reset the flags.
                live = False
                is_rugby_13 = False
                continue
            if not handicap:
                odds = []
                for child in line.findChildren("a", recursive=True):
                    if "class" in child.attrs and "btn-disabled" in child["class"]:
                        # Disabled button: odd unavailable, use placeholder.
                        odds.append(1.01)
                    else:
                        odds.append(float(child.text.strip().replace(",", ".")))
            match_odds_hash[match] = {}
            match_odds_hash[match]['odds'] = {"pmu": odds}
            match_odds_hash[match]['date'] = date_time
            match_odds_hash[match]['id'] = {"pmu": match_id}
        elif "data-ev_id" in line.attrs:
            match_id = line["data-ev_id"]
    if not match_odds_hash:
        raise sb.UnavailableCompetitionException
    return match_odds_hash
def get_odds_from_back_lay_market_ids(back_lay_markets):
    """
    Fetch Betfair exchange odds for the given back/lay market ids and return
    the best commission-adjusted odds per event.

    :param back_lay_markets: list of dicts whose values are market ids
        (falsy entries are skipped)
    :return: dict mapping "Team A - Team B" to {"odds", "date", "id"}
    """
    market_ids = [
        item for sublist in [list(x.values()) for x in back_lay_markets]
        for item in sublist if item
    ]
    token = get_betfair_token()
    odds_match = {}
    # FIX: the query string contained "¤cyCode=EUR" — mojibake produced by
    # decoding "&curren" as the HTML entity &curren; (¤). Restored the
    # intended "&currencyCode=EUR" parameter.
    url = "https://ero.betfair.com/www/sports/exchange/readonly/v1/bymarket?_ak={}&alt=json&currencyCode=EUR&locale=fr_FR&marketIds={}&rollupLimit=10&rollupModel=STAKE&types=MARKET_DESCRIPTION,EVENT,RUNNER_DESCRIPTION,RUNNER_EXCHANGE_PRICES_BEST".format(
        token, ",".join(market_ids))
    content = requests.get(url).content
    parsed = json.loads(content)
    event_type = parsed.get("eventTypes", {})
    if not event_type:
        return {}
    event_nodes = event_type[0].get("eventNodes", {})
    for event in event_nodes:
        reversed_odds = False
        name = event["event"]["eventName"].replace(" v ", " - ")
        if " @ " in name:
            # US-style "Away @ Home": swap to home-first and remember to
            # reverse the odds accordingly.
            name = " - ".join(
                reversed(event["event"]["eventName"].split(" @ ")))
            reversed_odds = True
        # +2h: presumably converts UTC to Paris summer time — TODO confirm
        date = truncate_datetime(
            dateutil.parser.isoparse(event["event"]["openDate"]) +
            datetime.timedelta(hours=2))
        event_id = str(event["eventId"])
        odds = [[], []]  # [back odds, lay odds]
        for i, market_node in enumerate(event.get("marketNodes", {})):
            runners = market_node.get("runners", {})
            # With exactly 2 runners, one market carries both back and lay.
            back_eq_lay = len(runners) == 2
            for runner in runners:
                exchange = runner.get("exchange", {})
                lay = i % 2  # markets alternate back / lay
                if back_eq_lay or not lay:
                    odd_back = float(
                        exchange.get("availableToBack", [{
                            "price": 1.01
                        }])[0]["price"])
                    # Apply the 3% exchange commission to the back price.
                    odd = round(1 + (1 - 0.03) * (odd_back - 1), 3)
                    if runner["description"]["runnerName"] in ["Match Nul"]:
                        # Keep the draw in the middle slot (1 X 2 order).
                        odds[0].insert(1, odd)
                    else:
                        odds[0].append(odd)
                if back_eq_lay or lay:
                    odd_lay = float(
                        exchange.get("availableToLay", [{
                            "price": 100
                        }])[0]["price"])
                    # Lay price converted to an equivalent back odd, with
                    # commission. NOTE(review): odd_lay == 1 would divide by
                    # zero — presumably never returned by the API, TODO confirm.
                    odd = round(1 + (1 - 0.03) / (odd_lay - 1), 3)
                    if runner["description"]["runnerName"] in ["Home or Away"]:
                        odds[1].insert(1, odd)
                    else:
                        odds[1].append(odd)
        # Lay odds come out in reverse outcome order.
        odds[1].reverse()
        best_odds = odds[0]
        if odds[1] and len(odds[0]) == len(odds[1]):
            best_odds = [
                max(odd_lay, odd_back) for odd_lay, odd_back in zip(*odds)
            ]
        if reversed_odds:
            best_odds.reverse()
        odds_match[name] = {
            "odds": {
                "betfair": best_odds
            },
            "date": date,
            "id": {
                "betfair": event_id
            }
        }
    return odds_match