Example #1
0
def parse_competitions(competitions, sport="football", *sites):
    """
    Return the odds for several competitions
    """
    selenium_sites = {
        "betstars", "bwin", "joa", "parionssport", "pasinobet", "unibet"
    }
    selenium_required = (inspect.currentframe().f_back.f_code.co_name
                         == "<module>"
                         and (selenium_sites.intersection(sites) or not sites))
    if selenium_required:
        selenium_init.start_selenium()
    list_odds = []
    for competition in competitions:
        list_odds.append(parse_competition(competition, sport, *sites))
        print()
    if selenium_required:
        selenium_init.DRIVER.quit()
    if inspect.currentframe().f_back.f_code.co_name == "<module>":
        try:
            toaster = ToastNotifier()
            toaster.show_toast("Sports-betting", "Fin du parsing")
        except NameError:
            subprocess.Popen(['notify-send', "Fin du parsing"])
    if inspect.currentframe().f_back.f_code.co_name != "<module>":
        return merge_dicts(list_odds)
    sportsbetting.ODDS[sport] = merge_dicts(list_odds)
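The inspect.currentframe().f_back.f_code.co_name checks above decide whether the function was called from top-level script code (whose code object is named "<module>") or from another function, and only return a value in the latter case. A minimal, self-contained sketch of that idiom; the helper names below are illustrative only.

import inspect

def called_from_module_level():
    # True when the direct caller is top-level script code, whose code object
    # is named "<module>"; False when called from inside another function.
    return inspect.currentframe().f_back.f_code.co_name == "<module>"

def wrapper():
    return called_from_module_level()

print(called_from_module_level())  # True at module level
print(wrapper())                   # False, the caller is wrapper()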
Example #2
0
def parse_buteurs():
    """
    Store the odds of the goalscorer duels available on Betclic
    """
    competitions = ["france ligue 1", "espagne liga", "italie serie", "allemagne bundesliga"]
    list_odds = []
    for competition in competitions:
        print(get_id_formatted_competition_name(competition, "football")[1])
        url = get_competition_url(competition, "football", "betclic")
        list_odds.append(parse_buteurs_betclic(url))
    if inspect.currentframe().f_back.f_code.co_name != "<module>":
        return merge_dicts(list_odds)
    sportsbetting.ODDS["buteurs"] = merge_dicts(list_odds)
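merge_dicts is a project helper that is not shown in these examples. As a rough, assumed stand-in for how the per-competition results might be combined (the real helper may treat nested odds differently), a minimal sketch:

def merge_dicts(dicts):
    # Hypothetical stand-in: fold a list of {match: info} dicts into one dict,
    # skipping empty entries; later entries win on duplicate keys.
    merged = {}
    for partial in dicts:
        if partial:
            merged.update(partial)
    return merged

print(merge_dicts([
    {"PSG - OM": {"odds": {"betclic": [1.8, 3.6, 4.2]}}},
    {"Lyon - Lille": {"odds": {"betclic": [2.1, 3.3, 3.4]}}},
]))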
Example #3
0
async def get_json_sport_pasinobet_api(sport, barriere):
    """
    Get odds JSON from sport
    """
    site_id = "1869622" if barriere else "599"
    async with websockets.connect(
            'wss://swarm-2.vbet.fr/',
            ssl=ssl.SSLContext(protocol=ssl.PROTOCOL_TLS)) as websocket:
        data = {"command":"request_session",
                "params":{"language":"fra", "site_id":site_id}}
        await websocket.send(json.dumps(data))
        response = await websocket.recv()
        data = {"command":"get",
                "params":{"source":"betting",
                          "what":{"competition":["id", "name"]},
                          "where":{"sport":{"name":sport},
                                   "game":{"@or":[{"type":{"@in":[0, 2]}},
                                                  {"visible_in_prematch":1, "type":1}]}}}}
        await websocket.send(json.dumps(data))
        response = await websocket.recv()
        parsed = json.loads(response)
        list_odds = []
        for league in parsed["data"]["data"]["competition"].values():
            if "Compétition" in league["name"]:
                continue
            data = ('{"command":"get","params":{"source":"betting","what":{"competition":["teams_reversed"], '
                    '"game":["id", "start_ts","team1_name","team2_name","is_started"],"market":["event"],"event":["price","order"]},'
                    '"where":{"competition":{"id":'+str(league["id"])+'},"game":{"@or":[{"type":{"@in":[0,2]}},'
                    '{"visible_in_prematch":1,"type":1}]},"market":{"display_key":"WINNER", "type":{"@in":["P1P2", "P1XP2"]}}'
                    '}}}}')
            await websocket.send(data)
            response = await websocket.recv()
            parsed_league = json.loads(response)
            odds_league = get_odds_from_league_json(parsed_league, barriere)
            list_odds.append(odds_league)
        return merge_dicts(list_odds)
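The "get" command above is assembled by string concatenation. A sketch of the same request built as a Python dict and serialized with json.dumps, which avoids quoting mistakes; the field layout simply mirrors the concatenated string and is not re-validated against the API here.

import json

def build_swarm_get_request(competition_id):
    # Same payload as the concatenated string above, expressed as a dict.
    request = {
        "command": "get",
        "params": {
            "source": "betting",
            "what": {"competition": ["teams_reversed"],
                     "game": ["id", "start_ts", "team1_name", "team2_name", "is_started"],
                     "market": ["event"],
                     "event": ["price", "order"]},
            "where": {"competition": {"id": competition_id},
                      "game": {"@or": [{"type": {"@in": [0, 2]}},
                                       {"visible_in_prematch": 1, "type": 1}]},
                      "market": {"display_key": "WINNER",
                                 "type": {"@in": ["P1P2", "P1XP2"]}}}
        }
    }
    return json.dumps(request)

print(build_swarm_get_request(1234))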
Example #4
0
def parse_sport_parionssport(sport):
    """
    Get ParionsSport odds from sport
    """
    sports_alias = {
        "football": "FOOT",
        "basketball": "BASK",
        "tennis": "TENN",
        "handball": "HAND",
        "rugby": "RUGU",
        "hockey-sur-glace": "ICEH"
    }
    url = "https://www.enligne.parionssport.fdj.fr/lvs-api/leagues?sport={}".format(
        sports_alias[sport])
    req = urllib.request.Request(
        url, headers={'X-LVS-HSToken': sb.TOKENS["parionssport"]})
    content = urllib.request.urlopen(req).read()
    competitions = json.loads(content)
    list_odds = []
    for competition in competitions:
        for id_competition in competition["items"]:
            if "Cotes Boostées" in competition["name"]:
                continue
            list_odds.append(parse_parionssport_api(id_competition))
    return merge_dicts(list_odds)
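The authenticated call above boils down to a urllib Request carrying a custom token header (the token itself comes from sb.TOKENS, which is not shown here). A small offline sketch of that pattern; the token value is a placeholder and the network call stays commented out.

import urllib.request

url = "https://www.enligne.parionssport.fdj.fr/lvs-api/leagues?sport=FOOT"
req = urllib.request.Request(url, headers={"X-LVS-HSToken": "<token obtained elsewhere>"})
print(req.full_url)
print(req.headers)  # urllib normalizes the capitalization of header names
# content = urllib.request.urlopen(req).read()  # real call, needs a valid token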
Example #5
0
def parse_sport_pmu(sport):
    """
    Return the odds available on pmu for a given sport
    """
    list_odds = []
    id_sport = {
        "football": 8,
        "tennis": 11,
        "rugby": 7,
        "hockey-sur-glace": 44,
        "basketball": 5
    }
    i = 0
    _id = id_sport[sport]
    while True:
        url = "https://paris-sportifs.pmu.fr/pservices/more_events/{0}/{1}/pmu-event-list-load-more-{0}".format(
            _id, i)
        response = urllib.request.urlopen(url)
        data = json.loads(response.read())
        soup = BeautifulSoup(data[1]["html"], features="lxml")
        try:
            list_odds.append(parse_pmu_html(soup))
            i += 1
        except sb.UnavailableCompetitionException:
            break
    return merge_dicts(list_odds)
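The PMU loop above keeps requesting page i until the parser raises sb.UnavailableCompetitionException, which signals that no more events are available. The same control flow in a self-contained form, with the network call replaced by a stub:

class UnavailableCompetitionException(Exception):
    # Stand-in for sb.UnavailableCompetitionException.
    pass

def fetch_page(i):
    # Hypothetical stub: pretend only pages 0-2 exist.
    if i > 2:
        raise UnavailableCompetitionException
    return {"page": i}

pages = []
i = 0
while True:
    try:
        pages.append(fetch_page(i))
        i += 1
    except UnavailableCompetitionException:
        break
print(pages)  # [{'page': 0}, {'page': 1}, {'page': 2}]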
Example #6
0
def parse_betfair(id_league):
    """
    Get Betfair odds from league id
    """
    event_ids = get_event_ids(id_league)
    split_events = [event_ids[x:x + 5] for x in range(0, len(event_ids), 5)]
    odds = []
    for event_group in split_events:
        odds.append(
            get_odds_from_back_lay_market_ids(
                get_back_lay_markets(event_group)))
    return merge_dicts(odds)
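The slicing expression above splits the event ids into groups of at most five so that each market request stays small. The idiom in isolation:

event_ids = list(range(12))
split_events = [event_ids[x:x + 5] for x in range(0, len(event_ids), 5)]
print(split_events)  # [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [10, 11]]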
Example #7
0
def parse_joa_sport(url):
    """
    Return the odds available on joa for a given sport
    """
    selenium_init.DRIVER["joa"].maximize_window()
    selenium_init.DRIVER["joa"].get(url)
    list_odds = []
    cookies = WebDriverWait(selenium_init.DRIVER["joa"], 15).until(
        EC.element_to_be_clickable((By.CLASS_NAME, "cc-cookie-accept"))
        or sb.ABORT)
    if sb.ABORT:
        raise sb.AbortException
    cookies.click()
    try:
        filtres = WebDriverWait(selenium_init.DRIVER["joa"], 15).until(
            EC.presence_of_all_elements_located((By.CLASS_NAME, "Filtres"))
            or sb.ABORT)
        if sb.ABORT:
            raise sb.AbortException
    except selenium.common.exceptions.TimeoutException:
        raise sb.UnavailableCompetitionException
    for i, _ in enumerate(filtres):
        selenium_init.DRIVER["joa"].execute_script("window.scrollTo(0, 0)")
        selenium_init.DRIVER["joa"].execute_script(
            'document.getElementsByClassName("Filtres")[{}].click()'.format(i))
        match_odds_hash = {}
        try:
            WebDriverWait(selenium_init.DRIVER["joa"], 15).until(
                EC.presence_of_all_elements_located(
                    (By.CLASS_NAME, "bet-event-name")) or sb.ABORT)
            if sb.ABORT:
                raise sb.AbortException
        except selenium.common.exceptions.TimeoutException:
            raise sb.UnavailableCompetitionException
        while True:
            try:
                show_more = WebDriverWait(
                    selenium_init.DRIVER["joa"], 5).until(
                        EC.presence_of_all_elements_located(
                            (By.CLASS_NAME, "show-more-leagues"))
                        or sb.ABORT)[0]
                if sb.ABORT:
                    raise sb.AbortException
                show_more.find_element_by_tag_name("button").click()
            except selenium.common.exceptions.TimeoutException:
                break
        inner_html = selenium_init.DRIVER["joa"].execute_script(
            "return document.body.innerHTML")
        match_odds_hash = parse_joa_html(inner_html)
        if match_odds_hash:
            list_odds.append(match_odds_hash)
    return merge_dicts(list_odds)
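Note that an expression like EC.presence_of_all_elements_located(...) or sb.ABORT is evaluated once, before WebDriverWait starts polling, so the abort flag is only honoured by the explicit check that follows the wait. A sketch of a wait condition that re-checks an abort flag on every poll instead; the AbortRequested exception and the abort_flag callable are illustrative, not the project's API.

from selenium.webdriver.support import expected_conditions as EC

class AbortRequested(Exception):
    # Illustrative stand-in for sb.AbortException.
    pass

def located_or_abort(locator, abort_flag):
    condition = EC.presence_of_all_elements_located(locator)
    def check(driver):
        if abort_flag():
            raise AbortRequested
        return condition(driver)
    return check

# Possible use with a live driver:
# filtres = WebDriverWait(selenium_init.DRIVER["joa"], 15).until(
#     located_or_abort((By.CLASS_NAME, "Filtres"), lambda: sb.ABORT))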
Example #8
0
def parse_buteurs():
    """
    Store the odds of the goalscorer duels available on Betclic
    """
    competitions = [
        "france ligue 1", "espagne liga", "italie serie",
        "allemagne bundesliga"
    ]
    list_odds = []
    for competition in competitions:
        print(get_id_formated_competition_name(competition, "football")[1])
        url = get_competition_url(competition, "football", "betclic")
        list_odds.append(parse_buteurs_betclic(url))
    if inspect.currentframe().f_back.f_code.co_name == "<module>":
        try:
            toaster = ToastNotifier()
            toaster.show_toast("Sports-betting", "Fin du parsing")
        except NameError:
            subprocess.Popen(['notify-send', "Fin du parsing"])
    if inspect.currentframe().f_back.f_code.co_name != "<module>":
        return merge_dicts(list_odds)
    sportsbetting.ODDS["buteurs"] = merge_dicts(list_odds)
Example #9
0
def parse_sport_pokerstars(sport):
    """
    Get pokerstars odds from sport
    """
    url = ("https://sports.pokerstarssports.fr/sportsbook/v1/api/getSportTree?sport={}&includeOutrights=false"
           "&includeEvents=false&includeCoupons=true&channelId=11&locale=fr-fr&siteId=32".format(sport.upper()))
    req = requests.get(url)
    parsed = req.json()
    list_odds = []
    competitions = parsed["categories"]
    for competition in competitions:
        id_competition = competition["id"]
        list_odds.append(parse_pokerstars_api(id_competition))
    return merge_dicts(list_odds)
Example #10
0
def parse_sport_betclic(id_sport):
    """
    Get odds from Betclic sport id
    """
    url = (
        "https://offer.cdn.betclic.fr/api/pub/v2/sports/{}?application=2&countrycode=fr&language=fr&sitecode=frfr"
        .format(id_sport))
    req = requests.get(url)
    parsed = req.json()
    list_odds = []
    competitions = parsed["competitions"]
    for competition in competitions:
        id_competition = competition["id"]
        list_odds.append(parse_betclic_api(id_competition))
    return merge_dicts(list_odds)
Example #11
0
def parse_parionssport(url):
    """
    Get ParionsSport odds from url
    """
    if "paris-" in url.split("/")[-1] and "?" not in url:
        sport = url.split("/")[-1].split("paris-")[-1]
        return parse_sport_parionssport(sport)
    if "filtre" not in url:
        print("Wrong parionssport url")
    regex = re.findall(r'\d+', url.split("filtre=")[-1])
    list_odds = []
    odds = {}
    for id_league in regex:
        try:
            odds = parse_parionssport_api("p" + str(id_league))
        except TypeError:
            odds = {}
        list_odds.append(odds)
    return merge_dicts(list_odds)
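The re.findall call above pulls every run of digits out of the filtre= query string, each one being treated as a league id. In isolation, on a made-up URL:

import re

url = "https://www.enligne.parionssport.fdj.fr/paris-football?filtre=123,456"
ids = re.findall(r'\d+', url.split("filtre=")[-1])
print(ids)  # ['123', '456']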
Example #12
0
def parse_sport_pokerstars(sport):
    """
    Get pokerstars odds from sport
    """
    url = (
        "https://sports.pokerstarssports.fr/sportsbook/v1/api/getSportTree?sport={}&includeOutrights=false"
        "&includeEvents=false&includeCoupons=true&channelId=11&locale=fr-fr&siteId=32&foo={}"
        .format(sport.upper(),
                str(random.random())[2:10]))
    req = requests.get(url)
    if req.status_code == 503:
        raise sb.UnavailableSiteException
    parsed = req.json()
    list_odds = []
    competitions = parsed["categories"]
    for competition in competitions:
        id_competition = competition["id"]
        list_odds.append(parse_pokerstars_api(id_competition))
    return merge_dicts(list_odds)
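Compared with the earlier pokerstars example, this version appends a random foo=... query parameter as a cache-buster, so each request looks unique to intermediate caches, and treats HTTP 503 as the site being unavailable. The cache-buster on its own (no request is sent here):

import random

cache_buster = str(random.random())[2:10]  # digit slice of a random float, as in the URL above
url = ("https://sports.pokerstarssports.fr/sportsbook/v1/api/getSportTree"
       "?sport=FOOTBALL&includeOutrights=false&includeEvents=false"
       "&includeCoupons=true&channelId=11&locale=fr-fr&siteId=32"
       "&foo=" + cache_buster)
print(url)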
Example #13
0
def parse_competitions(competitions, sport="football", *sites):
    """
    Return the odds for several competitions
    """
    selenium_sites = {"betstars", "bwin", "joa", "parionssport", "pasinobet", "unibet"}
    selenium_required = (
            inspect.currentframe().f_back.f_code.co_name in ["<module>", "parse_thread"]
            and (selenium_sites.intersection(sites) or not sites))
    if selenium_required:
        selenium_init.start_selenium()
    list_odds = []
    sportsbetting.PROGRESS = 0
    sportsbetting.SUB_PROGRESS_LIMIT = len(competitions)
    for competition in competitions:
        list_odds.append(parse_competition(competition, sport, *sites))
        print()
    if selenium_required:
        selenium_init.DRIVER.quit()
    #     if inspect.currentframe().f_back.f_code.co_name != "<module>":
    #         return merge_dicts(list_odds)
    sportsbetting.ODDS[sport] = merge_dicts(list_odds)
Example #14
0
def parse_sport_pinnacle(sport):
    """
    Get Pinnacle odds from sport
    """
    id_sports = {
        "football": 29,
        "tennis": 33,
        "basketball": 4,
        "rugby": 27,
        "hockey-sur-glace": 19,
        "handball": 18
    }
    url = "https://guest.api.arcadia.pinnacle.com/0.1/sports/{}/leagues?all=false".format(
        id_sports[sport])
    token = get_pinnacle_token()
    req = urllib.request.Request(url, headers={'x-api-key': token})
    content = urllib.request.urlopen(req).read()
    leagues = json.loads(content)
    list_odds = []
    for league in leagues:
        if any([x in league["name"] for x in ["ITF", "Challenger"]]):
            continue
        id_league = str(league["id"])
        list_odds.append(parse_pinnacle(id_league))
    return merge_dicts(list_odds)
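The any(...) test above drops leagues whose name contains an excluded substring, so ITF and Challenger tennis events are skipped. The same filter in isolation, on made-up league data:

leagues = [{"id": 1, "name": "ATP Paris"},
           {"id": 2, "name": "ITF Men - Monastir"},
           {"id": 3, "name": "Challenger Bergamo"}]
excluded = ["ITF", "Challenger"]
kept = [league for league in leagues
        if not any(x in league["name"] for x in excluded)]
print([league["name"] for league in kept])  # ['ATP Paris']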
Example #15
0
def parse_sport_betstars(sport):
    """
    Return the odds available on betstars for a given sport
    """
    selenium_init.DRIVER["betstars"].get(
        "https://www.pokerstarssports.fr/#/{}/competitions".format(sport))
    urls = []
    competitions = []
    WebDriverWait(selenium_init.DRIVER["betstars"], 15).until(
        EC.presence_of_element_located((By.CLASS_NAME, "basicList__item"))
        or sb.ABORT)
    if sb.ABORT:
        raise sb.AbortException
    inner_html = selenium_init.DRIVER["betstars"].execute_script(
        "return document.body.innerHTML")
    if ("Nous procédons à une mise à jour" in inner_html or
            "Nous devons procéder à la correction ou à la mise à jour d’un élément"
            in inner_html):
        raise sb.UnavailableSiteException
    soup = BeautifulSoup(inner_html, features="lxml")
    for line in soup.findAll(["a"]):
        if ("href" in line.attrs and sport + "/competitions/" in line["href"]
                and "data-leagueid" in line.attrs):
            url = "https://www.pokerstarssports.fr/" + line["href"]
            if url not in urls:
                urls.append(url)
                competitions.append(line.text.strip())
    list_odds = []
    for url, competition in zip(urls, competitions):
        print("\t" + competition)
        try:
            odds = parse_betstars(url)
            list_odds.append(odds)
        except KeyboardInterrupt:
            pass
    return merge_dicts(list_odds)
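The soup loop above harvests competition links by keeping only anchors whose href contains the sport's competitions path and which carry a data-leagueid attribute, de-duplicating URLs as it goes. A self-contained illustration on made-up markup:

from bs4 import BeautifulSoup

html = """
<a href="#/football/competitions/123" data-leagueid="123"> Ligue 1 </a>
<a href="#/football/competitions/456" data-leagueid="456"> Premier League </a>
<a href="#/football/live">Live</a>
"""
soup = BeautifulSoup(html, features="lxml")
urls, competitions = [], []
for line in soup.findAll(["a"]):
    if ("href" in line.attrs and "football/competitions/" in line["href"]
            and "data-leagueid" in line.attrs):
        url = "https://www.pokerstarssports.fr/" + line["href"]
        if url not in urls:
            urls.append(url)
            competitions.append(line.text.strip())
print(list(zip(competitions, urls)))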
Example #16
0
def parse_parionssport(url=""):
    """
    Return the odds available on ParionsSport
    """
    if not url:
        url = "https://www.enligne.parionssport.fdj.fr/paris-football/france/ligue-1-conforama"
    is_sport_page = "paris-" in url.split("/")[-1] and "?" not in url
    is_basket = False  # "basket" in url
    selenium_init.DRIVER["parionssport"].get(url)
    if "maintenance technique" in selenium_init.DRIVER[
            "parionssport"].execute_script("return document.body.innerHTML"):
        raise sb.UnavailableSiteException
    if (selenium_init.DRIVER["parionssport"].current_url ==
            "https://www.enligne.parionssport.fdj.fr/"):
        raise sb.UnavailableSiteException
    elif (not is_sport_page
          ) and selenium_init.DRIVER["parionssport"].current_url == "/".join(
              url.split("?")[0].split("/")[:4]):
        raise sb.UnavailableCompetitionException
    if is_sport_page:
        scroll(selenium_init.DRIVER["parionssport"], "parionssport",
               "wpsel-desc", 5)
    match_odds_hash = {}
    urls_basket = []
    today = datetime.datetime.today()
    today = datetime.datetime(today.year, today.month, today.day)
    year = " " + str(today.year)
    date = ""
    match = ""
    date_time = None
    live = False
    for _ in range(10):
        inner_html = selenium_init.DRIVER["parionssport"].execute_script(
            "return document.body.innerHTML")
        soup = BeautifulSoup(inner_html, features="lxml")
        for line in soup.findAll():
            if is_basket:
                if ("href" in line.attrs and list(line.stripped_strings)
                        and "+" in list(line.stripped_strings)[0]):
                    urls_basket.append(
                        "https://www.enligne.parionssport.fdj.fr" +
                        line["href"])
            else:
                if "Nous vous prions de bien vouloir nous en excuser" in line:
                    raise sb.UnavailableCompetitionException
                if "class" in line.attrs and "wpsel-titleRubric" in line[
                        "class"]:
                    if line.text.strip() == "aujourd'hui":
                        date = datetime.date.today().strftime("%A %d %B %Y")
                    else:
                        date = line.text.strip().lower() + year
                if "class" in line.attrs and "wpsel-timerLabel" in line[
                        "class"]:
                    try:
                        date_time = datetime.datetime.strptime(
                            date + " " + line.text, "%A %d %B %Y À %Hh%M")
                        if date_time < today:
                            date_time = date_time.replace(year=date_time.year +
                                                          1)
                    except ValueError:
                        date_time = "undefined"
                if "class" in line.attrs and "wpsel-desc" in line["class"]:
                    match = line.text.split(" À")[0].strip().replace("  ", " ")
                if "class" in line.attrs and "tag__stateLive" in line["class"]:
                    live = True
                if "class" in line.attrs and "buttonLine" in line["class"]:
                    if live:
                        live = False
                        continue
                    try:
                        odds = list(
                            map(lambda x: float(x.replace(",", ".")),
                                list(line.stripped_strings)))
                        match_odds_hash[match] = {}
                        match_odds_hash[match]['odds'] = {"parionssport": odds}
                        match_odds_hash[match]['date'] = date_time
                    except ValueError:
                        pass
        if match_odds_hash:
            return match_odds_hash
        elif urls_basket:
            list_odds = []
            for match_url in urls_basket:
                if sb.ABORT:
                    break
                list_odds.append(parse_match_nba_parionssport(match_url))
            return merge_dicts(list_odds)
    return match_odds_hash
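Fixture dates on the page carry no year, so the code above appends the current year before parsing and then bumps the year by one whenever the parsed kick-off lands before today (a January fixture scraped in December, for example). The rollover guard on its own, without the locale-dependent French date format:

import datetime

today = datetime.datetime.today()
today = datetime.datetime(today.year, today.month, today.day)

def with_year_rollover(date_time):
    # Assume the current year; if that puts the match in the past, it must be
    # next year's fixture.
    if date_time < today:
        return date_time.replace(year=date_time.year + 1)
    return date_time

kickoff = datetime.datetime(today.year, 1, 5, 21, 0)
print(with_year_rollover(kickoff))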