def get_surebets_players_nba(bookmakers, reload_matches=False):
    """
    Compare NBA player odds across the given bookmakers.

    :param bookmakers: bookmakers to compare
    :param reload_matches: if True, re-parse the "Etats-Unis - NBA" competition first
    :return: two dicts, (surebets, middles)
    """
    if reload_matches:
        parse_competitions(["Etats-Unis - NBA"], "basketball", *bookmakers)
    surebets = {}
    middles = {}
    sb.PROGRESS = 0
    n = len(sb.ODDS["basketball"])
    for match in sb.ODDS["basketball"]:
        if "id" not in sb.ODDS["basketball"][match]:
            continue
        # Match ids of this game on each bookmaker's site (None if absent).
        ids = sb.ODDS["basketball"][match]["id"]
        surebets_match, middles_match = merge_dicts_nba(
            match, ids.get("betclic"), ids.get("parionssport"),
            ids.get("pinnacle"), ids.get("pmu"), ids.get("unibet"),
            ids.get("winamax"), ids.get("zebet"))
        surebets = {**surebets, **surebets_match}
        middles = {**middles, **middles_match}
        sb.PROGRESS += 100 / n
    return surebets, middles
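# Hypothetical usage sketch (not part of the original source): the bookmaker
# names below are illustrative only, and sb.ODDS is assumed to be filled by
# parse_competitions when reload_matches=True.
bookmakers = ["betclic", "winamax", "unibet"]
surebets, middles = get_surebets_players_nba(bookmakers, reload_matches=True)
print(len(surebets), "surebets and", len(middles), "middles found")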
def parse_thread():
    """
    :return: Parses the selected competitions (meant to be run in a separate thread)
    """
    # Relies on the module-level selected_competitions, sport and selected_sites.
    sportsbetting.PROGRESS = 0
    parse_competitions(selected_competitions, sport,
                       *selected_sites)
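# parse_thread is typically launched in a background thread so a caller (e.g. a
# GUI) can poll sportsbetting.PROGRESS while parsing runs. A minimal sketch,
# assuming the module-level selected_competitions, sport and selected_sites
# have already been set:
import threading

parsing_thread = threading.Thread(target=parse_thread)
parsing_thread.start()
parsing_thread.join()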
Example #3
def test_parsing():
    """
    :return:Test simple
    """
    url = "http://www.comparateur-de-cotes.fr/comparateur/football"
    soup = BeautifulSoup(urllib.request.urlopen(url), features="lxml")
    sportsbetting.ODDS = {}
    names = []
    for line in soup.find_all(attrs={"class": "subhead"}):
        if "Principaux championnats" in str(line):
            for link in line.findParent().find_all(["a"]):
                names.append(link.text.strip())
            break
    name_competition = random.choice(names)
    parse_competitions([name_competition], "football", "betclic", "unibet")
    assert len(sportsbetting.ODDS) > 0
Example #4
def test_parsing_chromedriver():
    """
    :return: Simple test
    """
    sb.TEST = True
    url = "http://www.comparateur-de-cotes.fr/comparateur/football"
    soup = BeautifulSoup(urllib.request.urlopen(url), features="lxml")
    sb.ODDS = {}
    names = []
    for line in soup.find_all(attrs={"class": "subhead"}):
        if "Principaux championnats" in str(line):
            for link in line.findParent().find_all(["a"]):
                names.append(link.text.strip())
            break
    name_competition = random.choice(names)
    print(name_competition)
    parse_competitions([name_competition], "football", "joa")
    sb.selenium_init.DRIVER["joa"].quit()
    sb.TEST = False
    assert len(sb.ODDS) > 0
Example #5
def test_parsing():
    """
    :return: Simple test
    """
    sb.TEST = True
    url = "http://www.comparateur-de-cotes.fr/comparateur/football"
    soup = BeautifulSoup(urllib.request.urlopen(url), features="lxml")
    sb.ODDS = {}
    names = []
    for line in soup.find_all(attrs={"class": "subhead"}):
        if "Principaux championnats" in str(line):
            for link in line.findParent().find_all(["a"]):
                names.append(link.text.strip())
            break
    main_competitions = ['France - Ligue 1', 'Angleterre - Premier League', 'Espagne - LaLiga',
                         'Allemagne - Bundesliga', 'Italie - Serie A']
    competitions = [name for name in main_competitions if name in names]
    name_competition = random.choice(competitions)
    parse_competitions([name_competition], "football", "pmu", "winamax")
    sb.TEST = False
    assert len(sb.ODDS) > 0