Esempio n. 1
0
    def test_ordinar(self):
        """A desktop-version 'ordinar' (single) bet div is parsed into a full record."""
        markup = '<div class="is-hidden desktop-version one-bet data win ordinar even"> \
                    <div class="type">Ординар</div> \
                    <div class="date">11.05.2019</div>\
                        <div class="match">\
                            <a target="_blank" href="https://bookmaker-ratings.ru/tips/kristal-pe-las-bornmut-prognoz-i-stavka-artura-petros-yana/" class="match-name">«Кристал Пэлас» – «Борнмут»</a> \
                            <div class="express-date">12.05.2019</div>\
                        </div>\
                        <div class="stake">ТБ (2,5) и обе забьют</div>\
                        <div class="factor">\
                            <div class="factor-label factor-value" data-factor-dec="1,85" data-factor-fract="17/20" data-factor-american="-117"></div>\
                        </div>\
                        <div class="status">Выигрыш</div>\
                    </div>'

        # Locate the bet container the same way the production scraper does.
        bet_node = BeautifulSoup(markup, 'lxml').find('div', "one-bet")
        expected = {
            "author": "me",
            'crawled-date': '2001-01-01',
            'date': u'12.05.2019',
            'factor': 1.85,
            'match': u'«Кристал Пэлас» – «Борнмут»',
            'placed-date': u'11.05.2019',
            'stake': u'ТБ (2,5) и обе забьют',
            'status': 'W',
            'type': 'single',
        }
        self.assertEqual(br_scrape.read_bet(bet_node, "2001-01-01", "me"), expected)
Esempio n. 2
0
    def test_tablet(self):
        """The tablet-version duplicate of a bet is skipped: read_bet returns None."""
        markup = '<div class="is-hidden tablet-version one-bet data loss ordinar">\
                    <div class="bet-row top-row">\
                        <div>\
                            <span class="type">\
                                Ординар\
                                <span class="date">(11.05.2019)</span>\
                            </span>\
                        </div>\
                        <div>\
                            <div class="status">Проигрыш</div>\
                        </div>\
                    </div>\
                    <div class="bet-row">\
                        <div>\
                            <div class="match">\
                                <div class="express-date">12.05.2019</div>\
                                <a target="_blank" href="https://bookmaker-ratings.ru/tips/fulhe-m-n-yukasl-prognoz-i-stavka-artura-petros-yana/" class="match-name">«Фулхэм» – «Ньюкасл»</a>\
                            </div>\
                            <div class="stake">ТБ (2,5) и обе забьют</div>\
                        </div>\
                        <div>\
                            <div class="factor">\
                                    <div class="factor-label factor-value" data-factor-dec="2,10" data-factor-fract="11/10" data-factor-american="+110"></div>\
                            </div>\
                        </div>\
                        </div>\
                        </div>'

        # Tablet rows mirror the desktop ones and must be filtered out by the parser.
        bet_node = BeautifulSoup(markup, 'lxml').find('div', "one-bet")
        self.assertIsNone(br_scrape.read_bet(bet_node, "2001-01-01", "me"))
Esempio n. 3
0
    def getExpertBets(self, name, year, month, time_crawled):
        """Scrape all bets placed by expert *name* in the given year/month.

        Navigates to the author's stats page, polls until the AJAX-rendered
        content is consistent (up to 10 attempts with a random 4-7 s pause
        between them), then parses every "one-bet" div via br_scrape.read_bet.

        Args:
            name: expert's identifier used in the stats URL.
            year, month: period to scrape (ints).
            time_crawled: crawl timestamp passed through to each bet record.

        Returns:
            List of parsed bet dicts; empty list if the page never became
            consistent. Tablet duplicates and header rows (read_bet -> None)
            are skipped.
        """
        self.get(self.author_stats_pattern % (name, year, month))
        datum = "%4d-%02d" % (year, month)  # TODO refactor br_scrape.is_page_consistent()

        # Poll the page until the AJAX content has settled.
        max_tries = 10
        page_source = None
        for attempt in range(max_tries):
            # page source to Beautiful Soup
            page_source = BeautifulSoup(self.webdriver.page_source, 'lxml')
            if br_scrape.is_page_consistent(page_source, name, datum):
                break
            # Fix: only sleep when another attempt follows — the original
            # version also slept 4-7 s after the final failed check.
            if attempt < max_tries - 1:
                time.sleep(random.randrange(4, 8))  # wait for AJAX stuff
        else:
            # Loop exhausted without a break: page never became consistent.
            logging.error('Could not load page for: name=%s and date=%4d-%02d',
                          name, year, month)
            return []

        bets = []
        for bet in page_source.find_all('div', "one-bet"):
            newBet = br_scrape.read_bet(bet, time_crawled, name)
            # read_bet returns None for tablet duplicates and header rows.
            if newBet is not None:
                bets.append(newBet)

        return bets
Esempio n. 4
0
    def test_accu(self):
        """An accumulator ('express') bet is parsed: final factor, last leg date, summary fields."""
        markup = '<div class="is-hidden desktop-version one-bet data loss express">\
                    <div class="type">Экспресс</div>\
                    <div class="date">11.05.2019</div>\
                        <div class="express-group">\
                            <div class="express-row">\
                            <div class="match">\
                                <a target="_blank" href="https://bookmaker-ratings.ru/tips/e-kspress-na-bundesligu-prognoz-i-stavki-artura-petros-yana/" class="match-name">«Айнтрахт» - «Майнц»</a>\
                                <div class="express-date">12.05.2019</div>\
                            </div>\
                            <div class="stake">П1</div>\
                            <div class="factor">\
                                <div class="factor-label factor-value" data-factor-dec="1,60" data-factor-fract="3/5" data-factor-american="-166"></div>\
                                </div>\
                        </div>\
                            <div class="express-row">\
                            <div class="match">\
                                <a target="_blank" href="https://bookmaker-ratings.ru/tips/e-kspress-na-bundesligu-prognoz-i-stavki-artura-petros-yana/" class="match-name">«Бавария» - «Айнтрахт»</a>\
                                <div class="express-date">18.05.2019</div>\
                            </div>\
                            <div class="stake">Х</div>\
                            <div class="factor">\
                                    <div class="factor-label factor-value" data-factor-dec="6,00" data-factor-fract="5/1" data-factor-american="+500"></div>\
                            </div>\
                        </div>\
                        <div class="express-row final-row">\
                            <div class="match"></div>\
                            <div class="stake">Итоговый коэфф.</div>\
                            <div class="factor">\
                                    <div class="factor-label factor-value" data-factor-dec="9,60" data-factor-fract="43/5" data-factor-american="+860"></div>\
                            </div>\
                        </div>\
                </div>\
                    <div class="status">Проигрыш</div>\
                </div>'

        # Locate the bet container the same way the production scraper does.
        bet_node = BeautifulSoup(markup, 'lxml').find('div', "one-bet")
        expected = {
            "author": "me",
            'crawled-date': '2001-01-01',
            'date': u'18.05.2019',
            'factor': 9.6,
            'match': u' 1.60 6.00',
            'placed-date': u'11.05.2019',
            'stake': "2",
            'status': 'L',
            'type': 'accu',
        }
        self.assertEqual(br_scrape.read_bet(bet_node, "2001-01-01", "me"), expected)
Esempio n. 5
0
    def test_empty_factor(self):
        """A bet whose factor div has no value cannot be parsed: read_bet returns None."""
        markup = '<div class="is-hidden desktop-version one-bet data win ordinar even"> \
                    <div class="type">Ординар</div> \
                    <div class="date">11.05.2019</div>\
                        <div class="match">\
                            <a target="_blank" href="https://bookmaker-ratings.ru/tips/kristal-pe-las-bornmut-prognoz-i-stavka-artura-petros-yana/" class="match-name">«Кристал Пэлас» – «Борнмут»</a> \
                            <div class="express-date">12.05.2019</div>\
                        </div>\
                        <div class="stake">ТБ (2,5) и обе забьют</div>\
                        <div class="factor">\
                        </div>\
                        <div class="status">Выигрыш</div>\
                    </div>'

        bet_node = BeautifulSoup(markup, 'lxml').find('div', "one-bet")
        self.assertIsNone(br_scrape.read_bet(bet_node, "2001-01-01", "me"))