Code example #1 — votes: 0
# URL of the tftactics.gg champions tier list page scraped by scrape_champions().
TFTChampionsURL = r'https://tftactics.gg/tierlist/champions'


def scrape_champions() -> List[Champion]:
    """Scrape the tftactics.gg tier list page and return every champion found.

    Opens a throwaway browser session, pulls the rendered HTML, and maps each
    ``.characters-item`` node to a ``Champion`` via the module's builder helper.
    """
    with ScraperWebDriver() as driver:
        page_html = driver.fetch_content_html(TFTChampionsURL)
        soup = BeautifulSoup(page_html, 'html.parser')
        character_nodes = soup.select('.characters-list > .characters-item')
        return [_build_champion_from_character(node) for node in character_nodes]


def _scrape_and_persist(collection: Collection):
    """Scrape the latest champions and replace the contents of *collection*.

    Prints a short report for each champion found, then performs a full
    refresh: the collection is dropped and the fresh documents bulk-inserted.

    :param collection: the MongoDB-style collection to overwrite.
    """
    result = scrape_champions()
    # f-string kept identical in output to the previous .format() call,
    # just consistent with the formatting style used below.
    separator = '-' * 15
    print(f'Found {len(result)} champions\n{separator}\n')

    for champion in result:
        print(
            f'Name: {champion.name}\nImage: {champion.image}\nCost: {champion.cost}\n'
        )

    # Full refresh: drop stale documents before inserting the new snapshot.
    # NOTE: was `comp` before — a copy-paste from scrape_comps; these are champions.
    collection.drop()
    collection.insert_many([champion.dict() for champion in result])
    print('Saved latest champions to db successfully!')


if __name__ == '__main__':
    # Connect once, then refresh the champions collection from the live site.
    database = DB().connect()
    champions_collection = database.get_champions_collection()
    _scrape_and_persist(champions_collection)
Code example #2 — votes: 0
File: __main__.py — Project: Flexicon/tft-suggester
from common.db import DB
from scraper.helpers import _trigger_webhook_if_set
from scraper.scrape_champions import _scrape_and_persist as scrape_champions
from scraper.scrape_comps import _scrape_and_persist as scrape_comps

if __name__ == '__main__':
    # Run both scrapers against a single DB connection, then fire the
    # optional completion webhook.
    connection = DB().connect()
    champions = connection.get_champions_collection()
    comps = connection.get_comps_collection()
    scrape_champions(champions)
    scrape_comps(comps)
    _trigger_webhook_if_set()