Example #1
def _build_comp_from_team(team, playstyle):
    # The original snippet starts mid-function; this name and signature are an
    # assumed reconstruction based on the variables used in the body.
    name = team.find_next(class_='team-name-elipsis').get_text().replace(
        playstyle, '')

    # Extract the comp's rank tier and build a Champion from each character tile.
    tier = team.find_next(class_='team-rank').get_text()
    characters = team.select('.team-characters > .characters-item')
    champions = list(map(_build_champion_from_character, characters))

    return Comp(name, champions, tier, playstyle)


def _scrape_and_persist(collection: Collection):
    # Scrape the latest comps, log a short summary, then replace the stored ranking.
    result = scrape_comps()
    print(f'Found {len(result)} comps\n{"-" * 15}\n')

    for comp in result:
        champions_line = ', '.join(
            [champion.name for champion in comp.champions])
        print(
            f'Tier: {comp.tier}\nName: {comp.name}\nChampions: {champions_line}\n'
        )

    # Drop the previous snapshot and store the fresh ranking in its place.
    collection.drop()
    collection.insert_many([comp.dict() for comp in result])
    print('Saved latest ranking to db successfully!')


if __name__ == '__main__':
    db = DB().connect()
    _scrape_and_persist(db.get_comps_collection())
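Comp is constructed positionally above and exposes a dict() method for insert_many(). Its definition is not part of these snippets; below is a minimal sketch of what such a model might look like, assuming plain dataclasses (the field names are inferred from the calls above, and any Champion attributes beyond name are not confirmed by the source).

from dataclasses import asdict, dataclass
from typing import List


@dataclass
class Champion:
    # Only .name is used in the snippets above; other attributes are unknown.
    name: str


@dataclass
class Comp:
    name: str
    champions: List[Champion]
    tier: str
    playstyle: str

    def dict(self) -> dict:
        # insert_many() expects plain dicts; asdict() also converts the nested
        # Champion dataclasses.
        return asdict(self)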
Example #2
from common.db import DB
from scraper.helpers import _trigger_webhook_if_set
from scraper.scrape_champions import _scrape_and_persist as scrape_champions
from scraper.scrape_comps import _scrape_and_persist as scrape_comps

if __name__ == '__main__':
    # Refresh both collections, then fire the optional notification webhook.
    db = DB().connect()
    scrape_champions(db.get_champions_collection())
    scrape_comps(db.get_comps_collection())
    _trigger_webhook_if_set()
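Neither common.db.DB nor _trigger_webhook_if_set is shown in these snippets. The sketch below is one way they could be implemented, assuming pymongo-backed storage and an environment-variable-driven webhook; MONGO_URI, WEBHOOK_URL, the database and collection names, and the JSON payload are all assumptions, not taken from the source.

import os

import requests
from pymongo import MongoClient
from pymongo.collection import Collection


class DB:
    def connect(self) -> 'DB':
        # Hypothetical connection setup; the URI variable and database name
        # are assumptions.
        self._client = MongoClient(
            os.environ.get('MONGO_URI', 'mongodb://localhost:27017'))
        self._db = self._client['tft']
        return self

    def get_champions_collection(self) -> Collection:
        return self._db['champions']

    def get_comps_collection(self) -> Collection:
        return self._db['comps']


def _trigger_webhook_if_set() -> None:
    # Hypothetical variable name; do nothing when no webhook is configured.
    url = os.environ.get('WEBHOOK_URL')
    if not url:
        return
    # A simple POST with a small JSON payload is enough to ping most webhooks.
    requests.post(url, json={'event': 'scrape-finished'},
                  timeout=10).raise_for_status()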