Example #1
def fetch():
    all_prices, timestamps = {}, []
    for i, url in enumerate(configuration.get('cardhoarder_urls')):
        s = fetcher_internal.fetch(url)
        s = ftfy.fix_encoding(s)
        # The first line of the Cardhoarder file is 'UPDATED <timestamp>'; parse it into a Unix timestamp.
        timestamps.append(dtutil.parse_to_ts(s.split('\n', 1)[0].replace('UPDATED ', ''), '%Y-%m-%dT%H:%M:%S+00:00', dtutil.CARDHOARDER_TZ))
        all_prices[i] = parse_cardhoarder_prices(s)
    url = configuration.get('mtgotraders_url')
    if url:
        s = fetcher_internal.fetch(url)
        timestamps.append(dtutil.dt2ts(dtutil.now()))
        all_prices['mtgotraders'] = parse_mtgotraders_prices(s)
    if not timestamps:
        raise TooFewItemsException('Did not get any prices when fetching {urls} ({all_prices})'.format(urls=configuration.get('cardhoarder_urls') + [configuration.get('mtgotraders_url')], all_prices=all_prices))
    store(min(timestamps), all_prices)
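
For reference, a minimal standard-library sketch of what the 'UPDATED ...' header parse above does. dtutil is project-internal, so plain datetime is used here instead, the timezone is assumed to be UTC rather than dtutil.CARDHOARDER_TZ, and the sample price-file contents are hypothetical.

import datetime

s = 'UPDATED 2019-01-01T00:00:00+00:00\n...'  # hypothetical file contents; only the first line matters here
header = s.split('\n', 1)[0].replace('UPDATED ', '')
dt = datetime.datetime.strptime(header, '%Y-%m-%dT%H:%M:%S+00:00').replace(tzinfo=datetime.timezone.utc)
print(int(dt.timestamp()))  # 1546300800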
Example #2
def set_values(raw_deck: DeckType) -> DeckType:
    raw_deck = translation.translate(translation.TAPPEDOUT, raw_deck)
    raw_decklist = fetcher_internal.fetch('{base_url}?fmt=txt'.format(base_url=raw_deck['url']))
    raw_deck['cards'] = decklist.parse(raw_decklist)
    raw_deck['source'] = 'Tapped Out'
    raw_deck['identifier'] = raw_deck['url']
    return raw_deck
Example #3
def parse_printable(raw_deck):
    """If we're not authorized for the TappedOut API, this method will collect name and author of a deck.
    It could also grab a date, but I haven't implemented that yet."""
    s = fetcher_internal.fetch(raw_deck['url'] + '?fmt=printable')
    soup = BeautifulSoup(s, 'html.parser')
    raw_deck['name'] = soup.find('h2').string.strip('"')
    infobox = soup.find('table', {'id': 'info_box'})
    user = infobox.find('td', string='User')
    raw_deck['user'] = user.find_next_sibling('td').string
    return raw_deck
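
A self-contained sketch of the same BeautifulSoup extraction, run against a hypothetical HTML fragment shaped the way parse_printable expects; the markup below is an assumption for illustration, not a capture of the real TappedOut printable page.

from bs4 import BeautifulSoup

html = '''
<h2>"My Deck"</h2>
<table id="info_box">
  <tr><td>User</td><td>some_user</td></tr>
</table>
'''
soup = BeautifulSoup(html, 'html.parser')
name = soup.find('h2').string.strip('"')  # 'My Deck'
infobox = soup.find('table', {'id': 'info_box'})
user = infobox.find('td', string='User').find_next_sibling('td').string  # 'some_user'
print(name, user)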
Example #4
def scrape_user(username):
    parsed = {}
    parsed['username'] = username
    s = fetcher_internal.fetch('https://tappedout.net/users/{0}/'.format(username))
    soup = BeautifulSoup(s, 'html.parser')
    mtgo = soup.find('td', string='MTGO Username')
    if mtgo is not None:
        parsed['mtgo_username'] = mtgo.find_next_sibling('td').string
    else:
        parsed['mtgo_username'] = None
    return parsed
Example #5
def fetch():
    all_prices = {}
    url = 'https://www.mtggoldfish.com/prices/select'
    s = fetcher_internal.fetch(url)
    sets = parse_sets(s) + ['TD0', 'TD2'] # Theme decks pages not linked from /prices/select
    for code in sets:
        for suffix in ['', '_F']:
            if code == 'PZ2' and suffix == '_F':
                print('Explicitly skipping PZ2_F because it is a lie.')
                continue
            # Use a separate name so the loop variable isn't clobbered by the foil ('_F') variant.
            full_code = '{code}{suffix}'.format(code=code, suffix=suffix)
            url = set_url(full_code)
            time.sleep(1)
            s = fetcher_internal.fetch(url, force=True)
            prices = parse_prices(s)
            if not prices:
                print('Found no prices for {code}'.format(code=full_code))
            all_prices[full_code] = prices
    timestamp = int(time.time())
    store(timestamp, all_prices)
Example #6
def legal_cards(force=False, season=None):
    if season is None and os.path.exists('legal_cards.txt'):
        print("HACK: Using local legal_cards override.")
        h = open('legal_cards.txt')
        legal = h.readlines()
        h.close()
        return [l.strip() for l in legal]
    if season is None:
        url = 'http://pdmtgo.com/legal_cards.txt'
    else:
        url = 'http://pdmtgo.com/{season}_legal_cards.txt'.format(season=season)
    encoding = 'utf-8' if season != 'EMN' else 'latin-1' # EMN was encoded weirdly.
    legal_txt = internal.fetch(url, encoding, force=force)
    return legal_txt.strip().split('\n')
Example #7
def run() -> None:
    files = rotation.files()
    n = len(files)
    time_until = min(
        TIME_UNTIL_FULL_ROTATION,
        TIME_UNTIL_SUPPLEMENTAL_ROTATION) - datetime.timedelta(weeks=1)
    if n >= TOTAL_RUNS:
        print(
            'It is the moment of discovery, the triumph of the mind, and the end of this rotation.'
        )

        if time_until < datetime.timedelta(days=3):
            for f in files:
                print('Removing {f}'.format(f=f))
                os.remove(f)
        return
    if n == 0 and TIME_UNTIL_FULL_ROTATION > datetime.timedelta(weeks=1) and TIME_UNTIL_SUPPLEMENTAL_ROTATION > datetime.timedelta(weeks=1):
        print(
            'The monks of the North Tree rarely saw their kodama until the rotation, when it woke like a slumbering, angry bear.'
        )
        print('ETA: {t}'.format(
            t=dtutil.display_time(time_until.total_seconds())))
        return
    all_prices = {}
    for url in configuration.get_list('cardhoarder_urls'):
        s = fetcher_internal.fetch(url)
        s = ftfy.fix_encoding(s)
        all_prices[url] = parse_cardhoarder_prices(s)
    url = configuration.get_str('mtgotraders_url')
    if url:
        s = fetcher_internal.fetch(url)
        all_prices['mtgotraders'] = parse_mtgotraders_prices(s)

    run_number = process(all_prices)
    if run_number == TOTAL_RUNS:
        make_final_list()
Example #8
def get_auth():
    cookie = fetcher_internal.SESSION.cookies.get('tapped')
    token = configuration.get('tapped_API_key')
    return fetcher_internal.fetch(
        "https://tappedout.net/api/v1/cookie/{0}/?access_token={1}".format(
            cookie, token))
Example #9
def bugged_cards():
    text = internal.fetch('https://pennydreadfulmtg.github.io/modo-bugs/bugs.tsv')
    if text is None:
        return None
    lines = [l.split('\t') for l in text.split('\n')]
    # Drop the header row and the empty entry produced by the trailing newline.
    return lines[1:-1]
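
One possible way to consume the raw rows returned above: zip each data row with the header row to get dicts. The sample TSV here is made up for illustration; the real bugs.tsv defines its own columns.

sample = 'card\tdescription\nSome Card\tDoes the wrong thing in combat\n'  # hypothetical TSV
rows = [l.split('\t') for l in sample.split('\n')]
header, data = rows[0], rows[1:-1]  # same slicing as bugged_cards(): skip the header and the trailing empty line
bugs = [dict(zip(header, row)) for row in data]
print(bugs)  # [{'card': 'Some Card', 'description': 'Does the wrong thing in combat'}]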