def save_errorlog(errorlog):
    # Split collected errors into fixed/unfixed buckets and dump them to disk.
    fixederrors = []
    unfixederrors = []
    for error in errorlog:
        if 'fixed' in error:
            fixederrors.append(error)
        else:
            unfixederrors.append(error)
    errorlog = {"unfixed": unfixederrors, "fixed": fixederrors}
    with open('out/errors.json', 'w') as outfile:
        json.dump(errorlog, outfile, sort_keys=True, indent=2,
                  separators=(',', ': '))


if __name__ == '__main__':
    errorlog = []  # errors accumulated across the steps below
    AllSets = spoilers.get_allsets()

    # Scrape and parse the MTGSalvation RSS feed, then apply the manual fixes.
    mtgs = spoilers.scrape_mtgs('http://www.mtgsalvation.com/spoilers.rss')
    mtgs = spoilers.parse_mtgs(mtgs)
    mtgs = spoilers.correct_cards(mtgs, manual_cards, card_corrections,
                                  delete_cards)
    #errorlog.append(temperror)

    # Attach card image URLs.
    #scryfall = spoilers.get_scryfall('https://api.scryfall.com/cards/search?q=++e:' + setinfos['setname'].lower())
    mtgs = spoilers.get_image_urls(mtgs, presets['isfullspoil'],
                                   setinfos['setname'],
                                   setinfos['setlongname'],
                                   setinfos['setsize'])
    #errorlog.append(temperror)

    # Check for problems, write the spoiler XML, and fold the set into AllSets.
    [mtgs, errors] = spoilers.errorcheck(mtgs)
    errorlog += errors
    spoilers.write_xml(mtgs, setinfos['setname'], setinfos['setlongname'],
                       setinfos['setreleasedate'])
    mtgs = spoilers.add_headers(mtgs, setinfos)
    AllSets = spoilers.make_allsets(AllSets, mtgs, setinfos['setname'])

    # Masterpiece sets get the same treatment when configured.
    if 'masterpieces' in setinfos:
        masterpieces = spoilers.make_masterpieces(setinfos['masterpieces'],
                                                  AllSets, mtgs)
        [masterpieces, errors] = spoilers.errorcheck(masterpieces)
        errorlog += errors
        #errorlog.append(temperror)
        spoilers.write_xml(masterpieces,
                           setinfos['masterpieces']['setname'],
                           setinfos['masterpieces']['setlongname'],
                           setinfos['masterpieces']['setreleasedate'])
        AllSets = spoilers.make_allsets(AllSets, masterpieces,
                                        setinfos['masterpieces']['setname'])
        save_masterpieces(masterpieces)

    save_errorlog(errorlog)
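# save_masterpieces() is called above but its definition is outside this excerpt.
# A minimal sketch of what it could look like, assuming it mirrors save_errorlog()
# and simply dumps the masterpiece list to JSON; the 'out/masterpieces.json' path
# and the body below are assumptions, not the project's actual implementation.
def save_masterpieces(masterpieces):
    with open('out/masterpieces.json', 'w') as outfile:
        json.dump(masterpieces, outfile, sort_keys=True, indent=2,
                  separators=(',', ': '))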
if presets['oldRSS'] or ('noRSS' in setinfo and setinfo['noRSS']):
    mtgs = {"cards": []}
else:
    mtgs = spoilers.scrape_mtgs(
        'http://www.mtgsalvation.com/spoilers.rss')  # scrape mtgs rss feed
[mtgs, split_cards] = spoilers.parse_mtgs(
    mtgs, [], [], [], presets['split_cards'])  # parse spoilers into mtgjson format
mtgs = spoilers.correct_cards(mtgs, manual_sets[setinfo['setname']]['cards'],
                              card_corrections, delete_cards)  # fix using the fixfiles
mtgjson = spoilers.get_image_urls(mtgs, presets['isfullspoil'],
                                  setinfo['setname'], setinfo['setlongname'],
                                  setinfo['setsize'], setinfo)  # get images
if presets['scryfallComparison']:
    scryfall = spoilers.get_scryfall(
        'https://api.scryfall.com/cards/search?q=++e:' + setinfo['setname'].lower())
    mtgjson = spoilers.smash_mtgs_scryfall(mtgs, scryfall)
if 'fullSpoil' in setinfo and setinfo['fullSpoil']:
    wotc = spoilers.scrape_fullspoil('', setinfo)
    spoilers.smash_fullspoil(mtgjson, wotc)
[mtgjson, errors] = spoilers.error_check(
    mtgjson, card_corrections)  # check for errors where possible
errorlog += errors
spoilers.write_xml(mtgjson, setinfo['setname'], setinfo['setlongname'],
                   setinfo['setreleasedate'])
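# The block above reads several feature flags from `presets`. A hedged sketch of
# the keys this version expects, based only on how the code uses them; the names
# below marked as placeholders and all values are illustrative, not real project
# configuration.
presets_example = {
    'oldRSS': False,             # when truthy, the RSS scrape is skipped
    'split_cards': [],           # forwarded to parse_mtgs()
    'isfullspoil': False,        # forwarded to get_image_urls()
    'scryfallComparison': True,  # gates the Scryfall cross-check
}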
if presets['oldRSS'] or ('noRSS' in setinfo and setinfo['noRSS']):
    mtgs = {"cards": []}
else:
    mtgs = mtgs_scraper.scrape_mtgs(
        'http://www.mtgsalvation.com/spoilers.rss')  # scrape mtgs rss feed
mtgs = mtgs_scraper.parse_mtgs(
    mtgs, setinfo=setinfo)  # parse spoilers into mtgjson format
if manual_sets and setinfo['code'] in manual_sets:
    manual_cards = manual_sets[setinfo['code']]
else:
    manual_cards = []
mtgs = spoilers.correct_cards(
    mtgs, manual_cards, card_corrections,
    delete_cards['delete'])  # fix using the fixfiles
mtgjson = spoilers.get_image_urls(
    mtgs, presets['isfullspoil'], setinfo)  # get images
if presets['scryfallOnly'] or ('scryfallOnly' in setinfo and setinfo['scryfallOnly']):
    scryfall = scryfall_scraper.get_scryfall(
        'https://api.scryfall.com/cards/search?q=++e:' + setinfo['code'].lower())
    mtgjson = scryfall  #_scraper.smash_mtgs_scryfall(mtgs, scryfall)
if 'fullSpoil' in setinfo and setinfo['fullSpoil']:
    wotc = wizards_scraper.scrape_fullspoil('', setinfo)
    wizards_scraper.smash_fullspoil(mtgjson, wotc)
[mtgjson, errors] = spoilers.error_check(
    mtgjson, card_corrections)  # check for errors where possible
errorlog += errors
if 'cards' not in mtgjson or not mtgjson['cards']:
    noCards.append(setinfo['code'])
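# get_scryfall() lives in scryfall_scraper and is not shown in this excerpt.
# A minimal sketch of how such a fetch could work against Scryfall's documented
# /cards/search endpoint, assuming the `requests` library; the function body,
# the delay, and the returned {'cards': ...} shape are assumptions, not the
# project's actual implementation.
import time

import requests


def get_scryfall(url):
    """Fetch every page of a Scryfall /cards/search query."""
    cards = []
    while url:
        response = requests.get(url).json()
        cards += response.get('data', [])
        # Scryfall paginates search results via has_more/next_page.
        url = response.get('next_page') if response.get('has_more') else None
        time.sleep(0.1)  # polite gap between requests, per Scryfall's guidance
    return {'cards': cards}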
    continue

if presets['oldRSS'] or ('noRSS' in setinfo and setinfo['noRSS']):
    mtgs = {"cards": []}
else:
    mtgs = mtgs_scraper.scrape_mtgs(
        'http://www.mtgsalvation.com/spoilers.rss')  # scrape mtgs rss feed
mtgs = mtgs_scraper.parse_mtgs(
    mtgs, setinfo=setinfo)  # parse spoilers into mtgjson format
if manual_sets and setinfo['code'] in manual_sets:
    manual_cards = manual_sets[setinfo['code']]
else:
    manual_cards = []
mtgs = spoilers.correct_cards(
    mtgs, manual_cards, card_corrections,
    delete_cards['delete'])  # fix using the fixfiles
if 'mythicCode' not in setinfo:
    setinfo['mythicCode'] = setinfo['code']
mtgjson = spoilers.get_image_urls(
    mtgs, presets['isfullspoil'], setinfo['code'], setinfo['mythicCode'],
    setinfo['name'], setinfo['size'], setinfo)  # get images
if presets['scryfallOnly'] or ('scryfallOnly' in setinfo and setinfo['scryfallOnly']):
    scryfall = scryfall_scraper.get_scryfall(
        'https://api.scryfall.com/cards/search?q=++e:' + setinfo['code'].lower())
    mtgjson = scryfall  #_scraper.smash_mtgs_scryfall(mtgs, scryfall)
if 'fullSpoil' in setinfo and setinfo['fullSpoil']:
    wotc = wizards_scraper.scrape_fullspoil('', setinfo)
    wizards_scraper.smash_fullspoil(mtgjson, wotc)
[mtgjson, errors] = spoilers.error_check(
    mtgjson, card_corrections)  # check for errors where possible
errorlog += errors
spoilers.write_xml(mtgjson, setinfo['code'], setinfo['name'],
                   setinfo['releaseDate'])
#save_xml(spoilers.pretty_xml(setinfo['code']), 'out/spoiler.xml')
mtgjson = spoilers.add_headers(mtgjson, setinfo)
AllSets = spoilers.make_allsets(AllSets, mtgjson, setinfo['code'])
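# The block above pulls a number of per-set fields out of `setinfo`. A hedged
# sketch of what one entry might look like, based only on the keys this version
# reads; every value is a placeholder, and the optional flags are shown
# commented out. This is an illustration, not the project's real set list.
setinfo_example = {
    'code': 'XYZ',                # set code used for the Scryfall query and as the AllSets key
    'name': 'Example Set',        # long set name passed to write_xml()
    'size': 0,                    # forwarded to get_image_urls()
    'releaseDate': '1970-01-01',  # passed to write_xml()
    # 'mythicCode': 'XYZ',        # defaults to 'code' when absent (see above)
    # 'noRSS': True,              # skip the MTGSalvation RSS scrape
    # 'fullSpoil': True,          # also scrape the official full spoiler
    # 'scryfallOnly': True,       # replace the parsed data with Scryfall's result
}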