# test_parse3: decklist.parse() must split this mono-black text list into 15 distinct
# maindeck entries and 7 distinct sideboard entries (counts of unique card names,
# not total card quantities). NOTE(review): the literal presumably contains a blank
# line between '4 Underworld Connections' and '1 Distress' acting as the
# maindeck/sideboard separator — confirm against the original file.
def test_parse3(): s = """ 3 Barter in Blood 4 Consume Spirit 4 Demonic Rising 4 Devour Flesh 2 Distress 2 Dread Statuary 4 Haunted Plate Mail 3 Homicidal Seclusion 4 Hymn to Tourach 2 Infest 4 Quicksand 4 Spawning Pool 14 Swamp 2 Ultimate Price 4 Underworld Connections 1 Distress 2 Dystopia 1 Infest 4 Memoricide 2 Nature's Ruin 1 Pharika's Cure 4 Scrabbling Claws""" s = textwrap.dedent(s) d = decklist.parse(s) assert len(d['maindeck']) == 15 assert len(d['sideboard']) == 7
# test_parse9: a trailing 'Sideboard' marker with no cards after it must yield an
# empty sideboard (quantity sum 0) while the 60 cards above it all land in the
# maindeck; also spot-checks one entry's quantity ('Shining Aerosaur' -> 2).
def test_parse9(): s = """ 2 Bonded Horncrest 4 Boros Guildgate 2 Charging Monstrosaur 2 Frenzied Raptor 3 Imperial Lancer 8 Mountain 2 Nest Robber 2 Pious Interdiction 9 Plains 1 Pterodon Knight 2 Rallying Roar 2 Shining Aerosaur 3 Sky Terror 2 Slash of Talons 4 Stone Quarry 2 Sure Strike 2 Swashbuckling 3 Territorial Hammerskull 2 Thrash of Raptors 1 Tilonalli's Skinshifter 2 Unfriendly Fire Sideboard""" s = textwrap.dedent(s) d = decklist.parse(s) assert sum(d['maindeck'].values()) == 60 assert sum(d['sideboard'].values()) == 0 assert d['maindeck']['Shining Aerosaur'] == 2
# test_parse10: degenerate input — a decklist containing only the 'Sideboard'
# marker must parse cleanly to an empty maindeck and an empty sideboard.
def test_parse10(): s = """ Sideboard""" s = textwrap.dedent(s) d = decklist.parse(s) assert sum(d['maindeck'].values()) == 0 assert sum(d['sideboard'].values()) == 0
# test_parse7: quantity sums and entry counts — 64 total cards across 13 maindeck
# entries, 12 total cards across 5 sideboard entries. NOTE(review): the sideboard
# split presumably falls after '4 Dystopia' (16+4*12 = 64 maindeck; 1+4+2+1+4 = 12
# sideboard) via a blank line in the original literal — confirm.
def test_parse7(): s = """ 16 Forest 4 Animist's Awakening 4 Copperhorn Scout 4 Harvest Season 4 Jaddi Offshoot 4 Krosan Wayfarer 4 Loam Dryad 4 Nantuko Monastery 4 Nest Invader 4 Quest for Renewal 4 Rofellos, Llanowar Emissary 4 Sky Skiff 4 Dystopia 1 Infest 4 Memoricide 2 Nature's Ruin 1 Pharika's Cure 4 Scrabbling Claws """ s = textwrap.dedent(s) d = decklist.parse(s) assert sum(d['maindeck'].values()) == 64 assert sum(d['sideboard'].values()) == 12 assert len(d['maindeck']) == 13 assert len(d['sideboard']) == 5
# test_parse8: 61-card maindeck and 15-card sideboard, with per-card checks that
# 'Cryptic Serpent' parsed into the maindeck and 'Convolute' into the sideboard —
# i.e. the section split falls between those two singleton entries.
def test_parse8(): s = """ 24 Island 4 Curious Homunculus 4 Prism Ring 4 Anticipate 4 Take Inventory 4 Dissolve 3 Void Shatter 3 Jace's Sanctum 3 Whelming Wave 2 Control Magic 2 Confirm Suspicions 2 Counterbore 1 Rise from the Tides 1 Cryptic Serpent 1 Convolute 1 Lone Revenant 1 Careful Consideration 1 Opportunity 4 Annul 4 Invasive Surgery 3 Sentinel Totem """ s = textwrap.dedent(s) d = decklist.parse(s) assert sum(d['maindeck'].values()) == 61 assert sum(d['sideboard'].values()) == 15 assert d['maindeck']['Cryptic Serpent'] == 1 assert d['sideboard']['Convolute'] == 1
def set_values(raw_deck: DeckType) -> DeckType:
    """Fill in a Tapped Out deck dict: translate its fields, fetch and parse
    its text decklist, and stamp source/identifier metadata. Returns the same
    dict, mutated in place."""
    raw_deck = translation.translate(translation.TAPPEDOUT, raw_deck)
    # Tapped Out serves a plain-text export of any deck at <deck url>?fmt=txt.
    text_export_url = '{base_url}?fmt=txt'.format(base_url=raw_deck['url'])
    raw_decklist = fetcher_internal.fetch(text_export_url)
    raw_deck['cards'] = decklist.parse(raw_decklist)
    raw_deck['source'] = 'Tapped Out'
    raw_deck['identifier'] = raw_deck['url']
    return raw_deck
def tournament_deck(cells, competition_id: int, date: datetime.datetime, final: Dict[str, int]) -> Optional[deck.Deck]:
    """Build a deck record from one row of a Gatherling tournament table.

    Returns the already-stored deck if this Gatherling id is known, None if the
    downloaded decklist is empty, otherwise the newly added deck.
    """
    player_cell, link_cell, archetype_cell = cells[2], cells[4], cells[5]
    entry = {
        'source': 'Gatherling',
        'competition_id': competition_id,
        'created_date': dtutil.dt2ts(date),
    }
    mtgo_username = player_cell.a.contents[0].string
    entry['mtgo_username'] = mtgo_username
    entry['finish'] = final.get(mtgo_username)
    deck_link = link_cell.a
    entry['url'] = gatherling_url(deck_link['href'])
    entry['name'] = deck_link.string
    # Archetype may be plain text or wrapped in a link.
    entry['archetype'] = archetype_cell.a.string if archetype_cell.find('a') else archetype_cell.string
    # The deck's Gatherling id lives in the url's query string (?id=...).
    query = urllib.parse.urlparse(str(entry['url'])).query
    gatherling_id = urllib.parse.parse_qs(query)['id'][0]
    entry['identifier'] = gatherling_id
    known_deck_id = deck.get_deck_id(entry['source'], entry['identifier'])
    if known_deck_id is not None:
        return deck.load_deck(known_deck_id)
    cards = decklist.parse(fetcher.internal.post(gatherling_url('deckdl.php'), {'id': gatherling_id}))
    entry['cards'] = cards
    if len(cards['maindeck']) + len(cards['sideboard']) == 0:
        logger.warning('Rejecting deck with id {id} because it has no cards.'.format(id=gatherling_id))
        return None
    return deck.add_deck(entry)
def parse_decklist(self):
    """Parse self.decklist into self.cards.

    MTGO .dek exports start with an XML prolog and go through parse_xml();
    anything else is treated as a plain-text list. On failure self.cards is
    left as None and a user-facing message is recorded in
    self.errors['decklist'].
    """
    self.cards = None
    if self.decklist.startswith('<?xml'):
        try:
            self.cards = decklist.parse_xml(self.decklist)
        except InvalidDataException:
            # BUG FIX: the original called .format(specific=str(e)) on this
            # message, but the template has no {specific} placeholder, so the
            # call was a no-op. The dead call (and the unused `e`) are removed;
            # the message itself is unchanged.
            self.errors['decklist'] = 'Unable to read .dek decklist. Try exporting from Magic Online as Text and pasting the result.'
    else:
        try:
            self.cards = decklist.parse(self.decklist)
        except InvalidDataException as e:
            self.errors['decklist'] = '{specific}. Try exporting from Magic Online as Text and pasting the result.'.format(specific=str(e))
def tournament_deck(cells, competition_id, date, ranks):
    """Build a deck record from one row of a Gatherling tournament table.

    The finish is derived from the trophy image next to the player name when
    present, otherwise from `ranks`. Returns None for already-imported decks
    and for decks whose downloaded decklist is empty; otherwise the newly
    added deck.

    Raises InvalidDataException on an unrecognized player image.
    """
    d = {
        'source': 'Gatherling',
        'competition_id': competition_id,
        'created_date': dtutil.dt2ts(date)
    }
    player = cells[2]
    d['mtgo_username'] = player.a.contents[0]
    if player.find('img'):
        # Trophy/medal images encode the finish; strip the path to get the name.
        img = re.sub(r'styles/Chandra/images/(.*?)\.png', r'\1', player.img['src'])
        if img == WINNER:
            d['finish'] = 1
        elif img == SECOND:
            d['finish'] = 2
        elif img == TOP_4:
            d['finish'] = 3
        elif img == TOP_8:
            d['finish'] = 5
        elif img == 'verified':
            d['finish'] = ranks.get(d['mtgo_username'], None)
        else:
            raise InvalidDataException(
                'Unknown player image `{img}`'.format(img=img))
    else:
        d['finish'] = ranks.get(d['mtgo_username'], None)
    # Record is rendered as "W-L" or "W-L-D".
    # NOTE(review): wins/losses stay as strings here while draws defaults to
    # int 0 — presumably coerced downstream; confirm before changing.
    parts = cells[3].string.split('-')
    d['wins'] = parts[0]
    d['losses'] = parts[1]
    d['draws'] = 0 if len(parts) < 3 else parts[2]
    link = cells[4].a
    d['url'] = gatherling_url(link['href'])
    d['name'] = link.string
    if cells[5].find('a'):
        d['archetype'] = cells[5].a.string
    else:
        d['archetype'] = cells[5].string
    gatherling_id = urllib.parse.parse_qs(
        urllib.parse.urlparse(d['url']).query)['id'][0]
    d['identifier'] = gatherling_id
    if deck.get_deck_id(d['source'], d['identifier']) is not None:
        return None
    d['cards'] = decklist.parse(
        fetcher.internal.post(gatherling_url('deckdl.php'),
                              {'id': gatherling_id}))
    # BUG FIX: decklist.parse always returns {'maindeck': ..., 'sideboard': ...},
    # so the original `len(d['cards']) == 0` was always False (len is always 2)
    # and empty decks were never rejected. Check the section sizes instead,
    # matching the sibling implementation of this function.
    if len(d['cards']['maindeck']) + len(d['cards']['sideboard']) == 0:
        print('Rejecting deck with id {id} because it has no cards.'.format(
            id=gatherling_id))
        return None
    return deck.add_deck(d)
def do_validation(self):
    """Validate a league deck submission, recording problems in self.errors.

    Checks, in order: MTGO username presence and no other active league run,
    deck name presence (on success, fills in source/competition/identifier/url),
    and the decklist itself — parsed as a .dek XML export or plain text, then
    vivified and checked for Penny Dreadful legality.
    """
    if len(self.mtgo_username) == 0:
        self.errors['mtgo_username'] = "******"
    elif active_decks_by(self.mtgo_username.strip()):
        self.errors['mtgo_username'] = "******"
    if len(self.name.strip()) == 0:
        self.errors['name'] = 'Deck Name is required'
    else:
        self.source = 'League'
        self.competition_id = db().value(active_competition_id_query())
        self.identifier = identifier(self)
        self.url = url_for('competitions', competition_id=self.competition_id)
    self.decklist = self.decklist.strip()
    if len(self.decklist) == 0:
        self.errors['decklist'] = 'Decklist is required'
    else:
        self.cards = None
        if self.decklist.startswith('<?xml'):
            try:
                self.cards = decklist.parse_xml(self.decklist)
            except InvalidDataException:
                # BUG FIX: the original called .format(specific=str(e)) on this
                # message, but the template has no {specific} placeholder, so
                # the call was a no-op. The dead call (and the unused `e`) are
                # removed; the message itself is unchanged.
                self.errors['decklist'] = 'Unable to read .dek decklist. Try exporting from MTGO as Text and pasting the result.'
        else:
            try:
                self.cards = decklist.parse(self.decklist)
            except InvalidDataException as e:
                self.errors['decklist'] = '{specific}. Try exporting from MTGO as Text and pasting the result.'.format(
                    specific=str(e))
        if self.cards is not None:
            try:
                vivified = decklist.vivify(self.cards)
                errors = {}
                if 'Penny Dreadful' not in legality.legal_formats(
                        vivified, None, errors):
                    self.errors['decklist'] = 'Deck is not legal in Penny Dreadful - {error}'.format(
                        error=errors.get('Penny Dreadful'))
            except InvalidDataException as e:
                self.errors['decklist'] = str(e)
# test_parse5: a zero-quantity entry ('0 Villainous Wealth') must still parse as
# its own entry — 13 maindeck entries plus a 1-entry sideboard. NOTE(review):
# presumably the sideboard split (a blank line in the original literal) falls just
# before '0 Villainous Wealth' — confirm against the original file.
def test_parse5(): s = """ 4 Animist's Awakening 4 Copperhorn Scout 14 Forest 4 Harvest Season 4 Jaddi Offshoot 4 Krosan Wayfarer 4 Loam Dryad 4 Nantuko Monastery 4 Nest Invader 4 Quest for Renewal 4 Rofellos, Llanowar Emissary 2 Sky Skiff 4 Throne of the God-Pharaoh 0 Villainous Wealth """ s = textwrap.dedent(s) d = decklist.parse(s) assert len(d['maindeck']) == 13 assert len(d['sideboard']) == 1
def scrape_decklist(d):
    """Download the MTG Goldfish text export for deck `d` and parse it."""
    download_url = 'https://www.mtggoldfish.com/deck/download/{identifier}'.format(
        identifier=d.identifier)
    raw_decklist = fetcher.internal.fetch(download_url)
    return decklist.parse(raw_decklist)