Example #1
def valid_name(name: str) -> str:
    if name in CARDS_BY_NAME:
        return name
    canonicalized = card.canonicalize(name)
    for k in CARDS_BY_NAME:
        if canonicalized == card.canonicalize(k):
            return k
    raise InvalidDataException('Did not find any cards looking for `{name}`'.format(name=name))
def name_lookup(name):
    if not CARDS:
        rs = database.DATABASE.execute(oracle.base_query())
        for row in rs:
            CARDS[card.canonicalize(row['name'])] = row['name']
    canonical = card.canonicalize(name)
    if canonical not in CARDS:
        print("Bogus name {name} ({canonical}) found.".format(name=name, canonical=canonical))
        return name
    return CARDS[canonical]
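
The lookup above leans on a module-level CARDS dict that maps each canonicalized name back to its original printed spelling, built once from the database. A minimal, self-contained sketch of that pattern (the stand-in canonicalize and the hard-coded rows are illustrative only; the real card.canonicalize is tested in Example #13):

CARDS = {}  # canonicalized name -> original printed name

def canonicalize(name):
    return name.lower()  # stand-in: the real card.canonicalize also strips accents etc.

def name_lookup(name):
    if not CARDS:
        for original in ['Dark Ritual', 'Kongming, "Sleeping Dragon"']:  # stands in for the database rows
            CARDS[canonicalize(original)] = original
    return CARDS.get(canonicalize(name), name)

print(name_lookup('dark ritual'))  # -> 'Dark Ritual'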
Example #3
def name_lookup(name):
    if name == 'Kongming, Sleeping Dragon':
        name = 'Kongming, "Sleeping Dragon"'
    elif name == 'Pang Tong, Young Phoenix':
        name = 'Pang Tong, "Young Phoenix"'
    if not CARDS:
        rs = db().execute(multiverse.base_query())
        for row in rs:
            CARDS[card.canonicalize(row['name'])] = row['name']
    canonical = card.canonicalize(name)
    if canonical not in CARDS:
        raise InvalidDataException("Bogus name {name} ({canonical}) found.".format(name=name, canonical=canonical))
    return CARDS[canonical]
Example #4
    def search(self, w):
        if not self.ix.up_to_date():
            self.initialize_trie()  # if the index is not up to date, someone has added cards, so we reinitialize the trie

        # If we searched for an alias, make it the exact hit
        for alias, name in fetcher.card_aliases():
            if w == card.canonicalize(alias):
                return SearchResult(name, None, None, None)

        normalized = list(WhooshConstants.normalized_analyzer(w))[0].text

        # If we get matches by prefix, we return that
        exact, prefix_whole_word, other_prefixed = self.find_matches_by_prefix(
            normalized)
        if exact or len(prefix_whole_word) > 0 or len(other_prefixed) > 0:
            return SearchResult(exact, prefix_whole_word, other_prefixed, None)

        # We try fuzzy and stemmed queries
        query_normalized = fuzzy_term(normalized, self.DIST, "name_normalized")
        query_stemmed = And([
            Term('name_stemmed', q.text)
            for q in WhooshConstants.stem_analyzer(w)
        ])
        query_tokenized = And([
            fuzzy_term(q.text, self.DIST, "name_tokenized")
            for q in WhooshConstants.tokenized_analyzer(w)
        ])
        query = Or([query_normalized, query_tokenized, query_stemmed])

        with self.ix.searcher() as searcher:
            fuzzy = [(r['name'], r.score)
                     for r in searcher.search(query, limit=40)]
        return SearchResult(exact, prefix_whole_word, other_prefixed, fuzzy)
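
fuzzy_term is a helper defined elsewhere in this codebase and not shown in these examples. A plausible sketch, assuming it wraps whoosh.query.FuzzyTerm and falls back to an exact Term for very short tokens (the length cutoff and prefixlength are assumptions):

from whoosh.query import FuzzyTerm, Term

def fuzzy_term(q, dist, field):
    if len(q) <= 3:
        return Term(field, q)  # fuzzy matching on very short tokens is mostly noise
    return FuzzyTerm(field, q, maxdist=dist, prefixlength=1)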
def parse_queries(content: str, scryfall_compatability_mode: bool) -> List[str]:
    to_scan = re.sub('`{1,3}[^`]*?`{1,3}', '', content, flags=re.DOTALL)  # Ignore square brackets inside backticks. It's annoying in #code.
    if scryfall_compatability_mode:
        queries = re.findall(r'(?<!\[)\[([^\]]*)\](?!\])', to_scan)  # match [card] but not [[card]]
    else:
        queries = re.findall(r'\[?\[([^\]]*)\]\]?', to_scan)
    return [card.canonicalize(query) for query in queries if len(query) > 2]
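
The two regexes above differ only in how they treat double brackets. A standalone check of that behaviour (the sample message text is made up for illustration):

import re

text = 'Try [Dark Ritual] or [[Séance]] here.'
# Scryfall-compatibility mode: [card] matches, [[card]] does not.
print(re.findall(r'(?<!\[)\[([^\]]*)\](?!\])', text))  # ['Dark Ritual']
# Default mode: both [card] and [[card]] match.
print(re.findall(r'\[?\[([^\]]*)\]\]?', text))  # ['Dark Ritual', 'Séance']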
Example #6
def search(query):
    like_query = '%{query}%'.format(query=card.canonicalize(query))
    sql = """
        {base_query}
        HAVING name_ascii LIKE %s OR names LIKE %s
        ORDER BY pd_legal DESC, name
    """.format(base_query=multiverse.base_query())
    rs = db().execute(sql, [like_query, like_query])
    return [card.Card(r) for r in rs]
def cards_from_query(query):
    # Skip searching if the request is too short.
    if len(query) <= 2:
        return []

    mode = 0
    if query.startswith('$'):
        mode = '$'
        query = query[1:]

    query = card.canonicalize(query)

    # If we searched for an alias, change query so we can find the card in the results.
    for alias, name in fetcher.card_aliases():
        if query == card.canonicalize(alias):
            query = card.canonicalize(name)

    cards = search(query)
    cards = [c for c in cards if c.layout != 'token' and c.type != 'Vanguard']

    # First look for an exact match.
    results = []
    for c in cards:
        c.mode = mode
        if query == card.canonicalize(c.name):
            results.append(c)
    if len(results) > 0:
        return results

    for c in cards:
        names = [card.canonicalize(name) for name in c.names]
        if query in names:
            results.append(c)
    if len(results) > 0:
        return results

    # If not found, use cards that start with the query and a punctuation char.
    for c in cards:
        names = [card.canonicalize(name) for name in c.names]
        for name in names:
            if name.startswith('{query} '.format(query=query)) or name.startswith('{query},'.format(query=query)):
                results.append(c)
    if len(results) > 0:
        return results

    # If not found, use cards that start with the query.
    for c in cards:
        names = [card.canonicalize(name) for name in c.names]
        for name in names:
            if name.startswith(query):
                results.append(c)
    if len(results) > 0:
        return results

    # If we didn't find any of those then use all search results.
    return cards
Example #8
def name_lookup(name: str) -> str:
    if name == 'Kongming, Sleeping Dragon':
        name = 'Kongming, "Sleeping Dragon"'
    elif name == 'Pang Tong, Young Phoenix':
        name = 'Pang Tong, "Young Phoenix"'
    try:
        if not CARDS:
            rs = db().select(multiverse.base_query())
            for row in rs:
                CARDS[card.canonicalize(row['name'])] = row['name']
    except DatabaseException:
        tb = traceback.format_exc()
        print(tb)
        if not CARDS:
            CARDS[''] = ''  # Filler, so that we don't try to do this every lookup.

    canonical = card.canonicalize(name)
    if canonical not in CARDS:
        if CARDS.get('', None) is None:
            print('WARNING: Bogus name {name} ({canonical}) found.'.format(
                name=name, canonical=canonical))
        return name
    return CARDS[canonical]
Example #9
def search(query, fuzzy_threshold=260):
    query = card.canonicalize(query)
    like_query = '%{query}%'.format(query=query)
    if db().is_mysql():
        having = 'name_ascii LIKE ? OR names LIKE ?'
        args = [like_query, like_query]
    else:
        having = """LOWER({name_query}) IN (SELECT word FROM fuzzy WHERE word MATCH ? AND distance <= {fuzzy_threshold})
            OR {name_ascii_query} LIKE ?
            OR SUM(CASE WHEN LOWER(face_name) IN (SELECT word FROM fuzzy WHERE word MATCH ? AND distance <= {fuzzy_threshold}) THEN 1 ELSE 0 END) > 0
        """.format(name_query=card.name_query().format(table='u'), name_ascii_query=card.name_query('name_ascii').format(table='u'), fuzzy_threshold=fuzzy_threshold)
        fuzzy_query = '{query}*'.format(query=query)
        args = [fuzzy_query, like_query, fuzzy_query]
    sql = """
        {base_query}
        HAVING {having}
        ORDER BY pd_legal DESC, name
    """.format(base_query=base_query(), having=having)
    rs = db().execute(sql, args)
    return [card.Card(r) for r in rs]
Example #10
def basename(cards) -> str:
    from magic import card
    return '_'.join(
        re.sub('[^a-z-]', '-', card.canonicalize(c.name)) for c in cards)
Example #11
def parse_queries(content: str) -> List[str]:
    queries = re.findall(r'\[?\[([^\]]*)\]\]?', content)
    return [card.canonicalize(query) for query in queries if len(query) > 2]
Example #12
def uniqify_cards(cards: List[Card]) -> List[Card]:
    # Remove multiple printings of the same card from the result set.
    results: Dict[str, Card] = collections.OrderedDict()
    for c in cards:
        results[card.canonicalize(c.name)] = c
    return list(results.values())
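
A tiny illustration of the ordered-dict deduplication above, with stand-ins for Card and card.canonicalize (the field names and sample printings are made up):

import collections
from typing import Dict, List, NamedTuple

class Card(NamedTuple):  # stand-in for the real Card class
    name: str
    set_code: str

def canonicalize(name: str) -> str:  # stand-in for card.canonicalize
    return name.lower()

def uniqify_cards(cards: List[Card]) -> List[Card]:
    results: Dict[str, Card] = collections.OrderedDict()
    for c in cards:
        results[canonicalize(c.name)] = c  # a later printing of the same name overwrites the earlier one
    return list(results.values())

print(uniqify_cards([Card('Dark Ritual', 'LEA'), Card('Dark Ritual', 'MMQ'), Card('Séance', 'DKA')]))
# -> [Card(name='Dark Ritual', set_code='MMQ'), Card(name='Séance', set_code='DKA')]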
Example #13
def test_canonicalize() -> None:
    assert card.canonicalize('Jötun Grunt') == 'jotun grunt'
    assert card.canonicalize('Séance') == 'seance'
    assert card.canonicalize('Far/Away') == 'far // away'
    assert card.canonicalize('Dark Ritual') == 'dark ritual'
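
The assertions above pin down the core of what canonicalize must do: lowercase, strip diacritics, and rewrite split-card separators. A minimal sketch that satisfies exactly these four cases (the real card.canonicalize almost certainly handles more edge cases):

import unicodedata

def canonicalize(name: str) -> str:
    # Strip diacritics: 'Jötun' -> 'Jotun', 'Séance' -> 'Seance'.
    name = ''.join(ch for ch in unicodedata.normalize('NFD', name) if not unicodedata.combining(ch))
    # Normalize split-card separators: 'Far/Away' -> 'Far // Away'.
    if '/' in name and ' // ' not in name:
        name = ' // '.join(part.strip() for part in name.split('/') if part.strip())
    return name.lower()

assert canonicalize('Jötun Grunt') == 'jotun grunt'
assert canonicalize('Séance') == 'seance'
assert canonicalize('Far/Away') == 'far // away'
assert canonicalize('Dark Ritual') == 'dark ritual'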
Example #14
def parse_queries(content: str) -> List[str]:
    to_scan = re.sub(
        '`{1,3}[^`]*?`{1,3}', '', content, flags=re.DOTALL
    )  # Ignore square brackets inside backticks. It's annoying in #code.
    queries = re.findall(r'\[?\[([^\]]*)\]\]?', to_scan)
    return [card.canonicalize(query) for query in queries if len(query) > 2]
def basename(cards: List[Card]) -> str:
    return '_'.join(
        re.sub('[^a-z-]', '-', card.canonicalize(c.name)) +
        (c.get('preferred_printing', '') or '') for c in cards)
def basename(cards: List[Card]) -> str:
    return '_'.join(
        re.sub('[^a-z-]', '-', card.canonicalize(c.name)) for c in cards)
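
A quick check of what the filename builder above produces, again with stand-ins for Card and card.canonicalize (sample names are taken from the tests in Example #13):

import re
from typing import List, NamedTuple

class Card(NamedTuple):  # stand-in for the real Card class
    name: str

def canonicalize(name: str) -> str:  # stand-in: lowercase only; the real card.canonicalize also strips accents
    return name.lower()

def basename(cards: List[Card]) -> str:
    return '_'.join(re.sub('[^a-z-]', '-', canonicalize(c.name)) for c in cards)

print(basename([Card('Dark Ritual'), Card('Jötun Grunt')]))
# -> 'dark-ritual_j-tun-grunt' (with the real canonicalize the accent is stripped first, giving 'jotun-grunt')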