Example #1
def rewrite_classes_ifi(query_object, format, fieldname):
    """
    Rewrite all patent classifications in the query
    expression AST from OPS format to IFI format.
    """
    def token_callback(token, *args, **kwargs):

        # Single-element tokens carry a bare classification value
        if len(token) == 1:
            try:
                # Convert the classification from OPS to IFI notation
                class_ifi = ifi_convert_class(token[0])
                # Wrap the converted class into a fielded query expression
                token[0] = format_expression(format, fieldname, class_ifi)

            except Exception:
                # Not a recognizable classification; leave the token untouched
                pass

    walk_token_results(query_object.tokens, token_callback=token_callback)
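
These examples hand callbacks to walk_token_results, which traverses the tokens of a parsed query expression. The sketch below is only an assumed calling convention inferred from the callbacks in these examples, not the library's implementation: single-element tokens are passed to token_callback, while (index, binop, term) triples are passed to triple_callback, which then mutates the token in place.

# Illustrative sketch only: an assumed dispatch contract for walk_token_results,
# inferred from the callback signatures in these examples.
def walk_token_results_sketch(tokens, token_callback=None, triple_callback=None):
    for token in tokens:
        if len(token) == 3 and triple_callback is not None:
            index, binop, term = token
            triple_callback(token, index, binop, term)
        elif len(token) == 1 and token_callback is not None:
            token_callback(token)

# Usage: uppercase the term of every (index, binop, term) triple in place.
def uppercase_term(token, index, binop, term):
    token[2] = term.upper()

tokens = [['pn', '=', 'ep666666'], ['h01f']]
walk_token_results_sketch(tokens, triple_callback=uppercase_term)
print(tokens)  # [['pn', '=', 'EP666666'], ['h01f']]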
Example #2
def normalize_patentnumbers(tokens):
    """
    Normalize patent numbers in the query.

    >>> tokens = parse_cql('pn=EP666666')
    >>> normalize_patentnumbers(tokens)
    >>> tokens_to_cql(tokens)
    u'pn=EP0666666'

    """
    def action(token, index, binop, term):
        # Strip surrounding quotes from the term
        term = term.replace('"', '')
        # Apply document number normalization to values of certain indexes only
        if index.lower() in indexes_publication_number:
            term = normalize_patent(term, fix_kindcode=True)
            if term:
                # Write the normalized number back into the token in place
                token[2] = term

    walk_token_results(tokens, triple_callback=action)
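
One detail worth noting in the callback above: rebinding term inside action would not change the query on its own, so the normalized value has to be written back through token[2]. The snippet below demonstrates that write-back with hypothetical stand-ins for indexes_publication_number and normalize_patent, which the real module defines elsewhere.

# Hypothetical stand-ins, for illustration only: assumed index names and a
# simplified EP-number padding in place of the real normalize_patent().
indexes_publication_number = ['pn', 'num']

def normalize_patent(number, fix_kindcode=False):
    if number.upper().startswith('EP'):
        return 'EP' + number[2:].zfill(7)   # "EP666666" -> "EP0666666"
    return number

token = ['pn', '=', 'EP666666']
index, binop, term = token
if index.lower() in indexes_publication_number:
    term = normalize_patent(term, fix_kindcode=True)
    if term:
        token[2] = term                     # write back through the token
print(token)                                # ['pn', '=', 'EP0666666']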
Example #3
def rewrite_classes_ops(query_object):
    """
    Rewrite all patent classifications in the query
    expression AST from IFI format to OPS format.
    """

    if not query_object:
        return

    def triple_callback(token, index, binop, term):

        if index in ['ic', 'cpc']:
            try:
                # Decode IPC or CPC class from format "G01F000184"
                patent_class = IpcDecoder(term)

                # Encode IPC or CPC class to format "G01F1/84"
                # token[2] has a reference to "term"
                token[2] = patent_class.formatOPS()

            except Exception:
                # Not a decodable IPC/CPC class; leave the term untouched
                pass

    walk_token_results(query_object.tokens, triple_callback=triple_callback)
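
For orientation, the conversion described in the comments can be approximated at the string level as shown below. This is an illustration only, assuming a four-character subclass prefix, a zero-padded four-digit main group and the remainder as the subgroup; the real IpcDecoder handles far more classification variants than this.

# Rough string-level equivalent of the IFI -> OPS class conversion described above.
# Illustration only; not the IpcDecoder implementation.
def ifi_to_ops_class(value):
    prefix = value[:4]                    # e.g. "G01F"
    main_group = value[4:8].lstrip('0')   # "0001" -> "1"
    subgroup = value[8:]                  # "84"
    return '{0}{1}/{2}'.format(prefix, main_group, subgroup)

print(ifi_to_ops_class('G01F000184'))     # "G01F1/84"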