Example #1
import tokenizer


def mted(tokenPath, sources, compress):
    tok = tokenizer.Tokenizer(tokenPath)
    functions = tok.split_functions(False)

    # sort by token count; break ties alphabetically by function name
    # (each entry is (name, tokens, source))
    functions.sort(key=lambda funct: (len(funct[1]), funct[0]))
    
    # compress and output
    results = ""
    for funct in functions:
        # only include functions that came from the requested sources
        if funct[2] in sources:
            if compress:
                results += tokenizer.compress_tokens(funct[1])
            else:
                results += " ".join(funct[1])

        # when not compressing, separate entries with a space (trimmed below)
        if not compress:
            results += " "

    return results.strip()
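
A minimal usage sketch, assuming the project's tokenizer module is importable; the token path and source file names below are hypothetical placeholders, not values from the original project:

# hypothetical call: path and source names are placeholders
minified = mted("build/app.tok", ["app.js", "util.js"], True)    # compressed tokens
readable = mted("build/app.tok", ["app.js", "util.js"], False)   # space-separated tokens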
Example #2
import tokenizer


def simple(filepath):
    # tokenize the entire file and return its compressed token stream
    tok = tokenizer.Tokenizer(filepath)
    results = tok.full_tokenize()
    return tokenizer.compress_tokens(results)
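
A similarly hedged usage sketch; the file path is a placeholder:

# hypothetical call: the path is a placeholder
compressed = simple("src/app.js")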