Example #1
def apply_obfuscation(source):
    """
    Returns 'source' all obfuscated.
    """
    global keyword_args
    global imported_modules

    tokens = token_utils.listified_tokenizer(source)
    keyword_args = analyze.enumerate_keyword_args(tokens)
    imported_modules = analyze.enumerate_imports(tokens)

    variables = find_obfuscatables(tokens, obfuscatable_variable)
    classes = find_obfuscatables(tokens, obfuscatable_class)
    functions = find_obfuscatables(tokens, obfuscatable_function)

    # Don't rename anything that is actually an imported module, or the
    # import statements would break:
    variables = list(set(variables).difference(set(imported_modules)))
    for variable in variables:
        replace_obfuscatables(imported_modules, tokens, obfuscate_variable,
                              variable, name_generator)
    # for function in functions:
    #     replace_obfuscatables(imported_modules, tokens, obfuscate_function,
    #                           function, name_generator)
    # for _class in classes:
    #     replace_obfuscatables(imported_modules, tokens, obfuscate_class,
    #                           _class, name_generator)
    return token_utils.untokenize(tokens)
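A minimal usage sketch, assuming apply_obfuscation sits alongside the
pyminifier-style analyze, obfuscate, and token_utils modules used above, and
that name_generator is created at module level the same way Example #3 does
("target.py" is a placeholder path):

name_generator = obfuscate.obfuscation_machine(identifier_length=1)
with open("target.py") as f:
    print(apply_obfuscation(f.read()))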
Example #2
def insert_in_next_line(tokens, index, string):
    """
    Inserts the given string after the next newline inside tokens starting at
    *tokens[index]*.  Indents must be a list of indentation tokens that will
    preceeed the insert (can be an empty list).
    """
    tokenized_string = token_utils.listified_tokenizer(string)
    for i, tok in enumerate(tokens[index:]):
        token_type = tok[0]
        if token_type in (tokenize.NL, tokenize.NEWLINE):
            # Splice the new tokens in directly after the newline token:
            for count, item in enumerate(tokenized_string):
                tokens.insert(index + i + count + 1, item)
            break
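A quick illustration of the splice, assuming the pyminifier-style token_utils
helpers from the other examples (the source strings are made up):

tokens = token_utils.listified_tokenizer("x = 1\ny = 2\n")
insert_in_next_line(tokens, 0, "import os\n")
# The "import os" tokens now sit right after the first statement's newline:
print(token_utils.untokenize(tokens))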
Example #3
def pyminify(options, _file):
    module = os.path.split(_file)[1]
    module = ".".join(module.split('.')[:-1])  # Module name sans extension
    filesize = os.path.getsize(_file)
    with open(_file) as f:
        source = f.read()
    tokens = token_utils.listified_tokenizer(source)

    # Perform obfuscation if the option was set
    if options['obfuscate']:
        identifier_length = int(options['replacement_length'])
        name_generator = obfuscate.obfuscation_machine(
            identifier_length=identifier_length)
        obfuscate.obfuscate(module, tokens, options)

    result = token_utils.untokenize(tokens).strip()
    #result = filter(lambda x: x != '\r' and x != '\n', ' '.join(result.split()))
    print(result)
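A hypothetical call: the dict keys mirror the ones pyminify reads above, and
the values and filename are made up for illustration:

options = {'obfuscate': True, 'replacement_length': 2}
pyminify(options, 'example.py')  # prints the processed source of example.py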
Example #4
    # Rename functions, then classes, then the methods of imported modules
    # and the builtins:
    for function in functions:
        replace_obfuscatables(module, tokens, obfuscate_function, function,
                              name_generator, table)

    for _class in classes:
        replace_obfuscatables(module, tokens, obfuscate_class, _class,
                              name_generator, table)

    obfuscate_global_import_methods(module, tokens, name_generator, table)
    obfuscate_builtins(module, tokens, name_generator, table)


if __name__ == "__main__":
    global name_generator
    if len(sys.argv) != 3:
        print("Usage: %s <emoji_length> <filename.py>" % sys.argv[0])
        sys.exit(1)

    with open(sys.argv[2]) as f:
        source = f.read()
    replacement_length = int(sys.argv[1])

    # Minify first, then re-tokenize the minified source so the obfuscation
    # pass works on fresh tokens:
    tokens = token_utils.listified_tokenizer(source)
    source = minification.minify(tokens)

    tokens = token_utils.listified_tokenizer(source)

    obfuscate(source, tokens, replacement_length)
    result = token_utils.untokenize(tokens)
    print(result)
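Example #4 is meant to run as a script; with the snippet saved as, say,
emoji_minify.py (a placeholder name), it would be invoked as:

    python emoji_minify.py 3 target.py

where sys.argv[1] is the replacement-identifier length (the usage string
calls it <emoji_length>) and sys.argv[2] is the file to minify and obfuscate.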