# Example 1 (score: 0)
def apply_obfuscation(source):
    """
    Tokenize *source*, obfuscate every eligible variable name, and return
    the rewritten source code as a string.

    Refreshes the module-level ``keyword_args`` and ``imported_modules``
    globals before performing any replacements.
    """
    global keyword_args
    global imported_modules

    tokens = token_utils.listified_tokenizer(source)
    keyword_args = analyze.enumerate_keyword_args(tokens)
    imported_modules = analyze.enumerate_imports(tokens)

    variables = find_obfuscatables(tokens, obfuscatable_variable)
    classes = find_obfuscatables(tokens, obfuscatable_class)
    functions = find_obfuscatables(tokens, obfuscatable_function)

    # Never rename a name that is actually an imported module.
    for candidate in set(variables) - set(imported_modules):
        replace_obfuscatables(
            imported_modules, tokens, obfuscate_variable, candidate,
            name_generator)
    # Class and function renaming is intentionally disabled here:
    # for function in functions:
    #     replace_obfuscatables(imported_modules,
    #         tokens, obfuscate_function, function, name_generator)
    # for _class in classes:
    #     replace_obfuscatables(imported_modules, tokens, obfuscate_class,
    #         _class, name_generator)
    return token_utils.untokenize(tokens)
# Example 2 (score: 0)
def find_obfuscatables(tokens, obfunc, ignore_length=False):
    """
    Scan *tokens* (the listified equivalent of what
    tokenize.generate_tokens() produces) and return the list of token
    strings that *obfunc* deems safe to obfuscate.

    *obfunc* is invoked as ``obfunc(tokens, index, ignore_length=...)``
    and may return:

        - a token string:      collected (duplicates are ignored),
        - **'__skipline__'**:  ignore tokens until the next NEWLINE,
        - **'__skipnext__'**:  ignore the next candidate token,
        - a falsy value:       nothing collected; any pending skip-next
                               is cancelled.

    If *ignore_length* is ``True`` then single-character obfuscatables
    are collected anyway (even though renaming them saves no space).
    """
    global keyword_args
    global imported_modules
    keyword_args = analyze.enumerate_keyword_args(tokens)
    imported_modules = analyze.enumerate_imports(tokens)

    found = []
    skipping_line = False
    skipping_next = False
    for position, tok in enumerate(tokens):
        if tok[0] == tokenize.NEWLINE:
            skipping_line = False
        if skipping_line:
            continue
        candidate = obfunc(tokens, position, ignore_length=ignore_length)
        if not candidate:
            # A miss cancels any pending skip-next so we don't
            # accidentally skip the following identifier.
            skipping_next = False
            continue
        if skipping_next:
            skipping_next = False
        elif candidate == '__skipline__':
            skipping_line = True
        elif candidate == '__skipnext__':
            skipping_next = True
        elif candidate not in found:
            found.append(candidate)
    return found
# Example 3 (score: 0)
def obfuscate(module, tokens, options, name_generator=None, table=None):
    """
    Run the configured obfuscation passes over *tokens* (modified in place).

    :param module: Module identifier forwarded to replace_obfuscatables().
    :param tokens: Listified tokenizer output to rewrite in place.
    :param options: Mapping of settings; reads ``'replacement_length'`` and
        ``'obfuscate'`` here.
    :param name_generator: Optional shared generator of replacement names.
        Created here when not supplied, so repeated calls can share one
        instance and avoid duplicate replacement names.
    :param table: Optional table forwarded to replace_obfuscatables()
        (presumably a saved name-mapping -- confirm against that function).
    """

    # Need a universal instance of our generator to avoid duplicates
    identifier_length = int(options['replacement_length'])
    ignore_length = False  # NOTE(review): assigned but never read in the visible body
    global keyword_args
    # Refresh the module-level keyword-argument table for this token stream.
    keyword_args = analyze.enumerate_keyword_args(tokens)
    if not name_generator:

        name_generator = obfuscation_machine(
            identifier_length=identifier_length)
    if options['obfuscate']:
        # Full obfuscation: rename variables first, then classes/functions.
        variables = find_obfuscatables(tokens, obfuscatable_variable)
        classes = find_obfuscatables(tokens, obfuscatable_class_function)
        for variable in variables:
            replace_obfuscatables(module, tokens, obfuscate_variable, variable,
                                  name_generator, table)

        for _class in classes:
            replace_obfuscatables(module, tokens, obfuscate_class, _class,
                                  name_generator, table)