Example #1
  def deep_tokenize(self, function, input):
    # Recursively tokenize expressions nested inside dicts and lists;
    # anything that is not a decompiler expression falls back to repr().
    if isinstance(input, dict):
      tokenized = {}
      for left, right in input.items():
        tkey = self.deep_tokenize(function, left)
        tokenized[tkey] = self.deep_tokenize(function, right)
      return tokenized
    elif isinstance(input, list):
      return [self.deep_tokenize(function, expr) for expr in input]
    elif isinstance(input, (assignable_t, expr_t, value_t)):
      t = c.tokenizer(function)
      tokens = list(t.expression_tokens(input))
      return ''.join(str(tok) for tok in tokens)
    else:
      return repr(input)
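A minimal standalone sketch of the same recursive pattern, with repr() standing in for the tokenizer (the name deep_repr and the sample data are illustrative, not from the source):

def deep_repr(value):
    # Mirrors deep_tokenize's recursion: descend into dicts and lists,
    # stringify the leaves.
    if isinstance(value, dict):
        return {deep_repr(k): deep_repr(v) for k, v in value.items()}
    elif isinstance(value, list):
        return [deep_repr(v) for v in value]
    else:
        return repr(value)

print(deep_repr({'regs': ['eax', 'ebx'], 'stack': 8}))
# {"'regs'": ["'eax'", "'ebx'"], "'stack'": '8'}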
Example #2
  def update(self, function):

    self.function = function

    t = c.tokenizer(function)
    tokens = list(t.flow_tokens())

    self.clear()

    # insert all tokens as text with proper colors
    self.inserting = True
    for tok in tokens:
      self.insert_token(tok)
    self.inserting = False

    # build a map of which text fragments belong to which token.
    doc = self.document()
    block = doc.begin()
    while block != doc.end():

      for it in block:
        frag = it.fragment()
        fmt = frag.charFormat()
        tok = fmt.property(QtGui.QTextFormat.UserProperty)

        s = str(tok)

        tf = token_fragment(frag, tok)
        if s not in self.__textmap:
            self.__textmap[s] = []
        self.__textmap[s].append(tf)

        self.__fragments.append(tf)

      block = block.next()

    return
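The map built above supports reverse lookups from a token to the text fragments that display it; a hedged sketch of such a helper (the method name fragments_for is hypothetical, not from the source):

  def fragments_for(self, tok):
    # Hypothetical helper: return all token_fragment objects whose
    # on-screen text was produced by `tok`, via the map built in update().
    return self.__textmap.get(str(tok), [])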
Example #3
 def update(self, flow):

     self.flow = flow

     t = c.tokenizer(flow)
     tokens = list(t.flow_tokens())

     self.clear()

     # insert all tokens as text with proper colors
     self.inserting = True
     for tok in tokens:
         self.insert_token(tok)
     self.inserting = False

     # build a map of which text fragments belong to which token.
     doc = self.document()
     block = doc.begin()
     while block != doc.end():

         for it in block:
             frag = it.fragment()
             fmt = frag.charFormat()
             tok = fmt.property(QtGui.QTextFormat.UserProperty)

             s = str(tok)
             print('inserted %s %s' % (frag.text(), s))

             tf = token_fragment(frag, tok)
             if s not in self.__textmap:
                 self.__textmap[s] = []
             self.__textmap[s].append(tf)

             self.__fragments.append(tf)

         block = block.next()

     return
Example #4
import sys
from capstone import *
from decompiler import *
from host import dis
from output import c

# Create a Capstone object, which will be used as the disassembler
md = Cs(CS_ARCH_X86, CS_MODE_32)

# Machine code bytes to disassemble, taken from the first command-line argument
code = sys.argv[1]

# Create the capstone-specific backend; it will yield expressions that the decompiler is able to use.
disasm = dis.available_disassemblers['capstone'].create(md, code, 0x1000)
# Create the decompiler
dec = decompiler_t(disasm, 0x1000)

# Transform the function until it is decompiled
dec.step_until(step_decompiled)

# Tokenize the function and print it as a string
print(''.join([str(o) for o in c.tokenizer(dec.function).tokens]))
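If the raw opcode bytes are awkward to pass on the command line, they can be hard-coded for a quick test; a hedged variant (the byte string below encodes mov eax, 1 followed by ret for x86-32 and is purely illustrative):

# Hedged alternative to the sys.argv[1] line above: hard-code the bytes.
code = b'\xb8\x01\x00\x00\x00\xc3'  # mov eax, 1 ; ret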

Example #5
 def tokenize(self, function):
   t = c.tokenizer(function, indent='  ')
   tokens = list(t.tokens)
   return self.unindent(''.join(str(tok) for tok in tokens))
Example #6
 def tokenize(self, flow):
   t = c.tokenizer(flow, indent='  ')
   tokens = list(t.flow_tokens())
   return self.unindent(''.join(str(tok) for tok in tokens))
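Both tokenize variants above rely on an unindent helper that is not shown; a plausible sketch, assuming it simply strips the common leading indentation that indent='  ' introduced:

 def unindent(self, text):
   # Hypothetical implementation: drop the whitespace that is common
   # to the start of every line.
   import textwrap
   return textwrap.dedent(text)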