Example #1
    def __setstate__(self, state):
        self.eprobs = state[0]
        self.IS_LEAF = state[1]
        self.column = state[2]
        self.cutoff = state[3]
        self._less_tree = state[4]
        self._ge_tree = state[5]
        self.g = DirectedGraph()
        self.g.add_node(self)

        self.probs = 0.5 * self.eprobs
        self.probs = self.probs / numpy.sum(self.probs)

        if self._less_tree is not None:
            assert self._ge_tree is not None

            self.g.union(self._less_tree.g)
            self.g.union(self._ge_tree.g)
            self._less_tree.g = self.g
            self._ge_tree.g = self.g

            less_edge = self.__symbolic_column_name__(
                self.column) < self.cutoff
            ge_edge = self.__symbolic_column_name__(self.column) >= self.cutoff

            self.g.connect(self, self._ge_tree, edgeValue=str(ge_edge))
            self.g.connect(self, self._less_tree, edgeValue=str(less_edge))
Example #2
def forward_data_flow(source, ea=None, calldepth=0):
    if ea is None:
        ea = ScreenEA()

    _clear_colored()

    inst, dst, src = wilds("inst dst src")
    w = WildResults()

    tainted = VersionedSet()
    tainted.version = -1
    tainted.add(source)

    def _fix_esp(ea, exp):
        spd = GetSpd(ea)
        return exp.substitute({esp: (esp + spd).simplify()})

    fg = FunctionGraph(ea)

    # data connections graph
    TAINTED = symbols("TAINTED")
    dg = DirectedGraph()
    dg.connect(TAINTED, source)

    for addr, level in fg.walk(ea, depthfirst=True):
        # when the depth-first walk backtracks, roll the taint set back to the
        # state it had at the parent level
        if level <= tainted.version:
            print("reverting to version %s" % (level - 1))
            tainted = tainted.get_version(level - 1)

        tainted.version = level

        syminst = symdecode(addr)

        if syminst.match(inst(dst, src), w) and w.inst in tainted_dst_src_insts:
            print "analyzing %s" % (syminst,)

            # untaint cleared registers
            if syminst.match(XOR(dst, dst)) and w.dst in tainted:
                tainted.remove(w.dst)

            elif w.src in tainted:
                # a tainted source propagates taint to the destination
                _color(addr)
                print("tainting %s" % (w.dst,))
                tainted.add(w.dst)

            elif w.dst in tainted:
                # the destination is overwritten with untainted data
                tainted.remove(w.dst)

    return tainted
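
The VersionedSet used above is not defined in this excerpt. Judging only from its use here (a writable version attribute, get_version to roll back, and ordinary add/remove/membership tests), one plausible minimal implementation is sketched below. It snapshots the contents whenever version is advanced, which matches the revert-on-backtrack pattern in the walk loop; the real class may well differ.

class VersionedSet(object):
    """Plausible sketch of a set that can roll back to an earlier version (assumed, not the real class)."""

    def __init__(self):
        self._items = set()
        self._snapshots = {}   # version -> copy of the contents when that version was left
        self._version = -1

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, v):
        # remember what the set looked like at the version we are leaving
        self._snapshots[self._version] = set(self._items)
        self._version = v

    def get_version(self, v):
        # roll back to the newest snapshot taken at or before version v,
        # keeping older snapshots around for further rollbacks
        rolled = VersionedSet()
        rolled._snapshots = dict(
            (k, set(s)) for k, s in self._snapshots.items() if k <= v)
        if rolled._snapshots:
            rolled._items = set(rolled._snapshots[max(rolled._snapshots)])
        rolled._version = v
        return rolled

    def add(self, item):
        self._items.add(item)

    def remove(self, item):
        self._items.discard(item)

    def __contains__(self, item):
        return item in self._items

With these semantics, reverting to get_version(level - 1) in the loop restores the taint set to whatever it contained before the walker descended past that level.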
Example #3
  def __setstate__(self, state):
    self.eprobs = state[0]
    self.IS_LEAF = state[1]
    self.column = state[2]
    self.cutoff = state[3]
    self._less_tree = state[4]
    self._ge_tree = state[5]
    self.g = DirectedGraph()
    self.g.add_node(self)

    self.probs = 0.5 * self.eprobs
    self.probs = self.probs / numpy.sum(self.probs)

    if self._less_tree is not None:
      assert self._ge_tree is not None

      self.g.union(self._less_tree.g)
      self.g.union(self._ge_tree.g)
      self._less_tree.g = self.g
      self._ge_tree.g = self.g
  
      less_edge = self.__symbolic_column_name__(self.column) < self.cutoff
      ge_edge = self.__symbolic_column_name__(self.column) >= self.cutoff
  
      self.g.connect(self, self._ge_tree, edgeValue=str(ge_edge))
      self.g.connect(self, self._less_tree, edgeValue=str(less_edge))
Example #4
class Fuzzy(object):
    def __init__(self):
        self.graph = DirectedGraph()

    def add(self, prop):
        self.graph.add_node(prop)

        if prop.is_Function:
            # recursively add sub-expressions and point each argument at the
            # expression it appears in
            for a in prop.args:
                self.add(a)
                self.graph.connect(a, prop)
                # bidirectional (defined elsewhere) names the functions whose
                # edges should also run the other way
                if str(prop.func) in bidirectional:
                    self.graph.connect(prop, a)

    def implies(self, condition, result):
        self.add(condition)
        self.add(result)
        self.graph.connect(condition, result, "Imp")
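
The DirectedGraph class itself does not appear in any of these examples. Going only by the calls made above (add_node, connect with an optional edge value, and union to merge another graph), the interface they assume can be sketched roughly as follows; the storage layout and names inside the class are illustrative assumptions, not the actual implementation.

class DirectedGraph(object):
    """Rough sketch of the interface the examples rely on (assumed, not the real class)."""

    def __init__(self):
        self.nodes = set()
        self.edges = {}   # (src, dst) -> edge value, or None

    def add_node(self, node):
        self.nodes.add(node)

    def connect(self, src, dst, edgeValue=None):
        # connecting two nodes implicitly registers both endpoints
        self.add_node(src)
        self.add_node(dst)
        self.edges[(src, dst)] = edgeValue

    def union(self, other):
        # merge another graph's nodes and edges into this one
        self.nodes.update(other.nodes)
        self.edges.update(other.edges)

A real implementation would presumably offer more (adjacency queries, traversal, rendering), but the sketch mirrors every call made in these examples.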
Example #5
    def __init__(self,
                 X,
                 oracle,
                 target=0.6,
                 maxdepth=5,
                 g=None,
                 eprobs=None,
                 bias=0.0):

        if g is None:
            g = DirectedGraph()

        if eprobs is None:
            eprobs = numpy.array([1.0, 1.0])

        self.g = g
        self.eprobs = eprobs
        self.IS_LEAF = False
        self.column = None
        self.cutoff = None
        self._less_tree = None
        self._ge_tree = None

        probs = 0.5 * self.eprobs
        probs = probs / numpy.sum(probs)
        self.probs = probs

        # leaf test: every probability lies outside the (1 - target, target) band
        met_target = numpy.all(
            [p <= 1.0 - target or p >= target for p in probs])

        self.g.add_node(self)

        if met_target:
            self._make_leaf()

        elif X.shape[0] < 10:
            self._make_leaf()

        elif maxdepth <= 1:
            self._make_leaf()

        else:
            self._make_split(X, oracle, target, maxdepth, bias)
Example #6
class OneClassTree(object):

  def __init__(self, X, oracle, target=0.6, maxdepth=5, g=None, eprobs=None, bias=0.0):

    if g is None:
      g = DirectedGraph()

    if eprobs is None:
      eprobs = numpy.array([1.0, 1.0])

    self.g = g
    self.eprobs = eprobs
    self.IS_LEAF = False
    self.column = None
    self.cutoff = None
    self._less_tree = None
    self._ge_tree = None

    probs = 0.5 * self.eprobs
    probs = probs / numpy.sum(probs)
    self.probs = probs

    # leaf test: every probability lies outside the (1 - target, target) band
    met_target = numpy.all([p <= 1.0 - target or p >= target for p in probs])

    self.g.add_node(self)

    if met_target:
      self._make_leaf()

    elif X.shape[0] < 10:
      self._make_leaf()

    elif maxdepth <= 1:
      self._make_leaf()

    else:
      self._make_split(X, oracle, target, maxdepth, bias)

  def selected_columns(self):
    if self.IS_LEAF:
      return set([])
    else:
      left = self._less_tree.selected_columns()
      right = self._ge_tree.selected_columns()
      middle = set([self.column])
      return middle.union(left).union(right)

  def __getstate__(self):
    return (self.eprobs, self.IS_LEAF, self.column, self.cutoff, self._less_tree, self._ge_tree)

  def __setstate__(self, state):
    self.eprobs = state[0]
    self.IS_LEAF = state[1]
    self.column = state[2]
    self.cutoff = state[3]
    self._less_tree = state[4]
    self._ge_tree = state[5]
    self.g = DirectedGraph()
    self.g.add_node(self)

    self.probs = 0.5 * self.eprobs
    self.probs = self.probs / numpy.sum(self.probs)

    if self._less_tree is not None:
      assert self._ge_tree is not None

      self.g.union(self._less_tree.g)
      self.g.union(self._ge_tree.g)
      self._less_tree.g = self.g
      self._ge_tree.g = self.g
  
      less_edge = self.__symbolic_column_name__(self.column) < self.cutoff
      ge_edge = self.__symbolic_column_name__(self.column) >= self.cutoff
  
      self.g.connect(self, self._ge_tree, edgeValue=str(ge_edge))
      self.g.connect(self, self._less_tree, edgeValue=str(less_edge))


  def classify(self, x):
    assert x.shape[0] > 0

    if x.shape[0] > 1:
      rv = numpy.vstack([self.classify(x[i]) for i in range(x.shape[0])])

    elif self.IS_LEAF:
      rv = self.eprobs

    else:
      rv = (self._less_tree.classify(x) if x[0, self.column] < self.cutoff
            else self._ge_tree.classify(x))

    if len(rv.shape) == 1:
      rv = numpy.array([rv])

    return rv

  def _make_leaf(self):
    self.IS_LEAF = True

  def _make_split(self, X, oracle, target, maxdepth, bias):
    self.IS_LEAF = False

    try:
      best = select_best_column(X, oracle, bias=bias)
    except NoBestColumn:
      self._make_leaf()
      return

    self.column = int(best[0])
    self.cutoff = float(best[1])

    # get indexes for >= and <
    columndata = _toarray(X[:,self.column].T)[0]
    geidx = columndata >= self.cutoff
    lidx = ~geidx

    # rows on each side of the split; an empty matrix stands in when a side has no rows
    lX = X[lidx.nonzero()[0], :] if lidx.any() else numpy.matrix(numpy.zeros((0, X.shape[1])))
    geX = X[geidx.nonzero()[0], :] if geidx.any() else numpy.matrix(numpy.zeros((0, X.shape[1])))

    # oracle estimate and empirical fraction of the probability mass on the >= side of the split
    ge_eprobs = numpy.array([oracle(self.column, self.cutoff), float(geX.shape[0]) / X.shape[0]], dtype=numpy.double)
    l_eprobs = 1.0 - ge_eprobs
    ge_eprobs = ge_eprobs * self.eprobs
    l_eprobs = l_eprobs * self.eprobs

    # need to rebuild oracles to account for this split
    def loracle(col, cut):
      if col != self.column:
        return oracle(col, cut)

      if cut >= self.cutoff:
        return 0.0
      else:
        pless = (1.0 - oracle(col, cut)) / (1.0 - oracle(col, self.cutoff))
        return 1.0 - pless

    def georacle(col, cut):
      if col != self.column:
        return oracle(col, cut)

      if cut < self.cutoff:
        return 0.0
      else:
        origp = oracle(col, self.cutoff)
        newp = oracle(col, cut)
        assert origp >= newp

        lessorig = 1.0 - origp
        lessnew = 1.0 - newp

        less = (lessnew - lessorig) / (1.0 - lessorig)
        return 1.0 - less

    # generate trees
    self._ge_tree = self.__class__(geX, georacle, target, maxdepth-1, g=self.g, eprobs=ge_eprobs, bias=bias)
    self._less_tree = self.__class__(lX, loracle, target, maxdepth-1, g=self.g, eprobs=l_eprobs, bias=bias)

    less_edge = self.__symbolic_column_name__(self.column) < self.cutoff
    ge_edge = self.__symbolic_column_name__(self.column) >= self.cutoff

    self.g.connect(self, self._ge_tree, edgeValue=str(ge_edge))
    self.g.connect(self, self._less_tree, edgeValue=str(less_edge))

  def __symbolic_column_name__(self, colnum):
    x = symbols('x')
    return x(colnum)

  def __str__(self):
    return str(id(self)) + "\n" + str(numpy.vstack([self.probs, self.eprobs]))

  def __repr__(self):
    return str(self)
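
For orientation, a hypothetical way to grow such a tree might look like the snippet below. It assumes the module defining OneClassTree (together with its helpers such as select_best_column) has been imported, and that the oracle argument returns, for a column index and a cutoff, the probability that a background sample's value in that column is at or above the cutoff; the Gaussian background and every name in the snippet are illustrative assumptions rather than part of the original code.

import numpy
from scipy.stats import norm   # hypothetical background model: standard normal marginals

def gaussian_oracle(col, cut):
    # probability that a background sample's value in column col is >= cut
    # (the same marginal is assumed for every column)
    return float(1.0 - norm.cdf(cut))

# observed one-class data, shifted away from the assumed background
X = numpy.matrix(numpy.random.randn(200, 4) + 2.0)

tree = OneClassTree(X, gaussian_oracle, target=0.75, maxdepth=4)
print(tree.selected_columns())   # columns the tree chose to split on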
Example #7
class OneClassTree(object):
    def __init__(self,
                 X,
                 oracle,
                 target=0.6,
                 maxdepth=5,
                 g=None,
                 eprobs=None,
                 bias=0.0):

        if g is None:
            g = DirectedGraph()

        if eprobs is None:
            eprobs = numpy.array([1.0, 1.0])

        self.g = g
        self.eprobs = eprobs
        self.IS_LEAF = False
        self.column = None
        self.cutoff = None
        self._less_tree = None
        self._ge_tree = None

        probs = 0.5 * self.eprobs
        probs = probs / numpy.sum(probs)
        self.probs = probs

        # leaf test: every probability lies outside the (1 - target, target) band
        met_target = numpy.all(
            [p <= 1.0 - target or p >= target for p in probs])

        self.g.add_node(self)

        if met_target:
            self._make_leaf()

        elif X.shape[0] < 10:
            self._make_leaf()

        elif maxdepth <= 1:
            self._make_leaf()

        else:
            self._make_split(X, oracle, target, maxdepth, bias)

    def selected_columns(self):
        if self.IS_LEAF:
            return set([])
        else:
            left = self._less_tree.selected_columns()
            right = self._ge_tree.selected_columns()
            middle = set([self.column])
            return middle.union(left).union(right)

    def __getstate__(self):
        return (self.eprobs, self.IS_LEAF, self.column, self.cutoff,
                self._less_tree, self._ge_tree)

    def __setstate__(self, state):
        self.eprobs = state[0]
        self.IS_LEAF = state[1]
        self.column = state[2]
        self.cutoff = state[3]
        self._less_tree = state[4]
        self._ge_tree = state[5]
        self.g = DirectedGraph()
        self.g.add_node(self)

        self.probs = 0.5 * self.eprobs
        self.probs = self.probs / numpy.sum(self.probs)

        if self._less_tree is not None:
            assert self._ge_tree is not None

            self.g.union(self._less_tree.g)
            self.g.union(self._ge_tree.g)
            self._less_tree.g = self.g
            self._ge_tree.g = self.g

            less_edge = self.__symbolic_column_name__(
                self.column) < self.cutoff
            ge_edge = self.__symbolic_column_name__(self.column) >= self.cutoff

            self.g.connect(self, self._ge_tree, edgeValue=str(ge_edge))
            self.g.connect(self, self._less_tree, edgeValue=str(less_edge))

    def classify(self, x):
        assert x.shape[0] > 0

        if x.shape[0] > 1:
            rv = numpy.vstack([self.classify(x[i]) for i in range(x.shape[0])])

        elif self.IS_LEAF:
            rv = self.eprobs

        else:
            rv = (self._less_tree.classify(x)
                  if x[0, self.column] < self.cutoff
                  else self._ge_tree.classify(x))

        if len(rv.shape) == 1:
            rv = numpy.array([rv])

        return rv

    def _make_leaf(self):
        self.IS_LEAF = True

    def _make_split(self, X, oracle, target, maxdepth, bias):
        self.IS_LEAF = False

        try:
            best = select_best_column(X, oracle, bias=bias)
        except NoBestColumn:
            self._make_leaf()
            return

        self.column = int(best[0])
        self.cutoff = float(best[1])

        # get indexes for >= and <
        columndata = _toarray(X[:, self.column].T)[0]
        geidx = columndata >= self.cutoff
        lidx = ~geidx

        # rows on each side of the split; an empty matrix stands in when a side has no rows
        lX = (X[lidx.nonzero()[0], :] if lidx.any()
              else numpy.matrix(numpy.zeros((0, X.shape[1]))))
        geX = (X[geidx.nonzero()[0], :] if geidx.any()
               else numpy.matrix(numpy.zeros((0, X.shape[1]))))

        # oracle estimate and empirical fraction of the probability mass on the >= side of the split
        ge_eprobs = numpy.array(
            [oracle(self.column, self.cutoff),
             float(geX.shape[0]) / X.shape[0]],
            dtype=numpy.double)
        l_eprobs = 1.0 - ge_eprobs
        ge_eprobs = ge_eprobs * self.eprobs
        l_eprobs = l_eprobs * self.eprobs

        # need to rebuild oracles to account for this split
        def loracle(col, cut):
            if col != self.column:
                return oracle(col, cut)

            if cut >= self.cutoff:
                return 0.0
            else:
                pless = ((1.0 - oracle(col, cut)) /
                         (1.0 - oracle(col, self.cutoff)))
                return 1.0 - pless

        def georacle(col, cut):
            if col != self.column:
                return oracle(col, cut)

            if cut < self.cutoff:
                return 0.0
            else:
                origp = oracle(col, self.cutoff)
                newp = oracle(col, cut)
                assert origp >= newp

                lessorig = 1.0 - origp
                lessnew = 1.0 - newp

                less = (lessnew - lessorig) / (1.0 - lessorig)
                return 1.0 - less

        # generate trees
        self._ge_tree = self.__class__(geX,
                                       georacle,
                                       target,
                                       maxdepth - 1,
                                       g=self.g,
                                       eprobs=ge_eprobs,
                                       bias=bias)
        self._less_tree = self.__class__(lX,
                                         loracle,
                                         target,
                                         maxdepth - 1,
                                         g=self.g,
                                         eprobs=l_eprobs,
                                         bias=bias)

        less_edge = self.__symbolic_column_name__(self.column) < self.cutoff
        ge_edge = self.__symbolic_column_name__(self.column) >= self.cutoff

        self.g.connect(self, self._ge_tree, edgeValue=str(ge_edge))
        self.g.connect(self, self._less_tree, edgeValue=str(less_edge))

    def __symbolic_column_name__(self, colnum):
        x = symbols('x')
        return x(colnum)

    def __str__(self):
        return str(id(self)) + "\n" + str(
            numpy.vstack([self.probs, self.eprobs]))

    def __repr__(self):
        return str(self)
Example #8
    def __init__(self):
        self.graph = DirectedGraph()