def save(binary, state):
    # reset the dl
    ana.set_dl(pickle_dir='/tmp/ana')

    p = angr.Project(binary)
    e = angr.surveyors.Explorer(p).run(10)
    pickle.dump(e.active[0].previous_run, open(state, 'w'), -1)
def test_dir():
    ana.dl = ana.DirDataLayer(pickle_dir="/tmp/test_ana")

    one = A(1)
    nose.tools.assert_is(one, A.ana_load(one.ana_store()))
    nose.tools.assert_true(os.path.exists("/tmp/test_ana/%s.p" % one.ana_uuid))

    uuid = one.ana_uuid
    old_id = id(one)
    del one
    gc.collect()

    ana.dl = ana.DirDataLayer(pickle_dir="/tmp/test_ana")
    two = A.ana_load(uuid)
    nose.tools.assert_equals(uuid, two.ana_uuid)
    nose.tools.assert_not_equals(old_id, id(two))

    # reset the datalayer to make sure we handle it properly
    ana.set_dl(ana.DictDataLayer())
    try:
        two = A.ana_load(uuid)
        assert False
    except KeyError:
        pass

    two.ana_store()
    del two
    three = A.ana_load(uuid)
    nose.tools.assert_equal(uuid, three.ana_uuid)
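# NOTE: the tests in this excerpt refer to a small helper class ``A`` that is
# not shown here. A minimal sketch of what such a class might look like,
# assuming it subclasses ana.Storable and implements ana's state hooks (the
# hook names and the __repr__ below are assumptions, not copied from the
# project):
class A(ana.Storable):
    def __init__(self, n):
        self.n = n

    def __repr__(self):
        return "<A %d>" % self.n

    # tell ana what to persist for this object...
    def _ana_getstate(self):
        return self.n

    # ...and how to rebuild it from that state
    def _ana_setstate(self, state):
        self.n = state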
def load(binary, state):
    # reset the dl
    ana.set_dl(pickle_dir='/tmp/ana')

    s = pickle.load(open(state))
    p = angr.Project(binary)
    e2 = angr.surveyors.Explorer(p, start=p.exit_to(0x400958, state=s.initial_state)).run(10)
    nose.tools.assert_equals(e2.active[0].addr, 0x40075c)
def setup():
    # clean up the AST cache in claripy, because a cached AST might believe it
    # has been stored in ana after we clean up the ana storage
    import claripy
    claripy.ast.bv._bvv_cache.clear()
    claripy.ast.bv.BV._hash_cache.clear()

    ana.set_dl(ana.DictDataLayer())
def test_dict():
    ana.set_dl(ana.DictDataLayer())

    l.debug("Initializing 1")
    one = A(1)
    l.debug("Initializing 2")
    two = A(2)
    one.make_uuid()

    l.debug("Copying 1")
    one_p = pickle.dumps(one)
    one_copy = pickle.loads(one_p)
    l.debug("Copying 2")
    two_p = pickle.dumps(two)
    two_copy = pickle.loads(two_p)

    nose.tools.assert_is(one_copy, one)
    nose.tools.assert_is_not(two_copy, two)
    nose.tools.assert_equal(str(two_copy), str(two))

    nose.tools.assert_is(one, A.ana_load(one.ana_store()))
    nose.tools.assert_is(two, A.ana_load(two.ana_store()))
    two_copy2 = pickle.loads(pickle.dumps(two))
    nose.tools.assert_equal(str(two_copy2), str(two))

    l.debug("Initializing 3")
    three = A(3)
    three_str = str(three)
    l.debug("Storing 3")
    three_uuid = three.ana_store()
    l.debug("Deleting 3")
    del three
    gc.collect()
    nose.tools.assert_false(three_uuid in ana.get_dl().uuid_cache)

    l.debug("Loading 3")
    three_copy = A.ana_load(three_uuid)
    nose.tools.assert_equal(three_copy.ana_uuid, three_uuid)  # pylint:disable=no-member
    nose.tools.assert_equal(str(three_copy), three_str)

    known = set()
    first_json = three_copy.to_literal(known)
    nose.tools.assert_true(three_copy.ana_uuid in first_json['objects'])
    nose.tools.assert_equal(first_json['objects'][three_copy.ana_uuid]['object']['n'], three_copy.n)
    nose.tools.assert_equal(first_json['value']['ana_uuid'], three_copy.ana_uuid)

    second_json = three_copy.to_literal(known)
    nose.tools.assert_false(three_copy.ana_uuid in second_json['objects'])
    nose.tools.assert_equal(second_json['value']['ana_uuid'], three_copy.ana_uuid)
def test_serialization():
    ana.set_dl(pickle_dir="/tmp/ana")

    for d in internaltest_arch:
        for f in internaltest_files:
            fpath = os.path.join(internaltest_location, d, f)
            if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
                p = angr.Project(fpath)
                internaltest_project(p)

    p = angr.Project(os.path.join(internaltest_location, "i386/fauxware"), load_options={"auto_load_libs": False})
    cfg = internaltest_cfg(p)
    internaltest_vfg(p, cfg)
def test_serialization():
    ana.set_dl(ana.DirDataLayer('/tmp/ana'))

    for d in internaltest_arch:
        for f in internaltest_files:
            fpath = os.path.join(internaltest_location, d, f)
            if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
                p = angr.Project(fpath)
                internaltest_project(p)

    p = angr.Project(os.path.join(internaltest_location, 'i386/fauxware'), load_options={'auto_load_libs': False})
    cfg = internaltest_cfg(p)
    internaltest_vfg(p, cfg)
def test_simple():
    ana.set_dl(ana.SimpleDataLayer())

    one = A(1)
    one.make_uuid()
    o = pickle.dumps(one)
    one_copy = pickle.loads(o)
    assert one is one_copy

    two = A(1)
    t = pickle.dumps(one)
    two_copy = pickle.loads(t)
    assert two_copy is not two

    assert pickle.load(open(os.path.join(os.path.dirname(__file__), 'test_pickle.p'), 'rb')).n == 1337
def test_basic():
    ana.set_dl(ana.DictDataLayer())

    def pickle_callback(path):
        path.info['pickled'] = True

    def unpickle_callback(path):
        path.info['unpickled'] = True

    project = angr.Project(_bin('tests/cgc/sc2_0b32aa01_01'))
    path = project.factory.path()
    spiller = angr.exploration_techniques.Spiller(pickle_callback=pickle_callback, unpickle_callback=unpickle_callback)
    spiller._pickle([path])

    del path
    gc.collect()

    path = spiller._unpickle(1)[0]
    assert path.info['pickled']
    assert path.info['unpickled']
def test_basic():
    ana.set_dl(ana.DictDataLayer())

    def pickle_callback(path):
        path.globals['pickled'] = True

    def unpickle_callback(path):
        path.globals['unpickled'] = True

    project = angr.Project(_bin('tests/cgc/sc2_0b32aa01_01'))
    path = project.factory.entry_state()
    spiller = angr.exploration_techniques.Spiller(pickle_callback=pickle_callback, unpickle_callback=unpickle_callback)
    spiller._pickle([path])

    del path
    gc.collect()

    path = spiller._unpickle(1)[0]
    assert path.globals['pickled']
    assert path.globals['unpickled']
def test_state_pickle():
    old_dl = ana.dl
    ana.set_dl(pickle_dir='/tmp/picklez')

    try:
        s = SimState()
        s.memory.store(100, s.se.BVV(0x4141414241414241424300, 88), endness='Iend_BE')
        s.regs.rax = 100

        sp = pickle.dumps(s)
        del s
        gc.collect()

        s = pickle.loads(sp)
        nose.tools.assert_equals(s.se.any_str(s.memory.load(100, 10)), "AAABAABABC")
    finally:
        ana.dl = old_dl
def test_serialization():
    ana.set_dl(pickle_dir='/tmp/ana')

    internaltest_arch = ['i386', 'armel']
    for d in internaltest_arch:
        tests = os.path.join(internaltest_location, d)
        for f in os.listdir(tests):
            fpath = os.path.join(tests, f)
            if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
                p = angr.Project(fpath)
                internaltest_project(p)

    p = angr.Project(os.path.join(internaltest_location, 'i386/fauxware'), load_options={'auto_load_libs': False})
    cfg = internaltest_cfg(p)
    internaltest_vfg(p, cfg)
def test_datalayer():
    l.info("Running test_datalayer")

    pickle_dir = tempfile.mkdtemp()
    ana.set_dl(pickle_dir=pickle_dir)
    l.debug("Pickling to %s", pickle_dir)

    a = claripy.BVV(1, 32)
    b = claripy.BVS("x", 32)
    c = a + b
    d = a + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b + b

    l.debug("Storing!")
    a.ana_store()
    c_info = c.ana_store()
    d_info = d.ana_store()

    l.debug("Loading!")
    ana.set_dl(pickle_dir=pickle_dir)
    #nose.tools.assert_equal(len(claripy.dl._cache), 0)

    cc = claripy.ast.BV.ana_load(c_info)
    nose.tools.assert_equal(str(cc), str(c))
    cd = claripy.ast.BV.ana_load(d_info)
    nose.tools.assert_equal(str(cd), str(d))

    l.debug("Time to test some solvers!")
    s = claripy.FullFrontend(claripy.backends.z3)
    x = claripy.BVS("x", 32)
    s.add(x == 3)
    s.finalize()
    ss = claripy.FullFrontend.ana_load(s.ana_store())
    nose.tools.assert_equal(str(s.constraints), str(ss.constraints))
    nose.tools.assert_equal(str(s.variables), str(ss.variables))

    s = claripy.CompositeFrontend(claripy.FullFrontend(claripy.backends.z3))
    x = claripy.BVS("x", 32)
    s.add(x == 3)
    s.finalize()
    ss = claripy.CompositeFrontend.ana_load(s.ana_store())
    old_constraint_sets = [[hash(j) for j in k.constraints] for k in s._solver_list]
    new_constraint_sets = [[hash(j) for j in k.constraints] for k in ss._solver_list]
    nose.tools.assert_items_equal(old_constraint_sets, new_constraint_sets)
    nose.tools.assert_equal(str(s.variables), str(ss.variables))
def teardown():
    try:
        shutil.rmtree('pickletest')
    except:
        pass
    try:
        shutil.rmtree('pickletest2')
    except:
        pass
    try:
        os.remove('pickletest_good')
    except:
        pass
    try:
        os.remove('pickletest_bad')
    except:
        pass

    ana.set_dl(ana.SimpleDataLayer())
def test_datalayer():
    l.info("Running test_datalayer")

    pickle_dir = tempfile.mkdtemp()
    ana.set_dl(pickle_dir=pickle_dir)
    l.debug("Pickling to %s", pickle_dir)

    a = claripy.BVV(0, 32)
    b = claripy.BVS("x", 32)
    c = a + b
    d = a+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b+b

    l.debug("Storing!")
    a.ana_store()
    c_info = c.ana_store()
    d_info = d.ana_store()

    l.debug("Loading!")
    ana.set_dl(pickle_dir=pickle_dir)
    #nose.tools.assert_equal(len(claripy.dl._cache), 0)

    cc = claripy.ast.BV.ana_load(c_info)
    nose.tools.assert_equal(str(cc), str(c))
    cd = claripy.ast.BV.ana_load(d_info)
    nose.tools.assert_equal(str(cd), str(d))

    l.debug("Time to test some solvers!")
    s = claripy.FullFrontend(claripy.backends.z3)
    x = claripy.BVS("x", 32)
    s.add(x == 3)
    s.finalize()
    ss = claripy.FullFrontend.ana_load(s.ana_store())
    nose.tools.assert_equal(str(s.constraints), str(ss.constraints))
    nose.tools.assert_equal(str(s.variables), str(ss.variables))

    s = claripy.CompositeFrontend(claripy.FullFrontend(claripy.backends.z3))
    x = claripy.BVS("x", 32)
    s.add(x == 3)
    s.finalize()
    ss = claripy.CompositeFrontend.ana_load(s.ana_store())
    old_constraint_sets = [[hash(j) for j in k.constraints] for k in s._solver_list]
    new_constraint_sets = [[hash(j) for j in k.constraints] for k in ss._solver_list]
    nose.tools.assert_items_equal(old_constraint_sets, new_constraint_sets)
    nose.tools.assert_equal(str(s.variables), str(ss.variables))
def test_palindrome2():
    ana.set_dl(ana.DictDataLayer())

    project = angr.Project(_bin('tests/cgc/sc2_0b32aa01_01'))
    pg = project.factory.simgr()
    limiter = angr.exploration_techniques.LengthLimiter(max_length=250)
    pg.use_technique(limiter)

    def pickle_callback(path):
        path.globals['pickled'] = True

    def unpickle_callback(path):
        path.globals['unpickled'] = True

    def priority_key(path):
        return hash(tuple(path.history.bbl_addrs))  # to help ensure determinism

    spiller = angr.exploration_techniques.Spiller(
        pickle_callback=pickle_callback, unpickle_callback=unpickle_callback, priority_key=priority_key
    )
    pg.use_technique(spiller)

    #pg.step(until=lambda lpg: len(lpg.active) == 10)
    #pg.step(until=lambda lpg: len(lpg.spill_stage) > 15)
    #pg.step(until=lambda lpg: spiller._pickled_paths)
    pg.run()
    assert spiller._ever_pickled > 0
    assert spiller._ever_unpickled == spiller._ever_pickled
    assert all(
        ('pickled' not in path.globals and 'unpickled' not in path.globals) or
        (path.globals['pickled'] and path.globals['unpickled'])
        for path in pg.cut
    )
def test_palindrome2():
    ana.set_dl(ana.DictDataLayer())

    project = angr.Project(_bin('tests/cgc/sc2_0b32aa01_01'))
    pg = project.factory.path_group()
    pg.active[0].state.options.discard('LAZY_SOLVES')
    limiter = angr.exploration_techniques.LengthLimiter(max_length=250)
    pg.use_technique(limiter)

    def pickle_callback(path):
        path.info['pickled'] = True

    def unpickle_callback(path):
        path.info['unpickled'] = True

    def priority_key(path):
        return hash(tuple(path.addr_trace))  # to help ensure determinism

    spiller = angr.exploration_techniques.Spiller(
        pickle_callback=pickle_callback, unpickle_callback=unpickle_callback, priority_key=priority_key
    )
    pg.use_technique(spiller)

    #pg.step(until=lambda lpg: len(lpg.active) == 10)
    #pg.step(until=lambda lpg: len(lpg.spill_stage) > 15)
    #pg.step(until=lambda lpg: spiller._pickled_paths)
    pg.run()
    assert spiller._ever_pickled > 0
    assert spiller._ever_unpickled == spiller._ever_pickled
    assert all(
        ('pickled' not in path.info and 'unpickled' not in path.info) or
        (path.info['pickled'] and path.info['unpickled'])
        for path in pg.cut
    )
def test_pickling():
    # set up ANA and make the pickles
    ana.set_dl(pickle_dir='/tmp/pickletest')
    make_pickles()

    # make sure the pickles work in the same "session"
    load_pickles()

    # reset ANA, and load the pickles
    ana.set_dl(pickle_dir='/tmp/pickletest')
    gc.collect()
    load_pickles()

    # purposefully set the wrong directory to make sure this excepts out
    ana.set_dl(pickle_dir='/tmp/pickletest2')
    gc.collect()
    #load_pickles()
    nose.tools.assert_raises(Exception, load_pickles)
def test_pickling():
    # set up ANA and make the pickles
    ana.set_dl(ana.DirDataLayer('/tmp/pickletest'))
    make_pickles()

    # make sure the pickles work in the same "session"
    load_pickles()

    # reset ANA, and load the pickles
    ana.set_dl(ana.DirDataLayer('/tmp/pickletest'))
    gc.collect()
    load_pickles()

    # purposefully set the wrong directory to make sure this excepts out
    ana.set_dl(ana.DirDataLayer('/tmp/pickletest2'))
    gc.collect()
    #load_pickles()
    nose.tools.assert_raises(Exception, load_pickles)
def test_pickling():
    try:
        # set up ANA and make the pickles
        ana.set_dl(ana.DirDataLayer('pickletest'))
        make_pickles()

        # make sure the pickles work in the same "session"
        load_pickles()

        # reset ANA, and load the pickles
        ana.set_dl(ana.DirDataLayer('pickletest'))
        gc.collect()
        load_pickles()

        # purposefully set the wrong directory to make sure this excepts out
        ana.set_dl(ana.DirDataLayer('pickletest2'))
        gc.collect()
        #load_pickles()
        nose.tools.assert_raises(Exception, load_pickles)
    finally:
        try:
            shutil.rmtree('pickletest')
        except:
            pass
        try:
            shutil.rmtree('pickletest2')
        except:
            pass
        try:
            os.remove('pickletest_good')
        except:
            pass
        try:
            os.remove('pickletest_bad')
        except:
            pass
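# NOTE: make_pickles() / load_pickles() are not defined in this excerpt. A
# hypothetical sketch of the pattern they exercise, inferred from the teardown
# above (which removes 'pickletest_good') and from how uuid'd Storables pickle
# in test_simple(); the names and bodies are assumptions, not the project's
# actual helpers:
def make_pickles():
    # a Storable with a uuid pickles as a reference; its state is written into
    # the active data layer (here, the DirDataLayer's pickle directory)
    a = A(1337)
    a.make_uuid()
    with open('pickletest_good', 'wb') as f:
        pickle.dump(a, f, -1)

def load_pickles():
    # unpickling the reference pulls the state back out of the data layer, so
    # this raises if ana is pointed at the wrong directory
    with open('pickletest_good', 'rb') as f:
        loaded = pickle.load(f)
    assert loaded.n == 1337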
from .errors import *
from . import operations
from . import ops as _all_operations

# This is here for later, because we'll f**k the namespace in a few lines
from . import backends as _backends_module
from .backends import Backend

#
# connect to ANA
#

import ana
if os.environ.get('REMOTE', False):
    ana.set_dl(ana.MongoDataLayer(()))

#
# backend objects
#

from . import bv
from . import fp
from . import vsa
from .fp import FSORT_DOUBLE, FSORT_FLOAT
from .annotation import *

#
# Operations
#
l.addHandler(logging.NullHandler())

from .errors import *
from . import operations
from . import ops as _all_operations

# This is here for later, because we'll f**k the namespace in a few lines
from . import backends as _backends_module

#
# connect to ANA
#

import ana
if os.environ.get('REMOTE', False):
    ana.set_dl(mongo_args=())

#
# Some other misguided setup
#

_recurse = 15000
l.warning("Claripy is setting the recursion limit to %d. If Python segfaults, I am sorry.", _recurse)
sys.setrecursionlimit(_recurse)

#
# solvers
#

from .frontend import Frontend as _Frontend
from .frontends import LightFrontend, FullFrontend, CompositeFrontend, HybridFrontend, ReplacementFrontend
def setup():
    ana.set_dl(ana.DirDataLayer('/tmp/ana'))
def setup():
    tmp_dir = tempfile.mkdtemp(prefix='test_serialization_ana')
    ana.set_dl(ana.DirDataLayer(tmp_dir))
l.addHandler(logging.NullHandler())

from .errors import *
from . import operations
from . import ops as _all_operations

# This is here for later, because we'll f**k the namespace in a few lines
from . import backends as _backends_module

#
# connect to ANA
#

import ana
if os.environ.get('REMOTE', False):
    ana.set_dl(mongo_args=())

#
# Some other misguided setup
#

_recurse = 15000
l.warning("Claripy is setting the recursion limit to %d. If Python segfaults, I am sorry.", _recurse)
sys.setrecursionlimit(_recurse)

#
# solvers
#

from .frontend import Frontend as _Frontend
from .frontends import LightFrontend, FullFrontend, CompositeFrontend, HybridFrontend, ReplacementFrontend, hybrid_vsa_z3
def setup():
    ana.set_dl(ana.DictDataLayer())
def setup():
    ana.set_dl(ana.DirDataLayer('/tmp/picklez'))
from .errors import *
from . import operations
from . import ops as _all_operations

# This is here for later, because we'll f**k the namespace in a few lines
from . import backends as _backends_module
from .backends import Backend

#
# connect to ANA
#

import ana
if os.environ.get('REMOTE', False):
    ana.set_dl(ana.MongoDataLayer(()))

#
# Some other misguided setup
#

_recurse = 15000
l.info("Claripy is setting the recursion limit to %d. If Python segfaults, I am sorry.", _recurse)
sys.setrecursionlimit(_recurse)

#
# backend objects
#

from . import bv
from . import fp
def setup():
    tmp_dir = tempfile.mkdtemp(prefix='test_state_picklez')
    ana.set_dl(ana.DirDataLayer(tmp_dir))
def teardown():
    ana.set_dl(ana.SimpleDataLayer())