def start_cuckoo2(self, url, parent_dir):
    """Submit *url* to the local Cuckoo install at /opt/cuckoo, wait for
    the guest analysis to stop, then run the state_analyse helpers.

    Returns a tuple:
        (task id, state, file operations, command lines, referrer url,
         url chain, exploit-kit name, campaign name)
    """
    # Defaults in case an analysis helper produces nothing.
    state1 = ""
    list_file_operation1 = []
    list_command_line1 = []
    refer_url1 = ""
    list_url_chain1 = []
    ek_name1 = ""
    set_cwd('/opt/cuckoo')
    db = Database()
    db.connect()
    # Only the first whitespace-separated token of *url* is submitted.
    id = db.add_url(url.split()[0])
    print "folder {} is running".format(id)
    # Busy-poll the guest status until the analysis VM reports "stopped".
    # NOTE(review): the original source was collapsed onto one line; the
    # exact nesting of the print/sleep relative to this loop is assumed
    # from context — confirm against upstream.
    while True:
        if str(db.guest_get_status(id)) == "stopped":
            break
    print "folder {} is stopped".format(id)
    # Give the reporting stage time to write its results to disk.
    time.sleep(70)
    #subfolder = os.listdir("/opt/cuckoo/storage/analyses")
    #print subfolder
    # Post-process the finished analysis with the project helpers.
    state_analyse_ = state_analyse(id, parent_dir)
    state1 = state_analyse_.state_decide_single()
    list_file_operation1 = state_analyse_.analyse_file_operation_single()
    list_command_line1 = state_analyse_.analyse_command_line_single()
    refer_url1 = state_analyse_.acquire_refer_url_single()
    list_url_chain1, ek_name1 = state_analyse_.analyse_url_chain_single()
    campaign_name = state_analyse_.analyse_Campaign_single()
    #print list_file_operation
    #print "Unexpected error:{}".format(sys.exc_info()[0])
    print " foler {} state is {}".format(id, state1)
    return id, state1, list_file_operation1, list_command_line1, refer_url1, list_url_chain1, ek_name1, campaign_name
def investigate(file_path): # Clone our Clean snapshot my_call("VBoxManage startvm User1") time.sleep(52) subprocess.call(CREATE_SNAPSHOT, shell=True) time.sleep(2) task_id = -1 my_call("cuckoo") # web = my_call("cuckoo api") # TODO implement # Send the file to cuckoo time.sleep(2) db = Database() db.connect() task_id = db.add_path(file_path) # a = subprocess.check_output(CALL_CUCKOO + file_path, shell= True) #a = subprocess.call(CALL_CUCKOO + file_path, shell=True) print "The id is " + str(task_id) # wait until it finishes running subprocess.check_output(CREATE_SNAPSHOT) status = None while status != 'reported': lst = db.list_tasks() status = lst[0].status time.sleep(1) print "Reported" # THIS IS NOT THE CUCKOO PID!!!! # os.kill(newpid, signal.SIGTERM) # according to the ID, get the reports from this system. subprocess.call(REMOVE_SNAPSHOT, shell=True) # subprocess.call(KILL_CUCKOO, shell=True) kp = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE) grep = subprocess.Popen(["grep", "cuckoo"], stdin=kp.stdout, stdout=subprocess.PIPE) awk = subprocess.Popen(["awk", "{print $2}"], stdin=grep.stdout, stdout=subprocess.PIPE) kill = subprocess.Popen(["xargs", "kill"], stdin=awk.stdout, stdout=subprocess.PIPE) kp.stdout.close() grep.stdout.close() awk.stdout.close() return task_id # # for i in "abcdefghijklmnopqrstuvwxyz": # try: # investigate("/home/user/Desktop/sandboxing/ransomwares/" + i) # except: # print "Skipping..." # Stages: Save file. # Investigate # Grade # Signatures # return all to server.
def test_import_noconfirm(self, p):
    """Importing a legacy CWD (confirmation answered True, False) should
    migrate analyses, logs, and the database configuration."""
    set_cwd(tempfile.mkdtemp())
    p.side_effect = True, False
    legacy_dir = init_legacy_analyses()
    common_dir = os.path.join(legacy_dir, "lib", "cuckoo", "common")
    os.makedirs(common_dir)
    open(os.path.join(common_dir, "constants.py"), "wb").write(constants_11_py)
    shutil.copytree("tests/files/conf/110_plain", os.path.join(legacy_dir, "conf"))
    # Point the legacy configuration at this test run's database URI.
    conf_path = os.path.join(legacy_dir, "conf", "cuckoo.conf")
    contents = open(conf_path, "rb").read()
    patched = contents.replace("connection =", "connection = %s" % self.URI)
    open(conf_path, "wb").write(patched)
    main.main(("--cwd", cwd(), "import", legacy_dir), standalone_mode=False)
    database = Database()
    database.connect()
    assert database.engine.name == self.ENGINE
    assert open(cwd("logs", "a.txt", analysis=1), "rb").read() == "a"
    assert config("cuckoo:database:connection") == self.URI
    assert database.count_tasks() == 2
def test_import_confirm(self, p):
    """Importing a legacy CWD with confirmation granted; on non-Linux
    hosts the SQL-dump instructions are raised instead of importing."""
    set_cwd(tempfile.mkdtemp())
    p.return_value = True
    legacy_dir = init_legacy_analyses()
    common_dir = os.path.join(legacy_dir, "lib", "cuckoo", "common")
    os.makedirs(common_dir)
    open(os.path.join(common_dir, "constants.py"), "wb").write(constants_11_py)
    shutil.copytree("tests/files/conf/110_plain", os.path.join(legacy_dir, "conf"))
    # Point the legacy configuration at this test run's database URI.
    conf_path = os.path.join(legacy_dir, "conf", "cuckoo.conf")
    contents = open(conf_path, "rb").read()
    open(conf_path, "wb").write(
        contents.replace("connection =", "connection = %s" % self.URI)
    )
    try:
        main.main(("--cwd", cwd(), "import", legacy_dir), standalone_mode=False)
    except CuckooOperationalError as e:
        # Acceptable only on platforms where the dump cannot be run.
        assert "SQL database dump as the command" in e.message
        assert not is_linux()
        return
    database = Database()
    database.connect()
    assert database.engine.name == self.ENGINE
    assert open(cwd("logs", "a.txt", analysis=1), "rb").read() == "a"
    assert config("cuckoo:database:connection") == self.URI
    assert database.count_tasks() == 2
def test_import_noconfirm(self, p):
    """Import a legacy CWD without the confirmation path; analyses,
    logs, and database configuration must all be migrated."""
    set_cwd(tempfile.mkdtemp())
    p.side_effect = True, False
    dirpath = init_legacy_analyses()
    os.makedirs(os.path.join(dirpath, "lib", "cuckoo", "common"))
    open(os.path.join(dirpath, "lib", "cuckoo", "common", "constants.py"), "wb").write(constants_11_py)
    shutil.copytree("tests/files/conf/110_plain", os.path.join(dirpath, "conf"))
    # Rewrite the legacy configuration to use this test's database URI.
    filepath = os.path.join(dirpath, "conf", "cuckoo.conf")
    buf = open(filepath, "rb").read()
    open(filepath, "wb").write(
        buf.replace("connection =", "connection = %s" % self.URI))
    main.main(("--cwd", cwd(), "import", dirpath), standalone_mode=False)
    db = Database()
    db.connect()
    assert db.engine.name == self.ENGINE
    assert open(cwd("logs", "a.txt", analysis=1), "rb").read() == "a"
    assert config("cuckoo:database:connection") == self.URI
    assert db.count_tasks() == 2
def test_import_confirm(self, p):
    """Import a legacy CWD with confirmation granted; on non-Linux the
    import raises with SQL-dump instructions instead."""
    set_cwd(tempfile.mkdtemp())
    p.return_value = True
    dirpath = init_legacy_analyses()
    os.makedirs(os.path.join(dirpath, "lib", "cuckoo", "common"))
    open(os.path.join(
        dirpath, "lib", "cuckoo", "common", "constants.py"
    ), "wb").write(constants_11_py)
    shutil.copytree(
        "tests/files/conf/110_plain", os.path.join(dirpath, "conf")
    )
    # Rewrite the legacy configuration to use this test's database URI.
    filepath = os.path.join(dirpath, "conf", "cuckoo.conf")
    buf = open(filepath, "rb").read()
    open(filepath, "wb").write(buf.replace(
        "connection =", "connection = %s" % self.URI
    ))
    try:
        main.main(
            ("--cwd", cwd(), "import", dirpath), standalone_mode=False
        )
    except CuckooOperationalError as e:
        # Only acceptable on platforms where the dump cannot be run.
        assert "SQL database dump as the command" in e.message
        assert not is_linux()
        return
    db = Database()
    db.connect()
    assert db.engine.name == self.ENGINE
    assert open(cwd("logs", "a.txt", analysis=1), "rb").read() == "a"
    assert config("cuckoo:database:connection") == self.URI
    assert db.count_tasks() == 2
def test_connect_default(p, q):
    """A default install must connect to the sqlite database inside the
    CWD with the check_same_thread workaround and a 60s pool timeout."""
    set_cwd(tempfile.mkdtemp())
    cuckoo_create()
    database = Database()
    database.connect(create=False)
    expected_dsn = "sqlite:///%s" % cwd("cuckoo.db")
    q.assert_called_once_with(
        expected_dsn, connect_args={"check_same_thread": False}
    )
    assert database.engine.pool_timeout == 60
def __init__(self):
    """Open a SQLAlchemy session against the Cuckoo database, exiting
    with a friendly message when the credentials are invalid."""
    database = Database()
    try:
        database.connect(schema_check=False)
    except CuckooDatabaseError:
        exit('Invalid cuckoo database credentials')
    # Quiet engine, generous pool timeout.
    database.engine.pool_timeout = 60
    database.engine.echo = False
    self.session = sessionmaker(bind=database.engine)()
def test_connect_default(p, q):
    """Default install: connects to the CWD sqlite DB with the
    check_same_thread workaround and a 60 second pool timeout."""
    set_cwd(tempfile.mkdtemp())
    cuckoo_create()
    db = Database()
    db.connect(create=False)
    q.assert_called_once_with(
        "sqlite:///%s" % cwd("cuckoo.db"),
        connect_args={"check_same_thread": False}
    )
    assert db.engine.pool_timeout == 60
def test_connect_pg(p, q):
    """A PostgreSQL connection string must disable SSL and honour the
    configured database timeout."""
    set_cwd(tempfile.mkdtemp())
    dsn = "postgresql://*****:*****@localhost/foobar"
    cuckoo_create(cfg={
        "cuckoo": {
            "database": {
                "connection": dsn,
                "timeout": 120,
            },
        },
    })
    database = Database()
    database.connect(create=False)
    q.assert_called_once_with(dsn, connect_args={"sslmode": "disable"})
    assert database.engine.pool_timeout == 120
def test_connect_pg(p, q):
    """PostgreSQL connection string: SSL disabled, configured timeout
    propagated to the engine pool."""
    set_cwd(tempfile.mkdtemp())
    cuckoo_create(cfg={
        "cuckoo": {
            "database": {
                "connection": "postgresql://*****:*****@localhost/foobar",
                "timeout": 120,
            }
        }
    })
    db = Database()
    db.connect(create=False)
    q.assert_called_once_with(
        "postgresql://*****:*****@localhost/foobar",
        connect_args={"sslmode": "disable"}
    )
    assert db.engine.pool_timeout == 120
def test_machines():
    """Machinery._initialize() must populate the machines table from
    virtualbox.conf, falling back to the cuckoo.conf resultserver
    settings — and the live ResultServer port — where a machine does not
    override them."""
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(
        cwd("conf"), "cuckoo.conf", """
[cuckoo]
machinery = virtualbox
[database]
connection =
timeout =
[resultserver]
ip = 9.8.7.6
port = 9876
""")
    Files.create(
        cwd("conf"), "virtualbox.conf", """
[virtualbox]
machines = a, b, c
[a]
label = a
snapshot = derpa
platform = windows
ip = 1.2.3.4
[b]
label = b
snapshot = derpb
platform = windows
ip = 5.6.7.8
resultserver_ip = 7.5.3.1
[c]
label = c
snapshot = derpc
platform = windows
ip = 1.3.5.7
resultserver_port = 4242
""")

    # Fake the running ResultServer singleton; its port (9001) overrides
    # the 9876 configured in cuckoo.conf.
    class mock(object):
        port = 9001
    Singleton._instances[ResultServer] = mock()

    db = Database()
    db.connect()
    m = Machinery()
    m.set_options(Config("virtualbox"))
    m._initialize("virtualbox")

    machines = db.list_machines()
    assert len(machines) == 3
    # Machine "a": inherits both resultserver ip and port defaults.
    assert machines[0].label == "a"
    assert machines[0].snapshot == "derpa"
    assert machines[0].ip == "1.2.3.4"
    assert machines[0].resultserver_ip == "9.8.7.6"
    assert machines[0].resultserver_port == 9001
    # Machine "b": overrides the resultserver ip only.
    assert machines[1].label == "b"
    assert machines[1].snapshot == "derpb"
    assert machines[1].ip == "5.6.7.8"
    assert machines[1].resultserver_ip == "7.5.3.1"
    assert machines[1].resultserver_port == 9001
    # Machine "c": overrides the resultserver port only.
    assert machines[2].label == "c"
    assert machines[2].snapshot == "derpc"
    assert machines[2].ip == "1.3.5.7"
    assert machines[2].resultserver_ip == "9.8.7.6"
    assert machines[2].resultserver_port == 4242

    # Remove the fake singleton so later tests see a clean state.
    Singleton._instances.pop(ResultServer)
def test_machines():
    """Duplicate of the machinery-initialization test: machines come
    from virtualbox.conf with cuckoo.conf/ResultServer fallbacks."""
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "cuckoo.conf", """
[cuckoo]
machinery = virtualbox
[database]
connection =
timeout =
[resultserver]
ip = 9.8.7.6
port = 9876
""")
    Files.create(cwd("conf"), "virtualbox.conf", """
[virtualbox]
machines = a, b, c
[a]
label = a
snapshot = derpa
platform = windows
ip = 1.2.3.4
[b]
label = b
snapshot = derpb
platform = windows
ip = 5.6.7.8
resultserver_ip = 7.5.3.1
[c]
label = c
snapshot = derpc
platform = windows
ip = 1.3.5.7
resultserver_port = 4242
""")

    # Fake running ResultServer; its port (9001) wins over cuckoo.conf.
    class mock(object):
        port = 9001
    Singleton._instances[ResultServer] = mock()

    db = Database()
    db.connect()
    m = Machinery()
    m.set_options(Config("virtualbox"))
    m._initialize("virtualbox")

    machines = db.list_machines()
    assert len(machines) == 3
    assert machines[0].label == "a"
    assert machines[0].snapshot == "derpa"
    assert machines[0].ip == "1.2.3.4"
    assert machines[0].resultserver_ip == "9.8.7.6"
    assert machines[0].resultserver_port == 9001
    assert machines[1].label == "b"
    assert machines[1].snapshot == "derpb"
    assert machines[1].ip == "5.6.7.8"
    assert machines[1].resultserver_ip == "7.5.3.1"
    assert machines[1].resultserver_port == 9001
    assert machines[2].label == "c"
    assert machines[2].snapshot == "derpc"
    assert machines[2].ip == "1.3.5.7"
    assert machines[2].resultserver_ip == "9.8.7.6"
    assert machines[2].resultserver_port == 4242

    # Clean up the fake singleton.
    Singleton._instances.pop(ResultServer)
def cuckoo_clean():
    """Clean up cuckoo setup.
    It deletes logs, all stored data from file system and configured
    databases (SQL and MongoDB).
    """
    # Init logging (without writing to file).
    init_console_logging()
    try:
        # Initialize the database connection.
        db = Database()
        db.connect(schema_check=False)
        # Drop all tables.
        db.drop()
    except (CuckooDependencyError, CuckooDatabaseError) as e:
        # If something is screwed due to incorrect database migrations or bad
        # database SqlAlchemy would be unable to connect and operate.
        log.warning(
            "Error connecting to database: it is suggested to check "
            "the connectivity, apply all migrations if needed or purge "
            "it manually. Error description: %s", e)
    # Check if MongoDB reporting is enabled and drop the database if it is.
    if mongo.init():
        try:
            mongo.connect()
            mongo.drop()
            mongo.close()
        except Exception as e:
            # Best-effort: a missing/unreachable Mongo must not abort cleanup.
            log.warning("Unable to drop MongoDB database: %s", e)
    # Check if ElasticSearch reporting is enabled and drop its data if it is.
    if elastic.init():
        elastic.connect()
        # TODO This should be moved to the elastic abstract.
        # TODO We should also drop historic data, i.e., from pervious days,
        # months, and years.
        # Build today's dated index name from the configured time pattern.
        date_index = datetime.datetime.utcnow().strftime({
            "yearly": "%Y",
            "monthly": "%Y-%m",
            "daily": "%Y-%m-%d",
        }[elastic.index_time_pattern])
        dated_index = "%s-%s" % (elastic.index, date_index)
        # ignore 400/404: the index may simply not exist yet.
        elastic.client.indices.delete(index=dated_index, ignore=[400, 404])
        template_name = "%s_template" % dated_index
        if elastic.client.indices.exists_template(template_name):
            elastic.client.indices.delete_template(template_name)
    # Paths to clean.
    paths = [
        cwd("cuckoo.db"),
        cwd("log"),
        cwd("storage", "analyses"),
        cwd("storage", "baseline"),
        cwd("storage", "binaries"),
    ]
    # Delete the various files and directories. In case of directories, keep
    # the parent directories, so to keep the state of the CWD in tact.
    for path in paths:
        if os.path.isdir(path):
            try:
                # Recreate the directory after removal to keep the CWD layout.
                shutil.rmtree(path)
                os.mkdir(path)
            except (IOError, OSError) as e:
                log.warning("Error removing directory %s: %s", path, e)
        elif os.path.isfile(path):
            try:
                os.unlink(path)
            except (IOError, OSError) as e:
                log.warning("Error removing file %s: %s", path, e)
class DatabaseEngine(object):
    """Tests database stuff.

    Base class: subclasses set URI (and ENGINE) to run the same suite
    against different database backends. One shared Database connection
    is created per class in setup_class.
    """
    URI = None

    def setup_class(self):
        set_cwd(tempfile.mkdtemp())
        self.d = Database()
        self.d.connect(dsn=self.URI)

    def add_url(self, url, priority=1, status="pending"):
        # Helper: submit a URL task and force it into the given status.
        task_id = self.d.add_url(url, priority=priority)
        self.d.set_status(task_id, status)
        return task_id

    def test_add_tasks(self):
        fd, sample_path = tempfile.mkstemp()
        os.write(fd, "hehe")
        os.close(fd)
        # Add task.
        count = self.d.Session().query(Task).count()
        self.d.add_path(sample_path)
        assert self.d.Session().query(Task).count() == count + 1
        # Add url.
        self.d.add_url("http://foo.bar")
        assert self.d.Session().query(Task).count() == count + 2

    def test_processing_get_task(self):
        # First reset all existing rows so that earlier exceptions don't affect
        # this unit test run.
        null, session = None, self.d.Session()
        session.query(Task).filter(
            Task.status == "completed", Task.processing == null
        ).update({
            "processing": "something",
        })
        session.commit()
        t1 = self.add_url("http://google.com/1", priority=1, status="completed")
        t2 = self.add_url("http://google.com/2", priority=2, status="completed")
        t3 = self.add_url("http://google.com/3", priority=1, status="completed")
        t4 = self.add_url("http://google.com/4", priority=1, status="completed")
        t5 = self.add_url("http://google.com/5", priority=3, status="completed")
        t6 = self.add_url("http://google.com/6", priority=1, status="completed")
        t7 = self.add_url("http://google.com/7", priority=1, status="completed")
        # Tasks must be handed out by priority (desc), then insertion order.
        assert self.d.processing_get_task("foo") == t5
        assert self.d.processing_get_task("foo") == t2
        assert self.d.processing_get_task("foo") == t1
        assert self.d.processing_get_task("foo") == t3
        assert self.d.processing_get_task("foo") == t4
        assert self.d.processing_get_task("foo") == t6
        assert self.d.processing_get_task("foo") == t7
        assert self.d.processing_get_task("foo") is None

    def test_error_exists(self):
        task_id = self.add_url("http://google.com/")
        self.d.add_error("A"*1024, task_id)
        assert len(self.d.view_errors(task_id)) == 1
        self.d.add_error("A"*1024, task_id)
        assert len(self.d.view_errors(task_id)) == 2

    def test_long_error(self):
        # A 1024-char error message must be stored without truncation.
        self.add_url("http://google.com/")
        self.d.add_error("A"*1024, 1)
        err = self.d.view_errors(1)
        assert err and len(err[0].message) == 1024

    def test_submit(self):
        dirpath = tempfile.mkdtemp()
        submit_id = self.d.add_submit(dirpath, "files", {
            "foo": "bar",
        })
        submit = self.d.view_submit(submit_id)
        assert submit.id == submit_id
        assert submit.tmp_path == dirpath
        assert submit.submit_type == "files"
        assert submit.data == {
            "foo": "bar",
        }

    def test_connect_no_create(self):
        # Dropping the alembic_version table and reconnecting with
        # create=False must not recreate the schema; a regular connect must.
        AlembicVersion.__table__.drop(self.d.engine)
        self.d.connect(dsn=self.URI, create=False)
        assert "alembic_version" not in self.d.engine.table_names()
        self.d.connect(dsn=self.URI)
        assert "alembic_version" in self.d.engine.table_names()

    def test_view_submit_tasks(self):
        submit_id = self.d.add_submit(None, None, None)
        t1 = self.d.add_path(__file__, custom="1", submit_id=submit_id)
        t2 = self.d.add_path(__file__, custom="2", submit_id=submit_id)
        submit = self.d.view_submit(submit_id)
        assert submit.id == submit_id
        # Without tasks=True the related tasks are detached lazily.
        with pytest.raises(DetachedInstanceError):
            print submit.tasks
        submit = self.d.view_submit(submit_id, tasks=True)
        assert len(submit.tasks) == 2
        tasks = sorted((task.id, task) for task in submit.tasks)
        assert tasks[0][1].id == t1
        assert tasks[0][1].custom == "1"
        assert tasks[1][1].id == t2
        assert tasks[1][1].custom == "2"

    def test_add_reboot(self):
        t0 = self.d.add_path(__file__)
        s0 = self.d.add_submit(None, None, None)
        t1 = self.d.add_reboot(task_id=t0, submit_id=s0)
        t = self.d.view_task(t1)
        # The reboot task records the original task id in custom.
        assert t.custom == "%s" % t0
        assert t.submit_id == s0

    def test_task_set_options(self):
        # dict and "k=v" string forms of options must normalize equally.
        t0 = self.d.add_path(__file__, options={"foo": "bar"})
        t1 = self.d.add_path(__file__, options="foo=bar")
        assert self.d.view_task(t0).options == {"foo": "bar"}
        assert self.d.view_task(t1).options == {"foo": "bar"}

    def test_task_tags_str(self):
        # Empty entries in a comma-separated tag string are discarded.
        task = self.d.add_path(__file__, tags="foo,,bar")
        tag0, tag1 = self.d.view_task(task).tags
        assert sorted((tag0.name, tag1.name)) == ["bar", "foo"]

    def test_task_tags_list(self):
        # Empty and non-string entries in a tag list are discarded.
        task = self.d.add_path(__file__, tags=["tag1", "tag2", "", 1, "tag3"])
        tag0, tag1, tag2 = self.d.view_task(task).tags
        assert sorted((tag0.name, tag1.name, tag2.name)) == [
            "tag1", "tag2", "tag3"
        ]

    def test_error_action(self):
        task_id = self.d.add_path(__file__)
        self.d.add_error("message1", task_id)
        self.d.add_error("message2", task_id, "actionhere")
        e1, e2 = self.d.view_errors(task_id)
        assert e1.message == "message1"
        assert e1.action is None
        assert e2.message == "message2"
        assert e2.action == "actionhere"

    def test_view_tasks(self):
        t1 = self.d.add_path(__file__)
        t2 = self.d.add_url("http://google.com/")
        tasks = self.d.view_tasks([t1, t2])
        assert tasks[0].to_dict() == self.d.view_task(t1).to_dict()
        assert tasks[1].to_dict() == self.d.view_task(t2).to_dict()

    def test_add_machine(self):
        # The machine options argument accepts None, "", a space-separated
        # string, or a list — all must normalize to a list.
        self.d.add_machine(
            "name1", "label", "1.2.3.4", "windows", None,
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.add_machine(
            "name2", "label", "1.2.3.4", "windows", "",
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.add_machine(
            "name3", "label", "1.2.3.4", "windows", "opt1 opt2",
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.add_machine(
            "name4", "label", "1.2.3.4", "windows", ["opt3", "opt4"],
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        m1 = self.d.view_machine("name1")
        m2 = self.d.view_machine("name2")
        m3 = self.d.view_machine("name3")
        m4 = self.d.view_machine("name4")
        assert m1.options == []
        assert m2.options == []
        assert m3.options == ["opt1", "opt2"]
        assert m4.options == ["opt3", "opt4"]

    @mock.patch("cuckoo.common.objects.magic")
    def test_add_sample(self, p):
        # Stub out libmagic so file-type detection is a no-op.
        p.from_file.return_value = ""
        assert self.d.add_path(Files.temp_put(os.urandom(16))) is not None
class DatabaseEngine(object):
    """Tests database stuff.

    Newer variant of the suite: uses the module-level add_task/add_target
    helpers, reconnects per test in setup(), and wipes all table rows in
    teardown() so tests cannot leak state into each other.
    """
    URI = None

    def setup_class(self):
        set_cwd(tempfile.mkdtemp())

    def setup(self):
        self.d = Database()
        self.d.connect(dsn=self.URI)

    def teardown(self):
        # Clear all tables without dropping them
        # This is done after each test to ensure a test doesn't fail because
        # of data of a previous test
        meta = MetaData()
        meta.reflect(self.d.engine)
        ses = self.d.Session()
        try:
            # Delete child tables first (reverse dependency order).
            for t in reversed(meta.sorted_tables):
                ses.execute(t.delete())
            ses.commit()
        finally:
            ses.close()

    def test_add_target(self):
        count = self.d.Session().query(Target).count()
        add_target("http://example.com", category="url")
        assert self.d.Session().query(Target).count() == count + 1

    def test_add_task(self):
        fd, sample_path = tempfile.mkstemp()
        os.write(fd, "hehe")
        os.close(fd)
        # Add task.
        count = self.d.Session().query(Task).count()
        add_task(sample_path, category="file")
        assert self.d.Session().query(Task).count() == count + 1

    def test_processing_get_task(self):
        # First reset all existing rows so that earlier exceptions don't affect
        # this unit test run.
        null, session = None, self.d.Session()
        session.query(Task).filter(
            Task.status == "completed", Task.processing == null
        ).update({
            "processing": "something",
        })
        session.commit()
        t1 = add_task("http://google.com/1", priority=1, status="completed", category="url")
        t2 = add_task("http://google.com/2", priority=2, status="completed", category="url")
        t3 = add_task("http://google.com/3", priority=1, status="completed", category="url")
        t4 = add_task("http://google.com/4", priority=1, status="completed", category="url")
        t5 = add_task("http://google.com/5", priority=3, status="completed", category="url")
        t6 = add_task("http://google.com/6", priority=1, status="completed", category="url")
        t7 = add_task("http://google.com/7", priority=1, status="completed", category="url")
        # Tasks must be handed out by priority (desc), then insertion order.
        assert self.d.processing_get_task("foo") == t5
        assert self.d.processing_get_task("foo") == t2
        assert self.d.processing_get_task("foo") == t1
        assert self.d.processing_get_task("foo") == t3
        assert self.d.processing_get_task("foo") == t4
        assert self.d.processing_get_task("foo") == t6
        assert self.d.processing_get_task("foo") == t7
        assert self.d.processing_get_task("foo") is None

    def test_error_exists(self):
        task_id = add_task("http://google.com/7", category="url")
        self.d.add_error("A" * 1024, task_id)
        assert len(self.d.view_errors(task_id)) == 1
        self.d.add_error("A" * 1024, task_id)
        assert len(self.d.view_errors(task_id)) == 2

    def test_long_error(self):
        # A 1024-char error message must be stored without truncation.
        add_task("http://google.com/", category="url")
        self.d.add_error("A" * 1024, 1)
        err = self.d.view_errors(1)
        assert err and len(err[0].message) == 1024

    def test_submit(self):
        dirpath = tempfile.mkdtemp()
        submit_id = self.d.add_submit(dirpath, "files", {
            "foo": "bar",
        })
        submit = self.d.view_submit(submit_id)
        assert submit.id == submit_id
        assert submit.tmp_path == dirpath
        assert submit.submit_type == "files"
        assert submit.data == {
            "foo": "bar",
        }

    def test_connect_no_create(self):
        # Reconnecting with create=False must not recreate the schema;
        # a regular connect must.
        AlembicVersion.__table__.drop(self.d.engine)
        self.d.connect(dsn=self.URI, create=False)
        assert "alembic_version" not in self.d.engine.table_names()
        self.d.connect(dsn=self.URI)
        assert "alembic_version" in self.d.engine.table_names()

    def test_view_submit_tasks(self):
        submit_id = self.d.add_submit(None, None, None)
        target_id = add_target(__file__, category="file")
        t1 = add_task(custom="1", submit_id=submit_id)
        t2 = add_task(custom="2", submit_id=submit_id)
        submit = self.d.view_submit(submit_id)
        assert submit.id == submit_id
        # Without tasks=True the related tasks are detached lazily.
        with pytest.raises(DetachedInstanceError):
            print submit.tasks
        submit = self.d.view_submit(submit_id, tasks=True)
        assert len(submit.tasks) == 2
        tasks = sorted((task.id, task) for task in submit.tasks)
        assert tasks[0][1].id == t1
        assert tasks[0][1].custom == "1"
        assert tasks[1][1].id == t2
        assert tasks[1][1].custom == "2"

    def test_task_set_options(self):
        # dict and "k=v" string forms of options must normalize equally.
        t0 = add_task(__file__, options={"foo": "bar"})
        t1 = add_task(__file__, options="foo=bar")
        assert self.d.view_task(t0).options == {"foo": "bar"}
        assert self.d.view_task(t1).options == {"foo": "bar"}

    def test_error_action(self):
        task_id = add_task(__file__)
        self.d.add_error("message1", task_id)
        self.d.add_error("message2", task_id, "actionhere")
        e1, e2 = self.d.view_errors(task_id)
        assert e1.message == "message1"
        assert e1.action is None
        assert e2.message == "message2"
        assert e2.action == "actionhere"

    def test_view_tasks(self):
        t1 = add_task(__file__)
        t2 = add_task("http://example.com", category="url")
        tasks = self.d.view_tasks([t1, t2])
        assert tasks[0].to_dict() == self.d.view_task(t1).to_dict()
        assert tasks[1].to_dict() == self.d.view_task(t2).to_dict()

    def test_add_machine(self):
        # Machine options accept None, "", a space-separated string, or a
        # list — all must normalize to a list.
        self.d.add_machine("name1", "label", "1.2.3.4", "windows", None,
                           "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name2", "label", "1.2.3.4", "windows", "",
                           "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name3", "label", "1.2.3.4", "windows", "opt1 opt2",
                           "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name4",
                           "label", "1.2.3.4", "windows", ["opt3", "opt4"],
                           "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox", reserved_by=1600)
        m1 = self.d.view_machine("name1")
        m2 = self.d.view_machine("name2")
        m3 = self.d.view_machine("name3")
        m4 = self.d.view_machine("name4")
        assert m1.options == []
        assert m2.options == []
        assert m3.options == ["opt1", "opt2"]
        assert m4.options == ["opt3", "opt4"]
        assert m1.manager == "virtualbox"
        assert m4.reserved_by == 1600

    def test_adding_task(self):
        now = datetime.datetime.now()
        id = add_task(__file__, "file", 0, "py", "free=yes", 3, "custom",
                      "owner", "machine1", "DogeOS", ["tag1"], False, False,
                      now, "regular", None, now)
        task = self.d.view_task(id)
        assert id is not None
        assert task.timeout == 0
        assert task.package == "py"
        assert task.options == {"free": "yes"}
        assert task.priority == 3
        assert task.custom == "custom"
        assert task.owner == "owner"
        assert task.machine == "machine1"
        assert task.platform == "DogeOS"
        assert len(task.tags) == 1
        assert task.tags[0].name == "tag1"
        assert task.memory == False
        assert task.enforce_timeout == False
        assert task.clock == now
        assert task.submit_id is None
        assert task.start_on == now
        assert len(task.targets) == 1
        assert task.targets[0].category == "file"
        assert task.targets[0].target == __file__

    def test_set_machine_rcparams(self):
        self.d.add_machine("name5", "label5", "1.2.3.4", "windows", None,
                           "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.set_machine_rcparams("label5", {
            "protocol": "rdp",
            "host": "127.0.0.1",
            "port": 3389,
        })
        m = self.d.view_machine("name5")
        # Note: the stored port comes back as a string.
        assert m.rcparams == {
            "protocol": "rdp",
            "host": "127.0.0.1",
            "port": "3389",
        }

    def test_add_target_file(self):
        fd, sample_path = tempfile.mkstemp()
        os.write(fd, os.urandom(64))
        os.close(fd)
        target = File(sample_path)
        id = add_target(sample_path, "file")
        db_target = self.d.find_target(id=id)
        assert id is not None
        assert db_target.file_size == 64
        assert db_target.file_type == target.get_type()
        assert db_target.md5 == target.get_md5()
        assert db_target.crc32 == target.get_crc32()
        assert db_target.sha1 == target.get_sha1()
        assert db_target.sha256 == target.get_sha256()
        assert db_target.sha512 == target.get_sha512()
        assert db_target.ssdeep == target.get_ssdeep()
        assert db_target.category == "file"

    def test_add_target_url(self):
        target = URL("http://example.com/")
        id = add_target(target.url, "url")
        db_target = self.d.find_target(id=id)
        assert id is not None
        assert db_target.md5 == target.get_md5()
        assert db_target.crc32 == target.get_crc32()
        assert db_target.sha1 == target.get_sha1()
        assert db_target.sha256 == target.get_sha256()
        assert db_target.sha512 == target.get_sha512()
        assert db_target.ssdeep == target.get_ssdeep()
        assert db_target.category == "url"

    def test_find_target(self):
        # Every hash supported by find_target must resolve to the target.
        fd, sample_path = tempfile.mkstemp()
        os.write(fd, os.urandom(64))
        os.close(fd)
        target = File(sample_path)
        id = add_target(sample_path, category="file")
        assert self.d.find_target(id=id).id == id
        assert self.d.find_target(crc32=target.get_crc32()).id == id
        assert self.d.find_target(md5=target.get_md5()).id == id
        assert self.d.find_target(sha1=target.get_sha1()).id == id
        assert self.d.find_target(sha256=target.get_sha256()).id == id
        assert self.d.find_target(sha512=target.get_sha512()).id == id

    def test_find_target_multifilter(self):
        # Multiple filters must be ANDed together.
        ids = []
        paths = []
        target = None
        for x in range(2):
            fd, sample_path = tempfile.mkstemp()
            randbytes = os.urandom(64)
            paths.append(sample_path)
            os.write(fd, randbytes)
            os.close(fd)
            target = File(sample_path)
            ids.append(add_target(sample_path, category="file"))
        db_target = self.d.find_target(sha256=target.get_sha256(),
                                       target=paths[1])
        assert self.d.find_target(id=ids[0], md5=target.get_md5()) is None
        assert db_target.id == ids[1]

    def test_fetch_with_machine(self):
        future = datetime.datetime(2200, 5, 12, 12, 12)
        add_task(__file__, category="file", tags=["service"])
        t2 = add_task(__file__, category="file", machine="machine1")
        add_task(__file__, category="file", start_on=future)
        add_task(__file__, category="file")
        t = self.d.fetch(machine="machine1", service=False)
        assert t.id == t2
        assert t.status == "pending"

    def test_fetch_service_false(self):
        add_task(__file__, category="file", tags=["service"])
        t2 = add_task(__file__, category="file")
        t = self.d.fetch(service=False)
        assert t.id == t2
        assert t.status == "pending"

    def test_fetch_service_true(self):
        t1 = add_task(__file__, category="file", tags=["service"])
        add_task(__file__, category="file", machine="machine1")
        add_task(__file__)
        add_task(__file__)
        task = self.d.fetch()
        assert task.id == t1
        assert task.status == "pending"

    def test_fetch_use_start_on_true(self):
        # A far-future start_on must be skipped even at higher priority.
        future = datetime.datetime(2200, 5, 12, 12, 12)
        add_task(__file__, category="file", start_on=future, priority=999)
        t2 = add_task(__file__, category="file")
        t = self.d.fetch(service=False)
        assert t.id == t2
        assert t.status == "pending"

    def test_fetch_use_start_on_false(self):
        future = datetime.datetime(2200, 5, 12, 12, 12)
        t1 = add_task(__file__, category="file", start_on=future, priority=999)
        add_task(__file__, category="file")
        t = self.d.fetch(use_start_on=False, service=False)
        assert t.id == t1
        assert t.status == "pending"

    def test_fetch_use_exclude(self):
        t1 = add_task(__file__, category="file", priority=999)
        t2 = add_task(__file__, category="file", priority=999)
        t3 = add_task(__file__, category="file", priority=999)
        t4 = add_task(__file__, category="file", priority=999)
        t = self.d.fetch(service=False, exclude=[t1, t2, t3])
        assert t.id == t4
        assert t.status == "pending"

    def test_fetch_specific_task(self):
        t1 = add_task(__file__, category="file", priority=999)
        t2 = add_task(__file__, category="file", priority=999)
        t = self.d.fetch(task_id=t1)
        assert t.id == t1
        assert t.status == "pending"

    def test_lock_machine(self):
        t1 = add_task(__file__, category="file", tags=["app1", "office7"])
        t2 = add_task(__file__, category="file", tags=["app1", "office15"])
        self.d.add_machine("name1", "name1", "1.2.3.4", "windows", "",
                           "app1,office7",
                           "int0", "snap0", "5.6.7.8", 2043, "virtualbox")
        self.d.add_machine("name2", "name2", "1.2.3.4", "DogeOS", "opt1 opt2",
                           "office13", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name3", "name3", "1.2.3.4", "CoffeeOS",
                           ["opt3", "opt4"], "cofOS,office7", "int0", "snap0",
                           "5.6.7.8", 2043, "virtualbox")
        task1 = self.d.view_task(t1)
        task2 = self.d.view_task(t2)
        m1 = self.d.lock_machine(tags=task1.tags)
        assert m1.locked
        assert m1.name == "name1"
        # No machine matches both the platform and task2's tags.
        with pytest.raises(CuckooOperationalError):
            self.d.lock_machine(platform="DogeOS", tags=task2.tags)
        m2 = self.d.lock_machine(platform="DogeOS")
        assert m2.name == "name2"
        m3 = self.d.lock_machine(label="name3")
        assert m3.locked
        assert m3.name == "name3"

    def test_list_tasks(self):
        t1 = add_task(__file__, category="file", owner="doge",
                      options={"route": "vpn511"})
        t2 = add_task(__file__, category="file")
        add_task(__file__, category="file")
        self.d.set_status(t2, "reported")
        self.d.set_status(t1, "reported")
        tasks = self.d.list_tasks(owner="doge", status="reported")
        tasks2 = self.d.list_tasks()
        tasks3 = self.d.list_tasks(status="reported")
        assert tasks[0].id == t1
        assert len(tasks2) == 3
        assert len(tasks3) == 2

    def test_list_tasks_between(self):
        for x in range(5):
            add_task(__file__, category="file")
        tasks = self.d.list_tasks(filter_by="id", operators="between",
                                  values=(1, 3))
        assert len(tasks) == 3

    def test_list_tasks_multiple_filter(self):
        ids = []
        future = None
        for x in range(10):
            id = add_task(__file__, category="file")
            ids.append(id)
            # Spread completed_on one extra day per task.
            future = datetime.datetime.now() + datetime.timedelta(days=id)
            ses = self.d.Session()
            task = ses.query(Task).get(id)
            task.completed_on = future
            ses.commit()
            ses.close()
        tasks = self.d.list_tasks(filter_by=["id", "completed_on"],
                                  operators=[">", "<"], values=[4, future],
                                  order_by="id", limit=1)
        assert len(tasks) == 1
        assert tasks[0].id == 5

    def test_list_tasks_offset_limit(self):
        for x in range(10):
            add_task(__file__, category="file")
        tasks = self.d.list_tasks(offset=5, limit=10, order_by="id")
        assert len(tasks) == 5
        assert tasks[4].id == 10

    def test_list_tasks_notvalue(self):
        for x in range(10):
            id = add_task(__file__, category="file")
            if id % 2 == 0:
                self.d.set_status(id, "running")
        tasks = self.d.list_tasks(filter_by="status", operators="!=",
                                  values="running", order_by="id")
        assert len(tasks) == 5
        assert tasks[4].id == 9

    def test_list_tasks_noresults(self):
        for x in range(5):
            add_task(__file__, category="file")
        tasks = self.d.list_tasks(status="reported")
        assert tasks == []

    def test_get_available_machines(self):
        # Reserved machines still count as available; locked ones do not.
        self.d.add_machine("name1", "name1", "1.2.3.4", "windows", "",
                           "app1,office7", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name2", "name2", "1.2.3.4", "DogeOS", "opt1 opt2",
                           "office13", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name3", "name3", "1.2.3.4", "CoffeeOS",
                           ["opt3", "opt4"], "cofOS,office7", "int0", "snap0",
                           "5.6.7.8", 2043, "virtualbox")
        self.d.machine_reserve(label="name2", task_id=1337)
        self.d.lock_machine(label="name3")
        available = self.d.get_available_machines()
        names = [m["name"] for m in [db_m.to_dict() for db_m in available]]
        assert len(available) == 2
        assert "name2" in names
        assert "name1" in names

    def test_unlock_machine(self):
        self.d.add_machine("name1", "name1", "1.2.3.4", "windows", "",
                           "app1,office7", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.lock_machine(label="name1")
        assert self.d.view_machine(name="name1").locked
        self.d.unlock_machine(label="name1")
        assert not self.d.view_machine(name="name1").locked

    def test_list_machines(self):
        self.d.add_machine("name1", "name1", "1.2.3.4", "windows", "",
                           "app1,office7", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.add_machine("name2", "name2", "1.2.3.4", "DogeOS", "opt1 opt2",
                           "office13", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        allmachines = self.d.list_machines()
        names = [m["name"] for m in [db_m.to_dict() for db_m in allmachines]]
        assert len(allmachines) == 2
        assert "name2" in names
        assert "name1" in names

    def test_machine_reserve(self):
        self.d.add_machine("name1", "name1", "1.2.3.4", "windows", "",
                           "app1,office7", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        assert self.d.view_machine(name="name1").reserved_by is None
        self.d.machine_reserve(label="name1", task_id=42)
        assert self.d.view_machine(name="name1").reserved_by == 42

    def test_clear_reservation(self):
        self.d.add_machine("name1", "name1", "1.2.3.4", "windows", "",
                           "app1,office7", "int0", "snap0", "5.6.7.8", 2043,
                           "virtualbox")
        self.d.machine_reserve(label="name1", task_id=42)
        assert self.d.view_machine(name="name1").reserved_by == 42
        self.d.clear_reservation(label="name1")
        assert self.d.view_machine(name="name1").reserved_by is None

    def test_clean_machines(self):
        for x in range(6):
            name = "name%s" % x
            self.d.add_machine(name, name, "1.2.3.4", "windows", "",
                               "app1,office7", "int0", "snap0", "5.6.7.8",
                               2043, "virtualbox")
        assert len(self.d.list_machines()) == 6
        self.d.clean_machines()
        assert len(self.d.list_machines()) == 0

    def test_target_to_dict(self):
        fd, sample_path = tempfile.mkstemp()
        os.write(fd, os.urandom(64))
        os.close(fd)
        target = File(sample_path)
        id = add_target(sample_path, category="file")
        db_target = self.d.find_target(id=id)
        db_target = db_target.to_dict()
        assert db_target["id"] == id
        assert db_target["file_size"] == 64
        assert db_target["file_type"] == target.get_type()
        assert db_target["md5"] == target.get_md5()
        assert db_target["crc32"] == target.get_crc32()
        assert db_target["sha1"] == target.get_sha1()
        assert db_target["sha256"] == target.get_sha256()
        assert db_target["sha512"] == target.get_sha512()
        assert db_target["ssdeep"] == target.get_ssdeep()
        assert db_target["category"] == "file"
        assert db_target["target"] == sample_path

    def test_task_multiple_targets(self):
        # One task may own many targets.
        db_targets = []
        task_id = add_task()
        for x in range(10):
            fd, sample_path = tempfile.mkstemp()
            os.write(fd, os.urandom(64))
            os.close(fd)
            add_target(sample_path, category="file", task_id=task_id)
        task = self.d.view_task(task_id)
        assert task.id == task_id
        assert len(task.targets) == 10
def cuckoo_clean():
    """Wipe all state from the current Cuckoo setup.

    Drops the SQL database, drops the MongoDB and ElasticSearch reporting
    stores when those backends are enabled, and removes logs plus every
    stored analysis artifact from the filesystem, keeping the CWD layout
    intact.
    """
    # Log to the console only; the log directory is about to be removed.
    init_console_logging()

    try:
        database = Database()
        database.connect(schema_check=False)
        database.drop()
    except (CuckooDependencyError, CuckooDatabaseError) as err:
        # A broken schema or unreachable server must not abort the cleanup.
        log.warning("Error connecting to database: it is suggested to check "
                    "the connectivity, apply all migrations if needed or purge "
                    "it manually. Error description: %s", err)

    # Drop the MongoDB reporting database, if that backend is configured.
    if mongo.init():
        try:
            mongo.connect()
            mongo.drop()
            mongo.close()
        except Exception as err:
            log.warning("Unable to drop MongoDB database: %s", err)

    # Drop the current ElasticSearch index plus its template, if enabled.
    # TODO This should be moved to the elastic abstract.
    # TODO We should also drop historic data, i.e., from pervious days,
    # months, and years.
    if elastic.init():
        elastic.connect()
        patterns = {
            "yearly": "%Y",
            "monthly": "%Y-%m",
            "daily": "%Y-%m-%d",
        }
        suffix = datetime.datetime.utcnow().strftime(
            patterns[elastic.index_time_pattern]
        )
        dated_index = "%s-%s" % (elastic.index, suffix)

        elastic.client.indices.delete(index=dated_index, ignore=[400, 404])

        template_name = "%s_template" % dated_index
        if elastic.client.indices.exists_template(template_name):
            elastic.client.indices.delete_template(template_name)

    # Remove on-disk state. Directories are re-created empty so the CWD
    # structure stays usable afterwards.
    for path in (
        cwd("cuckoo.db"),
        cwd("log"),
        cwd("storage", "analyses"),
        cwd("storage", "baseline"),
        cwd("storage", "binaries"),
    ):
        if os.path.isdir(path):
            try:
                shutil.rmtree(path)
                os.mkdir(path)
            except (IOError, OSError) as err:
                log.warning("Error removing directory %s: %s", path, err)
        elif os.path.isfile(path):
            try:
                os.unlink(path)
            except (IOError, OSError) as err:
                log.warning("Error removing file %s: %s", path, err)
class TestTask(object):
    """Tests for the Task wrapper around database task rows."""

    def setup(self):
        # Fresh CWD + database per test; temp samples tracked in self.files.
        self.cwd = tempfile.mkdtemp()
        set_cwd(self.cwd)
        cuckoo_create()
        self.db = Database()
        self.db.connect()
        self.tmpfile = None
        self.files = []

    def teardown(self):
        shutil.rmtree(self.cwd)
        # BUGFIX: this loop used to do "return os.remove(path)", which
        # aborted teardown after the first file and leaked the rest of
        # the temp files created by get_file().
        for path in self.files:
            try:
                os.remove(path)
            except OSError:
                pass

    def get_file(self):
        # Create a 64-byte random sample file and register it for cleanup.
        fd, target = tempfile.mkstemp()
        os.write(fd, os.urandom(64))
        os.close(fd)
        self.files.append(target)
        return target

    def add_task(self, category="file", url=None, **kwargs):
        # Insert a task row (plus one file/url target) directly through a
        # database session. Returns [task_id, target_path].
        if category == "file":
            db_target = create_target.create_file(self.get_file())
        elif category == "url":
            db_target = create_target.create_url(url)

        newtask = DbTask()
        newtask.type = kwargs.get("type")
        newtask.timeout = kwargs.get("timeout")
        newtask.priority = kwargs.get("priority")
        newtask.custom = kwargs.get("custom")
        newtask.owner = kwargs.get("owner")
        newtask.machine = kwargs.get("machine")
        newtask.package = kwargs.get("package")
        newtask.options = kwargs.get("options")
        newtask.platform = kwargs.get("platform")
        newtask.memory = kwargs.get("memory")
        newtask.enforce_timeout = kwargs.get("enforce_timeout")
        newtask.clock = kwargs.get("clock")
        newtask.submit_id = kwargs.get("submit_id")
        newtask.start_on = kwargs.get("start_on")
        newtask.longterm_id = kwargs.get("longterm_id")

        ses = self.db.Session()
        try:
            ses.add(newtask)
            ses.commit()
            task_id = newtask.id
            db_target.task_id = task_id
            ses.add(db_target)
            ses.commit()
            target = db_target.target
        finally:
            ses.close()
        return [task_id, target]

    def test_defined_task_dirs(self):
        assert Task.dirs == [
            "shots", "logs", "files", "extracted", "buffer", "memory"
        ]

    def test_load_from_db(self):
        id = self.add_task()[0]
        task = Task()
        assert task.load_from_db(id)
        assert task.id == id
        assert task.category == "file"
        assert task.path == cwd(analysis=id)

    def test_set_task_constructor(self):
        id = self.add_task()[0]
        db_task = self.db.view_task(id)
        task = Task(db_task)
        assert task.id == id
        assert task.category == "file"
        assert task.path == cwd(analysis=id)
        assert task.db_task == db_task

    def test_set_task(self):
        id, sample = self.add_task()
        db_task = self.db.view_task(id)
        task = Task()
        task.set_task(db_task)
        assert task.id == id
        assert task.category == "file"
        assert task.path == cwd(analysis=id)
        assert task.db_task == db_task
        assert task.target == sample
        assert len(task.targets) == 1
        assert isinstance(task.targets[0], Target)

    def test_load_task_from_dict(self):
        task_dict = {
            "id": 42,
            "category": "file",
            "target": "/tmp/stuff/doge42.exe",
        }
        task = Task()
        task.load_task_dict(task_dict)
        assert task.id == 42
        assert task.category == "file"
        assert task.target == "/tmp/stuff/doge42.exe"
        assert task.path == cwd(analysis=42)
        assert task.type == "regular"

    def test_create_dirs(self):
        # create_dirs() builds the analysis dir plus all Task.dirs subdirs.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        dirs = ["shots", "logs", "files", "extracted", "buffer", "memory"]
        task_path = cwd(analysis=id)
        dir_paths = [cwd(task_path, dir) for dir in dirs]
        for path in dir_paths:
            assert not os.path.exists(path)
        assert task.create_dirs()
        assert os.path.exists(task_path)
        for path in dir_paths:
            assert os.path.exists(path)

    def test_dir_exists(self):
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        assert not task.dir_exists()
        os.mkdir(cwd(analysis=id))
        assert task.dir_exists()

    def test_is_reported(self):
        # A task counts as reported once reports/report.json exists.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task.create_dirs()
        assert not task.is_reported()
        reports = os.path.join(task.path, "reports")
        os.mkdir(reports)
        with open(os.path.join(reports, "report.json"), "wb") as fw:
            fw.write(os.urandom(64))
        assert task.is_reported()

    @mock.patch("cuckoo.core.task.RunReporting.run")
    @mock.patch("cuckoo.core.task.RunSignatures.run")
    @mock.patch("cuckoo.core.task.RunProcessing.run")
    def test_process(self, mp, ms, mr):
        # process() must run processing, signatures and reporting in turn.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        mp.return_value = {"x":"x"}
        task.process()
        mp.assert_called_once()
        ms.assert_called_once()
        mr.assert_called_once()

    @mock.patch("cuckoo.core.task.RunReporting")
    @mock.patch("cuckoo.core.task.RunSignatures")
    @mock.patch("cuckoo.core.task.RunProcessing")
    def test_process_nodelete(self, mp, ms, mr):
        # With delete flags off, both the original and the binary copy
        # must survive processing.
        set_cwd(tempfile.mkdtemp())
        cuckoo_create(cfg={
            "cuckoo": {
                "cuckoo": {
                    "delete_original": False,
                    "delete_bin_copy": False,
                },
            },
        })
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task.create_dirs()
        copied_binary = cwd("storage", "binaries", File(sample).get_sha256())
        task.process()
        assert os.path.exists(copied_binary)
        assert os.path.exists(sample)

    @mock.patch("cuckoo.core.task.RunReporting")
    @mock.patch("cuckoo.core.task.RunSignatures")
    @mock.patch("cuckoo.core.task.RunProcessing")
    def test_process_dodelete(self, mp, ms, mr):
        # With delete flags on, both copies must be gone after processing.
        set_cwd(tempfile.mkdtemp())
        cuckoo_create(cfg={
            "cuckoo": {
                "cuckoo": {
                    "delete_original": True,
                    "delete_bin_copy": True,
                },
            },
        })
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task.create_dirs()
        assert os.path.exists(task.target)
        assert os.path.exists(task.targets[0].copied_binary)
        task.process()
        assert not os.path.exists(sample)
        assert not os.path.exists(task.targets[0].copied_binary)

    def test_get_tags_list(self):
        # Tags may arrive as comma-separated strings or sequences; empty
        # and non-string entries are dropped, whitespace stripped.
        task = Task()
        tags = " doge,stuff,things"
        tags2 = ("doge", "things ")
        tags3 = "foo,,bar"
        tags4 = ["tag1", 1, "", "tag2"]
        assert task.get_tags_list(tags) == ["doge", "stuff", "things"]
        assert task.get_tags_list(tags2) == ["doge", "things"]
        assert task.get_tags_list(tags3) == ["foo", "bar"]
        assert task.get_tags_list(tags4) == ["tag1", "tag2"]
        assert task.get_tags_list("") == []
        assert task.get_tags_list([]) == []
        assert task.get_tags_list(()) == []
        assert task.get_tags_list(1) == []

    def test_set_latest(self):
        # set_latest() points the storage/analyses/latest symlink at us.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task.create_dirs()
        sym_latest = cwd("storage", "analyses", "latest")
        task.set_latest()
        assert os.path.realpath(sym_latest) == task.path

    def test_set_status(self):
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task.set_status("reported")
        assert task.status == "reported"
        assert task["status"] == "reported"

    def test_refresh(self):
        # refresh() re-reads the database row into the wrapper.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        self.db.set_machine(id, "machine1")
        assert task.machine is None
        assert task["machine"] is None
        task.refresh()
        assert task.machine == "machine1"
        assert task["machine"] == "machine1"

    def test_write_task_json(self):
        # task.json written to disk must match the reference fixture.
        id = submit_task.add_path("tests/files/pdf0.pdf")
        session = self.db.Session()
        db_task = session.query(DbTask).filter_by(id=id).first()
        db_task.status = "reported"
        db_task.machine = "DogeOS1"
        db_task.start_on = datetime.datetime(2017, 5, 10, 18, 0)
        db_task.added_on = datetime.datetime(2017, 5, 10, 18, 0)
        db_task.clock = datetime.datetime(2017, 5, 10, 18, 0)
        session.commit()
        session.refresh(db_task)
        session.close()
        task = Task()
        task.load_from_db(id)
        task.write_task_json()
        correct = open("tests/files/tasktest-taskjson.json", "rb")
        correct_json = json.load(correct)
        generated = open(os.path.join(task.path, "task.json"), "rb")
        generated_json = json.load(generated)
        assert generated_json == correct_json

    def test_get_item(self):
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        assert task["id"] == id
        assert task["category"] == "file"
        assert task["target"] == sample
        assert task["machine"] is None
        assert len(task["targets"]) == 1

    def test_get_attribute(self):
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        path = cwd(analysis=id)
        assert task.id == id
        assert task.path == path
        assert task.category == "file"
        assert task.target == sample

    def test_requirement_str(self):
        id, sample = self.add_task(
            tags=["doge"], platform="DogeOS", machine="Doge1"
        )
        id = submit_task.add_path(
            self.get_file(), tags=["doge"], platform="DogeOS",
            machine="Doge1"
        )
        task = Task()
        task.load_from_db(id)
        req_str = task.requirements_str(task.db_task)
        assert req_str == "machine=Doge1 platform=DogeOS tags=doge, "

    def test_reschedule_file(self):
        # Rescheduling marks the old task recovered and clones the target.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        newid = task.reschedule(priority=3)
        oldtask = self.db.view_task(id)
        newtask = self.db.view_task(newid)
        assert newid is not None
        assert oldtask.status == "recovered"
        assert newtask.targets[0].category == "file"
        assert newtask.targets[0].target == sample
        assert newtask.priority == 3

    def test_reschedule_url(self):
        id, sample = self.add_task(
            url="http://example.com/42", category="url"
        )
        task = Task()
        task.load_from_db(id)
        newid = task.reschedule(priority=2)
        oldtask = self.db.view_task(id)
        newtask = self.db.view_task(newid)
        assert newid is not None
        assert oldtask.status == "recovered"
        assert newtask.targets[0].category == "url"
        assert newtask.priority == 2
        assert newtask.targets[0].target == "http://example.com/42"

    def test_reschedule_id(self):
        id, sample = self.add_task()
        task = Task()
        newid = task.reschedule(task_id=id)
        oldtask = self.db.view_task(id)
        newtask = self.db.view_task(newid)
        assert newid is not None
        assert oldtask.status == "recovered"
        assert newtask.targets[0].category == "file"

    def test_reschedule_fail(self):
        newid = submit_task.reschedule()
        assert newid is None

    def test_reschedule_nonexistant(self):
        newid = submit_task.reschedule(task_id=42)
        assert newid is None

    def test_add_service(self):
        # Service tasks have no targets and top (999) priority.
        task = Task()
        id = task.add_service(timeout=60, tags=["officepc"], owner="Doge")
        task_path = cwd(analysis=id)
        db_task = self.db.view_task(id)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.type == "service"
        assert db_task.owner == "Doge"
        assert db_task.timeout == 60
        assert db_task.priority == 999
        assert db_task.tags[0].name == "officepc"
        assert db_task.targets == []

    def test_add_baseline(self):
        task = Task()
        id = task.add_baseline(timeout=60, owner="Doge", machine="machine1")
        task_path = cwd(analysis=id)
        db_task = self.db.view_task(id)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.type == "baseline"
        assert db_task.owner == "Doge"
        assert db_task.timeout == 60
        assert db_task.priority == 999
        assert db_task.machine == "machine1"
        assert db_task.memory == False
        assert db_task.targets == []

    def test_add_reboot(self):
        # Reboot tasks reference the original task id via the custom field.
        id, sample = self.add_task(owner="MrDoge")
        sid = self.db.add_submit(None, None, None)
        task = Task()
        task.load_from_db(id)
        task.create_empty()
        newid = task.add_reboot(id, owner="Doge", submit_id=sid)
        task_path = cwd(analysis=newid)
        db_task = self.db.view_task(newid)
        assert newid is not None
        assert os.path.exists(task_path)
        assert db_task.targets[0].category == "file"
        assert db_task.package == "reboot"
        assert db_task.owner == "Doge"
        assert db_task.priority == 1
        assert db_task.custom == "%s" % id
        assert db_task.memory == False
        assert db_task.targets[0].target == sample
        assert db_task.submit_id == sid
        assert len(task.targets) == 1
        assert isinstance(task.targets[0], Target)

    def test_add_reboot_nonexistant(self):
        newid = submit_task.add_reboot(42)
        assert newid is None

    def test_add_reboot_binary_removed(self):
        # Without the copied binary a reboot task cannot be created.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task.create_empty()
        os.remove(task.targets[0].copied_binary)
        newid = task.add_reboot(id)
        assert newid is None

    def test_add_url(self):
        id = submit_task.add_url("http://example.com/42")
        db_task = self.db.view_task(id)
        task = Task(db_task)
        task_path = cwd(analysis=id)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.targets[0].category == "url"
        assert db_task.targets[0].target == "http://example.com/42"
        assert task.targets[0].target == "http://example.com/42"
        assert len(task.targets) == 1
        assert isinstance(task.targets[0], Target)

    def test_add_archive(self):
        fakezip = self.get_file()
        id = submit_task.add_archive(fakezip, "file1.exe", "exe")
        task_path = cwd(analysis=id)
        db_task = self.db.view_task(id)
        task = Task(db_task)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.targets[0].category == "archive"
        assert db_task.options == {"filename": "file1.exe"}
        assert db_task.targets[0].target == fakezip
        assert db_task.package == "exe"
        assert task.targets[0].target == fakezip
        assert len(task.targets) == 1
        assert isinstance(task.targets[0], Target)

    def test_add_archive_nonexistant(self):
        id = submit_task.add_archive("/tmp/BfUbuYByg.zip", "file1.exe", "exe")
        assert id is None

    def test_add_path(self):
        sample = self.get_file()
        id = submit_task.add_path(sample)
        task_path = cwd(analysis=id)
        db_task = self.db.view_task(id)
        task = Task(db_task)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.targets[0].category == "file"
        assert db_task.targets[0].target == sample
        assert task.targets[0].target == sample
        assert len(task.targets) == 1
        assert isinstance(task.targets[0], Target)

    def test_add_path_nonexistant(self):
        id = submit_task.add_path("/tmp/YtcukGBYTTBYU.exe")
        assert id is None

    def test_add_path_invalid_starton(self):
        tmpfile = self.get_file()
        id = submit_task.add_path(tmpfile, start_on="13-11-2013")
        assert id is None

    def test_add_massurl(self):
        urls = ["http://example%s.com" % n for n in range(500)]
        id = submit_task.add_massurl(urls)
        task = Task()
        task.load_from_db(id)
        assert id is not None
        assert os.path.exists(cwd(analysis=id))
        assert task.path == cwd(analysis=id)
        assert len(task.targets) == 500
        assert task.type == "massurl"

    def test_add_file(self):
        # Full add() round-trip: every submitted attribute must land in
        # the database row.
        sample = self.get_file()
        db_target = create_target.create_file(sample)
        starton = datetime.datetime.now()
        id = submit_task.add(
            [db_target], clock="5-17-2017 13:37:13",
            package="exe", owner="Doge", custom="stuff",
            machine="machine1", platform="DogeOS", tags="tag1",
            memory=True, enforce_timeout=True, submit_id=1500,
            start_on=starton
        )
        task_path = cwd(analysis=id)
        db_task = self.db.view_task(id)
        task = Task(db_task)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.targets[0].category == "file"
        assert db_task.targets[0].target == sample
        assert db_task.clock == datetime.datetime(
            year=2017, month=5, day=17, hour=13,minute=37,second=13
        )
        assert db_task.timeout == 0
        assert db_task.package == "exe"
        assert db_task.options == {}
        assert db_task.priority == 1
        assert db_task.custom == "stuff"
        assert db_task.owner == "Doge"
        assert db_task.machine == "machine1"
        assert db_task.platform == "DogeOS"
        assert len(db_task.tags) == 1
        assert db_task.tags[0].name == "tag1"
        assert db_task.memory
        assert db_task.enforce_timeout
        assert db_task.submit_id == 1500
        assert db_task.start_on == starton
        assert task.id == id
        assert task.target == sample
        assert task.category == "file"
        assert task.type == "regular"

    def test_add_base_url(self):
        db_target = create_target.create_url("http://example.com/42")
        id = submit_task.add([db_target])
        task_path = cwd(analysis=id)
        db_task = self.db.view_task(id)
        task = Task(db_task)
        assert id is not None
        assert os.path.exists(task_path)
        assert db_task.targets[0].category == "url"
        assert db_task.targets[0].target == "http://example.com/42"
        assert db_task.clock is not None
        assert task.id == id
        assert task.target == "http://example.com/42"
        assert task.category == "url"

    def test_estimate_export_size(self):
        fake_task = cwd(analysis=1)
        shutil.copytree("tests/files/sample_analysis_storage", fake_task)
        est_size = Task.estimate_export_size(1, ["logs"], ["dump.pcap"])
        assert int(est_size) == 7861

    def test_get_files(self):
        fake_task = cwd(analysis=1)
        shutil.copytree("tests/files/sample_analysis_storage", fake_task)
        dirs, files = Task.get_files(1)
        assert len(dirs) == 6
        assert len(files) == 10
        assert "dump.pcap" in files
        assert ("logs", 1) in dirs

    def test_create_zip(self):
        fake_task = cwd(analysis=1)
        shutil.copytree("tests/files/sample_analysis_storage", fake_task)
        zfileio = Task.create_zip(
            1, ["logs", "report"], ["cuckoo.log", "files.json"]
        )
        assert isinstance(zfileio, io.BytesIO)
        zfile = zipfile.ZipFile(zfileio)
        assert len(zfile.read("files.json")) == 1856
        assert len(zfileio.getvalue()) == 13938

    def test_all_properties(self):
        # Every declared Task property must be readable without raising.
        id, sample = self.add_task()
        task = Task()
        task.load_from_db(id)
        task_properties = [
            "id", "target", "category", "timeout", "priority",
            "custom", "owner", "machine", "package", "tags",
            "options", "platform", "memory", "enforce_timeout",
            "clock", "added_on", "start_on", "started_on",
            "completed_on", "status", "sample_id", "submit_id",
            "processing", "route", "targets", "longterm_id"
        ]
        try:
            for field in task_properties:
                getattr(task, field)
        except Exception as e:
            pytest.fail(
                "One or more properties of Task raised an error: %s" % e
            )
class TestRegular(object):
    # Tests for the Regular analysis manager.
    # NOTE(review): this class continues past the end of this chunk
    # (test_run_fail is completed further down in the file).
    createcwd = True

    def setup_class(self):
        self.remove_paths = []
        self.db = Database()

    def create_cwd(self, cfg=None):
        # Only build a fresh CWD when a custom cfg is given or none has
        # been created yet; the flag is class-level so it is shared across
        # tests. Created dirs are tracked for teardown_class.
        if not TestRegular.createcwd and cfg is None:
            return
        TestRegular.createcwd = False
        newcwd = tempfile.mkdtemp()
        set_cwd(newcwd)
        cuckoo_create(cfg=cfg)
        self.remove_paths.append(newcwd)
        self.db.connect()

    def teardown_class(self):
        for path in self.remove_paths:
            if os.path.isdir(path):
                shutil.rmtree(path)

    def get_manager(self, task=None):
        # Build a Regular manager for the given task; when no task is
        # passed, a 32-byte "testanalysis.exe" sample task is created.
        if task is None:
            task = Task()
            fd, fpath = tempfile.mkstemp()
            os.write(fd, b"\x00" * 32)
            os.close(fd)
            newname = os.path.join(os.path.dirname(fpath), "testanalysis.exe")
            os.rename(fpath, newname)
            id = task.add_path(newname)
            task.load_from_db(id)

        manager = Regular(FakeMachine(), mock.MagicMock(), mock.MagicMock())
        manager.set_task(task)
        manager.set_target(task.targets)
        return manager

    def test_set_task(self):
        self.create_cwd()
        task = Task()
        id = task.add_path(__file__)
        task.load_from_db(id)
        manager = self.get_manager()
        manager.set_task(task)
        assert manager.task == task
        assert manager.analysis is not None
        assert manager.name == "task_%s_Regular" % task.id

    def test_set_target(self):
        self.create_cwd()
        task = Task()
        id = task.add_path(__file__)
        task.load_from_db(id)
        manager = self.get_manager()
        manager.set_target(task.targets)
        assert manager.target == task.targets[0]

    def test_set_target_empty(self):
        # With no targets, an empty Target placeholder must be used.
        self.create_cwd()
        task = Task()
        id = task.add_path(__file__)
        task.load_from_db(id)
        task.task_dict["targets"] = []
        manager = self.get_manager()
        manager.set_target(task.targets)
        assert isinstance(manager.target, Target)

    @mock.patch("cuckoo.common.abstracts.AnalysisManager.build_options")
    def test_init(self, mb):
        self.create_cwd()
        manager = self.get_manager()
        result = manager.init(self.db)
        mb.assert_called_once_with(
            options={
                "category": "file",
                "target": manager.target.target,
                "file_type": "data",
                "file_name": "testanalysis.exe",
                "pe_exports": "",
                "options": {}
            })
        assert result
        assert isinstance(manager.guest_manager, GuestManager)
        assert isinstance(manager.aux, RunAuxiliary)
        assert os.path.isfile(os.path.join(manager.task.path, "task.json"))

    @mock.patch("cuckoo.common.abstracts.AnalysisManager.build_options")
    @mock.patch("cuckoo.core.target.File.get_apk_entry")
    def test_init_apk_options(self, mae, mb):
        # APK package/activity info must be forwarded as apk_entry.
        self.create_cwd()
        manager = self.get_manager()
        mae.return_value = ("package", "activity")
        result = manager.init(self.db)
        mb.assert_called_once_with(
            options={
                "category": "file",
                "target": manager.target.target,
                "file_type": "data",
                "file_name": "testanalysis.exe",
                "pe_exports": "",
                "options": {
                    "apk_entry": "package:activity"
                }
            })
        assert result
        assert isinstance(manager.guest_manager, GuestManager)
        assert isinstance(manager.aux, RunAuxiliary)
        assert os.path.isfile(os.path.join(manager.task.path, "task.json"))

    @mock.patch("cuckoo.common.abstracts.AnalysisManager.build_options")
    def test_init_non_file(self, mb):
        self.create_cwd()
        task = Task()
        id = task.add_url("http://example.com/42")
        task.load_from_db(id)
        manager = self.get_manager(task)
        result = manager.init(self.db)
        mb.assert_called_once()
        assert result
        assert isinstance(manager.guest_manager, GuestManager)
        assert isinstance(manager.aux, RunAuxiliary)
        assert os.path.isfile(os.path.join(task.path, "task.json"))

    def test_init_remov_original(self):
        # init() must still succeed when the original sample was removed,
        # falling back to the copied binary.
        self.create_cwd()
        task = Task()
        fd, tmpfile = tempfile.mkstemp()
        os.write(fd, os.urandom(64))
        os.close(fd)
        id = task.add_path(tmpfile)
        task.load_from_db(id)
        tmpfile_obj = File(tmpfile)
        tmpfile_obj.calc_hashes()
        manager = self.get_manager(task)
        # Remove so init fails to find the original target
        os.remove(tmpfile)
        result = manager.init(self.db)
        assert result
        assert manager.options["target"] == tmpfile
        assert manager.options["file_name"] == tmpfile_obj.get_name()
        assert isinstance(manager.guest_manager, GuestManager)
        assert isinstance(manager.aux, RunAuxiliary)
        assert os.path.isfile(os.path.join(task.path, "task.json"))

    def test_init_fail(self):
        self.create_cwd()
        task = Task()
        fd, tmpfile = tempfile.mkstemp()
        os.write(fd, os.urandom(64))
        os.close(fd)
        id = task.add_path(tmpfile)
        task.load_from_db(id)
        manager = self.get_manager(task)
        copy_path = cwd("storage", "binaries", File(tmpfile).get_sha256())
        # Remove both binaries to make init fail
        os.remove(copy_path)
        os.remove(tmpfile)
        result = manager.init(self.db)
        assert not result

    def test_init_copied_bin_none(self):
        self.create_cwd()
        manager = self.get_manager()
        manager.target.copied_binary = None
        result = manager.init(self.db)
        assert not result

    @mock.patch("cuckoo.analysis.regular.ResultServer")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.set_analysis_status")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager."
                "request_scheduler_action")
    def test_start_and_wait(self, mrsa, msas, mrs):
        self.create_cwd()
        manager = self.get_manager()
        # Mock resultserver obj so we can check if add_task was called
        resulserver_obj = mock.MagicMock()
        mrs.return_value = resulserver_obj
        manager.init(self.db)
        manager.machinery = mock.MagicMock()
        manager.route = mock.MagicMock()
        manager.aux = mock.MagicMock()
        manager.guest_manager = mock.MagicMock()
        # Set status manually, because the method used is mocked
        manager.analysis.status = "starting"
        result = manager.start_and_wait()
        # Check if all required methods were called successfully
        msas.assert_has_calls([mock.call("starting"), mock.call("running")])
        resulserver_obj.add_task.assert_called_once_with(
            manager.task.db_task, manager.machine)
        manager.aux.start.assert_called_once()
        manager.machinery.start.assert_called_once_with(
            "machine1", manager.task.db_task)
        manager.route.route_network.assert_called_once()
        manager.machine_lock.release.assert_called_once()
        mrsa.assert_called_once_with("starting")
        manager.guest_manager.start_analysis.assert_called_once()
        manager.guest_manager.wait_for_completion.assert_called_once()
        assert result

    @mock.patch("cuckoo.analysis.regular.ResultServer")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.set_analysis_status")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager."
                "request_scheduler_action")
    def test_start_and_wait_url(self, mrsa, msas, mrs):
        self.create_cwd()
        task = Task()
        id = task.add_url("http://example.com/42")
        task.load_from_db(id)
        # Mock resultserver obj so we can check if add_task was called
        resulserver_obj = mock.MagicMock()
        mrs.return_value = resulserver_obj
        manager = self.get_manager(task)
        manager.init(self.db)
        manager.machinery = mock.MagicMock()
        manager.route = mock.MagicMock()
        manager.aux = mock.MagicMock()
        manager.guest_manager = mock.MagicMock()
        # Set status manually, because the method used is mocked
        manager.analysis.status = "starting"
        result = manager.start_and_wait()
        # Check if all required methods were called successfully
        msas.assert_has_calls([mock.call("starting"), mock.call("running")])
        resulserver_obj.add_task.assert_called_once_with(
            task.db_task, manager.machine)
        manager.aux.start.assert_called_once()
        manager.machinery.start.assert_called_once_with(
            "machine1", task.db_task)
        manager.route.route_network.assert_called_once()
        manager.machine_lock.release.assert_called_once()
        mrsa.assert_called_once_with("starting")
        manager.guest_manager.start_analysis.assert_called_once()
        manager.guest_manager.wait_for_completion.assert_called_once()
        assert result

    @mock.patch("cuckoo.analysis.regular.ResultServer")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.set_analysis_status")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager."
                "request_scheduler_action")
    @mock.patch("time.sleep")
    def test_start_and_wait_baseline(self, mts, mrsa, msas, mrs):
        # Baseline runs have no guest manager; the wait is a plain sleep.
        self.create_cwd()
        task = Task()
        id = task.add_baseline()
        task.load_from_db(id)
        # Mock resultserver obj so we can check if add_task was called
        resulserver_obj = mock.MagicMock()
        mrs.return_value = resulserver_obj
        manager = self.get_manager(task)
        manager.init(self.db)
        manager.machinery = mock.MagicMock()
        manager.route = mock.MagicMock()
        manager.aux = mock.MagicMock()
        result = manager.start_and_wait()
        # Check if all required methods were called successfully
        msas.assert_has_calls([mock.call("starting"), mock.call("running")])
        resulserver_obj.add_task.assert_called_once_with(
            task.db_task, manager.machine)
        manager.aux.start.assert_called_once()
        manager.machinery.start.assert_called_once_with(
            "machine1", task.db_task)
        manager.route.route_network.assert_called_once()
        manager.machine_lock.release.assert_called_once()
        mrsa.assert_called_once_with("starting")
        mts.assert_called_once_with(manager.options["timeout"])
        assert result

    @mock.patch("cuckoo.analysis.regular.ResultServer")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.set_analysis_status")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager."
                "request_scheduler_action")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.wait_finish")
    def test_start_and_wait_noagent(self, mwf, mrsa, msas, mrs):
        # "noagent" machines skip the guest manager and use wait_finish().
        self.create_cwd()
        task = Task()
        id = task.add_service(owner="1", tags="service,mitm", timeout=120)
        task.load_from_db(id)
        # Mock resultserver obj so we can check if add_task was called
        resulserver_obj = mock.MagicMock()
        mrs.return_value = resulserver_obj
        manager = self.get_manager(task)
        manager.machine.options = "noagent"
        manager.init(self.db)
        manager.machinery = mock.MagicMock()
        manager.route = mock.MagicMock()
        manager.aux = mock.MagicMock()
        result = manager.start_and_wait()
        # Check if all required methods were called successfully
        msas.assert_has_calls([mock.call("starting"), mock.call("running")])
        resulserver_obj.add_task.assert_called_once_with(
            task.db_task, manager.machine)
        manager.aux.start.assert_called_once()
        manager.machinery.start.assert_called_once_with(
            "machine1", task.db_task)
        manager.route.route_network.assert_called_once()
        manager.machine_lock.release.assert_called_once()
        mrsa.assert_called_once_with("starting")
        mwf.assert_called_once()
        assert result

    @mock.patch("cuckoo.analysis.regular.ResultServer")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.set_analysis_status")
    def test_stop_and_wait(self, msas, mrs):
        self.create_cwd()
        # Mock resultserver obj so we can check if del_task was called
        resulserver_obj = mock.MagicMock()
        mrs.return_value = resulserver_obj
        manager = self.get_manager()
        manager.init(self.db)
        manager.machinery = mock.MagicMock()
        manager.route = mock.MagicMock()
        manager.aux = mock.MagicMock()
        manager.stop_and_wait()
        # Check if all required methods were called successfully
        msas.assert_called_once_with("stopping")
        manager.aux.stop.assert_called_once()
        manager.machinery.stop.assert_called_once_with("machine1")
        resulserver_obj.del_task.assert_called_once_with(
            manager.task.db_task, manager.machine)
        manager.route.unroute_network.assert_called_once()

    @mock.patch("cuckoo.analysis.regular.ResultServer")
    @mock.patch("cuckoo.common.abstracts.AnalysisManager.set_analysis_status")
    def test_stop_and_wait_dump_mem(self, msas, mrs):
        # A memory=True task must trigger a full memory dump before stop.
        self.create_cwd()
        task = Task()
        id = task.add_path(__file__, memory=True)
        task.load_from_db(id)
        # Mock resultserver obj so we can check if del_task was called
        resulserver_obj = mock.MagicMock()
        mrs.return_value = resulserver_obj
        manager = self.get_manager(task)
        manager.init(self.db)
        manager.machinery = mock.MagicMock()
        manager.route = mock.MagicMock()
        manager.aux = mock.MagicMock()
        manager.stop_and_wait()
        # Check if all required methods were called successfully
        msas.assert_called_once_with("stopping")
        manager.aux.stop.assert_called_once()
        manager.machinery.dump_memory.assert_called_once_with(
            "machine1",
            cwd("storage", "analyses", str(task.id), "memory.dmp"))
        manager.machinery.stop.assert_called_once_with("machine1")
        resulserver_obj.del_task.assert_called_once_with(
            task.db_task, manager.machine)
        manager.route.unroute_network.assert_called_once()

    def test_run(self):
        self.create_cwd()
        manager = self.get_manager()
        manager.init(self.db)
        manager.start_and_wait = mock.MagicMock(return_value=True)
        manager.stop_and_wait = mock.MagicMock()
        manager.task.process = mock.MagicMock(return_value=True)
        manager.set_analysis_status = mock.MagicMock()
        manager.release_machine_lock = mock.MagicMock()
        manager.run()
        manager.start_and_wait.assert_called_once()
        manager.stop_and_wait.assert_called_once()
        manager.set_analysis_status.assert_called_once_with("stopped", wait=True)
        manager.task.process.assert_called_once()

    def test_run_fail(self):
        self.create_cwd()
        manager = self.get_manager()
        manager.init(self.db)
        manager.start_and_wait = mock.MagicMock(return_value=False)
        manager.stop_and_wait = mock.MagicMock()
        manager.task.process = mock.MagicMock(return_value=True)
        manager.set_analysis_status = mock.MagicMock()
        manager.release_machine_lock = mock.MagicMock()
        manager.run()
        manager.start_and_wait.assert_called_once()
manager.stop_and_wait.assert_called_once() manager.set_analysis_status.assert_called_once_with("failed", wait=True) manager.task.process.assert_called_once() def test_on_status_starting(self): manager = self.get_manager() manager.init(self.db) manager.route.route = "none" manager.on_status_starting(self.db) db_task = self.db.view_task(manager.task.id) assert db_task.machine == "machine1" assert db_task.route == "none" def test_on_status_stopped(self): manager = self.get_manager() task_json_path = cwd("task.json", analysis=manager.task.id) manager.init(self.db) manager.machinery = mock.MagicMock() # Remove because init creates it. We need to check if it was created # on status stopped os.remove(task_json_path) manager.on_status_stopped(self.db) db_task = self.db.view_task(manager.task.id) assert manager.task.db_task is not db_task assert db_task.status == "completed" assert os.path.isfile(task_json_path) manager.machinery.release.assert_called_once_with("machine1") def test_on_status_failed(self): manager = self.get_manager() manager.init(self.db) manager.on_status_failed(self.db) manager.machinery.release.assert_called_once_with("machine1") def test_finalize(self): manager = self.get_manager() task_json_path = cwd("task.json", analysis=manager.task.id) manager.init(self.db) manager.processing_success = True manager.release_machine_lock = mock.MagicMock() # Remove because init creates it. 
We need to check if it was created # on status stopped os.remove(task_json_path) manager.finalize(self.db) db_task = self.db.view_task(manager.task.id) assert manager.task.db_task is not db_task assert db_task.status == "reported" assert os.path.isfile(task_json_path) manager.release_machine_lock.assert_called_once() def test_finalize_analysis_failed(self): self.create_cwd(cfg={"cuckoo": {"cuckoo": {"process_results": False}}}) manager = self.get_manager() task_json_path = cwd("task.json", analysis=manager.task.id) manager.init(self.db) manager.analysis.status = "running" manager.release_machine_lock = mock.MagicMock() # Remove because init creates it. We need to check if it was created # on status stopped os.remove(task_json_path) manager.finalize(self.db) db_task = self.db.view_task(manager.task.id) assert manager.task.db_task is not db_task assert db_task.status == "failed_analysis" assert os.path.isfile(task_json_path) manager.release_machine_lock.assert_called_once() def test_finalize_process_failed(self): TestRegular.createcwd = True self.create_cwd() manager = self.get_manager() task_json_path = cwd("task.json", analysis=manager.task.id) manager.init(self.db) manager.processing_success = False # Remove because init creates it. We need to check if it was created # on status stopped os.remove(task_json_path) manager.finalize(self.db) db_task = self.db.view_task(manager.task.id) assert manager.task.db_task is not db_task assert db_task.status == "failed_processing" assert os.path.isfile(task_json_path) def test_finalize_process_disabled(self): self.create_cwd(cfg={"cuckoo": {"cuckoo": {"process_results": False}}}) manager = self.get_manager() task_json_path = cwd("task.json", analysis=manager.task.id) manager.init(self.db) manager.processing_success = None # Remove because init creates it. 
We need to check if it was created # on status stopped os.remove(task_json_path) manager.finalize(self.db) db_task = self.db.view_task(manager.task.id) assert manager.task.db_task is not db_task assert db_task.status != "reported" assert db_task.status != "failed_processing" assert os.path.isfile(task_json_path) def test_support_list(self): for tasktype in ("regular", "baseline", "service"): assert tasktype in Regular.supports
class TestAnalysisManager(object):
    """Unit tests for the abstract AnalysisManager base class."""

    def setup_class(self):
        self.cwd = tempfile.mkdtemp()
        set_cwd(self.cwd)
        cuckoo_create()
        self.db = Database()
        self.db.connect()

    def teardown_class(self):
        if os.path.isdir(self.cwd):
            shutil.rmtree(self.cwd)

    def _manager(self):
        # Fresh AnalysisManager on a fake machine with mocked
        # scheduler/error-queue collaborators.
        return abstracts.AnalysisManager(
            FakeMachine(), mock.MagicMock(), mock.MagicMock()
        )

    def test_set_task(self):
        task = Task()
        task.load_from_db(task.add_path(__file__))
        mgr = self._manager()
        mgr.set_task(task)
        assert mgr.task == task
        assert isinstance(mgr.analysis, Analysis)
        assert mgr.name == "task_%s_AnalysisManager" % task.id

    def test_set_target(self):
        # The base class leaves set_target to its subclasses.
        task = Task()
        task.load_from_db(task.add_path(__file__))
        mgr = self._manager()
        with pytest.raises(NotImplementedError):
            mgr.set_target(task.targets)

    def test_build_options(self):
        task = Task()
        task.load_from_db(task.add_path(__file__, options={"free": "yes"}))
        mgr = self._manager()
        mgr.set_task(task)
        expected = {
            "clock": task.clock,
            "enforce_timeout": False,
            "id": task.id,
            "package": "",
            "target": None,
            "terminate_processes": False,
            "ip": "192.168.56.1",
            "port": 4242,
            "timeout": 120,
            "options": "free=yes"
        }
        # Options start empty and are only populated by build_options().
        assert mgr.options == {}
        mgr.build_options()
        assert mgr.options == expected
        # Passing an update dict merges its options with the existing ones.
        mgr.build_options({
            "file_name": "doge.py",
            "options": {"doges": "many"},
            "category": "file"
        })
        assert mgr.options["options"] == "doges=many,free=yes"
        assert mgr.options["file_name"] == "doge.py"
        assert mgr.options["category"] == "file"

    @mock.patch("time.sleep")
    def test_wait_finish(self, mts):
        task = Task()
        task.load_from_db(task.add_url("http://example.com/42"))
        mgr = self._manager()
        mgr.set_task(task)
        # NOTE(review): "stoppped" is kept verbatim from the original test;
        # confirm whether "stopped" was intended.
        mgr.analysis.status = "stoppped"
        # Status already final, so wait_finish must return without sleeping.
        mgr.wait_finish()
        mts.assert_not_called()

    def test_request_scheduler_action(self):
        task = Task()
        task.load_from_db(task.add_url("http://example.com/42"))
        mgr = self._manager()
        mgr.set_task(task)
        mgr.action_lock = mock.MagicMock()
        mgr.action_lock.locked = mock.MagicMock(return_value=False)
        mgr.request_scheduler_action()
        # Non-blocking acquire first, then a blocking one.
        mgr.action_lock.acquire.assert_has_calls([
            mock.call(False), mock.call(True)
        ])
        mgr.action_lock.release.assert_called_once()
        assert mgr.override_status is None

    def test_set_analysis_status(self):
        mgr = self._manager()
        mgr.analysis = mock.MagicMock()
        mgr.request_scheduler_action = mock.MagicMock()
        mgr.set_analysis_status("starting")
        mgr.analysis.set_status.assert_called_once_with("starting")
        # Without wait=True the scheduler is not asked to act.
        mgr.request_scheduler_action.assert_not_called()

    def test_set_analysis_status_request(self):
        mgr = self._manager()
        mgr.analysis = mock.MagicMock()
        mgr.request_scheduler_action = mock.MagicMock()
        mgr.set_analysis_status("starting", wait=True)
        mgr.analysis.set_status.assert_called_once_with("starting")
        mgr.request_scheduler_action.assert_called_once()

    def test_action_requested(self):
        mgr = self._manager()
        mgr.analysis = mock.MagicMock()
        mgr.action_lock = mock.MagicMock()
        mgr.action_lock.locked = mock.MagicMock(return_value=True)
        mgr.analysis.changed = True
        assert mgr.action_requested()

    def test_get_analysis_status(self):
        mgr = self._manager()
        mgr.analysis = mock.MagicMock()
        mgr.analysis.get_status = mock.MagicMock(return_value="starting")
        assert mgr.get_analysis_status() == "starting"

    def test_get_analysis_status_overriden(self):
        mgr = self._manager()
        mgr.analysis = mock.MagicMock()
        # An override takes precedence over the stored status.
        mgr.override_status = "stopping"
        mgr.analysis.get_status = mock.MagicMock(return_value="starting")
        assert mgr.get_analysis_status() == "stopping"

    def test_init(self):
        assert self._manager().init(self.db)

    def test_release_machine_lock(self):
        mgr = self._manager()
        mgr.init(self.db)
        mgr.release_machine_lock()
        mgr.machine_lock.release.assert_called_once()
        assert mgr.lock_released

    def test_run(self):
        # run() is abstract on the base class.
        mgr = self._manager()
        with pytest.raises(NotImplementedError):
            mgr.run()
class DatabaseEngine(object):
    """Tests database stuff."""
    # Subclasses set URI to run this same suite against different database
    # engines; None selects the default dsn.
    URI = None

    def setup_class(self):
        set_cwd(tempfile.mkdtemp())
        self.d = Database()
        self.d.connect(dsn=self.URI)

    def add_url(self, url, priority=1, status="pending"):
        # Helper: insert a URL task and immediately force its status.
        task_id = self.d.add_url(url, priority=priority)
        self.d.set_status(task_id, status)
        return task_id

    def test_add_tasks(self):
        fd, sample_path = tempfile.mkstemp()
        os.write(fd, "hehe")
        os.close(fd)
        # Add task.
        count = self.d.Session().query(Task).count()
        self.d.add_path(sample_path)
        assert self.d.Session().query(Task).count() == count + 1
        # Add url.
        self.d.add_url("http://foo.bar")
        assert self.d.Session().query(Task).count() == count + 2

    def test_processing_get_task(self):
        # First reset all existing rows so that earlier exceptions don't affect
        # this unit test run.
        null, session = None, self.d.Session()
        session.query(Task).filter(
            Task.status == "completed", Task.processing == null
        ).update({
            "processing": "something",
        })
        session.commit()
        t1 = self.add_url("http://google.com/1", priority=1, status="completed")
        t2 = self.add_url("http://google.com/2", priority=2, status="completed")
        t3 = self.add_url("http://google.com/3", priority=1, status="completed")
        t4 = self.add_url("http://google.com/4", priority=1, status="completed")
        t5 = self.add_url("http://google.com/5", priority=3, status="completed")
        t6 = self.add_url("http://google.com/6", priority=1, status="completed")
        t7 = self.add_url("http://google.com/7", priority=1, status="completed")
        # Tasks come back highest priority first, then in insertion order
        # within the same priority, and None once exhausted.
        assert self.d.processing_get_task("foo") == t5
        assert self.d.processing_get_task("foo") == t2
        assert self.d.processing_get_task("foo") == t1
        assert self.d.processing_get_task("foo") == t3
        assert self.d.processing_get_task("foo") == t4
        assert self.d.processing_get_task("foo") == t6
        assert self.d.processing_get_task("foo") == t7
        assert self.d.processing_get_task("foo") is None

    def test_error_exists(self):
        task_id = self.add_url("http://google.com/")
        self.d.add_error("A" * 1024, task_id)
        assert len(self.d.view_errors(task_id)) == 1
        # Identical errors are stored separately, not deduplicated.
        self.d.add_error("A" * 1024, task_id)
        assert len(self.d.view_errors(task_id)) == 2

    def test_long_error(self):
        # A 1024-character message must be stored without truncation.
        self.add_url("http://google.com/")
        self.d.add_error("A" * 1024, 1)
        err = self.d.view_errors(1)
        assert err and len(err[0].message) == 1024

    def test_submit(self):
        dirpath = tempfile.mkdtemp()
        submit_id = self.d.add_submit(dirpath, "files", {
            "foo": "bar",
        })
        submit = self.d.view_submit(submit_id)
        assert submit.id == submit_id
        assert submit.tmp_path == dirpath
        assert submit.submit_type == "files"
        assert submit.data == {
            "foo": "bar",
        }

    def test_connect_no_create(self):
        # With create=False a dropped schema table must not be recreated;
        # a regular connect() recreates it.
        AlembicVersion.__table__.drop(self.d.engine)
        self.d.connect(dsn=self.URI, create=False)
        assert "alembic_version" not in self.d.engine.table_names()
        self.d.connect(dsn=self.URI)
        assert "alembic_version" in self.d.engine.table_names()

    def test_view_submit_tasks(self):
        submit_id = self.d.add_submit(None, None, None)
        t1 = self.d.add_path(__file__, custom="1", submit_id=submit_id)
        t2 = self.d.add_path(__file__, custom="2", submit_id=submit_id)
        submit = self.d.view_submit(submit_id)
        assert submit.id == submit_id
        # Without tasks=True the tasks relation is detached from the session.
        with pytest.raises(DetachedInstanceError):
            print submit.tasks
        submit = self.d.view_submit(submit_id, tasks=True)
        assert len(submit.tasks) == 2
        tasks = sorted((task.id, task) for task in submit.tasks)
        assert tasks[0][1].id == t1
        assert tasks[0][1].custom == "1"
        assert tasks[1][1].id == t2
        assert tasks[1][1].custom == "2"

    def test_add_reboot(self):
        t0 = self.d.add_path(__file__)
        s0 = self.d.add_submit(None, None, None)
        t1 = self.d.add_reboot(task_id=t0, submit_id=s0)
        t = self.d.view_task(t1)
        # The reboot task stores the original task id in its custom field.
        assert t.custom == "%s" % t0
        assert t.submit_id == s0

    def test_task_set_options(self):
        # Options may be passed as a dict or as a "k=v" string; both are
        # read back as a dict.
        t0 = self.d.add_path(__file__, options={"foo": "bar"})
        t1 = self.d.add_path(__file__, options="foo=bar")
        assert self.d.view_task(t0).options == {"foo": "bar"}
        assert self.d.view_task(t1).options == {"foo": "bar"}

    def test_task_tags_str(self):
        # Empty entries in a comma-separated tag string are dropped.
        task = self.d.add_path(__file__, tags="foo,,bar")
        tag0, tag1 = self.d.view_task(task).tags
        assert sorted((tag0.name, tag1.name)) == ["bar", "foo"]

    def test_task_tags_list(self):
        # Empty and non-string entries in a tag list are dropped.
        task = self.d.add_path(__file__, tags=["tag1", "tag2", "", 1, "tag3"])
        tag0, tag1, tag2 = self.d.view_task(task).tags
        assert sorted(
            (tag0.name, tag1.name, tag2.name)) == ["tag1", "tag2", "tag3"]

    def test_error_action(self):
        task_id = self.d.add_path(__file__)
        self.d.add_error("message1", task_id)
        self.d.add_error("message2", task_id, "actionhere")
        e1, e2 = self.d.view_errors(task_id)
        assert e1.message == "message1"
        assert e1.action is None
        assert e2.message == "message2"
        assert e2.action == "actionhere"

    def test_view_tasks(self):
        t1 = self.d.add_path(__file__)
        t2 = self.d.add_url("http://google.com/")
        tasks = self.d.view_tasks([t1, t2])
        assert tasks[0].to_dict() == self.d.view_task(t1).to_dict()
        assert tasks[1].to_dict() == self.d.view_task(t2).to_dict()

    def test_add_machine(self):
        # Machine options may be None, "", a space-separated string or a
        # list; the first two normalize to an empty list.
        self.d.add_machine(
            "name1", "label", "1.2.3.4", "windows", None,
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.add_machine(
            "name2", "label", "1.2.3.4", "windows", "",
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.add_machine(
            "name3", "label", "1.2.3.4", "windows", "opt1 opt2",
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.add_machine(
            "name4", "label", "1.2.3.4", "windows", ["opt3", "opt4"],
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        m1 = self.d.view_machine("name1")
        m2 = self.d.view_machine("name2")
        m3 = self.d.view_machine("name3")
        m4 = self.d.view_machine("name4")
        assert m1.options == []
        assert m2.options == []
        assert m3.options == ["opt1", "opt2"]
        assert m4.options == ["opt3", "opt4"]

    def test_set_machine_rcparams(self):
        self.d.add_machine(
            "name5", "label5", "1.2.3.4", "windows", None,
            "tag1 tag2", "int0", "snap0", "5.6.7.8", 2043
        )
        self.d.set_machine_rcparams("label5", {
            "protocol": "rdp",
            "host": "127.0.0.1",
            "port": 3389,
        })
        m = self.d.view_machine("name5")
        # Note: the port is read back as a string ("3389"), not an int.
        assert m.rcparams == {
            "protocol": "rdp",
            "host": "127.0.0.1",
            "port": "3389",
        }

    @mock.patch("sflock.magic")
    def test_add_sample(self, p):
        # Stub out libmagic so add_path accepts arbitrary random bytes.
        p.from_file.return_value = ""
        assert self.d.add_path(Files.temp_put(os.urandom(16))) is not None
class TestRoute(object):
    """Tests for Route.route_network()/unroute_network(): verifies the
    rooter commands issued for each routing mode."""

    def setup(self):
        self.cwd = tempfile.mkdtemp()
        set_cwd(self.cwd)
        cuckoo_create()
        self.db = Database()
        self.db.connect()

    def teardown(self):
        if os.path.isdir(self.cwd):
            shutil.rmtree(self.cwd)

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_none(self, mr):
        # Default route: nothing is configured, rooter is never invoked.
        route = Route(FakeTask(), FakeMachine())
        route.route_network()
        assert route.route == "none"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_not_called()

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_inetsim(self, mr):
        route = Route(FakeTask(options={"route": "inetsim"}), FakeMachine())
        route.route_network()
        assert route.route == "inetsim"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_called_once_with(
            "inetsim_enable", "192.168.56.10",
            "192.168.56.1", "vboxnet0", "2042", ""
        )

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_internet(self, mr):
        write_cuckoo_conf(cfg={
            "routing": {
                "routing": {
                    "internet": "eth0"
                }
            }
        })
        # Clear config cache so it will load new values
        config._cache = {}
        mr.return_value = True
        route = Route(FakeTask(options={"route": "internet"}), FakeMachine())
        route.route_network()
        assert route.route == "internet"
        assert route.interface == "eth0"
        assert route.rt_table == "main"
        mr.assert_has_calls([
            mock.call("nic_available", "eth0"),
            mock.call("drop_enable", "192.168.56.10", "192.168.56.1", "2042"),
            mock.call("forward_enable", "tap0", "eth0", "192.168.56.10"),
            mock.call("srcroute_enable", "main", "192.168.56.10")
        ])

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_tor(self, mr):
        route = Route(FakeTask(options={"route": "tor"}), FakeMachine())
        route.route_network()
        assert route.route == "tor"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_called_once_with(
            "proxy_enable", "192.168.56.10", "192.168.56.1", "5353", "9040"
        )

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_drop(self, mr):
        route = Route(FakeTask(options={"route": "drop"}), FakeMachine())
        route.route_network()
        assert route.route == "drop"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_called_once_with(
            "drop_enable", "192.168.56.10", "192.168.56.1", "2042"
        )

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_socks5(self, mr):
        write_cuckoo_conf(cfg={
            "auxiliary": {
                "redsocks": {
                    "enabled": True
                }
            }
        })
        route = Route(FakeTask(options={
            "route": "socks5",
            "socks5.localport": 4242
        }), FakeMachine())
        route.route_network()
        assert route.route == "socks5"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_called_once_with(
            "proxy_enable", "192.168.56.10", "192.168.56.1", "53", "4242"
        )

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_socks5_disabled(self, mr):
        # With redsocks disabled the socks5 request falls back to "none".
        write_cuckoo_conf(cfg={
            "auxiliary": {
                "redsocks": {
                    "enabled": False
                }
            }
        })
        route = Route(FakeTask(options={
            "route": "socks5",
            "socks5.localport": 4242
        }), FakeMachine())
        route.route_network()
        assert route.route == "none"
        # Fixed: compare string values with ==, not identity (`is`), which
        # only worked due to CPython string interning.
        assert route.task.options["route"] == "none"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_not_called()

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_socks5_noport(self, mr):
        # Without a socks5.localport option the route falls back to "none".
        write_cuckoo_conf(cfg={
            "auxiliary": {
                "redsocks": {
                    "enabled": True
                }
            }
        })
        route = Route(FakeTask(options={
            "route": "socks5"
        }), FakeMachine())
        route.route_network()
        assert route.route == "none"
        # Fixed: == instead of `is` (see test_route_network_socks5_disabled).
        assert route.task.options["route"] == "none"
        assert route.interface is None
        assert route.rt_table is None
        mr.assert_not_called()

    @mock.patch("cuckoo.common.routing.rooter")
    def test_route_network_vpn(self, mr):
        mr.return_value = True
        route = Route(FakeTask(options={"route": "vpn0"}), FakeMachine())
        route.route_network()
        assert route.route == "vpn0"
        assert route.interface == "tun0"
        assert route.rt_table == "tun0"
        mr.assert_has_calls([
            mock.call("nic_available", "tun0"),
            mock.call("forward_enable", "tap0", "tun0", "192.168.56.10"),
            mock.call("srcroute_enable", "tun0", "192.168.56.10")
        ])

    @mock.patch("cuckoo.common.routing.rooter")
    def test_unroute_network_none(self, mr):
        route = Route(FakeTask(), FakeMachine())
        route.route = "none"
        route.unroute_network()
        mr.assert_not_called()

    @mock.patch("cuckoo.common.routing.rooter")
    def test_unroute_network_vpn(self, mr):
        route = Route(FakeTask(), FakeMachine())
        route.route = "vpn0"
        # First call happens before interface/rt_table are set; the second
        # call must issue the disable commands.
        route.unroute_network()
        route.rt_table = "tun0"
        route.interface = "tun0"
        route.unroute_network()
        mr.assert_has_calls([
            mock.call("forward_disable", "tap0", "tun0", "192.168.56.10"),
            mock.call("srcroute_disable", "tun0", "192.168.56.10"),
        ])

    @mock.patch("cuckoo.common.routing.rooter")
    def test_unroute_network_inetsim(self, mr):
        route = Route(FakeTask(), FakeMachine())
        route.route = "inetsim"
        route.unroute_network()
        mr.assert_has_calls([
            mock.call(
                "inetsim_disable", "192.168.56.10",
                "192.168.56.1", "vboxnet0", "2042", ""
            )
        ])

    @mock.patch("cuckoo.common.routing.rooter")
    def test_unroute_network_tor(self, mr):
        route = Route(FakeTask(), FakeMachine())
        route.route = "tor"
        route.unroute_network()
        mr.assert_has_calls([
            mock.call(
                "proxy_disable", "192.168.56.10", "192.168.56.1",
                "5353", "9040"
            )
        ])

    @mock.patch("cuckoo.common.routing.rooter")
    def test_unroute_network_socks5(self, mr):
        route = Route(FakeTask(), FakeMachine())
        route.route = "socks5"
        route.task.options["socks5.localport"] = 4242
        route.unroute_network()
        mr.assert_has_calls([
            mock.call(
                "proxy_disable", "192.168.56.10", "192.168.56.1",
                "53", "4242",
            )
        ])