def cuckoo_clean_sorted_pcap_dump():
    """Clean up sorted pcap dumps.

    Removes the ``network.sorted_pcap_id`` reference from MongoDB and the
    ``dump_sorted.pcap`` file on disk, in batches of 100, until none remain.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Guard the connection BEFORE subscripting: the original did
    # connect_to_mongo()[mdb] first, so a failed connection (None) raised
    # TypeError before the "Can't connect" check could ever run.
    conn = connect_to_mongo()
    if conn is None:
        log.info("Can't connect to mongo")
        return
    results_db = conn[mdb]

    done = False
    while not done:
        # Materialize the cursor: Cursor.count() is deprecated (removed in
        # pymongo 4), and a list gives us a plain truthiness/len check.
        rtmp = list(
            results_db.analysis.find(
                {"network.sorted_pcap_id": {"$exists": True}}, {"info.id": 1}, sort=[("_id", -1)]
            ).limit(100)
        )
        if rtmp:
            for e in rtmp:
                if e["info"]["id"]:
                    log.info(e["info"]["id"])
                    try:
                        results_db.analysis.update(
                            {"info.id": int(e["info"]["id"])}, {"$unset": {"network.sorted_pcap_id": ""}}
                        )
                    except Exception:  # narrowed from a bare except
                        log.info("failed to remove sorted pcap from db for id %s" % (e["info"]["id"]))
                    try:
                        path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % (e["info"]["id"]), "dump_sorted.pcap")
                        os.remove(path)
                    except Exception as err:  # renamed: previously shadowed the loop variable "e"
                        log.info("failed to remove sorted_pcap from disk %s" % (err))
                else:
                    done = True
        else:
            done = True
def cuckoo_clean_lower_score(args):
    """Clean up tasks with score <= X
    It deletes all stored data from file system and configured databases
    (SQL and MongoDB for tasks.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    if not args.malscore:
        log.info("No malscore argument provided bailing")
        return

    create_structure()
    init_console_logging()

    id_arr = []
    if not is_reporting_db_connected():
        return

    if repconf.mongodb and repconf.mongodb.enabled:
        # Collect the ids of every analysis at or below the threshold.
        results_db = connect_to_mongo()[mdb]
        matching = list(results_db.analysis.find({"malscore": {"$lte": args.malscore}}))
        id_arr = [doc["info"]["id"] for doc in matching]
    elif repconf.elasticsearchdb.enabled:
        range_query = {"query": {"range": {"malscore": {"lte": args.malscore}}}}
        hits = all_docs(index=get_analysis_index(), query=range_query, _source=["info.id"])
        id_arr = [hit["_source"]["info"]["id"] for hit in hits]

    log.info("number of matching records %s" % len(id_arr))
    resolver_pool.map(lambda tid: delete_data(tid), id_arr)
def cuckoo_clean_bson_suri_logs():
    """Clean up raw suri log files probably not needed if storing in mongo.
    Does not remove extracted files.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()
    from glob import glob

    failed_tasks_a = db.list_tasks(status=TASK_FAILED_ANALYSIS)
    failed_tasks_p = db.list_tasks(status=TASK_FAILED_PROCESSING)
    failed_tasks_r = db.list_tasks(status=TASK_FAILED_REPORTING)
    failed_tasks_rc = db.list_tasks(status=TASK_RECOVERED)
    tasks_rp = db.list_tasks(status=TASK_REPORTED)
    for e in failed_tasks_a, failed_tasks_p, failed_tasks_r, failed_tasks_rc, tasks_rp:
        for el2 in e:
            new = el2.to_dict()
            task_id = new["id"]  # renamed from "id" to avoid shadowing the builtin
            path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id)
            if os.path.exists(path):
                jsonlogs = glob("%s/logs/*json*" % path)
                bsondata = glob("%s/logs/*.bson" % path)
                filesmeta = glob("%s/logs/files/*.meta" % path)
                for f in jsonlogs, bsondata, filesmeta:
                    for fe in f:
                        try:
                            log.info("removing %s" % fe)
                            os.remove(fe)
                        except Exception as Err:
                            # Fixed copy-pasted message: this loop removes
                            # json/bson/meta log files, not sorted pcaps.
                            log.info("failed to remove log file from disk %s" % Err)
def cuckoo_clean_lower_score(args):
    """Clean up tasks with score <= X
    It deletes all stored data from file system and configured databases
    (SQL and MongoDB for tasks.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    if not args.malscore:
        log.info("No malscore argument provided bailing")
        return

    create_structure()
    init_console_logging()
    id_arr = []

    # Guard the connection BEFORE subscripting: the original did
    # connect_to_mongo()[mdb] first, so a failed connection (None) raised
    # TypeError before the "Can't connect" check could ever run.
    conn = connect_to_mongo()
    if conn is None:
        log.info("Can't connect to mongo")
        return
    results_db = conn[mdb]

    result = list(results_db.analysis.find({"malscore": {"$lte": args.malscore}}))
    id_arr = [entry["info"]["id"] for entry in result]
    log.info("number of matching records %s" % len(id_arr))
    resolver_pool.map(lambda tid: delete_data(tid), id_arr)
def cuckoo_clean_failed_url_tasks():
    """Clean up failed url tasks.
    It deletes all stored data from file system and configured databases
    (SQL and MongoDB) for url-category analyses with no HTTP traffic.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Guard the connection BEFORE subscripting: the original did
    # connect_to_mongo()[mdb] first, so a failed connection (None) raised
    # TypeError before the "Can't connect" check could ever run.
    conn = connect_to_mongo()
    if conn is None:
        log.info("Can't connect to mongo")
        return
    results_db = conn[mdb]

    # Materialize the cursor so the emptiness test does not rely on the
    # deprecated Cursor.count() (removed in pymongo 4).
    rtmp = list(
        results_db.analysis.find(
            {"info.category": "url", "network.http.0": {"$exists": False}},
            {"info.id": 1},
            sort=[("_id", -1)],
        ).limit(100)
    )
    if rtmp:
        resolver_pool.map(lambda tid: delete_data(tid), rtmp)
def main():
    """Entry point: bootstrap Cuckoo, parse CLI flags and run the scheduler."""
    logo()
    check_working_directory()
    check_configs()
    check_version()
    create_structure()

    cli = argparse.ArgumentParser()
    cli.add_argument("-q", "--quiet", help="Display only error messages", action="store_true", required=False)
    cli.add_argument("-d", "--debug", help="Display debug messages", action="store_true", required=False)
    cli.add_argument("-v", "--version", action="version",
                     version="You are running Cuckoo Sandbox {0}".format(CUCKOO_VERSION))
    cli.add_argument("-a", "--artwork", help="Show artwork", action="store_true", required=False)
    opts = cli.parse_args()

    if opts.artwork:
        import time
        try:
            # Keep redrawing the logo until the user interrupts.
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    init_logging()

    if opts.quiet:
        log.setLevel(logging.WARN)
    elif opts.debug:
        log.setLevel(logging.DEBUG)

    init_modules()
    init_tasks()

    Resultserver()

    try:
        sched = Scheduler()
        sched.start()
    except KeyboardInterrupt:
        sched.stop()
def cape_clean_tlp():
    """Remove all stored data for TLP-flagged tasks."""
    create_structure()
    init_console_logging()

    if not is_reporting_db_connected():
        return

    for_removal = db.get_tlp_tasks()
    resolver_pool.map(lambda tid: delete_data(tid), for_removal)
def cuckoo_clean():
    """Clean up cuckoo setup.
    It deletes logs, all stored data from file system and configured
    databases (SQL and MongoDB).
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Drop all tables.
    db.drop()

    if repconf.mongodb.enabled:
        mongo_drop_database(mdb)
    elif repconf.elasticsearchdb.enabled and not repconf.elasticsearchdb.searchonly:
        analyses = all_docs(index=get_analysis_index(), query={"query": {"match_all": {}}}, _source=["info.id"])
        if analyses:
            for analysis in analyses:
                delete_analysis_and_related_calls(analysis["_source"]["info"]["id"])

    # Paths to clean.
    paths = [
        os.path.join(CUCKOO_ROOT, "db"),
        os.path.join(CUCKOO_ROOT, "log"),
        os.path.join(CUCKOO_ROOT, "storage"),
    ]

    # Delete various directories.
    for path in paths:
        if os.path.isdir(path):
            try:
                shutil.rmtree(path)
            except (IOError, OSError) as e:
                log.warning("Error removing directory %s: %s", path, e)

    # Delete all compiled Python objects ("*.pyc").
    for dirpath, dirnames, filenames in os.walk(CUCKOO_ROOT):
        for fname in filenames:
            if not fname.endswith(".pyc"):
                continue
            # os.walk() already yields dirpath rooted at CUCKOO_ROOT;
            # re-joining CUCKOO_ROOT doubled the prefix (and broke the
            # unlink) whenever CUCKOO_ROOT is a relative path.
            path = os.path.join(dirpath, fname)
            try:
                os.unlink(path)
            except (IOError, OSError) as e:
                log.warning("Error removing file %s: %s", path, e)
def cuckoo_clean():
    """Clean up the cuckoo setup: drop the SQL and MongoDB databases and
    remove the db/log/storage directories plus compiled *.pyc files."""
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Initialize the database connection.
    db = Database()

    # Drop all tables.
    db.drop()

    # Check if MongoDB reporting is enabled and drop that if it is.
    cfg = Config("reporting")
    if cfg.mongodb and cfg.mongodb.enabled:
        from pymongo import MongoClient
        host = cfg.mongodb.get("host", "127.0.0.1")
        port = cfg.mongodb.get("port", 27017)
        mdb = cfg.mongodb.get("db", "cuckoo")
        try:
            conn = MongoClient(host, port)
            conn.drop_database(mdb)
            # MongoClient.disconnect() was removed in pymongo 3.x;
            # close() is the supported way to release the connection.
            conn.close()
        except Exception:  # narrowed from bare except: don't mask KeyboardInterrupt/SystemExit
            log.warning("Unable to drop MongoDB database: %s", mdb)

    # Paths to clean
    paths = [
        os.path.join(CUCKOO_ROOT, "db"),
        os.path.join(CUCKOO_ROOT, "log"),
        os.path.join(CUCKOO_ROOT, "storage"),
    ]

    # Delete various directories.
    for path in paths:
        if os.path.isdir(path):
            try:
                shutil.rmtree(path)
            except (IOError, OSError) as e:
                log.warning("Error removing directory %s: %s", path, e)

    # Delete all compiled Python objects ("*.pyc").
    for dirpath, dirnames, filenames in os.walk(CUCKOO_ROOT):
        for fname in filenames:
            if not fname.endswith(".pyc"):
                continue
            # os.walk() already yields dirpath rooted at CUCKOO_ROOT;
            # re-joining CUCKOO_ROOT doubled the prefix whenever
            # CUCKOO_ROOT is a relative path.
            path = os.path.join(dirpath, fname)
            try:
                os.unlink(path)
            except (IOError, OSError) as e:
                log.warning("Error removing file %s: %s", path, e)
def cuckoo_clean_range_tasks(start, end):
    """Clean up tasks between start and end
    It deletes all stored data from file system and configured databases
    (SQL and MongoDB for selected tasks.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # list_tasks bounds are exclusive, hence the -1/+1 to include start/end.
    tasks_in_range = db.list_tasks(id_after=start - 1, id_before=end + 1)
    resolver_pool.map(lambda task: delete_data(task.to_dict()["id"]), tasks_in_range)
def cape_clean_tlp():
    """Remove all stored data for TLP-flagged tasks."""
    create_structure()
    init_console_logging()

    # Guard the connection BEFORE subscripting: the original did
    # connect_to_mongo()[mdb] first, so a failed connection (None) raised
    # TypeError before the "Can't connect" check could ever run.
    conn = connect_to_mongo()
    if conn is None:
        log.info("Can't connect to mongo")
        return
    results_db = conn[mdb]  # NOTE(review): unused below — kept only as a connectivity probe

    tlp_tasks = db.get_tlp_tasks()
    resolver_pool.map(lambda tid: delete_data(tid), tlp_tasks)
def cuckoo_init(quiet=False, debug=False, artwork=False, test=False, ml=False):
    """Cuckoo initialization workflow.
    @param quiet: if set enable silent mode, it doesn't print anything except warnings
    @param debug: if set enable debug mode, it print all debug messages
    @param artwork: if set it will print only artworks, forever
    @param test: enable integration test mode, used only for testing
    @param ml: do CuckooML analysis of locally stored samples
    """
    previous_cwd = os.getcwd()
    os.chdir(CUCKOO_ROOT)

    logo()
    check_working_directory()
    check_configs()
    check_version()
    create_structure()

    if artwork:
        import time
        try:
            # Redraw the logo forever until interrupted.
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    init_logging()

    # Flag precedence: quiet wins over debug.
    if quiet:
        log.setLevel(logging.WARN)
    elif debug:
        log.setLevel(logging.DEBUG)

    # CuckooML mode runs its own analysis and skips the normal bootstrap.
    if ml:
        init_cuckooml()
        return

    init_modules()
    init_tasks()
    init_yara()
    init_binaries()
    init_rooter()
    init_routing()

    # TODO: This is just a temporary hack, we need an actual test suite to
    # integrate with Travis-CI.
    if test:
        return

    ResultServer()

    os.chdir(previous_cwd)
def cuckoo_clean_before_day(args): """Clean up failed tasks It deletes all stored data from file system and configured databases (SQL and MongoDB for tasks completed before now - days. """ # Init logging. # This need to init a console logger handler, because the standard # logger (init_logging()) logs to a file which will be deleted. if not args.delete_older_than_days: log.info("No days argument provided bailing") return else: days = args.delete_older_than_days create_structure() init_console_logging() id_arr = [] if not is_reporting_db_connected(): return if repconf.mongodb and repconf.mongodb.enabled: results_db = connect_to_mongo()[mdb] added_before = datetime.now() - timedelta(days=int(days)) if args.files_only_filter: log.info("file filter applied") old_tasks = db.list_tasks(added_before=added_before, category="file") elif args.urls_only_filter: log.info("url filter applied") old_tasks = db.list_tasks(added_before=added_before, category="url") else: old_tasks = db.list_tasks(added_before=added_before) for e in old_tasks: id_arr.append({"info.id": (int(e.to_dict()["id"]))}) log.info(("number of matching records %s before suri/custom filter " % len(id_arr))) if id_arr and args.suricata_zero_alert_filter: result = list( results_db.analysis.find({"suricata.alerts.alert": {"$exists": False}, "$or": id_arr}, {"info.id": 1, "_id": 0}) ) id_arr = [entry["info"]["id"] for entry in result] if id_arr and args.custom_include_filter: result = list( results_db.analysis.find( {"info.custom": {"$regex": args.custom_include_filter}, "$or": id_arr}, {"info.id": 1, "_id": 0} ) ) id_arr = [entry["info"]["id"] for entry in result] log.info("number of matching records %s" % len(id_arr)) delete_bulk_tasks_n_folders(id_arr, args.delete_mongo)
def cuckoo_clean_failed_url_tasks():
    """Clean up failed url tasks.
    It deletes all stored data from file system and configured databases
    (SQL, MongoDB or Elasticsearch) for url tasks with no HTTP traffic.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()
    if not is_reporting_db_connected():
        return

    if repconf.mongodb.enabled:
        # Materialize the cursor: pymongo cursors have no __len__, so the
        # previous "len(rtmp)" check raised TypeError on this branch.
        rtmp = list(
            mongo_find(
                "analysis",
                {"info.category": "url", "network.http.0": {"$exists": False}},
                {"info.id": 1},
                sort=[("_id", -1)],
            ).limit(100)
        )
    elif repconf.elasticsearchdb.enabled:
        # Match the MongoDB branch: url tasks WITHOUT http traffic. The
        # previous query required "exists network.http", which selected the
        # opposite set (tasks that DID produce HTTP traffic).
        rtmp = [
            d["_source"]
            for d in all_docs(
                index=get_analysis_index(),
                query={
                    "query": {
                        "bool": {
                            "must": [{"match": {"info.category": "url"}}],
                            "must_not": [{"exists": {"field": "network.http"}}],
                        }
                    }
                },
                _source=["info.id"],
            )
        ]
    else:
        rtmp = []

    if rtmp:
        resolver_pool.map(lambda tid: delete_data(tid), rtmp)
def cuckoo_clean_pending_tasks():
    """Clean up pending tasks
    It deletes all stored data from file system and configured databases
    (SQL and MongoDB for pending tasks.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    if not is_reporting_db_connected():
        return

    for_deletion = db.list_tasks(status=TASK_PENDING)
    resolver_pool.map(lambda task: delete_data(task.to_dict()["id"]), for_deletion)
def main():
    """Entry point: parse CLI flags, initialize Cuckoo and run the scheduler."""
    logo()
    check_working_directory()
    check_configs()
    check_version()
    create_structure()

    cli = argparse.ArgumentParser()
    cli.add_argument("-q", "--quiet", help="Display only error messages", action="store_true", required=False)
    cli.add_argument("-d", "--debug", help="Display debug messages", action="store_true", required=False)
    cli.add_argument("-v", "--version", action="version",
                     version="You are running Cuckoo Sandbox {0}".format(CUCKOO_VERSION))
    cli.add_argument("-a", "--artwork", help="Show artwork", action="store_true", required=False)
    cli.add_argument("-t", "--test", help="Test startup", action="store_true", required=False)
    opts = cli.parse_args()

    if opts.artwork:
        import time
        try:
            # Keep redrawing the logo until the user interrupts.
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    init_logging()

    if opts.quiet:
        log.setLevel(logging.WARN)
    elif opts.debug:
        log.setLevel(logging.DEBUG)

    init_modules()
    init_tasks()

    # This is just a temporary hack, we need an actual test suite to integrate
    # with Travis-CI.
    if opts.test:
        return

    Resultserver()

    try:
        sched = Scheduler()
        sched.start()
    except KeyboardInterrupt:
        sched.stop()
def cuckoo_clean_failed_tasks():
    """Clean up failed tasks
    It deletes all stored data from file system and configured databases
    (SQL and MongoDB for failed tasks.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Wipe each failure category in turn, in the same order as before:
    # failed analysis, failed processing, failed reporting, recovered.
    for failure_status in (TASK_FAILED_ANALYSIS, TASK_FAILED_PROCESSING, TASK_FAILED_REPORTING, TASK_RECOVERED):
        tasks = db.list_tasks(status=failure_status)
        resolver_pool.map(lambda tid: delete_data(tid.to_dict()["id"]), tasks)
def cuckoo_init(quiet=False, debug=False, artwork=False, test=False):
    """Initialize Cuckoo: sanity checks, logging, modules and result server."""
    previous_cwd = os.getcwd()
    os.chdir(CUCKOO_ROOT)

    logo()
    check_working_directory()
    check_configs()
    create_structure()

    if artwork:
        import time
        try:
            # Redraw the logo forever until interrupted.
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    # Map the verbosity flags onto one logging level (quiet wins over debug).
    level = logging.INFO
    if quiet:
        level = logging.WARN
    elif debug:
        level = logging.DEBUG
    log.setLevel(level)
    init_logging(level)

    check_webgui_mongo()
    init_modules()
    init_tasks()
    init_yara()
    init_rooter()
    init_routing()

    # This is just a temporary hack, we need an actual test suite to integrate
    # with Travis-CI.
    if test:
        return

    ResultServer()

    os.chdir(previous_cwd)
def cuckoo_clean():
    """Clean up the cuckoo setup: drop the SQL database and remove the
    db/log/storage directories plus all compiled *.pyc files."""
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Initialize the database connection.
    db = Database()

    # Drop all tables.
    db.drop()

    # Paths to clean
    paths = [
        os.path.join(CUCKOO_ROOT, "db"),
        os.path.join(CUCKOO_ROOT, "log"),
        os.path.join(CUCKOO_ROOT, "storage"),
    ]

    # Delete various directories.
    for path in paths:
        if os.path.isdir(path):
            try:
                shutil.rmtree(path)
            except (IOError, OSError) as e:
                log.warning("Error removing directory %s: %s", path, e)

    # Delete all compiled Python objects ("*.pyc").
    for dirpath, dirnames, filenames in os.walk(CUCKOO_ROOT):
        for fname in filenames:
            if not fname.endswith(".pyc"):
                continue
            # os.walk() already yields dirpath rooted at CUCKOO_ROOT;
            # re-joining CUCKOO_ROOT doubled the prefix (and broke the
            # unlink) whenever CUCKOO_ROOT is a relative path.
            path = os.path.join(dirpath, fname)
            try:
                os.unlink(path)
            except (IOError, OSError) as e:
                log.warning("Error removing file %s: %s", path, e)
def cuckoo_init(quiet=False, debug=False, artwork=False, test=False):
    """Bootstrap Cuckoo: sanity checks, logging, config/modules and result server."""
    previous_cwd = os.getcwd()
    os.chdir(CUCKOO_ROOT)

    logo()
    check_working_directory()
    check_configs()
    check_version()
    create_structure()

    if artwork:
        import time
        try:
            # Redraw the logo forever until interrupted.
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    init_logging()

    # Flag precedence: quiet wins over debug.
    if quiet:
        log.setLevel(logging.WARN)
    elif debug:
        log.setLevel(logging.DEBUG)

    init_config()
    init_modules()
    init_tasks()
    init_yara()

    # This is just a temporary hack, we need an actual test suite to integrate
    # with Travis-CI.
    if test:
        return

    ResultServer()

    os.chdir(previous_cwd)
def setUp(self):
    """Build a throwaway analysis fixture before each test."""
    create_structure()
    fixture = Dictionary()
    fixture["id"] = "test-cuckoo-remove-me"
    self.anal = fixture
    self.a = AnalysisManager(self.anal)
def cuckoo_clean():
    """Clean up cuckoo setup.
    It deletes logs, all stored data from file system and configured databases
    (SQL and MongoDB.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Drop all tables.
    db.drop()

    conn = connect_to_mongo()
    if not conn:
        log.info("Can't connect to mongo")
        return
    try:
        conn.drop_database(mdb)
        conn.close()
    except Exception:  # narrowed from bare except: don't mask KeyboardInterrupt/SystemExit
        log.warning("Unable to drop MongoDB database: %s", mdb)

    if rep_config.elasticsearchdb and rep_config.elasticsearchdb.enabled and not rep_config.elasticsearchdb.searchonly:
        es = False
        es, delidx = connect_to_es()
        if not es:
            return
        analyses = es.search(index=delidx, doc_type="analysis", q="*")["hits"]["hits"]
        if analyses:
            for analysis in analyses:
                esidx = analysis["_index"]
                esid = analysis["_id"]
                # Delete per-call documents first, if behavior data exists.
                # Use .get() so documents without a "behavior" key don't
                # abort the whole cleanup with a KeyError.
                if analysis["_source"].get("behavior"):
                    for process in analysis["_source"]["behavior"]["processes"]:
                        for call in process["calls"]:
                            es.delete(
                                index=esidx,
                                doc_type="calls",
                                id=call,
                            )
                # Delete the analysis results
                es.delete(
                    index=esidx,
                    doc_type="analysis",
                    id=esid,
                )

    # Paths to clean.
    paths = [
        os.path.join(CUCKOO_ROOT, "db"),
        os.path.join(CUCKOO_ROOT, "log"),
        os.path.join(CUCKOO_ROOT, "storage"),
    ]

    # Delete various directories.
    for path in paths:
        if os.path.isdir(path):
            try:
                shutil.rmtree(path)
            except (IOError, OSError) as e:
                log.warning("Error removing directory %s: %s", path, e)

    # Delete all compiled Python objects ("*.pyc").
    for dirpath, dirnames, filenames in os.walk(CUCKOO_ROOT):
        for fname in filenames:
            if not fname.endswith(".pyc"):
                continue
            # os.walk() already yields dirpath rooted at CUCKOO_ROOT;
            # re-joining CUCKOO_ROOT doubled the prefix whenever
            # CUCKOO_ROOT is a relative path.
            path = os.path.join(dirpath, fname)
            try:
                os.unlink(path)
            except (IOError, OSError) as e:
                log.warning("Error removing file %s: %s", path, e)
def cuckoo_clean_sorted_pcap_dump():
    """Clean up sorted pcap dumps (MongoDB or Elasticsearch backend).

    Removes the ``network.sorted_pcap_id`` field from the reporting DB and
    the ``dump_sorted.pcap`` file on disk, in batches of 100.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()
    if not is_reporting_db_connected():
        return

    if repconf.mongodb.enabled:
        results_db = connect_to_mongo()[mdb]
    elif repconf.elasticsearchdb.enabled:
        es = connect_to_es()

    done = False
    while not done:
        if repconf.mongodb and repconf.mongodb.enabled:
            # Materialize the cursor: pymongo cursors have no __len__, so
            # the previous "len(rtmp)" check raised TypeError.
            rtmp = list(
                results_db.analysis.find(
                    {"network.sorted_pcap_id": {"$exists": True}}, {"info.id": 1}, sort=[("_id", -1)]
                ).limit(100)
            )
        elif repconf.elasticsearchdb.enabled:
            # Carry the document's _index and _id alongside the projected
            # source: the update call below needs them, but the old code
            # read e["index"] from a bare _source that never contained it.
            rtmp = [
                {"index": d["_index"], "doc_id": d["_id"], "info": d["_source"]["info"]}
                for d in all_docs(
                    index=get_analysis_index(),
                    query={"query": {"exists": {"field": "network.sorted_pcap_id"}}},
                    _source=["info.id"],
                )
            ]
        else:
            rtmp = []

        if rtmp:
            for e in rtmp:
                if e["info"]["id"]:
                    log.info(e["info"]["id"])
                    try:
                        if repconf.mongodb and repconf.mongodb.enabled:
                            results_db.analysis.update(
                                {"info.id": int(e["info"]["id"])}, {"$unset": {"network.sorted_pcap_id": ""}}
                            )
                        elif repconf.elasticsearchdb.enabled:
                            # Remove the field (the Mongo-style $unset) via an
                            # update script; the old bare-field body was not a
                            # valid update request, and merely blanking the
                            # field would keep matching the "exists" query and
                            # loop forever. TODO(review): confirm ES version
                            # accepts the inline-script form used here.
                            es.update(
                                index=e["index"],
                                id=e["doc_id"],
                                body={"script": "ctx._source.remove('network.sorted_pcap_id')"},
                            )
                    except Exception:
                        log.info("failed to remove sorted pcap from db for id %s" % (e["info"]["id"]))
                    try:
                        path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % (e["info"]["id"]), "dump_sorted.pcap")
                        os.remove(path)
                    except Exception as err:  # renamed: previously shadowed the loop variable "e"
                        log.info("failed to remove sorted_pcap from disk %s" % (err))
                else:
                    done = True
        else:
            done = True