def __init__(self):
    """Prepare the manager: global config, database handle, clean slate.

    Wipes the machines table so it can be repopulated from the
    configuration file on every start.
    """
    self.module_name = ""
    self.options = None
    # Global Cuckoo configuration (conf/cuckoo.conf).
    conf_path = os.path.join(CUCKOO_ROOT, "conf", "cuckoo.conf")
    self.options_globals = Config(conf_path)
    # Database pointer.
    self.db = Database()
    # Machine table is cleaned to be filled from configuration file
    # at each start.
    self.db.clean_machines()
def run(self):
    """Run manager thread."""
    outcome = self.launch_analysis()

    # Flag the task as complete in the database before post-processing.
    Database().complete(self.task.id, outcome)

    self.process_results()

    log.debug("Releasing database task #%d with status %s",
              self.task.id, outcome)
    log.info("Task #%d: analysis procedure completed", self.task.id)
def __init__(self, task_id):
    """Load the task record and resolve paths used for reporting.

    @param task_id: ID of the analysis whose results will be reported.
    """
    # Task row fetched from the database, kept as a plain dict.
    self.task = Database().view_task(task_id).to_dict()
    # Folder holding this analysis' stored artifacts.
    self.analysis_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task_id))
    # Reporting configuration (conf/reporting.conf).
    self.cfg = Config(cfg=os.path.join(CUCKOO_ROOT, "conf", "reporting.conf"))
def run(self):
    """Run debug analysis.

    @return: debug information dict with "log" (analysis.log contents)
        and "errors" (messages recorded for this task in the database).
    @raise CuckooProcessingError: if the log cannot be opened or decoded.
    """
    self.key = "debug"
    debug = {"log": "", "errors": []}

    if os.path.exists(self.log_path):
        try:
            # Use a context manager so the handle is closed deterministically
            # (the original left the file object to the garbage collector).
            # ValueError also covers UnicodeDecodeError from bad UTF-8.
            with codecs.open(self.log_path, "rb", "utf-8") as log_file:
                debug["log"] = log_file.read()
        except ValueError as e:
            raise CuckooProcessingError("Error decoding %s: %s" % (self.log_path, e))
        except (IOError, OSError) as e:
            raise CuckooProcessingError("Error opening %s: %s" % (self.log_path, e))

    # Attach any errors logged against this task in the database.
    for error in Database().view_errors(int(self.task["id"])):
        debug["errors"].append(error.message)

    return debug
# NOTE(review): this `except` closes a `try` that begins before this chunk,
# presumably wrapping the Bottle imports — confirm against the full file.
except ImportError:
    sys.stderr.write("ERROR: Bottle library is missing")
    sys.exit(1)

logging.basicConfig()
# Make the project root importable so the lib.dragon.* packages resolve.
sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), ".."))

from lib.dragon.core.database import Database
from lib.dragon.common.constants import CUCKOO_ROOT
from lib.dragon.common.utils import store_temp_file

# Templating engine, loading HTML templates from data/html.
env = Environment()
env.loader = FileSystemLoader(os.path.join(CUCKOO_ROOT, "data", "html"))

# Global db pointer.
db = Database()

@hook("after_request")
def custom_headers():
    """Set some custom headers across all HTTP responses."""
    # Hide the real server software and harden browser-side behavior.
    response.headers["Server"] = "Machete Server"
    response.headers["X-Content-Type-Options"] = "nosniff"
    response.headers["X-Frame-Options"] = "DENY"
    response.headers["X-XSS-Protection"] = "1; mode=block"
    # Disable client/proxy caching of responses.
    response.headers["Pragma"] = "no-cache"
    response.headers["Cache-Control"] = "no-cache"
    response.headers["Expires"] = "0"

@route("/")
def index():
    # NOTE(review): this handler continues past the end of this chunk.
    context = {}
def main():
    """Parse command-line options and submit the target (URL, file or
    folder) to the database as one or more analysis tasks.

    @return: False on error, None otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("target", type=str,
                        help="URL, path to the file or folder to analyze")
    parser.add_argument("--url", action="store_true", default=False,
                        help="Specify whether the target is an URL",
                        required=False)
    parser.add_argument("--package", type=str, action="store", default="",
                        help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="",
                        help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0,
                        help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="",
                        help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")",
                        required=False)
    parser.add_argument("--priority", type=int, action="store", default=1,
                        help="Specify a priority for the analysis represented by an integer",
                        required=False)
    parser.add_argument("--machine", type=str, action="store", default="",
                        help="Specify the identifier of a machine you want to use",
                        required=False)
    parser.add_argument("--platform", type=str, action="store", default="",
                        help="Specify the operating system platform you want to use (windows/darwin/linux)",
                        required=False)
    parser.add_argument("--memory", action="store_true", default=False,
                        help="Enable to take a memory dump of the analysis machine",
                        required=False)
    parser.add_argument("--enforce-timeout", action="store_true", default=False,
                        help="Enable to force the analysis to run for the full timeout period",
                        required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    db = Database()

    # Try to get input as utf-8.
    # BUG FIX: decoding a byte string containing invalid UTF-8 raises
    # UnicodeDecodeError, which the original only-UnicodeEncodeError clause
    # did not catch (UnicodeEncodeError occurs on the py2 unicode-input
    # path, via the implicit ASCII encode). Fall back to the raw value in
    # both cases.
    try:
        target = args.target.decode("utf-8")
    except (UnicodeDecodeError, UnicodeEncodeError):
        target = args.target

    if args.url:
        # URL submissions produce exactly one task.
        task_id = db.add_url(target,
                             package=args.package,
                             timeout=args.timeout,
                             options=args.options,
                             priority=args.priority,
                             machine=args.machine,
                             platform=args.platform,
                             custom=args.custom,
                             memory=args.memory,
                             enforce_timeout=args.enforce_timeout)
        print(bold(green("Success")) +
              ": URL \"{0}\" added as task with ID {1}".format(target, task_id))
    else:
        # Get absolute path to deal with relative.
        path = os.path.abspath(target)

        if not os.path.exists(path):
            print(bold(red("Error")) +
                  ": the specified file/folder does not exist at path \"{0}\"".format(path))
            return False

        # A folder target expands into one task per contained file.
        files = []
        if os.path.isdir(path):
            for dirname, dirnames, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)
                    if os.path.isfile(file_path):
                        files.append(file_path)
        else:
            files.append(path)

        for file_path in files:
            task_id = db.add_path(file_path=file_path,
                                  package=args.package,
                                  timeout=args.timeout,
                                  options=args.options,
                                  priority=args.priority,
                                  machine=args.machine,
                                  platform=args.platform,
                                  custom=args.custom,
                                  memory=args.memory,
                                  enforce_timeout=args.enforce_timeout)
            if task_id:
                print(bold(green("Success")) +
                      ": File \"{0}\" added as task with ID {1}".format(file_path, task_id))
            else:
                print(bold(red("Error")) + ": adding task to database")
def emit(self, record):
    """Store a log record as a task error when it carries a task_id.

    Records without a `task_id` attribute (set via `extra={"task_id": ...}`)
    are ignored.
    """
    if not hasattr(record, "task_id"):
        return
    Database().add_error(record.msg, int(record.task_id))
def __init__(self):
    """Initialize scheduler state: run flag, configuration, database."""
    # Loop control flag; cleared to request a shutdown.
    self.running = True
    # Global configuration and database pointers.
    self.cfg = Config()
    self.db = Database()
def launch_analysis(self):
    """Start analysis.

    Drives one task end to end: storage setup, sample storage, machine
    acquisition, optional sniffer, guest start/wait/save, then teardown.
    Returns True when the guest completed and results were retrieved,
    False otherwise.

    NOTE(review): `mmanager` is not defined anywhere in this method —
    presumably a module-level machine-manager global; confirm against the
    full file.
    """
    sniffer = None
    succeeded = False

    log.info("Starting analysis of %s \"%s\" (task=%d)",
             self.task.category.upper(), self.task.target, self.task.id)

    # Initialize the analysis folders.
    if not self.init_storage():
        return False

    if self.task.category == "file":
        # Store a copy of the original file.
        if not self.store_file():
            return False

    # Generate the analysis configuration file.
    options = self.build_options()

    # Acquire analysis machine.
    machine = self.acquire_machine()

    # At this point we can tell the Resultserver about it.
    Resultserver().add_task(self.task, machine)

    # If enabled in the configuration, start the tcpdump instance.
    if self.cfg.sniffer.enabled:
        sniffer = Sniffer(self.cfg.sniffer.tcpdump)
        sniffer.start(interface=self.cfg.sniffer.interface,
                      host=machine.ip,
                      file_path=os.path.join(self.storage, "dump.pcap"))

    try:
        # Mark the selected analysis machine in the database as started.
        guest_log = Database().guest_start(self.task.id,
                                           machine.name,
                                           machine.label,
                                           mmanager.__class__.__name__)
        # Start the machine.
        mmanager.start(machine.label)
    except CuckooMachineError as e:
        log.error(str(e), extra={"task_id": self.task.id})

        # Stop the sniffer.
        if sniffer:
            sniffer.stop()

        return False
    else:
        try:
            # Initialize the guest manager.
            guest = GuestManager(machine.name, machine.ip, machine.platform)
            # Start the analysis.
            guest.start_analysis(options)
        except CuckooGuestError as e:
            log.error(str(e), extra={"task_id": self.task.id})

            # Stop the sniffer.
            if sniffer:
                sniffer.stop()

            return False
        else:
            # Wait for analysis completion.
            try:
                guest.wait_for_completion()
                succeeded = True
            except CuckooGuestError as e:
                log.error(str(e), extra={"task_id": self.task.id})
                succeeded = False

            # Retrieve the analysis results and store them.
            # NOTE(review): this unconditionally overwrites the outcome of
            # wait_for_completion() above — a failed wait followed by a
            # successful save_results still reports success; confirm this
            # is intended.
            try:
                guest.save_results(self.storage)
                succeeded = True
            except CuckooGuestError as e:
                log.error(str(e), extra={"task_id": self.task.id})
                succeeded = False
    finally:
        # Teardown runs on every path, including the early `return False`
        # branches above.
        # Stop the sniffer.
        if sniffer:
            sniffer.stop()

        # Take a memory dump of the machine before shutting it off.
        if self.cfg.cuckoo.memory_dump or self.task.memory:
            try:
                mmanager.dump_memory(machine.label,
                                     os.path.join(self.storage, "memory.dmp"))
            except NotImplementedError:
                log.error("The memory dump functionality is not available "
                          "for current machine manager")
            except CuckooMachineError as e:
                log.error(e)

        try:
            # Stop the analysis machine.
            mmanager.stop(machine.label)
        except CuckooMachineError as e:
            log.warning("Unable to stop machine %s: %s", machine.label, e)

        # Mark the machine in the database as stopped.
        Database().guest_stop(guest_log)

        try:
            # Release the analysis machine.
            mmanager.release(machine.label)
        except CuckooMachineError as e:
            log.error("Unable to release machine %s, reason %s. "
                      "You might need to restore it manually",
                      machine.label, e)

        # After all this, we can make the Resultserver forget about it.
        Resultserver().del_task(self.task, machine)

    return succeeded
def __init__(self, task_id):
    """Resolve the task record and its storage folder.

    @param task_id: ID of the analyses to process.
    """
    task_row = Database().view_task(task_id)
    self.task = task_row.to_dict()
    self.analysis_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task_id))