def check_version():
    """Check whether a newer Cuckoo release is available upstream."""
    # Honor the operator's opt-out before touching the network.
    if not Config().cuckoo.version_check:
        return
    print(" Checking for updates...")

    endpoint = "http://api.cuckoosandbox.org/checkversion.php"
    payload = urllib.urlencode({"version": CUCKOO_VERSION})
    try:
        reply = urllib2.urlopen(urllib2.Request(endpoint, payload))
    except (urllib2.URLError, urllib2.HTTPError):
        print(red(" Failed! ") + "Unable to establish connection.\n")
        return

    try:
        parsed = json.loads(reply.read())
    except ValueError:
        print(red(" Failed! ") + "Invalid response.\n")
        return

    # The API signals problems via an "error" field; stay silent on error,
    # exactly as before.
    if parsed["error"]:
        return
    if parsed["response"] == "NEW_VERSION":
        notice = "Cuckoo Sandbox version {0} is available " \
                 "now.\n".format(parsed["current"])
        print(red(" Outdated! ") + notice)
    else:
        print(green(" Good! ") + "You have the latest version "
              "available.\n")
def main():
    """Parse command-line flags and install the selected community categories."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", help="Download everything", action="store_true", required=False)
    parser.add_argument("-s", "--signatures", help="Download Cuckoo signatures", action="store_true", required=False)
    parser.add_argument("-f", "--force", help="Install files without confirmation", action="store_true", required=False)
    parser.add_argument("-w", "--rewrite", help="Rewrite existing files", action="store_true", required=False)
    args = parser.parse_args()

    # "signatures" is the only category wired up, so --all is equivalent to -s.
    enabled = ["signatures"] if (args.all or args.signatures) else []
    if not enabled:
        print(colors.red("You need to enable some category!\n"))
        parser.print_help()
        return

    install(enabled, bool(args.force), bool(args.rewrite))
def main():
    """Parse command-line flags and install the selected community categories.

    Fix: -M/--monitor was accepted by argparse and honored under --all, but
    never checked in the per-flag branch, so `-M` alone silently did nothing.
    """
    global URL
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", help="Download everything", action="store_true", required=False)
    parser.add_argument("-s", "--signatures", help="Download Cuckoo signatures", action="store_true", required=False)
    parser.add_argument("-p", "--processing", help="Download processing modules", action="store_true", required=False)
    parser.add_argument("-m", "--machinery", help="Download machine managers", action="store_true", required=False)
    parser.add_argument("-n", "--analyzer", help="Download analyzer modules", action="store_true", required=False)
    parser.add_argument("-M", "--monitor", help="Download monitoring binaries", action="store_true", required=False)
    parser.add_argument("-g", "--agent", help="Download agent modules", action="store_true", required=False)
    parser.add_argument("-r", "--reporting", help="Download reporting modules", action="store_true", required=False)
    parser.add_argument("-f", "--force", help="Install files without confirmation", action="store_true", required=False)
    parser.add_argument("-w", "--rewrite", help="Rewrite existing files", action="store_true", required=False)
    parser.add_argument("-b", "--branch", help="Specify a different branch", action="store", default="master", required=False)
    parser.add_argument("archive", help="Install a stored archive", nargs="?")
    args = parser.parse_args()

    enabled = []
    if args.all:
        enabled = ["processing", "signatures", "reporting", "machinery",
                   "analyzer", "monitor", "agent"]
    else:
        if args.signatures:
            enabled.append("signatures")
        if args.processing:
            enabled.append("processing")
        if args.reporting:
            enabled.append("reporting")
        if args.machinery:
            enabled.append("machinery")
        if args.analyzer:
            enabled.append("analyzer")
        # BUG FIX: this check was missing, making --monitor a no-op without --all.
        if args.monitor:
            enabled.append("monitor")
        if args.agent:
            enabled.append("agent")

    if not enabled:
        print(colors.red("You need to enable some category!\n"))
        parser.print_help()
        return

    force = bool(args.force)
    rewrite = bool(args.rewrite)
    URL = URL.format(args.branch)
    install(enabled, force, rewrite, args.archive)
def delete_all():
    """Delete ALL tasks in Cuckoo's local processing queue."""
    db = Database()
    tasks = db.list_tasks()  # renamed: `list` shadowed the builtin
    if not tasks:
        print(bold(red("Error")) + ": no tasks to be deleted")
        return
    # BUG FIX: the old loop called db.delete_task(db.count_tasks()), i.e. it
    # deleted the task whose id equals the current queue size on every pass —
    # wrong as soon as ids are non-contiguous. Delete each task by its own id.
    for task in tasks:
        # NOTE(review): assumes Task rows expose an `id` attribute — confirm
        # against the Database API.
        db.delete_task(task.id)
def check_version():
    """Compare the running Cuckoo version against the latest stable release."""
    # Respect the configuration opt-out before any network activity.
    if not Config().cuckoo.version_check:
        return
    print(" Checking for updates...")

    endpoint = "http://api.cuckoosandbox.org/checkversion.php"
    payload = urllib.urlencode({"version": CUCKOO_VERSION})
    try:
        reply = urllib2.urlopen(urllib2.Request(endpoint, payload))
    except (urllib2.URLError, urllib2.HTTPError):
        print(red(" Failed! ") + "Unable to establish connection.\n")
        return

    try:
        parsed = json.loads(reply.read())
    except ValueError:
        print(red(" Failed! ") + "Invalid response.\n")
        return

    stable_version = parsed["current"]
    if CUCKOO_VERSION.endswith("-dev"):
        # Development builds are never "outdated"; just point at stable.
        print(yellow(" You are running a development version! Current stable is {}.".format(
            stable_version)))
        return
    if LooseVersion(CUCKOO_VERSION) < LooseVersion(stable_version):
        print(red(" Outdated! ") +
              "Cuckoo Sandbox version {} is available now.".format(stable_version))
    else:
        print(green(" Good! ") + "You have the latest version "
              "available.\n")
def main():
    """Entry point for (re)processing analyses.

    `auto` processes unprocessed tasks continuously; a numeric id reprocesses
    a single analysis (optionally only re-running signatures on its report).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("id", type=str, help="ID of the analysis to process (auto for continuous processing of unprocessed tasks).")
    parser.add_argument("-c", "--caperesubmit", help="Allow CAPE resubmit processing.", action="store_true", required=False)
    parser.add_argument("-d", "--debug", help="Display debug messages", action="store_true", required=False)
    parser.add_argument("-r", "--report", help="Re-generate report", action="store_true", required=False)
    parser.add_argument("-s", "--signatures", help="Re-execute signatures on the report", action="store_true", required=False)
    parser.add_argument("-p", "--parallel", help="Number of parallel threads to use (auto mode only).", type=int, required=False, default=1)
    parser.add_argument("-fp", "--failed-processing", help="reprocess failed processing", action="store_true", required=False, default=False)
    parser.add_argument("-mc", "--maxtasksperchild", help="Max children tasks per worker", action="store", type=int, required=False, default=7)
    parser.add_argument(
        "-md", "--memory-debugging", help="Enable logging garbage collection related info", action="store_true", required=False, default=False
    )
    parser.add_argument(
        "-pt",
        "--processing-timeout",
        help="Max amount of time spent in processing before we fail a task",
        action="store",
        type=int,
        required=False,
        default=300,
    )
    args = parser.parse_args()

    # Yara/modules must be initialized before any processing path runs.
    init_yara()
    init_modules()

    if args.id == "auto":
        init_logging(auto=True, debug=args.debug)
        autoprocess(
            parallel=args.parallel,
            failed_processing=args.failed_processing,
            maxtasksperchild=args.maxtasksperchild,
            memory_debugging=args.memory_debugging,
            processing_timeout=args.processing_timeout,
        )
        return

    if not os.path.exists(os.path.join(CUCKOO_ROOT, "storage", "analyses", args.id)):
        sys.exit(red("\n[-] Analysis folder doesn't exist anymore\n"))
    init_logging(tid=args.id, debug=args.debug)
    task = Database().view_task(int(args.id))

    if args.signatures:
        report = os.path.join(CUCKOO_ROOT, "storage", "analyses", args.id, "reports", "report.json")
        if not os.path.exists(report):
            # BUG FIX: error message typo ("doest exist").
            sys.exit("File {} does not exist".format(report))
        # BUG FIX: the old code used json.load(open(report)) which leaked the
        # file handle; use a context manager instead.
        with open(report) as handle:
            results = json.load(handle)
        if results is not None:
            RunSignatures(task=task.to_dict(), results=results).run()
    else:
        process(task=task, report=args.report, capeproc=args.caperesubmit, memory_debugging=args.memory_debugging)
def emit(self, record):
    """Colorize the log record by severity, then delegate to StreamHandler."""
    # Work on a copy so the shared record is not mutated for other handlers.
    tinted = copy.copy(record)
    level = record.levelname
    if level == "WARNING":
        tinted.msg = yellow(record.msg)
    elif level in ("ERROR", "CRITICAL"):
        tinted.msg = red(record.msg)
    elif "analysis procedure completed" in record.msg:
        # Highlight the completion milestone message.
        tinted.msg = cyan(record.msg)
    else:
        tinted.msg = record.msg
    logging.StreamHandler.emit(self, tinted)
def main():
    """Parse command-line flags and fetch the chosen Cuckoo community categories."""
    global URL
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", help="Download everything", action="store_true", required=False)
    parser.add_argument("-e", "--feeds", help="Download Cuckoo feed modules", action="store_true", required=False)
    parser.add_argument("-s", "--signatures", help="Download Cuckoo signatures", action="store_true", required=False)
    parser.add_argument("-p", "--processing", help="Download processing modules", action="store_true", required=False)
    parser.add_argument("-m", "--machinemanagers", help="Download machine managers", action="store_true", required=False)
    parser.add_argument("-r", "--reporting", help="Download reporting modules", action="store_true", required=False)
    parser.add_argument("-f", "--force", help="Install files without confirmation", action="store_true", required=False)
    parser.add_argument("-w", "--rewrite", help="Rewrite existing files", action="store_true", required=False)
    parser.add_argument("-b", "--branch", help="Specify a different branch", action="store", default="master", required=False)
    args = parser.parse_args()

    if args.all:
        enabled = ["feeds", "processing", "signatures", "reporting", "machinemanagers"]
    else:
        # Keep the historical per-flag order.
        enabled = [name for name in
                   ("feeds", "signatures", "processing", "reporting", "machinemanagers")
                   if getattr(args, name)]

    if not enabled:
        print(colors.red("You need to enable some category!\n"))
        parser.print_help()
        return

    URL = URL.format(args.branch)
    install(enabled, bool(args.force), bool(args.rewrite))
def start_worker():
    """Pull a task from the MongoHQ DB and queue its URLs in Cuckoo's local DB.

    @return: True when a task was pulled and queued; False when no task was
        available (after a 30-second back-off).
    """
    task = mongo.pull_task()
    # IDIOM FIX: compare against None with `is not`, not `!=`.
    if task is not None:
        global taskid
        # Map the remote task id to its URL and the local Cuckoo task ids.
        taskid[task['_id']] = [task['url'], taskhandler.add_urls(task)]
        return True
    print(bold(red("Error")) + ": no tasks in 'idle' state to pull")
    # Back off so an empty queue doesn't spin the poll loop.
    time.sleep(30)
    return False
def add_urls(task):
    """Queue the task's URL in Cuckoo's local DB and mirror the id to MongoHQ.

    @return: the new Cuckoo task id, or None if queuing failed.
    """
    target = task['url']
    new_id = Database().add_url(target, timeout=task['timeout'], priority=task['priority'])
    if not new_id:
        print(bold(red("Error")) + ": adding task to database")
        return None
    print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(target, new_id))
    mongo.update_tasklist(task, new_id)
    return new_id
def main():
    """Parse command-line flags and install the selected CAPE community categories."""
    global URL
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", help="Download everything", action="store_true", required=False)
    parser.add_argument("-e", "--feeds", help="Download CAPE feed modules", action="store_true", required=False)
    parser.add_argument("-s", "--signatures", help="Download CAPE signatures", action="store_true", required=False)
    parser.add_argument("-p", "--processing", help="Download processing modules", action="store_true", required=False)
    parser.add_argument("-m", "--machinery", help="Download machine managers", action="store_true", required=False)
    parser.add_argument("-r", "--reporting", help="Download reporting modules", action="store_true", required=False)
    parser.add_argument("-an", "--analyzer", help="Download analyzer modules/binaries/etc", action="store_true", required=False)
    parser.add_argument("-data", "--data", help="Download data things", action="store_true", required=False)
    parser.add_argument("-f", "--force", help="Install files without confirmation", action="store_true", default=False, required=False)
    parser.add_argument("-w", "--rewrite", help="Rewrite existing files", action="store_true", required=False)
    parser.add_argument("-b", "--branch", help="Specify a different branch", action="store", default="master", required=False)
    parser.add_argument("--file", help="Specify a local copy of a community .zip file", action="store", default=False, required=False)
    args = parser.parse_args()

    URL = URL.format(args.branch)

    if args.all:
        enabled = ["feeds", "processing", "signatures", "reporting", "machinery", "analyzer", "data"]
    else:
        # Same per-flag order as the original if-chain.
        enabled = [name for name in
                   ("feeds", "signatures", "processing", "reporting", "machinery", "analyzer", "data")
                   if getattr(args, name)]

    if not enabled:
        print(colors.red("You need to enable some category!\n"))
        parser.print_help()
        return

    install(enabled, args.force, args.rewrite, args.file)
def __init__(self, file_name="cuckoo", cfg=None):
    """Load a Cuckoo configuration file and expose sections as attributes.

    @param file_name: file name without extension (resolved under conf/).
    @param cfg: explicit configuration file path; overrides file_name.
    @raise UnicodeDecodeError: when the config file contains undecodable bytes.
    """
    config = configparser.ConfigParser()
    if cfg:
        config.read(cfg)
    else:
        try:
            config.read(os.path.join(CUCKOO_ROOT, "conf", "%s.conf" % file_name))
        except UnicodeDecodeError as e:
            print(
                bold(
                    red(
                        "please fix your config file: {}.conf - Pay attention for bytes c2 xa - {}\n\n{}".format(
                            file_name, e.object, e.reason
                        )
                    )
                )
            )
            # BUG FIX: re-raise the caught exception. The old `raise
            # UnicodeDecodeError` instantiated the class with no arguments,
            # which raises TypeError and masks the real decoding error.
            raise
    # Keep the raw section dict for callers that want untyped access.
    self.fullconfig = config._sections
    for section in config.sections():
        setattr(self, section, Dictionary())
        for name, _ in config.items(section):
            try:
                # Ugly fix to avoid '0' and '1' to be parsed as a
                # boolean value: force the ValueError path so they
                # are parsed as integers below.
                if config.get(section, name) in ["0", "1"]:
                    raise ValueError
                value = config.getboolean(section, name)
            except ValueError:
                try:
                    value = config.getint(section, name)
                except ValueError:
                    value = config.get(section, name)
            setattr(getattr(self, section), name, value)
def add_urls(task):
    """For every URL, add to Cuckoo's local processing queue and get task_id
    for all URLs to save to MongoHQ DB.
    @return tasklist"""
    # BUG FIX: initialize the id list locally. The previous code appended to a
    # module-level `tasklist`, so ids from earlier tasks leaked into every
    # subsequent task's list (and the function raised NameError without that
    # global existing).
    tasklist = []
    # Open a single Database handle instead of one per URL.
    db = Database()
    for url in task['urls']:
        task_id = db.add_url(url, timeout=task['timeout'], priority=task['priority'])
        if task_id:
            tasklist.append(task_id)
            print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(url, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    mongo.update_tasklist(task, tasklist)
    return tasklist
def _read_files(self, files: Iterable[str]) -> None:
    """Parse the given config files and expose each section as a Dictionary
    attribute on self, with values coerced to bool/int/str.

    Environment variables are injected as ConfigParser DEFAULTS under
    "ENV:<name>" keys so values can reference them via interpolation; the
    injected defaults themselves are skipped when building section attributes.
    """
    # Escape the percent signs so that ConfigParser doesn't try to do
    # interpolation of the value as well.
    config = configparser.ConfigParser({
        f"ENV:{key}": val.replace("%", "%%")
        for key, val in os.environ.items()
    })
    try:
        config.read(files)
    except UnicodeDecodeError as e:
        # Surface which file is broken, then propagate the original error.
        print(
            bold(
                red(f"please fix your config file(s): {', '.join(files)} - "
                f"Pay attention for bytes c2 xa - {e.object}\n\n{e.reason}"
                )))
        raise
    # Keep the raw parsed sections for callers that want untyped access.
    self.fullconfig = config._sections
    for section in config.sections():
        dct = Dictionary()
        for name, value in config.items(section):
            # ConfigParser lowercases option names, so the injected
            # "ENV:<name>" defaults show up here as "env:<name>" — skip them.
            if name.startswith("env:"):
                continue
            try:
                # Ugly fix to avoid '0' and '1' to be parsed as a boolean value.
                # We raise an exception to parse it as an integer.
                if value in {"0", "1"}:
                    raise ValueError
                value = config.getboolean(section, name)
            except ValueError:
                try:
                    value = config.getint(section, name)
                except ValueError:
                    value = config.get(section, name)
            setattr(dct, name, value)
        setattr(self, section, dct)
def delete_all():
    """Delete ALL tasks in Cuckoo's local processing queue and clears any existing analysis files"""
    db = Database()
    tasks = db.list_tasks()  # renamed: `list` shadowed the builtin
    if not tasks:
        print(bold(red("Error")) + ": no tasks to be deleted")
    else:
        # BUG FIX: the old loop called db.delete_task(db.count_tasks()), i.e.
        # deleted the task whose id equals the current queue size — wrong for
        # sparse/non-contiguous ids. Delete each task by its own id.
        for task in tasks:
            # NOTE(review): assumes Task rows expose an `id` attribute —
            # confirm against the Database API.
            db.delete_task(task.id)
    # Wipe everything under storage/analyses: files, symlinks, directories.
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "storage", "analyses")
    for root, dirs, files in os.walk(path):
        for f in files:
            os.unlink(os.path.join(root, f))
        for d in dirs:
            full = os.path.join(root, d)
            if os.path.islink(full):
                # Never rmtree through a symlink; just remove the link itself.
                os.unlink(full)
            else:
                shutil.rmtree(full)
def main():
    """Command-line entry point: submit URLs, files/folders, or a baseline
    analysis, either to the local Cuckoo database or to a remote API server.

    Fixes: Python 2 `print` statements converted to function calls (they are
    syntax errors under Python 3 and were inconsistent with the rest of this
    function); FileEntropy() is now computed once instead of twice; `not x is
    None` rewritten as `x is not None`.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("target", type=str, nargs="?", help="URL, path to the file or folder to analyze")
    parser.add_argument("-d", "--debug", action="store_true", help="Enable debug logging")
    parser.add_argument("--remote", type=str, action="store", default=None, help="Specify IP:port to a Cuckoo API server to submit remotely", required=False)
    parser.add_argument("--url", action="store_true", default=False, help="Specify whether the target is an URL", required=False)
    parser.add_argument("--package", type=str, action="store", default="", help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="", help="Specify any custom value", required=False)
    parser.add_argument("--owner", type=str, action="store", default="", help="Specify the task owner", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0, help="Specify an analysis timeout", required=False)
    parser.add_argument("-o", "--options", type=str, action="store", default="", help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")", required=False)
    parser.add_argument("--priority", type=int, action="store", default=1, help="Specify a priority for the analysis represented by an integer", required=False)
    parser.add_argument("--machine", type=str, action="store", default="", help="Specify the identifier of a machine you want to use", required=False)
    parser.add_argument("--platform", type=str, action="store", default="", help="Specify the operating system platform you want to use (windows/darwin/linux)", required=False)
    parser.add_argument("--memory", action="store_true", default=False, help="Enable to take a memory dump of the analysis machine", required=False)
    parser.add_argument("--enforce-timeout", action="store_true", default=False, help="Enable to force the analysis to run for the full timeout period", required=False)
    parser.add_argument("--clock", type=str, action="store", default=None, help="Set virtual machine clock", required=False)
    parser.add_argument("--tags", type=str, action="store", default=None, help="Specify tags identifier of a machine you want to use", required=False)
    parser.add_argument("--baseline", action="store_true", default=None, help="Run a baseline analysis", required=False)
    parser.add_argument("--max", type=int, action="store", default=None, help="Maximum samples to add in a row", required=False)
    parser.add_argument("--pattern", type=str, action="store", default=None, help="Pattern of files to submit", required=False)
    parser.add_argument("--shuffle", action="store_true", default=False, help="Shuffle samples before submitting them", required=False)
    parser.add_argument("--unique", action="store_true", default=False, help="Only submit new samples, ignore duplicates", required=False)
    parser.add_argument("--quiet", action="store_true", default=False, help="Only print text on failure", required=False)
    parser.add_argument("--PIN", help="\033[91mForce PINDemonium \o/ \033[0m", action='store_true', required=False)
    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    print(args.target)
    if args.target:
        # BUG FIX: compute entropy once; the old code called FileEntropy()
        # twice, re-reading the target just to print the value.
        entropy = FileEntropy(args.target)
        if entropy > 7.5:
            # High entropy suggests a packed sample: enable PINDemonium.
            print(green("We detected high entropy on the file. Trying to unpack.\n"))
            args.PIN = True
        else:
            print(entropy, args.target)

    # Add PINDemonium as an option
    if args.PIN:
        args.package = 'exePIN'
        Pin = PINParser()
        print(
            "\033[91m\n\t\t Please specify arguments for PIN. If no arguments are specified, it will run with default args within 10 secs. \n\t\t Here is a reminder : \033[0m\n"
        )
        # We use a timeout in case the file is automatically sent so the
        # analysis is not stuck on waiting for arguments.
        Pin.Help()
        print("\n\t\tPress enter to continue")
        i, o, e = select.select([sys.stdin], [], [], 10)
        if i:
            sys.stdin.readline()
            sys.stdout.write(">>> ")
            Args = Pin.Parse(raw_input())
            args.package = args.package + ";" + Args
        else:
            print("Timeout, launching analysis with default args.")

    if not args.baseline and not args.target:
        print("No file or URL has been specified!")
        exit(1)

    # If the quiet flag has been set, then we also disable the "warning"
    # level of the logging module. (E.g., when pydeep has not been installed,
    # there will be a warning message, because Cuckoo can't resolve the
    # ssdeep hash of this particular sample.)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig()
    if args.quiet:
        logging.disable(logging.WARNING)

    db = Database()
    if args.url:
        target = to_unicode(args.target)
        if args.remote:
            if not HAVE_REQUESTS:
                print(bold(red("Error")) + ": you need to install python-requests (`pip install requests`)")
                return False
            url = "http://{0}/tasks/create/url".format(args.remote)
            data = dict(url=target, package=args.package, timeout=args.timeout,
                        options=args.options, priority=args.priority,
                        machine=args.machine, platform=args.platform,
                        memory=args.memory, enforce_timeout=args.enforce_timeout,
                        custom=args.custom, owner=args.owner, tags=args.tags)
            try:
                response = requests.post(url, data=data)
            except Exception as e:
                print(bold(red("Error")) + ": unable to send URL: {0}".format(e))
                return False
            json = response.json()
            task_id = json["task_id"]
        else:
            task_id = db.add_url(target, package=args.package, timeout=args.timeout,
                                 options=args.options, priority=args.priority,
                                 machine=args.machine, platform=args.platform,
                                 custom=args.custom, owner=args.owner,
                                 memory=args.memory, enforce_timeout=args.enforce_timeout,
                                 clock=args.clock, tags=args.tags)
        if task_id:
            if not args.quiet:
                print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(target, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    elif args.baseline:
        if args.remote:
            print("Remote baseline support has not yet been implemented.")
            exit(1)
        task_id = db.add_baseline(args.timeout, args.owner, args.machine, args.memory)
        if task_id:
            if not args.quiet:
                print(bold(green("Success")) + u": Baseline analysis added as task with ID {0}".format(task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    else:
        target = to_unicode(args.target)
        # Get absolute path to deal with relative.
        path = to_unicode(os.path.abspath(target))
        if not os.path.exists(path):
            print(bold(red("Error")) + u": the specified file/folder does not exist at path \"{0}\"".format(path))
            return False
        files = []
        if os.path.isdir(path):
            for dirname, dirnames, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)
                    if os.path.isfile(file_path):
                        if args.pattern:
                            if fnmatch.fnmatch(file_name, args.pattern):
                                files.append(to_unicode(file_path))
                        else:
                            files.append(to_unicode(file_path))
        else:
            files.append(path)

        if args.shuffle:
            random.shuffle(files)
        else:
            files = sorted(files)

        for file_path in files:
            if not File(file_path).get_size():
                if not args.quiet:
                    print(bold(yellow("Empty") + ": sample {0} (skipping file)".format(file_path)))
                continue
            if args.max is not None:
                # Break if the maximum number of samples has been reached.
                if not args.max:
                    break
                args.max -= 1
            if args.remote:
                if not HAVE_REQUESTS:
                    print(bold(red("Error")) + ": you need to install python-requests (`pip install requests`)")
                    return False
                url = "http://{0}/tasks/create/file".format(args.remote)
                files = dict(file=open(file_path, "rb"), filename=os.path.basename(file_path))
                data = dict(package=args.package, timeout=args.timeout,
                            options=args.options, priority=args.priority,
                            machine=args.machine, platform=args.platform,
                            memory=args.memory, enforce_timeout=args.enforce_timeout,
                            custom=args.custom, owner=args.owner, tags=args.tags)
                try:
                    response = requests.post(url, files=files, data=data)
                except Exception as e:
                    print(bold(red("Error")) + ": unable to send file: {0}".format(e))
                    return False
                json = response.json()
                task_id = json["task_id"]
            else:
                if args.unique:
                    sha256 = File(file_path).get_sha256()
                    # IDIOM FIX: was `not ... is None`.
                    if db.find_sample(sha256=sha256) is not None:
                        msg = ": Sample {0} (skipping file)".format(file_path)
                        if not args.quiet:
                            print(bold(yellow("Duplicate")) + msg)
                        continue
                task_id = db.add_path(
                    file_path=file_path,
                    package=args.package,
                    timeout=args.timeout,
                    options=args.options,
                    priority=args.priority,
                    machine=args.machine,
                    platform=args.platform,
                    custom=args.custom,
                    owner=args.owner,
                    memory=args.memory,
                    enforce_timeout=args.enforce_timeout,
                    clock=args.clock,
                    tags=args.tags,
                )
            if task_id:
                if not args.quiet:
                    print(bold(green("Success")) + u": File \"{0}\" added as task with ID {1}".format(file_path, task_id))
            else:
                print(bold(red("Error")) + ": adding task to database")
def main():
    """Parse command-line flags and install the selected CAPE community
    categories, optionally fetching capa rules and MITRE JSONs."""
    global URL
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", help="Download everything", action="store_true", required=False)
    parser.add_argument("-e", "--feeds", help="Download CAPE feed modules", action="store_true", required=False)
    parser.add_argument("-s", "--signatures", help="Download CAPE signatures", action="store_true", required=False)
    parser.add_argument("-p", "--processing", help="Download processing modules", action="store_true", required=False)
    parser.add_argument("-m", "--machinery", help="Download machine managers", action="store_true", required=False)
    parser.add_argument("-r", "--reporting", help="Download reporting modules", action="store_true", required=False)
    parser.add_argument("-an", "--analyzer", help="Download analyzer modules/binaries/etc", action="store_true", required=False)
    parser.add_argument("-data", "--data", help="Download data items", action="store_true", required=False)
    parser.add_argument(
        "-f", "--force", help="Install files without confirmation", action="store_true", default=False, required=False
    )
    parser.add_argument("-w", "--rewrite", help="Rewrite existing files", action="store_true", required=False)
    parser.add_argument("-b", "--branch", help="Specify a different branch", action="store", default="master", required=False)
    parser.add_argument(
        "--file", help="Specify a local copy of a community .zip file", action="store", default=False, required=False
    )
    parser.add_argument(
        "-cr", "--capa-rules", help="Download capa rules and signatures", action="store_true", default=False, required=False
    )
    parser.add_argument("--mitre", help="Download updated MITRE JSONS", action="store_true", default=False, required=False)
    parser.add_argument(
        "-u", "--url", help="Download community modules from the specified url", action="store", default=None, required=False
    )
    parser.add_argument(
        "-t", "--token", help="Access token to download private repositories", action="store", default=None, required=False
    )
    args = parser.parse_args()

    # An explicit --url wins over the branch-templated default.
    URL = args.url or URL.format(args.branch)

    if args.all:
        enabled = ["feeds", "processing", "signatures", "reporting", "machinery", "analyzer", "data"]
        flare_capa()
    else:
        # Same per-flag order as the original if-chain.
        enabled = [name for name in
                   ("feeds", "signatures", "processing", "reporting", "machinery", "analyzer", "data")
                   if getattr(args, name)]
        if args.capa_rules:
            flare_capa()
            # A capa-only invocation is legitimate: stop quietly.
            if not enabled:
                return
        if args.mitre:
            mitre()
            # Likewise for a MITRE-only invocation.
            if not enabled:
                return

    if not enabled:
        print(colors.red("You need to enable a category!\n"))
        parser.print_help()
        return

    install(enabled, args.force, args.rewrite, args.file, args.token)
def main():
    """Submit a URL, a file, or a folder of files for analysis.

    Submission is either local (via the Database) or remote (via the Cuckoo
    REST API when --remote is given). Returns False on failure, None otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("target", type=str, help="URL, path to the file or folder to analyze")
    parser.add_argument("--remote", type=str, action="store", default=None, help="Specify IP:port to a Cuckoo API server to submit remotely", required=False)
    parser.add_argument("--user", type=str, action="store", default=None, help="Username for Basic Auth", required=False)
    parser.add_argument("--password", type=str, action="store", default=None, help="Password for Basic Auth", required=False)
    parser.add_argument("--sslnoverify", action="store_true", default=False, help="Do not validate SSL cert", required=False)
    parser.add_argument("--ssl", action="store_true", default=False, help="Use SSL/TLS for remote", required=False)
    parser.add_argument("--url", action="store_true", default=False, help="Specify whether the target is an URL", required=False)
    parser.add_argument("--package", type=str, action="store", default="", help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="", help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0, help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="", help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")", required=False)
    parser.add_argument("--priority", type=int, action="store", default=1, help="Specify a priority for the analysis represented by an integer", required=False)
    parser.add_argument("--machine", type=str, action="store", default="", help="Specify the identifier of a machine you want to use", required=False)
    parser.add_argument("--platform", type=str, action="store", default="", help="Specify the operating system platform you want to use (windows/darwin/linux)", required=False)
    parser.add_argument("--memory", action="store_true", default=False, help="Enable to take a memory dump of the analysis machine", required=False)
    parser.add_argument("--enforce-timeout", action="store_true", default=False, help="Enable to force the analysis to run for the full timeout period", required=False)
    parser.add_argument("--clock", type=str, action="store", default=None, help="Set virtual machine clock", required=False)
    parser.add_argument("--tags", type=str, action="store", default=None, help="Specify tags identifier of a machine you want to use", required=False)
    parser.add_argument("--max", type=int, action="store", default=None, help="Maximum samples to add in a row", required=False)
    parser.add_argument("--pattern", type=str, action="store", default=None, help="Pattern of files to submit", required=False)
    parser.add_argument("--shuffle", action="store_true", default=False, help="Shuffle samples before submitting them", required=False)
    parser.add_argument("--unique", action="store_true", default=False, help="Only submit new samples, ignore duplicates", required=False)
    parser.add_argument("--quiet", action="store_true", default=False, help="Only print text on failure", required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    # If the quiet flag has been set, then we also disable the "warning"
    # level of the logging module. (E.g., when pydeep has not been installed,
    # there will be a warning message, because Cuckoo can't resolve the
    # ssdeep hash of this particular sample.)
    if args.quiet:
        logging.disable(logging.WARNING)

    db = Database()
    target = to_unicode(args.target)

    # Basic Auth credentials (only when both parts are supplied); requests
    # accepts auth=None as "no authentication".
    auth = (args.user, args.password) if args.user and args.password else None
    # Honour --sslnoverify only on HTTPS connections.
    verify = not args.sslnoverify

    if args.url:
        if args.remote:
            if not HAVE_REQUESTS:
                print(bold(red("Error")) + ": you need to install python-requests (`pip install requests`)")
                return False

            if args.ssl:
                url = "https://{0}/tasks/create/url".format(args.remote)
            else:
                url = "http://{0}/tasks/create/url".format(args.remote)

            data = dict(
                url=target,
                package=args.package,
                timeout=args.timeout,
                options=args.options,
                priority=args.priority,
                machine=args.machine,
                platform=args.platform,
                memory=args.memory,
                enforce_timeout=args.enforce_timeout,
                custom=args.custom,
                tags=args.tags
            )

            try:
                if args.ssl:
                    response = requests.post(url, auth=auth, data=data, verify=verify)
                else:
                    response = requests.post(url, auth=auth, data=data)
            except Exception as e:
                print(bold(red("Error")) + ": unable to send URL: {0}".format(e))
                return False

            # Renamed from `json` to avoid shadowing the json module.
            response_json = response.json()
            task_id = response_json["task_id"]
        else:
            task_id = db.add_url(target,
                                 package=args.package,
                                 timeout=args.timeout,
                                 options=args.options,
                                 priority=args.priority,
                                 machine=args.machine,
                                 platform=args.platform,
                                 custom=args.custom,
                                 memory=args.memory,
                                 enforce_timeout=args.enforce_timeout,
                                 clock=args.clock,
                                 tags=args.tags)

        if task_id:
            if not args.quiet:
                print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(target, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    else:
        # Get absolute path to deal with relative.
        path = to_unicode(os.path.abspath(target))

        if not os.path.exists(path):
            print(bold(red("Error")) + u": the specified file/folder does not exist at path \"{0}\"".format(path))
            return False

        files = []
        if os.path.isdir(path):
            for dirname, dirnames, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)
                    if os.path.isfile(file_path):
                        # When a glob pattern is given, keep only matching names.
                        if args.pattern:
                            if fnmatch.fnmatch(file_name, args.pattern):
                                files.append(to_unicode(file_path))
                        else:
                            files.append(to_unicode(file_path))
        else:
            files.append(path)

        if args.shuffle:
            random.shuffle(files)

        for file_path in files:
            if not File(file_path).get_size():
                if not args.quiet:
                    # BUGFIX: parenthesis was misplaced so bold() wrapped the
                    # whole message; now matches the "Duplicate" print below.
                    print(bold(yellow("Empty")) + ": sample {0} (skipping file)".format(file_path))
                continue

            if args.max is not None:
                # Break if the maximum number of samples has been reached.
                if not args.max:
                    break
                args.max -= 1

            if args.remote:
                if not HAVE_REQUESTS:
                    print(bold(red("Error")) + ": you need to install python-requests (`pip install requests`)")
                    return False

                if args.ssl:
                    url = "https://{0}/tasks/create/file".format(args.remote)
                else:
                    url = "http://{0}/tasks/create/file".format(args.remote)

                data = dict(
                    package=args.package,
                    timeout=args.timeout,
                    options=args.options,
                    priority=args.priority,
                    machine=args.machine,
                    platform=args.platform,
                    memory=args.memory,
                    enforce_timeout=args.enforce_timeout,
                    custom=args.custom,
                    tags=args.tags
                )

                try:
                    # BUGFIX: the file handle was never closed and the dict
                    # shadowed the `files` list being iterated; use a context
                    # manager and a distinct name.
                    with open(file_path, "rb") as sample:
                        multipart = dict(
                            file=sample,
                            filename=os.path.basename(file_path)
                        )
                        if args.ssl:
                            response = requests.post(url, auth=auth, files=multipart, data=data, verify=verify)
                        else:
                            response = requests.post(url, auth=auth, files=multipart, data=data)
                except Exception as e:
                    print(bold(red("Error")) + ": unable to send file: {0}".format(e))
                    return False

                response_json = response.json()
                task_id = response_json["task_id"]
            else:
                if args.unique:
                    sha256 = File(file_path).get_sha256()
                    # Skip samples already present in the database.
                    if db.find_sample(sha256=sha256) is not None:
                        msg = ": Sample {0} (skipping file)".format(file_path)
                        if not args.quiet:
                            print(bold(yellow("Duplicate")) + msg)
                        continue

                task_id = db.add_path(file_path=file_path,
                                      package=args.package,
                                      timeout=args.timeout,
                                      options=args.options,
                                      priority=args.priority,
                                      machine=args.machine,
                                      platform=args.platform,
                                      custom=args.custom,
                                      memory=args.memory,
                                      enforce_timeout=args.enforce_timeout,
                                      clock=args.clock,
                                      tags=args.tags)

            if task_id:
                if not args.quiet:
                    print(bold(green("Success")) + u": File \"{0}\" added as task with ID {1}".format(file_path, task_id))
            else:
                print(bold(red("Error")) + ": adding task to database")
if task != None: global tid_list tid_list = taskhandler.add_urls(task) # print tid_list return task['_id'] else: return None def check_status(_id): """Periodically check whether all URL analysis is finished.""" try: while True: time.sleep(5) task_status = taskhandler.task_done(tid_list) if task_status == False: print(bold(yellow("Task Not Done")) + ": still processing URLs") if task_status == True: print (bold(green("Task Completed"))+ ": All URLs done have been analyzed") mongo.task_done(_id) break except KeyboardInterrupt: return if __name__ == '__main__': _id = start_worker() if _id == None: print(bold(red("Error")) + ": no tasks in 'idle' state to pull") else: check_status(_id)
def main():
    """Process one analysis by id, re-run signatures on it, or run the
    continuous auto-processing loop (id == "auto")."""
    parser = argparse.ArgumentParser()
    parser.add_argument("id", type=str, help="ID of the analysis to process (auto for continuous processing of unprocessed tasks).")
    parser.add_argument("-c", "--caperesubmit", help="Allow CAPE resubmit processing.", action="store_true", required=False)
    parser.add_argument("-d", "--debug", help="Display debug messages", action="store_true", required=False)
    parser.add_argument("-r", "--report", help="Re-generate report", action="store_true", required=False)
    parser.add_argument(
        "-p", "--parallel", help="Number of parallel threads to use (auto mode only).", type=int, required=False, default=1
    )
    parser.add_argument(
        "-fp", "--failed-processing", help="reprocess failed processing", action="store_true", required=False, default=False
    )
    parser.add_argument(
        "-mc", "--maxtasksperchild", help="Max children tasks per worker", action="store", type=int, required=False, default=7
    )
    parser.add_argument(
        "-md",
        "--memory-debugging",
        help="Enable logging garbage collection related info",
        action="store_true",
        required=False,
        default=False,
    )
    parser.add_argument(
        "-pt",
        "--processing-timeout",
        help="Max amount of time spent in processing before we fail a task",
        action="store",
        type=int,
        required=False,
        default=300,
    )
    testing_args = parser.add_argument_group("Signature testing options")
    testing_args.add_argument(
        "-sig",
        "--signatures",
        help="Re-execute signatures on the report, doesn't work for signature with self.get_raw_argument, use self.get_argument",
        action="store_true",
        default=False,
        required=False,
    )
    testing_args.add_argument(
        "-sn",
        "--signature-name",
        help="Run only one signature. To be used with --signature. Example -sig -sn cape_detected_threat",
        action="store",
        default=False,
        required=False,
    )
    testing_args.add_argument(
        "-jr",
        "--json-report",
        help="Path to json report, only if data not in mongo/default report location",
        action="store",
        default=False,
        required=False,
    )
    args = parser.parse_args()

    init_yara()
    init_modules()
    if args.id == "auto":
        init_logging(auto=True, debug=args.debug)
        autoprocess(
            parallel=args.parallel,
            failed_processing=args.failed_processing,
            maxtasksperchild=args.maxtasksperchild,
            memory_debugging=args.memory_debugging,
            processing_timeout=args.processing_timeout,
        )
    else:
        if not os.path.exists(os.path.join(CUCKOO_ROOT, "storage", "analyses", args.id)):
            sys.exit(red("\n[-] Analysis folder doesn't exist anymore\n"))
        init_logging(tid=args.id, debug=args.debug)
        task = Database().view_task(int(args.id))
        if args.signatures:
            conn = False
            report = False
            # BUGFIX: initialize results so the fallback check below cannot
            # raise NameError when mongodb is disabled.
            results = False
            # check mongo
            if repconf.mongodb.enabled:
                conn, _, results = _load_mongo_report(int(args.id), return_one=True)
            if not results:
                # fallback to json
                report = os.path.join(CUCKOO_ROOT, "storage", "analyses", args.id, "reports", "report.json")
                if not os.path.exists(report):
                    # BUGFIX: the condition was inverted — the user-supplied
                    # report must be used only when it actually exists.
                    if args.json_report and os.path.exists(args.json_report):
                        report = args.json_report
                    else:
                        sys.exit("File {} doesn't exist".format(report))
                if report:
                    # BUGFIX: close the report file (was json.load(open(report))).
                    with open(report) as report_file:
                        results = json.load(report_file)
            if results is not None:
                RunSignatures(task=task.to_dict(), results=results).run(args.signature_name)
        else:
            process(task=task, report=args.report, capeproc=args.caperesubmit, memory_debugging=args.memory_debugging)