def check_status(_id):
    """Periodically check whether all URL analysis is finished.

    Polls task_done() every 5 seconds until it reports completion.

    @param _id: identifier of the task (list) to poll.
    Ctrl-C (KeyboardInterrupt) aborts the polling loop and returns.
    """
    try:
        while True:
            time.sleep(5)
            # BUG FIX: the original polled an undefined name `tid` instead of
            # the `_id` parameter, raising NameError on the first iteration.
            task_status = task_done(_id)
            if task_status is False:
                print(bold(yellow("Task Not Done")) + ": still processing URLs")
            if task_status is True:
                # BUG FIX: message was garbled ("All URLs done have been analyzed").
                print(bold(green("Task Completed")) + ": All URLs have been analyzed")
                break
    except KeyboardInterrupt:
        return
def main():
    """Standalone entry point: move reported Cuckoo analyses into a
    machine-specific archive directory, leaving symlinks behind so the
    original analysis paths keep working."""
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option('-a', '--archive-toplevel-dir',
                      default='/mnt/cuckoo_archive',
                      help='Archive top-level directory [default: %default]')
    parser.add_option('-m', '--local-machine-dir',
                      default=socket.gethostname(),
                      help='Machine-specific directory [default: $HOST]')
    options, args = parser.parse_args()

    # This tool accepts no positional arguments.
    if args:
        parser.print_help()
        return 2

    dest_root = os.path.join(options.archive_toplevel_dir,
                             options.local_machine_dir)
    try:
        os.mkdir(dest_root)
    except OSError:
        # Destination already exists (or is otherwise unavailable).
        pass

    db = Database()
    for task in db.list_tasks(status=TASK_REPORTED):
        src = _analysis_dir(task.id)
        if os.path.islink(src):
            # Already archived on a previous run; skip.
            continue
        dst = os.path.join(dest_root, str(task.id))
        move(src, dst)
        os.symlink(dst, src)
        print(bold(green('Successfully')) + ' archived %s' % dst)
def add_urls(task):
    """Queue the single URL carried by *task* in Cuckoo's local database and
    record the resulting task id in the Mongo tasklist.

    @param task: dict with 'url', 'timeout' and 'priority' keys.
    @return: the new Cuckoo task id, or None when the insert failed.
    """
    target = task['url']
    new_id = Database().add_url(target,
                                timeout=task['timeout'],
                                priority=task['priority'])
    if not new_id:
        print(bold(red("Error")) + ": adding task to database")
        return None

    print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(target, new_id))
    mongo.update_tasklist(task, new_id)
    return new_id
def run(self):
    """Worker loop: poll the pending Cuckoo tasks, and for each finished one
    mark it done in Mongo, ship its analysis results, and drop it from the
    pending map.

    Runs forever with a 1-second sleep between passes. `taskid` (module
    global) maps mongoid -> [url, cuckoo_task_id].
    """
    while True:
        if taskid:
            # Iterate over a snapshot: popping from `taskid` while iterating
            # the live dict raises RuntimeError on Python 3.
            for mongoid, task in list(taskid.items()):
                if taskhandler.task_done(task[1]):
                    # BUG FIX: the original wrote `print(...) % task[1]`,
                    # applying `%` to print()'s return value (None) and
                    # raising TypeError. Format the message first.
                    print(bold(green("Completed")) + ": Task %d" % task[1])
                    mongo.task_done(mongoid)
                    sendAnalysis(task[1], task[0])
                    taskid.pop(mongoid, None)
        time.sleep(1)
def delete_all():
    """Delete ALL tasks in Cuckoo's local processing queue."""
    db = Database()
    tasks = db.list_tasks()  # renamed: `list` shadowed the builtin
    if not tasks:
        print(bold(red("Error")) + ": no tasks to be deleted")
    else:
        for task in tasks:
            # BUG FIX: the original called db.delete_task(db.count_tasks()),
            # passing the task COUNT as an id on every iteration — that only
            # deletes everything when ids happen to be a contiguous 1..N
            # range. Delete each listed task by its own id instead.
            db.delete_task(task.id)
def add_urls(task):
    """For every URL, add to Cuckoo's local processing queue and get task_id
    for all URLs to save to MongoHQ DB.

    @param task: dict with 'urls' (iterable), 'timeout' and 'priority' keys.
    @return tasklist: list of Cuckoo task ids for the successfully added URLs.
    """
    # BUG FIX: `tasklist` was an implicit module-level global, so task ids
    # accumulated across successive calls; make it local to this call.
    tasklist = []
    # Hoisted out of the loop: one Database handle instead of one per URL.
    db = Database()
    for url in task['urls']:
        task_id = db.add_url(url, timeout=task['timeout'], priority=task['priority'])
        if task_id:
            tasklist.append(task_id)
            print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(url, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    mongo.update_tasklist(task, tasklist)
    return tasklist
def start_worker():
    """Pull a task from MongoHQ DB and add URLs for processing queue in
    Cuckoo db.

    @return: True when a task was pulled and queued; False when no 'idle'
             task was available (after a 30-second back-off).
    """
    task = mongo.pull_task()
    # Idiom fix: compare against None with `is`, not `!=`.
    if task is not None:
        global taskid
        # Map the Mongo document id to [url, corresponding cuckoo task id(s)].
        taskid[task['_id']] = [task['url'], taskhandler.add_urls(task)]
        return True

    print(bold(red("Error")) + ": no tasks in 'idle' state to pull")
    time.sleep(30)  # back off so an empty queue is not polled in a tight loop
    return False
def __init__(self, file_name="cuckoo", cfg=None):
    """Load a Cuckoo configuration file into per-section attributes.

    @param file_name: file name without extension (resolved under
        CUCKOO_ROOT/conf when *cfg* is not given).
    @param cfg: configuration file path.
    @raise UnicodeDecodeError: re-raised (after printing a hint) when the
        default config file contains undecodable bytes.
    """
    config = configparser.ConfigParser()
    if cfg:
        config.read(cfg)
    else:
        try:
            config.read(os.path.join(CUCKOO_ROOT, "conf", "%s.conf" % file_name))
        except UnicodeDecodeError as e:
            print(
                bold(
                    red(
                        "please fix your config file: {}.conf - Pay attention for bytes c2 xa - {}\n\n{}".format(
                            file_name, e.object, e.reason
                        )
                    )
                )
            )
            # BUG FIX: `raise UnicodeDecodeError` re-raises the exception
            # *class*; Python then tries to instantiate it with no arguments
            # and fails with a TypeError, masking the real error. A bare
            # `raise` re-raises the original exception instead.
            raise

    # Keep the raw parsed sections for callers wanting plain dict access.
    self.fullconfig = config._sections

    for section in config.sections():
        setattr(self, section, Dictionary())
        for name, _ in config.items(section):
            try:
                # Ugly fix to avoid '0' and '1' to be parsed as a
                # boolean value. We raise an exception to parse it
                # as an integer instead.
                if config.get(section, name) in ["0", "1"]:
                    raise ValueError

                value = config.getboolean(section, name)
            except ValueError:
                try:
                    value = config.getint(section, name)
                except ValueError:
                    value = config.get(section, name)

            setattr(getattr(self, section), name, value)
def _read_files(self, files: Iterable[str]):
    """Parse the given config file(s) into per-section Dictionary attributes.

    Environment variables are exposed to interpolation as ENV:<name>
    defaults (with '%' escaped), and values '0'/'1' are deliberately kept
    as integers rather than booleans.

    @param files: iterable of config file paths (passed to ConfigParser.read).
    @raise UnicodeDecodeError: re-raised after printing a hint about the
        offending file(s).
    """
    # Escape the percent signs so that ConfigParser doesn't try to do
    # interpolation of the value as well.
    config = configparser.ConfigParser({
        f"ENV:{key}": val.replace("%", "%%")
        for key, val in os.environ.items()
    })
    try:
        config.read(files)
    except UnicodeDecodeError as e:
        print(
            bold(
                red(f"please fix your config file(s): {', '.join(files)} - "
                    f"Pay attention for bytes c2 xa - {e.object}\n\n{e.reason}"
                    )))
        raise
    # Keep the raw parsed sections around for callers that want dict access.
    self.fullconfig = config._sections
    for section in config.sections():
        dct = Dictionary()
        for name, value in config.items(section):
            # The ENV:* defaults above appear in every section
            # (ConfigParser lower-cases them to "env:..."); skip them.
            if name.startswith("env:"):
                continue
            try:
                # Ugly fix to avoid '0' and '1' to be parsed as a boolean value.
                # We raise an exception to parse it as an integer.
                if value in {"0", "1"}:
                    raise ValueError
                value = config.getboolean(section, name)
            except ValueError:
                try:
                    value = config.getint(section, name)
                except ValueError:
                    value = config.get(section, name)
            setattr(dct, name, value)
        setattr(self, section, dct)
def delete_all():
    """Delete ALL tasks in Cuckoo's local processing queue and clear any
    existing analysis files from storage/analyses."""
    db = Database()
    tasks = db.list_tasks()  # renamed: `list` shadowed the builtin
    if not tasks:
        print(bold(red("Error")) + ": no tasks to be deleted")
    else:
        for task in tasks:
            # BUG FIX: the original called db.delete_task(db.count_tasks()),
            # passing the task COUNT as an id each time; that only clears the
            # queue when ids are a contiguous 1..N range. Delete by id.
            db.delete_task(task.id)

    # Wipe every analysis artifact under <repo>/storage/analyses.
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "storage", "analyses")
    for root, dirs, files in os.walk(path):
        for f in files:
            os.unlink(os.path.join(root, f))
        for d in dirs:
            full = os.path.join(root, d)
            if os.path.islink(full):
                # Only remove the link itself, never its target.
                os.unlink(full)
            else:
                shutil.rmtree(full)
def main():
    """Command-line submission utility: queue a URL or a file/folder for
    Cuckoo analysis, either in the local database or on a remote Cuckoo API
    server (with optional Basic Auth and SSL).

    Returns False on fatal errors (bad args, missing python-requests,
    network failure, nonexistent path); otherwise falls off the end after
    submitting every target.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("target", type=str, help="URL, path to the file or folder to analyze")
    parser.add_argument("--remote", type=str, action="store", default=None, help="Specify IP:port to a Cuckoo API server to submit remotely", required=False)
    parser.add_argument("--user", type=str, action="store", default=None, help="Username for Basic Auth", required=False)
    parser.add_argument("--password", type=str, action="store", default=None, help="Password for Basic Auth", required=False)
    parser.add_argument("--sslnoverify", action="store_true", default=False, help="Do not validate SSL cert", required=False)
    parser.add_argument("--ssl", action="store_true", default=False, help="Use SSL/TLS for remote", required=False)
    parser.add_argument("--url", action="store_true", default=False, help="Specify whether the target is an URL", required=False)
    parser.add_argument("--package", type=str, action="store", default="", help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="", help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0, help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="", help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")", required=False)
    parser.add_argument("--priority", type=int, action="store", default=1, help="Specify a priority for the analysis represented by an integer", required=False)
    parser.add_argument("--machine", type=str, action="store", default="", help="Specify the identifier of a machine you want to use", required=False)
    parser.add_argument("--platform", type=str, action="store", default="", help="Specify the operating system platform you want to use (windows/darwin/linux)", required=False)
    parser.add_argument("--memory", action="store_true", default=False, help="Enable to take a memory dump of the analysis machine", required=False)
    parser.add_argument("--enforce-timeout", action="store_true", default=False, help="Enable to force the analysis to run for the full timeout period", required=False)
    parser.add_argument("--clock", type=str, action="store", default=None, help="Set virtual machine clock", required=False)
    parser.add_argument("--tags", type=str, action="store", default=None, help="Specify tags identifier of a machine you want to use", required=False)
    parser.add_argument("--max", type=int, action="store", default=None, help="Maximum samples to add in a row", required=False)
    parser.add_argument("--pattern", type=str, action="store", default=None, help="Pattern of files to submit", required=False)
    parser.add_argument("--shuffle", action="store_true", default=False, help="Shuffle samples before submitting them", required=False)
    parser.add_argument("--unique", action="store_true", default=False, help="Only submit new samples, ignore duplicates", required=False)
    parser.add_argument("--quiet", action="store_true", default=False, help="Only print text on failure", required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    # If the quiet flag has been set, then we also disable the "warning"
    # level of the logging module. (E.g., when pydeep has not been installed,
    # there will be a warning message, because Cuckoo can't resolve the
    # ssdeep hash of this particular sample.)
    if args.quiet:
        logging.disable(logging.WARNING)

    db = Database()
    target = to_unicode(args.target)

    if args.url:
        # ------------------------- URL submission -------------------------
        if args.remote:
            # Submit to a remote Cuckoo REST API instead of the local DB.
            if not HAVE_REQUESTS:
                print(bold(red("Error")) + ": you need to install python-requests (`pip install requests`)")
                return False

            if args.ssl:
                url = "https://{0}/tasks/create/url".format(args.remote)
            else:
                url = "http://{0}/tasks/create/url".format(args.remote)

            data = dict(
                url=target,
                package=args.package,
                timeout=args.timeout,
                options=args.options,
                priority=args.priority,
                machine=args.machine,
                platform=args.platform,
                memory=args.memory,
                enforce_timeout=args.enforce_timeout,
                custom=args.custom,
                tags=args.tags
            )

            try:
                # Four POST variants: with/without Basic Auth x with/without
                # TLS (TLS adds the certificate-verification switch).
                if args.user and args.password:
                    if args.ssl:
                        if args.sslnoverify:
                            verify = False
                        else:
                            verify = True
                        response = requests.post(url, auth=(args.user,args.password), data=data,verify=verify)
                    else:
                        response = requests.post(url, auth=(args.user,args.password), data=data)
                else:
                    if args.ssl:
                        if args.sslnoverify:
                            verify = False
                        else:
                            verify = True
                        response = requests.post(url, data=data,verify=verify)
                    else:
                        response = requests.post(url, data=data)
            except Exception as e:
                print(bold(red("Error")) + ": unable to send URL: {0}".format(e))
                return False

            # NOTE: shadows the `json` module name (harmless here, it is not
            # used again in this function).
            json = response.json()
            task_id = json["task_id"]
        else:
            task_id = db.add_url(target,
                                 package=args.package,
                                 timeout=args.timeout,
                                 options=args.options,
                                 priority=args.priority,
                                 machine=args.machine,
                                 platform=args.platform,
                                 custom=args.custom,
                                 memory=args.memory,
                                 enforce_timeout=args.enforce_timeout,
                                 clock=args.clock,
                                 tags=args.tags)

        if task_id:
            if not args.quiet:
                print(bold(green("Success")) + u": URL \"{0}\" added as task with ID {1}".format(target, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    else:
        # --------------------- File / folder submission ---------------------
        # Get absolute path to deal with relative.
        path = to_unicode(os.path.abspath(target))

        if not os.path.exists(path):
            print(bold(red("Error")) + u": the specified file/folder does not exist at path \"{0}\"".format(path))
            return False

        files = []
        if os.path.isdir(path):
            # Recursively collect files, optionally filtered by --pattern.
            for dirname, dirnames, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)
                    if os.path.isfile(file_path):
                        if args.pattern:
                            if fnmatch.fnmatch(file_name, args.pattern):
                                files.append(to_unicode(file_path))
                        else:
                            files.append(to_unicode(file_path))
        else:
            files.append(path)

        if args.shuffle:
            random.shuffle(files)

        for file_path in files:
            # Zero-byte samples are skipped outright.
            if not File(file_path).get_size():
                if not args.quiet:
                    print(bold(yellow("Empty") + ": sample {0} (skipping file)".format(file_path)))
                continue

            if not args.max is None:
                # Break if the maximum number of samples has been reached.
                if not args.max:
                    break

                args.max -= 1

            if args.remote:
                if not HAVE_REQUESTS:
                    print(bold(red("Error")) + ": you need to install python-requests (`pip install requests`)")
                    return False

                if args.ssl:
                    url = "https://{0}/tasks/create/file".format(args.remote)
                else:
                    url = "http://{0}/tasks/create/file".format(args.remote)

                # NOTE(review): this rebinds `files` (the list being iterated)
                # to the multipart dict; iteration is unaffected because the
                # for-loop holds its own iterator, but the file handle opened
                # here is never explicitly closed.
                files = dict(
                    file=open(file_path, "rb"),
                    filename=os.path.basename(file_path)
                )
                data = dict(
                    package=args.package,
                    timeout=args.timeout,
                    options=args.options,
                    priority=args.priority,
                    machine=args.machine,
                    platform=args.platform,
                    memory=args.memory,
                    enforce_timeout=args.enforce_timeout,
                    custom=args.custom,
                    tags=args.tags
                )

                try:
                    # Same four POST variants as for URL submission above.
                    if args.user and args.password:
                        if args.ssl:
                            if args.sslnoverify:
                                verify = False
                            else:
                                verify = True
                            response = requests.post(url, auth=(args.user,args.password), files=files,data=data,verify=verify)
                        else:
                            response = requests.post(url, auth=(args.user,args.password), files=files,data=data)
                    else:
                        if args.ssl:
                            if args.sslnoverify:
                                verify = False
                            else:
                                verify = True
                            response = requests.post(url, files=files, data=data, verify=verify)
                        else:
                            response = requests.post(url, files=files, data=data)
                except Exception as e:
                    print(bold(red("Error")) + ": unable to send file: {0}".format(e))
                    return False

                json = response.json()
                task_id = json["task_id"]
            else:
                if args.unique:
                    # Skip samples whose SHA-256 is already in the database.
                    sha256 = File(file_path).get_sha256()
                    if not db.find_sample(sha256=sha256) is None:
                        msg = ": Sample {0} (skipping file)".format(file_path)
                        if not args.quiet:
                            print(bold(yellow("Duplicate")) + msg)
                        continue

                task_id = db.add_path(file_path=file_path,
                                      package=args.package,
                                      timeout=args.timeout,
                                      options=args.options,
                                      priority=args.priority,
                                      machine=args.machine,
                                      platform=args.platform,
                                      custom=args.custom,
                                      memory=args.memory,
                                      enforce_timeout=args.enforce_timeout,
                                      clock=args.clock,
                                      tags=args.tags)

            if task_id:
                if not args.quiet:
                    print(bold(green("Success")) + u": File \"{0}\" added as task with ID {1}".format(file_path, task_id))
            else:
                print(bold(red("Error")) + ": adding task to database")
# NOTE(review): this chunk starts mid-function. The `if task != None:` block
# below is the tail of a start_worker()-style function whose `def` line lies
# outside this view — confirm against the full file before editing it.
    if task != None:
        global tid_list
        # Queue every URL of the pulled task; remember the resulting
        # Cuckoo task ids for the polling loop below.
        tid_list = taskhandler.add_urls(task)
        # print tid_list
        return task['_id']
    else:
        return None


def check_status(_id):
    """Periodically check whether all URL analysis is finished."""
    try:
        while True:
            time.sleep(5)
            # Poll the Cuckoo task ids recorded by the worker above.
            task_status = taskhandler.task_done(tid_list)
            if task_status == False:
                print(bold(yellow("Task Not Done")) + ": still processing URLs")
            if task_status == True:
                print (bold(green("Task Completed"))+ ": All URLs done have been analyzed")
                # Mark the originating Mongo task as finished.
                mongo.task_done(_id)
                break
    except KeyboardInterrupt:
        # Allow Ctrl-C to abort polling cleanly.
        return


if __name__ == '__main__':
    # Pull one task and block until its analysis completes.
    _id = start_worker()
    if _id == None:
        print(bold(red("Error")) + ": no tasks in 'idle' state to pull")
    else:
        check_status(_id)
def main():
    """Command-line submission utility (PINDemonium-enabled fork): queue a
    URL, a file/folder, or a baseline analysis, either locally or on a
    remote Cuckoo API server.

    High-entropy targets are assumed packed and force the PINDemonium
    ('exePIN') package, optionally prompting interactively for PIN args.

    NOTE(review): this function is Python 2 code (print statements,
    raw_input) — do not run it under Python 3 without porting.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("target", type=str, nargs="?", help="URL, path to the file or folder to analyze")
    parser.add_argument("-d", "--debug", action="store_true", help="Enable debug logging")
    parser.add_argument(
        "--remote",
        type=str,
        action="store",
        default=None,
        help="Specify IP:port to a Cuckoo API server to submit remotely",
        required=False)
    parser.add_argument("--url", action="store_true", default=False, help="Specify whether the target is an URL", required=False)
    parser.add_argument("--package", type=str, action="store", default="", help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="", help="Specify any custom value", required=False)
    parser.add_argument("--owner", type=str, action="store", default="", help="Specify the task owner", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0, help="Specify an analysis timeout", required=False)
    parser.add_argument(
        "-o",
        "--options",
        type=str,
        action="store",
        default="",
        help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")",
        required=False)
    parser.add_argument(
        "--priority",
        type=int,
        action="store",
        default=1,
        help="Specify a priority for the analysis represented by an integer",
        required=False)
    parser.add_argument(
        "--machine",
        type=str,
        action="store",
        default="",
        help="Specify the identifier of a machine you want to use",
        required=False)
    parser.add_argument(
        "--platform",
        type=str,
        action="store",
        default="",
        help="Specify the operating system platform you want to use (windows/darwin/linux)",
        required=False)
    parser.add_argument(
        "--memory",
        action="store_true",
        default=False,
        help="Enable to take a memory dump of the analysis machine",
        required=False)
    parser.add_argument(
        "--enforce-timeout",
        action="store_true",
        default=False,
        help="Enable to force the analysis to run for the full timeout period",
        required=False)
    parser.add_argument("--clock", type=str, action="store", default=None, help="Set virtual machine clock", required=False)
    parser.add_argument(
        "--tags",
        type=str,
        action="store",
        default=None,
        help="Specify tags identifier of a machine you want to use",
        required=False)
    parser.add_argument("--baseline", action="store_true", default=None, help="Run a baseline analysis", required=False)
    parser.add_argument("--max", type=int, action="store", default=None, help="Maximum samples to add in a row", required=False)
    parser.add_argument("--pattern", type=str, action="store", default=None, help="Pattern of files to submit", required=False)
    parser.add_argument("--shuffle", action="store_true", default=False, help="Shuffle samples before submitting them", required=False)
    parser.add_argument("--unique", action="store_true", default=False, help="Only submit new samples, ignore duplicates", required=False)
    parser.add_argument("--quiet", action="store_true", default=False, help="Only print text on failure", required=False)
    parser.add_argument("--PIN", help="\033[91mForce PINDemonium \o/ \033[0m", action='store_true', required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    print(args.target)

    if args.target:
        # Entropy above 7.5 usually indicates a packed binary: force the
        # PINDemonium unpacking package.
        # NOTE(review): FileEntropy() runs before the path-existence check
        # further below, so a nonexistent path would fail here first —
        # confirm FileEntropy's behavior on bad paths.
        if FileEntropy(args.target) > 7.5:
            print(
                green(
                    "We detected high entropy on the file. Trying to unpack.\n"
                ))
            args.PIN = True
        else:
            print(FileEntropy(args.target), args.target)

    # Add PINDemonium as an option
    if args.PIN:
        args.package = 'exePIN'
        Pin = PINParser()
        print(
            "\033[91m\n\t\t Please specify arguments for PIN. If no arguments are specified, it will run with default args within 10 secs. \n\t\t Here is a reminder : \033[0m\n"
        )
        # We use a timeout in case the file is automatically sent so the analysis is not stuck on waiting for arguments.
        Pin.Help()
        print("\n\t\tPress enter to continue")
        # Wait up to 10 s for the user to press enter before reading PIN args.
        i, o, e = select.select([sys.stdin], [], [], 10)
        if (i):
            sys.stdin.readline()
            sys.stdout.write(">>> ")
            Args = Pin.Parse(raw_input())
            args.package = args.package + ";" + Args
        else:
            print("Timeout, launching analysis with default args.")

    if not args.baseline and not args.target:
        print "No file or URL has been specified!"
        exit(1)

    # If the quiet flag has been set, then we also disable the "warning"
    # level of the logging module. (E.g., when pydeep has not been installed,
    # there will be a warning message, because Cuckoo can't resolve the
    # ssdeep hash of this particular sample.)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig()
    if args.quiet:
        logging.disable(logging.WARNING)

    db = Database()

    if args.url:
        # ------------------------- URL submission -------------------------
        target = to_unicode(args.target)
        if args.remote:
            if not HAVE_REQUESTS:
                print(
                    bold(red("Error")) +
                    ": you need to install python-requests (`pip install requests`)"
                )
                return False

            url = "http://{0}/tasks/create/url".format(args.remote)
            data = dict(url=target,
                        package=args.package,
                        timeout=args.timeout,
                        options=args.options,
                        priority=args.priority,
                        machine=args.machine,
                        platform=args.platform,
                        memory=args.memory,
                        enforce_timeout=args.enforce_timeout,
                        custom=args.custom,
                        owner=args.owner,
                        tags=args.tags)
            try:
                response = requests.post(url, data=data)
            except Exception as e:
                print(
                    bold(red("Error")) + ": unable to send URL: {0}".format(e))
                return False

            json = response.json()
            task_id = json["task_id"]
        else:
            task_id = db.add_url(target,
                                 package=args.package,
                                 timeout=args.timeout,
                                 options=args.options,
                                 priority=args.priority,
                                 machine=args.machine,
                                 platform=args.platform,
                                 custom=args.custom,
                                 owner=args.owner,
                                 memory=args.memory,
                                 enforce_timeout=args.enforce_timeout,
                                 clock=args.clock,
                                 tags=args.tags)

        if task_id:
            if not args.quiet:
                print(
                    bold(green("Success")) +
                    u": URL \"{0}\" added as task with ID {1}".format(
                        target, task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    elif args.baseline:
        # ----------------------- Baseline analysis ------------------------
        if args.remote:
            print "Remote baseline support has not yet been implemented."
            exit(1)

        task_id = db.add_baseline(args.timeout, args.owner, args.machine,
                                  args.memory)
        if task_id:
            if not args.quiet:
                print(
                    bold(green("Success")) +
                    u": Baseline analysis added as task with ID {0}".format(
                        task_id))
        else:
            print(bold(red("Error")) + ": adding task to database")
    else:
        # --------------------- File / folder submission --------------------
        target = to_unicode(args.target)
        # Get absolute path to deal with relative.
        path = to_unicode(os.path.abspath(target))
        if not os.path.exists(path):
            print(
                bold(red("Error")) +
                u": the specified file/folder does not exist at path \"{0}\"".
                format(path))
            return False

        files = []
        if os.path.isdir(path):
            # Recursively collect files, optionally filtered by --pattern.
            for dirname, dirnames, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)
                    if os.path.isfile(file_path):
                        if args.pattern:
                            if fnmatch.fnmatch(file_name, args.pattern):
                                files.append(to_unicode(file_path))
                        else:
                            files.append(to_unicode(file_path))
        else:
            files.append(path)

        if args.shuffle:
            random.shuffle(files)
        else:
            files = sorted(files)

        for file_path in files:
            # Zero-byte samples are skipped outright.
            if not File(file_path).get_size():
                if not args.quiet:
                    print(
                        bold(
                            yellow("Empty") +
                            ": sample {0} (skipping file)".format(file_path)))
                continue

            if args.max is not None:
                # Break if the maximum number of samples has been reached.
                if not args.max:
                    break

                args.max -= 1

            if args.remote:
                if not HAVE_REQUESTS:
                    print(
                        bold(red("Error")) +
                        ": you need to install python-requests (`pip install requests`)"
                    )
                    return False

                url = "http://{0}/tasks/create/file".format(args.remote)
                # NOTE(review): rebinds `files` (the list being iterated) to
                # the multipart dict; the loop is unaffected (it holds its own
                # iterator), but the file handle is never explicitly closed.
                files = dict(file=open(file_path, "rb"),
                             filename=os.path.basename(file_path))
                data = dict(package=args.package,
                            timeout=args.timeout,
                            options=args.options,
                            priority=args.priority,
                            machine=args.machine,
                            platform=args.platform,
                            memory=args.memory,
                            enforce_timeout=args.enforce_timeout,
                            custom=args.custom,
                            owner=args.owner,
                            tags=args.tags)
                try:
                    response = requests.post(url, files=files, data=data)
                except Exception as e:
                    print(
                        bold(red("Error")) +
                        ": unable to send file: {0}".format(e))
                    return False

                json = response.json()
                task_id = json["task_id"]
            else:
                if args.unique:
                    # Skip samples whose SHA-256 is already in the database.
                    sha256 = File(file_path).get_sha256()
                    if not db.find_sample(sha256=sha256) is None:
                        msg = ": Sample {0} (skipping file)".format(file_path)
                        if not args.quiet:
                            print(bold(yellow("Duplicate")) + msg)
                        continue

                task_id = db.add_path(
                    file_path=file_path,
                    package=args.package,
                    timeout=args.timeout,
                    options=args.options,
                    priority=args.priority,
                    machine=args.machine,
                    platform=args.platform,
                    custom=args.custom,
                    owner=args.owner,
                    memory=args.memory,
                    enforce_timeout=args.enforce_timeout,
                    clock=args.clock,
                    tags=args.tags,
                )

            if task_id:
                if not args.quiet:
                    print(
                        bold(green("Success")) +
                        u": File \"{0}\" added as task with ID {1}".format(
                            file_path, task_id))
            else:
                print(bold(red("Error")) + ": adding task to database")