def __init__(self, task_id):
    """@param task_id: ID of the analysis to process."""
    self.task = Database().view_task(task_id).to_dict()
    self.analysis_path = os.path.join(MALICE_ROOT, "storage", "analyses",
                                      str(task_id))
    self.cfg = Config(cfg=os.path.join(MALICE_ROOT, "conf", "processing.conf"))
def store_temp_file(filedata, filename):
    """Store a temporary file.
    @param filedata: content of the original file.
    @param filename: name of the original file.
    @return: path to the temporary file.
    """
    filename = get_filename_from_path(filename)

    # Reduce length (100 is arbitrary).
    filename = filename[:100]

    options = Config(os.path.join(MALICE_ROOT, "conf", "cuckoo.conf"))
    tmppath = options.cuckoo.tmppath
    targetpath = os.path.join(tmppath, "cuckoo-tmp")
    if not os.path.exists(targetpath):
        os.mkdir(targetpath)

    tmp_dir = tempfile.mkdtemp(prefix="upload_", dir=targetpath)
    tmp_file_path = os.path.join(tmp_dir, filename)
    with open(tmp_file_path, "wb") as tmp_file:
        # If filedata is a file object, do a chunked copy.
        if hasattr(filedata, "read"):
            chunk = filedata.read(1024)
            while chunk:
                tmp_file.write(chunk)
                chunk = filedata.read(1024)
        else:
            tmp_file.write(filedata)

    return tmp_file_path
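# Usage sketch for store_temp_file(). This is an illustration only: the import
# path and the sample file name below are assumptions, not part of the
# original code.
from lib.common.utils import store_temp_file  # assumed import path

# Raw bytes are written directly...
path = store_temp_file(b"MZ\x90\x00", "sample.exe")

# ...while an open file object is copied in 1024-byte chunks.
with open("sample.exe", "rb") as sample:
    path = store_temp_file(sample, "sample.exe")

print(path)  # e.g. <tmppath>/cuckoo-tmp/upload_XXXXXX/sample.exe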
def check_version():
    """Check the installed version of Malice against the latest GitHub release."""
    cfg = Config()

    if not cfg.malice.version_check:
        return

    print(" Checking for updates...")

    url = "https://api.github.com/repos/blacktop/malice/releases"
    try:
        response = requests.get(url=url)
    except requests.RequestException as e:
        print(red(" Failed! ") + "Unable to establish connection.\n")
        return  # return dict(error=e)

    if response.status_code == requests.codes.ok:
        try:
            response_data = response.json()
        except ValueError:
            print(red(" Failed! ") + "Invalid response.\n")
            return

        latest_version = response_data[0]["name"]
        if latest_version != MALICE_VERSION:
            msg = "Malice version {0} is available now.\n".format(latest_version)
            print(red(" Outdated! ") + msg)
        else:
            print(green(" Good! ") + "You have the latest version available.\n")
def __init__(self, task_id):
    """@param task_id: ID of the analysis to process."""
    # TODO: Implement the task queue.
    # self.task = Database().view_task(task_id).to_dict()
    # TODO: Change this to the tmp file folder.
    self.analysis_path = os.path.join(MALICE_ROOT, "storage", "analyses",
                                      str(task_id))
    self.cfg = Config(cfg=os.path.join(MALICE_ROOT, "conf", "av.conf"))
def __init__(self, task_id, results):
    """@param task_id: ID of the analysis to report on.
    @param results: analysis results to pass to the reporting modules.
    """
    self.task = Database().view_task(task_id).to_dict()
    self.results = results
    self.analysis_path = os.path.join(MALICE_ROOT, "storage", "analyses",
                                      str(task_id))
    self.cfg = Config(cfg=os.path.join(MALICE_ROOT, "conf", "reporting.conf"))
def process(self, module):
    """Run a single reporting module against self.results.
    @param module: reporting module class to instantiate and run.
    """
    # Initialize the current reporting module.
    try:
        current = module()
    except Exception:
        log.exception("Failed to load the reporting module \"{0}\":".format(module))
        return

    # Extract the module name.
    module_name = inspect.getmodule(current).__name__
    if "." in module_name:
        module_name = module_name.rsplit(".", 1)[1]

    try:
        options = self.cfg.get(module_name)
    except MaliceOperationalError:
        log.debug("Reporting module %s not found in configuration file",
                  module_name)
        return

    # If the reporting module is disabled in the config, skip it.
    if not options.enabled:
        return

    # Give it the path to the analysis results folder.
    current.set_path(self.analysis_path)
    # Give it the analysis task object.
    current.set_task(self.task)
    # Give it the relevant reporting.conf section.
    current.set_options(options)
    # Load the content of the analysis.conf file.
    current.cfg = Config(current.conf_path)

    try:
        current.run(self.results)
        log.debug("Executed reporting module \"%s\"",
                  current.__class__.__name__)
    except MaliceDependencyError as e:
        log.warning("The reporting module \"%s\" has missing dependencies: %s",
                    current.__class__.__name__, e)
    except MaliceReportError as e:
        log.warning("The reporting module \"%s\" returned the following error: %s",
                    current.__class__.__name__, e)
    except Exception:
        log.exception("Failed to run the reporting module \"%s\":",
                      current.__class__.__name__)
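# For context, process() only requires that a reporting module be constructible
# with no arguments and expose set_path(), set_task(), set_options(), a
# conf_path attribute, and a run(results) method. The stand-in below is a
# minimal sketch of that shape; the JsonDump name, the analysis.conf location,
# and the report.json output are illustrative assumptions (in the actual
# codebase these setters would normally come from a shared report base class).
import json
import os


class JsonDump(object):
    """Stand-in reporting module: dumps the results dict to report.json."""

    def set_path(self, analysis_path):
        # Remember the analysis folder; conf_path is assumed to point at the
        # per-analysis analysis.conf that process() later loads into self.cfg.
        self.analysis_path = analysis_path
        self.conf_path = os.path.join(analysis_path, "analysis.conf")

    def set_task(self, task):
        # Task dict handed over by process().
        self.task = task

    def set_options(self, options):
        # This module's own section of reporting.conf.
        self.options = options

    def run(self, results):
        # Write the analysis results next to the rest of the analysis data.
        report_path = os.path.join(self.analysis_path, "report.json")
        with open(report_path, "w") as report:
            json.dump(results, report, indent=4)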
stream = logging.StreamHandler()
stream.setFormatter(formatter)
logger.addHandler(stream)

netlog = NetlogHandler()
netlog.setFormatter(formatter)
logger.addHandler(netlog)

logger.setLevel(logging.DEBUG)


if __name__ == "__main__":
    success = False
    error = ""
    analyzer = None

    try:
        config = Config(cfg="analysis.conf")
        cuckoo = CuckooHost(config.ip, config.port)
        analyzer = Macalyzer(cuckoo, config)
        success = analyzer.run()
    except KeyboardInterrupt:
        error = "Keyboard Interrupt"
    except Exception as err:
        error_exc = format_exc()
        error = str(err)
        # Log through the analyzer's logger if it was set up; otherwise fall
        # back to stderr. Guard against the analyzer never being constructed.
        if analyzer is not None and len(analyzer.log.handlers):
            analyzer.log.exception(error_exc)
        else:
            stderr.write("{0}\n".format(error_exc))
    # Once the analysis is completed or terminated for any reason, we report
def __init__(self, task, machine):
    self.task = task
    self.machine = machine
    self.cfg = Config(cfg=os.path.join(MALICE_ROOT, "conf", "av.conf"))
    self.enabled = []
help="Brush history data", action="store_true", required=False) args = parser.parse_args() if args.brush_history: brush_history = True if args.verbose: init_logging(logname='rtdp', log_level=logging.DEBUG) else: init_logging(logname='rtdp', log_level=logging.INFO) if brush_history: config = Config(file_name="dataprocess-brushhistory") else: config = Config(file_name="dataprocess") pool = multiprocessing.Pool(processes=args.parallel, initializer=init_worker, maxtasksperchild=1000) init_task(config) try: while True: # 迭代复制的列表 for ar in pending_results[:]: if not ar.ready(): continue