def send_raw_analysis(self, sample, outdir, metadata, dumps_metadata, quality):
    """
    Offload drakrun-prod by sending raw analysis output to be processed
    by drakrun.processor.
    """
    # Merge the analysis identifier under the caller-provided metadata
    # (metadata keys win, as in a plain dict.update()).
    payload = {"analysis_uid": self.analysis_uid, **metadata}

    # Test runs are routed via the dedicated test headers.
    headers = dict(self.test_headers if self.test_run else self.headers)
    headers["quality"] = quality

    task = Task(headers, payload=payload)
    task.add_payload("sample", sample)
    task.add_payload("dumps_metadata", dumps_metadata)
    if self.test_run:
        task.add_payload("testcase", self.current_task.payload["testcase"])

    if self.config.config.getboolean("drakrun", "attach_profiles", fallback=False):
        self.log.info("Uploading profiles...")
        task.add_payload("profiles", self.build_profile_payload())

    self.log.info("Uploading artifacts...")
    for artifact in self.upload_artifacts(self.analysis_uid, outdir):
        task.add_payload(artifact.name, artifact)
    self.send_task(task)
def process(self, task: Task):
    """
    Check a finished test analysis: rip the family from the dumps, compare it
    against the expected testcase family, and publish a JSON verdict resource.
    """
    sample = task.get_resource("sample")
    dumps = task.get_resource("dumps.zip")
    with dumps.extract_temporary() as workdir:
        family = self.analyze_dumps(sample, workdir)

    testcase = TestCase.from_json(task.payload["testcase"])
    expected_family = testcase.ripped

    matched = family is not None and expected_family == family
    if matched:
        self.log.info(f"Ripping {sample.sha256} OK: {family}")
        result = 'OK'
    else:
        self.log.error(
            f"Failed to rip {sample.sha256}. Expected {expected_family}, ripped {family}"
        )
        result = 'FAIL'

    verdict = json.dumps({
        "sample": sample.sha256,
        "family": {
            "expected": expected_family,
            "ripped": family,
        },
        "result": result,
    })

    out_task = Task({"type": "analysis-test-result", "kind": "drakrun"})
    res = LocalResource(
        name=self.current_task.root_uid, bucket='draktestd', content=verdict
    )
    # Force the stored uid to equal the resource name (the root task uid).
    res._uid = res.name
    out_task.add_payload("result", res)
    self.send_task(out_task)
def mock_task(resource: Resource) -> Task:
    """Wrap *resource* in a minimal raw-sample task, as a consumer would see it."""
    headers = {"type": "sample", "kind": "raw"}
    stub = Task(headers)
    stub.add_payload("sample", resource)
    return stub
def upload():
    """
    HTTP endpoint: accept an uploaded sample plus optional form fields
    (timeout, file_name, start_command, plugins) and queue an analysis task.
    """
    producer = Producer(conf)

    # Spool the upload to disk, then read it back as a karton resource.
    with NamedTemporaryFile() as tmp:
        request.files["file"].save(tmp.name)
        with open(tmp.name, "rb") as stored:
            sample = Resource("sample", stored.read())

    task = Task({"type": "sample", "stage": "recognized", "platform": "win32"})
    task.add_payload("override_uid", task.uid)

    # Add analysis timeout to task
    timeout = request.form.get("timeout")
    if timeout:
        task.add_payload("timeout", int(timeout))

    # Filename override falls back to the uploaded file's own name.
    filename = request.form.get("file_name") or request.files["file"].filename
    if not re.fullmatch(
        r"^((?![\\/><|:&])[\x20-\xfe])+\.(?:dll|exe|ps1|bat|doc|docm|docx|dotm|xls|xlsx|xlsm|xltx|xltm|ppt|pptx|vbs|js|jse|hta|html|htm)$",
        filename,
        flags=re.IGNORECASE,
    ):
        return jsonify({"error": "invalid file_name"}), 400

    base, ext = os.path.splitext(filename)
    task.add_payload("file_name", base)
    # Extension (without the dot) becomes a routing header.
    extension = ext[1:]
    if extension:
        task.headers["extension"] = extension

    # Add startup command to task
    start_command = request.form.get("start_command")
    if start_command:
        task.add_payload("start_command", start_command)

    # Add plugins to task
    plugins = request.form.get("plugins")
    if plugins:
        task.add_payload("plugins", json.loads(plugins))

    task.add_resource("sample", sample)
    producer.send_task(task)
    return jsonify({"task_uid": task.uid})
def send_analysis(self, sample, outdir, metadata, quality):
    """Publish a completed analysis (sample, metadata and artifacts) downstream."""
    # analysis_uid is merged below the caller's metadata (metadata keys win).
    payload = {"analysis_uid": self.analysis_uid, **metadata}

    headers = dict(self.test_headers) if self.test_run else dict(self.headers)
    headers["quality"] = quality

    task = Task(headers, payload=payload)
    task.add_payload("sample", sample)
    if self.test_run:
        task.add_payload("testcase", self.current_task.payload["testcase"])

    if self.config.config.getboolean("drakrun", "attach_profiles", fallback=False):
        self.log.info("Uploading profiles...")
        task.add_payload("profiles", self.build_profile_payload())

    self.log.info("Uploading artifacts...")
    for artifact in self.upload_artifacts(self.analysis_uid, outdir):
        task.add_payload(artifact.name, artifact)
    self.send_task(task)
def send_analysis(self, sample, outdir, metadata, quality):
    """Send the finished analysis task together with its uploaded artifacts."""
    headers = {**self.headers, "quality": quality}
    payload = {"analysis_uid": self.analysis_uid, **metadata}

    task = Task(headers, payload=payload)
    task.add_payload('sample', sample)

    self.log.info("Uploading artifacts...")
    for artifact in self.upload_artifacts(self.analysis_uid, outdir):
        task.add_payload(artifact.name, artifact)
    self.send_task(task)
def process(self):
    """
    Run every postprocessing plugin over the current task's resources and emit
    one final "analysis" task carrying all resources (original plus produced).

    Plugins with missing required resources are skipped; a plugin that raises
    is logged and does not abort the remaining plugins.
    """
    # Downloaded resource cache, extended with each plugin's outputs.
    task_resources = dict(self.current_task.iterate_resources())

    for plugin in self.plugins:
        name = plugin.handler.__name__
        # Plain generator membership test instead of any(map(lambda ..., .keys())).
        if any(r not in task_resources for r in plugin.required):
            self.log.info("Skipping %s, missing resources", name)
            continue
        try:
            self.log.debug("Running postprocess - %s", name)
            outputs = plugin.handler(self.current_task, task_resources,
                                     self.backend.minio)
            # Register each produced object as a remote resource so later
            # plugins (and the final task) can reference it.
            for out in outputs or ():
                self.log.debug(f"Step {name} outputted new resource: {out}")
                res_name = os.path.join(
                    self.current_task.payload["analysis_uid"], out)
                task_resources[out] = RemoteResource(
                    res_name,
                    uid=res_name,
                    bucket='drakrun',
                    backend=self.backend,
                )
        except Exception:
            self.log.error("Postprocess failed", exc_info=True)

    task = Task({
        "type": "analysis",
        "kind": "drakrun",
    })
    # Add metadata information about dumps within dumps.zip
    task.add_payload("dumps_metadata",
                     self.current_task.get_payload("dumps_metadata"))
    for res_key, resource in task_resources.items():
        task.add_payload(res_key, resource)
    self.send_task(task)
def submit_main(cls):
    """
    Submit every testcase from the JSON file given on the command line, then
    poll until all analyses produced a result in the 'draktestd' bucket.

    Prints one JSON object per finished test as it arrives and, finally, a
    JSON list of all results.
    """
    parser = cls.args_parser()
    args = parser.parse_args()

    conf_path = os.path.join(ETC_DIR, "config.ini")
    config = patch_config(Config(conf_path))

    with open(args.tests) as tests:
        testcases = [TestCase(**case) for case in json.load(tests)]

    # Fix: construct the producer once instead of once per submitted test —
    # it is loop-invariant and reusable across send_task calls.
    producer = Producer(config)

    root_uids = []
    for test in testcases:
        sample = test.get_sample()
        sys.stderr.write(f"Submitting {test.sha256}\n")

        t = Task(headers=dict(type="sample-test", platform="win64"))
        t.add_payload("sample", Resource("malwar", sample))
        t.add_payload("testcase", test.to_json())
        if args.timeout:
            t.add_payload("timeout", args.timeout)

        producer.send_task(t)
        root_uids.append(t.root_uid)

    consumer = RegressionTester(config)
    results = {}

    with tqdm(total=len(root_uids)) as pbar:
        while len(results) != len(root_uids):
            for root_uid in cls.get_finished_tasks(consumer.backend, root_uids):
                if root_uid not in results:
                    # Result objects are stored under the task's root uid.
                    res = json.load(
                        consumer.backend.minio.get_object("draktestd", root_uid)
                    )
                    results[root_uid] = res
                    print(json.dumps(results[root_uid]))
                    pbar.update(1)
            time.sleep(1)

    print(json.dumps(list(results.values())))
def main():
    """Push a single sample file to the karton pipeline for analysis."""
    parser = argparse.ArgumentParser(description="Push sample to the karton")
    parser.add_argument("sample", help="Path to the sample")
    parser.add_argument(
        "--start_command",
        # Fix: argparse %-formats help strings (help % params), so a literal
        # '%' must be escaped as '%%'. The previous unescaped '%f' made
        # `--help` crash with a TypeError.
        help="e.g. start %%f, %%f will be replaced by file name",
        required=False,
    )
    parser.add_argument(
        "--timeout",
        default=600,
        type=int,
        help="analysis timeout in seconds",
        required=False,
    )
    args = parser.parse_args()

    conf = patch_config(Config(os.path.join(ETC_DIR, "config.ini")))
    producer = Producer(conf)

    task = Task({"type": "sample", "stage": "recognized", "platform": "win32"})
    with open(args.sample, "rb") as f:
        sample = Resource("sample", f.read())
    task.add_resource("sample", sample)

    # Add filename (without extension) as payload; extension as routing header.
    filename = os.path.basename(args.sample)
    task.add_payload("file_name", os.path.splitext(filename)[0])
    extension = os.path.splitext(filename)[1][1:]
    if extension:
        task.headers["extension"] = extension

    if args.start_command is not None:
        task.add_payload("start_command", args.start_command)
    if args.timeout is not None:
        task.add_payload("timeout", args.timeout)

    producer.send_task(task)
def send_raw_analysis(self, sample, outdir, metadata, dumps_metadata, quality):
    """
    Offload drakrun-prod by sending raw analysis output to be processed
    by drakrun.processor.
    """
    headers = dict(self.test_headers if self.test_run else self.headers)
    headers["quality"] = quality

    task = Task(headers, payload=metadata)
    task.add_payload("sample", sample)
    task.add_payload("dumps_metadata", dumps_metadata)
    if self.test_run:
        task.add_payload("testcase", self.current_task.payload["testcase"])

    config = self.config.config
    if config.getboolean("drakrun", "attach_profiles", fallback=False):
        self.log.info("Uploading profiles...")
        task.add_payload("profiles", self.build_profile_payload())

    if config.getboolean("drakrun", "attach_apiscout_profile", fallback=False):
        self.log.info("Uploading static ApiScout profile...")
        apiscout_profile = LocalResource(
            name="static_apiscout_profile.json",
            path=Path(APISCOUT_PROFILE_DIR) / "static_apiscout_profile.json",
        )
        task.add_payload("static_apiscout_profile.json", apiscout_profile)

    self.log.info("Uploading artifacts...")
    for artifact in self.upload_artifacts(self.analysis_uid, outdir):
        task.add_payload(artifact.name, artifact)
    self.send_task(task)