Example #1
 def _single_file_test(
         self, name, content, tag,
         kind=None, platform=None, extension=None
 ):
     sample = TestResource(name, content)
     task = Task({
         "type": "sample",
         "kind": "raw"
     }, payload={
         "sample": sample,
         "extraction_level": 999
     })
     results = self.run_task(task)
     if kind is None:
         self.assertTasksEqual(results, [])
     else:
         expected_headers = {
             "origin": "karton.classifier",
             "type": "sample",
             "stage": "recognized",
             "quality": "high",
             "kind": kind
         }
         if platform:
             expected_headers["platform"] = platform
         if extension:
             expected_headers["extension"] = extension
         self.assertTasksEqual(results, [
             Task(expected_headers, payload={
                 "sample": sample,
                 "extraction_level": 999,
                 "tags": [tag]
             })
         ])
 def test_ascii_magic(self):
     test_groups = os.path.join(os.path.dirname(__file__), "testdata")
     for group in os.listdir(test_groups):
         test_group_dir = os.path.join(test_groups, group)
         for case in os.listdir(test_group_dir):
             if "." in os.path.basename(case):
                 continue
             with self.subTest(group + "/" + case):
                 case_path = os.path.join(test_group_dir, case)
                 with open(case_path, "rb") as f:
                     content = f.read()
                 sample = TestResource(case, content)
                 if os.path.isfile(case_path + ".decoded"):
                     with open(
                             os.path.join(test_group_dir, case) +
                             ".decoded", "rb") as f:
                         decoded = f.read()
                     expected = Task(
                         {
                             "type": "sample",
                             "kind": "raw",
                             "origin": "karton.asciimagic",
                         },
                         payload={
                             "parent": sample,
                             "sample": TestResource(case, decoded),
                         },
                     )
                 elif os.path.isfile(case_path + ".exe.decoded"):
                     with open(
                             os.path.join(test_group_dir, case) +
                             ".exe.decoded", "rb") as f:
                         decoded = f.read()
Example #3
def mock_task(resource: Resource) -> Task:
    task = Task({
        "type": "sample",
        "kind": "raw",
    })
    task.add_payload("sample", resource)
    return task
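
A quick usage sketch of the helper above; the file name and bytes are placeholders, not taken from the original tests:

from karton.core import Resource

sample = Resource("file.docx", b"placeholder content")  # hypothetical sample bytes
task = mock_task(sample)

# mock_task() sets the raw-sample headers and attaches the resource as "sample".
assert task.headers["type"] == "sample"
assert task.headers["kind"] == "raw"
assert task.get_payload("sample") is sample
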
Example #4
    def test_process_document_docx(self):
        magic, mime = (
            "Microsoft Word 2007+...",
            "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file.docx")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "document",
                "mime": mime,
                "extension": "docx",
                "platform": "win32",
            },
            payload={
                "sample": resource,
                "tags": ["document:win32:docx"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #5
    def test_process_document_xls(self):
        magic, mime = (
            "Composite Document File V2 Document...",
            "application/vnd.ms-excel",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file.xls")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "document",
                "mime": mime,
                "extension": "xls",
                "platform": "win32",
            },
            payload={
                "sample": resource,
                "tags": ["document:win32:xls"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #6
    def send_raw_analysis(self, sample, outdir, metadata, dumps_metadata,
                          quality):
        """
        Offload drakrun-prod by sending raw analysis output to be processed by
        drakrun.processor.
        """
        payload = {"analysis_uid": self.analysis_uid}
        payload.update(metadata)

        if self.test_run:
            headers = dict(self.test_headers)
        else:
            headers = dict(self.headers)

        headers["quality"] = quality

        task = Task(headers, payload=payload)
        task.add_payload("sample", sample)
        task.add_payload("dumps_metadata", dumps_metadata)

        if self.test_run:
            task.add_payload("testcase", self.current_task.payload["testcase"])

        if self.config.config.getboolean("drakrun",
                                         "attach_profiles",
                                         fallback=False):
            self.log.info("Uploading profiles...")
            task.add_payload("profiles", self.build_profile_payload())

        self.log.info("Uploading artifacts...")
        for resource in self.upload_artifacts(self.analysis_uid, outdir):
            task.add_payload(resource.name, resource)

        self.send_task(task)
Example #7
    def main(self) -> list:
        # Perform Operations on self.data to unpack the sample
        pe = pefile.PE(data=self.data)

        extractedPayload, extractedDecryptionSection, extractedValue = self.selectingSections(
            pe)
        decrementationCounter = extractedValue // 512  # that's how it is calculated
        obfuscatedPayload = self.payloadDecrypt(
            self.payloadDecode(extractedPayload), decrementationCounter)
        deobfuscatedPayload = self.runObfuscationCode(obfuscatedPayload)
        unpackedExecutable = self.decryptSecondStage(
            deobfuscatedPayload, extractedDecryptionSection)
        task = Task(
            headers={
                'type': 'sample',
                'kind': 'runnable',
                'stage': 'recognized'
            },
            payload={
                # Set Parent Data (Packed Sample)
                'parent': Resource(name='sample', content=self.data),
                # Set Child Data (Unpacked Sample)
                'sample': Resource(name='unpacked', content=unpackedExecutable),
            })
        # A list of tasks must be returned, as there can be more than one unpacked child
        return [task]
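
The main() method above only builds child tasks; a surrounding Karton service still has to send them. A minimal sketch of such a wrapper, assuming a hypothetical Unpacker class that owns main() and can be constructed from the raw sample bytes; the identity and filters are also assumptions:

from karton.core import Karton, Task

from unpacker_module import Unpacker  # hypothetical import of the class defining main() above


class UnpackerService(Karton):
    identity = "karton.example-unpacker"                # assumed identity
    filters = [{"type": "sample", "kind": "runnable"}]  # assumed filter

    def process(self, task: Task) -> None:
        sample = task.get_resource("sample")
        # Re-emit every unpacked child produced by main().
        for child in Unpacker(sample.content).main():
            self.send_task(child)
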
Example #8
    def test_process_runnable_win32_exe(self):
        magic, mime = (
            "PE32 executable (GUI) Intel 80386 Mono/.Net assembly...",
            "application/x-dosexec",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "runnable",
                "mime": mime,
                "extension": "exe",
                "platform": "win32",
            },
            payload={
                "sample": resource,
                "tags": ["runnable:win32:exe"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #9
    def test_process_runnable_win32_swf(self):
        magic, mime = (
            "Macromedia Flash data (compressed)...",
            "application/x-shockwave-flash",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file.swf")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "runnable",
                "mime": mime,
                "extension": "swf",
                "platform": "win32",
            },
            payload={
                "sample": resource,
                "tags": ["runnable:win32:swf"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #10
    def test_process_runnable_win32_msi(self):
        magic, mime = (
            "Composite Document File V2 Document, MSI Installer...",
            "application/x-msi",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file.msi")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "runnable",
                "mime": mime,
                "extension": "msi",
                "platform": "win32",
            },
            payload={
                "sample": resource,
                "tags": ["runnable:win32:msi"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #11
    def test_process_archive_iso(self):
        magic, mime = (
            "ISO 9660 CD-ROM filesystem data...",
            "application/x-iso9660-image",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file.iso")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "archive",
                "mime": mime,
                "extension": "iso",
            },
            payload={
                "sample": resource,
                "tags": ["archive:iso"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #12
    def test_process_archive_cab(self):
        magic, mime = (
            "Microsoft Cabinet archive data...",
            "application/vnd.ms-cab-compressed",
        )
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file.cab")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "recognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "archive",
                "mime": mime,
                "extension": "cab",
            },
            payload={
                "sample": resource,
                "tags": ["archive:cab"],
                "magic": magic,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #13
    def process(self, task: Task):
        dumps = task.get_resource("dumps.zip")
        sample = task.get_resource("sample")
        with dumps.extract_temporary() as temp:
            family = self.analyze_dumps(sample, temp)

            testcase = TestCase.from_json(task.payload["testcase"])
            expected_family = testcase.ripped

            if family is None or expected_family != family:
                self.log.error(
                    f"Failed to rip {sample.sha256}. Expected {expected_family}, ripped {family}"
                )
                result = 'FAIL'
            else:
                self.log.info(f"Ripping {sample.sha256} OK: {family}")
                result = 'OK'

            out_res = json.dumps({
                "sample": sample.sha256,
                "family": {
                    "expected": expected_family,
                    "ripped": family
                },
                "result": result
            })

            task = Task({"type": "analysis-test-result", "kind": "drakrun"})
            res = LocalResource(name=self.current_task.root_uid,
                                bucket='draktestd',
                                content=out_res)
            res._uid = res.name
            task.add_payload("result", res)
            self.send_task(task)
Example #14
    def send_analysis(self, sample, outdir, metadata, quality):
        payload = {"analysis_uid": self.analysis_uid}
        payload.update(metadata)

        if self.test_run:
            headers = dict(self.test_headers)
        else:
            headers = dict(self.headers)

        headers["quality"] = quality

        task = Task(headers, payload=payload)
        task.add_payload('sample', sample)

        if self.test_run:
            task.add_payload('testcase', self.current_task.payload['testcase'])

        if self.config.config.getboolean("drakrun",
                                         "attach_profiles",
                                         fallback=False):
            self.log.info("Uploading profiles...")
            task.add_payload("profiles", self.build_profile_payload())

        self.log.info("Uploading artifacts...")
        for resource in self.upload_artifacts(self.analysis_uid, outdir):
            task.add_payload(resource.name, resource)

        self.send_task(task)
Example #15
    def process(self):
        # downloaded resource cache
        task_resources = dict(self.current_task.iterate_resources())
        for plugin in self.plugins:
            name = plugin.handler.__name__
            if any(map(lambda r: r not in task_resources.keys(), plugin.required)):
                self.log.info("Skipping %s, missing resources", name)
                continue

            try:
                self.log.debug("Running postprocess - %s", plugin.handler.__name__)
                outputs = plugin.handler(
                    self.current_task, task_resources, self.backend.minio
                )

                if outputs:
                    for out in outputs:
                        self.log.debug(
                            f"Step {plugin.handler.__name__} outputted new resource: {out}"
                        )
                        res_name = os.path.join(
                            self.current_task.payload["analysis_uid"], out
                        )
                        task_resources[out] = RemoteResource(
                            res_name,
                            uid=res_name,
                            bucket="drakrun",
                            backend=self.backend,
                        )
            except Exception:
                self.log.error("Postprocess failed", exc_info=True)

        task = Task(
            {
                "type": "analysis",
                "kind": "drakrun",
            }
        )

        # Add metadata information about task analysis.
        metadata = json.loads(task_resources["metadata.json"].content)
        task.add_payload("metadata", metadata)

        # Add metadata information about dumps within dumps.zip
        task.add_payload(
            "dumps_metadata", self.current_task.get_payload("dumps_metadata")
        )

        # metadata.json is internal, don't leak it to other services
        del task_resources["metadata.json"]

        for (name, resource) in task_resources.items():
            task.add_payload(name, resource)

        self.send_task(task)
Example #16
    def report_config(self, config, sample, parent=None):
        legacy_config = dict(config)
        legacy_config["type"] = config["family"]
        del legacy_config["family"]

        # This allows us to spawn karton tasks for special config handling
        if "store-in-karton" in legacy_config:
            self.log.info("Karton tasks found in config, sending")

            for karton_task in legacy_config["store-in-karton"]:
                task_data = karton_task["task"]
                payload_data = karton_task["payload"]
                payload_data["parent"] = parent or sample

                task = Task(headers=task_data, payload=payload_data)
                self.send_task(task)
                self.log.info("Sending ripped task %s", task.uid)

            del legacy_config["store-in-karton"]

        if len(legacy_config.items()) == 1:
            self.log.info(
                "Final config is empty, not sending it to the reporter")
            return

        task = Task(
            {
                "type": "config",
                "kind": "static",
                "family": config["family"],
                "quality": self.current_task.headers.get("quality", "high"),
            },
            payload={
                "config": legacy_config,
                "sample": sample,
                "parent": parent or sample,
                "tags": self.result_tags,
                "attributes": self.result_attributes,
            },
        )
        self.send_task(task)
Example #17
 def test_pass(self) -> None:
     res = Resource("sample", b"z")
     task = Task(
         {
             "type": "sample",
             "stage": "recognized",
             "kind": "runnable"
         },
         payload={"sample": res},
     )
     res_tasks = self.run_task(task)
     self.assertTasksEqual(res_tasks, [])
Example #18
def upload():
    producer = Producer(conf)

    with NamedTemporaryFile() as f:
        request.files["file"].save(f.name)

        with open(f.name, "rb") as fr:
            sample = Resource("sample", fr.read())

    task = Task({"type": "sample", "stage": "recognized", "platform": "win32"})
    task.add_payload("override_uid", task.uid)

    # Add analysis timeout to task
    timeout = request.form.get("timeout")
    if timeout:
        task.add_payload("timeout", int(timeout))

    # Add filename override to task
    if request.form.get("file_name"):
        filename = request.form.get("file_name")
    else:
        filename = request.files["file"].filename
    if not re.fullmatch(
            r"^((?![\\/><|:&])[\x20-\xfe])+\.(?:dll|exe|ps1|bat|doc|docm|docx|dotm|xls|xlsx|xlsm|xltx|xltm|ppt|pptx|vbs|js|jse|hta|html|htm)$",
            filename,
            flags=re.IGNORECASE,
    ):
        return jsonify({"error": "invalid file_name"}), 400
    task.add_payload("file_name", os.path.splitext(filename)[0])

    # Extract and add extension
    extension = os.path.splitext(filename)[1][1:]
    if extension:
        task.headers["extension"] = extension

    # Add startup command to task
    start_command = request.form.get("start_command")
    if start_command:
        task.add_payload("start_command", start_command)

    # Add plugins to task
    plugins = request.form.get("plugins")
    if plugins:
        plugins = json.loads(plugins)
        task.add_payload("plugins", plugins)

    task.add_resource("sample", sample)

    producer.send_task(task)

    return jsonify({"task_uid": task.uid})
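
A client-side sketch of calling the upload() handler above; the host, port, and route are assumptions, since the Flask route registration is not part of the excerpt:

import requests

with open("malware.exe", "rb") as f:
    resp = requests.post(
        "http://localhost:6300/upload",                   # assumed address and route
        files={"file": ("malware.exe", f)},               # becomes request.files["file"]
        data={"timeout": "600", "file_name": "malware.exe"},
    )

# Returns {"task_uid": "..."} on success or {"error": "invalid file_name"} with HTTP 400.
print(resp.json())
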
Example #19
 def test_match_2(self) -> None:
     res = Resource("sample", b"ab")
     input_task = Task(
         {
             "type": "sample",
             "stage": "recognized",
             "kind": "runnable"
         },
         payload={"sample": res},
     )
     expected_task = Task(
         {
             "type": "sample",
             "origin": "karton.yaramatcher",
             "stage": "analyzed"
         },
         payload={
             "sample": res,
             "tags": ["yara:a", "yara:b"]
         },
     )
     res_tasks = self.run_task(input_task)
     self.assertTasksEqual(res_tasks, [expected_task])
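
Test methods such as test_match_2 above and test_pass in Example #17 rely on a karton.core.test.KartonTestCase subclass, which supplies run_task() and assertTasksEqual(). A minimal sketch of that scaffolding, with the import path of the service under test assumed:

from karton.core.test import KartonTestCase

from karton.yaramatcher import YaraMatcher  # assumed import path


class YaraMatcherTestCase(KartonTestCase):
    # KartonTestCase instantiates karton_class, feeds tasks in via run_task(),
    # and captures the tasks it would send for assertTasksEqual().
    karton_class = YaraMatcher
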
Example #20
    def send_analysis(self, sample, outdir, metadata, quality):
        payload = {"analysis_uid": self.analysis_uid}
        payload.update(metadata)

        headers = dict(self.headers)
        headers["quality"] = quality

        task = Task(headers, payload=payload)
        task.add_payload('sample', sample)

        self.log.info("Uploading artifacts...")
        for resource in self.upload_artifacts(self.analysis_uid, outdir):
            task.add_payload(resource.name, resource)

        self.send_task(task)
Example #21
    def test_process_unknown_file(self):
        resource = Resource("file.txt", b"\x00", sha256="sha256")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "unrecognized",
                "origin": "karton.classifier",
                "kind": "unknown",
                "quality": "high",
            },
            payload={
                "sample": resource,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #22
    def submit_main(cls):
        parser = cls.args_parser()
        args = parser.parse_args()

        conf_path = os.path.join(ETC_DIR, "config.ini")
        config = patch_config(Config(conf_path))

        with open(args.tests) as tests:
            testcases = [TestCase(**case) for case in json.load(tests)]

        root_uids = []

        for test in testcases:
            sample = test.get_sample()
            sys.stderr.write(f"Submitting {test.sha256}\n")

            t = Task(headers=dict(type="sample-test", platform="win64"))
            t.add_payload("sample", Resource("malwar", sample))
            t.add_payload("testcase", test.to_json())

            if args.timeout:
                t.add_payload("timeout", args.timeout)

            p = Producer(config)
            p.send_task(t)
            root_uids.append(t.root_uid)

        consumer = RegressionTester(config)
        results = {}

        with tqdm(total=len(root_uids)) as pbar:
            while len(results) != len(root_uids):
                for root_uid in cls.get_finished_tasks(consumer.backend,
                                                       root_uids):
                    if root_uid not in results:
                        res = json.load(
                            consumer.backend.minio.get_object(
                                "draktestd", root_uid))
                        results[root_uid] = res
                        print(json.dumps(results[root_uid]))
                        pbar.update(1)

                time.sleep(1)

        print(json.dumps(list(results.values())))
Example #23
def send_file_to_karton(file) -> str:
    from mwdb.model.file import File

    tmpfile = None

    try:
        # TODO: Use file.open() directly when Resource(fd=...)
        # is implemented in Karton
        try:
            # If file contents are available via path: just use the path
            path = file.get_path()
        except (ValueError, IOError):
            # If get_path doesn't work: download content to NamedTemporaryFile
            tmpfile = tempfile.NamedTemporaryFile()
            file_stream = file.open()
            shutil.copyfileobj(file_stream, tmpfile)
            File.close(file_stream)
            path = tmpfile.name

        producer = get_karton_producer()
        feed_quality = g.auth_user.feed_quality
        task_priority = (TaskPriority.NORMAL
                         if feed_quality == "high" else TaskPriority.LOW)
        task = Task(
            headers={
                "type": "sample",
                "kind": "raw",
                "quality": feed_quality
            },
            payload={
                "sample": Resource(file.file_name, path=path, sha256=file.sha256),
                "attributes": file.get_attributes(as_dict=True, check_permissions=False),
            },
            priority=task_priority,
        )
        producer.send_task(task)
    finally:
        if tmpfile is not None:
            tmpfile.close()

    logger.info("File sent to Karton with %s", task.root_uid)
    return task.root_uid
Example #24
    def test_process_unknown(self):
        magic, mime = "", None
        self.karton = mock_classifier(magic, mime)
        resource = mock_resource("file")
        res = self.run_task(mock_task(resource))

        expected = Task(
            headers={
                "type": "sample",
                "stage": "unrecognized",
                "origin": "karton.classifier",
                "quality": "high",
                "kind": "unknown",
            },
            payload={
                "sample": resource,
            },
        )
        self.assertTasksEqual(res, [expected])
Example #25
def main():
    parser = argparse.ArgumentParser(description="Push sample to the karton")
    parser.add_argument("sample", help="Path to the sample")
    parser.add_argument(
        "--start_command",
        help="e.g. start %f, %f will be replaced by file name",
        required=False,
    )
    parser.add_argument(
        "--timeout",
        default=600,
        type=int,
        help="analysis timeout in seconds",
        required=False,
    )
    args = parser.parse_args()

    conf = patch_config(Config(os.path.join(ETC_DIR, "config.ini")))
    producer = Producer(conf)

    task = Task({"type": "sample", "stage": "recognized", "platform": "win32"})

    with open(args.sample, "rb") as f:
        sample = Resource("sample", f.read())
    task.add_resource("sample", sample)

    # Add filename
    filename = os.path.basename(args.sample)
    task.add_payload("file_name", os.path.splitext(filename)[0])

    # Extract and add extension
    extension = os.path.splitext(filename)[1][1:]
    if extension:
        task.headers["extension"] = extension

    if args.start_command is not None:
        task.add_payload("start_command", args.start_command)

    if args.timeout is not None:
        task.add_payload("timeout", args.timeout)

    producer.send_task(task)
Example #26
def send_blob_to_karton(blob) -> str:
    producer = get_karton_producer()
    task = Task(
        headers={
            "type": "blob",
            "kind": blob.blob_type
        },
        payload={
            "content": blob.content,
            "dhash": blob.dhash,
            "attributes": blob.get_attributes(as_dict=True, check_permissions=False),
        },
    )
    producer.send_task(task)

    logger.info("Blob sent to Karton with %s", task.root_uid)
    return task.root_uid
Example #27
    def process(self):
        # downloaded resource cache
        task_resources = dict(self.current_task.iterate_resources())
        for plugin in self.plugins:
            name = plugin.handler.__name__
            if any(
                    map(lambda r: r not in task_resources.keys(),
                        plugin.required)):
                self.log.info("Skipping %s, missing resources", name)
                continue

            try:
                self.log.debug("Running postprocess - %s",
                               plugin.handler.__name__)
                outputs = plugin.handler(self.current_task, task_resources,
                                         self.backend.minio)

                if outputs:
                    for out in outputs:
                        self.log.debug(
                            f"Step {plugin.handler.__name__} outputted new resource: {out}"
                        )
                        res_name = os.path.join(
                            self.current_task.payload["analysis_uid"], out)
                        task_resources[out] = RemoteResource(
                            res_name,
                            uid=res_name,
                            bucket='drakrun',
                            backend=self.backend,
                        )
            except Exception:
                self.log.error("Postprocess failed", exc_info=True)

        task = Task({
            "type": "analysis",
            "kind": "drakrun-processed",
        })

        for (name, resource) in task_resources.items():
            task.add_payload(name, resource)
        self.send_task(task)
Example #28
    def send_raw_analysis(self, sample, outdir, metadata, dumps_metadata, quality):
        """
        Offload drakrun-prod by sending raw analysis output to be processed by
        drakrun.processor.
        """

        if self.test_run:
            headers = dict(self.test_headers)
        else:
            headers = dict(self.headers)

        headers["quality"] = quality

        task = Task(headers, payload=metadata)
        task.add_payload("sample", sample)
        task.add_payload("dumps_metadata", dumps_metadata)

        if self.test_run:
            task.add_payload("testcase", self.current_task.payload["testcase"])

        if self.config.config.getboolean("drakrun", "attach_profiles", fallback=False):
            self.log.info("Uploading profiles...")
            task.add_payload("profiles", self.build_profile_payload())

        if self.config.config.getboolean(
            "drakrun", "attach_apiscout_profile", fallback=False
        ):
            self.log.info("Uploading static ApiScout profile...")
            task.add_payload(
                "static_apiscout_profile.json",
                LocalResource(
                    name="static_apiscout_profile.json",
                    path=Path(APISCOUT_PROFILE_DIR) / "static_apiscout_profile.json",
                ),
            )

        self.log.info("Uploading artifacts...")
        for resource in self.upload_artifacts(self.analysis_uid, outdir):
            task.add_payload(resource.name, resource)

        self.send_task(task)
Example #29
def send_config_to_karton(config) -> str:
    producer = get_karton_producer()
    task = Task(
        headers={
            "type": "config",
            "kind": config.config_type,
            "family": config.family
        },
        payload={
            "config": config.cfg,
            "dhash": config.dhash,
            "attributes": config.get_attributes(as_dict=True, check_permissions=False),
        },
    )
    producer.send_task(task)

    logger.info("Configuration sent to Karton with %s", task.root_uid)
    return task.root_uid
Example #30
 def get_tasks(self):
     global memory_dumps
     self.memory_dump_cleanup()
     tasks = []
     if len(memory_dumps) <= 0:
         return tasks
     log.info(
         "Successfully Extracted {memory_dumps_count} Suspicious Memory Dump(s)"
         .format(memory_dumps_count=len(memory_dumps)))
     for memory_dump in memory_dumps:
         if self.config['debug'] is True:
             log.debug('EXTRACTED_PAYLOAD:')
             hexdump.hexdump(memory_dump)
         tasks.append(
             Task(headers=self.get_headers(memory_dump),
                  payload={
                      'parent': Resource(name='sample', content=self.data),
                      'sample': Resource(name='unpacked',
                                         content=memory_dump)
                  }))
     return tasks