def send_file_to_karton(file: File) -> str:
    """
    Send a file to the Karton pipeline for analysis.

    The root task identifier is attached to the file as the 'karton'
    attribute and also returned, so callers can track the analysis.

    :param file: File object to be analyzed
    :return: Karton root task UID
    """
    try:
        path = file.get_path()
        tmpfile = None
    except Exception:
        # If get_path doesn't work: download content to NamedTemporaryFile
        # It won't work if we use S3 storage and try to reanalyze
        # existing file (not uploaded within the same request).
        tmpfile = tempfile.NamedTemporaryFile()
        file_stream = file.open()
        shutil.copyfileobj(file_stream, tmpfile)
        File.close(file_stream)
        # Flush buffered writes so the producer (which reads the file by
        # path) sees the complete content, not a partially-written buffer.
        tmpfile.flush()
        path = tmpfile.name

    try:
        producer = Producer(
            identity="karton.mwdb",
            config=KartonConfig(config.karton.config_path),
        )
        # High-quality feeds get normal priority; everything else is low.
        feed_quality = g.auth_user.feed_quality
        task_priority = (
            TaskPriority.NORMAL if feed_quality == "high" else TaskPriority.LOW
        )
        task = Task(
            headers={
                "type": "sample",
                "kind": "raw",
                "quality": feed_quality,
            },
            payload={
                "sample": Resource(file.file_name, path=path, sha256=file.sha256),
                "attributes": file.get_metakeys(
                    as_dict=True, check_permissions=False
                ),
            },
            priority=task_priority,
        )
        producer.send_task(task)
    finally:
        # Close (and thereby delete) the temporary file even if task
        # construction or sending fails — otherwise it leaks on disk.
        if tmpfile is not None:
            tmpfile.close()

    file.add_metakey("karton", task.root_uid, check_permissions=False)
    logger.info("File sent to karton with %s", task.root_uid)
    return task.root_uid
def on_created_file(self, file: File):
    """
    Create Drakvuf Sandbox job for newly added file.
    Add analysis identifier as 'drakvuf' attribute.
    """
    # Drakvuf only analyzes Windows PE executables; skip everything else.
    if not file.file_type.startswith("PE32 executable"):
        logger.debug("Not a PE executable, ignoring.")
        return
    # Get contents path from "uploads" directory
    contents_path = file.get_path()
    # Send request to Drakvuf Sandbox. The file handle is opened in a
    # `with` block so it is always closed after the upload — the original
    # leaked the descriptor on every call (and on request failure).
    with open(contents_path, "rb") as contents:
        req = requests.post(
            f"{config.drakvuf.drakvuf_url}/upload",
            files={
                "file": (file.sha256 + ".exe", contents),
            },
            data={"timeout": config.drakvuf.timeout},
        )
    req.raise_for_status()
    # Get task identifier
    task_uid = req.json()["task_uid"]
    # Add it as attribute to the file
    file.add_metakey("drakvuf", task_uid, check_permissions=False)
    logger.info("File sent to Drakvuf. Analysis identifier: %s", task_uid)
def on_created_file(self, file: File):
    """
    Tag a newly created file for VirusTotal lookup.

    Stores the file's MD5 digest as the 'virustotal' attribute so the
    hash can later be resolved against VirusTotal.
    """
    # The attribute value is simply the file's own MD5 hash.
    file.add_metakey("virustotal", file.md5)