def _create_arborescence(self):
    """
    .. py:function:: _create_arborescence(self)

    Creates the base arborescence for the current case, prompting for
    confirmation (unless ``--overwrite`` was given) before trashing any
    pre-existing case directory.

    :param self: current class instance
    :type self: class
    """

    if os.path.exists(self.resources["case"]):
        if not self.arguments.overwrite:
            # Interactive confirmation; a negative answer aborts the program.
            if not _interaction.prompt("Overwrite existing object <{}>?".format(self.resources["case"])):
                _log.fault("Aborted due to manual user interruption.")

        try:
            # Move the existing case directory to the trash rather than
            # deleting it outright, so the operation is recoverable.
            send2trash.send2trash(self.resources["case"])
            _log.warning("Overwritten existing object <{}>.".format(self.resources["case"]))

        except Exception:
            # FIX: the original tuple (send2trash.TrashPermissionError,
            # OSError, Exception) was redundant — Exception already subsumes
            # both specific types, so a single handler is equivalent.
            _log.fault("Failed to overwrite existing object <{}>.".format(self.resources["case"]), post_mortem=True)

    _fs.create_local_directory(self.resources["case"])
def create_arborescence(self):
    """
    .. py:function:: create_arborescence(self)

    Creates the base arborescence for the current case, removing any
    pre-existing case directory first.

    :param self: current class instance
    :type self: class
    """

    if os.path.exists(self.resources["case"]):
        if not self.arguments.overwrite:
            # NOTE(review): the prompt's return value is discarded here,
            # unlike the sibling implementation that aborts on refusal —
            # confirm that self._prompt aborts internally on a negative
            # answer.
            self._prompt("Overwrite existing object <{}> ? [y/N] ".format(self.resources["case"]))

        try:
            # Permanently removes the whole existing case tree.
            shutil.rmtree(self.resources["case"])
            _log.warning("Overwritten existing object <{}>.".format(self.resources["case"]))

        except Exception:
            # FIX: the original tuple (OSError, Exception) was redundant —
            # Exception already subsumes OSError.
            _log.fault("Failed to overwrite existing object <{}>.".format(self.resources["case"]), trace=True)

    self._create_local_directory(self.resources["case"])
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the class: compiles every registered YARA
    ruleset, dispatches the processing jobs, then invokes the
    postprocessing modules when matches were produced.

    :param self: current class instance
    :type self: class
    """

    compiled_rulesets = 0
    compiled_rules = 0

    for name, ruleset in _loader.iterate_rulesets():
        status, count = self._compile_ruleset(name, ruleset)

        if status:
            compiled_rulesets += 1
            compiled_rules += count

    if not compiled_rulesets:
        _log.fault("No YARA ruleset(s) loaded. Quitting.")

    _log.info("Applying a total of <{}> YARA rule(s) from <{}> ruleset(s).".format(compiled_rules, compiled_rulesets))

    if self._dispatch_jobs():
        return self._invoke_post_modules()

    _log.warning("Skipping <{}> module(s) invocation.".format(_models.Post.__name__))
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the class: compiles the registered YARA
    rulesets, dispatches the processing jobs, then invokes the
    postprocessing modules when matches were produced.

    :param self: current class instance
    :type self: class
    """

    # Count how many rulesets compiled successfully.
    compiled = sum(1 for name, ruleset in _loader.iterate_rulesets() if self._compile_ruleset(name, ruleset))

    if not compiled:
        _log.fault("No YARA rulesets loaded. Quitting.")

    _log.info("Applying a total of <{}> YARA ruleset(s).".format(compiled))

    if self._dispatch_jobs():
        return self._invoke_post_modules()

    _log.warning("Skipping <{}> module(s) invocation.".format(_models.Post.__name__))
def _tear_down(self):
    """
    .. py:function:: _tear_down(self)

    Cleanup method called on error: forcibly stops the worker pool and
    waits for every subprocess to exit.

    :param self: current class instance
    :type self: class
    """

    _log.warning("Waiting for concurrent process(es) to terminate before exiting.")

    # Terminate first, then join so every worker has fully exited before
    # control returns to the caller.
    self.pool.terminate()
    self.pool.join()
def __del__(self):
    """
    .. py:function:: __del__(self)

    Destruction method that calls the teardown function(s), unless the
    configuration asks for temporary artifacts to be kept.

    :param self: current class instance
    :type self: class
    """

    if not _conf.KEEP_TEMPORARY_ARTIFACTS:
        self._tear_down()
        return

    _log.warning("Skipped temporary artifact(s) cleanup.")
def _dispatch_jobs(self):
    """
    .. py:function:: _dispatch_jobs(self)

    Dispatches the processing task(s) to the subprocess(es).

    :param self: current class instance
    :type self: class

    :return: number of match(es)
    :rtype: int
    """

    with multiprocessing.Manager() as manager:
        # Shared state between workers and the reader:
        # results[0] — lock guarding results, results[1] — match counter,
        # results[2] — manager-backed list of matching evidences.
        queue = manager.Queue()
        results = (multiprocessing.Lock(), multiprocessing.Value(ctypes.c_int, 0), manager.list())

        # Dedicated reader subprocess that drains the queue and writes the
        # formatted output.
        reader = multiprocessing.Process(target=_reader.Reader(queue, results, {
            "target": self.case.resources["matches"],
            "storage": self.case.resources["storage"],
            "format": self.case.arguments.format
        }).run)

        # Daemonized so a crash of the parent cannot leave it orphaned.
        reader.daemon = True
        reader.start()

        _log.debug("Started reader subprocess to consume queue result(s).")

        with _magic.Pool(processes=self.case.arguments.processes) as pool:
            for file in self.case.resources["evidences"]:
                # Oversized evidences are skipped; --max-size raises the cap.
                if os.path.getsize(file) > self.case.arguments.max_size:
                    _log.warning("Evidence <{}> exceeds the maximum size. Ignoring evidence. Try changing --max-size to override this behavior.".format(file))
                    continue

                # One asynchronous job per evidence; worker failures are
                # surfaced through the error callback instead of being lost.
                pool.starmap_async(
                    _processors.File(self.case.arguments.hash_algorithms, self.case.arguments.callbacks, queue, self.case.arguments.fast).run,
                    [(file, self.buffers)],
                    error_callback=_log.inner_exception)

                _log.debug("Mapped concurrent job to consume evidence <{}>.".format(file))

        # Sentinel telling the reader that no more results will arrive;
        # enqueued only after the pool context has completed every job.
        queue.put(_codes.DONE)

        # Wait for the reader to finish draining; a SIGINT during the wait is
        # converted into a fatal abort.
        with _magic.Hole(KeyboardInterrupt, action=lambda:_log.fault("Aborted due to manual user interruption <SIGINT>.")):
            reader.join()

        return results[1].value
def load_module(name, model, silent=False):
    """
    .. py:function:: load_module(name, model)

    Dynamically loads a registered module.

    :param name: name of the module to load
    :type name: str

    :param model: reference class handle
    :type model: class

    :param silent: suppress the warning message
    :type silent: bool

    :return: module class handle, or None when the module is incompatible
             with the current system
    :rtype: class
    """

    try:
        module = importlib.import_module("framework.modules.{}.{}".format(model.__name__.lower(), name))

    except ModuleNotFoundError as exc:
        # FIX: the original caught `ModuleNotFound`, which is not a defined
        # name — the handler itself would have raised NameError. The builtin
        # exception is ModuleNotFoundError.
        _log.fault("Missing dependency <{0}>. Try <pip install {0}> or manually build the required module to fix the issue.".format(exc.name))

    try:
        # Validate that the module exposes a proper subclass of the model.
        _checker.check_module(module, model)

    except _errors.NotFound:
        _log.fault("No subclass found in module <{}.{}>. Quitting.".format(model.__name__.lower(), name), post_mortem=True)

    except _errors.ModuleInheritance:
        _log.fault("Module <{}.{}> not inheriting from the base class. Quitting.".format(model.__name__.lower(), name), post_mortem=True)

    except _errors.SystemNotSupported:
        # Preprocessing modules are mandatory: an unsupported system is fatal.
        # Other models are merely skipped (optionally without a warning).
        if model.__name__ == _models.Pre.__name__:
            _log.fault("Module <{}.{}> does not support the current system <{}>. Quitting.".format(model.__name__.lower(), name, platform.system()))

        elif not silent:
            _log.warning("Module <{}.{}> incompatible with the current system <{}>. Ignoring.".format(model.__name__.lower(), name, platform.system()))

        return None

    return getattr(module, model.__name__)
def track_file(self, evidence):
    """
    .. py:function:: track_file(self, evidence)

    Checks and registers an evidence file for processing.

    :param self: current class instance
    :type self: class

    :param evidence: absolute path to the evidence file
    :type evidence: str
    """

    # Guard clause: ignore anything that is not an existing regular file.
    if not os.path.isfile(evidence):
        _log.warning("Evidence <{}> not found or invalid.".format(evidence))
        return

    self.resources["evidences"]["files"].append(evidence)
    _log.debug("Tracking file <{}>.".format(evidence))
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the class.

    :param self: current class instance
    :type self: class
    """

    # Drain the result queue into the output file; the file handle is kept
    # on the instance for the duration of the read.
    with self._open_output_file() as self.output:
        self._read_queue()

    # results[0] is presumably the shared lock and results[1] the shared
    # match counter filled by the workers — TODO confirm against the
    # dispatching side.
    with self.results[0], _magic.OverrideConsoleLogging("WARNING"):
        # Summary: warn when at least one pattern matched, plain info
        # otherwise (conditional expression used for its side effect).
        _log.warning(
            "Total of <{}> matching pattern(s). See <{}> for more details."
            .format(self.results[1].value, self.target["target"]
        )) if self.results[1].value else _log.info(
            "No matching pattern(s) found.")

    self._store_matching_evidences()
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the module.

    :param self: current class instance
    :type self: class
    """

    tmp = self.case.require_temporary_directory()

    for item in self.case.arguments.input:
        if os.path.isfile(item):
            # FIX: the original log call referenced `file`, which is not
            # defined anywhere in this scope (NameError in Python 3); the
            # loop variable `item` is the intended value.
            _log.debug("Tracking file <{}> to <{}>.".format(item, tmp))

        elif os.path.isdir(item):
            _log.warning("Directory <{}> is not an archive. Ignoring.".format(item))

        else:
            _log.warning("Unknown inode type for object <{}>.".format(item))
def track_process(self, pid):
    """
    .. py:function:: track_process(self, pid)

    Checks wether a process exists on the local machine and registers it
    for processing.

    :param self: current class instance
    :type self: class

    :param pid: process identifier
    :type pid: int
    """

    # Guard clauses: reject malformed identifiers, then dead processes.
    if not isinstance(pid, int):
        _log.error("Invalid PID format <{}>.".format(pid))
        return

    if not psutil.pid_exists(pid):
        _log.warning("Process <{}> not found.".format(pid))
        return

    self.resources["evidences"]["processes"].append(pid)
    _log.debug("Tracking live process matching PID <{}>.".format(pid))
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the module: parses each EML evidence, extracts
    its attachments into a per-evidence temporary directory, and tracks
    every attachment matching the inner inclusion/exclusion patterns.

    :param self: current class instance
    :type self: class
    """

    tmp = self.case.require_temporary_directory()

    for evidence in self.feed:
        try:
            mail = eml_parser.eml_parser.decode_email(evidence, include_raw_body=True, include_attachment_data=True)
            _log.info("Extracted <{}> attachment(s) from <{}>.".format(len(mail["attachment"]), evidence))

        except Exception:
            _log.exception("Failed to extract data from <{}>. Ignoring evidence.".format(evidence))
            continue

        output_directory = os.path.join(tmp, os.path.basename(evidence))

        if not os.path.isdir(output_directory):
            _fs.create_local_directory(output_directory)

        # FIX: the original fell back to an undefined name `idx` for nameless
        # attachments (NameError); enumerate provides the index, stringified
        # so os.path.join accepts it.
        for idx, attachment in enumerate(mail["attachment"]):
            if not attachment["filename"]:
                attachment["filename"] = str(idx)

            if not _fs.matches_patterns(attachment["filename"], self.case.arguments._include):
                _log.warning("Ignoring attachment <{}> not matching inner inclusion pattern(s).".format(attachment["filename"]))
                continue

            if _fs.matches_patterns(attachment["filename"], self.case.arguments._exclude):
                _log.warning("Ignoring attachment <{}> matching inner exclusion pattern(s).".format(attachment["filename"]))
                continue

            output_path = os.path.join(output_directory, attachment["filename"])

            # Attachment payloads are base64-encoded by the parser.
            with open(output_path, "wb") as out:
                out.write(base64.b64decode(attachment["raw"]))

            _log.debug("Attachment <{}> extracted from <{}> stored locally as <{}>.".format(attachment["filename"], evidence, output_path))
            self.case.track_file(output_path)
def _consume_evidence(self):
    """
    .. py:function:: _consume_evidence(self)

    Main loop that processes the evidence(s) and formats the match(es):
    every compiled YARA buffer is matched against the evidence file, each
    match is hashed and pushed to both the result queue and the
    registered callbacks.

    :param self: current class instance
    :type self: class
    """

    for _, buffer in self.buffers.items():
        try:
            for match in buffer.match(self.evidence, timeout=_conf.YARA_MATCH_TIMEOUT, fast=self.fast):
                # One digest per configured algorithm for the evidence file.
                hashes = {}

                for algorithm in self.algorithms:
                    hashes[algorithm] = self._compute_hash(self.evidence, algorithm=algorithm)

                # Fan the formatted match out to the queue and to the
                # user-registered callbacks alike.
                for action in [self.queue.put, self._invoke_callbacks]:
                    action({
                        "origin": _meta.__package__,
                        "target": {
                            "type": "file",
                            "identifier": self.evidence
                        },
                        "match": {
                            "timestamp": _rendering.timestamp(),
                            "rule": match.rule,
                            "meta": match.meta,
                            "namespace": match.namespace,
                            "tags": match.tags,
                            "hashes": hashes,
                            "strings": [{
                                "offset": string[0],
                                "reference": string[1],
                                "litteral": string[2].decode("utf-8", "backslashreplace")
                            } for string in match.strings]
                        }
                    })

        except yara.TimeoutError:
            _log.warning("Timeout exceeded for evidence <{}>.".format(self.evidence))
            continue

        except Exception:
            # FIX: the original tuple (yara.Error, Exception) was redundant —
            # Exception already subsumes yara.Error.
            _log.exception("YARA exception raised during processing of evidence <{}>.".format(self.evidence))
            continue
def _process_evidence(self):
    """
    .. py:function:: _process_evidence(self)

    Main loop that processes the evidence(s) and formats the match(es):
    every compiled YARA buffer is matched against the live process
    identified by the evidence PID, and each match is pushed to both the
    result queue and the registered callbacks.

    :param self: current class instance
    :type self: class
    """

    for _, buffer in self.buffers.items():
        try:
            for match in buffer.match(pid=self.evidence, timeout=_conf.YARA_MATCH_TIMEOUT, fast=self.fast):
                # Fan the formatted match out to the queue and to the
                # user-registered callbacks alike. No hashes can be computed
                # for a live process, hence the empty list.
                for action in [self.queue.put, self._invoke_callbacks]:
                    action({
                        "origin": _meta.__package__,
                        "target": {
                            "type": "process",
                            "identifier": self.evidence
                        },
                        "match": {
                            "timestamp": pendulum.now().format(_conf.TIMESTAMP_FORMAT, formatter="alternative"),
                            "rule": match.rule,
                            "meta": match.meta,
                            "namespace": match.namespace,
                            "tags": match.tags,
                            "hashes": [],
                            "strings": [{
                                "offset": string[0],
                                "reference": string[1],
                                "litteral": string[2].decode("utf-8", "backslashreplace")
                            } for string in match.strings]
                        }
                    })

        except yara.TimeoutError:
            _log.warning("Timeout exceeded for live process matching PID <{}>.".format(self.evidence))
            continue

        except Exception:
            # FIX: the original had two byte-identical handlers for yara.Error
            # and Exception; Exception subsumes yara.Error, so they are merged.
            _log.exception("YARA exception raised during processing of live process matching PID <{}>.".format(self.evidence))
            continue
:param case: preloaded Case class :type case: class :param feed: flattened list of evidence(s) :type feed: list """ tasks = {} for file in feed: meta = _fs.guess_file_type(file) if not meta: tasks.setdefault(("raw", modules["raw"]), []).append(file) _log.warning( "Could not determine data type. Added evidence <{}> to the force-feeding list." .format(file)) continue try: name, Module = _find_association(modules, meta) tasks.setdefault((name, Module), []).append(file) _log.debug( "Identified data type <{}> for evidence <{}>. Dispatching to <{}>." .format(meta.mime, file, name)) except _errors.UnsupportedType: tasks.setdefault(("raw", modules["raw"]), []).append(file) _log.warning( "Data type <{}> unsupported. Added evidence <{}> to the force-feeding list."
def run(self):
    """
    .. py:function:: run(self)

    Placeholder entry point: only warns that the module is not
    implemented yet.

    :param self: current class instance
    :type self: class
    """

    _log.warning("Unimplemented <{}> module.".format(self.__class__.__name__))
""" for item in self.case.arguments.input: if os.path.isfile(item): self.case.track_file(file) elif os.path.isdir(item): for file in _fs.enumerate_matching_files( item, self.case.arguments.filters, recursive=self.case.arguments.recursive): if os.path.isfile(file): self.case.track_file(file) else: _log.warning( "Unknown inode type for object <{}>.".format(item)) def run(self): """ .. py:function:: run(self) Main entry point for the module. :param self: current class instance :type self: class """ if self.case.arguments.input: self._track_files()