def _read_queue(self):
    """
    .. py:function:: _read_queue(self)

    Main loop that processes the match(es) from the :code:`multiprocessing.Queue` instance.

    :param self: current class instance
    :type self: class
    """

    while True:
        item = self.queue.get()

        if item == _codes.DONE:
            break

        self.map[self.target["format"]](item)

        with self.results[0]:
            self.results[1].value += 1
            self.results[2].append(item["target"]["identifier"])

        _log.debug("Matching signature from rule <{}> on evidence <{}>.".format(item["match"]["rule"], item["target"]["identifier"]))
def __init__(self, arguments):
    """
    .. py:function:: __init__(self, arguments)

    Initialization method for the class.

    :param self: current class instance
    :type self: class

    :param arguments: :code:`argparse.Namespace` instance containing the processed command-line arguments
    :type arguments: class
    """

    self.arguments = arguments
    self.name = os.path.basename(self.arguments.output)

    self.resources = {
        "case": self.arguments.output,
        "matches": os.path.join(self.arguments.output, "{}.{}".format(_conf.MATCHES_FILE_BASENAME, self.arguments.format.lower())),
        "evidences": {
            "files": [],
            "processes": []
        },
        "temporary": []
    }

    _log.debug("Initialized new case <{}> anchored to <{}>.".format(self.name, self.resources["case"]))
def _open_output_file(self, mode="a", character_encoding=_conf.OUTPUT_CHARACTER_ENCODING):
    """
    .. py:function:: _open_output_file(self, mode="a", character_encoding=_conf.OUTPUT_CHARACTER_ENCODING)

    Opens the output stream.

    :param self: current class instance
    :type self: class

    :param mode: file opening mode to use
    :type mode: str

    :param character_encoding: character encoding to use
    :type character_encoding: str

    :return: descriptor for the newly opened file stream
    :rtype: class
    """

    try:
        return open(self.target["target"], mode=mode, encoding=character_encoding)

    except (OSError, Exception):
        _log.fault("Failed to open <{}> for writing.".format(self.target["target"]), post_mortem=True)
def _matches_yara_signatures(evidence):
    """
    .. py:function:: _matches_yara_signatures(evidence)

    Checks the evidence against the registered YARA signature(s).

    :param evidence: absolute path to the evidence file
    :type evidence: str

    :return: True if the evidence matches at least one signature, False otherwise
    :rtype: bool
    """

    for ruleset in Identifier._iterate_signatures():
        try:
            rules = yara.compile(ruleset, includes=True, error_on_warning=True)

        except yara.SyntaxError:
            _log.exception("Syntax error in YARA ruleset <{}>.".format(ruleset))
            continue

        # Assumption: the function name implies a boolean check, so a match
        # against the compiled ruleset short-circuits to True.
        if rules.match(evidence):
            return True

    return False
def create_arborescence(self):
    """
    .. py:function:: create_arborescence(self)

    Creates the base arborescence for the current case.

    :param self: current class instance
    :type self: class
    """

    if os.path.exists(self.resources["case"]):
        if not self.arguments.overwrite:
            self._prompt("Overwrite existing object <{}> ? [y/N] ".format(self.resources["case"]))

        try:
            shutil.rmtree(self.resources["case"])
            _log.warning("Overwritten existing object <{}>.".format(self.resources["case"]))

        except (OSError, Exception):
            _log.fault("Failed to overwrite existing object <{}>.".format(self.resources["case"]), trace=True)

    self._create_local_directory(self.resources["case"])
def _tear_down(self):
    """
    .. py:function:: _tear_down(self)

    Cleanup method called on error.

    :param self: current class instance
    :type self: class
    """

    _log.warning("Waiting for concurrent process(es) to terminate before exiting.")

    self.pool.terminate()
    self.pool.join()
def __exit__(self, *args):
    """
    .. py:function:: __exit__(self, *args)

    Exit method called when leaving the context manager.

    :param self: current class instance
    :type self: class

    :param *args: list of argument(s)
    :type *args: class
    """

    _log.debug("Ended <{}> session <{}>.".format(self.module.__class__.__name__, self.module.__name__))
def main():
    """
    .. py:function:: main()

    Main entry point for the program.
    """

    try:
        _initialize(_argparser(_parser.Parser()))

    except SystemExit:
        pass

    except:
        _log.fault("Unhandled exception trapped. Quitting.", post_mortem=True)
def __del__(self):
    """
    .. py:function:: __del__(self)

    Destruction method that calls the teardown function(s).

    :param self: current class instance
    :type self: class
    """

    if _conf.KEEP_TEMPORARY_ARTIFACTS:
        _log.warning("Skipped temporary artifact(s) cleanup.")
        return

    self._tear_down()
def __init__(self, module):
    """
    .. py:function:: __init__(self, module)

    Initialization method for the class.

    :param self: current class instance
    :type self: class

    :param module: class inherited from the :code:`models` reference classes
    :type module: class
    """

    self.module = module

    _log.debug("Started <{}> session <{}>.".format(self.module.__class__.__name__, self.module.__name__))
def __enter__(self):
    """
    .. py:function:: __enter__(self)

    Callback method called when the context manager is invoked.

    :param self: current class instance
    :type self: class
    """

    if _log.console.disabled:
        _log._set_console_state(True)
        self.rollback["state"] = True

    if _log.console.level > self.level:
        _log.console.setLevel(self.level)
        self.rollback["level"] = True
def __init__(self, processes=(multiprocessing.cpu_count() or _conf.FALLBACK_PROCESSES)):
    """
    .. py:function:: __init__(self, processes=(multiprocessing.cpu_count() or _conf.FALLBACK_PROCESSES))

    Initialization method for the class.

    :param self: current class instance
    :type self: class

    :param processes: number of concurrent process(es) to spawn
    :type processes: int
    """

    self.processes = processes
    self.pool = multiprocessing.Pool(processes=self.processes, initializer=self._worker_initializer)

    _log.debug("Initialized pool of <{}> concurrent process(es).".format(self.processes))
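# Hedged usage sketch for this pool wrapper: _dispatch_jobs() uses it as a
# context manager, which suggests __enter__ exposes the underlying
# multiprocessing pool; that protocol and the `job` helper below are
# assumptions for illustration, not framework guarantees.
#
#   with _magic.Pool(processes=2) as pool:
#       pool.apply_async(job, (argument,))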
def __exit__(self, *args):
    """
    .. py:function:: __exit__(self, *args)

    Exit method called when leaving the context manager.

    :param self: current class instance
    :type self: class

    :param *args: list of argument(s)
    :type *args: class
    """

    if self.rollback["state"]:
        _log._set_console_state(self.reference["state"])

    if self.rollback["level"]:
        _log.console.setLevel(self.reference["level"])
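# Hedged usage sketch for the console-logging override implemented by the
# __enter__/__exit__ pair above, mirroring the call made in the reader's
# run() method: the console is temporarily re-enabled and its threshold
# lowered to the requested level, then rolled back on exit.
#
#   with _magic.OverrideConsoleLogging("WARNING"):
#       _log.warning("Guaranteed to reach the console.")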
def track_file(self, evidence):
    """
    .. py:function:: track_file(self, evidence)

    Checks and registers an evidence file for processing.

    :param self: current class instance
    :type self: class

    :param evidence: absolute path to the evidence file
    :type evidence: str
    """

    if os.path.isfile(evidence):
        self.resources["evidences"]["files"].append(evidence)
        _log.debug("Tracking file <{}>.".format(evidence))

    else:
        _log.warning("Evidence <{}> not found or invalid.".format(evidence))
def load_module(name, model, silent=False):
    """
    .. py:function:: load_module(name, model, silent=False)

    Dynamically loads a registered module.

    :param name: name of the module to load
    :type name: str

    :param model: reference class handle
    :type model: class

    :param silent: suppress the warning message
    :type silent: bool

    :return: module class handle
    :rtype: class
    """

    try:
        module = importlib.import_module("framework.modules.{}.{}".format(model.__name__.lower(), name))

    except ModuleNotFoundError as exc:
        _log.fault("Missing dependency <{0}>. Try <pip install {0}> or manually build the required module to fix the issue.".format(exc.name))

    try:
        _checker.check_module(module, model)

    except _errors.NotFound:
        _log.fault("No subclass found in module <{}.{}>. Quitting.".format(model.__name__.lower(), name), post_mortem=True)

    except _errors.ModuleInheritance:
        _log.fault("Module <{}.{}> not inheriting from the base class. Quitting.".format(model.__name__.lower(), name), post_mortem=True)

    except _errors.SystemNotSupported:
        if model.__name__ == _models.Pre.__name__:
            _log.fault("Module <{}.{}> does not support the current system <{}>. Quitting.".format(model.__name__.lower(), name, platform.system()))

        elif not silent:
            _log.warning("Module <{}.{}> incompatible with the current system <{}>. Ignoring.".format(model.__name__.lower(), name, platform.system()))

        return None

    return getattr(module, model.__name__)
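# Hedged usage sketch for load_module(): shows the expected calling pattern
# under the assumption that a "raw" module is registered for the _models.Out
# reference class. The module name and this helper are illustrative only.
def _example_load_module():
    Out = load_module("raw", _models.Out, silent=True)

    if Out is None:
        _log.warning("Module unavailable on this system.")

    return Out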
def expand_files(feed, recursive=False, include=_conf.DEFAULTS["INCLUSION_FILTERS"], exclude=_conf.DEFAULTS["EXCLUSION_FILTERS"]):
    """
    .. py:function:: expand_files(feed, recursive=False, include=_conf.DEFAULTS["INCLUSION_FILTERS"], exclude=_conf.DEFAULTS["EXCLUSION_FILTERS"])

    Iterates through file(s) and directory(ies) to retrieve the complete list of file(s).

    :param feed: list of files and directories
    :type feed: list

    :param recursive: search files recursively
    :type recursive: bool

    :param include: list of wildcard patterns to include
    :type include: list

    :param exclude: list of wildcard patterns to exclude
    :type exclude: list

    :return: flattened list of existing files
    :rtype: list
    """

    feedback = []

    for item in [os.path.abspath(_) for _ in feed]:
        if os.path.isfile(item):
            if matches_patterns(os.path.basename(item), wildcard_patterns=include):
                if not (exclude and matches_patterns(os.path.basename(item), wildcard_patterns=exclude)):
                    feedback.append(item)

        elif os.path.isdir(item):
            for file in [os.path.abspath(_) for _ in enumerate_matching_files(item, wildcard_patterns=include, recursive=recursive)]:
                if os.path.isfile(file) and matches_patterns(os.path.basename(file), wildcard_patterns=include):
                    if not (exclude and matches_patterns(os.path.basename(file), wildcard_patterns=exclude)):
                        feedback.append(file)

        else:
            _log.error("Object not found <{}>.".format(item))

    return feedback
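# Hedged usage sketch for expand_files(): collects executables under a
# directory tree while skipping temporary files. The path and patterns are
# illustrative assumptions, as is this helper.
def _example_expand_files():
    return expand_files(["/tmp/evidences"], recursive=True, include=["*.exe", "*.dll"], exclude=["*.tmp"])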
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the class.

    :param self: current class instance
    :type self: class
    """

    with self._open_output_file() as self.output:
        self._read_queue()

    with self.results[0], _magic.OverrideConsoleLogging("WARNING"):
        if self.results[1].value:
            _log.warning("Total of <{}> matching pattern(s). See <{}> for more details.".format(self.results[1].value, self.target["target"]))

        else:
            _log.info("No matching pattern(s) found.")

        self._store_matching_evidences()
def iterate_matches(target):
    """
    .. py:function:: iterate_matches(target)

    Iterates over match(es) and yields a Python dictionary representation of each.

    :param target: path to the file containing JSON-encoded match(es)
    :type target: str

    :return: dictionary representation of the match
    :rtype: dict
    """

    with open(target) as matches:
        for match in matches:
            try:
                yield _renderer.from_json(match)

            except (_errors.CharacterEncoding, _errors.InvalidObject):
                _log.error("Failed to interpret match <{}>.".format(match))
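# Hedged usage sketch for iterate_matches(): tallies match(es) per rule,
# assuming the input file follows the one-JSON-object-per-line layout
# produced by _append_json(). The "matches.json" path and this helper are
# assumptions for illustration.
def _example_tally_matches():
    tally = {}

    for match in iterate_matches("matches.json"):
        rule = match["match"]["rule"]
        tally[rule] = tally.get(rule, 0) + 1

    return tally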
def _create_arborescence(self):
    """
    .. py:function:: _create_arborescence(self)

    Creates the base arborescence for the current case.

    :param self: current class instance
    :type self: class
    """

    if os.path.exists(self.resources["case"]):
        if not self.arguments.overwrite:
            if not _interaction.prompt("Overwrite existing object <{}>?".format(self.resources["case"])):
                _log.fault("Aborted due to manual user interruption.")

        try:
            send2trash.send2trash(self.resources["case"])
            _log.warning("Overwritten existing object <{}>.".format(self.resources["case"]))

        except (send2trash.TrashPermissionError, OSError, Exception):
            _log.fault("Failed to overwrite existing object <{}>.".format(self.resources["case"]), post_mortem=True)

    _fs.create_local_directory(self.resources["case"])
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the class.

    :param self: current class instance
    :type self: class
    """

    count = 0

    for name, ruleset in _loader.iterate_rulesets():
        if self._compile_ruleset(name, ruleset):
            count += 1

    if not count:
        _log.fault("No YARA ruleset(s) loaded. Quitting.")

    _log.info("Applying a total of <{}> YARA ruleset(s).".format(count))
    del count

    if not self._dispatch_jobs():
        _log.warning("Skipping <{}> module(s) invocation.".format(_models.Post.__name__))
        return

    self._invoke_post_modules()
def _create_local_directory(self, directory, mask=0o700):
    """
    .. py:function:: _create_local_directory(self, directory, mask=0o700)

    Creates a directory on the filesystem.

    :param self: current class instance
    :type self: class

    :param directory: absolute path to the directory to create
    :type directory: str

    :param mask: permissions bit mask to apply for the newly created :code:`directory` and its parents if necessary
    :type mask: oct
    """

    try:
        os.makedirs(directory, mode=mask)
        _log.debug("Created local directory <{}>.".format(directory))

    except FileExistsError:
        _log.fault("Failed to create local directory due to existing object <{}>.".format(directory), trace=True)

    except (OSError, Exception):
        _log.fault("Failed to create local directory <{}>.".format(directory), trace=True)
def _store_matching_evidences(self):
    """
    .. py:function:: _store_matching_evidences(self)

    Saves the matching evidence(s) to the specified storage directory.

    :param self: current class instance
    :type self: class
    """

    for evidence in self.results[2]:
        if not os.path.isdir(self.target["storage"]):
            _fs.create_local_directory(self.target["storage"])

        try:
            if _conf.NEUTRALIZE_MATCHING_EVIDENCES:
                storage_path = os.path.join(self.target["storage"], "{}.{}".format(os.path.basename(evidence), _meta.__package__))

            else:
                storage_path = os.path.join(self.target["storage"], os.path.basename(evidence))

            shutil.copy2(evidence, storage_path)

            if _conf.NEUTRALIZE_MATCHING_EVIDENCES:
                os.chmod(storage_path, stat.S_IMODE(os.lstat(storage_path).st_mode) & ~stat.S_IEXEC)

            _log.debug("Saved {}matching evidence <{}> as <{}>.".format("and neutralized " if _conf.NEUTRALIZE_MATCHING_EVIDENCES else "", os.path.basename(evidence), storage_path))

        except (OSError, shutil.Error, Exception):
            _log.exception("Failed to save matching evidence <{}> as <{}>.".format(os.path.basename(evidence), storage_path))
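# Hedged aside on neutralization: the chmod above only clears the owner
# execute bit (stat.S_IEXEC is an alias of stat.S_IXUSR). A stricter
# variant, offered as an assumption about intent rather than documented
# framework behavior, would clear all three execute bits:
#
#   os.chmod(storage_path, stat.S_IMODE(os.lstat(storage_path).st_mode) & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))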
def create_local_directory(directory, mask=0o700):
    """
    .. py:function:: create_local_directory(directory, mask=0o700)

    Creates a local case directory on the filesystem.

    :param directory: absolute path to the directory to create
    :type directory: str

    :param mask: permissions bit mask to apply for the newly created :code:`directory` and its parents if necessary
    :type mask: oct
    """

    try:
        os.makedirs(directory, mode=mask)
        _log.debug("Created local directory <{}>.".format(directory))

    except FileExistsError:
        _log.fault("Failed to create local directory due to existing object <{}>.".format(directory), post_mortem=True)

    except (OSError, Exception):
        _log.fault("Failed to create local directory <{}>.".format(directory), post_mortem=True)
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the class.

    :param self: current class instance
    :type self: class
    """

    loaded = {
        "rulesets": 0,
        "rules": 0
    }

    for name, ruleset in _loader.iterate_rulesets():
        status, count = self._compile_ruleset(name, ruleset)

        if status:
            loaded["rulesets"] += 1
            loaded["rules"] += count

    if not loaded["rulesets"]:
        _log.fault("No YARA ruleset(s) loaded. Quitting.")

    _log.info("Applying a total of <{}> YARA rule(s) from <{}> ruleset(s).".format(loaded["rules"], loaded["rulesets"]))
    del loaded

    if not self._dispatch_jobs():
        _log.warning("Skipping <{}> module(s) invocation.".format(_models.Post.__name__))
        return

    self._invoke_post_modules()
def _iterate_existing_files(self, evidences):
    """
    .. py:function:: _iterate_existing_files(self, evidences)

    Iterates over file(s) and yields the corresponding path if existing.

    :param self: current class instance
    :type self: class

    :param evidences: list of file(s) path(s)
    :type evidences: list

    :return: path to the existing file(s)
    :rtype: str
    """

    for file in evidences:
        if not os.path.isfile(file):
            _log.error("File not found <{}>.".format(file))
            continue

        yield file
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the module.

    :param self: current class instance
    :type self: class
    """

    if self.case.arguments._inline_password:
        _log.debug("Using inline password <{}> to unpack archive(s).".format(self.case.arguments._inline_password))

    elif self.case.arguments._password:
        self._password = _interaction.password_prompt("Unpacking password: ")

    # Assumption: the --no-recursion flag is exposed as `_no_recursion`,
    # mirroring the `_inline_password`/`_password` naming convention above.
    if self.case.arguments._no_recursion:
        _log.debug("Recursive unpacking manually disabled using --no-recursion.")

    tmp = self.case.require_temporary_directory()

    for evidence in self.feed:
        self.recursive_inflate(evidence, tmp, password=(self.case.arguments._inline_password if not hasattr(self, "_password") else self._password))

    for evidence in _fs.expand_files([tmp], recursive=True, include=self.case.arguments._include, exclude=self.case.arguments._exclude):
        _log.debug("Found evidence <{}>.".format(evidence))
def render_modules(package, model):
    """
    .. py:function:: render_modules(package, model)

    Renders available module(s) name(s) as a list.

    :param package: package handle to import module(s) from
    :type package: class

    :param model: reference module class handle
    :type model: class

    :return: available module(s) in :code:`package`
    :rtype: list
    """

    try:
        _checker.check_package(package)

    except _errors.InvalidPackage:
        _log.fault("Invalid package <{}>.".format(package), post_mortem=True)

    return [os.path.splitext(name)[0] for name, _ in Loader.iterate_modules(package, model, silent=True)]
def track_files(self, evidences, include=[], exclude=[]):
    """
    .. py:function:: track_files(self, evidences, include=[], exclude=[])

    Checks and registers multiple evidence files for processing.

    :param self: current class instance
    :type self: class

    :param evidences: list of absolute path(s) to the evidence file(s)
    :type evidences: list

    :param include: list of wildcard pattern(s) to include
    :type include: list

    :param exclude: list of wildcard pattern(s) to exclude
    :type exclude: list
    """

    evidences = [os.path.abspath(evidence) for evidence in evidences]

    for evidence in self._iterate_existing_files(evidences):
        if include and not _fs.matches_patterns(os.path.basename(evidence), wildcard_patterns=include):
            _log.debug("Ignoring evidence <{}> not matching inclusion pattern(s) <{}>.".format(evidence, include))
            continue

        if exclude and _fs.matches_patterns(os.path.basename(evidence), wildcard_patterns=exclude):
            _log.debug("Ignoring evidence <{}> matching exclusion pattern(s) <{}>.".format(evidence, exclude))
            continue

        self.track_file(evidence)
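# Hedged usage sketch for track_files(): registers a batch of evidences
# while keeping only PDF documents. The paths and pattern are illustrative
# assumptions.
#
#   case.track_files(["/tmp/report.pdf", "/tmp/dropper.exe"], include=["*.pdf"])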
def _append_json(self, data):
    """
    .. py:function:: _append_json(self, data)

    Encodes the match data as JSON and appends it to the output file.

    :param self: current class instance
    :type self: class

    :param data: dictionary containing the match data
    :type data: dict
    """

    try:
        self.output.write("{}\n".format(_renderer.to_json(data)))

    except _errors.CharacterEncoding:
        _log.error("Cannot decode data from <{}>.".format(data["target"]["identifier"]))

    except _errors.InvalidObject:
        _log.exception("Exception raised while retrieving matching data from <{}>.".format(data["target"]["identifier"]))
def _dispatch_jobs(self):
    """
    .. py:function:: _dispatch_jobs(self)

    Dispatches the processing task(s) to the subprocess(es).

    :param self: current class instance
    :type self: class

    :return: number of match(es)
    :rtype: int
    """

    with multiprocessing.Manager() as manager:
        queue = manager.Queue()
        results = (multiprocessing.Lock(), multiprocessing.Value(ctypes.c_int, 0), manager.list())

        reader = multiprocessing.Process(target=_reader.Reader(queue, results, {
            "target": self.case.resources["matches"],
            "storage": self.case.resources["storage"],
            "format": self.case.arguments.format
        }).run)

        reader.daemon = True
        reader.start()

        _log.debug("Started reader subprocess to consume queue result(s).")

        with _magic.Pool(processes=self.case.arguments.processes) as pool:
            for file in self.case.resources["evidences"]["files"]:
                if os.path.getsize(file) > self.case.arguments.max_size:
                    _log.warning("Evidence <{}> exceeds the maximum size. Ignoring evidence. Try changing --max-size to override this behavior.".format(file))
                    continue

                pool.starmap_async(
                    _processors.File(self.case.arguments.hash_algorithms, self.case.arguments.callbacks, queue, self.case.arguments.fast).run,
                    [(file, self.buffers)],
                    error_callback=_log.inner_exception)

                _log.debug("Mapped concurrent job to consume evidence <{}>.".format(file))

        queue.put(_codes.DONE)

        with _magic.Hole(KeyboardInterrupt, action=lambda: _log.fault("Aborted due to manual user interruption <SIGINT>.")):
            reader.join()

        return results[1].value
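# Minimal self-contained sketch of the producer/consumer layout used by
# _dispatch_jobs() above, assuming only the standard library: pool workers
# push results onto a managed queue while a dedicated reader process drains
# it until a sentinel arrives, which is the role _codes.DONE plays here.
# All names below are illustrative.
#
#   import multiprocessing
#
#   _DONE = "DONE"
#
#   def _consume(queue):
#       while True:
#           item = queue.get()
#
#           if item == _DONE:
#               break
#
#           print("consumed <{}>".format(item))
#
#   def _produce(queue, value):
#       queue.put(value * value)
#
#   if __name__ == "__main__":
#       with multiprocessing.Manager() as manager:
#           queue = manager.Queue()
#
#           reader = multiprocessing.Process(target=_consume, args=(queue,))
#           reader.daemon = True
#           reader.start()
#
#           with multiprocessing.Pool(processes=2) as pool:
#               pool.starmap(_produce, [(queue, _) for _ in range(8)])
#
#           queue.put(_DONE)
#           reader.join()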