def expand_files(feed, recursive=False, include=_conf.DEFAULTS["INCLUSION_FILTERS"], exclude=_conf.DEFAULTS["EXCLUSION_FILTERS"]):
    """
    .. py:function:: expand_files(feed, recursive=False, include=_conf.DEFAULTS["INCLUSION_FILTERS"], exclude=_conf.DEFAULTS["EXCLUSION_FILTERS"])

    Iterates through file(s) and directory(ies) to retrieve the complete list of file(s).

    :param feed: list of files and directories
    :type feed: list

    :param recursive: search files recursively
    :type recursive: bool

    :param include: list of wildcard patterns to include
    :type include: list

    :param exclude: list of wildcard patterns to exclude
    :type exclude: list

    :return: flattened list of existing files
    :rtype: list
    """

    feedback = []

    for item in [os.path.abspath(_) for _ in feed]:
        if os.path.isfile(item):
            if matches_patterns(os.path.basename(item), wildcard_patterns=include):
                if not exclude or not matches_patterns(os.path.basename(item), wildcard_patterns=exclude):
                    feedback.append(item)

        elif os.path.isdir(item):
            for file in [os.path.abspath(_) for _ in enumerate_matching_files(item, wildcard_patterns=include, recursive=recursive)]:
                if os.path.isfile(file) and matches_patterns(os.path.basename(file), wildcard_patterns=include):
                    if not exclude or not matches_patterns(os.path.basename(file), wildcard_patterns=exclude):
                        feedback.append(file)

        else:
            _log.error("Object not found <{}>.".format(item))

    return feedback
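# Usage sketch (hypothetical paths and patterns, shown for illustration only):
#
#   expand_files(["./evidences", "./report.zip"], recursive=True,
#                include=["*.zip", "*.rar"], exclude=["*.tmp"])
#   # -> absolute paths of the existing files that match at least one
#   #    inclusion pattern and none of the exclusion patterns.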
def iterate_matches(target):
    """
    .. py:function:: iterate_matches(target)

    Iterates over match(es) and yields a Python dictionary representation of each.

    :param target: path to the file containing JSON-encoded match(es)
    :type target: str

    :return: dictionary representation of the match
    :rtype: dict
    """

    with open(target) as matches:
        for match in matches:
            try:
                yield _renderer.from_json(match)

            except (_errors.CharacterEncoding, _errors.InvalidObject):
                _log.error("Failed to interpret match <{}>.".format(match))
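# Usage sketch (assumes a JSON Lines file holding one serialized match per
# line; the path and key names below are illustrative):
#
#   for match in iterate_matches("matches.json"):
#       print(match["target"]["identifier"])
#
# Malformed lines are logged and skipped, so iteration continues past them.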
def _iterate_existing_files(self, evidences):
    """
    .. py:function:: _iterate_existing_files(self, evidences)

    Iterates over file(s) and yields the corresponding path if existing.

    :param self: current class instance
    :type self: class

    :param evidences: list of file path(s)
    :type evidences: list

    :return: path to the existing file(s)
    :rtype: str
    """

    for file in evidences:
        if not os.path.isfile(file):
            _log.error("File not found <{}>.".format(file))
            continue

        yield file
def track_process(self, pid):
    """
    .. py:function:: track_process(self, pid)

    Checks whether a process exists on the local machine and registers it for processing.

    :param self: current class instance
    :type self: class

    :param pid: process identifier
    :type pid: int
    """

    if not isinstance(pid, int):
        _log.error("Invalid PID format <{}>.".format(pid))
        return

    if psutil.pid_exists(pid):
        self.resources["evidences"]["processes"].append(pid)
        _log.debug("Tracking live process matching PID <{}>.".format(pid))

    else:
        _log.warning("Process <{}> not found.".format(pid))
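# Usage sketch (hypothetical PID; psutil.pid_exists() only checks the local
# machine, and non-integer or dead PIDs are rejected with a log entry):
#
#   self.track_process(1337)
#   # -> appends 1337 to self.resources["evidences"]["processes"] if the
#   #    process is alive.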
def _append_json(self, data):
    """
    .. py:function:: _append_json(self, data)

    Encodes the match data to JSON and appends it to the output file.

    :param self: current class instance
    :type self: class

    :param data: dictionary containing the match data
    :type data: dict
    """

    try:
        self.output.write("{}\n".format(_renderer.to_json(data)))

    except _errors.CharacterEncoding:
        _log.error("Cannot decode data from <{}>.".format(data["target"]["identifier"]))

    except _errors.InvalidObject:
        _log.exception("Exception raised while retrieving matching data from <{}>.".format(data["target"]["identifier"]))
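# Output format sketch: each call appends one JSON document per line
# (JSON Lines) to self.output. The record below is hypothetical; only the
# "target"/"identifier" keys are relied upon by the error handling above:
#
#   self._append_json({
#       "target": {"identifier": "sample.bin"},
#       "rule": "suspicious_strings"
#   })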
def recursive_inflate(self, archive, output_directory, level=0, password=None):
    """
    .. py:function:: recursive_inflate(self, archive, output_directory, level=0, password=None)

    Recursively unpacks the given archive and its nested archive(s) to a temporary directory.

    :param self: current class instance
    :type self: class

    :param archive: path to the archive to unpack
    :type archive: str

    :param output_directory: path to the directory receiving the unpacked content
    :type output_directory: str

    :param level: current nesting level
    :type level: int

    :param password: password to use for encrypted archive(s)
    :type password: str
    """

    # Stop when the maximum nesting level has been reached.
    if level > self.case.arguments._level:
        _log.warning("Maximum unpacking level <{}> exceeded. Stopped unpacking.".format(self.case.arguments._level))
        return

    _log.debug("Inflating {}archive <{}> to temporary directory <{}>.".format("level {} sub".format(level) if level else "base ", archive, output_directory))

    sub_directory = os.path.join(output_directory, os.path.basename(archive))

    try:
        with zipfile.ZipFile(archive) as z:
            z.extractall(path=sub_directory, pwd=(password.encode() if password else password))

    except zipfile.BadZipFile:
        _log.error("Bad file header. Cannot inflate evidence <{}>. Try to filter out non-zip file(s) using --include \"*.zip\" \".*.zip\".".format(archive))
        return

    except RuntimeError as exc:
        if "password required" in str(exc):
            _log.error("Archive <{}> seems to be encrypted. Please specify a password using --password or --inline-password.".format(archive))

        elif "Bad password" in str(exc):
            _log.error("Password {}seems to be incorrect for archive <{}>. Please specify another password using --password or --inline-password.".format("<{}> ".format(self.case.arguments._inline_password) if not hasattr(self, "_password") else "", archive))

        else:
            _log.exception("Runtime exception raised while unpacking archive <{}>.".format(archive))

        return

    except KeyboardInterrupt:
        sys.stderr.write("\n")
        _log.fault("Aborted due to manual user interruption.")

    except Exception:
        _log.exception("Exception raised while unpacking archive <{}>.".format(archive))

    if self.case.arguments._no_recursion:
        return

    # Recurse into nested archive(s) matching the registered extension(s) and MIME type(s).
    for subarchive in _fs.enumerate_matching_files(
            sub_directory,
            wildcard_patterns=(["*.{}".format(_) for _ in self.__associations__["extensions"]] + [".*.{}".format(_) for _ in self.__associations__["extensions"]] if hasattr(self, "__associations__") and "extensions" in self.__associations__ else None),
            mime_types=(self.__associations__["mime"] if hasattr(self, "__associations__") and "mime" in self.__associations__ else None),
            recursive=True):

        self.recursive_inflate(subarchive, sub_directory, level=(level + 1), password=password)
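# Usage sketch (hypothetical archive path and output directory): unpacking
# depth is bounded by self.case.arguments._level and recursion into nested
# archive(s) is skipped when self.case.arguments._no_recursion is set.
#
#   self.recursive_inflate("/tmp/evidence.zip", "/tmp/case", password="infected")
#   # -> extracts to /tmp/case/evidence.zip/ then descends into any nested
#   #    archive(s) found there, incrementing the nesting level each time.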