def _matches_yara_signatures(evidence):
    """
    .. py:function:: _matches_yara_signatures(evidence)

    Checks whether the given evidence matches at least one registered YARA ruleset.

    :param evidence: absolute path to the evidence to check
    :type evidence: str

    :return: True if any ruleset matches the evidence, False otherwise
    :rtype: bool
    """

    for ruleset in Identifier._iterate_signatures():
        try:
            rules = yara.compile(ruleset, includes=True, error_on_warning=True)

        except yara.SyntaxError:
            _log.exception("Syntax error in YARA ruleset <{}>.".format(ruleset))
            continue

        # The original body was truncated here; matching the compiled rules
        # against the evidence is the assumed intent given the function name.
        if rules.match(evidence):
            return True

    return False

def _compile_ruleset(self, name, ruleset):
    """
    .. py:function:: _compile_ruleset(self, name, ruleset)

    Compiles and saves YARA rule(s) to the dictionary to be passed to the asynchronous job(s).

    :param self: current class instance
    :type self: class

    :param name: name of the ruleset file to compile the rule(s) from
    :type name: str

    :param ruleset: absolute path to the ruleset file to compile the rule(s) from
    :type ruleset: str

    :return: tuple containing the final status of the compilation and the number of successfully loaded rule(s)
    :rtype: bool, int
    """

    count = 0

    try:
        buffer = io.BytesIO()

        rules = yara.compile(
            ruleset,
            includes=_conf.YARA_INCLUDES,
            error_on_warning=(not self.case.arguments.ignore_warnings))

        rules.save(file=buffer)
        self.buffers[ruleset] = buffer

        count += sum(1 for _ in rules)

        _log.debug("Pre-compiled YARA ruleset <{}> in memory with a total of <{}> valid rule(s).".format(name, count))
        return True, count

    except yara.SyntaxError:
        _log.exception("Syntax error in YARA ruleset <{}>.".format(ruleset))

    except Exception:
        # yara.Error subclasses Exception, so a single handler covers both.
        _log.exception("Failed to pre-compile ruleset <{}>.".format(ruleset))

    return False, count

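# Hedged sketch (not part of the original module): a worker rebuilding a
# yara.Rules object from the in-memory buffer saved by _compile_ruleset().
# yara-python's yara.load(file=...) is the counterpart of rules.save(file=...);
# the buffer must be rewound first. The helper name is illustrative only.
def _load_precompiled_ruleset(buffer):
    buffer.seek(0)                  # rewind past the bytes written by save()
    return yara.load(file=buffer)   # deserialize the precompiled rule(s)
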
def _store_matching_evidences(self):
    """
    .. py:function:: _store_matching_evidences(self)

    Saves the matching evidence(s) to the specified storage directory.

    :param self: current class instance
    :type self: class
    """

    for evidence in self.results[2]:
        if not os.path.isdir(self.target["storage"]):
            _fs.create_local_directory(self.target["storage"])

        # Compute the destination path outside the try block so it is always
        # bound when the exception handler references it.
        storage_path = os.path.join(self.target["storage"], os.path.basename(evidence))

        if _conf.NEUTRALIZE_MATCHING_EVIDENCES:
            # Append the package extension to break file associations on the copy.
            storage_path = "{}.{}".format(storage_path, _meta.__package__)

        try:
            shutil.copy2(evidence, storage_path)

            if _conf.NEUTRALIZE_MATCHING_EVIDENCES:
                # Strip the owner execute bit from the stored copy.
                os.chmod(storage_path, stat.S_IMODE(os.lstat(storage_path).st_mode) & ~stat.S_IEXEC)

            _log.debug("Saved {}matching evidence <{}> as <{}>.".format(
                "and neutralized " if _conf.NEUTRALIZE_MATCHING_EVIDENCES else "",
                os.path.basename(evidence),
                storage_path))

        except Exception:
            # OSError and shutil.Error both derive from Exception.
            _log.exception("Failed to save matching evidence <{}> as <{}>.".format(
                os.path.basename(evidence), storage_path))

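# Hedged sketch: the neutralization step above in isolation. Note that
# stat.S_IEXEC is the owner execute bit (an alias of stat.S_IXUSR); the
# group and other execute bits are left untouched by the original code,
# which this helper reproduces. The helper name is illustrative only.
def _strip_owner_execute_bit(path):
    mode = stat.S_IMODE(os.lstat(path).st_mode)
    os.chmod(path, mode & ~stat.S_IEXEC)
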
def _tear_down(self):
    """
    .. py:function:: _tear_down(self)

    Cleanup method called on class destruction that gets rid of the temporary artifact(s).

    :param self: current class instance
    :type self: class
    """

    for artifact in self.resources["temporary"]:
        try:
            shutil.rmtree(artifact)
            _log.debug("Removed temporary artifact <{}>.".format(artifact))

        except FileNotFoundError:
            _log.debug("Temporary artifact not found <{}>.".format(artifact))

        except Exception:
            # OSError subclasses Exception; a single handler is enough here.
            _log.exception("Failed to remove temporary artifact <{}>.".format(artifact))

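# Hedged sketch: shutil.rmtree() raises NotADirectoryError on plain files,
# so if temporary artifacts can be files as well as directories, a
# dispatching helper along these lines would cover both cases. The helper
# name is illustrative only.
def _remove_artifact(artifact):
    if os.path.isdir(artifact):
        shutil.rmtree(artifact)
    else:
        os.remove(artifact)
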
def _append_json(self, data):
    """
    .. py:function:: _append_json(self, data)

    Encodes the match data to JSON and appends it to the output file.

    :param self: current class instance
    :type self: class

    :param data: dictionary containing the match data
    :type data: dict
    """

    try:
        self.output.write("{}\n".format(_renderer.to_json(data)))

    except _errors.CharacterEncoding:
        _log.error("Cannot decode data from <{}>.".format(data["target"]["identifier"]))

    except InvalidObject:
        _log.exception("Exception raised while retrieving matching data from <{}>.".format(data["target"]["identifier"]))

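# Hedged sketch: shape of one line appended by _append_json(), inferred
# from the match dictionaries built in _consume_evidence() below:
#
#   {"origin": "...",
#    "target": {"type": "file", "identifier": "/path/to/evidence"},
#    "match": {"timestamp": "...", "rule": "...", "meta": {...},
#              "namespace": "...", "tags": [...], "hashes": {...},
#              "strings": [{"offset": 0, "reference": "$s0", "literal": "..."}]}}
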
def run(self):
    """
    .. py:function:: run(self)

    Main entry point for the module.

    :param self: current class instance
    :type self: class
    """

    tmp = self.case.require_temporary_directory()

    for evidence in self.feed:
        try:
            mail = eml_parser.eml_parser.decode_email(
                evidence, include_raw_body=True, include_attachment_data=True)

            # eml_parser omits the "attachment" key when the mail carries none.
            attachments = mail.get("attachment", [])

            _log.info("Extracted <{}> attachment(s) from <{}>.".format(len(attachments), evidence))

        except Exception:
            _log.exception("Failed to extract data from <{}>. Ignoring evidence.".format(evidence))
            continue

        output_directory = os.path.join(tmp, os.path.basename(evidence))

        if not os.path.isdir(output_directory):
            _fs.create_local_directory(output_directory)

        for idx, attachment in enumerate(attachments):
            if not attachment["filename"]:
                # Fall back to the attachment's position when it carries no name.
                attachment["filename"] = str(idx)

            if not _fs.matches_patterns(attachment["filename"], self.case.arguments._include):
                _log.warning("Ignoring attachment <{}> not matching inner inclusion pattern(s).".format(attachment["filename"]))
                continue

            if _fs.matches_patterns(attachment["filename"], self.case.arguments._exclude):
                _log.warning("Ignoring attachment <{}> matching inner exclusion pattern(s).".format(attachment["filename"]))
                continue

            output_path = os.path.join(output_directory, attachment["filename"])

            with open(output_path, "wb") as out:
                out.write(base64.b64decode(attachment["raw"]))

            _log.debug("Attachment <{}> extracted from <{}> stored locally as <{}>.".format(
                attachment["filename"], evidence, output_path))
            self.case.track_file(output_path)

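# Hedged sketch: the extraction loop above reduced to a standalone helper,
# assuming the same eml_parser output layout (base64-encoded payload under
# the "raw" key of each "attachment" entry). The helper name is
# illustrative only.
def _dump_attachments(evidence, destination):
    mail = eml_parser.eml_parser.decode_email(
        evidence, include_raw_body=True, include_attachment_data=True)
    for idx, attachment in enumerate(mail.get("attachment", [])):
        name = attachment["filename"] or str(idx)
        with open(os.path.join(destination, name), "wb") as out:
            out.write(base64.b64decode(attachment["raw"]))
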
def _consume_evidence(self):
    """
    .. py:function:: _consume_evidence(self)

    Main loop that processes the evidence(s) and formats the match(es).

    :param self: current class instance
    :type self: class
    """

    for _, buffer in self.buffers.items():
        try:
            for match in buffer.match(self.evidence, timeout=_conf.YARA_MATCH_TIMEOUT, fast=self.fast):
                hashes = {}

                for algorithm in self.algorithms:
                    hashes[algorithm] = self._compute_hash(self.evidence, algorithm=algorithm)

                for action in [self.queue.put, self._invoke_callbacks]:
                    action({
                        "origin": _meta.__package__,
                        "target": {
                            "type": "file",
                            "identifier": self.evidence
                        },
                        "match": {
                            "timestamp": _rendering.timestamp(),
                            "rule": match.rule,
                            "meta": match.meta,
                            "namespace": match.namespace,
                            "tags": match.tags,
                            "hashes": hashes,
                            "strings": [{
                                "offset": string[0],
                                "reference": string[1],
                                "literal": string[2].decode("utf-8", "backslashreplace")
                            } for string in match.strings]
                        }
                    })

        except yara.TimeoutError:
            _log.warning("Timeout exceeded for evidence <{}>.".format(self.evidence))
            continue

        except Exception:
            # yara.Error subclasses Exception; a single handler logs both.
            _log.exception("YARA exception raised during processing of evidence <{}>.".format(self.evidence))
            continue

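# Hedged sketch: the comprehension above assumes the (offset, identifier,
# data) tuples that yara-python exposed in match.strings before 4.3.0;
# from 4.3.0 on, match.strings holds StringMatch objects carrying
# StringMatchInstance entries instead. A compatibility shim could look
# like this (helper name illustrative only):
def _iter_string_matches(match):
    for string in match.strings:
        if isinstance(string, tuple):       # yara-python < 4.3
            yield string[0], string[1], string[2]
        else:                               # yara-python >= 4.3
            for instance in string.instances:
                yield instance.offset, string.identifier, instance.matched_data
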
def _process_evidence(self):
    """
    .. py:function:: _process_evidence(self)

    Main loop that processes the evidence(s) and formats the match(es).

    :param self: current class instance
    :type self: class
    """

    for _, buffer in self.buffers.items():
        try:
            for match in buffer.match(pid=self.evidence, timeout=_conf.YARA_MATCH_TIMEOUT, fast=self.fast):
                for action in [self.queue.put, self._invoke_callbacks]:
                    action({
                        "origin": _meta.__package__,
                        "target": {
                            "type": "process",
                            "identifier": self.evidence
                        },
                        "match": {
                            # Same helper as the file-based path above; pendulum's
                            # formatter= keyword was removed in pendulum 2.x.
                            "timestamp": _rendering.timestamp(),
                            "rule": match.rule,
                            "meta": match.meta,
                            "namespace": match.namespace,
                            "tags": match.tags,
                            "hashes": [],
                            "strings": [{
                                "offset": string[0],
                                "reference": string[1],
                                "literal": string[2].decode("utf-8", "backslashreplace")
                            } for string in match.strings]
                        }
                    })

        except yara.TimeoutError:
            _log.warning("Timeout exceeded for live process matching PID <{}>.".format(self.evidence))
            continue

        except Exception:
            # Collapses the two original handlers (yara.Error and Exception)
            # that logged the exact same message.
            _log.exception("YARA exception raised during processing of live process matching PID <{}>.".format(self.evidence))
            continue

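# Hedged sketch: live-process matching outside the pipeline. yara-python's
# Rules.match() accepts a pid= keyword, but attaching to another process
# generally requires elevated privileges (root on Linux, SeDebugPrivilege
# on Windows). Values are illustrative only.
#
#   rules = yara.compile(source='rule stub { condition: false }')
#   matches = rules.match(pid=1234, timeout=60)
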
def recursive_inflate(self, archive, output_directory, level=0, password=None):
    """
    .. py:function:: recursive_inflate(self, archive, output_directory, level=0, password=None)

    Recursively inflates the given archive and its nested sub-archive(s).

    :param self: current class instance
    :type self: class

    :param archive: absolute path to the archive to inflate
    :type archive: str

    :param output_directory: absolute path to the directory to inflate the archive to
    :type output_directory: str

    :param level: current unpacking depth
    :type level: int

    :param password: password to decrypt the archive with
    :type password: str
    """

    if level > self.case.arguments._level:
        _log.warning("Unpacking level limit <{}> exceeded. Stopped unpacking.".format(self.case.arguments._level))
        return

    _log.debug("Inflating {}archive <{}> to temporary directory <{}>.".format(
        "level {} sub".format(level) if level else "base ",
        archive,
        output_directory))

    sub_directory = os.path.join(output_directory, os.path.basename(archive))

    try:
        with zipfile.ZipFile(archive) as z:
            z.extractall(path=sub_directory, pwd=(password.encode() if password else password))

    except zipfile.BadZipFile:
        _log.error("Bad file header. Cannot inflate evidence <{}>. Try to filter out non-zip file(s) using --include \"*.zip\" \".*.zip\".".format(archive))
        return

    except RuntimeError as exc:
        if "password required" in str(exc):
            _log.error("Archive <{}> seems to be encrypted. Please specify a password using --password or --inline-password.".format(archive))
        elif "Bad password" in str(exc):
            _log.error("Password {}seems to be incorrect for archive <{}>. Please specify another password using --password or --inline-password.".format(
                "<{}> ".format(self.case.arguments._inline_password) if not hasattr(self, "_password") else "",
                archive))
        else:
            _log.exception("Runtime exception raised while unpacking archive <{}>.".format(archive))
        return

    except KeyboardInterrupt:
        sys.stderr.write("\n")
        _log.fault("Aborted due to manual user interruption.")

    except Exception:
        _log.exception("Exception raised while unpacking archive <{}>.".format(archive))

    if self.case.arguments._no_recursion:
        return

    for subarchive in _fs.enumerate_matching_files(
            sub_directory,
            wildcard_patterns=(
                ["*.{}".format(_) for _ in self.__associations__["extensions"]] +
                [".*.{}".format(_) for _ in self.__associations__["extensions"]]
                if hasattr(self, "__associations__") and "extensions" in self.__associations__ else None),
            mime_types=(
                self.__associations__["mime"]
                if hasattr(self, "__associations__") and "mime" in self.__associations__ else None),
            recursive=True):
        self.recursive_inflate(subarchive, sub_directory, level=(level + 1), password=password)

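# Hedged sketch: typical invocation of recursive_inflate() from a module's
# run() method, mirroring the feed/temporary-directory pattern used
# elsewhere in this section. Attribute names are illustrative only.
#
#   tmp = self.case.require_temporary_directory()
#   for evidence in self.feed:
#       self.recursive_inflate(evidence, tmp,
#                              password=self.case.arguments._inline_password)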