Example #1
    def save_payload(self, payload, connector):
        """
        Save a payload using the designated connector

        :param bytes payload: Payload to pass to the connector for saving
        :param str connector: Connector plugin to save the payload with

        """

        arc = get_hashes(payload)

        # Add some additional metadata up front so we don't need to
        # generate it later.
        arc['ssdeep'] = get_ssdeep(payload)
        arc['content-type'] = get_magic(payload)

        # Make sure our connector is loaded
        self.load_connector(connector)

        # Save our payload to the appropriate plugin
        res = self.connectors[connector].save(payload, archive=True, **arc)

        arc['conn_id'] = res

        return arc
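
The get_hashes() and get_magic() helpers come from stoQ's plugin utilities. For readers without stoQ installed, the hashing side can be approximated with the standard library; a minimal sketch, not the framework's actual implementation:

    import hashlib

    def get_hashes(payload):
        # Map algorithm names to hex digests, mirroring the dict that
        # save_payload() builds up as `arc`
        return {algo: hashlib.new(algo, payload).hexdigest()
                for algo in ('md5', 'sha1', 'sha256', 'sha512')}

get_magic() can likewise be approximated with the python-magic package via magic.from_buffer(payload, mime=True), though exactly how stoQ wraps libmagic is an assumption here.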
Example #2
    def attachment_metadata(self, payload=None, filename=None, uuid=None):
        # Make sure we have a payload, otherwise return None
        if not payload:
            return None

        attachment_json = {}

        # Generate hashes
        attachment_json['md5'] = get_md5(payload)
        attachment_json['sha1'] = get_sha1(payload)
        attachment_json['sha256'] = get_sha256(payload)
        attachment_json['sha512'] = get_sha512(payload)
        attachment_json['ssdeep'] = get_ssdeep(payload)

        # Get magic type
        attachment_json['magic'] = get_magic(payload)

        # Get size
        attachment_json['size'] = len(payload)

        # Define the filename as provided
        attachment_json['filename'] = filename

        # Make sure we have the parent uuid list generated with the original
        # email; guard against None so .copy() doesn't fail
        attachment_json['uuid'] = uuid.copy() if uuid else []

        # Generate a unique ID
        attachment_json['uuid'].append(self.stoq.get_uuid)

        return attachment_json
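
Note that the `uuid` argument is a lineage list, not a single value: copying the parent's list and appending a fresh ID leaves the original email's chain intact while giving each attachment its own entry. A self-contained sketch of the idea, assuming stoQ's `get_uuid` returns a fresh UUID string:

    import uuid as uuid_module  # aliased to avoid clashing with the parameter name

    parent = [str(uuid_module.uuid4())]     # lineage of the original email
    child = parent.copy()                   # attachment inherits the lineage
    child.append(str(uuid_module.uuid4()))  # plus its own unique ID
    # parent is unchanged; child == [email_uuid, attachment_uuid]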
Example #3
    def do_results(self, input):
        """
        results
            Display results of previous plugin run
        """

        try:
            # This is a mess. Plugins can produce a dict(), str(), bytes(),
            # or a list(). If it is a list(), there may be tuple()s in it.
            # Let's go over them and make sure we produce the right content
            # to display
            if self.results:
                if type(self.results) is dict:
                    print(self.stoq.dumps(self.results, compactly=False))
                elif type(self.results) is list:
                    for idx, r in enumerate(self.results):
                        if type(r) is dict:
                            print(self.stoq.dumps(r, compactly=False))
                        elif type(r) is tuple:
                            if type(r[0]) is dict:
                                print(
                                    "[*] Extracted content: id {}".format(idx))
                                for sub_key, sub_value in r[0].items():
                                    print("    - {}: {}".format(
                                        sub_key, sub_value))
                                hashes = get_hashes(r[1])
                                mime = get_magic(r[1])
                                for key, value in hashes.items():
                                    print("    - {}: {}".format(key, value))
                                print("    - magic: {}".format(mime))

                            else:
                                print(r)
                        else:
                            print(r)
                else:
                    print(self.results)
            else:
                print(
                    "[!] No results. Did you run a plugin? Try 'run <category> <plugin>'"
                )
        except Exception as err:
            print("[!] Error: {}".format(str(err)))
Example #4
    def save(self, payload, archive=False, **kwargs):
        """
        Save results to GCS

        :param bytes payload: Content to be stored in GCS
        :param **kwargs index: Bucket name to be used (defaults to self.bucket_name)
        :param **kwargs sha1: SHA1 hash to be used as a filename

        :returns: Filename used to save the payload
        :rtype: str

        """

        if not self.conn:
            self.connect()

        bucket = kwargs.get('index', self.bucket_name)
        sha1 = kwargs.get('sha1', get_sha1(payload))
        magic = get_magic(payload)

        hashpath = '/'.join(list(sha1[:5]))
        filename = "{}/{}".format(hashpath, sha1)

        body = {
            'name': filename
        }

        content = BytesIO(payload)
        media_body = http.MediaIoBaseUpload(content, magic)

        try:
            req = self.conn.objects().insert(bucket=bucket, body=body,
                                             media_body=media_body)
            resp = req.execute()
            self.stoq.log.debug(resp)
        except Exception as err:
            self.stoq.log.error("Unable to save file to GCS: {}".format(str(err)))
            return None

        return filename
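
The hashpath scheme spreads objects across nested prefixes rather than one flat namespace: the first five hex characters of the SHA1 each become a directory level.

    sha1 = "356a192b7913b04c54574d18c28d46e6395428ab"
    hashpath = '/'.join(list(sha1[:5]))        # -> '3/5/6/a/1'
    filename = "{}/{}".format(hashpath, sha1)
    # -> '3/5/6/a/1/356a192b7913b04c54574d18c28d46e6395428ab'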
Example #5
    def do_read(self, input):
        """
        read <path to file>
            Open a file at specified path
        """

        try:
            self.filename = os.path.basename(input)
            self.payload = self.stoq.get_file(input)
            if not self.payload:
                print("[!] No payload found.")
            else:
                hashes = get_hashes(self.payload)
                mime = get_magic(self.payload)
                print("[*] Filename: {}".format(input))
                print("[*] Size: {}".format(len(self.payload)))

                # Iterate over all of the hashes that were generated
                for key, value in hashes.items():
                    print("[*] {}: {}".format(key, value))
                print("[*] magic: {}".format(mime))
        except Exception as err:
            print("[!] Error: {}".format(str(err)))
Example #6
    def extract(self, payload, **kwargs):
        """
        Decompress a payload

        :param bytes payload: Content to be decompressed
        :param **kwargs filename: Filename of compressed archive
        :param **kwargs archive_passwords: List of passwords to attempt against the archive

        :returns: Metadata and content extracted
        :rtype: list of tuples

        """

        # Make sure the payload is not larger than what is permitted
        if len(payload) > int(self.maximum_size):
            self.stoq.log.warn("Compressed file too large: {}".format(kwargs))
            return None

        if 'filename' in kwargs:
            filename = kwargs['filename']
        else:
            filename = self.stoq.get_uuid

        if 'archive_passwords' in kwargs:
            archive_passwords = kwargs['archive_passwords']
            if not isinstance(archive_passwords, (list, tuple)):
                archive_passwords = archive_passwords.split(",")
        else:
            archive_passwords = self.password_list

        results = None

        # Determine the mimetype of the payload so we can identify the
        # correct archiver
        mimetype = get_magic(payload)
        if mimetype in archive_magic:
            archive_type = archive_magic[mimetype]
            if archive_type in archive_cmds:
                archiver = archive_cmds[archive_type]
            else:
                self.stoq.log.warn("Unknown archive type: {}".format(archive_type))
                return None
        else:
            self.stoq.log.warn("Unknown MIME type: {}".format(mimetype))
            return None

        # Build our temporary directory and file structure
        tmp_archive_dir = tempfile.mkdtemp(dir=self.stoq.temp_dir)
        extract_dir = os.path.join(tmp_archive_dir, "out")
        archive_file = os.path.join(tmp_archive_dir, filename)

        with open(archive_file, "wb") as f:
            f.write(payload)

        for password in archive_passwords:
            # Check to make sure there are no special characters in the
            # password to prevent any potential security issues.
            if not re.search(r"[\\|&;<>()$'`\"`'*?#~=%]", password):
                # Check to see what kind of archive we have and build the
                # command as appropriate
                cmd = archiver.replace('%INFILE%', archive_file)
                cmd = cmd.replace('%OUTDIR%', extract_dir)
                cmd = cmd.replace('%PASSWORD%', password)
                cmd = cmd.split(" ")
            else:
                self.stoq.log.warn("Password contains invalid character")
                continue

            # Start the process
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True)
            try:
                # Monitor the command and wait for it to complete within a set
                # timeout
                outs, errs = p.communicate(timeout=45)
            except TimeoutExpired:
                p.kill()
                self.stoq.log.error("Timed out decompressing {}".format(archive_file))

            # Attempt to list contents of extract_dir, if files exist,
            # then let's break out of the loop and continue on
            # as it would mean the file extracted successfully
            if os.path.isdir(extract_dir) and os.listdir(extract_dir):
                break

        # Looks like we are ready, let's step through each file
        for root, dirs, files in os.walk(extract_dir):
            for f in files:
                # We are going to skip this file if the filename is the same as
                # our original file
                if f != filename:
                    path = os.path.join(root, f)
                    extracted_filename = os.path.basename(path)

                    # Open the file so we can return the content
                    with open(path, "rb") as extracted_file:
                        # Generate relevant metadata
                        meta = {}
                        content = extracted_file.read()
                        meta['filename'] = extracted_filename
                        meta['size'] = len(content)

                        # Since we defined results as None above, we need to
                        # ensure it is a list now that we have results
                        if not results:
                            results = []

                        # Construct our set for return
                        results.append((meta, content))

                        self.stoq.log.info("Extracted file {} ({} bytes) from "
                                           "{}".format(meta['filename'],
                                                       meta['size'],
                                                       filename))

        # Cleanup the extracted content
        if os.path.isdir(tmp_archive_dir):
            shutil.rmtree(tmp_archive_dir)

        return results
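
The archive_magic and archive_cmds tables are defined elsewhere in the plugin. Their likely shape, and how the %INFILE%/%OUTDIR%/%PASSWORD% placeholders get filled, can be sketched as follows; the table contents here are assumptions, not the plugin's real values:

    archive_magic = {'application/zip': '7z'}
    archive_cmds = {'7z': '7z x -o%OUTDIR% -y -p%PASSWORD% %INFILE%'}

    cmd = archive_cmds[archive_magic['application/zip']]
    cmd = cmd.replace('%INFILE%', '/tmp/tmpdir/archive.zip')
    cmd = cmd.replace('%OUTDIR%', '/tmp/tmpdir/out')
    cmd = cmd.replace('%PASSWORD%', 'infected')
    # -> '7z x -o/tmp/tmpdir/out -y -pinfected /tmp/tmpdir/archive.zip'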
Example #7
    def extract(self, payload, **kwargs):
        """
        Decompress a payload

        :param bytes payload: Content to be decompressed
        :param str filename: Filename of compressed archive
        :param list archive_passwords: List of passwords to attempt against the archive

        :returns: Metadata and content extracted
        :rtype: list of tuples

        """

        # Make sure the payload is not larger than what is permitted
        if len(payload) > int(self.maximum_size):
            self.log.warn("Compressed file too large: {}".format(kwargs))
            return None

        if 'filename' in kwargs:
            filename = kwargs['filename']
        else:
            filename = self.stoq.get_uuid

        if 'archive_passwords' in kwargs:
            archive_passwords = kwargs['archive_passwords']
            if not isinstance(archive_passwords, (list, tuple)):
                archive_passwords = archive_passwords.split(",")
        else:
            archive_passwords = self.password_list

        results = None

        # Determine the mimetype of the payload so we can identify the
        # correct archiver
        mimetype = get_magic(payload)
        self.log.debug("Mimetype: {}".format(mimetype))
        if mimetype in archive_magic:
            archive_type = archive_magic[mimetype]
            if archive_type in archive_cmds:
                archiver = archive_cmds[archive_type]
            else:
                self.log.warn("Unknown archive type: {}".format(archive_type))
                return None
        else:
            self.log.warn("Unknown MIME type: {}".format(mimetype))
            return None

        # Build our temporary directory and file structure
        tmp_archive_dir = tempfile.mkdtemp(dir=self.stoq.temp_dir)
        extract_dir = tmp_archive_dir
        archive_file = os.path.join(tmp_archive_dir, filename)

        with open(archive_file, "wb") as f:
            f.write(payload)

        for password in archive_passwords:
            # Check to see what kind of archive we have and build the
            # command as appropriate
            cmd = archiver.replace('%INFILE%', shlex.quote(archive_file))
            cmd = cmd.replace('%OUTDIR%', shlex.quote(extract_dir))
            cmd = cmd.replace('%PASSWORD%', shlex.quote(password))
            cmd = cmd.split(" ")

            # Start the process
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True)
            try:
                # Monitor the command and wait for it to complete within a set
                # timeout
                outs, errs = p.communicate(timeout=45)
            except TimeoutExpired:
                p.kill()
                self.log.error(
                    "Timed out decompressing {}".format(archive_file))

            # Attempt to list contents of extract_dir, if files exist,
            # then let's break out of the loop and continue on
            # as it would mean the file extracted successfully
            if p.returncode == 0:
                break

        # Looks like we are ready, let's step through each file
        for root, dirs, files in os.walk(extract_dir):
            for f in files:
                # We are going to skip this file if the filename is the same as
                # our original file
                if f != filename:
                    path = os.path.join(root, f)
                    extracted_filename = os.path.basename(path)

                    try:
                        # Open the file so we can return the content
                        with open(path, "rb") as extracted_file:
                            # Generate relevant metadata
                            meta = {}
                            content = extracted_file.read()
                            meta['filename'] = extracted_filename
                            meta['size'] = len(content)

                            # Since we defined results as None above, we need to
                            # ensure it is a list now that we have results
                            if not results:
                                results = []

                            # Construct our set for return
                            results.append((meta, content))

                            self.log.info("Extracted file {} ({} bytes) from "
                                          "{}".format(meta['filename'],
                                                      meta['size'], filename))
                    except Exception as err:
                        self.log.warn(
                            "Unable to access extracted content: {}".format(
                                err))

        # Cleanup the extracted content
        if os.path.isdir(tmp_archive_dir):
            shutil.rmtree(tmp_archive_dir)

        return results
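
Compared with Example #6, this revision makes a few notable changes: passwords are escaped with shlex.quote() instead of being rejected by a character blacklist, so fewer legitimate passwords are skipped; extraction success is judged by the archiver's return code rather than by listing the output directory; the extraction directory is the temporary directory itself rather than an out/ subdirectory; and reads of extracted files are wrapped in try/except, so one unreadable file doesn't abort the whole walk.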