Example #1
def demux_sflock(filename, options, package):
    retlist = []
    # only extract from files with no extension or with .bin (downloaded from us) or .zip PACKAGE, we do extract from zip archives, to ignore it set ZIP PACKAGES
    ext = os.path.splitext(filename)[1]
    if ext == b".bin":
        return retlist

    # to handle when side file for exec is required
    if package == b".zip" and "file=" in options:
        return [filename]

    try:
        password = b"infected"
        tmp_pass = options2passwd(options)
        if tmp_pass:
            password = tmp_pass

        try:
            unpacked = unpack(filename, password=password)
        except UnpackException:
            unpacked = unpack(filename)

        if unpacked.package in whitelist_extensions:
            return [filename]
        if unpacked.package in blacklist_extensions:
            return retlist
        for sf_child in unpacked.children or []:
            if sf_child.get("children") and sf_child["children"]:
                retlist += [_sf_chlildren(ch) for ch in sf_child["children"]]
            else:
                retlist.append(_sf_chlildren(sf_child))
    except Exception as e:
        log.error(e, exc_info=True)

    return list(filter(None, retlist))
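
A rough, hypothetical illustration of how the helper above might be invoked (the sample path, the options string, and the package value are assumptions, not taken from CAPEv2): the filename and package are passed as bytes, and the options string is a comma-separated "key=value" form from which options2passwd() presumably extracts the archive password.

# Hypothetical call of the demux_sflock() shown above; all arguments are illustrative only.
targets = demux_sflock(b"/tmp/upload/sample.zip", "password=infected", b"zip")
for target in targets:
    print(target)  # each non-empty entry is a file CAPE would submit for analysis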
Example #2
File: demux.py Project: kevoreilly/CAPEv2
def demux_sflock(filename: bytes, options: str) -> List[bytes]:
    retlist = []
    # only extract from files with no extension or with .bin (downloaded from us) or .zip PACKAGE, we do extract from zip archives, to ignore it set ZIP PACKAGES
    ext = os.path.splitext(filename)[1]
    if ext == b".bin":
        return retlist

    # to handle when side file for exec is required
    if "file=" in options:
        return [filename]

    try:
        password = options2passwd(options) or "infected"
        try:
            unpacked = unpack(filename, password=password)
        except UnpackException:
            unpacked = unpack(filename)

        if unpacked.package in whitelist_extensions:
            return [filename]
        if unpacked.package in blacklist_extensions:
            return retlist
        for sf_child in unpacked.children:
            if sf_child.to_dict().get("children"):
                retlist.extend(_sf_chlildren(ch) for ch in sf_child.children)
                # child is not available, the original file should be put into the list
                if filter(None, retlist):
                    retlist.append(_sf_chlildren(sf_child))
            else:
                retlist.append(_sf_chlildren(sf_child))
    except Exception as e:
        log.error(e, exc_info=True)
    return list(filter(None, retlist))
Example #3
File: demux.py Project: 5l1v3r1/CAPE-1
def demux_sflock(filename, options):
    retlist = []

    try:
        password = "******"
        tmp_pass = options2passwd(options)
        if tmp_pass:
            password = tmp_pass

        unpacked = unpack(filename, password=password)
        if unpacked.children:
            cuckoo_conf = Config()
            for sf_child in unpacked.children:

                base, ext = os.path.splitext(sf_child.filename)
                basename = os.path.basename(sf_child.filename)
                ext = ext.lower()
                if ext in demux_extensions_list:

                    tmp_path = cuckoo_conf.cuckoo.get("tmppath", "/tmp")
                    target_path = os.path.join(tmp_path, "cuckoo-sflock")
                    if not os.path.exists(target_path):
                        os.mkdir(target_path)
                    tmp_dir = tempfile.mkdtemp(dir=target_path)
                    try:
                        path_to_extract = os.path.join(tmp_dir,
                                                       sf_child.filename)
                        open(path_to_extract, "wb").write(sf_child.contents)
                        retlist.append(path_to_extract)
                    except Exception as e:
                        log.error(e, exc_info=True)
    except Exception as e:
        log.error(e)

    return retlist
Example #4
File: test_attr.py Project: BuloZB/sflock
def test_attributes():
    for filename in os.listdir("tests/files"):
        if "encrypted" in filename:
            continue

        f = unpack("tests/files/%s" % filename)
        f.to_dict()
Example #5
def extract(fn, data):
    f = sflock.unpack(None, contents=data, filename=fn)

    f = ExtrObj(f)
    if f.blacklisted:
        return []
    return f.process()
Example #6
def demux_sflock(filename, options, package):
    retlist = []
    # only extract from files with no extension or with .bin (downloaded from us) or .zip PACKAGE, we do extract from zip archives, to ignore it set ZIP PACKAGES
    ext = os.path.splitext(filename)[1]
    if ext == b".bin":
        return retlist

    # to handle when side file for exec is required
    if package == b".zip" and "file=" in options:
        return [filename]

    try:
        password = b"infected"
        tmp_pass = options2passwd(options)
        if tmp_pass:
            password = tmp_pass

        try:
            unpacked = unpack(filename, password=password)
        except UnpackException:
            unpacked = unpack(filename)

        if unpacked.package in whitelist_extensions:
            return [filename]
        if unpacked.package in blacklist_extensions:
            return retlist
        for sf_child in unpacked.children or []:
            base, ext = os.path.splitext(sf_child.filename)
            ext = ext.lower()
            if ext in demux_extensions_list or is_valid_type(sf_child.magic):
                target_path = os.path.join(tmp_path, b"cuckoo-sflock")
                if not os.path.exists(target_path):
                    os.mkdir(target_path)
                tmp_dir = tempfile.mkdtemp(dir=target_path)
                try:
                    path_to_extract = os.path.join(tmp_dir, sf_child.filename)
                    open(path_to_extract, "wb").write(sf_child.contents)
                    retlist.append(path_to_extract)
                except Exception as e:
                    log.error(e, exc_info=True)
    except Exception as e:
        log.error(e)

    return retlist
Example #7
File: demux.py Project: naxonez/CAPEv2
def demux_sflock(filename, options):
    retlist = []
    # only extract from files with no extension or with .bin (downloaded from us) or .zip extensions
    ext = os.path.splitext(filename)[1]
    if ext != "" and ext != ".zip" and ext != ".bin":
        return retlist
    try:
        password = "******"
        tmp_pass = options2passwd(options)
        if tmp_pass:
            password = tmp_pass

        try:
            unpacked = unpack(filename, password=password)
        except UnpackException:
            unpacked = unpack(filename)

        if unpacked.package in whitelist_extensions:
            return [filename]
        if unpacked.children:
            for sf_child in unpacked.children:
                base, ext = os.path.splitext(sf_child.filename)
                ext = ext.lower()
                if ext in demux_extensions_list or is_valid_type(
                        sf_child.magic):
                    target_path = os.path.join(tmp_path, "cuckoo-sflock")
                    if not os.path.exists(target_path):
                        os.mkdir(target_path)
                    tmp_dir = tempfile.mkdtemp(dir=target_path)
                    try:
                        path_to_extract = os.path.join(tmp_dir,
                                                       sf_child.filename)
                        open(path_to_extract, "wb").write(sf_child.contents)
                        retlist.append(path_to_extract)
                    except Exception as e:
                        log.error(e, exc_info=True)
    except Exception as e:
        log.error(e)

    return retlist
Example #8
    def children(self):
        if not self.f.children and self.f.magic.startswith('gzip compressed data'):

            fn_idx = self.f.magic.find('was "')
            if fn_idx != -1:
                fn = self.f.magic[fn_idx + 5:].split('"')[0]

            g = gzip.GzipFile(fileobj=StringIO(self.data))
            fl = sflock.unpack(None, contents=g.read(), filename=g.filename)
            if not fl.filename:
                fl.filename = fl.sha256
            fl.parent = self.f
            self.f.children.append(fl)
        return self.f.children
Example #9
    def get_files(self, submit_id, password=None, astree=False):
        """
        Returns files or URLs from a submitted analysis.
        @param password: The password to unlock container archives with
        @param astree: sflock option; determines the format in which the files are returned
        @return: A tree of files
        """
        submit = db.view_submit(submit_id)
        files, duplicates = [], []

        for data in submit.data["data"]:
            if data["type"] == "file":
                filename = Storage.get_filename_from_path(data["data"])
                filepath = os.path.join(submit.tmp_path, filename)

                unpacked = sflock.unpack(
                    filepath=filepath, password=password,
                    duplicates=duplicates
                )

                if astree:
                    unpacked = unpacked.astree(sanitize=True)

                files.append(unpacked)
            elif data["type"] == "url":
                files.append({
                    "filename": data["data"],
                    "filepath": "",
                    "relapath": "",
                    "selected": True,
                    "size": 0,
                    "type": "url",
                    "package": "ie",
                    "extrpath": [],
                    "duplicate": False,
                    "children": [],
                    "mime": "text/html",
                    "finger": {
                        "magic_human": "url",
                        "magic": "url"
                    }
                })
            else:
                raise RuntimeError(
                    "Unknown data entry type: %s" % data["type"]
                )

        return files, submit.data["errors"], submit.data["options"]
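
For context, a minimal standalone sketch (not taken from the project above) of the sflock calls that get_files() builds on; the archive path and password are hypothetical, and the dictionary keys mirror the astree() shape shown in the URL branch above.

# Minimal sketch: unpack a submitted archive and render it as a sanitized tree of
# dictionaries; the shared duplicates list lets sflock flag files seen before.
import sflock

duplicates = []
unpacked = sflock.unpack(filepath="samples/invoice.zip", password="infected",
                         duplicates=duplicates)
tree = unpacked.astree(sanitize=True)
for child in tree["children"]:
    print(child["filename"], child["size"], child["duplicate"])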
Example #10
def test_msg_nullbyte():
    f = unpack("tests/files/ole_nullbyte.zip")
    assert len(f.children) == 1
    assert len(f.children[0].children) == 2

    ole = f.children[0]
    assert ole.filename == "You have recevied a message.msg"
    assert f.read(ole.extrpath) == ole.contents

    doc = ole.children[0]
    assert doc.filename == "eFax_document-4631559.doc"
    assert doc.relapath == "eFax_document-4631559.doc\x00"
    assert doc.relaname == "eFax_document-4631559.doc"

    z = zipfile.ZipFile(io.BytesIO(zipify(ole)))
    assert z.read(doc.relaname) == doc.contents
Example #11
    def test_embed_win(self):
        t = unpack(b"tests/files/test.win.zip")
        assert t.children[0].filename == b"test.win"
        assert t.children[0].children[0].filename == b"Invoice_for_part_shipped(Feb 19,2021).exe"
        assert t.children[0].children[0].sha256 == "62966847ea9cc94aa58288579519ee2fb2bf17c40579537f949c2665e84f29ba"
Example #12
def SevenZip_unpack(file: str, destination_folder: str, filetype: str, data_dictionary: dict, options: dict, results: dict):
    tool = False

    password = ""
    # Only for real 7zip, breaks others
    password = options.get("password", "infected")
    if any(
        "7-zip Installer data" in string for string in data_dictionary.get("die", {})
    ) or "Zip archive data" in data_dictionary.get("type", ""):
        tool = "7Zip"
        prefix = "7zip_"
        password = options.get("password", "infected")
        password = f"-p{password}"

    elif any(
        "Microsoft Cabinet" in string for string in data_dictionary.get("die", {})
    ) or "Microsoft Cabinet" in data_dictionary.get("type", ""):
        tool = "UnCab"
        prefix = "cab_"
        password = ""

    elif "Nullsoft Installer self-extracting archive" in filetype:
        tool = "UnNSIS"
        prefix = "unnsis_"
        """
        elif (
            any("SFX: WinRAR" in string for string in data_dictionary.get("die", {}))
            or any("RAR Self Extracting archive" in string for string in data_dictionary.get("trid", {}))
            or "RAR self-extracting archive" in data_dictionary.get("type", "")
        ):
            tool = "UnRarSFX"
            prefix = "unrar_"
        """
    else:
        return

    metadata = []
    with tempfile.TemporaryDirectory(prefix=prefix) as tempdir:
        try:
            HAVE_SFLOCK = False
            if HAVE_SFLOCK:
                unpacked = unpack(file.encode(), password=password)
                for child in unpacked.children:
                    with open(os.path.join(tempdir, child.filename.decode()), "wb") as f:
                        f.write(child.contents)

            else:
                output = subprocess.check_output(
                    [
                        "7z",
                        "e",
                        file,
                        password,
                        f"-o{tempdir}",
                        "-y",
                    ],
                    universal_newlines=True,
                )
                print(output)
            files = [
                os.path.join(tempdir, extracted_file)
                for extracted_file in os.listdir(tempdir)
                if os.path.isfile(os.path.join(tempdir, extracted_file))
            ]
            metadata.extend(_extracted_files_metadata(tempdir, destination_folder, files=files))
        except subprocess.CalledProcessError:
            logging.error("Can't unpack with 7Zip for %s", file)
        except Exception as e:
            log.error(e, exc_info=True)

    return tool, metadata
Example #13
def test_msg_rtf_magic():
    f = unpack("tests/files/msg_rtf.msg_")
    assert len(f.children) == 1
    assert f.children[0].filename == "g94ys83xi8_8fb0ud5,7.rtf"
    assert f.children[0].filesize == 138638
Example #14
def test_msg_doc_magic():
    f = unpack("tests/files/msg_doc.msg_")
    assert len(f.children) == 1
    assert f.children[0].filename == "Kristina_Meyer.doc"
    assert f.children[0].filesize == 57856
Example #15
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package") or "",
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(submit_task.add_url(url=info["filename"], **kw))
                continue

            # for each selected file entry, create a new temp. folder
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(submit.tmp_path,
                                    os.path.basename(info["filename"]))

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(submit_task.add_path(file_path=filepath, **kw))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                arc = sflock.zipify(
                    sflock.unpack(contents=open(arcpath, "rb").read(),
                                  filename=info["arcname"]))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))
            else:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(contents=content,
                                       filename=info["extrpath"][-2])

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))

        return ret
Example #16
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package"),
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(db.add_url(
                    url=info["filename"], **kw
                ))
                continue

            # for each selected file entry, create a new temp. folder
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(
                    submit.tmp_path, os.path.basename(info["filename"])
                )

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(db.add_path(
                    file_path=filepath, **kw
                ))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(
                    submit.tmp_path, os.path.basename(info["arcname"])
                )
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"])
                    )
                    continue

                arc = sflock.zipify(sflock.unpack(
                    contents=open(arcpath, "rb").read(),
                    filename=info["arcname"]
                ))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"])
                )

                ret.append(db.add_archive(
                    file_path=arcpath, filename=info["relaname"], **kw
                ))
            else:
                arcpath = os.path.join(
                    submit.tmp_path, os.path.basename(info["arcname"])
                )
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"])
                    )
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(
                    contents=content, filename=info["extrpath"][-2]
                )

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2])
                )

                ret.append(db.add_archive(
                    file_path=arcpath, filename=info["relaname"], **kw
                ))

        return ret
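
As a closing note, the re-archiving pattern used by both submit() variants reduces to the sketch below; the container name and output path are assumptions for illustration, not values from either project.

# Minimal sketch: unpack an arbitrary container with sflock, then normalize it
# into a plain .zip (zipify() returns the bytes of an equivalent .zip archive,
# as the zipfile round-trip in the earlier test example also shows).
import sflock

with open("samples/nested_container.rar", "rb") as fh:
    container = sflock.unpack(contents=fh.read(), filename="nested_container.rar")

with open("/tmp/nested_container.zip", "wb") as out:
    out.write(sflock.zipify(container))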