Example #1
    def test_create_temp(self):
        """Test creation of temporary directory."""
        dirpath1 = Folders.create_temp()
        dirpath2 = Folders.create_temp()
        assert os.path.exists(dirpath1)
        assert os.path.exists(dirpath2)
        assert dirpath1 != dirpath2
Example #2
    def test_create_temp(self):
        """Test creation of temporary directory."""
        dirpath1 = Folders.create_temp("/tmp")
        dirpath2 = Folders.create_temp("/tmp")
        assert os.path.exists(dirpath1)
        assert os.path.exists(dirpath2)
        assert dirpath1 != dirpath2
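Examples #1 and #2 exercise a temporary-directory helper: each call must return a fresh directory that exists on disk and differs from previous calls, optionally rooted at a caller-supplied path. As a rough sketch of the behavior these assertions imply (not the actual Folders.create_temp implementation), such a helper can be built on tempfile.mkdtemp:

    import os
    import tempfile

    def create_temp(root=None):
        """Return a new, unique temporary directory, optionally under root.

        Hypothetical stand-in for Folders.create_temp(), inferred only
        from the assertions in Examples #1 and #2.
        """
        if root is not None and not os.path.isdir(root):
            os.makedirs(root)
        # mkdtemp() creates the directory and guarantees a unique name on
        # every call, which satisfies all three assertions above.
        return tempfile.mkdtemp(dir=root)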
Example #3
    def test_create_temp_conf(self):
        """Test creation of temporary directory with configuration."""
        dirpath = tempfile.mkdtemp()
        set_cwd(dirpath)

        Folders.create(dirpath, "conf")
        with open(os.path.join(dirpath, "conf", "cuckoo.conf"), "wb") as f:
            f.write("[cuckoo]\ntmppath = %s" % dirpath)

        dirpath2 = Folders.create_temp()
        assert dirpath2.startswith(dirpath)
Example #4
    def test_create_temp_conf(self):
        """Test creation of temporary directory with configuration."""
        dirpath = tempfile.mkdtemp()
        set_cwd(dirpath)

        Folders.create(dirpath, "conf")
        with open(os.path.join(dirpath, "conf", "cuckoo.conf"), "wb") as f:
            f.write("[cuckoo]\ntmppath = %s" % dirpath)

        dirpath2 = Folders.create_temp()
        assert dirpath2.startswith(os.path.join(dirpath, "cuckoo-tmp"))
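Examples #3 and #4 add a configuration layer: the test writes a minimal cuckoo.conf with a [cuckoo] section into the current working directory and then asserts that Folders.create_temp() roots its directories under the configured tmppath (under a cuckoo-tmp subdirectory in Example #4). Note that these snippets target Python 2; on Python 3 the file would have to be opened in text mode, or the payload encoded to bytes, before writing. A config-aware sketch, again only an assumption drawn from what the assertions require:

    import configparser  # ConfigParser on Python 2
    import os
    import tempfile

    def create_temp_from_conf(conf_path):
        """Create a temporary directory under the tmppath configured in a
        cuckoo.conf-style INI file, mirroring Examples #3 and #4."""
        parser = configparser.ConfigParser()
        parser.read(conf_path)
        root = os.path.join(parser.get("cuckoo", "tmppath"), "cuckoo-tmp")
        if not os.path.isdir(root):
            os.makedirs(root)
        return tempfile.mkdtemp(dir=root)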
Example #5
    def pre(self, submit_type, data, options=None):
        """
        The first step to submitting a new analysis.
        @param submit_type: "files" or "strings"
        @param data: a list of dicts containing "name" (file name)
                and "data" (file data) or a list of strings (urls or hashes)
        @return: submit id
        """
        if submit_type not in ("strings", "files"):
            log.error("Bad parameter '%s' for submit_type", submit_type)
            return False

        path_tmp = Folders.create_temp()
        submit_data = {
            "data": [],
            "errors": [],
            "options": options or {},
        }

        if submit_type == "strings":
            for line in data:
                self._handle_string(submit_data, path_tmp, line.strip())

        if submit_type == "files":
            for entry in data:
                filename = Storage.get_filename_from_path(entry["name"])
                filepath = Files.create(path_tmp, filename, entry["data"])
                submit_data["data"].append({
                    "type":
                    "file",
                    "data":
                    filepath,
                    "options":
                    self.translate_options_to(entry.get("options", {})),
                })

        return db.add_submit(path_tmp, submit_type, submit_data)
Example #6
    def pre(self, submit_type, data, options=None):
        """
        The first step to submitting a new analysis.
        @param submit_type: "files" or "strings"
        @param data: a list of dicts containing "name" (file name)
                and "data" (file data) or a list of strings (urls or hashes)
        @return: submit id
        """
        if submit_type not in ("strings", "files"):
            log.error("Bad parameter '%s' for submit_type", submit_type)
            return False

        path_tmp = Folders.create_temp()
        submit_data = {
            "data": [],
            "errors": [],
            "options": options or {},
        }

        if submit_type == "strings":
            for line in data:
                self._handle_string(submit_data, path_tmp, line.strip())

        if submit_type == "files":
            for entry in data:
                filename = Storage.get_filename_from_path(entry["name"])
                filepath = Files.create(path_tmp, filename, entry["data"])
                submit_data["data"].append({
                    "type": "file",
                    "data": filepath,
                    "options": self.translate_options_to(
                        entry.get("options", {})
                    ),
                })

        return db.add_submit(path_tmp, submit_type, submit_data)
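Examples #5 and #6 show the same pre() method (the second with tidier formatting): it accepts either raw strings (URLs or hashes) or file dicts with "name" and "data" keys, stages any files in a fresh temporary folder, and records a submit entry in the database, returning its id. A hypothetical call illustrating the expected payload shape; the submit_manager name, the sample file, and the option value are assumptions for illustration only:

    # `submit_manager` stands in for an instance of the class these
    # methods belong to; it is not defined in the snippets above.
    with open("sample.exe", "rb") as f:
        payload = [{"name": "sample.exe", "data": f.read()}]

    submit_id = submit_manager.pre(
        submit_type="files",
        data=payload,
        options={"free": "yes"},  # assumed example; stored as-is in submit_data
    )

    # "strings" mode takes plain URLs or hashes instead of file dicts.
    url_submit_id = submit_manager.pre("strings", ["http://example.com/"])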
Example #7
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package") or "",
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(submit_task.add_url(url=info["filename"], **kw))
                continue

            # for each selected file entry, create a new temp. folder
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(submit.tmp_path,
                                    os.path.basename(info["filename"]))

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(submit_task.add_path(file_path=filepath, **kw))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                arc = sflock.zipify(
                    sflock.unpack(contents=open(arcpath, "rb").read(),
                                  filename=info["arcname"]))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))
            else:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(contents=content,
                                       filename=info["extrpath"][-2])

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))

        return ret
Example #8
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package"),
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(db.add_url(
                    url=info["filename"], **kw
                ))
                continue

            # for each selected file entry, create a new temp. folder
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(
                    submit.tmp_path, os.path.basename(info["filename"])
                )

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(db.add_path(
                    file_path=filepath, **kw
                ))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(
                    submit.tmp_path, os.path.basename(info["arcname"])
                )
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"])
                    )
                    continue

                arc = sflock.zipify(sflock.unpack(
                    contents=open(arcpath, "rb").read(),
                    filename=info["arcname"]
                ))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"])
                )

                ret.append(db.add_archive(
                    file_path=arcpath, filename=info["relaname"], **kw
                ))
            else:
                arcpath = os.path.join(
                    submit.tmp_path, os.path.basename(info["arcname"])
                )
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"])
                    )
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(
                    contents=content, filename=info["extrpath"][-2]
                )

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2])
                )

                ret.append(db.add_archive(
                    file_path=arcpath, filename=info["relaname"], **kw
                ))

        return ret
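Examples #7 and #8 are two revisions of the same submit() method (the first routes task creation through submit_task, the second through db directly): it walks config["file_selection"], layers each entry's options on top of config["global"], and creates one task per plain file, URL, or archive member. The dict below sketches the shape this code reads; the key names (extrpath, arcname, relaname, and so on) are taken from the snippet itself, while the concrete values are placeholders only:

    # Shape inferred from the accesses in submit(); values are placeholders.
    config = {
        "global": {
            "package": None,
            "timeout": 120,
            "priority": 1,
            "custom": None,
            "owner": None,
            "tags": None,
            "machine": None,
            "platform": None,
            "options": {
                "full-memory-dump": False,
                "enforce-timeout": False,
            },
        },
        "file_selection": [
            # A plain file staged earlier by pre(): extrpath is empty.
            {"type": "file", "filename": "sample.exe", "extrpath": [],
             "options": {}},
            # A URL entry becomes a URL task directly.
            {"type": "url", "filename": "http://example.com/", "extrpath": [],
             "options": {}},
            # A file selected from inside an uploaded archive.
            {
                "type": "file",
                "filename": "inner.doc",
                "arcname": "samples.zip",
                "relaname": "docs/inner.doc",
                "extrpath": ["docs/inner.doc"],
                "options": {},
            },
        ],
    }

    # Hypothetical call; submit_id comes from an earlier pre() call.
    task_ids = submit_manager.submit(submit_id, config)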