Example #1
    def test_droidmon_file(self):
        d = Droidmon()
        d.set_task({
            "category": "file",
        })
        filepath = Files.temp_named_put("", "droidmon.log")
        d.logs_path = os.path.dirname(filepath)
        # Ensure there is data available and none of it is a set().
        assert d.run() != {}
        assert json.loads(json.dumps(d.run())) == d.run()
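A note on the helper these examples exercise: the implementation of
Files.temp_named_put is not shown here, but judging from the call sites and
the tests in Examples #8-#11, it writes the given content to a file named
after the basename of the second argument inside a freshly created temporary
directory and returns the resulting path, with an optional third argument
selecting the root for that directory. A minimal sketch under those
assumptions, not the actual Cuckoo implementation:

import os
import tempfile

def temp_named_put(content, filename, path=None):
    # Hypothetical sketch. Only the basename is kept, so an absolute path
    # such as "/tmp/foobar/hello.txt" cannot escape the temp directory
    # (cf. test_named_temp_abs below).
    dirpath = tempfile.mkdtemp(dir=path)
    filepath = os.path.join(dirpath, os.path.basename(filename))
    with open(filepath, "wb") as f:
        f.write(content)
    return filepath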
Example #2
    # The extra "p" argument suggests this test was wrapped in a mock.patch
    # decorator that has been trimmed from the snippet; p is the injected mock.
    def test_global_osprofile(self, p):
        set_cwd(tempfile.mkdtemp())
        cuckoo_create(cfg={
            "memory": {
                "basic": {
                    "guest_profile": "profile0",
                },
            },
        })
        filepath = Files.temp_named_put("notempty", "memory.dmp")
        m = Memory()
        m.set_path(os.path.dirname(filepath))
        m.set_machine({})
        m.run()
        p.assert_called_once_with(filepath, "profile0")
Example #3
def tasks_create_file():
    data = request.files["file"]
    package = request.form.get("package", "")
    timeout = request.form.get("timeout", "")
    priority = request.form.get("priority", 1)
    options = request.form.get("options", "")
    machine = request.form.get("machine", "")
    platform = request.form.get("platform", "")
    tags = request.form.get("tags", None)
    custom = request.form.get("custom", "")
    owner = request.form.get("owner", "")
    clock = request.form.get("clock", None)
    user_id = request.form.get("user_id", "")

    memory = parse_bool(request.form.get("memory", 0))
    unique = parse_bool(request.form.get("unique", 0))
    enforce_timeout = parse_bool(request.form.get("enforce_timeout", 0))

    content = data.read()
    if unique and db.find_sample(sha256=hashlib.sha256(content).hexdigest()):
        return json_error(400, "This file has already been submitted")

    temp_file_path = Files.temp_named_put(content, data.filename)

    task_id = db.add_path(
        file_path=temp_file_path,
        package=package,
        timeout=timeout,
        priority=priority,
        options=options,
        machine=machine,
        platform=platform,
        tags=tags,
        custom=custom,
        owner=owner,
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=clock,
        user_id=user_id,
    )

    return jsonify(task_id=task_id)
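For reference, a client-side submission against this endpoint might look as
follows. This is a sketch only: the route (assumed here to be
/tasks/create/file) and the host/port are not part of the snippet above and
depend on the deployment.

import requests

with open("sample.exe", "rb") as f:
    r = requests.post(
        # Hypothetical URL; adjust host, port, and route as needed.
        "http://localhost:8090/tasks/create/file",
        files={"file": ("sample.exe", f)},
        data={"package": "exe", "timeout": "120", "unique": "1"},
    )
print(r.json()["task_id"])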
Example #4
    def gen_config(self,
                   logfile,
                   local_ip,
                   local_port,
                   socks5_host,
                   socks5_port,
                   username=None,
                   password=None):
        """Generate and write a redsocks config file to be used for
        one analysis."""

        conf_base = {
            "log_debug": "on",
            "log_info": "on",
            "log": "\"file:%s\"" % logfile,
            "daemon": "off",
            "redirector": "iptables"
        }

        conf_redsocks = {
            "local_ip": local_ip,
            "local_port": str(local_port),
            "ip": socks5_host,
            "port": str(socks5_port),
            "type": "socks5"
        }

        conf_sections = {"base": conf_base, "redsocks": conf_redsocks}

        if username:
            conf_redsocks["login"] = username
            conf_redsocks["password"] = password

        conf = ""
        for name, section in conf_sections.iteritems():
            conf += "%s {\n" % name
            for field, value in section.iteritems():
                conf += "%s = %s;\n" % (field, value)
            conf += "}\n"

        return Files.temp_named_put(conf, "redsocks-task-%s" % self.task.id)
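With illustrative arguments (logfile="/tmp/redsocks.log",
local_ip="192.168.56.1", local_port=1080, socks5_host="1.2.3.4",
socks5_port=1080, no credentials), the file handed to Files.temp_named_put
would contain something along these lines; section and field order may vary,
since Python 2 dict iteration order is unspecified:

base {
log_debug = on;
log_info = on;
log = "file:/tmp/redsocks.log";
daemon = off;
redirector = iptables;
}
redsocks {
local_ip = 192.168.56.1;
local_port = 1080;
ip = 1.2.3.4;
port = 1080;
type = socks5;
}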
Example #5
def tasks_create_file():
    data = request.files["file"]
    package = request.form.get("package", "")
    timeout = request.form.get("timeout", "")
    priority = request.form.get("priority", 1)
    options = request.form.get("options", "")
    machine = request.form.get("machine", "")
    platform = request.form.get("platform", "")
    tags = request.form.get("tags", None)
    custom = request.form.get("custom", "")
    owner = request.form.get("owner", "")
    clock = request.form.get("clock", None)

    memory = parse_bool(request.form.get("memory", 0))
    unique = parse_bool(request.form.get("unique", 0))
    enforce_timeout = parse_bool(request.form.get("enforce_timeout", 0))

    content = data.read()
    if unique and db.find_sample(sha256=hashlib.sha256(content).hexdigest()):
        return json_error(400, "This file has already been submitted")

    temp_file_path = Files.temp_named_put(content, data.filename)

    task_id = db.add_path(
        file_path=temp_file_path,
        package=package,
        timeout=timeout,
        priority=priority,
        options=options,
        machine=machine,
        platform=platform,
        tags=tags,
        custom=custom,
        owner=owner,
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=clock
    )

    return jsonify(task_id=task_id)
Example #6
    def import_(self, f, submit_id):
        """Import an analysis identified by the file(-like) object f."""
        try:
            z = zipfile.ZipFile(f)
        except zipfile.BadZipfile:
            raise CuckooOperationalError(
                "Imported analysis is not a proper .zip file.")

        # Ensure there are no files with illegal or potentially insecure names.
        # TODO Keep in mind that if we start to support other archive formats
        # (e.g., .tar) that those may also support symbolic links. In that case
        # we should probably start using sflock here.
        for filename in z.namelist():
            if filename.startswith("/") or ".." in filename or ":" in filename:
                raise CuckooOperationalError(
                    "The .zip file contains a file with a potentially "
                    "incorrect filename: %s" % filename)

        if "task.json" not in z.namelist():
            raise CuckooOperationalError(
                "The task.json file is required in order to be able to import "
                "an analysis! This file contains metadata about the analysis.")

        required_fields = {
            "options": dict,
            "route": basestring,
            "package": basestring,
            "target": basestring,
            "category": basestring,
            "memory": bool,
            "timeout": (int, long),
            "priority": (int, long),
            "custom": basestring,
            "tags": (tuple, list),
        }

        try:
            info = json.loads(z.read("task.json"))
            for key, type_ in required_fields.items():
                if key not in info:
                    raise ValueError("missing %s" % key)
                if info[key] is not None and not isinstance(info[key], type_):
                    raise ValueError("%s => %s" % (key, info[key]))
        except ValueError as e:
            raise CuckooOperationalError(
                "The provided task.json file, required for properly importing "
                "the analysis, is incorrect or incomplete (%s)." % e)

        if info["category"] == "url":
            task_id = submit_task.add_url(url=info["target"],
                                          package=info["package"],
                                          timeout=info["timeout"],
                                          options=info["options"],
                                          priority=info["priority"],
                                          custom=info["custom"],
                                          memory=info["memory"],
                                          tags=info["tags"],
                                          submit_id=submit_id)
        else:
            # Users may have the "delete_bin_copy" enabled and in such cases
            # the binary file won't be included in the .zip file.
            if "binary" in z.namelist():
                filepath = Files.temp_named_put(
                    z.read("binary"), os.path.basename(info["target"]))
            else:
                # Generate a temp file as a target if no target is present
                filepath = Files.temp_put("")

            # We'll be updating the target shortly.
            task_id = submit_task.add_path(file_path=filepath,
                                           package=info["package"],
                                           timeout=info["timeout"],
                                           options=info["options"],
                                           priority=info["priority"],
                                           custom=info["custom"],
                                           memory=info["memory"],
                                           tags=info["tags"],
                                           submit_id=submit_id)

        if not task_id:
            raise CuckooOperationalError(
                "There was an error creating a task for the to-be imported "
                "analysis in our database. Can't proceed.")

        # The constructors currently don't accept this argument.
        db.set_route(task_id, info["route"])

        mkdir(cwd(analysis=task_id))
        z.extractall(cwd(analysis=task_id))

        # If there's an analysis.json file, load it up to figure out additional
        # metadata regarding this analysis.
        if os.path.exists(cwd("analysis.json", analysis=task_id)):
            try:
                obj = json.load(
                    open(cwd("analysis.json", analysis=task_id), "rb"))
                if not isinstance(obj, dict):
                    raise ValueError
                if "errors" in obj and not isinstance(obj["errors"], list):
                    raise ValueError
                if "action" in obj and not isinstance(obj["action"], list):
                    raise ValueError
            except ValueError:
                log.warning(
                    "An analysis.json file was provided, but it wasn't a "
                    "valid JSON object/structure that we can use to enhance "
                    "the analysis information.")
            else:
                for error in set(obj.get("errors", [])):
                    if isinstance(error, basestring):
                        db.add_error(error, task_id)
                for action in set(obj.get("action", [])):
                    if isinstance(action, basestring):
                        db.add_error("", task_id, action)

        # We set this analysis as completed so that it will be processed
        # automatically (assuming 'cuckoo process' is running).
        db.set_status(task_id, TASK_COMPLETED)
        return task_id
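For illustration, a minimal archive accepted by the validation above could be
built as follows. The values are placeholders; only the keys from the
required_fields table are included, with "binary" being optional (cf. the
delete_bin_copy remark in the code).

import json
import zipfile

task = {
    "options": {},
    "route": "none",
    "package": "exe",
    "target": "sample.exe",
    "category": "file",
    "memory": False,
    "timeout": 120,
    "priority": 1,
    "custom": "",
    "tags": [],
}

z = zipfile.ZipFile("analysis.zip", "w")
z.writestr("task.json", json.dumps(task))
# The sample itself, if delete_bin_copy hasn't removed it.
z.writestr("binary", "MZ...")
z.close()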
Example #7
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package") or "",
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(submit_task.add_url(url=info["filename"], **kw))
                continue

            # For each selected file entry, create a new temporary folder.
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(submit.tmp_path,
                                    os.path.basename(info["filename"]))

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(submit_task.add_path(file_path=filepath, **kw))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                arc = sflock.zipify(
                    sflock.unpack(contents=open(arcpath, "rb").read(),
                                  filename=info["arcname"]))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))
            else:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(contents=content,
                                       filename=info["extrpath"][-2])

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))

        return ret
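Reconstructed from the key lookups above, the config argument presumably has
roughly the following shape. This is a sketch: the field names come from the
code, while the values are made up.

config = {
    "global": {
        "package": "",
        "timeout": 120,
        "priority": 1,
        "options": {"full-memory-dump": False, "enforce-timeout": False},
    },
    "file_selection": [
        {
            # A plain file; an empty extrpath takes the Files.copy branch.
            "type": "file",
            "filename": "sample.exe",
            "extrpath": [],
            "options": {},
        },
        {
            # A file one level deep inside an archive; a single-element
            # extrpath takes the sflock.zipify branch.
            "type": "file",
            "filename": "inner.exe",
            "arcname": "outer.zip",
            "relaname": "inner.exe",
            "extrpath": ["inner.exe"],
            "options": {},
        },
    ],
}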
Example #8
    def test_named_temp_abs(self):
        filepath = Files.temp_named_put("test", "/tmp/foobar/hello.txt")
        assert open(filepath, "rb").read() == "test"
        # Only the basename is used; the "foobar" directory does not leak in.
        assert "foobar" not in filepath
Example #9
    def test_named_temp(self):
        filepath = Files.temp_named_put("test", "hello.txt")
        assert open(filepath, "rb").read() == "test"
        assert os.path.basename(filepath) == "hello.txt"
Example #10
    def test_named_temp_abs(self):
        # The third argument apparently selects the temp root directory.
        filepath = Files.temp_named_put(
            "test", "/tmp/foobar/hello.txt", "/tmp"
        )
        assert open(filepath, "rb").read() == "test"
        assert "foobar" not in filepath
Example #11
    def test_named_temp(self):
        filepath = Files.temp_named_put("test", "hello.txt", "/tmp")
        assert open(filepath, "rb").read() == "test"
        assert os.path.basename(filepath) == "hello.txt"
Example #12
    def import_(self, f, submit_id):
        """Import an analysis identified by the file(-like) object f."""
        try:
            z = zipfile.ZipFile(f)
        except zipfile.BadZipfile:
            raise CuckooOperationalError(
                "Imported analysis is not a proper .zip file."
            )

        # Ensure there are no files with illegal or potentially insecure names.
        # TODO Keep in mind that if we start to support other archive formats
        # (e.g., .tar) that those may also support symbolic links. In that case
        # we should probably start using sflock here.
        for filename in z.namelist():
            if filename.startswith("/") or ".." in filename or ":" in filename:
                raise CuckooOperationalError(
                    "The .zip file contains a file with a potentially "
                    "incorrect filename: %s" % filename
                )

        if "task.json" not in z.namelist():
            raise CuckooOperationalError(
                "The task.json file is required in order to be able to import "
                "an analysis! This file contains metadata about the analysis."
            )

        required_fields = {
            "options": dict, "route": basestring, "package": basestring,
            "target": basestring, "category": basestring, "memory": bool,
            "timeout": (int, long), "priority": (int, long),
            "custom": basestring, "tags": (tuple, list),
        }

        try:
            info = json.loads(z.read("task.json"))
            for key, type_ in required_fields.items():
                if key not in info:
                    raise ValueError("missing %s" % key)
                if info[key] is not None and not isinstance(info[key], type_):
                    raise ValueError("%s => %s" % (key, info[key]))
        except ValueError as e:
            raise CuckooOperationalError(
                "The provided task.json file, required for properly importing "
                "the analysis, is incorrect or incomplete (%s)." % e
            )

        if info["category"] == "url":
            task_id = db.add_url(
                url=info["target"], package=info["package"],
                timeout=info["timeout"], options=info["options"],
                priority=info["priority"], custom=info["custom"],
                memory=info["memory"], tags=info["tags"], submit_id=submit_id
            )
        else:
            # Users may have the "delete_bin_copy" enabled and in such cases
            # the binary file won't be included in the .zip file.
            if "binary" in z.namelist():
                filepath = Files.temp_named_put(
                    z.read("binary"), os.path.basename(info["target"])
                )
            else:
                filepath = __file__

            # We'll be updating the target shortly.
            task_id = db.add_path(
                file_path=filepath, package=info["package"],
                timeout=info["timeout"], options=info["options"],
                priority=info["priority"], custom=info["custom"],
                memory=info["memory"], tags=info["tags"], submit_id=submit_id
            )

        if not task_id:
            raise CuckooOperationalError(
                "There was an error creating a task for the to-be imported "
                "analysis in our database. Can't proceed."
            )

        # The constructors currently don't accept this argument.
        db.set_route(task_id, info["route"])

        mkdir(cwd(analysis=task_id))
        z.extractall(cwd(analysis=task_id))

        # If there's an analysis.json file, load it up to figure out additional
        # metadata regarding this analysis.
        if os.path.exists(cwd("analysis.json", analysis=task_id)):
            try:
                obj = json.load(
                    open(cwd("analysis.json", analysis=task_id), "rb")
                )
                if not isinstance(obj, dict):
                    raise ValueError
                if "errors" in obj and not isinstance(obj["errors"], list):
                    raise ValueError
                if "action" in obj and not isinstance(obj["action"], list):
                    raise ValueError
            except ValueError:
                log.warning(
                    "An analysis.json file was provided, but it wasn't a "
                    "valid JSON object/structure that we can use to enhance "
                    "the analysis information."
                )
            else:
                for error in set(obj.get("errors", [])):
                    if isinstance(error, basestring):
                        db.add_error(error, task_id)
                for action in set(obj.get("action", [])):
                    if isinstance(action, basestring):
                        db.add_error("", task_id, action)

        # We set this analysis as completed so that it will be processed
        # automatically (assuming 'cuckoo process' is running).
        db.set_status(task_id, TASK_COMPLETED)
        return task_id
Example #13
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package"),
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(db.add_url(
                    url=info["filename"], **kw
                ))
                continue

            # For each selected file entry, create a new temporary folder.
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(
                    submit.tmp_path, os.path.basename(info["filename"])
                )

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(db.add_path(
                    file_path=filepath, **kw
                ))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(
                    submit.tmp_path, os.path.basename(info["arcname"])
                )
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"])
                    )
                    continue

                arc = sflock.zipify(sflock.unpack(
                    contents=open(arcpath, "rb").read(),
                    filename=info["arcname"]
                ))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"])
                )

                ret.append(db.add_archive(
                    file_path=arcpath, filename=info["relaname"], **kw
                ))
            else:
                arcpath = os.path.join(
                    submit.tmp_path, os.path.basename(info["arcname"])
                )
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"])
                    )
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(
                    contents=content, filename=info["extrpath"][-2]
                )

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2])
                )

                ret.append(db.add_archive(
                    file_path=arcpath, filename=info["relaname"], **kw
                ))

        return ret