Exemplo n.º 1
0
def enumerate_plugins(dirpath, module_prefix, namespace, class_,
                      attributes=None, as_dict=False):
    """Import plugins of type `class_` located at `dirpath` into the
    `namespace` that starts with `module_prefix`. If `dirpath` represents a
    filepath then it is converted into its containing directory. The
    `attributes` dictionary allows one to set extra fields for all imported
    plugins. Using `as_dict` a dictionary based on the module name is
    returned.
    @raise CuckooOperationalError: if a plugin module fails to import.
    """
    # Use None as the default and substitute a fresh dict per call: a dict
    # literal in the signature would be shared (and mutable) across calls.
    if attributes is None:
        attributes = {}

    if os.path.isfile(dirpath):
        dirpath = os.path.dirname(dirpath)

    for fname in os.listdir(dirpath):
        if fname.endswith(".py") and not fname.startswith("__init__"):
            module_name, _ = os.path.splitext(fname)
            try:
                importlib.import_module(
                    "%s.%s" % (module_prefix, module_name)
                )
            except ImportError as e:
                raise CuckooOperationalError(
                    "Unable to load the Cuckoo plugin at %s: %s. Please "
                    "review its contents and/or validity!" % (fname, e)
                )

    subclasses = class_.__subclasses__()[:]

    plugins = []
    while subclasses:
        subclass = subclasses.pop(0)

        # Include subclasses of this subclass (there are some subclasses,
        # e.g., LibVirtMachinery, that fail the following module namespace
        # check and as such we perform this logic here).
        subclasses.extend(subclass.__subclasses__())

        # Check whether this subclass belongs to the module namespace that
        # we're currently importing. It should be noted that parent and child
        # namespaces should fail the following if-statement.
        if module_prefix != ".".join(subclass.__module__.split(".")[:-1]):
            continue

        namespace[subclass.__name__] = subclass
        for key, value in attributes.items():
            setattr(subclass, key, value)

        plugins.append(subclass)

    if as_dict:
        return dict(
            (plugin.__module__.split(".")[-1], plugin) for plugin in plugins
        )

    return sorted(plugins, key=lambda x: x.__name__.lower())
Exemplo n.º 2
0
 def delete(*folder):
     """Delete a folder and all its subdirectories.
     @param folder: path or components to path to delete.
     @raise CuckooOperationalError: if fails to delete folder.
     """
     path = os.path.join(*folder)
     # Nothing to do when the target doesn't exist.
     if not os.path.exists(path):
         return
     try:
         shutil.rmtree(path)
     except OSError:
         raise CuckooOperationalError("Unable to delete folder: %s" % path)
Exemplo n.º 3
0
def _dburi_engine(dburi):
    """Return a SQLAlchemy engine for `dburi`, falling back to the default
    sqlite3 database used by older Cuckoo setups when no URI is given.
    @raise CuckooOperationalError: when the URI can't be parsed.
    """
    # Defaults to a sqlite3 database.
    dburi = dburi or "sqlite:///db/cuckoo.db"

    try:
        return sqlalchemy.create_engine(dburi).engine
    except sqlalchemy.exc.ArgumentError:
        raise CuckooOperationalError(
            "Error creating SQL database backup as your SQL database URI "
            "wasn't understood by us: %r!" % dburi
        )
Exemplo n.º 4
0
def netlog_sanitize_fname(path):
    """Validate agent-provided path for result files"""
    # Normalize Windows-style separators before splitting off the directory.
    path = path.replace("\\", "/")
    dir_part, name = os.path.split(path)

    # Only whitelisted result directories may be written to.
    if dir_part not in RESULT_UPLOADABLE:
        raise CuckooOperationalError(
            "Netlog client requested banned path: %r" % path)

    # As soon as a single banned character shows up in the filename, mask
    # every banned character in the whole path.
    if any(banned in name for banned in BANNED_PATH_CHARS):
        for banned in BANNED_PATH_CHARS:
            path = path.replace(banned, "X")

    return path
Exemplo n.º 5
0
def fetch_community(branch="master", force=False, filepath=None):
    """Fetch a Cuckoo Community tarball and extract its signatures, monitor
    binaries, agent, and analyzer into the Cuckoo Working Directory.
    @param branch: community repository branch to download.
    @param force: overwrite files that already exist.
    @param filepath: read the tarball from this path instead of downloading.
    @raise CuckooOperationalError: when the download fails.
    """
    if filepath:
        # Use a context manager so the file handle isn't leaked.
        with open(filepath, "rb") as f:
            buf = f.read()
    else:
        log.info("Downloading.. %s", URL % branch)
        r = requests.get(URL % branch)
        if r.status_code != 200:
            raise CuckooOperationalError(
                "Error fetching the Cuckoo Community binaries "
                "(status_code: %d)!" % r.status_code)

        buf = r.content

    t = tarfile.TarFile.open(fileobj=io.BytesIO(buf), mode="r:gz")

    # Mapping of tarball directories to output directories in the CWD.
    folders = {
        "modules/signatures": "signatures",
        "data/monitor": "monitor",
        "agent": "agent",
        "analyzer": "analyzer",
    }

    members = t.getmembers()

    # The tarball is rooted in a single top-level directory, e.g.,
    # "community-master".
    directory = members[0].name.split("/")[0]
    for tarfolder, outfolder in folders.items():
        mkdir(cwd(outfolder))

        # E.g., "community-master/modules/signatures".
        name_start = "%s/%s" % (directory, tarfolder)
        for member in members:
            if not member.name.startswith(name_start) or \
                    name_start == member.name:
                continue

            # Path of this member relative to the tar folder, computed once
            # instead of repeating the slice for every use below.
            relpath = member.name[len(name_start) + 1:]
            filepath = cwd(outfolder, relpath)
            if member.isdir():
                mkdir(filepath)
                continue

            # TODO Ask for confirmation as we used to do.
            if os.path.exists(filepath) and not force:
                log.debug("Not overwriting file which already exists: %s",
                          relpath)
                continue

            if member.issym():
                t.makelink(member, filepath)
                continue

            log.debug("Extracted %s..", relpath)
            # Close the output file handle instead of leaking it.
            with open(filepath, "wb") as f:
                f.write(t.extractfile(member).read())
Exemplo n.º 6
0
    def connect(self):
        """Set up the MongoDB client, database handle, and GridFS object.
        No-op when this reporting module is disabled.
        @raise CuckooOperationalError: on any MongoDB error.
        """
        if not self.enabled:
            return

        try:
            self.client = pymongo.MongoClient(self.hostname, self.port)
            self.db = self.client[self.database]
            # Only authenticate when credentials have been configured.
            if self.username and self.password:
                self.db.authenticate(self.username, self.password)
            self.grid = gridfs.GridFS(self.db)
        except pymongo.errors.PyMongoError as err:
            raise CuckooOperationalError(
                "Unable to connect to MongoDB: %s" % err)
Exemplo n.º 7
0
    def _request_hash(self, file_hash, **kwargs):
        """Wrapper around requesting a hash."""
        params = {"hash": file_hash, "apikey": self.apikey}

        try:
            response = requests.get(self.HASH_DOWNLOAD,
                                    params=params,
                                    timeout=self.timeout,
                                    **kwargs)
            # Turn HTTP error status codes into exceptions.
            response.raise_for_status()
            return response.content
        except (requests.ConnectionError, ValueError, requests.HTTPError):
            raise CuckooOperationalError("Could not fetch hash \"%s\" "
                                         "from VirusTotal" % file_hash)
Exemplo n.º 8
0
 def read_newline(self):
     """Read until the next newline character, but never more than
     `MAX_NETLOG_LINE`."""
     while True:
         pos = self.buf.find("\n")
         if pos >= 0:
             # A full line is buffered: split it off and hand it back.
             line = self.buf[:pos]
             self.buf = self.buf[pos + 1:]
             return line

         # Guard against a peer that never sends a newline.
         if len(self.buf) >= MAX_NETLOG_LINE:
             raise CuckooOperationalError("Received overly long line")

         chunk = self.read()
         if chunk == "":
             raise EOFError
         self.buf += chunk
    def init(self):
        """
        Initialization method to determine if the OpenDXL Cuckoo Reporting
        Module is enabled and to get the OpenDXL Python Client config file
        location from the dxl_client_config_file setting under the
        [dxleventreporting] section of the reporting.conf file.

        :return: A boolean indicating if the OpenDXL Cuckoo Reporting Module
            is enabled or not.
        :raise CuckooOperationalError: if dxl_client_config_file is missing.
        """
        self.enabled = config("reporting:dxleventreporting:enabled")
        dxl_client_config_file = config(
            "reporting:dxleventreporting:dxl_client_config_file")

        if dxl_client_config_file is None:
            # The setting lives in reporting.conf (see the docstring above);
            # the previous message pointed users at a non-existent
            # "report.conf" file.
            raise CuckooOperationalError(
                "Missing dxl_client_config_file setting under the "
                "[dxleventreporting] section in the reporting.conf file.")

        self.config = DxlClientConfig.create_dxl_config_from_file(
            dxl_client_config_file)
        return self.enabled
Exemplo n.º 10
0
    def negotiate_protocol(self):
        """Read the protocol announcement from the netlog client and install
        the matching protocol handler (BSON, FILE, or LOG).
        @raise CuckooOperationalError: when an unknown protocol is requested.
        """
        protocol = self.read_newline(strip=True)

        # The command may carry an optional version number, e.g., "BSON 2".
        if " " in protocol:
            command, version = protocol.split()
            version = int(version)
        else:
            command, version = protocol, None

        if command == "BSON":
            handler = BsonParser(self, version)
        elif command == "FILE":
            handler = FileUpload(self, version)
        elif command == "LOG":
            handler = LogHandler(self, version)
        else:
            raise CuckooOperationalError(
                "Netlog failure, unknown protocol requested.")

        self.protocol = handler
        self.protocol.init()
Exemplo n.º 11
0
    def handle(self):
        """Receive a single file upload from the analyzer and store it under
        the task's storage path.

        Reads the sanitized destination path (and, for protocol version >= 2,
        the original filepath and the PIDs involved) from the netlog stream,
        records the upload in the file log, then streams the contents to disk.
        @raise CuckooOperationalError: when the analyzer tries to overwrite
            an existing file.
        """
        # Read until newline for file path, e.g.,
        # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin
        self.handler.sock.settimeout(30)
        dump_path = netlog_sanitize_fname(self.handler.read_newline())

        if self.version and self.version >= 2:
            # NB: filepath is only used as metadata
            filepath = self.handler.read_newline()
            pids = map(int, self.handler.read_newline().split())
        else:
            filepath, pids = None, []

        log.debug("Task #%s: File upload for %r", self.task_id, dump_path)
        file_path = os.path.join(self.storagepath, dump_path.decode("utf-8"))

        try:
            # open_exclusive() raising EEXIST below means the file already
            # exists; refuse to overwrite results from the guest.
            self.fd = open_exclusive(file_path)
        except OSError as e:
            if e.errno == errno.EEXIST:
                raise CuckooOperationalError("Analyzer for task #%s tried to "
                                             "overwrite an existing file" %
                                             self.task_id)
            raise

        # Append-writes are atomic
        with open(self.filelog, "a+b") as f:
            print(json.dumps({
                "path": dump_path,
                "filepath": filepath,
                "pids": pids,
            }),
                  file=f)

        # No timeout while the (potentially large) file body streams in.
        self.handler.sock.settimeout(None)
        try:
            return self.handler.copy_to_fd(self.fd, self.upload_max_size)
        finally:
            # Log the received length even when the copy failed halfway.
            log.debug("Task #%s uploaded file length: %s", self.task_id,
                      self.fd.tell())
Exemplo n.º 12
0
    def create(root=".", folders=None):
        """Creates a directory or multiple directories.
        @param root: root path.
        @param folders: folders list to be created.
        @raise CuckooOperationalError: if fails to create folder.
        If folders is None, we try to create the folder provided by `root`.
        """
        # The root may be given as a path or as a sequence of components.
        if isinstance(root, (tuple, list)):
            root = os.path.join(*root)

        # Normalize `folders` into a sequence of folder names.
        if folders is None:
            folders = [""]
        elif isinstance(folders, basestring):
            folders = [folders]

        for name in folders:
            path = os.path.join(root, name)
            if os.path.isdir(path):
                continue
            try:
                os.makedirs(path)
            except OSError:
                raise CuckooOperationalError(
                    "Unable to create folder: %s" % path)
    def connect(self):
        """
        A method to have the OpenDXL Python Client connect to a DXL Broker
        listed in the OpenDXL Python Client configuration. No-op when this
        module is disabled.
        @raise CuckooOperationalError: when the client can't connect.
        """
        if not self.enabled:
            return

        try:
            log.info("Creating DXL Client")
            # Create the client lazily on first use; afterwards only
            # reconnect when the existing client lost its connection.
            if not self.client:
                self.client = DxlClient(self.config)
                self.client.connect()
            elif not self.client.connected:
                self.client.connect()

            log.info("Connected to a broker")
        except Exception as exc:
            log.exception(
                "Error creating DXL Client and connecting to a DXL Broker.")
            raise CuckooOperationalError(
                "Error creating DXL Client and connecting to a DXL Broker: %s"
                % exc)
Exemplo n.º 14
0
def dumpcmd(dburi, dirpath):
    """Build the command line (and extra environment) that dumps the SQL
    database identified by `dburi` for an old Cuckoo setup at `dirpath`.
    @return: tuple of (argument list, environment dict).
    @raise CuckooOperationalError: for unsupported database URIs.
    """
    engine = _dburi_engine(dburi)
    url = engine.url

    if engine.name == "sqlite":
        # If the SQLite3 database filepath is relative, then make it relative
        # against the old Cuckoo setup. If it's absolute, os.path.join() will
        # keep it absolute as-is (see also our version 1.1.1 release :-P).
        return ["sqlite3", os.path.join(dirpath, url.database), ".dump"], {}

    if engine.name == "mysql":
        cmd = ["mysqldump"]
        if url.username:
            cmd.extend(["-u", url.username])
        if url.password:
            cmd.append("-p%s" % url.password)
        if url.host and url.host != "localhost":
            cmd.extend(["-h", url.host])
        cmd.append(url.database)
        return cmd, {}

    if engine.name == "postgresql":
        cmd, env = ["pg_dump"], {}
        if url.username:
            cmd.extend(["-U", url.username])
        if url.password:
            # pg_dump reads the password from the environment.
            env["PGPASSWORD"] = url.password
        if url.host and url.host != "localhost":
            cmd.extend(["-h", url.host])
        cmd.append(url.database)
        return cmd, env

    raise CuckooOperationalError(
        "Error creating SQL database backup as your SQL database URI "
        "wasn't understood by us: %r!" % dburi)
Exemplo n.º 15
0
    def stop(self):
        """Stop sniffing.
        @return: operation status.
        @raise CuckooOperationalError: if tcpdump quit early, which generally
            indicates a permission error.
        """
        # The tcpdump process was never started in the first place.
        if not self.proc:
            return

        # The tcpdump process has already quit, generally speaking this
        # indicates an error such as "permission denied".
        if self.proc.poll():
            out, err = self.proc.communicate()
            raise CuckooOperationalError(
                "Error running tcpdump to sniff the network traffic during "
                "the analysis; stdout = %r and stderr = %r. Did you enable "
                "the extra capabilities to allow running tcpdump as non-root "
                "user and disable AppArmor properly (the latter only applies "
                "to Ubuntu-based distributions with AppArmor, see also %s)?" %
                (out, err, faq("permission-denied-for-tcpdump"))
            )

        try:
            self.proc.terminate()
        except Exception:
            # Previously a bare "except:" which also swallowed SystemExit and
            # KeyboardInterrupt; catch Exception instead and fall back to
            # killing the process when terminating it failed.
            try:
                if not self.proc.poll():
                    log.debug("Killing sniffer")
                    self.proc.kill()
            except OSError as e:
                log.debug("Error killing sniffer: %s. Continue", e)
            except Exception as e:
                log.exception("Unable to stop the sniffer with pid %d: %s",
                              self.proc.pid, e)

        # Ensure expected output was received from tcpdump.
        out, err = self.proc.communicate()
        self._check_output(out, err)
Exemplo n.º 16
0
def sqldump(dburi, dirpath):
    """Interactively dump the SQL database identified by `dburi` into the
    backup.sql file in the Cuckoo Working Directory.
    @param dburi: SQL database URI of the old Cuckoo setup.
    @param dirpath: path of the old Cuckoo setup (used to locate a relative
        sqlite3 database file).
    @raise CuckooOperationalError: if the dump command fails.
    """
    # Resolve the database-specific dump command plus extra environment.
    args, env = dumpcmd(dburi, dirpath)

    # Render the full command line for display purposes only.
    envargs = " ".join("%s=%s" % (k, v) for k, v in env.items())
    cmdline = " ".join('"%s"' % arg if " " in arg else arg for arg in args)
    cmd = "%s %s" % (envargs, cmdline) if envargs else cmdline

    # NOTE: Python 2 print statements.
    print "We can make a SQL database backup as follows:"
    print "input cmd  =>", cmd
    print "output SQL =>", cwd("backup.sql")

    if not click.confirm("Would you like to make a backup", default=True):
        return

    try:
        # Redirect the dump into backup.sql and merge the extra environment
        # (e.g., PGPASSWORD) into the current one.
        subprocess.check_call(args,
                              stdout=open(cwd("backup.sql"), "wb"),
                              env=dict(os.environ.items() + env.items()))
    except (subprocess.CalledProcessError, OSError) as e:
        raise CuckooOperationalError(
            "Error creating SQL database dump as the command returned an "
            "error code: %s. Please make sure that the required tooling "
            "for making a database backup is installed and review the "
            "database URI to make sure it's correct: %s!" % (e, dburi))
Exemplo n.º 17
0
def import_cuckoo(username, mode, dirpath):
    """Import an older Cuckoo setup located at `dirpath` into a fresh Cuckoo
    Working Directory.
    @param username: user that will own the newly created CWD files.
    @param mode: import mode, forwarded to movesql() and
        import_legacy_analyses().
    @param dirpath: root of the older Cuckoo setup, i.e., the directory
        containing the cuckoo.py script.
    @raise CuckooOperationalError: on an unrecognized setup, an unsupported
        version, a non-empty CWD, or a failing database migration.
    """
    version = identify(dirpath)
    if not version:
        raise CuckooOperationalError(
            "The path that you specified is not a proper Cuckoo setup. Please "
            "point the path to the root of your older Cuckoo setup, i.e., to "
            "the directory containing the cuckoo.py script!")

    # TODO Copy over the configuration and ignore the database.
    if version in ("0.4", "0.4.1", "0.4.2"):
        raise CuckooOperationalError(
            "Importing from version 0.4, 0.4.1, or 0.4.2 is not supported as "
            "there are no database migrations for that version. Please start "
            "from scratch, your configuration would have been obsolete anyway!"
        )

    print "We've identified a Cuckoo Sandbox %s installation!" % version

    # Refuse to import into a CWD that already has contents.
    if os.path.isdir(cwd()) and os.listdir(cwd()):
        raise CuckooOperationalError(
            "This Cuckoo Working Directory already exists. Please import to "
            "a new/clean Cuckoo Working Directory.")

    # Following are various recursive imports.
    from cuckoo.apps import migrate_database
    from cuckoo.main import cuckoo_create

    print "Reading in the old configuration.."

    # Port the older configuration.
    cfg = Config.from_confdir(os.path.join(dirpath, "conf"), loose=True)
    cfg = migrate_conf(cfg, version)

    print "  configuration has been migrated to the latest version!"
    print

    # Create a fresh Cuckoo Working Directory.
    cuckoo_create(username, cfg, quiet=True)

    dburi = cfg["cuckoo"]["database"]["connection"]

    # Ask if the user would like to make a backup of the SQL database and in
    # the case of sqlite3, copy/move/symlink cuckoo.db to the CWD.
    sqldump(dburi, dirpath)
    movesql(dburi, mode, dirpath)

    # Run database migrations.
    if not migrate_database():
        raise CuckooOperationalError(
            "Error migrating your old Cuckoo database!")

    # Link or copy all of the older results to the new CWD.
    import_legacy_analyses(mode, dirpath)

    # Urge the user to run the community command.
    print
    print "You have successfully imported your old version of Cuckoo!"
    print "However, in order to get up-to-date, you'll probably want to"
    print yellow("run the community command"),
    print "by running", red("'cuckoo community'"), "manually."
    print "The community command will fetch the latest monitoring updates"
    print "and Cuckoo Signatures."
Exemplo n.º 18
0
    def import_(self, f, submit_id):
        """Import an analysis identified by the file(-like) object f.
        @param f: file(-like) object containing a zipped analysis.
        @param submit_id: submit identifier to associate the new task with.
        @return: database ID of the newly created task.
        @raise CuckooOperationalError: on an invalid or incomplete archive.
        """
        try:
            z = zipfile.ZipFile(f)
        except zipfile.BadZipfile:
            raise CuckooOperationalError(
                "Imported analysis is not a proper .zip file.")

        # Ensure there are no files with illegal or potentially insecure names.
        # TODO Keep in mind that if we start to support other archive formats
        # (e.g., .tar) that those may also support symbolic links. In that case
        # we should probably start using sflock here.
        for filename in z.namelist():
            if filename.startswith("/") or ".." in filename or ":" in filename:
                raise CuckooOperationalError(
                    "The .zip file contains a file with a potentially "
                    "incorrect filename: %s" % filename)

        if "task.json" not in z.namelist():
            raise CuckooOperationalError(
                "The task.json file is required in order to be able to import "
                "an analysis! This file contains metadata about the analysis.")

        # Fields task.json must provide, mapped to their expected types.
        required_fields = {
            "options": dict,
            "route": basestring,
            "package": basestring,
            "target": basestring,
            "category": basestring,
            "memory": bool,
            "timeout": (int, long),
            "priority": (int, long),
            "custom": basestring,
            "tags": (tuple, list),
        }

        try:
            info = json.loads(z.read("task.json"))
            for key, type_ in required_fields.items():
                if key not in info:
                    raise ValueError("missing %s" % key)
                # None values are tolerated; otherwise the type must match.
                if info[key] is not None and not isinstance(info[key], type_):
                    raise ValueError("%s => %s" % (key, info[key]))
        except ValueError as e:
            raise CuckooOperationalError(
                "The provided task.json file, required for properly importing "
                "the analysis, is incorrect or incomplete (%s)." % e)

        if info["category"] == "url":
            task_id = submit_task.add_url(url=info["target"],
                                          package=info["package"],
                                          timeout=info["timeout"],
                                          options=info["options"],
                                          priority=info["priority"],
                                          custom=info["custom"],
                                          memory=info["memory"],
                                          tags=info["tags"],
                                          submit_id=submit_id)
        else:
            # Users may have the "delete_bin_copy" enabled and in such cases
            # the binary file won't be included in the .zip file.
            if "binary" in z.namelist():
                filepath = Files.temp_named_put(
                    z.read("binary"), os.path.basename(info["target"]))
            else:
                # Generate a temp file as a target if no target is present
                filepath = Files.temp_put("")

            # We'll be updating the target shortly.
            task_id = submit_task.add_path(file_path=filepath,
                                           package=info["package"],
                                           timeout=info["timeout"],
                                           options=info["options"],
                                           priority=info["priority"],
                                           custom=info["custom"],
                                           memory=info["memory"],
                                           tags=info["tags"],
                                           submit_id=submit_id)

        if not task_id:
            raise CuckooOperationalError(
                "There was an error creating a task for the to-be imported "
                "analysis in our database.. Can't proceed.")

        # The constructors currently don't accept this argument.
        db.set_route(task_id, info["route"])

        mkdir(cwd(analysis=task_id))
        z.extractall(cwd(analysis=task_id))

        # If there's an analysis.json file, load it up to figure out additional
        # metdata regarding this analysis.
        if os.path.exists(cwd("analysis.json", analysis=task_id)):
            try:
                obj = json.load(
                    open(cwd("analysis.json", analysis=task_id), "rb"))
                if not isinstance(obj, dict):
                    raise ValueError
                if "errors" in obj and not isinstance(obj["errors"], list):
                    raise ValueError
                if "action" in obj and not isinstance(obj["action"], list):
                    raise ValueError
            except ValueError:
                log.warning(
                    "An analysis.json file was provided, but wasn't a valid "
                    "JSON object/structure that we can to enhance the "
                    "analysis information.")
            else:
                # Record the legacy errors/actions against the new task.
                for error in set(obj.get("errors", [])):
                    if isinstance(error, basestring):
                        db.add_error(error, task_id)
                for action in set(obj.get("action", [])):
                    if isinstance(action, basestring):
                        db.add_error("", task_id, action)

        # We set this analysis as completed so that it will be processed
        # automatically (assuming 'cuckoo process' is running).
        db.set_status(task_id, TASK_COMPLETED)
        return task_id
Exemplo n.º 19
0
    def __iter__(self):
        """Receive a file upload (legacy protocol) and write it to disk.

        Reads the destination path, optional metadata (protocol version >= 2),
        and the file contents from the netlog stream.
        @raise CuckooOperationalError: on banned or unsanitizable paths.
        """
        # Read until newline for file path, e.g.,
        # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin

        dump_path = self.handler.read_newline(strip=True).replace("\\", "/")

        if self.version >= 2:
            # NB: filepath is only used as metadata.
            filepath = self.handler.read_newline(strip=True)
            pids = map(int, self.handler.read_newline(strip=True).split())
        else:
            filepath, pids = None, []

        log.debug("File upload request for %s", dump_path)

        dir_part, filename = os.path.split(dump_path)

        if "./" in dump_path or not dir_part or dump_path.startswith("/"):
            raise CuckooOperationalError(
                "FileUpload failure, banned path: %s" % dump_path)

        for restricted in self.RESTRICTED_DIRECTORIES:
            if restricted in dir_part:
                raise CuckooOperationalError(
                    "FileUpload failure, banned path.")

        try:
            Folders.create(self.storagepath, dir_part)
        except CuckooOperationalError:
            log.error("Unable to create folder %s", dir_part)
            return

        file_path = os.path.join(self.storagepath, dump_path)

        if not file_path.startswith(self.storagepath):
            raise CuckooOperationalError(
                "FileUpload failure, path sanitization failed.")

        if os.path.exists(file_path):
            log.warning("Analyzer tried to overwrite an existing file, "
                        "closing connection.")
            return

        self.fd = open(file_path, "wb")
        chunk = self.handler.read_any()
        while chunk:
            self.fd.write(chunk)

            if self.fd.tell() >= self.upload_max_size:
                log.warning(
                    "Uploaded file length larger than upload_max_size, "
                    "stopping upload.")
                self.fd.write("... (truncated)")
                break

            try:
                chunk = self.handler.read_any()
            except Exception:
                # Was a bare "except:" — catching Exception keeps the
                # best-effort "stop reading on any error" behavior while
                # letting SystemExit/KeyboardInterrupt propagate.
                break

        # Hold the lock only while appending to the shared file log, and
        # release it even when the write raises; the previous code leaked
        # the lock on an exception here.
        self.lock.acquire()
        try:
            with open(self.filelog, "a+b") as f:
                f.write("%s\n" % json.dumps({
                    "path": dump_path,
                    "filepath": filepath,
                    "pids": pids,
                }))
        finally:
            self.lock.release()

        log.debug("Uploaded file length: %s", self.fd.tell())
        # The trailing yield (after return) makes this method a generator,
        # which the protocol dispatcher iterates.
        return
        yield
Exemplo n.º 20
0
    def init_config(self):
        """Create a volatility configuration.
        @raise CuckooOperationalError: if no OS profile has been configured,
            the profile is unknown, or it doesn't match the memory dump.
        """
        # Already initialized by an earlier call; nothing to do.
        if self.config is not None and self.addr_space is not None:
            return

        if not self.osprofile:
            raise CuckooOperationalError(
                "Can't continue to process the VM memory dump if no OS "
                "profile has been defined for it. One may define its OS "
                "profile using the 'osprofile' field for the VM in its "
                "machinery configuration or set a global default using "
                "'guest_profile' in memory.conf")

        if self.osprofile not in self.profiles:
            raise CuckooOperationalError(
                "The profile '%s' does not exist! Please pick one of the "
                "following profiles for your VMs: %s" %
                (self.osprofile, ", ".join(sorted(self.profiles))))

        self.config = conf.ConfObject()
        # Resolve option clashes instead of erroring out when registering
        # the global Volatility options.
        self.config.optparser.set_conflict_handler("resolve")
        registry.register_global_options(self.config, commands.Command)

        # Baseline Volatility options; "profile" and "location" vary per
        # memory dump, the rest are neutral defaults.
        base_conf = {
            "profile": self.osprofile,
            "use_old_as": None,
            "kdbg": None,
            "help": False,
            "kpcr": None,
            "tz": None,
            "pid": None,
            "output_file": None,
            "physical_offset": None,
            "conf_file": None,
            "dtb": None,
            "output": None,
            "info": None,
            "location": "file://%s" % self.memdump,
            "plugins": None,
            "debug": None,
            "cache_dtb": True,
            "filename": None,
            "cache_directory": None,
            "verbose": None,
            "write": False
        }

        for key, value in base_conf.items():
            self.config.update(key, value)

        # Deal with Volatility support for KVM/qemu memory dump.
        # See: #464.
        try:
            self.addr_space = utils.load_as(self.config)
        except exc.AddrSpaceError as e:
            # Retrying after locating the DTB may still succeed.
            if self.get_dtb():
                self.addr_space = utils.load_as(self.config)
            elif "No suitable address space mapping found" in e.message:
                raise CuckooOperationalError(
                    "An incorrect OS has been specified for this machine! "
                    "Please provide the correct one or Cuckoo won't be able "
                    "to provide Volatility-based results for analyses with "
                    "this VM.")
            else:
                raise

        self.plugins = (registry.get_plugin_classes(commands.Command,
                                                    lower=True))