Example #1
def test_invalid_plugin():
    dirpath = tempfile.mkdtemp()
    Files.create(dirpath, "foo.py", "import foobarnotexist")

    with pytest.raises(CuckooOperationalError) as e:
        enumerate_plugins(dirpath, "enumplugins", globals(), Signature, {})
    e.match("Unable to load the Cuckoo plugin")
Example #2
    def test_tasks_delete(self):
        filepath1 = Files.temp_put("foobar")
        filepath2 = Files.temp_put("foobar")
        assert os.path.exists(filepath1)
        assert os.path.exists(filepath2)

        self.db.session.add(db.Task(filepath1, status=db.Task.FINISHED))
        self.db.session.add(db.Task(filepath2, status=db.Task.FINISHED))
        data = {
            "task_ids": "1 2",
        }
        assert self.client.delete("/api/tasks", data=data).json == {
            "success": True,
        }
        assert not os.path.exists(filepath1)
        assert not os.path.exists(filepath2)
        assert self.client.delete("/api/task/1").json == {
            "success": False,
            "message": "Task already deleted",
        }
        assert self.client.delete("/api/task/2").json == {
            "success": False,
            "message": "Task already deleted",
        }
        assert not os.path.exists(filepath1)
        assert not os.path.exists(filepath2)
Example #3
    def test_create_tuple(self):
        dirpath = tempfile.mkdtemp()
        Folders.create(dirpath, "foo")
        Files.create((dirpath, "foo"), "a.txt", "bar")

        filepath = os.path.join(dirpath, "foo", "a.txt")
        assert open(filepath, "rb").read() == "bar"
Example #4
def test_migration_201_202():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "virtualbox.conf", """
[virtualbox]
machines = cuckoo1, cuckoo2
[cuckoo1]
platform = windows
[cuckoo2]
platform = windows
""")
    # Except for virtualbox.
    machineries = (
        "avd", "esx", "kvm", "physical", "qemu",
        "vmware", "vsphere", "xenserver",
    )
    for machinery in machineries:
        Files.create(
            cwd("conf"), "%s.conf" % machinery,
            "[%s]\nmachines =" % machinery
        )
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "2.0.1", "2.0.2")
    assert cfg["virtualbox"]["cuckoo1"]["osprofile"] is None
    assert cfg["virtualbox"]["cuckoo2"]["osprofile"] is None
Example #5
    def test_status(self):
        # Create any temporary file, as long as the temporary directory is
        # not empty. Tests bug fix where /cuckoo/status tries to remove the
        # entire temporary directory.
        Files.temp_put("")

        r = self.app.get("/cuckoo/status")
        assert r.status_code == 200
Example #6
def test_unknown_section():
    Files.create(
        cwd("conf"), "cuckoo.conf",
        "[virtualbox]\npath = /usr/bin/VBoxManage"
    )
    cfg = Config.from_confdir(cwd("conf"))
    assert "virtualbox" not in cfg["cuckoo"]

    cfg = Config.from_confdir(cwd("conf"), loose=True)
    assert cfg["cuckoo"]["virtualbox"]["path"] == "/usr/bin/VBoxManage"
Example #7
def test_unknown_conf_file():
    Files.create(
        cwd("conf"), "foobar.conf",
        "[derp]\nfoo = bar"
    )
    cfg = Config.from_confdir(cwd("conf"))
    assert "derp" not in cfg["foobar"]

    cfg = Config.from_confdir(cwd("conf"), loose=True)
    assert cfg["foobar"]["derp"]["foo"] == "bar"
Example #8
def test_migration_204_205():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "auxiliary.conf", """
[mitm]
script = mitm.py
""")
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "2.0.4", "2.0.5")
    assert cfg["auxiliary"]["mitm"]["script"] == "stuff/mitm.py"
Example #9
def test_migration_203_204():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "processing.conf", """
[dumptls]
enabled = on
""")
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "2.0.3", "2.0.4")
    assert cfg["processing"]["extracted"]["enabled"] is True
Example #10
    def test_fd_exhaustion(self):
        fd, filepath = tempfile.mkstemp()

        for x in xrange(0x100):
            Files.temp_put("foo")

        fd2, filepath = tempfile.mkstemp()

        # Let's leave a bit of working space.
        assert fd2 - fd < 64
Example #11
def test_migration_200_201():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "memory.conf", """
[mask]
pid_generic =
""")
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "2.0.0", "2.0.1")
    assert cfg["memory"]["mask"]["pid_generic"] == []
Example #12
def test_migration_100_110():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "cuckoo.conf", """
[cuckoo]
delete_original = on
""")
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "1.0.0", "1.1.0")
    assert cfg["cuckoo"]["cuckoo"]["tmppath"] == "/tmp"
Example #13
    def parse_files(self):
        """Parse the files-json.log file and its associated files."""
        files_log = os.path.join(self.suricata_path, self.files_log)
        if not os.path.isfile(files_log):
            log.warning("Unable to find the files-json.log log file")
            return

        files = {}

        # Index all the available files.
        files_dir = os.path.join(self.suricata_path, self.files_dir)
        if not os.path.exists(files_dir):
            log.warning("Suricata files dir is not available. Maybe you forgot to enable Suricata file-store ?")
            return

        for filename in os.listdir(files_dir):
            filepath = os.path.join(files_dir, filename)
            files[Files.md5_file(filepath)] = filepath

        for line in open(files_log, "rb"):
            event = json.loads(line)

            # It is not entirely clear why, but some events carry only an ID,
            # some only an MD5 hash (and perhaps some carry neither), so take
            # care of each of these situations.
            if "id" in event:
                filepath = os.path.join(files_dir, "file.%s" % event["id"])
            elif "md5" in event:
                filepath = files.get(event["md5"])
            else:
                filepath = None

            if not filepath or not os.path.isfile(filepath):
                log.warning(
                    "Suricata dropped file with id=%s and md5=%s not found, "
                    "skipping it..", event.get("id"), event.get("md5")
                )
                continue

            referer = event.get("http_referer")
            if referer == "<unknown>":
                referer = None

            self.results["files"].append({
                "id": int(filepath.split(".", 1)[-1]),
                "filesize": event["size"],
                "filename": os.path.basename(event["filename"]),
                "hostname": event.get("http_host"),
                "uri": event.get("http_uri"),
                "md5": Files.md5_file(filepath),
                "sha1": Files.sha1_file(filepath),
                "magic": event.get("magic"),
                "referer": referer,
            })
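For reference, a hedged sketch of the kind of event the parser above expects to find in files-json.log. The field names ("id", "md5", "size", "filename", "http_host", "http_uri", "magic", "http_referer") are taken from the code; the concrete values, and whether a given Suricata build emits "id" or "md5", are assumptions for illustration.

import json

# A hypothetical files-json.log line; one JSON object per line.
line = (
    '{"id": 1, "size": 4096, "filename": "payload.exe", '
    '"http_host": "example.com", "http_uri": "/payload.exe", '
    '"magic": "PE32 executable", "http_referer": "<unknown>"}'
)
event = json.loads(line)

# With an "id" present, parse_files() looks for the dropped file on disk as
# "file.<id>" inside the Suricata files directory, and it discards the
# "<unknown>" referer placeholder.
assert event["id"] == 1 and event["http_referer"] == "<unknown>"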
Example #14
def test_invalid_section():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")

    Files.create(cwd("conf"), "cuckoo.conf", "[invalid_section]\nfoo = bar")
    with pytest.raises(CuckooConfigurationError) as e:
        Config("cuckoo", strict=True)
    e.match("Config section.*not found")

    Files.create(cwd("conf"), "cuckoo.conf", "[cuckoo]\ninvalid = entry")
    with pytest.raises(CuckooConfigurationError) as e:
        config("cuckoo:invalid:entry", strict=True)
    e.match("No such configuration value exists")
Example #15
def init_legacy_analyses():
    dirpath = tempfile.mkdtemp()
    mkdir(dirpath, "storage")
    mkdir(dirpath, "storage", "analyses")

    mkdir(dirpath, "storage", "analyses", "1")
    mkdir(dirpath, "storage", "analyses", "1", "logs")
    Files.create(
        (dirpath, "storage", "analyses", "1", "logs"), "a.txt", "a"
    )
    mkdir(dirpath, "storage", "analyses", "1", "reports")
    Files.create(
        (dirpath, "storage", "analyses", "1", "reports"), "b.txt", "b"
    )

    mkdir(dirpath, "storage", "analyses", "2")
    Files.create((dirpath, "storage", "analyses", "2"), "cuckoo.log", "log")

    if not is_windows():
        os.symlink(
            "thisisnotanexistingfile",
            os.path.join(dirpath, "storage", "analyses", "2", "binary")
        )

    Files.create((dirpath, "storage", "analyses"), "latest", "last!!1")
    return dirpath
Example #16
def test_confdir():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(
        cwd("conf"), "cuckoo.conf",
        "[cuckoo]\ndelete_original = yes"
    )
    Files.create(
        cwd("conf"), "virtualbox.conf",
        "[virtualbox]\npath = /usr/bin/VBoxManage"
    )
    cfg = Config.from_confdir(cwd("conf"))
    assert cfg["cuckoo"]["cuckoo"]["delete_original"] is True
    assert cfg["virtualbox"]["virtualbox"]["path"] == "/usr/bin/VBoxManage"
Example #17
def test_decide_cwd():
    orig_cuckoo_cwd = os.environ.pop("CUCKOO_CWD", None)
    orig_cuckoo = os.environ.pop("CUCKOO", None)

    dirpath1 = tempfile.mkdtemp()
    dirpath2 = tempfile.mkdtemp()
    dirpath3 = tempfile.mkdtemp()

    assert decide_cwd(dirpath1) == dirpath1

    assert decide_cwd() == os.path.abspath(os.path.expanduser("~/.cuckoo"))

    curdir = os.getcwd()
    os.chdir(dirpath2)
    open(".cwd", "wb").write("A"*40)

    assert decide_cwd() == os.path.abspath(".")
    os.chdir(curdir)

    os.environ["CUCKOO"] = dirpath2
    assert decide_cwd(dirpath1) == dirpath1
    assert decide_cwd() == dirpath2

    os.environ["CUCKOO_CWD"] = dirpath3
    assert decide_cwd(dirpath1) == dirpath1
    assert decide_cwd() == dirpath3

    with pytest.raises(CuckooStartupError) as e:
        decide_cwd(tempfile.mktemp(), exists=True)
    e.match("is not present")

    with pytest.raises(CuckooStartupError) as e:
        decide_cwd(dirpath1, exists=True)
    e.match("is not a proper CWD")

    Files.create(dirpath1, ".cwd", "A"*40)
    assert decide_cwd(dirpath1, exists=True) == dirpath1

    # Cleanup.
    if orig_cuckoo:
        os.environ["CUCKOO"] = orig_cuckoo
    else:
        os.environ.pop("CUCKOO", None)

    if orig_cuckoo_cwd:
        os.environ["CUCKOO_CWD"] = orig_cuckoo_cwd
    else:
        os.environ.pop("CUCKOO_CWD", None)
Example #18
 def test_empty_mempath(self, p):
     set_cwd(tempfile.mkdtemp())
     m = Memory()
     m.memory_path = Files.temp_put("")
     assert m.run() is None
     p.error.assert_called_once()
     assert "dump empty" in p.error.call_args_list[0][0][0]
Example #19
 def test_resubmit_file_missing(self, client):
     filepath = Files.temp_put("hello world")
     db.add_path(filepath, options={
         "human": 0, "free": "yes",
     })
     os.unlink(filepath)
     assert client.get("/submit/re/1/").status_code == 500
Example #20
def test_wsf_language():
    wsf = WindowsScriptFile(Files.temp_put(
        "<script language='JScript.Encode'></script>"
    ))
    wsf.decode = mock.MagicMock(return_value="codehere")
    assert wsf.run() == ["codehere"]
    wsf.decode.assert_called_once()
Example #21
 def test_submit_abort(self, p, capsys):
     p.side_effect = KeyboardInterrupt
     main.main((
         "--cwd", cwd(), "submit", Files.create(cwd(), "a.txt", "hello")
     ), standalone_mode=False)
     out, _ = capsys.readouterr()
     assert "Aborting submission of" in out
Example #22
    def _handle_string(self, submit, tmppath, line):
        if not line:
            return

        if validate_hash(line):
            try:
                filedata = VirusTotalAPI().hash_fetch(line)
            except CuckooOperationalError as e:
                submit["errors"].append(
                    "Error retrieving file hash: %s" % e
                )
                return

            filepath = Files.create(tmppath, line, filedata)

            submit["data"].append({
                "type": "file",
                "data": filepath
            })
            return

        if validate_url(line):
            submit["data"].append({
                "type": "url",
                "data": validate_url(line),
            })
            return

        submit["errors"].append(
            "'%s' was neither a valid hash or url" % line
        )
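As an illustration of the structure _handle_string() builds up, here is a hedged sketch of the submit dict after feeding it a hash, a URL, and an invalid line. The "data"/"errors" layout mirrors the code above; the temporary path, hash, and URL values are made up for illustration.

# Hypothetical end state after three calls to _handle_string() (one hash that
# VirusTotal resolves, one URL, one junk line); values are illustrative.
submit = {
    "data": [
        # The hash line: the fetched payload is written to <tmppath>/<hash>.
        {"type": "file", "data": "/tmp/submit/44d88612fea8a8f36de82e1278abb02f"},
        # The URL line is passed through validate_url().
        {"type": "url", "data": "http://example.com/sample"},
    ],
    "errors": [
        "'notahashoraurl' was neither a valid hash nor a valid URL",
    ],
}
assert len(submit["data"]) == 2 and len(submit["errors"]) == 1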
Example #23
def test_sanitize():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(
        cwd("conf"), "cuckoo.conf",
        "[database]\n"
        "timeout = 42\n"
        "connection = postgresql://user:pass@localhost/cuckoo"
    )
    cfg = Config.from_confdir(cwd("conf"))
    assert cfg["cuckoo"]["database"]["timeout"] == 42
    assert cfg["cuckoo"]["database"]["connection"] == "postgresql://*****:*****@localhost/cuckoo"

    cfg = Config.from_confdir(cwd("conf"), sanitize=True)
    assert cfg["cuckoo"]["database"]["timeout"] == 42
    assert cfg["cuckoo"]["database"]["connection"] == "*"*8
Example #24
def test_yara_offsets():
    set_cwd(tempfile.mkdtemp())
    cuckoo_create()
    init_yara()

    buf = (
        # The SSEXY payload as per vmdetect.yar
        "66 0F 70 ?? ?? 66 0F DB ?? ?? ?? ?? "
        "?? 66 0F DB ?? ?? ?? ?? ?? 66 0F EF "
        # A VirtualBox MAC address.
        "30 38 2d 30 30 2d 32 37"
    )
    filepath = Files.temp_put(
        "A"*64 + buf.replace("??", "00").replace(" ", "").decode("hex")
    )
    assert File(filepath).get_yara() == [{
        "meta": {
            "description": "Possibly employs anti-virtualization techniques",
            "author": "nex"
        },
        "name": "vmdetect",
        "offsets": {
            "ssexy": [
                (64, 1),
            ],
            "virtualbox_mac_1a": [
                (88, 0),
            ],
        },
        "strings": [
            "MDgtMDAtMjc=",
            "Zg9wAABmD9sAAAAAAGYP2wAAAAAAZg/v",
        ],
    }]
Example #25
 def test_star_new(self):
     filepath = Files.temp_put("""
     virtualbox.virtualbox.machines = cuckoo2, cuckoo3
     virtualbox.cuckoo2.ip = 192.168.56.102
     virtualbox.cuckoo3.ip = 192.168.56.103
     virtualbox.notexistingvm.ip = 1.2.3.4
     """)
     assert read_kv_conf(filepath) == {
         "virtualbox": {
             "virtualbox": {
                 "machines": [
                     "cuckoo2", "cuckoo3",
                 ],
             },
             "cuckoo2": {
                 "ip": "192.168.56.102",
             },
             "cuckoo3": {
                 "ip": "192.168.56.103",
             },
             "notexistingvm": {
                 "ip": "1.2.3.4",
             },
         },
     }
Example #26
 def test_empty_move(self):
     oldfilepath = Files.temp_put("hello")
     movesql("sqlite:///%s" % oldfilepath, "move", temppath())
     assert not os.path.exists(oldfilepath)
     assert os.path.exists(cwd("cuckoo.db"))
     assert not os.path.islink(cwd("cuckoo.db"))
     assert open(cwd("cuckoo.db"), "rb").read() == "hello"
Example #27
def test_yara_no_description():
    set_cwd(tempfile.mkdtemp())
    cuckoo_create()
    open(cwd("yara", "binaries", "empty.yara"), "wb").write("""
        rule EmptyRule {
            condition:
                1
        }
        rule DescrRule {
            meta:
                description = "this is description"
            condition:
                1
        }
    """)
    init_yara()
    a, b = File(Files.temp_put("hello")).get_yara()
    assert a["name"] == "EmptyRule"
    assert a["meta"] == {
        "description": "(no description)",
    }
    assert b["name"] == "DescrRule"
    assert b["meta"] == {
        "description": "this is description",
    }
Example #28
def test_process_dodelete(r, s, p):
    set_cwd(tempfile.mkdtemp())
    cuckoo_create(cfg={
        "cuckoo": {
            "cuckoo": {
                "delete_original": True,
                "delete_bin_copy": True,
            },
        },
    })

    filepath1 = Files.temp_put("hello world")
    filepath2 = Files.create(cwd("storage", "binaries"), "A"*40, "binary")

    process(filepath1, filepath2, 1)
    assert not os.path.exists(filepath1)
    assert not os.path.exists(filepath2)
Example #29
def test_process_nodelete(r, s, p):
    set_cwd(tempfile.mkdtemp())
    cuckoo_create(
        cfg={
            "cuckoo": {
                "cuckoo": {
                    "delete_original": False,
                    "delete_bin_copy": False,
                },
            },
        })

    filepath1 = Files.temp_put("hello world")
    filepath2 = Files.create(cwd("storage", "binaries"), "A" * 40, "binary")

    process(filepath1, filepath2, 1)
    assert os.path.exists(filepath1)
    assert os.path.exists(filepath2)
Example #30
def test_migration_050_060():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "cuckoo.conf", "[cuckoo]")
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "0.5.0", "0.6.0")
    assert cfg["cuckoo"]["resultserver"] == {
        "ip": "192.168.56.1",
        "port": 2042,
        "store_csvs": False,
        "upload_max_size": 10485760,
    }
    assert cfg["processing"] == {
        "analysisinfo": {
            "enabled": True,
        },
        "behavior": {
            "enabled": True,
        },
        "debug": {
            "enabled": True,
        },
        "dropped": {
            "enabled": True,
        },
        "network": {
            "enabled": True,
        },
        "static": {
            "enabled": True,
        },
        "strings": {
            "enabled": True,
        },
        "targetinfo": {
            "enabled": True,
        },
        "virustotal": {
            "enabled":
            True,
            "key":
            "a0283a2c3d55728300d064874239b5346fb991317e8449fe43c902879d758088",
        },
    }
Example #31
 def test_private_key(self):
     buf = open("tests/files/pdf0.pdf", "rb").read()
     filepath = Files.temp_put((buf + "-----BEGIN RSA PRIVATE KEY-----\n"
                                "HELLOWORLD\n"
                                "-----END RSA PRIVATE KEY-----" + buf))
     assert File(filepath).get_keys() == [
         "-----BEGIN RSA PRIVATE KEY-----\n"
         "HELLOWORLD\n"
         "-----END RSA PRIVATE KEY-----"
     ]
Example #32
 def test_droidmon_file(self):
     d = Droidmon()
     d.set_task({
         "category": "file",
     })
     filepath = Files.temp_named_put("", "droidmon.log")
     d.logs_path = os.path.dirname(filepath)
     # Ensure there is data available and none of it is a set().
     assert d.run() != {}
     assert json.loads(json.dumps(d.run())) == d.run()
Example #33
def test_yara_externals():
    set_cwd(tempfile.mkdtemp())
    cuckoo_create()
    open(cwd("yara", "office", "external.yara"), "wb").write("""
        rule ExternalRule {
            condition:
                filename matches /document.xml/
        }
    """)
    init_yara()

    assert not File(Files.temp_put("")).get_yara("office")
    assert not File(Files.temp_put("hello")).get_yara("office", {
        "filename": "hello.jpg",
    })
    a, = File(Files.temp_put("hello")).get_yara("office", {
        "filename": "document.xml",
    })
    assert a["name"] == "ExternalRule"
Example #34
    def test_temp_conf(self):
        dirpath = tempfile.mkdtemp()
        set_cwd(dirpath)

        Folders.create(dirpath, "conf")
        with open(os.path.join(dirpath, "conf", "cuckoo.conf"), "wb") as f:
            f.write("[cuckoo]\ntmppath = %s" % dirpath)

        filepath = Files.temp_put("foo")
        assert filepath.startswith(dirpath)
Example #36
    def test_cuckoo_init_kv_conf(self):
        filepath = Files.temp_put("cuckoo.cuckoo.version_check = no")

        # Create a new CWD as Files.temp_put() indexes - or tries to - the
        # original cuckoo.conf (even though it doesn't exist yet).
        set_cwd(tempfile.mkdtemp())
        with pytest.raises(SystemExit):
            main.main(("--cwd", cwd(), "init", "--conf", filepath),
                      standalone_mode=False)

        assert config("cuckoo:cuckoo:version_check") is False
Example #37
 def test_star_existing(self):
     filepath = Files.temp_put("""
     virtualbox.cuckoo1.resultserver_port = 1234
     """)
     assert read_kv_conf(filepath) == {
         "virtualbox": {
             "cuckoo1": {
                 "resultserver_port": 1234,
             },
         },
     }
Example #38
def test_migration_042_050():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(
        cwd("conf"), "cuckoo.conf", """
[cuckoo]
delete_original = yes
analysis_timeout = 122
critical_timeout = 601
analysis_size_limit = 123456
use_sniffer = no
""")
    Files.create(
        cwd("conf"), "virtualbox.conf", """
[virtualbox]
path = /usr/bin/VBoxManage
timeout = 1337
""")
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "0.4.2", "0.5.0")
    assert "analysis_timeout" not in cfg["cuckoo"]["cuckoo"]
    assert cfg["cuckoo"]["cuckoo"]["version_check"] is True
    assert cfg["cuckoo"]["cuckoo"]["memory_dump"] is False
    assert "analysis_size_limit" not in cfg["cuckoo"]["cuckoo"]
    assert cfg["cuckoo"]["processing"]["analysis_size_limit"] == "123456"
    assert cfg["cuckoo"]["processing"]["resolve_dns"] is True
    assert cfg["cuckoo"]["database"]["connection"] is None
    assert cfg["cuckoo"]["database"]["timeout"] is None
    assert cfg["cuckoo"]["timeouts"]["default"] == 122
    assert cfg["cuckoo"]["timeouts"]["critical"] == 601
    assert cfg["cuckoo"]["timeouts"]["vm_state"] == 1337
    assert "use_sniffer" not in cfg["cuckoo"]["cuckoo"]
    assert cfg["cuckoo"]["sniffer"]["enabled"] == "no"
    assert cfg["cuckoo"]["sniffer"]["tcpdump"] == "/usr/sbin/tcpdump"
    assert cfg["cuckoo"]["sniffer"]["interface"] == "vboxnet0"
    assert cfg["cuckoo"]["sniffer"]["bpf"] is None
    assert cfg["cuckoo"]["graylog"]["enabled"] is False
    assert cfg["cuckoo"]["graylog"]["host"] == "localhost"
    assert cfg["cuckoo"]["graylog"]["port"] == 12201
    assert cfg["cuckoo"]["graylog"]["level"] == "error"
    assert "timeout" not in cfg["virtualbox"]["virtualbox"]
Example #39
def test_extract():
    o = io.BytesIO()
    t = tarfile.open(fileobj=o, mode="w:gz")

    a = tempfile.mktemp()
    open(a, "wb").write("a")
    t.add(a, "community-master/modules/signatures/a.txt")

    b = tempfile.mktemp()
    open(b, "wb").write("b")
    t.add(b, "community-master/data/monitor/b.txt")

    y = tempfile.mktemp()
    open(y, "wb").write("y")
    t.add(y, "community-master/data/yara/binaries/y.yar")

    c = tempfile.mktemp()
    open(c, "wb").write("c")
    t.add(c, "community-master/agent/c.txt")

    d = tempfile.mkdtemp()
    Folders.create(d, "dir1")
    Folders.create(d, "dir2")
    Folders.create((d, "dir2"), "dir3")
    Files.create((d, "dir1"), "d.txt", "d")
    Files.create((d, "dir2", "dir3"), "e.txt", "e")
    t.add(d, "community-master/analyzer")

    t.close()

    responses.add(responses.GET, URL % "master", body=o.getvalue())

    set_cwd(tempfile.mkdtemp())
    fetch_community()

    assert open(cwd("signatures", "a.txt"), "rb").read() == "a"
    assert open(cwd("monitor", "b.txt"), "rb").read() == "b"
    assert open(cwd("yara", "binaries", "y.yar"), "rb").read() == "y"
    assert open(cwd("agent", "c.txt"), "rb").read() == "c"
    assert open(cwd("analyzer", "dir1", "d.txt"), "rb").read() == "d"
    assert open(cwd("analyzer", "dir2", "dir3", "e.txt"), "rb").read() == "e"
Example #40
def test_get_urls():
    filepath = Files.temp_put("""
http://google.com
google.com/foobar
thisisnotadomain
https://1.2.3.4:9001/hello
    """)
    assert sorted(File(filepath).get_urls()) == [
        # TODO Why does this not work properly on my own machine?
        "http://google.com",
        "https://1.2.3.4:9001/hello",
    ]
Example #41
def test_migration_041_042():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(
        cwd("conf"), "cuckoo.conf",
        "[cuckoo]\ndelete_original = yes"
    )
    Files.create(
        cwd("conf"), "virtualbox.conf",
        "[virtualbox]\npath = /usr/bin/VBoxManage"
    )
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "0.4.1", "0.4.2")
    assert cfg["cuckoo"]["cuckoo"]["analysis_size_limit"] == 104857600
    assert cfg["virtualbox"]["virtualbox"]["timeout"] == 300
    assert cfg["vmware"]["vmware"]["mode"] == "gui"
    assert cfg["vmware"]["vmware"]["path"] == "/usr/bin/vmrun"
    assert cfg["vmware"]["vmware"]["machines"] == ["cuckoo1"]
    assert cfg["vmware"]["cuckoo1"]["label"] == "../vmware-xp3.vmx,Snapshot1"
    assert cfg["vmware"]["cuckoo1"]["platform"] == "windows"
    assert cfg["vmware"]["cuckoo1"]["ip"] == "192.168.54.111"
Example #42
def test_migration_203_204():
    set_cwd(tempfile.mkdtemp())
    Folders.create(cwd(), "conf")
    Files.create(cwd("conf"), "processing.conf", """
[dumptls]
enabled = on
""")
    Files.create(
        cwd("conf"), "qemu.conf", """
[qemu]
machines = ubuntu32, ubuntu64
[ubuntu32]
arch = x86
[ubuntu64]
arch = x64
    """)
    cfg = Config.from_confdir(cwd("conf"), loose=True)
    cfg = migrate(cfg, "2.0.3", "2.0.4")
    assert cfg["processing"]["extracted"]["enabled"] is True
    # Except for qemu.
    machineries = (
        "avd",
        "esx",
        "kvm",
        "physical",
        "virtualbox",
        "vmware",
        "vsphere",
        "xenserver",
    )
    for machinery in machineries:
        Files.create(cwd("conf"), "%s.conf" % machinery,
                     "[%s]\nmachines =" % machinery)
    assert cfg["qemu"]["ubuntu32"]["enable_kvm"] is False
    assert cfg["qemu"]["ubuntu32"]["snapshot"] is None
Example #43
    def test_empty_symlink(self):
        oldfilepath = Files.temp_put("hello")
        try:
            movesql("sqlite:///%s" % oldfilepath, "symlink", temppath())

            # Following is non-windows.
            assert os.path.exists(oldfilepath)
            assert os.path.exists(cwd("cuckoo.db"))
            assert os.path.islink(cwd("cuckoo.db"))
            assert open(cwd("cuckoo.db"), "rb").read() == "hello"
        except RuntimeError as e:
            assert is_windows()
            assert "'symlink'" in e.message
Example #44
def test_whitespace_before_line2():
    set_cwd(tempfile.mkdtemp())
    filepath = Files.temp_put("""
[virtualbox]
machines = cuckoo1
[cuckoo1]
 label = cuckoo1
ip = 1.2.3.4
snapshot = asnapshot
""")
    with pytest.raises(CuckooConfigurationError) as e:
        Config(file_name="virtualbox", cfg=filepath)
    e.match("Most likely there are leading whitespaces")
Example #45
    def run(self):
        """Runs IRMA processing
        @return: full IRMA report.
        """
        self.key = "irma"

        """ Fall off if we don't deal with files """
        if self.results.get("info", {}).get("category") != "file":
            log.debug("IRMA supports only file scanning !")
            return {}

        self.url = self.options.get("url")
        self.timeout = int(self.options.get("timeout", 60))
        self.scan = int(self.options.get("scan", 0))
        self.force = int(self.options.get("force", 0))

        sha256 = Files.sha256_file(self.file_path)

        results = self._get_results(sha256)

        if not self.force and not self.scan and not results:
            return {}
        elif self.force or (not results and self.scan):
            log.info("File scan requested: %s", sha256)
            self._scan_file(self.file_path, self.force)
            results = self._get_results(sha256) or {}

        """ FIXME! could use a proper fix here
        that probably needs changes on IRMA side aswell
        --
        related to  https://github.com/elastic/elasticsearch/issues/15377
        entropy value is sometimes 0 and sometimes like  0.10191042566270775
        other issue is that results type changes between string and object :/
        """
        for idx, result in enumerate(results["probe_results"]):
            if result["name"] == "PE Static Analyzer":
                log.debug("Ignoring PE results at index {0}".format(idx))
                results["probe_results"][idx]["results"] = "... scrapped ..."

            """ When VT results comes back with 'detected by 0/58' then it gets
            cached as malicious with signature due to the fact that the result
            exists. This is a workaround to override that tragedy and make it
            compatible with other results.
            """
            if result["name"] == "VirusTotal" \
                    and results["probe_results"][idx]["results"].startswith("detected by 0/"):
                log.debug("Fixing empty match from VT")
                results["probe_results"][idx]["status"] = 0
                results["probe_results"][idx]["results"] = None

        return results
Example #46
    def test_task_delete(self):
        filepath = Files.temp_put("foobar")
        assert os.path.exists(filepath)

        self.db.session.add(db.Task(filepath, status=db.Task.FINISHED))
        assert self.client.delete("/api/task/1").json == {
            "success": True,
        }
        assert not os.path.exists(filepath)
        assert self.client.delete("/api/task/1").json == {
            "success": False,
            "message": "Task already deleted",
        }
        assert not os.path.exists(filepath)
Example #47
def init_legacy_analyses():
    dirpath = tempfile.mkdtemp()
    mkdir(dirpath, "storage")
    mkdir(dirpath, "storage", "analyses")

    mkdir(dirpath, "storage", "analyses", "1")
    mkdir(dirpath, "storage", "analyses", "1", "logs")
    Files.create((dirpath, "storage", "analyses", "1", "logs"), "a.txt", "a")
    mkdir(dirpath, "storage", "analyses", "1", "reports")
    Files.create((dirpath, "storage", "analyses", "1", "reports"), "b.txt",
                 "b")

    mkdir(dirpath, "storage", "analyses", "2")
    Files.create((dirpath, "storage", "analyses", "2"), "cuckoo.log", "log")

    Files.create((dirpath, "storage", "analyses"), "latest", "last!!1")
    return dirpath
Example #48
 def test_global_osprofile(self, p):
     set_cwd(tempfile.mkdtemp())
     cuckoo_create(cfg={
         "memory": {
             "basic": {
                 "guest_profile": "profile0",
             },
         },
     })
     filepath = Files.temp_named_put("notempty", "memory.dmp")
     m = Memory()
     m.set_path(os.path.dirname(filepath))
     m.set_machine({})
     m.run()
     p.assert_called_once_with(filepath, "profile0")
Example #49
def test_default_config():
    """Test the default configuration."""
    dirpath = tempfile.mkdtemp()

    with pytest.raises(SystemExit):
        main.main(
            ("--cwd", dirpath, "--nolog", "init"),
            standalone_mode=False
        )

    assert config("cuckoo:cuckoo:version_check") is True
    assert config("cuckoo:cuckoo:tmppath") is None
    assert config("cuckoo:resultserver:ip") == "192.168.56.1"
    assert config("cuckoo:processing:analysis_size_limit") == 128*1024*1024
    assert config("cuckoo:timeouts:critical") == 60
    assert config("auxiliary:mitm:mitmdump") == "/usr/local/bin/mitmdump"

    with pytest.raises(RuntimeError) as e:
        config("nope")
    e.match("Invalid configuration entry")

    with pytest.raises(RuntimeError) as e:
        config("nope:nope")
    e.match("Invalid configuration entry")

    assert check_configs()

    os.remove(os.path.join(dirpath, "conf", "cuckoo.conf"))
    with pytest.raises(CuckooStartupError) as e:
        check_configs()
    e.match("Config file does not exist")

    Files.create(
        (dirpath, "conf"), "cuckoo.conf", "[cuckoo]\nversion_check = on"
    )
    assert check_configs()
Example #50
 def test_success(self):
     filepath = Files.temp_put("""
     cuckoo.cuckoo.version_check = off
     auxiliary.sniffer.enabled = no
     """)
     assert read_kv_conf(filepath) == {
         "cuckoo": {
             "cuckoo": {
                 "version_check": False,
             },
         },
         "auxiliary": {
             "sniffer": {
                 "enabled": False,
             },
         },
     }
Example #51
def init_legacy_analyses():
    dirpath = tempfile.mkdtemp()
    mkdir(dirpath, "storage")
    mkdir(dirpath, "storage", "analyses")

    mkdir(dirpath, "storage", "analyses", "1")
    mkdir(dirpath, "storage", "analyses", "1", "logs")
    Files.create((dirpath, "storage", "analyses", "1", "logs"), "a.txt", "a")
    mkdir(dirpath, "storage", "analyses", "1", "reports")
    Files.create((dirpath, "storage", "analyses", "1", "reports"), "b.txt",
                 "b")

    mkdir(dirpath, "storage", "analyses", "2")
    Files.create((dirpath, "storage", "analyses", "2"), "cuckoo.log", "log")

    if not is_windows():
        os.symlink("thisisnotanexistingfile",
                   os.path.join(dirpath, "storage", "analyses", "2", "binary"))

    Files.create((dirpath, "storage", "analyses"), "latest", "last!!1")
    return dirpath
Example #52
def tasks_create_file():
    data = request.files["file"]
    package = request.form.get("package", "")
    timeout = request.form.get("timeout", "")
    priority = request.form.get("priority", 1)
    options = request.form.get("options", "")
    machine = request.form.get("machine", "")
    platform = request.form.get("platform", "")
    tags = request.form.get("tags", None)
    custom = request.form.get("custom", "")
    owner = request.form.get("owner", "")
    clock = request.form.get("clock", None)
    user_id = request.form.get("user_id", "")

    memory = parse_bool(request.form.get("memory", 0))
    unique = parse_bool(request.form.get("unique", 0))
    enforce_timeout = parse_bool(request.form.get("enforce_timeout", 0))

    content = data.read()
    if unique and db.find_sample(sha256=hashlib.sha256(content).hexdigest()):
        return json_error(400, "This file has already been submitted")

    temp_file_path = Files.temp_named_put(content, data.filename)

    task_id = db.add_path(
        file_path=temp_file_path,
        package=package,
        timeout=timeout,
        priority=priority,
        options=options,
        machine=machine,
        platform=platform,
        tags=tags,
        custom=custom,
        owner=owner,
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=clock,
        user_id=user_id,
    )

    return jsonify(task_id=task_id)
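A hedged client-side sketch for the handler above, assuming it is exposed at the usual Cuckoo REST API route /tasks/create/file and that the API listens on localhost:8090; the form fields mirror the request.form lookups in the code.

import requests

# Submit a file to the (assumed) /tasks/create/file endpoint.
with open("sample.exe", "rb") as sample:
    r = requests.post(
        "http://localhost:8090/tasks/create/file",
        files={"file": ("sample.exe", sample)},
        data={"package": "exe", "timeout": "120", "unique": "1"},
    )

# The handler responds with the new task's identifier.
print(r.json())  # e.g. {"task_id": 1}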
Example #53
    def gen_config(self,
                   logfile,
                   local_ip,
                   local_port,
                   socks5_host,
                   socks5_port,
                   username=None,
                   password=None):
        """Generate and writea redsocks config file to be used for
         one analysis"""

        conf_base = {
            "log_debug": "on",
            "log_info": "on",
            "log": "\"file:%s\"" % logfile,
            "daemon": "off",
            "redirector": "iptables"
        }

        conf_redsocks = {
            "local_ip": local_ip,
            "local_port": str(local_port),
            "ip": socks5_host,
            "port": str(socks5_port),
            "type": "socks5"
        }

        conf_sections = {"base": conf_base, "redsocks": conf_redsocks}

        if username:
            conf_redsocks["login"] = username
            conf_redsocks["password"] = password

        conf = ""
        for name, section in conf_sections.iteritems():
            conf += "%s {\n" % name
            for field, value in section.iteritems():
                conf += "%s = %s;\n" % (field, value)
            conf += "}\n"

        return Files.temp_named_put(conf, "redsocks-task-%s" % self.task.id)
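Roughly the configuration text gen_config() assembles, assuming it is called with logfile="/tmp/redsocks-task-1.log", local_ip="192.168.56.1", local_port=12345, socks5_host="1.2.3.4", socks5_port=1080 and no credentials; since sections and fields come from plain dict iteration, their order may differ from this sketch.

# Approximate redsocks config written by gen_config() (field order may vary).
expected = (
    'base {\n'
    'log_debug = on;\n'
    'log_info = on;\n'
    'log = "file:/tmp/redsocks-task-1.log";\n'
    'daemon = off;\n'
    'redirector = iptables;\n'
    '}\n'
    'redsocks {\n'
    'local_ip = 192.168.56.1;\n'
    'local_port = 12345;\n'
    'ip = 1.2.3.4;\n'
    'port = 1080;\n'
    'type = socks5;\n'
    '}\n'
)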
Example #54
    def run(self):
        """Runs IRMA processing
        @return: full IRMA report.
        """
        self.key = "irma"
        """ Fall off if we don't deal with files """
        if self.results.get("info", {}).get("category") != "file":
            log.debug("IRMA supports only file scanning !")
            return {}

        self.url = self.options.get("url")
        self.timeout = int(self.options.get("timeout", 60))
        self.scan = int(self.options.get("scan", 0))
        self.force = int(self.options.get("force", 0))

        sha256 = Files.sha256_file(self.file_path)

        results = self._get_results(sha256)

        if not self.force and not self.scan and not results:
            return {}
        elif self.force or (not results and self.scan):
            log.info("File scan requested: %s", sha256)
            self._scan_file(self.file_path, self.force)
            results = self._get_results(sha256) or {}
        """ FIXME! could use a proper fix here
        that probably needs changes on IRMA side aswell
        --
        related to  https://github.com/elastic/elasticsearch/issues/15377
        entropy value is sometimes 0 and sometimes like  0.10191042566270775
        other issue is that results type changes between string and object :/
        """
        for idx, result in enumerate(results["probe_results"]):
            if result["name"] == "PE Static Analyzer":
                log.debug("Ignoring PE results at index {0}".format(idx))
                results["probe_results"][idx]["results"] = "... scrapped ..."

        return results
Example #55
    def pre(self, submit_type, data, options=None):
        """
        The first step to submitting new analysis.
        @param submit_type: "files" or "strings"
        @param data: a list of dicts containing "name" (file name)
                and "data" (file data) or a list of strings (urls or hashes)
        @return: submit id
        """
        if submit_type not in ("strings", "files"):
            log.error("Bad parameter '%s' for submit_type", submit_type)
            return False

        path_tmp = Folders.create_temp()
        submit_data = {
            "data": [],
            "errors": [],
            "options": options or {},
        }

        if submit_type == "strings":
            for line in data:
                self._handle_string(submit_data, path_tmp, line.strip())

        if submit_type == "files":
            for entry in data:
                filename = Storage.get_filename_from_path(entry["name"])
                filepath = Files.create(path_tmp, filename, entry["data"])
                submit_data["data"].append({
                    "type":
                    "file",
                    "data":
                    filepath,
                    "options":
                    self.translate_options_to(entry.get("options", {})),
                })

        return db.add_submit(path_tmp, submit_type, submit_data)
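The two input shapes accepted by pre(), as described in its docstring; the actual values below are illustrative only.

# "files": a list of dicts with a file name and the raw file data.
files_data = [
    {"name": "sample.exe", "data": b"MZ\x90\x00"},
]

# "strings": a list of URLs and/or hashes, one per entry.
strings_data = [
    "http://example.com/malware",
    "44d88612fea8a8f36de82e1278abb02f",
]

assert all({"name", "data"} <= set(entry) for entry in files_data)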
Example #56
    def _handle_string(self, submit, tmppath, line):
        if not line:
            return

        if validate_hash(line):
            try:
                filedata = VirusTotalAPI().hash_fetch(line)
            except CuckooOperationalError as e:
                submit["errors"].append("Error retrieving file hash: %s" % e)
                return

            filepath = Files.create(tmppath, line, filedata)

            submit["data"].append({"type": "file", "data": filepath})
            return

        if validate_url(line):
            submit["data"].append({
                "type": "url",
                "data": validate_url(line),
            })
            return

        submit["errors"].append("'%s' was neither a valid hash or url" % line)
Example #57
    def import_(self, f, submit_id):
        """Import an analysis identified by the file(-like) object f."""
        try:
            z = zipfile.ZipFile(f)
        except zipfile.BadZipfile:
            raise CuckooOperationalError(
                "Imported analysis is not a proper .zip file.")

        # Ensure there are no files with illegal or potentially insecure names.
        # TODO Keep in mind that if we start to support other archive formats
        # (e.g., .tar) that those may also support symbolic links. In that case
        # we should probably start using sflock here.
        for filename in z.namelist():
            if filename.startswith("/") or ".." in filename or ":" in filename:
                raise CuckooOperationalError(
                    "The .zip file contains a file with a potentially "
                    "incorrect filename: %s" % filename)

        if "task.json" not in z.namelist():
            raise CuckooOperationalError(
                "The task.json file is required in order to be able to import "
                "an analysis! This file contains metadata about the analysis.")

        required_fields = {
            "options": dict,
            "route": basestring,
            "package": basestring,
            "target": basestring,
            "category": basestring,
            "memory": bool,
            "timeout": (int, long),
            "priority": (int, long),
            "custom": basestring,
            "tags": (tuple, list),
        }

        try:
            info = json.loads(z.read("task.json"))
            for key, type_ in required_fields.items():
                if key not in info:
                    raise ValueError("missing %s" % key)
                if info[key] is not None and not isinstance(info[key], type_):
                    raise ValueError("%s => %s" % (key, info[key]))
        except ValueError as e:
            raise CuckooOperationalError(
                "The provided task.json file, required for properly importing "
                "the analysis, is incorrect or incomplete (%s)." % e)

        if info["category"] == "url":
            task_id = submit_task.add_url(url=info["target"],
                                          package=info["package"],
                                          timeout=info["timeout"],
                                          options=info["options"],
                                          priority=info["priority"],
                                          custom=info["custom"],
                                          memory=info["memory"],
                                          tags=info["tags"],
                                          submit_id=submit_id)
        else:
            # Users may have the "delete_bin_copy" enabled and in such cases
            # the binary file won't be included in the .zip file.
            if "binary" in z.namelist():
                filepath = Files.temp_named_put(
                    z.read("binary"), os.path.basename(info["target"]))
            else:
                # Generate a temp file as a target if no target is present
                filepath = Files.temp_put("")

            # We'll be updating the target shortly.
            task_id = submit_task.add_path(file_path=filepath,
                                           package=info["package"],
                                           timeout=info["timeout"],
                                           options=info["options"],
                                           priority=info["priority"],
                                           custom=info["custom"],
                                           memory=info["memory"],
                                           tags=info["tags"],
                                           submit_id=submit_id)

        if not task_id:
            raise CuckooOperationalError(
                "There was an error creating a task for the to-be imported "
                "analysis in our database.. Can't proceed.")

        # The constructors currently don't accept this argument.
        db.set_route(task_id, info["route"])

        mkdir(cwd(analysis=task_id))
        z.extractall(cwd(analysis=task_id))

        # If there's an analysis.json file, load it up to figure out additional
        # metadata regarding this analysis.
        if os.path.exists(cwd("analysis.json", analysis=task_id)):
            try:
                obj = json.load(
                    open(cwd("analysis.json", analysis=task_id), "rb"))
                if not isinstance(obj, dict):
                    raise ValueError
                if "errors" in obj and not isinstance(obj["errors"], list):
                    raise ValueError
                if "action" in obj and not isinstance(obj["action"], list):
                    raise ValueError
            except ValueError:
                log.warning(
                    "An analysis.json file was provided, but wasn't a valid "
                    "JSON object/structure that we can to enhance the "
                    "analysis information.")
            else:
                for error in set(obj.get("errors", [])):
                    if isinstance(error, basestring):
                        db.add_error(error, task_id)
                for action in set(obj.get("action", [])):
                    if isinstance(action, basestring):
                        db.add_error("", task_id, action)

        # We set this analysis as completed so that it will be processed
        # automatically (assuming 'cuckoo process' is running).
        db.set_status(task_id, TASK_COMPLETED)
        return task_id
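A minimal task.json that passes the required_fields validation in import_(); the keys and their types come straight from the code, while the values are illustrative (for example, "none" as a route is an assumption).

import json

# Minimal metadata for an importable analysis (illustrative values).
task_json = {
    "options": {},
    "route": "none",
    "package": "exe",
    "target": "sample.exe",
    "category": "file",
    "memory": False,
    "timeout": 120,
    "priority": 1,
    "custom": "",
    "tags": [],
}

# This is what would be stored as "task.json" inside the imported .zip file.
print(json.dumps(task_json, indent=4))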
Example #58
    def submit(self, submit_id, config):
        """Reads, interprets, and converts the JSON configuration provided by
        the Web Interface into something we insert into the database."""
        ret = []
        submit = db.view_submit(submit_id)

        machines = {}

        for entry in config["file_selection"]:
            # Merge the global & per-file analysis options.
            info = copy.deepcopy(config["global"])
            info.update(entry)
            info.update(entry.get("options", {}))
            options = copy.deepcopy(config["global"]["options"])
            options.update(entry.get("options", {}).get("options", {}))

            machine = info.get("machine")
            if machine:
                if machine not in machines:
                    m = db.view_machine(machine)
                    # TODO Add error handling for missing machine entry.
                    machines[machine] = m.label if m else None

                machine = machines[machine]
            else:
                machine = None

            kw = {
                "package": info.get("package") or "",
                "timeout": info.get("timeout", 120),
                "priority": info.get("priority"),
                "custom": info.get("custom"),
                "owner": info.get("owner"),
                "tags": info.get("tags"),
                "memory": options.get("full-memory-dump"),
                "enforce_timeout": options.get("enforce-timeout"),
                "machine": machine,
                "platform": info.get("platform"),
                "options": self.translate_options_from(info, options),
                "submit_id": submit_id,
            }

            if entry["type"] == "url":
                ret.append(submit_task.add_url(url=info["filename"], **kw))
                continue

            # For each selected file entry, create a new temporary folder.
            path_dest = Folders.create_temp()

            if not info["extrpath"]:
                path = os.path.join(submit.tmp_path,
                                    os.path.basename(info["filename"]))

                filepath = Files.copy(path, path_dest=path_dest)

                ret.append(submit_task.add_path(file_path=filepath, **kw))
            elif len(info["extrpath"]) == 1:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                arc = sflock.zipify(
                    sflock.unpack(contents=open(arcpath, "rb").read(),
                                  filename=info["arcname"]))

                # Create a .zip archive out of this container.
                arcpath = Files.temp_named_put(
                    arc, os.path.basename(info["arcname"]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))
            else:
                arcpath = os.path.join(submit.tmp_path,
                                       os.path.basename(info["arcname"]))
                if not os.path.exists(arcpath):
                    submit.data["errors"].append(
                        "Unable to find parent archive file: %s" %
                        os.path.basename(info["arcname"]))
                    continue

                content = sflock.unpack(arcpath).read(info["extrpath"][:-1])
                subarc = sflock.unpack(contents=content,
                                       filename=info["extrpath"][-2])

                # Write intermediate .zip archive file.
                arcpath = Files.temp_named_put(
                    sflock.zipify(subarc),
                    os.path.basename(info["extrpath"][-2]))

                ret.append(
                    submit_task.add_archive(file_path=arcpath,
                                            filename=info["relaname"],
                                            **kw))

        return ret
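A hedged reconstruction of the config structure submit() consumes, based on the keys it reads (config["global"], config["file_selection"], and the per-entry "options", "filename", "extrpath", "arcname" and "relaname" fields); this is not necessarily the exact schema produced by the web interface.

# Hypothetical submit configuration as read by submit() (illustrative values).
config = {
    "global": {
        "package": "",
        "timeout": 120,
        "priority": 1,
        "custom": None,
        "owner": None,
        "tags": None,
        "machine": None,
        "platform": None,
        "options": {"full-memory-dump": False, "enforce-timeout": False},
    },
    "file_selection": [
        {
            # A plain file picked up directly from the submit's tmp_path.
            "type": "file",
            "filename": "sample.exe",
            "extrpath": [],
            "options": {},
        },
        {
            # A file to be analyzed from inside a container archive.
            "type": "file",
            "filename": "doc.xls",
            "arcname": "samples.zip",
            "relaname": "doc.xls",
            "extrpath": ["doc.xls"],
            "options": {},
        },
    ],
}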
Example #59
def cuckoo_status():
    # In order to keep track of the diskspace statistics of the temporary
    # directory we create a temporary file so we can statvfs() on that.
    temp_file = Files.temp_put("")

    paths = dict(
        binaries=cwd("storage", "binaries"),
        analyses=cwd("storage", "analyses"),
        temporary=temp_file,
    )

    diskspace = {}
    for key, path in paths.items():
        if hasattr(os, "statvfs") and os.path.isdir(path):
            stats = os.statvfs(path)
            diskspace[key] = dict(
                free=stats.f_bavail * stats.f_frsize,
                total=stats.f_blocks * stats.f_frsize,
                used=(stats.f_blocks - stats.f_bavail) * stats.f_frsize,
            )

    # Now we remove the temporary file and its parent directory.
    os.unlink(temp_file)

    # Get the CPU load.
    if hasattr(os, "getloadavg"):
        cpuload = os.getloadavg()
    else:
        cpuload = []

    if os.path.isfile("/proc/meminfo"):
        values = {}
        for line in open("/proc/meminfo"):
            key, value = line.split(":", 1)
            values[key.strip()] = value.replace("kB", "").strip()

        if "MemAvailable" in values and "MemTotal" in values:
            memavail = int(values["MemAvailable"])
            memtotal = int(values["MemTotal"])
            memory = 100 - 100.0 * memavail / memtotal
        else:
            memory = memavail = memtotal = None
    else:
        memory = memavail = memtotal = None

    try:
        cpu_core_count = multiprocessing.cpu_count()
    except NotImplementedError:
        cpu_core_count = None

    response = dict(version=version,
                    hostname=socket.gethostname(),
                    machines=dict(total=len(db.list_machines()),
                                  available=db.count_machines_available()),
                    tasks=dict(total=db.count_tasks(),
                               pending=db.count_tasks("pending"),
                               running=db.count_tasks("running"),
                               completed=db.count_tasks("completed"),
                               reported=db.count_tasks("reported")),
                    diskspace=diskspace,
                    cpuload=cpuload,
                    cpu_count=cpu_core_count,
                    memory=memory,
                    memavail=memavail,
                    memtotal=memtotal,
                    processes=Pidfile.get_active_pids())

    return jsonify(response)
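Roughly the JSON document this view returns, with the keys taken from the response dict above; the values are illustrative, and the diskspace/memory fields may be empty or null depending on the platform.

# Approximate shape of the GET /cuckoo/status response (illustrative values).
status = {
    "version": "2.0.0",
    "hostname": "cuckoo-host",
    "machines": {"total": 2, "available": 1},
    "tasks": {
        "total": 10, "pending": 1, "running": 1,
        "completed": 3, "reported": 5,
    },
    "diskspace": {
        "analyses": {"free": 10485760, "total": 20971520, "used": 10485760},
        # "binaries" and "temporary" follow the same structure.
    },
    "cpuload": [0.1, 0.2, 0.3],
    "cpu_count": 4,
    "memory": 42.0,
    "memavail": 4096,
    "memtotal": 8192,
    "processes": {"cuckoo": 1234},
}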