Esempio n. 1
0
def tasks_create_file():
    """Bottle API endpoint: store an uploaded sample and queue one task per demuxed file.

    Reads the sample plus analysis options from the multipart form and
    returns a JSON body containing the list of created task ids.
    """
    response = {}

    data = request.files.file
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    tags = request.forms.get("tags", None)
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", False)
    clock = request.forms.get("clock", None)
    # BUG FIX: form values arrive as strings, so a submitted "False" or "0"
    # was truthy and silently enabled the flag.  Parse false-like values
    # explicitly instead of relying on bool(str).
    memory = str(memory).strip().lower() not in ("", "0", "false", "no", "off")
    enforce_timeout = request.forms.get("enforce_timeout", False)
    enforce_timeout = str(enforce_timeout).strip().lower() not in ("", "0", "false", "no", "off")

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    task_ids = db.demux_sample_and_add_to_db(file_path=temp_file_path, package=package, timeout=timeout, options=options, priority=priority,
                                             machine=machine, platform=platform, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
    response["task_ids"] = task_ids
    return jsonize(response)
Esempio n. 2
0
    def save(self):
        """Store this attachment as a temp file and return its path.

        Prefers the long filename, falls back to the short one, and as a
        last resort synthesizes a random "UnknownAttachmentXXXXX.bin" name.
        Returns None for hidden (dot-prefixed, extensionless) names and for
        extensions outside INTERESTING_FILE_EXTENSIONS.
        """
        # Use long filename as first preference
        filename = self.longFilename
        # Otherwise use the short filename
        if filename is None:
            filename = self.shortFilename
        # Otherwise just make something up!
        if filename is None:
            import random
            import string

            suffix = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(5))
            filename = "UnknownAttachment" + suffix + ".bin"

        base, ext = os.path.splitext(filename)
        basename = os.path.basename(filename)
        ext = ext.lower()
        # Skip extensionless hidden files such as ".DS_Store".
        if ext == "" and basename.startswith("."):
            return None
        # Only keep file types an analyst would care about; a plain
        # membership test replaces the manual found-flag loop.
        if ext not in INTERESTING_FILE_EXTENSIONS:
            return None

        return store_temp_file(self.data, filename)
Esempio n. 3
0
    def save(self):
        """Store this attachment as a temp file and return its path.

        Prefers the long filename, falls back to the short one, then to a
        random "UnknownAttachmentXXXXX.bin" name.  Returns None for hidden
        dot-files and for uninteresting extensions.
        """
        # Use long filename as first preference
        filename = self.longFilename
        # Otherwise use the short filename
        if filename is None:
            filename = self.shortFilename
        # Otherwise just make something up!
        if filename is None:
            import random
            import string
            filename = 'UnknownAttachment' + \
                ''.join(random.choice(string.ascii_uppercase + string.digits)
                        for _ in range(5)) + ".bin"

        base, ext = os.path.splitext(filename)
        basename = os.path.basename(filename)
        ext = ext.lower()
        # Skip extensionless hidden files such as ".DS_Store".
        if ext == "" and basename.startswith("."):
            return None
        # Extensions worth extracting; the empty string admits
        # extensionless non-hidden names.
        extensions = [
            "", ".exe", ".dll", ".com", ".pdf", ".msi", ".bin", ".scr", ".zip", ".tar", ".tgz", ".gz", ".rar", ".htm", ".html", ".hta",
            ".doc", ".dot", ".docx", ".dotx", ".docm", ".dotm", ".docb", ".mht", ".mso", ".js", ".jse", ".vbs", ".vbe",
            ".xls", ".xlt", ".xlm", ".xlsx", ".xltx", ".xlsm", ".xltm", ".xlsb", ".xla", ".xlam", ".xll", ".xlw",
            ".ppt", ".pot", ".pps", ".pptx", ".pptm", ".potx", ".potm", ".ppam", ".ppsx", ".ppsm", ".sldx", ".sldm", ".wsf",
        ]
        # A membership test replaces the manual found-flag loop.
        if ext not in extensions:
            return None

        return store_temp_file(self.data, filename)
Esempio n. 4
0
def tasks_create_file():
    """Bottle API endpoint: store an uploaded sample and queue a single analysis task.

    Returns a JSON body with the created "task_id".
    """
    response = {}

    data = request.files.file
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", False)
    # BUG FIX: form values arrive as strings, so a submitted "False" or "0"
    # was truthy and silently enabled the flag.  Parse false-like values
    # explicitly instead of relying on bool(str).
    memory = str(memory).strip().lower() not in ("", "0", "false", "no", "off")
    enforce_timeout = request.forms.get("enforce_timeout", False)
    enforce_timeout = str(enforce_timeout).strip().lower() not in ("", "0", "false", "no", "off")

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    task_id = db.add_path(file_path=temp_file_path,
                          package=package,
                          timeout=timeout,
                          priority=priority,
                          options=options,
                          machine=machine,
                          platform=platform,
                          custom=custom,
                          memory=memory,
                          enforce_timeout=enforce_timeout)

    response["task_id"] = task_id
    return jsonize(response)
Esempio n. 5
0
def mse_unquarantine(f):
    """Decrypt an MSE quarantine file and store its payload as a temp file.

    Returns the stored file's path, or None when the header magic or the
    embedded size fields do not match the file contents.
    """
    with open(f, "rb") as handle:
        buf = bytearray(handle.read())

    total = len(buf)
    # Valid quarantine files start with the magic bytes 0B AD 00.
    if total < 12:
        return None
    if buf[0] != 0x0B or buf[1] != 0xAD or buf[2] != 0x00:
        return None

    plain = rc4_decrypt(mse_ksa(), buf)

    # The header length field sits at offset 8; the original file size is
    # stored 12 bytes before the end of the header.
    header = 0x28 + struct.unpack("<I", plain[8:12])[0]
    original = struct.unpack("<I", plain[header - 12:header - 8])[0]

    if header + original != total:
        return None

    # MSE stores metadata like the original filename in a separate file,
    # so due to our existing interface, we can't restore the original name
    # from just the ResourceData file.  Later we may allow uploading pairs
    # of files, match them up by name, and then associate that data here
    # for the final submission
    return store_temp_file(plain[header:], "MSEDequarantineFile")
Esempio n. 6
0
def mbam_unquarantine(f):
    """RC4-decrypt a Malwarebytes quarantine file and store the plaintext."""
    with open(f, "rb") as handle:
        encrypted = bytearray(handle.read())

    # The whole file is a single RC4 blob; MBAM keeps no usable name.
    keystream = mbam_ksa()
    plaintext = rc4_decrypt(keystream, encrypted)
    return store_temp_file(plaintext, "MBAMDequarantineFile")
Esempio n. 7
0
def forefront_unquarantine(file):
    """XOR-decode a Forefront quarantine file and store it under its own base name.

    The original filename is not recoverable from the quarantine file, so
    the quarantine file's base name is reused for the stored sample.
    """
    base = os.path.basename(file)
    # Removed the unused "realbase, ext = os.path.splitext(base)" split —
    # neither value was ever read.

    with open(file, "rb") as quarfile:
        qdata = bytearray_xor(bytearray(quarfile.read()), 0xff)
        # can't do much about the name for this case
        return store_temp_file(qdata, base)
Esempio n. 8
0
def extract_config(file_path, decomp_jar):
    """Extract a qRAT C2 configuration from an encrypted jar sample.

    The sample carries an "e-data" zip entry whose first 8 bytes seed a
    java.util.Random keystream; the remainder is the encoded inner jar.
    The decoded jar is decompiled with *decomp_jar* and server host/port
    settings are scraped out of the decompiled source.

    Args:
        file_path: path to the sample (a zip/jar archive).
        decomp_jar: path to the decompiler jar; falsy disables extraction.

    Returns:
        dict of config values, or None on any failure.
    """
    if not decomp_jar:
        return None

    ret = {}
    enckey = None
    coded_data = None  # BUG FIX: was left unbound when "e-data" is absent

    try:
        with ZipFile(file_path, 'r') as zipobj:
            for name in zipobj.namelist():
                if name == 'e-data':
                    coded_data = zipobj.read(name)
                    # First 8 bytes are the big-endian PRNG seed.
                    enckey = unpack('>Q', coded_data[:8])[0]

        # BUG FIX: "if enckey and coded_data" rejected a legitimate seed of
        # 0 and raised NameError when the entry was missing.
        if enckey is None or coded_data is None:
            return None

        # Decode byte-by-byte with the java.util.Random keystream.
        java_rand = JavaRandom(enckey)
        coded_data = coded_data[8:]
        decoded_data = ""
        for i in range(len(coded_data)):
            key = java_rand.nextInt(255)
            decoded_data += chr((ord(coded_data[i]) - key + 256) % 256)
        decoded_path = store_temp_file(decoded_data, "qrat.jar")

        decompiled_data = None
        try:
            p = Popen(["java", "-jar", decomp_jar, decoded_path], stdout=PIPE)
            decompiled_data = p.stdout.read()
        except Exception:
            # BUG FIX: a failed decompile previously left decompiled_data
            # unbound, crashing the regex scraping below.
            pass

        if not decompiled_data:
            # Decompilation failed or produced nothing; clean up and bail.
            try:
                os.unlink(decoded_path)
            except OSError:
                pass
            return None

        match = re.search("Utils\.serverHost = new String\[\] \{(?P<stringlist>[^};\r\n]*)\};", decompiled_data)
        if match:
            hostlist = match.group('stringlist').split(',')
            serverhosts = [x.strip(" \"") for x in hostlist]
            for i in range(len(serverhosts)):
                ret["ServerHost" + str(i)] = serverhosts[i]
        match = re.search("Utils\.serverPort = (?P<portnum>\d+);", decompiled_data)
        if match:
            ret["ServerPort"] = int(match.group('portnum'))
        match = re.search("Utils\.instanceControlPortAgent = (?P<portnum>\d+);", decompiled_data)
        if match:
            ret["InstanceControlPortAgent"] = int(match.group('portnum'))
        match = re.search("Utils\.instanceControlPortClient = (?P<portnum>\d+);", decompiled_data)
        if match:
            ret["InstanceControlPortClient"] = int(match.group('portnum'))

        # Best-effort removal of the decoded temp jar (BUG FIX: it leaked
        # whenever decompilation failed).
        try:
            os.unlink(decoded_path)
        except OSError:
            pass

        return ret
    except Exception:
        pass

    return None
Esempio n. 9
0
    def post(self):
        """Handle a sample upload: store it and queue analysis tasks.

        Returns a dict whose "task_id" holds the list of task ids created
        (archives demux into several tasks, hence a list).
        """
        args = self._parser.parse_args()
        f = request.files["file"]

        path = store_temp_file(f.read(), f.filename, path=app.config["SAMPLES_DIRECTORY"])

        # demux returns a list of task ids (one per file inside an archive).
        # Removed the dead "main_task_id = []" that was immediately overwritten.
        main_task_id = main_db.demux_sample_and_add_to_db(file_path=path, **args)

        return dict(task_id=main_task_id)
Esempio n. 10
0
File: web.py Progetto: 0x71/cuckoo
def submit():
    """Validate the web submission form, queue the sample, and render the outcome page."""
    context = {}
    errors = False

    package = request.forms.get("package", "")
    options = request.forms.get("options", "")
    priority = request.forms.get("priority", 1)
    timeout = request.forms.get("timeout", 0)
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    memory  = request.forms.get("memory", "")
    data = request.files.file

    # Priority must be numeric; flag it for the template otherwise.
    try:
        priority = int(priority)
    except ValueError:
        context["error_toggle"] = True
        context["error_priority"] = "Needs to be a number"
        errors = True

    # A sample file is mandatory.
    if not data:
        context["error_toggle"] = True
        context["error_file"] = "Mandatory"
        errors = True

    if errors:
        # Re-render the form with the submitted values and error flags.
        fields = {"timeout": timeout,
                  "priority": priority,
                  "options": options,
                  "package": package,
                  "context": context,
                  "machine": machine,
                  "platform": platform,
                  "memory": memory}
        return env.get_template("submit.html").render(fields)

    sample_path = store_temp_file(data.file.read(), data.filename)

    task_id = db.add_path(file_path=sample_path,
                          timeout=timeout,
                          priority=priority,
                          options=options,
                          package=package,
                          machine=machine,
                          platform=platform,
                          memory=memory)

    # A falsy task id means the scheduler rejected the sample.
    if not task_id:
        template = env.get_template("error.html")
        return template.render({"error": "The server encountered an internal error while submitting {0}".format(data.filename.decode("utf-8"))})

    template = env.get_template("success.html")
    return template.render({"taskid": task_id, "submitfile": data.filename.decode("utf-8")})
Esempio n. 11
0
def cuckoo_status():
    """Report Cuckoo health: version, machine/task counts, disk space and CPU load."""
    # statvfs() needs an existing path inside the temporary directory, so
    # drop a throwaway file there to measure its filesystem.
    temp_file = store_temp_file("", "status")

    paths = {
        "binaries": os.path.join(CUCKOO_ROOT, "storage", "binaries"),
        "analyses": os.path.join(CUCKOO_ROOT, "storage", "analyses"),
        "temporary": temp_file,
    }

    diskspace = {}
    # statvfs is POSIX-only; hoisted out of the loop since it cannot change.
    if hasattr(os, "statvfs"):
        for key, path in paths.items():
            stats = os.statvfs(path)
            diskspace[key] = dict(
                free=stats.f_bavail * stats.f_frsize,
                total=stats.f_blocks * stats.f_frsize,
                used=(stats.f_blocks - stats.f_bavail) * stats.f_frsize,
            )

    # Now we remove the temporary file and its parent directory.
    os.unlink(temp_file)
    os.rmdir(os.path.dirname(temp_file))

    # CPU load average (POSIX-only as well).
    cpuload = os.getloadavg() if hasattr(os, "getloadavg") else []

    response = dict(
        version=CUCKOO_VERSION,
        hostname=socket.gethostname(),
        machines=dict(
            total=len(db.list_machines()),
            available=db.count_machines_available(),
        ),
        tasks=dict(
            total=db.count_tasks(),
            pending=db.count_tasks("pending"),
            running=db.count_tasks("running"),
            completed=db.count_tasks("completed"),
            reported=db.count_tasks("reported"),
        ),
        diskspace=diskspace,
        cpuload=cpuload,
    )

    return jsonize(response)
Esempio n. 12
0
def submit():
    """Validate the submission form, queue the sample via demux, and render the outcome.

    Renders submit.html with error flags on validation failure, error.html
    on demux failure, success.html otherwise.
    """
    context = {}
    errors = False

    package  = request.forms.get("package", "")
    options  = request.forms.get("options", "")
    priority = request.forms.get("priority", 1)
    timeout  = request.forms.get("timeout", 0)
    machine  = request.forms.get("machine", "")
    memory  = request.forms.get("memory", "")
    data = request.files.file

    # Priority must be numeric; flag it for the template otherwise.
    try:
        priority = int(priority)
    except ValueError:
        context["error_toggle"] = True
        context["error_priority"] = "Needs to be a number"
        errors = True

    if data is None or data == "":
        context["error_toggle"] = True
        context["error_file"] = "Mandatory"
        errors = True

    if errors:
        template = env.get_template("submit.html")
        return template.render({"timeout": timeout,
                                "priority": priority,
                                "options": options,
                                "package": package,
                                "context": context,
                                "machine": machine,
                                "memory": memory})

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    try:
        task_ids = db.demux_sample_and_add_to_db(file_path=temp_file_path, package=package, timeout=timeout, options=options, priority=priority,
                machine=machine, memory=memory)
        tasks_count = len(task_ids)
    except CuckooDemuxError as err:
        # BUG FIX: "template" was unbound here on the no-validation-errors
        # path, raising NameError instead of showing the error page.
        template = env.get_template("error.html")
        return template.render({"error": err})
    if tasks_count > 0:
        template = env.get_template("success.html")
        return template.render({"tasks": task_ids, "tasks_count" : tasks_count})
    else:
        template = env.get_template("error.html")
        return template.render({"error": "The server encountered an internal error while submitting {0}".format(data.filename.decode("utf-8"))})
Esempio n. 13
0
def kav_unquarantine(file):
    """Decrypt a Kaspersky (KLQB) quarantine file and store the original payload.

    Layout (little-endian uint32 fields): magic "KLQB" at 0, header length
    at 8, metadata offset at 0x10, metadata length at 0x20, original file
    size at 0x30.  Payload and metadata records are XORed with a fixed
    8-byte key.

    NOTE(review): this is Python 2 code (xrange, unicode, str-on-bytearray);
    it will not run unmodified on Python 3.
    """
    with open(file, "rb") as quarfile:
        data = bytearray(quarfile.read())

    # check for KLQB header
    magic = struct.unpack("<I", data[0:4])[0]
    if magic != 0x42514c4b:
        return None

    fsize = len(data)

    headerlen = struct.unpack("<I", data[8:12])[0]
    metaoffset = struct.unpack("<I", data[0x10:0x14])[0]
    metalen = struct.unpack("<I", data[0x20:0x24])[0]
    origlen = struct.unpack("<I", data[0x30:0x34])[0]

    # Sanity-check the advertised sizes against the actual file size.
    if fsize < headerlen + origlen + metalen:
        return None
    if metaoffset < headerlen + origlen:
        return None

    # Fallback name used when no cNP_QB_FULLNAME record is found below.
    origname = "KAVDequarantineFile"
    key = [0xe2, 0x45, 0x48, 0xec, 0x69, 0x0e, 0x5c, 0xac]

    # Walk the length-prefixed metadata records, XOR-decrypting each in
    # place, looking for the original file name (UTF-16, id "cNP_QB_FULLNAME").
    curoffset = metaoffset
    length = struct.unpack("<I", data[curoffset:curoffset + 4])[0]
    while length:
        for i in xrange(length):
            data[curoffset + 4 + i] ^= key[i % len(key)]
        idlen = struct.unpack("<I", data[curoffset + 4:curoffset + 8])[0]
        idname = str(data[curoffset + 8:curoffset + 8 + idlen]).rstrip('\0')
        if idname == "cNP_QB_FULLNAME":
            vallen = length - idlen
            origname = unicode(data[curoffset + 8 + idlen:curoffset + 4 +
                                    length]).decode("utf-16").encode(
                                        "utf8", "ignore").rstrip('\0')
        curoffset += 4 + length
        # Stop once the cursor leaves the metadata region.
        if curoffset >= metaoffset + metalen:
            break
        length = struct.unpack("<I", data[curoffset:curoffset + 4])[0]

    # Decrypt the quarantined payload itself with the same rolling key.
    for i in xrange(origlen):
        data[headerlen + i] ^= key[i % len(key)]

    return store_temp_file(data[headerlen:headerlen + origlen], origname)
Esempio n. 14
0
def submit():
    """Validate the submission form, queue the sample, and render the result page."""
    context = {}
    errors = False

    package = request.forms.get("package", "")
    options = request.forms.get("options", "")
    priority = request.forms.get("priority", 1)
    timeout = request.forms.get("timeout", "")
    data = request.files.file

    # Priority has to be an integer; flag a validation error otherwise.
    try:
        priority = int(priority)
    except ValueError:
        context["error_toggle"] = True
        context["error_priority"] = "Needs to be a number"
        errors = True

    # A sample file is mandatory.
    if data in (None, ""):
        context["error_toggle"] = True
        context["error_file"] = "Mandatory"
        errors = True

    if errors:
        # Re-render the form with the submitted values and error flags.
        return env.get_template("submit.html").render({
            "timeout": timeout,
            "priority": priority,
            "options": options,
            "package": package,
            "context": context
        })

    sample_path = store_temp_file(data.file.read(), data.filename)

    new_task = db.add_path(file_path=sample_path,
                           timeout=timeout,
                           priority=priority,
                           options=options,
                           package=package)

    return env.get_template("success.html").render({
        "taskid": new_task,
        "submitfile": data.filename.decode("utf-8")
    })
Esempio n. 15
0
def mcafee_unquarantine(f):
    """Decode a McAfee BUP quarantine file and store the first recovered sample.

    A BUP file is an OLE container whose streams are XOR-encrypted with
    0x6a: a "Details" ini-style stream describing the quarantine plus one
    "File_N" stream per quarantined file.

    NOTE(review): "details" is only bound if a Details stream is seen before
    the file streams, and the loop variable "item" is reused by the nested
    loops; both look fragile — confirm against real BUP samples.  This code
    relies on Python 2 str semantics.
    """
    if not olefile.isOleFile(f):
        return None

    with open(f, "rb") as quarfile:
        qdata = quarfile.read()

    oledata = olefile.OleFileIO(qdata)
    olefiles = oledata.listdir()
    quarfiles = list()
    for item in olefiles:
        if "Details" in item:
            # Decrypt the Details metadata stream.
            details = bytearray_xor(
                bytearray(oledata.openstream("Details").read()), 0x6a)
        else:
            # Parse for quarantine files
            for fileobj in item:
                if "File_" in fileobj:
                    quarfiles.append(fileobj)
            decoded = dict()
            # Try and decode quarantine files (sometimes there are none)
            for item in quarfiles:
                try:
                    decoded[item] = bytearray_xor(
                        bytearray(oledata.openstream(item).read()), 0x6a)
                except:
                    pass
            # Try and get original file name from details
            if decoded.keys():
                config = details.splitlines()
                malname = ""
                for item in decoded.keys():
                    parseit = False
                    for check in config:
                        # "[File_N]" section headers name the stream the
                        # following entries describe.
                        if check.startswith("["):
                            if item in check:
                                parseit = True
                        if check == '':
                            parseit = False
                        if parseit and check.startswith("OriginalName="):
                            malname = str(check.split("\\")[-1])
                    if not malname:
                        malname = "McAfeeDequarantineFile"
                    # currently we're only returning the first found file in the quarantine file
                    return store_temp_file(decoded[item], malname)
Esempio n. 16
0
def tasks_create_file():
    """Bottle API endpoint: queue an uploaded sample (with shrike metadata) for analysis.

    Returns a JSON body containing the list of created task ids.
    """
    response = {}

    data = request.files.file
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    tags = request.forms.get("tags", None)
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", False)
    clock = request.forms.get("clock", None)
    shrike_url = request.forms.get("shrike_url", None)
    shrike_msg = request.forms.get("shrike_msg", None)
    shrike_sid = request.forms.get("shrike_sid", None)
    shrike_refer = request.forms.get("shrike_refer", None)

    # BUG FIX: int(memory) raised ValueError for non-numeric form values
    # such as "true" or "", and a zero result left the flag as the truthy
    # string "0".  Treat only explicit false-like strings as off.
    memory = str(memory).strip().lower() not in ("", "0", "false", "no", "off")
    enforce_timeout = request.forms.get("enforce_timeout", False)
    enforce_timeout = str(enforce_timeout).strip().lower() not in ("", "0", "false", "no", "off")

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    task_ids = db.demux_sample_and_add_to_db(file_path=temp_file_path,
                                             package=package,
                                             timeout=timeout,
                                             options=options,
                                             priority=priority,
                                             machine=machine,
                                             platform=platform,
                                             custom=custom,
                                             memory=memory,
                                             enforce_timeout=enforce_timeout,
                                             tags=tags,
                                             clock=clock,
                                             shrike_url=shrike_url,
                                             shrike_msg=shrike_msg,
                                             shrike_sid=shrike_sid,
                                             shrike_refer=shrike_refer)
    response["task_ids"] = task_ids
    return jsonize(response)
Esempio n. 17
0
def mcafee_unquarantine(f):
    """Decode a McAfee BUP quarantine file (OLE container, streams XORed with 0x6A).

    Returns the path of the first restored quarantined file, or None when
    olefile is unavailable, the input is not an OLE file, or nothing could
    be decoded.
    """
    if not HAVE_OLEFILE:
        log.info("Missed olefile dependency: pip3 install olefile")
        return None

    if not olefile.isOleFile(f):
        return None

    with open(f, "rb") as quarfile:
        qdata = quarfile.read()

    oledata = olefile.OleFileIO(qdata)
    olefiles = oledata.listdir()
    # BUG FIX: "details" was unbound (NameError) when no Details stream
    # preceded the file streams; initialize it and guard its use below.
    details = None
    quarfiles = []
    for item in olefiles:
        if "Details" in item:
            # Decrypt the ini-style Details metadata stream.
            details = bytearray_xor(
                bytearray(oledata.openstream("Details").read()), 0x6A)
        else:
            # Collect the "File_N" members holding quarantined payloads.
            quarfiles.extend(fileobj for fileobj in item if "File_" in fileobj)
            decoded = {}
            # Try and decode quarantine files (sometimes there are none)
            for name in quarfiles:
                with contextlib.suppress(Exception):
                    decoded[name] = bytearray_xor(
                        bytearray(oledata.openstream(name).read()), 0x6A)
            # Try and get original file name from details
            if decoded and details is not None:
                config = details.splitlines()
                malname = ""
                for name, payload in decoded.items():
                    parseit = False
                    for check in config:
                        # Section headers name the stream the entries describe.
                        if check.startswith("[") and name in check:
                            parseit = True
                        if check == "":
                            parseit = False
                        if parseit and check.startswith("OriginalName="):
                            malname = str(check.rsplit("\\", 1)[-1])
                    if not malname:
                        malname = "McAfeeDequarantineFile"
                    # currently we're only returning the first found file in the quarantine file
                    return store_temp_file(payload, malname)
Esempio n. 18
0
def tasks_create_file():
    """Store an uploaded sample, queue one analysis task per demuxed file, reply with JSON ids."""
    response = {}

    data = request.files.file
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    tags = request.forms.get("tags", None)
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", 'False')
    clock = request.forms.get("clock", datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
    # Reject missing or epoch-looking clocks and fall back to "now".
    if clock is None or clock is False:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    if "1970" in clock:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    shrike_url = request.forms.get("shrike_url", None)
    shrike_msg = request.forms.get("shrike_msg", None)
    shrike_sid = request.forms.get("shrike_sid", None)
    shrike_refer = request.forms.get("shrike_refer", None)

    # Boolean flags arrive as strings: only "False" (any case) and "0" mean off.
    memory = not (memory.upper() == 'FALSE' or memory == '0')

    enforce_timeout = request.forms.get("enforce_timeout", 'False')
    enforce_timeout = not (enforce_timeout.upper() == 'FALSE' or enforce_timeout == '0')

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    try:
        task_ids = db.demux_sample_and_add_to_db(
            file_path=temp_file_path, package=package, timeout=timeout,
            options=options, priority=priority, machine=machine,
            platform=platform, custom=custom, memory=memory,
            enforce_timeout=enforce_timeout, tags=tags, clock=clock,
            shrike_url=shrike_url, shrike_msg=shrike_msg,
            shrike_sid=shrike_sid, shrike_refer=shrike_refer)
    except CuckooDemuxError as e:
        # Demux failures become an HTTP 500 for the API client.
        return HTTPError(500, e)

    response["task_ids"] = task_ids
    return jsonize(response)
Esempio n. 19
0
def mcafee_unquarantine(file):
    """Decode a McAfee BUP quarantine file and store the first recovered sample.

    A BUP file is an OLE container whose streams are XOR-encrypted with
    0x6a: a "Details" ini-style stream describing the quarantine plus one
    "File_N" stream per quarantined file.

    NOTE(review): "details" is only bound if a Details stream is seen before
    the file streams, and the loop variable "item" is reused by the nested
    loops — confirm against real BUP samples.  Relies on Python 2 str
    semantics.
    """
    if not olefile.isOleFile(file):
        return None

    with open(file, "rb") as quarfile:
        qdata = quarfile.read()

    oledata = olefile.OleFileIO(qdata)
    olefiles = oledata.listdir()
    quarfiles = list()
    for item in olefiles:
        if "Details" in item:
            # Decrypt the Details metadata stream.
            details = bytearray_xor(bytearray(oledata.openstream("Details").read()), 0x6a)
        else:
            # Parse for quarantine files
            for fileobj in item:
                if "File_" in fileobj:
                    quarfiles.append(fileobj)
            decoded = dict()
            # Try and decode quarantine files (sometimes there are none)
            for item in quarfiles:
                try:
                    decoded[item] = bytearray_xor(bytearray(oledata.openstream(item).read()), 0x6a)
                except:
                    pass
            # Try and get original file name from details
            if decoded.keys():
                config = details.splitlines()
                malname = ""
                for item in decoded.keys():
                    parseit = False
                    for check in config:
                        # "[File_N]" section headers name the stream the
                        # following entries describe.
                        if check.startswith("["):
                            if item in check:
                                parseit = True
                        if check == '':
                            parseit = False
                        if parseit and check.startswith("OriginalName="):
                            malname = str(check.split("\\")[-1])
                    if not malname:
                        malname = "McAfeeDequarantineFile"
                    # currently we're only returning the first found file in the quarantine file
                    return store_temp_file(decoded[item], malname)
Esempio n. 20
0
def kav_unquarantine(file):
    """Decrypt a Kaspersky (KLQB) quarantine file and store the original payload.

    Layout (little-endian uint32 fields): magic "KLQB" at 0, header length
    at 8, metadata offset at 0x10, metadata length at 0x20, original file
    size at 0x30.  Payload and metadata records are XORed with a fixed
    8-byte key.

    NOTE(review): Python 2 code (xrange, unicode, str-on-bytearray); it will
    not run unmodified on Python 3.
    """
    with open(file, "rb") as quarfile:
        data = bytearray(quarfile.read())

    # check for KLQB header
    magic = struct.unpack("<I", data[0:4])[0]
    if magic != 0x42514c4b:
        return None

    fsize = len(data)

    headerlen = struct.unpack("<I", data[8:12])[0]
    metaoffset = struct.unpack("<I", data[0x10:0x14])[0]
    metalen = struct.unpack("<I", data[0x20:0x24])[0]
    origlen = struct.unpack("<I", data[0x30:0x34])[0]

    # Sanity-check the advertised sizes against the actual file size.
    if fsize < headerlen + origlen + metalen:
        return None
    if metaoffset < headerlen + origlen:
        return None

    # Fallback name used when no cNP_QB_FULLNAME record is found below.
    origname = "KAVDequarantineFile"
    key = [0xe2, 0x45, 0x48, 0xec, 0x69, 0x0e, 0x5c, 0xac]

    # Walk the length-prefixed metadata records, XOR-decrypting each in
    # place, looking for the original file name (UTF-16, id "cNP_QB_FULLNAME").
    curoffset = metaoffset
    length = struct.unpack("<I", data[curoffset:curoffset+4])[0]
    while length:
        for i in xrange(length):
            data[curoffset+4+i] ^= key[i % len(key)]
        idlen = struct.unpack("<I", data[curoffset+4:curoffset+8])[0]
        idname = str(data[curoffset+8:curoffset+8+idlen]).rstrip('\0')
        if idname == "cNP_QB_FULLNAME":
            vallen = length - idlen
            origname = unicode(data[curoffset+8+idlen:curoffset+4+length]).decode("utf-16").encode("utf8", "ignore").rstrip('\0')
        curoffset += 4 + length
        # Stop once the cursor leaves the metadata region.
        if curoffset >= metaoffset + metalen:
            break
        length = struct.unpack("<I", data[curoffset:curoffset+4])[0]

    # Decrypt the quarantined payload itself with the same rolling key.
    for i in xrange(origlen):
        data[headerlen+i] ^= key[i % len(key)]

    return store_temp_file(data[headerlen:headerlen+origlen], origname)
Esempio n. 21
0
def tasks_create_file():
    """Flask API endpoint: store an uploaded sample and queue a single analysis task.

    Returns a JSON body with the created "task_id".
    """
    data = request.files["file"]
    package = request.form.get("package", "")
    timeout = request.form.get("timeout", "")
    priority = request.form.get("priority", 1)
    options = request.form.get("options", "")
    machine = request.form.get("machine", "")
    platform = request.form.get("platform", "")
    tags = request.form.get("tags", None)
    custom = request.form.get("custom", "")
    owner = request.form.get("owner", "")
    memory = request.form.get("memory", False)
    clock = request.form.get("clock", None)
    docker_images = request.form.get("docker_images", "")

    # BUG FIX: form values arrive as strings, so a submitted "False" or "0"
    # was truthy and silently enabled the flag.  Parse false-like values
    # explicitly instead of relying on bool(str).
    memory = str(memory).strip().lower() not in ("", "0", "false", "no", "off")
    enforce_timeout = request.form.get("enforce_timeout", False)
    enforce_timeout = str(enforce_timeout).strip().lower() not in ("", "0", "false", "no", "off")

    temp_file_path = store_temp_file(data.read(), data.filename)

    task_id = db.add_path(
        file_path=temp_file_path,
        package=package,
        timeout=timeout,
        priority=priority,
        options=options,
        machine=machine,
        platform=platform,
        tags=tags,
        custom=custom,
        owner=owner,
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=clock,
        docker_images=docker_images
    )

    return jsonify(task_id=task_id)
Esempio n. 22
0
def submit():
    """Validate the submission form, queue the uploaded sample, and render the result."""
    context = {}
    errors = False

    package  = request.forms.get("package", "")
    options  = request.forms.get("options", "")
    priority = request.forms.get("priority", 1)
    timeout  = request.forms.get("timeout", "")
    data = request.files.file

    # Priority has to be an integer; flag a validation error otherwise.
    try:
        priority = int(priority)
    except ValueError:
        context["error_toggle"] = True
        context["error_priority"] = "Needs to be a number"
        errors = True

    # A sample file is mandatory.
    if data in (None, ""):
        context["error_toggle"] = True
        context["error_file"] = "Mandatory"
        errors = True

    if errors:
        # Re-render the form with the submitted values and error flags.
        return env.get_template("submit.html").render({"timeout" : timeout,
                                                       "priority" : priority,
                                                       "options" : options,
                                                       "package" : package,
                                                       "context" : context})

    sample_path = store_temp_file(data.file.read(), data.filename)

    new_task = db.add_path(file_path=sample_path,
                           timeout=timeout,
                           priority=priority,
                           options=options,
                           package=package)

    return env.get_template("success.html").render({"taskid" : new_task,
                                                    "submitfile" : data.filename.decode("utf-8")})
Esempio n. 23
0
def tasks_create_file():
    """API endpoint: create analysis tasks for an uploaded file.

    Stores the uploaded sample in temporary storage, demuxes it into one or
    more tasks in the database and returns the resulting task ids as JSON.
    Responds with HTTP 500 when demuxing fails.
    """
    response = {}

    data = request.files.file
    forms = request.forms
    package = forms.get("package", "")
    timeout = forms.get("timeout", "")
    priority = forms.get("priority", 1)
    options = forms.get("options", "")
    machine = forms.get("machine", "")
    platform = forms.get("platform", "")
    tags = forms.get("tags", None)
    custom = forms.get("custom", "")
    clock = forms.get("clock", None)
    shrike_url = forms.get("shrike_url", None)
    shrike_msg = forms.get("shrike_msg", None)
    shrike_sid = forms.get("shrike_sid", None)
    shrike_refer = forms.get("shrike_refer", None)

    def _flag(raw):
        # Form values arrive as strings; only "false"/"0" (any case) is off.
        return not (raw.upper() == 'FALSE' or raw == '0')

    memory = _flag(forms.get("memory", 'False'))
    enforce_timeout = _flag(forms.get("enforce_timeout", 'False'))

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    try:
        task_ids = db.demux_sample_and_add_to_db(
            file_path=temp_file_path, package=package, timeout=timeout,
            options=options, priority=priority, machine=machine,
            platform=platform, custom=custom, memory=memory,
            enforce_timeout=enforce_timeout, tags=tags, clock=clock,
            shrike_url=shrike_url, shrike_msg=shrike_msg,
            shrike_sid=shrike_sid, shrike_refer=shrike_refer)
    except CuckooDemuxError as e:
        return HTTPError(500, e)

    response["task_ids"] = task_ids
    return jsonize(response)
Esempio n. 24
0
def tasks_create_file():
    """API endpoint: create a single analysis task for an uploaded file.

    Stores the uploaded sample in temporary storage, adds one path-based
    task to the database and returns the new task id as JSON.
    """
    response = {}

    data = request.files.file
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    tags = request.forms.get("tags", None)
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", False)
    clock = request.forms.get("clock", None)
    # Form values are strings, so the original `if memory: memory = True`
    # turned a literal "false"/"0" into True and silently enabled the flag.
    # Normalize explicitly (consistent with the sibling endpoint that
    # compares against 'FALSE'/'0').
    memory = bool(memory) and str(memory).lower() not in ("false", "0")
    enforce_timeout = request.forms.get("enforce_timeout", False)
    enforce_timeout = bool(enforce_timeout) and \
        str(enforce_timeout).lower() not in ("false", "0")

    temp_file_path = store_temp_file(data.file.read(), data.filename)
    task_id = db.add_path(
        file_path=temp_file_path,
        package=package,
        timeout=timeout,
        priority=priority,
        options=options,
        machine=machine,
        platform=platform,
        tags=tags,
        custom=custom,
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=clock
    )

    response["task_id"] = task_id
    return jsonize(response)
Esempio n. 25
0
    def save(self):
        """Store this attachment as a Cuckoo temporary file.

        Picks the best available filename (long name, then short name, then
        a random fallback), skips dot-files and files whose extension is not
        of interest for analysis.

        Returns the temporary file path, or None when the attachment is
        skipped.
        """
        # Use long filename as first preference
        filename = self.longFilename
        # Otherwise use the short filename
        if filename is None:
            filename = self.shortFilename
        # Otherwise just make something up!
        if filename is None:
            import random
            import string
            filename = 'UnknownAttachment' + \
                ''.join(random.choice(string.ascii_uppercase + string.digits)
                        for _ in range(5)) + ".bin"

        base, ext = os.path.splitext(filename)
        basename = os.path.basename(filename)
        ext = ext.lower()
        # Skip hidden/dot files such as ".DS_Store": splitext() reports no
        # extension for them, and the name starts with a dot.
        if ext == "" and len(basename) and basename[0] == ".":
            return None
        # Only keep extensions interesting for sandbox analysis. A set
        # membership test replaces the original linear scan + flag variable.
        extensions = {
            "", ".exe", ".dll", ".pdf", ".msi", ".bin", ".scr", ".zip", ".htm",
            ".html", ".doc", ".dot", ".docx", ".dotx", ".docm", ".dotm",
            ".docb", ".mht", ".xls", ".xlt", ".xlm", ".xlsx", ".xltx", ".xlsm",
            ".xltm", ".xlsb", ".xla", ".xlam", ".xll", ".xlw", ".ppt", ".pot",
            ".pps", ".pptx", ".pptm", ".potx", ".potm", ".ppam", ".ppsx",
            ".ppsm", ".sldx", ".sldm",
        }
        if ext not in extensions:
            return None

        return store_temp_file(self.data, filename)
Esempio n. 26
0
File: api.py Project: 0x71/cuckoo
def cuckoo_status():
    """API endpoint: report this Cuckoo node's health as JSON.

    Includes version, hostname, machine availability, task counts per
    state, per-directory diskspace statistics, CPU load and (on Linux)
    available-memory percentage.
    """
    # In order to keep track of the diskspace statistics of the temporary
    # directory we create a temporary file so we can statvfs() on that.
    temp_file = store_temp_file("", "status")

    paths = dict(
        binaries=os.path.join(CUCKOO_ROOT, "storage", "binaries"),
        analyses=os.path.join(CUCKOO_ROOT, "storage", "analyses"),
        temporary=temp_file,
    )

    diskspace = {}
    for key, path in paths.items():
        # NOTE(review): "temporary" is a file path, so os.path.isdir() is
        # False for it and no diskspace entry is produced for that key —
        # confirm whether dirname(temp_file) was intended here.
        if hasattr(os, "statvfs") and os.path.isdir(path):
            stats = os.statvfs(path)
            diskspace[key] = dict(
                free=stats.f_bavail * stats.f_frsize,
                total=stats.f_blocks * stats.f_frsize,
                used=(stats.f_blocks - stats.f_bavail) * stats.f_frsize,
            )

    # Now we remove the temporary file and its parent directory.
    os.unlink(temp_file)
    os.rmdir(os.path.dirname(temp_file))

    # Get the CPU load (not available on all platforms, e.g. Windows).
    if hasattr(os, "getloadavg"):
        cpuload = os.getloadavg()
    else:
        cpuload = []

    if os.path.isfile("/proc/meminfo"):
        values = {}
        for line in open("/proc/meminfo"):
            key, value = line.split(":", 1)
            values[key.strip()] = value.replace("kB", "").strip()

        if "MemAvailable" in values and "MemTotal" in values:
            # Bug fix: the guard tests for MemAvailable but the original
            # read values["MemFree"], which measures a different quantity
            # and may be absent (KeyError). Read MemAvailable as intended.
            memory = 100.0 * int(values["MemAvailable"]) / int(values["MemTotal"])
        else:
            memory = None
    else:
        memory = None

    response = dict(
        version=CUCKOO_VERSION,
        hostname=socket.gethostname(),
        machines=dict(
            total=len(db.list_machines()),
            available=db.count_machines_available()
        ),
        tasks=dict(
            total=db.count_tasks(),
            pending=db.count_tasks("pending"),
            running=db.count_tasks("running"),
            completed=db.count_tasks("completed"),
            reported=db.count_tasks("reported")
        ),
        diskspace=diskspace,
        cpuload=cpuload,
        memory=memory,
    )

    return jsonify(response)
Esempio n. 27
0
def index(request, resubmit_hash=False):
    """Django submission view.

    On POST, creates analysis tasks from one of several mutually exclusive
    sources (resubmission hash, uploaded samples, quarantine files, PCAPs,
    a URL, or VirusTotal hash downloads) and renders a completion or error
    page. On GET, renders the submission form populated with packages,
    machines and enabled-feature flags.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)
        # Pull a user-supplied "filename=" override out of the options string.
        opt_filename = ""
        for option in options.split(","):
            if option.startswith("filename="):
                opt_filename = option.split("filename=")[1]
                break
        task_gateways = []
        ipaddy_re = re.compile(
            r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
        )

        # Translate individual form checkboxes into comma-separated options.
        if referrer:
            if options:
                options += ","
            options += "referrer=%s" % (referrer)

        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"

        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"

        if request.POST.get("route", None):
            if options:
                options += ","
            options += "route={0}".format(request.POST.get("route", None))

        if request.POST.get("process_dump"):
            if options:
                options += ","
            options += "procdump=0"
        else:
            if options:
                options += ","
            options += "procdump=1"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=1"

        if request.POST.get("import_reconstruction"):
            if options:
                options += ","
            options += "import_reconstruction=1"

        if request.POST.get("disable_cape"):
            if options:
                options += ","
            options += "disable_cape=1"

        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"

        if request.POST.get("norefer"):
            if options:
                options += ","
            options += "norefer=1"

        # Keep the pre-gateway options; update_options() rewrites per gateway.
        orig_options = options

        # Resolve the requested gateway selection into concrete gateway
        # addresses (a group entry may itself hold a comma-separated list).
        if gateway and gateway.lower() == "all":
            for e in settings.GATEWAYS:
                if ipaddy_re.match(settings.GATEWAYS[e]):
                    task_gateways.append(settings.GATEWAYS[e])
        elif gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                if request.POST.get("all_gw_in_group"):
                    tgateway = settings.GATEWAYS[gateway].split(",")
                    for e in tgateway:
                        task_gateways.append(settings.GATEWAYS[e])
                else:
                    tgateway = random.choice(
                        settings.GATEWAYS[gateway].split(","))
                    task_gateways.append(settings.GATEWAYS[tgateway])
            else:
                task_gateways.append(settings.GATEWAYS[gateway])

        if not task_gateways:
            # To reduce to the default case
            task_gateways = [None]

        db = Database()
        task_ids = []
        task_machines = []

        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        status = "ok"
        # Branch 1: resubmission of a previously-seen sample by hash.
        if "hash" in request.POST and request.POST.get(
                "hash", False) and request.POST.get("hash")[0] != '':
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            paths = filter(
                None,
                [path if os.path.exists(path) else False for path in paths])
            if not paths and FULL_DB:
                tasks = results_db.analysis.find(
                    {"dropped.sha256": resubmission_hash})
                if tasks:
                    for task in tasks:
                        # grab task id and replace in path aka distributed cuckoo hack
                        path = os.path.join(settings.CUCKOO_PATH,
                                            "storage", "analyses",
                                            str(task["info"]["id"]), "files",
                                            resubmission_hash)
                        if os.path.exists(path):
                            paths = [path]
                            break
            if paths:
                content = ""
                content = submit_utils.get_file_content(paths)
                if content is False:
                    return render(request, "error.html", {
                        "error":
                        "Can't find {} on disk".format(resubmission_hash)
                    })
                base_dir = tempfile.mkdtemp(prefix='resubmit_',
                                            dir=settings.TEMP_PATH)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + resubmission_hash
                path = store_temp_file(content, filename)
                headers = {}
                url = 'local'
                params = {}

                status, task_ids = download_file(
                    content, request, db, task_ids, url, params, headers,
                    "Local", path, package, timeout, options, priority,
                    machine, gateway, clock, custom, memory, enforce_timeout,
                    referrer, tags, orig_options, task_gateways, task_machines)

        # Branch 2: one or more uploaded sample files.
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                for gw in task_gateways:
                    options = update_options(gw, orig_options)

                    for entry in task_machines:
                        try:
                            task_ids_new = db.demux_sample_and_add_to_db(
                                file_path=path,
                                package=package,
                                timeout=timeout,
                                options=options,
                                priority=priority,
                                machine=entry,
                                custom=custom,
                                memory=memory,
                                enforce_timeout=enforce_timeout,
                                tags=tags,
                                clock=clock)
                            task_ids.extend(task_ids_new)
                        except CuckooDemuxError as err:
                            return render(request, "error.html",
                                          {"error": err})

        # Branch 3: quarantine files, unpacked before submission.
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)

                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except:
                    pass

                if not path:
                    return render(request, "error.html", {
                        "error":
                        "You uploaded an unsupported quarantine file."
                    })

                for gw in task_gateways:
                    options = update_options(gw, orig_options)

                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock)
                        task_ids.extend(task_ids_new)
        # Branch 4: PCAP (or SAZ, converted to PCAP) captures.
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except:
                            pass
                        path = saz
                    else:
                        return render(
                            request, "error.html",
                            {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority)
                task_ids.append(task_id)

        # Branch 5: a URL to analyze (de-fangs hxxp:// and [.] notation).
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")
            for gw in task_gateways:
                options = update_options(gw, orig_options)

                for entry in task_machines:
                    task_id = db.add_url(url=url,
                                         package=package,
                                         timeout=timeout,
                                         options=options,
                                         priority=priority,
                                         machine=entry,
                                         custom=custom,
                                         memory=memory,
                                         enforce_timeout=enforce_timeout,
                                         tags=tags,
                                         clock=clock)
                    if task_id:
                        task_ids.append(task_id)
        # Branch 6: hashes to fetch from VirusTotal (or local cache).
        elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get(
                "vtdl", False) and request.POST.get("vtdl")[0] != '':
            vtdl = request.POST.get("vtdl")
            if (not settings.VTDL_PRIV_KEY
                    and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(
                    request, "error.html", {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                    })
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',
                                            dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.replace(" ", "").strip().split(",")
                else:
                    hashlist = vtdl.split()

                for h in hashlist:
                    if opt_filename:
                        filename = base_dir + "/" + opt_filename
                    else:
                        filename = base_dir + "/" + h

                    paths = db.sample_path_by_hash(h)
                    content = ""
                    if paths is not None:
                        content = submit_utils.get_file_content(paths)

                    headers = {}
                    url = 'https://www.virustotal.com/intelligence/download/'
                    params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}

                    if content is False:
                        if settings.VTDL_PRIV_KEY:
                            url = 'https://www.virustotal.com/vtapi/v2/file/download'
                            params = {
                                'apikey': settings.VTDL_PRIV_KEY,
                                'hash': h
                            }

                        status, task_ids = download_file(
                            content, request, db, task_ids, url, params,
                            headers, "VirusTotal", filename, package, timeout,
                            options, priority, machine, gateway, clock, custom,
                            memory, enforce_timeout, referrer, tags,
                            orig_options, task_gateways, task_machines)
                    else:

                        status, task_ids = download_file(
                            content, request, db, task_ids, url, params,
                            headers, "Local", filename, package, timeout,
                            options, priority, machine, gateway, clock, custom,
                            memory, enforce_timeout, referrer, tags,
                            orig_options, task_gateways, task_machines)
        if status == "error":
            # is render msg
            return task_ids
        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render(request, "submission/complete.html", {
                "tasks": task_ids,
                "tasks_count": tasks_count
            })
        else:
            return render(request, "error.html",
                          {"error": "Error adding task to Cuckoo's database."})
    else:
        # GET request: gather configuration to populate the submission form.
        cfg = Config("cuckoo")
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get(
            "enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [
            x.strip() for x in getattr(Config(machinery), machinery).get(
                "machines").split(",")
        ]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True

        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render(
            request, "submission/index.html", {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": vpns.values(),
                "route": cfg.routing.route,
                "internet": cfg.routing.internet,
                "inetsim": cfg.routing.inetsim,
                "tor": cfg.routing.tor,
                "gateways": settings.GATEWAYS,
                "config": enabledconf,
                "resubmit": resubmit_hash,
            })
Esempio n. 28
0
def index(request):
    """Django submission view (long-term/experiment variant).

    On POST, creates analysis tasks from an uploaded sample or a URL,
    optionally marking them as recurring "longterm" experiments, and
    renders a success or error page. On GET, renders the submission form
    populated with analysis packages and available machines.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Timeout is in minute, but we handle seconds in the backend
        timeout = force_int(request.POST.get("timeout")) * 60
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        tags = request.POST.get("tags", "")
        recurring = request.POST.get("recurring", None)
        experiment_name = request.POST.get("experiment_name", "")

        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        # Recurring submissions become TASK_RECURRENT and get a "longterm"
        # tag prefix; otherwise the task is a one-shot TASK_SINGLE.
        recurring = TASK_SINGLE
        if request.POST.get("recurring"):
            recurring = TASK_RECURRENT
            tags = "longterm,%s" % tags

        db = Database()
        task_ids = []
        task_machines = []

        # "all" fans the submission out to every unlocked machine.
        if machine.lower() == "all":
            for entry in db.list_machines(locked=False):
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            if request.FILES["sample"].size == 0:
                return render_to_response("error.html",
                                          {"error": "You uploaded an empty file."},
                                          context_instance=RequestContext(request))
            elif request.FILES["sample"].size > settings.MAX_UPLOAD_SIZE:
                return render_to_response("error.html",
                                          {"error": "You uploaded a file that exceeds that maximum allowed upload size."},
                                          context_instance=RequestContext(request))

            # Moving sample from django temporary file to Cuckoo temporary storage to
            # let it persist between reboot (if user like to configure it in that way).
            path = store_temp_file(request.FILES["sample"].read(),
                                   request.FILES["sample"].name)

            for entry in task_machines:
                task_id = db.add_path(file_path=path,
                                      package=package,
                                      timeout=timeout,
                                      options=options,
                                      priority=priority,
                                      machine=entry,
                                      custom=custom,
                                      memory=memory,
                                      enforce_timeout=enforce_timeout,
                                      tags=tags,
                                      name=experiment_name,
                                      repeat=recurring)
                if task_id:
                    task_ids.append(task_id)
        elif "url" in request.POST:
            url = request.POST.get("url").strip()
            if not url:
                return render_to_response("error.html",
                                          {"error": "You specified an invalid URL!"},
                                          context_instance=RequestContext(request))

            for entry in task_machines:
                task_id = db.add_url(url=url,
                                     package=package,
                                     timeout=timeout,
                                     options=options,
                                     priority=priority,
                                     machine=entry,
                                     custom=custom,
                                     memory=memory,
                                     enforce_timeout=enforce_timeout,
                                     tags=tags,
                                     name=experiment_name)
                if task_id:
                    task_ids.append(task_id)

        tasks_count = len(task_ids)
        if tasks_count > 0:
            if tasks_count == 1:
                message = "The analysis task was successfully added with ID {0}.".format(task_ids[0])
            else:
                message = "The analysis task were successfully added with IDs {0}.".format(", ".join(str(i) for i in task_ids))

            return render_to_response("success.html",
                                      {"message": message},
                                      context_instance=RequestContext(request))
        else:
            return render_to_response("error.html",
                                      {"error": "Error adding task to Cuckoo's database."},
                                      context_instance=RequestContext(request))
    else:
        # GET request: list analysis packages from the analyzer directory.
        files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines(locked=False):
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.name + ": " + ", ".join(tags)
            else:
                label = machine.name

            machines.append((machine.name, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render_to_response("submission/index.html",
                                  {"packages": sorted(packages),
                                   "machines": machines},
                                  context_instance=RequestContext(request))
Esempio n. 29
0
def index(request, resubmit_hash=False):
    """Main sample-submission view.

    POST: parse the submission form and dispatch to the matching handler
    branch (resubmit-by-hash, file upload, quarantine file, static-only,
    PCAP, URL, download-and-execute, VirusTotal download), then render a
    completion or error page.

    GET: render the submission form populated with packages, machines,
    routing choices and, when ``resubmit_hash`` is given, previously seen
    tasks for that hash.

    :param request: Django HttpRequest.
    :param resubmit_hash: optional sha256 used to pre-fill the form for a
        resubmission (GET only).
    """
    if request.method == "POST":

        (
            static,
            package,
            timeout,
            priority,
            options,
            machine,
            platform,
            tags,
            custom,
            memory,
            clock,
            enforce_timeout,
            shrike_url,
            shrike_msg,
            shrike_sid,
            shrike_refer,
            unique,
            referrer,
            tlp,
            tags_tasks,
            route,
            cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not breaks custom paths
        options = ",".join("=".join(value.strip()
                                    for value in option.split("=", 1))
                           for option in options.split(",")
                           if option and "=" in option)
        opt_filename = get_user_filename(options, custom)

        # Non-staff users on a public instance get their priority/timeout
        # clamped to the configured public values.
        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority

        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout

        # Map simple form checkboxes onto analyzer option flags; a trailing
        # comma is kept while appending and stripped once at the end.
        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % (referrer)

        if request.POST.get("free"):
            options += "free=yes,"

        if request.POST.get("nohuman"):
            options += "nohuman=yes,"

        if request.POST.get("tor"):
            options += "tor=yes,"

        if request.POST.get("process_dump"):
            options += "procdump=0,"

        if request.POST.get("process_memory"):
            options += "procmemdump=1,"

        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"

        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"

        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"

        if request.POST.get("norefer"):
            options += "norefer=1,"

        if request.POST.get("oldloader"):
            options += "no-iat=1,"

        if request.POST.get("unpack"):
            options += "unpack=yes,"

        # amsidump is enabled by default in the monitor for Win10+
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        # Shared argument bundle consumed by download_file(**details); the
        # branches below fill in path/content/service before calling it.
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }

        # --- Resubmission of an already-stored sample by hash -------------
        if "hash" in request.POST and request.POST.get(
                "hash", False) and request.POST.get("hash")[0] != "":
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            if paths:
                content = get_file_content(paths)
                if not content:
                    return render(
                        request, "error.html", {
                            "error":
                            "Can't find {} on disk, {}".format(
                                resubmission_hash, str(paths))
                        })
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(
                        resubmission_hash)
                path = store_temp_file(content, filename)
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append(
                        {os.path.basename(filename): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256",
                                                 resubmission_hash,
                                                 search_limit=5)
                        for record in records:
                            existent_tasks.setdefault(
                                record["target"]["file"]["sha256"],
                                []).append(record)
            else:
                return render(
                    request, "error.html",
                    {"error": "File not found on hdd for resubmission"})

        # --- Regular file upload(s) ---------------------------------------
        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            details["service"] = "WebGUI"
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    details["errors"].append(
                        {sample.name: "You uploaded an empty file."})
                    continue
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    details["errors"].append({
                        sample.name:
                        "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."
                    })
                    continue

                if opt_filename:
                    filename = opt_filename
                else:
                    filename = sanitize_filename(sample.name)
                # Moving sample from django temporary file to CAPE temporary storage to let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), filename)
                sha256 = File(path).get_sha256()
                # Reject duplicates for non-staff when unique submission is
                # enforced (globally or per-request).
                if (not request.user.is_staff
                        and (web_conf.uniq_submission.enabled or unique)
                        and db.check_file_uniq(
                            sha256, hours=web_conf.uniq_submission.hours)):
                    details["errors"].append({
                        filename:
                        "Duplicated file, disable unique option on submit or in conf/web.conf to force submission"
                    })
                    continue

                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout

                details["path"] = path
                details["content"] = get_file_content(path)
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append(
                        {os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256",
                                                 sha256,
                                                 search_limit=5)
                        for record in records:
                            if record.get("target").get("file",
                                                        {}).get("sha256"):
                                existent_tasks.setdefault(
                                    record["target"]["file"]["sha256"],
                                    []).append(record)
                    details["task_ids"] = task_ids_tmp

        # --- AV quarantine files: unquarantine then submit -----------------
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {
                            "error":
                            "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in conf/web.conf."
                        },
                    )

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)

                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)

                if not path:
                    return render(request, "error.html", {
                        "error":
                        "You uploaded an unsupported quarantine file."
                    })

                details["path"] = path
                details["content"] = get_file_content(path)
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append({sample.name: task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp

        # --- Static-analysis-only submission -------------------------------
        elif "static" in request.FILES:
            samples = request.FILES.getlist("static")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {
                            "error":
                            "You uploaded a file that exceeds the maximum allowed upload size specified in conf/web.conf."
                        },
                    )

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                task_id = db.add_static(file_path=path,
                                        priority=priority,
                                        tlp=tlp,
                                        user_id=request.user.id or 0)
                if not task_id:
                    return render(
                        request, "error.html",
                        {"error": "We don't have static extractor for this"})
                details["task_ids"] += task_id

        # --- PCAP (or Fiddler SAZ, converted to PCAP) submission -----------
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request,
                        "error.html",
                        {
                            "error":
                            "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in conf/web.conf."
                        },
                    )

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        # Best-effort removal of the original SAZ temp file.
                        try:
                            os.remove(path)
                        except Exception:
                            pass
                        path = saz
                    else:
                        return render(
                            request, "error.html",
                            {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path,
                                      priority=priority,
                                      tlp=tlp,
                                      user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)

        # --- URL analysis ---------------------------------------------------
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            # Undo common defanging (hxxp / [.]) before submitting.
            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")

            if machine.lower() == "all":
                machines = [
                    vm.name for vm in db.list_machines(platform=platform)
                ]
            elif machine:
                machine_details = db.view_machine(machine)
                if platform and hasattr(
                        machine_details, "platform"
                ) and not machine_details.platform == platform:
                    return render(
                        request,
                        "error.html",
                        {
                            "error":
                            "Wrong platform, {} VM selected for {} sample".
                            format(machine_details.platform, platform)
                        },
                    )
                else:
                    machines = [machine]

            else:
                # None lets the scheduler pick the first available machine.
                machines = [None]
            for entry in machines:
                task_id = db.add_url(
                    url=url,
                    package=package,
                    timeout=timeout,
                    priority=priority,
                    options=options,
                    machine=entry,
                    platform=platform,
                    tags=tags,
                    custom=custom,
                    memory=memory,
                    enforce_timeout=enforce_timeout,
                    clock=clock,
                    shrike_url=shrike_url,
                    shrike_msg=shrike_msg,
                    shrike_sid=shrike_sid,
                    shrike_refer=shrike_refer,
                    route=route,
                    cape=cape,
                    tags_tasks=tags_tasks,
                    user_id=request.user.id or 0,
                )
                details["task_ids"].append(task_id)

        # --- Download-and-execute: fetch a remote file, then submit it ------
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            url = request.POST.get("dlnexec").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")
            response = _download_file(request.POST.get("route"), url, options)
            if not response:
                return render(request, "error.html",
                              {"error": "Was impossible to retrieve url"})

            name = os.path.basename(url)
            if not "." in name:
                name = get_user_filename(options,
                                         custom) or generate_fake_name()

            path = store_temp_file(response, name)
            details["path"] = path
            details["content"] = get_file_content(path)
            details["service"] = "DLnExec"
            details["source_url"] = url
            status, task_ids_tmp = download_file(**details)
            if status == "error":
                details["errors"].append({name: task_ids_tmp})
            else:
                details["task_ids"] = task_ids_tmp
        # --- Download sample from VirusTotal by hash ------------------------
        elif (settings.VTDL_ENABLED and "vtdl" in request.POST
              and request.POST.get("vtdl", False)
              and request.POST.get("vtdl")[0] != ""):
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(
                    request,
                    "error.html",
                    {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable and VTDL_PATH base directory"
                    },
                )
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(
                    request.POST.get("vtdl").strip(), details, opt_filename,
                    settings)

        # Render the outcome page based on how many tasks were created.
        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(
                request, "error.html", {
                    "error": "Error adding task(s) to CAPE's database.",
                    "errors": details["errors"]
                })
    else:
        # GET: build the submission form context from configuration.
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf[
            "dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled

        if all_vms_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # load multi machinery tags:
            # Get enabled machinery
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get(
                        "machinery").split(","):
                    vms = [
                        x.strip() for x in getattr(Config(
                            mmachinery), mmachinery).get("machines").split(",")
                    ]
                    if any([
                            "tags"
                            in list(getattr(Config(mmachinery), vmtag).keys())
                            for vmtag in vms
                    ]):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements
                vms = [
                    x.strip() for x in getattr(Config(
                        machinery), machinery).get("machines").split(",")
                ]
                # Check each VM config element for tags
                if any([
                        "tags"
                        in list(getattr(Config(machinery), vmtag).keys())
                        for vmtag in vms
                ]):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")

        socks5s = _load_socks5_operational()

        socks5s_random = ""
        vpn_random = ""

        if routing.socks5.random_socks5 and socks5s:
            # BUGFIX: dict.values() is a view, not a sequence; random.choice()
            # needs indexable input, so materialize it with list() (as the
            # parallel branches below already do).
            # NOTE(review): the unconditional "if socks5s" block below
            # overwrites this choice anyway -- presumably intentional upstream
            # ordering, left untouched.
            socks5s_random = random.choice(list(socks5s.values())).get("name", False)

        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)

        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get(
                "name", False)

        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        # Pre-populate previously-seen tasks for a resubmission prompt.
        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256",
                                         resubmit_hash,
                                         search_limit=5)
                for record in records:
                    existent_tasks.setdefault(
                        record["target"]["file"]["sha256"], list())
                    existent_tasks[record["target"]["file"]["sha256"]].append(
                        record)

        return render(
            request,
            "submission/index.html",
            {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": list(vpns.values()),
                "random_route": random_route,
                "socks5s": list(socks5s.values()),
                "route": routing.routing.route,
                "internet": routing.routing.internet,
                "inetsim": routing.inetsim.enabled,
                "tor": routing.tor.enabled,
                "config": enabledconf,
                "resubmit": resubmit_hash,
                "tags": sorted(list(set(all_vms_tags))),
                "existent_tasks": existent_tasks,
                "all_exitnodes": all_nodes_exits_list,
            },
        )
Esempio n. 30
0
def index(request, resubmit_hash=False):
    if request.method == "POST":
        package = request.POST.get("package", "")
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        lin_options = request.POST.get("lin_options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        clock = request.POST.get(
            "clock",
            datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
        if not clock:
            clock = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")
        if "1970" in clock:
            clock = datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)
        static = bool(request.POST.get("static", False))
        all_tags = load_vms_tags()
        if tags and not all(
            [tag.strip() in all_tags for tag in tags.split(",")]):
            return render(request, "error.html", {
                "error":
                "Check Tags help, you have introduced incorrect tag(s)"
            })

        if lin_options:
            options = lin_options
        # This is done to remove spaces in options but not breaks custom paths
        options = ','.join('='.join(value.strip()
                                    for value in option.split("=", 1))
                           for option in options.split(",")
                           if option and '=' in option)
        opt_filename = get_user_filename(options, custom)

        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % (referrer)

        if request.POST.get("free"):
            options += "free=yes,"

        if request.POST.get("nohuman"):
            options += "nohuman=yes,"

        if request.POST.get("tor"):
            options += "tor=yes,"

        if request.POST.get("route", None):
            options += "route={0},".format(request.POST.get("route", None))

        if request.POST.get("process_dump"):
            options += "procdump=0,"

        if request.POST.get("process_memory"):
            options += "procmemdump=1,"

        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"

        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"

        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"

        if request.POST.get("norefer"):
            options += "norefer=1,"

        if request.POST.get("oldloader"):
            options += "loader=oldloader.exe,loader_64=oldloader_x64.exe,"

        if request.POST.get("unpack"):
            options += "unpack=yes,"

        options = options[:-1]

        unique = request.POST.get("unique", False)

        orig_options = options
        task_ids = []
        task_machines = []

        status = "ok"
        failed_hashes = list()
        task_ids_tmp = list()

        if "hash" in request.POST and request.POST.get(
                "hash", False) and request.POST.get("hash")[0] != '':
            resubmission_hash = request.POST.get("hash").strip()
            paths = db.sample_path_by_hash(resubmission_hash)
            if paths:
                paths = [
                    _f for _f in [
                        path if os.path.exists(path) else False
                        for path in paths
                    ] if _f
                ]
                if not paths and FULL_DB:
                    tasks = results_db.analysis.find(
                        {"dropped.sha256": resubmission_hash}, {
                            "info.id": 1,
                            "_id": 0
                        })
                    if tasks:
                        for task in tasks or []:
                            # grab task id and replace in path if needed aka distributed hack
                            path = os.path.join(settings.CUCKOO_PATH,
                                                "storage", "analyses",
                                                str(task["info"]["id"]),
                                                "files", resubmission_hash)
                            if os.path.exists(path):
                                paths = [path]
                                break

            if paths:
                content = False
                content = get_file_content(paths)
                if not content:
                    return render(
                        request, "error.html", {
                            "error":
                            "Can't find {} on disk, {}".format(
                                resubmission_hash, str(paths))
                        })
                base_dir = tempfile.mkdtemp(prefix='resubmit_',
                                            dir=settings.TEMP_PATH)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(
                        resubmission_hash)
                path = store_temp_file(content, filename)
                headers = {}
                url = 'local'
                params = {}

                status, task_ids = download_file(
                    False, content, request, db, task_ids, url, params,
                    headers, "Local", path, package, timeout, options,
                    priority, machine, clock, custom, memory, enforce_timeout,
                    referrer, tags, orig_options, "", static)
            else:
                return render(
                    request, "error.html",
                    {"error": "File not found on hdd for resubmission"})

        elif "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a file that exceeds the maximum allowed upload size "
                            "specified in web/web/local_settings.py."
                        })

                if opt_filename:
                    filename = opt_filename
                else:
                    filename = sanitize_filename(sample.name)
                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), filename)

                if unique and db.check_file_uniq(File(path).get_sha256()):
                    return render(
                        request, "error.html", {
                            "error":
                            "Duplicated file, disable unique option to force submission"
                        })

                magic_type = get_magic_type(path)
                if disable_x64 is True:
                    if magic_type and ("x86-64" in magic_type
                                       or "PE32+" in magic_type):
                        if len(samples) == 1:
                            return render(
                                request, "error.html",
                                {"error": "Sorry no x64 support yet"})
                        else:
                            continue

                    orig_options, timeout, enforce_timeout = recon(
                        path, orig_options, timeout, enforce_timeout)

                platform = get_platform(magic_type)
                if machine.lower() == "all":
                    task_machines = [
                        vm.name for vm in db.list_machines(platform=platform)
                    ]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if hasattr(machine_details, "platform"
                               ) and not machine_details.platform == platform:
                        return render(
                            request, "error.html", {
                                "error":
                                "Wrong platform, {} VM selected for {} sample".
                                format(machine_details.platform, platform)
                            })
                    else:
                        task_machines = [machine]

                else:
                    task_machines = ["first"]

                for entry in task_machines:
                    if entry == "first":
                        entry = None
                    try:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            platform=platform,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock,
                            static=static)
                        task_ids.extend(task_ids_new)
                    except CuckooDemuxError as err:
                        return render(request, "error.html", {"error": err})

        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a quarantine file that exceeds the maximum \
                                  allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)

                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    pass

                if not path:
                    return render(request, "error.html", {
                        "error":
                        "You uploaded an unsupported quarantine file."
                    })

                if machine.lower() == "all":
                    task_machines = [
                        vm.name for vm in db.list_machines(platform="windows")
                    ]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if not machine_details.platform == "windows":
                        return render(
                            request, "error.html", {
                                "error":
                                "Wrong platform, linux VM selected for {} sample"
                                .format(machine_details.platform)
                            })
                    else:
                        task_machines = [machine]

                if not task_machines:
                    task_machines = ["first"]

                for entry in task_machines:
                    if entry == "first":
                        entry = None
                    task_ids_new = db.demux_sample_and_add_to_db(
                        file_path=path,
                        package=package,
                        timeout=timeout,
                        options=options,
                        priority=priority,
                        machine=entry,
                        custom=custom,
                        memory=memory,
                        tags=tags,
                        enforce_timeout=enforce_timeout,
                        clock=clock)
                    if task_ids_new:
                        task_ids.extend(task_ids_new)

        elif "static" in request.FILES:
            samples = request.FILES.getlist("static")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a file that exceeds the maximum \
                    allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                task_id = db.add_static(file_path=path, priority=priority)
                if not task_id:
                    return render(
                        request, "error.html",
                        {"error": "We don't have static extractor for this"})
                task_ids.append(task_id)

        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a PCAP file that exceeds the maximum \
                     allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception as e:
                            pass
                        path = saz
                    else:
                        return render(
                            request, "error.html",
                            {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority)
                if task_id:
                    task_ids.append(task_id)

        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")

            if machine.lower() == "all":
                task_machines = [
                    vm.name for vm in db.list_machines(platform="windows")
                ]
            elif machine:
                machine_details = db.view_machine(machine)
                if not machine_details.platform == "windows":
                    return render(
                        request, "error.html", {
                            "error":
                            "Wrong platform, linux VM selected for {} sample".
                            format(machine_details.platform)
                        })
                else:
                    task_machines = [machine]

            else:
                task_machines = ["first"]

            for entry in task_machines:
                if entry == "first":
                    entry = None
                task_ids_new = db.add_url(url=url,
                                          package=package,
                                          timeout=timeout,
                                          options=options,
                                          priority=priority,
                                          machine=entry,
                                          custom=custom,
                                          memory=memory,
                                          enforce_timeout=enforce_timeout,
                                          tags=tags,
                                          clock=clock)
                if task_ids_new:
                    task_ids.extend(task_ids_new)

        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            url = request.POST.get("dlnexec").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")
            response = _download_file(request.POST.get("route", None), url,
                                      options)
            if not response:
                return render(request, "error.html",
                              {"error": "Was impossible to retrieve url"})

            name = os.path.basename(url)
            if not "." in name:
                name = get_user_filename(options,
                                         custom) or generate_fake_name()
            path = store_temp_file(response, name)

            magic_type = get_magic_type(path)
            platform = get_platform(magic_type)

            if machine.lower() == "all":
                task_machines = [
                    vm.name for vm in db.list_machines(platform=platform)
                ]
            elif machine:
                machine_details = db.view_machine(machine[0])
                if not machine_details.platform == platform:
                    return render(
                        request, "error.html", {
                            "error":
                            "Wrong platform, {} VM selected for {} sample".
                            format(machine_details.platform, platform)
                        })
                else:
                    task_machines = [machine]
            else:
                task_machines = ["first"]

            for entry in task_machines:
                if entry == "first":
                    entry = None
                task_ids_new = db.demux_sample_and_add_to_db(
                    file_path=path,
                    package=package,
                    timeout=timeout,
                    options=options,
                    priority=priority,
                    machine=entry,
                    custom=custom,
                    memory=memory,
                    enforce_timeout=enforce_timeout,
                    tags=tags,
                    platform=platform,
                    clock=clock)
                if task_ids_new:
                    task_ids.extend(task_ids_new)

        elif settings.VTDL_ENABLED and "vtdl" in request.POST and request.POST.get("vtdl", False) \
                and request.POST.get("vtdl")[0] != '':
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY
                    and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(
                    request, "error.html", {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your "
                        "VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                    })
            else:
                hashlist = []
                if "," in vtdl:
                    hashlist = [
                        _f for _f in vtdl.replace(" ", "").strip().split(",")
                        if _f
                    ]
                else:
                    hashlist.append(vtdl)

                for h in hashlist:
                    base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',
                                                dir=settings.VTDL_PATH)
                    task_ids_tmp = list()
                    if opt_filename:
                        filename = base_dir + "/" + opt_filename
                    else:
                        filename = base_dir + "/" + sanitize_filename(h)
                    headers = {}
                    paths = db.sample_path_by_hash(h)
                    content = False
                    if paths:
                        content = get_file_content(paths)
                    if settings.VTDL_PRIV_KEY:
                        headers = {'x-apikey': settings.VTDL_PRIV_KEY}
                    elif settings.VTDL_INTEL_KEY:
                        headers = {'x-apikey': settings.VTDL_INTEL_KEY}
                    url = "https://www.virustotal.com/api/v3/files/{id}/download".format(
                        id=h)
                    params = {}

                    if not content:
                        status, task_ids_tmp = download_file(
                            False, content, request, db, task_ids, url, params,
                            headers, "VirusTotal", filename, package, timeout,
                            options, priority, machine, clock, custom, memory,
                            enforce_timeout, referrer, tags, orig_options, "",
                            static, h)
                    else:
                        status, task_ids_tmp = download_file(
                            False, content, request, db, task_ids, url, params,
                            headers, "Local", filename, package, timeout,
                            options, priority, machine, clock, custom, memory,
                            enforce_timeout, referrer, tags, orig_options, "",
                            static, h)
                    if status is "ok":
                        task_ids = task_ids_tmp
                    else:
                        failed_hashes.append(h)

        if not isinstance(task_ids, list) and status == "error":
            # is render msg
            return task_ids
        if not isinstance(task_ids_tmp, list) and status == "error":
            # is render msg
            return task_ids_tmp
        if isinstance(task_ids, list):
            tasks_count = len(task_ids)
        else:
            # ToDo improve error msg
            tasks_count = 0
        tasks_count = len(task_ids)
        if tasks_count > 0:
            data = {"tasks": task_ids, "tasks_count": tasks_count}
            if failed_hashes:
                data["failed_hashes"] = failed_hashes
            return render(request, "submission/complete.html", data)

        else:
            return render(request, "error.html",
                          {"error": "Error adding task to Cuckoo's database."})
    else:
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf[
            "dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled

        all_tags = load_vms_tags()
        if all_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # load multi machinery tags:
            # Get enabled machinery
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get(
                        "machinery").split(","):
                    vms = [
                        x.strip() for x in getattr(Config(
                            mmachinery), mmachinery).get("machines").split(",")
                    ]
                    if any([
                            "tags"
                            in list(getattr(Config(mmachinery), vmtag).keys())
                            for vmtag in vms
                    ]):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements
                vms = [
                    x.strip() for x in getattr(Config(
                        machinery), machinery).get("machines").split(",")
                ]
                # Check each VM config element for tags
                if any([
                        "tags"
                        in list(getattr(Config(machinery), vmtag).keys())
                        for vmtag in vms
                ]):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")

        socks5s = _load_socks5_operational()
        socks5s_random = ""
        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get(
                "description", False)

        return render(
            request, "submission/index.html", {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": list(vpns.values()),
                "socks5s": list(socks5s.values()),
                "socks5s_random": socks5s_random,
                "route": routing.routing.route,
                "internet": routing.routing.internet,
                "inetsim": routing.inetsim.enabled,
                "tor": routing.tor.enabled,
                "config": enabledconf,
                "resubmit": resubmit_hash,
                "tags": sorted(list(set(all_tags)))
            })
Esempio n. 31
0
def index(request):
    """Analysis submission view.

    POST: accept one of a "sample"/"quarantine"/"pcap" file upload, a "url",
    or a list of VirusTotal hashes ("vtdl"); schedule the corresponding
    analysis task(s) and render a completion or error page.
    GET: render the submission form together with the enabled features.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Cap the requested timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referer = validate_referer(request.POST.get("referer", None))
        tags = request.POST.get("tags", None)

        task_gateways = []
        # Matches a dotted-quad IPv4 address (each octet 0-255).
        ipaddy_re = re.compile(r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$")

        # Fold the simple form checkboxes into the analysis options string.
        if referer:
            if options:
                options += ","
            options += "referer=%s" % (referer)

        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"

        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"

        # Baseline options: per-gateway options are re-derived from this for
        # every gateway below via update_options().
        orig_options = options

        # Resolve the requested gateway selection into concrete addresses.
        if gateway and gateway.lower() == "all":
            for e in settings.GATEWAYS:
                if ipaddy_re.match(settings.GATEWAYS[e]):
                    task_gateways.append(settings.GATEWAYS[e])
        elif gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                # A gateway group: use every member, or pick one at random.
                if request.POST.get("all_gw_in_group"):
                    tgateway = settings.GATEWAYS[gateway].split(",")
                    for e in tgateway:
                        task_gateways.append(settings.GATEWAYS[e])
                else:
                    tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
                    task_gateways.append(settings.GATEWAYS[tgateway])
            else:
                task_gateways.append(settings.GATEWAYS[gateway])

        if not task_gateways:
            # To reduce to the default case
            task_gateways = [None]

        db = Database()
        task_ids = []
        task_machines = []

        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html",
                                  {"error": "You uploaded a file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."})

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                for gw in task_gateways:
                    options = update_options(gw, orig_options)

                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority,
                                                                     machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                        task_ids.extend(task_ids_new)
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html",
                                  {"error": "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."})

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)

                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except OSError:
                    # Best-effort cleanup of the temporary copy.
                    pass

                if not path:
                    return render(request, "error.html",
                                  {"error": "You uploaded an unsupported quarantine file."})

                for gw in task_gateways:
                    options = update_options(gw, orig_options)

                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(file_path=path, package=package, timeout=timeout, options=options, priority=priority,
                                                                     machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                        task_ids.extend(task_ids_new)
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(request, "error.html",
                                  {"error": "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."})

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                # Fiddler .saz archives are converted to PCAP before import.
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except OSError:
                            pass
                        path = saz
                    else:
                        return render(request, "error.html",
                                      {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority)
                # Only record successfully created tasks; add_pcap may return None.
                if task_id:
                    task_ids.append(task_id)

        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            # Un-defang commonly obfuscated URLs (hxxp://, [.]).
            url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".")
            for gw in task_gateways:
                options = update_options(gw, orig_options)

                for entry in task_machines:
                    task_id = db.add_url(url=url,
                                         package=package,
                                         timeout=timeout,
                                         options=options,
                                         priority=priority,
                                         machine=entry,
                                         custom=custom,
                                         memory=memory,
                                         enforce_timeout=enforce_timeout,
                                         tags=tags,
                                         clock=clock)
                    if task_id:
                        task_ids.append(task_id)
        elif settings.VTDL_ENABLED and "vtdl" in request.POST:
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(request, "error.html",
                              {"error": "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"})
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl', dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.split(",")
                else:
                    hashlist.append(vtdl)
                onesuccess = False

                for h in hashlist:
                    filename = base_dir + "/" + h
                    # The private API and the Intelligence API use different endpoints.
                    if settings.VTDL_PRIV_KEY:
                        url = 'https://www.virustotal.com/vtapi/v2/file/download'
                        params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
                    else:
                        url = 'https://www.virustotal.com/intelligence/download/'
                        params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}

                    try:
                        r = requests.get(url, params=params, verify=True)
                    except requests.exceptions.RequestException as e:
                        return render(request, "error.html",
                                      {"error": "Error completing connection to VirusTotal: {0}".format(e)})
                    if r.status_code == 200:
                        try:
                            with open(filename, 'wb') as f:
                                f.write(r.content)
                        except OSError:
                            return render(request, "error.html",
                                          {"error": "Error writing VirusTotal download file to temporary path"})

                        onesuccess = True

                        for gw in task_gateways:
                            options = update_options(gw, orig_options)

                            for entry in task_machines:
                                task_ids_new = db.demux_sample_and_add_to_db(file_path=filename, package=package, timeout=timeout, options=options, priority=priority,
                                                                             machine=entry, custom=custom, memory=memory, enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                                task_ids.extend(task_ids_new)
                    elif r.status_code == 403:
                        return render(request, "error.html",
                                      {"error": "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"})

                if not onesuccess:
                    return render(request, "error.html",
                                  {"error": "Provided hash not found on VirusTotal"})

        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render(request, "submission/complete.html",
                          {"tasks": task_ids,
                           "tasks_count": tasks_count})
        else:
            return render(request, "error.html",
                          {"error": "Error adding task to Cuckoo's database."})
    else:
        # GET: build the context describing which features are enabled.
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get("enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [x.strip() for x in getattr(Config(machinery), machinery).get("machines").split(",")]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True

        # Analysis packages are the modules shipped with the Windows analyzer.
        files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render(request, "submission/index.html",
                      {"packages": sorted(packages),
                       "machines": machines,
                       "gateways": settings.GATEWAYS,
                       "config": enabledconf})
Esempio n. 32
0
def experiment_create_file():
    """Create a long-term (recurrent) analysis experiment from an uploaded file.

    Validates the ``runs`` and ``delta`` form fields, stores the sample in
    Cuckoo's temporary storage and registers a recurrent task tagged
    ``longterm``.  Returns a JSON document containing the task id and, when
    the task was created, the associated experiment id.
    """
    data = request.files.file

    # `runs` must look like an integer (a leading "-" is tolerated, e.g. -1
    # presumably meaning "unlimited" — TODO confirm); otherwise reject.
    runs = request.forms.get("runs", "")
    if not runs.lstrip("-").isdigit():
        return HTTPError(
            400, "Please provide the `runs` variable indicating the maximum "
            "number of times this experiment should run.")

    # `delta` describes the duration of each individual analysis run.
    delta = time_duration(request.forms.get("delta"))
    if not delta:
        return HTTPError(
            400, "Please provide a proper `delta` to specify the length of "
            "each analysis run.")

    # Checkbox-style form fields: any truthy value means "enabled".
    memory = request.forms.get("memory", False)
    if memory:
        memory = True

    enforce_timeout = request.forms.get("enforce_timeout", False)
    if enforce_timeout:
        enforce_timeout = True

    # Persist the upload in Cuckoo's temporary storage and register the
    # recurrent task; remaining parameters are read straight from the form.
    sample_path = store_temp_file(data.file.read(), data.filename)
    task_id = db.add_path(
        file_path=sample_path,
        package=request.forms.get("package", ""),
        timeout=time_duration(request.forms.get("timeout", "")),
        priority=request.forms.get("priority", 1),
        options=request.forms.get("options", ""),
        machine=request.forms.get("machine", ""),
        platform=request.forms.get("platform", ""),
        tags="longterm," + request.forms.get("tags", ""),
        custom=request.forms.get("custom", ""),
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=request.forms.get("clock", None),
        name=request.forms.get("name", None),
        repeat=TASK_RECURRENT,
        delta=delta,
        runs=runs,
    )

    response = {}
    if task_id:
        response["experiment_id"] = db.view_task(task_id).experiment_id

    response["task_id"] = task_id
    return jsonize(response)
Esempio n. 33
0
def index(request):
    """Render the submission form (GET) or queue new analysis tasks (POST).

    POST handles several submission types: sample files, quarantine files,
    PCAP/SAZ network captures, URLs, and VirusTotal hash downloads.  Each
    sample may be fanned out over multiple gateways and machines.  On
    success the "submission complete" page is rendered with the created
    task ids; any validation problem renders the generic error page.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Cap the user-provided timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        referrer = validate_referrer(request.POST.get("referrer", None))
        tags = request.POST.get("tags", None)

        task_gateways = []
        # Dotted-quad IPv4 matcher used to validate configured gateways.
        ipaddy_re = re.compile(
            r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
        )

        # Translate the various checkbox fields into analyzer options.
        if referrer:
            if options:
                options += ","
            options += "referrer=%s" % (referrer)

        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"

        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"

        # Keep a pristine copy: per-gateway options are derived from it below.
        orig_options = options

        # Resolve the requested gateway selection into concrete addresses.
        if gateway and gateway.lower() == "all":
            for e in settings.GATEWAYS:
                if ipaddy_re.match(settings.GATEWAYS[e]):
                    task_gateways.append(settings.GATEWAYS[e])
        elif gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                # A comma-separated entry is a named group of gateways.
                if request.POST.get("all_gw_in_group"):
                    tgateway = settings.GATEWAYS[gateway].split(",")
                    for e in tgateway:
                        task_gateways.append(settings.GATEWAYS[e])
                else:
                    tgateway = random.choice(
                        settings.GATEWAYS[gateway].split(","))
                    task_gateways.append(settings.GATEWAYS[tgateway])
            else:
                task_gateways.append(settings.GATEWAYS[gateway])

        if not task_gateways:
            # To reduce to the default case
            task_gateways = [None]

        db = Database()
        task_ids = []
        task_machines = []

        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            samples = request.FILES.getlist("sample")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(request, "error.html",
                                  {"error": "You uploaded an empty file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                for gw in task_gateways:
                    options = update_options(gw, orig_options)

                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock)
                        task_ids.extend(task_ids_new)
        elif "quarantine" in request.FILES:
            samples = request.FILES.getlist("quarantine")
            for sample in samples:
                # Error if there was only one submitted sample and it's empty.
                # But if there are multiple and one was empty, just ignore it.
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty quarantine file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a quarantine file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                tmp_path = store_temp_file(sample.read(), sample.name)

                path = unquarantine(tmp_path)
                # Best-effort removal of the intermediate copy.
                try:
                    os.remove(tmp_path)
                except OSError:
                    pass

                if not path:
                    return render(request, "error.html", {
                        "error":
                        "You uploaded an unsupported quarantine file."
                    })

                for gw in task_gateways:
                    options = update_options(gw, orig_options)

                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock)
                        task_ids.extend(task_ids_new)
        elif "pcap" in request.FILES:
            samples = request.FILES.getlist("pcap")
            for sample in samples:
                if not sample.size:
                    if len(samples) != 1:
                        continue

                    return render(
                        request, "error.html",
                        {"error": "You uploaded an empty PCAP file."})
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render(
                        request, "error.html", {
                            "error":
                            "You uploaded a PCAP file that exceeds the maximum allowed upload size specified in web/web/local_settings.py."
                        })

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                # Fiddler archives are converted to PCAP before submission.
                if sample.name.lower().endswith(".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        # Best-effort removal of the original SAZ file.
                        try:
                            os.remove(path)
                        except OSError:
                            pass
                        path = saz
                    else:
                        return render(
                            request, "error.html",
                            {"error": "Conversion from SAZ to PCAP failed."})

                task_id = db.add_pcap(file_path=path, priority=priority)
                task_ids.append(task_id)

        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            if not url:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            # Undo common defanging ("hxxp://", "[.]") before submission.
            url = url.replace("hxxps://", "https://").replace(
                "hxxp://", "http://").replace("[.]", ".")
            for gw in task_gateways:
                options = update_options(gw, orig_options)

                for entry in task_machines:
                    task_id = db.add_url(url=url,
                                         package=package,
                                         timeout=timeout,
                                         options=options,
                                         priority=priority,
                                         machine=entry,
                                         custom=custom,
                                         memory=memory,
                                         enforce_timeout=enforce_timeout,
                                         tags=tags,
                                         clock=clock)
                    if task_id:
                        task_ids.append(task_id)
        elif settings.VTDL_ENABLED and "vtdl" in request.POST:
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY
                    and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render(
                    request, "error.html", {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                    })
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',
                                            dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.split(",")
                else:
                    hashlist.append(vtdl)
                onesuccess = False

                for h in hashlist:
                    filename = base_dir + "/" + h
                    # Prefer the private API key; fall back to Intelligence.
                    if settings.VTDL_PRIV_KEY:
                        url = 'https://www.virustotal.com/vtapi/v2/file/download'
                        params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
                    else:
                        url = 'https://www.virustotal.com/intelligence/download/'
                        params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}

                    try:
                        r = requests.get(url, params=params, verify=True)
                    except requests.exceptions.RequestException as e:
                        return render(
                            request, "error.html", {
                                "error":
                                "Error completing connection to VirusTotal: {0}"
                                .format(e)
                            })
                    if r.status_code == 200:
                        # Use a context manager so the handle is always
                        # closed, and only catch file-system errors instead
                        # of a bare except that would hide real bugs.
                        try:
                            with open(filename, 'wb') as f:
                                f.write(r.content)
                        except (IOError, OSError):
                            return render(
                                request, "error.html", {
                                    "error":
                                    "Error writing VirusTotal download file to temporary path"
                                })

                        onesuccess = True

                        for gw in task_gateways:
                            options = update_options(gw, orig_options)

                            for entry in task_machines:
                                task_ids_new = db.demux_sample_and_add_to_db(
                                    file_path=filename,
                                    package=package,
                                    timeout=timeout,
                                    options=options,
                                    priority=priority,
                                    machine=entry,
                                    custom=custom,
                                    memory=memory,
                                    enforce_timeout=enforce_timeout,
                                    tags=tags,
                                    clock=clock)
                                task_ids.extend(task_ids_new)
                    elif r.status_code == 403:
                        return render(
                            request, "error.html", {
                                "error":
                                "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"
                            })

                if not onesuccess:
                    return render(
                        request, "error.html",
                        {"error": "Provided hash not found on VirusTotal"})

        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render(request, "submission/complete.html", {
                "tasks": task_ids,
                "tasks_count": tasks_count
            })
        else:
            return render(request, "error.html",
                          {"error": "Error adding task to Cuckoo's database."})
    else:
        # GET: collect everything the submission form needs.
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get(
            "enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [
            x.strip() for x in getattr(Config(machinery), machinery).get(
                "machines").split(",")
        ]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True

        # Available analysis packages are discovered from the analyzer tree.
        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render(
            request, "submission/index.html", {
                "packages": sorted(packages),
                "machines": machines,
                "gateways": settings.GATEWAYS,
                "config": enabledconf
            })
Esempio n. 34
0
def get_new_emails(db):
    """Poll the configured IMAP inbox for unseen messages and submit
    attached samples to Cuckoo for analysis.

    Every attachment is treated as a (password-protected) archive and
    unpacked; each extracted file not already tracked in the CUCKOOMX
    table is stored and queued as a new analysis task, while known files
    trigger a notification instead.  Processed messages are flagged
    ``\\Seen``.

    :param db: database session used for the CuckooMX bookkeeping table.
    """
    # NOTE(review): protocol debugging is left enabled here — presumably
    # intentional for troubleshooting, but verify before production use.
    imaplib.IMAP4.debug = imaplib.IMAP4_SSL.debug = 1

    conn = imaplib.IMAP4_SSL(email_config.cuckoomx.get("server"))
    conn.login(email_config.cuckoomx.get("user"),
               email_config.cuckoomx.get("password"))
    conn.select("Inbox")

    (retcode, messages) = conn.search(None, "(UNSEEN)")
    if retcode == "OK" and messages:
        for num in messages[0].split(" "):
            if num:
                typ, data = conn.fetch(num, "(RFC822)")
                msg = email.message_from_string(data[0][1])
                if msg:
                    email_dict = dict()
                    email_dict["Attachments"] = list()
                    for k, v in msg.items():
                        email_dict[k] = v

                    if email_dict.get("Subject", ""):
                        print("[+] Procesing email with Subject: {0}".format(
                            email_dict["Subject"]))
                    for part in msg.walk():
                        if not part.get_filename():
                            continue

                        # get_payload(decode=True) returns None for multipart
                        # container parts, so check before using the payload.
                        # (The original hashed the payload *before* this
                        # check, which crashed on a None payload; the digest
                        # was unused anyway.)
                        attachment = part.get_payload(decode=True)
                        if attachment:
                            # unpack it
                            z = ZipFile(
                                File(contents=attachment,
                                     password=email_config.cuckoomx.get(
                                         "archive_password")))
                            files = list(
                                z.unpack(
                                    password=email_config.cuckoomx.get(
                                        "archive_password"),
                                    duplicates=[]))
                            for file in files:
                                new_file = db.query(CUCKOOMX).filter(
                                    CUCKOOMX.sha256 ==
                                    file.sha256).first()
                                if new_file is None:
                                    new_file = CUCKOOMX(sha256=file.sha256)

                                    temp_file_path = store_temp_file(
                                        file.contents, file.filename)
                                    task_id = main_db.add_path(
                                        file_path=temp_file_path)
                                    new_file.cuckoo_id = task_id
                                    new_file.email = email_dict.get(
                                        "From", "")
                                    db.add(new_file)
                                    db.commit()
                                else:
                                    # Already analyzed: notify instead of
                                    # resubmitting the sample.
                                    send_notification(db, new_file)
                # mark as seen (raw string: "\S" is not a valid escape)
                typ, data = conn.store(num, "+FLAGS", r"\Seen")

    conn.close()
    conn.logout()
Esempio n. 35
0
def index(request):
    """Render the experiment submission form (GET) or queue tasks (POST).

    POST accepts either an uploaded sample or a URL and creates one task
    per selected machine.  When the "recurring" checkbox is set, the task
    is registered as recurrent and tagged ``longterm``; otherwise it is a
    one-shot task.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Timeout is in minute, but we handle seconds in the backend
        timeout = force_int(request.POST.get("timeout")) * 60
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        tags = request.POST.get("tags", "")
        experiment_name = request.POST.get("experiment_name", "")

        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        # Default to a one-shot task; switch to recurrent when requested.
        # (A redundant earlier read of the "recurring" field was removed:
        # it was unconditionally overwritten here.)
        recurring = TASK_SINGLE
        if request.POST.get("recurring"):
            recurring = TASK_RECURRENT
            tags = "longterm,%s" % tags

        db = Database()
        task_ids = []
        task_machines = []

        if machine.lower() == "all":
            for entry in db.list_machines(locked=False):
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            if request.FILES["sample"].size == 0:
                return render_to_response(
                    "error.html", {"error": "You uploaded an empty file."},
                    context_instance=RequestContext(request))
            elif request.FILES["sample"].size > settings.MAX_UPLOAD_SIZE:
                return render_to_response("error.html", {
                    "error":
                    "You uploaded a file that exceeds that maximum allowed upload size."
                },
                                          context_instance=RequestContext(
                                              request))

            # Moving sample from django temporary file to Cuckoo temporary storage to
            # let it persist between reboot (if user like to configure it in that way).
            path = store_temp_file(request.FILES["sample"].read(),
                                   request.FILES["sample"].name)

            for entry in task_machines:
                task_id = db.add_path(file_path=path,
                                      package=package,
                                      timeout=timeout,
                                      options=options,
                                      priority=priority,
                                      machine=entry,
                                      custom=custom,
                                      memory=memory,
                                      enforce_timeout=enforce_timeout,
                                      tags=tags,
                                      name=experiment_name,
                                      repeat=recurring)
                if task_id:
                    task_ids.append(task_id)
        elif "url" in request.POST:
            url = request.POST.get("url").strip()
            if not url:
                return render_to_response(
                    "error.html", {"error": "You specified an invalid URL!"},
                    context_instance=RequestContext(request))

            for entry in task_machines:
                task_id = db.add_url(url=url,
                                     package=package,
                                     timeout=timeout,
                                     options=options,
                                     priority=priority,
                                     machine=entry,
                                     custom=custom,
                                     memory=memory,
                                     enforce_timeout=enforce_timeout,
                                     tags=tags,
                                     name=experiment_name)
                if task_id:
                    task_ids.append(task_id)

        tasks_count = len(task_ids)
        if tasks_count > 0:
            if tasks_count == 1:
                message = "The analysis task was successfully added with ID {0}.".format(
                    task_ids[0])
            else:
                message = "The analysis task were successfully added with IDs {0}.".format(
                    ", ".join(str(i) for i in task_ids))

            return render_to_response("success.html", {"message": message},
                                      context_instance=RequestContext(request))
        else:
            return render_to_response(
                "error.html",
                {"error": "Error adding task to Cuckoo's database."},
                context_instance=RequestContext(request))
    else:
        # GET: discover the available analysis packages from the analyzer tree.
        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines(locked=False):
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.name + ": " + ", ".join(tags)
            else:
                label = machine.name

            machines.append((machine.name, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render_to_response("submission/index.html", {
            "packages": sorted(packages),
            "machines": machines
        },
                                  context_instance=RequestContext(request))
Esempio n. 36
0
def cuckoo_status():
    """Return a JSON health overview of the Cuckoo host.

    Includes version, hostname, machine and task counts, disk space for the
    storage and temporary paths, CPU load average, and the percentage of
    available system memory (Linux only, read from /proc/meminfo).
    """
    # In order to keep track of the diskspace statistics of the temporary
    # directory we create a temporary file so we can statvfs() on that.
    temp_file = store_temp_file("", "status")

    paths = dict(
        binaries=os.path.join(CUCKOO_ROOT, "storage", "binaries"),
        analyses=os.path.join(CUCKOO_ROOT, "storage", "analyses"),
        temporary=temp_file,
    )

    # statvfs() only exists on POSIX platforms.
    # NOTE(review): "temporary" points at a *file*, so the isdir() test
    # skips it — confirm whether the parent directory was intended.
    diskspace = {}
    for key, path in paths.items():
        if hasattr(os, "statvfs") and os.path.isdir(path):
            stats = os.statvfs(path)
            diskspace[key] = dict(
                free=stats.f_bavail * stats.f_frsize,
                total=stats.f_blocks * stats.f_frsize,
                used=(stats.f_blocks - stats.f_bavail) * stats.f_frsize,
            )

    # Now we remove the temporary file and its parent directory.
    os.unlink(temp_file)
    os.rmdir(os.path.dirname(temp_file))

    # Get the CPU load.
    if hasattr(os, "getloadavg"):
        cpuload = os.getloadavg()
    else:
        cpuload = []

    if os.path.isfile("/proc/meminfo"):
        values = {}
        # Context manager ensures the /proc handle is closed promptly.
        with open("/proc/meminfo") as meminfo:
            for line in meminfo:
                key, value = line.split(":", 1)
                values[key.strip()] = value.replace("kB", "").strip()

        if "MemAvailable" in values and "MemTotal" in values:
            # Bugfix: read MemAvailable — the key this guard checks for —
            # instead of MemFree, which ignores reclaimable cache/buffers.
            memory = 100.0 * int(values["MemAvailable"]) / int(values["MemTotal"])
        else:
            memory = None
    else:
        memory = None

    response = dict(
        version=CUCKOO_VERSION,
        hostname=socket.gethostname(),
        machines=dict(
            total=len(db.list_machines()),
            available=db.count_machines_available()
        ),
        tasks=dict(
            total=db.count_tasks(),
            pending=db.count_tasks("pending"),
            running=db.count_tasks("running"),
            completed=db.count_tasks("completed"),
            reported=db.count_tasks("reported")
        ),
        diskspace=diskspace,
        cpuload=cpuload,
        memory=memory,
    )

    return jsonify(response)
Esempio n. 37
0
def _remote_post(url, args, **kwargs):
    """POST to a remote Cuckoo API endpoint, applying CLI auth/SSL options.

    HTTP Basic Auth is used only when both --user and --password were given;
    certificate verification is controlled by --sslnoverify and only passed
    when --ssl is enabled, mirroring the original nested request branches.
    """
    if args.user and args.password:
        kwargs["auth"] = (args.user, args.password)
    if args.ssl:
        # verify=True unless the user explicitly disabled cert validation.
        kwargs["verify"] = not args.sslnoverify
    return requests.post(url, **kwargs)


def main():
    """Command-line entry point for submitting a target to Cuckoo.

    The target may be a URL (--url), a single file, or a folder (walked
    recursively, optionally filtered by --pattern).  Submission goes either
    to the local Cuckoo database or, with --remote IP:port, to a remote
    Cuckoo REST API over HTTP(S) with optional Basic Auth.

    Returns:
        False on a fatal error; otherwise falls off the end (None).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("target",
                        help="URL, path to the file or folder to analyze")
    parser.add_argument("-d", "--debug", action="store_true",
                        help="Enable debug logging")
    parser.add_argument("--remote", type=str, action="store", default=None,
                        help="Specify IP:port to a Cuckoo API server to submit remotely",
                        required=False)
    parser.add_argument("--user", type=str, action="store", default=None,
                        help="Username for Basic Auth", required=False)
    parser.add_argument("--password", type=str, action="store", default=None,
                        help="Password for Basic Auth", required=False)
    parser.add_argument("--sslnoverify", action="store_true", default=False,
                        help="Do not validate SSL cert", required=False)
    parser.add_argument("--ssl", action="store_true", default=False,
                        help="Use SSL/TLS for remote", required=False)
    parser.add_argument("--url", action="store_true", default=False,
                        help="Specify whether the target is an URL",
                        required=False)
    parser.add_argument("--package", type=str, action="store", default="",
                        help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="",
                        help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0,
                        help="Specify an analysis timeout", required=False)
    parser.add_argument(
        "--options", type=str, action="store", default="",
        help='Specify options for the analysis package (e.g. "name=value,name2=value2")',
        required=False)
    parser.add_argument(
        "--priority", type=int, action="store", default=1,
        help="Specify a priority for the analysis represented by an integer",
        required=False)
    parser.add_argument(
        "--machine", type=str, action="store", default="",
        help="Specify the identifier of a machine you want to use",
        required=False)
    parser.add_argument(
        "--platform", type=str, action="store", default="",
        help="Specify the operating system platform you want to use (windows/darwin/linux)",
        required=False)
    parser.add_argument(
        "--memory", action="store_true", default=False,
        help="Enable to take a memory dump of the analysis machine",
        required=False)
    parser.add_argument(
        "--enforce-timeout", action="store_true", default=False,
        help="Enable to force the analysis to run for the full timeout period",
        required=False)
    parser.add_argument("--clock", type=str, action="store", default=None,
                        help="Set virtual machine clock", required=False)
    parser.add_argument(
        "--tags", type=str, action="store", default=None,
        help="Specify tags identifier of a machine you want to use",
        required=False)
    parser.add_argument("--max", type=int, action="store", default=None,
                        help="Maximum samples to add in a row", required=False)
    parser.add_argument("--pattern", type=str, action="store", default=None,
                        help="Pattern of files to submit", required=False)
    parser.add_argument("--shuffle", action="store_true", default=False,
                        help="Shuffle samples before submitting them",
                        required=False)
    parser.add_argument("--unique", action="store_true", default=False,
                        help="Only submit new samples, ignore duplicates",
                        required=False)
    parser.add_argument("--quiet", action="store_true", default=False,
                        help="Only print text on failure", required=False)
    parser.add_argument("--procdump", action="store_true", default=False,
                        help="Dump, upload and process proc/memdumps",
                        required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    # If the quiet flag has been set, then we also disable the "warning"
    # level of the logging module. (E.g., when pydeep has not been installed,
    # there will be a warning message, because Cuckoo can't resolve the
    # ssdeep hash of this particular sample.)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig()

    if args.quiet:
        logging.disable(logging.WARNING)

    db = Database()

    target = to_unicode(args.target)

    # Cap the analysis timeout at 24 hours.
    sane_timeout = min(args.timeout, 60 * 60 * 24)

    if args.procdump:
        # Append to any user-supplied options.  The original code assigned
        # ",procdump=1" here, silently discarding everything passed via
        # --options whenever it was non-empty.
        if args.options:
            args.options += ",procdump=1"
        else:
            args.options = "procdump=1"

    if args.url:
        if args.remote:
            if not HAVE_REQUESTS:
                print((
                    bold(red("Error")) +
                    ": you need to install python-requests (`pip3 install requests`)"
                ))
                return False

            scheme = "https" if args.ssl else "http"
            url = "{0}://{1}/tasks/create/url".format(scheme, args.remote)

            data = dict(
                url=target,
                package=args.package,
                timeout=sane_timeout,
                options=args.options,
                priority=args.priority,
                machine=args.machine,
                platform=args.platform,
                memory=args.memory,
                enforce_timeout=args.enforce_timeout,
                custom=args.custom,
                tags=args.tags,
            )

            try:
                response = _remote_post(url, args, data=data)
            except Exception as e:
                print((bold(red("Error")) +
                       ": unable to send URL: {0}".format(e)))
                return False

            task_id = response.json()["task_id"]
        else:
            task_id = db.add_url(
                target,
                package=args.package,
                timeout=sane_timeout,
                options=args.options,
                priority=args.priority,
                machine=args.machine,
                platform=args.platform,
                custom=args.custom,
                memory=args.memory,
                enforce_timeout=args.enforce_timeout,
                clock=args.clock,
                tags=args.tags,
            )

        if task_id:
            if not args.quiet:
                print((bold(green("Success")) +
                       ': URL "{0}" added as task with ID {1}'.format(
                           target, task_id)))
        else:
            print((bold(red("Error")) + ": adding task to database"))
    else:
        # Get absolute path to deal with relative.
        path = to_unicode(os.path.abspath(target))
        if not os.path.exists(path):
            print((bold(red("Error")) +
                   ': the specified file/folder does not exist at path "{0}"'.
                   format(path)))
            return False

        # Collect the files to submit; folders are walked recursively and
        # --pattern (an fnmatch glob) filters by file name.
        files = []
        if os.path.isdir(path):
            for dirname, _, filenames in os.walk(path):
                for file_name in filenames:
                    file_path = os.path.join(dirname, file_name)

                    if not os.path.isfile(file_path):
                        continue

                    if not args.pattern or \
                            fnmatch.fnmatch(file_name, args.pattern):
                        files.append(to_unicode(file_path))
        else:
            files.append(path)

        if args.shuffle:
            random.shuffle(files)
        else:
            files = sorted(files)

        for file_path in files:
            if not File(file_path).get_size():
                if not args.quiet:
                    print((bold(
                        yellow("Empty") +
                        ": sample {0} (skipping file)".format(file_path))))

                continue

            if args.max is not None:
                # Break if the maximum number of samples has been reached.
                if not args.max:
                    break

                args.max -= 1

            if args.remote:
                if not HAVE_REQUESTS:
                    print((
                        bold(red("Error")) +
                        ": you need to install python-requests (`pip3 install requests`)"
                    ))
                    return False

                scheme = "https" if args.ssl else "http"
                url = "{0}://{1}/tasks/create/file".format(scheme, args.remote)

                data = dict(
                    package=args.package,
                    timeout=sane_timeout,
                    options=args.options,
                    priority=args.priority,
                    machine=args.machine,
                    platform=args.platform,
                    memory=args.memory,
                    enforce_timeout=args.enforce_timeout,
                    custom=args.custom,
                    tags=args.tags,
                )

                try:
                    # Context manager closes the sample handle after upload
                    # (the original leaked one handle per submitted file and
                    # also rebound the `files` list being iterated).
                    with open(file_path, "rb") as sample:
                        multipart = dict(
                            file=sample,
                            filename=os.path.basename(file_path))
                        response = _remote_post(url, args,
                                                files=multipart, data=data)
                except Exception as e:
                    print((bold(red("Error")) +
                           ": unable to send file: {0}".format(e)))
                    return False

                # NOTE(review): the API returns a list under "task_ids"; it
                # is wrapped in another list so the reporting below counts
                # one entry per submitted file — preserved as-is.
                task_ids = [response.json().get("task_ids")]

            else:
                if args.unique and db.check_file_uniq(
                        File(file_path).get_sha256()):
                    msg = ": Sample {0} (skipping file)".format(file_path)
                    if not args.quiet:
                        print((bold(yellow("Duplicate")) + msg))
                    continue

                try:
                    # Copy into Cuckoo's temporary storage; close the source
                    # handle promptly instead of relying on GC.
                    with open(file_path, "rb") as sample:
                        tmp_path = store_temp_file(
                            sample.read(),
                            sanitize_filename(os.path.basename(file_path)))
                    task_ids, extra_details = db.demux_sample_and_add_to_db(
                        file_path=tmp_path,
                        package=args.package,
                        timeout=sane_timeout,
                        options=args.options,
                        priority=args.priority,
                        machine=args.machine,
                        platform=args.platform,
                        memory=args.memory,
                        custom=args.custom,
                        enforce_timeout=args.enforce_timeout,
                        clock=args.clock,
                        tags=args.tags,
                    )
                except CuckooDemuxError as e:
                    task_ids = []
                    print((bold(red("Error")) + ": {0}".format(e)))
            tasks_count = len(task_ids)
            if tasks_count > 1:
                if not args.quiet:
                    print((bold(green("Success")) +
                           ': File "{0}" added as task with IDs {1}'.format(
                               file_path, task_ids)))
            elif tasks_count > 0:
                if not args.quiet:
                    print((bold(green("Success")) +
                           ': File "{0}" added as task with ID {1}'.format(
                               file_path, task_ids[0])))
            else:
                print((bold(red("Error")) + ": adding task to database"))
Esempio n. 38
0
def experiment_create_file():
    """Create a recurring ("longterm") experiment from an uploaded file.

    Validates the `runs` and `delta` form fields up front, copies the upload
    into Cuckoo's temporary storage, and schedules it as a recurrent task.
    Responds with the new task id and, when available, the experiment id.
    """
    forms = request.forms
    upload = request.files.file

    # `runs` bounds how many times the experiment repeats; it must look like
    # an integer (a leading minus sign is tolerated by this check).
    runs = forms.get("runs", "")
    if not runs.lstrip("-").isdigit():
        return HTTPError(
            400,
            "Please provide the `runs` variable indicating the maximum "
            "number of times this experiment should run."
        )

    # `delta` is the duration of each individual analysis run.
    delta = time_duration(forms.get("delta"))
    if not delta:
        return HTTPError(
            400,
            "Please provide a proper `delta` to specify the length of "
            "each analysis run."
        )

    # Checkbox-style booleans: any truthy form value is normalized to True.
    memory = forms.get("memory", False)
    if memory:
        memory = True

    enforce_timeout = forms.get("enforce_timeout", False)
    if enforce_timeout:
        enforce_timeout = True

    temp_file_path = store_temp_file(upload.file.read(), upload.filename)
    task_id = db.add_path(
        file_path=temp_file_path,
        package=forms.get("package", ""),
        timeout=time_duration(forms.get("timeout", "")),
        priority=forms.get("priority", 1),
        options=forms.get("options", ""),
        machine=forms.get("machine", ""),
        platform=forms.get("platform", ""),
        # Every experiment is implicitly tagged "longterm".
        tags="longterm," + forms.get("tags", ""),
        custom=forms.get("custom", ""),
        memory=memory,
        enforce_timeout=enforce_timeout,
        clock=forms.get("clock", None),
        name=forms.get("name", None),
        repeat=TASK_RECURRENT,
        delta=delta,
        runs=runs,
    )

    response = {}
    if task_id:
        response["experiment_id"] = db.view_task(task_id).experiment_id

    response["task_id"] = task_id
    return jsonize(response)
Esempio n. 39
0
def index(request):
    """Django submission view.

    POST: queue one analysis task per uploaded sample (or per URL) for each
    selected machine and render a completion page with the new task ids.
    GET: render the submission form listing the available analysis packages
    and machines.

    NOTE(review): uses the long-removed ``render_to_response(...,
    context_instance=RequestContext(request))`` API — this code targets a
    legacy Django version; confirm before porting.
    """
    if request.method == "POST":
        # Optional analysis parameters; force_int coerces the numeric fields.
        package = request.POST.get("package", "")
        timeout = force_int(request.POST.get("timeout"))
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        tags = request.POST.get("tags", None)

        # Checkbox-style flags are folded into the comma-separated `options`
        # string understood by the analyzer.
        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        db = Database()
        task_ids = []
        task_machines = []

        # "all" expands to every configured machine label; otherwise a single
        # entry (the empty string means "first available machine").
        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            for sample in request.FILES.getlist("sample"):
                # Reject empty and oversized uploads before touching disk.
                if sample.size == 0:
                    return render_to_response("error.html",
                                              {"error": "You uploaded an empty file."},
                                              context_instance=RequestContext(request))
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render_to_response("error.html",
                                              {"error": "You uploaded a file that exceeds that maximum allowed upload size."},
                                              context_instance=RequestContext(request))

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(),
                                       sample.name)

                # One task per (sample, machine) pair.
                for entry in task_machines:
                    task_id = db.add_path(file_path=path,
                                          package=package,
                                          timeout=timeout,
                                          options=options,
                                          priority=priority,
                                          machine=entry,
                                          custom=custom,
                                          memory=memory,
                                          enforce_timeout=enforce_timeout,
                                          tags=tags)
                    if task_id:
                        task_ids.append(task_id)
        elif "url" in request.POST:
            url = request.POST.get("url").strip()
            if not url:
                return render_to_response("error.html",
                                          {"error": "You specified an invalid URL!"},
                                          context_instance=RequestContext(request))

            # One task per machine for the submitted URL.
            for entry in task_machines:
                task_id = db.add_url(url=url,
                                     package=package,
                                     timeout=timeout,
                                     options=options,
                                     priority=priority,
                                     machine=entry,
                                     custom=custom,
                                     memory=memory,
                                     enforce_timeout=enforce_timeout,
                                     tags=tags)
                if task_id:
                    task_ids.append(task_id)

        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render_to_response("submission/complete.html",
                                      {"tasks": task_ids,
                                       "tasks_count": tasks_count},
                                      context_instance=RequestContext(request))
        else:
            return render_to_response("error.html",
                                      {"error": "Error adding task to Cuckoo's database."},
                                      context_instance=RequestContext(request))
    else:
        # GET: discover the analysis packages from the analyzer module tree.
        files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer", "windows", "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render_to_response("submission/index.html",
                                  {"packages": sorted(packages),
                                   "machines": machines},
                                  context_instance=RequestContext(request))
Esempio n. 40
0
def _append_option(options, option):
    """Return `options` with `option` appended to the comma-separated list."""
    if options:
        options += ","
    return options + option


def index(request):
    """Django submission view with gateway and VirusTotal-download support.

    POST: queue analysis tasks for uploaded samples, a URL, or hashes fetched
    from VirusTotal (when enabled), and render a completion or error page.
    GET: render the submission form with the available packages, machines,
    gateways and the VirusTotal-download toggle.

    NOTE(review): uses the long-removed ``render_to_response(...,
    context_instance=RequestContext(request))`` API — legacy Django only.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Cap the analysis timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))

        tags = request.POST.get("tags", None)

        # Checkbox-style flags are folded into the comma-separated `options`
        # string understood by the analyzer.
        if request.POST.get("free"):
            options = _append_option(options, "free=yes")

        if request.POST.get("nohuman"):
            options = _append_option(options, "nohuman=yes")

        if request.POST.get("tor"):
            options = _append_option(options, "tor=yes")

        if request.POST.get("process_memory"):
            options = _append_option(options, "procmemdump=yes")

        if request.POST.get("kernel_analysis"):
            options = _append_option(options, "kernel_analysis=yes")

        if gateway and gateway in settings.GATEWAYS:
            # A comma-separated gateway entry is a pool of gateway names:
            # pick one at random and resolve it to its address.
            if "," in settings.GATEWAYS[gateway]:
                tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
                ngateway = settings.GATEWAYS[tgateway]
            else:
                ngateway = settings.GATEWAYS[gateway]
            options = _append_option(options, "setgw=%s" % (ngateway))

        db = Database()
        task_ids = []
        task_machines = []

        # "all" expands to every configured machine label; otherwise a single
        # entry (the empty string means "first available machine").
        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            for sample in request.FILES.getlist("sample"):
                # Reject empty and oversized uploads before touching disk.
                if sample.size == 0:
                    return render_to_response(
                        "error.html", {"error": "You uploaded an empty file."},
                        context_instance=RequestContext(request))
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render_to_response("error.html", {
                        "error":
                        "You uploaded a file that exceeds that maximum allowed upload size."
                    },
                                              context_instance=RequestContext(
                                                  request))

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                for entry in task_machines:
                    task_ids_new = db.demux_sample_and_add_to_db(
                        file_path=path,
                        package=package,
                        timeout=timeout,
                        options=options,
                        priority=priority,
                        machine=entry,
                        custom=custom,
                        memory=memory,
                        enforce_timeout=enforce_timeout,
                        tags=tags,
                        clock=clock)
                    task_ids.extend(task_ids_new)
        elif "url" in request.POST and request.POST.get("url").strip():
            url = request.POST.get("url").strip()
            # Defensive re-check; unreachable given the elif guard above.
            if not url:
                return render_to_response(
                    "error.html", {"error": "You specified an invalid URL!"},
                    context_instance=RequestContext(request))

            for entry in task_machines:
                task_id = db.add_url(url=url,
                                     package=package,
                                     timeout=timeout,
                                     options=options,
                                     priority=priority,
                                     machine=entry,
                                     custom=custom,
                                     memory=memory,
                                     enforce_timeout=enforce_timeout,
                                     tags=tags,
                                     clock=clock)
                if task_id:
                    task_ids.append(task_id)
        elif settings.VTDL_ENABLED and "vtdl" in request.POST:
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY
                    and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render_to_response("error.html", {
                    "error":
                    "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"
                },
                                          context_instance=RequestContext(
                                              request))
            else:
                # NOTE(review): this temporary directory is never removed;
                # downloads accumulate under VTDL_PATH.
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl',
                                            dir=settings.VTDL_PATH)
                hashlist = []
                if "," in vtdl:
                    hashlist = vtdl.split(",")
                else:
                    hashlist.append(vtdl)
                onesuccess = False

                for h in hashlist:
                    filename = base_dir + "/" + h
                    # Private API keys use the file/download endpoint;
                    # Intelligence keys use the intelligence one.
                    if settings.VTDL_PRIV_KEY:
                        url = 'https://www.virustotal.com/vtapi/v2/file/download'
                        params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
                    else:
                        url = 'https://www.virustotal.com/intelligence/download/'
                        params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}

                    try:
                        r = requests.get(url, params=params, verify=True)
                    except requests.exceptions.RequestException as e:
                        return render_to_response(
                            "error.html", {
                                "error":
                                "Error completing connection to VirusTotal: {0}"
                                .format(e)
                            },
                            context_instance=RequestContext(request))
                    if r.status_code == 200:
                        try:
                            # Context manager closes the handle even when the
                            # write fails; the original used a bare `except:`
                            # (swallowing SystemExit/KeyboardInterrupt too)
                            # and leaked the handle on error.
                            with open(filename, 'wb') as f:
                                f.write(r.content)
                        except OSError:
                            return render_to_response("error.html", {
                                "error":
                                "Error writing VirusTotal download file to temporary path"
                            },
                                                      context_instance=
                                                      RequestContext(request))

                        onesuccess = True

                        for entry in task_machines:
                            task_ids_new = db.demux_sample_and_add_to_db(
                                file_path=filename,
                                package=package,
                                timeout=timeout,
                                options=options,
                                priority=priority,
                                machine=entry,
                                custom=custom,
                                memory=memory,
                                enforce_timeout=enforce_timeout,
                                tags=tags,
                                clock=clock)
                            task_ids.extend(task_ids_new)
                    elif r.status_code == 403:
                        return render_to_response("error.html", {
                            "error":
                            "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"
                        },
                                                  context_instance=
                                                  RequestContext(request))

                if not onesuccess:
                    return render_to_response(
                        "error.html",
                        {"error": "Provided hash not found on VirusTotal"},
                        context_instance=RequestContext(request))

        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render_to_response("submission/complete.html", {
                "tasks": task_ids,
                "tasks_count": tasks_count
            },
                                      context_instance=RequestContext(request))
        else:
            return render_to_response(
                "error.html",
                {"error": "Error adding task to Cuckoo's database."},
                context_instance=RequestContext(request))
    else:
        # GET: discover the analysis packages from the analyzer module tree.
        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render_to_response("submission/index.html", {
            "packages": sorted(packages),
            "machines": machines,
            "gateways": settings.GATEWAYS,
            "vtdlenabled": settings.VTDL_ENABLED
        },
                                  context_instance=RequestContext(request))
Esempio n. 41
0
def tasks_create_file(request):
    """API endpoint (POST only): queue analysis task(s) for uploaded file(s).

    Reads submission options from request.POST, validates the requested
    machine against db.list_machines(), enforces the configured upload
    size limit, stores each sample with store_temp_file() and queues one
    task per (file, machine) pair via db.add_path().

    Returns a jsonize()d dict: {"error": True, "error_value": ...} on
    failure, otherwise "task_ids" plus a human-readable "data" message
    (and "url" status callbacks when enabled in apiconf).

    NOTE(review): apiconf, db, jsonize, store_temp_file and force_int are
    module-level names defined elsewhere in this file.
    """
    resp = {}
    if request.method == "POST":
        # Check if this API function is enabled
        if not apiconf.filecreate.get("enabled"):
            resp = {"error": True,
                    "error_value": "File Create API is Disabled"}
            return jsonize(resp, response=True)
        # Check if files are actually provided
        if request.FILES.getlist("file") == []:
            resp = {"error": True, "error_value": "No file was submitted"}
            return jsonize(resp, response=True)
        resp["error"] = False
        # Parse potential POST options (see submission/views.py)
        package = request.POST.get("package", "")
        timeout = force_int(request.POST.get("timeout"))
        priority = force_int(request.POST.get("priority"))
        options = request.POST.get("options", "")
        machine = request.POST.get("machine", "")
        platform = request.POST.get("platform", "")
        tags = request.POST.get("tags", None)
        custom = request.POST.get("custom", "")
        # NOTE(review): bool() on the raw POST string means any non-empty
        # value (even "false") enables these flags.
        memory = bool(request.POST.get("memory", False))
        clock = request.POST.get("clock", None)
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))

        task_ids = []
        task_machines = []
        vm_list = []
        for vm in db.list_machines():
            vm_list.append(vm.label)

        if machine.lower() == "all":
            if not apiconf.filecreate.get("allmachines"):
                resp = {"error": True,
                        "error_value": "Machine=all is disabled using the API"}
                return jsonize(resp, response=True)
            for entry in vm_list:
                task_machines.append(entry)
        else:
            # Check if VM is in our machines table
            if machine == "" or machine in vm_list:
                task_machines.append(machine)
            # Error if its not
            else:
                resp = {"error": True,
                        "error_value": ("Machine '{0}' does not exist. "
                                        "Available: {1}".format(machine,
                                        ", ".join(vm_list)))}
                return jsonize(resp, response=True)
        # Parse a max file size to be uploaded
        max_file_size = apiconf.filecreate.get("upload_limit")
        if not max_file_size or int(max_file_size) == 0:
            # Default limit: 5 MB (value held in bytes).
            max_file_size = 5 * 1048576
        else:
            # Configured limit is expressed in MB.
            max_file_size = int(max_file_size) * 1048576
        # Check if we are allowing multiple file submissions
        multifile = apiconf.filecreate.get("multifile")
        if multifile:
            # Handle all files
            for sample in request.FILES.getlist("file"):
                if sample.size == 0:
                    resp = {"error": True,
                            "error_value": "You submitted an empty file"}
                    return jsonize(resp, response=True)
                if sample.size > max_file_size:
                    resp = {"error": True,
                            "error_value": "File size exceeds API limit"}
                    return jsonize(resp, response=True)
                path = store_temp_file(sample.read(), sample.name)
                # One task per requested machine for this sample.
                for entry in task_machines:
                    task_id = db.add_path(file_path=path,
                                          package=package,
                                          timeout=timeout,
                                          priority=priority,
                                          options=options,
                                          machine=entry,
                                          platform=platform,
                                          tags=tags,
                                          custom=custom,
                                          memory=memory,
                                          enforce_timeout=enforce_timeout,
                                          clock=clock,
                                          )
                    if task_id:
                        task_ids.append(task_id)
        else:
            # Grab the first file
            sample = request.FILES.getlist("file")[0]
            if sample.size == 0:
                resp = {"error": True,
                        "error_value": "You submitted an empty file"}
                return jsonize(resp, response=True)
            if sample.size > max_file_size:
                resp = {"error": True,
                        "error_value": "File size exceeds API limit"}
                return jsonize(resp, response=True)
            if len(request.FILES.getlist("file")) > 1:
                resp["warning"] = ("Multi-file API submissions disabled - "
                                   "Accepting first file")
            path = store_temp_file(sample.read(), sample.name)
            for entry in task_machines:
                task_id = db.add_path(file_path=path,
                                      package=package,
                                      timeout=timeout,
                                      priority=priority,
                                      options=options,
                                      machine=entry,
                                      platform=platform,
                                      tags=tags,
                                      custom=custom,
                                      memory=memory,
                                      enforce_timeout=enforce_timeout,
                                      clock=clock,
                                      )
                if task_id:
                    task_ids.append(task_id)

        if len(task_ids) > 0:
            resp["task_ids"] = task_ids
            # "status" enables callback URLs pointing at the status pages.
            callback = apiconf.filecreate.get("status")
            if len(task_ids) == 1:
                resp["data"] = "Task ID {0} has been submitted".format(
                               str(task_ids[0]))
                if callback:
                    resp["url"] = ["{0}/submit/status/{1}/".format(
                                  apiconf.api.get("url"), task_ids[0])]
            else:
                resp["task_ids"] = task_ids
                resp["data"] = "Task IDs {0} have been submitted".format(
                               ", ".join(str(x) for x in task_ids))
                if callback:
                    resp["url"] = list()
                    for tid in task_ids:
                        resp["url"].append("{0}/submit/status/{1}".format(
                                           apiconf.api.get("url"), tid))
        else:
            resp = {"error": True,
                    "error_value": "Error adding task to database"}
        return jsonize(resp, response=True)

    else:
        resp = {"error": True, "error_value": "Method not allowed"}
        return jsonize(resp, response=True)
Esempio n. 42
0
def import_analysis(request):
    """Import previously exported analyses from uploaded .zip archives.

    POST: every uploaded "sample" must be a .zip containing an
    analysis.json export (and, for file targets, a "binary" payload).
    Each export's original target is re-queued as a new task through
    Database().add_path() / add_url(), then the completion page is
    rendered.  GET (or POST with no queued tasks): renders the import
    form.

    Fixes vs the previous revision:
    - paths built with os.path.join instead of hard-coded "\\" separators
      (the old code only worked on Windows);
    - the ZipFile handle is closed via a context manager;
    - the completion page is rendered after ALL samples are processed
      (previously the return sat inside the loop, so only the first
      sample was ever imported).
    """
    if request.method == "POST":
        db = Database()
        task_ids = []
        samples = request.FILES.getlist("sample")

        for sample in samples:
            # Error if there was only one submitted sample and it's empty.
            # But if there are multiple and one was empty, just ignore it.
            if not sample.size:
                if len(samples) != 1:
                    continue
                return render_to_response(
                    "error.html",
                    {"error": "You uploaded an empty file."},
                    context_instance=RequestContext(request))
            elif sample.size > settings.MAX_UPLOAD_SIZE:
                return render_to_response(
                    "error.html",
                    {"error": "You uploaded a file that exceeds that maximum allowed upload size."},
                    context_instance=RequestContext(request))

            if not sample.name.endswith(".zip"):
                return render_to_response(
                    "error.html",
                    {"error": "You uploaded a file that wasn't a .zip."},
                    context_instance=RequestContext(request))

            path = store_temp_file(sample.read(), sample.name)

            # Extract next to the stored temp file, into a directory named
            # after the archive (without the .zip extension).
            extract_path = os.path.join(os.path.dirname(path),
                                        os.path.splitext(sample.name)[0])
            with zipfile.ZipFile(path) as zf:
                zf.extractall(extract_path)

            report = os.path.join(extract_path, "analysis.json")
            if not os.path.isfile(report):
                return render_to_response(
                    "error.html",
                    {"error": "No analysis.json found!"},
                    context_instance=RequestContext(request))

            with open(report) as json_file:
                json_data = json.load(json_file)
            category = json_data["Target"]["category"]

            if category == "file":
                binary = os.path.join(extract_path, "binary")

                if os.path.isfile(binary):
                    task_id = db.add_path(file_path=binary,
                                          package="",
                                          timeout=0,
                                          options="",
                                          priority=0,
                                          machine="",
                                          custom="",
                                          memory=False,
                                          enforce_timeout=False,
                                          tags=None)
                    if task_id:
                        task_ids.append(task_id)

            elif category == "url":
                url = json_data["Target"]["url"]
                if not url:
                    return render_to_response(
                        "error.html",
                        {"error": "You specified an invalid URL!"},
                        context_instance=RequestContext(request))

                task_id = db.add_url(url=url,
                                     package="",
                                     timeout=0,
                                     options="",
                                     priority=0,
                                     machine="",
                                     custom="",
                                     memory=False,
                                     enforce_timeout=False,
                                     tags=None)
                if task_id:
                    task_ids.append(task_id)

        # Render the completion page once every sample has been handled.
        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render_to_response(
                "submission/complete.html",
                {"tasks": task_ids,
                 "tasks_count": tasks_count,
                 "baseurl": request.build_absolute_uri('/')[:-1]},
                context_instance=RequestContext(request))

    return render_to_response("analysis/import.html",
                              context_instance=RequestContext(request))
Esempio n. 43
0
def submit_file(request):
    """Web submission view: queue an uploaded sample for analysis.

    POST: parses submission options from the form, folds the free-form
    checkboxes (free/nohuman/tor/...) into the "options" string, looks up
    previous analyses of the same file by MD5 in results_db and either
    shows the similar-analyses confirmation page or queues the sample
    via db.demux_sample_and_add_to_db().  GET: renders the submission
    form with packages/machines/config discovered from disk and config
    files.

    NOTE(review): contains Python 2 debug output (print / pp.pprint)
    that should be removed or routed through logging.
    NOTE(review): if the POST carries no "sample" file the function
    falls through without an explicit return (returns None) — verify
    against callers/middleware.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Cap the timeout at 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))
        status = bool(request.POST.get("user_status", False))
        if not status:
            user_status = 0
        else:
            user_status = 1

        # Anonymous submissions are attributed to user id 1.
        if request.user.id == None:
            user_id = 1
        else:
            user_id = request.user.id

        tags = request.POST.get("tags", None)

        # Map form checkboxes onto comma-separated analyzer options.
        if request.POST.get("free"):
            if options:
                options += ","
            options += "free=yes"

        if request.POST.get("nohuman"):
            if options:
                options += ","
            options += "nohuman=yes"

        if request.POST.get("tor"):
            if options:
                options += ","
            options += "tor=yes"

        if request.POST.get("process_memory"):
            if options:
                options += ","
            options += "procmemdump=yes"

        if request.POST.get("kernel_analysis"):
            if options:
                options += ","
            options += "kernel_analysis=yes"

        # A gateway entry may hold a comma-separated pool; pick one at random.
        if gateway and gateway in settings.GATEWAYS:
            if "," in settings.GATEWAYS[gateway]:
                tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
                ngateway = settings.GATEWAYS[tgateway]
            else:
                ngateway = settings.GATEWAYS[gateway]
            if options:
                options += ","
            options += "setgw=%s" % (ngateway)

        db = Database()
        task_ids = []
        task_machines = []

        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:

            for sample in request.FILES.getlist("sample"):
                if sample.size == 0:
                    return render_to_response(
                        "error.html", {"error": "You uploaded an empty file."},
                        context_instance=RequestContext(request))
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render_to_response("error.html", {
                        "error":
                        "You uploaded a file that exceeds that maximum allowed upload size."
                    },
                                              context_instance=RequestContext(
                                                  request))

                # Moving sample from django temporary file to Cuckoo temporary storage to
                # let it persist between reboot (if user like to configure it in that way).
                print type(sample.name)
                path = store_temp_file(sample.read(), sample.name)
                pp.pprint("\nFile Path is %s\n" % path)
                currentMD5 = until.getBigFileMD5(path)

                # Previous analyses of the same file, newest first.
                provious_analysis = results_db.analysis.find({
                    "target.file.md5":
                    currentMD5
                }).sort([["_id", -1]])

                task = []

                for single in provious_analysis:
                    #pp.pprint(single)
                    single["info"]["base64"] = until.encrpt(
                        single["info"]["id"])
                    single["info"]["filename"] = single["target"]["file"][
                        "name"]
                    pp.pprint(single["info"])
                    task.append(single["info"])

                # Serialized submission parameters, passed to the
                # "similar analyses" page so the user can confirm resubmission.
                second_post = json.dumps(
                    {
                        "file_path": path,
                        "package": package,
                        "timeout": timeout,
                        "options": options,
                        "machine": machine,
                        "priority": priority,
                        "custom": custom,
                        "memory": memory,
                        "enforce_timeout": enforce_timeout,
                        "tags": tags,
                        "clock": clock,
                        "user_status": user_status,
                        "user_id": user_id
                    },
                    sort_keys=True)
                pp.pprint(second_post)

                if provious_analysis.count() >= 1:
                    return render_to_response(
                        "submission/ShowSimilar.html", {
                            "tasks": task,
                            "params": second_post
                        },
                        context_instance=RequestContext(request))
                else:
                    #tempfilePath = request.POST.get("file_path", "")
                    for entry in task_machines:
                        task_ids_new = db.demux_sample_and_add_to_db(
                            file_path=path,
                            package=package,
                            timeout=timeout,
                            options=options,
                            priority=priority,
                            machine=entry,
                            custom=custom,
                            memory=memory,
                            enforce_timeout=enforce_timeout,
                            tags=tags,
                            clock=clock,
                            user_status=user_status,
                            user_id=user_id)
                    pp.pprint(task_ids_new)
                    # Task ids are exposed to the user in encrypted form.
                    final_task_ids = []
                    for taskId in task_ids_new:
                        final_task_ids.append(until.encrpt(taskId))
                    task_ids.extend(final_task_ids)

                    tasks_count = len(task_ids)
                    pp.pprint(task_ids)

                    if tasks_count > 0:

                        return render_to_response(
                            "submission/complete.html", {
                                "tasks": task_ids,
                                "tasks_count": tasks_count
                            },
                            context_instance=RequestContext(request))
                    else:
                        return render_to_response(
                            "error.html", {
                                "error":
                                "Error adding task to Cuckoo's database."
                            },
                            context_instance=RequestContext(request))
    else:
        enabledconf = dict()
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = Config("processing").memory.get("enabled")
        enabledconf["procmemory"] = Config("processing").procmemory.get(
            "enabled")
        enabledconf["tor"] = Config("auxiliary").tor.get("enabled")
        if Config("auxiliary").gateways:
            enabledconf["gateways"] = True
        else:
            enabledconf["gateways"] = False
        enabledconf["tags"] = False
        # Get enabled machinery
        machinery = Config("cuckoo").cuckoo.get("machinery")
        # Get VM names for machinery config elements
        vms = [
            x.strip() for x in getattr(Config(machinery), machinery).get(
                "machines").split(",")
        ]
        # Check each VM config element for tags
        for vmtag in vms:
            if "tags" in getattr(Config(machinery), vmtag).keys():
                enabledconf["tags"] = True

        files = os.listdir(
            os.path.join(settings.CUCKOO_PATH, "analyzer", "windows",
                         "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render_to_response("submission/submit_file.html", {
            "packages": sorted(packages),
            "machines": machines,
            "gateways": settings.GATEWAYS,
            "config": enabledconf
        },
                                  context_instance=RequestContext(request))
Esempio n. 44
0
def submit():
    """Handle the submission form POST: queue a URL or an uploaded file.

    Validates that priority is an integer, that pool_id is a known pool
    and that either a file or a URL was supplied; on any validation
    error the submit form is re-rendered with the error context.  URL
    submissions may be prefixed with a configured redirector (rddict).
    Returns the rendered success template with the new task id.

    Fixes vs the previous revision:
    - the `errors` check now runs before branching on URL vs file, so a
      submission with neither no longer falls into the file path and
      crashes on `data.file`;
    - `dict.has_key()` / `== None` replaced with `in` / `is None`.
    """
    context = {}
    errors = False

    package = request.forms.get("package", "")
    options = request.forms.get("options", "")
    priority = request.forms.get("priority", 1)
    timeout = request.forms.get("timeout", "")
    url = request.forms.get("url", "")
    urlrd = request.forms.get("urlrd", "")
    pool_id = request.forms.get("pool_id", "default")
    data = request.files.file

    try:
        priority = int(priority)
    except ValueError:
        context["error_toggle"] = True
        context["error_priority"] = "Needs to be a number"
        errors = True

    if pool_id not in pools:
        context["error_toggle"] = True
        context["error_pool_id"] = "Invalid Pool"
        errors = True
        print("poolid %s not in %s" % (pool_id, pools))

    # File or URL mandatory
    if (data is None or data == "") and (url is None or url == ""):
        context["error_toggle"] = True
        context["error_file"] = "Mandatory"
        errors = True

    if url and url != "":
        # Optionally rewrite the URL through the selected redirector.
        if urlrd is not None and urlrd != "" and urlrd != "None":
            if urlrd in rddict:
                url = "%s%s" % (rddict[urlrd], url)
            else:
                context["error_toggle"] = True
                context["error_urlrd"] = "Invalid Redirector"
                errors = True
                print("urlrd %s not in %s" % (urlrd, rddict))

    # Re-render the form on ANY validation error before touching `data`.
    if errors:
        template = env.get_template("submit.html")
        return template.render({"timeout" : timeout,
                                "priority" : priority,
                                "options" : options,
                                "package" : package,
                                "context" : context,
                                "pool_id" : pool_id})

    if url and url != "":
        task_id = db.add_url(url,
                             package=package,
                             timeout=timeout,
                             options=options,
                             priority=priority,
                             pool_id=pool_id)

        template = env.get_template("success.html")
        return template.render({"taskid" : task_id,
                                "url" : url.decode("utf-8")})

    temp_file_path = store_temp_file(data.file.read(), data.filename)

    task_id = db.add_path(file_path=temp_file_path,
                          timeout=timeout,
                          priority=priority,
                          options=options,
                          package=package,
                          pool_id=pool_id)

    template = env.get_template("success.html")
    return template.render({"taskid" : task_id,
                            "submitfile" : data.filename.decode("utf-8")})
Esempio n. 45
0
def submit():
    """Queue an uploaded sample for analysis from the submission form.

    Validates that priority parses as an integer and that a file was
    provided; re-renders the submit form with error context on failure.
    Otherwise stores the upload via store_temp_file(), queues it with
    db.add_path() and renders the success (or internal-error) template.
    """
    context = {}
    errors = False

    form = request.forms
    package = form.get("package", "")
    options = form.get("options", "")
    priority = form.get("priority", 1)
    timeout = form.get("timeout", 0)
    machine = form.get("machine", "")
    platform = form.get("platform", "")
    memory = form.get("memory", "")
    data = request.files.file

    try:
        priority = int(priority)
    except ValueError:
        errors = True
        context["error_toggle"] = True
        context["error_priority"] = "Needs to be a number"

    if not data:
        errors = True
        context["error_toggle"] = True
        context["error_file"] = "Mandatory"

    if errors:
        # Send the form back with the collected error context.
        return env.get_template("submit.html").render({
            "timeout": timeout,
            "priority": priority,
            "options": options,
            "package": package,
            "context": context,
            "machine": machine,
            "platform": platform,
            "memory": memory
        })

    stored_path = store_temp_file(data.file.read(), data.filename)

    task_id = db.add_path(file_path=stored_path,
                          timeout=timeout,
                          priority=priority,
                          options=options,
                          package=package,
                          machine=machine,
                          platform=platform,
                          memory=memory)

    if not task_id:
        return env.get_template("error.html").render({
            "error":
            "The server encountered an internal error while submitting {0}".
            format(data.filename.decode("utf-8"))
        })

    return env.get_template("success.html").render({
        "taskid": task_id,
        "submitfile": data.filename.decode("utf-8")
    })
Esempio n. 46
0
def sep_unquarantine(f):
    """Recover the original file from a Symantec Endpoint Protection
    quarantine container and store it as a temp file.

    XOR-decodes the container payload with 0x5A, walks its tag stream
    (via read_sep_tag) and reassembles the embedded binary, undoing the
    per-container 0xFF XOR where flagged.  Returns the path produced by
    store_temp_file(), or None for unsupported (older) layouts.

    NOTE(review): Python 2 code (xrange); read_sep_tag and bytearray_xor
    are helpers defined elsewhere in this file.
    """
    filesize = os.path.getsize(f)
    with open(f, "rb") as quarfile:
        qdata = quarfile.read()

    data = bytearray(qdata)

    # Offset of the quarantined payload, stored in the first 4 bytes.
    dataoffset = struct.unpack("<I", data[:4])[0]

    if dataoffset != 0x1290:
        # supporting older, simpler formats is trivial, will add
        # in a future commit
        return None

    # Space exists in the header for up to 384 characters of the original ASCII filename
    origname = str(bytes(data[4:388])).rstrip('\0')
    origname = os.path.basename(origname)

    data = bytearray_xor(data, 0x5a)

    dataoffset += 0x28
    offset = dataoffset
    decode_next_container = False
    xor_next_container = False
    has_header = True
    binsize = 0        # declared size of the embedded binary
    collectedsize = 0  # payload bytes gathered so far
    bindata = bytearray()
    iters = 0
    lastlen = 0

    while iters < 20000:  # prevent infinite loop on malformed files
        iters += 1
        code, length, codeval, tagdata = read_sep_tag(data, offset)
        extralen = len(tagdata)
        if code == 9:
            if xor_next_container:
                # Container payload is additionally XORed with 0xFF in place.
                for i in xrange(len(tagdata)):
                    data[offset + 5 + i] ^= 0xff
                if has_header:
                    # First chunk carries a header; the binary size sits
                    # just before the payload start.
                    headerlen = 12 + struct.unpack_from(
                        "<I", data[offset + 5 + 8:offset + 5 + 12])[0] + 28
                    binsize = struct.unpack_from(
                        "<I", data[offset + 5 + headerlen - 12:offset + 5 +
                                   headerlen - 8])[0]
                    collectedsize += len(tagdata) - headerlen
                    if collectedsize > binsize:
                        binlen = binsize
                    else:
                        binlen = collectedsize
                    bindata += data[offset + 5 + headerlen:offset + 5 +
                                    headerlen + binlen]
                    has_header = False
                else:
                    binlen = len(tagdata)
                    collectedsize += binlen
                    if collectedsize > binsize:
                        # Clamp the final chunk to the declared size.
                        binlen -= (collectedsize - binsize)
                    bindata += data[offset + 5:offset + 5 + binlen]
            else:
                if decode_next_container:
                    extralen = 0
                    decode_next_container = False
                elif codeval == 0x10 or codeval == 0x8:
                    if codeval == 0x8:
                        xor_next_container = True
                        lastlen = struct.unpack_from(
                            "<Q", data[offset + 5:offset + 5 + 8])[0]
                    else:
                        xor_next_container = False
                    decode_next_container = True
        elif code == 4:
            # Size tag matching the last recorded length: headerless payload.
            if xor_next_container and lastlen == codeval:
                binsize = codeval
                has_header = False

        offset += length + extralen
        if offset == filesize:
            break

    return store_temp_file(bindata, origname)
def tasks_create_file(request):
    """API endpoint (POST only): queue analysis task(s) for uploaded file(s).

    Validates the target machine against db.list_machines(), enforces the
    configured upload size limit, stores each sample with store_temp_file()
    and queues one task per (file, machine) pair via db.add_path().
    Returns a jsonize()d dict: {"error": True, "error_value": ...} on
    failure, otherwise "task_ids" plus a human-readable "data" message
    (and "url" status callbacks when enabled in apiconf).

    Fixes vs the previous revision:
    - the single-task callback URL is built from apiconf.api.get("url"),
      matching the multi-task branch (it previously used
      apiconf.filecreate.get("url"));
    - "task_ids" is always included in a successful response, matching
      the sibling implementation;
    - the duplicated multifile / single-file submission loops are merged.
    """
    resp = {}
    if request.method == "POST":
        # Check if this API function is enabled.
        if not apiconf.filecreate.get("enabled"):
            resp = {
                "error": True,
                "error_value": "File Create API is Disabled"
            }
            return jsonize(resp, response=True)
        # Check if files are actually provided.
        if request.FILES.getlist("file") == []:
            resp = {"error": True, "error_value": "No file was submitted"}
            return jsonize(resp, response=True)
        resp["error"] = False
        # Parse potential POST options (see submission/views.py).
        package = request.POST.get("package", "")
        timeout = force_int(request.POST.get("timeout"))
        priority = force_int(request.POST.get("priority"))
        options = request.POST.get("options", "")
        machine = request.POST.get("machine", "")
        platform = request.POST.get("platform", "")
        tags = request.POST.get("tags", None)
        custom = request.POST.get("custom", "")
        # NOTE(review): bool() on the raw POST string means any non-empty
        # value (even "false") enables these flags.
        memory = bool(request.POST.get("memory", False))
        clock = request.POST.get("clock", None)
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))

        task_ids = []
        task_machines = []
        vm_list = [vm.label for vm in db.list_machines()]

        if machine.lower() == "all":
            if not apiconf.filecreate.get("allmachines"):
                resp = {
                    "error": True,
                    "error_value": "Machine=all is disabled using the API"
                }
                return jsonize(resp, response=True)
            task_machines = list(vm_list)
        else:
            # Check if VM is in our machines table ("" = first available).
            if machine == "" or machine in vm_list:
                task_machines.append(machine)
            # Error if its not.
            else:
                resp = {
                    "error":
                    True,
                    "error_value":
                    ("Machine '{0}' does not exist. "
                     "Available: {1}".format(machine, ", ".join(vm_list)))
                }
                return jsonize(resp, response=True)
        # Parse a max file size to be uploaded; config value is in MB,
        # 0/unset falls back to 5 MB.
        max_file_size = apiconf.filecreate.get("upload_limit")
        if not max_file_size or int(max_file_size) == 0:
            max_file_size = 5 * 1048576
        else:
            max_file_size = int(max_file_size) * 1048576
        # Check if we are allowing multiple file submissions.
        multifile = apiconf.filecreate.get("multifile")
        uploads = request.FILES.getlist("file")
        if multifile:
            samples = uploads
        else:
            # Multi-file disabled: accept only the first file.
            samples = uploads[:1]
            if len(uploads) > 1:
                resp["warning"] = ("Multi-file API submissions disabled - "
                                   "Accepting first file")
        for sample in samples:
            if sample.size == 0:
                resp = {
                    "error": True,
                    "error_value": "You submitted an empty file"
                }
                return jsonize(resp, response=True)
            if sample.size > max_file_size:
                resp = {
                    "error": True,
                    "error_value": "File size exceeds API limit"
                }
                return jsonize(resp, response=True)
            path = store_temp_file(sample.read(), sample.name)
            # Queue one task per requested machine for this sample.
            for entry in task_machines:
                task_id = db.add_path(
                    file_path=path,
                    package=package,
                    timeout=timeout,
                    priority=priority,
                    options=options,
                    machine=entry,
                    platform=platform,
                    tags=tags,
                    custom=custom,
                    memory=memory,
                    enforce_timeout=enforce_timeout,
                    clock=clock,
                )
                if task_id:
                    task_ids.append(task_id)
        if len(task_ids) > 0:
            resp["task_ids"] = task_ids
            # "status" enables callback URLs pointing at the status pages.
            callback = apiconf.filecreate.get("status")
            if len(task_ids) == 1:
                resp["data"] = "Task ID {0} has been submitted".format(
                    str(task_ids[0]))
                if callback:
                    resp["url"] = [
                        "{0}/submit/status/{1}/".format(
                            apiconf.api.get("url"), task_ids[0])
                    ]
            else:
                resp["data"] = "Task IDs {0} have been submitted".format(
                    ", ".join(str(x) for x in task_ids))
                if callback:
                    resp["url"] = list()
                    for tid in task_ids:
                        resp["url"].append("{0}/submit/status/{1}".format(
                            apiconf.api.get("url"), tid))
        else:
            resp = {
                "error": True,
                "error_value": "Error adding task to database"
            }
        return jsonize(resp, response=True)

    else:
        resp = {"error": True, "error_value": "Method not allowed"}
        return jsonize(resp, response=True)
Esempio n. 48
0
def trend_unquarantine(f):
    """Restore the original file from a Trend Micro VSBX quarantine file.

    The whole container is obfuscated by XORing every byte with 0xFF.  After
    de-obfuscation, the header carries a magic value, the payload offset and
    a list of tagged metadata records (original path/name, attributes, base
    key, encryption method).  Payloads with encryption method 2 are further
    encrypted with a CRC32-derived rolling keystream, reversed in place
    below.

    @param f: path to the quarantine file on disk.
    @return: path of the restored payload written via store_temp_file(), or
        None if the file is not a valid VSBX container.
    """
    with open(f, "rb") as quarfile:
        qdata = quarfile.read()

    # Undo the container-wide XOR 0xFF obfuscation.
    data = bytearray_xor(bytearray(qdata), 0xff)

    # Header layout: <magic:4><payload offset:4><tag count:2>, little-endian.
    magic, dataoffset, numtags = struct.unpack("<IIH", data[:10])
    if magic != 0x58425356: # VSBX
        return None
    # Defaults used when the corresponding metadata tag is absent.
    origpath = "C:\\"
    origname = "UnknownTrendFile.bin"
    platform = "Unknown"
    attributes = 0x00000000
    unknownval = 0
    basekey = 0x00000000
    encmethod = 0

    # Sanity bound on the tag count (guards against malformed input).
    if numtags > 15:
        return None

    # Tags start right after the 10-byte header; the payload offset stored
    # in the header is also relative to the end of that header.
    dataoffset += 10
    offset = 10
    for i in range(numtags):
        code, tagdata = read_trend_tag(data, offset)
        if code == 1: # original pathname
            origpath = unicode(tagdata, encoding="utf16").encode("utf8", "ignore").rstrip("\0")
        elif code == 2: # original filename
            origname = unicode(tagdata, encoding="utf16").encode("utf8", "ignore").rstrip("\0")
        elif code == 3: # platform
            platform = str(tagdata)
        elif code == 4: # file attributes
            attributes = struct.unpack("<I", tagdata)[0]
        elif code == 5: # unknown, generally 1
            unknownval = struct.unpack("<I", tagdata)[0]
        elif code == 6: # base key
            basekey = struct.unpack("<I", tagdata)[0]
        elif code == 7: # encryption method: 1 == xor FF, 2 = CRC method
            encmethod = struct.unpack("<I", tagdata)[0]
        # Each tag occupies 3 header bytes plus its payload.
        offset += 3 + len(tagdata)

    # Method != 2: the initial XOR pass already revealed the payload.
    if encmethod != 2:
        return store_temp_file(data[dataoffset:], origname)

    # CRC method: each 4-byte-aligned block is XORed with the little-endian
    # CRC32 of (basekey + block-aligned offset).  The first block may start
    # unaligned; after it, every block is a full 4-byte step.
    bytesleft = len(data) - dataoffset
    unaligned = dataoffset % 4
    firstiter = True
    curoffset = dataoffset
    while bytesleft:
        off = curoffset
        if firstiter:
            # Align the key offset down for the (possibly partial) first block.
            off = curoffset - unaligned
            firstiter = False
        keyval = basekey + off
        buf = struct.pack("<I", keyval)
        crc = crc32(buf) & 0xffffffff
        crcbuf = bytearray(struct.pack("<I", crc))

        for i in range(unaligned, 4):
            if not bytesleft:
                break
            data[curoffset] ^= crcbuf[i]
            curoffset += 1
            bytesleft -= 1

        # Only the first block can be unaligned.
        unaligned = 0

    return store_temp_file(data[dataoffset:], origname)
Esempio n. 49
0
def index(request, task_id=None, sha1=None):
    """Submission view: queue analysis tasks for files, resubmits,
    dropped files or URLs.

    GET renders the submission form.  POST collects the submission
    parameters, resolves the requested machine list ("all" expands to every
    configured machine), creates one task per target/machine pair through
    the Cuckoo database, and renders the completion page (or an error page
    when no task could be created).

    @param request: Django HTTP request.
    @param task_id: task being resubmitted ("file"/"dropped_file" category).
    @param sha1: SHA1 identifying the dropped file on resubmission.
    """
    if request.method == "GET":
        return render_index(request)

    package = request.POST.get("package", "")
    timeout = force_int(request.POST.get("timeout"))
    options = request.POST.get("options", "")
    priority = force_int(request.POST.get("priority"))
    machine = request.POST.get("machine", "")
    custom = request.POST.get("custom", "")
    memory = bool(request.POST.get("memory", False))
    enforce_timeout = bool(request.POST.get("enforce_timeout", False))
    tags = request.POST.get("tags", None)

    options = parse_options(options)

    # The following POST fields take precedence over the options field.
    if request.POST.get("route"):
        options["route"] = request.POST.get("route")

    if request.POST.get("free"):
        options["free"] = "yes"

    if request.POST.get("process_memory"):
        options["procmemdump"] = "yes"

    if request.POST.get("services"):
        options["services"] = "yes"

    db = Database()
    task_ids = []
    task_machines = []

    # "all" fans the submission out to every configured analysis machine.
    if machine.lower() == "all":
        for entry in db.list_machines():
            task_machines.append(entry.label)
    else:
        task_machines.append(machine)

    # In case of resubmitting a file.
    if request.POST.get("category") == "file":
        # Reuse the existing handle instead of opening a second Database().
        task = db.view_task(task_id)

        for entry in task_machines:
            task_id = db.add_path(file_path=task.target,
                                  package=package,
                                  timeout=timeout,
                                  options=emit_options(options),
                                  priority=priority,
                                  machine=entry,
                                  custom=custom,
                                  memory=memory,
                                  enforce_timeout=enforce_timeout,
                                  tags=tags)
            if task_id:
                task_ids.append(task_id)

    elif request.FILES.getlist("sample"):
        samples = request.FILES.getlist("sample")
        for sample in samples:
            # Error if there was only one submitted sample and it's empty.
            # But if there are multiple and one was empty, just ignore it.
            if not sample.size:
                if len(samples) != 1:
                    continue

                return render_to_response("error.html",
                                          {"error": "You uploaded an empty file."},
                                          context_instance=RequestContext(request))
            elif sample.size > settings.MAX_UPLOAD_SIZE:
                return render_to_response("error.html",
                                          {"error": "You uploaded a file that exceeds that maximum allowed upload size."},
                                          context_instance=RequestContext(request))

            # Moving sample from django temporary file to Cuckoo temporary
            # storage to let it persist between reboot (if user like to
            # configure it in that way).
            path = store_temp_file(sample.read(), sample.name)

            for entry in task_machines:
                task_id = db.add_path(file_path=path,
                                      package=package,
                                      timeout=timeout,
                                      options=emit_options(options),
                                      priority=priority,
                                      machine=entry,
                                      custom=custom,
                                      memory=memory,
                                      enforce_timeout=enforce_timeout,
                                      tags=tags)
                if task_id:
                    task_ids.append(task_id)

    # When submitting a dropped file.
    elif request.POST.get("category") == "dropped_file":
        filepath = dropped_filepath(task_id, sha1)

        for entry in task_machines:
            task_id = db.add_path(file_path=filepath,
                                  package=package,
                                  timeout=timeout,
                                  options=emit_options(options),
                                  priority=priority,
                                  machine=entry,
                                  custom=custom,
                                  memory=memory,
                                  enforce_timeout=enforce_timeout,
                                  tags=tags)
            if task_id:
                task_ids.append(task_id)

    else:
        # A missing "url" field used to raise AttributeError (None.strip());
        # treat an absent field like an empty one and show the error page.
        url = request.POST.get("url", "").strip()
        if not url:
            return render_to_response("error.html",
                                      {"error": "You specified an invalid URL!"},
                                      context_instance=RequestContext(request))

        for entry in task_machines:
            task_id = db.add_url(url=url,
                                 package=package,
                                 timeout=timeout,
                                 options=emit_options(options),
                                 priority=priority,
                                 machine=entry,
                                 custom=custom,
                                 memory=memory,
                                 enforce_timeout=enforce_timeout,
                                 tags=tags)
            if task_id:
                task_ids.append(task_id)

    tasks_count = len(task_ids)
    if tasks_count > 0:
        return render_to_response("submission/complete.html",
                                  {"tasks": task_ids,
                                   "tasks_count": tasks_count,
                                   "baseurl": request.build_absolute_uri('/')[:-1]},
                                  context_instance=RequestContext(request))
    else:
        return render_to_response("error.html",
                                  {"error": "Error adding task to Cuckoo's database."},
                                  context_instance=RequestContext(request))
Esempio n. 50
0
def trend_unquarantine(f):
    """Decrypt a Trend Micro VSBX quarantine file and recover the payload.

    The container is XOR-0xFF obfuscated as a whole; its header holds a
    magic value, the payload offset and a list of tagged metadata records.
    When the "encryption method" tag is 2, the payload is additionally
    protected by a CRC32-based rolling keystream which is undone in place.

    @param f: path to the quarantine file on disk.
    @return: path of the restored file (via store_temp_file()), or None if
        the input is not a valid VSBX container.
    """
    with open(f, "rb") as quarfile:
        qdata = quarfile.read()

    # Remove the container-wide XOR 0xFF obfuscation.
    data = bytearray_xor(bytearray(qdata), 0xff)

    # Header: <magic:4><payload offset:4><tag count:2>, little-endian.
    magic, dataoffset, numtags = struct.unpack("<IIH", data[:10])
    if magic != 0x58425356:  # VSBX
        return None
    # Defaults in case the corresponding metadata tag is missing.
    origpath = "C:\\"
    origname = "UnknownTrendFile.bin"
    platform = "Unknown"
    attributes = 0x00000000
    unknownval = 0
    basekey = 0x00000000
    encmethod = 0

    # Reject implausible tag counts (malformed input guard).
    if numtags > 15:
        return None

    # Tags and payload offsets are relative to the 10-byte header end.
    dataoffset += 10
    offset = 10
    for i in xrange(numtags):
        code, tagdata = read_trend_tag(data, offset)
        if code == 1:  # original pathname
            origpath = unicode(tagdata,
                               encoding="utf16").encode("utf8",
                                                        "ignore").rstrip("\0")
        elif code == 2:  # original filename
            origname = unicode(tagdata,
                               encoding="utf16").encode("utf8",
                                                        "ignore").rstrip("\0")
        elif code == 3:  # platform
            platform = str(tagdata)
        elif code == 4:  # file attributes
            attributes = struct.unpack("<I", tagdata)[0]
        elif code == 5:  # unknown, generally 1
            unknownval = struct.unpack("<I", tagdata)[0]
        elif code == 6:  # base key
            basekey = struct.unpack("<I", tagdata)[0]
        elif code == 7:  # encryption method: 1 == xor FF, 2 = CRC method
            encmethod = struct.unpack("<I", tagdata)[0]
        # Each tag occupies 3 header bytes plus its payload.
        offset += 3 + len(tagdata)

    # Method != 2: the XOR pass above already exposed the payload.
    if encmethod != 2:
        return store_temp_file(data[dataoffset:], origname)

    # CRC method: XOR each 4-byte-aligned block with the little-endian
    # CRC32 of (basekey + block-aligned offset).  The first block may be
    # unaligned; subsequent blocks are full 4-byte steps.
    bytesleft = len(data) - dataoffset
    unaligned = dataoffset % 4
    firstiter = True
    curoffset = dataoffset
    while bytesleft:
        off = curoffset
        if firstiter:
            # Align the key offset down for the possibly partial first block.
            off = curoffset - unaligned
            firstiter = False
        keyval = basekey + off
        buf = struct.pack("<I", keyval)
        crc = crc32(buf) & 0xffffffff
        crcbuf = bytearray(struct.pack("<I", crc))

        for i in xrange(unaligned, 4):
            if not bytesleft:
                break
            data[curoffset] ^= crcbuf[i]
            curoffset += 1
            bytesleft -= 1

        # Only the first block can start unaligned.
        unaligned = 0

    return store_temp_file(data[dataoffset:], origname)
Esempio n. 51
0
def import_analysis(request):
    """Import previously exported analyses and schedule them for reprocessing.

    POST accepts one or more .zip archives in the "analyses" field.  Each
    archive is validated (non-empty, .zip extension, no absolute / ".." /
    drive-letter member names), a task is recreated from its analysis.json
    (or bundled "binary"), the archive is extracted into the task's storage
    directory, and the task is flagged TASK_COMPLETED so the report
    processor picks it up.

    @param request: Django HTTP request.
    """
    if request.method == "GET":
        return render(request, "analysis/import.html")

    db = Database()
    task_ids = []

    for analysis in request.FILES.getlist("analyses"):
        # Reset per archive: previously task_id was referenced before
        # assignment on the first iteration (NameError) and a stale ID
        # from an earlier archive could leak into the check below.
        task_id = None

        if not analysis.size:
            return render(request, "error.html", {
                "error": "You uploaded an empty analysis.",
            })

        # if analysis.size > settings.MAX_UPLOAD_SIZE:
            # return render(request, "error.html", {
            #     "error": "You uploaded a file that exceeds that maximum allowed upload size.",
            # })

        if not analysis.name.endswith(".zip"):
            return render(request, "error.html", {
                "error": "You uploaded an analysis that wasn't a .zip.",
            })

        zf = zipfile.ZipFile(analysis)

        # As per Python documentation we have to make sure there are no
        # incorrect filenames (path traversal / absolute paths / drives).
        for filename in zf.namelist():
            if filename.startswith("/") or ".." in filename or ":" in filename:
                return render(request, "error.html", {
                    "error": "The zip file contains incorrect filenames, "
                             "please provide a legitimate .zip file.",
                })

        if "analysis.json" in zf.namelist():
            analysis_info = json.loads(zf.read("analysis.json"))
        elif "binary" in zf.namelist():
            # No metadata but a sample is present: treat it as a file task.
            analysis_info = {
                "target": {
                    "category": "file",
                },
            }
        else:
            # Neither metadata nor binary: assume a URL analysis.
            analysis_info = {
                "target": {
                    "category": "url",
                    "url": "unknown",
                },
            }

        category = analysis_info["target"]["category"]
        info = analysis_info.get("info", {})

        if category == "file":
            binary = store_temp_file(zf.read("binary"), "binary")

            if os.path.isfile(binary):
                task_id = db.add_path(file_path=binary,
                                      package=info.get("package"),
                                      timeout=0,
                                      options=info.get("options"),
                                      priority=0,
                                      machine="",
                                      custom=info.get("custom"),
                                      memory=False,
                                      enforce_timeout=False,
                                      tags=info.get("tags"))
                if task_id:
                    task_ids.append(task_id)

        elif category == "url":
            url = analysis_info["target"]["url"]
            if not url:
                return render(request, "error.html", {
                    "error": "You specified an invalid URL!",
                })

            task_id = db.add_url(url=url,
                                 package=info.get("package"),
                                 timeout=0,
                                 options=info.get("options"),
                                 priority=0,
                                 machine="",
                                 custom=info.get("custom"),
                                 memory=False,
                                 enforce_timeout=False,
                                 tags=info.get("tags"))
            if task_id:
                task_ids.append(task_id)

        # No task could be created for this archive; skip extraction.
        if not task_id:
            continue

        # Extract all of the files related to this analysis. This probably
        # requires some hacks depending on the user/group the Web
        # Interface is running under.
        analysis_path = os.path.join(
            CUCKOO_ROOT, "storage", "analyses", "%d" % task_id
        )

        if not os.path.exists(analysis_path):
            os.mkdir(analysis_path)

        zf.extractall(analysis_path)

        # We set this analysis as completed so that it will be processed
        # automatically (assuming process.py / process2.py is running).
        db.set_status(task_id, TASK_COMPLETED)

    if task_ids:
        return render(request, "submission/complete.html", {
            "tasks": task_ids,
            "baseurl": request.build_absolute_uri("/")[:-1],
        })

    # Previously the view fell through and returned None (a server error in
    # Django) when nothing was imported; render an explicit error instead.
    return render(request, "error.html", {
        "error": "Error adding task to database.",
    })
Esempio n. 52
0
def import_analysis(request):
    """Import previously exported analyses (.zip) and queue them for reprocessing.

    POST accepts archives via the "sample" field.  Each archive is validated
    (non-empty, .zip, no absolute / ".." / drive-letter member names), a
    bare task is recreated from its analysis.json or bundled "binary", the
    archive is extracted into the task's storage directory and the task is
    marked TASK_COMPLETED so the report processor handles it.

    @param request: Django HTTP request.
    """
    if request.method == "GET":
        return render(request, "analysis/import.html")

    db = Database()
    task_ids = []
    analyses = request.FILES.getlist("sample")

    for analysis in analyses:
        # Reset per archive: previously task_id was referenced before
        # assignment on the first iteration (NameError) and a stale ID
        # from an earlier archive could leak into the check below.
        task_id = None

        if not analysis.size:
            return render(request, "error.html", {
                "error": "You uploaded an empty analysis.",
            })

        # if analysis.size > settings.MAX_UPLOAD_SIZE:
        # return render(request, "error.html", {
        #     "error": "You uploaded a file that exceeds that maximum allowed upload size.",
        # })

        if not analysis.name.endswith(".zip"):
            return render(
                request, "error.html", {
                    "error": "You uploaded an analysis that wasn't a .zip.",
                })

        zf = zipfile.ZipFile(analysis)

        # As per Python documentation we have to make sure there are no
        # incorrect filenames (path traversal / absolute paths / drives).
        for filename in zf.namelist():
            if filename.startswith("/") or ".." in filename or ":" in filename:
                return render(
                    request, "error.html", {
                        "error":
                        "The zip file contains incorrect filenames, "
                        "please provide a legitimate .zip file.",
                    })

        if "analysis.json" in zf.namelist():
            analysis_info = json.loads(zf.read("analysis.json"))
        elif "binary" in zf.namelist():
            # No metadata but a sample is present: treat it as a file task.
            analysis_info = {
                "target": {
                    "category": "file",
                },
            }
        else:
            # Neither metadata nor binary: assume a URL analysis.
            analysis_info = {
                "target": {
                    "category": "url",
                    "url": "unknown",
                },
            }

        category = analysis_info["target"]["category"]

        if category == "file":
            binary = store_temp_file(zf.read("binary"), "binary")

            if os.path.isfile(binary):
                task_id = db.add_path(file_path=binary,
                                      package="",
                                      timeout=0,
                                      options="",
                                      priority=0,
                                      machine="",
                                      custom="",
                                      memory=False,
                                      enforce_timeout=False,
                                      tags=None)
                if task_id:
                    task_ids.append(task_id)

        elif category == "url":
            url = analysis_info["target"]["url"]
            if not url:
                return render(request, "error.html", {
                    "error": "You specified an invalid URL!",
                })

            task_id = db.add_url(url=url,
                                 package="",
                                 timeout=0,
                                 options="",
                                 priority=0,
                                 machine="",
                                 custom="",
                                 memory=False,
                                 enforce_timeout=False,
                                 tags=None)
            if task_id:
                task_ids.append(task_id)

        # No task could be created for this archive; skip extraction.
        if not task_id:
            continue

        # Extract all of the files related to this analysis. This probably
        # requires some hacks depending on the user/group the Web
        # Interface is running under.
        analysis_path = os.path.join(CUCKOO_ROOT, "storage", "analyses",
                                     "%d" % task_id)

        if not os.path.exists(analysis_path):
            os.mkdir(analysis_path)

        zf.extractall(analysis_path)

        # We set this analysis as completed so that it will be processed
        # automatically (assuming process.py / process2.py is running).
        db.set_status(task_id, TASK_COMPLETED)

    if task_ids:
        return render(request, "submission/complete.html", {
            "tasks": task_ids,
            "baseurl": request.build_absolute_uri("/")[:-1],
        })

    # Previously the view fell through and returned None (a server error in
    # Django) when nothing was imported; render an explicit error instead.
    return render(request, "error.html", {
        "error": "Error adding task to database.",
    })
Esempio n. 53
0
def index(request, task_id=None, sha1=None):
    """Submission view: queue analysis tasks for files, resubmits,
    dropped files or URLs.

    GET renders the submission form.  POST collects submission parameters
    (several POST toggles override the free-form options field), resolves
    the requested machine list ("all" expands to every configured machine),
    creates one task per target/machine pair and renders the completion
    page, or an error page when no task could be created.

    @param request: Django HTTP request.
    @param task_id: task being resubmitted ("file"/"dropped_file" category).
    @param sha1: SHA1 identifying the dropped file on resubmission.
    """
    if request.method == "GET":
        return render_index(request)

    package = request.POST.get("package", "")
    timeout = force_int(request.POST.get("timeout"))
    options = request.POST.get("options", "")
    priority = force_int(request.POST.get("priority"))
    machine = request.POST.get("machine", "")
    custom = request.POST.get("custom", "")
    memory = bool(request.POST.get("memory", False))
    enforce_timeout = bool(request.POST.get("enforce_timeout", False))
    tags = request.POST.get("tags", None)

    options = parse_options(options)

    # The following POST fields take precedence over the options field.
    if request.POST.get("route"):
        options["route"] = request.POST.get("route")

    if request.POST.get("free"):
        options["free"] = "yes"

    if request.POST.get("process_memory"):
        options["procmemdump"] = "yes"

    if request.POST.get("services"):
        options["services"] = "yes"

    # Human interaction simulation is on unless explicitly unchecked.
    if not request.POST.get("human"):
        options["human"] = "0"

    if request.POST.get("screenshots"):
        options["screenshots"] = force_int(request.POST.get("screenshots"))

    db = Database()
    task_ids = []
    task_machines = []

    # "all" fans the submission out to every configured analysis machine.
    if machine.lower() == "all":
        for entry in db.list_machines():
            task_machines.append(entry.label)
    else:
        task_machines.append(machine)

    # In case of resubmitting a file.
    if request.POST.get("category") == "file":
        # Reuse the existing handle instead of opening a second Database().
        task = db.view_task(task_id)

        for entry in task_machines:
            task_id = db.add_path(file_path=task.target,
                                  package=package,
                                  timeout=timeout,
                                  options=emit_options(options),
                                  priority=priority,
                                  machine=entry,
                                  custom=custom,
                                  memory=memory,
                                  enforce_timeout=enforce_timeout,
                                  tags=tags)
            if task_id:
                task_ids.append(task_id)

    elif request.FILES.getlist("sample"):
        samples = request.FILES.getlist("sample")
        for sample in samples:
            # Error if there was only one submitted sample and it's empty.
            # But if there are multiple and one was empty, just ignore it.
            if not sample.size:
                if len(samples) != 1:
                    continue

                return render(request, "error.html", {
                    "error": "You uploaded an empty file.",
                })
            elif sample.size > settings.MAX_UPLOAD_SIZE:
                return render(
                    request, "error.html", {
                        "error":
                        "You uploaded a file that exceeds that maximum allowed upload size.",
                    })

            # Moving sample from django temporary file to Cuckoo temporary
            # storage to let it persist between reboot (if user like to
            # configure it in that way).
            path = store_temp_file(sample.read(), sample.name)

            for entry in task_machines:
                task_id = db.add_path(file_path=path,
                                      package=package,
                                      timeout=timeout,
                                      options=emit_options(options),
                                      priority=priority,
                                      machine=entry,
                                      custom=custom,
                                      memory=memory,
                                      enforce_timeout=enforce_timeout,
                                      tags=tags)
                if task_id:
                    task_ids.append(task_id)

    # When submitting a dropped file.
    elif request.POST.get("category") == "dropped_file":
        filepath = dropped_filepath(task_id, sha1)

        for entry in task_machines:
            task_id = db.add_path(file_path=filepath,
                                  package=package,
                                  timeout=timeout,
                                  options=emit_options(options),
                                  priority=priority,
                                  machine=entry,
                                  custom=custom,
                                  memory=memory,
                                  enforce_timeout=enforce_timeout,
                                  tags=tags)
            if task_id:
                task_ids.append(task_id)

    else:
        # A missing "url" field used to raise AttributeError (None.strip());
        # treat an absent field like an empty one and show the error page.
        url = request.POST.get("url", "").strip()
        if not url:
            return render(request, "error.html", {
                "error": "You specified an invalid URL!",
            })

        for entry in task_machines:
            task_id = db.add_url(url=url,
                                 package=package,
                                 timeout=timeout,
                                 options=emit_options(options),
                                 priority=priority,
                                 machine=entry,
                                 custom=custom,
                                 memory=memory,
                                 enforce_timeout=enforce_timeout,
                                 tags=tags)
            if task_id:
                task_ids.append(task_id)

    tasks_count = len(task_ids)
    if tasks_count > 0:
        return render(
            request, "submission/complete.html", {
                "tasks": task_ids,
                "tasks_count": tasks_count,
                "baseurl": request.build_absolute_uri('/')[:-1],
            })
    else:
        return render(request, "error.html", {
            "error": "Error adding task to Cuckoo's database.",
        })
Esempio n. 54
0
def sep_unquarantine(f):
    """Extract the original binary from a Symantec Endpoint Protection
    quarantine file.

    Only the newer layout with payload offset 0x1290 is handled.  The body
    is XOR-0x5A obfuscated and holds a stream of tagged records; code-9
    records carry the (optionally XOR-0xFF encoded) chunks of the
    quarantined binary, which are reassembled and written out.

    @param f: path to the quarantine file on disk.
    @return: path of the reassembled binary (via store_temp_file()), or
        None for unsupported container layouts.
    """
    filesize = os.path.getsize(f)
    with open(f, "rb") as quarfile:
        qdata = quarfile.read()

    data = bytearray(qdata)

    # First dword: offset of the payload area within the container.
    dataoffset = struct.unpack("<I", data[:4])[0]

    if dataoffset != 0x1290:
        # supporting older, simpler formats is trivial, will add
        # in a future commit
        return None

    # Space exists in the header for up to 384 characters of the original ASCII filename
    origname = str(bytes(data[4:388])).rstrip('\0')
    origname = os.path.basename(origname)

    # De-obfuscate the whole container (XOR 0x5A).
    data = bytearray_xor(data, 0x5a)

    # Skip a fixed 0x28-byte area before the first tag record.
    dataoffset += 0x28
    offset = dataoffset
    decode_next_container = False
    xor_next_container = False
    has_header = True       # first collected chunk carries an extra header
    binsize = 0             # declared size of the original binary
    collectedsize = 0       # bytes of binary data gathered so far
    bindata = bytearray()   # reassembled binary
    iters = 0
    lastlen = 0             # length announced by the last 0x8 marker

    while iters < 20000: # prevent infinite loop on malformed files
        iters += 1
        code, length, codeval, tagdata = read_sep_tag(data, offset)
        extralen = len(tagdata)
        if code == 9:
            # Code 9: container record; when flagged, its payload holds
            # XOR-0xFF encoded chunks of the binary to collect.
            if xor_next_container:
                for i in range(len(tagdata)):
                    data[offset+5+i] ^= 0xff
                if has_header:
                    # The first chunk starts with a variable-length header;
                    # its trailing dword is the total binary size.
                    headerlen = 12 + struct.unpack_from("<I", data[offset+5+8:offset+5+12])[0] + 28
                    binsize = struct.unpack_from("<I", data[offset+5+headerlen-12:offset+5+headerlen-8])[0]
                    collectedsize += len(tagdata) - headerlen
                    if collectedsize > binsize:
                        binlen = binsize
                    else:
                        binlen = collectedsize
                    bindata += data[offset+5+headerlen:offset+5+headerlen+binlen]
                    has_header = False
                else:
                    binlen = len(tagdata)
                    collectedsize += binlen
                    if collectedsize > binsize:
                        # Trim padding past the declared binary size.
                        binlen -= (collectedsize - binsize)
                    bindata += data[offset+5:offset+5+binlen]
            else:
                if decode_next_container:
                    extralen = 0
                    decode_next_container = False
                elif codeval == 0x10 or codeval == 0x8:
                    # 0x8 announces an XOR-encoded container (and records
                    # the upcoming length); 0x10 a plain one.
                    if codeval == 0x8:
                        xor_next_container = True
                        lastlen = struct.unpack_from("<Q", data[offset+5:offset+5+8])[0]
                    else:
                        xor_next_container = False
                    decode_next_container = True
        elif code == 4:
            # A code-4 value matching the announced length apparently means
            # the data carries no per-chunk header — TODO confirm against
            # the VBN format description.
            if xor_next_container and lastlen == codeval:
                binsize = codeval
                has_header = False

        offset += length + extralen
        if offset == filesize:
            break

    return store_temp_file(bindata, origname)
Esempio n. 55
0
File: qrat.py Progetto: xme/CAPEv2
def extract_config(file_path, decomp_jar):
    """Extract the QRat C2 configuration from a sample jar.

    The sample ships an "e-data" zip member whose first 8 bytes seed a
    Java LCG (JavaRandom); the remainder is decoded byte-by-byte with that
    keystream into an inner jar, which is decompiled with *decomp_jar* and
    searched for the hardcoded server settings.

    @param file_path: path to the QRat sample (a zip/jar archive).
    @param decomp_jar: path to the Java decompiler jar; falsy disables
        extraction entirely.
    @return: dict with ServerHostN / ServerPort / InstanceControlPort*
        entries, or None when nothing could be extracted.
    """
    enckey = False
    # Initialize so a sample without an "e-data" member cannot trigger a
    # NameError below (previously masked by the blanket except).
    coded_data = None

    if not decomp_jar:
        return None

    ret = {}

    try:
        # Avoid shadowing the zip() builtin with the archive handle.
        with ZipFile(file_path, 'r') as zfile:
            for name in zfile.namelist():
                if name == 'e-data':
                    coded_data = zfile.read(name)
                    # First 8 bytes are the big-endian PRNG seed.
                    seed = coded_data[:8]
                    enckey = unpack('>Q', seed)[0]

        if enckey and coded_data:
            java_rand = JavaRandom(enckey)
            coded_data = coded_data[8:]
            # Decode each byte with the next PRNG value; join once instead
            # of quadratic string concatenation.
            decoded = []
            for c in coded_data:
                key = java_rand.nextInt(255)
                decoded.append(chr((ord(c) - key + 256) % 256))
            decoded_data = "".join(decoded)
            decoded_path = store_temp_file(decoded_data, "qrat.jar")

            # Initialize so a failed decompiler launch leaves the regexes
            # below matching against an empty string instead of raising.
            decompiled_data = ""
            try:
                p = Popen(["java", "-jar", decomp_jar, decoded_path],
                          stdout=PIPE)
                decompiled_data = p.stdout.read()
            except Exception:
                # Decompiler missing or not runnable; continue best-effort.
                pass

            match = re.search(
                r"Utils\.serverHost = new String\[\] \{(?P<stringlist>[^};\r\n]*)\};",
                decompiled_data)
            if match:
                hostlist = match.group('stringlist').split(',')
                serverhosts = [x.strip(" \"") for x in hostlist]
                for i, host in enumerate(serverhosts):
                    ret["ServerHost" + str(i)] = host
            match = re.search(r"Utils\.serverPort = (?P<portnum>\d+);",
                              decompiled_data)
            if match:
                ret["ServerPort"] = int(match.group('portnum'))
            match = re.search(
                r"Utils\.instanceControlPortAgent = (?P<portnum>\d+);",
                decompiled_data)
            if match:
                ret["InstanceControlPortAgent"] = int(match.group('portnum'))
            match = re.search(
                r"Utils\.instanceControlPortClient = (?P<portnum>\d+);",
                decompiled_data)
            if match:
                ret["InstanceControlPortClient"] = int(match.group('portnum'))

            # Best-effort cleanup of the decoded temporary jar.
            try:
                os.unlink(decoded_path)
            except OSError:
                pass

            return ret
    except Exception:
        # Config extraction is opportunistic; any parse failure means the
        # sample simply yields no configuration.
        pass

    return None
Esempio n. 56
0
def index(request, resubmit_hash=False):
    """Render the submission form (GET) or create analysis task(s) (POST).

    On POST, the request is classified into exactly one task category
    (resubmit / sample / quarantine / static / pcap / url / dlnexec / vtdl),
    the corresponding files or URLs are collected, and tasks are added to
    the database. On success renders submission/complete.html; on failure
    renders error.html. On GET renders submission/index.html with the
    machinery/routing configuration.

    :param request: Django HttpRequest.
    :param resubmit_hash: optional sha256 used to prefill the form for
        resubmission (GET only).
    :return: HttpResponse (rendered template).
    """
    remote_console = False
    if request.method == "POST":

        (
            static,
            package,
            timeout,
            priority,
            options,
            machine,
            platform,
            tags,
            custom,
            memory,
            clock,
            enforce_timeout,
            shrike_url,
            shrike_msg,
            shrike_sid,
            shrike_refer,
            unique,
            referrer,
            tlp,
            tags_tasks,
            route,
            cape,
        ) = parse_request_arguments(request)

        # This is done to remove spaces in options but not breaks custom paths
        options = ",".join("=".join(value.strip()
                                    for value in option.split("=", 1))
                           for option in options.split(",")
                           if option and "=" in option)
        opt_filename = get_user_filename(options, custom)

        # Public instances may cap priority/timeout for non-staff users.
        if priority and web_conf.public.enabled and web_conf.public.priority and not request.user.is_staff:
            priority = web_conf.public.priority

        if timeout and web_conf.public.enabled and web_conf.public.timeout and not request.user.is_staff:
            timeout = web_conf.public.timeout

        if options:
            options += ","

        if referrer:
            options += "referrer=%s," % (referrer)

        if request.POST.get("free"):
            options += "free=yes,"

        if request.POST.get("nohuman"):
            options += "nohuman=yes,"

        # Interactive desktop (Guacamole) implies no simulated human activity.
        if web_conf.guacamole.enabled and request.POST.get(
                "interactive_desktop"):
            remote_console = True
            if "nohuman=yes," not in options:
                options += "nohuman=yes,"

        if request.POST.get("tor"):
            options += "tor=yes,"

        if request.POST.get("process_dump"):
            options += "procdump=0,"

        if request.POST.get("process_memory"):
            options += "procmemdump=1,"

        if request.POST.get("import_reconstruction"):
            options += "import_reconstruction=1,"

        if request.POST.get("disable_cape"):
            options += "disable_cape=1,"

        if request.POST.get("kernel_analysis"):
            options += "kernel_analysis=yes,"

        if request.POST.get("norefer"):
            options += "norefer=1,"

        if request.POST.get("oldloader"):
            options += "no-iat=1,"

        if request.POST.get("unpack"):
            options += "unpack=yes,"

        job_category = False
        if request.POST.get("job_category"):
            job_category = request.POST.get("job_category")

        # amsidump is enabled by default in the monitor for Win10+
        if web_conf.amsidump.enabled and not request.POST.get("amsidump"):
            options += "amsidump=0,"

        # Strip the trailing comma left by the option builders above.
        options = options[:-1]

        opt_apikey = False
        opts = get_options(options)
        if opts:
            opt_apikey = opts.get("apikey", False)

        status = "ok"
        task_ids_tmp = []
        existent_tasks = {}
        details = {
            "errors": [],
            "content": False,
            "request": request,
            "task_ids": [],
            "url": False,
            "params": {},
            "headers": {},
            "service": "Local",
            "path": "",
            "fhash": False,
            "options": options,
            "only_extraction": False,
            "user_id": request.user.id or 0,
        }
        # Classify the submission into exactly one task category.
        task_category = False
        samples = []
        if "hash" in request.POST and request.POST.get(
                "hash", False) and request.POST.get("hash")[0] != "":
            task_category = "resubmit"
            samples = request.POST.get("hash").strip().split(",")
        elif "sample" in request.FILES:
            task_category = "sample"
            samples = request.FILES.getlist("sample")
        elif "quarantine" in request.FILES:
            task_category = "quarantine"
            samples = request.FILES.getlist("quarantine")
        elif "static" in request.FILES:
            task_category = "static"
            samples = request.FILES.getlist("static")
        elif "pcap" in request.FILES:
            task_category = "pcap"
            samples = request.FILES.getlist("pcap")
        elif "url" in request.POST and request.POST.get("url").strip():
            task_category = "url"
            samples = request.POST.get("url").strip()
        elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip():
            task_category = "dlnexec"
            samples = request.POST.get("dlnexec").strip()
        elif (settings.VTDL_ENABLED and "vtdl" in request.POST
              and request.POST.get("vtdl", False)
              and request.POST.get("vtdl")[0] != ""):
            task_category = "vtdl"
            samples = request.POST.get("vtdl").strip()

        list_of_files = []
        if task_category in ("url", "dlnexec"):
            if not samples:
                return render(request, "error.html",
                              {"error": "You specified an invalid URL!"})

            for url in samples.split(","):
                # Undo common defanging before submission.
                url = url.replace("hxxps://", "https://").replace(
                    "hxxp://", "http://").replace("[.]", ".")
                if task_category == "dlnexec":
                    path, content, sha256 = process_new_dlnexec_task(
                        url, route, options, custom)
                    if path:
                        list_of_files.append((content, path, sha256))
                elif task_category == "url":
                    list_of_files.append(("", url, ""))

        elif task_category in ("sample", "quarantine", "static", "pcap"):
            list_of_files, details = process_new_task_files(
                request, samples, details, opt_filename, unique)

        elif task_category == "resubmit":
            for hash in samples:
                paths = []
                # md5/sha1/sha256 lengths -> look up by hash; otherwise the
                # value is treated as a task id.
                if len(hash) in (32, 40, 64):
                    paths = db.sample_path_by_hash(hash)
                else:
                    task_binary = os.path.join(settings.CUCKOO_PATH, "storage",
                                               "analyses", str(hash), "binary")
                    if os.path.exists(task_binary):
                        paths.append(task_binary)
                    else:
                        tmp_paths = db.find_sample(task_id=int(hash))
                        if not tmp_paths:
                            details["errors"].append(
                                {hash: "Task not found for resubmission"})
                            continue
                        for tmp_sample in tmp_paths:
                            path = False
                            tmp_dict = tmp_sample.to_dict()
                            if os.path.exists(tmp_dict.get("target", "")):
                                path = tmp_dict["target"]
                            else:
                                # Fall back to the stored binary by sha256.
                                tmp_tasks = db.find_sample(
                                    sample_id=tmp_dict["sample_id"])
                                for tmp_task in tmp_tasks:
                                    tmp_path = os.path.join(
                                        settings.CUCKOO_PATH, "storage",
                                        "binaries",
                                        tmp_task.to_dict()["sha256"])
                                    if os.path.exists(tmp_path):
                                        path = tmp_path
                                        break
                            if path:
                                paths.append(path)
                if not paths:
                    details["errors"].append(
                        {hash: "File not found on hdd for resubmission"})
                    continue

                content = get_file_content(paths)
                if not content:
                    details["errors"].append(
                        {hash: f"Can't find {hash} on disk"})
                    continue
                folder = os.path.join(settings.TEMP_PATH, "cape-resubmit")
                if not os.path.exists(folder):
                    os.makedirs(folder)
                base_dir = tempfile.mkdtemp(prefix="resubmit_", dir=folder)
                if opt_filename:
                    filename = base_dir + "/" + opt_filename
                else:
                    filename = base_dir + "/" + sanitize_filename(hash)
                path = store_temp_file(content, filename)
                list_of_files.append((content, path, hash))

        # Hack for resubmit first find all files and then put task as proper category
        if job_category and job_category in ("resubmit", "sample",
                                             "quarantine", "static", "pcap",
                                             "dlnexec", "vtdl"):
            task_category = job_category

        if task_category == "resubmit":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    # Bug fix: key the error by this iteration's `path`;
                    # `filename` was a stale leftover from the hash loop above
                    # (and may even be unbound when resubmitting by category).
                    details["errors"].append(
                        {os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp
                    if web_conf.general.get("existent_tasks", False):
                        # NOTE(review): `hash` here is the last value from the
                        # earlier resubmit loop, not this tuple's sha256 —
                        # preserved as-is; verify against upstream intent.
                        records = perform_search("target_sha256",
                                                 hash,
                                                 search_limit=5)
                        for record in records or []:
                            existent_tasks.setdefault(
                                record["target"]["file"]["sha256"],
                                []).append(record)

        elif task_category == "sample":
            details["service"] = "WebGUI"
            for content, path, sha256 in list_of_files:
                if web_conf.pre_script.enabled and "pre_script" in request.FILES:
                    pre_script = request.FILES["pre_script"]
                    details["pre_script_name"] = request.FILES[
                        "pre_script"].name
                    details["pre_script_content"] = pre_script.read()

                if web_conf.during_script.enabled and "during_script" in request.FILES:
                    during_script = request.FILES["during_script"]
                    details["during_script_name"] = request.FILES[
                        "during_script"].name
                    details["during_script_content"] = during_script.read()

                if timeout and web_conf.public.enabled and web_conf.public.timeout and timeout > web_conf.public.timeout:
                    timeout = web_conf.public.timeout

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append(
                        {os.path.basename(path): task_ids_tmp})
                else:
                    if web_conf.general.get("existent_tasks", False):
                        records = perform_search("target_sha256",
                                                 sha256,
                                                 search_limit=5)
                        for record in records:
                            if record.get("target").get("file",
                                                        {}).get("sha256"):
                                existent_tasks.setdefault(
                                    record["target"]["file"]["sha256"],
                                    []).append(record)
                    details["task_ids"] = task_ids_tmp

        elif task_category == "quarantine":
            for content, tmp_path, sha256 in list_of_files:
                path = unquarantine(tmp_path)
                try:
                    os.remove(tmp_path)
                except Exception as e:
                    print(e)

                if not path:
                    details["errors"].append({
                        os.path.basename(path):
                        "You uploaded an unsupported quarantine file."
                    })
                    continue

                details["path"] = path
                details["content"] = content
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append(
                        {os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp

        elif task_category == "static":
            for content, path, sha256 in list_of_files:
                task_id = db.add_static(file_path=path,
                                        priority=priority,
                                        tlp=tlp,
                                        user_id=request.user.id or 0)
                if not task_id:
                    return render(
                        request, "error.html",
                        {"error": "We don't have static extractor for this"})
                details["task_ids"] += task_id

        elif task_category == "pcap":
            for content, path, sha256 in list_of_files:
                # NOTE(review): bytes suffix suggests `path` is a bytes path
                # here — confirm against store_temp_file's return type.
                if path.lower().endswith(b".saz"):
                    saz = saz_to_pcap(path)
                    if saz:
                        try:
                            os.remove(path)
                        except Exception as e:
                            pass
                        path = saz
                    else:
                        details["errors"].append({
                            os.path.basename(path):
                            "Conversion from SAZ to PCAP failed."
                        })
                        continue

                task_id = db.add_pcap(file_path=path,
                                      priority=priority,
                                      tlp=tlp,
                                      user_id=request.user.id or 0)
                if task_id:
                    details["task_ids"].append(task_id)

        elif task_category == "url":
            for _, url, _ in list_of_files:
                if machine.lower() == "all":
                    machines = [
                        vm.name for vm in db.list_machines(platform=platform)
                    ]
                elif machine:
                    machine_details = db.view_machine(machine)
                    if platform and hasattr(
                            machine_details, "platform"
                    ) and not machine_details.platform == platform:
                        details["errors"].append({
                            os.path.basename(url):
                            f"Wrong platform, {machine_details.platform} VM selected for {platform} sample"
                        })
                        continue
                    else:
                        machines = [machine]

                else:
                    # None means the scheduler picks any available machine.
                    machines = [None]
                for entry in machines:
                    task_id = db.add_url(
                        url=url,
                        package=package,
                        timeout=timeout,
                        priority=priority,
                        options=options,
                        machine=entry,
                        platform=platform,
                        tags=tags,
                        custom=custom,
                        memory=memory,
                        enforce_timeout=enforce_timeout,
                        clock=clock,
                        shrike_url=shrike_url,
                        shrike_msg=shrike_msg,
                        shrike_sid=shrike_sid,
                        shrike_refer=shrike_refer,
                        route=route,
                        cape=cape,
                        tags_tasks=tags_tasks,
                        user_id=request.user.id or 0,
                    )
                    details["task_ids"].append(task_id)

        elif task_category == "dlnexec":
            for content, path, sha256 in list_of_files:
                details["path"] = path
                details["content"] = content
                details["service"] = "DLnExec"
                details["source_url"] = samples
                status, task_ids_tmp = download_file(**details)
                if status == "error":
                    details["errors"].append(
                        {os.path.basename(path): task_ids_tmp})
                else:
                    details["task_ids"] = task_ids_tmp

        elif task_category == "vtdl":
            if not settings.VTDL_KEY or not settings.VTDL_PATH:
                return render(
                    request,
                    "error.html",
                    {
                        "error":
                        "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable and VTDL_PATH base directory"
                    },
                )
            else:
                if opt_apikey:
                    details["apikey"] = opt_apikey
                details = download_from_vt(samples, details, opt_filename,
                                           settings)

        if details.get("task_ids"):
            tasks_count = len(details["task_ids"])
        else:
            tasks_count = 0
        if tasks_count > 0:
            data = {
                "tasks": details["task_ids"],
                "tasks_count": tasks_count,
                "errors": details["errors"],
                "existent_tasks": existent_tasks,
                "remote_console": remote_console,
            }
            return render(request, "submission/complete.html", data)
        else:
            return render(
                request, "error.html", {
                    "error": "Error adding task(s) to CAPE's database.",
                    "errors": details["errors"]
                })
    else:
        # GET: build the configuration dict that drives the submission form.
        enabledconf = {}
        enabledconf["vt"] = settings.VTDL_ENABLED
        enabledconf["kernel"] = settings.OPT_ZER0M0N
        enabledconf["memory"] = processing.memory.get("enabled")
        enabledconf["procmemory"] = processing.procmemory.get("enabled")
        enabledconf["dlnexec"] = settings.DLNEXEC
        enabledconf["url_analysis"] = settings.URL_ANALYSIS
        enabledconf["tags"] = False
        enabledconf[
            "dist_master_storage_only"] = repconf.distributed.master_storage_only
        enabledconf["linux_on_gui"] = web_conf.linux.enabled
        enabledconf["tlp"] = web_conf.tlp.enabled
        enabledconf["timeout"] = cfg.timeouts.default
        enabledconf["amsidump"] = web_conf.amsidump.enabled
        enabledconf["pre_script"] = web_conf.pre_script.enabled
        enabledconf["during_script"] = web_conf.during_script.enabled

        if all_vms_tags:
            enabledconf["tags"] = True

        if not enabledconf["tags"]:
            # load multi machinery tags:
            # Get enabled machinery
            machinery = cfg.cuckoo.get("machinery")
            if machinery == "multi":
                for mmachinery in Config(machinery).multi.get(
                        "machinery").split(","):
                    vms = [
                        x.strip() for x in getattr(Config(
                            mmachinery), mmachinery).get("machines").split(",")
                        if x.strip()
                    ]
                    if any([
                            "tags"
                            in list(getattr(Config(mmachinery), vmtag).keys())
                            for vmtag in vms
                    ]):
                        enabledconf["tags"] = True
                        break
            else:
                # Get VM names for machinery config elements
                vms = [
                    x.strip() for x in str(
                        getattr(Config(machinery), machinery).get(
                            "machines")).split(",") if x.strip()
                ]
                # Check each VM config element for tags
                if any([
                        "tags"
                        in list(getattr(Config(machinery), vmtag).keys())
                        for vmtag in vms
                ]):
                    enabledconf["tags"] = True

        packages, machines = get_form_data("windows")

        socks5s = _load_socks5_operational()

        socks5s_random = ""
        vpn_random = ""

        if routing.socks5.random_socks5 and socks5s:
            # Bug fix: random.choice() needs a sequence; a dict view raises
            # TypeError on Python 3 (the sibling branches below already
            # wrap in list()).
            socks5s_random = random.choice(list(socks5s.values())).get(
                "name", False)

        if routing.vpn.random_vpn:
            vpn_random = random.choice(list(vpns.values())).get("name", False)

        if socks5s:
            socks5s_random = random.choice(list(socks5s.values())).get(
                "name", False)

        random_route = False
        if vpn_random and socks5s_random:
            random_route = random.choice((vpn_random, socks5s_random))
        elif vpn_random:
            random_route = vpn_random
        elif socks5s_random:
            random_route = socks5s_random

        existent_tasks = {}
        if resubmit_hash:
            if web_conf.general.get("existent_tasks", False):
                records = perform_search("target_sha256",
                                         resubmit_hash,
                                         search_limit=5)
                for record in records:
                    existent_tasks.setdefault(
                        record["target"]["file"]["sha256"], list())
                    existent_tasks[record["target"]["file"]["sha256"]].append(
                        record)

        return render(
            request,
            "submission/index.html",
            {
                "packages": sorted(packages),
                "machines": machines,
                "vpns": list(vpns.values()),
                "random_route": random_route,
                "socks5s": list(socks5s.values()),
                "route": routing.routing.route,
                "internet": routing.routing.internet,
                "inetsim": routing.inetsim.enabled,
                "tor": routing.tor.enabled,
                "config": enabledconf,
                "resubmit": resubmit_hash,
                "tags": sorted(list(set(all_vms_tags))),
                "existent_tasks": existent_tasks,
                "all_exitnodes": all_nodes_exits_list,
            },
        )
Esempio n. 57
0
def tasks_create_file():
    """Bottle API endpoint: create analysis task(s) from an uploaded file.

    Reads the multipart upload plus form parameters, stores the file to a
    temp path, then either registers it as a PCAP task (``pcap`` form flag,
    with SAZ->PCAP conversion for Fiddler archives) or demuxes it into one
    or more regular analysis tasks.

    :return: JSON body with ``task_ids`` (or an error object).
    """
    response = {}

    data = request.files.file
    pcap = request.POST.get("pcap", "")
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    tags = request.forms.get("tags", None)
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", "False")
    clock = request.forms.get("clock", datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
    # Guard against missing or epoch-zero ("1970") clocks from clients.
    if clock is False or clock is None:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    if "1970" in clock:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    shrike_url = request.forms.get("shrike_url", None)
    shrike_msg = request.forms.get("shrike_msg", None)
    shrike_sid = request.forms.get("shrike_sid", None)
    shrike_refer = request.forms.get("shrike_refer", None)
    static = bool(request.POST.get("static", False))
    unique = bool(request.forms.get("unique", False))
    # Form values arrive as strings; normalize to real booleans.
    if memory.upper() == "FALSE" or memory == "0":
        memory = False
    else:
        memory = True

    enforce_timeout = request.forms.get("enforce_timeout", "False")
    if enforce_timeout.upper() == "FALSE" or enforce_timeout == "0":
        enforce_timeout = False
    else:
        enforce_timeout = True

    temp_file_path = store_temp_file(data.file.read(), data.filename)

    # Reject duplicates by sha256 when the client asked for unique submission.
    if unique and db.check_file_uniq(File(temp_file_path).get_sha256()):
        resp = {"error": True, "error_value": "Duplicated file, disable unique option to force submission"}
        return jsonize(resp)

    if pcap:
        if data.filename.lower().endswith(".saz"):
            # Fiddler .saz archives must be converted to PCAP first.
            saz = saz_to_pcap(temp_file_path)
            if saz:
                path = saz
                try:
                    os.remove(temp_file_path)
                except:
                    pass
            else:
                # Bug fix: the conversion direction in the message was
                # reversed (it read "PCAP to SAZ").
                resp = {"error": True, "error_value": "Failed to convert SAZ to PCAP"}
                return jsonize(resp)
        else:
            path = temp_file_path
        task_id = db.add_pcap(file_path=path)
        task_ids = [task_id]
    else:

        try:
            task_ids = db.demux_sample_and_add_to_db(
                file_path=temp_file_path,
                package=package,
                timeout=timeout,
                options=options,
                priority=priority,
                machine=machine,
                platform=platform,
                custom=custom,
                memory=memory,
                enforce_timeout=enforce_timeout,
                tags=tags,
                clock=clock,
                shrike_url=shrike_url,
                shrike_msg=shrike_msg,
                shrike_sid=shrike_sid,
                shrike_refer=shrike_refer,
                static=static,
            )
        except CuckooDemuxError as e:
            return HTTPError(500, e)

    response["task_ids"] = task_ids
    return jsonize(response)
Esempio n. 58
0
def index(request):
    """Render the submission form (GET) or create analysis tasks (POST).

    A POST may carry one of three submission sources, checked in order:
    uploaded file(s) ("sample"), a URL ("url"), or VirusTotal hash(es)
    ("vtdl", only when enabled in settings).  On success the completion
    page listing the new task ids is rendered; on any failure error.html
    is rendered with a human-readable message.
    """
    if request.method == "POST":
        package = request.POST.get("package", "")
        # Clamp the analysis timeout to at most 24 hours.
        timeout = min(force_int(request.POST.get("timeout")), 60 * 60 * 24)
        options = request.POST.get("options", "")
        priority = force_int(request.POST.get("priority"))
        machine = request.POST.get("machine", "")
        gateway = request.POST.get("gateway", None)
        clock = request.POST.get("clock", None)
        custom = request.POST.get("custom", "")
        # Checkbox semantics: the field is present (non-empty) only when
        # the box is ticked, so plain truthiness is the intended test.
        memory = bool(request.POST.get("memory", False))
        enforce_timeout = bool(request.POST.get("enforce_timeout", False))

        tags = request.POST.get("tags", None)

        # Map the boolean form toggles onto analyzer option flags,
        # comma-separating them from any options already supplied.
        for field, flag in (("free", "free=yes"),
                            ("nohuman", "nohuman=yes"),
                            ("tor", "tor=yes"),
                            ("process_memory", "procmemdump=yes"),
                            ("kernel_analysis", "kernel_analysis=yes")):
            if request.POST.get(field):
                if options:
                    options += ","
                options += flag

        if gateway and gateway in settings.GATEWAYS:
            # A comma-separated gateway entry means "pick one at random".
            if "," in settings.GATEWAYS[gateway]:
                tgateway = random.choice(settings.GATEWAYS[gateway].split(","))
                ngateway = settings.GATEWAYS[tgateway]
            else:
                ngateway = settings.GATEWAYS[gateway]
            if options:
                options += ","
            options += "setgw=%s" % (ngateway)

        db = Database()
        task_ids = []
        task_machines = []

        # "all" fans the submission out to every registered machine;
        # otherwise target the single selected machine ("" = first free).
        if machine.lower() == "all":
            for entry in db.list_machines():
                task_machines.append(entry.label)
        else:
            task_machines.append(machine)

        if "sample" in request.FILES:
            for sample in request.FILES.getlist("sample"):
                if sample.size == 0:
                    return render_to_response("error.html",
                                              {"error": "You uploaded an empty file."},
                                              context_instance=RequestContext(request))
                elif sample.size > settings.MAX_UPLOAD_SIZE:
                    return render_to_response("error.html",
                                              {"error": "You uploaded a file that exceeds the maximum allowed upload size."},
                                              context_instance=RequestContext(request))

                # Moving sample from django temporary file to Cuckoo temporary
                # storage to let it persist between reboot (if user like to
                # configure it in that way).
                path = store_temp_file(sample.read(), sample.name)

                for entry in task_machines:
                    task_ids_new = db.demux_sample_and_add_to_db(
                        file_path=path, package=package, timeout=timeout,
                        options=options, priority=priority, machine=entry,
                        custom=custom, memory=memory,
                        enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                    task_ids.extend(task_ids_new)
        elif "url" in request.POST and request.POST.get("url").strip():
            # The elif condition already guarantees a non-empty URL, so no
            # further emptiness check is needed here.
            url = request.POST.get("url").strip()

            for entry in task_machines:
                task_id = db.add_url(url=url,
                                     package=package,
                                     timeout=timeout,
                                     options=options,
                                     priority=priority,
                                     machine=entry,
                                     custom=custom,
                                     memory=memory,
                                     enforce_timeout=enforce_timeout,
                                     tags=tags,
                                     clock=clock)
                if task_id:
                    task_ids.append(task_id)
        elif settings.VTDL_ENABLED and "vtdl" in request.POST:
            vtdl = request.POST.get("vtdl").strip()
            if (not settings.VTDL_PRIV_KEY and not settings.VTDL_INTEL_KEY) or not settings.VTDL_PATH:
                return render_to_response("error.html",
                                          {"error": "You specified VirusTotal but must edit the file and specify your VTDL_PRIV_KEY or VTDL_INTEL_KEY variable and VTDL_PATH base directory"},
                                          context_instance=RequestContext(request))
            else:
                base_dir = tempfile.mkdtemp(prefix='cuckoovtdl', dir=settings.VTDL_PATH)
                # Accept either a single hash or a comma-separated list.
                if "," in vtdl:
                    hashlist = vtdl.split(",")
                else:
                    hashlist = [vtdl]
                onesuccess = False

                for h in hashlist:
                    filename = os.path.join(base_dir, h)
                    # A private API key uses the regular download endpoint;
                    # otherwise fall back to the Intelligence endpoint.
                    if settings.VTDL_PRIV_KEY:
                        url = 'https://www.virustotal.com/vtapi/v2/file/download'
                        params = {'apikey': settings.VTDL_PRIV_KEY, 'hash': h}
                    else:
                        url = 'https://www.virustotal.com/intelligence/download/'
                        params = {'apikey': settings.VTDL_INTEL_KEY, 'hash': h}

                    try:
                        r = requests.get(url, params=params, verify=True)
                    except requests.exceptions.RequestException as e:
                        return render_to_response("error.html",
                                              {"error": "Error completing connection to VirusTotal: {0}".format(e)},
                                              context_instance=RequestContext(request))
                    if r.status_code == 200:
                        try:
                            # "with" guarantees the handle is closed even if
                            # the write fails partway through.
                            with open(filename, 'wb') as f:
                                f.write(r.content)
                        except (IOError, OSError):
                            return render_to_response("error.html",
                                              {"error": "Error writing VirusTotal download file to temporary path"},
                                              context_instance=RequestContext(request))

                        onesuccess = True

                        for entry in task_machines:
                            task_ids_new = db.demux_sample_and_add_to_db(
                                file_path=filename, package=package, timeout=timeout,
                                options=options, priority=priority, machine=entry,
                                custom=custom, memory=memory,
                                enforce_timeout=enforce_timeout, tags=tags, clock=clock)
                            task_ids.extend(task_ids_new)
                    elif r.status_code == 403:
                        # A 403 means the key itself is bad, so abort the
                        # whole batch rather than retrying remaining hashes.
                        return render_to_response("error.html",
                                                  {"error": "API key provided is not a valid VirusTotal key or is not authorized for VirusTotal downloads"},
                                                  context_instance=RequestContext(request))

                if not onesuccess:
                    return render_to_response("error.html",
                                              {"error": "Provided hash not found on VirusTotal"},
                                              context_instance=RequestContext(request))

        tasks_count = len(task_ids)
        if tasks_count > 0:
            return render_to_response("submission/complete.html",
                                      {"tasks": task_ids,
                                       "tasks_count": tasks_count},
                                      context_instance=RequestContext(request))
        else:
            return render_to_response("error.html",
                                      {"error": "Error adding task to Cuckoo's database."},
                                      context_instance=RequestContext(request))
    else:
        # GET: build the form's choice lists.
        # Analysis packages are derived from the analyzer's package modules.
        files = os.listdir(os.path.join(settings.CUCKOO_PATH, "analyzer",
                                        "windows", "modules", "packages"))

        packages = []
        for name in files:
            name = os.path.splitext(name)[0]
            if name == "__init__":
                continue

            packages.append(name)

        # Prepare a list of VM names, description label based on tags.
        machines = []
        for machine in Database().list_machines():
            tags = []
            for tag in machine.tags:
                tags.append(tag.name)

            if tags:
                label = machine.label + ": " + ", ".join(tags)
            else:
                label = machine.label

            machines.append((machine.label, label))

        # Prepend ALL/ANY options.
        machines.insert(0, ("", "First available"))
        machines.insert(1, ("all", "All"))

        return render_to_response("submission/index.html",
                                  {"packages": sorted(packages),
                                   "machines": machines,
                                   "gateways": settings.GATEWAYS,
                                   "vtdlenabled": settings.VTDL_ENABLED},
                                  context_instance=RequestContext(request))