Example #1
0
def group_delete_url():
    """Remove URLs from a URL group, or clear the group entirely.

    Form fields:
        urls: separator-delimited list of URLs to remove.
        group_name / group_id: the target group (at least one is required).
        delall: when set, clear all URLs from the group.
        separator: delimiter used to split the 'urls' field (default newline).
    """
    urls = request.form.get("urls", "")
    name = request.form.get("group_name", "")
    group_id = request.form.get("group_id")
    # NOTE(review): form values arrive as strings, so any non-empty value
    # (even "false") counts as truthy for delall -- confirm intended.
    delall = request.form.get("delall", False)
    separator = request.form.get("separator", "\n")

    if not group_id and not name:
        return json_error(400, "No valid group name or id specified")

    if group_id:
        if not group_id.isdigit():
            return json_error(400, "group_id must be an integer")

        group_id = int(group_id)

    group = db.find_group(name=name, group_id=group_id)
    if not group:
        return json_error(404, "Specified group does not exist")

    # BUG FIX: filter(None, ...) returns a lazy (always-truthy) iterator on
    # Python 3, which made the emptiness check below dead. A list
    # comprehension behaves identically on Python 2 and correctly on 3.
    urls = [url.strip() for url in urls.split(separator) if url.strip()]
    if not urls and not delall:
        return json_error(400, "No URLs specified")

    if db.delete_url_from_group(urls, group.id, clearall=delall):
        return jsonify(message="success")

    return json_error(500, "Error removing URLs from group")
Example #2
0
def group_bulk_url(group_id):
    """Add URLs from an uploaded text/plain file to an existing group.

    The uploaded file (multipart field 'urls') is split on the 'separator'
    query parameter (default newline); entries longer than 2048 characters
    and empty entries are dropped.
    """
    group = db.find_group(group_id=group_id)
    separator = request.args.get("separator", "\n")

    if not group:
        return json_error(404, "Group does not exist")

    urldata = request.files.get("urls")
    if not urldata:
        return json_error(404, "URLs not provided")

    # BUG FIX: ("text/plain") is a plain string, not a 1-tuple, so the
    # original 'in' test was a substring check (e.g. "text/pla" passed).
    # Compare the mimetype directly instead.
    if urldata.mimetype != "text/plain":
        return json_error(
            400, "URLs file can only be text/plain. Not %r" % urldata.mimetype
        )

    urls = []
    try:
        urls = urldata.read().split(separator)
        # Materialize as a list (not a lazy filter object) so that
        # len(urls) in the response below works on Python 3 as well.
        urls = [
            url.strip() for url in urls
            if len(url) <= 2048 and url.strip()
        ]
    except UnicodeDecodeError:
        return json_error(400, "Invalid URLs file provided")

    db.mass_group_add(urls, group_id=group_id)
    return jsonify(
        message="Added %d URLs to group %s" % (len(urls), group.name)
    )
Example #3
0
def schedule_group(group_id):
    """Set, trigger, or clear the schedule of a URL group.

    Behavior depends on the 'schedule' form field:
        empty/absent: remove any existing schedule.
        "now": run the group approximately 10 seconds from now.
        anything else: validated and stored as a recurring schedule.
    """
    schedule = request.form.get("schedule")

    group = db.find_group(group_id=group_id)
    if not group:
        return json_error(404, "Group does not exist")

    if not group.profiles:
        return json_error(400, "Group has no profiles. Cannot be scheduled")

    # An empty schedule value means: clear the current schedule.
    if not schedule:
        db.remove_schedule(group_id)
        return jsonify(message="OK")

    if not db.find_urls_group(group_id=group.id, limit=1):
        return json_error(400, "Group has no URLs")

    if schedule == "now":
        if not group.completed:
            return json_error(400, "Group is already pending or running")

        # Small delay so the scheduler has time to pick the group up.
        schedule_next = datetime.datetime.utcnow() + \
                        datetime.timedelta(seconds=10)
        db.set_schedule_next(group_id, schedule_next)
        return jsonify(message="Scheduled at %s" % schedule_next)

    # Validate the user-supplied schedule string before storing it.
    try:
        schedutil.schedule_time_next(schedule)
    except ValueError as e:
        # BUG FIX: a malformed client-supplied schedule is a client error,
        # not a server error; was json_error(500, ...).
        return json_error(400, str(e))

    db.add_schedule(group_id, schedule)
    return jsonify(message="OK")
Example #4
0
def view_group(group_id=None, name=None):
    """Return the JSON representation of a single URL group.

    The 'details' query parameter (0 or 1) controls whether the extra
    counters urlcount, unread, and highalert are included.
    """
    if not group_id and not name:
        return json_error(400, "No group_id or name specified to view")

    try:
        details = int(request.args.get("details", 0))
    except ValueError:
        return json_error(400, "Invalid value for 'details'. Can be 0 or 1")

    group = db.find_group(name=name, group_id=group_id, details=True)
    if not group:
        return json_error(404, "Group not found")

    # Plain view unless extra details were explicitly requested.
    if not details:
        return jsonify(group.to_dict())

    return jsonify(group.to_dict(
        additional=["urlcount", "unread", "highalert"]
    ))
Example #5
0
def view_group_urls(group_id=None, name=None):
    """List the URLs belonging to a group, with paging support.

    Query parameters 'limit' (default 1000) and 'offset' (default 0)
    must be non-negative integers.
    """
    if not group_id and not name:
        return json_error(400, "No group_id or name specified to view")

    # Validate both paging parameters with one shared routine.
    paging = {}
    for field, default in (("limit", "1000"), ("offset", "0")):
        value = request.args.get(field, default)
        if not value.isdigit():
            return json_error(400, "Invalid %s" % field)
        paging[field] = int(value)

    group = db.find_group(name=name, group_id=group_id)
    if not group:
        return json_error(404, "Specified group does not exist")

    urls = db.find_urls_group(
        group.id, limit=paging["limit"], offset=paging["offset"],
        include_id=True
    )

    return jsonify(name=group.name, group_id=group.id, urls=urls)
Example #6
0
def insert_group_tasks(group):
    """Create analysis tasks for every URL in a group, one batch per
    profile, and mark the group as pending for its next run.

    NOTE(review): the first log line indexes group["name"] while the next
    line reads group.id on the same object -- the argument apparently
    supports both mapping and attribute access, or one of the two is
    wrong. Confirm against the caller.
    """
    log.debug("Creating group tasks for %r", group["name"])
    # Re-fetch the group and its complete URL list from the database.
    group = massurldb.find_group(group_id=group.id)
    urls = massurldb.find_urls_group(group.id, limit=None)
    run = group.run + 1

    # Collect URLGroupTask mappings for a single bulk insert below.
    groupid_task = []
    for profile in group.profiles:
        log.info(
            "Creating tasks for group %r with profile %r",
            group.name, profile.name
        )
        args = generate_task_args(profile, group)
        for task_id in create_parallel_tasks(urls, group.max_parallel, **args):
            groupid_task.append({
                "url_group_id": group.id,
                "task_id": task_id,
                "run": run
            })

    if groupid_task:
        s = db.Session()
        try:
            # Reset the group state for the new run before linking tasks.
            s.query(URLGroup).filter(URLGroup.id==group.id).update({
                "completed": False,
                "schedule_next": None,
                "status": "pending",
                "run": run
            })
            s.bulk_insert_mappings(URLGroupTask, groupid_task)
            s.commit()
            # NOTE(review): no explicit rollback on failure -- an exception
            # between update() and commit() propagates with the session
            # merely closed. Confirm this is the intended error handling.
        finally:
            s.close()

        log.debug(
            "Created %s new tasks for group %s", len(groupid_task), group.name
        )
Example #7
0
    def start(self):
        """Run the exploit-kit detection end-to-end test.

        Schedules a one-URL group pointing at a locally served RIG EK
        landing page and passes only if at least one level 3 alert exists
        once the group completes. Requires a machine tagged with all of
        windows7/ie11/flash2000228.
        """
        log.debug("Using group with name '%s'", self.groupname)
        # Map of machine tag name -> tag id for all configured machines.
        tags = {t.name: t.id for t in db.db.list_tags()}
        profile_tags = ["windows7", "ie11", "flash2000228"]
        for t in profile_tags:
            if t not in tags:
                return self.markfail(
                    "Missing machine tag '%s'. There much be a machine with the"
                    " machine tags %s. This is required to run the EK detection"
                    " test " % (t, profile_tags),
                    fix="Create/add a Windows 7 VM with Internet Explorer 11"
                    " and Flash 20.0.0.228 and add the tags: %s to this"
                    " machine in the machinery config." % profile_tags)

        # Fresh group with a small batch so the single URL runs immediately.
        self.group_id = db.add_group(self.groupname, rand_string(20))
        db.update_settings_group(group_id=self.group_id,
                                 batch_time=60,
                                 threshold=10,
                                 batch_size=1)

        # Sanity-check that the test webserver actually serves the page
        # before handing the URL to the analysis pipeline.
        rigekurl = "%s/rigekexploit.html" % settings.webserver
        try:
            urllib2.urlopen(rigekurl).read()
        except Exception as e:
            log.exception("Error: %s", e)
            return self.markfail(
                "Failed to perform GET request to URL '%s'" % rigekurl,
                fix="Use the mini webserver in the 'data' folder to serve "
                "the files in the 'data' folder. Make sure the current "
                "server is allowed to connect to it.")

        db.mass_group_add([rigekurl], group_id=self.group_id)
        # Profile targeting the IE11 + Flash machine via the required tags.
        profile_id = db.add_profile(
            name=rand_string(12),
            browser="ie",
            route="internet",
            tags=[tid for n, tid in tags.iteritems() if n in profile_tags])
        db.update_profile_group([profile_id], self.group_id)
        # Schedule the group to run immediately.
        db.set_schedule_next(group_id=self.group_id,
                             next_datetime=datetime.datetime.utcnow())

        log.debug(
            "Scheduled group. Waiting at least 50 seconds before checking")
        time.sleep(50)
        # Poll until the analysis run finishes. NOTE(review): no timeout --
        # this loops forever if the group never reaches "completed".
        while True:
            group = db.find_group(group_id=self.group_id)
            if group.status == "completed":
                break
            log.debug("Group status still '%s'. Waiting..", group.status)
            time.sleep(5)

        # The exploit page should have raised at least one level 3 alert.
        alerts = db.list_alerts(level=3, url_group_name=self.groupname)
        if alerts:
            return self.markpass()
        else:
            return self.markfail(
                "Group has completed, but no level 3 alerts exist. One level 3"
                "alert was expected.",
                fix="Make sure the analysis VM has internet explorer 11 with "
                "Flash 20.0.0.228 installed using VMCloak. "
                "Verify the analysis logs to see if Onemon was loaded. "
                "It might happen an exploit does not trigger. It is "
                "advised to run this test at least more than once in case "
                "of a fail")
Example #8
0
    def start(self):
        """Run the benign-URL end-to-end test.

        Schedules a group of well-known benign URLs and passes only if:
        a fresh URL diary exists for each URL, at least one URL had HTTP
        requests extracted, and no realtime signatures or level 3 alerts
        were triggered. Requires a machine tagged windows7/ie11.
        """
        log.debug("Using group with name '%s'", self.groupname)
        # Map of machine tag name -> tag id for all configured machines.
        tags = {t.name: t.id for t in db.db.list_tags()}
        profile_tags = ["windows7", "ie11"]
        for t in profile_tags:
            if t not in tags:
                return self.markfail(
                    "Missing machine tag '%s'. There much be a machine with the"
                    " machine tags %s. This is required to run the EK detection"
                    " test " % (t, profile_tags),
                    fix="Create/add a Windows 7 VM with Internet Explorer 11"
                    "  and add the tags: %s to this machine in the"
                    " machinery config." % profile_tags)

        # Fresh group; batch of 5 so all URLs are analyzed in one run.
        self.group_id = db.add_group(self.groupname, rand_string(20))
        db.update_settings_group(group_id=self.group_id,
                                 batch_time=30,
                                 threshold=10,
                                 batch_size=5)

        # Well-known benign sites: diaries should be created, but no
        # signatures or alerts should fire.
        urls = [
            "http://facebook.com", "http://baidu.com", "http://wikipedia.org",
            "http://qq.com", "http://taobao.com"
        ]
        db.mass_group_add(urls, group_id=self.group_id)
        # Profile targeting the IE11 machine via the required tags.
        profile_id = db.add_profile(
            name=rand_string(12),
            browser="ie",
            route="internet",
            tags=[tid for n, tid in tags.iteritems() if n in profile_tags])
        db.update_profile_group([profile_id], self.group_id)
        # Schedule the group to run immediately.
        db.set_schedule_next(group_id=self.group_id,
                             next_datetime=datetime.datetime.utcnow())

        # Millisecond timestamp used below to require that each diary was
        # created during this run, not by an earlier test.
        beforefinish = int(time.time() * 1000)
        log.debug(
            "Scheduled group. Waiting at least 30 seconds before checking")
        time.sleep(30)
        # Poll until the analysis run finishes. NOTE(review): no timeout --
        # this loops forever if the group never reaches "completed".
        while True:
            group = db.find_group(group_id=self.group_id)
            if group.status == "completed":
                break

            log.debug("Group status still '%s'. Waiting..", group.status)
            time.sleep(5)

        requests_extracted = False
        for url in urls:
            url = URL(url)
            diary = URLDiaries.get_latest_diary(
                url.get_sha256(),
                return_fields=["datetime", "requested_urls", "signatures"])
            # The diary must exist and postdate this run's start.
            if not diary or not diary.get("datetime", 0) > beforefinish:
                return self.markfail("No new URL diary was created for %s" %
                                     url)

            # NOTE(review): diary.get("requested_urls") / .get("signatures")
            # raise TypeError on len(None) if a field is absent -- the
            # return_fields above presumably guarantees their presence.
            if len(diary.get("requested_urls")):
                requests_extracted = True

            if len(diary.get("signatures")):
                return self.markfail(
                    "One ore more realtime signature were triggered. This "
                    "should not happen for these submitted URLs '%s'. Check "
                    "the result." % urls)

        if not requests_extracted:
            return self.markfail(
                "No HTTP requests were extracted for any of the analyzed URLs",
                fix="Is internet routing enabled and Cuckoo rooter running? "
                "Verify if onemon.pb exists in the logs directory to "
                "see if onemon collected any events.")

        # Benign URLs must not raise any level 3 alerts.
        alerts = db.list_alerts(level=3, url_group_name=self.groupname)
        if alerts:
            return self.markfail("One or more level 3 alerts was triggered. "
                                 "No level 3 alerts should have been sent.")

        return self.markpass()