Example 1
def what_to_improve_epgs(country: str, organization_type: str, issue_type: str,
                         policy):
    scans = EndpointGenericScan.objects.all().filter(
        type=issue_type,
        is_the_latest_scan=True,
        comply_or_explain_is_explained=False,
        rating__in=policy["high"] + policy["medium"],
        endpoint__is_dead=False,
        endpoint__url__is_dead=False,
        endpoint__url__not_resolvable=False,
        endpoint__url__organization__country=country,
        endpoint__url__organization__type=organization_type,
    )[0:500]

    return [
        {
            # "organization_id": scan.endpoint.url.organization.pk,
            # "organization_name": scan.endpoint.url.organization.name,
            "url_url": scan.endpoint.url.url,
            "severity": get_impact(get_severity(scan)),
            "last_scan_moment": scan.last_scan_moment,
            "rating_determined_on": scan.rating_determined_on,
        } for scan in scans
        if get_impact(get_severity(scan)) in ["high", "medium"]
    ]
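
A minimal sketch of how this helper could be called. The policy argument appears to map impact levels to lists of rating values that count for them; the values below and the issue_type are illustrative assumptions, not taken from the examples.

# Hypothetical policy: which raw rating values count as "high" and "medium".
policy = {
    "high": ["F", "E"],
    "medium": ["D", "C"],
}

# Returns up to 500 entries with url, impact and scan moments for the latest, unexplained scans.
todo_list = what_to_improve_epgs(
    country="NL",
    organization_type="municipality",
    issue_type="tls_qualys_encryption_quality",  # example scan type, assumed
    policy=policy,
)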
Example 2
def get_explanation(type, scan):
    calculation = get_severity(scan)

    now = timezone.now().isoformat()
    explained_on = scan.comply_or_explain_explained_on.isoformat() if scan.comply_or_explain_explained_on else now

    this_explain = {
        "scan_type": scan.type,
        "explanation": scan.comply_or_explain_explanation,
        "explained_by": scan.comply_or_explain_explained_by,
        "explained_on": explained_on,
        "valid_until": scan.comply_or_explain_explanation_valid_until.isoformat(),
        "original_severity": "high" if calculation["high"] else "medium" if calculation["medium"] else "low",
        "original_explanation": calculation["explanation"],
    }

    if type == "url":
        this_explain["organizations"] = scan.url.organization.name
        this_explain["subject"] = str(scan.url.url)

    if type == "endpoint":
        this_explain["organizations"] = list(scan.endpoint.url.organization.all().values("id", "name"))
        this_explain["subject"] = str("%s - %s/%s - IPv%s") % (
            scan.endpoint.url,
            scan.endpoint.protocol,
            scan.endpoint.port,
            scan.endpoint.ip_version,
        )

    return this_explain
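
All of these examples consume the dict returned by get_severity. A minimal sketch of the shape they appear to rely on, listing only the keys actually used above; any further keys or value types are assumptions.

# Inferred (not authoritative) shape of a severity calculation:
calculation = {
    "high": 1,             # count of high findings for this scan
    "medium": 0,           # count of medium findings
    "low": 0,              # count of low findings
    "explanation": "...",  # human-readable text; sometimes missing, hence calculation.get("explanation", "")
}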
Example 3
def update_feature(feature, scan):
    # log.debug('Updating feature %s, with scan %s' % (feature['properties']['organization_id'], scan))
    calculation = get_severity(scan)

    feature["properties"]["high"] += calculation["high"]
    feature["properties"]["medium"] += calculation["medium"]
    feature["properties"]["low"] += calculation["low"]

    color = ("red" if feature["properties"]["high"] else
             "orange" if feature["properties"]["medium"] else
             "yellow" if feature["properties"]["low"] else "green")

    feature["properties"]["color"] = color

    return feature
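
The same severity-to-color rule appears in several of these examples (update_feature, make_new_feature, item_title). A stand-alone restatement of it; severity_color is a hypothetical helper name, not part of the code above.

def severity_color(high: int, medium: int, low: int) -> str:
    # Red wins over orange, orange over yellow; green only when nothing was found.
    return "red" if high else "orange" if medium else "yellow" if low else "green"

assert severity_color(1, 5, 0) == "red"      # any high finding dominates
assert severity_color(0, 2, 1) == "orange"
assert severity_color(0, 0, 3) == "yellow"
assert severity_color(0, 0, 0) == "green"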
Example 4
def make_new_feature(organization, scan):
    # log.debug('Making new feature %s, with scan %s' % (organization, scan))

    calculation = get_severity(scan)
    color = ("red"
             if calculation["high"] else "orange" if calculation["medium"] else
             "yellow" if calculation["low"] else "green")

    from websecmap.organizations.models import Coordinate

    # Only one multipoint or multipolygon is used. Unfortunately this costs one query per organization.
    coordinate = Coordinate.objects.all().filter(
        organization=organization).order_by("-created_on").first()

    # Early contests didn't require pinpointing a location; later contests always require one.
    if not coordinate:
        area = ""
        geojsontype = ""
    else:
        area = coordinate.area
        geojsontype = coordinate.geojsontype

    return {
        "type": "Feature",
        "properties": {
            "organization_id": organization.pk,
            "organization_type": organization.type.name,
            "organization_name": organization.name,
            "organization_slug": slugify(organization.name),
            "overall": 0,
            "high": calculation["high"],
            "medium": calculation["medium"],
            "low": calculation["low"],
            "data_from": scan.last_scan_moment,
            "color": color,
            "total_urls": 0,  # = 100%
            "high_urls": 0,
            "medium_urls": 0,
            "low_urls": 0,
            "origin": "make_new_feature",
        },
        "geometry": {
            "type": geojsontype,
            # Sometimes the data is a string, sometimes it's a list. The admin
            # interface might influence this.
            "coordinates": area,
        },
    }
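
A sketch of how make_new_feature and update_feature might be combined into a GeoJSON FeatureCollection, assuming one feature per organization; the grouping and the name build_feature_collection are assumptions.

def build_feature_collection(organizations_with_scans):
    """organizations_with_scans: iterable of (organization, [scan, ...]) pairs."""
    features = []
    for organization, scans in organizations_with_scans:
        if not scans:
            continue
        # The first scan creates the feature, subsequent scans accumulate onto it.
        feature = make_new_feature(organization, scans[0])
        for scan in scans[1:]:
            feature = update_feature(feature, scan)
        features.append(feature)
    return {"type": "FeatureCollection", "features": features}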
Example 5
    def item_title(self, item):
        calculation = get_severity(item)
        if not calculation:
            return ""

        rating = (_("Perfect") if not any(
            [calculation["high"], calculation["medium"], calculation["low"]])
                  else _("High") if calculation["high"] else
                  _("Medium") if calculation["medium"] else _("Low"))

        badge = ("✅" if not any(
            [calculation["high"], calculation["medium"], calculation["low"]])
                 else "🔴" if calculation["high"] else
                 "🔶" if calculation["medium"] else "🍋")

        if item.type in URL_SCAN_TYPES:
            # url generic scan:
            return "%s %s - %s" % (badge, rating, item.url.url)
        else:
            # endpoint scan
            return "%s %s - %s" % (badge, rating, item.endpoint.url.url)
Example 6
def scores(request):

    # todo: this param handling code is absolutely disgusting, it should be more beautiful.
    # todo: should we just get the last contest if there is no contest at all?
    submitted_contest = request.GET.get("contest", "")
    if submitted_contest.isnumeric():
        submitted_contest = int(submitted_contest)
    else:
        submitted_contest = 0

    if submitted_contest > -1:
        try:
            contest = Contest.objects.get(id=submitted_contest)
        except ObjectDoesNotExist:
            contest = get_default_contest(request)
    else:
        contest = get_default_contest(request)

    # remove disqualified teams.
    teams = Team.objects.all().filter(participating_in_contest=contest,
                                      allowed_to_submit_things=True)

    scores = []
    for team in teams:
        """
        Out of simplicity _ALL_ scores are retrieved instead of the last one per URL. Last one-per is not supported
        in Django and therefore requires a lot of code. The deviation is negligible during a contest as not so much
        will change in a day or two. On the long run it might increase the score a bit when incorrect fixes are applied
        or a new error is found. If the discovered issue is fixed it doesn't deliver additional points.
        """
        scans = list(EndpointGenericScan.objects.all().filter(
            endpoint__url__urlsubmission__added_by_team=team.id,
            endpoint__url__urlsubmission__has_been_accepted=True,
            rating_determined_on__lte=contest.until_moment,
            type__in=ENDPOINT_SCAN_TYPES,
        ))

        scans += list(UrlGenericScan.objects.all().filter(
            url__urlsubmission__added_by_team=team.id,
            url__urlsubmission__has_been_accepted=True,
            rating_determined_on__lte=contest.until_moment,
            type__in=URL_SCAN_TYPES,
        ))

        added_urls = (UrlSubmission.objects.all().filter(
            added_by_team=team.id,
            has_been_accepted=True,
            has_been_rejected=False,
        ).count())

        added_organizations = (OrganizationSubmission.objects.all().filter(
            added_by_team=team.id,
            has_been_accepted=True,
            has_been_rejected=False).count())

        rejected_organizations = (OrganizationSubmission.objects.all().filter(
            added_by_team=team.id,
            has_been_accepted=False,
            has_been_rejected=True,
        ).count())

        rejected_urls = (UrlSubmission.objects.all().filter(
            added_by_team=team.id,
            has_been_accepted=False,
            has_been_rejected=True,
        ).count())

        final_calculation = {
            "high": 0,
            "medium": 0,
            "low": 0,
        }

        for scan in scans:
            temp_calculation = get_severity(scan)
            final_calculation["high"] += temp_calculation["high"]
            final_calculation["medium"] += temp_calculation["medium"]
            final_calculation["low"] += temp_calculation["low"]

        score_multiplier = {
            "low": 100,
            "medium": 250,
            "high": 1000,
            "rejected_organization": 1337,
            "rejected_url": 1337,
            "organization": 500,
            "url": 250,
        }

        # Fall back to white when no color was entered or the color control didn't work.
        if team.color:
            color = spectra.html(team.color.upper())
            # nope, deep frying doesn't help us
            # color = color.saturate(100)  # deep fry the color, so something remains even after insane brighten
            color = color.brighten(10)
            color_code = color.hexcode
        else:
            color_code = "#FFFFFF"

        score = {
            "team": team.name,
            "team_color": team.color,
            # transparency makes it lighter and more beautiful.
            "team_color_soft": "%s%s" % (color_code, "33"),
            "high": final_calculation["high"],
            "high_multiplier": score_multiplier["high"],
            "high_score": final_calculation["high"] * score_multiplier["high"],
            "medium": final_calculation["medium"],
            "medium_multiplier": score_multiplier["medium"],
            "medium_score": final_calculation["medium"] * score_multiplier["medium"],
            "low": final_calculation["low"],
            "low_multiplier": score_multiplier["low"],
            "low_score": final_calculation["low"] * score_multiplier["low"],
            "added_organizations": added_organizations,
            "added_organizations_multiplier": score_multiplier["organization"],
            "added_organizations_score": added_organizations * score_multiplier["organization"],
            "added_urls": added_urls,
            "added_urls_multiplier": score_multiplier["url"],
            "added_urls_score": added_urls * score_multiplier["url"],
            "rejected_organizations": rejected_organizations,
            "rejected_organizations_multiplier": score_multiplier["rejected_organization"],
            "rejected_organizations_score": rejected_organizations * score_multiplier["rejected_organization"],
            "rejected_urls": rejected_urls,
            "rejected_urls_multiplier": score_multiplier["rejected_url"],
            "rejected_urls_score": rejected_urls * score_multiplier["rejected_url"],
            "total_score": (
                final_calculation["high"] * score_multiplier["high"]
                + final_calculation["medium"] * score_multiplier["medium"]
                + final_calculation["low"] * score_multiplier["low"]
                + added_organizations * score_multiplier["organization"]
                + added_urls * score_multiplier["url"]
                - (rejected_urls * score_multiplier["rejected_url"]
                   + rejected_organizations * score_multiplier["rejected_organization"])
            ),
        }

        scores.append(score)

    # order the scores from high to low.
    scores = sorted(scores,
                    key=lambda k:
                    (k["total_score"], k["high"], k["medium"], k["low"]),
                    reverse=True)

    return render(
        request,
        "game/scores.html",
        {
            "team": get_team_info(request),
            "scores": scores,
            "contest": contest,
            "menu_selected": "scores"
        },
    )
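
The multipliers and the total_score expression above determine the ranking. A small worked example with made-up counts, using the same multipliers:

score_multiplier = {"low": 100, "medium": 250, "high": 1000,
                    "rejected_organization": 1337, "rejected_url": 1337,
                    "organization": 500, "url": 250}

high, medium, low = 2, 3, 5
added_organizations, added_urls = 1, 4
rejected_organizations, rejected_urls = 0, 1

total_score = (
    high * score_multiplier["high"]                            # 2 * 1000 = 2000
    + medium * score_multiplier["medium"]                      # 3 * 250  = 750
    + low * score_multiplier["low"]                            # 5 * 100  = 500
    + added_organizations * score_multiplier["organization"]   # 1 * 500  = 500
    + added_urls * score_multiplier["url"]                     # 4 * 250  = 1000
    - (rejected_urls * score_multiplier["rejected_url"]        # 1 * 1337
       + rejected_organizations * score_multiplier["rejected_organization"])
)
assert total_score == 3413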
Example 7
def get_all_latest_scans(country, organization_type):

    dataset = {
        "scans": defaultdict(list),
        "render_date": timezone.now().isoformat(),
        "remark": remark,
    }

    filtered_organization_type = get_organization_type(organization_type)
    filtered_country = get_country(country)

    # Really get the latest, without double results that apply for multiple organizations.
    # Do not show anything that is dead, on any level.
    for scan_type in PUBLISHED_ENDPOINT_SCAN_TYPES:
        scans = (
            EndpointGenericScan.objects.filter(
                type=scan_type,
                is_the_latest_scan=True,
            )
            .annotate(
                n_urls=Count(
                    "endpoint",
                    filter=Q(
                        endpoint__is_dead=False,
                        endpoint__url__not_resolvable=False,
                        endpoint__url__is_dead=False,
                        endpoint__url__organization__is_dead=False,
                        endpoint__url__organization__country=filtered_country,
                        endpoint__url__organization__type_id=filtered_organization_type,
                    ),
                )
            )
            .filter(n_urls__gte=1)
            .order_by("-rating_determined_on")[0:6]
        )

        print(scans.query)  # debug: prints the generated SQL query

        for scan in scans:
            calculation = get_severity(scan)

            dataset["scans"][scan_type].append(
                {
                    "url": scan.endpoint.url.url,
                    "service": f"{scan.endpoint.protocol}/{scan.endpoint.port} (IPv{scan.endpoint.ip_version})",
                    "protocol": scan.endpoint.protocol,
                    "port": scan.endpoint.port,
                    "ip_version": scan.endpoint.ip_version,
                    "explanation": calculation.get("explanation", ""),
                    "high": calculation.get("high", 0),
                    "medium": calculation.get("medium", 0),
                    "low": calculation.get("low", 0),
                    "last_scan_humanized": naturaltime(scan.last_scan_moment),
                    "last_scan_moment": scan.last_scan_moment.isoformat(),
                }
            )

    for scan_type in PUBLISHED_URL_SCAN_TYPES:

        scans = (
            UrlGenericScan.objects.filter(
                type=scan_type,
                is_the_latest_scan=True,
            )
            .annotate(
                n_urls=Count(
                    "url",
                    filter=Q(
                        url__organization__is_dead=False,
                        url__organization__country=filtered_country,
                        url__organization__type_id=filtered_organization_type,
                        url__is_dead=False,
                        url__not_resolvable=False,
                    ),
                )
            )
            .filter(n_urls=1)
            .order_by("-rating_determined_on")[0:6]
        )

        for scan in scans:
            calculation = get_severity(scan)

            # url scans
            dataset["scans"][scan_type].append(
                {
                    "url": scan.url.url,
                    "service": f"{scan.url.url}",
                    "protocol": scan_type,
                    "port": "-",
                    "ip_version": "-",
                    "explanation": calculation.get("explanation", ""),
                    "high": calculation.get("high", 0),
                    "medium": calculation.get("medium", 0),
                    "low": calculation.get("low", 0),
                    "last_scan_humanized": naturaltime(scan.last_scan_moment),
                    "last_scan_moment": scan.last_scan_moment.isoformat(),
                }
            )

    return dataset
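
A sketch of the structure get_all_latest_scans returns: the outer keys come from the function itself, while the single entry below is made up for illustration (the "ftp" scan type is an example, not asserted to exist).

dataset = {
    "scans": {
        "ftp": [
            {
                "url": "example.nl",
                "service": "ftp/21 (IPv4)",
                "protocol": "ftp",
                "port": 21,
                "ip_version": 4,
                "explanation": "FTP server only supports insecure connections.",
                "high": 1,
                "medium": 0,
                "low": 0,
                "last_scan_humanized": "2 days ago",
                "last_scan_moment": "2019-01-01T00:00:00+00:00",
            },
        ],
    },
    "render_date": "2019-01-03T00:00:00+00:00",
    "remark": "...",
}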
Example 8
def latest_updates(organization_id):
    """

    :param request:
    :param organization_id: the id will always be "correct", whereas name will have all kinds of terribleness:
    multiple organizations that have the same name in different branches, organizations with generic names etc.
    Finding an organization by name is tricky. Therefore ID.

    We're not filtering any further: given this might result in turning a blind eye to low or medium vulnerabilities.
    :return:
    """

    try:
        # todo: check that the organization is displayed on the map
        organization = Organization.objects.get(pk=organization_id)
    except ObjectDoesNotExist:
        return {}

    dataset = {
        "scans": [],
        "render_date": datetime.now(pytz.utc).isoformat(),
        "remark": remark,
    }

    # Semi-union, since not all columns are the same (not a very pythonic/Django-esque solution).
    generic_endpoint_scans = list(
        EndpointGenericScan.objects.filter(
            endpoint__url__organization=organization,
            type__in=ENDPOINT_SCAN_TYPES).order_by("-rating_determined_on")
        [0:60])
    url_endpoint_scans = list(
        UrlGenericScan.objects.filter(
            url__organization=organization,
            type__in=URL_SCAN_TYPES).order_by("-rating_determined_on")[0:60])

    scans = generic_endpoint_scans + url_endpoint_scans

    scans = sorted(scans,
                   key=lambda k: getattr(k, "rating_determined_on",
                                         datetime.now(pytz.utc)),
                   reverse=True)

    for scan in scans:
        scan_type = scan.type
        calculation = get_severity(scan)
        if scan_type in URL_SCAN_TYPES:
            # url scans
            dataset["scans"].append({
                "organization":
                organization.name,
                "organization_id":
                organization.pk,
                "url":
                scan.url.url,
                "service":
                "%s" % scan.url.url,
                "protocol":
                scan_type,
                "port":
                "",
                "ip_version":
                "",
                "scan_type":
                scan_type,
                "explanation":
                calculation.get("explanation",
                                ""),  # sometimes you dont get one.
                "high":
                calculation.get("high", 0),
                "medium":
                calculation.get("medium", 0),
                "low":
                calculation.get("low", 0),
                "rating_determined_on_humanized":
                naturaltime(scan.rating_determined_on),
                "rating_determined_on":
                scan.rating_determined_on,
                "last_scan_humanized":
                naturaltime(scan.last_scan_moment),
                "last_scan_moment":
                scan.last_scan_moment.isoformat(),
            })

        else:
            # endpoint scans
            dataset["scans"].append({
                "organization":
                organization.name,
                "organization_id":
                organization.pk,
                "url":
                scan.endpoint.url.url,
                "service":
                "%s/%s (IPv%s)" % (scan.endpoint.protocol, scan.endpoint.port,
                                   scan.endpoint.ip_version),
                "protocol":
                scan.endpoint.protocol,
                "port":
                scan.endpoint.port,
                "ip_version":
                scan.endpoint.ip_version,
                "scan_type":
                scan_type,
                "explanation":
                calculation.get("explanation",
                                ""),  # sometimes you dont get one.
                "high":
                calculation.get("high", 0),
                "medium":
                calculation.get("medium", 0),
                "low":
                calculation.get("low", 0),
                "rating_determined_on_humanized":
                naturaltime(scan.rating_determined_on),
                "rating_determined_on":
                scan.rating_determined_on,
                "last_scan_humanized":
                naturaltime(scan.last_scan_moment),
                "last_scan_moment":
                scan.last_scan_moment.isoformat(),
            })

    return dataset
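
latest_updates returns rating_determined_on as a datetime rather than a string, so serializing the result to JSON needs an encoder that handles datetimes. A minimal sketch of a view wrapper; the view name and URL wiring are assumptions.

from django.core.serializers.json import DjangoJSONEncoder
from django.http import JsonResponse


def organization_latest_updates(request, organization_id):
    # Hypothetical wrapper; DjangoJSONEncoder takes care of the datetime fields.
    return JsonResponse(latest_updates(organization_id), encoder=DjangoJSONEncoder)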
Example 9
    def item_description(self, item):
        calculation = get_severity(item)
        return _(calculation.get("explanation", ""))