Example #1
def process(api: act.api.Act, args: argparse.Namespace) -> None:
    """Get events associated to cases since last update"""

    last_update: Optional[int] = args.last_update
    content_props: List[Text] = [prop.strip() for prop in args.content_props.split(",")]
    hash_props: List[Text] = [prop.strip() for prop in args.hash_props.split(",")]

    # If last_update is not specified, read it from the cache file
    if not last_update:
        last_update = get_last_update()

    # Get events
    for counter, event in enumerate(
        event_case_query(
            args.argus_baseurl,
            args.argus_apikey,
            last_update,
            timeout=args.timeout,
            proxy_string=args.proxy_string,
        )
    ):

        # Results are sorted by lastUpdatedTimestamp, so update
        # last_update from each event
        last_update = cast(int, event["lastUpdatedTimestamp"])

        if args.organization_from_argus:
            shortName = event.get("customerInfo", {}).get("shortName")

            if shortName:
                api.config.organization = shortName
            else:
                error(f"Unable to get organization from event: {event}")
                continue

        # Create facts from event
        argus.handle_argus_event(
            api, event, content_props, hash_props, args.output_format
        )

        # Every 1000th event, log progress and persist the last updated timestamp
        if (counter % 1000) == 0:
            info(
                "Offset: {}, last_update: {}".format(
                    counter, time.asctime(time.localtime(last_update / 1000))
                )
            )
            update_last_update(last_update)

    # Persist the final last_update once all events have been processed
    update_last_update(last_update)
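The helpers get_last_update() and update_last_update() used above are not part of this snippet; they persist the last processed lastUpdatedTimestamp between runs. A minimal sketch of what such cache helpers could look like follows; the file location and the exact names are assumptions, not the worker's actual implementation.

import os

# Hypothetical cache location; the real worker stores this state elsewhere
CACHE_FILE = os.path.expanduser("~/.cache/argus-last-update")


def get_last_update() -> int:
    """Read the last processed lastUpdatedTimestamp (milliseconds) from the cache, or 0"""
    try:
        with open(CACHE_FILE) as cache:
            return int(cache.read().strip())
    except (FileNotFoundError, ValueError):
        return 0


def update_last_update(last_update: int) -> None:
    """Persist the last processed lastUpdatedTimestamp (milliseconds) to the cache"""
    os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)
    with open(CACHE_FILE, "w") as cache:
        cache.write(str(last_update))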
Example #2
def test_argus_case_facts(capsys, caplog) -> None:  # type: ignore
    """ Test for argus case facts, by comparing to captue of stdout """
    with open("test/data/argus-event.json") as argus_event:
        event = json.loads(argus_event.read())

    api = act.api.Act("", None, "error")

    argus.handle_argus_event(api,
                             event,
                             content_props=["file.sha256", "process.sha256"],
                             hash_props=[
                                 "file.md5", "process.md5", "file.sha1",
                                 "process.sha1", "file.sha512",
                                 "process.sha512"
                             ],
                             output_format="str")

    captured = capsys.readouterr()
    facts = set(captured.out.split("\n"))
    logs = [rec.message for rec in caplog.records]

    print(captured.out)

    prop = event["properties"]
    uri = event["uri"]
    incident_id = "ARGUS-{}".format(event["associatedCase"]["id"])
    event_id = "ARGUS-{}".format(event["id"])

    signature = event["attackInfo"]["signature"]

    # Fact chain from the md5 hash through content to the event
    md5_chain = act.api.fact.fact_chain(
        api.fact("represents").source("hash", prop["file.md5"]).destination(
            "content", "*"),
        api.fact("observedIn",
                 "event").source("content",
                                 "*").destination("event", event_id))

    sha256 = "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b"

    fact_assertions = [
        api.fact("attributedTo",
                 "incident").source("event", event_id).destination(
                     "incident", incident_id),
        api.fact("observedIn",
                 "event").source("content",
                                 sha256).destination("event", event_id),
        api.fact("detects",
                 "event").source("signature",
                                 signature).destination("event", event_id),
        api.fact("name", "Infected host").source("incident", incident_id),
        api.fact("observedIn",
                 "event").source("uri", uri).destination("event", event_id),
        api.fact("componentOf").source("fqdn", "test-domain.com").destination(
            "uri", uri),
        api.fact("componentOf").source("path",
                                       "/path.cgi").destination("uri", uri),
        api.fact("scheme", "http").source("uri", uri),
        api.fact("observedIn", "event").source("uri",
                                               "tcp://1.2.3.4").destination(
                                                   "event", event_id),
    ]

    fact_negative_assertions = [
        # This fact should not exist, since we only add IPs with public addresses
        api.fact("observedIn",
                 "event").source("uri", "tcp://192.168.1.1").destination(
                     "event", event_id),

        # We have a URI, so this should not be constructed from the fqdn
        api.fact("observedIn",
                 "event").source("uri", "tcp://test-domain.com").destination(
                     "event", event_id),

        # Not valid content hash (sha256)
        api.fact("observedIn",
                 "event").source("content",
                                 "bogus").destination("event", event_id),
    ]

    assert 'Illegal sha256: "bogus" in property "file.sha256"' in logs

    for fact_assertion in fact_assertions:
        assert str(fact_assertion) in facts

    for fact_assertion in fact_negative_assertions:
        assert str(fact_assertion) not in facts

    for fact_assertion in md5_chain:
        assert str(fact_assertion) in facts
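The test reads its event from test/data/argus-event.json, which is not shown here. Based on the fields the test accesses, the fixture must at least expose the keys sketched below; the values are illustrative (only the invalid "bogus" sha256 is implied by the log assertion), and the real fixture also carries the data behind the IP-based URI assertions.

# Hypothetical minimal shape of test/data/argus-event.json; values are illustrative
event = {
    "id": 123456,
    "startTimestamp": 1500000000000,
    "uri": "http://test-domain.com/path.cgi",
    "associatedCase": {"id": 42},
    "attackInfo": {"signature": "SOME-SIGNATURE"},
    "properties": {
        "file.md5": "0123456789abcdef0123456789abcdef",
        "file.sha256": "bogus",  # deliberately invalid, triggers the log assertion
    },
}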
Example #3
def test_argus_case_facts(capsys, caplog) -> None:  # type: ignore
    """Test for argus case facts, by comparing to captue of stdout"""
    with open("test/data/argus-event.json") as argus_event:
        event = json.loads(argus_event.read())

    api = act.api.Act(
        "",
        None,
        "error",
        strict_validator=True,
        object_formatter=object_format,
        object_validator=object_validates,
    )
    act.api.helpers.handle_fact.cache_clear()

    argus.handle_argus_event(
        api,
        event,
        content_props=["file.sha256", "process.sha256"],
        hash_props=[
            "file.md5",
            "process.md5",
            "file.sha1",
            "process.sha1",
            "file.sha512",
            "process.sha512",
        ],
        output_format="str",
    )

    captured = capsys.readouterr()
    facts = set(captured.out.split("\n"))
    logs = [rec.message for rec in caplog.records]

    print(captured.out)

    prop = event["properties"]
    uri1 = event["uri"]
    uri2 = "http://test-domain2.com/path.cgi"
    uri3 = "http://test-domain3.com/abc"
    case_id = "ARGUS-{}".format(event["associatedCase"]["id"])
    observationTime = event["startTimestamp"]

    signature = event["attackInfo"]["signature"]

    # Fact chain from md5 hash through content to incident
    md5_chain = act.api.fact.fact_chain(
        api.fact("represents")
        .source("hash", prop["file.md5"])
        .destination("content", "*"),
        api.fact("observedIn").source("content", "*").destination("incident", case_id),
    )

    # Fact chain from the incident through a technique to the tactic
    tactic_chain = act.api.fact.fact_chain(
        api.fact("observedIn")
        .source("technique", "*")
        .destination("incident", case_id),
        api.fact("implements")
        .source("technique", "*")
        .destination("tactic", "TA0007"),
    )

    sha256 = "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b"

    fact_assertions = (
        api.fact("observedIn")
        .source("content", sha256)
        .destination("incident", case_id),
        api.fact("name", "Infected host").source("incident", case_id),
        api.fact("observedIn").source("uri", uri1).destination("incident", case_id),
        api.fact("observedIn").source("uri", uri2).destination("incident", case_id),
        api.fact("observedIn").source("uri", uri3).destination("incident", case_id),
        api.fact("componentOf")
        .source("fqdn", "test-domain.com")
        .destination("uri", uri1),
        api.fact("componentOf").source("path", "/path.cgi").destination("uri", uri1),
        api.fact("scheme", "http").source("uri", uri1),
        api.fact("observedIn")
        .source("uri", "tcp://1.2.3.4")
        .destination("incident", case_id),
    )

    # All facts should have a corresponding meta fact observationTime
    meta_fact_assertions = [
        fact.meta("observationTime", str(observationTime))
        for fact in fact_assertions + tactic_chain + md5_chain
    ]

    fact_negative_assertions = [
        # signature is removed from the data model in 2.0
        api.fact("detects")
        .source("signature", signature)
        .destination("incident", case_id),
        # This fact should not exist, since we only add IPs with public addresses
        api.fact("observedIn")
        .source("uri", "tcp://192.168.1.1")
        .destination("incident", case_id),
        # This fact should not exist, since it does not have scheme
        api.fact("observedIn")
        .source("uri", "illegal-url.com")
        .destination("incident", case_id),
        # We have a URI, so this should not be constructed from the fqdn
        api.fact("observedIn")
        .source("uri", "tcp://test-domain.com")
        .destination("incident", case_id),
        # Not valid content hash (sha256)
        api.fact("observedIn")
        .source("content", "bogus")
        .destination("incident", case_id),
    ]

    assert 'Illegal sha256: "bogus" in property "file.sha256"' in logs

    for fact_assertion in fact_assertions:
        assert str(fact_assertion) in facts

    for fact_assertion in fact_negative_assertions:
        assert str(fact_assertion) not in facts

    for fact_assertion in md5_chain:
        assert str(fact_assertion) in facts

    for fact_assertion in tactic_chain:
        assert str(fact_assertion) in facts

    for fact_assertion in meta_fact_assertions:
        assert str(fact_assertion) in facts
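Example #3 additionally configures act.api.Act with strict_validator=True and the object_format / object_validates callbacks, and clears the handle_fact cache, presumably so fact deduplication from an earlier test does not suppress output. The callbacks themselves are defined elsewhere in the worker; the sketch below only illustrates the kind of formatter and validator that could be plugged in, with made-up rules.

# Hypothetical stand-ins for the object_format / object_validates callbacks;
# the normalization and validation rules below are illustrative only
def object_format(value: str) -> str:
    """Normalize an object value before it is submitted to the platform"""
    return value.strip().lower()


def object_validates(value: str) -> bool:
    """Reject empty or whitespace-only object values"""
    return bool(value.strip())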