async def test_publish_no_data():
    """With nothing posted, publishing sends only the scan envelope (empty __docs)."""
    sns = MagicMock()
    sns.publish.return_value = coroutine_of({"MessageId": "Msg32"})
    context = ResultsContext(
        "PubTopic",
        {"address": "123.123.123.123"},
        "scan_12",
        iso_date_string_from_timestamp(123456),
        iso_date_string_from_timestamp(789123),
        "scan_name",
        sns
    )

    await context.publish_results()

    # The top-level message carries scan info plus parent/temporal key attributes
    expected_message = dumps({
        "scan_id": "scan_12",
        "scan_start_time": iso_date_string_from_timestamp(123456),
        "scan_end_time": iso_date_string_from_timestamp(789123),
        "__docs": {}
    })
    sns.publish.assert_called_with(
        TopicArn="PubTopic",
        Subject="scan_name",
        Message=expected_message,
        MessageAttributes={
            "ParentKey": {
                "StringValue": ResultsContext._hash_of({"address": "123.123.123.123"}),
                "DataType": "String"
            },
            "TemporalKey": {
                "StringValue": ResultsContext._hash_of(iso_date_string_from_timestamp(789123)),
                "DataType": "String"
            }
        }
    )
async def test_summary_info_published():
    """Summaries added to the context are merged into docs posted with include_summaries."""
    sns = MagicMock()
    sns.publish.return_value = coroutine_of({"MessageId": "Msg32"})
    context = ResultsContext(
        "PubTopic",
        {"address": "123.456.123.456"},
        "scan_9",
        iso_date_string_from_timestamp(4),
        iso_date_string_from_timestamp(5),
        "scan_name",
        sns
    )

    # Register summaries both in bulk and individually, then post a doc including them
    context.add_summaries({"foo": "bar", "boo": "baz"})
    context.add_summary("banana", "man")
    context.post_results("host_info", {"uptime": "1234567"}, include_summaries=True)
    await context.publish_results()

    expected_parent_key = ResultsContext._hash_of({"address": "123.456.123.456"})
    # Each summary appears in the doc Data prefixed with "summary_"
    expected_doc = {
        "NonTemporalKey": ResultsContext._hash_of({
            "address": "123.456.123.456",
        }),
        "Data": {
            "address": "123.456.123.456",
            "uptime": "1234567",
            "summary_foo": "bar",
            "summary_boo": "baz",
            "summary_banana": "man",
            "__ParentKey": expected_parent_key,
        }
    }
    sns.publish.assert_called_with(
        TopicArn="PubTopic",
        Subject="scan_name",
        Message=dumps({
            "scan_id": "scan_9",
            "scan_start_time": iso_date_string_from_timestamp(4),
            "scan_end_time": iso_date_string_from_timestamp(5),
            "__docs": {"host_info": [expected_doc]}
        }),
        MessageAttributes={
            "ParentKey": {
                "StringValue": expected_parent_key,
                "DataType": "String"
            },
            "TemporalKey": {
                "StringValue": ResultsContext._hash_of(iso_date_string_from_timestamp(5)),
                "DataType": "String"
            }
        }
    )
async def report_letters(event, _):
    """Forward S3 dead-letter notifications to the ES ingest SQS queue.

    For each S3 record in *event*, fetches the object's metadata, fills in any
    missing essential metadata fields (defaulting the sent time to now), and
    queues a dead_letter:data:write message. All SQS sends are awaited
    concurrently at the end.

    :param event: lambda event containing "Records" (S3 notifications) and
                  "ssm_params" with the ES queue URL under ES_SQS
    :param _: unused lambda context
    """
    es_queue = event['ssm_params'][ES_SQS]
    writes = []
    for record in event["Records"]:
        s3_object = objectify(record["s3"])
        bucket = s3_object.bucket.name
        # Keys arrive URL-encoded in S3 event notifications
        key = unquote_plus(s3_object.object.key)

        print(f"Loading new dead letter file: {(bucket, key)}")
        obj = await s3_client.get_object(Bucket=bucket, Key=key)
        dead_letter_details = obj["Metadata"]
        # fixed typo: was "Wring new dead letter..."
        print(f"Writing new dead letter with metadata: {dead_letter_details}")

        ensure_essential_metadata(dead_letter_details, [
            ("deadletterqueuename", "Metadata missing"),
            ("deadletterkey", "Metadata missing"),
            ("deadlettersenttime",
             str(iso_date_string_from_timestamp(datetime.now().timestamp())))
        ])

        writes.append(
            sqs_client.send_message(QueueUrl=es_queue,
                                    MessageBody=dumps({
                                        "Subject":
                                        "dead_letter:data:write",
                                        "Message":
                                        dumps(dead_letter_details)
                                    })))
    # f-prefixes removed below: the strings have no placeholders
    print("Gathering writes")
    await gather(*writes)
    print("Written successfully")
# Example #4
def test_exception_on_no_parent_key(post_mock):
    """Ingest rejects messages whose attributes carry no ParentKey."""
    queue_ingestor.ssm_client.get_parameters.return_value = ssm_return_vals()

    # mock response contains error
    post_mock.return_value = response_mock = MagicMock()
    response_mock.json.return_value = {}
    response_mock.text = "Walk"

    # The record has a TemporalKey but deliberately no ParentKey attribute
    body = dumps({
        "Subject": "my_scan",
        "Message": dumps({"some_field": "some_value"}),
        "MessageAttributes": {
            "TemporalKey": {
                "Value": ResultsContext._hash_of(iso_date_string_from_timestamp(5)),
                "DataType": "String"
            }
        }
    })
    test_event = {"Records": [{"body": body}]}

    with pytest.raises(
            ValueError,
            match="Analytics ingestor requires the ParentKey message attribute be present"):
        queue_ingestor.ingest(test_event, MagicMock())
    async def scan(self, scan_request_id, scan_request):
        """Run an openssl certificate scan for an https/port-443 service request.

        *scan_request* is a JSON-encoded envelope whose "Message" is itself JSON
        describing the service (address, port_id, service). Requests that are not
        port 443 or https are ignored. For each host name resolved for the
        address, the merged openssl tty output is passed to write_results_set.
        """
        scan_request = loads(scan_request)
        msg = loads(scan_request["Message"])
        if msg["port_id"] == "443" or msg["service"] == "https":
            print(f"address {msg['address']}")
            # Resolve registered host names for this address; table name comes from SSM
            host_names = self.get_hosts(
                msg["address"], self.get_ssm_param(self._dynamodb_param))
            print(host_names)
            for host in host_names:
                target = f"{host}:{msg['port_id']}"
                msg["target"] = target
                scan_start_time = iso_date_string_from_timestamp(
                    datetime.now().timestamp())
                print(
                    f"open ssl scan: {target} for request id {scan_request_id}"
                )
                print("running openssl")
                # NOTE(review): shell=True with an interpolated target below — host names
                # presumably come from our own data store, but this is shell-injectable
                # if they ever don't; confirm the source is trusted.
                cmd = f"openssl s_client -showcerts -connect {target}"
                merged_file = io.BytesIO()
                try:
                    # openssl outputs on two tty streams, so merge the two together and put in S3 for processing later
                    out = subprocess.check_output(
                        f"{cmd} </dev/null 1>/tmp/tty1.txt 2>/tmp/tty2.txt",
                        shell=True)
                except subprocess.CalledProcessError as e:
                    # openssl will generate an error if there's a problem in the chain
                    # that is used to source the error info and we do want to suppress this exception
                    pass
                scan_end_time = iso_date_string_from_timestamp(
                    datetime.now().timestamp())
                # stderr capture (tty2) is written first, then stdout (tty1)
                with open("/tmp/tty2.txt", "r") as f:
                    merged_file.write(f.read().encode("UTF-8"))
                with open("/tmp/tty1.txt", "r") as f:
                    merged_file.write(f.read().encode("UTF-8"))

                merged_file.seek(0)
                await self.write_results_set(f"{scan_request_id}-{host}",
                                             merged_file, ".txt", msg,
                                             scan_start_time, scan_end_time)
        else:
            print("Ignoring non 443 or https service")
# Example #6
def test_snapshot_only_mode(post_mock):
    """Without a TemporalKey, docs are written only to snapshot indices.

    Each doc_type gets one delete-old-snapshots request (by __ParentKey) followed
    by its snapshot writes; the exact ordered call list is asserted below.
    """
    queue_ingestor.ssm_client.get_parameters.return_value = ssm_return_vals()
    post_mock.return_value = post_response_mock = MagicMock()
    post_response_mock.json.return_value = {}

    # Using as a sample event the expected output of the test_scan_results.py test
    test_event = {
        "Records": [{
            "body":
            dumps({
                "Subject": "scan_name",
                "Message": SAMPLE_DOC_COLLECTION,
                "MessageAttributes": {
                    # N.B. No TemporalKey here
                    "ParentKey": {
                        "Value":
                        ResultsContext._hash_of({"address":
                                                 "123.456.123.456"}),
                        "DataType":
                        "String"
                    }
                }
            })
        }]
    }
    queue_ingestor.ingest(test_event, MagicMock())

    # There will be 2 port info, 2 vuln info, and one host info docs posted, but only for the snapshot collections
    # There will be 3 delete old snapshot requests made, one for each doc_type, and each using the parent key
    assert post_mock.call_count == 5 + 3

    # One delete-by-query call per doc_type, scoped to this parent key
    expected_deletes = {
        doc_type: call(
            f"https://elastic.url.com/scan_name:{doc_type}_snapshot:write/_doc/_delete_by_query?conflicts=proceed",
            auth=auth_mock,
            data=dumps({
                "query": {
                    "term": {
                        "__ParentKey":
                        ResultsContext._hash_of({"address": "123.456.123.456"})
                    }
                }
            }),
            headers={"content-type": "application/json"})
        for doc_type in ["port_info", "vuln_info", "host_info"]
    }

    parent_key = ResultsContext._hash_of({"address": "123.456.123.456"})

    assert post_mock.call_args_list == [
        # info for port 22 and 80
        expected_deletes["port_info"],
        _expected_snapshot_write(
            "port_info", {
                'address': '123.456.123.456',
                'port': '22'
            }, auth_mock, {
                "scan_id": "scan_2",
                "scan_start_time": iso_date_string_from_timestamp(4),
                "scan_end_time": iso_date_string_from_timestamp(5),
                "address": "123.456.123.456",
                "port": "22",
                "open": "false",
                "__ParentKey": parent_key,
            }),
        _expected_snapshot_write(
            "port_info", {
                'address': '123.456.123.456',
                'port': '80'
            }, auth_mock, {
                "scan_id": "scan_2",
                "scan_start_time": iso_date_string_from_timestamp(4),
                "scan_end_time": iso_date_string_from_timestamp(5),
                "address": "123.456.123.456",
                "port": "80",
                "open": "true",
                "__ParentKey": parent_key,
            }),
        # info for the two cves
        expected_deletes["vuln_info"],
        _expected_snapshot_write(
            "vuln_info", {
                "address": "123.456.123.456",
                "port": "22",
                "vulnerability": "cve4"
            }, auth_mock, {
                "scan_id": "scan_2",
                "scan_start_time": iso_date_string_from_timestamp(4),
                "scan_end_time": iso_date_string_from_timestamp(5),
                "address": "123.456.123.456",
                "port": "22",
                "vulnerability": "cve4",
                "severity": "5",
                "__ParentKey": parent_key,
            }),
        _expected_snapshot_write(
            "vuln_info", {
                "address": "123.456.123.456",
                "port": "22",
                "vulnerability": "cve5"
            }, auth_mock, {
                "scan_id": "scan_2",
                "scan_start_time": iso_date_string_from_timestamp(4),
                "scan_end_time": iso_date_string_from_timestamp(5),
                "address": "123.456.123.456",
                "port": "22",
                "vulnerability": "cve5",
                "severity": "2",
                "__ParentKey": parent_key,
            }),
        # Host info
        expected_deletes["host_info"],
        _expected_snapshot_write(
            "host_info", {"address": "123.456.123.456"}, auth_mock, {
                "scan_id": "scan_2",
                "scan_start_time": iso_date_string_from_timestamp(4),
                "scan_end_time": iso_date_string_from_timestamp(5),
                "address": "123.456.123.456",
                "uptime": "1234567",
                "__ParentKey": parent_key,
            }),
    ]
# Example #7
        }]
    }

    with pytest.raises(
            ValueError,
            match=
            "Analytics ingestor requires the ParentKey message attribute be present"
    ):
        queue_ingestor.ingest(test_event, MagicMock())


SAMPLE_DOC_COLLECTION = dumps({
    "scan_id":
    "scan_2",
    "scan_start_time":
    iso_date_string_from_timestamp(4),
    "scan_end_time":
    iso_date_string_from_timestamp(5),
    "__docs": {
        "port_info": [{
            "NonTemporalKey":
            ResultsContext._hash_of({
                "address": "123.456.123.456",
                "port": "22"
            }),
            "Data": {
                "address":
                "123.456.123.456",
                "port":
                "22",
                "open":
async def test_context_push_and_pop():
    """Pushed contexts nest into doc keys; popping restores the parent scope.

    Builds host -> port -> vulnerability contexts, posts docs at each level,
    and asserts the single published SNS message contains every doc with its
    NonTemporalKey derived from the full context stack at post time.
    """
    mock_sns_client = MagicMock()
    mock_sns_client.publish.return_value = coroutine_of({"MessageId": "Msg32"})
    context = ResultsContext(
        "PubTopic",
        {"address": "123.456.123.456"},
        "scan_2",
        iso_date_string_from_timestamp(4),
        iso_date_string_from_timestamp(5),
        "scan_name",
        mock_sns_client
    )

    # port 22: one port doc and two vuln docs at nested context levels
    context.push_context({"port": "22"})
    context.post_results("port_info", {"open": "false"})
    context.push_context({"vulnerability": "cve4"})
    context.post_results("vuln_info", {"severity": "5"})
    context.pop_context()
    context.push_context({"vulnerability": "cve5"})
    context.post_results("vuln_info", {"severity": "2"})
    context.pop_context()
    context.pop_context()
    # port 80: single port doc
    context.push_context({"port": "80"})
    context.post_results("port_info", {"open": "true"})
    context.pop_context()
    # back at host level
    context.post_results("host_info", {"uptime": "1234567"})
    await context.publish_results()

    # it should publish the top level info parent and temporal key
    mock_sns_client.publish.assert_called_with(
        TopicArn="PubTopic",
        Subject="scan_name",
        Message=dumps(
            {
                "scan_id": "scan_2",
                "scan_start_time": iso_date_string_from_timestamp(4),
                "scan_end_time": iso_date_string_from_timestamp(5),
                "__docs": {
                    "port_info": [
                        {
                            "NonTemporalKey": ResultsContext._hash_of({
                                "address": "123.456.123.456",
                                "port": "22"
                            }),
                            "Data": {
                                "address": "123.456.123.456",
                                "port": "22",
                                "open": "false",
                                "__ParentKey": ResultsContext._hash_of({"address": "123.456.123.456"}),
                            }
                        },
                        {
                            "NonTemporalKey": ResultsContext._hash_of({
                                "address": "123.456.123.456",
                                "port": "80"
                            }),
                            "Data": {
                                "address": "123.456.123.456",
                                "port": "80",
                                "open": "true",
                                "__ParentKey": ResultsContext._hash_of({"address": "123.456.123.456"}),
                            }
                        }
                    ],
                    "vuln_info": [
                        {
                            "NonTemporalKey": ResultsContext._hash_of({
                                "address": "123.456.123.456",
                                "port": "22",
                                "vulnerability": "cve4"
                            }),
                            "Data": {
                                "address": "123.456.123.456",
                                "port": "22",
                                "vulnerability": "cve4",
                                "severity": "5",
                                "__ParentKey": ResultsContext._hash_of({"address": "123.456.123.456"}),
                            }
                        },
                        {
                            "NonTemporalKey": ResultsContext._hash_of({
                                "address": "123.456.123.456",
                                "port": "22",
                                "vulnerability": "cve5"
                            }),
                            "Data": {
                                "address": "123.456.123.456",
                                "port": "22",
                                "vulnerability": "cve5",
                                "severity": "2",
                                "__ParentKey": ResultsContext._hash_of({"address": "123.456.123.456"}),
                            }
                        }
                    ],
                    "host_info": [
                        {
                            "NonTemporalKey": ResultsContext._hash_of({
                                "address": "123.456.123.456",
                            }),
                            "Data": {
                                "address": "123.456.123.456",
                                "uptime": "1234567",
                                "__ParentKey": ResultsContext._hash_of({"address": "123.456.123.456"}),
                            }
                        }
                    ]
                }
            }
        ),
        MessageAttributes={
            "ParentKey": {
                "StringValue": ResultsContext._hash_of({"address": "123.456.123.456"}),
                "DataType": "String"
            },
            "TemporalKey": {
                "StringValue": ResultsContext._hash_of(iso_date_string_from_timestamp(5)),
                "DataType": "String"
            }
        }
    )