    def setUp(self):
        self.event = {
            "server": "https://fake-server.net/v1",
            "auth": "arpit73:pAsSwErD",
        }
        client = Client(
            server_url=self.event.get("server"),
            auth=("arpit73", "pAsSwErD"),
            bucket=BUCKET_ID,
            collection=COLLECTION_ID,
        )
        self.record_uri = self.event.get("server") + client.get_endpoint(
            "record", id=RECORD_ID, bucket=BUCKET_ID, collection=COLLECTION_ID
        )
        self.record_uri_preview = self.event.get("server") + client.get_endpoint(
            "record", id=RECORD_ID, bucket=BUCKET_ID_PREVIEW, collection=COLLECTION_ID
        )

        mocked = mock.patch("commands.publish_dafsa.prepare_dafsa")
        self.addCleanup(mocked.stop)
        self.mocked_prepare = mocked.start()

        mocked = mock.patch("commands.publish_dafsa.remote_settings_publish")
        self.addCleanup(mocked.stop)
        self.mocked_publish = mocked.start()


class TestGetStoredHash(unittest.TestCase):
    def setUp(self):
        server = "https://fake-server.net/v1"
        auth = ("arpit73", "pAsSwErD")
        self.client = Client(
            server_url=server, auth=auth, bucket=BUCKET_ID, collection=COLLECTION_ID
        )
        self.record_uri = server + self.client.get_endpoint(
            "record", id=RECORD_ID, bucket=BUCKET_ID, collection=COLLECTION_ID
        )

    @responses.activate
    def test_stored_hash_fetched_successfully(self):
        responses.add(
            responses.GET,
            self.record_uri,
            json={"data": {"commit-hash": "fake-commit-hash"}},
        )
        stored_hash = get_stored_hash(self.client)
        self.assertEqual(stored_hash, "fake-commit-hash")

    @responses.activate
    def test_returns_none_when_no_record_found(self):
        responses.add(
            responses.GET, self.record_uri, json={"error": "not found"}, status=404
        )
        self.assertIsNone(get_stored_hash(self.client))
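
# A minimal sketch (not the original implementation) of the helper exercised by
# TestGetStoredHash above: the tests expect it to return the record's
# "commit-hash" field, and None when the record does not exist (404).
from kinto_http import KintoException


def get_stored_hash_sketch(client):
    try:
        record = client.get_record(id=RECORD_ID)["data"]
    except KintoException:
        # No record yet (e.g. first run): nothing has been published so far.
        return None
    return record.get("commit-hash")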


def validate_changes_collection(event, context, **kwargs):
    """Validate the entries of the monitor endpoint.
    """
    # 1. Grab the changes collection
    server_url = event["server"]
    bucket = event.get("bucket", os.getenv("BUCKET", "monitor"))
    collection = event.get("collection", os.getenv("COLLECTION", "changes"))

    client = Client(server_url=server_url,
                    bucket=bucket,
                    collection=collection)
    print("Looking at %s: " % client.get_endpoint("collection"))

    collections = client.get_records()
    # 2. For each collection there, validate the ETag
    everything_ok = True
    for collection in collections:
        bid = collection["bucket"]
        cid = collection["collection"]
        last_modified = collection["last_modified"]
        etag = client.get_records_timestamp(bucket=bid, collection=cid)
        if str(etag) == str(last_modified):
            print("Etag OK for {}/{} : {}".format(bid, cid, etag))
        else:
            everything_ok = False
            print("Etag NOT OK for {}/{} : {} != {}".format(
                bid, cid, last_modified, etag))

    if not everything_ok:
        raise ValueError("One of the collections did not validate.")
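
# Illustrative invocation (hypothetical server URL): the handler only requires
# "server" in the event; bucket and collection fall back to the BUCKET and
# COLLECTION environment variables, then to "monitor"/"changes".
if __name__ == "__main__":
    validate_changes_collection({"server": "https://settings.example.com/v1"}, None)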


def fetch_signed_resources(server_url, auth):
    # List signed collection using capabilities.
    client = Client(server_url=server_url,
                    auth=auth,
                    bucket="monitor",
                    collection="changes")
    info = client.server_info()
    try:
        resources = info["capabilities"]["signer"]["resources"]
    except KeyError:
        raise ValueError(
            "No signer capabilities found. Run on *writer* server!")

    # Build the list of signed collections, source -> preview -> destination
    # For most cases, configuration of signed resources is specified by bucket and
    # does not contain any collection information.
    resources_by_bid = {}
    resources_by_cid = {}
    preview_buckets = set()
    for resource in resources:
        if resource["source"]["collection"] is not None:
            resources_by_cid[(
                resource["destination"]["bucket"],
                resource["destination"]["collection"],
            )] = resource
        else:
            resources_by_bid[resource["destination"]["bucket"]] = resource
        if "preview" in resource:
            preview_buckets.add(resource["preview"]["bucket"])

    print("Read collection list from {}".format(
        client.get_endpoint("collection")))
    resources = []
    monitored = client.get_records(_sort="bucket,collection")
    for entry in monitored:
        bid = entry["bucket"]
        cid = entry["collection"]

        # Skip entries for preview collections
        if bid in preview_buckets:
            continue

        if (bid, cid) in resources_by_cid:
            r = resources_by_cid[(bid, cid)]
        elif bid in resources_by_bid:
            r = copy.deepcopy(resources_by_bid[bid])
            r["source"]["collection"] = r["destination"]["collection"] = cid
            if "preview" in r:
                r["preview"]["collection"] = cid
        else:
            raise ValueError(f"Unknown signed collection {bid}/{cid}")
        resources.append(r)

    return resources
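
# Illustrative usage (hypothetical server URL and credentials): print each
# signed source -> destination pair advertised by a writer server.
if __name__ == "__main__":
    for resource in fetch_signed_resources(
        "https://settings-writer.example.com/v1", ("user", "password")
    ):
        source, destination = resource["source"], resource["destination"]
        print("{}/{} -> {}/{}".format(
            source["bucket"], source["collection"],
            destination["bucket"], destination["collection"]))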


class TestRemoteSettingsPublish(unittest.TestCase):
    def setUp(self):
        server = "https://fake-server.net/v1"
        auth = ("arpit73", "pAsSwErD")
        self.client = Client(
            server_url=server, auth=auth, bucket=BUCKET_ID, collection=COLLECTION_ID
        )
        record_uri = server + self.client.get_endpoint(
            "record", id=RECORD_ID, bucket=BUCKET_ID, collection=COLLECTION_ID
        )
        self.collection_uri = server + self.client.get_endpoint(
            "collection", bucket=BUCKET_ID, collection=COLLECTION_ID
        )
        self.attachment_uri = f"{record_uri}/attachment"

    @responses.activate
    def test_record_was_posted(self):
        responses.add(
            responses.POST,
            self.attachment_uri,
            json={"data": {"commit-hash": "fake-commit-hash"}},
        )
        responses.add(
            responses.PATCH, self.collection_uri, json={"data": {"status": "to-review"}}
        )

        with tempfile.TemporaryDirectory() as tmp:
            dafsa_filename = f"{tmp}/dafsa.bin"
            with open(dafsa_filename, "wb") as f:
                f.write(b"some binary data")
            remote_settings_publish(self.client, "fake-commit-hash", dafsa_filename)

            self.assertEqual(len(responses.calls), 2)

            self.assertEqual(responses.calls[0].request.url, self.attachment_uri)
            self.assertEqual(responses.calls[0].request.method, "POST")

            self.assertEqual(responses.calls[1].request.url, self.collection_uri)
            self.assertEqual(responses.calls[1].request.method, "PATCH")
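
# A minimal sketch (not the original implementation) of the publish helper
# exercised by TestRemoteSettingsPublish above: upload the DAFSA binary as a
# record attachment, then request review on the collection. The attachment
# endpoint is called with plain requests here to keep the sketch self-contained.
import json

import requests


def remote_settings_publish_sketch(client, commit_hash, dafsa_path):
    attachment_uri = (
        client.session.server_url
        + client.get_endpoint("record", id=RECORD_ID)
        + "/attachment"
    )
    with open(dafsa_path, "rb") as f:
        requests.post(
            attachment_uri,
            files={"attachment": ("dafsa.bin", f.read(), "application/octet-stream")},
            data={"data": json.dumps({"commit-hash": commit_hash})},
            auth=client.session.auth,
        )
    # Request review so the change can be signed and published.
    client.patch_collection(data={"status": "to-review"})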


def download_collection_data(server_url, collection):
    client = Client(
        server_url=server_url,
        bucket=collection["bucket"],
        collection=collection["collection"],
    )
    endpoint = client.get_endpoint("collection")
    # Collection metadata with cache busting
    metadata = client.get_collection(
        _expected=collection["last_modified"])["data"]
    # Download records with cache busting
    records = client.get_records(_sort="-last_modified",
                                 _expected=collection["last_modified"])
    timestamp = client.get_records_timestamp()
    return (collection, endpoint, metadata, records, timestamp)


def validate_signature(event, context, **kwargs):
    """Validate the signature of each collection.
    """
    server_url = event["server"]
    bucket = event.get("bucket", "monitor")
    collection = event.get("collection", "changes")
    client = Client(server_url=server_url,
                    bucket=bucket,
                    collection=collection)
    print("Read collection list from {}".format(
        client.get_endpoint("collection")))

    error_messages = []

    checked_certificates = {}

    collections = client.get_records()

    # Grab server data in parallel.
    start_time = time.time()
    collections_data = []
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=PARALLEL_REQUESTS) as executor:
        futures = [
            executor.submit(download_collection_data, server_url, c)
            for c in collections
        ]
        for future in concurrent.futures.as_completed(futures):
            collections_data.append(future.result())
    elapsed_time = time.time() - start_time
    print(f"Downloaded all data in {elapsed_time:.2f}s")

    for i, (collection, endpoint, metadata, records,
            timestamp) in enumerate(collections_data):
        start_time = time.time()

        message = "{:02d}/{:02d} {}:  ".format(i + 1, len(collections),
                                               endpoint)

        # 1. Serialize
        serialized = canonical_json(records, timestamp)
        data = b"Content-Signature:\x00" + serialized.encode("utf-8")

        # 2. Grab the signature
        try:
            signature = metadata["signature"]
        except KeyError:
            # Destination has no signature attribute.
            # Be smart and check if it was just configured.
            # See https://github.com/mozilla-services/remote-settings-lambdas/issues/31
            client = Client(
                server_url=server_url,
                bucket=collection["bucket"],
                collection=collection["collection"],
            )
            with_tombstones = client.get_records(_since=1)
            if len(with_tombstones) == 0:
                # It never contained records. Let's assume it is newly configured.
                message += "SKIP"
                print(message)
                continue
            # Some records and empty signature? It will fail below.
            signature = {}

        try:
            # 3. Verify the signature with the public key
            pubkey = signature["public_key"].encode("utf-8")
            verifier = ecdsa.VerifyingKey.from_pem(pubkey)
            signature_bytes = base64.urlsafe_b64decode(signature["signature"])
            verified = verifier.verify(signature_bytes,
                                       data,
                                       hashfunc=hashlib.sha384)
            assert verified, "Signature verification failed"

            # 4. Verify that the x5u certificate is valid (i.e. that the signature was refreshed recently)
            x5u = signature["x5u"]
            if x5u not in checked_certificates:
                resp = requests.get(signature["x5u"])
                cert_pem = resp.text.encode("utf-8")
                cert = cryptography.x509.load_pem_x509_certificate(
                    cert_pem, crypto_default_backend())
                assert (cert.not_valid_before <
                        datetime.now()), "certificate not yet valid"
                assert cert.not_valid_after > datetime.now(
                ), "certificate expired"
                subject = cert.subject.get_attributes_for_oid(
                    NameOID.COMMON_NAME)[0].value
                # eg. ``onecrl.content-signature.mozilla.org``, or
                # ``pinning-preload.content-signature.mozilla.org``
                assert subject.endswith(
                    ".content-signature.mozilla.org"), "invalid subject name"
                checked_certificates[x5u] = cert

            # 5. Check that public key matches the certificate one.
            cert = checked_certificates[x5u]
            cert_pubkey_pem = cert.public_key().public_bytes(
                crypto_serialization.Encoding.PEM,
                crypto_serialization.PublicFormat.SubjectPublicKeyInfo,
            )
            assert (unpem(cert_pubkey_pem) == pubkey
                    ), "signature public key does not match certificate"

            elapsed_time = time.time() - start_time
            message += f"OK ({elapsed_time:.2f}s)"
            print(message)
        except Exception:
            message += "⚠ BAD Signature ⚠"
            print(message)

            # Gather details for the global exception that will be raised.
            signed_on = metadata["last_modified"]
            signed_on_date = timestamp_to_date(signed_on)
            timestamp_date = timestamp_to_date(timestamp)
            error_message = (
                "Signature verification failed on {endpoint}\n"
                " - Signed on: {signed_on} ({signed_on_date})\n"
                " - Records timestamp: {timestamp} ({timestamp_date})").format(
                    **locals())
            error_messages.append(error_message)

    # Make the lambda fail in case an exception occurred
    if len(error_messages) > 0:
        raise ValidationError("\n" + "\n\n".join(error_messages))