def test_does_not_alter_records():
    records = [
        {
            'foo': 'bar',
            'last_modified': '12345',
            'id': '1'
        },
        {
            'bar': 'baz',
            'last_modified': '45678',
            'id': '2'
        },
    ]
    canonical_json(records, '45678')

    assert records == [
        {
            'foo': 'bar',
            'last_modified': '12345',
            'id': '1'
        },
        {
            'bar': 'baz',
            'last_modified': '45678',
            'id': '2'
        },
    ]
Example #2
def test_supports_records_as_iterators():
    records = iter([
        {
            'bar': 'baz',
            'last_modified': '45678',
            'id': '1'
        },
    ])
    canonical_json(records, '45678')
def test_does_not_alter_records():
    records = [
        {'foo': 'bar', 'last_modified': '12345', 'id': '1'},
        {'bar': 'baz', 'last_modified': '45678', 'id': '2'},
    ]
    canonical_json(records, '45678')

    assert records == [
        {'foo': 'bar', 'last_modified': '12345', 'id': '1'},
        {'bar': 'baz', 'last_modified': '45678', 'id': '2'},
    ]
Example #4
    def test_destination_creation_and_new_records_signature(self):
        self.source.create_bucket()
        self.source.create_collection()

        # Send new data to the signer.
        with self.source.batch() as batch:
            for n in range(0, 10):
                batch.create_record(data={'newdata': n})

        source_records = self.source.get_records()
        assert len(source_records) == 10

        # Trigger a signature.
        self.source.update_collection(
            data={'status': 'to-sign'},
            method="put")

        # Ensure the remote data is signed properly.
        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        records = self.destination.get_records()
        assert len(records) == 10
        serialized_records = canonical_json(records)
        self.signer.verify(serialized_records, signature)

        # The status of the source collection should be "signed".
        source_collection = self.source.get_collection()['data']
        assert source_collection['status'] == 'signed'
Example #5
    def test_records_deletion_and_signature(self):
        self.source.create_bucket()
        self.source.create_collection()

        # Create some data on the source collection and send it.
        with self.source.batch() as batch:
            for n in range(0, 10):
                batch.create_record(data={'newdata': n})

        source_records = self.source.get_records()
        assert len(source_records) == 10

        # Trigger a signature.
        self.source.update_collection(data={'status': 'to-sign'}, method="put")

        # Wait so the new last_modified timestamp will be greater than the
        # one from the previous records.
        time.sleep(0.01)
        # Now delete one record on the source and trigger another signature.
        self.source.delete_record(source_records[0]['id'])
        self.source.update_collection(data={'status': 'to-sign'}, method="put")

        records = self.destination.get_records()
        assert len(records) == 9

        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        serialized_records = canonical_json(records)
        # This raises when the signature is invalid.
        self.signer.verify(serialized_records, signature)
Example #6
    def test_records_delete_all_and_signature(self):
        source_records = self.source.get_records()
        destination_records = self.destination.get_records()

        assert len(source_records) == len(destination_records)

        self.source.delete_records()

        trigger_signature(editor_client=self.editor_client,
                          reviewer_client=self.source)

        source_records = self.source.get_records()
        destination_records = self.destination.get_records()

        assert len(source_records) == len(destination_records) == 0

        last_modified = collection_timestamp(self.destination)
        serialized_records = canonical_json(destination_records, last_modified)

        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        # This raises when the signature is invalid.
        self.signer.verify(serialized_records, signature)
def test_removes_spaces():
    records = [
        {'foo': 'bar', 'last_modified': '12345', 'id': '1'},
        {'bar': 'baz', 'last_modified': '45678', 'id': '2'},
    ]
    serialized = canonical_json(records, "42")
    assert " " not in serialized
Example #8
    def sign_and_update_destination(self,
                                    request,
                                    source,
                                    next_source_status=STATUS.SIGNED):
        """Sign the specified collection.

        0. Create the destination bucket / collection
        1. Get all the records of the collection
        2. Send all records since the last_modified of the destination
        3. Compute a hash of these records
        4. Ask the signer for a signature
        5. Send the signature to the destination.
        """
        with self.send_events(request):
            self.create_destination(request)

            self.push_records_to_destination(request)

            records, timestamp = self.get_destination_records()
            serialized_records = canonical_json(records, timestamp)
            logger.debug(self.source_collection_uri + ":\t" +
                         serialized_records)
            signature = self.signer.sign(serialized_records)

            self.set_destination_signature(signature, source, request)
            self.update_source_status(next_source_status, request)
            self.invalidate_cloudfront_cache(request, timestamp)
def test_provides_records_in_data_along_last_modified():
    records = [
        {'bar': 'baz', 'last_modified': '45678', 'id': '1'},
    ]
    serialized = json.loads(canonical_json(records, '45678'))
    assert 'data' in serialized
    assert 'last_modified' in serialized
Example #10
def test_preserves_data():
    records = [
        {'foo': 'bar', 'last_modified': '12345', 'id': '1'},
        {'bar': 'baz', 'last_modified': '45678', 'id': '2'},
    ]
    serialized = canonical_json(records, '45678')
    assert records == json.loads(serialized)['data']
Example #12
    def sign_and_update_destination(self, request,
                                    next_source_status=STATUS.SIGNED):
        """Sign the specified collection.

        0. Create the destination bucket / collection
        1. Get all the records of the collection
        2. Send all records since the last_modified of the destination
        3. Compute a hash of these records
        4. Ask the signer for a signature
        5. Send the signature to the destination.
        """
        before_events = request.bound_data["resource_events"]
        request.bound_data["resource_events"] = OrderedDict()

        self.create_destination(request)

        self.push_records_to_destination(request)

        records, timestamp = self.get_destination_records()
        serialized_records = canonical_json(records, timestamp)
        logger.debug(self.source_collection_uri + ":\t" + serialized_records)
        signature = self.signer.sign(serialized_records)

        self.set_destination_signature(signature, request)
        self.update_source_status(next_source_status, request)

        # Re-trigger events from event listener \o/
        for event in request.get_resource_events():
            request.registry.notify(event)
        request.bound_data["resource_events"] = before_events
Example #14
    def sign_and_update_destination(self, request, source_attributes,
                                    next_source_status=STATUS.SIGNED,
                                    previous_source_status=None,
                                    push_records=True):
        """Sign the specified collection.

        0. Create the destination bucket / collection
        1. Get all the records of the collection
        2. Send all records since the last_modified of the destination
        3. Compute a hash of these records
        4. Ask the signer for a signature
        5. Send the signature to the destination.
        """
        self.create_destination(request)

        if push_records:
            self.push_records_to_destination(request)

        records, timestamp = self.get_destination_records(empty_none=False)
        serialized_records = canonical_json(records, timestamp)
        logger.debug("{}:\t'{}'".format(self.source_collection_uri, serialized_records))
        signature = self.signer.sign(serialized_records)

        self.set_destination_signature(signature, source_attributes, request)
        if next_source_status is not None:
            self.update_source_status(next_source_status, request, previous_source_status)

        self.invalidate_cloudfront_cache(request, timestamp)
def test_orders_with_deeply_nested_dicts():
    records = [{
        'a': {
            'b': 'b',
            'a': 'a',
            'c': {
                'b': 'b',
                'a': 'a',
                'c': ['b', 'a', 'c'],
                'd': {
                    'b': 'b',
                    'a': 'a'
                },
                'id': '1',
                'e': 1,
                'f': [2, 3, 1],
                'g': {
                    2: 2,
                    3: 3,
                    1: {
                        'b': 'b',
                        'a': 'a',
                        'c': 'c'
                    }
                }
            }
        },
        'id': '1'
    }]
    expected = (
        '[{"a":{"a":"a","b":"b","c":{"a":"a","b":"b","c":["b","a","c"],'
        '"d":{"a":"a","b":"b"},"e":1,"f":[2,3,1],"g":{'
        '"1":{"a":"a","b":"b","c":"c"},"2":2,"3":3},"id":"1"}},"id":"1"}]')
    assert expected in canonical_json(records, "42")
Example #16
    def sign_and_update_destination(self,
                                    request,
                                    source_attributes,
                                    next_source_status=STATUS.SIGNED,
                                    previous_source_status=None):
        """Sign the specified collection.

        0. Create the destination bucket / collection
        1. Get all the records of the collection
        2. Send all records since the last_modified of the destination
        3. Compute a hash of these records
        4. Ask the signer for a signature
        5. Send the signature to the destination.
        """
        self.create_destination(request)

        self.push_records_to_destination(request)

        records, timestamp = self.get_destination_records(empty_none=False)
        serialized_records = canonical_json(records, timestamp)
        logger.debug("{}:\t'{}'".format(self.source_collection_uri,
                                        serialized_records))
        signature = self.signer.sign(serialized_records)

        self.set_destination_signature(signature, source_attributes, request)
        if next_source_status is not None:
            self.update_source_status(next_source_status, request,
                                      previous_source_status)

        self.invalidate_cloudfront_cache(request, timestamp)
Example #17
    def sign_and_update_destination(self,
                                    request,
                                    source,
                                    next_source_status=STATUS.SIGNED):
        """Sign the specified collection.

        0. Create the destination bucket / collection
        1. Get all the records of the collection
        2. Send all records since the last_modified of the destination
        3. Compute a hash of these records
        4. Ask the signer for a signature
        5. Send the signature to the destination.
        """
        before_events = request.bound_data["resource_events"]
        request.bound_data["resource_events"] = OrderedDict()

        self.create_destination(request)

        self.push_records_to_destination(request)

        records, timestamp = self.get_destination_records()
        serialized_records = canonical_json(records, timestamp)
        logger.debug(self.source_collection_uri + ":\t" + serialized_records)
        signature = self.signer.sign(serialized_records)

        self.set_destination_signature(signature, source, request)
        self.update_source_status(next_source_status, request)

        # Re-trigger events from event listener \o/
        for event in request.get_resource_events():
            request.registry.notify(event)
        request.bound_data["resource_events"] = before_events
Example #18
def test_uses_lowercase_unicode():
    records = [{'id': '4', 'a': '"quoted"', 'b': 'Ich ♥ Bücher'},
               {'id': '26', 'd': None, 'a': ''}]
    assert (
        '[{"a":"","d":null,"id":"26"},'
        '{"a":"\\"quoted\\"","b":"Ich \\u2665 B\\u00fccher","id":"4"}]'
        ) in canonical_json(records, "42")
def test_canonical_json_orders_records_by_id():
    records = [
        {'bar': 'baz', 'last_modified': '45678', 'id': '2'},
        {'foo': 'bar', 'last_modified': '12345', 'id': '1'},
    ]
    serialized = json.loads(canonical_json(records))
    assert serialized[0]['id'] == '1'
    assert serialized[1]['id'] == '2'
Example #21
def test_orders_records_by_id():
    records = [
        {'bar': 'baz', 'last_modified': '45678', 'id': '2'},
        {'foo': 'bar', 'last_modified': '12345', 'id': '1'},
    ]
    serialized = json.loads(canonical_json(records, '45678'))
    assert serialized['last_modified'] == '45678'
    assert serialized['data'][0]['id'] == '1'
    assert serialized['data'][1]['id'] == '2'
Example #22
def test_removes_deleted_items():
    record = {'bar': 'baz', 'last_modified': '45678', 'id': '2'}
    deleted_record = {'deleted': True, 'last_modified': '12345', 'id': '1'}
    records = [
        deleted_record,
        record,
    ]
    serialized = canonical_json(records, "42")
    assert [record] == json.loads(serialized)['data']
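Taken together, the small tests above pin down the observable behaviour of canonical_json(): tombstones are dropped, records are ordered by id, object keys are sorted, whitespace is removed, non-ASCII characters are escaped as lowercase \uXXXX sequences, and the two-argument form wraps everything in a data/last_modified envelope. Below is a minimal sketch of a serializer consistent with those assertions; it is written from the tests only, as an illustration, and is not the real helper these examples import.

import json


def canonical_json(records, last_modified=None):
    # Drop tombstones and order records deterministically by id.
    records = sorted(
        (r for r in records if not r.get('deleted', False)),
        key=lambda r: r['id'],
    )
    # Older callers pass only the records and expect the bare list; newer
    # callers also pass the collection timestamp and get a wrapped payload.
    if last_modified is None:
        payload = records
    else:
        payload = {'data': records, 'last_modified': last_modified}
    # sort_keys orders (nested) object keys, the compact separators remove
    # spaces, and the default ensure_ascii escapes unicode as \uXXXX.
    return json.dumps(payload, sort_keys=True, separators=(',', ':'))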
Example #23
async def validate_signature(verifier, metadata, records, timestamp):
    signature = metadata.get("signature")
    assert signature is not None, "Missing signature"
    x5u = signature["x5u"]
    signature = signature["signature"]

    data = canonical_json(records, timestamp).encode("utf-8")

    return await verifier.verify(data, signature, x5u)
def verify_signatures(collection, records, timestamp):
    try:
        serialized = canonical_json(list(records), timestamp)
        signature = collection['data']['signature']
        with open('pub', 'w') as f:
            f.write(signature['public_key'])
        signer = ECDSASigner(public_key='pub')
        return signer.verify(serialized, signature) is None
    except KintoException as e:
        if e.response.status_code == 401:
            return -1
        return 0
def test_uses_lowercase_unicode():
    records = [{
        'id': '4',
        'a': '"quoted"',
        'b': 'Ich ♥ Bücher'
    }, {
        'id': '26',
        'd': None,
        'a': ''
    }]
    assert ('[{"a":"","d":null,"id":"26"},'
            '{"a":"\\"quoted\\"","b":"Ich \\u2665 B\\u00fccher","id":"4"}]'
            ) in canonical_json(records, "42")
Example #27
    def test_preview_collection_is_updated_and_signed_on_to_review(self):
        create_records(self.client)
        self.anna_client.patch_collection(data={'status': 'to-review'})

        collection = self.client.get_collection(id="preview")
        records = self.client.get_records(collection="preview")
        last_modified = collection_timestamp(self.client, collection="preview")
        serialized_records = canonical_json(records, last_modified)

        signature = collection['data']['signature']
        assert signature is not None
        self.signer.verify(serialized_records, signature)

        assert len(records) == 10
Example #28
    def test_preview_collection_is_updated_and_signed_on_to_review(self):
        create_records(self.client)
        self.anna_client.patch_collection(data={'status': 'to-review'})

        collection = self.client.get_collection(collection="preview")
        records = self.client.get_records(collection="preview")
        last_modified = collection_timestamp(self.client, collection="preview")
        serialized_records = canonical_json(records, last_modified)

        signature = collection['data']['signature']
        assert signature is not None
        self.signer.verify(serialized_records, signature)

        assert len(records) == 10
def test_removes_spaces():
    records = [
        {
            'foo': 'bar',
            'last_modified': '12345',
            'id': '1'
        },
        {
            'bar': 'baz',
            'last_modified': '45678',
            'id': '2'
        },
    ]
    serialized = canonical_json(records, "42")
    assert " " not in serialized
Example #30
    def refresh_signature(self, request, next_source_status):
        """Refresh the signature without moving records.
        """
        records, timestamp = self.get_destination_records(empty_none=False)
        serialized_records = canonical_json(records, timestamp)
        logger.debug("{}:\t'{}'".format(self.source_collection_uri, serialized_records))
        signature = self.signer.sign(serialized_records)
        self.set_destination_signature(signature, request=request, source_attributes={})

        current_userid = request.prefixed_userid
        current_date = datetime.datetime.now(datetime.timezone.utc).isoformat()
        attrs = {'status': next_source_status}
        attrs[TRACKING_FIELDS.LAST_SIGNATURE_BY.value] = current_userid
        attrs[TRACKING_FIELDS.LAST_SIGNATURE_DATE.value] = current_date
        self._update_source_attributes(request, **attrs)

        self.invalidate_cloudfront_cache(request, timestamp)
Example #31
    def test_destination_creation_and_new_records_signature(self):
        # Create some records and trigger another signature.
        self.source.create_record({'newdata': 'hello'})
        self.source.create_record({'newdata': 'bonjour'})

        time.sleep(0.1)

        self.trigger_signature()
        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        records = self.destination.get_records()
        assert len(records) == 12
        last_modified = collection_timestamp(self.destination)
        serialized_records = canonical_json(records, last_modified)
        # This raises when the signature is invalid.
        self.signer.verify(serialized_records, signature)
Example #32
    def sign_and_update_remote(self):
        """Sign the specified collection.

        0. Create the destination bucket / collection
        1. Get all the records of the collection
        2. Compute a hash of these records
        3. Ask the signer for a signature
        4. Send all records since the last_modified field of the Authoritative
           server
        5. Send the signature to the Authoritative server.
        """
        self.create_destination()
        records = self.get_local_records()
        serialized_records = canonical_json(records)
        signature = self.signer.sign(serialized_records)

        self.push_records_to_destination()
        self.set_destination_signature(signature)
        self.update_source_status("signed")
Example #33
    def refresh_signature(self, request, next_source_status):
        """Refresh the signature without moving records.
        """
        records, timestamp = self.get_destination_records(empty_none=False)
        serialized_records = canonical_json(records, timestamp)
        logger.debug("{}:\t'{}'".format(self.source_collection_uri,
                                        serialized_records))
        signature = self.signer.sign(serialized_records)
        self.set_destination_signature(signature,
                                       request=request,
                                       source_attributes={})

        current_userid = request.prefixed_userid
        current_date = datetime.datetime.now(datetime.timezone.utc).isoformat()
        attrs = {'status': next_source_status}
        attrs[TRACKING_FIELDS.LAST_SIGNATURE_BY.value] = current_userid
        attrs[TRACKING_FIELDS.LAST_SIGNATURE_DATE.value] = current_date
        self._update_source_attributes(request, **attrs)

        self.invalidate_cloudfront_cache(request, timestamp)
Example #34
def test_orders_with_deeply_nested_dicts():
    records = [{
        'a': {
            'b': 'b',
            'a': 'a',
            'c': {
                'b': 'b',
                'a': 'a',
                'c': ['b', 'a', 'c'],
                'd': {'b': 'b', 'a': 'a'},
                'id': '1',
                'e': 1,
                'f': [2, 3, 1],
                'g': {2: 2, 3: 3, 1: {
                    'b': 'b', 'a': 'a', 'c': 'c'}}}},
        'id': '1'}]
    expected = (
        '[{"a":{"a":"a","b":"b","c":{"a":"a","b":"b","c":["b","a","c"],'
        '"d":{"a":"a","b":"b"},"e":1,"f":[2,3,1],"g":{'
        '"1":{"a":"a","b":"b","c":"c"},"2":2,"3":3},"id":"1"}},"id":"1"}]')
    assert expected in canonical_json(records, "42")
Example #35
    def test_records_deletion_and_signature(self):
        # Now delete one record on the source and trigger another signature.
        self.source.delete_record(self.source_records[1]['id'])
        self.source.delete_record(self.source_records[5]['id'])

        time.sleep(0.1)

        self.trigger_signature()

        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        records = self.destination.get_records(_since=0)  # obtain deleted too
        last_modified = collection_timestamp(self.destination)
        serialized_records = canonical_json(records, last_modified)

        assert len(records) == 10  # two of them are deleted.
        assert len([r for r in records if r.get('deleted', False)]) == 2

        # This raises when the signature is invalid.
        self.signer.verify(serialized_records, signature)
def main(args=None):
    parser = cli_utils.add_parser_options(
        description='Validate collection signature',
        default_server=DEFAULT_SERVER,
        default_bucket=DEST_BUCKET,
        default_collection=DEST_COLLECTION)

    args = parser.parse_args(args)

    client = cli_utils.create_client_from_args(args)

    # 1. Grab collection information
    dest_col = client.get_collection()

    # 2. Grab records
    records = list(client.get_records(_sort='-last_modified'))
    timestamp = client.get_records_timestamp()

    # 3. Serialize
    serialized = canonical_json(records, timestamp)

    # 4. Compute the hash
    computed_hash = compute_hash(serialized)

    # 5. Grab the signature
    signature = dest_col['data']['signature']

    # 6. Grab the public key
    with open('pub', 'w') as f:
        f.write(signature['public_key'])

    # 7. Verify the signature matches the hash
    signer = ECDSASigner(public_key='pub')
    try:
        signer.verify(serialized, signature)
        print('Signature OK')
    except Exception:
        print('Signature KO. Computed hash: %s' % computed_hash)
        raise
Example #38
    def test_records_update_and_signature(self):
        # Update some records and trigger another signature.
        updated = self.source_records[5].copy()
        updated['newdata'] = 'bump'
        self.source.update_record(updated)
        updated = self.source_records[0].copy()
        updated['newdata'] = 'hoop'
        self.source.update_record(updated)

        time.sleep(0.1)

        self.trigger_signature()
        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        records = self.destination.get_records()
        assert len(records) == 10
        last_modified = collection_timestamp(self.destination)
        serialized_records = canonical_json(records, last_modified)
        # This raises when the signature is invalid.
        self.signer.verify(serialized_records, signature)
Example #39
def main():
    args = _get_args()

    client = Client(server_url=args.server,
                    auth=tuple(args.auth.split(':')),
                    bucket=args.source_bucket,
                    collection=args.source_col)

    if args.editor_auth is None:
        args.editor_auth = args.auth

    if args.reviewer_auth is None:
        args.reviewer_auth = args.auth

    editor_client = Client(server_url=args.server,
                           auth=tuple(args.editor_auth.split(':')),
                           bucket=args.source_bucket,
                           collection=args.source_col)
    reviewer_client = Client(server_url=args.server,
                             auth=tuple(args.reviewer_auth.split(':')),
                             bucket=args.source_bucket,
                             collection=args.source_col)

    # 0. initialize source bucket/collection (if necessary)
    server_info = client.server_info()
    editor_id = editor_client.server_info()['user']['id']
    reviewer_id = reviewer_client.server_info()['user']['id']
    print('Server: {0}'.format(args.server))
    print('Author: {user[id]}'.format(**server_info))
    print('Editor: {0}'.format(editor_id))
    print('Reviewer: {0}'.format(reviewer_id))

    # 0. check that this collection is well configured.
    signer_capabilities = server_info['capabilities']['signer']
    to_review_enabled = signer_capabilities.get('to_review_enabled', False)
    group_check_enabled = signer_capabilities.get('group_check_enabled', False)

    resources = [
        r for r in signer_capabilities['resources']
        if (args.source_bucket, args.source_col) == (r['source']['bucket'],
                                                     r['source']['collection'])
    ]
    assert len(resources) > 0, 'Specified source not configured to be signed'
    resource = resources[0]
    if to_review_enabled and 'preview' in resource:
        print(
            'Signoff: {source[bucket]}/{source[collection]} => {preview[bucket]}/{preview[collection]} => {destination[bucket]}/{destination[collection]}'
            .format(**resource))
    else:
        print(
            'Signoff: {source[bucket]}/{source[collection]} => {destination[bucket]}/{destination[collection]}'
            .format(**resource))
    print('Group check: {0}'.format(group_check_enabled))
    print('Review workflow: {0}'.format(to_review_enabled))

    print('_' * 80)

    bucket = client.create_bucket(if_not_exists=True)
    client.patch_bucket(permissions={
        'write': [editor_id, reviewer_id] + bucket['permissions']['write']
    },
                        if_match=bucket['data']['last_modified'],
                        safe=True)

    client.create_collection(if_not_exists=True)

    if args.reset:
        client.delete_records()
        existing = 0
    else:
        existing_records = client.get_records()
        existing = len(existing_records)

    if group_check_enabled:
        editors_group = signer_capabilities['editors_group']
        client.create_group(editors_group,
                            data={'members': [editor_id]},
                            if_not_exists=True)
        reviewers_group = signer_capabilities['reviewers_group']
        client.create_group(reviewers_group,
                            data={'members': [reviewer_id]},
                            if_not_exists=True)

    dest_client = Client(server_url=args.server,
                         bucket=resource['destination']['bucket'],
                         collection=resource['destination']['collection'])

    preview_client = None
    if to_review_enabled and 'preview' in resource:
        preview_bucket = resource['preview']['bucket']
        preview_collection = resource['preview']['collection']
        preview_client = Client(server_url=args.server,
                                bucket=preview_bucket,
                                collection=preview_collection)

    # 1. upload data
    print('Author uploads 20 random records')
    records = upload_records(client, 20)

    # 2. ask for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        expected = existing + 20
        assert len(preview_records) == expected, '%s != %s records' % (
            len(preview_records), expected)
        metadata = preview_client.get_collection()['data']
        preview_signature = metadata.get('signature')
        assert preview_signature, 'Preview collection not signed'
        preview_timestamp = collection_timestamp(preview_client)
    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 3. upload more data
    print('Author creates 20 other records')
    upload_records(client, 20)

    print('Editor updates 5 random records')
    for toupdate in random.sample(records, 5):
        editor_client.patch_record(dict(newkey=_rand(10), **toupdate))

    print('Author deletes 5 random records')
    for todelete in random.sample(records, 5):
        client.delete_record(todelete['id'])

    expected = existing + 20 + 20 - 5

    # 4. ask again for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        assert len(preview_records) == expected, '%s != %s records' % (
            len(preview_records), expected)
        # Diff size is 20 + 5 if updated records are also all deleted,
        # or 30 if deletions and updates apply to different records.
        diff_since_last = preview_client.get_records(_since=preview_timestamp)
        assert 25 <= len(
            diff_since_last
        ) <= 30, 'Changes since last signature are not consistent'

        metadata = preview_client.get_collection()['data']
        assert preview_signature != metadata[
            'signature'], 'Preview collection not updated'

    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 5. wait for the result

    # 6. obtain the destination records and serialize canonically.

    records = list(dest_client.get_records())
    assert len(records) == expected, '%s != %s records' % (len(records),
                                                           expected)
    timestamp = collection_timestamp(dest_client)
    serialized = canonical_json(records, timestamp)
    print('Hash is %r' % compute_hash(serialized))

    # 7. get back the signed hash

    dest_col = dest_client.get_collection()
    signature = dest_col['data']['signature']

    with open('pub', 'w') as f:
        f.write(signature['public_key'])

    # 8. verify the signature matches the hash
    signer = ECDSASigner(public_key='pub')
    try:
        signer.verify(serialized, signature)
        print('Signature OK')
    except Exception:
        print('Signature KO')
        raise
Example #40
def test_escapes_backslashes():
    records = [{'id': '4', 'a': "This\\ this"}]
    assert '[{"a":"This\\\\ this","id":"4"}]' in canonical_json(records, "42")
def validate_signature(event, context, **kwargs):
    """Validate the signature of each collection.
    """
    server_url = event["server"]
    bucket = event.get("bucket", "monitor")
    collection = event.get("collection", "changes")
    client = Client(server_url=server_url, bucket=bucket, collection=collection)
    print("Read collection list from {}".format(client.get_endpoint("collection")))

    error_messages = []

    checked_certificates = {}

    collections = client.get_records()

    # Grab server data in parallel.
    start_time = time.time()
    collections_data = []
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=PARALLEL_REQUESTS
    ) as executor:
        futures = [
            executor.submit(download_collection_data, server_url, c)
            for c in collections
        ]
        for future in concurrent.futures.as_completed(futures):
            collections_data.append(future.result())
    elapsed_time = time.time() - start_time
    print(f"Downloaded all data in {elapsed_time:.2f}s")

    for i, (collection, endpoint, metadata, records, timestamp) in enumerate(
        collections_data
    ):
        start_time = time.time()

        message = "{:02d}/{:02d} {}:  ".format(i + 1, len(collections), endpoint)

        # 1. Serialize
        serialized = canonical_json(records, timestamp)
        data = b"Content-Signature:\x00" + serialized.encode("utf-8")

        # 2. Grab the signature
        try:
            signature = metadata["signature"]
        except KeyError:
            # Destination has no signature attribute.
            # Be smart and check if it was just configured.
            # See https://github.com/mozilla-services/remote-settings-lambdas/issues/31
            client = Client(
                server_url=server_url,
                bucket=collection["bucket"],
                collection=collection["collection"],
            )
            with_tombstones = client.get_records(_since=1)
            if len(with_tombstones) == 0:
                # It never contained records. Let's assume it is newly configured.
                message += "SKIP"
                print(message)
                continue
            # Some records and empty signature? It will fail below.
            signature = {}

        try:
            # 3. Verify the signature with the public key
            pubkey = signature["public_key"].encode("utf-8")
            verifier = ecdsa.VerifyingKey.from_pem(pubkey)
            signature_bytes = base64.urlsafe_b64decode(signature["signature"])
            verified = verifier.verify(signature_bytes, data, hashfunc=hashlib.sha384)
            assert verified, "Signature verification failed"

            # 4. Verify that the x5u certificate is valid (i.e. that the signature was refreshed recently)
            x5u = signature["x5u"]
            if x5u not in checked_certificates:
                resp = requests.get(signature["x5u"])
                cert_pem = resp.text.encode("utf-8")
                cert = cryptography.x509.load_pem_x509_certificate(
                    cert_pem, crypto_default_backend()
                )
                assert (
                    cert.not_valid_before < datetime.now()
                ), "certificate not yet valid"
                assert cert.not_valid_after > datetime.now(), "certificate expired"
                subject = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[
                    0
                ].value
                # eg. ``onecrl.content-signature.mozilla.org``, or
                # ``pinning-preload.content-signature.mozilla.org``
                assert subject.endswith(
                    ".content-signature.mozilla.org"
                ), "invalid subject name"
                checked_certificates[x5u] = cert

            # 5. Check that public key matches the certificate one.
            cert = checked_certificates[x5u]
            cert_pubkey_pem = cert.public_key().public_bytes(
                crypto_serialization.Encoding.PEM,
                crypto_serialization.PublicFormat.SubjectPublicKeyInfo,
            )
            assert (
                unpem(cert_pubkey_pem) == pubkey
            ), "signature public key does not match certificate"

            elapsed_time = time.time() - start_time
            message += f"OK ({elapsed_time:.2f}s)"
            print(message)
        except Exception:
            message += "⚠ BAD Signature ⚠"
            print(message)

            # Gather details for the global exception that will be raised.
            signed_on = metadata["last_modified"]
            signed_on_date = timestamp_to_date(signed_on)
            timestamp_date = timestamp_to_date(timestamp)
            error_message = (
                "Signature verification failed on {endpoint}\n"
                " - Signed on: {signed_on} ({signed_on_date})\n"
                " - Records timestamp: {timestamp} ({timestamp_date})"
            ).format(**locals())
            error_messages.append(error_message)

    # Make the lambda fail in case an exception occurred
    if len(error_messages) > 0:
        raise ValidationError("\n" + "\n\n".join(error_messages))
Example #42
def test_escapes_unicode_object_keys():
    records = [{'id': '4', 'é': 1}]
    assert '[{"id":"4","\\u00e9":1}]' in canonical_json(records, "42")
def test_serializes_none_to_null():
    records = [{'id': '4', 'a': None}]
    assert '[{"a":null,"id":"4"}]' in canonical_json(records, "42")
def test_serializes_empty_string():
    records = [{'id': '4', 'a': ''}]
    assert '[{"a":"","id":"4"}]' in canonical_json(records, "42")
def test_serializes_empty_array():
    records = [{'id': '4', 'a': []}]
    assert '[{"a":[],"id":"4"}]' in canonical_json(records, "42")
def test_serializes_empty_object():
    records = [{'id': '4', 'a': {}}]
    assert '[{"a":{},"id":"4"}]' in canonical_json(records, "42")
Example #48
def test_escapes_quotes():
    records = [{'id': '4', 'a': "\""}]
    assert '[{"a":"\\"","id":"4"}]' in canonical_json(records, "42")
def test_orders_object_keys():
    records = [{'a': 'a', 'id': '1', 'b': 'b'}]
    assert '[{"a":"a","b":"b","id":"1"}]' in canonical_json(records, "42")
Example #50
def test_orders_nested_keys():
    records = [{'a': {'b': 'b', 'a': 'a'}, 'id': '1'}]
    assert '[{"a":{"a":"a","b":"b"},"id":"1"}]' in canonical_json(records, "1")
Example #53
def test_preserves_forwardslashes():
    records = [{'id': '4', 'a': "image/jpeg"}]
    assert '[{"a":"image/jpeg","id":"4"}]' in canonical_json(records, "42")
Example #55
def test_preserves_predefined_json_escapes():
    records = [{'id': '4', 'a': "\n"}]
    assert '[{"a":"\\n","id":"4"}]' in canonical_json(records, "42")
def validate_signature(event, context, **kwargs):
    """Validate the signature of each collection.
    """
    server_url = event["server"]
    bucket = event.get("bucket", "monitor")
    collection = event.get("collection", "changes")
    client = Client(server_url=server_url,
                    bucket=bucket,
                    collection=collection)
    print("Read collection list from {}".format(
        client.get_endpoint("collection")))

    error_messages = []

    checked_certificates = {}

    collections = client.get_records()

    # Grab server data in parallel.
    start_time = time.time()
    collections_data = []
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=PARALLEL_REQUESTS) as executor:
        futures = [
            executor.submit(download_collection_data, server_url, c)
            for c in collections
        ]
        for future in concurrent.futures.as_completed(futures):
            collections_data.append(future.result())
    elapsed_time = time.time() - start_time
    print(f"Downloaded all data in {elapsed_time:.2f}s")

    for i, (collection, endpoint, metadata, records,
            timestamp) in enumerate(collections_data):
        start_time = time.time()

        message = "{:02d}/{:02d} {}:  ".format(i + 1, len(collections),
                                               endpoint)

        # 1. Serialize
        serialized = canonical_json(records, timestamp)
        data = b"Content-Signature:\x00" + serialized.encode("utf-8")

        # 2. Grab the signature
        try:
            signature = metadata["signature"]
        except KeyError:
            # Destination has no signature attribute.
            # Be smart and check if it was just configured.
            # See https://github.com/mozilla-services/remote-settings-lambdas/issues/31
            client = Client(
                server_url=server_url,
                bucket=collection["bucket"],
                collection=collection["collection"],
            )
            with_tombstones = client.get_records(_since=1)
            if len(with_tombstones) == 0:
                # It never contained records. Let's assume it is newly configured.
                message += "SKIP"
                print(message)
                continue
            # Some records and empty signature? It will fail below.
            signature = {}

        try:
            # 3. Verify the signature with the public key
            pubkey = signature["public_key"].encode("utf-8")
            verifier = ecdsa.VerifyingKey.from_pem(pubkey)
            signature_bytes = base64.urlsafe_b64decode(signature["signature"])
            verified = verifier.verify(signature_bytes,
                                       data,
                                       hashfunc=hashlib.sha384)
            assert verified, "Signature verification failed"

            # 4. Verify that the x5u certificate is valid (i.e. that the signature was refreshed recently)
            x5u = signature["x5u"]
            if x5u not in checked_certificates:
                resp = requests.get(signature["x5u"])
                cert_pem = resp.text.encode("utf-8")
                cert = cryptography.x509.load_pem_x509_certificate(
                    cert_pem, crypto_default_backend())
                assert (cert.not_valid_before <
                        datetime.now()), "certificate not yet valid"
                assert cert.not_valid_after > datetime.now(
                ), "certificate expired"
                subject = cert.subject.get_attributes_for_oid(
                    NameOID.COMMON_NAME)[0].value
                # eg. ``onecrl.content-signature.mozilla.org``, or
                # ``pinning-preload.content-signature.mozilla.org``
                assert subject.endswith(
                    ".content-signature.mozilla.org"), "invalid subject name"
                checked_certificates[x5u] = cert

            # 5. Check that public key matches the certificate one.
            cert = checked_certificates[x5u]
            cert_pubkey_pem = cert.public_key().public_bytes(
                crypto_serialization.Encoding.PEM,
                crypto_serialization.PublicFormat.SubjectPublicKeyInfo,
            )
            assert (unpem(cert_pubkey_pem) == pubkey
                    ), "signature public key does not match certificate"

            elapsed_time = time.time() - start_time
            message += f"OK ({elapsed_time:.2f}s)"
            print(message)
        except Exception:
            message += "⚠ BAD Signature ⚠"
            print(message)

            # Gather details for the global exception that will be raised.
            signed_on = metadata["last_modified"]
            signed_on_date = timestamp_to_date(signed_on)
            timestamp_date = timestamp_to_date(timestamp)
            error_message = (
                "Signature verification failed on {endpoint}\n"
                " - Signed on: {signed_on} ({signed_on_date})\n"
                " - Records timestamp: {timestamp} ({timestamp_date})").format(
                    **locals())
            error_messages.append(error_message)

    # Make the lambda fail in case an exception occurred
    if len(error_messages) > 0:
        raise ValidationError("\n" + "\n\n".join(error_messages))
Example #57
def main():
    args = _get_args()

    client = Client(server_url=args.server, auth=tuple(args.auth.split(':')),
                    bucket=args.source_bucket,
                    collection=args.source_col)

    if args.editor_auth is None:
        args.editor_auth = args.auth

    if args.reviewer_auth is None:
        args.reviewer_auth = args.auth

    editor_client = Client(server_url=args.server,
                           auth=tuple(args.editor_auth.split(':')),
                           bucket=args.source_bucket,
                           collection=args.source_col)
    reviewer_client = Client(server_url=args.server,
                             auth=tuple(args.reviewer_auth.split(':')),
                             bucket=args.source_bucket,
                             collection=args.source_col)

    # 0. initialize source bucket/collection (if necessary)
    server_info = client.server_info()
    editor_id = editor_client.server_info()['user']['id']
    reviewer_id = reviewer_client.server_info()['user']['id']
    print('Server: {0}'.format(args.server))
    print('Author: {user[id]}'.format(**server_info))
    print('Editor: {0}'.format(editor_id))
    print('Reviewer: {0}'.format(reviewer_id))

    # 0. check that this collection is well configured.
    signer_capabilities = server_info['capabilities']['signer']

    resources = [r for r in signer_capabilities['resources']
                 if (args.source_bucket, args.source_col) == (r['source']['bucket'], r['source']['collection']) or
                    (args.source_bucket, None) == (r['source']['bucket'], r['source']['collection'])]
    assert len(resources) > 0, 'Specified source not configured to be signed'
    resource = resources[0]
    if 'preview' in resource:
        print('Signoff: {source[bucket]}/{source[collection]} => {preview[bucket]}/{preview[collection]} => {destination[bucket]}/{destination[collection]}'.format(**resource))
    else:
        print('Signoff: {source[bucket]}/{source[collection]} => {destination[bucket]}/{destination[collection]}'.format(**resource))

    print('_' * 80)

    bucket = client.create_bucket(if_not_exists=True)
    client.create_collection(permissions={'write': [editor_id, reviewer_id] + bucket['permissions']['write']}, if_not_exists=True)

    editors_group = resource.get('editors_group') or signer_capabilities['editors_group']
    editors_group = editors_group.format(collection_id=args.source_col)
    client.patch_group(id=editors_group, data={'members': [editor_id]})

    reviewers_group = resource.get('reviewers_group') or signer_capabilities['reviewers_group']
    reviewers_group = reviewers_group.format(collection_id=args.source_col)
    client.patch_group(id=reviewers_group, data={'members': [reviewer_id]})

    if args.reset:
        client.delete_records()
        existing = 0
    else:
        existing_records = client.get_records()
        existing = len(existing_records)

    dest_col = resource['destination'].get('collection') or args.source_col
    dest_client = Client(server_url=args.server,
                         bucket=resource['destination']['bucket'],
                         collection=dest_col)

    preview_client = None
    if 'preview' in resource:
        preview_bucket = resource['preview']['bucket']
        preview_collection = resource['preview'].get('collection') or args.source_col
        preview_client = Client(server_url=args.server,
                                bucket=preview_bucket,
                                collection=preview_collection)

    # 1. upload data
    print('Author uploads 20 random records')
    records = upload_records(client, 20)

    # 2. ask for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        expected = existing + 20
        assert len(preview_records) == expected, '%s != %s records' % (len(preview_records), expected)
        metadata = preview_client.get_collection()['data']
        preview_signature = metadata.get('signature')
        assert preview_signature, 'Preview collection not signed'
        preview_timestamp = preview_client.get_records_timestamp()
    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 3. upload more data
    print('Author creates 20 other records')
    upload_records(client, 20)

    print('Editor updates 5 random records')
    for toupdate in random.sample(records, 5):
        editor_client.patch_record(data=dict(newkey=_rand(10), **toupdate))

    print('Author deletes 5 random records')
    for todelete in random.sample(records, 5):
        client.delete_record(id=todelete['id'])

    expected = existing + 20 + 20 - 5

    # 4. ask again for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        assert len(preview_records) == expected, '%s != %s records' % (len(preview_records), expected)
        # Diff size is 20 + 5 if updated records are also all deleted,
        # or 30 if deletions and updates apply to different records.
        diff_since_last = preview_client.get_records(_since=preview_timestamp)
        assert 25 <= len(diff_since_last) <= 30, 'Changes since last signature are not consistent'

        metadata = preview_client.get_collection()['data']
        assert preview_signature != metadata['signature'], 'Preview collection not updated'

    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 5. wait for the result

    # 6. obtain the destination records and serialize canonically.

    records = list(dest_client.get_records())
    assert len(records) == expected, '%s != %s records' % (len(records), expected)
    timestamp = dest_client.get_records_timestamp()
    serialized = canonical_json(records, timestamp)
    print('Hash is %r' % compute_hash(serialized))

    # 7. get back the signed hash

    signature = dest_client.get_collection()['data']['signature']

    with open('pub', 'w') as f:
        f.write(signature['public_key'])

    # 8. verify the signature matches the hash
    signer = ECDSASigner(public_key='pub')
    try:
        signer.verify(serialized, signature)
        print('Signature OK')
    except Exception:
        print('Signature KO')
        raise