Example #1
    def test_generated_keypairs_can_be_loaded(self):
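        # Generate a keypair on disk, load the private key into a signer, and sign a test payload.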
        private_key_location = tempfile.mktemp("private_key")
        public_key_location = tempfile.mktemp("public_key")

        generate_keypair(private_key_location, public_key_location)
        backend = ECDSASigner(private_key=private_key_location)
        backend.sign("test")
Example #2
def verify_signatures(collection, records, timestamp):
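    # Canonically serialize the records and check the collection signature against its embedded public key.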
    try:
        serialized = canonical_json(list(records), timestamp)
        signature = collection['data']['signature']
        with open('pub', 'w') as f:
            f.write(signature['public_key'])
        signer = ECDSASigner(public_key='pub')
        return signer.verify(serialized, signature) is None
    except KintoException as e:
        if e.response.status_code == 401:
            return -1
        return 0
Example #3
    def __init__(self, *args, **kwargs):
        super(FunctionalTest, self).__init__(*args, **kwargs)
        self.auth = DEFAULT_AUTH
        self.private_key = os.path.join(__HERE__, 'config/ecdsa.private.pem')

        self.signer_config = configparser.RawConfigParser()
        self.signer_config.read(os.path.join(__HERE__, 'config/signer.ini'))
        priv_key = self.signer_config.get(
            'app:main', 'kinto.signer.ecdsa.private_key')
        self.signer = ECDSASigner(private_key=priv_key)

        # Setup the kinto clients for the source and destination.
        self._auth = DEFAULT_AUTH
        self._server_url = SERVER_URL
        self._source_bucket = "source"
        self._destination_bucket = "destination"
        self._collection_id = "collection1"

        self.source = Client(
            server_url=self._server_url,
            auth=self._auth,
            bucket=self._source_bucket,
            collection=self._collection_id)

        self.destination = Client(
            server_url=self._server_url,
            auth=self._auth,
            bucket=self._destination_bucket,
            collection=self._collection_id)
Example #4
def generate_keypair(private_key_location, public_key_location):
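    # Write a freshly generated ECDSA keypair to the given private and public key file paths.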
    private_key, public_key = ECDSASigner.generate_keypair()

    with open(private_key_location, 'wb+') as tmp_file:
        tmp_file.write(private_key)

    with open(public_key_location, 'wb+') as tmp_file:
        tmp_file.write(public_key)
Example #5
def main(args=None):
    parser = cli_utils.add_parser_options(
        description='Validate collection signature',
        default_server=DEFAULT_SERVER,
        default_bucket=DEST_BUCKET,
        default_collection=DEST_COLLECTION)

    args = parser.parse_args(args)

    client = cli_utils.create_client_from_args(args)

    # 1. Grab collection information
    dest_col = client.get_collection()

    # 2. Grab records
    records = list(client.get_records(_sort='-last_modified'))
    timestamp = client.get_records_timestamp()

    # 3. Serialize
    serialized = canonical_json(records, timestamp)

    # 4. Compute the hash
    computed_hash = compute_hash(serialized)

    # 5. Grab the signature
    signature = dest_col['data']['signature']

    # 6. Grab the public key
    with open('pub', 'w') as f:
        f.write(signature['public_key'])

    # 7. Verify the signature matches the hash
    signer = ECDSASigner(public_key='pub')
    try:
        signer.verify(serialized, signature)
        print('Signature OK')
    except Exception:
        print('Signature KO. Computed hash: %s' % computed_hash)
        raise
Example #7
import os

from kinto_signer.signer.local_ecdsa import ECDSASigner
from kinto_signer.signer.autograph import AutographSigner


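# Sign the same payload with a remote Autograph service and with a local ECDSA keypair.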
autograph_signer = AutographSigner(
    server_url='http://localhost:8000',
    hawk_id='alice',
    hawk_secret='fs5wgcer9qj819kfptdlp8gm227ewxnzvsuj9ztycsx08hfhzu')

here = os.path.abspath(os.path.dirname(__file__))
config_folder = os.path.join(here, '..', 'kinto_signer', 'tests', 'config')
python_signer = ECDSASigner(
    private_key=os.path.join(config_folder, 'ecdsa.private.pem'),
    public_key=os.path.join(config_folder, 'ecdsa.public.pem'))


def sign_data(signer, data):
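    # Sign the payload with the given signer backend and print the resulting signature.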
    signature = signer.sign(data)
    print("signature", signature)


data = "TEST"
sign_data(autograph_signer, data)
sign_data(python_signer, data)
Example #8
class FunctionalTest(unittest2.TestCase):

    def __init__(self, *args, **kwargs):
        super(FunctionalTest, self).__init__(*args, **kwargs)
        self.auth = DEFAULT_AUTH
        self.private_key = os.path.join(__HERE__, 'config/ecdsa.private.pem')

        self.signer_config = configparser.RawConfigParser()
        self.signer_config.read(os.path.join(__HERE__, 'config/signer.ini'))
        priv_key = self.signer_config.get(
            'app:main', 'kinto.signer.ecdsa.private_key')
        self.signer = ECDSASigner(private_key=priv_key)

        # Setup the kinto clients for the source and destination.
        self._auth = DEFAULT_AUTH
        self._server_url = SERVER_URL
        self._source_bucket = "source"
        self._destination_bucket = "destination"
        self._collection_id = "collection1"

        self.source = Client(
            server_url=self._server_url,
            auth=self._auth,
            bucket=self._source_bucket,
            collection=self._collection_id)

        self.destination = Client(
            server_url=self._server_url,
            auth=self._auth,
            bucket=self._destination_bucket,
            collection=self._collection_id)

    def tearDown(self):
        # Delete all the created objects.
        self._flush_server(self._server_url)

    def _flush_server(self, server_url):
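        # Reset the test server by POSTing to its /__flush__ endpoint.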
        flush_url = urljoin(server_url, '/__flush__')
        resp = requests.post(flush_url)
        resp.raise_for_status()

    def test_destination_creation_and_new_records_signature(self):
        self.source.create_bucket()
        self.source.create_collection()

        # Send new data to the signer.
        with self.source.batch() as batch:
            for n in range(0, 10):
                batch.create_record(data={'newdata': n})

        source_records = self.source.get_records()
        assert len(source_records) == 10

        # Trigger a signature.
        self.source.update_collection(
            data={'status': 'to-sign'},
            method="put")

        # Ensure the remote data is signed properly.
        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        records = self.destination.get_records()
        assert len(records) == 10
        serialized_records = canonical_json(records)
        self.signer.verify(serialized_records, signature)

        # the status of the source collection should be "signed".
        source_collection = self.source.get_collection()['data']
        assert source_collection['status'] == 'signed'

    def test_records_deletion_and_signature(self):
        self.source.create_bucket()
        self.source.create_collection()

        # Create some data on the source collection and send it.
        with self.source.batch() as batch:
            for n in range(0, 10):
                batch.create_record(data={'newdata': n})

        source_records = self.source.get_records()
        assert len(source_records) == 10

        # Trigger a signature.
        self.source.update_collection(data={'status': 'to-sign'}, method="put")

        # Wait so the new last_modified timestamp will be greater than the
        # one from the previous records.
        time.sleep(0.01)
        # Now delete one record on the source and trigger another signature.
        self.source.delete_record(source_records[0]['id'])
        self.source.update_collection(data={'status': 'to-sign'}, method="put")

        records = self.destination.get_records()
        assert len(records) == 9

        data = self.destination.get_collection()
        signature = data['data']['signature']
        assert signature is not None

        serialized_records = canonical_json(records)
        # This raises when the signature is invalid.
        self.signer.verify(serialized_records, signature)
Example #9
def main():
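    # End-to-end signoff scenario: the author uploads records, the editor requests review,
    # the reviewer approves, and the destination collection signature is verified.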
    args = _get_args()

    client = Client(server_url=args.server,
                    auth=tuple(args.auth.split(':')),
                    bucket=args.source_bucket,
                    collection=args.source_col)

    if args.editor_auth is None:
        args.editor_auth = args.auth

    if args.reviewer_auth is None:
        args.reviewer_auth = args.auth

    editor_client = Client(server_url=args.server,
                           auth=tuple(args.editor_auth.split(':')),
                           bucket=args.source_bucket,
                           collection=args.source_col)
    reviewer_client = Client(server_url=args.server,
                             auth=tuple(args.reviewer_auth.split(':')),
                             bucket=args.source_bucket,
                             collection=args.source_col)

    # 0. initialize source bucket/collection (if necessary)
    server_info = client.server_info()
    editor_id = editor_client.server_info()['user']['id']
    reviewer_id = reviewer_client.server_info()['user']['id']
    print('Server: {0}'.format(args.server))
    print('Author: {user[id]}'.format(**server_info))
    print('Editor: {0}'.format(editor_id))
    print('Reviewer: {0}'.format(reviewer_id))

    # 0. check that this collection is well configured.
    signer_capabilities = server_info['capabilities']['signer']
    to_review_enabled = signer_capabilities.get('to_review_enabled', False)
    group_check_enabled = signer_capabilities.get('group_check_enabled', False)

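    # Find the signer resource whose source matches the requested bucket and collection.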
    resources = [
        r for r in signer_capabilities['resources']
        if (args.source_bucket, args.source_col) == (r['source']['bucket'],
                                                     r['source']['collection'])
    ]
    assert len(resources) > 0, 'Specified source not configured to be signed'
    resource = resources[0]
    if to_review_enabled and 'preview' in resource:
        print(
            'Signoff: {source[bucket]}/{source[collection]} => {preview[bucket]}/{preview[collection]} => {destination[bucket]}/{destination[collection]}'
            .format(**resource))
    else:
        print(
            'Signoff: {source[bucket]}/{source[collection]} => {destination[bucket]}/{destination[collection]}'
            .format(**resource))
    print('Group check: {0}'.format(group_check_enabled))
    print('Review workflow: {0}'.format(to_review_enabled))

    print('_' * 80)

    bucket = client.create_bucket(if_not_exists=True)
    client.patch_bucket(
        permissions={'write': [editor_id, reviewer_id] + bucket['permissions']['write']},
        if_match=bucket['data']['last_modified'],
        safe=True)

    client.create_collection(if_not_exists=True)

    if args.reset:
        client.delete_records()
        existing = 0
    else:
        existing_records = client.get_records()
        existing = len(existing_records)

    if group_check_enabled:
        editors_group = signer_capabilities['editors_group']
        client.create_group(editors_group,
                            data={'members': [editor_id]},
                            if_not_exists=True)
        reviewers_group = signer_capabilities['reviewers_group']
        client.create_group(reviewers_group,
                            data={'members': [reviewer_id]},
                            if_not_exists=True)

    dest_client = Client(server_url=args.server,
                         bucket=resource['destination']['bucket'],
                         collection=resource['destination']['collection'])

    preview_client = None
    if to_review_enabled and 'preview' in resource:
        preview_bucket = resource['preview']['bucket']
        preview_collection = resource['preview']['collection']
        preview_client = Client(server_url=args.server,
                                bucket=preview_bucket,
                                collection=preview_collection)

    # 1. upload data
    print('Author uploads 20 random records')
    records = upload_records(client, 20)

    # 2. ask for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        expected = existing + 20
        assert len(preview_records) == expected, '%s != %s records' % (
            len(preview_records), expected)
        metadata = preview_client.get_collection()['data']
        preview_signature = metadata.get('signature')
        assert preview_signature, 'Preview collection not signed'
        preview_timestamp = collection_timestamp(preview_client)
    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 3. upload more data
    print('Author creates 20 others records')
    upload_records(client, 20)

    print('Editor updates 5 random records')
    for toupdate in random.sample(records, 5):
        editor_client.patch_record(dict(newkey=_rand(10), **toupdate))

    print('Author deletes 5 random records')
    for todelete in random.sample(records, 5):
        client.delete_record(todelete['id'])

    expected = existing + 20 + 20 - 5

    # 4. ask again for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        assert len(preview_records) == expected, '%s != %s records' % (
            len(preview_records), expected)
        # Diff size is 20 + 5 if updated records are also all deleted,
        # or 30 if deletions and updates apply to different records.
        diff_since_last = preview_client.get_records(_since=preview_timestamp)
        assert 25 <= len(diff_since_last) <= 30, \
            'Changes since last signature are not consistent'

        metadata = preview_client.get_collection()['data']
        assert preview_signature != metadata['signature'], \
            'Preview collection not updated'

    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 5. wait for the result

    # 6. obtain the destination records and serialize canonically.

    records = list(dest_client.get_records())
    assert len(records) == expected, '%s != %s records' % (len(records),
                                                           expected)
    timestamp = collection_timestamp(dest_client)
    serialized = canonical_json(records, timestamp)
    print('Hash is %r' % compute_hash(serialized))

    # 7. get back the signed hash

    dest_col = dest_client.get_collection()
    signature = dest_col['data']['signature']

    with open('pub', 'w') as f:
        f.write(signature['public_key'])

    # 8. verify the signature matches the hash
    signer = ECDSASigner(public_key='pub')
    try:
        signer.verify(serialized, signature)
        print('Signature OK')
    except Exception:
        print('Signature KO')
        raise
Example #10
def main():
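    # Same signoff scenario as above, with per-collection editors/reviewers groups
    # and support for bucket-wide signer resources.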
    args = _get_args()

    client = Client(server_url=args.server, auth=tuple(args.auth.split(':')),
                    bucket=args.source_bucket,
                    collection=args.source_col)

    if args.editor_auth is None:
        args.editor_auth = args.auth

    if args.reviewer_auth is None:
        args.reviewer_auth = args.auth

    editor_client = Client(server_url=args.server,
                           auth=tuple(args.editor_auth.split(':')),
                           bucket=args.source_bucket,
                           collection=args.source_col)
    reviewer_client = Client(server_url=args.server,
                             auth=tuple(args.reviewer_auth.split(':')),
                             bucket=args.source_bucket,
                             collection=args.source_col)

    # 0. initialize source bucket/collection (if necessary)
    server_info = client.server_info()
    editor_id = editor_client.server_info()['user']['id']
    reviewer_id = reviewer_client.server_info()['user']['id']
    print('Server: {0}'.format(args.server))
    print('Author: {user[id]}'.format(**server_info))
    print('Editor: {0}'.format(editor_id))
    print('Reviewer: {0}'.format(reviewer_id))

    # 0. check that this collection is well configured.
    signer_capabilities = server_info['capabilities']['signer']

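    # Find the signer resource whose source matches the requested bucket/collection, or a bucket-wide entry.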
    resources = [r for r in signer_capabilities['resources']
                 if (args.source_bucket, args.source_col) == (r['source']['bucket'], r['source']['collection']) or
                    (args.source_bucket, None) == (r['source']['bucket'], r['source']['collection'])]
    assert len(resources) > 0, 'Specified source not configured to be signed'
    resource = resources[0]
    if 'preview' in resource:
        print('Signoff: {source[bucket]}/{source[collection]} => {preview[bucket]}/{preview[collection]} => {destination[bucket]}/{destination[collection]}'.format(**resource))
    else:
        print('Signoff: {source[bucket]}/{source[collection]} => {destination[bucket]}/{destination[collection]}'.format(**resource))

    print('_' * 80)

    bucket = client.create_bucket(if_not_exists=True)
    client.create_collection(permissions={'write': [editor_id, reviewer_id] + bucket['permissions']['write']}, if_not_exists=True)

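    # Add the editor and reviewer to their respective signoff groups.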
    editors_group = resource.get('editors_group') or signer_capabilities['editors_group']
    editors_group = editors_group.format(collection_id=args.source_col)
    client.patch_group(id=editors_group, data={'members': [editor_id]})

    reviewers_group = resource.get('reviewers_group') or signer_capabilities['reviewers_group']
    reviewers_group = reviewers_group.format(collection_id=args.source_col)
    client.patch_group(id=reviewers_group, data={'members': [reviewer_id]})

    if args.reset:
        client.delete_records()
        existing = 0
    else:
        existing_records = client.get_records()
        existing = len(existing_records)

    dest_col = resource['destination'].get('collection') or args.source_col
    dest_client = Client(server_url=args.server,
                         bucket=resource['destination']['bucket'],
                         collection=dest_col)

    preview_client = None
    if 'preview' in resource:
        preview_bucket = resource['preview']['bucket']
        preview_collection = resource['preview'].get('collection') or args.source_col
        preview_client = Client(server_url=args.server,
                                bucket=preview_bucket,
                                collection=preview_collection)

    # 1. upload data
    print('Author uploads 20 random records')
    records = upload_records(client, 20)

    # 2. ask for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        expected = existing + 20
        assert len(preview_records) == expected, '%s != %s records' % (len(preview_records), expected)
        metadata = preview_client.get_collection()['data']
        preview_signature = metadata.get('signature')
        assert preview_signature, 'Preview collection not signed'
        preview_timestamp = preview_client.get_records_timestamp()
    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 3. upload more data
    print('Author creates 20 others records')
    upload_records(client, 20)

    print('Editor updates 5 random records')
    for toupdate in random.sample(records, 5):
        editor_client.patch_record(data=dict(newkey=_rand(10), **toupdate))

    print('Author deletes 5 random records')
    for todelete in random.sample(records, 5):
        client.delete_record(id=todelete['id'])

    expected = existing + 20 + 20 - 5

    # 4. ask again for a signature
    # 2.1 ask for review (noop on old versions)
    print('Editor asks for review')
    data = {"status": "to-review"}
    editor_client.patch_collection(data=data)
    # 2.2 check the preview collection (if enabled)
    if preview_client:
        print('Check preview collection')
        preview_records = preview_client.get_records()
        assert len(preview_records) == expected, '%s != %s records' % (len(preview_records), expected)
        # Diff size is 20 + 5 if updated records are also all deleted,
        # or 30 if deletions and updates apply to different records.
        diff_since_last = preview_client.get_records(_since=preview_timestamp)
        assert 25 <= len(diff_since_last) <= 30, 'Changes since last signature are not consistent'

        metadata = preview_client.get_collection()['data']
        assert preview_signature != metadata['signature'], 'Preview collection not updated'

    # 2.3 approve the review
    print('Reviewer approves and triggers signature')
    data = {"status": "to-sign"}
    reviewer_client.patch_collection(data=data)

    # 5. wait for the result

    # 6. obtain the destination records and serialize canonically.

    records = list(dest_client.get_records())
    assert len(records) == expected, '%s != %s records' % (len(records), expected)
    timestamp = dest_client.get_records_timestamp()
    serialized = canonical_json(records, timestamp)
    print('Hash is %r' % compute_hash(serialized))

    # 7. get back the signed hash

    signature = dest_client.get_collection()['data']['signature']

    with open('pub', 'w') as f:
        f.write(signature['public_key'])

    # 8. verify the signature matches the hash
    signer = ECDSASigner(public_key='pub')
    try:
        signer.verify(serialized, signature)
        print('Signature OK')
    except Exception:
        print('Signature KO')
        raise