def test_schema_load_error_s3(self):
    schema_pk = PhysicalKey.from_url('s3://schema-bucket/schema-key')
    data = get_v1_conf_data('''
        workflows:
          w1:
            name: Name
            metadata_schema: schema-id
        schemas:
          schema-id:
            url: %s
    ''' % schema_pk)
    registry = get_package_registry('s3://some-bucket')
    self.s3_mock_config(data, registry)
    self.s3_stubber.add_client_error(
        'get_object',
        service_error_code='NoSuchKey',
        expected_params={
            'Bucket': 'schema-bucket',
            'Key': 'schema-key',
        },
        http_status_code=404,
    )
    with pytest.raises(QuiltException, match=fr"Couldn't load schema at {schema_pk}"):
        self._validate(registry=registry, workflow='w1')
def _validate(self, registry=None, workflow=..., meta=None, message=None):
    registry = get_package_registry(registry)
    meta = meta or {}
    return workflows.validate(registry=registry, workflow=workflow, meta=meta, message=message)
def test_schema_validation_valid_meta(self):
    set_local_conf_data(get_v1_conf_data('''
        workflows:
          w1:
            name: Name
            metadata_schema: schema-id
        schemas:
          schema-id:
            url: %s
    ''' % create_local_tmp_schema('{"type": "string"}')))
    assert self._validate(workflow='w1', meta="some-data") == {
        'id': 'w1',
        'config': str(get_package_registry().workflow_conf_pk),
        'schemas': {
            'schema-id': str(get_package_registry().root.join('schemas/schema')),
        },
    }
def test_workflow_not_required_default_set(self):
    set_local_conf_data(get_v1_conf_data('''
        is_workflow_required: false
        default_workflow: w1
        workflows:
          w1:
            name: Name
    '''))
    assert self._validate() == {
        'id': 'w1',
        'config': str(get_package_registry().workflow_conf_pk),
    }
    assert self._validate(workflow=None) == {
        'id': None,
        'config': str(get_package_registry().workflow_conf_pk),
    }
def test_supported_meta_schema(self):
    for meta_schema in (
        'http://json-schema.org/draft-07/schema#',
    ):
        with self.subTest(meta_schema=meta_schema):
            set_local_conf_data(get_v1_conf_data('''
                workflows:
                  w1:
                    name: Name
                    metadata_schema: schema-id
                schemas:
                  schema-id:
                    url: %s
            ''' % create_local_tmp_schema(f'{{"$schema": "{meta_schema}"}}')))
            assert self._validate(workflow='w1') == {
                'id': 'w1',
                'config': str(get_package_registry().workflow_conf_pk),
                'schemas': {
                    'schema-id': str(get_package_registry().root.join('schemas/schema')),
                },
            }
def test_multiple_schema_usages(self, load_schema_mock):
    load_schema_mock.return_value = b'true', get_package_registry().root.join('schemas/schema')
    set_local_conf_data(get_v1_conf_data('''
        workflows:
          w1:
            name: Name
            metadata_schema: schema-id
            entries_schema: schema-id
        schemas:
          schema-id:
            url: %s
    ''' % create_local_tmp_schema('true')))
    assert self._validate(workflow='w1', meta="some-data") == {
        'id': 'w1',
        'config': str(get_package_registry().workflow_conf_pk),
        'schemas': {
            'schema-id': str(get_package_registry().root.join('schemas/schema')),
        },
    }
    load_schema_mock.assert_called_once_with(get_package_registry().root.join('schemas/schema'))
def test_successors(self):
    set_local_conf_data(get_v1_conf_data('''
        workflows:
          w1:
            name: Name
        successors:
          s3://some-bucket:
            title: successor title
    '''))
    assert self._validate(workflow='w1') == {
        'id': 'w1',
        'config': str(get_package_registry().workflow_conf_pk),
    }
def get_registry(registry_url):
    # Reject URLs that don't parse or that don't resolve to an S3-backed registry.
    package_registry = None
    try:
        package_registry = get_package_registry(registry_url)
    except quilt3.util.URLParseError:
        pass
    else:
        if not isinstance(package_registry, S3PackageRegistryV1):
            package_registry = None

    if package_registry is None:
        raise PkgpushException("InvalidRegistry", {"registry_url": registry_url})

    return package_registry
def test_schema_load_error(self):
    schema_pk = get_package_registry().root.join('nonexistent-schema')
    set_local_conf_data(get_v1_conf_data('''
        workflows:
          w1:
            name: Name
            metadata_schema: schema-id
        schemas:
          schema-id:
            url: %s
    ''' % schema_pk))
    with pytest.raises(QuiltException, match=fr"Couldn't load schema at {schema_pk}"):
        self._validate(workflow='w1')
def test_workflow_not_required_not_specified(self):
    set_local_conf_data(get_v1_conf_data('''
        is_workflow_required: false
        workflows:
          w1:
            name: Name
    '''))
    for workflow in (None, ...):
        with self.subTest(workflow=workflow):
            assert self._validate(workflow=workflow) == {
                'id': None,
                'config': str(get_package_registry().workflow_conf_pk),
            }
def get_registry(registry_url):
    # Reject URLs that don't parse or that don't resolve to an S3-backed registry.
    package_registry = None
    try:
        package_registry = get_package_registry(registry_url)
    except quilt3.util.URLParseError:
        pass
    else:
        if not isinstance(package_registry, S3PackageRegistryV1):
            package_registry = None

    if package_registry is None:
        raise ApiException(
            HTTPStatus.BAD_REQUEST,
            f"{registry_url} is not a valid S3 package registry.",
        )

    return package_registry
# Decorator assumed: this generator is entered via `with`, so it needs
# @contextlib.contextmanager (not visible in this excerpt).
@contextlib.contextmanager
def mock_successors(self, successors):
    workflow_config_mock = mock.MagicMock()
    workflow_config_mock.config = {
        'successors': successors,
    }
    src_registry = get_package_registry(self.src_registry)

    def side_effect(registry_url):
        # Return the patched instance for the source registry so its mocked
        # get_workflow_config() is used; fall through to the real function otherwise.
        if registry_url == self.src_registry:
            return src_registry
        return mock.DEFAULT

    with mock.patch.object(src_registry, 'get_workflow_config', return_value=workflow_config_mock), \
         mock.patch('t4_lambda_pkgpush.get_package_registry', side_effect=side_effect, wraps=get_package_registry):
        yield
def test_workflow_is_required_default_set(self):
    set_local_conf_data(get_v1_conf_data('''
        default_workflow: w1
        workflows:
          w1:
            name: Name
    '''))
    assert self._validate() == {
        'id': 'w1',
        'config': str(get_package_registry().workflow_conf_pk),
    }
    with pytest.raises(QuiltException, match=r'Workflow required, but none specified.'):
        self._validate(workflow=None)
def test_remote_registry_local_schema(self):
    data = get_v1_conf_data('''
        workflows:
          w1:
            name: Name
            metadata_schema: schema-id
        schemas:
          schema-id:
            url: file:///local/path
    ''')
    registry = 's3://some-bucket'
    self.s3_mock_config(data, get_package_registry(registry))
    schema_pk = PhysicalKey.from_path('/local/path')
    error_msg = rf"Local schema '{schema_pk}' can't be used on the remote registry."
    with pytest.raises(QuiltException, match=error_msg):
        self._validate(registry=registry, workflow='w1')
def test_schema_validation_valid_meta_s3(self):
    schema_urls = {
        's3://schema-bucket/schema-key?versionId=schema-version': {
            'Bucket': 'schema-bucket',
            'Key': 'schema-key',
            'VersionId': 'schema-version',
        },
        's3://schema-bucket/schema-key': {
            'Bucket': 'schema-bucket',
            'Key': 'schema-key',
        },
    }
    for schema_url, expected_params in schema_urls.items():
        data = get_v1_conf_data('''
            workflows:
              w1:
                name: Name
                metadata_schema: schema-id
            schemas:
              schema-id:
                url: %s
        ''' % schema_url)
        with self.subTest(schema_url=schema_url):
            self.s3_mock_config(data, get_package_registry('s3://some-bucket'))
            self.s3_stubber.add_response(
                method='get_object',
                service_response={
                    'VersionId': 'schema-version',
                    'Body': self.s3_streaming_body(b'{"type": "string"}'),
                },
                expected_params=expected_params,
            )
            assert self._validate(
                registry='s3://some-bucket', workflow='w1', meta="some-data"
            ) == {
                'id': 'w1',
                'config': 's3://some-bucket/.quilt/workflows/config.yml?versionId=some-version',
                'schemas': {
                    'schema-id': 's3://schema-bucket/schema-key?versionId=schema-version',
                },
            }
def test_is_message_required(self):
    set_local_conf_data(get_v1_conf_data('''
        workflows:
          w1:
            name: Name
            is_message_required: true
    '''))
    assert self._validate(workflow='w1', message='some message') == {
        'id': 'w1',
        'config': str(get_package_registry().workflow_conf_pk),
    }
    error_msg = r'Commit message is required by workflow, but none was provided.'
    for message in (None, ''):
        with self.subTest(message=message):
            with pytest.raises(QuiltException, match=error_msg):
                # Pass the subTest's message so both falsy values are actually exercised.
                self._validate(workflow='w1', message=message)
def _validate(self, registry=None, workflow=..., name='test/name', meta=None, message=None):
    registry = get_package_registry(registry)
    meta = meta or {}
    pkg = Package()
    pkg.set_meta(meta)
    return workflows.validate(registry=registry, workflow=workflow, name=name, pkg=pkg, message=message)
def create_local_tmp_schema(data):
    pk = get_package_registry().root.join('schemas/schema')
    put_bytes(data.encode(), pk)
    return pk
# Decorator assumed: this generator is entered via `with` and asserts after the
# yield, so it needs @contextlib.contextmanager (not visible in this excerpt).
@contextlib.contextmanager
def _mock_package_build(self, entries, *, message=..., expected_workflow=...):
    if message is ...:
        message = self.dst_commit_message

    # Use a test package to verify manifest entries
    test_pkg = Package()
    test_pkg.set_meta(self.meta)

    # Mock hashing package objects
    for entry in entries:
        pkey = PhysicalKey.from_url(entry['physical_key'])
        hash_obj = {'type': 'SHA256', 'value': entry['hash']}
        test_entry = PackageEntry(pkey, entry['size'], hash_obj, entry.get('meta'))
        test_pkg.set(entry['logical_key'], entry=test_entry)

    mocked_workflow_data = 'some-workflow-data'
    test_pkg._workflow = mocked_workflow_data

    # Build the manifest from the test package
    test_pkg._set_commit_message(message)
    manifest = io.BytesIO()
    test_pkg.dump(manifest)
    manifest.seek(0)

    self.s3_stubber.add_response(
        'put_object',
        service_response={},
        expected_params={
            'Body': manifest.read(),
            'Bucket': self.dst_bucket,
            'Key': f'.quilt/packages/{test_pkg.top_hash}',
        },
    )
    self.s3_stubber.add_response(
        'put_object',
        service_response={},
        expected_params={
            'Body': str.encode(test_pkg.top_hash),
            'Bucket': self.dst_bucket,
            'Key': f'.quilt/named_packages/{self.dst_pkg_name}/{str(int(self.mock_timestamp))}',
        },
    )
    self.s3_stubber.add_response(
        'put_object',
        service_response={},
        expected_params={
            'Body': str.encode(test_pkg.top_hash),
            'Bucket': self.dst_bucket,
            'Key': f'.quilt/named_packages/{self.dst_pkg_name}/latest',
        },
    )

    with mock.patch(
        'quilt3.workflows.validate', return_value=mocked_workflow_data,
    ) as workflow_validate_mock:
        yield

    workflow_validate_mock.assert_called_once_with(
        registry=get_package_registry(self.dst_registry),
        workflow=expected_workflow,
        name=self.dst_pkg_name,
        pkg=mock.ANY,  # TODO: probably this should be more specific.
        message=message,
    )
def set_local_conf_data(conf_data):
    put_bytes(conf_data.encode(), get_package_registry().workflow_conf_pk)