def test_pre_publish_blob_validation(self):
    """Publishing is rejected until every required blob has been set."""
    class TestType(defs.ArtifactType):
        required_blob = defs.BinaryObject(required=True)
        optional_blob = defs.BinaryObject()

    artifact = TestType(**get_artifact_fixture())
    # With the required blob still unset, the pre-publish hook must fail.
    self.assertRaises(exc.InvalidArtifactPropertyValue,
                      artifact.__pre_publish__)
    # Assigning any Blob (even zero-sized) satisfies the requirement.
    artifact.required_blob = defs.Blob(size=0)
    artifact.__pre_publish__()
def insert(self, index, value):
    """Insert a blob built from a ``(data, size)`` pair at *index*.

    The payload is uploaded to the store under a freshly generated,
    unique item key before the proxied blob is placed into the list.
    """
    data, size = value[0], value[1]
    # A UUID suffix keeps each list element's storage key unique.
    item_key = "%s.%s.%s" % (self.artifact_id, self.prop_name,
                             uuid.uuid4())
    new_blob = definitions.Blob(item_key=item_key)
    proxied_blob = self.helper.proxy(new_blob)
    proxied_blob.upload_to_store(data, size)
    super(ArtifactBlobProxyList, self).insert(index, proxied_blob)
def _deserialize_blobs(artifact_type, blobs_from_db, artifact_properties):
    """Retrieves blobs from database

    Rebuilds ``definitions.Blob`` objects from their DB representation
    and stores them into *artifact_properties* under the blob name.
    List-typed blob attributes become a list of Blobs; scalar ones must
    have exactly one DB record, otherwise the data is inconsistent.
    """
    def _make_blob(entry):
        # One DB row -> one Blob object.
        return definitions.Blob(size=entry['size'],
                                locations=entry['locations'],
                                checksum=entry['checksum'],
                                item_key=entry['item_key'])

    for blob_name, blob_value in six.iteritems(blobs_from_db):
        if not blob_value:
            continue
        attr_def = artifact_type.metadata.attributes.blobs.get(blob_name)
        if isinstance(attr_def, declarative.ListAttributeDefinition):
            val = [_make_blob(entry) for entry in blob_value]
        elif len(blob_value) == 1:
            val = _make_blob(blob_value[0])
        else:
            # A scalar blob attribute with several DB rows is corrupt data.
            raise exception.InvalidArtifactPropertyValue(
                message=_('Blob %(name)s may not have multiple values'),
                name=blob_name)
        artifact_properties[blob_name] = val
def test_blob(self):
    """A Blob's size and locations survive assignment to a BinaryObject."""
    class TestType(defs.ArtifactType):
        image_file = defs.BinaryObject(max_file_size=201054,
                                       min_locations=1,
                                       max_locations=5)
        screen_shots = defs.BinaryObjectList(
            objects=defs.BinaryObject(min_file_size=100), min_count=1)

    artifact = TestType(**get_artifact_fixture())
    blob = defs.Blob()
    blob.size = 1024
    blob.locations.append("file://some.file.path")
    artifact.image_file = blob

    # The attribute reads back exactly what was assigned.
    self.assertEqual(1024, artifact.image_file.size)
    self.assertEqual(["file://some.file.path"],
                     artifact.image_file.locations)
def set_type_specific_property(self, prop_name, value):
    """Set an artifact property, routing blob attributes through the store.

    Non-blob properties are delegated to the parent proxy unchanged.
    For blob properties, ``value`` is either ``None`` (delete the stored
    data) or a ``(data, size)`` pair to upload.
    """
    if prop_name not in self.artifact.metadata.attributes.blobs:
        super(ArtifactProxy, self).set_type_specific_property(prop_name,
                                                              value)
        return
    item_key = "%s.%s" % (self.artifact.id, prop_name)
    # XXX FIXME have to add support for BinaryObjectList properties
    blob = definitions.Blob(item_key=item_key)
    blob_proxy = self.helper.proxy(blob)
    if value is None:
        # Deletion: drop every stored location of this blob.
        for location in blob_proxy.locations:
            blob_proxy.delete_from_store(location)
    else:
        data, size = value[0], value[1]
        blob_proxy.upload_to_store(data, size)
        # NOTE(review): the attribute is (re)bound only on upload, not on
        # delete — confirm this matches the intended delete semantics.
        setattr(self.artifact, prop_name, blob)
def __pre_publish__(self, context, *args, **kwargs):
    """Validate the artifact before publishing.

    Exactly one of ``file`` or ``legacy_image_id`` must be set. When a
    legacy image id is given, the image is fetched from the glance
    (xmonitor) service found in the request context's service catalog
    and converted into a Blob stored on ``self.file``.

    :param context: request context carrying ``service_catalog`` and
                    ``auth_token``
    :raises exception.InvalidArtifactPropertyValue: if neither or both
        sources are set, if the legacy image cannot be fetched, or if it
        does not exist
    :raises ImportError: if glanceclient is not installed
    """
    super(ImageAsAnArtifact, self).__pre_publish__(*args, **kwargs)
    if self.file is None and self.legacy_image_id is None:
        raise exception.InvalidArtifactPropertyValue(
            message=_("Either a file or a legacy_image_id has to be "
                      "specified"))
    if self.file is not None and self.legacy_image_id is not None:
        raise exception.InvalidArtifactPropertyValue(
            message=_("Both file and legacy_image_id may not be "
                      "specified at the same time"))

    if self.legacy_image_id:
        # NOTE(review): raises StopIteration if no 'xmonitor' service is
        # present in the catalog — callers presumably guarantee one exists.
        glance_endpoint = next(service['endpoints'][0]['publicURL']
                               for service in context.service_catalog
                               if service['name'] == 'xmonitor')
        # Ensure glanceclient is imported correctly since we are catching
        # the ImportError on initialization.  Identity comparison ('is')
        # is the correct way to test against the None sentinel (PEP 8).
        if glanceclient is None:
            raise ImportError(_("Glance client not installed"))
        try:
            client = glanceclient.Client(version=2,
                                         endpoint=glance_endpoint,
                                         token=context.auth_token)
            legacy_image = client.images.get(self.legacy_image_id)
        except Exception:
            # Any client failure (auth, network, 404, ...) is surfaced
            # uniformly as an invalid property value.
            raise exception.InvalidArtifactPropertyValue(
                message=_('Unable to get legacy image'))
        if legacy_image is not None:
            # Mirror the legacy image's storage location as a Blob.
            self.file = definitions.Blob(size=legacy_image.size,
                                         locations=[{
                                             "status": "active",
                                             "value":
                                                 legacy_image.direct_url
                                         }],
                                         checksum=legacy_image.checksum,
                                         item_key=legacy_image.id)
        else:
            raise exception.InvalidArtifactPropertyValue(
                message=_("Legacy image was not found"))
def test_serialization_to_db(self):
    """serialize_for_db flattens every supported property kind."""
    ref1 = defs.ArtifactType(**get_artifact_fixture(id="1"))
    ref2 = defs.ArtifactType(**get_artifact_fixture(id="2"))
    ref3 = defs.ArtifactType(**get_artifact_fixture(id="3"))
    blob1 = defs.Blob(size=100, locations=['http://example.com/blob1'],
                      item_key='some_key', checksum='abc')
    blob2 = defs.Blob(size=200, locations=['http://example.com/blob2'],
                      item_key='another_key', checksum='fff')
    blob3 = defs.Blob(size=300, locations=['http://example.com/blob3'],
                      item_key='third_key', checksum='123')

    fixture = get_artifact_fixture()
    artifact = SerTestType(**fixture)
    artifact.some_string = 'bar'
    artifact.some_text = 'bazz'
    artifact.some_version = '11.22.33-beta'
    artifact.some_int = 50
    artifact.some_numeric = 10.341
    artifact.some_bool = True
    artifact.some_array = ['q', 'w', 'e', 'r', 't', 'y']
    artifact.another_array = [1, 1.2, False]
    artifact.some_dict = {'foobar': "FOOBAR", 'baz': "QUX"}
    artifact.another_dict = {'foo': 1, 'bar': True}
    artifact.some_ref = ref1
    artifact.some_ref_list = [ref2, ref3]
    artifact.some_blob = blob1
    artifact.some_blob_list = [blob2, blob3]

    results = serialization.serialize_for_db(artifact)

    # Build the expected DB representation on top of the raw fixture.
    expected = fixture
    expected['type_name'] = 'SerTestType'
    expected['type_version'] = '1.0'
    expected['properties'] = {
        'some_string': {'type': 'string', 'value': 'bar'},
        'some_text': {'type': 'text', 'value': 'bazz'},
        'some_version': {'type': 'string', 'value': '11.22.33-beta'},
        'some_int': {'type': 'int', 'value': 50},
        'some_numeric': {'type': 'numeric', 'value': 10.341},
        'some_bool': {'type': 'bool', 'value': True},
        'some_array': {
            'type': 'array',
            # Each array element is serialized as its own typed entry.
            'value': [{'type': 'string', 'value': char}
                      for char in 'qwerty'],
        },
        'another_array': {
            'type': 'array',
            'value': [{'type': 'int', 'value': 1},
                      {'type': 'numeric', 'value': 1.2},
                      {'type': 'bool', 'value': False}],
        },
        # Dict properties are flattened into dotted keys.
        'some_dict.foobar': {'type': 'string', 'value': 'FOOBAR'},
        'some_dict.baz': {'type': 'string', 'value': 'QUX'},
        'another_dict.foo': {'type': 'int', 'value': 1},
        'another_dict.bar': {'type': 'bool', 'value': True},
    }
    # References are reduced to lists of artifact ids.
    expected['dependencies'] = {'some_ref': ['1'],
                                'some_ref_list': ['2', '3']}
    # Blobs are always stored as lists of location records.
    expected['blobs'] = {
        'some_blob': [{'size': 100,
                       'checksum': 'abc',
                       'item_key': 'some_key',
                       'locations': ['http://example.com/blob1']}],
        'some_blob_list': [{'size': 200,
                            'checksum': 'fff',
                            'item_key': 'another_key',
                            'locations': ['http://example.com/blob2']},
                           {'size': 300,
                            'checksum': '123',
                            'item_key': 'third_key',
                            'locations': ['http://example.com/blob3']}],
    }
    self.assertEqual(expected, results)