def test_update_new(self):
    """
    Simulate the case where synchronization brings a new storage
    object to be instantiated.
    """
    # Exercise
    add_or_update_resource(self.storage_json, self.metadata_before,
                           self.storage_digest)
    # Verify: exactly one storage object exists for the identifier and it
    # carries the synchronized metadata.
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    self.assertEqual(
        1, StorageObject.objects.filter(identifier=self.storage_id).count())
    storage_object = StorageObject.objects.get(identifier=self.storage_id)
    self.assertEqual(self.metadata_before, storage_object.metadata)
def test_update_new(self):
    """
    Simulate the case where synchronization brings a new storage
    object to be instantiated.
    """
    # Exercise
    add_or_update_resource(self.storage_json, self.metadata_before,
                           self.storage_digest)
    # Verify: exactly one storage object exists for the identifier and it
    # carries the synchronized metadata.
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    self.assertEqual(
        1, StorageObject.objects.filter(identifier=self.storage_id).count())
    storage_object = StorageObject.objects.get(identifier=self.storage_id)
    self.assertEqual(self.metadata_before, storage_object.metadata)
def test_master_copy(self):
    """
    Verify that reusable entities such as persons have copy status
    MASTER after synchronization.
    """
    # exercise: synchronize with MASTER copy status and no digest
    add_or_update_resource(self.storage_json, self.metadata_before, None,
                           copy_status=MASTER)
    # verify: the contact person imported with the resource is MASTER too
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    resource = resourceInfoType_model.objects.get(
        storage_object__identifier=self.storage_id)
    persons = resource.contactPerson.all()
    self.assertEqual(1, len(persons))
    contact_person = persons[0]
    self.assertEqual(MASTER, contact_person.copy_status)
def test_update_refuse_mastercopy(self):
    """
    Refuse to replace a master copy with a non-master copy during update.
    """
    # setup: create the resource as a MASTER copy
    add_or_update_resource(self.storage_json, self.metadata_before,
                           self.storage_digest, MASTER)
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    self.assertEqual(
        MASTER,
        StorageObject.objects.get(identifier=self.storage_id).copy_status)
    # exercise: a REMOTE update of a MASTER copy must be rejected.
    # assertRaises replaces the manual try/fail/except pattern.
    with self.assertRaises(IllegalAccessException):
        add_or_update_resource(self.storage_json, self.metadata_modified,
                               self.storage_digest, REMOTE)
def test_update_existing(self):
    """
    Simulate update for an already existing storage object.
    """
    # helper: metadata creation date of the resource belonging to the given
    # storage identifier. Originally this ignored its parameter and always
    # read self.storage_id — fixed to use the argument it is given.
    def get_metadatacreationdate_for(storage_id):
        resource = resourceInfoType_model.objects.get(
            storage_object__identifier=storage_id)
        return resource.metadataInfo.metadataCreationDate

    # setup: first synchronization creates a REMOTE copy
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    add_or_update_resource(self.storage_json, self.metadata_before,
                           self.storage_digest)
    self.assertEqual(date(2005, 5, 12),
                     get_metadatacreationdate_for(self.storage_id))
    self.assertEqual(
        REMOTE,
        StorageObject.objects.get(identifier=self.storage_id).copy_status)
    # exercise: a second synchronization updates the existing object
    add_or_update_resource(self.storage_json, self.metadata_modified,
                           self.storage_digest)
    self.assertEqual(date(2006, 12, 31),
                     get_metadatacreationdate_for(self.storage_id))
def test_master_copy(self):
    """
    Verify that reusable entities such as persons have copy status
    MASTER after synchronization.
    """
    # exercise: synchronize with MASTER copy status and no digest
    add_or_update_resource(self.storage_json, self.metadata_before, None,
                           copy_status=MASTER)
    # verify: the contact person imported with the resource is MASTER too
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    resource = resourceInfoType_model.objects.get(
        storage_object__identifier=self.storage_id)
    persons = resource.contactPerson.all()
    self.assertEqual(1, len(persons))
    contact_person = persons[0]
    self.assertEqual(MASTER, contact_person.copy_status)
def test_update_refuse_mastercopy(self):
    """
    Refuse to replace a master copy with a non-master copy during update.
    """
    # setup: create the resource as a MASTER copy
    add_or_update_resource(self.storage_json, self.metadata_before,
                           self.storage_digest, MASTER)
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    self.assertEqual(
        MASTER,
        StorageObject.objects.get(identifier=self.storage_id).copy_status)
    # exercise: a REMOTE update of a MASTER copy must be rejected.
    # assertRaises replaces the manual try/fail/except pattern.
    with self.assertRaises(IllegalAccessException):
        add_or_update_resource(self.storage_json, self.metadata_modified,
                               self.storage_digest, REMOTE)
def _get_remote_resource(resource_id, resource_digest, node_id, node,
                         opener, copy_status):
    """
    Retrieve from the given node the resource for the given id and
    add/update it at the current node with the given copy status,
    using the given opener.
    """
    metadata_url = "{0}/sync/{1}/metadata/".format(node['URL'], resource_id)
    # Fetch both the JSON storage object and the raw metadata XML in one call
    storage_json, resource_xml_string = get_full_metadata(
        opener, metadata_url, resource_digest)
    return add_or_update_resource(storage_json, resource_xml_string,
                                  resource_digest, copy_status,
                                  source_node=node_id)
def test_update_existing(self):
    """
    Simulate update for an already existing storage object.
    """
    # helper: metadata creation date of the resource belonging to the given
    # storage identifier. Originally this ignored its parameter and always
    # read self.storage_id — fixed to use the argument it is given.
    def get_metadatacreationdate_for(storage_id):
        resource = resourceInfoType_model.objects.get(
            storage_object__identifier=storage_id)
        return resource.metadataInfo.metadataCreationDate

    # setup: first synchronization creates a REMOTE copy
    # (assertEquals is deprecated and removed in Python 3.12 — use assertEqual)
    add_or_update_resource(self.storage_json, self.metadata_before,
                           self.storage_digest)
    self.assertEqual(date(2005, 5, 12),
                     get_metadatacreationdate_for(self.storage_id))
    self.assertEqual(
        REMOTE,
        StorageObject.objects.get(identifier=self.storage_id).copy_status)
    # exercise: a second synchronization updates the existing object
    add_or_update_resource(self.storage_json, self.metadata_modified,
                           self.storage_digest)
    self.assertEqual(date(2006, 12, 31),
                     get_metadatacreationdate_for(self.storage_id))
def _update_resource(repository, remote_id, metadataPrefix, raw_xml_record):
    """
    Update the locally stored resource for *remote_id* from the harvested
    raw XML record; return ``(resource, changed)`` where *changed* tells
    whether an actual update took place.
    """
    storage_object, old_checksum = repository[remote_id]
    # Re-compute the record checksum; an unchanged checksum means the
    # harvested record is identical, so skip the update entirely.
    new_checksum = _compute_checksum(raw_xml_record)
    if old_checksum == new_checksum:
        unchanged = storage_object.resourceinfotype_model_set.all()[0]
        return unchanged, False
    # The record differs: convert it and push it through the regular
    # add-or-update path, preserving the existing copy status and source node.
    xml_record = _convert_to_MSschema(metadataPrefix, raw_xml_record)
    resource = add_or_update_resource(
        _get_storage_json(storage_object.identifier),
        xml_record,
        storage_object.digest_checksum,
        copy_status=storage_object.copy_status,
        source_node=storage_object.source_node)
    repository[remote_id] = [resource.storage_object, new_checksum]
    return resource, True
def _add_resource(repository, remote_id, metadataPrefix, raw_xml_record,
                  source_url):
    """
    Import the harvested raw XML record for *remote_id* as a new local
    resource and register it in *repository*; return the resource.
    """
    # TODO: copy_status = PROXY or MASTER. If PROXY then source_node=None
    # and source_url the harvested repo's url. If MASTER then source_node=None
    # and source_url= our repo's url
    xml_record = _convert_to_MSschema(metadataPrefix, raw_xml_record)
    # If the resource comes from a META-SHARE node, the imported resource
    # belongs to that repository and keeps the remote identifier; otherwise
    # this node becomes the resource's master META-SHARE node.
    if source_url:
        resource = add_or_update_resource(None, xml_record, None,
                                          source_node=source_url,
                                          identifier=remote_id,
                                          publication_status=INGESTED,
                                          source_url=source_url)
    else:
        resource = import_from_string(xml_record, INGESTED, MASTER,
                                      DJANGO_URL)
    checksum = _compute_checksum(raw_xml_record)
    repository[remote_id] = [resource.storage_object, checksum]
    return resource