Example #1
 def test_save_metadata(self):
     """
     Tests that the metadata XML is not written to the storage folder for internal
     resources but only when the resource is ingested
     """
     # load test fixture; its initial status is 'internal'
     _result = test_utils.import_xml(TESTFIXTURE_XML)
     resource = resourceInfoType_model.objects.get(pk=_result.id)
     _storage_object = resource.storage_object
     _storage_object.update_storage()
     # initial status is 'internal'
     self.assertEqual(INTERNAL, _storage_object.publication_status)
     # internal resource has no metadata XML stored in storage folder
     self.assertFalse(
       os.path.isfile('{0}/metadata-{1:04d}.xml'.format(
               _storage_object._storage_folder(), _storage_object.revision)))
     # set status to ingested
     _storage_object.publication_status = INGESTED
     _storage_object.update_storage()
     # ingested resource has metadata XML stored in storage folder
     self.assertTrue(
       os.path.isfile('{0}/metadata-{1:04d}.xml'.format(
         _storage_object._storage_folder(), _storage_object.revision)))
     # ingested resource has global part of storage object in storage folder
     self.assertTrue(
       os.path.isfile('{0}/storage-global.json'.format(
         _storage_object._storage_folder())))
     # ingested resource has local part of storage object in storage folder
     self.assertTrue(
       os.path.isfile('{0}/storage-local.json'.format(
         _storage_object._storage_folder())))
     # ingested resource has digest zip in storage folder
     self.assertTrue(
       os.path.isfile('{0}/resource.zip'.format(
         _storage_object._storage_folder())))
     # digest zip contains metadata.xml and storage-global.json
     _zf_name = '{0}/resource.zip'.format(_storage_object._storage_folder())
     with ZipFile(_zf_name, mode='r') as _zf:
         self.assertTrue('metadata.xml' in _zf.namelist())
         self.assertTrue('storage-global.json' in _zf.namelist())
     # md5 of digest zip is stored in storage object
     with ZipFile(_zf_name, 'r') as inzip:
         with inzip.open('metadata.xml') as resource_xml:
             resource_xml_string = resource_xml.read()
         with inzip.open('storage-global.json') as storage_file:
             # read json string
             storage_json_string = storage_file.read() 
         _checksum = compute_digest_checksum(
           resource_xml_string, storage_json_string)
         self.assertEqual(_checksum, _storage_object.digest_checksum)
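
Example #1 verifies the stored digest by calling compute_digest_checksum() on the serialized metadata XML and the global storage JSON. The helper itself is not shown on this page; the sketch below is only an assumed reading of it, based on the md5 digest mentioned in the docstrings of Examples #4 and #5, and the real implementation may combine or encode the inputs differently.

# Hypothetical sketch, NOT the original compute_digest_checksum():
# assume an MD5 hex digest over the metadata XML string followed by the
# global storage JSON string.
import hashlib

def compute_digest_checksum_sketch(resource_xml_string, storage_json_string):
    """Return an MD5 hex digest over the two serialized inputs (assumed scheme)."""
    _md5 = hashlib.md5()
    _md5.update(resource_xml_string)
    _md5.update(storage_json_string)
    return _md5.hexdigest()
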
Example #2
 def test_metadata_digest(self):
     settings.SYNC_NEEDS_AUTHENTICATION = False
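     # allow unauthenticated access to the sync endpoint for this test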
     client = Client()
     resource = resourceInfoType_model.objects.all()[0]
     resource_uuid = resource.storage_object.identifier
     expected_digest = resource.storage_object.digest_checksum
     response = client.get("{0}{1}/metadata/".format(self.SYNC_BASE, resource_uuid))
     # read zip file from response
     with ZipFile(StringIO(response.content), "r") as inzip:
         with inzip.open("metadata.xml") as resource_xml:
             resource_xml_string = resource_xml.read()
         with inzip.open("storage-global.json") as storage_file:
             # read json string
             storage_json_string = storage_file.read()
     self.assertEqual(
         expected_digest,
         compute_digest_checksum(resource_xml_string, storage_json_string))
Example #3
 def test_metadata_digest(self):
     settings.SYNC_NEEDS_AUTHENTICATION = False
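     # allow unauthenticated access to the sync endpoint for this test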
     client = Client()
     resource = resourceInfoType_model.objects.all()[0]
     resource_uuid = resource.storage_object.identifier
     expected_digest = resource.storage_object.digest_checksum
     response = client.get('{0}{1}/metadata/'.format(
         self.SYNC_BASE, resource_uuid))
     # read zip file from response
     with ZipFile(StringIO(response.content), 'r') as inzip:
         with inzip.open('metadata.xml') as resource_xml:
             resource_xml_string = resource_xml.read()
         with inzip.open('storage-global.json') as storage_file:
             # read json string
             storage_json_string = storage_file.read()
     self.assertEqual(
         expected_digest,
         compute_digest_checksum(resource_xml_string, storage_json_string))
Example #4
def get_full_metadata(opener, full_metadata_url, expected_digest):
    """
    Obtain the full metadata record for one resource.
    
    Returns a pair of (storage_json, resource_xml_string), where storage_json
    is the parsed JSON object.
    
    Raises CorruptDataException if the zip data received from full_metadata_url
    does not have an md5 digest identical to expected_digest.
    """
    with contextlib.closing(opener.open(full_metadata_url)) as response:
        data = response.read()
        with ZipFile(StringIO(data), "r") as inzip:
            with inzip.open("metadata.xml") as resource_xml:
                resource_xml_string = resource_xml.read()
            with inzip.open("storage-global.json") as storage_file:
                # read json string
                storage_json_string = storage_file.read()
                # convert to json object
                storage_json = json.loads(storage_json_string)
            if expected_digest != compute_digest_checksum(
                    resource_xml_string, storage_json_string):
                raise CorruptDataException(
                    "Checksum error for resource '{0}'.".format(full_metadata_url))
            return storage_json, resource_xml_string
Example #5
def get_full_metadata(opener, full_metadata_url, expected_digest):
    """
    Obtain the full metadata record for one resource.
    
    Returns a pair of (storage_json, resource_xml_string), where storage_json
    is the parsed JSON object.
    
    Raises CorruptDataException if the zip data received from full_metadata_url
    does not have an md5 digest identical to expected_digest.
    """
    with contextlib.closing(opener.open(full_metadata_url)) as response:
        data = response.read()
        with ZipFile(StringIO(data), 'r') as inzip:
            with inzip.open('metadata.xml') as resource_xml:
                resource_xml_string = resource_xml.read()
            with inzip.open('storage-global.json') as storage_file:
                # read json string
                storage_json_string = storage_file.read()
                # convert to json object
                storage_json = json.loads(storage_json_string)
            if expected_digest != compute_digest_checksum(
                    resource_xml_string, storage_json_string):
                raise CorruptDataException(
                    "Checksum error for resource '{0}'.".format(full_metadata_url))
            return storage_json, resource_xml_string
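
get_full_metadata() expects an opener whose open() method returns a file-like response, such as one built with urllib2. The caller below is a hypothetical sketch: the "<sync base><identifier>/metadata/" URL layout mirrors the test requests in Examples #2 and #3, and the name fetch_one_resource is illustrative only.

import urllib2

def fetch_one_resource(sync_base, identifier, expected_digest):
    # plain urllib2 opener; authentication handlers could be added if needed
    opener = urllib2.build_opener()
    # assumed endpoint layout, mirroring the test requests in Examples #2 and #3
    full_metadata_url = '{0}{1}/metadata/'.format(sync_base, identifier)
    # get_full_metadata (Example #5 above) verifies the md5 digest and
    # raises CorruptDataException on a mismatch
    storage_json, resource_xml_string = get_full_metadata(
        opener, full_metadata_url, expected_digest)
    return storage_json, resource_xml_string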