def test_get_object_if_different(self):
        """Test getting object content only when the local file is different

        http://en.wikipedia.org/wiki/HTTP_ETag
        Make a conditional request for an object using the If-None-Match
        header; the object should be downloaded only if the local file is
        different, otherwise the response code should be 304 Not Modified
        """
        object_name, data = self.create_object(self.container_name)
        # local copy is identical, no download
        object_md5 = md5(data, usedforsecurity=False).hexdigest()
        headers = {'If-None-Match': object_md5}
        url = "%s/%s" % (self.container_name, object_name)
        resp, _ = self.object_client.get(url, headers=headers)
        self.assertEqual(resp['status'], '304')

        # When the file is not downloaded from the Swift server, the response
        # does not contain the 'X-Timestamp' header. This is a special case,
        # so the existence of the response headers is checked without the
        # custom matcher.
        self.assertIn('date', resp)
        # Check only the format of common headers with custom matcher
        self.assertThat(resp, custom_matchers.AreAllWellFormatted())

        # local copy is different, download
        local_data = "something different"
        other_md5 = md5(local_data.encode(), usedforsecurity=False).hexdigest()
        headers = {'If-None-Match': other_md5}
        resp, _ = self.object_client.get(url, headers=headers)
        self.assertHeaders(resp, 'Object', 'GET')
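For reference, the same If-None-Match handshake works against any HTTP endpoint that returns ETags. A minimal standalone sketch with the requests library; the URL and the cached content are hypothetical:

import hashlib

import requests

url = 'http://swift.example.com/v1/AUTH_test/cont/obj'   # hypothetical endpoint
local_copy = b'locally cached object content'
local_etag = hashlib.md5(local_copy).hexdigest()

resp = requests.get(url, headers={'If-None-Match': local_etag})
if resp.status_code == 304:
    data = local_copy        # server copy matches, nothing was downloaded
else:
    data = resp.content      # local copy was stale, use the downloaded body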
Example 2
    def _create_manifest(self):
        # Create a manifest file for SLO uploading
        object_name = data_utils.rand_name(name='TestObject')
        object_name_base_1 = object_name + '_01'
        object_name_base_2 = object_name + '_02'
        data_size = MIN_SEGMENT_SIZE
        self.content = data_utils.random_bytes(data_size)
        self._create_object(self.container_name, object_name_base_1,
                            self.content)
        self._create_object(self.container_name, object_name_base_2,
                            self.content)

        path_object_1 = '/%s/%s' % (self.container_name, object_name_base_1)
        path_object_2 = '/%s/%s' % (self.container_name, object_name_base_2)
        data_manifest = [{
            'path': path_object_1,
            'etag': md5(self.content, usedforsecurity=False).hexdigest(),
            'size_bytes': data_size
        }, {
            'path': path_object_2,
            'etag': md5(self.content, usedforsecurity=False).hexdigest(),
            'size_bytes': data_size
        }]

        return json.dumps(data_manifest)
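The serialized manifest is a JSON list with one entry per segment (path, etag, size); for Swift SLO it is uploaded as the manifest object's body using the multipart-manifest=put query parameter. A rough sketch of the payload, with illustrative paths, etags and sizes:

import json

data_manifest = [
    {'path': '/cont/TestObject_01', 'etag': 'md5-of-segment-1', 'size_bytes': 1048576},
    {'path': '/cont/TestObject_02', 'etag': 'md5-of-segment-2', 'size_bytes': 1048576},
]
body = json.dumps(data_manifest)
# PUT body to <container>/<manifest-object>?multipart-manifest=put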
Example 3
    def test_delete_volume(self):
        self.mock_object(self._driver, '_get_vrts_lun_list')
        self.mock_object(self._driver, '_access_api')

        va_lun_name = self._driver._get_va_lun_name(self.volume.id)

        length = len(self.volume.id)
        index = int(length / 2)
        name1 = self.volume.id[:index]
        name2 = self.volume.id[index:]
        crc1 = md5(name1.encode('utf-8'),
                   usedforsecurity=False).hexdigest()[:5]
        crc2 = md5(name2.encode('utf-8'),
                   usedforsecurity=False).hexdigest()[:5]

        volume_name_to_ret = 'cinder' + '-' + crc1 + '-' + crc2

        lun = {}
        lun['lun_name'] = va_lun_name
        lun['target_name'] = 'iqn.2017-02.com.veritas:faketarget'
        lun_list = {'output': {'output': {'luns': [lun]}}}
        self._driver._get_vrts_lun_list.return_value = lun_list

        self._driver._access_api.return_value = True

        self._driver.delete_volume(self.volume)
        self.assertEqual(volume_name_to_ret, va_lun_name)
        self.assertEqual(1, self._driver._access_api.call_count)
Example 4
 def _get_va_lun_name(self, name):
     length = len(name)
     index = int(length / 2)
     name1 = name[:index]
     name2 = name[index:]
     crc1 = md5(name1.encode('utf-8'),
                usedforsecurity=False).hexdigest()[:5]
     crc2 = md5(name2.encode('utf-8'),
                usedforsecurity=False).hexdigest()[:5]
     return 'cinder' + '-' + crc1 + '-' + crc2
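The scheme splits the name in half and keeps the first five hex characters of each half's MD5 digest, which is what the test above reproduces. A self-contained sketch of the same computation using hashlib directly; the sample volume id is made up:

import hashlib

def va_lun_name(volume_id):
    half = len(volume_id) // 2
    crc1 = hashlib.md5(volume_id[:half].encode('utf-8')).hexdigest()[:5]
    crc2 = hashlib.md5(volume_id[half:].encode('utf-8')).hexdigest()[:5]
    return 'cinder-' + crc1 + '-' + crc2

print(va_lun_name('3c8b2c4b-23ef-4b0a-9d5b-2c1f39a1d2e3'))
# -> 'cinder-xxxxx-yyyyy' (five hex characters from each half's digest)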
Example 5
    def test_md5_with_data(self):
        digest = secretutils.md5(self._test_data).digest()
        self.assertEqual(digest, self._md5_digest)

        digest = secretutils.md5(self._test_data,
                                 usedforsecurity=True).digest()
        self.assertEqual(digest, self._md5_digest)

        digest = secretutils.md5(self._test_data,
                                 usedforsecurity=False).digest()
        self.assertEqual(digest, self._md5_digest)
Example 6
    def test_md5_without_data(self):
        md5 = secretutils.md5()
        md5.update(self._test_data)
        digest = md5.digest()
        self.assertEqual(digest, self._md5_digest)

        md5 = secretutils.md5(usedforsecurity=True)
        md5.update(self._test_data)
        digest = md5.digest()
        self.assertEqual(digest, self._md5_digest)

        md5 = secretutils.md5(usedforsecurity=False)
        md5.update(self._test_data)
        digest = md5.digest()
        self.assertEqual(digest, self._md5_digest)
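hashlib.md5 only accepts the usedforsecurity keyword on Python 3.9+, which is why a wrapper such as secretutils.md5 is used throughout these examples. A minimal sketch of how such a wrapper can be written, not the actual oslo.utils implementation:

import hashlib

try:
    hashlib.md5(usedforsecurity=False)
    _MD5_ACCEPTS_FLAG = True
except TypeError:                     # Python < 3.9: keyword not supported
    _MD5_ACCEPTS_FLAG = False

def md5(string=b'', usedforsecurity=True):
    # Forward the flag only when the interpreter understands it.
    if _MD5_ACCEPTS_FLAG:
        return hashlib.md5(string, usedforsecurity=usedforsecurity)
    return hashlib.md5(string)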
Example 7
    def test_add(self):
        """Test that we can add an image via the filesystem backend."""
        filesystem.ChunkedFile.CHUNKSIZE = units.Ki
        expected_image_id = str(uuid.uuid4())
        expected_file_size = 5 * units.Ki  # 5K
        expected_file_contents = b"*" * expected_file_size
        expected_checksum = md5(expected_file_contents,
                                usedforsecurity=False).hexdigest()
        expected_location = "file://%s/%s" % (self.test_dir, expected_image_id)
        image_file = six.BytesIO(expected_file_contents)

        loc, size, checksum, metadata = self.store.add(expected_image_id,
                                                       image_file,
                                                       expected_file_size)

        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_file_size, size)
        self.assertEqual(expected_checksum, checksum)
        self.assertEqual(u"file1", metadata['store'])

        uri = "file:///%s/%s" % (self.test_dir, expected_image_id)
        loc = location.get_location_from_uri_and_backend(uri,
                                                         'file1',
                                                         conf=self.conf)
        (new_image_file, new_image_size) = self.store.get(loc)
        new_image_contents = b""
        new_image_file_size = 0

        for chunk in new_image_file:
            new_image_file_size += len(chunk)
            new_image_contents += chunk

        self.assertEqual(expected_file_contents, new_image_contents)
        self.assertEqual(expected_file_size, new_image_file_size)
Example 8
    def test_add_size_zero(self, mock_api_session, fake_size,
                           fake_select_datastore):
        """
        Test that when specifying size zero for the image to add,
        the actual size of the image is returned.
        """
        fake_select_datastore.return_value = self.store.datastores[0][0]
        expected_image_id = str(uuid.uuid4())
        expected_size = FIVE_KB
        expected_contents = b"*" * expected_size
        hash_code = secretutils.md5(expected_contents, usedforsecurity=False)
        expected_checksum = hash_code.hexdigest()
        fake_size.__get__ = mock.Mock(return_value=expected_size)
        with mock.patch('hashlib.md5') as md5:
            md5.return_value = hash_code
            expected_location = format_location(
                VMWARE_DS['vmware_server_host'],
                VMWARE_DS['vmware_store_image_dir'], expected_image_id,
                VMWARE_DS['vmware_datastores'])
            image = six.BytesIO(expected_contents)
            with mock.patch('requests.Session.request') as HttpConn:
                HttpConn.return_value = utils.fake_response()
                location, size, checksum, metadata = self.store.add(
                    expected_image_id, image, 0)
                self.assertEqual("vmware1", metadata["store"])

        self.assertEqual(utils.sort_url_by_qs_keys(expected_location),
                         utils.sort_url_by_qs_keys(location))
        self.assertEqual(expected_size, size)
        self.assertEqual(expected_checksum, checksum)
Example 9
 def update_lock(self):
     prop = self.settings.get('system.guid')
     guid = prop.get('value')
     path = '%s:%s' % (guid, self.path)
     if isinstance(path, six.text_type):
         path = path.encode('utf-8')
     self.lock = md5(path, usedforsecurity=False).hexdigest()
Example 10
    def test_add(self, fake_api_session, fake_size, fake_select_datastore,
                 fake_cookie):
        """Test that we can add an image via the VMware backend."""
        fake_select_datastore.return_value = self.store.datastores[0][0]
        expected_image_id = str(uuid.uuid4())
        expected_size = FIVE_KB
        expected_contents = b"*" * expected_size
        hash_code = secretutils.md5(expected_contents, usedforsecurity=False)
        expected_checksum = hash_code.hexdigest()
        fake_size.__get__ = mock.Mock(return_value=expected_size)
        expected_cookie = 'vmware_soap_session=fake-uuid'
        fake_cookie.return_value = expected_cookie
        expected_headers = {
            'Content-Length': six.text_type(expected_size),
            'Cookie': expected_cookie
        }
        with mock.patch('hashlib.md5') as md5:
            md5.return_value = hash_code
            expected_location = format_location(
                VMWARE_DS['vmware_server_host'],
                VMWARE_DS['vmware_store_image_dir'], expected_image_id,
                VMWARE_DS['vmware_datastores'])
            image = six.BytesIO(expected_contents)
            with mock.patch('requests.Session.request') as HttpConn:
                HttpConn.return_value = utils.fake_response()
                location, size, checksum, metadata = self.store.add(
                    expected_image_id, image, expected_size)
                _, kwargs = HttpConn.call_args
                self.assertEqual(expected_headers, kwargs['headers'])
                self.assertEqual("vmware1", metadata["store"])

        self.assertEqual(utils.sort_url_by_qs_keys(expected_location),
                         utils.sort_url_by_qs_keys(location))
        self.assertEqual(expected_size, size)
        self.assertEqual(expected_checksum, checksum)
Example 11
 def _derive_key_and_iv(self, password, salt, key_length, iv_length):
     d = d_i = b''
     while len(d) < key_length + iv_length:
         md5_str = d_i + password + salt
         d_i = md5(md5_str, usedforsecurity=True).digest()
         d += d_i
     return d[:key_length], d[key_length:key_length + iv_length]
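This is the classic OpenSSL EVP_BytesToKey-style derivation: MD5 blocks are chained and concatenated until there is enough material for both the key and the IV. A standalone sketch with hashlib, assuming AES-256 sizes (32-byte key, 16-byte IV) and a made-up password and salt:

import hashlib

def derive_key_and_iv(password, salt, key_length, iv_length):
    d = d_i = b''
    while len(d) < key_length + iv_length:
        d_i = hashlib.md5(d_i + password + salt).digest()
        d += d_i
    return d[:key_length], d[key_length:key_length + iv_length]

key, iv = derive_key_and_iv(b'password', b'12345678', 32, 16)
assert len(key) == 32 and len(iv) == 16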
Example 12
 def close(self):
     media = http.MediaIoBaseUpload(io.BytesIO(self.data),
                                    'application/octet-stream',
                                    chunksize=self.chunk_size,
                                    resumable=self.resumable)
     resp = self.conn.objects().insert(
         bucket=self.bucket,
         name=self.object_name,
         body={},
         media_body=media).execute(num_retries=self.num_retries)
     etag = resp['md5Hash']
     md5 = secretutils.md5(self.data, usedforsecurity=False).digest()
     # digest() already returns bytes; base64-encode it and compare it against
     # the md5Hash reported by GCS, converted to bytes as well.
     etag = bytes(etag, 'utf-8')
     md5 = base64.b64encode(md5)
     if etag != md5:
         err = _('MD5 of object: %(object_name)s before: '
                 '%(md5)s and after: %(etag)s is not same.') % {
             'object_name': self.object_name,
             'md5': md5, 'etag': etag, }
         raise exception.InvalidBackup(reason=err)
     else:
         LOG.debug('MD5 before: %(md5)s and after: %(etag)s '
                   'writing object: %(object_name)s in GCS.',
                   {'etag': etag, 'md5': md5,
                    'object_name': self.object_name, })
         return md5
Example 13
    def _get_mount_point_for_share(self, nfs_share):
        """Returns path to mount point NFS share.

        :param nfs_share: example 172.18.194.100:/var/nfs
        """
        nfs_share = nfs_share.encode('utf-8')
        return os.path.join(self.configuration.nexenta_mount_point_base,
                            md5(nfs_share, usedforsecurity=False).hexdigest())
Example 14
 def _get_partition(self, data):
     if self._hash_function == 'md5':
         hashed_key = self._hash2int(md5(data, usedforsecurity=False))
     else:
         hashed_key = self._hash2int(
             hashlib.new(self._hash_function, data))
     position = bisect.bisect(self._partitions, hashed_key)
     return position if position < len(self._partitions) else 0
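The data is hashed, the digest is converted to an integer, and bisect locates the owning partition in the sorted ring, wrapping around to 0 past the end. A self-contained sketch of the same idea; the node names are made up and _hash2int is a stand-in for the class helper of the same name:

import bisect
import hashlib

def _hash2int(h):
    return int(h.hexdigest(), 16)

nodes = ['node-a', 'node-b', 'node-c']           # hypothetical ring members
partitions = sorted(_hash2int(hashlib.md5(n.encode())) for n in nodes)

def get_partition(data):
    hashed_key = _hash2int(hashlib.md5(data))
    position = bisect.bisect(partitions, hashed_key)
    return position if position < len(partitions) else 0

print(get_partition(b'some-object-key'))         # index of the owning partition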
Example 15
 def _convert_host_name(name):
     if name is None:
         return ""
     if len(name) > 32:
         name = md5(name.encode('utf-8'), usedforsecurity=False).hexdigest()
     else:
         name = name.replace('.', '-').lower()
     return name
Example 16
def get_hash_str(base_str):
    """Returns string that represents MD5 hash of base_str (in hex format).

    If base_str is a Unicode string, encode it to UTF-8.
    """
    if isinstance(base_str, str):
        base_str = base_str.encode('utf-8')
    return md5(base_str, usedforsecurity=False).hexdigest()
Example 17
 def _validate_etag(self, req, volume_id):
     if not req.if_match:
         return True
     context = req.environ['cinder.context']
     metadata = self._get_metadata(context, volume_id)
     data = jsonutils.dumps({"metadata": metadata})
     data = data.encode('utf-8')
     checksum = md5(data, usedforsecurity=False).hexdigest()
     return checksum in req.if_match.etags
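On the client side, the ETag returned by an earlier metadata GET is echoed back in If-Match; if the stored metadata changed in the meantime, the checksums no longer match and the conditional update fails. A rough sketch with requests; the endpoint URL is hypothetical:

import requests

base = 'http://cinder.example.com/v3/volumes/<volume-id>/metadata'   # hypothetical

resp = requests.get(base)
etag = resp.headers['Etag']          # checksum of the metadata we just read

# Apply the update only if nobody modified the metadata in the meantime.
requests.put(base, headers={'If-Match': etag},
             json={'metadata': {'purpose': 'test'}})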
Example 18
 def __init__(self, name, parsed_url, options):
     super(PostgresLock, self).__init__(name)
     self.acquired = False
     self._conn = None
     self._parsed_url = parsed_url
     self._options = options
     h = md5(usedforsecurity=False)
     h.update(name)
     self.key = h.digest()[0:2]
Example 19
def get_fingerprint(self):
    """Patch paramiko

    This method needs to be patched to allow paramiko to work under FIPS.
    Until the patch to do this merges, patch paramiko here.

    TODO(alee) Remove this when paramiko is patched.
    See https://github.com/paramiko/paramiko/pull/1928
    """
    return md5(self.asbytes(), usedforsecurity=False).digest()
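A sketch of how such a patch can be applied at runtime until the upstream fix merges; the paramiko class path is real, but whether and where a given project installs the patch is project-specific:

import paramiko
from oslo_utils.secretutils import md5   # FIPS-aware md5 wrapper

def _fips_safe_get_fingerprint(self):
    return md5(self.asbytes(), usedforsecurity=False).digest()

# Monkey-patch the base key class so every key type picks up the change.
paramiko.pkey.PKey.get_fingerprint = _fips_safe_get_fingerprint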
Example 20
    def add(self, image_id, image_file, image_size, hashing_algo,
            context=None, verifier=None):
        """This is a 0.26.0+ add, returns a 5-tuple"""
        if hashing_algo == 'md5':
            hasher = md5(usedforsecurity=False)
        else:
            hasher = hashlib.new(str(hashing_algo))

        # assume 'image_file' will be bytes for these tests
        hasher.update(image_file)
        backend_url = "backend://%s" % image_id
        bytes_written = len(image_file)
        checksum = md5(image_file, usedforsecurity=False).hexdigest()
        multihash = hasher.hexdigest()
        metadata_dict = {"verifier_obj":
                         verifier.name if verifier else None,
                         "context_obj":
                         context.name if context else None}
        return (backend_url, bytes_written, checksum, multihash, metadata_dict)
Example 21
 def test_update_lock(self, get_settings):
     guid = uuid.uuid4().hex
     settings = {'value': guid}
     get_settings.return_value = settings
     self.assertIsNone(self.proxy.update_lock())
     path = '%s:%s' % (guid, self.proxy.path)
     if isinstance(path, six.text_type):
         path = path.encode('utf-8')
     expected = md5(path, usedforsecurity=False).hexdigest()
     self.assertEqual(expected, self.proxy.lock)
Example 22
    def test_get_object_with_if_none_match(self):
        """Test getting object with if_none_match"""
        object_name = data_utils.rand_name(name='TestObject')
        data = data_utils.random_bytes()
        create_md5 = md5(data, usedforsecurity=False).hexdigest()
        create_metadata = {'Etag': create_md5}
        self.object_client.create_object(self.container_name,
                                         object_name,
                                         data,
                                         metadata=create_metadata)

        list_data = data_utils.random_bytes()
        list_md5 = md5(list_data, usedforsecurity=False).hexdigest()
        list_metadata = {'If-None-Match': list_md5}
        resp, body = self.object_client.get_object(self.container_name,
                                                   object_name,
                                                   metadata=list_metadata)
        self.assertHeaders(resp, 'Object', 'GET')
        self.assertEqual(body, data)
Example 23
    def test_configure_add_same_dir_multiple_times_same_priority(self):
        """Tests handling of same dir in config multiple times.

        Tests BadStoreConfiguration exception is raised if same directory
        is specified multiple times in filesystem_store_datadirs with the same
        priority.
        """
        store_map = [
            self.useFixture(fixtures.TempDir()).path,
            self.useFixture(fixtures.TempDir()).path
        ]
        self.conf.set_override('filesystem_store_datadir',
                               override=None,
                               group='file1')
        self.conf.set_override('filesystem_store_datadirs', [
            store_map[0] + ":100", store_map[1] + ":200", store_map[0] + ":100"
        ],
                               group='file1')
        try:
            self.store.configure()
        except exceptions.BadStoreConfiguration:
            self.fail("configure() raised BadStoreConfiguration unexpectedly!")

        # Test that we can add an image via the filesystem backend
        filesystem.ChunkedFile.CHUNKSIZE = 1024
        expected_image_id = str(uuid.uuid4())
        expected_file_size = 5 * units.Ki  # 5K
        expected_file_contents = b"*" * expected_file_size
        expected_checksum = md5(expected_file_contents,
                                usedforsecurity=False).hexdigest()
        expected_location = "file://%s/%s" % (store_map[1], expected_image_id)
        image_file = six.BytesIO(expected_file_contents)

        loc, size, checksum, metadata = self.store.add(expected_image_id,
                                                       image_file,
                                                       expected_file_size)
        self.assertEqual(u"file1", metadata["store"])

        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_file_size, size)
        self.assertEqual(expected_checksum, checksum)

        loc = location.get_location_from_uri_and_backend(expected_location,
                                                         "file1",
                                                         conf=self.conf)
        (new_image_file, new_image_size) = self.store.get(loc)
        new_image_contents = b""
        new_image_file_size = 0

        for chunk in new_image_file:
            new_image_file_size += len(chunk)
            new_image_contents += chunk

        self.assertEqual(expected_file_contents, new_image_contents)
        self.assertEqual(expected_file_size, new_image_file_size)
Example 24
def ftok(name, project):
    # Similar to ftok & http://semanchuk.com/philip/sysv_ipc/#ftok_weakness
    # but hopefully without as many weaknesses...
    h = md5(usedforsecurity=False)
    if not isinstance(project, bytes):
        project = project.encode('ascii')
    h.update(project)
    if not isinstance(name, bytes):
        name = name.encode('ascii')
    h.update(name)
    return (int(h.hexdigest(), 16) % _KEY_RANGE) + sysv_ipc.KEY_MIN
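A self-contained sketch of the same key derivation; _KEY_RANGE and the minimum key value are placeholders standing in for the module constant and sysv_ipc.KEY_MIN:

import hashlib

_KEY_RANGE = 2 ** 30          # placeholder for the module's _KEY_RANGE
_KEY_MIN = 1                  # placeholder for sysv_ipc.KEY_MIN

def ftok(name, project):
    h = hashlib.md5()
    for piece in (project, name):
        if not isinstance(piece, bytes):
            piece = piece.encode('ascii')
        h.update(piece)
    return (int(h.hexdigest(), 16) % _KEY_RANGE) + _KEY_MIN

print(ftok('my-resource', 'tooz'))    # deterministic SysV IPC key for the pair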
Example 25
 def index(self, req, volume_id):
     req_version = req.api_version_request
     metadata = super(Controller, self).index(req, volume_id)
     if req_version.matches(mv.ETAGS):
         data = jsonutils.dumps(metadata)
         data = data.encode('utf-8')
         resp = webob.Response()
         resp.headers['Etag'] = md5(data, usedforsecurity=False).hexdigest()
         resp.body = data
         return resp
     return metadata
Example 26
 def _get_hash_list(self, accounts_list):
     hash_list = []
     hash_fields = (
         preprov_creds.PreProvisionedCredentialProvider.HASH_CRED_FIELDS)
     for account in accounts_list:
         hash = md5(usedforsecurity=False)
         account_for_hash = dict(
             (k, v) for (k, v) in account.items() if k in hash_fields)
         hash.update(str(account_for_hash).encode('utf-8'))
         temp_hash = hash.hexdigest()
         hash_list.append(temp_hash)
     return hash_list
Example 27
 def setUp(self):
     super(TestBackCompatWrapper, self).setUp()
     self.fake_store = _FakeStore()
     self.fake_context = _FakeContext()
     self.fake_verifier = _FakeVerifier()
     self.img_id = '1234'
     self.img_file = b'0123456789'
     self.img_size = 10
     self.img_checksum = md5(self.img_file,
                             usedforsecurity=False).hexdigest()
     self.hashing_algo = 'sha256'
     self.img_sha256 = hashlib.sha256(self.img_file).hexdigest()
Example 28
 def test_reader_partial(self):
     content = b'XXX'
     image = six.BytesIO(content)
     expected_checksum = secretutils.md5(b'X',
                                         usedforsecurity=False).hexdigest()
     expected_multihash = hashlib.sha256(b'X').hexdigest()
     reader = vm_store._Reader(image, self.hash_algo)
     ret = reader.read(1)
     self.assertEqual(b'X', ret)
     self.assertEqual(expected_checksum, reader.checksum.hexdigest())
     self.assertEqual(expected_multihash, reader.os_hash_value.hexdigest())
     self.assertEqual(1, reader.size)
Example 29
def get_hasher(hash_algo, usedforsecurity=True):
    """
    Returns the required hasher, given the hashing algorithm.
    This is primarily to ensure that the hash algorithm is correctly
    chosen when executed on a FIPS-enabled system.

    :param hash_algo: hash algorithm requested
    :param usedforsecurity: whether the hashes are used in a security context
    """
    if str(hash_algo) == 'md5':
        return md5(usedforsecurity=usedforsecurity)
    else:
        return hashlib.new(str(hash_algo))
Example 30
    def test_add_different_backend(self, mock_client):
        self.store = s3.Store(self.conf, backend="s3_region2")
        self.store.configure()
        self.register_store_backend_schemes(self.store, 's3', 's3_region2')

        expected_image_id = str(uuid.uuid4())
        expected_s3_size = FIVE_KB
        expected_s3_contents = b"*" * expected_s3_size
        expected_checksum = md5(expected_s3_contents,
                                usedforsecurity=False).hexdigest()
        expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
        expected_location = format_s3_location(S3_CONF['s3_store_access_key'],
                                               S3_CONF['s3_store_secret_key'],
                                               'http://s3-region2.com',
                                               S3_CONF['s3_store_bucket'],
                                               expected_image_id)
        image_s3 = six.BytesIO(expected_s3_contents)

        fake_s3_client = botocore.session.get_session().create_client('s3')

        with stub.Stubber(fake_s3_client) as stubber:
            stubber.add_response(
                method='head_bucket',
                service_response={},
                expected_params={'Bucket': S3_CONF['s3_store_bucket']})
            stubber.add_client_error(method='head_object',
                                     service_error_code='404',
                                     service_message='',
                                     expected_params={
                                         'Bucket': S3_CONF['s3_store_bucket'],
                                         'Key': expected_image_id
                                     })
            stubber.add_response(method='put_object',
                                 service_response={},
                                 expected_params={
                                     'Bucket': S3_CONF['s3_store_bucket'],
                                     'Key': expected_image_id,
                                     'Body': botocore.stub.ANY
                                 })

            mock_client.return_value = fake_s3_client
            loc, size, checksum, multihash, metadata = \
                self.store.add(expected_image_id, image_s3, expected_s3_size,
                               self.hash_algo)
            self.assertEqual("s3_region2", metadata["store"])

            self.assertEqual(expected_location, loc)
            self.assertEqual(expected_s3_size, size)
            self.assertEqual(expected_checksum, checksum)
            self.assertEqual(expected_multihash, multihash)