# NOTE(review): this chunk was collapsed onto a single line in the source;
# reconstructed here with conventional formatting. It is the tail of a test
# method (the enclosing `def` and the definitions of `self`, `bucket`,
# `key_name`, `write_key`, `test_string_1`, and `meta_key` are outside this
# view). Statement boundaries were recovered from the inline comments.

# create another key to write over the first key
write_key1 = Key(bucket, key_name)

# upload some data; the original key object (same name) should now report
# that the key exists
write_key1.set_contents_from_string(test_string_1)
self.assertTrue(write_key.exists())

# create another key with the same name
read_key = Key(bucket, key_name)

# read back the data and verify it is what the second write stored
returned_string = read_key.get_contents_as_string()
self.assertEqual(returned_string, test_string_1)

# the overwrite did not carry metadata, so the lookup should return None
returned_meta_value = read_key.get_metadata(meta_key)
self.assertEqual(returned_meta_value, None)

# delete the key; the original key object should now report non-existence
read_key.delete()
self.assertFalse(write_key.exists())

# delete the bucket
self._s3_connection.delete_bucket(bucket.name)

if __name__ == "__main__":
    initialize_logging()
    unittest.main()
# NOTE(review): this chunk was collapsed onto a single line in the source;
# reconstructed here with conventional formatting. It is the tail of a test
# method (the enclosing `def` and the definitions of `self`, `bucket`,
# `key_names`, `iteration_count`, `log`, `_create_some_keys_with_data`, and
# `_clear_bucket` are outside this view). The loop extent — body running from
# the log line through the re-archive, with `_clear_bucket` as post-loop
# cleanup — is inferred from the code order; confirm against the original.

# archive a key
keys_with_data = _create_some_keys_with_data(bucket, key_names)
self.assertEqual(len(keys_with_data), 1)
test_key, test_data = keys_with_data[0]

for index in range(iteration_count):
    log.info("iteration {0}".format(index + 1))

    # exactly one version of the key should be visible
    existing_keys = bucket.get_all_versions(prefix=key_names[0])
    self.assertEqual(len(existing_keys), 1)

    # try to retrieve the key and verify its content
    read_data = test_key.get_contents_as_string()
    self.assertEqual(read_data, test_data, test_key.name)

    # delete the specific version we just archived
    test_key.delete(version_id=test_key.version_id)
    self.assertFalse(test_key.exists())

    # archive another version of the key for the next iteration
    keys_with_data = _create_some_keys_with_data(bucket, key_names)
    self.assertEqual(len(keys_with_data), 1)
    test_key, test_data = keys_with_data[0]

# cleanup: remove all remaining keys/versions from the bucket
_clear_bucket(self._s3_connection, bucket)

if __name__ == "__main__":
    initialize_logging()
    unittest.main()