def __init__(self, account, output):
    """Create a Kamaki instance bound to *account*, writing to *output*."""
    self.account = account
    self.out = output
    # Resolve service endpoints from the authenticated account.
    object_store = self.account.get_service_endpoints('object-store')
    self.pithos = PithosClient(
        object_store['publicURL'],
        self.account.token,
        self.account.user_info()['id'],
        CONTAINER)
    image_svc = self.account.get_service_endpoints('image')
    self.image = ImageClient(image_svc['publicURL'], self.account.token)
def initialize_clients(self):
    """Initialize all the Kamaki Clients"""
    self.astakos = AstakosClient(self.auth_url, self.token)
    self.astakos.CONNECTION_RETRY_LIMIT = self.retry
    endpoints = self.astakos.authenticate()

    def make_client(cls, url):
        # Build a client and apply the shared retry policy.
        client = cls(url, self.token)
        client.CONNECTION_RETRY_LIMIT = self.retry
        return client

    self.compute_url = _get_endpoint_url(endpoints, "compute")
    self.compute = make_client(ComputeClient, self.compute_url)
    # Cyclades shares the compute endpoint.
    self.cyclades = make_client(CycladesClient, self.compute_url)
    self.network_url = _get_endpoint_url(endpoints, "network")
    self.network = make_client(CycladesNetworkClient, self.network_url)
    self.pithos_url = _get_endpoint_url(endpoints, "object-store")
    self.pithos = make_client(PithosClient, self.pithos_url)
    self.image_url = _get_endpoint_url(endpoints, "image")
    self.image = make_client(ImageClient, self.image_url)
def setUp(self):
    """Collect settings, build the sync clients, and start from a clean slate."""
    # set up useful settings variables
    self.url = os.getenv('PITHOS_URL', 'https://pithos.okeanos.grnet.gr/v1')
    self.token = os.getenv('ASTAKOS_TOKEN', '')
    if not self.token:
        print('\n\nUnable to run test suite.')
        print('Did you export the ASTAKOS_TOKEN environmental variable?')
        print('You can do this by running:')
        print('ASTAKOS_TOKEN="your_token_here" python runtests.py\n')
        sys.exit(0)
    self.account = 'd8e6f8bb-619b-4ce6-8903-89fabdca024d'
    self.container = 'pithos'
    self.syncer = pithossync.Syncer(
        self.url, self.token, self.account, self.container)
    self.client = PithosClient(
        self.url, self.token, self.account, self.container)
    # the local folder used by the tests, unavailable to the code being tested
    local_dir = 'localworkspace'
    # the name of the remote folder within the pithos container
    remote_dir = 'sync-test'
    self.workspace = self.Workspace(self, local_dir)
    self.remote = self.Remote(self, remote_dir)
    # clean up from previous test runs that may have crashed
    self.workspace.delete()
    self.remote.recursive_delete(self.remote.path)
def __init__(self, syncer, local, folder=None):
    """Bind a working copy to *local*; load stored metadata when *folder* is omitted."""
    self.syncer = syncer
    self.local = local
    self.meta_file = meta.LocalMetaFile(self.local)
    self.lock = lock.Lock(self)
    self.client = PithosClient(
        syncer.url, syncer.token, syncer.account, syncer.container)
    if folder is not None:
        # working copy not init'ed;
        # the caller must call .init() or .clone() on it
        self.folder = folder
    else:
        # working copy already init'ed
        self.meta_file.load()
        self.folder = self.meta_file.remote_dir
def authenticate(self):
    """Validate the locally stored token against Astakos and Pithos.

    Returns:
        (pithosClient, username) on success.

    Raises:
        faults.InvalidAuth: the token was rejected by either service.
        faults.NetworkError: the services could not be reached.
        KeyError: propagated from LocalDataManager if no token is stored.
    """
    # Removed the dead `access_token = None` initializer: get_token()
    # either returns a token or raises, so the placeholder was never read.
    access_token = LocalDataManager().get_token()
    try:
        s = AstakosClient(access_token, strings.Pithos_AUTHURL)
        auth_data = s.authenticate()
        username = auth_data['access']['user']['name']
        pithos_url = self._get_pithos_public_url(auth_data)
        uuid = auth_data['access']['user']['id']
        pithosClient = PithosClient(pithos_url, access_token, uuid)
        # Probe the object store so a bad token fails here, not later;
        # the listing itself is intentionally discarded.
        pithosClient.list_containers()
    except (AstakosErrors.Unauthorized, faults.InvalidAuth):
        raise faults.InvalidAuth('Pithos-Auth')
    except (AstakosErrors.AstakosClientException, faults.NetworkError):
        raise faults.NetworkError('No internet-Auth')
    return (pithosClient, username)
def list_pithos_files(self): """ Method for listing pithos+ files available to the user """ auth_url = self.opts['auth_url'] token = self.opts['token'] try: auth = AstakosClient(auth_url, token) auth.authenticate() except ClientError: msg = ' Authentication error: Invalid Token' logging.error(msg) exit(error_fatal) pithos_endpoint = auth.get_endpoint_url('object-store') pithos_container = self.opts.get('pithos_container','pithos') user_id = auth.user_info['id'] pithos_client = PithosClient(pithos_endpoint,self.opts['token'], user_id, pithos_container) objects = pithos_client.list_objects() for object in objects: is_dir = 'application/directory' in object.get('content_type', object.get('content-type', '')) if not is_dir: print u"{:>12s} \"pithos:/{:s}/{:s}\"".format(bytes_to_shorthand(object['bytes']), pithos_container,object['name'])
def list_pithos_files(self): """ Method for listing pithos+ files available to the user """ auth_url = self.opts['auth_url'] token = self.opts['token'] try: auth = AstakosClient(auth_url, token) auth.authenticate() except ClientError: msg = 'Authentication error: Invalid Token' logging.error(msg) exit(error_fatal) pithos_endpoint = auth.get_endpoint_url('object-store') pithos_container = self.opts.get('pithos_container','pithos') user_id = auth.user_info['id'] pithos_client = PithosClient(pithos_endpoint,self.opts['token'], user_id, pithos_container) objects = pithos_client.list_objects() for object in objects: is_dir = 'application/directory' in object.get('content_type', object.get('content-type', '')) is_dir = 'application/folder' in object.get('content_type', object.get('content-type', '')) if not is_dir: print u"{:>12s} \"pithos:/{:s}/{:s}\"".format(bytes_to_shorthand(object['bytes']), pithos_container,object['name'])
def __init__(self, syncer, local, folder = None):
    """Attach to a working copy; *folder* is supplied only for fresh copies."""
    self.syncer, self.local = syncer, local
    self.meta_file = meta.LocalMetaFile(self.local)
    self.lock = lock.Lock(self)
    self.client = PithosClient(
        syncer.url, syncer.token, syncer.account, syncer.container)
    if folder is None:
        # working copy already init'ed: recover the remote dir from disk
        self.meta_file.load()
        self.folder = self.meta_file.remote_dir
    else:
        # working copy not init'ed;
        # the caller must call .init() or .clone() on it
        self.folder = folder
def setUp(self):
    """Build authenticated clients, seed test data, and share one object."""
    self.cloud = 'cloud.%s' % self['testcloud']
    aurl = self[self.cloud, 'url']
    self.token = self[self.cloud, 'token']
    self.auth_base = AstakosClient(aurl, self.token)
    endpoints = self.auth_base.get_service_endpoints('object-store')
    purl = endpoints['publicURL']
    self.uuid = self.auth_base.user_term('id')
    self.client = PithosClient(purl, self.token, self.uuid)
    self.now = time.mktime(time.gmtime())
    self.now_unformated = datetime.datetime.utcnow()
    self._init_data()
    # Prepare an object to be shared - also its container
    self.client.container = self.c1
    self.client.object_post(
        'test', update=True,
        permissions={'read': [self.client.account]})
    self.create_remote_object(self.c1, 'another.test')
def initialize_clients(self, ignore_ssl=False):
    """Initialize all the Kamaki Clients"""
    # Patch kamaki for SSL verification
    self._kamaki_ssl(ignore_ssl=ignore_ssl)

    # Initialize kamaki Clients
    self.astakos = AstakosClient(self.auth_url, self.token)
    self.astakos.CONNECTION_RETRY_LIMIT = self.retry

    def endpoint_of(cls):
        # Each client class advertises its own service type.
        return self.astakos.get_endpoint_url(cls.service_type)

    def with_retry(client):
        # Apply the shared retry policy to a freshly built client.
        client.CONNECTION_RETRY_LIMIT = self.retry
        return client

    self.compute_url = endpoint_of(ComputeClient)
    self.compute = with_retry(ComputeClient(self.compute_url, self.token))
    self.cyclades_url = endpoint_of(CycladesClient)
    self.cyclades = with_retry(
        CycladesClient(self.cyclades_url, self.token))
    self.block_storage_url = endpoint_of(CycladesBlockStorageClient)
    self.block_storage = with_retry(CycladesBlockStorageClient(
        self.block_storage_url, self.token))
    self.network_url = endpoint_of(CycladesNetworkClient)
    self.network = with_retry(
        CycladesNetworkClient(self.network_url, self.token))
    self.pithos_url = endpoint_of(PithosClient)
    self.pithos = with_retry(PithosClient(self.pithos_url, self.token))
    self.image_url = endpoint_of(ImageClient)
    self.image = with_retry(ImageClient(self.image_url, self.token))
def _prepare_img(self):
    """Upload the configured local image to a fresh Pithos container and
    register it.

    Side effects: creates a container and object on the Pithos server,
    registers the image, and records it in self._imglist and
    self._imgdetails.
    """
    # BUG FIX: `with` guarantees the file handle is closed even if any
    # call up to and including the upload raises; the original only
    # closed it on the success path.
    with open(self['image', 'local_path'], 'rb') as f:
        (token, uuid) = (self.token, self.auth_base.user_term('id'))
        purl = self.auth_base.get_service_endpoints(
            'object-store')['publicURL']
        from kamaki.clients.pithos import PithosClient
        self.pithcli = PithosClient(purl, token, uuid)
        cont = 'cont_%s' % self.now
        self.pithcli.container = cont
        self.obj = 'obj_%s' % self.now
        print('\t- Create container %s on Pithos server' % cont)
        self.pithcli.container_put()
        self.location = 'pithos://%s/%s/%s' % (uuid, cont, self.obj)
        print('\t- Upload an image at %s...\n' % self.location)
        self.pithcli.upload_object(self.obj, f)
        print('\t- ok')
    # File is closed here, matching the original f.close()-before-register
    # ordering.
    r = self.client.register(
        self.imgname, self.location, params=dict(is_public=True))
    self._imglist[self.imgname] = dict(name=r['name'], id=r['id'])
    self._imgdetails[self.imgname] = r
class Pithos(livetest.Generic): files = [] def setUp(self): self.cloud = 'cloud.%s' % self['testcloud'] aurl, self.token = self[self.cloud, 'url'], self[self.cloud, 'token'] self.auth_base = AstakosClient(aurl, self.token) purl = self.auth_base.get_service_endpoints( 'object-store')['publicURL'] self.uuid = self.auth_base.user_term('id') self.client = PithosClient(purl, self.token, self.uuid) self.now = time.mktime(time.gmtime()) self.now_unformated = datetime.datetime.utcnow() self._init_data() """Prepare an object to be shared - also its container""" self.client.container = self.c1 self.client.object_post( 'test', update=True, permissions={'read': [self.client.account]}) self.create_remote_object(self.c1, 'another.test') def _init_data(self): self.c1 = 'c1_' + unicode(self.now) self.c2 = 'c2_' + unicode(self.now) self.c3 = 'c3_' + unicode(self.now) try: self.client.create_container(self.c2) except ClientError: pass try: self.client.create_container(self.c1) except ClientError: pass try: self.client.create_container(self.c3) except ClientError: pass self.create_remote_object(self.c1, 'test') self.create_remote_object(self.c2, 'test') self.create_remote_object(self.c1, 'test1') self.create_remote_object(self.c2, 'test1') def create_remote_object(self, container, obj): self.client.container = container self.client.object_put( obj, content_type='application/octet-stream', data='file %s that lives in %s' % (obj, container), metadata={'incontainer': container}) def forceDeleteContainer(self, container): self.client.container = container try: r = self.client.list_objects() except ClientError: return for obj in r: name = obj['name'] self.client.del_object(name) r = self.client.container_delete() self.container = '' def tearDown(self): """Destroy test cases""" for f in self.files: f.close() self.forceDeleteContainer(self.c1) self.forceDeleteContainer(self.c2) try: self.forceDeleteContainer(self.c3) except ClientError: pass self.client.container = '' def test_000(self): 
"""Prepare a full Pithos+ test""" print('') super(self.__class__, self).test_000() def test_account_head(self): """Test account_HEAD""" self._test_0010_account_head() def _test_0010_account_head(self): r = self.client.account_head() self.assertEqual(r.status_code, 204) r = self.client.account_head(until='1000000000') self.assertEqual(r.status_code, 204) r = self.client.get_account_info(until='1000000000') datestring = unicode(r['x-account-until-timestamp']) self.assertEqual(u'Sun, 09 Sep 2001 01:46:40 GMT', datestring) r = self.client.get_account_quota() self.assertTrue('x-account-policy-quota' in r) #r = self.client.get_account_versioning() #self.assertTrue('x-account-policy-versioning' in r) """Check if(un)modified_since""" for format in self.client.DATE_FORMATS: now_formated = self.now_unformated.strftime(format) r1 = self.client.account_head( if_modified_since=now_formated, success=(204, 304, 412)) sc1 = r1.status_code r2 = self.client.account_head( if_unmodified_since=now_formated, success=(204, 304, 412)) sc2 = r2.status_code self.assertNotEqual(sc1, sc2) def test_account_get(self): """Test account_GET""" self._test_0020_account_get() def _test_0020_account_get(self): #r = self.client.account_get() #self.assertEqual(r.status_code, 200) r = self.client.list_containers() fullLen = len(r) self.assertTrue(fullLen > 2) r = self.client.account_get(limit=1) self.assertEqual(len(r.json), 1) r = self.client.account_get(marker='c2_') temp_c0 = r.json[0]['name'] temp_c2 = r.json[2]['name'] r = self.client.account_get(limit=2, marker='c2_') conames = [container['name'] for container in r.json if ( container['name'].lower().startswith('c2_'))] self.assertTrue(temp_c0 in conames) self.assertFalse(temp_c2 in conames) r = self.client.account_get(show_only_shared=True) self.assertTrue(self.c1 in [c['name'] for c in r.json]) r = self.client.account_get(until=1342609206.0) self.assertTrue(len(r.json) <= fullLen) """Check if(un)modified_since""" for format in 
self.client.DATE_FORMATS: now_formated = self.now_unformated.strftime(format) r1 = self.client.account_get( if_modified_since=now_formated, success=(200, 304, 412)) sc1 = r1.status_code r2 = self.client.account_get( if_unmodified_since=now_formated, success=(200, 304, 412)) sc2 = r2.status_code self.assertNotEqual(sc1, sc2) """Check sharing_accounts""" r = self.client.get_sharing_accounts() try: self.assertTrue(len(r) > 0) except AssertionError as e: print '\n\tWARNING: Are there any sharers to your account?' self.assertEqual(len(r), 0) print '\tIf there are, this (%s) is an error, else it is OK' % e def test_account_post(self): """Test account_POST""" self._test_0030_account_post() def _test_0030_account_post(self): r = self.client.account_post() self.assertEqual(r.status_code, 202) grpName = 'grp' + unicode(self.now) """Method set/del_account_meta and set_account_groupcall use account_post internally """ u1 = self.client.account # Invalid display name u2 = '1nc0r3c7-d15p14y-n4m3' self.assertRaises( ClientError, self.client.set_account_group, grpName, [u1, u2]) self.client.set_account_group(grpName, [u1]) r = self.client.get_account_group() self.assertEqual(r['x-account-group-' + grpName], '%s' % u1) self.client.del_account_group(grpName) r = self.client.get_account_group() self.assertTrue('x-account-group-' + grpName not in r) mprefix = 'meta' + unicode(self.now) self.client.set_account_meta({ mprefix + '1': 'v1', mprefix + '2': 'v2'}) r = self.client.get_account_meta() self.assertEqual(r['x-account-meta-' + mprefix + '1'], 'v1') self.assertEqual(r['x-account-meta-' + mprefix + '2'], 'v2') self.client.del_account_meta(mprefix + '1') r = self.client.get_account_meta() self.assertTrue('x-account-meta-' + mprefix + '1' not in r) self.client.del_account_meta(mprefix + '2') r = self.client.get_account_meta() self.assertTrue('x-account-meta-' + mprefix + '2' not in r) """Missing testing for quota, versioning, because normally you don't have permissions to modify those 
at account level """ #newquota = 1000000 #self.client.set_account_quota(newquota) #r = self.client.get_account_info() #print(unicode(r)) #r = self.client.get_account_quota() #self.assertEqual(r['x-account-policy-quota'], newquota) #self.client.set_account_versioning('auto') def test_container_head(self): """Test container_HEAD""" self._test_0040_container_head() def _test_0040_container_head(self): self.client.container = self.c1 r = self.client.container_head() self.assertEqual(r.status_code, 204) """Check until""" r = self.client.container_head(until=1000000, success=(204, 404)) self.assertEqual(r.status_code, 404) """Check and if(un)modified_since""" for format in self.client.DATE_FORMATS: now_formated = self.now_unformated.strftime(format) r1 = self.client.container_head( if_modified_since=now_formated, success=(204, 304, 412)) sc1 = r1.status_code r2 = self.client.container_head( if_unmodified_since=now_formated, success=(204, 304, 412)) sc2 = r2.status_code self.assertNotEqual(sc1, sc2) """Check container object meta""" r = self.client.get_container_object_meta() self.assertEqual(r['x-container-object-meta'], 'Incontainer') def test_container_get(self): """Test container_GET""" self._test_0050_container_get() def _test_0050_container_get(self): self.client.container = self.c1 r = self.client.container_get() self.assertEqual(r.status_code, 200) fullLen = len(r.json) r = self.client.container_get(prefix='test') lalobjects = [obj for obj in r.json if obj['name'].startswith('test')] self.assertTrue(len(r.json) > 1) self.assertEqual(len(r.json), len(lalobjects)) r = self.client.container_get(limit=1) self.assertEqual(len(r.json), 1) r = self.client.container_get(marker='another') self.assertTrue(len(r.json) > 1) neobjects = [obj for obj in r.json if obj['name'] > 'another'] self.assertEqual(len(r.json), len(neobjects)) r = self.client.container_get(prefix='another.test', delimiter='.') self.assertTrue(fullLen > len(r.json)) r = self.client.container_get(path='/') 
self.assertEqual(fullLen, len(r.json)) r = self.client.container_get(format='xml') self.assertEqual(r.text.split()[4], 'name="' + self.c1 + '">') r = self.client.container_get(meta=['incontainer']) self.assertTrue(len(r.json) > 0) r = self.client.container_get(show_only_shared=True) self.assertTrue(len(r.json) < fullLen) try: r = self.client.container_get(until=1000000000) datestring = unicode(r.headers['x-account-until-timestamp']) self.assertEqual(u'Sun, 09 Sep 2001 01:46:40 GMT', datestring) except ClientError: pass """Check and if un/modified_since""" for format in self.client.DATE_FORMATS: now_formated = self.now_unformated.strftime(format) r1 = self.client.container_get( if_modified_since=now_formated, success=(200, 304, 412)) sc1 = r1.status_code r2 = self.client.container_get( if_unmodified_since=now_formated, success=(200, 304, 412)) sc2 = r2.status_code self.assertNotEqual(sc1, sc2) def test_container_put(self): """Test container_PUT""" self._test_0050_container_put() def _test_0050_container_put(self): self.client.container = self.c2 r = self.client.create_container() self.assertTrue(isinstance(r, dict)) r = self.client.get_container_limit(self.client.container) cquota = r.values()[0] newquota = 2 * int(cquota) r = self.client.create_container(sizelimit=newquota) self.assertTrue(isinstance(r, dict)) r = self.client.get_container_limit(self.client.container) xquota = int(r.values()[0]) self.assertEqual(newquota, xquota) r = self.client.create_container(versioning='auto') self.assertTrue(isinstance(r, dict)) r = self.client.get_container_versioning(self.client.container) nvers = r.values()[0] self.assertEqual('auto', nvers) r = self.client.container_put(versioning='none') self.assertEqual(r.status_code, 202) r = self.client.get_container_versioning(self.client.container) nvers = r.values()[0] self.assertEqual('none', nvers) r = self.client.create_container(metadata={'m1': 'v1', 'm2': 'v2'}) self.assertTrue(isinstance(r, dict)) r = 
self.client.get_container_meta(self.client.container) self.assertTrue('x-container-meta-m1' in r) self.assertEqual(r['x-container-meta-m1'], 'v1') self.assertTrue('x-container-meta-m2' in r) self.assertEqual(r['x-container-meta-m2'], 'v2') r = self.client.container_put(metadata={'m1': '', 'm2': 'v2a'}) self.assertEqual(r.status_code, 202) r = self.client.get_container_meta(self.client.container) self.assertTrue('x-container-meta-m1' not in r) self.assertTrue('x-container-meta-m2' in r) self.assertEqual(r['x-container-meta-m2'], 'v2a') self.client.del_container_meta(self.client.container) def test_container_post(self): """Test container_POST""" self._test_0060_container_post() def _test_0060_container_post(self): self.client.container = self.c2 """Simple post""" r = self.client.container_post() self.assertEqual(r.status_code, 202) """post meta""" self.client.set_container_meta({'m1': 'v1', 'm2': 'v2'}) r = self.client.get_container_meta(self.client.container) self.assertTrue('x-container-meta-m1' in r) self.assertEqual(r['x-container-meta-m1'], 'v1') self.assertTrue('x-container-meta-m2' in r) self.assertEqual(r['x-container-meta-m2'], 'v2') """post/2del meta""" r = self.client.del_container_meta('m1') r = self.client.set_container_meta({'m2': 'v2a'}) r = self.client.get_container_meta(self.client.container) self.assertTrue('x-container-meta-m1' not in r) self.assertTrue('x-container-meta-m2' in r) self.assertEqual(r['x-container-meta-m2'], 'v2a') """check quota""" r = self.client.get_container_limit(self.client.container) cquota = r.values()[0] newquota = 2 * int(cquota) r = self.client.set_container_limit(newquota) r = self.client.get_container_limit(self.client.container) xquota = int(r.values()[0]) self.assertEqual(newquota, xquota) r = self.client.set_container_limit(cquota) r = self.client.get_container_limit(self.client.container) xquota = r.values()[0] self.assertEqual(cquota, xquota) """Check versioning""" self.client.set_container_versioning('auto') r = 
self.client.get_container_versioning(self.client.container) nvers = r.values()[0] self.assertEqual('auto', nvers) self.client.set_container_versioning('none') r = self.client.get_container_versioning(self.client.container) nvers = r.values()[0] self.assertEqual('none', nvers) """put_block uses content_type and content_length to post blocks of data 2 container. All that in upload_object""" """Change a file at fs""" f = self.create_large_file(1024 * 1024 * 100) """Upload it at a directory in container""" self.client.create_directory('dir') r = self.client.upload_object('/dir/sample.file', f) for term in ('content-length', 'content-type', 'x-object-version'): self.assertTrue(term in r) """Check if file has been uploaded""" r = self.client.get_object_info('/dir/sample.file') self.assertTrue(int(r['content-length']) > 100000000) """What is tranfer_encoding? What should I check about it? """ #TODO """Check update=False""" r = self.client.object_post( 'test', update=False, metadata={'newmeta': 'newval'}) r = self.client.get_object_info('test') self.assertTrue('x-object-meta-newmeta' in r) self.assertFalse('x-object-meta-incontainer' in r) r = self.client.del_container_meta('m2') def test_container_delete(self): """Test container_DELETE""" self._test_0070_container_delete() def _test_0070_container_delete(self): """Fail to delete a non-empty container""" self.client.container = self.c2 r = self.client.container_delete(success=409) self.assertEqual(r.status_code, 409) """Fail to delete c3 (empty) container""" self.client.container = self.c3 r = self.client.container_delete(until='1000000000') self.assertEqual(r.status_code, 204) """Delete c3 (empty) container""" r = self.client.container_delete() self.assertEqual(r.status_code, 204) """Purge container(empty a container), check versionlist""" self.client.container = self.c1 r = self.client.object_head('test', success=(200, 404)) self.assertEqual(r.status_code, 200) self.client.del_container(delimiter='/') r = 
self.client.object_head('test', success=(200, 404)) self.assertEqual(r.status_code, 404) r = self.client.get_object_versionlist('test') self.assertTrue(len(r) > 0) self.assertTrue(len(r[0]) > 1) self.client.purge_container() self.assertRaises( ClientError, self.client.get_object_versionlist, 'test') def _test_0080_recreate_deleted_data(self): self._init_data() def test_object_head(self): """Test object_HEAD""" self._test_0090_object_head() def _test_0090_object_head(self): self.client.container = self.c2 obj = 'test' r = self.client.object_head(obj) self.assertEqual(r.status_code, 200) etag = r.headers['etag'] real_version = r.headers['x-object-version'] self.assertRaises( ClientError, self.client.object_head, obj, version=-10) r = self.client.object_head(obj, version=real_version) self.assertEqual(r.headers['x-object-version'], real_version) r = self.client.object_head(obj, if_etag_match=etag) self.assertEqual(r.status_code, 200) r = self.client.object_head( obj, if_etag_not_match=etag, success=(200, 412, 304)) self.assertNotEqual(r.status_code, 200) r = self.client.object_head( obj, version=real_version, if_etag_match=etag, success=200) self.assertEqual(r.status_code, 200) """Check and if(un)modified_since""" for format in self.client.DATE_FORMATS: now_formated = self.now_unformated.strftime(format) r1 = self.client.object_head( obj, if_modified_since=now_formated, success=(200, 304, 412)) sc1 = r1.status_code r2 = self.client.object_head( obj, if_unmodified_since=now_formated, success=(200, 304, 412)) sc2 = r2.status_code self.assertNotEqual(sc1, sc2) def test_object_get(self): """Test object_GET""" self._test_0100_object_get() def _test_0100_object_get(self): self.client.container = self.c1 obj = 'test' r = self.client.object_get(obj) self.assertEqual(r.status_code, 200) osize = int(r.headers['content-length']) etag = r.headers['etag'] r = self.client.object_get(obj, hashmap=True) for term in ('hashes', 'block_hash', 'block_hash', 'bytes'): self.assertTrue(term 
in r.json) r = self.client.object_get(obj, format='xml', hashmap=True) self.assertEqual(len(r.text.split('hash>')), 3) rangestr = 'bytes=%s-%s' % (osize / 3, osize / 2) r = self.client.object_get( obj, data_range=rangestr, success=(200, 206)) partsize = int(r.headers['content-length']) self.assertTrue(0 < partsize and partsize <= 1 + osize / 3) rangestr = 'bytes=%s-%s' % (osize / 3, osize / 2) r = self.client.object_get( obj, data_range=rangestr, if_range=True, success=(200, 206)) partsize = int(r.headers['content-length']) self.assertTrue(0 < partsize and partsize <= 1 + osize / 3) r = self.client.object_get(obj, if_etag_match=etag) self.assertEqual(r.status_code, 200) r = self.client.object_get(obj, if_etag_not_match=etag + 'LALALA') self.assertEqual(r.status_code, 200) """Check and if(un)modified_since""" for format in self.client.DATE_FORMATS: now_formated = self.now_unformated.strftime(format) r1 = self.client.object_get( obj, if_modified_since=now_formated, success=(200, 304, 412)) sc1 = r1.status_code r2 = self.client.object_get( obj, if_unmodified_since=now_formated, success=(200, 304, 412)) sc2 = r2.status_code self.assertNotEqual(sc1, sc2) """Upload an object to download""" container_info_cache = dict() trg_fname = 'remotefile_%s' % self.now f_size = 59247824 src_f = self.create_large_file(f_size) print('\tUploading...') r = self.client.upload_object( trg_fname, src_f, container_info_cache=container_info_cache) print('\tDownloading...') self.files.append(NamedTemporaryFile()) dnl_f = self.files[-1] self.client.download_object(trg_fname, dnl_f) print('\tCheck if files match...') for pos in (0, f_size / 2, f_size - 128): src_f.seek(pos) dnl_f.seek(pos) self.assertEqual(src_f.read(64), dnl_f.read(64)) print('\tDownload KiBs to string and check again...') for pos in (0, f_size / 2, f_size - 256): src_f.seek(pos) tmp_s = self.client.download_to_string( trg_fname, range_str='%s-%s' % (pos, (pos + 128))) self.assertEqual(tmp_s, src_f.read(len(tmp_s))) 
print('\tUploading KiBs as strings...') trg_fname = 'fromString_%s' % self.now src_size = 2 * 1024 src_f.seek(0) src_str = src_f.read(src_size) self.client.upload_from_string(trg_fname, src_str) print('\tDownload as string and check...') tmp_s = self.client.download_to_string(trg_fname) self.assertEqual(tmp_s, src_str) """Upload a boring file""" trg_fname = 'boringfile_%s' % self.now src_f = self.create_boring_file(42) print('\tUploading boring file...') self.client.upload_object( trg_fname, src_f, container_info_cache=container_info_cache) print('\tDownloading boring file...') self.files.append(NamedTemporaryFile()) dnl_f = self.files[-1] self.client.download_object(trg_fname, dnl_f) print('\tCheck if files match...') for i in range(42): self.assertEqual(sample_block(src_f, i), sample_block(dnl_f, i)) def test_object_put(self): """Test object_PUT""" self._test_0150_object_put() def _test_0150_object_put(self): self.client.container = self.c2 obj = 'another.test' self.client.create_object(obj + '.FAKE') r = self.client.get_object_info(obj + '.FAKE') self.assertEqual(r['content-type'], 'application/octet-stream') """create the object""" r = self.client.object_put( obj, data='a', content_type='application/octer-stream', permissions=dict( read=['accX:groupA', 'u1', 'u2'], write=['u2', 'u3']), metadata=dict(key1='val1', key2='val2'), content_encoding='UTF-8', content_disposition='attachment; filename="fname.ext"') self.assertEqual(r.status_code, 201) etag = r.headers['etag'] """Check content-disposition""" r = self.client.get_object_info(obj) self.assertTrue('content-disposition' in r) """Check permissions""" r = self.client.get_object_sharing(obj) self.assertTrue('accx:groupa' in r['read']) self.assertTrue('u1' in r['read']) self.assertTrue('u2' in r['write']) self.assertTrue('u3' in r['write']) """Check metadata""" r = self.client.get_object_meta(obj) self.assertEqual(r['x-object-meta-key1'], 'val1') self.assertEqual(r['x-object-meta-key2'], 'val2') """Check public 
        and if_etag_match"""
        # NOTE(review): this fragment is the tail of a test method whose `def`
        # is above this chunk; it exercises object PUT against a live Pithos
        # server (conditional puts, directories, copy/move sources, manifests).
        r = self.client.object_put(
            obj, if_etag_match=etag, data='b',
            content_type='application/octet-stream', public=True)
        r = self.client.object_get(obj)
        self.assertTrue('x-object-public' in r.headers)
        vers2 = int(r.headers['x-object-version'])
        etag = r.headers['etag']
        self.assertEqual(r.text, 'b')
        """Check if_etag_not_match"""
        r = self.client.object_put(
            obj, if_etag_not_match=etag, data='c',
            content_type='application/octet-stream', success=(201, 412))
        self.assertEqual(r.status_code, 412)
        """Check content_type and content_length"""
        tmpdir = 'dir' + unicode(self.now)
        r = self.client.object_put(
            tmpdir, content_type='application/directory', content_length=0)
        r = self.client.get_object_info(tmpdir)
        self.assertEqual(r['content-type'], 'application/directory')
        """Check copy_from, content_encoding"""
        r = self.client.object_put(
            '%s/%s' % (tmpdir, obj), format=None,
            copy_from='/%s/%s' % (self.client.container, obj),
            content_encoding='application/octet-stream',
            source_account=self.client.account,
            content_length=0, success=201)
        self.assertEqual(r.status_code, 201)
        """Test copy_object for cross-conctainer copy"""
        self.client.copy_object(
            src_container=self.c2,
            src_object='%s/%s' % (tmpdir, obj),
            dst_container=self.c1,
            dst_object=obj)
        self.client.container = self.c1
        r1 = self.client.get_object_info(obj)
        self.client.container = self.c2
        r2 = self.client.get_object_info('%s/%s' % (tmpdir, obj))
        # Same content => same Pithos object hash on both sides of the copy
        self.assertEqual(r1['x-object-hash'], r2['x-object-hash'])
        """Check cross-container copy_from, content_encoding"""
        self.client.container = self.c1
        fromstr = '/%s/%s/%s' % (self.c2, tmpdir, obj)
        r = self.client.object_put(
            obj, format=None, copy_from=fromstr,
            content_encoding='application/octet-stream',
            source_account=self.client.account,
            content_length=0, success=201)
        self.assertEqual(r.status_code, 201)
        r = self.client.get_object_info(obj)
        self.assertEqual(r['etag'], etag)
        """Check source_account"""
        self.client.container = self.c2
        fromstr = '/%s/%s' % (self.c1, obj)
        r = self.client.object_put(
            '%sv2' % obj, format=None, move_from=fromstr,
            content_encoding='application/octet-stream',
            source_account='*****@*****.**',
            content_length=0, success=(201, 403))
        # Moving from a foreign (bogus) account must be forbidden
        self.assertEqual(r.status_code, 403)
        """Check cross-container move_from"""
        self.client.container = self.c1
        r1 = self.client.get_object_info(obj)
        self.client.container = self.c2
        self.client.move_object(
            src_container=self.c1,
            src_object=obj,
            dst_container=self.c2,
            dst_object=obj + 'v0')
        r0 = self.client.get_object_info(obj + 'v0')
        self.assertEqual(r1['x-object-hash'], r0['x-object-hash'])
        """Check move_from"""
        r = self.client.object_put(
            '%sv1' % obj, format=None,
            move_from='/%s/%s' % (self.c2, obj),
            source_version=vers2,
            content_encoding='application/octet-stream',
            content_length=0, success=201)
        """Check manifest"""
        mobj = 'manifest.test'
        txt = ''
        for i in range(10):
            txt += '%s' % i
            r = self.client.object_put(
                '%s/%s' % (mobj, i),
                data='%s' % i,
                content_length=1,
                success=201,
                content_type='application/octet-stream',
                content_encoding='application/octet-stream')
        # The manifest object concatenates all '<mobj>/<i>' parts
        r = self.client.object_put(
            mobj, content_length=0,
            content_type='application/octet-stream',
            manifest='%s/%s' % (self.client.container, mobj))
        r = self.client.object_get(mobj)
        self.assertEqual(r.text, txt)
        """Upload a local file with one request"""
        newf = self.create_large_file(1024 * 10)
        self.client.upload_object('sample.file', newf)
        """Check if file has been uploaded"""
        r = self.client.get_object_info('sample.file')
        self.assertEqual(int(r['content-length']), 10240)
        """Some problems with transfer-encoding?"""

    def test_object_copy(self):
        """Test object_COPY"""
        self._test_0110_object_copy()

    def _test_0110_object_copy(self):
        # Exercises object COPY: metadata merge, permissions, destination
        # account/container, ignore_content_type, conditional etags,
        # source_version / public / format.
        # TODO: check with source_account option
        self.client.container = self.c2
        obj = 'test2'
        data = '{"key1":"val1", "key2":"val2"}'
        r = self.client.object_put(
            '%sorig' % obj,
            content_type='application/octet-stream',
            data=data,
            metadata=dict(mkey1='mval1', mkey2='mval2'),
            permissions=dict(
                read=['accX:groupA', 'u1', 'u2'],
                write=['u2', 'u3']),
            content_disposition='attachment; filename="fname.ext"')
        r = self.client.object_copy(
            '%sorig' % obj,
            destination='/%s/%s' % (self.client.container, obj),
            ignore_content_type=False, content_type='application/json',
            metadata={'mkey2': 'mval2a', 'mkey3': 'mval3'},
            permissions={'write': ['u5', 'accX:groupB']})
        self.assertEqual(r.status_code, 201)
        """Check content-disposition"""
        r = self.client.get_object_info(obj)
        self.assertTrue('content-disposition' in r)
        """Check Metadata"""
        r = self.client.get_object_meta(obj)
        self.assertEqual(r['x-object-meta-mkey1'], 'mval1')
        self.assertEqual(r['x-object-meta-mkey2'], 'mval2a')
        self.assertEqual(r['x-object-meta-mkey3'], 'mval3')
        """Check permissions"""
        r = self.client.get_object_sharing(obj)
        # COPY replaced the sharing: no read perms, new write perms
        self.assertFalse('read' in r or 'u2' in r['write'])
        self.assertTrue('accx:groupb' in r['write'])
        """Check destination account"""
        r = self.client.object_copy(
            obj, destination='/%s/%s' % (self.c1, obj),
            content_encoding='utf8', content_type='application/json',
            destination_account='*****@*****.**',
            success=(201, 404))
        self.assertEqual(r.status_code, 404)
        """Check destination being another container and also
        content_type and content encoding"""
        r = self.client.object_copy(
            obj, destination='/%s/%s' % (self.c1, obj),
            content_encoding='utf8', content_type='application/json')
        self.assertEqual(r.status_code, 201)
        self.assertEqual(
            r.headers['content-type'],
            'application/json; charset=UTF-8')
        """Check ignore_content_type and content_type"""
        r = self.client.object_get(obj)
        etag = r.headers['etag']
        ctype = r.headers['content-type']
        self.assertEqual(ctype, 'application/json')
        r = self.client.object_copy(
            '%sorig' % obj,
            destination='/%s/%s0' % (self.client.container, obj),
            ignore_content_type=True, content_type='application/json')
        self.assertEqual(r.status_code, 201)
        self.assertNotEqual(r.headers['content-type'], 'application/json')
        """Check if_etag_(not_)match"""
        r = self.client.object_copy(
            obj,
            destination='/%s/%s1' % (self.client.container, obj),
            if_etag_match=etag)
        self.assertEqual(r.status_code, 201)
        r = self.client.object_copy(
            obj,
            destination='/%s/%s2' % (self.client.container, obj),
            if_etag_not_match='lalala')
        self.assertEqual(r.status_code, 201)
        vers2 = r.headers['x-object-version']
        """Check source_version, public and format """
        r = self.client.object_copy(
            '%s2' % obj,
            destination='/%s/%s3' % (self.client.container, obj),
            source_version=vers2, format='xml', public=True)
        self.assertEqual(r.status_code, 201)
        self.assertTrue(r.headers['content-type'].index('xml') > 0)
        r = self.client.get_object_info(obj + '3')
        self.assertTrue('x-object-public' in r)

    def test_object_move(self):
        """Test object_MOVE"""
        self._test_0120_object_move()

    def _test_0120_object_move(self):
        # Exercises object MOVE: metadata merge, permissions, destination
        # account/container, ignore_content_type, conditional etags,
        # public / format.
        self.client.container = self.c2
        obj = 'test2'
        data = '{"key1": "val1", "key2": "val2"}'
        r = self.client.object_put(
            '%sorig' % obj,
            content_type='application/octet-stream',
            data=data,
            metadata=dict(mkey1='mval1', mkey2='mval2'),
            permissions=dict(
                read=['accX:groupA', 'u1', 'u2'],
                write=['u2', 'u3']))
        r = self.client.object_move(
            '%sorig' % obj,
            destination='/%s/%s' % (self.client.container, obj),
            ignore_content_type=False, content_type='application/json',
            metadata=dict(mkey2='mval2a', mkey3='mval3'),
            permissions=dict(write=['u5', 'accX:groupB']))
        self.assertEqual(r.status_code, 201)
        """Check Metadata"""
        r = self.client.get_object_meta(obj)
        self.assertEqual(r['x-object-meta-mkey1'], 'mval1')
        self.assertEqual(r['x-object-meta-mkey2'], 'mval2a')
        self.assertEqual(r['x-object-meta-mkey3'], 'mval3')
        """Check permissions"""
        r = self.client.get_object_sharing(obj)
        self.assertFalse('read' in r)
        self.assertTrue('u5' in r['write'])
        self.assertTrue('accx:groupb' in r['write'])
        """Check destination account"""
        r = self.client.object_move(
            obj, destination='/%s/%s' % (self.c1, obj),
            content_encoding='utf8', content_type='application/json',
            destination_account='*****@*****.**',
            success=(201, 404))
        self.assertEqual(r.status_code, 404)
        """Check destination being another container and also
        content_type, content_disposition and content encoding"""
        r = self.client.object_move(
            obj, destination='/%s/%s' % (self.c1, obj),
            content_encoding='utf8', content_type='application/json',
            content_disposition='attachment; filename="fname.ext"')
        self.assertEqual(r.status_code, 201)
        self.assertEqual(
            r.headers['content-type'],
            'application/json; charset=UTF-8')
        self.client.container = self.c1
        r = self.client.get_object_info(obj)
        self.assertTrue('content-disposition' in r)
        self.assertTrue('fname.ext' in r['content-disposition'])
        etag = r['etag']
        ctype = r['content-type']
        self.assertEqual(ctype, 'application/json')
        """Check ignore_content_type and content_type"""
        r = self.client.object_move(
            obj, destination='/%s/%s' % (self.c2, obj),
            ignore_content_type=True, content_type='application/json')
        self.assertEqual(r.status_code, 201)
        self.assertNotEqual(r.headers['content-type'], 'application/json')
        """Check if_etag_(not_)match"""
        self.client.container = self.c2
        r = self.client.object_move(
            obj,
            destination='/%s/%s0' % (self.client.container, obj),
            if_etag_match=etag)
        self.assertEqual(r.status_code, 201)
        r = self.client.object_move(
            '%s0' % obj,
            destination='/%s/%s1' % (self.client.container, obj),
            if_etag_not_match='lalala')
        self.assertEqual(r.status_code, 201)
        """Check public and format """
        r = self.client.object_move(
            '%s1' % obj,
            destination='/%s/%s2' % (self.client.container, obj),
            format='xml', public=True)
        self.assertEqual(r.status_code, 201)
        self.assertTrue(r.headers['content-type'].index('xml') > 0)
        r = self.client.get_object_info(obj + '2')
        self.assertTrue('x-object-public' in r)

    def test_object_post(self):
        """Test object_POST"""
        self._test_0130_object_post()

    def _test_0130_object_post(self):
        # Exercises object POST: append/overwrite/truncate updates, metadata,
        # sharing, publish/unpublish, conditional etags, source_version,
        # content_disposition and manifests.
        self.client.container = self.c2
        obj = 'test2'
        """create a filesystem file"""
        self.files.append(NamedTemporaryFile())
        newf = self.files[-1]
        newf.writelines([
            'ello!\n',
            'This is a test line\n',
            'inside a test file\n'])
        """create a file on container"""
        r = self.client.object_put(
            obj, content_type='application/octet-stream', data='H',
            metadata=dict(mkey1='mval1', mkey2='mval2'),
            permissions=dict(
                read=['accX:groupA', 'u1', 'u2'],
                write=['u2', 'u3']))
        """Append livetest update, content_[range|type|length]"""
        newf.seek(0)
        # 'H' + 'ello!\n...' => 'Hello!...'
        self.client.append_object(obj, newf)
        r = self.client.object_get(obj)
        self.assertTrue(r.text.startswith('Hello!'))
        """Overwrite livetest update,
        content_type, content_length, content_range
        """
        newf.seek(0)
        r = self.client.overwrite_object(obj, 0, 10, newf)
        r = self.client.object_get(obj)
        self.assertTrue(r.text.startswith('ello!'))
        """Truncate livetest update,
        content_range, content_type, object_bytes and source_object"""
        r = self.client.truncate_object(obj, 5)
        r = self.client.object_get(obj)
        self.assertEqual(r.text, 'ello!')
        """Check metadata"""
        self.client.set_object_meta(obj, {'mkey2': 'mval2a', 'mkey3': 'mval3'})
        r = self.client.get_object_meta(obj)
        self.assertEqual(r['x-object-meta-mkey1'], 'mval1')
        self.assertEqual(r['x-object-meta-mkey2'], 'mval2a')
        self.assertEqual(r['x-object-meta-mkey3'], 'mval3')
        self.client.del_object_meta(obj, 'mkey1')
        r = self.client.get_object_meta(obj)
        self.assertFalse('x-object-meta-mkey1' in r)
        """Check permissions"""
        self.client.set_object_sharing(
            obj, read_permission=['u4', 'u5'], write_permission=['u4'])
        r = self.client.get_object_sharing(obj)
        self.assertTrue('read' in r)
        self.assertTrue('u5' in r['read'])
        self.assertTrue('write' in r)
        self.assertTrue('u4' in r['write'])
        self.client.del_object_sharing(obj)
        r = self.client.get_object_sharing(obj)
        self.assertTrue(len(r) == 0)
        """Check publish"""
        self.client.publish_object(obj)
        r = self.client.get_object_info(obj)
        self.assertTrue('x-object-public' in r)
        self.client.unpublish_object(obj)
        r = self.client.get_object_info(obj)
        self.assertFalse('x-object-public' in r)
        """Check if_etag_(not)match"""
        etag = r['etag']
        r = self.client.object_post(
            obj,
            update=True,
            public=True,
            if_etag_not_match=etag,
            success=(412, 202, 204))
        #self.assertEqual(r.status_code, 412)
        r = self.client.object_post(
            obj, update=True, public=True,
            if_etag_match=etag,
            content_encoding='application/json')
        r = self.client.get_object_info(obj)
        helloVersion = r['x-object-version']
        self.assertTrue('x-object-public' in r)
        self.assertEqual(r['content-encoding'], 'application/json')
        """Check source_version and source_account and content_disposition"""
        r = self.client.object_post(
            obj,
            update=True,
            content_type='application/octet-srteam',
            content_length=5,
            content_range='bytes 1-5/*',
            source_object='/%s/%s' % (self.c2, obj),
            source_account='*****@*****.**',
            source_version=helloVersion,
            data='12345',
            success=(403, 202, 204))
        # Foreign (bogus) source account must be rejected
        self.assertEqual(r.status_code, 403)
        r = self.client.object_post(
            obj, update=True,
            content_type='application/octet-srteam',
            content_length=5,
            content_range='bytes 1-5/*',
            source_object='/%s/%s' % (self.c2, obj),
            source_account=self.client.account,
            source_version=helloVersion,
            data='12345',
            content_disposition='attachment; filename="fname.ext"')
        r = self.client.object_get(obj)
        self.assertEqual(r.text, 'eello!')
        self.assertTrue('content-disposition' in r.headers)
        self.assertTrue('fname.ext' in r.headers['content-disposition'])
        """Check manifest"""
        mobj = 'manifest.test'
        txt = ''
        for i in range(10):
            txt += '%s' % i
            r = self.client.object_put(
                '%s/%s' % (mobj, i),
                data='%s' % i,
                content_length=1,
                success=201,
                content_encoding='application/octet-stream',
                content_type='application/octet-stream')
        self.client.create_object_by_manifestation(
            mobj, content_type='application/octet-stream')
        r = self.client.object_post(
            mobj, manifest='%s/%s' % (self.client.container, mobj))
        r = self.client.object_get(mobj)
        self.assertEqual(r.text, txt)
        """We need to check transfer_encoding """

    def test_object_delete(self):
        """Test object_DELETE"""
        self._test_0140_object_delete()

    def _test_0140_object_delete(self):
        # Exercises object DELETE, with and without a (bogus) `until` stamp.
        self.client.container = self.c2
        obj = 'test2'
        """create a file on container"""
        r = self.client.object_put(
            obj, content_type='application/octet-stream', data='H',
            metadata=dict(mkey1='mval1', mkey2='mval2'),
            permissions=dict(
                read=['accX:groupA', 'u1', 'u2'],
                write=['u2', 'u3']))
        """Check with false until"""
        # `until` in the far past: the object must survive the delete
        r = self.client.object_delete(obj, until=1000000)
        r = self.client.object_get(obj, success=(200, 404))
        self.assertEqual(r.status_code, 200)
        """Check normal case"""
        r = self.client.object_delete(obj)
        self.assertEqual(r.status_code, 204)
        r = self.client.object_get(obj, success=(200, 404))
        self.assertEqual(r.status_code, 404)

    def create_large_file(self, size):
        """Create a large file at fs"""
        # NOTE: Python 2 code — bare `print` and integer division below.
        print
        self.files.append(NamedTemporaryFile())
        f = self.files[-1]
        Ki = size / 8
        bytelist = [b * Ki for b in range(size / Ki)]

        def append2file(step):
            f.seek(step)
            f.write(urandom(Ki))
            f.flush()
        self.do_with_progress_bar(
            append2file,
            ' create rand file %s (%sB): ' % (f.name, size),
            bytelist)
        f.seek(0)
        return f

    def create_boring_file(self, num_of_blocks):
        """Create a file with some blocks being the same"""
        self.files.append(NamedTemporaryFile())
        tmpFile = self.files[-1]
        block_size = 4 * 1024 * 1024
        print('\n\tCreate boring file of %s blocks' % num_of_blocks)
        chars = chargen()
        while num_of_blocks:
            # Write up to 3 identical blocks per generated character
            fslice = 3 if num_of_blocks > 3 else num_of_blocks
            tmpFile.write(fslice * block_size * chars.next())
            num_of_blocks -= fslice
        print('\t\tDone')
        tmpFile.seek(0)
        return tmpFile
class WorkingCopy:
    """Represents a local copy of a directory cloned from the Pithos+
    server."""
    # HTTP status returned by the server when if_modified_since is not met
    HTTP_NOT_MODIFIED = 304

    @staticmethod
    def is_folder(type):
        # A remote object is a "folder" iff its content type is one of these
        return type in ['application/directory', 'application/folder']

    def destroy(self):
        # Drop the local metadata file; the working copy is then uninitialized
        self.delete_meta_file()

    def download(self, name, version):
        """Download remote object `name` into the matching local path.

        NOTE(review): `version` is accepted but never used — presumably the
        intent was a versioned download; verify against callers.
        NOTE(review): on download failure the early `return` skips f.close(),
        leaking the file handle (and leaving a truncated local file).
        """
        remotepath = self.folder + '/' + name
        path = os.path.join(self.local, os.path.join(*name.split('/')))
        try:
            f = open(path, 'wb+')
        except:
            # TODO: make this an exception?
            logger.error("Failed to write to local file '%s'." % path)
            return
        try:
            self.client.download_object(remotepath, f)
        except ClientError:
            # TODO: make this an exception?
            logger.error("Failed to download %s." % remotepath)
            return
        f.close()
        logger.info("Downloaded remote file '%s' into %s (%i bytes)" %
                    (remotepath, path, os.stat(path).st_size))

    def upload(self, destination, source):
        """Upload local file `source` to remote path `destination`
        (relative to the working folder)."""
        f = open(source, 'r')
        self.client.upload_object(self.folder + '/' + destination, f)
        f.close()
        logger.info("Uploaded local file '%s' (%i bytes)" %
                    (source, os.stat(source).st_size))

    def remote_recursive_mkdir(self, name):
        """Create remote directory `name`, creating each ancestor first."""
        logger.info('Creating remote directory "%s" recursively.', name)
        parts = name.split('/')
        for i, _ in enumerate(parts):
            dir = '/'.join(parts[0:i + 1])
            logger.debug('Creating remote directory "%s".' % dir)
            self.client.create_directory(dir)

    def remote_mkdir(self, name):
        """Create a single remote directory under the working folder."""
        dir = self.folder + '/' + name
        logger.info('Creating remote directory "%s".' % dir)
        self.client.create_directory(dir)
        logger.info('Created remote directory "%s".' % dir)

    def remote_recursive_delete_contents(self, name):
        """Delete every remote object whose name is under `name`/."""
        obj_list = self.client.list_objects()
        # delete all the folders' contents
        for obj in obj_list:
            if obj['name'][0:len(name + '/')] == name + '/':
                self.client.object_delete(obj['name'])
                logger.info("Deleted remote object '%s'" % (obj['name']))
        logger.info("Emptied remote directory '%s'" % name)

    def __init__(self, syncer, local, folder = None):
        # syncer: carries url/token/account/container credentials
        # local:  local filesystem root of this working copy
        # folder: remote folder name; None means "already initialized,
        #         read it from the local meta file"
        self.syncer = syncer
        self.local = local
        self.meta_file = meta.LocalMetaFile(self.local)
        self.lock = lock.Lock(self)
        self.client = PithosClient(syncer.url, syncer.token, syncer.account,
                                   syncer.container)
        if folder is None:
            # working copy already init'ed
            self.meta_file.load()
            self.folder = self.meta_file.remote_dir
        else:
            # working copy not init'ed
            # the caller must call .init() or .clone() on it
            self.folder = folder

    def init(self):
        """Builds a new local working copy by initializing
           a new empty remote folder"""
        init(self)

    def clone(self):
        """Builds a new local working copy by cloning a folder
           from an already inited remote folder in a container."""
        clone(self)

    def local_to_remote_path(self, root, name):
        """Map a local (root, name) pair to the remote slash-separated path,
        relative to the working copy root."""
        path = os.path.join(root, name)
        native_path = path[len(self.local + os.sep):]
        remote_path = native_path.replace(os.sep, '/')
        return remote_path

    def list_objects_of_interest(self):
        """List remote objects under the working folder.

        Returns {'modified': False} when the server reports 304, else
        {'modified': True, 'list': {name: {name, is_folder, version}}}.
        Raises FileNotFoundError if the folder object itself is missing.
        """
        logger.debug('Listing objects in remote folder "%s"', self.folder)
        ret = {}
        # TODO: Use meta-file last pull date for fast pull
        if_modified_since = 'Thu, 01 Jan 1970 00:00:00 GMT'
        response = self.client.container_get(prefix=self.folder,
                                             if_modified_since=if_modified_since)
        if response.status == self.HTTP_NOT_MODIFIED:
            logger.debug('Received HTTP_NOT_MODIFIED, listing suppressed.')
            return { 'modified': False }
        obj_list = response.json
        found = False
        for obj in obj_list:
            logger.debug('Found remote object "%s" with content type "%s" and version %i.',
                         obj['name'], obj['content_type'], obj['x_object_version'])
            if obj['name'] == self.folder:
                # the folder object itself is not part of the result set
                found = True
                continue
            name = obj['name'][len(self.folder + '/'):]
            is_folder = self.is_folder(obj['content_type'])
            version = obj['x_object_version']
            ret[name] = { 'name': name, 'is_folder': is_folder, 'version': version }
        if not found:
            logger.debug('Parent directory "%s" requested not found.', self.folder)
            raise FileNotFoundError
        return { 'modified': True, 'list': ret }

    def push(self):
        """Upload local state to the server and delete remote leftovers.

        NOTE(review): list_objects_of_interest() returns a dict of the form
        {'modified': ..., 'list': ...}, yet the loop below iterates it
        directly and indexes obj['name']/obj['content_type'] — iterating a
        dict yields its keys (strings), so this looks broken; presumably it
        should iterate obj_list['list'] entries (which carry 'is_folder',
        not 'content_type'). TODO confirm intended structure before fixing.
        """
        # self.remote_recursive_delete_contents(self.folder)
        obj_list = self.list_objects_of_interest()
        server_side_files = {}
        server_side_folders = {}
        constant_part = self.folder + '/'
        for obj in obj_list:
            file = obj['name'][len(constant_part):]
            type = obj['content_type']
            if self.is_folder(type):
                server_side_folders[file] = True
            else:
                server_side_files[file] = True
        # TODO: somehow mark modified files as dirty and only check their hashes
        for root, dirs, files in os.walk(self.local, topdown=False):
            for name in files:
                if root == self.local and name == '.pithos':
                    # never push the local metadata file
                    continue
                try:
                    # file already exists
                    del server_side_files[self.local_to_remote_path(root, name)]
                except:
                    pass
                # kamaki library and Pithos will take care not to upload the same file twice
                self.upload(self.local_to_remote_path(root, name),
                            os.path.join(root, name))
            for name in dirs:
                try:
                    # directory already exists
                    del server_side_folders[self.local_to_remote_path(root, name)]
                except:
                    self.remote_mkdir(self.local_to_remote_path(root, name))
        # whatever remains server-side has no local counterpart: delete it
        # (Python 2: dict.keys() returns lists, so `+` concatenates)
        for file in server_side_files.keys() + server_side_folders.keys():
            self.client.object_delete(self.folder + '/' + file)
        logger.info("Push successful.")

    def list_local_objects(self):
        """Walk the local tree and return
        {'files': {remote_path: True}, 'folders': {remote_path: True}}."""
        client_side_files = {}
        client_side_folders = {}
        # TODO: Push from the server to the client, or keep dirty state on the server
        for root, dirs, files in os.walk(self.local, topdown=False):
            for name in files:
                if root == self.local and meta.LocalMetaFile.is_meta_file(name):
                    continue
                client_side_files[self.local_to_remote_path(root, name)] = True
            for name in dirs:
                client_side_folders[self.local_to_remote_path(root, name)] = True
        return { 'files': client_side_files, 'folders': client_side_folders }

    def pull(self):
        # Delegates to the module-level pull() helper
        return pull(self)
# # The views and conclusions contained in the software and # documentation are those of the authors and should not be # interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) user = astakos.authenticate() uuid = user["access"]["user"]["id"] pithos.account = uuid # Get the project containers we care for containers = filter(lambda c: c["name"] in ("pithos", "images"), pithos.list_containers()) # Construct dict of the form {CONTAINER_NAME: PROJECT_ID, ...} projects = dict([(c["name"], c["x_container_policy"]["project"]) for c in containers]) # Check projects and reassign if needed if projects["pithos"] != projects["images"]:
class Kamaki(object):
    """Wrapper class for the ./kamaki library"""

    @staticmethod
    def get_default_cloud_name():
        """Returns the name of the default cloud"""
        clouds = config.keys('cloud')
        default = config.get('global', 'default_cloud')
        if not default:
            # no explicit default: fall back to the first configured cloud
            return clouds[0] if len(clouds) else ""
        return default if default in clouds else ""

    @staticmethod
    def set_default_cloud(name):
        """Sets a cloud account as default"""
        config.set('global', 'default_cloud', name)
        config.write()

    @staticmethod
    def get_clouds():
        """Returns the list of available clouds"""
        names = config.keys('cloud')
        clouds = {}
        for name in names:
            clouds[name] = config.get('cloud', name)
        return clouds

    @staticmethod
    def get_cloud_by_name(name):
        """Returns a dictionary with cloud info"""
        return config.get('cloud', name)

    @staticmethod
    def save_cloud(name, url, token, description=""):
        """Save a new cloud account"""
        cloud = {'url': url, 'token': token}
        if len(description):
            cloud['description'] = description
        config.set('cloud', name, cloud)
        # Make the saved cloud the default one
        config.set('global', 'default_cloud', name)
        config.write()

    @staticmethod
    def remove_cloud(name):
        """Deletes an existing cloud from the ./Kamaki configuration file"""
        config.remove_option('cloud', name)
        config.write()

    @staticmethod
    def create_account(url, token):
        """Given a valid (URL, tokens) pair this method returns an Astakos
        client instance
        """
        client = AstakosClient(url, token)
        try:
            client.authenticate()
        except ClientError:
            # invalid credentials/endpoint: signal failure with None
            return None
        return client

    @staticmethod
    def get_account(cloud_name):
        """Given a saved cloud name this method returns an Astakos client
        instance
        """
        cloud = config.get('cloud', cloud_name)
        assert cloud, "cloud: `%s' does not exist" % cloud_name
        assert 'url' in cloud, "url attr is missing in %s" % cloud_name
        assert 'token' in cloud, "token attr is missing in %s" % cloud_name
        return Kamaki.create_account(cloud['url'], cloud['token'])

    def __init__(self, account, output):
        """Create a Kamaki instance"""
        self.account = account
        self.out = output
        # Pre-wired clients against the account's service catalog;
        # Pithos starts out pointed at the module-level CONTAINER
        self.pithos = PithosClient(
            self.account.get_service_endpoints('object-store')['publicURL'],
            self.account.token,
            self.account.user_info()['id'],
            CONTAINER)
        self.image = ImageClient(
            self.account.get_service_endpoints('image')['publicURL'],
            self.account.token)

    def upload(self, file_obj, size=None, remote_path=None, container=None,
               content_type=None, hp=None, up=None):
        """Upload a file to Pithos+

        hp/up are progress-bar descriptors handed to self.out; the client's
        container is temporarily switched to `container` and always restored
        to CONTAINER afterwards. Returns the pithos:// URL of the object.
        """
        path = basename(file_obj.name) if remote_path is None else remote_path
        if container is None:
            container = CONTAINER
        try:
            self.pithos.create_container(container)
        except ClientError as e:
            if e.status != 202:  # Ignore container already exists errors
                raise e
        hash_cb = self.out.progress_generator(hp) if hp is not None else None
        upload_cb = self.out.progress_generator(up) if up is not None else None
        try:
            self.pithos.container = container
            self.pithos.upload_object(path, file_obj, size=size,
                                      hash_cb=hash_cb, upload_cb=upload_cb,
                                      content_type=content_type)
        finally:
            self.pithos.container = CONTAINER
        return "pithos://%s/%s/%s" % (self.account.user_info()['id'],
                                      container, path)

    def register(self, name, location, metadata, public=False):
        """Register an image with Cyclades"""
        is_public = 'true' if public else 'false'
        params = {'is_public': is_public, 'disk_format': 'diskdump'}
        return self.image.register(name, location, params, metadata)

    def share(self, location):
        """Share this file with all the users"""
        self.pithos.set_object_sharing(location, "*")

    def object_exists(self, container, location):
        """Check if an object exists in Pithos+"""
        try:
            self.pithos.container = container
            self.pithos.get_object_info(location)
        except ClientError as e:
            # restore the default container on the error path too
            self.pithos.container = CONTAINER
            if e.status == 404:  # Object not found error
                return False
            else:
                raise
        self.pithos.container = CONTAINER
        return True
class WorkingCopy:
    """Represents a local copy of a directory cloned from the Pithos+
    server."""
    # HTTP status returned by the server when if_modified_since is not met
    HTTP_NOT_MODIFIED = 304

    @staticmethod
    def is_folder(type):
        # A remote object is a "folder" iff its content type is one of these
        return type in ['application/directory', 'application/folder']

    def destroy(self):
        # Drop the local metadata file; the working copy is then uninitialized
        self.delete_meta_file()

    def download(self, name, version):
        """Download remote object `name` into the matching local path.

        NOTE(review): `version` is accepted but never used — TODO confirm.
        NOTE(review): on download failure the early `return` skips f.close(),
        leaking the file handle (and leaving a truncated local file).
        """
        remotepath = self.folder + '/' + name
        path = os.path.join(self.local, os.path.join(*name.split('/')))
        try:
            f = open(path, 'wb+')
        except:
            # TODO: make this an exception?
            logger.error("Failed to write to local file '%s'." % path)
            return
        try:
            self.client.download_object(remotepath, f)
        except ClientError:
            # TODO: make this an exception?
            logger.error("Failed to download %s." % remotepath)
            return
        f.close()
        logger.info("Downloaded remote file '%s' into %s (%i bytes)" %
                    (remotepath, path, os.stat(path).st_size))

    def upload(self, destination, source):
        """Upload local file `source` to remote path `destination`
        (relative to the working folder)."""
        f = open(source, 'r')
        self.client.upload_object(self.folder + '/' + destination, f)
        f.close()
        logger.info("Uploaded local file '%s' (%i bytes)" %
                    (source, os.stat(source).st_size))

    def remote_recursive_mkdir(self, name):
        """Create remote directory `name`, creating each ancestor first."""
        logger.info('Creating remote directory "%s" recursively.', name)
        parts = name.split('/')
        for i, _ in enumerate(parts):
            dir = '/'.join(parts[0:i + 1])
            logger.debug('Creating remote directory "%s".' % dir)
            self.client.create_directory(dir)

    def remote_mkdir(self, name):
        """Create a single remote directory under the working folder."""
        dir = self.folder + '/' + name
        logger.info('Creating remote directory "%s".' % dir)
        self.client.create_directory(dir)
        logger.info('Created remote directory "%s".' % dir)

    def remote_recursive_delete_contents(self, name):
        """Delete every remote object whose name is under `name`/."""
        obj_list = self.client.list_objects()
        # delete all the folders' contents
        for obj in obj_list:
            if obj['name'][0:len(name + '/')] == name + '/':
                self.client.object_delete(obj['name'])
                logger.info("Deleted remote object '%s'" % (obj['name']))
        logger.info("Emptied remote directory '%s'" % name)

    def __init__(self, syncer, local, folder=None):
        # syncer: carries url/token/account/container credentials
        # local:  local filesystem root of this working copy
        # folder: remote folder name; None means "already initialized,
        #         read it from the local meta file"
        self.syncer = syncer
        self.local = local
        self.meta_file = meta.LocalMetaFile(self.local)
        self.lock = lock.Lock(self)
        self.client = PithosClient(syncer.url, syncer.token, syncer.account,
                                   syncer.container)
        if folder is None:
            # working copy already init'ed
            self.meta_file.load()
            self.folder = self.meta_file.remote_dir
        else:
            # working copy not init'ed
            # the caller must call .init() or .clone() on it
            self.folder = folder

    def init(self):
        """Builds a new local working copy by initializing
           a new empty remote folder"""
        init(self)

    def clone(self):
        """Builds a new local working copy by cloning a folder
           from an already inited remote folder in a container."""
        clone(self)

    def local_to_remote_path(self, root, name):
        """Map a local (root, name) pair to the remote slash-separated path,
        relative to the working copy root."""
        path = os.path.join(root, name)
        native_path = path[len(self.local + os.sep):]
        remote_path = native_path.replace(os.sep, '/')
        return remote_path

    def list_objects_of_interest(self):
        """List remote objects under the working folder.

        Returns {'modified': False} when the server reports 304, else
        {'modified': True, 'list': {name: {name, is_folder, version}}}.
        Raises FileNotFoundError if the folder object itself is missing.
        """
        logger.debug('Listing objects in remote folder "%s"', self.folder)
        ret = {}
        # TODO: Use meta-file last pull date for fast pull
        if_modified_since = 'Thu, 01 Jan 1970 00:00:00 GMT'
        response = self.client.container_get(
            prefix=self.folder, if_modified_since=if_modified_since)
        if response.status == self.HTTP_NOT_MODIFIED:
            logger.debug('Received HTTP_NOT_MODIFIED, listing suppressed.')
            return {'modified': False}
        obj_list = response.json
        found = False
        for obj in obj_list:
            logger.debug(
                'Found remote object "%s" with content type "%s" and version %i.',
                obj['name'], obj['content_type'], obj['x_object_version'])
            if obj['name'] == self.folder:
                # the folder object itself is not part of the result set
                found = True
                continue
            name = obj['name'][len(self.folder + '/'):]
            is_folder = self.is_folder(obj['content_type'])
            version = obj['x_object_version']
            ret[name] = {
                'name': name, 'is_folder': is_folder, 'version': version
            }
        if not found:
            logger.debug('Parent directory "%s" requested not found.',
                         self.folder)
            raise FileNotFoundError
        return {'modified': True, 'list': ret}

    def push(self):
        """Upload local state to the server and delete remote leftovers.

        NOTE(review): list_objects_of_interest() returns a dict of the form
        {'modified': ..., 'list': ...}, yet the loop below iterates it
        directly and indexes obj['name']/obj['content_type'] — iterating a
        dict yields its keys (strings), so this looks broken; presumably it
        should iterate obj_list['list'] entries (which carry 'is_folder',
        not 'content_type'). TODO confirm intended structure before fixing.
        """
        # self.remote_recursive_delete_contents(self.folder)
        obj_list = self.list_objects_of_interest()
        server_side_files = {}
        server_side_folders = {}
        constant_part = self.folder + '/'
        for obj in obj_list:
            file = obj['name'][len(constant_part):]
            type = obj['content_type']
            if self.is_folder(type):
                server_side_folders[file] = True
            else:
                server_side_files[file] = True
        # TODO: somehow mark modified files as dirty and only check their hashes
        for root, dirs, files in os.walk(self.local, topdown=False):
            for name in files:
                if root == self.local and name == '.pithos':
                    # never push the local metadata file
                    continue
                try:
                    # file already exists
                    del server_side_files[self.local_to_remote_path(
                        root, name)]
                except:
                    pass
                # kamaki library and Pithos will take care not to upload the same file twice
                self.upload(self.local_to_remote_path(root, name),
                            os.path.join(root, name))
            for name in dirs:
                try:
                    # directory already exists
                    del server_side_folders[self.local_to_remote_path(
                        root, name)]
                except:
                    self.remote_mkdir(self.local_to_remote_path(root, name))
        # whatever remains server-side has no local counterpart: delete it
        # (Python 2: dict.keys() returns lists, so `+` concatenates)
        for file in server_side_files.keys() + server_side_folders.keys():
            self.client.object_delete(self.folder + '/' + file)
        logger.info("Push successful.")

    def list_local_objects(self):
        """Walk the local tree and return
        {'files': {remote_path: True}, 'folders': {remote_path: True}}."""
        client_side_files = {}
        client_side_folders = {}
        # TODO: Push from the server to the client, or keep dirty state on the server
        for root, dirs, files in os.walk(self.local, topdown=False):
            for name in files:
                if root == self.local and meta.LocalMetaFile.is_meta_file(
                        name):
                    continue
                client_side_files[self.local_to_remote_path(root, name)] = True
            for name in dirs:
                client_side_folders[self.local_to_remote_path(root, name)] = True
        return {'files': client_side_files, 'folders': client_side_folders}

    def pull(self):
        # Delegates to the module-level pull() helper
        return pull(self)
# # The views and conclusions contained in the software and # documentation are those of the authors and should not be # interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) user = astakos.authenticate() uuid = user["access"]["user"]["id"] pithos.account = uuid # Get the project containers we care for containers = filter( lambda c: c["name"] in ("pithos", "images"), pithos.list_containers()) # Construct dict of the form {CONTAINER_NAME: PROJECT_ID, ...} projects = dict([( c["name"], c["x_container_policy"]["project"]) for c in containers])
from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient from kamaki.clients.image import ImageClient # Initliaze astakos client AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) user = astakos.authenticate() uuid = user["access"]["user"]["id"] # Initliaze Pithos service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) pithos.account = uuid # Initialize Image service_type = ImageClient.service_type endpoint = astakos.get_endpoint_url(service_type) image = ImageClient(endpoint, TOKEN) # Our data local = "my-image.diskdump" local_meta = "my-image.diskdump.meta" with open(local_meta) as f: meta = json.load(f) # Upload the image and meta files pithos.container = "images"
from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) # Our data container_name = "course_container" user = astakos.authenticate() uuid = user["access"]["user"]["id"] # Initialize a Pithos client service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN, uuid, container_name) # To what project is this container assigned to? container = pithos.get_container_info(container_name) container_project = container["x-container-policy-project"] # Get quota info quotas = astakos.get_quotas() container_quotas = quotas[container_project]["pithos.diskspace"] usage, limit = container_quotas["usage"], container_quotas["limit"] if usage < limit: print "Quotas for container {0} are OK".format(container_name) else: # We need to reassign to another project new_project = "a9f87654-3af2-1e09-8765-43a2df1098765"
from kamaki.clients.pithos import PithosClient, ClientError import logging import threading import os container = 'pithos' # url = 'https://pithos.okeanos.io/v1' # account = 'd8e6f8bb-619b-4ce6-8903-89fabdca024d' url = 'https://pithos.okeanos.grnet.gr/v1' account = '60dd7c4c-71ff-4156-b387-aaeea33763cb' token = os.getenv('ASTAKOS_TOKEN', '') client = PithosClient(url, token, account, container) client.create_directory('break-pithos') N = 10 K = 2 def parallel(j): for i in range(0, N): print '#%i DELETE' % j try: client.object_delete('break-pithos') except: pass print '#%i PUT' % j client.create_directory('break-pithos')
# The views and conclusions contained in the software and # documentation are those of the authors and should not be # interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient # Initliaze astakos client AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) user = astakos.authenticate() uuid = user["access"]["user"]["id"] pithos.account = uuid # Download from container "pithos" pithos.container = "pithos" source = "my-linux-distro.diskdump" target = "local.diskdump" with open(target, "rb+") as f: pithos.download_object(source, f) # Upload to container "images" pithos.container = "images" with open(target) as f:
# # The views and conclusions contained in the software and # documentation are those of the authors and should not be # interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) user = astakos.authenticate() uuid = user["access"]["user"]["id"] pithos.account = uuid # Move to "trash" object_ = "my-linux-distro.diskdump" pithos.move_object("pithos", object_, "trash") # Delete from "images" pithos.container = "images" pithos.del_object(object_) # Copy from "trash" to "images" new_object = "recovered.diskdump"
# documentation are those of the authors and should not be # interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient # initliaze astakos client AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) # Initliaze pithos service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) # Set user UUID and Container (optional) user = astakos.authenticate() uuid = user["access"]["user"]["id"] pithos.account = uuid containers = pithos.list_containers() for container in containers: pithos.container = container["name"] project = container["x_container_policy"]["project"] print "Listing contents of {container} (project: {project})".format( container=pithos.container, project=project) for object_ in pithos.list_objects():
# interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient from kamaki.clients.image import ImageClient AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) user = astakos.authenticate() uuid = user["access"]["user"]["id"] service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) pithos.account = uuid service_type = ImageClient.service_type endpoint = astakos.get_endpoint_url(service_type) image = ImageClient(endpoint, TOKEN) # Find the image by id image_id = "my-image-id" my_image = image.get_meta(image_id) # Check if it is my image if my_image["owner"] == uuid: image.unregister(image_id) # Delete the image files
class Image(livetest.Generic):
    """Live tests for the plain Image (glance-style) client.

    Talks to a real cloud configured via the test config (`self[...]`
    lookups); uploads a disk image to Pithos so register/unregister and
    member-management calls have something to operate on.
    """

    def setUp(self):
        # Timestamp used to derive unique names for this test run.
        self.now = time.mktime(time.gmtime())
        self.cloud = 'cloud.%s' % self['testcloud']
        aurl, self.token = self[self.cloud, 'url'], self[self.cloud, 'token']
        self.auth_base = AstakosCachedClient(aurl, self.token)
        self.imgname = 'img_%s' % self.now
        # Image client at the 'image' service endpoint.
        url = self.auth_base.get_service_endpoints('image')['publicURL']
        self.token = self.auth_base.token
        self.client = ImageClient(url, self.token)
        # Cyclades client is used for image deletion/listing in teardown.
        cyclades_url = self.auth_base.get_service_endpoints(
            'compute')['publicURL']
        self.cyclades = CycladesClient(cyclades_url, self.token)
        # name -> {name, id} of images registered by this run.
        self._imglist = {}
        # name -> full register() response.
        self._imgdetails = {}

    def test_000(self):
        # Ensure an image exists before the generic smoke test runs.
        self._prepare_img()
        super(self.__class__, self).test_000()

    def _prepare_img(self):
        """Upload a local image file to Pithos and register it."""
        f = open(self['image', 'local_path'], 'rb')
        (token, uuid) = (self.token, self.auth_base.user_term('id'))
        purl = self.auth_base.get_service_endpoints(
            'object-store')['publicURL']
        from kamaki.clients.pithos import PithosClient
        self.pithcli = PithosClient(purl, token, uuid)
        cont = 'cont_%s' % self.now
        self.pithcli.container = cont
        self.obj = 'obj_%s' % self.now
        print('\t- Create container %s on Pithos server' % cont)
        self.pithcli.container_put()
        # pithos://<uuid>/<container>/<object> is the registry location.
        self.location = 'pithos://%s/%s/%s' % (uuid, cont, self.obj)
        print('\t- Upload an image at %s...\n' % self.location)
        self.pithcli.upload_object(self.obj, f)
        print('\t- ok')
        f.close()
        r = self.client.register(
            self.imgname, self.location, params=dict(is_public=True))
        self._imglist[self.imgname] = dict(
            name=r['name'], id=r['id'])
        self._imgdetails[self.imgname] = r

    def tearDown(self):
        # Remove every image this run registered, then the upload container.
        for img in self._imglist.values():
            print('\tDeleting image %s' % img['id'])
            self.cyclades.delete_image(img['id'])
        if hasattr(self, 'pithcli'):
            print('\tDeleting container %s' % self.pithcli.container)
            try:
                self.pithcli.del_container(delimiter='/')
                self.pithcli.purge_container()
            except ClientError:
                # Best-effort cleanup; ignore server-side failures here.
                pass

    def _get_img_by_name(self, name):
        """Return the Cyclades image record matching *name*, else None."""
        r = self.cyclades.list_images()
        for img in r:
            if img['name'] == name:
                return img
        return None

    def test_list_public(self):
        """Test list_public"""
        self._test_list_public()

    def _test_list_public(self):
        r = self.client.list_public()
        # Reversed ordering should yield the same set of images.
        r0 = self.client.list_public(order='-')
        self.assertTrue(len(r) > 0)
        for img in r:
            for term in (
                    'status', 'name', 'container_format',
                    'disk_format', 'id', 'size'):
                self.assertTrue(term in img)
        self.assertTrue(r, r0)
        r0.reverse()
        for i, img in enumerate(r):
            self.assert_dicts_are_equal(img, r0[i])
        # Detailed listing exposes the full metadata set per image.
        r1 = self.client.list_public(detail=True)
        for img in r1:
            for term in (
                    'status', 'name', 'checksum', 'created_at',
                    'disk_format', 'updated_at', 'id', 'location',
                    'container_format', 'owner', 'is_public',
                    'deleted_at', 'properties', 'size'):
                self.assertTrue(term in img)
            if len(img['properties']):
                for interm in ('osfamily', 'root_partition'):
                    self.assertTrue(interm in img['properties'])
        # size_max filter must only ever shrink the result set.
        size_max = 1000000000000
        r2 = self.client.list_public(filters=dict(size_max=size_max))
        self.assertTrue(len(r2) <= len(r))
        for img in r2:
            self.assertTrue(int(img['size']) <= size_max)

    def test_get_meta(self):
        """Test get_meta"""
        self._test_get_meta()

    def _test_get_meta(self):
        r = self.client.get_meta(self['image', 'id'])
        self.assertEqual(r['id'], self['image', 'id'])
        # NOTE(review): get_meta keys use dashes ('updated-at'),
        # unlike the underscored keys of list_public(detail=True).
        for term in (
                'status', 'name', 'checksum', 'updated-at', 'created-at',
                'deleted-at', 'location', 'is-public', 'owner',
                'disk-format', 'size', 'container-format'):
            self.assertTrue(term in r)
        for interm in (
                'OSFAMILY', 'USERS', 'ROOT_PARTITION', 'OS',
                'DESCRIPTION'):
            self.assertTrue(interm in r['properties'])

    def test_register(self):
        """Test register"""
        self._prepare_img()
        self._test_register()

    def _test_register(self):
        self.assertTrue(self._imglist)
        for img in self._imglist.values():
            self.assertTrue(img is not None)
            # register() must not return keys outside the known meta set.
            r = set(self._imgdetails[img['name']].keys())
            self.assertTrue(r.issubset(IMGMETA.union(['properties'])))

    def test_unregister(self):
        """Test unregister"""
        self._prepare_img()
        self._test_unregister()

    def _test_unregister(self):
        try:
            for img in self._imglist.values():
                self.client.unregister(img['id'])
                # Re-create the image so teardown still has one to delete.
                self._prepare_img()
                break
        except ClientError as ce:
            if ce.status in (405,):
                # Some deployments disable unregister; not a failure.
                print 'IMAGE UNREGISTER is not supported by server: %s' % ce
            else:
                raise

    def test_set_members(self):
        """Test set_members"""
        self._prepare_img()
        self._test_set_members()

    def _test_set_members(self):
        members = ['*****@*****.**' % self.now]
        for img in self._imglist.values():
            self.client.set_members(img['id'], members)
            r = self.client.list_members(img['id'])
            self.assertEqual(r[0]['member_id'], members[0])

    def test_list_members(self):
        """Test list_members"""
        self._test_list_members()

    def _test_list_members(self):
        # Listing is exercised as part of the set_members round trip.
        self._test_set_members()

    def test_remove_members(self):
        """Test remove_members - NO CHECK"""
        self._prepare_img()
        self._test_remove_members()

    def _test_remove_members(self):
        # Deliberately disabled ("NO CHECK"): the code below is kept for
        # reference but never executed.
        return
        members = [
            '*****@*****.**' % self.now, '*****@*****.**' % self.now]
        for img in self._imglist.values():
            self.client.set_members(img['id'], members)
            r = self.client.list_members(img['id'])
            self.assertTrue(len(r) > 1)
            self.client.remove_member(img['id'], members[0])
            r0 = self.client.list_members(img['id'])
            self.assertEqual(len(r), 1 + len(r0))
            self.assertEqual(r0[0]['member_id'], members[1])

    def test_list_shared(self):
        """Test list_shared - NOT CHECKED"""
        self._test_list_shared()

    def _test_list_shared(self):
        #No way to test this, if I dont have member images
        pass
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED # AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # The views and conclusions contained in the software and # documentation are those of the authors and should not be # interpreted as representing official policies, either expressed # or implied, of GRNET S.A. from kamaki.clients.astakos import AstakosClient from kamaki.clients.pithos import PithosClient # Initliaze astakos client AUTHENTICATION_URL = "https://astakos.example.com/identity/v2.0" TOKEN = "User-Token" astakos = AstakosClient(AUTHENTICATION_URL, TOKEN) # Initliaze pithos service_type = PithosClient.service_type endpoint = astakos.get_endpoint_url(service_type) pithos = PithosClient(endpoint, TOKEN) # Set user UUID and Container (optional) user = astakos.authenticate() uuid = user["access"]["user"]["id"] pithos.account = uuid pithos.container = "pithos"
from kamaki.clients.pithos import PithosClient, ClientError
import logging
import os

# Verbose wire-level logging for the whole stress run.
logging.basicConfig(level=logging.DEBUG)

PITHOS_URL = 'https://pithos.okeanos.io/v1'
ACCOUNT = 'd8e6f8bb-619b-4ce6-8903-89fabdca024d'
CONTAINER = 'pithos'

pithos = PithosClient(
    PITHOS_URL, os.getenv('ASTAKOS_TOKEN', ''), ACCOUNT, CONTAINER)

# Stress test: hammer the server by repeatedly deleting and recreating
# the same directory object.
pithos.create_directory('break-pithos')
for _ in range(100):
    pithos.object_delete('break-pithos')
    pithos.create_directory('break-pithos')
def main():
    """Walk through the basic Pithos+ workflow with the kamaki clients.

    Demonstrates: authentication, endpoint discovery, container/object
    listing, upload/download (small and multi-threaded large files),
    object metadata, move/copy, delete/recover via versions, and
    sharing/publishing.

    NOTE(review): relies on module-level names defined elsewhere in this
    file (AUTHENTICATION_URL, TOKEN, YOUR_CONTAINER, YOUR_FOLDER_PATH,
    SMALLFILE, BIGFILE, TMPFILE, FILETOPUBLISH, parse_containers,
    print_container_objects, create_pb) — not visible in this chunk.
    Each step only prints on ClientError and continues, so a failed step
    may leave later steps with undefined locals (e.g. PITHOS_URL).
    """
    # Authenticate to Astakos
    print "---------------------------------------------------"
    print "******* Authenticating to Astakos******************"
    print "---------------------------------------------------"
    try:
        my_astakos_client = AstakosClient(AUTHENTICATION_URL, TOKEN)
        my_accountData = my_astakos_client.authenticate()
        # The user's UUID is used as the Pithos account name below.
        ACCOUNT_UUID = my_accountData['access']['user']['id']
        print "Status: Authenticated"
    except ClientError:
        print"Failed to authenticate user token"
    print"\n"
    print "---------------------------------------------------"
    print "**********Getting Endpoints for pithos*************"
    print "---------------------------------------------------"
    # get endpoint url for the object-store service
    try:
        endpoints = my_astakos_client.get_service_endpoints('object-store')
        PITHOS_URL = endpoints['publicURL']
        print "The public URL:", PITHOS_URL
    except ClientError:
        print "Failed to get endpoints for pithos"
    print"\n"
    print "---------------------------------------------------"
    print "**********Authenticating to Pithos*****************"
    print "---------------------------------------------------"
    # Initialize pithos client (account is the UUID; no container yet)
    try:
        pithos = PithosClient(PITHOS_URL, TOKEN)
        pithos.account = ACCOUNT_UUID
        pithos.container = ''
    except ClientError:
        print "Failed to initialize Pithos+ client"
    print"\n"
    print "---------------------------------------------------"
    print "**********LIST ALL CONTAINERS IN YOUR ACCOUNT******"
    print "---------------------------------------------------"
    # list all containers
    try:
        container_list = pithos.list_containers()
        # parse_containers presumably extracts the container names --
        # defined elsewhere in this file.
        containers = parse_containers(container_list)
        ContNums = len(containers)
        print "The number of Containers in your account:", ContNums
        print "The containers are"
        print ','.join(containers)
    except ClientError:
        print"Error in container list"
    print"\n"
    print "---------------------------------------------------"
    print "******LIST OBJECTS OF A FOLDER IN A CONTAINER******"
    print "---------------------------------------------------"
    # list the objects under one folder of one container
    try:
        print_container_objects(
            pithos, YOUR_CONTAINER, prefixName=YOUR_FOLDER_PATH)
    except ClientError:
        print"Error in listing folder objects"
    print"\n"
    print "---------------------------------------------------"
    print "**********Print objects for all containers*********"
    print "---------------------------------------------------"
    try:
        for i in range(len(containers)):
            print_container_objects(pithos, containers[i])
    except ClientError as e:
        # ClientError carries an HTTP status and optional detail lines.
        print('Error: %s' % e)
        if e.status:
            print('- error code: %s' % e.status)
        if e.details:
            for detail in e.details:
                print('- %s' % detail)
    # Create and set a different container than pithos
    print "Create a new container - my container"
    CONTAINER = 'my container'
    pithos.create_container(CONTAINER)
    pithos.container = CONTAINER
    print"\n"
    print "---------------------------------------------------"
    print "**********UPLOAD AND DOWNLOAD**********************"
    print "---------------------------------------------------"
    """ B. UPLOAD AND DOWNLOAD """
    print "Upload a small file to pithos"
    # Upload a small file
    print './test/' + SMALLFILE
    with open('./test/' + SMALLFILE) as f:
        pithos.upload_object(SMALLFILE, f)
    print "Download a small file from pithos and store to string"
    print SMALLFILE
    FILETOSTRING = pithos.download_to_string(
        SMALLFILE, download_cb=create_pb('Downloading...'))
    print "Small file string:", FILETOSTRING
    # To optimize for large files, allow pithos client to use multiple
    # threads! pithos client will auto-adjust the number of threads,
    # up to a limit
    pithos.MAX_THREADS = 5
    print "Upload a large file to pithos"
    # Now, large file upload will be optimized:
    # dd if=/dev/zero of=test/large.txt count=8 bs=1073741824
    with open('./test/' + BIGFILE) as f:
        pithos.upload_object(
            BIGFILE, f,
            hash_cb=create_pb('Calculating hashes...'),
            upload_cb=create_pb('Uploading...'))
    print "Create my own metadata for object"
    tags = {}
    tags['mytag'] = 12
    pithos.set_object_meta(BIGFILE, tags)
    myOwnMetadataObject = pithos.get_object_meta(BIGFILE)
    print "Object Metatadata", myOwnMetadataObject
    print "Download a large file from pithos"
    # Download a file (btw, MAX_THREADS are still 5)
    with open('./test/' + TMPFILE, 'wb+') as f:
        pithos.download_object(
            BIGFILE, f, download_cb=create_pb('Downloading...'))
    # HIGHLIGHTS: If parts of the file are already uploaded or downloaded,
    # corresponding methods will transfer only the missing parts!
    print"\n"
    print "---------------------------------------------------"
    print "**********CREATE A NEW CONTAINER AND MOVE OBJECT***"
    print "---------------------------------------------------"
    """ Create a new container and move object"""
    print "Create a new container - containerToCopy"
    CONTAINERNEW = 'containertocopy'
    pithos.create_container(CONTAINERNEW)
    pithos.move_object(CONTAINER, SMALLFILE, CONTAINERNEW, SMALLFILE)
    print"\n"
    print "---------------------------------------------------"
    print "**********DELETE AND RECOVER***********************"
    print "---------------------------------------------------"
    """ C. DELETE AND RECOVER """
    # Delete a file
    pithos.container = CONTAINER
    pithos.delete_object(BIGFILE)
    # Recover file: copy the first recorded version back over itself.
    file_versions = pithos.get_object_versionlist(BIGFILE)
    print "The file versions"
    for data in file_versions:
        # Each version entry is a (version_id, timestamp) pair.
        print "The value of id=", data[0], "date=", data[1]
    first_version = file_versions[0]
    v_id, v_date = first_version
    pithos.copy_object(
        CONTAINER, BIGFILE, CONTAINER, BIGFILE, source_version=v_id)
    print"\n"
    print "---------------------------------------------------"
    print "**********GET FILE DETAILS************************"
    print "---------------------------------------------------"
    objectDetails = pithos.get_object_info(BIGFILE)
    for obj in objectDetails:
        print "The value of", obj, "=", objectDetails.get(obj)
    print"\n"
    print "---------------------------------------------------"
    print "**********SHARING AND PUBLISHING*******************"
    print "---------------------------------------------------"
    """ D. SHARING AND PUBLISHING """
    # Read permission to all pithos users
    pithos.set_object_sharing(FILETOPUBLISH, read_permission='*')
    # Publish and get public URL
    pithos.publish_object(FILETOPUBLISH)
    print "Get sharing and public information"
    # Get sharing and public information
    info = pithos.get_object_info(FILETOPUBLISH)
    for data in info:
        print "The value of", data, "=", info.get(data)
    sharing = info.get('x-object-sharing', {})
    print sharing
    public = info.get('x-object-public', None)
    print "The public URL=", public
    print "Remove sharing and publishing"
    # Remove sharing and publishing
    pithos.del_object_sharing(FILETOPUBLISH)
    pithos.unpublish_object(FILETOPUBLISH)
    print "Get sharing and public information"
    # Get sharing and public information: both headers should be gone now.
    info = pithos.get_object_info(FILETOPUBLISH)
    sharing = info.get('x-object-sharing', {})
    public = info.get('x-object-public', None)
    print "The public URL=", public