def test_get_surl_cdn(self):
    """Resolve a CDN endpoint and check the parsed publicURL pieces.

    Builds a cloudFilesCDN endpoint list, asks ``get_surl`` for the
    TEST-REGION publicURL, and verifies scheme and host of the result.
    """
    endpoint_list = self.endpoints(name='cloudFilesCDN')
    surl = auth_utils.get_surl(
        region='TEST-REGION',
        cf_list=endpoint_list,
        lookup='publicURL'
    )
    # get_surl returns a urlparse-style result; check its components.
    self.assertEqual(surl.scheme, 'https')
    self.assertEqual(surl.netloc, 'TEST-CDN.url')
def test_get_surl_cdn(self):
    """Verify the CDN publicURL lookup for TEST-REGION.

    The parsed URL returned by ``get_surl`` should carry the https
    scheme and the expected CDN host.
    """
    cdn_endpoints = self.endpoints(name='cloudFilesCDN')
    result = auth_utils.get_surl(
        region='TEST-REGION', cf_list=cdn_endpoints, lookup='publicURL'
    )
    self.assertEqual(result.scheme, 'https')
    self.assertEqual(result.netloc, 'TEST-CDN.url')
def start(self):
    """Clone objects from one container to another.

    NOTE: This method was intended for use with inter-datacenter
    cloning of objects.

    Raises ``turbo.NoSource`` when the target region, target URL, or
    target container is missing, or when the source container is empty.
    """
    # Build the request payload from CLI args and the auth object.
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('source_container'),
        source=None,
        args=ARGS
    )
    self.go = actions.CloudActions(payload=payload)

    # A target region is mandatory; normalize it to upper case.
    target_region = ARGS.get('target_region')
    if target_region is None:
        raise turbo.NoSource('No target Region was specified.')
    target_region = target_region.upper()

    # Choose the service-net or public endpoint for the target side.
    if ARGS.get('target_snet') is True:
        target_type = 'internalURL'
    else:
        target_type = 'publicURL'

    # Resolve the target storage URL from the endpoint catalogue.
    target_url = auth.get_surl(
        region=target_region, cf_list=payload['acfep'], lookup=target_type
    )
    if target_url is None:
        raise turbo.NoSource('No url was found from the target region')
    payload['turl'] = target_url

    # A target container name is also mandatory.
    target_container = ARGS.get('target_container')
    if target_container is None:
        raise turbo.NoSource('No target Container was specified.')
    payload['tc_name'] = target_container

    # Make sure both source and target containers exist (create if not).
    self.go.container_create(
        url=payload['url'], container=payload['c_name']
    )
    self.go.container_create(
        url=target_url, container=target_container
    )

    report.reporter(msg='Getting Object list from the Source.')
    with multi.spinner():
        # Pull the object listing from the source container.
        objects, list_count, last_obj = self.go.object_lister(
            url=payload['url'], container=payload['c_name']
        )
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        if objects is None:
            raise turbo.NoSource('The source container is empty.')

        # Size the worker pool against the number of objects found.
        num_files = len(objects)
        concurrency = multi.set_concurrency(
            args=ARGS, file_count=num_files
        )

    report.reporter(msg='Beginning Sync Operation.')
    kwargs = {
        'surl': payload['url'],
        'turl': payload['turl'],
        'scontainer': payload['c_name'],
        'tcontainer': payload['tc_name'],
        'cf_job': getattr(self.go, 'object_syncer')
    }
    multi.job_processer(
        num_jobs=num_files,
        objects=objects,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )
def start(self):
    """Clone objects from one container to another.

    NOTE: This method was intended for use with inter-datacenter
    cloning of objects.

    Raises ``turbo.NoSource`` when required target information is
    missing or the source container holds no objects.
    """
    # Assemble the payload and the action driver for this run.
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('source_container'),
        source=None,
        args=ARGS
    )
    self.go = actions.CloudActions(payload=payload)

    # Guard: the target region must be supplied.
    region = ARGS.get('target_region')
    if region is None:
        raise turbo.NoSource('No target Region was specified.')
    region = region.upper()

    # Service-net flag selects the internal endpoint, otherwise public.
    lookup = 'internalURL' if ARGS.get('target_snet') is True else 'publicURL'

    # Guard: the target endpoint must resolve for that region.
    turl = auth.get_surl(
        region=region, cf_list=payload['acfep'], lookup=lookup
    )
    if turl is None:
        raise turbo.NoSource('No url was found from the target region')
    payload['turl'] = turl

    # Guard: the target container must be named.
    tc_name = ARGS.get('target_container')
    if tc_name is None:
        raise turbo.NoSource('No target Container was specified.')
    payload['tc_name'] = tc_name

    # Create source and target containers when they do not yet exist.
    self.go.container_create(url=payload['url'], container=payload['c_name'])
    self.go.container_create(url=turl, container=tc_name)

    report.reporter(msg='Getting Object list from the Source.')
    with multi.spinner():
        # Fetch the source object listing while the spinner runs.
        objects, list_count, last_obj = self.go.object_lister(
            url=payload['url'], container=payload['c_name']
        )
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        if objects is None:
            raise turbo.NoSource('The source container is empty.')

        # Derive concurrency from the object count.
        num_files = len(objects)
        concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    report.reporter(msg='Beginning Sync Operation.')
    job_kwargs = {
        'surl': payload['url'],
        'turl': payload['turl'],
        'scontainer': payload['c_name'],
        'tcontainer': payload['tc_name'],
        'cf_job': getattr(self.go, 'object_syncer')
    }
    multi.job_processer(
        num_jobs=num_files,
        objects=objects,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=job_kwargs
    )
def test_get_surl_bad_lookup(self):
    """An unknown lookup key should make ``get_surl`` return None."""
    endpoint_list = self.endpoints(name='cloudFiles')
    surl = auth_utils.get_surl(
        region='TEST-REGION',
        cf_list=endpoint_list,
        lookup='NotThisURL'
    )
    # No endpoint matches the bogus lookup, so nothing is returned.
    self.assertEqual(surl, None)
def test_get_surl_bad_lookup(self):
    """``get_surl`` yields None when the lookup key matches nothing."""
    cf_endpoints = self.endpoints(name='cloudFiles')
    result = auth_utils.get_surl(
        region='TEST-REGION', cf_list=cf_endpoints, lookup='NotThisURL'
    )
    self.assertEqual(result, None)