def test_get_sfile_dot_source(self):
    def fake_cwd():
        return '/some/dir'

    with mock.patch('turbolift.utils.basic_utils.os.getcwd', fake_cwd):
        obj = basic_utils.get_sfile(ufile='object1', source='.')
        self.assertEqual(obj, '/some/dir')
def remote_delete(self, payload):
    """Run Remote Delete if it was enabled.

    NOTE: Remote delete will delete ALL Objects in a remote container
    which differ from the objects in the SOURCED LOCAL FILESYSTEM.

    IE: If this option is used on one directory and then another, any
    files that differ would be deleted based on the index information
    found in the LOCAL FILE SYSTEM on the LAST command run.

    :param payload: ``dict``
    """

    report.reporter(msg='Getting file list for REMOTE DELETE')

    # From the remote system see if we have differences in the local system
    f_indexed = self._index_local_files()

    objects = self.go.object_lister(url=payload['url'],
                                    container=payload['c_name'])

    source = payload['source']
    obj_names = [
        basic.jpath(root=source, inode=obj.get('name'))
        for obj in objects[0]
    ]
    obj_names = set(obj_names)

    # Sort the difference between remote files and local files.
    objects = [obj for obj in obj_names if obj not in f_indexed]
    if objects:
        # Set Basic Data for file delete.
        num_files = len(objects)
        report.reporter(
            msg=('MESSAGE: "%d" Files have been found to be removed'
                 ' from the REMOTE CONTAINER.' % num_files))
        concurrency = multi.set_concurrency(args=ARGS,
                                            file_count=num_files)

        # Delete the difference in Files.
        report.reporter(msg='Performing REMOTE DELETE')

        del_objects = [
            basic.get_sfile(ufile=obj, source=payload['source'])
            for obj in objects if obj is not None
        ]

        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_deleter')
        }

        multi.job_processer(num_jobs=num_files,
                            objects=del_objects,
                            job_action=multi.doerator,
                            concur=concurrency,
                            kwargs=kwargs)
    else:
        report.reporter(
            msg='No Difference between REMOTE and LOCAL Directories.')
def remote_delete(self, payload, f_indexed):
    """Run Remote Delete if it was enabled.

    NOTE: Remote delete will delete ALL Objects in a remote container
    which differ from the objects in the SOURCED LOCAL FILESYSTEM.

    IE: If this option is used on one directory and then another, any
    files that differ would be deleted based on the index information
    found in the LOCAL FILE SYSTEM on the LAST command run.

    :param payload: ``dict``
    :param f_indexed: locally indexed files
    :return:
    """

    report.reporter(msg='Getting file list for REMOTE DELETE')
    objects = self.go.object_lister(
        url=payload['url'],
        container=payload['c_name']
    )

    source = payload['source']
    obj_names = [basic.jpath(root=source, inode=obj.get('name'))
                 for obj in objects[0]]

    # From the remote system see if we have differences in the local system
    objects = multi.return_diff().difference(target=f_indexed,
                                             source=obj_names)
    if objects:
        # Set Basic Data for file delete.
        num_files = len(objects)
        LOG.info('MESSAGE\t: "%s" Files have been found to be removed from'
                 ' the REMOTE CONTAINER.', num_files)
        concurrency = multi.set_concurrency(
            args=ARGS,
            file_count=num_files
        )

        # Delete the difference in Files.
        report.reporter(msg='Performing Remote Delete')
        objects = [basic.get_sfile(ufile=obj, source=payload['source'])
                   for obj in objects]

        kwargs = {'url': payload['url'],
                  'container': payload['c_name'],
                  'cf_job': getattr(self.go, 'object_deleter')}

        multi.job_processer(
            num_jobs=num_files,
            objects=objects,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
    else:
        report.reporter(
            msg='No Difference between REMOTE and LOCAL Directories.'
        )
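Both remote_delete variants above follow the same pattern: join each remote object name onto the local source root, take the difference against the locally indexed files, and convert what remains back to object names with get_sfile before handing them to the deleter. A minimal standalone sketch of that diff, using hypothetical stand-ins for basic.jpath and basic.get_sfile, could look like this:

# Simplified, standalone illustration of the delete-diff logic above.
# `join_path` stands in for basic.jpath and `strip_source` for
# basic.get_sfile; both are assumptions about those helpers' behaviour.
import os


def join_path(root, inode):
    # Assumed to behave like basic.jpath: join the source root with the
    # object name so remote names can be compared to local paths.
    return os.path.join(root, inode)


def strip_source(ufile, source):
    # Assumed to behave like basic.get_sfile without preserve_path: drop
    # the source prefix so only the object name remains.
    return os.path.relpath(ufile, source)


def remote_only(remote_object_names, local_index, source):
    """Return the object names present remotely but missing locally."""
    remote_paths = {join_path(source, name) for name in remote_object_names}
    difference = remote_paths - set(local_index)
    return [strip_source(path, source) for path in difference]


# Example: 'stale.txt' exists only in the container, so it is the one
# object that remote delete would remove.
print(remote_only(['kept.txt', 'stale.txt'],
                  ['/data/src/kept.txt'],
                  '/data/src'))
# ['stale.txt']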
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is
    found locally and in the swift repository an MD5 comparison is
    done between the two files. If the MD5 is mismatched the local
    file is uploaded to the repository.

    If custom metadata is specified and the object exists, the method
    will put the metadata onto the object.

    :param url:
    :param container:
    :param source:
    :param u_file:
    """

    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn, obj=u_file):
            # Get the path ready for action
            sfile = basic.get_sfile(ufile=u_file, source=source)
            rpath = http.quoter(url=url.path,
                                cont=container,
                                ufile=sfile)

            fheaders = self.payload['headers']

            # Perform Upload.
            self._putter(conn=conn,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders,
                         retry=retry)

            # Put headers on the object if custom headers, or save perms.
            if any([ARGS.get('object_headers') is not None,
                    ARGS.get('save_perms') is not None]):
                if ARGS.get('object_headers') is not None:
                    fheaders.update(ARGS.get('object_headers'))
                if ARGS.get('save_perms') is not None:
                    fheaders.update(basic.stat_file(local_file=u_file))

                self._header_poster(conn=conn,
                                    rpath=rpath,
                                    fheaders=fheaders,
                                    retry=retry)
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is
    found locally and in the swift repository an MD5 comparison is
    done between the two files. If the MD5 is mismatched the local
    file is uploaded to the repository.

    If custom metadata is specified and the object exists, the method
    will put the metadata onto the object.

    :param url:
    :param container:
    :param source:
    :param u_file:
    """

    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # Open connection and perform operation
        # Get the path ready for action
        sfile = basic.get_sfile(ufile=u_file, source=source)

        if ARGS.get('dir'):
            container = '%s/%s' % (container, ARGS['dir'].strip('/'))

        rpath = http.quoter(url=url.path,
                            cont=container,
                            ufile=sfile)

        fheaders = self.payload['headers']
        if ARGS.get('object_headers') is not None:
            fheaders.update(ARGS.get('object_headers'))
        if ARGS.get('save_perms') is not None:
            fheaders.update(basic.stat_file(local_file=u_file))

        with meth.operation(retry, obj='%s %s' % (fheaders, u_file)):
            self._putter(url=url,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders)
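In both object_putter variants, get_sfile produces the container-relative object name, which http.quoter combines with the endpoint path and container to form the PUT target. A rough sketch of that path construction, assuming quoter does little more than URL-quote the joined segments (build_remote_path is a hypothetical helper), might be:

# Minimal sketch of the remote-path construction used above, assuming
# http.quoter simply URL-quotes and joins the endpoint path, container,
# and object name. `build_remote_path` is a hypothetical stand-in.
try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote        # Python 2 fallback


def build_remote_path(url_path, container, sfile):
    """Quote and join the pieces of the object's remote path."""
    remote = '%s/%s/%s' % (url_path, container, sfile)
    return quote(remote)


# With --dir set, the value is folded into the container segment first,
# mirroring the "if ARGS.get('dir')" branch in the later variant.
container = 'backups'
directory = 'photos/2014'
container = '%s/%s' % (container, directory.strip('/'))
print(build_remote_path('/v1/AUTH_test', container, 'img 001.jpg'))
# /v1/AUTH_test/backups/photos/2014/img%20001.jpg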
def test_get_sfile_dot_source(self):
    obj = basic_utils.get_sfile(ufile='/test/object1', source='/test')
    self.assertEqual(obj, 'object1')
def test_get_sfile_isfile(self):
    isfile = mock.Mock(return_value=True)
    with mock.patch('turbolift.utils.basic_utils.os.path.isfile', isfile):
        obj = basic_utils.get_sfile(ufile='object1', source='test/dir')
        self.assertEqual(obj, 'dir')
def test_get_sfile_with_preserve_path(self):
    args = {'preserve_path': True}
    with mock.patch('turbolift.utils.basic_utils.turbo.ARGS', args):
        obj = basic_utils.get_sfile(ufile='object1', source='test/dir')
        self.assertEqual(obj, 'test/dir/object1')
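Taken together, the tests in this section pin down get_sfile's branches: preserve_path keeps the source-relative path, a file source collapses to its basename, a '.' source falls back to the working directory, and otherwise the source prefix is stripped from the file name. A sketch consistent with those cases (inferred from the tests, not copied from the module) might be:

# Sketch of a get_sfile implementation consistent with the tests above;
# this is inferred behaviour, not necessarily the shipped function.
import os

ARGS = {}  # stands in for turbo.ARGS, which the tests patch in


def get_sfile(ufile, source):
    """Return the object name derived from a local file and its source."""
    # preserve_path: keep the full source-relative path (last test)
    if ARGS.get('preserve_path'):
        return os.path.join(source, ufile).lstrip(os.sep)
    # A file source collapses to its basename ('test/dir' -> 'dir')
    if os.path.isfile(source):
        return os.path.basename(source)
    # A '.' source resolves to the current working directory
    if source == '.':
        return os.getcwd()
    # Otherwise strip the source prefix ('/test/object1' -> 'object1')
    return os.path.relpath(ufile, source)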