def _create_files_query(dir_path, namespace=None, recursive=False, depth=None, filters=None, order=None):
  """Builds an ndb.Query for listing _TitanFile entities under a directory.

  Args:
    dir_path: Absolute directory path to list.
    namespace: Optional datastore namespace for the query.
    recursive: If True, match files anywhere beneath dir_path (via the
        "paths" ancestry property) instead of only direct children.
    depth: Optional positive integer limiting how deep a recursive listing
        descends below dir_path. Requires recursive=True.
    filters: Optional iterable of extra ndb filter nodes.
    order: Optional iterable of ndb ordering properties.

  Returns:
    An ndb.Query over _TitanFile entities.

  Raises:
    ValueError: if depth is non-positive, depth is given without
        recursive=True, or filters is not iterable.
  """
  # Validate the depth/recursive/filters combination before touching the path.
  if depth is not None:
    if depth <= 0:
      raise ValueError('depth argument must be a positive integer.')
    if not recursive:
      raise ValueError('depth queries require recursive=True.')
  if filters is not None and not hasattr(filters, '__iter__'):
    raise ValueError('"filters" must be an iterable.')

  utils.validate_dir_path(dir_path)
  # Normalize: drop a single trailing slash, but leave the root path alone.
  if dir_path.endswith('/') and dir_path != '/':
    dir_path = dir_path[:-1]

  query = _TitanFile.query(namespace=namespace)
  if not recursive:
    # Non-recursive: only direct children of dir_path.
    query = query.filter(_TitanFile.dir_path == dir_path)
  else:
    # Recursive: any file whose ancestry ("paths") contains dir_path.
    query = query.filter(_TitanFile.paths == dir_path)
    if depth is not None:
      # Root sits at depth 0; other paths count their slashes.
      base_depth = 0 if dir_path == '/' else dir_path.count('/')
      query = query.filter(_TitanFile.depth <= base_depth + depth)
      query = query.filter(_TitanFile.paths == dir_path)

  if filters:
    query = query.filter(*filters)
  if order:
    query = query.order(*order)
  return query
def _move_or_copy_to(self, dir_path, namespace=None, is_move=False, strip_prefix=None, timeout=None, result_files=None, failed_files=None, max_workers=DEFAULT_MAX_WORKERS, **kwargs):
  """Shared implementation behind copy_to and move_to.

  Fans the per-file copy/move operations out over a thread pool, waits for
  them to finish, and aggregates any failures into a single error.

  Args:
    dir_path: Destination directory path.
    namespace: Optional namespace for the destination files.
    is_move: If True, each source file's move_to is invoked; otherwise
        copy_to.
    strip_prefix: Prefix stripped from each source path when computing its
        destination path (see utils.make_destination_paths_map).
    timeout: Optional seconds passed to futures.wait.
    result_files: Optional Files collection, updated in place with the
        destination files; entries whose operation fails are removed again.
    failed_files: Optional Files collection, updated in place with the
        source files whose operation raised Copy/MoveFileError.
    max_workers: Size of the thread pool.
    **kwargs: Passed through to the File constructor for each destination.

  Raises:
    ImportError: if the pythonfutures library is not available.
    CopyFilesError: if any individual copy or move operation failed.
  """
  utils.validate_dir_path(dir_path)
  destination_map = utils.make_destination_paths_map(
      self.keys(), destination_dir_path=dir_path, strip_prefix=strip_prefix)

  future_results = []
  if not futures:
    raise ImportError(
        'Moving or copying files requires the Python futures library: '
        'http://code.google.com/p/pythonfutures/')
  with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
    for source_path, destination_path in destination_map.iteritems():
      source_file = self[source_path]
      destination_file = File(destination_path, namespace=namespace, **kwargs)
      # Optimistically record the destination; removed below on failure.
      if result_files is not None:
        result_files.update(
            Files(files=[destination_file], namespace=namespace))
      file_method = source_file.move_to if is_move else source_file.copy_to
      future = executor.submit(file_method, destination_file)
      future_results.append(future)
  futures.wait(future_results, timeout=timeout)

  # Collect errors after all operations have completed (or timed out).
  errors = []
  for future in future_results:
    try:
      future.result()
    except (CopyFileError, MoveFileError) as e:
      if failed_files is not None:
        # No namespace here, since this is a source file.
        failed_files.update(Files(files=[e.titan_file]))
      # Remove the failed file from successfully copied files collection.
      if result_files is not None:
        del result_files[e.titan_file.path]
      logging.exception('Operation failed:')
      errors.append(e)

  # Important: clear the in-context cache since we changed state in threads.
  ndb.get_context().clear_cache()

  if errors:
    raise CopyFilesError(
        'Failed to copy files: \n%s' % '\n'.join([str(e) for e in errors]))
def validate_path(path, namespace=None):
  """Validates a Titan path and, if given, its namespace.

  NOTE(review): despite the name, this delegates to utils.validate_dir_path
  rather than a file-path validator — confirm directory-path validation is
  the intended check here.

  Args:
    path: The path to validate.
    namespace: Optional namespace to validate; skipped when None.

  Raises:
    Whatever the underlying utils validators raise on invalid input.
  """
  utils.validate_dir_path(path)
  if namespace is not None:
    utils.validate_namespace(namespace)