def _delete_file(self, cur_time, path_name):
    """Recursively remove expired files rooted at path_name.

    If path_name is a file, it is removed when its TTL (parsed from the
    path itself) has elapsed relative to cur_time. If it is a directory,
    the walk descends into its files and sub-directories.

    Returns:
        A (num_removed, num_failed) tuple of file counts.
    """
    removed, failed = 0, 0
    if FileUtil.is_file(path_name=path_name):
        ttl = FileUtil.get_ttl_from_path(path_name=path_name)
        # Only consult the modification time when a TTL is present,
        # matching the original short-circuit behavior.
        if ttl:
            age = cur_time - FileUtil.get_file_modified_time(
                file_name=path_name)
            if age > ttl:
                self._logger.info("Removing file " + path_name + '...')
                try:
                    # Hold the file lock while deleting so concurrent
                    # readers are excluded.
                    with FileLockTool(
                            protected_file_path=path_name,
                            read_mode=True,
                            timeout=TimeSleepObj.ONE_TENTH_SECOND):
                        FileUtil.remove_file(file_name=path_name)
                    removed += 1
                    self.counter_increment("num_file_removed")
                except Exception as err:
                    failed += 1
                    self.counter_increment("num_file_failed_to_be_removed")
                    self._logger.error("Removing file " + path_name +
                                       ' failed with err ' + str(err) + '.')
    else:
        # Directory: aggregate the results of recursing into every
        # contained file, then every contained sub-directory.
        for sub_path in FileUtil.list_files_in_dir(dir_name=path_name):
            sub_removed, sub_failed = self._delete_file(
                cur_time=cur_time, path_name=sub_path)
            removed += sub_removed
            failed += sub_failed
        for sub_path in FileUtil.list_dirs_in_dir(dir_name=path_name):
            sub_removed, sub_failed = self._delete_file(
                cur_time=cur_time, path_name=sub_path)
            removed += sub_removed
            failed += sub_failed
    return removed, failed
def get_response_and_status_impl(self, request):
    """Describe the file or directory named by request.file_path.

    For a file, a single files_info entry (path, size, modified time)
    is added to the response. For a directory, one files_info entry is
    added per contained file and one directories_info entry per
    sub-directory.

    Returns:
        A (FileViewerRPCResponse, Status.SUCCEEDED) tuple.
    """
    target = request.file_path
    response = FileViewerRPCResponse()
    if FileUtil.is_file(path_name=target):
        # die_if_file_not_exist validates existence and yields the path.
        checked_file = FileUtil.die_if_file_not_exist(file_name=target)
        entry = response.files_info.add()
        entry.file_path = checked_file
        entry.file_size = FileUtil.get_file_size(file_name=checked_file)
        entry.modified_time = str(
            FileUtil.get_file_modified_time(file_name=checked_file))
    else:
        checked_dir = FileUtil.die_if_dir_not_exist(dir_name=target)
        for sub_file in FileUtil.list_files_in_dir(dir_name=checked_dir):
            entry = response.files_info.add()
            entry.file_path = sub_file
            entry.file_size = FileUtil.get_file_size(file_name=sub_file)
            entry.modified_time = str(
                FileUtil.get_file_modified_time(file_name=sub_file))
        for sub_dir in FileUtil.list_dirs_in_dir(dir_name=checked_dir):
            dir_entry = response.directories_info.add()
            dir_entry.file_path = sub_dir
    return response, Status.SUCCEEDED
def _recursive_initialize_from_dir(node, max_recursion):
    """Recursively attach timestamp-named sub-directories of *node* to
    the file tree, descending at most *max_recursion* levels.

    This is a closure: it reads `self`, `dir_name` (root of the walk,
    used for logging) and `from_scratch` (build mode) from the
    enclosing scope.
    """
    self._SYS_LOGGER.info("Starting recursion of " +
                          str(max_recursion) + '.')
    if max_recursion == 0:
        # Depth budget exhausted: stop descending.
        self._SYS_LOGGER.info("Exhausted all recursions for dir [" +
                              dir_name + '].')
        self._logger.info("Exhausted all recursions for dir [" +
                          dir_name + '].')
        return
    node_name = node.get_node_name()
    self.increment_rpc_count_by(n=1)
    # From-scratch builds list children large -> small (reverse=True);
    # incremental builds list small -> large.
    child_node_names = sorted(
        FileUtil.list_dirs_in_dir(dir_name=node_name),
        reverse=from_scratch)
    for child_node_name in child_node_names:
        if from_scratch and self._file_tree.get_num_nodes(
        ) >= self._max_capacity > 0:
            self._SYS_LOGGER.info("Reach the max number of node: " +
                                  str(self._max_capacity) + '.')
            return
        newly_added_string = child_node_name.replace(node_name, '').replace(
            '/', '')
        # Only timestamp-style (all-digit) directory names participate.
        if not newly_added_string.isdigit():
            continue
        # Incremental mode: skip children at or before the latest known
        # directory timestamp.
        if not from_scratch and self._cmp_dir_by_timestamp(
                dir_name_1=child_node_name,
                dir_name_2=self._get_latest_dir_internal()):
            continue
        child_node = self._file_tree.find_node(
            node_name=child_node_name)
        if not child_node:
            child_node = OrderedNodeBase(node_name=child_node_name)
            # The nodes are ordered from large to small. So if the tree is built scratch, since the directory
            # is listed from large to small, SortOrder.ORDER is used. If it is incremental build, since the
            # directory is listed from small to large, SortOrder.REVERSE is used.
            order = SortOrder.ORDER if from_scratch else SortOrder.REVERSE
            self._file_tree.add_node(parent_node=node,
                                     child_node=child_node,
                                     order=order)
            # BUGFIX: this system-log line previously omitted
            # "] to parent node [", producing a garbled message that was
            # inconsistent with the user-log line below.
            self._SYS_LOGGER.info("Adding new node [" + child_node_name +
                                  "] to parent node [" +
                                  node.get_node_name() + '].')
            self._logger.info("Adding new node [" + child_node_name +
                              "] to parent node [" +
                              node.get_node_name() + '].')
            if not from_scratch:
                # Keep the tree bounded during incremental growth.
                self._file_tree.trim_tree(
                    max_capacity=self._max_capacity)
        _recursive_initialize_from_dir(node=child_node,
                                       max_recursion=max_recursion - 1)
def _recursively_check_dir_deletable(self, dir_name):
    """Return True iff dir_name contains no files anywhere below it.

    A directory is deletable when it holds no files directly and every
    sub-directory is itself (recursively) deletable.
    """
    if FileUtil.list_files_in_dir(dir_name=dir_name):
        return False
    # all() on an empty iterable is True, so a leaf directory with no
    # sub-directories is deletable.
    return all(
        self._recursively_check_dir_deletable(dir_name=sub_dir)
        for sub_dir in FileUtil.list_dirs_in_dir(dir_name=dir_name))
def get_oldest_dir_in_root_directory(self):
    """Return the deepest directory reached by repeatedly descending
    into the lexicographically smallest sub-directory of the tree root.

    Returns '' when the partitioner is empty.
    """
    if self.is_empty():
        self.sys_log("Current partitioner is empty.")
        return ''
    current = self._file_tree.get_root_name()
    while True:
        children = FileUtil.list_dirs_in_dir(dir_name=current)
        if not children:
            return current
        # min() is equivalent to sorted(children)[0].
        current = min(children)
def _get_oldest_dir_in_root_directory_interal(self):
    """Internal variant: descend from the tree root into the smallest
    sub-directory at each level, counting one RPC per listing.

    Returns '' when there is no file tree yet.
    """
    # NOTE(review): "interal" looks like a typo for "internal", but the
    # name is part of the interface so it is preserved.
    if not self._file_tree:
        self._SYS_LOGGER.info("Current partitioner is empty.")
        return ''
    current = self._file_tree.get_root_name()
    while True:
        self.increment_rpc_count_by(n=1)
        children = FileUtil.list_dirs_in_dir(dir_name=current)
        if not children:
            return current
        # min() is equivalent to sorted(children)[0].
        current = min(children)
def _delete_dir(self, dir_name):
    """Remove sub-directory trees of dir_name that hold no files.

    A sub-directory is removed recursively only when
    _recursively_check_dir_deletable confirms it contains no files at
    any depth; otherwise the walk descends into it instead.

    Returns:
        A (num_removed, num_failed) tuple of directory counts.
    """
    removed, failed = 0, 0
    for sub_dir in FileUtil.list_dirs_in_dir(dir_name=dir_name):
        deletable = (FileUtil.does_dir_exist(dir_name=sub_dir) and
                     self._recursively_check_dir_deletable(
                         dir_name=sub_dir))
        if not deletable:
            # Not empty (or vanished): recurse and accumulate results.
            sub_removed, sub_failed = self._delete_dir(dir_name=sub_dir)
            removed += sub_removed
            failed += sub_failed
            continue
        self._logger.info("Removing directory " + sub_dir + '...')
        try:
            FileUtil.remove_dir_recursively(dir_name=sub_dir)
            self.counter_increment("num_directory_removed")
            removed += 1
        except Exception as err:
            failed += 1
            self.counter_increment("num_directory_failed_to_be_removed")
            self._logger.error("Removing directory " + sub_dir +
                               ' failed with err ' + str(err) + '.')
    return removed, failed