def run(self):
    """Collect XML, PDF and image files for every configured acronym.

    For each acronym, walks its folder for top-level ``*.xml`` files
    (excluding ``*.*.xml``) and collects the matching PDF and image
    assets into the pack folder.

    :return: ``False`` when ``check_acrons`` fails; ``None`` (implicit)
        after all acronyms have been processed.
    """
    # Guard clause: abort early when the acronym list is not valid.
    if not self.check_acrons():
        return False
    for acron in self.acrons:
        # Lazy %-args: the message is only formatted if INFO is enabled.
        logging.info("Process acronym: %s", acron)
        walker = Walker(filter=["*.xml"], exclude=["*.*.xml"])
        acron_folder = path.join(self.xml_fs.root_path, acron)
        for xml in walker.files(fs.open_fs(acron_folder)):
            # Only process XMLs directly under an issue folder (depth 2).
            if len(path.iteratepath(xml)) == 2:
                logging.info("Process XML: %s", xml)
                issue_folder, pack_name = self.collect_xml(acron, xml)
                self.collect_pdf(acron, issue_folder, pack_name)
                self.collect_img(acron, issue_folder, pack_name)
def folder_walker(src_folder, dest_folder, target_height, target_width):
    """Resize every ``*.jpg`` found under *src_folder* into *dest_folder*.

    :param src_folder: folder to search recursively for JPEG files.
    :param dest_folder: folder that receives the processed images.
    :param target_height: height passed through to ``process_image``.
    :param target_width: width passed through to ``process_image``.
    :return: ``None``.
    """
    source_fs = open_fs(src_folder)
    jpg_walker = Walker(filter=['*.jpg'])
    for rel_path in jpg_walker.files(source_fs):
        # Walker paths begin with "/", so plain concatenation (rather than
        # os.path.join, which would discard src_folder) keeps the prefix.
        image_path = os.path.normpath(src_folder + os.sep + rel_path)
        process_image(image_path, dest_folder, target_height, target_width)
    return None
def find_newest_folder_fs(fs):
    """Return the path of the most recently modified top-level folder in *fs*.

    :param fs: an open PyFilesystem FS object to inspect.
    :return: the folder path with the latest ``modified`` timestamp.
    :raises IndexError: if *fs* contains no directories at depth 1.
    """
    modified_by_folder = {}
    walker = Walker(max_depth=1)
    for dir_path, info in walker.info(fs):
        if info.is_dir:
            # The walker's default namespace has no timestamps; re-query
            # with the "details" namespace to get the modification time.
            details = fs.getinfo(dir_path, namespaces=['details'])
            modified_by_folder[dir_path] = details.modified.strftime("%Y%m%d%H%M%S")
    # Stable reverse sort: on timestamp ties the first-seen folder wins.
    return sorted(modified_by_folder.items(), key=lambda kv: kv[1], reverse=True)[0][0]
def collect_pdf(self, acron, issue_folder, pack_name):
    """Copy every PDF matching *pack_name* into the pack folder.

    Searches ``<pdf_fs>/<acron>/<issue_folder>`` (two levels deep) for
    ``*<pack_name>*.pdf`` files and copies each one, renamed via
    ``rename_pdf_trans_filename``, into the pack directory.
    """
    pdf_walker = Walker(filter=["*" + pack_name + "*.pdf"], max_depth=2)
    source_root = path.join(self.pdf_fs.root_path, acron, issue_folder)
    for pdf in pdf_walker.files(fs.open_fs(source_root)):
        source_path = path.join(acron, issue_folder, path.basename(pdf))
        target_path = path.join(
            acron, issue_folder, pack_name, self.rename_pdf_trans_filename(pdf)
        )
        self.copy(source_path, target_path, src_fs=self.pdf_fs)
def find_newest_file_fs(path="", file_type="*.jpg", fs=""):
    """Return the path of the most recently modified matching file in *fs*.

    :param path: folder path whose first character (usually ``/``) is
        replaced by ``*`` to build the ``filter_dirs`` glob, e.g.
        ``/photos`` -> ``*photos``.  An empty path now matches all dirs
        instead of raising IndexError.
    :param file_type: filename glob to match, e.g. ``"*.jpg"``.
    :param fs: an open PyFilesystem FS object to search.  NOTE(review):
        the ``""`` default is a placeholder; a real FS must be supplied.
    :return: path of the file with the latest ``modified`` timestamp.
    :raises IndexError: if no file matches.
    """
    files = {}
    # Replace the leading character with "*" so the glob matches the
    # intended folder; slicing also tolerates an empty path.
    folder_to_check = "*" + path[1:] if path else "*"
    walker = Walker(filter=[file_type], filter_dirs=[folder_to_check])
    for file_path, info in walker.info(fs):
        if info.is_file:
            # Re-query with the "details" namespace to obtain timestamps.
            details = fs.getinfo(file_path, namespaces=['details'])
            files[file_path] = details.modified.strftime("%Y%m%d%H%M%S")
    newest_file_fs = sorted(files.items(), key=lambda kv: kv[1], reverse=True)[0][0]
    print("newest file: " + newest_file_fs)
    return newest_file_fs
def article_ALL_constructor(source_path: str, dest_path: str, in_place: bool = False) -> None:
    """Build an SPS XML for every ``*.xml`` file found under *source_path*.

    Files matching ``*.*.xml`` are skipped.  A failure on one file is
    logged and does not stop the rest of the batch.

    :param source_path: root folder searched recursively for XML files.
    :param dest_path: destination folder for the generated XMLs.
    :param in_place: forwarded to ``article_xml_constructor``.
    """
    logger.info("Iniciando Construção dos XMLs")
    xml_walker = Walker(filter=["*.xml"], exclude=["*.*.xml"])
    for relative_xml in tqdm(xml_walker.files(fs.open_fs(source_path))):
        # Walker paths start with "/", so concatenation yields a full path.
        full_xml = source_path + relative_xml
        try:
            article_xml_constructor(full_xml, dest_path, in_place)
        except Exception as ex:
            logger.info(
                "não foi possível gerar o XML do Arquivo %s: %s", full_xml, ex
            )
def upload(self, local_path, remote_path, filter_regex=None, istree=False): try: # path prepare local_path = self._local_path_transfor(local_path) if os.path.isdir(local_path) and istree is False: self.logger.warning("warning : use upload to upload tree") istree = True # osfs prepare localfs, local_relative = self.build_osfs(local_path) walker = None # walk prepare if filter_regex is not None: if not isinstance(filter_regex, list): filter_regex = list(filter_regex) walker = Walker(filter=filter_regex) # ftp prepare ftp_args = self._ftp_path_transfor(remote_path) ftpfs = FTPFS(host=ftp_args['host'], port=ftp_args['port'], passwd=ftp_args['password'], user=ftp_args['user']) if not istree: ftp_local, ftp_file = self._parse_file_name(ftp_args['relative_path']) try: ftpfs.makedirs(ftp_local) except Exception, error_msg: self.logger.error(str(error_msg)) copy_file(localfs, local_relative, ftpfs, ftp_args['relative_path']) else:
def collect_img(self, acron, issue_folder, pack_name):
    """Copy every asset matching *pack_name* into the pack folder.

    Searches ``<img_fs>/<acron>/<issue_folder>`` (two levels deep,
    skipping ``html`` directories) for ``*<pack_name>*`` files and copies
    each one into the pack directory under its original basename.
    """
    img_walker = Walker(
        filter=["*" + pack_name + "*"], max_depth=2, exclude_dirs=["html"]
    )
    source_root = path.join(self.img_fs.root_path, acron, issue_folder)
    for img in img_walker.files(fs.open_fs(source_root)):
        source_path = path.join(acron, issue_folder, path.basename(img))
        target_path = path.join(acron, issue_folder, pack_name, path.basename(img))
        self.copy(source_path, target_path, src_fs=self.img_fs)
def read_data_from_files(file_start):
    """Load every ``<file_start>*.txt`` in the current directory into an array.

    Square brackets and commas are replaced with spaces on each line
    before parsing, so files containing Python-style list dumps can be
    read by ``numpy.loadtxt``.

    :param file_start: filename prefix to match.
    :return: numpy array stacking one ``loadtxt`` result per matching file.
    """
    from fs import open_fs
    from fs.walk import Walker

    home_fs = open_fs('./')
    walker = Walker(filter=[file_start + '*.txt'])
    data = []
    for rel_path in walker.files(home_fs):
        # Walker paths are absolute within the FS ("/name.txt"); prefix
        # "." to turn them into OS-relative paths.
        with open('.' + rel_path) as handle:
            cleaned = (
                line.replace('[', ' ').replace(']', ' ').replace(',', ' ')
                for line in handle
            )
            data.append(np.loadtxt(cleaned))
    return np.array(data)
def build_static(self, *args, **options):
    """
    Builds the static files directory as well as robots.txt and favicon.ico
    """
    logger.debug("Building static directory")
    if self.verbosity > 1:
        self.stdout.write("Building static directory")
    # Collect Django's static files into STATIC_ROOT without prompting.
    management.call_command("collectstatic", interactive=False, verbosity=0)
    # Set the target directory inside the filesystem.
    target_dir = path.join(self.build_dir, settings.STATIC_URL.lstrip('/'))
    target_dir = smart_text(target_dir)
    exclude_dirs = getattr(settings, 'BAKERY_STATIC_EXCLUDE_DIRS', None)
    if not exclude_dirs:
        # explicitly set to None to make sure we don't get an empty list/tuple
        exclude_dirs = None
    if os.path.exists(self.static_root) and settings.STATIC_URL:
        if getattr(settings, 'BAKERY_GZIP', False):
            # Gzip-enabled builds go through the compressing copier instead.
            self.copytree_and_gzip(self.static_root, target_dir, exclude_dirs)
        # if gzip isn't enabled, just copy the tree straight over
        else:
            logger.debug("Copying {}{} to {}{}".format("osfs://", self.static_root, self.fs_name, target_dir))
            walker = Walker(exclude_dirs=exclude_dirs)
            copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir, walker=walker)
    # If they exist in the static directory, copy the robots.txt
    # and favicon.ico files down to the root so they will work
    # on the live website.
    robots_src = path.join(target_dir, 'robots.txt')
    if self.fs.exists(robots_src):
        robots_target = path.join(self.build_dir, 'robots.txt')
        logger.debug("Copying {}{} to {}{}".format(self.fs_name, robots_src, self.fs_name, robots_target))
        self.fs.copy(robots_src, robots_target)
    favicon_src = path.join(target_dir, 'favicon.ico')
    if self.fs.exists(favicon_src):
        favicon_target = path.join(self.build_dir, 'favicon.ico')
        logger.debug("Copying {}{} to {}{}".format(self.fs_name, favicon_src, self.fs_name, favicon_target))
        self.fs.copy(favicon_src, favicon_target)
def _mirror(src_fs, dst_fs, walker=None, copy_if_newer=True, copy_file=copy_file_internal):
    # type: (FS, FS, Optional[Walker], bool, Callable[[FS, str, FS, str], None]) -> None
    """Mirror the contents of *src_fs* onto *dst_fs*.

    After the call the destination holds the same tree as the source:
    files are copied (optionally only when the source compares newer),
    directories are created, and anything present only on the
    destination is deleted.

    :param walker: optional custom ``Walker``; by default everything is walked.
    :param copy_if_newer: skip files whose destination copy compares up to date.
    :param copy_file: callable used to copy one file between filesystems.
    """
    walker = walker or Walker()
    walk = walker.walk(src_fs, namespaces=["details"])
    for path, dirs, files in walk:
        try:
            # Index the destination directory by name; matched entries are
            # popped below so whatever remains can be deleted at the end.
            dst = {
                info.name: info for info in dst_fs.scandir(path, namespaces=["details"])
            }
        except ResourceNotFound:
            # Directory missing on the destination: create it, nothing to diff.
            dst_fs.makedir(path)
            dst = {}
        # Copy files
        for _file in files:
            _path = _file.make_path(path)
            dst_file = dst.pop(_file.name, None)
            if dst_file is not None:
                if dst_file.is_dir:
                    # Destination is a directory, remove it
                    dst_fs.removetree(_path)
                else:
                    # Compare file info
                    if copy_if_newer and not _compare(_file, dst_file):
                        continue
            copy_file(src_fs, _path, dst_fs, _path)
        # Make directories
        for _dir in dirs:
            _path = _dir.make_path(path)
            dst_dir = dst.pop(_dir.name, None)
            if dst_dir is not None:
                # Directory name exists on dst
                if not dst_dir.is_dir:
                    # Not a directory, so remove it
                    dst_fs.remove(_path)
            else:
                # Make the directory in dst
                dst_fs.makedir(_path, recreate=True)
        # Remove any remaining resources
        while dst:
            _, info = dst.popitem()
            _path = info.make_path(path)
            if info.is_dir:
                dst_fs.removetree(_path)
            else:
                dst_fs.remove(_path)
def file_chosen(selected, filters, popup, cb):
    """Handle a file-chooser selection and invoke *cb* with the result.

    When *selected* is a directory, every file under it matching
    *filters* (except those containing ``'sign'`` in their path) is
    collected and passed to *cb* as a list; otherwise *selected* itself
    is passed through.  The popup is dismissed either way.
    """
    State.imported_path.text = selected
    if os.path.isdir(selected):
        # Walker paths start with "/", so concatenation yields full paths.
        result = [
            selected + entry
            for entry in Walker(filter=filters).files(open_fs(selected))
            if 'sign' not in entry
        ]
        print('file chosen', result)
        cb(result, selected)
    else:
        cb(selected, selected)
    popup.dismiss()
def backup_scenario(self, confirm=True):
    """Copy scenario data to backup disk and remove original

    Each scenario-related folder (input, output, tmp) is copied to the
    backup root; the originals are removed only when the copy succeeded,
    so a failed backup never destroys the only copy of the data.

    :param bool confirm: prompt before deleting each batch of files
    """
    src_fs = dst_fs = get_ssh_fs()
    items = [
        (self._join(*server_setup.INPUT_DIR), f"{self.scenario_id}_*"),
        (self._join(*server_setup.OUTPUT_DIR), f"{self.scenario_id}_*"),
        (self._data_access.tmp_folder(self.scenario_id), "**"),
    ]
    for folder, pattern in items:
        print(f"--> Moving files matching {pattern} from {folder}")
        src_path = self._join(server_setup.DATA_ROOT_DIR, folder)
        dst_path = self._join(server_setup.BACKUP_DATA_ROOT_DIR, folder)
        walker = Walker(filter=[pattern])
        try:
            copy_dir(src_fs, src_path, dst_fs, dst_path, walker=walker)
        except FSError as e:
            print(f"Operation failed: {e}")
            # Skip removal: do not delete originals that were never copied.
            continue
        self._data_access.remove(self._join(folder, pattern), confirm=confirm)
def index(self, conf):
    """Yield a ``Pyfsfile`` for every file in the filesystem at ``conf['url']``.

    :param conf: mapping holding the FS URL under the ``'url'`` key.
    """
    filesystem = open_fs(conf['url'])
    for file_path in Walker().files(filesystem):
        yield Pyfsfile(filesystem, file_path)