def zip_last_n_files(directory: PosixPath = LOG_DIR, zip_file: str = ZIP_FILE, n: int = 3):
    """Zip the ``n`` most recently modified files in ``directory``.

    Each archive member is stored under its original name with the file's
    modification date (YYYY-MM-DD) inserted before the extension, e.g.
    ``server.log`` -> ``server_2021-01-31.log``.

    Args:
        directory: Directory whose files are considered.
        zip_file: Path of the zip archive to create (overwritten if present).
        n: Number of most-recent files to include.
    """
    # Only consider regular files: directories have mtimes too and would
    # otherwise be selected by the "most recent" criterion.
    files = [entry for entry in directory.iterdir() if entry.is_file()]
    # Newest first; keep exactly the top n (the original mtime-membership
    # test could pick more than n files when timestamps collided).
    files.sort(key=lambda entry: entry.stat().st_mtime, reverse=True)
    with ZipFile(zip_file, 'w') as archive:
        for entry in files[:n]:
            dt_str = datetime.fromtimestamp(entry.stat().st_mtime).strftime('%Y-%m-%d')
            # Rename inside the archive only. The original code renamed on
            # disk with Path(new_name).replace(), and new_name had no
            # directory component — it destructively moved the log files
            # into the current working directory.
            arcname = f'{entry.stem}_{dt_str}{entry.suffix}'
            archive.write(entry, arcname=arcname)
def get_matching_files(directory: PosixPath, filter_str: str) -> list:
    """Get all file names in "directory" and (case insensitive) match the
    ones that exactly match "filter_str"

    In case there is no exact match, return closely matching files.
    If there are no closely matching files either, return an empty list.
    (Return file names, not full paths).

    For example:

    d = Path('.')
    files in dir: bite1 test output

    get_matching_files(d, 'bite1') => ['bite1']
    get_matching_files(d, 'Bite') => ['bite1']
    get_matching_files(d, 'pybites') => ['bite1']
    get_matching_files(d, 'test') => ['test']
    get_matching_files(d, 'test2') => ['test']
    get_matching_files(d, 'output') => ['output']
    get_matching_files(d, 'o$tput') => ['output']
    get_matching_files(d, 'nonsense') => []
    """
    names = [entry.name for entry in directory.iterdir()]
    # Case-insensitive *exact* matches take priority, as documented.
    # The original implementation skipped this step entirely, so e.g.
    # 'TEST' fell through to case-sensitive fuzzy matching and missed.
    wanted = filter_str.lower()
    exact = [name for name in names if name.lower() == wanted]
    if exact:
        return exact
    # difflib already returns [] when nothing is close; no `or []` needed.
    return difflib.get_close_matches(filter_str, names)
def _check_folder(path: PosixPath) -> None:
    """Recursively collect video files under ``path`` into ``videos_raw``.

    An entry is collected when its lower-cased suffix is in the
    module-level ``suffixes`` collection; results accumulate in the
    module-level ``videos_raw`` set.
    """
    for entry in path.iterdir():
        if entry.is_dir():
            _check_folder(entry)
        # Guard with is_file(): without it a *directory* whose name ends in
        # a video suffix would be added as well. The companion subtitle
        # walker in this file applies the same is_file() check.
        if entry.is_file() and entry.suffix.lower() in suffixes:
            videos_raw.add(entry)
def _check_folder(path: PosixPath) -> None:
    """Walk ``path`` recursively, skipping ``subtitles_folder``, and add
    every regular file whose lower-cased suffix is in ``suffixes`` to the
    module-level ``subtitles`` set."""
    for entry in path.iterdir():
        if entry.is_dir():
            # Never descend into the subtitles output folder itself.
            if entry != subtitles_folder:
                _check_folder(entry)
            continue
        if entry.is_file() and entry.suffix.lower() in suffixes:
            subtitles.add(entry)
def generate_examples(no_examples: int,
                      idg: ImageDataGenerator,
                      directory: pathlib.PosixPath,
                      seed: Optional[int] = None,
                      **kwargs: Any) -> Dict[str, np.ndarray]:
    """Generate examples of pre-processed images.

    Args:
        no_examples: Number of example images to draw per class.
        idg: Configured Keras ``ImageDataGenerator``.
        directory: Dataset root; each class lives in its own subdirectory.
        seed: Optional seed forwarded to ``flow_from_directory``.
        **kwargs: Extra keyword arguments for ``flow_from_directory``.

    Returns:
        Mapping of class name to one batch of ``no_examples`` images.
    """
    # Class names must come from *subdirectories* only. Listing every
    # entry (as before) would treat stray files such as .DS_Store as
    # classes and break flow_from_directory.
    cls_names = [d.name for d in directory.iterdir() if d.is_dir()]
    # One generator per class, batch size equal to no_examples; we pull a
    # single batch from each.
    img_generators: Dict[str, np.ndarray] = {}
    for cls in cls_names:
        flow_img = idg.flow_from_directory(directory=directory,
                                           classes=[cls],
                                           class_mode=None,
                                           batch_size=no_examples,
                                           seed=seed,
                                           **kwargs)
        img_generators[cls] = next(flow_img)
    return img_generators
def process_list_files_request(self, request): """ Process LIST_FILES request. The request is expected to contain the directory for which the server must list files for. The possible responses are. * ACCEPTED with FILES_LISTED and the list of files, if the listing was successful * REFUSED with NOT_A_DIRECTORY if the list files directory doesn't exists Other responsed include ERROR with BAD_REQUEST if the request is imporperly formatted, or ERROR with UNKNOWN ERROR if any other error occured during the reading of the directory. """ # extract list directory from request, raise bad request error # if something goes wrong try: assert isinstance(request, tuple) assert len(request) == 2 directory = PosixPath(request[1]) except Exception: response = make_bad_request_error() self.socket.send_pyobj(response) return # normalize the directory (later it can be combined with the # root directory) directory = normalize_directory(directory) # combine the list directory with the root directory directory = self.root_directory / directory # if the directory doesn't refer to an actual directory, send # not a directory refused resonse if not directory.exists() or directory.is_file(): response = make_not_a_directory_response() self.socket.send_pyobj(response) return # build the list of files of the given directory, with files # properties files_list = {} for _file in directory.iterdir(): name = _file.name is_directory = _file.is_dir() size = _file.stat().st_size if not is_directory else 0 last_accessed = _file.stat().st_atime files_list[name] = (is_directory, size, last_accessed) # send list file accepted response with list of files response = make_files_listed_response(files_list) self.socket.send_pyobj(response)
def test_pt_run_dir(pt_run_dir: pathlib.PosixPath):
    """The run directory must contain every expected output file."""
    expected = {
        n_gro_o1, n_gro_o2,
        n_log_o1, n_log_o2,
        n_edr_o1, n_edr_o2,
        n_trr_o1, n_trr_o2,
    }
    present = {entry.name for entry in pt_run_dir.iterdir()}
    # Same check as `expected - present == set()`: nothing may be missing.
    assert expected.issubset(present)
def vcopy(source, destination):
    '''Copy files to the specified destination.

    Every regular, non-hidden file directly inside ``source`` is copied
    into ``destination``. The destination folder is created root-owned
    with mode 0755 when missing; files already present there are left
    untouched, and each newly copied file is chmod'ed 0755 and chown'ed
    to root.
    '''
    path_source = PosixPath(source)
    path_destination = PosixPath(destination)
    # Creating the destination folder if it does not exist
    if not path_destination.exists() or not path_destination.is_dir():
        path_destination.mkdir(mode=0o755)
        chown(path_destination, user=0, group=0)
    # Getting the names of the files to copy. Loop variables renamed from
    # the original `object`/`file`, which shadowed builtins.
    filenames = []
    for entry in path_source.iterdir():
        if entry.is_file():
            filename = str(entry.relative_to(path_source))
            # startswith is safe on any string, unlike filename[0].
            if not filename.startswith('.'):
                filenames.append(filename)
    # Copying files that are not already present at the destination
    for filename in filenames:
        path_file_source = path_source / filename
        path_file_destination = path_destination / filename
        if not path_file_destination.exists() or not path_file_destination.is_file():
            copy(path_file_source, path_file_destination)
            path_file_destination.chmod(0o755)
            chown(path_file_destination, user=0, group=0)
def get_matching_files(directory: PosixPath, filter_str: str) -> list:
    """Return file names in ``directory`` matching ``filter_str``.

    Matching is case-insensitive. An exact (case-insensitive) hit wins
    and is returned alone; otherwise closely matching file names are
    returned, and an empty list when nothing is close. File names, not
    full paths, are returned.

    Examples (dir contains bite1, test, output)::

        get_matching_files(d, 'bite1')    => ['bite1']
        get_matching_files(d, 'Bite')     => ['bite1']
        get_matching_files(d, 'test2')    => ['test']
        get_matching_files(d, 'o$tput')   => ['output']
        get_matching_files(d, 'nonsense') => []
    """
    # Map lower-cased name -> original entry, regular files only.
    lookup = {}
    for entry in directory.iterdir():
        if entry.is_file():
            lookup[entry.name.lower()] = entry
    key = filter_str.lower()
    if key in lookup:
        return [lookup[key].name]
    # No exact hit: fall back to fuzzy matching over the lower-cased keys.
    close = difflib.get_close_matches(filter_str, lookup.keys())
    return [lookup[name].name for name in close]
def _iterate_folder(folder: PosixPath) -> None: for _item in folder.iterdir(): if _item.is_dir(): os.chmod(str(_item), 0o745) _iterate_folder(_item) if _item.is_file(): os.chmod(str(_item), 0o644)
def _get_vtt_files_in_folder(folder: PosixPath) -> List[PosixPath]: vtt_files: List[PosixPath] = [] for _file in folder.iterdir(): if _file.is_file() and _file.suffix.lower() == ".vtt": vtt_files.append(_file) vtt_files.sort() return vtt_files
def get_result_files(path: PosixPath, name_pattern: str = 'log_res') -> List[PosixPath]:
    """Return the entries of ``path`` whose names start with ``name_pattern``.

    Args:
        path (PosixPath): Directory to scan.
        name_pattern (str, optional): Required name prefix. Defaults to
            'log_res'.

    Returns:
        List[PosixPath]: Matching entries as PosixPath objects.
    """
    matching = []
    for entry in path.iterdir():
        if entry.name.startswith(name_pattern):
            matching.append(entry)
    return matching
def search_dir(d: PosixPath, basepath, files, extension):
    """Recursively search ``d`` for repo files.

    An entry is reported (by its path relative to ``basepath``) when that
    relative path appears in ``files`` and its suffix equals ``extension``.
    """
    found = []
    for entry in d.iterdir():
        # Strip the base prefix plus the leading separator.
        rel = str(entry).replace(basepath, '')[1:]
        if entry.is_dir():
            found.extend(search_dir(entry, basepath, files, extension))
        if rel in files and entry.suffix == extension:
            found.append(rel)
    return found
def run(self):
    """Worker loop: pop task tuples off ``self.task_q`` and dispatch them.

    Tasks have the shape ``(VERB, arg1[, arg2])``. UPLOAD and DOWNLOAD
    recurse over directories by re-queueing one task per child; EXIT
    closes the client and terminates the loop. Unknown verbs are logged
    and recorded in ``TestClient.ERRORS``.
    """
    # Abort immediately when the user lacks system-management rights.
    if not self.user_authorize('system', 'manage'):
        return
    while True:
        task = self.task_q.get()
        logging.info(task)
        if task[0] == 'UPLOAD':
            p = PosixPath(task[1])
            if p.is_dir():
                # Directory upload: create the remote dir, then queue one
                # UPLOAD task per child (sorted for deterministic order).
                target_dir = os.path.join(task[2], p.name)
                if self.do_mkdir(p.name, task[2]):
                    children = [d for d in p.iterdir()]
                    children.sort()
                    for f in children:
                        self.task_q.put(['UPLOAD', str(f), target_dir])
            else:
                self.do_upload(str(p), task[2])
        elif task[0] == 'DOWNLOAD':
            R = self._stat(task[1])
            if R["fileType"] == 1:  # dir
                # An empty remote name denotes the root; use "ROOT" locally.
                p = PosixPath(task[2]).joinpath(R["name"] == "" and "ROOT" or R["name"])
                p.mkdir()
                target_dir = os.path.join(task[2], p.name)
                # Queue one DOWNLOAD per remote child.
                for r in self._stat2(task[1]):
                    self.task_q.put(['DOWNLOAD', r["path"], target_dir])
            else:
                self.do_download(task[1], task[2])
        elif task[0] == 'MKDIR':
            self.do_mkdir(os.path.basename(task[1]), os.path.dirname(task[1]))
        elif task[0] == 'STAT':
            self.do_stat(task[1])
        elif task[0] == 'LS':
            self.do_ls(task[1])
        elif task[0] == 'LS-R':
            self.do_ls_r(task[1])
        elif task[0] == 'RENAME':
            self.do_rename(task[1], task[2])
        elif task[0] == 'REMOVE':
            self.do_delete(task[1])
        elif task[0] == 'EXIT':
            # Mark the EXIT task done before returning so queue joins finish.
            self.close()
            logging.info("Exit.")
            self.task_q.task_done()
            return
        else:
            msg = "Invalid task: %s" % str(task)
            logging.error(msg)
            TestClient.ERRORS.append(msg)
        self.task_q.task_done()
def convertAllAudioMatToWav(path: pathlib.PosixPath):
    """Convert all MAT audio files found directly in ``path`` to WAV.

    A file qualifies when its name contains "_A" and ends with "_OMNI",
    "_BF" or "_BIN" followed by ".MAT" or ".mat".
    """
    # The dot before the suffix is escaped: the original pattern's bare
    # '.' matched *any* character, so e.g. "x_A_OMNIxMAT" also converted.
    pattern = re.compile(r"[\w\d\_\-]+_A[\w\d\_\-]+_(?:OMNI|BIN|BF)\.(?:MAT|mat)$")
    generatorMatFile = (matFile for matFile in path.iterdir()
                        if pattern.search(matFile.name))
    for matFile in generatorMatFile:
        matToWav(matFile)
def zip_last_n_files(directory: PosixPath = LOG_DIR, zip_file: str = ZIP_FILE, n: int = 3):
    """Zip the ``n`` most recently modified files in ``directory``.

    Each member is stored under its original name with the modification
    date (YYYY-MM-DD) inserted before the extension.

    Args:
        directory: Directory whose regular files are considered.
        zip_file: Path of the zip archive to create (overwritten if present).
        n: Number of most-recent files to include.
    """
    files = [entry for entry in directory.iterdir() if entry.is_file()]
    files.sort(key=lambda entry: entry.lstat().st_mtime, reverse=True)
    # `with` guarantees the archive is closed even if write() raises;
    # `archive` also avoids shadowing the builtin `zip`.
    with ZipFile(zip_file, mode='w') as archive:
        for entry in files[:n]:
            modified_date = time.strftime('%Y-%m-%d',
                                          time.localtime(entry.lstat().st_mtime))
            # Insert the date before the *last* suffix only. The original
            # name.replace('.', ...) replaced every dot, corrupting
            # multi-dot names such as a.tar.gz.
            arcname = f'{entry.stem}_{modified_date}{entry.suffix}'
            archive.write(entry.absolute(), arcname=arcname)
def init_hci(iface: str = 'hci0'):
    """Bring up and reset the given HCI interface to a quiet baseline.

    Unblocks rfkill, brings the interface up, restarts bluetoothd,
    disables inquiry/scanning/advertising, clears event filters, and
    removes the bluetoothd cache for the local adapter address.
    """
    # `hciconfig <hci> up` requires that rfkill be unblocked first.
    subprocess.check_output('rfkill unblock %d' % find_rfkill_devid(iface), stderr=STDOUT, timeout=5, shell=True)
    subprocess.check_output('hciconfig {} up'.format(iface), stderr=STDOUT, timeout=5, shell=True)
    subprocess.check_output('systemctl restart bluetooth.service', stderr=STDOUT, timeout=5, shell=True)
    hci = HCI(iface)
    # When sending the HCI commands below, if this exception appears:
    #     BlockingIOError: [Errno 11] Resource temporarily unavailable
    # the HCI socket may have been set to non-blocking mode.
    hci.inquiry_cancel()
    hci.exit_periodic_inquiry_mode()
    hci.write_scan_enable()  # No scan enabled
    event_params = hci.le_set_advertising_enable()  # Advertising is disabled
    if event_params['Status'] != 0x00:
        # Best-effort: a non-zero status from
        # HCI_LE_Set_Advertising_Enable is deliberately ignored.
        pass
    try:
        hci.le_set_scan_enable({
            'LE_Scan_Enable': 0x00,  # Scanning disabled
            'Filter_Duplicates': 0x01  # Ignored
        })
    except RuntimeError as e:
        # Best-effort: failure to disable scanning is deliberately ignored.
        pass
    hci.set_event_filter({'Filter_Type': 0x00})  # Clear All Filters
    event_params = hci.read_bdaddr()
    if event_params['Status'] != 0:
        raise RuntimeError
    else:
        local_bd_addr = event_params['BD_ADDR'].upper()
    # Clear bluetoothd cache
    cache_path = PosixPath('/var/lib/bluetooth/') / local_bd_addr / 'cache'
    if cache_path.exists():
        for file in cache_path.iterdir():
            os.remove(file)
    hci.close()
def run(self):
    """Worker loop: pop task tuples off ``self.task_q`` and dispatch them.

    Tasks have the shape ``(VERB, arg1[, arg2])``. UPLOAD and DOWNLOAD
    recurse over directories by re-queueing one task per child; EXIT
    closes the client and terminates the loop. Unknown verbs are logged
    and recorded in ``TestClient.ERRORS``.
    """
    # Abort immediately when the user lacks system-management rights.
    if not self.user_authorize('system', 'manage'):
        return
    while True:
        task = self.task_q.get()
        logging.info(task)
        if task[0] == 'UPLOAD':
            p = PosixPath(task[1])
            if p.is_dir():
                # Directory upload: create the remote dir, then queue one
                # UPLOAD task per child (sorted for deterministic order).
                target_dir = os.path.join(task[2], p.name)
                if self.do_mkdir(p.name, task[2]):
                    children = [d for d in p.iterdir()]
                    children.sort()
                    for f in children:
                        self.task_q.put(['UPLOAD', str(f), target_dir])
            else:
                self.do_upload(str(p), task[2])
        elif task[0] == 'DOWNLOAD':
            R = self._stat(task[1])
            if R["fileType"]==1:  # dir
                # An empty remote name denotes the root; use "ROOT" locally.
                p = PosixPath(task[2]).joinpath(R["name"]=="" and "ROOT" or R["name"])
                p.mkdir()
                target_dir = os.path.join(task[2], p.name)
                # Queue one DOWNLOAD per remote child.
                for r in self._stat2(task[1]):
                    self.task_q.put(['DOWNLOAD', r["path"], target_dir])
            else:
                self.do_download(task[1], task[2])
        elif task[0] == 'MKDIR':
            self.do_mkdir(os.path.basename(task[1]), os.path.dirname(task[1]))
        elif task[0] == 'STAT':
            self.do_stat(task[1])
        elif task[0] == 'LS':
            self.do_ls(task[1])
        elif task[0] == 'LS-R':
            self.do_ls_r(task[1])
        elif task[0] == 'RENAME':
            self.do_rename(task[1], task[2])
        elif task[0] == 'REMOVE':
            self.do_delete(task[1])
        elif task[0] == 'EXIT':
            # Mark the EXIT task done before returning so queue joins finish.
            self.close()
            logging.info("Exit.")
            self.task_q.task_done()
            return
        else:
            msg = "Invalid task: %s" % str(task)
            logging.error(msg)
            TestClient.ERRORS.append(msg)
        self.task_q.task_done()
def init_hci(iface='hci0'):
    """Bring up and reset the given HCI interface to a quiet baseline.

    Unblocks rfkill, brings the interface up (exiting the process on
    failure), disables inquiry/scanning/advertising, clears event
    filters, and removes the bluetoothd cache for the local adapter
    address.
    """
    hci = HCI(iface)
    # rfkill must be unblocked before the interface can be brought up.
    exitcode, output = subprocess.getstatusoutput('rfkill unblock %d' % find_rfkill_devid(iface))
    if exitcode != 0:
        logger.error('rfkill: ' + output)
        sys.exit(exitcode)
    exitcode, output = subprocess.getstatusoutput("hciconfig up " + iface)
    if exitcode != 0:
        logger.error("Failed to up " + iface)
        sys.exit(exitcode)
    else:
        # Give the interface a moment to settle after coming up.
        time.sleep(0.5)
    # hci.reset()
    hci.inquiry_cancel()
    hci.exit_periodic_inquiry_mode()
    hci.write_scan_enable()  # No scan enabled
    event_params = hci.le_set_advertising_enable()  # Advertising is disabled
    if event_params['Status'] != 0x00:
        # Best-effort: a non-zero status from
        # HCI_LE_Set_Advertising_Enable is deliberately ignored.
        pass
    try:
        hci.le_set_scan_enable({
            'LE_Scan_Enable': 0x00,  # Scanning disabled
            'Filter_Duplicates': 0x01  # Ignored
        })
    except RuntimeError as e:
        # Best-effort: failure to disable scanning is deliberately ignored.
        pass
    hci.set_event_filter({'Filter_Type': 0x00})  # Clear All Filters
    event_params = hci.read_bdaddr()
    if event_params['Status'] != 0:
        raise RuntimeError
    else:
        local_bd_addr = event_params['BD_ADDR'].upper()
    # Clear bluetoothd cache
    cache_path = PosixPath('/var/lib/bluetooth/') / local_bd_addr / 'cache'
    if cache_path.exists():
        for file in cache_path.iterdir():
            os.remove(file)
def iter_dir(self, folder: PosixPath, parent: Dict[str, Any], relative: PosixPath) -> None:
    """Recursively populate ``parent["files"]`` with item dicts for
    ``folder``: files are appended first, then each sub-folder's dict is
    appended and recursed into."""
    pending_dirs: Set = set()
    for entry in folder.iterdir():
        if entry.is_dir():
            # Defer directories so files always precede folders.
            pending_dirs.add(entry)
        elif entry.is_file():
            parent["files"].append(
                self.get_item_dict(item=entry, relative=relative))
    for entry in pending_dirs:
        node = self.get_item_dict(item=entry, relative=relative)
        parent["files"].append(node)
        self.iter_dir(folder=entry, parent=node, relative=relative)
def get_matching_files1(directory: PosixPath, filter_str: str) -> list:
    """Return entry names in ``directory`` equal to ``filter_str``
    ignoring case; when there is no exact hit, fall back to difflib
    close matches (possibly empty)."""
    names = [entry.name for entry in directory.iterdir()]
    wanted = filter_str.lower()
    exact = [name for name in names if name.lower() == wanted]
    if exact:
        return exact
    return get_close_matches(filter_str, names)
#!/usr/bin/env python3
"""Print, space-separated, the integration-test targets that may run.

A target is skipped when it has no ``aliases`` file or when any line of
that file carries one of the ignore markers.
"""
from pathlib import PosixPath

ignore = set(["unsupported", "disabled", "unstable", "hidden"])

targets_to_test = []
targets_dir = PosixPath("tests/integration/targets")
for target in targets_dir.iterdir():
    aliases_file = target / "aliases"
    if not aliases_file.is_file():
        continue
    alias_lines = set(aliases_file.read_text().split("\n"))
    # Skip the target when any ignore marker appears among its aliases.
    if alias_lines & ignore:
        continue
    targets_to_test.append(target.stem)

print(" ".join(targets_to_test))
def get_migration_files_filtered(directory: PosixPath) -> List[str]:
    """Return the names of the regular ``.sql`` files (suffix matched
    case-insensitively) directly inside ``directory``."""
    names = []
    for entry in directory.iterdir():
        if entry.is_file() and entry.name.lower().endswith('.sql'):
            names.append(entry.name)
    return names
def delete_garbage_files(self):
    """Delete every file in ``vid_sources`` that is not one of the
    protected video parts recorded in ``self.vid_parts``."""
    protected = set(self.vid_parts.values())
    source_dir = PosixPath("vid_sources")
    for entry in source_dir.iterdir():
        # Compare the POSIX string form, matching how parts are stored.
        if entry.as_posix() not in protected:
            entry.unlink()
def run_stacks(args):
    """Given command line arguments generate stack plots

    For every region found in the fit's Histograms directory, draws the
    prefit stack plot (and optionally the postfit one) and saves it as a
    PDF under the output directory.

    Parameters
    ----------
    args : argparse.ArgumentParser
        NOTE(review): reads args.workspace, args.out_dir,
        args.skip_regions, args.do_postfit and args.shrink — looks like
        an argparse.Namespace rather than a parser; confirm at the caller.
    """
    samples = rexart.constants.samples
    # Output directory defaults to <workspace>/MPL; created unless ".".
    if args.out_dir is None:
        outd = f"{args.workspace}/MPL"
    else:
        outd = args.out_dir
    if outd != ".":
        PosixPath(outd).mkdir(parents=True, exist_ok=True)
    fit_name = PosixPath(args.workspace).stem
    hfiledir = PosixPath(f"{fit_name}/Histograms")
    regions = []
    # Optional regex used to drop regions from the run.
    if args.skip_regions is not None:
        skipregex = re.compile(args.skip_regions)
    else:
        skipregex = None
    # Region name is what sits between "<fit_name>_" and "_histos.root".
    for hfile in hfiledir.iterdir():
        if "_histos.root" in hfile.name:
            region = hfile.name.split("_histos.root")[0].split(
                f"{fit_name}_")[-1]
            if skipregex:
                if re.search(skipregex, region):
                    continue
            regions.append(region)
    for region in regions:
        raw_region, template_variable = split_region_str(region)
        # Prefit: fetch histograms, label axes from region metadata, plot.
        data, histograms, band = prefit_histograms(args, fit_name, region,
                                                   samples)
        data.unit = rexart.constants.region_meta[template_variable].unit
        data.mpl_title = rexart.constants.region_meta[template_variable].title
        fig, (ax, axr) = stackem(args, region, data, histograms,
                                 template_variable, band=band)
        out_name = f"{outd}/preFit_{region}.pdf"
        fig.savefig(out_name)
        # Close the figure to release matplotlib memory between regions.
        plt.close(fig)
        if args.shrink:
            shrink_pdf(out_name)
        log.info(f"Done with {region} prefit")
        if args.do_postfit:
            # Postfit: same data labels, new histograms/band, tighter
            # ratio-axis limits.
            histograms, band = postfit_histograms(args, fit_name, region,
                                                  samples)
            fig, (ax, axr) = stackem(args, region, data, histograms,
                                     template_variable, band=band)
            axr.set_ylim([0.925, 1.075])
            axr.set_yticks([0.95, 1.0, 1.05])
            out_name = f"{outd}/postFit_{region}.pdf"
            fig.savefig(out_name)
            plt.close(fig)
            if args.shrink:
                shrink_pdf(out_name)
            log.info(f"Done with {region} postfit")
def _delete_original_subtitles(subtitles_folder: PosixPath) -> None:
    """For every ``.srt`` file directly inside ``subtitles_folder``,
    record it in the meta file and then remove it from disk."""
    for entry in subtitles_folder.iterdir():
        if not entry.is_file():
            continue
        if entry.suffix.lower() == ".srt":
            # Record before deleting so the meta file sees the path.
            _create_and_write_to_meta_file(file_path=entry)
            os.remove(str(entry))
def _convert_srts_to_vtts_in_folder(subtitles_folder: PosixPath) -> None:
    """Convert every ``.srt`` file directly inside ``subtitles_folder``
    to WebVTT via ``_convert_srt_to_vtt``."""
    for entry in subtitles_folder.iterdir():
        if not entry.is_file():
            continue
        if entry.suffix.lower() == ".srt":
            _convert_srt_to_vtt(str(entry))