def list(self, path, keys=()):
    # List an FTP directory, restricted to ``self.allowed_dirs``.
    # NOTE(review): Python 2 syntax (``except ..., e``) -- this block cannot
    # run unmodified under Python 3.
    # NOTE(review): ``statResult`` is computed but never used and the function
    # has no final return of ``results`` -- this block looks truncated.
    #
    # Access rule: an empty path (the root) is always allowed; otherwise at
    # least one path segment must appear in ``self.allowed_dirs``.
    is_allowed = False
    if len(path) == 0:
        is_allowed = True
    else:
        for seg in path:
            if seg in self.allowed_dirs:
                is_allowed = True
                break
    results = []
    if is_allowed:
        # ``self._path`` presumably resolves the FTP path segments to a local
        # filesystem path object -- confirm against the class definition.
        path = self._path(path)
        if path.isdir():
            entries = path.listdir()
        else:
            # A single file is represented by a one-element [None] listing.
            entries = [None]
        for fName in entries:
            # Skip the single-file placeholder and hidden (dot-prefixed) names.
            if not fName or re.search('^\.', str(fName)):
                continue
            ent = []
            full_path = os.path.join(path.path, fName)
            # Directories are only listed when explicitly whitelisted.
            if not os.path.isdir(full_path) or fName in self.allowed_dirs:
                results.append((fName, ent))
            if keys:
                # NOTE(review): ``fName`` can no longer be None here (the
                # guard above ``continue``s on falsy names), so this else
                # branch looks unreachable.
                if fName is not None:
                    p = os.path.join(path.path, fName)
                else:
                    p = path.path
                try:
                    statResult = os.stat(p)
                except (IOError, OSError), e:
                    # Translate the errno into a Twisted FTP failure.
                    return ftp.errnoToFailure(e.errno, path)
                except:
                    return defer.fail()
def get_descriptors(self):
    """Load every descriptor file found in the project directory.

    Returns:
        dict: Maps each filename in ``self.__project_name`` to the object
        parsed from that file by ``load``.
    """
    descriptors = {}
    for filename in listdir(self.__project_name):
        # FIX: the original joined against ``dir_name``, an undefined /
        # unrelated name, while listing ``self.__project_name``; join against
        # the directory actually being listed.
        file_path = join(self.__project_name, filename)
        # ``with`` guarantees the handle is closed (the original leaked it).
        with open(file_path, 'r', encoding='utf-8') as handle:
            descriptors[filename] = load(handle)
    return descriptors
def diskUsage(path):
    """Return the total size in bytes of *path*, recursing into directories.

    Directory entries contribute their own ``getsize`` plus the recursive
    size of their contents.  Each visited child path is printed (kept from
    the original as a progress trace).

    Args:
        path: File or directory path to measure.

    Returns:
        int: Cumulative size in bytes.
    """
    import os
    size = os.path.getsize(path)
    if os.path.isdir(path):
        # FIX: the original called ``p.listdir`` -- ``os.path`` has no
        # ``listdir``; directory listing lives in ``os``.
        for item in os.listdir(path):
            childPath = os.path.join(path, item)
            # print() with a single argument behaves identically under
            # Python 2's print statement and Python 3.
            print(childPath)
            size += diskUsage(childPath)
    return size
def __init__(self, foldername, column_names_tuple=None):
    """Open (or create) a folder-backed table.

    Args:
        foldername: Directory holding the table's data files.
        column_names_tuple: Column names, used only when the folder holds
            no files yet and a fresh table must be initialised.
    """
    self.foldername = os.path.abspath(foldername)
    # Only regular files count.  Sorting makes files[-1] the
    # lexicographically last name -- presumably the most recent snapshot,
    # assuming sortable (e.g. timestamped) filenames; confirm with callers.
    files = sorted(
        f for f in listdir(self.foldername)
        if isfile(join(self.foldername, f))
    )
    if files:
        # FIX: the original indexed ``files[-1]`` *before* checking for
        # emptiness, raising IndexError on an empty folder.
        self._recover(files[-1])
    else:
        self._first_init(foldername, column_names_tuple)
def LoadGrads(filename):
    """Collect ``.grd`` file names under ``plots/grad_files/<filename>``.

    File names are split on spaces; the second-to-last token decides the
    side ('Home' or 'Away').  Names without at least two tokens are skipped.

    Args:
        filename: Sub-folder name under ``plots/grad_files``.

    Returns:
        pandas.DataFrame: Columns 'Home' and 'Away' holding the matching
        file names; the shorter column is padded with NaN.
    """
    folder = 'plots/grad_files/%s' % filename
    datafiles = [f for f in listdir(folder) if f.split('.')[-1] == 'grd']
    homedata = []
    awaydata = []
    for fname in datafiles:
        parts = fname.split(' ')
        if len(parts) < 2:
            # No side marker in the name; skip instead of IndexError-ing.
            continue
        side = parts[-2]
        # FIX: the original used ``extend(file)``, which splatted the file
        # name into individual characters; ``append`` keeps the whole name.
        if side == 'Home':
            homedata.append(fname)
        elif side == 'Away':
            awaydata.append(fname)
    # FIX: assigning array columns onto a zero-row DataFrame raises
    # ValueError in pandas; building from Series aligns and NaN-pads instead.
    return pd.DataFrame({'Home': pd.Series(homedata),
                         'Away': pd.Series(awaydata)})
def doCopyDir(self, result):
    """Copy the directory selected in SOURCELIST into TARGETLIST.

    Args:
        result: Confirmation-dialog result; ``result[1]`` truthy means the
            user confirmed.  ``None`` (or a falsy ``result[1]``) aborts
            silently.
    """
    if result is None or not result[1]:
        return
    # Transient "Copying ..." message box (auto-closes after 1 s).
    self.session.openWithCallback(self.callback, MessageBox, _('Copying ...'), type=1, timeout=1)
    symlinks = False  # links are copied as regular files/dirs, not re-created
    src_dir = self.SOURCELIST.getCurrentDirectory()
    src = self.SOURCELIST.getFilename()
    # Re-root the selected path under the target directory.
    dst = self.TARGETLIST.getCurrentDirectory() + src[len(src_dir):]
    names = os.listdir(src)
    try:
        os.makedirs(dst)
    except OSError:
        # Destination already exists (or cannot be created); keep the
        # original's best-effort behaviour and proceed.
        pass
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                os.symlink(os.readlink(srcname), dstname)
            elif os.path.isdir(srcname):
                shutil.copytree(srcname, dstname, symlinks)
            else:
                shutil.copy2(srcname, dstname)
        except (IOError, os.error) as why:
            # FIX: the original recorded ``str(None)``, discarding the real
            # failure reason; keep the caught exception's message.
            errors.append((srcname, dstname, str(why)))
    try:
        # Best-effort copy of permission bits / timestamps onto dst.
        copystat(src, dst)
    except OSError:
        pass
    self.doRefresh()
def gen(self, path):
    """Yield the entries beneath *path*, recursing into selected subtrees.

    Directories accepted by ``self.rec`` (all of them when ``self.rec`` is
    None) are traversed recursively; entries passing ``self.fil`` are
    yielded.  ``self.breadthfirst`` decides whether subdirectories are
    visited before or after the current level's entries; listing errors in
    ``self.ignore`` silently end the traversal of that node.
    """
    try:
        entries = path.listdir()
    except self.ignore:
        return
    recurse = self.rec
    subdirs = self.optsort(
        [entry for entry in entries
         if entry.check(dir=1) and (recurse is None or recurse(entry))])
    depth_first = not self.breadthfirst
    if depth_first:
        for child_dir in subdirs:
            for item in self.gen(child_dir):
                yield item
    for entry in self.optsort(entries):
        if self.fil is None or self.fil(entry):
            yield entry
    if not depth_first:
        for child_dir in subdirs:
            for item in self.gen(child_dir):
                yield item
def gen(self, path):
    """Generate the filtered entries below *path*.

    Subdirectories matching ``self.rec`` (or every directory when it is
    None) are recursed into; each entry accepted by ``self.fil`` is yielded.
    With ``self.breadthfirst`` set, children of subdirectories come after
    this level's entries, otherwise before.  Exceptions listed in
    ``self.ignore`` during listing abort this node quietly.
    """
    try:
        listing = path.listdir()
    except self.ignore:
        return
    predicate = self.rec
    directories = self.optsort([
        candidate for candidate in listing
        if candidate.check(dir=1)
        and (predicate is None or predicate(candidate))
    ])

    def _descend():
        # Walk every selected subdirectory through the public generator.
        for directory in directories:
            for produced in self.gen(directory):
                yield produced

    if not self.breadthfirst:
        for produced in _descend():
            yield produced
    for candidate in self.optsort(listing):
        if self.fil is None or self.fil(candidate):
            yield candidate
    if self.breadthfirst:
        for produced in _descend():
            yield produced
# Input dimension must be a multiple of 32 (network stride) and large
# enough for at least one downsampling step.
assert inp_dim % 32 == 0
assert inp_dim > 32

if CUDA:
    model.cuda()

# Evaluation mode: disables dropout and freezes batch-norm statistics,
# which improves inference accuracy.
model.eval()

# read_dir is a checkpoint to measure image-loading time.
read_dir = time.time()

# Detection phase: build the list of image paths.
try:
    # FIX: ``listdir`` lives in ``os``, not ``os.path`` -- the original
    # ``osp.listdir`` raised AttributeError.
    imlist = [osp.join(osp.realpath('.'), images, img) for img in os.listdir(images)]
except NotADirectoryError:
    # ``images`` names a single file rather than a directory.
    # FIX: the original ``ops.append(osp.realpath('.'), images)`` was a
    # typo for ``osp.join``.
    imlist = [osp.join(osp.realpath('.'), images)]
except FileNotFoundError:
    print("No file or directory with the name {}".format(images))
    exit()

# Create the detection output folder if it does not exist.
if not os.path.exists(args.det):
    os.makedirs(args.det)

# load_batch is a checkpoint to measure the time spent reading images.
load_batch = time.time()
# FIX: the original comprehension read an undefined ``x``; use ``img``.
loaded_ims = [cv2.imread(img) for img in imlist]
def rmtree(path, ignore_errors=False, rm_all=False, black_list=()):
    """Partial removal of a filesystem tree from a black list entry point
    and PMF-trees specific directory entry rules.

    For example, to remove a PMF model located at ``path`` with a
    configuration file name ``configuration``, one would use:

    .. code:: python

        rmtree(path, black_list=('metadata', configuration))

    Args:
        path (PathLike): A |Path| to a PMF |Model| to erase.
        ignore_errors (bool): Optional. Default to ``False``. If ``True``,
            any error arising when trying to delete or list elements will
            be silently swallowed.
        rm_all (bool): Optional. Default to ``False``. If ``True``, the
            entire filesystem tree descending from *path* will be deleted,
            independent on whether it looks PMF related.
        black_list (tuple): A tuple of filename (with or without
            extensions) to be deleted from *path*.

    Warnings:
        ``rm_all`` is to be used with caution, especially coupled with
        ``ignore_errors`` as it may lead to irreversible and
        indifferenciated data loss.
    """
    path = Path(path)
    # Split the black list by whether entries carry an extension: names with
    # an extension are matched against the element's last component, bare
    # names against ``element.filename``.
    # NOTE(review): ``.ext``, ``.filename`` and ``element[-1]`` indexing are
    # not pathlib -- this presumably uses a path.py-style Path; confirm
    # against the module's Path import.
    black_list_extension = tuple(
        str(element) for element in black_list if Path(element).ext)
    black_list_no_extension = tuple(
        str(element) for element in black_list if not Path(element).ext)
    try:
        for element in path.listdir():
            # Delete files from rm rule or optional black lists
            if (path / element).is_file():
                if rm_all:
                    os.remove(str(path / element))
                    continue
                if element.filename in black_list_no_extension:
                    os.remove(str(path / element))
                    continue
                if str(element[-1]) in black_list_extension:
                    os.remove(str(path / element))
                    continue
            # Enter directories with tree dependent black lists or rm rules
            elif (path / element).is_dir():
                # If it is data, foreign elements might be there so we
                # specifically look for build_parameters
                if element[-1] == 'data':
                    rmtree(path / element,
                           ignore_errors=ignore_errors,
                           rm_all=rm_all,
                           black_list=('build_parameters', ))
                # If it is checkpoints, only checkpoints should be there,
                # we delete everything.
                elif element[-1] == 'checkpoints':
                    rmtree(path / element,
                           ignore_errors=ignore_errors,
                           rm_all=True)
                # If it is initialisation, we need to check the
                # initialisation type.
                elif element[-1] == 'initialisation':
                    # If it is PMF initialisation, we use the typical PMF
                    # entry point with metadata and configuration
                    if (path / element / 'metadata.yaml').is_file() or \
                            (path / element / 'metadata.yml').is_file():
                        # Catch obvious naming mistake
                        metadata = None
                        if (path / element / 'metadata.yaml').is_file():
                            with open(str(path / element / 'metadata.yaml'), 'r') as f:
                                metadata = yaml.safe_load(f)
                        elif (path / element / 'metadata.yml').is_file():
                            with open(str(path / element / 'metadata.yml'), 'r') as f:
                                metadata = yaml.safe_load(f)
                        configuration_path = metadata['model'][
                            'configuration']['path']
                        rmtree(path / element,
                               ignore_errors=ignore_errors,
                               rm_all=rm_all,
                               black_list=('metadata', configuration_path))
                    # If it is not PMF initialisation, it is either:
                    # * A file initialisation
                    # * No initialisation
                    # * A PMF with no metadata to be found, which is as
                    #   worrying as it is impossible to work with
                    # In all case above, the remedy is simple: we burn it all !
                    else:
                        rmtree(path / element,
                               ignore_errors=ignore_errors,
                               rm_all=True)
        # Delete path if it is empty
        if not list(path.listdir()):
            os.rmdir(str(path))
    except OSError as e:
        if ignore_errors:
            pass
        else:
            raise e