def addpackage(sitedir, pthfile, known_dirs=None):
    """
    Wrapper for site.addpackage

    Try and work out which directories are added by
    the .pth and add them to the known_dirs set
    """
    known_dirs = set(known_dirs or [])
    with open(join(sitedir, pthfile)) as f:
        for n, line in enumerate(f):
            if line.startswith("#"):
                continue
            line = line.rstrip()
            if line:
                if line.startswith(("import ", "import\t")):
                    exec(line, globals(), locals())
                    continue
                else:
                    p_rel = join(sitedir, line)
                    p_abs = abspath(line)
                    # added_dirs is a module-level set maintained alongside known_dirs
                    if isdir(p_rel):
                        os.environ['PATH'] += env_t(os.pathsep + p_rel)
                        sys.path.append(p_rel)
                        added_dirs.add(p_rel)
                    elif isdir(p_abs):
                        os.environ['PATH'] += env_t(os.pathsep + p_abs)
                        sys.path.append(p_abs)
                        added_dirs.add(p_abs)

    if isfile(pthfile):
        site.addpackage(sitedir, pthfile, known_dirs)
    else:
        logging.debug("pth file '%s' not found", pthfile)
def del_prefix_2_subfolders(self, dir_path, prefix='tmp_'):
    '''Rename all the folders inside a path by removing the prefix
    from the subfolder names. For example, given my_folder:
        tmp_10 tmp_20 tmp_30
    the result will be my_folder:
        10 20 30
    '''
    print('\n Rename all subfolders inside: {}'.format(dir_path))
    prefix_len = len(prefix)
    for f in listdir(dir_path):
        src = join(dir_path, f)
        if isdir(src):
            try:
                print(src)
                if f.startswith(prefix):
                    new_file_name = f[prefix_len:]
                    tar = join(dir_path, new_file_name)
                    os.rename(src, tar)
            except Exception as e:
                print('error: {}'.format(e))
def rename_subfolders(self, dir_path, delta):
    '''Rename all the folders inside a path. The folder names are
    integers, for example my_folder:
        1 2 3
    The new names get a "tmp_" prefix and the integer is increased
    by delta. If delta = 10, the result will be my_folder:
        tmp_11 tmp_12 tmp_13
    '''
    print('\n Rename all subfolders inside: {}'.format(dir_path))
    for f in listdir(dir_path):
        src = join(dir_path, f)
        if isdir(src):
            try:
                print(src)
                n = str(int(f) + delta)
                tar = join(dir_path, 'tmp_' + n)
                os.rename(src, tar)
            except Exception as e:
                print('error: {}'.format(e))
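# A minimal round-trip sketch for the two rename helpers above, assuming
# they live on a class (`FolderTool` is a hypothetical name) and that
# /tmp/my_folder contains integer-named subfolders such as 1, 2, 3.
tool = FolderTool()
tool.rename_subfolders('/tmp/my_folder', delta=10)   # 1 -> tmp_11, 2 -> tmp_12, ...
tool.del_prefix_2_subfolders('/tmp/my_folder')       # tmp_11 -> 11, tmp_12 -> 12, ...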
def get_subfolders(self, dir_path):
    print('\n List folder: {}'.format(dir_path))
    for file_name in listdir(dir_path):
        file_path = join(dir_path, file_name)
        if isdir(file_path):
            print(file_path)
def get_package_dir():
    """
    Get the directory in which packages are installed if not overridden
    on project level.
    """
    envpath = getenv('NOTEX_PACKAGE_DIR', '')
    if isdir(envpath):
        return envpath
    defpath = user_data_dir('ntp')
    if not isdir(defpath):
        stderr.write((
            'package path not set in NOTEX_PACKAGE_DIR and default location does not exist at "{0:s}"; '
            'it will be created (this is normal if you\'re running for the first time)'
        ).format(defpath))
        makedirs(defpath)
    return defpath
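# A minimal usage sketch for get_package_dir, assuming it is importable;
# '/opt/ntp-packages' is a hypothetical path that must already exist for
# the override to take effect.
import os

os.environ['NOTEX_PACKAGE_DIR'] = '/opt/ntp-packages'
print(get_package_dir())  # '/opt/ntp-packages' if that directory exists,
                          # otherwise the per-user default (created on demand)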
def clean_log_history(dir):
    # Recursively remove every file under `dir`, keeping the
    # (now empty) directory tree in place.
    for f1 in os.listdir(dir):
        path = os.path.join(dir, f1)
        if isdir(path):
            clean_log_history(path)
        else:
            os.remove(path)
def load_music_lists(music_path):
    music_list_dir = os.listdir(music_path)
    return [
        MusicLoader.list_all_files(join(music_path, music_list))
        for music_list in music_list_dir
        if isdir(join(music_path, music_list))
    ]
def add_resource(mt_file, ref, cache):
    """Add a Resources entry, downloading and intuiting the file,
    replacing entries with the same reference"""
    from metatab.util import enumerate_contents

    if isinstance(mt_file, MetatabDoc):
        doc = mt_file
    else:
        doc = MetatabDoc(mt_file)

    if 'Resources' not in doc:
        doc.new_section('Resources')

    doc['Resources'].args = [
        e for e in set(doc['Resources'].args + ['Name', 'StartLine', 'HeaderLines', 'Encoding']) if e
    ]

    seen_names = set()

    if isdir(ref):
        for f in find_files(ref, DATA_FORMATS):
            if f.endswith(DEFAULT_METATAB_FILE):
                continue
            if doc.find_first('Root.Datafile', value=f):
                prt("Datafile exists for '{}', ignoring".format(f))
            else:
                add_single_resource(doc, f, cache=cache, seen_names=seen_names)
    else:
        for c in enumerate_contents(ref, cache=cache, callback=prt):
            add_single_resource(doc, c.rebuild_url(), cache=cache, seen_names=seen_names)

    write_doc(doc, mt_file)
def setUp(self):
    self.testConfPath = os.path.join(os.environ["HOME"], ".cpc/test")
    if isdir(self.testConfPath):
        shutil.rmtree(self.testConfPath)
    self.serverConfs = dict()
    os.makedirs(self.testConfPath)
def getPaths(basePath, pathsVector):
    thisDirPaths = [basePath + "/" + x for x in listdir(basePath)
                    if isdir(basePath + "/" + x)]
    for path in thisDirPaths:
        pathsVector.append(path)
        getPaths(path, pathsVector)
    return pathsVector
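# A minimal usage sketch for the recursive accumulator above; '/srv/data'
# is a hypothetical root. The same list is passed down and returned, so
# start with an empty vector.
all_dirs = getPaths('/srv/data', [])
for d in all_dirs:
    print(d)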
def do_process(self, request, response, abspath):
    if isdir(abspath):
        if not request.path.endswith('/'):
            response.redirect(request.path + '/')
        else:
            self.process_dir(request, response, abspath)
    else:
        self.process_file(request, response, abspath)
def ensure_directory(directory):
    """
    Create the directory if it does not exist

    :param str directory: Directory to create
    :return None:
    """
    if not isdir(directory):
        makedirs(directory)
def listdir(path):
    # Recursively build a nested list: each directory contributes the
    # nested listings of its entries followed by its own path; a plain
    # file contributes just [path]. Note this shadows os.listdir, which
    # is why entries are read via os.listdir explicitly.
    listfile = []
    if isdir(path):
        lf = os.listdir(path)
        for i in lf:
            listfile.append(listdir(os.path.join(path, i)))
    listfile.append(path)
    return listfile
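# A small sketch of the nested structure, assuming a hypothetical tree
# /tmp/demo containing a.txt and sub/b.txt (os.listdir order may vary):
tree = listdir('/tmp/demo')
# -> [['/tmp/demo/a.txt'],
#     [['/tmp/demo/sub/b.txt'], '/tmp/demo/sub'],
#     '/tmp/demo']
# Every directory appears after its children (post-order).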
def delete_cache():
    for f in os.listdir(SERIAL_DIR):
        path = os.path.join(SERIAL_DIR, f)
        if isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)
    print("Serialized data folder purged!")
def index_image_folder(folder_path):
    for f in listdir(folder_path):
        path = join(folder_path, f)
        if isfile(path):
            is_image = any(path.endswith("." + ext) for ext in config.IMAGE_EXTENSIONS)
            if is_image:  # check extension, only index images
                index_image(folder_path=folder_path, file_name=f)
        elif isdir(path):
            index_image_folder(path)
def __init__(self):
    assert isinstance(self.dumpRequestsSize, int), 'Invalid dump size %s' % self.dumpRequestsSize
    assert isinstance(self.dumpRequestsPath, str), 'Invalid dump path %s' % self.dumpRequestsPath
    self.dumpRequestsPath = normOSPath(self.dumpRequestsPath)
    if not os.path.exists(self.dumpRequestsPath):
        os.makedirs(self.dumpRequestsPath)
    assert isdir(self.dumpRequestsPath) and os.access(self.dumpRequestsPath, os.W_OK), \
        'Unable to access the dump directory %s' % self.dumpRequestsPath
    super().__init__()
    self._count = 0
def conan_builds(self, conan_reference):
    """ Returns a list of package ids from a local cache build folder """
    assert isinstance(conan_reference, ConanFileReference)
    builds_dir = self.builds(conan_reference)
    try:
        builds = [dirname for dirname in os.listdir(builds_dir)
                  if isdir(os.path.join(builds_dir, dirname))]
    except OSError:  # there isn't any build folder yet
        builds = []
    return builds
def conan_packages(self, conan_reference):
    """ Returns a list of package_id from a local cache package folder """
    assert isinstance(conan_reference, ConanFileReference)
    packages_dir = self.packages(conan_reference)
    try:
        packages = [dirname for dirname in os.listdir(packages_dir)
                    if isdir(os.path.join(packages_dir, dirname))]
    except OSError:  # there isn't any package folder yet
        packages = []
    return packages
def raise_error_if_no_such_directory(directory):
    """
    Raise a RuntimeError if the directory does not exist

    :param str directory: Directory to check
    :return None:
    """
    if not isdir(directory):
        raise RuntimeError(
            "Directory {} does not exist! Perhaps you meant to specify a "
            "different directory. Can't start the Vagrant.".format(directory))
def ignored(path, only_hidden=False):
    """Return True if path is in the ignore list.

    The ignore list includes hidden files and git- or annex-maintained
    folders. When only_hidden is set, only hidden files and folders are
    ignored, not git- or annex-maintained folders.
    """
    if isdir(opj(path, ".git")) and not only_hidden:
        return True
    return '.' == leaf_name(path)[0] or leaf_name(path) == 'index.html'
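# A minimal usage sketch, assuming `ignored`, `opj` (os.path.join) and
# `leaf_name` are in scope; the paths below are hypothetical.
ignored('/data/repo')                    # True if /data/repo/.git exists
ignored('/data/repo', only_hidden=True)  # False: the .git check is skipped
ignored('/data/.cache')                  # True: leaf name starts with '.'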
def __init__(self):
    assert isinstance(self.locale_dir_path, str), 'Invalid locale directory %s' % self.locale_dir_path
    assert isinstance(self.catalog_config, dict), 'Invalid catalog configurations %s' % self.catalog_config
    assert isinstance(self.write_po_config, dict), 'Invalid write PO configurations %s' % self.write_po_config
    assert isinstance(self.messageService, IMessageService), 'Invalid message service %s' % self.messageService
    assert isinstance(self.sourceService, ISourceService), 'Invalid source file service %s' % self.sourceService

    if not os.path.exists(self.locale_dir_path):
        os.makedirs(self.locale_dir_path)
    if not isdir(self.locale_dir_path) or not os.access(self.locale_dir_path, os.W_OK):
        raise IOError('Unable to access the locale directory %s' % self.locale_dir_path)
def init_path():
    """
    Add any new modules that are directories to the PATH
    """
    sitedirs = getsyssitepackages()
    for sitedir in sitedirs:
        env_path = os.environ['PATH'].split(os.pathsep)
        for module in allowed_modules:
            p = join(sitedir, module)
            if isdir(p) and p not in env_path:
                os.environ['PATH'] += env_t(os.pathsep + p)
def process_transcoding(job):
    # Unpack the (flac_file, flac_root_path, mp3_target_path) job tuple
    # explicitly: tuple parameters in the signature are Python 2-only syntax.
    flac_file, flac_root_path, mp3_target_path = job
    try:
        target_mp3_file = get_mp3_filename(mp3_target_path, flac_root_path, flac_file)
        if not isdir(dirname(target_mp3_file)):
            try:
                os.makedirs(dirname(target_mp3_file))
            except OSError:
                pass  # other thread might have been faster
        transcode(flac_file, target_mp3_file)
    except Exception as e:
        LOGGER.error('error during the transcoding of %r : %s' % (flac_file, e))
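# A minimal sketch of how a single-argument job signature like the one
# above is typically fed from a worker pool; the job tuples and paths
# are hypothetical.
from multiprocessing import Pool

jobs = [('/music/a.flac', '/music', '/mp3'),
        ('/music/b.flac', '/music', '/mp3')]
pool = Pool()
pool.map(process_transcoding, jobs)  # one (flac, root, target) tuple per call
pool.close()
pool.join()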
def scanDir(l, root, folder=None):
    if folder is not None:
        root = os.path.join(root, folder)
    for f in os.listdir(root):
        path = os.path.join(root, f)
        if isdir(path):
            scanDir(l, root, f)
        else:
            l.append({"filename": f,
                      "folder": folder,
                      "from_root": None if root == VIEW.showLocation
                                   else os.path.relpath(root, VIEW.showLocation)})
def __scan_dlls(self):
    # Scan DLLs separately since we need to build them as their own thing
    dll_dirs = [dir for dir in glob.glob("src/dlls/*") if isdir(dir)]
    to_compile: "set[str]" = set()

    for dir in dll_dirs:
        dir_parts = dir.split("/")
        number = dir_parts[-1]

        # Skip if this DLL is configured to use the original DLL instead of recompiling
        if not self.__should_compile_dll(Path(dir), number):
            continue

        c_paths = [Path(path) for path in glob.glob(f"{dir}/**/*.c", recursive=True)]
        asm_paths = [Path(path) for path in glob.glob(f"{dir}/**/*.s", recursive=True)]
        asm_paths.extend([Path(path)
                          for path in glob.glob(f"asm/nonmatchings/dlls/{number}/data/*.s")])

        files: "list[BuildFile]" = []

        for src_path in c_paths:
            obj_path = self.__make_obj_path(src_path)
            opt = self.__get_optimization_level(src_path)
            files.append(BuildFile(str(src_path), obj_path, BuildFileType.C, opt))

        for src_path in asm_paths:
            obj_path = self.__make_obj_path(src_path)
            files.append(BuildFile(str(src_path), obj_path, BuildFileType.ASM))

        self.dlls.append(DLL(number, dir, files))
        to_compile.add(number)

    # Scan for leftover DLLs that haven't been decompiled yet
    paths = [Path(path) for path in glob.glob("bin/assets/dlls/*.dll", recursive=True)]
    for src_path in paths:
        number = src_path.name.split(".")[0]
        if number in to_compile:
            continue

        obj_path = f"$BUILD_DIR/{src_path.with_suffix('.dll')}"
        self.leftover_dlls.append(BuildFile(str(src_path), obj_path, BuildFileType.BIN))
def synchronizeURIToDir(path, dirPath):
    '''
    Publishes the entire contents from the URI path to the provided directory path.

    @param path: string
        The path to a resource: a file system path, a ZIP path
    @param dirPath: string
        The directory path to synchronize with.
    '''
    assert isinstance(path, str) and path, 'Invalid content path %s' % path
    assert isinstance(dirPath, str), 'Invalid directory path value %s' % dirPath

    if not isdir(path):
        # not a directory, see if it's an entry in a zip file
        zipFilePath, inDirPath = getZipFilePath(path)
        zipFile = ZipFile(zipFilePath)
        if not inDirPath.endswith(ZIPSEP):
            inDirPath = inDirPath + ZIPSEP

        tmpDir = TemporaryDirectory()
        lenPath, zipTime = len(inDirPath), datetime.fromtimestamp(stat(zipFilePath).st_mtime)
        for zipInfo in zipFile.filelist:
            assert isinstance(zipInfo, ZipInfo), 'Invalid zip info %s' % zipInfo
            if zipInfo.filename.startswith(inDirPath):
                if zipInfo.filename[0] == '/':
                    dest = zipInfo.filename[1:]
                else:
                    dest = zipInfo.filename
                dest = normpath(join(dirPath, dest[lenPath:]))

                if exists(dest) and zipTime <= datetime.fromtimestamp(stat(dest).st_mtime):
                    continue
                destDir = dirname(dest)
                if not exists(destDir):
                    makedirs(destDir)

                zipFile.extract(zipInfo.filename, tmpDir.name)
                move(join(tmpDir.name, normOSPath(zipInfo.filename)), dest)
                if zipInfo.filename.endswith('.exe'):
                    os.chmod(dest, stat(dest).st_mode | S_IEXEC)
        return

    path = normpath(path)
    assert os.access(path, os.R_OK), 'Unable to read the directory path %s' % path
    lenPath = len(path) + 1
    for root, _dirs, files in os.walk(path):
        for file in files:
            src, dest = join(root, file), join(dirPath, root[lenPath:], file)
            if exists(dest) and \
                    datetime.fromtimestamp(stat(src).st_mtime) <= datetime.fromtimestamp(stat(dest).st_mtime):
                continue
            destDir = dirname(dest)
            if not exists(destDir):
                makedirs(destDir)
            copy(src, dest)
            if file.endswith('.exe'):
                os.chmod(dest, stat(dest).st_mode | S_IEXEC)
def initList(self):
    if not self.location:
        return
    self.table.clearContents()
    self.list.clear()
    for f in os.listdir(self.location):
        dir = os.path.join(str(self.location), f)
        if isdir(dir):
            item = QtGui.QListWidgetItem(self.list)
            item.setText(f)
    self.list.sortItems()
def getType(self, path):
    """
    Return the type of the file located at the given path.

    Args:
        path: path to the file
    """
    if isdir(path):
        return "FOLDER "
    if isfile(path):
        return "FILE "
    else:
        return "INVALID"
def listSubfolders(folder, sort=True, shuffle=False, seed=None):
    '''
    :param sort: if true, sort subfolders by name
    :param shuffle: if true, shuffle subfolders in random order
    :param seed: optional seed for the random shuffle
    :return: a list of full paths to subfolders of given folder.
    '''
    subfolders = [join(folder, f) for f in listdir(folder) if isdir(join(folder, f))]
    if sort or shuffle:
        subfolders.sort()
    if shuffle:
        if seed is not None:  # allow 0 as an explicit seed
            random.seed(seed)
        random.shuffle(subfolders)
    return subfolders
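# A minimal usage sketch; '/data/images' is a hypothetical folder.
# Sorting before the seeded shuffle makes the order reproducible
# regardless of the filesystem's listing order.
same_order_a = listSubfolders('/data/images', shuffle=True, seed=42)
same_order_b = listSubfolders('/data/images', shuffle=True, seed=42)
assert same_order_a == same_order_b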
def load_specs():
    bad_specs = set()
    last_error = None

    for fn in spec_files():
        logger.debug("load spec: %s", fn)
        if fn in bad_specs:
            # Don't try and load the same bad spec twice
            continue

        try:
            spec = open_spec(open(fn))

            for module in spec['modules']:
                logger.debug("allow module: %s", module)
                allowed_modules.add(module)

            for path_name in spec.get('extra_paths', []):
                extra_path = get_extra_path(path_name)
                if isdir(extra_path):
                    os.environ['PATH'] += env_t(os.pathsep + extra_path)
                    sys.path.append(extra_path)
                    added_dirs.add(extra_path)
                else:
                    logger.warn("Could not add extra path: {0}".format(extra_path))

            sys_sitedirs = getsyssitepackages()
            for sys_sitedir in sys_sitedirs:
                with fixup_paths():
                    for pth in [pth for pth in spec['pths'] or [] if pth]:
                        try:
                            logger.debug("open pth: %s", pth)
                            pth_file = join(sys_sitedir, pth)
                            addpackage(sys_sitedir, pth_file, added_dirs)
                            init_path()  # TODO
                        except IOError as e:
                            # Path files are optional..
                            logging.debug('No pth found at %s', pth_file)
                            pass

        except Exception as e:
            bad_specs.add(fn)
            err_msg = 'error loading spec %s: %s' % (fn, e)
            if last_error != err_msg:
                logging.error(err_msg)
            last_error = err_msg

    if bad_specs:
        raise VextError('Error loading spec files: %s' % ', '.join(bad_specs))
def process_transcoding(job):
    # Unpack the job tuple explicitly: tuple parameters in the signature
    # are Python 2-only syntax.
    flac_file, flac_root_path, mp3_target_path = job
    try:
        target_mp3_file = get_mp3_filename(mp3_target_path, flac_root_path, flac_file)
        if not isdir(dirname(target_mp3_file)):
            try:
                os.makedirs(dirname(target_mp3_file))
            except OSError:
                pass  # other thread might have been faster
        if isfile(target_mp3_file) and tags_are_equals(flac_file, target_mp3_file):
            LOGGER.info('skipping %r as target mp3 file exists and seems to have the same tags', flac_file)
        else:
            transcode(flac_file, target_mp3_file)
    except Exception as e:
        LOGGER.exception('error during the transcoding of %r : %s' % (flac_file, e))
def add_directory_csv_files(dir_path, paths=None):
    if not paths:
        paths = []
    for p in listdir(dir_path):
        path = join(dir_path, p)
        if isdir(path):
            # call recursively for each dir
            paths = add_directory_csv_files(path, paths)
        elif isfile(path) and path.endswith('.csv'):
            # add every file to the list
            paths.append(path)
    return paths
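# A minimal usage sketch; '/data/exports' is a hypothetical root. The
# accumulator list is threaded through the recursion, so the flat result
# contains every .csv file at any depth.
csv_paths = add_directory_csv_files('/data/exports')
print(len(csv_paths), 'csv files found')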
def subfolders(self, sort=True, shuffle=False, seed=0, abspath=True):
    '''
    Return the list of location's subfolders

    :param sort: if true, sort subfolders by name
    :param shuffle: if true, shuffle subfolders in random order
    :return: a list of full paths to subfolders of given folder.
    '''
    folder = lambda f: join(self._root, f) if abspath else f
    subfolders = [folder(f) for f in listdir(self._root) if isdir(join(self._root, f))]
    if sort or shuffle:
        self.__sortShuffle(subfolders, shuffle, seed)
    return subfolders
def _foreach_cluster_root_dir(self, cluster_home_dir, action):
    results = []
    cluster_home_dir = path_join(self.path, cluster_home_dir)
    if isdir(cluster_home_dir):
        cluster_member_dirs = listdir(cluster_home_dir)
        for cluster_member_dir in cluster_member_dirs:
            neo4j_dirs = listdir(path_join(cluster_home_dir, cluster_member_dir))
            for neo4j_dir in neo4j_dirs:
                if neo4j_dir.startswith("neo4j"):
                    neo4j_path = path_join(cluster_home_dir, cluster_member_dir, neo4j_dir)
                    result = action(neo4j_path)
                    results.append(result)
                    break
    return results
def test_isdir(self):
    self.assertIs(genericpath.isdir(support.TESTFN), False)
    f = open(support.TESTFN, "wb")
    try:
        f.write(b"foo")
        f.close()
        self.assertIs(genericpath.isdir(support.TESTFN), False)
        os.remove(support.TESTFN)
        os.mkdir(support.TESTFN)
        self.assertIs(genericpath.isdir(support.TESTFN), True)
        os.rmdir(support.TESTFN)
    finally:
        if not f.closed:
            f.close()
        try:
            os.remove(support.TESTFN)
        except os.error:
            pass
        try:
            os.rmdir(support.TESTFN)
        except os.error:
            pass
    self.assertRaises(TypeError, genericpath.isdir)
def addpackage(sys_sitedir, pthfile, known_dirs):
    """
    Wrapper for site.addpackage

    Try and work out which directories are added by
    the .pth and add them to the known_dirs set

    :param sys_sitedir: system site-packages directory
    :param pthfile: path file to add
    :param known_dirs: set of known directories
    """
    with open(join(sys_sitedir, pthfile)) as f:
        for n, line in enumerate(f):
            if line.startswith("#"):
                continue
            line = line.rstrip()
            if line:
                if line.startswith(("import ", "import\t")):
                    exec(line, globals(), locals())
                    continue
                else:
                    p_rel = join(sys_sitedir, line)
                    p_abs = abspath(line)
                    if isdir(p_rel):
                        os.environ['PATH'] += env_t(os.pathsep + p_rel)
                        sys.path.append(p_rel)
                        added_dirs.add(p_rel)
                    elif isdir(p_abs):
                        os.environ['PATH'] += env_t(os.pathsep + p_abs)
                        sys.path.append(p_abs)
                        added_dirs.add(p_abs)

    if isfile(pthfile):
        site.addpackage(sys_sitedir, pthfile, known_dirs)
    else:
        logging.debug("pth file '%s' not found", pthfile)
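# A standalone sketch of the .pth convention the two addpackage wrappers
# parse: each non-comment line is either a directory to put on sys.path
# or an `import ...` statement to execute. Paths below are hypothetical.
pth_text = """\
# demo.pth
relative_pkg_dir
/opt/shared/libs
import sys; sys.dont_write_bytecode = True
"""
for line in pth_text.splitlines():
    line = line.rstrip()
    if not line or line.startswith("#"):
        continue
    if line.startswith(("import ", "import\t")):
        print("would exec:", line)
    else:
        print("would add to sys.path:", line)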
def load_specs():
    bad_specs = set()
    last_error = None

    for fn in spec_files():
        logger.debug("load spec: %s", fn)
        if fn in bad_specs:
            # Don't try and load the same bad spec twice
            continue

        try:
            spec = open_spec(open(fn))

            for module in spec['modules']:
                logger.debug("allow module: %s", module)
                allowed_modules.add(module)

            for path_name in spec.get('extra_paths', []):
                extra_path = get_extra_path(path_name)
                if isdir(extra_path):
                    os.environ['PATH'] += env_t(os.pathsep + extra_path)
                    sys.path.append(extra_path)
                    added_dirs.add(extra_path)
                else:
                    logger.warn("Could not add extra path: {0}".format(extra_path))

            sys_sitedirs = getsyssitepackages()
            for sys_sitedir in sys_sitedirs:
                for pth in [pth for pth in spec['pths'] or [] if pth]:
                    try:
                        logger.debug("open pth: %s", pth)
                        pth_file = join(sys_sitedir, pth)
                        addpackage(sys_sitedir, pth_file, added_dirs)
                        init_path()  # TODO
                    except IOError as e:
                        # Path files are optional..
                        logging.debug('No pth found at %s', pth_file)
                        pass

        except Exception as e:
            bad_specs.add(fn)
            err_msg = 'error loading spec %s: %s' % (fn, e)
            if last_error != err_msg:
                logging.error(err_msg)
            last_error = err_msg

    if bad_specs:
        raise VextError('Error loading spec files: %s' % ', '.join(bad_specs))
def RepositoryIterator(root, depth=1):
    """
    Ordered iterator over the filesystem, returning only files from the
    fourth directory level, corresponding to the data files in the repository.
    """
    root = canonify(root)
    files = sorted(listdir(root))
    for file in files:
        path = join(root, file)
        if isdir(path) and depth < 4:
            # cannot use 'yield from' as 3to2 doesn't translate it
            for path in RepositoryIterator(path, depth=depth + 1):
                yield path
        elif depth == 4:
            yield path
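# A minimal usage sketch, assuming a repository laid out four levels deep,
# e.g. /repo/<a>/<b>/<c>/<data files> (hypothetical layout):
for data_file in RepositoryIterator('/repo'):
    print(data_file)  # only paths at the fourth level are yielded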
def private():
    '''Private dotfiles'''
    name = 'dotfiles_secrets'

    print_section('Clone repo')
    repo = f'https://github.com/davidkristoffersen/{name}.git'
    if not isdir(f'{DOTFILES_PRIVATE}/{name}'):
        bash_cmd(f'git clone {repo} "{DOTFILES_PRIVATE}/{name}"')

    print_section('Link repo')
    link_dir(f'{DOTFILES_PRIVATE}/{name}', name)

    print_section('Shell source')
    secret_body = '#!/usr/bin/env bash\n\n'
    secret_body += f'. \\"\\$DOTFILES_PRIVATE/{name}/.bashrc\\"'
    write(f'{DOTFILES_SHELL}/.bash.d/.90-secrets.bash', secret_body)
def generate(self):
    """ Procedure - generate the folder structure """
    for item in os.listdir(self.fullpath()):
        # if it's a folder, calculate and generate the subfolder structure
        if isdir(self.fullpath() + item):
            self._folders_count += 1
            # for each folder create a FolderTree object
            f = FolderTree(item, self)
            # save child object
            self._items.append(f)
            # generate folder structure for child
            f.generate()
        # if it's a file, just count it
        elif isfile(self.fullpath() + item):
            self._files_count += 1
    return self
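# A minimal usage sketch, assuming FolderTree(name, parent) is the
# constructor (as suggested by the recursive call above) and that
# fullpath() yields a path ending in a separator; both are assumptions.
tree = FolderTree('projects/', None).generate()
print(tree._folders_count, 'folders,', tree._files_count, 'files at the top level')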
def copy2remote():
    webhdfs = WebHdfs()
    base_path = "/home/work/data/collie/local_projects/"
    dirs = [f for f in listdir(base_path) if isdir(join(base_path, f))]
    for dr in dirs:
        try:
            project_version = int(dr)
            print "processing version %d" % project_version
            project_file_path = join(base_path, dr)
            files = [f for f in listdir(project_file_path)
                     if isfile(join(project_file_path, f))]
            if len(files) <= 0:
                continue
            if len(files) > 1:
                print "ERROR!!!"
            file_name = files[0]
            local_path = join(project_file_path, file_name)
            # local_path_new = join(project_file_path, str(project_version) + ".zip")
            # os.rename(local_path, local_path_new)
            remote_path = "/user/h_sns/collie/projects/"
            webhdfs.copyFromLocal(local_path, remote_path)
            print "local copied from %s to %s" % (local_path, remote_path)
            version = ProjectVersions.objects.filter(project_version=project_version)[:1].get()
            version.hdfs_path = join(remote_path, file_name)
            version.save()
            print "saved version %d, path %s" % (project_version, version.hdfs_path)
        except IOError:
            print "Error!!"
            continue
        except OSError:
            print "Error!!"
            continue
def getPaths(basePath, pathsVector):
    '''
    Insert a complete list of sub-dirs for a path in a vector of paths

    @param basePath: the base path to search into
    @type basePath: string
    @param pathsVector: the sub-path vector
    @type pathsVector: list
    @rtype: list
    '''
    thisDirPaths = [basePath + "/" + x for x in listdir(basePath)
                    if isdir(basePath + "/" + x)]
    for path in thisDirPaths:
        if exists(path + "/functions.json"):
            pathsVector.append(path)
        else:
            # search inside
            getPaths(path, pathsVector)
    return pathsVector
def __init__(self):
    assert isinstance(self.image_dir_path, str), 'Invalid image directory %s' % self.image_dir_path
    assert isinstance(self.format_file_name, str), 'Invalid format file name %s' % self.format_file_name
    assert isinstance(self.default_file_name, str), 'Invalid default file name %s' % self.default_file_name
    assert isinstance(self.imageTypeKey, str), 'Invalid meta type image key %s' % self.imageTypeKey
    assert isinstance(self.thumbnailSizes, dict), 'Invalid thumbnail sizes %s' % self.thumbnailSizes
    assert isinstance(self.cdmImages, ICDM), 'Invalid image CDM %s' % self.cdmImages
    assert isinstance(self.cdmThumbnails, ICDM), 'Invalid image thumbnail CDM %s' % self.cdmThumbnails
    SessionSupport.__init__(self)

    if not os.path.exists(self.image_dir_path):
        os.makedirs(self.image_dir_path)
    if not isdir(self.image_dir_path) or not os.access(self.image_dir_path, os.W_OK):
        raise IOError('Unable to access the repository directory %s' % self.image_dir_path)

    # We order the thumbnail sizes by area (ascending)
    thumbnailSizes = [(key, sizes) for key, sizes in self.thumbnailSizes.items()]
    thumbnailSizes.sort(key=lambda pack: pack[1][0] * pack[1][1])
    self.thumbnailSizes = OrderedDict(thumbnailSizes)
    self._metaTypeId = None
def main_loop():
    GAMES_DIR = os.getcwd() + '/../games/'
    print "GAMES_DIR: ", GAMES_DIR
    print
    games = [x for x in os.listdir(GAMES_DIR)
             if isdir(GAMES_DIR + "/" + x) and isfile(GAMES_DIR + "/" + x + "/" + "package.json")]
    if len(games) < 1:
        print "I did not find any game. Bye bye...."
        return False
    while True:
        print "These are the games I found:"
        for i, game in enumerate(games):
            print "\t" + str(i) + ") " + str(game)
        print "\t.) Any other selection to exit"
        try:
            choosed = int(raw_input('What do you want me to solve? '))
            print "You chose: " + str(games[choosed])
            print
            print "--------------------------------"
            try:
                solve_game(GAMES_DIR + "/" + games[choosed] + "/")
            except Exception:
                traceback.print_exc(file=sys.stdout)
            print "--------------------------------"
            print
        except ValueError:
            print "...Bye bye..."
            return True
def __buildFileListToSend(self, path):
    fileListToSend = []
    fullPath = os.path.join(self.__repositoryPath, path)
    if isfile(fullPath):
        file = open(fullPath, mode='rb')
        fileInfo = {'file': file, 'fileName': path}
        fileListToSend.append(fileInfo)
    elif isdir(fullPath):
        filesUnderPath = os.path.join(fullPath, '*')
        it = iter(glob.iglob(filesUnderPath))
        for filePath in it:
            if isfile(filePath):
                file = open(filePath, mode='rb')
                fileName = filePath.replace(self.__repositoryPath, '')
                fileInfo = {'file': file, 'fileName': fileName}
                fileListToSend.append(fileInfo)
    return fileListToSend
def check_dir(dirpath):
    if dirpath:
        if not exists(dirpath):
            arg_parse_error('Directory ' + dirpath + ' does not exist.')
        if not isdir(dirpath):
            arg_parse_error(dirpath + ' is a file.')
def _update(self, locale, messages, poFile, path, pathMO, isGlobal=True):
    assert isinstance(locale, Locale), 'Invalid locale %s' % locale
    assert isinstance(messages, Iterable), 'Invalid messages %s' % messages
    assert isinstance(poFile, IInputStream), 'Invalid file object %s' % poFile
    assert isinstance(path, str), 'Invalid path %s' % path
    assert isinstance(pathMO, str), 'Invalid path MO %s' % pathMO
    assert isinstance(isGlobal, bool), 'Invalid is global flag %s' % isGlobal

    catalog = read_po(poFile, locale=locale)
    assert isinstance(catalog, Catalog), 'Invalid catalog %s' % catalog

    if not catalog:
        # The catalog has no messages, no need for updating.
        return

    if not isGlobal:
        pathGlobal = self._filePath(locale)
        pathGlobalMO = self._filePath(locale, format=FORMAT_MO)
        if isfile(pathGlobal):
            with open(pathGlobal) as fObj:
                catalogGlobal = read_po(fObj, locale)
            self._processCatalog(catalogGlobal, self.messageService.getMessages())
        else:
            isGlobal, path, pathMO = True, pathGlobal, pathGlobalMO
            messages = self.messageService.getMessages()

    self._processCatalog(catalog, messages)

    if isfile(path):
        with open(path) as fObj:
            catalogOld = read_po(fObj, locale)
        for msg in catalog:
            msgO = catalogOld.get(msgId(msg), msg.context)
            if not isMsgTranslated(msg) and msgO and isMsgTranslated(msgO):
                msg.string = msgO.string
        catalog.creation_date = catalogOld.creation_date
    else:
        pathDir = dirname(path)
        if not isdir(pathDir):
            os.makedirs(pathDir)
        catalog.creation_date = datetime.now()

    if not isGlobal:
        # We remove all the messages that are not translated or have the same
        # translation as in the global locale or are the only plugin that makes
        # use of the message in the global.
        updatedGlobal = False
        for msg in list(catalog):
            id = msgId(msg)
            if not id:
                continue
            if not isMsgTranslated(msg):
                catalog.delete(id, msg.context)
            else:
                msgG = catalogGlobal.get(id, msg.context)
                if not msgG or msgG.string == msg.string:
                    catalog.delete(id, msg.context)
                elif not isMsgTranslated(msgG) or msgG.locations == msg.locations:
                    copyTranslation(msg, msgG)
                    catalog.delete(id, msg.context)
                    updatedGlobal = True

        if updatedGlobal:
            # We remove all the messages that are not translated.
            for msg in list(catalogGlobal):
                if not isMsgTranslated(msg):
                    catalogGlobal.delete(msgId(msg), msg.context)
            catalogGlobal.revision_date = datetime.now()

            os.makedirs(dirname(pathGlobal), exist_ok=True)
            with open(pathGlobal, 'wb') as fObj:
                write_po(fObj, catalogGlobal, **self.write_po_config)
            os.makedirs(dirname(pathGlobalMO), exist_ok=True)
            with open(pathGlobalMO, 'wb') as fObj:
                write_mo(fObj, catalogGlobal)
    else:
        # We remove all the messages that are not translated.
        for msg in list(catalog):
            if not isMsgTranslated(msg):
                catalog.delete(msgId(msg), msg.context)

    catalog.revision_date = datetime.now()

    os.makedirs(dirname(path), exist_ok=True)
    with open(path, 'wb') as fObj:
        write_po(fObj, catalog, **self.write_po_config)
    os.makedirs(dirname(pathMO), exist_ok=True)
    with open(pathMO, 'wb') as fObj:
        write_mo(fObj, catalog)
def main(args):
    cnf, options = common_main(
        'targetcov',
        opts=[
            (['--bam'], 'align.bam', {
                'dest': 'bam',
                'help': 'used to generate some annotations by GATK'}),
            (['--capture', '--bed'], 'capture.bed', {
                'dest': 'capture',
                'help': ''}),
            (['--genes'], 'genes.bed', {
                'dest': 'genes',
                'help': ''}),
            (['--exons'], 'exons.bed', {
                'dest': 'exons',
                'help': ''}),
            (['--padding'], '250', {
                'dest': 'padding',
                'help': '',
                'default': 250}),
        ])

    genes_bed = options.get('genes') or cnf.get('genes') or cnf['genome'].get('genes')
    exons_bed = options.get('exons') or cnf.get('exons') or expanduser(cnf['genome'].get('exons'))
    chr_len_fpath = cnf.get('chr_lengths') or cnf['genome'].get('chr_lengths')
    capture_bed = options.get('capture') or cnf.get('capture')
    bam = options.get('bam') or cnf.get('bam')

    genes_bed = expanduser(genes_bed)
    exons_bed = expanduser(exons_bed)
    chr_len_fpath = expanduser(chr_len_fpath)
    bam = expanduser(bam)
    capture_bed = expanduser(capture_bed)

    if not genes_bed:
        critical('Specify sorted genes bed file in system info or in run info.')
    if not exons_bed:
        critical('Specify sorted exons bed file in system info or in run info.')
    if not chr_len_fpath:
        critical('Specify chromosome lengths for the genome in system info or in run info.')
    if not bam:
        critical('Specify bam file by --bam option or in run_config.')
    if not capture_bed:
        critical('Specify capture file by --capture option or in run_config.')

    print('using genes ' + genes_bed)
    print('using exons ' + exons_bed)
    print('using chr lengths ' + chr_len_fpath)
    print('using bam ' + bam)
    print('using capture panel ' + capture_bed)

    if not verify_file(genes_bed):
        exit()
    if not verify_file(exons_bed):
        exit()
    if not verify_file(chr_len_fpath):
        exit()
    if not verify_file(bam):
        exit()
    if not verify_file(capture_bed):
        exit()

    depth_thresholds = cnf['depth_thresholds']
    padding = options.get('padding', cnf.get('padding', 250))

    output_dir = expanduser(options.get('output_dir', cnf.get('output_dir', os.getcwd())))
    work_dir = join(output_dir, 'work')
    if isdir(work_dir):
        rmtree(work_dir)
    os.makedirs(work_dir)

    run_header_report(output_dir, work_dir, capture_bed, bam, chr_len_fpath,
                      depth_thresholds, padding)
    run_cov_report(output_dir, work_dir, capture_bed, bam, depth_thresholds)
    run_cov_report(output_dir, work_dir, capture_bed, bam, depth_thresholds,
                   genes_bed, exons_bed)
def fs_traverse(path, repo, parent=None,
                subdatasets=None,
                render=True,
                recurse_datasets=False, recurse_directories=False,
                json=None, basepath=None):
    """Traverse path through its nodes and returns a dictionary of relevant
    attributes attached to each node

    Parameters
    ----------
    path: str
      Path to the directory to be traversed
    repo: AnnexRepo or GitRepo
      Repo object the directory belongs to
    parent: dict
      Extracted info about parent directory
    recurse_directories: bool
      Recurse into subdirectories (note that subdatasets are not traversed)
    render: bool
      To render from within function or not. Set to false if results to be
      manipulated before final render

    Returns
    -------
    list of dict
      extracts and returns a (recursive) list of directory info at path;
      does not traverse into annex, git or hidden directories
    """
    subdatasets = subdatasets or []
    fs = fs_extract(path, repo, basepath=basepath or path)
    dataset = Dataset(repo.path)
    submodules = {sm.path: sm for sm in repo.get_submodules()}
    # TODO: some submodules might not even have a local empty directory
    # (git doesn't care about those), so us relying on listdir here and
    # for _traverse_handle_subds might not work out.
    # E.g. create-sibling --ui true ... --existing=reconfigure
    # causes removal of those empty ones on the remote end
    if isdir(path):  # if node is a directory
        children = [fs.copy()]  # store its info in its children dict too
        # (Yarik is not sure why, but I guess for .?)
        # ATM seems some pieces still rely on having this duplication, so left as is
        # TODO: strip away
        for node in listdir(path):
            nodepath = opj(path, node)

            # Might contain subdatasets, so we should analyze and prepare entries
            # to pass down... in theory we could just pass full paths may be? strip
            node_subdatasets = []
            is_subdataset = False
            if isdir(nodepath):
                node_sep = with_pathsep(node)
                for subds in subdatasets:
                    if subds == node:
                        # it is the subdataset
                        is_subdataset = True
                    else:
                        # use path_is_subdir
                        if subds.startswith(node_sep):
                            node_subdatasets += [subds[len(node_sep):]]

            # TODO: it might be a subdir which is non-initialized submodule!
            # if not ignored, append child node info to current nodes dictionary
            if is_subdataset:
                # repo.path is real, so we are doomed (for now at least)
                # to resolve nodepath as well to get relpath for it
                node_relpath = relpath(realpath(nodepath), repo.path)
                subds = _traverse_handle_subds(
                    node_relpath,
                    dataset,
                    recurse_datasets=recurse_datasets,
                    recurse_directories=recurse_directories,
                    json=json
                )
                # Enhance it with external url if available
                submod_url = submodules[node_relpath].url
                if submod_url and is_datalad_compat_ri(submod_url):
                    subds['url'] = submod_url
                children.append(subds)
            elif not ignored(nodepath):
                # if recursive, create info dictionary (within) each child node too
                if recurse_directories:
                    subdir = fs_traverse(nodepath,
                                         repo,
                                         subdatasets=node_subdatasets,
                                         parent=None,  # children[0],
                                         recurse_datasets=recurse_datasets,
                                         recurse_directories=recurse_directories,
                                         json=json,
                                         basepath=basepath or path)
                    subdir.pop('nodes', None)
                else:
                    # read child metadata from its metadata file if it exists
                    subdir_json = metadata_locator(path=node, ds_path=basepath or path)
                    if exists(subdir_json):
                        with open(subdir_json) as data_file:
                            subdir = js.load(data_file)
                        subdir.pop('nodes', None)
                    else:
                        # else extract whatever information you can about the child
                        # Yarik: this one is way too lean...
                        subdir = fs_extract(nodepath, repo, basepath=basepath or path)
                # append child metadata to list
                children.extend([subdir])

        # sum sizes of all 1st level children
        children_size = {}
        for node in children[1:]:
            for size_type, child_size in node['size'].items():
                children_size[size_type] = \
                    children_size.get(size_type, 0) + machinesize(child_size)

        # update current node sizes to the humanized aggregate children size
        fs['size'] = children[0]['size'] = \
            {size_type: humanize.naturalsize(child_size)
             for size_type, child_size in children_size.items()}
        children[0]['name'] = '.'  # replace current node name with '.' to emulate unix syntax
        if parent:
            parent['name'] = '..'  # replace parent node name with '..' to emulate unix syntax
            children.insert(1, parent)  # insert parent info after current node info in children dict

        fs['nodes'] = children  # add children info to main fs dictionary
        if render:  # render directory node at location(path)
            fs_render(fs, json=json, ds_path=basepath or path)
            lgr.info('Directory: %s' % path)

    return fs