def __init__(self, appcfg_file):
    """Open (or create) the application config database at *appcfg_file*.

    Builds a SQLObject sqlite URI from the file path (handling Windows
    drive letters, which sqlite URIs encode as ``C|``) and binds the
    Path and PluginList tables to that connection, creating them if
    they do not exist yet.
    """
    self.__paths = Path
    self.__pluginlist = PluginList
    # REVIEW: Is this the best way to tackle this?:
    # http://sqlobject.org/SQLObject.html#declaring-a-connection
    if platform == "win32":
        # sqlite URIs always use forward slashes, even on Windows.
        # (BUG FIX: a second replace(r"\\", r"/") was a no-op after this
        # one and has been removed, along with a leftover debug print.)
        appcfg_file = appcfg_file.replace("\\", "/")
    (drive, filepath) = path.splitdrive(appcfg_file)
    uri = "sqlite:/"
    if drive == '':
        appcfg_file = path.abspath(filepath)
        if path.isfile(appcfg_file):
            (drive, filepath) = path.splitdrive(appcfg_file)
            if ":" in drive:
                # sqlite URIs encode a drive letter as e.g. "C|/path"
                uri = uri + "".join([drive.replace(":", "|"), filepath])
            else:
                uri = uri + appcfg_file
        else:
            uri = uri + appcfg_file
    else:
        uri = uri + appcfg_file
    self.__app_cfg_db = connectionForURI(uri)
    self.__paths.setConnection(self.__app_cfg_db)
    self.__paths.createTable(ifNotExists=True)
    self.__pluginlist.setConnection(self.__app_cfg_db)
    self.__pluginlist.createTable(ifNotExists=True)
def _make_tarball(self, target_name):
    """Create .tar.gz and plain .tar archives from a tree of files and
    assert both archive files were created."""
    tmpdir = self.mkdtemp()
    self.write_file([tmpdir, 'file1'], 'xxx')
    self.write_file([tmpdir, 'file2'], 'xxx')
    os.mkdir(os.path.join(tmpdir, 'sub'))
    self.write_file([tmpdir, 'sub', 'file3'], 'xxx')

    tmpdir2 = self.mkdtemp()
    # BUG FIX: unittest.skipUnless() called as a bare statement only
    # builds a decorator and throws it away -- it never skipped the
    # test.  Skip explicitly instead.
    if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
        self.skipTest('source and target should be on same drive')

    base_name = os.path.join(tmpdir2, target_name)

    # working with relative paths to avoid tar warnings
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        make_tarball(splitdrive(base_name)[1], '.')
    finally:
        os.chdir(old_dir)

    # check that the compressed tarball was created
    tarball = base_name + '.tar.gz'
    self.assertTrue(os.path.exists(tarball))

    # same, but without compression
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        make_tarball(splitdrive(base_name)[1], '.', compress=None)
    finally:
        os.chdir(old_dir)

    tarball = base_name + '.tar'
    self.assertTrue(os.path.exists(tarball))
def _make_tarball(self, target_name):
    """Build a tree of files, archive it as .tar.gz and .tar, and
    assert both archives exist."""
    tmpdir = self.mkdtemp()
    self.write_file([tmpdir, 'file1'], 'xxx')
    self.write_file([tmpdir, 'file2'], 'xxx')
    os.mkdir(os.path.join(tmpdir, 'sub'))
    self.write_file([tmpdir, 'sub', 'file3'], 'xxx')

    tmpdir2 = self.mkdtemp()
    # BUG FIX: calling unittest.skipUnless(...) as a statement has no
    # effect (it merely returns a decorator); skip explicitly.
    if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
        self.skipTest('source and target should be on same drive')

    base_name = os.path.join(tmpdir2, target_name)

    # working with relative paths to avoid tar warnings
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        make_tarball(splitdrive(base_name)[1], '.')
    finally:
        os.chdir(old_dir)

    # check that the compressed tarball was created
    tarball = base_name + '.tar.gz'
    self.assertTrue(os.path.exists(tarball))

    # same, but without compression
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        make_tarball(splitdrive(base_name)[1], '.', compress=None)
    finally:
        os.chdir(old_dir)

    tarball = base_name + '.tar'
    self.assertTrue(os.path.exists(tarball))
def cmd_parser():
    """Parse command line options for the doujin voice tagger.

    Exits with status 1 (via sys.exit) when required options are
    missing, when a directory does not exist, or when orig/dest live on
    different drives (file rename requires the same mount point).
    """
    parser = ArgumentParser(description="a doujin voice tagger")
    parser.add_argument("--orig", "-o", type=str, dest="orig",
                        action="store", help="directory to process")
    parser.add_argument("--dest", "-d", type=str, dest="dest",
                        action="store", help="destination")
    parser.add_argument("--nocov", "-q", action="store_false", dest="cover",
                        default=True, help="do not save cover")
    parser.add_argument("--debug", action="store_true", dest="debug",
                        default=False, help="run in single thread for debug")
    parser.add_argument("--lang", "-l", type=int, dest="lang", action="store",
                        default=0, help="0 for Japanese(default), 1 for Chinese")
    parser.add_argument("--proxy", type=str, dest="proxy", action="store",
                        help="proxy, the same as 'requests' module")
    parser.add_argument("--method", "-m", type=str, dest="method",
                        default="save2ape", action="store",
                        help="how to save tags")
    options = parser.parse_args(sys.argv[1:])
    if not (options.orig and options.dest):
        logger.error("必须提供orig和dest参数")
        # BUG FIX: use sys.exit instead of the site-injected exit()
        # builtin, which is not guaranteed to exist in all interpreters.
        sys.exit(1)
    if not path.exists(options.orig) or not path.exists(options.dest):
        logger.error("orig或者dest文件夹不存在")
        sys.exit(1)
    # for file rename, we must have both on the same mount point.
    # XXX not tested on *nix if two on different mount point
    if path.splitdrive(options.orig)[0] != path.splitdrive(options.dest)[0]:
        logger.error("orig和dest文件夹不在一个分区")
        sys.exit(1)
    logger.debug(f"options is {options}")
    return options
def main():
    """Entry point: parse CLI options, merge them with the saved
    config, match RJ-coded folders under *orig* and process them with
    a worker pool (or serially with --debug)."""
    banner()
    options = cmd_parser()
    config = read_config()
    if options.show:
        show_config(config)
        return
    orig, dest, reflush = merge_config(options, config)
    # -1 from merge_config aborts the run -- presumably an
    # unrecoverable option/config conflict; confirm in merge_config.
    if reflush == -1:
        return
    if not path.exists(orig) or not path.exists(dest):
        logger.error("orig or dest does not exist")
        return
    # for file rename, we must have both on the same mount point.
    # XXX not tested on *nix if two on different mount point
    if path.splitdrive(orig)[0] != path.splitdrive(dest)[0]:
        logger.error("orig and dest not in the same drive")
        return
    logger.info("starting")
    cover, lang = options.cover, options.lang
    work_list = [(rjcode, root, dest, cover, lang)
                 for rjcode, root in match_path(orig, RJPAT)]
    if not work_list:
        logger.info("no match found")
        return
    if options.debug:
        # single-threaded path so tracebacks stay readable
        for args in work_list:
            worker(args)
    else:
        with Pool() as pool:
            pool.map(worker, work_list)
    if reflush == 1:
        # reflush == 1 persists the effective orig/dest back to disk
        config = {"dest": dest, "orig": orig}
        logger.info("saving config to file")
        save_config(config)
class TestLongPath(unittest.TestCase):
    """Tests for trashing files whose paths exceed the classic Windows
    MAX_PATH limit (hence the \\\\?\\ long-path prefix)."""

    def setUp(self):
        name = 'A' * 100
        # The \\?\ prefix enables long-path handling on Windows.
        self.dirname = '\\\\?\\' + op.join(gettempdir(), name)
        self.file = op.join(
            self.dirname,
            name,
            name,
            # From there, the path is not trashable from Explorer
            name,
            name + '.txt')
        self._create_tree(self.file)

    def tearDown(self):
        shutil.rmtree(self.dirname, ignore_errors=True)

    def _create_tree(self, path):
        # Create all parent directories, then the file itself.
        parent = op.dirname(path)
        if not op.isdir(parent):
            os.makedirs(parent)
        with open(path, 'w') as writer:
            writer.write('Looong filename!')

    def test_trash_file(self):
        s2t(self.file)
        self.assertFalse(op.exists(self.file))

    @unittest.skipIf(
        op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
        'Cannot trash long path from other drive')
    def test_trash_folder(self):
        s2t(self.dirname)
        self.assertFalse(op.exists(self.dirname))
def is_zip_path(path):
    """
    Returns True if the path refers to a zip file (or a path inside one).
    """
    candidate = path
    # Walk up towards the filesystem root until we hit a zip file or
    # run out of path components.
    while not is_zipfile(candidate):
        if splitdrive(candidate)[1] in ('\\', '/'):
            break
        candidate = dirname(candidate)
    return is_zipfile(candidate)
def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
    """Create a tarball from *tmpdir* and verify its member list."""
    tmpdir2 = self.mkdtemp()
    # BUG FIX: calling unittest.skipUnless() as a statement has no
    # effect (it only returns a decorator); skip explicitly instead.
    if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
        self.skipTest('source and target should be on same drive')

    base_name = os.path.join(tmpdir2, target_name)

    # working with relative paths to avoid tar warnings
    with change_cwd(tmpdir):
        make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)

    # check that the archive was created with the expected contents
    tarball = base_name + suffix
    self.assertTrue(os.path.exists(tarball))
    self.assertEqual(self._tarinfo(tarball), self._created_files)
def simplify_path(path):
    """
    Return relative path towards current working directory
    unless it is a separate Windows drive
    """
    cwd = os.getcwd()
    # A relative path only makes sense within the same drive.
    same_drive = splitdrive(path)[0] == splitdrive(cwd)[0]
    return relpath(path, cwd) if same_drive else path
def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
    """Archive *tmpdir*'s 'dist' directory and verify the result."""
    tmpdir2 = self.mkdtemp()
    # BUG FIX: unittest.skipUnless(...) used as a bare statement never
    # skips (it just returns an unused decorator); skip explicitly.
    if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
        self.skipTest("source and target should be on same drive")

    base_name = os.path.join(tmpdir2, target_name)

    # working with relative paths to avoid tar warnings
    with change_cwd(tmpdir):
        make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)

    # check if the compressed tarball was created
    tarball = base_name + suffix
    self.assertTrue(os.path.exists(tarball))
    self.assertEqual(self._tarinfo(tarball), self._created_files)
def rt_MoveRVMAT(rvmatName, prefixPath="P:\\"):
    """Rewrite texture references in an rvmat, copying textures as needed.

    For each texture referenced by *rvmatName*: if a replacement is
    known, record it; otherwise copy the source .paa next to the rvmat
    and point the rvmat at the copy.  Finally patch the rvmat with the
    accumulated old->new name pairs.
    """
    # Construct the output name on the drive. This assumes the rvMat is on the P drive
    outputFolder = path.split(rvmatName)[0]
    # Construct the output name in Game terms. This assumes outputFolder is on the P drive
    gameOutputFolder = path.splitdrive(outputFolder)[1]
    gameOutputFolder = gameOutputFolder.strip("\\")
    # Get the textures for this
    textures = rt_readTextures(rvmatName)
    outputList = []
    for tex in textures:
        texture, replaced = rt_findTextureMatch(tex)
        if replaced is True:
            outputList.append([tex, texture])
        else:
            # Copy the texture
            srcFile = path.join(prefixPath, path.splitext(tex)[0] + ".paa")
            dstFile = path.join(outputFolder, path.basename(texture))
            print("Need to copy " + srcFile + " to " + dstFile)
            try:
                rt_smartCopy(srcFile, dstFile)
            except (OSError, IOError):
                # BUG FIX: was a bare ``except: pass``, which also
                # swallowed KeyboardInterrupt/SystemExit.  Keep the
                # best-effort behaviour but only for I/O failures,
                # and report them.
                print("Warning: could not copy " + srcFile)
            # Add the replacement to the list
            texBase = path.split(texture)[1]
            outputList.append([tex, path.join(gameOutputFolder, texBase)])
    ft_replaceNames(rvmatName, outputList)
def is_nt_ssd(path):
    """Return True if *path* resides on an SSD-like drive (Windows only).

    A RAM disk counts as "SSD"; for fixed/removable drives the physical
    disk backing the drive letter is looked up via WMI and its
    MediaType compared against 4 (NOTE(review): 4 == SSD per the code;
    confirm against the MSFT_PhysicalDisk schema docs).
    """
    import win32file
    flag = False
    path = _fullpath(path)
    # GetDriveType wants the drive letter (e.g. "C:"), not the path.
    drive = splitdrive(path)[0].upper()
    drivetype = win32file.GetDriveType(drive)
    if drivetype == win32file.DRIVE_RAMDISK:
        flag = True
    elif drivetype in (win32file.DRIVE_FIXED, win32file.DRIVE_REMOVABLE):
        import wmi
        c = wmi.WMI()
        # Map logical drive letters ("C:") to physical disk indices by
        # walking disk -> partition -> logical-disk associations.
        phy_to_part = 'Win32_DiskDriveToDiskPartition'
        log_to_part = 'Win32_LogicalDiskToPartition'
        index = dict((log_disk.Caption, phy_disk.Index)
                     for phy_disk in c.Win32_DiskDrive()
                     for partition in phy_disk.associators(phy_to_part)
                     for log_disk in partition.associators(log_to_part))
        # Switch to the Storage Management namespace for MSFT_PhysicalDisk.
        c = wmi.WMI(moniker='//./ROOT/Microsoft/Windows/Storage')
        flag = bool(
            c.MSFT_PhysicalDisk(DeviceId=str(index[drive]), MediaType=4))
    return flag
def _resolve_filepath(self, filepath, force=False): ret = None if not force: try: resolved = self._resolved_filepaths[filepath] except KeyError: force = True else: ret = resolved if force: orig_filepath = filepath filepath = path.normpath(filepath) if path.isabs(filepath): drive, tail = path.splitdrive(filepath) if drive: raise AbsPathError("absolute path with drive not allowed: %s" % filepath) filepath = filepath[1:] # resolve extension filepath = self._resolve_filepath_ext(filepath) # add to cache self._resolved_filepaths[orig_filepath] = filepath ret = filepath return ret
def database_factory(database_location):
    '''
    Create a DatabaseEngine from its location string. This location can be
    either a sqlite file path (ending with '.sqlite' or ':memory:' for an
    in memory database for testing) or a populse_db URL, or None.
    '''
    global _populsedb_url_re

    engine_directory = None
    if database_location is None:
        database_location = ':memory:'
    match = _populsedb_url_re.match(database_location)
    if match:
        # BUG FIX: was ``match.groups(2)`` which returns the *tuple* of
        # all groups (2 is just the default for unmatched ones) and then
        # crashed inside splitdrive; use the second capture group.
        path = match.group(2)
        _, path = osp.splitdrive(path)
        # BUG FIX: ``os.apth.sep`` was a typo (AttributeError).
        if path.startswith(os.path.sep):
            engine_directory = osp.abspath(osp.dirname(path))
        populse_db = database_location
    elif database_location.endswith('.sqlite'):
        populse_db = 'sqlite:///%s' % database_location
        engine_directory = osp.abspath(osp.dirname(database_location))
    elif database_location == ':memory:':
        populse_db = 'sqlite:///:memory:'
    else:
        raise ValueError('Invalid database location: %s' % database_location)

    engine = PopulseDBEngine(populse_db)
    if engine_directory:
        engine.set_named_directory('capsul_engine', engine_directory)
    return engine
def play_music(self, name):
    """Play *name*: a CD audio track when it ends in '.CDA' (via
    pygame.cdrom), anything else via pygame.mixer.music.  The status
    display ``self.V`` is updated with the outcome."""
    a = path.split(name)       # (directory, basename)
    b = path.splitdrive(name)  # (drive, rest); drive used for CD lookup
    if pygame.mixer.music.get_busy():
        pygame.mixer.music.stop()
    elif PLATFORM == 'win32' and self.a_drive_is_busy():
        self.stop_drives()
    if name[-4:].upper() == '.CDA':
        # CD audio: find the pygame drive id matching this drive letter,
        # then play the track by its index in the directory listing
        # (assumes the cwd is the CD's directory -- TODO confirm).
        drive = b[0] + SLASH
        for x in self.drive_list:
            if drive == x[1]:
                drive_id = x[0]
        track_list = listdir(u'.')
        track_number = track_list.index(a[1])
        self.pcc = pygame.cdrom.CD(drive_id)
        self.pcc.init()
        try:
            self.pcc.play(track_number)
            self.V.set('loaded ... ' + a[1])
        except RuntimeError:
            self.V.set('could not load ...')
    else:
        try:
            # filenames are encoded to the platform locale for pygame
            pygame.mixer.music.load(name.encode(DEFAULTLOCALE[1]))
            pygame.mixer.music.play()
            self.V.set('loaded ... ' + a[1])
        except RuntimeError:
            self.V.set('could not load ...')
def str_sanitize(filename: str) -> str:
    """Removes illegal filename characters and condenses whitespace."""
    base, ext = splitext(filename)
    # Collapse every run of whitespace in the stem to a single space.
    condensed = re.sub(r"\s+", " ", base)
    drive, rest = splitdrive(condensed)
    # Strip characters that are illegal or troublesome in filenames,
    # then trim stray punctuation/spaces from the ends.
    cleaned = re.sub(r'[<>:"|?*&%=+@#`^]', "", rest)
    return drive + cleaned.strip("-., ") + ext
def get_root_dir(cwd=None):
    """Walk upwards from *cwd* (default: the current directory) looking
    for a directory accepted by is_root_dir(); return it, or None when
    the filesystem root is reached without a match."""
    if cwd is None:
        cwd = getcwd()
    roots = ('/', '\\', '')
    # Stop once the drive-relative part of the path is a bare root.
    while normpath(splitdrive(cwd)[1]) not in roots:
        if is_root_dir(cwd):
            return cwd
        cwd = join(cwd, '..')
    return None
def validate_dirname(dirname: Union[Path, str]) -> bool:
    """
    Check whether the provided directory name is valid or not. This is done
    by considering each part of dirname and testing if it is valid or not,
    ignoring non-existent and non-readable path components.

    Returns True if the dirname is valid for current OS else False.

    Raises FileExistsError when *dirname* names an existing file, and
    InvalidDirectoryName when a component is syntactically invalid or
    too long.
    """
    if not isinstance(dirname, (Path, str)) or not dirname:
        return False
    if _os.isfile(dirname):
        raise FileExistsError(f'File: {dirname!r} already exists')
    try:
        # Drop any drive component; parts are then probed relative to
        # the home drive (Windows) or the root separator (POSIX).
        _, dirname = _os.splitdrive(dirname)
        home = os.environ.get('HOMEDRIVE', 'C:') if os.name == 'nt' else _SEP
        assert _os.isdir(home)
        home = home.rstrip(_SEP) + _SEP
        for part in dirname.split(_SEP):
            try:
                # lstat raises OSError for syntactically invalid or
                # over-long names; plain non-existence is ignored
                # because its errno/winerror is not one we test for.
                os.lstat(home + part)
            except OSError as err:
                if hasattr(err, 'winerror'):
                    # pyright: reportGeneralTypeIssues=false
                    if err.winerror == _ERROR_INVALID_NAME:
                        raise InvalidDirectoryName(
                            valid=True,
                            msg=f'Path: {dirname!r} syntax is incorrect')
                elif err.errno in (errno.ENAMETOOLONG, errno.ERANGE):
                    raise InvalidDirectoryName(
                        valid=True,
                        msg=f'Path: {dirname!r} is too long for a directory')
    except TypeError:
        # non-string parts (e.g. a Path mixed into the split) -- invalid
        return False
    else:
        return True
def temp_home(clazz, enter_functions=None, exit_functions=None, use_temp_home=None, extra_env=None):
    '''Return an env_override object with a temporary HOME.

    A fresh temporary directory is used (and removed at exit) unless
    *use_temp_home* supplies one.  On Windows HOMEDRIVE/HOMEPATH/APPDATA
    are derived from it as well.  *extra_env* may add variables but may
    not override the managed ones.
    '''
    check.check_callable_seq(enter_functions, allow_none=True)
    check.check_callable_seq(exit_functions, allow_none=True)

    if use_temp_home:
        tmp_home = use_temp_home
    else:
        tmp_home = tempfile.mkdtemp(suffix='-tmp-home.dir')
        filesystem.atexit_remove(tmp_home)

    if host.is_unix():
        env = {'HOME': tmp_home}
    elif host.is_windows():
        homedrive, homepath = path.splitdrive(tmp_home)
        env = {
            'HOME': tmp_home,
            'HOMEDRIVE': homedrive,
            'HOMEPATH': homepath,
            'APPDATA': path.join(tmp_home, 'AppData\\Roaming')
        }
    else:
        # BUG FIX: previously fell through with `env` unbound and
        # crashed later with NameError; fail with a clear error instead.
        raise RuntimeError('temp_home: unsupported platform')

    if extra_env:
        # Refuse to let callers silently clobber the managed keys.
        for key in env:
            if key in extra_env:
                raise RuntimeError(f'Invalid key: "{key}"')
        env.update(extra_env)
    return env_override(env=env,
                        enter_functions=enter_functions,
                        exit_functions=exit_functions)
def get_root_path():
    """Helper function for getting the root path."""
    drive = splitdrive(sys.executable)[0]
    # On Windows sys.executable carries a drive letter; on POSIX the
    # drive component is empty and the root is simply '/'.
    return '{}\\'.format(drive) if drive else '/'
def start(path, drive, progress=None): st = stat(path) #preprocess to see if it's a drive or path we are dealing with if splitdrive(path)[1].lstrip("/\\"): #path not root if islink(path): log.warn("Link ignored: %s" % path) elif isdir(path): newdir = Dir(name=basename(path), path=path, size=0, modtime=st.st_mtime, createtime=st.st_ctime) drive.dirs.append(newdir) processdir(path, newdir, progress) drive.totaldirs += newdir.totaldirs drive.totalfiles += newdir.totalfiles drive.size += newdir.size elif isfile(path): size = st.st_size if progress: progress.update(size) drive.totalfiles += 1 drive.files.append(File(name=basename(path), parent=drive, size=size, modtime=st.st_mtime, createtime=st.st_ctime)) drive.size += size else: log.warn("UNKNOWN OBJECT? %s" % path) else: #root processdir(path, drive, progress)
def URLFromVirtualFilePath(self,path):
    """Converts a virtual file path into a :class:`URI` instance.

    The path is made absolute, split into drive plus segments and
    re-assembled as a file:// URL.  For UNC paths the first segment
    becomes the URL host; each segment is UTF-8 encoded and
    percent-escaped.
    """
    host=path.fsName
    segments=[]
    if not path.isabs():
        path=path.abspath()
    drive,head=path.splitdrive()
    uncFlag=path.IsUNC()
    # Peel components off the tail of `head` until it can't be split.
    while head:
        newHead,tail=head.split()
        if newHead==head:
            # We are unable to split any more from head
            if uncFlag and segments:
                # This is the unusual case of the UNC path, first segment is machine
                if host:
                    raise ValueError("UNC hosts cannot be specified in named file systems.")
                host=str(segments[0])
                del segments[0]
            break
        else:
            segments[0:0]=[tail]
            head=newHead
    if drive:
        segments[0:0]=[drive]
    # At this point we need to convert to octets
    # (NOTE(review): `c` is assigned but never used -- confirm whether
    # the filesystem encoding was meant to be applied below.)
    c=sys.getfilesystemencoding()
    if host:
        host=EscapeData(host,IsAuthorityReserved)
    for i in xrange(len(segments)):
        # we always use utf-8 in URL path segments to make URLs portable
        segments[i]=EscapeData(unicode(segments[i]).encode('utf-8'),IsPathSegmentReserved)
    return FileURL('file://%s/%s'%(host,string.join(segments,'/')))
def __init__(self, directory, logger):
    """Set up free-space monitoring for *directory* (Windows only).

    Local paths are monitored via their drive letter; UNC shares are
    first mounted onto a free drive letter with ``net use`` (in which
    case ``unmountRequired`` is set so the mount can be released
    later).  Failure is reported via ``self.invalid`` rather than by
    raising.
    """
    self.directory = directory
    self.logger = logger
    self.unmountRequired = False
    self.invalid = False
    self.diskUsageDB = "MirrorMaker.state"
    # NOTE(review): platform() returns a full string such as
    # 'Windows-10-...', so comparing it to "Windows" with != looks like
    # it is always True (always "unsupported"); platform.system() may
    # have been intended -- confirm.
    if platform() != "Windows":
        self.logger.message("MMDiskMonitor only supports Windows platforms at this time", 3)
        self.invalid = True
    else:
        if splitunc(directory)[0] == "":
            # Local drive
            self.caption = splitdrive(directory)[0]
            self.name = self.caption
        else:
            # Network share: try drive letters A..Z until `net use` succeeds.
            self.uncRoot = splitunc(directory)[0]
            for i in range(26):
                if system("net use " + ascii_uppercase[i] + ": " + self.uncRoot + " > NUL 2>&1") == 0:
                    self.caption = ascii_uppercase[i] + ":"
                    self.name = self.uncRoot
                    self.directory = join(self.caption, self.uncRoot)
                    self.unmountRequired = True
                    break
            if self.unmountRequired == False:
                self.logger.message("Error mounting network share " + self.uncRoot + " for monitoring its free space", 3)
                self.invalid = True
    # NOTE(review): on the failure paths above self.name is never set,
    # so this line would raise AttributeError -- confirm intended flow.
    self.logger.message("Disk monitor initialized for " + self.name, 0)
def isNetworkFile(self):
    """Return true if this file resides on a network share.

    Note: For networked file, isLocal is *always* true.

    The result is cached in self._networkFile after the first call.
    """
    if self._networkFile is None:
        # Check if the user allows networked files to be checked - bug 88521.
        network_check_enabled = FileHandler._network_file_check_enabled
        if network_check_enabled is None:
            globalPrefs = components.classes["@activestate.com/koPrefService;1"].\
                getService(components.interfaces.koIPrefService).prefs
            network_check_enabled = globalPrefs.getBooleanPref(
                "checkNetworkDiskFile")
            # cache the pref on the class so it is read only once
            FileHandler._network_file_check_enabled = network_check_enabled
        # NOTE(review): the drive-type probe runs when the pref is
        # *disabled* (`not network_check_enabled`); given the pref name
        # this condition looks inverted -- confirm intent.
        if not network_check_enabled and win32:
            # Determine if file is networked using the Win32 API. The string
            # must be a unicode object - otherwise the call will fail.
            # TODO: Does my path ever change? If so there needs to be an
            # invalidate method so this call fires again.
            drive = unicode(splitdrive(self._path)[0])
            self._networkFile = Win32_GetDriveTypeW(
                drive) == WIN32_DRIVE_REMOTE
        else:
            self._networkFile = False
        # TODO: Check if the user has marked this location to be treated as
        # a network file type (via user preferences).
        #if not self._networkFile:
        #    if self._path.startswith("/home/toddw/tmp"):
        #        self._networkFile = True
    return self._networkFile
def checkdup(filename, thisdir, texture, texlist, multierr, object):
    """Canonicalise *filename* and track duplicate texture usage.

    Returns the canonical texture path (the first one seen, or the
    existing *texture* when this file matches it).  Appends lowercased
    paths to *texlist* and records *object* in *multierr* when an
    object references more than one distinct texture file.
    """
    # Canonicalise pathnames to avoid false dupes
    if filename[0:2] in ['//', '\\\\']:
        # Path is relative to .blend file
        fixedfile=join(thisdir,filename[2:])
    else:
        fixedfile=abspath(filename)
    if sep=='\\':
        if fixedfile[0] in ['/', '\\']:
            # Add Windows drive letter (borrowed from Blender's own path)
            (drive,foo)=splitdrive(Blender.sys.progname)
            fixedfile=drive.lower()+fixedfile
        else:
            # Lowercase Windows drive lettter
            fixedfile=fixedfile[0].lower()+fixedfile[1:]
    # Check for multiple textures
    if ((not texture) or (str.lower(fixedfile)==str.lower(texture))):
        texture = fixedfile
        texlist.append(str.lower(fixedfile))
    else:
        # A second, different texture was found for this object.
        if not multierr:
            # first offender: print the warning header once
            print "Warn:\tMultiple texture files found:"
            print texture
        if not object in multierr:
            multierr.append(object)
        if not str.lower(fixedfile) in texlist:
            texlist.append(str.lower(fixedfile))
        print '%s in object %s, ...' % (fixedfile, object.name)
    return texture
def isNetworkFile(self):
    """Return true if this file resides on a network share.

    Note: For networked file, isLocal is *always* true.

    The answer is computed once and cached in self._networkFile.
    """
    if self._networkFile is None:
        # Check if the user allows networked files to be checked - bug 88521.
        network_check_enabled = FileHandler._network_file_check_enabled
        if network_check_enabled is None:
            globalPrefs = components.classes["@activestate.com/koPrefService;1"].\
                getService(components.interfaces.koIPrefService).prefs
            network_check_enabled = globalPrefs.getBooleanPref("checkNetworkDiskFile")
            # cache the pref on the class so it is fetched only once
            FileHandler._network_file_check_enabled = network_check_enabled
        # NOTE(review): the Win32 probe runs when the pref is *disabled*
        # (`not network_check_enabled`) -- this looks inverted relative
        # to the pref name; confirm intent.
        if not network_check_enabled and win32:
            # Determine if file is networked using the Win32 API. The string
            # must be a unicode object - otherwise the call will fail.
            # TODO: Does my path ever change? If so there needs to be an
            # invalidate method so this call fires again.
            drive = unicode(splitdrive(self._path)[0])
            self._networkFile = Win32_GetDriveTypeW(drive) == WIN32_DRIVE_REMOTE
        else:
            self._networkFile = False
        # TODO: Check if the user has marked this location to be treated as
        # a network file type (via user preferences).
        #if not self._networkFile:
        #    if self._path.startswith("/home/toddw/tmp"):
        #        self._networkFile = True
    return self._networkFile
def play_music(self, name):
    """Play *name*: a '.CDA' CD-audio track through pygame.cdrom,
    anything else through pygame.mixer.music; report the outcome via
    the status display ``self.V``."""
    a = path.split(name)       # (directory, basename)
    b = path.splitdrive(name)  # (drive, rest); drive identifies the CD
    if pygame.mixer.music.get_busy():
        pygame.mixer.music.stop()
    elif PLATFORM == "win32" and self.a_drive_is_busy():
        self.stop_drives()
    if name[-4:].upper() == ".CDA":
        # CD audio: map the drive letter to a pygame drive id, then play
        # the track by its index in the current directory listing
        # (assumes cwd is the CD's directory -- TODO confirm).
        drive = b[0] + SLASH
        for x in self.drive_list:
            if drive == x[1]:
                drive_id = x[0]
        track_list = listdir(u".")
        track_number = track_list.index(a[1])
        self.pcc = pygame.cdrom.CD(drive_id)
        self.pcc.init()
        try:
            self.pcc.play(track_number)
            self.V.set("loaded ... " + a[1])
        except RuntimeError:
            self.V.set("could not load ...")
    else:
        try:
            # filenames are encoded to the platform locale for pygame
            pygame.mixer.music.load(name.encode(DEFAULTLOCALE[1]))
            pygame.mixer.music.play()
            self.V.set("loaded ... " + a[1])
        except RuntimeError:
            self.V.set("could not load ...")
def _update_files(self, *args):
    """Rebuild the entry list for the current path, prepending a '..'
    entry unless the path is the filesystem root."""
    # Clear current files
    self.dispatch("on_entries_cleared")

    # Decide whether a parent ('..') entry is needed (OS-specific test).
    if platform == "win32":
        at_root = splitdrive(self.path)[1] in (sep, altsep)
    elif platform in ("darwin", "linux2"):
        at_root = normpath(expanduser(self.path)) == sep
    else:
        # Unknown file system; Just always add the .. entry but also log
        Logger.warning("Filechooser: Unsupported OS: %r" % platform)
        at_root = False

    if not at_root:
        back = ".." + sep
        entry_kwargs = dict(
            name=back, size="", path=back, controller=self,
            isdir=True, parent=None, sep=sep,
            get_nice_size=lambda: "")
        pardir = Builder.template(self._ENTRY_TEMPLATE, **entry_kwargs)
        self.dispatch("on_entry_added", pardir)
    try:
        self._add_files(self.path)
    except OSError:
        Logger.exception("Unable to open directory <%s>" % self.path)
def explode_path(path):
    """Split a path into its components.

    If the path is absolute, the first value of the returned list will
    be '/', or the drive letter for platforms where it is applicable.

    Example
    -------
    >>> explode_path("/Users/joe")
    ["/", "Users", "joe"]
    """
    drive, remainder = op.splitdrive(path)
    parts = []
    while remainder:
        head, tail = op.split(remainder)
        if head == remainder:
            # Reached an unsplittable root such as '/'.
            parts.append(head)
            break
        if tail:
            parts.append(tail)
        remainder = head
    if drive:
        parts.append(drive)
    parts.reverse()
    return parts
def to_components(self, path):
    """Split *path* into ``(drive, [component, ...])``.

    The drive (Windows) is split off first; the remaining path is then
    decomposed into its ordered components.
    """
    drive, path = splitdrive(path)
    components = []
    # BUG FIX: the original looped `while path != sep`, which never
    # terminates for relative or empty paths because os.path.split('')
    # keeps returning ('', '').  Stop at the root *or* when the path is
    # exhausted.
    while path and path != sep:
        path, tail = split(path)
        components.insert(0, tail)
    return drive, components
def _get_abspath(local_path): """Returns the absolute path to the required file. """ mname = _getframe(1).f_globals.get('__name__') if mname == '__main__' or mname == '__init__': mpath = getcwd() else: module = modules[mname] if hasattr(module, '__path__'): mpath = module.__path__[0] elif '.' in mname: mpath = modules[mname[:mname.rfind('.')]].__path__[0] else: mpath = mname drive, mpath = splitdrive(mpath) mpath = drive + join(mpath, local_path) # Make it working with Windows. Internally we use always the "/". if sep == '\\': mpath = mpath.replace(sep, '/') return mpath
def _update_files(self, *args):
    """Refresh the entry list for self.path, prepending a '..' entry
    unless we are at the filesystem root."""
    # Clear current files
    self.dispatch('on_entries_cleared')
    self._items = []

    # Is self.path the filesystem root?  (OS-specific test.)
    if platform == 'win':
        at_root = splitdrive(self.path)[1] in (sep, altsep)
    elif platform in ('macosx', 'linux', 'android', 'ios'):
        at_root = normpath(expanduser(self.path)) == sep
    else:
        # Unknown file system; Just always add the .. entry but also log
        Logger.warning('Filechooser: Unsupported OS: %r' % platform)
        at_root = False

    if not at_root:
        back = '..' + sep
        pardir = Builder.template(
            self._ENTRY_TEMPLATE,
            **dict(name=back, size='', path=back, controller=self,
                   isdir=True, parent=None, sep=sep,
                   get_nice_size=lambda: ''))
        self._items.append(pardir)
        self.dispatch('on_entry_added', pardir)
    try:
        self._add_files(self.path)
    except OSError:
        Logger.exception('Unable to open directory <%s>' % self.path)
def _xp_filename_windows(clazz, p, sep=None): sep = sep or clazz._XP_SEP _, split_path = path.splitdrive(p) xp_split_path = split_path.replace('\\', sep) xp_split_path = xp_split_path.replace('/', sep) result = p.replace(split_path, xp_split_path) return result
def _abspath_split(path):
    """Normalise *path* to an absolute path and split it into
    ``(is_unc, prefix, components)`` where prefix is the UNC share
    (when present) or the drive letter."""
    absolute = abspath(op.normpath(path))
    prefix, remainder = splitunc(absolute)
    is_unc = bool(prefix)
    if not is_unc:
        # No UNC share: fall back to a plain drive split.
        prefix, remainder = splitdrive(absolute)
    parts = [piece for piece in remainder.split(sep) if piece]
    return is_unc, prefix, parts
def _save(self, f, trail):
    """Create a virtual path of groups in the Zip file and save data.

    Args:
        f(DataFile): A DataFile instance to save
        trail (list): The trail of groups

    Returns:
        The new filename of the saved DataFile.

    ZipFiles are really a flat heirarchy, so concatentate the trail and save the data using
    :py:meth:`Stoner.Zip.ZipFile.save`

    This routine is used by a walk_groups call - hence the prototype matches that required for
    :py:meth:`Stoner.Folders.DataFolder.walk_groups`.
    """
    if not isinstance(f, DataFile):
        f = DataFile(f)
    # Drop any drive letter so the member path is archive-relative.
    filename = path.splitdrive(f.filename)[1]
    bits = [self.File.filename] + trail + [filename]
    # path.join("a", "b")[1] yields the platform path separator.
    pathsep = path.join("a", "b")[1]
    for ix, b in enumerate(bits):
        # Strip a leading separator from every component except the
        # first, otherwise path.join would discard everything before it.
        if ix == 0 or not b.startswith(pathsep):
            continue
        bits[ix] = b[1:]
    member = path.join(*bits)
    f = ZippedFile(f)
    f.save(member)
    return f.filename
def relpath(stage='', prefix=''):
    """Combine *stage* with *prefix* to form the actual path: *stage*
    gains a trailing '/' and the drive component of *prefix* is
    dropped."""
    if stage and not stage.endswith('/'):
        stage = stage + '/'
    tail = splitdrive(prefix)[1]
    return join(stage + tail)
def _convert_video_to_audio(storage_location, video_path, lang_code, mkv_streams):
    """Extract the audio of *video_path* to an .mp3 under
    *storage_location* (mirroring the video's drive-relative folder)
    and return the mp3 path.

    When *lang_code* and *mkv_streams* are given and more than one
    audio stream exists, the matching stream is selected with ffmpeg's
    ``-map 0:a:<n>``.
    """
    drive, tail = splitdrive(video_path)
    # tail[1:] drops the leading separator so join() keeps storage_location.
    dir_name = join(storage_location, dirname(tail[1:]))
    file_name, file_ext = splitext(basename(video_path))
    audio_path = join(dir_name, file_name + '.mp3')
    Path(dir_name).mkdir(parents=True, exist_ok=True)
    # NOTE(review): run() is handed a single command *string* without
    # shell=True, which only resolves on Windows; POSIX would need
    # shell=True or an argument list.  Confirm target platform.
    stream_cmd = f'{_get_ffmpeg()} -i "{video_path}" -y -ab 160k -ac 2 -ar 44100 -vn "{audio_path}"'
    if lang_code is not None and mkv_streams is not None and len(
            mkv_streams[0]) > 1:
        # mkv_streams[0] is presumably the list of audio-track language
        # codes -- verify against the caller.
        track_id = mkv_streams[0].index(
            lang_code) if lang_code in mkv_streams[0] else None
        if track_id is not None:
            stream_cmd = f'{_get_ffmpeg()} -i "{video_path}" -map 0:a:{track_id} -y -ab 160k -ac 2 -ar 44100 -vn "{audio_path}"'
            print(
                f'[video-to-podcast]: selected "{lang_code}" audio track for {file_name}'
            )
    print(f'[video-to-podcast]: started converting {file_name} to audio.')
    run(stream_cmd, stdout=DEVNULL, stderr=DEVNULL)
    print(f'[video-to-podcast]: finished converting {file_name} to audio.')
    return audio_path
def _update_files(self, *args):
    """Clear and regenerate the entries for self.path; a '..' entry is
    added first unless the path is already a filesystem root."""
    # Clear current files
    self.dispatch('on_entries_cleared')
    self._items = []

    # Platform-specific root detection.
    if platform == 'win':
        root_reached = splitdrive(self.path)[1] in (sep, altsep)
    elif platform in ('macosx', 'linux', 'android', 'ios'):
        root_reached = normpath(expanduser(self.path)) == sep
    else:
        # Unknown file system; Just always add the .. entry but also log
        Logger.warning('Filechooser: Unsupported OS: %r' % platform)
        root_reached = False

    if not root_reached:
        back = '..' + sep
        template_kwargs = dict(name=back, size='', path=back,
                               controller=self, isdir=True, parent=None,
                               sep=sep, get_nice_size=lambda: '')
        pardir = Builder.template(self._ENTRY_TEMPLATE, **template_kwargs)
        self._items.append(pardir)
        self.dispatch('on_entry_added', pardir)
    try:
        self._add_files(self.path)
    except OSError:
        Logger.exception('Unable to open directory <%s>' % self.path)
def _generate_file_entries(self, *args, **kwargs):
    # Generator that will create all the files entries.
    # the generator is used via _update_files() and _create_files_entries()
    # don't use it directly.
    is_root = False
    path = kwargs.get('path', self.path)
    have_parent = kwargs.get('parent', None) is not None

    # Add the components that are always needed
    if self.rootpath:
        # A rootpath confines browsing: paths outside it snap back to it.
        rootpath = realpath(self.rootpath)
        path = realpath(path)
        if not path.startswith(rootpath):
            self.path = rootpath
            return
        elif path == rootpath:
            is_root = True
    else:
        if platform == 'win':
            is_root = splitdrive(path)[1] in (sep, altsep)
        elif platform in ('macosx', 'linux', 'android', 'ios'):
            is_root = normpath(expanduser(path)) == sep
        else:
            # Unknown fs, just always add the .. entry but also log
            Logger.warning('Filechooser: Unsupported OS: %r' % platform)
    # generate an entries to go back to previous
    if not is_root and not have_parent:
        back = '..' + sep
        if platform == 'win':
            # On Windows the parent path is computed explicitly,
            # presumably because a literal '..' entry is not resolved
            # later -- confirm against the entry widget.
            new_path = path[:path.rfind(sep)]
            if sep not in new_path:
                new_path += sep
            pardir = self._create_entry_widget(
                dict(name=back, size='', path=new_path,
                     controller=ref(self), isdir=True, parent=None,
                     sep=sep, get_nice_size=lambda: ''))
        else:
            pardir = self._create_entry_widget(
                dict(name=back, size='', path=back, controller=ref(self),
                     isdir=True, parent=None, sep=sep,
                     get_nice_size=lambda: ''))
        yield 0, 1, pardir

    # generate all the entries for files
    try:
        for index, total, item in self._add_files(path):
            yield index, total, item
    except OSError:
        Logger.exception('Unable to open directory <%s>' % self.path)
        # wipe the file list so stale entries are not shown
        self.files[:] = []
def get_root_dir(cwd=None):
    """Ascend from *cwd* (default: the current directory) until
    is_root_dir() matches; return that directory, or None when the
    filesystem root is reached first."""
    candidate = cwd if cwd is not None else getcwd()
    # Stop once only a bare root ('/' or '\\') remains after the drive.
    while normpath(splitdrive(candidate)[1]) not in ("/", "\\", ""):
        if is_root_dir(candidate):
            return candidate
        candidate = join(candidate, "..")
    return None
def getCommonPath(dirname, filename):
    """Return the longest common directory prefix of the two paths,
    ending with a separator, or None when they are on different
    (Windows) drives."""
    # if the 2 components are on different drives (windows)
    if splitdrive(dirname)[0] != splitdrive(filename)[0]:
        return None
    common = []
    for left, right in zip(dirname.split(sep), filename.split(sep)):
        if left != right:
            break
        common.append(left)
    joined = sep.join(common)
    if not joined:
        return sep
    if not joined.endswith(sep):
        joined += sep
    return joined
def get_pdf_content(pages, toc):
    """Assemble the site pages listed in *toc* into one HTML document
    (rendered through the "pdf.html" template) suitable for PDF export.

    Element ids are prefixed with their page id so anchors stay unique
    after concatenation, internal links are rewritten to in-document
    anchors, and headers are demoted one level.

    :type pages: flask.ext.flatpages.flatpages.FlatPages
    :param pages:
    """
    content = []
    for toc_section in toc:
        section = {"id": toc_section["title"].replace(" ", "_"),
                   "title": toc_section["title"],
                   "content": []}
        for reference in toc_section["items"]:
            url = reference["url"]
            # Normalise "/docs/foo.html" -> "docs/foo"
            if url.startswith("/"):
                url = url[1:]
            if url.endswith(".html"):
                url = url[:-5]
            if url == "docs/reference/grammar":
                # The grammar page is rendered from a template, not flatpages.
                page_html = render_template("pages/grammar.html",
                                            kotlinGrammar=get_grammar()).replace("<br>", "<br/>")
                document = BeautifulSoup(page_html, "html.parser")
                document = document.find("div", {"class": "grammar"})
                page_id = "grammar"
                title = "Grammar"
            else:
                page = pages.get(url)
                if page is None:
                    continue
                title = page.meta["title"]
                document = BeautifulSoup(page.html, "html.parser")
                page_id = page.path.split("/")[-1]
            for element in document.find_all():
                # Prefix ids so anchors remain unique in the merged doc.
                if "id" in element.attrs:
                    element.attrs["id"] = page_id + "_" + element.attrs["id"]
                if element.name == "a":
                    if "href" not in element.attrs:
                        continue
                    href = element.attrs["href"]
                    url = urlparse(href)
                    if url.scheme == "":
                        # Internal link: rewrite to an in-document anchor.
                        if href.startswith("#"):
                            new_href = page_id + "_" + href[1:]
                        else:
                            url_path = url.path[:-5] if url.path.endswith(".html") else url.path
                            new_href = url_path + ("_" + url.fragment if url.fragment != "" else "")
                        element.attrs["href"] = "#" + new_href
                header_regex = re.compile("^h(\d)$")
                if header_regex.match(element.name):
                    # Demote headers one level (h1 -> h2, ...) so the
                    # section titles stay on top of the hierarchy.
                    level = int(header_regex.match(element.name).group(1)) + 1
                    element.name = "h" + str(level)
            section["content"].append({"id": page_id,
                                       "title": title,
                                       "content": document.decode()})
        content.append(section)
    # root_folder_path is a module-level global; drive split keeps the
    # forward-slash normalisation working on Windows paths.
    drive, root_folder_path_rest = path.splitdrive(root_folder_path)
    page_html = render_template(
        "pdf.html",
        content=content,
        root_folder=(drive + root_folder_path_rest).replace("\\", "/")
    )
    return page_html
def get_module_name_from_zip(filename):
    """Derive the dotted module name for *filename*, a path that points
    inside a zip archive (e.g. '/path/egg.zip/pkg/mod.py' -> 'pkg.mod').

    Returns None when no zip file is found on the path, or when
    *filename* is the zip file itself (there is no module path inside).
    """
    # first, find the zip file in the path
    filepath = filename
    zippath = None
    while not is_zipfile(filepath) and \
            splitdrive(filepath)[1] != '\\' and \
            splitdrive(filepath)[1] != '/':
        filepath, tail = os.path.split(filepath)
        if zippath is not None:
            zippath = tail + '/' + zippath
        else:
            zippath = tail

    if not is_zipfile(filepath):
        return None
    # BUG FIX: when *filename* itself is the zip file the loop body
    # never runs, zippath stays None and the code below crashed with
    # AttributeError; there is no inner module to name, so bail out.
    if zippath is None:
        return None

    # if the split left a preceding slash on the zippath then remove it
    if zippath.startswith('\\') or zippath.startswith('/'):
        zippath = zippath[1:]

    # replace any backwards slashes with forward slashes
    zippath = zippath.replace('\\', '/')

    # Get the name of the module minus the '.py'
    module, ext = splitext(basename(zippath))

    # Start with the actual module name.
    module_path = [module]

    # to get the module name, we walk through the zippath until we
    # find a parent directory that does NOT have a __init__.py file
    z = ZipFile(filepath)
    parentpath = dirname(zippath)
    while path_exists_in_zip(z, parentpath + '/__init__.py'):
        module_path.insert(0, basename(parentpath))
        parentpath = dirname(parentpath)
    z.close()

    return '.'.join(module_path)
def get_zip_path(filename):
    """Return the portion of *filename* that lies inside a zip archive.

    Walks the path upward, accumulating the trailing components, until an
    actual zip file or a filesystem root is reached.  For example,
    ``/tmp/bundle.zip/pkg/mod.py`` yields ``'pkg/mod.py'``.  Returns None
    when *filename* itself is already the zip file (or a root).
    """
    current = filename
    inner = None
    while True:
        # Stop at the first real zip archive on the path ...
        if is_zipfile(current):
            break
        # ... or at a filesystem root (drive-less part is a bare sep).
        if splitdrive(current)[1] in ('\\', '/'):
            break
        current, leaf = os.path.split(current)
        inner = leaf if inner is None else leaf + '/' + inner
    return inner
def get_file_details(file_path, file_name, dirname):
    """Compute physical size info for an image and ensure its thumbnail.

    Opens the image, derives its size in inches from the DPI metadata,
    and creates (or reuses) a JPEG thumbnail mirroring *dirname*'s layout
    under THUMBNAIL_PATH.

    :param file_path: absolute path of the image file
    :param file_name: file name (with extension); names the thumbnail
    :param dirname: directory of the file; its drive-less part is mirrored
        under the thumbnail tree
    :return: (xdpi, x_inch, y_inch, div_factor, thumbnail_path,
        thumbnail_height)
    :raises KeyError: when the image carries no 'dpi' metadata
    """
    THUMB_SIZE = 100, 100
    fobj = Image.open(file_path)
    xdpi, ydpi = fobj.info['dpi']
    x_px, y_px = fobj.size
    # NOTE(review): the +1 skews the inch computation slightly; it looks
    # like a guard against zero DPI values -- confirm intent.
    xdpi += 1
    ydpi += 1
    # Round only when the division is not exact (py2 integer division
    # otherwise yields a whole number of inches).
    if x_px % xdpi != 0:
        x_inch = round(x_px / (xdpi * 1.0), 2)
    else:
        x_inch = x_px / xdpi
    if y_px % ydpi != 0:
        y_inch = round(y_px / (ydpi * 1.0), 2)
    else:
        y_inch = y_px / ydpi
    # div_factor 12 for "ordinary" DPI ranges; meaning of the magic
    # numbers is not visible from here -- TODO confirm with callers.
    if 20 < xdpi < 400:
        div_factor = 12
    else:
        div_factor = 1
    # generate the thumbnail
    file_name, file_extension = os.path.splitext(file_name)
    wlk_drive, wlk_dir = splitdrive(dirname)
    thumbnail_path = join(THUMBNAIL_PATH, 'thumbnail')
    thumbnail_path = join(thumbnail_path, wlk_dir[1:])
    if not os.path.exists(thumbnail_path):
        os.makedirs(thumbnail_path)
        rlog.info('making dirs %s' % thumbnail_path)
    thumbnail_path = join(thumbnail_path, file_name)
    thumbnail_path = thumbnail_path + '.jpg'
    if not os.path.exists(thumbnail_path) and use_thumbnail:
        rlog.info('generating thumbnail for %s' % file_name)
        fobj.thumbnail(THUMB_SIZE)
        th_x, th_y = fobj.size
        thumbnail_height = int(th_y * 0.75)
        fobj.save(thumbnail_path, 'JPEG')
    else:
        try:
            thm_obj = Image.open(thumbnail_path)
            th_x, th_y = thm_obj.size
            thumbnail_height = int(th_y * 0.75)
        except Exception:
            # Was a bare "except:", which also swallowed KeyboardInterrupt
            # and SystemExit; keep the best-effort fallback height.
            thumbnail_height = 10
            rlog.info('skipping generating thumbnail for %s' % file_name)
    return xdpi, x_inch, y_inch, div_factor, thumbnail_path, thumbnail_height
def hwaf_get_install_path(self, k, destdir=True):
    """Return the installation path derived from ``self.env``.

    Variables such as '${PREFIX}/bin' inside *k* are substituted from the
    environment and separators normalised for the host OS.  When *destdir*
    is true and ``DESTDIR`` is set, the result is re-rooted under it (the
    drive letter and leading separators are stripped first).
    """
    resolved = waflib.Utils.subst_vars(k, self.env).replace('/', os.sep)
    if destdir and self.env.DESTDIR:
        relative = osp.splitdrive(resolved)[1].lstrip(os.sep)
        resolved = os.path.join(self.env.DESTDIR, relative)
    return resolved
def to_posix(path_name):
    """Convert *path_name* to a POSIX-style path.

    On POSIX systems the path is returned unchanged.  Elsewhere the drive
    is split off, separators are normalised to '/', and any drive present
    is recorded through add_drive().
    """
    if os.name == "posix":
        return path_name
    drive, remainder = systempath.splitdrive(path_name)
    posix_path = remainder.replace(os.sep, posixpath.sep)
    if drive:
        add_drive(drive, posix_path)
    return posix_path
def splitpath(path):
    '''
    Split string path into a list of folders (+ file if available).

    Absolute (non-UNC) paths lose their leading separator; otherwise the
    drive (empty on POSIX) is kept as the first list element.

    The original indexed ``path[1]`` unconditionally and raised
    IndexError for one-character paths such as '/'; the startswith
    checks below are equivalent for longer paths and safe for short ones.
    '''
    if path.startswith(sep) and not path.startswith(sep * 2):
        # Absolute path (single leading separator): drop it and split.
        return normpath(path[1:]).split(sep)
    drive, rest = splitdrive(path)
    if rest.startswith(sep) and not rest.startswith(sep * 2):
        rest = rest[1:]
    return [drive, ] + normpath(rest).split(sep)
def test_make_tarball(self):
    """_make_tarball creates both a gzipped and a plain tarball."""
    # creating something to tar
    tmpdir = self.mkdtemp()
    self.write_file([tmpdir, "file1"], "xxx")
    self.write_file([tmpdir, "file2"], "xxx")
    os.mkdir(os.path.join(tmpdir, "sub"))
    self.write_file([tmpdir, "sub", "file3"], "xxx")

    tmpdir2 = self.mkdtemp()
    # force shutil to create the directory
    os.rmdir(tmpdir2)
    # BUG FIX: calling unittest.skipUnless(...) inline only builds a
    # decorator and discards it -- the test was never skipped.  Skip
    # explicitly instead.
    if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
        self.skipTest("source and target should be on same drive")

    base_name = os.path.join(tmpdir2, "archive")

    # working with relative paths to avoid tar warnings
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        _make_tarball(splitdrive(base_name)[1], ".")
    finally:
        os.chdir(old_dir)

    # check if the compressed tarball was created
    tarball = base_name + ".tar.gz"
    self.assertTrue(os.path.exists(tarball))

    # trying an uncompressed one
    base_name = os.path.join(tmpdir2, "archive")
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        _make_tarball(splitdrive(base_name)[1], ".", compress=None)
    finally:
        os.chdir(old_dir)
    tarball = base_name + ".tar"
    self.assertTrue(os.path.exists(tarball))
def _generate_file_entries(self, *args, **kwargs):
    """Yield (index, total, entry-widget) tuples for the current directory.

    Generator used via _update_files() and _create_files_entries();
    don't call it directly.  Yields an optional ".." entry first, then
    one entry per file produced by self._add_files().
    """
    # Generator that will create all the files entries.
    # the generator is used via _update_files() and _create_files_entries()
    # don't use it directly.
    is_root = True
    path = kwargs.get("path", self.path)
    # A non-None "parent" means the caller already has a parent entry,
    # so no ".." entry should be generated here.
    have_parent = kwargs.get("parent", None) is not None

    # Add the components that are always needed
    if self.rootpath:
        # Browsing is confined under rootpath: resolve symlinks before
        # comparing so the containment check can't be escaped.
        rootpath = realpath(self.rootpath)
        path = realpath(path)
        if not path.startswith(rootpath):
            # Requested path escaped the sandbox: snap back to the root
            # and produce no entries this round.
            self.path = rootpath
            return
        elif path == rootpath:
            is_root = True
    else:
        # No sandbox: decide "am I at the filesystem root?" per platform
        # (kivy's `platform`, hence "win"/"macosx"/... strings).
        if platform == "win":
            # Drive-less remainder being a bare separator means drive root.
            is_root = splitdrive(path)[1] in (sep, altsep)
        elif platform in ("macosx", "linux", "android", "ios"):
            is_root = normpath(expanduser(path)) == sep
        else:
            # Unknown fs, just always add the .. entry but also log
            Logger.warning("Filechooser: Unsupported OS: %r" % platform)
            is_root = False
    # generate an entries to go back to previous
    if not is_root and not have_parent:
        back = ".." + sep
        pardir = Builder.template(
            self._ENTRY_TEMPLATE,
            **dict(
                name=back,
                size="",
                path=back,
                controller=ref(self),  # weakref: don't keep the chooser alive
                isdir=True,
                parent=None,
                sep=sep,
                get_nice_size=lambda: "",
            )
        )
        yield 0, 1, pardir

    # generate all the entries for files
    try:
        for index, total, item in self._add_files(path):
            yield index, total, item
    except OSError:
        # Unreadable directory: log and present an empty listing rather
        # than propagating out of the generator.
        Logger.exception("Unable to open directory <%s>" % self.path)
        self.files[:] = []
def test_make_tarball(self):
    """_make_tarball creates both a gzipped and a plain tarball."""
    # creating something to tar
    tmpdir = self.mkdtemp()
    write_file((tmpdir, 'file1'), 'xxx')
    write_file((tmpdir, 'file2'), 'xxx')
    os.mkdir(os.path.join(tmpdir, 'sub'))
    write_file((tmpdir, 'sub', 'file3'), 'xxx')

    tmpdir2 = self.mkdtemp()
    # force shutil to create the directory
    os.rmdir(tmpdir2)
    # BUG FIX: calling unittest.skipUnless(...) inline only builds a
    # decorator and discards it -- the test was never skipped.  Skip
    # explicitly instead.
    if splitdrive(tmpdir)[0] != splitdrive(tmpdir2)[0]:
        self.skipTest("source and target should be on same drive")

    base_name = os.path.join(tmpdir2, 'archive')

    # working with relative paths to avoid tar warnings
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        _make_tarball(splitdrive(base_name)[1], '.')
    finally:
        os.chdir(old_dir)

    # check if the compressed tarball was created
    tarball = base_name + '.tar.gz'
    self.assertTrue(os.path.exists(tarball))

    # trying an uncompressed one
    base_name = os.path.join(tmpdir2, 'archive')
    old_dir = os.getcwd()
    os.chdir(tmpdir)
    try:
        _make_tarball(splitdrive(base_name)[1], '.', compress=None)
    finally:
        os.chdir(old_dir)
    tarball = base_name + '.tar'
    self.assertTrue(os.path.exists(tarball))
def get_home_dir():
    """Return the PlatformIO home directory, creating it when missing.

    Defaults to ``~/.platformio`` unless the project overrides the
    "home_dir" option.  On Windows, a home path whose bytes are not
    decodable (Python 2's str.encode round-trip raises
    UnicodeDecodeError) falls back to ``<drive>\\.platformio``.
    """
    default = join(expanduser("~"), ".platformio")
    result = get_project_optional_dir("home_dir", default)

    if "windows" in get_systype():
        try:
            result.encode("utf8")
        except UnicodeDecodeError:
            # Non-ASCII user profile path: fall back to the drive root.
            result = splitdrive(result)[0] + "\\.platformio"

    if not isdir(result):
        os.makedirs(result)
    assert isdir(result)
    return result