def makedir(self, path, recursive=False, allow_recreate=False):
    """Creates a file with mimeType _folder_mimetype which acts as a
    folder in GoogleDrive.

    :param path: directory path to create
    :param recursive: create missing intermediate directories as well
    :param allow_recreate: do not fail if the directory already exists
    :raises DestinationExistsError: exists and allow_recreate is False
    :raises ResourceInvalidError: path refers to an existing file
    :raises ParentDirectoryMissingError: parent missing, non-recursive
    """
    if self.isdir(path):
        if allow_recreate:
            return
        else:
            raise DestinationExistsError(path)
    if self.isfile(path):
        raise ResourceInvalidError(path)
    if not recursive and not self.isdir(dirname(path)):
        raise ParentDirectoryMissingError(path)
    if recursive:
        # Create missing ancestors first; allow_recreate=True so that
        # already-existing ancestors are not treated as errors.
        self.makedir(dirname(path), recursive=recursive,
                     allow_recreate=True)
    parent_id = self._ids[dirname(path)]
    # Drive has no real directories: a "folder" is a file with the
    # special folder mimeType, parented under parent_id.
    fh = self.client.CreateFile({
        'title': basename(path),
        'mimeType': self._folder_mimetype,
        'parents': [{'id': parent_id}]
    })
    fh.Upload()
    # Cache the new folder's Drive id under its path.
    self._ids[path] = fh['id']
def _rename_file(self, src, dst):
    """Rename source file 'src' to destination file 'dst'.

    Only the metadata row is rewritten; the content row is shared, so
    the destination keeps pointing at the original file content.

    Fixes: `== None` / `!= None` comparisons replaced with
    `is None` / `is not None`; asserts use statement form.

    :raises ParentDirectoryMissingError: destination directory missing
    :raises DestinationExistsError: destination file already exists
    """
    srcdir = dirname(src)
    srcfname = basename(src)
    dstdir = dirname(dst)
    dstfname = basename(dst)
    # Make sure that the destination directory exists and the
    # destination file does not exist.
    dstdirid = self._get_dir_id(dstdir)
    if dstdirid is None:
        raise ParentDirectoryMissingError(dst)
    dstfile_id = self._get_file_id(dstdirid, dstfname)
    if dstfile_id is not None:
        raise DestinationExistsError(dst)
    # All checks are done. Delete the entry for the source file and
    # create an entry for the destination sharing the same content id.
    srcdir_id = self._get_dir_id(srcdir)
    assert srcdir_id is not None
    srcfile_id = self._get_file_id(srcdir_id, srcfname)
    assert srcfile_id is not None
    srccontent_id = self._get_file_contentid(srcfile_id)
    self._updatecur.execute('DELETE FROM FsFileMetaData where ROWID=?',
                            (srcfile_id,))
    self._updatecur.execute(
        "INSERT INTO FsFileMetaData(name, parent, fileid) VALUES(?,?,?)",
        (dstfname, dstdirid, srccontent_id))
def removedir(self, path, recursive=False, force=False):
    """Remove the directory at *path* from the FTP server.

    :param recursive: also remove now-empty ancestor directories
    :param force: remove the directory's contents first
    :raises ResourceNotFoundError: path does not exist
    :raises ResourceInvalidError: path is a file
    :raises DirectoryNotEmptyError: not empty and force is False
    """
    path = abspath(normpath(path))
    if not self.exists(path):
        raise ResourceNotFoundError(path)
    if self.isfile(path):
        raise ResourceInvalidError(path)
    if not force:
        # Raise on the first entry found, i.e. the directory is
        # non-empty; an empty listing falls straight through.
        for _checkpath in self.listdir(path):
            raise DirectoryNotEmptyError(path)
    try:
        if force:
            for rpath in self.listdir(path, full=True):
                try:
                    if self.isfile(rpath):
                        self.remove(rpath)
                    elif self.isdir(rpath):
                        self.removedir(rpath, force=force)
                except FSError:
                    # Best effort: skip entries that cannot be removed.
                    pass
        # Invalidate the parent's cached listing before the RMD.
        self.clear_dircache(dirname(path))
        self.ftp.rmd(_encode(path))
    except error_reply:
        # NOTE(review): presumably tolerates servers that send an
        # unexpected reply code after RMD -- confirm.
        pass
    if recursive:
        try:
            # Prune the parent as well if it has become empty.
            self.removedir(dirname(path), recursive=True)
        except DirectoryNotEmptyError:
            pass
    self.clear_dircache(dirname(path), path)
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None,
         newline=None, line_buffering=False, **kwargs):
    """Open the file at *path* and return a buffered remote file object.

    :raises ResourceInvalidError: path is a directory
    :raises ParentDirectoryMissingError: parent directory missing
    :raises ResourceNotFoundError: reading a file that does not exist
    """
    if self.isdir(path):
        raise ResourceInvalidError(path)
    # NOTE(review): this write-mode check is subsumed by the
    # unconditional parent-directory check a few lines below.
    if 'w' in mode and not self.isdir(dirname(path)):
        raise ParentDirectoryMissingError(path)
    if 'r' in mode and not self.isfile(path):
        raise ResourceNotFoundError(path)
    if not self.isdir(dirname(path)):
        raise ParentDirectoryMissingError(path)
    if 'w' in mode and '+' not in mode and self.isfile(path):
        # Plain write mode truncates: drop the existing remote file.
        self.remove(path)
    data = ''
    if 'r' in mode:
        # Seed the in-memory buffer with the current remote contents.
        data = self.getcontents(path, mode=mode, encoding=encoding,
                                errors=errors, newline=newline)
    rfile = StringIO(data=data, mode=mode)
    # The buffer writes back to this filesystem on flush/close.
    return RemoteFileBuffer(self, path, mode=mode, rfile=rfile)
def listdir(self, path="/", wildcard=None, full=False, absolute=False,
            dirs_only=False, files_only=False):
    """List entries under *path* from the cached path-to-id map.

    Fix: removed dead code -- a Drive ``query`` string was built (with a
    ``self._ids[dirname(path)]`` lookup) but never used, because the
    remote ``ListFile`` call it was written for is commented out; the
    listing actually comes from the refreshed local id map.

    :raises ResourceInvalidError: path is a file
    :raises ParentDirectoryMissingError: parent directory missing
    :raises ResourceNotFoundError: path does not exist
    """
    if self.isfile(path):
        raise ResourceInvalidError(path)
    if not self.isdir(path):
        if not self.isdir(dirname(path)):
            raise ParentDirectoryMissingError(path)
        raise ResourceNotFoundError(path)
    # Refresh the path -> id map and list entries from it rather than
    # querying the Drive API directly.
    self._ids = self._map_ids_to_paths()
    entries = self._ids.names(path)
    # _listdir_helper performs the wildcard/dirs_only/files_only
    # filtering on the entry names.
    return self._listdir_helper(path, entries, wildcard=wildcard,
                                full=full, absolute=absolute,
                                dirs_only=dirs_only,
                                files_only=files_only)
def copy(self, src, dst, overwrite=False, chunk_size=65536):
    """Server-side copy of the file *src* to *dst* on Google Drive.

    :param overwrite: replace an existing destination file
    :raises ResourceInvalidError: src or dst is a directory
    :raises ResourceNotFoundError: src does not exist
    :raises ParentDirectoryMissingError: a required parent is missing
    :raises DestinationExistsError: dst exists and overwrite is False
    """
    if self.isdir(src):
        raise ResourceInvalidError(src)
    if not self.isfile(src):
        if not self.isdir(dirname(src)):
            raise ParentDirectoryMissingError(src)
        raise ResourceNotFoundError(src)
    if self.isdir(dst):
        raise ResourceInvalidError(dst)
    if self.isfile(dst):
        if overwrite:
            self.remove(dst)
        else:
            raise DestinationExistsError(dst)
    else:
        if not self.isdir(dirname(dst)):
            raise ParentDirectoryMissingError(dst)
    # NOTE(review): despite the name, this holds the destination
    # parent folder's Drive id, not a path.
    parent_path = self._ids[dirname(dst)]
    copy_fh = {'title': basename(dst), 'parents': [{'id': parent_path}]}
    # Issue the copy through the underlying Drive API service.
    copy_fh = self.client.auth.service.files() \
        .copy(fileId=self._ids[src], body=copy_fh) \
        .execute()
    # Cache the new file's Drive id under its path.
    self._ids[dst] = copy_fh['id']
def gen_pcb_dxf(projfolder, force=False):
    """
    Generates a DXF file of the PCB provided by the gEDA project. The
    pcb file is the one listed in the gEDA project file, and the
    pcbname is the one specified in the
    :mod:`tendril.gedaif.conffile.ConfigsFile`.

    This function does not use jinja2 and latex. It relies on
    :func:`tendril.gedaif.pcb.conv_pcb2dxf` instead.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<projectfolder>/pcb/<pcbfile>.dxf``
    * Source Files : The project's `.pcb` file.
    """
    configfile = conffile.ConfigsFile(projfolder)
    gpf = projfile.GedaProjectFile(configfile.projectfolder)
    pcb_mtime = fsutils.get_file_mtime(
        os.path.join(configfile.projectfolder, 'pcb',
                     gpf.pcbfile + '.pcb'),
    )
    if pcb_mtime is None:
        logger.warning("PCB does not seem to exist for : " + projfolder)
        return
    docfolder = get_project_doc_folder(projfolder)
    # Output DXFs live one level above the documentation folder.
    dxffile = path.normpath(
        os.path.join(docfolder, os.pardir, configfile.pcbname + '.dxf'))
    bottom_dxffile = path.normpath(
        os.path.join(docfolder, os.pardir,
                     configfile.pcbname + 'bottom.dxf'))
    outf_mtime = fsutils.get_file_mtime(dxffile, fs=refdoc_fs)
    if not force and outf_mtime is not None and outf_mtime > pcb_mtime:
        # Output is newer than the source .pcb; nothing to do.
        logger.debug('Skipping up-to-date ' + dxffile)
        return dxffile
    logger.info('Regenerating ' + dxffile + os.linesep +
                'Last modified : ' + str(pcb_mtime) +
                '; Last Created : ' + str(outf_mtime))
    # Generate into the workspace filesystem, then copy to refdoc_fs.
    workspace_folder = workspace_fs.getsyspath(path.dirname(dxffile))
    workspace_fs.makedir(path.dirname(dxffile),
                         recursive=True, allow_recreate=True)
    pcb.conv_pcb2dxf(
        os.path.join(configfile.projectfolder, 'pcb',
                     gpf.pcbfile + '.pcb'),
        workspace_folder, configfile.pcbname
    )
    copyfile(workspace_fs, dxffile, refdoc_fs, dxffile, overwrite=True)
    copyfile(workspace_fs, bottom_dxffile, refdoc_fs, bottom_dxffile,
             overwrite=True)
    return dxffile
def gen_pcb_dxf(projfolder, force=False):
    """
    Generates a DXF file of the PCB provided by the gEDA project. The
    pcb file is the one listed in the gEDA project file, and the
    pcbname is the one specified in the
    :mod:`tendril.gedaif.conffile.ConfigsFile`.

    This function does not use jinja2 and latex. It relies on
    :func:`tendril.connectors.geda.pcb.conv_pcb2dxf` instead.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<projectfolder>/pcb/<pcbfile>.dxf``
    * Source Files : The project's `.pcb` file.
    """
    configfile = conffile.ConfigsFile(projfolder)
    gpf = projfile.GedaProjectFile(configfile.projectfolder)
    pcb_mtime = fsutils.get_file_mtime(
        os.path.join(configfile.projectfolder, 'pcb',
                     gpf.pcbfile + '.pcb'),
    )
    if pcb_mtime is None:
        logger.warning("PCB does not seem to exist for : " + projfolder)
        return
    docfolder = get_project_doc_folder(projfolder)
    # Output DXFs live one level above the documentation folder.
    dxffile = path.normpath(
        os.path.join(docfolder, os.pardir, configfile.pcbname + '.dxf'))
    bottom_dxffile = path.normpath(
        os.path.join(docfolder, os.pardir,
                     configfile.pcbname + 'bottom.dxf'))
    outf_mtime = fsutils.get_file_mtime(dxffile, fs=refdoc_fs)
    if not force and outf_mtime is not None and outf_mtime > pcb_mtime:
        # Output is newer than the source .pcb; nothing to do.
        logger.debug('Skipping up-to-date ' + dxffile)
        return dxffile
    logger.info('Regenerating ' + dxffile + os.linesep +
                'Last modified : ' + str(pcb_mtime) +
                '; Last Created : ' + str(outf_mtime))
    # Generate into the workspace filesystem, then copy to refdoc_fs.
    workspace_folder = workspace_fs.getsyspath(path.dirname(dxffile))
    workspace_fs.makedir(path.dirname(dxffile),
                         recursive=True, allow_recreate=True)
    pcb.conv_pcb2dxf(
        os.path.join(configfile.projectfolder, 'pcb',
                     gpf.pcbfile + '.pcb'),
        workspace_folder, configfile.pcbname
    )
    copyfile(workspace_fs, dxffile, refdoc_fs, dxffile, overwrite=True)
    copyfile(workspace_fs, bottom_dxffile, refdoc_fs, bottom_dxffile,
             overwrite=True)
    return dxffile
def movedir(self, src, dst, overwrite=False, ignore_errors=False,
            chunk_size=16384):
    """Move a directory, invalidating cached listings of both parents."""
    # Drop stale cache entries for the source and destination parents
    # before delegating to the generic implementation.
    self.clear_dircache(dirname(src), dirname(dst))
    super(FTPFS, self).movedir(src, dst, overwrite=overwrite,
                               ignore_errors=ignore_errors,
                               chunk_size=chunk_size)
def rename(self, src, dst):
    """Rename *src* to *dst* on the FTP server.

    Fix: the Python-2-only ``except error_perm, exception`` syntax is a
    SyntaxError on Python 3; replaced with ``except error_perm as
    exception`` (valid on Python 2.6+ and 3).

    :raises ParentDirectoryMissingError: server reports 550 and the
        destination's parent directory does not exist
    """
    try:
        self.refresh_dircache(dirname(src), dirname(dst))
        self.ftp.rename(_encode(src), _encode(dst))
    except error_perm as exception:
        code, message = str(exception).split(" ", 1)
        if code == "550":
            # 550: action not taken -- distinguish a missing parent
            # from other permission failures.
            if not self.exists(dirname(dst)):
                raise ParentDirectoryMissingError(dst)
        raise
def rename(self, src, dst):
    """Rename *src* to *dst* on the FTP server.

    Fix: the Python-2-only ``except error_perm, exception`` syntax is a
    SyntaxError on Python 3; replaced with ``except error_perm as
    exception`` (valid on Python 2.6+ and 3).

    :raises ParentDirectoryMissingError: server reports 550 and the
        destination's parent directory does not exist
    """
    try:
        self.refresh_dircache(dirname(src), dirname(dst))
        self.ftp.rename(_encode(src), _encode(dst))
    except error_perm as exception:
        code, message = str(exception).split(' ', 1)
        if code == "550":
            # 550: action not taken -- distinguish a missing parent
            # from other permission failures.
            if not self.exists(dirname(dst)):
                raise ParentDirectoryMissingError(dst)
        raise
def insert_document(sno, docpath, series):
    """Copy the document at *docpath* into the docstore under *series*.

    Returns the path of the stored copy within the docstore.
    """
    fname = os.path.split(docpath)[1]
    stem = os.path.splitext(fname)[0]
    # Prefix the serial number unless the filename already carries it.
    if not (fname.startswith(sno) or stem.endswith(sno)):
        fname = sno + "-" + fname
    if series is None:
        series = serialnos.get_series(sno)
    storepath = path.join(series, fname)
    target_dir = path.dirname(storepath)
    if not docstore_fs.exists(target_dir):
        docstore_fs.makedir(target_dir, recursive=True)
    copyfile(local_fs, docpath, docstore_fs, storepath)
    return storepath
def move(self, src, dst, overwrite=False, chunk_size=16384):
    """Move a file, trying a server-side rename before copy+delete.

    Fix: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.

    :raises DestinationExistsError: dst exists and overwrite is False
    """
    if not overwrite and self.exists(dst):
        raise DestinationExistsError(dst)
    try:
        # Fast path: server-side rename.
        self.rename(src, dst)
    except Exception:
        # Fall back to copy-then-delete when rename is not possible.
        self.copy(src, dst, overwrite=overwrite)
        self.remove(src)
    finally:
        # Listings for both endpoints and their parents are now stale.
        self.refresh_dircache(src, dirname(src), dst, dirname(dst))
def move(self, src, dst, overwrite=False, chunk_size=16384):
    """Move a file, trying a server-side rename before copy+delete.

    Fix: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.

    :raises DestinationExistsError: dst exists and overwrite is False
    """
    if not overwrite and self.exists(dst):
        raise DestinationExistsError(dst)
    try:
        # Fast path: server-side rename.
        self.rename(src, dst)
    except Exception:
        # Fall back to copy-then-delete when rename is not possible.
        self.copy(src, dst, overwrite=overwrite)
        self.remove(src)
    finally:
        # Listings for both endpoints and their parents are now stale.
        self.refresh_dircache(src, dirname(src), dst, dirname(dst))
def gen_schpdf(projfolder, namebase, force=False):
    """
    Generates a PDF file of all the project schematics listed in the
    gEDA project file. This function does not use jinja2 and latex. It
    relies on :func:`tendril.gedaif.gschem.conv_gsch2pdf` instead.

    :param projfolder: The gEDA project folder.
    :type projfolder: str
    :param namebase: The project name.
    :type namebase: str
    :param force: Regenerate even if up-to-date.
    :type force: bool
    :return: The output file path.

    .. rubric:: Paths

    * Output File : ``<project_doc_folder>/<namebase>-schematic.pdf``
    * Source Files : The project's schematic folder.
    """
    gpf = projfile.GedaProjectFile(projfolder)
    sch_mtime = fsutils.get_folder_mtime(gpf.schfolder)
    configfile = conffile.ConfigsFile(projfolder)
    docfolder = get_project_doc_folder(projfolder)
    schpdfpath = path.join(docfolder, namebase + '-schematic.pdf')
    outf_mtime = fsutils.get_file_mtime(schpdfpath, fs=refdoc_fs)
    if not force and outf_mtime is not None and outf_mtime > sch_mtime:
        # Output is newer than every schematic source; nothing to do.
        logger.debug('Skipping up-to-date ' + schpdfpath)
        return schpdfpath
    logger.info('Regenerating ' + schpdfpath + os.linesep +
                'Last modified : ' + str(sch_mtime) +
                '; Last Created : ' + str(outf_mtime))
    if configfile.rawconfig is not None:
        workspace_outpath = workspace_fs.getsyspath(schpdfpath)
        workspace_folder = workspace_fs.getsyspath(
            path.dirname(schpdfpath))
        workspace_fs.makedir(path.dirname(schpdfpath),
                             recursive=True, allow_recreate=True)
        pdffiles = []
        # Convert each schematic page to PDF, then merge them all.
        for schematic in gpf.schfiles:
            schfile = os.path.normpath(
                projfolder + '/schematic/' + schematic)
            pdffile = gschem.conv_gsch2pdf(schfile, workspace_folder)
            pdffiles.append(pdffile)
        pdf.merge_pdf(pdffiles, workspace_outpath)
        # Remove the per-page intermediates after merging.
        for pdffile in pdffiles:
            os.remove(pdffile)
        copyfile(workspace_fs, schpdfpath, refdoc_fs, schpdfpath,
                 overwrite=True)
        return schpdfpath
def insert_document(sno, docpath, series):
    """Store the document at *docpath* in the docstore for *series*.

    Returns the path of the stored copy within the docstore.
    """
    basename_only = os.path.split(docpath)[1]
    root = os.path.splitext(basename_only)[0]
    # Ensure the stored filename carries the serial number.
    needs_prefix = (not basename_only.startswith(sno)
                    and not root.endswith(sno))
    if needs_prefix:
        basename_only = sno + '-' + basename_only
    if series is None:
        series = serialnos.get_series(sno)
    storepath = path.join(series, basename_only)
    parent = path.dirname(storepath)
    if not docstore_fs.exists(parent):
        docstore_fs.makedir(parent, recursive=True)
    copyfile(local_fs, docpath, docstore_fs, storepath)
    return storepath
def move(self, src_path, dst_path, overwrite=False):
    """Move the file *src_path* to *dst_path* via the OneDrive API.

    :raises DestinationExists: dst exists and overwrite is False
    :raises ResourceNotFound: src, or dst's parent, does not exist
    :raises FileExpected: src is a folder
    """
    _CheckPath(src_path)
    _CheckPath(dst_path)
    with self._lock:
        if not overwrite and self.exists(dst_path):
            raise DestinationExists(dst_path)
        driveItemResponse = self.session.get(_PathUrl(src_path, ""))
        if driveItemResponse.status_code == 404:
            raise ResourceNotFound(src_path)
        driveItemResponse.raise_for_status()
        driveItem = driveItemResponse.json()
        if "folder" in driveItem:
            raise FileExpected(src_path)
        itemUpdate = {}
        newFilename = basename(dst_path)
        if not self.isdir(dst_path) and newFilename != basename(src_path):
            # The move also renames the item.
            itemUpdate["name"] = newFilename
        parentDir = dirname(dst_path)
        if parentDir != dirname(src_path):
            # Re-parent the item under the destination directory.
            parentDirItem = self.session.get(_PathUrl(parentDir, ""))
            if parentDirItem.status_code == 404:
                raise ResourceNotFound(parentDir)
            parentDirItem.raise_for_status()
            itemUpdate["parentReference"] = {
                "id": parentDirItem.json()["id"]
            }
        itemId = driveItem["id"]
        response = self.session.patch(_ItemUrl(itemId, ""),
                                      json=itemUpdate)
        if response.status_code == 409 and overwrite is True:
            # Conflict with an existing item: delete the existing
            # version and then try again.
            response = self.session.delete(_PathUrl(dst_path, ""))
            response.raise_for_status()
            # try again
            response = self.session.patch(_ItemUrl(itemId, ""),
                                          json=itemUpdate)
            response.raise_for_status()
            return
        if response.status_code == 409 and overwrite is False:
            debug(
                "Retrying move in case it's an erroneous error (see issue #7)"
            )
            response = self.session.patch(_ItemUrl(itemId, ""),
                                          json=itemUpdate)
            response.raise_for_status()
            return
        response.raise_for_status()
def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
    """Move the file *src_path* to *dst_path* on Google Drive.

    :param preserve_time: carry the source's modifiedTime over
    :raises DestinationExists: dst exists and overwrite is False
    :raises ResourceNotFound: src, or dst's parent, does not exist
    :raises FileExpected: src is a folder
    """
    _log.info(
        f'move({src_path}, {dst_path}, {overwrite}, {preserve_time})')
    src_path = self.validatepath(src_path)
    dst_path = self.validatepath(dst_path)
    with self._lock:
        dstItem = self._itemFromPath(dst_path)
        if overwrite is False and dstItem is not None:
            raise DestinationExists(dst_path)
        srcParentItem = self._itemFromPath(dirname(src_path))
        if srcParentItem is None:
            raise ResourceNotFound(src_path)
        # TODO - it would be more efficient to go directly from srcParentItem to it's child here
        srcItem = self._itemFromPath(src_path)
        if srcItem is None:
            raise ResourceNotFound(src_path)
        if srcItem['mimeType'] == _folderMimeType:
            raise FileExpected(src_path)
        dstParentDir = dirname(dst_path)
        dstParentDirItem = self._itemFromPath(dstParentDir)
        if dstParentDirItem is None:
            raise ResourceNotFound(dstParentDir)
        if dstItem is not None:
            # Overwriting: delete the existing destination first.
            assert overwrite is True
            self._drive.files().delete(
                fileId=dstItem['id'],
                **self._file_kwargs,
            ).execute(num_retries=self.retryCount)
        metadata = {
            'name': basename(dst_path),
            'enforceSingleParent': True
        }
        if preserve_time is True:
            metadata['modifiedTime'] = srcItem['modifiedTime']
        # A Drive "move" is an update that swaps parents and renames
        # in a single API call.
        self._drive.files().update(
            fileId=srcItem['id'],
            addParents=dstParentDirItem['id'],
            removeParents=srcParentItem['id'],
            body=metadata,
            **self._file_kwargs,
        ).execute(num_retries=self.retryCount)
def fetch(source, target, filenames, depth, verbose):
    """Mirror matching files from *source* into *target*, keeping paths."""
    from ...api import files_in_directories
    from fs.osfs import OSFS
    from fs.copy import copy_file
    from fs.path import dirname
    with OSFS(source) as src_fs, OSFS(target) as dst_fs:
        matches = files_in_directories(src_fs, ['*'], filenames, depth)
        for fpath in matches:
            parent = dirname(fpath)
            # Create the destination directory tree on demand.
            if not dst_fs.exists(parent):
                dst_fs.makedirs(parent)
            copy_file(src_fs, fpath, dst_fs, fpath)
            if verbose:
                click.echo('[COPY] {} => {}.'.format(
                    src_fs.getsyspath(fpath), dst_fs.getsyspath(fpath)))
def makedir(self, path, permissions=None, recreate=False): """ Create a directory under `path`. :param str path: Path pointing to a file. :param Permissions permissions: PyFilesystem permission instance :param bool recreate: Not supported """ # type: (Text, Permissions, bool) -> SubFS path = ensureUnicode(path) self.check() _path = toAscii(self.validatepath(path)) if not self.isdir(dirname(_path)): raise errors.ResourceNotFound(path) if permissions is None: permissions = Permissions(user='******', group='r-x', other='r-x') try: self.getinfo(path) except errors.ResourceNotFound: self._odfs.mkdir(_path, permissions.mode) return SubFS(self, path)
def _listPath(self, path, list_contents=False): """ Path listing with SMB errors converted. """ # Explicitly convert the SMB errors to be able to catch the # PyFilesystem error while listing the path. if list_contents: try: # List all contents of a directory. return _conv_smb_errors(self.conn.listPath)(self.share, normpath(path)) except ResourceNotFoundError: if self.isfile(path): raise ResourceInvalidError(path) raise else: # List a specific path (file or directory) by listing the contents # of the containing directory and comparing the filename. pathdir = dirname(path) searchpath = basename(path) for i in _conv_smb_errors(self.conn.listPath)(self.share, pathdir): if i.filename == '..': continue elif ((i.filename == '.' and searchpath == '') or i.filename == searchpath): return i raise ResourceNotFoundError(path)
def remove(self, path):
    """Remove the file at *path* from the metadata table, dropping the
    content row too when no other file references it.

    Fixes: ``== True`` and ``== None`` comparisons replaced with
    truthiness and ``is None``.

    :raises ResourceInvalidError: path is a directory
    :raises ResourceNotFoundError: no such file
    """
    self._initdb()
    path = normpath(path)
    if self.isdir(path):
        # path is actually a directory
        raise ResourceInvalidError(path)
    filedir = dirname(path)
    filename = basename(path)
    dirid = self._get_dir_id(filedir)
    fileid = self._get_file_id(dirid, filename)
    if fileid is None:
        raise ResourceNotFoundError(path)
    content_id = self._get_file_contentid(fileid)
    self._updatecur.execute("DELETE FROM FsFileMetaData where ROWID=?",
                            (fileid,))
    # Check whether any other file still points to the same content.
    # If not, delete the content row as well.
    self._querycur.execute(
        'SELECT count(*) FROM FsFileMetaData where fileid=?',
        (content_id,))
    row = fetchone(self._querycur)
    if row is None or row[0] == 0:
        self._updatecur.execute("DELETE FROM FsFileTable where ROWID=?",
                                (content_id,))
def render_close(self, data, text=None):
    """Render a closing-tag reference as a link to its documentation.

    Falls back to plain ``<code>`` markup when there is no request
    context or the tag is not in the URL map.
    """
    if 'context' not in data:
        return "<code>{}</code>".format(text)
    context = data['context']
    path = dirname(context.get('.request.path', '/'))
    urls = context['.urls']
    tag_name = text.strip()
    xmlns = None
    if '{' in tag_name:
        # Tag written with an explicit {namespace}name prefix.
        xmlns, tag_name = self._re_namespace.match(tag_name).groups()
        if '://' not in xmlns:
            # Bare namespace: resolve against the default namespace.
            xmlns = self._join(namespaces.default, xmlns)
        text = tag_name
        tag_name = "{{{}}}{}".format(xmlns, tag_name)
    if xmlns is None:
        # No inline namespace; take it from the element's attributes.
        xmlns = self.attribs.strip()
        if '://' not in xmlns:
            xmlns = self._join(namespaces.default, xmlns)
        tag_name = "{{{}}}{}".format(xmlns, tag_name)
    try:
        tag_path = urls['tag'][tag_name]
        relative_tag_path = relativefrom(path, tag_path)
    except KeyError as e:
        # Unknown tag: render as plain code.
        return "<code>{}</code>".format(text)
    else:
        return '''<a class="tag" href="{tag_path}"><{text}></a>'''.format(
            tag_path=relative_tag_path, text=text)
def makedir(self, path, recursive=False, allow_recreate=False):
    """Create a directory on the SMB share.

    :param recursive: create intermediate directories as needed
    :param allow_recreate: do not fail if the directory exists
    :raises ResourceInvalidError: a path component is a file
    :raises ParentDirectoryMissingError: parent missing, non-recursive
    :raises DestinationExistsError: exists and allow_recreate is False
    """
    path = normpath(path)
    if path in ('', '/'):
        # The share root always exists; nothing to do.
        return
    if recursive:
        created = False
        # Walk every prefix of the path, creating missing components.
        for path_part in recursepath(path):
            if not self.isdir(path_part):
                self.conn.mkdir(self.smb_path(path_part))
                created = True
            else:
                if self.isfile(path_part):
                    raise ResourceInvalidError(path_part)
        if not created and not allow_recreate:
            # Everything already existed and recreation is not allowed.
            raise DestinationExistsError(path)
    else:
        base = dirname(path)
        if not self.exists(base):
            raise ParentDirectoryMissingError(path)
        if not allow_recreate:
            if self.exists(path):
                if self.isfile(path):
                    raise ResourceInvalidError(path)
                raise DestinationExistsError(path)
            self.conn.mkdir(self.smb_path(path))
        else:
            if not self.isdir(path):
                self.conn.mkdir(self.smb_path(path))
def _get_fs(self, create_dir=True):
    """Return tuple with filesystem and filename."""
    parent = dirname(self.fileurl)
    name = basename(self.fileurl)
    fs = opener.opendir(parent, writeable=True, create_dir=create_dir)
    return (fs, name)
def removedir(self, path, recursive=False, force=False):
    """Remove the directory at *path* from the SMB share.

    :param recursive: also remove now-empty ancestor directories
    :param force: remove the directory's contents first
    :raises ResourceNotFoundError: path does not exist
    :raises ResourceInvalidError: path is a file
    :raises DirectoryNotEmptyError: not empty and force is False
    """
    path = self._prepare_abspath(path)
    if not self.exists(path):
        raise ResourceNotFoundError(path)
    if self.isfile(path):
        raise ResourceInvalidError(path)
    lst = self.listdir(path, full=True)
    if len(lst) > 0:
        if not force:
            raise DirectoryNotEmptyError(path)
        else:
            for rpath in lst:
                try:
                    if self.isfile(rpath):
                        self.remove(rpath)
                    elif self.isdir(rpath):
                        self.removedir(rpath, force=force)
                except FSError:
                    # Best effort: skip entries that cannot be removed.
                    pass
    self.conn.rmdir(self.smb_path(path))
    if recursive:
        try:
            # Prune the parent as well if it has become empty.
            self.removedir(dirname(path), recursive=True)
        except DirectoryNotEmptyError:
            pass
def openbin(self, path, mode="r", buffering=-1, **options):
    """Open a binary file on Azure Data Lake.

    :raises errors.FileExpected: path is a directory
    :raises errors.ResourceNotFound: missing file (read) or parent
        directory (create)
    :raises errors.FileExists: exclusive create and the file exists
    """
    _mode = Mode(mode)
    _mode.validate_bin()
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    info = None
    try:
        info = self.getinfo(path)
    except errors.ResourceNotFound:
        pass
    else:
        if info.is_dir:
            raise errors.FileExpected(path)
    if _mode.create:
        try:
            # The parent directory must exist before creating a file.
            dir_path = dirname(_path)
            if dir_path != "/":
                self.getinfo(dir_path)
        except errors.ResourceNotFound:
            # Report the requested path, not the parent.
            raise errors.ResourceNotFound(path)
    if info and _mode.exclusive:
        raise errors.FileExists(path)
    # AzureDLFile does not support exclusive mode, but we mimic it
    dlkfile = self.dlk.open(_key, str(_mode).replace("x", ""))
    return dlkfile
def __init__(self, fs, path, parsedMode):
    """Buffer a Google Drive file in a local temporary file.

    :param fs: owning filesystem instance
    :param path: remote path of the file
    :param parsedMode: parsed open-mode object
    """
    self.fs = fs
    self.path = path
    # Parent folder metadata; needed later when uploading a new file.
    self.parentMetadata = self.fs._itemFromPath(dirname(self.path))  # pylint: disable=protected-access
    # None here means we'll have to create a new file later
    self.thisMetadata = self.fs._itemFromPath(self.path)  # pylint: disable=protected-access
    # keeping a parsed mode separate from the base class's mode member
    self.parsedMode = parsedMode
    fileHandle, self.localPath = mkstemp(
        prefix="pyfilesystem-googledrive-",
        suffix=splitext(self.path)[1],
        text=False)
    close(fileHandle)
    debug(f"self.localPath: {self.localPath}")
    if (self.parsedMode.reading or self.parsedMode.appending) and not self.parsedMode.truncate:
        if self.thisMetadata is not None:
            # Seed the local buffer with the remote file's contents.
            initialData = self.fs.drive.files().get_media(
                fileId=self.thisMetadata["id"]).execute()
            debug(f"Read initial data: {initialData}")
            with open(self.localPath, "wb") as f:
                f.write(initialData)
    platformMode = self.parsedMode.to_platform()
    # Always open the local buffer in binary mode.
    platformMode += ("b" if "b" not in platformMode else "")
    # Exclusivity ('x') is enforced at the fs layer; use append locally.
    platformMode = platformMode.replace("x", "a")
    super().__init__(f=open(self.localPath, mode=platformMode))
    if self.parsedMode.appending:
        # seek to the end
        self.seek(0, SEEK_END)
def copy(self, src_path, dst_path, overwrite=False):
    """Server-side copy of *src_path* to *dst_path* on Google Drive.

    :raises ResourceNotFound: src or dst's parent does not exist
    :raises DestinationExists: dst exists and overwrite is False
    :raises FileExpected: src is a folder
    """
    info(f"copy: {src_path} -> {dst_path}, {overwrite}")
    _CheckPath(src_path)
    _CheckPath(dst_path)
    with self._lock:
        parentDir = dirname(dst_path)
        parentDirItem = self._itemFromPath(parentDir)
        if parentDirItem is None:
            raise ResourceNotFound(parentDir)
        dstItem = self._itemFromPath(dst_path)
        if overwrite is False and dstItem is not None:
            raise DestinationExists(dst_path)
        srcItem = self._itemFromPath(src_path)
        if srcItem is None:
            raise ResourceNotFound(src_path)
        if srcItem["mimeType"] == _folderMimeType:
            raise FileExpected(src_path)
        # TODO - we should really replace the contents of the existing file with the new contents, so that the history is correct
        if dstItem is not None:
            # Overwriting: delete the existing destination first.
            self.drive.files().delete(fileId=dstItem["id"]).execute(num_retries=self.retryCount)
        newMetadata = {"parents": [parentDirItem["id"]],
                       "name": basename(dst_path)}
        self.drive.files().copy(fileId=srcItem["id"], body=newMetadata).execute(num_retries=self.retryCount)
def render_close(self, data, text=None):
    """Render a closing-tag reference as a link to its documentation.

    Falls back to plain ``<code>`` markup when there is no request
    context or the tag is not in the URL map.
    """
    if 'context' not in data:
        return "<code>{}</code>".format(text)
    context = data['context']
    path = dirname(context.get('.request.path', '/'))
    urls = context['.urls']
    tag_name = text.strip()
    xmlns = None
    if '{' in tag_name:
        # Tag written with an explicit {namespace}name prefix.
        xmlns, tag_name = self._re_namespace.match(tag_name).groups()
        if '://' not in xmlns:
            # Bare namespace: resolve against the default namespace.
            xmlns = self._join(namespaces.default, xmlns)
        text = tag_name
        tag_name = "{{{}}}{}".format(xmlns, tag_name)
    if xmlns is None:
        # No inline namespace; take it from the element's attributes.
        xmlns = self.attribs.strip()
        if '://' not in xmlns:
            xmlns = self._join(namespaces.default, xmlns)
        tag_name = "{{{}}}{}".format(xmlns, tag_name)
    try:
        tag_path = urls['tag'][tag_name]
        relative_tag_path = relativefrom(path, tag_path)
    except KeyError as e:
        # Unknown tag: render as plain code.
        return "<code>{}</code>".format(text)
    else:
        return '''<a class="tag" href="{tag_path}"><{text}></a>'''.format(tag_path=relative_tag_path, text=text)
def command_cd(self, params):
    """Handle the shell's ``cd`` command inside the virtual filesystem.

    Fix: ``params[0]`` was read unconditionally, so ``cd`` with no
    arguments raised IndexError; the argument handling is now guarded
    (cf. the corrected variant of this handler elsewhere in the file).
    """
    if '.' in params:
        # A literal '.' argument is a no-op.
        return
    newdir = self.working_dir[:]
    if len(params) > 1:
        self.writeline('cd: string not in pwd: {0}'.format(' '.join(params)))
        return
    if params:
        arg = params[0]
        # Collapse leading '..' components by walking up from newdir.
        while arg.startswith('../') or arg == '..':
            if newdir.endswith('/'):
                newdir = newdir[:-1]
            newdir = dirname(newdir)
            arg = arg[3:]
        newdir = os.path.join(newdir, arg)
    try:
        if self.vfs.isdir(newdir):
            self.working_dir = newdir[:]
            self.PROMPT = '[{0}@{1} {2}]$ '.format(self.username, self.HOSTNAME, self.working_dir)
        else:
            self.writeline('cd: no such file or directory: {0}'.format(params[0]))
    except ValueError:
        # Attacker tried to leave the Virtual File system. We wont let him.
        self.working_dir = '/'
        self.PROMPT = '[{0}@{1} {2}]$ '.format(self.username, self.HOSTNAME, self.working_dir)
    self.update_total_file_size(self.working_dir)
def openbin(self, path, mode="r", buffering=-1, **options): """ Open file under `path` in binary mode. :param str path: Path pointing to a file. :param str mode: Text representation of open mode e.g. "rw+" :param int buffering: Whether the BaseIO instance should be buffered or not :param map options: Additional PyFilesystem options """ # type: (Text, Text, int, **Any) -> BinaryIO path = ensureUnicode(path) _mode = Mode(mode) _mode.validate_bin() _path = toAscii(self.validatepath(path)) with self._lock: try: info = self.getinfo(path) except errors.ResourceNotFound: if _mode.reading: raise errors.ResourceNotFound(path) if _mode.writing and not self.isdir(dirname(_path)): raise errors.ResourceNotFound(path) else: if info.is_dir: raise errors.FileExpected(path) if _mode.exclusive: raise errors.FileExists(path) # TODO support mode handle = self._odfs.open(_path) onedata_file = OnedataFile(self._odfs, handle, path, mode) return onedata_file # type: ignore
def _isfile(self, path):
    """Return True if *path* names an existing file (not a directory)."""
    path = normpath(path)
    parent_id = self._get_dir_id(dirname(path))
    if parent_id is None:
        # No parent directory means there cannot be such a file.
        return False
    return self._get_file_id(parent_id, basename(path)) is not None
def makedir(self,
            path: str,
            permissions: Optional[Permissions] = None,
            recreate: bool = False) -> SubFS[FS]:
    """Make a directory.

    Note: As GCS is not a real filesystem but a key-value store that
    does not have any concept of directories, we write empty blobs as a
    work around.
    See: https://fs-s3fs.readthedocs.io/en/latest/#limitations

    This implementation currently ignores the `permissions` argument,
    the empty blobs are written with default permissions.
    """
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_dir_key(_path)
    if not self.isdir(dirname(_path)):
        # The parent "directory" blob must already exist.
        raise errors.ResourceNotFound(path)
    try:
        self.getinfo(path)
    except errors.ResourceNotFound:
        pass
    else:
        if recreate:
            # Already present and recreation allowed: just open it.
            return self.opendir(_path)
        else:
            raise errors.DirectoryExists(path)
    # Materialise the directory as an empty blob (key has a trailing
    # slash from _path_to_dir_key).
    blob = self.bucket.blob(_key)
    blob.upload_from_string(b"")
    return SubFS(self, path)
def remove(self, path):
    """Delete the file at *path* on the FTP server."""
    if self.isfile(path):
        # Invalidate the parent's cached listing, then delete.
        self.refresh_dircache(dirname(path))
        self.ftp.delete(_encode(path))
        return
    if self.exists(path):
        # Exists but is not a file: directories need removedir().
        raise ResourceInvalidError(path)
    raise ResourceNotFoundError(path)
def command_cd(self, params):
    """Handle the shell's ``cd`` command inside the virtual filesystem."""
    if "." in params:
        # A literal '.' argument is a no-op.
        return
    newdir = self.working_dir[:]
    if len(params) > 1:
        self.writeline("cd: string not in pwd: {0}".format(" ".join(params)))
        return
    if len(params) > 0:
        arg = params[0]
        # TODO: Not too sure about what's going on here, need to investigate.
        while arg.startswith("../") or arg == "..":
            if newdir.endswith("/"):
                newdir = newdir[:-1]
            newdir = dirname(newdir)
            arg = arg[3:]
        newdir = os.path.join(newdir, arg)
    else:
        # No argument: joining with an absolute "/" resets to the root.
        newdir = os.path.join(newdir, "/")
    try:
        if self.vfs.isdir(newdir):
            self.working_dir = newdir[:]
            self.PROMPT = "[{0}@{1} {2}]$ ".format(self.username, self.HOSTNAME, self.working_dir)
        else:
            self.writeline("cd: no such file or directory: {0}".format(params[0]))
    except ValueError:
        # Attacker tried to leave the Virtual File system. We wont let him.
        self.working_dir = "/"
        self.PROMPT = "[{0}@{1} {2}]$ ".format(self.username, self.HOSTNAME, self.working_dir)
    self.update_total_file_size(self.working_dir)
def openbin(self, path: str, mode: str = "r", buffering: int = -1,
            **options) -> "GCSFile":
    """Open a binary file-like object for *path* backed by GCS.

    Fix: the final missing-blob branch raised the bare
    ``errors.ResourceNotFound`` class; it now raises
    ``errors.ResourceNotFound(path)`` so the error carries the path,
    consistent with the other raise sites.
    """
    _mode = Mode(mode)
    _mode.validate_bin()
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)

    def on_close(gcs_file):
        # Flush the local buffer back to GCS when the file is closed.
        if _mode.create or _mode.writing:
            gcs_file.raw.seek(0)
            blob = self._get_blob(_key)
            if not blob:
                blob = self.bucket.blob(_key)
            blob.upload_from_file(gcs_file.raw)
        gcs_file.raw.close()

    if _mode.create:
        # The parent "directory" blob must exist before creating.
        dir_path = dirname(_path)
        if dir_path != "/":
            _dir_key = self._path_to_dir_key(dir_path)
            if not self.bucket.get_blob(_dir_key):
                raise errors.ResourceNotFound(path)
        try:
            info = self.getinfo(path)
        except errors.ResourceNotFound:
            pass
        else:
            if _mode.exclusive:
                raise errors.FileExists(path)
            if info.is_dir:
                raise errors.FileExpected(path)
        gcs_file = GCSFile.factory(path, _mode, on_close=on_close)
        if _mode.appending:
            blob = self._get_blob(_key)
            if blob:
                # Existing blob: download it and position at the end.
                gcs_file.seek(0, os.SEEK_END)
                blob.download_to_file(gcs_file.raw)
        return gcs_file
    if self.strict:
        info = self.getinfo(path)
        if info.is_dir:
            raise errors.FileExpected(path)
    gcs_file = GCSFile.factory(path, _mode, on_close=on_close)
    blob = self._get_blob(_key)
    if not blob:
        # Fix: raise an instance carrying the path, not the bare class.
        raise errors.ResourceNotFound(path)
    blob.download_to_file(gcs_file.raw)
    gcs_file.seek(0)
    return gcs_file
def getinfo(self, path: str, namespaces: Optional[List[str]] = None,
            check_parent_dir: bool = True) -> Info:
    """Return an :class:`Info` object for *path* on GCS.

    :param check_parent_dir: also verify the parent directory blob
    :raises errors.ResourceNotFound: path (or its parent) is missing
    """
    if check_parent_dir:
        self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    if check_parent_dir:
        parent_dir = dirname(_path)
        parent_dir_key = self._path_to_dir_key(parent_dir)
        if parent_dir != "/" and not self._get_blob(parent_dir_key):
            # Report the requested path, not the parent.
            raise errors.ResourceNotFound(path)
    if _path == "/":
        # The bucket root is always a directory.
        return self._dir_info("")
    key = self._path_to_key(_path)
    dir_key = self._path_to_dir_key(_path)
    blob = self._get_blob(key)
    if blob:
        # Check if there exists a blob at the provided path, return the corresponding object Info
        return self._info_from_blob(blob, namespaces)
    elif self._get_blob(dir_key):
        # Check if there exists a blob with a slash at the end, return the corresponding directory Info
        return self._dir_info(path)
    else:
        raise errors.ResourceNotFound(path)
def setcontents(self, path, data='', encoding=None, errors=None,
                chunk_size=65536):
    """Write *data* as the contents of the file at *path* on Drive.

    Creates the file if missing; otherwise replaces its contents.

    :raises ResourceInvalidError: path is a directory
    """
    if self.isdir(path):
        raise ResourceInvalidError(path)
    if hasattr(data, 'read'):
        # Accept file-like objects as well as strings.
        data = data.read()
    if self.isfile(path):
        # Existing file: update its content in place via its Drive id.
        fh = self.client.CreateFile({'id': self._ids[path]})
        fh.SetContentString(data)
        fh.Upload()
    else:
        # New file: create it under the parent folder's Drive id.
        parent_path = self._ids[dirname(path)]
        fh = self.client.CreateFile({
            'title': basename(path),
            'parents': [{'id': parent_path}]
        })
        fh.SetContentString(data)
        fh.Upload()
        # Cache the new file's Drive id under its path.
        self._ids[path] = fh['id']
def _listPath(self, path, list_contents=False): """ Path listing with SMB errors converted. """ # Explicitly convert the SMB errors to be able to catch the # PyFilesystem error while listing the path. if list_contents: try: # List all contents of a directory. return _conv_smb_errors(self.conn.listPath)( self.share, normpath(path)) except ResourceNotFoundError: if self.isfile(path): raise ResourceInvalidError(path) raise else: # List a specific path (file or directory) by listing the contents # of the containing directory and comparing the filename. pathdir = dirname(path) searchpath = basename(path) for i in _conv_smb_errors(self.conn.listPath)(self.share, pathdir): if i.filename == '..': continue elif ((i.filename == '.' and searchpath == '') or i.filename == searchpath): return i raise ResourceNotFoundError(path)
def getinfo(self, path, namespaces=None):
    """Return an Info object for *path*, verifying its parent directory exists."""
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)

    # Verify the containing directory exists; re-raise with the caller's
    # original path for a clearer error.
    parent = dirname(_path)
    try:
        if parent != '/':
            self._get_object(parent, self._path_to_dir_key(parent))
    except errors.ResourceNotFound:
        raise errors.ResourceNotFound(path)

    if _path == '/':
        # The root is always a directory.
        return Info({
            "basic": {"name": "", "is_dir": True},
            "details": {"type": int(ResourceType.directory)},
        })

    obj = self._get_object(path, _key)
    return Info(self._info_from_object(obj, namespaces))
def _get_fs(self, create_dir=True):
    """Return tuple with filesystem and filename."""
    url = self.fileurl
    directory = opener.opendir(dirname(url), writeable=True, create_dir=create_dir)
    return directory, basename(url)
def run(self):
    """Locate the conf file (explicit or discovered) and build its directory."""
    conf_path = self.args.conf or tools.find_conf()
    do_build(dirname(conf_path))
def _get_fs(self, create_dir=True):
    """Return a ``(filesystem, filename)`` tuple for ``self.fileurl``.

    Opens the directory part of ``self.fileurl`` — creating it when
    *create_dir* is true — and pairs it with the base filename.
    """
    filedir = dirname(self.fileurl)
    filename = basename(self.fileurl)
    return (
        opener.opendir(filedir, writeable=True, create_dir=create_dir),
        filename
    )
def listparts(self, path, full=True, absolute=False):
    """
    Return all parts for a given path.

    By default it will always return the full paths.

    :param path: Path to check for parts
    :returns list of paths of parts
    """
    part_pattern = "{0}.part*".format(basename(path))
    return self.wrapped_fs.listdir(
        path=dirname(path),
        wildcard=part_pattern,
        full=full,
        absolute=absolute,
        files_only=True,
    )
def prep_directory(self, target_dir):
    """
    Prepares a new directory to store the file at the provided path, if needed.
    """
    # Renamed from `dirname`: the old local shadowed the `path.dirname`
    # helper name used elsewhere in this file.
    parent = path.dirname(target_dir)
    if parent:
        build_path = path.join(settings.BUILD_DIR, parent)
        if not self.fs.exists(build_path):
            logger.debug("Creating directory at {}{}".format(self.fs_name, build_path))
            self.fs.makedirs(build_path)
def move(self, src, dst, overwrite=False):
    """Move *src* to *dst* on the Tahoe grid.

    Raises UnsupportedError on a read-only fs, ParentDirectoryMissingError
    when dst's parent is absent, and DestinationExistsError when dst
    already exists and *overwrite* is false.
    """
    self._log(INFO, "Moving file from %s to %s" % (src, dst))
    if self.getmeta("read_only"):
        raise errors.UnsupportedError('read only filesystem')
    src, dst = _fixpath(src), _fixpath(dst)
    if not self.exists(dirname(dst)):
        raise errors.ParentDirectoryMissingError(dst)
    # Short-circuit: only probe dst existence when overwrite is disallowed.
    if not overwrite and self.exists(dst):
        raise errors.DestinationExistsError(dst)
    self.tahoeutil.move(self.dircap, src, dst)
def getcontents(self, path, mode='rb', encoding=None, errors=None, newline=None):
    """Return the full contents of the file at *path* as a string."""
    if self.isdir(path):
        raise ResourceInvalidError(path)
    if not self.isfile(path):
        # Distinguish a merely missing file from a missing parent directory.
        if self.isdir(dirname(path)):
            raise ResourceNotFoundError(path)
        raise ParentDirectoryMissingError(path)
    return self.client.CreateFile({'id': self._ids[path]}).GetContentString()
def makedir(self, path, recursive=False, allow_recreate=False):
    """Create a directory at *path* on the Tahoe grid.

    Raises UnsupportedError on a read-only fs, ResourceInvalidError when a
    file occupies *path*, DestinationExistsError unless *allow_recreate*,
    and ParentDirectoryMissingError unless *recursive*.
    """
    self._log(INFO, "Creating directory %s" % path)
    if self.getmeta("read_only"):
        raise errors.UnsupportedError('read only filesystem')
    if self.exists(path):
        if not self.isdir(path):
            raise errors.ResourceInvalidError(path)
        if not allow_recreate:
            raise errors.DestinationExistsError(path)
    # Deliberately a separate `if` (not elif) to mirror the original
    # check order, including its remote existence probes.
    if not recursive and not self.exists(dirname(path)):
        raise errors.ParentDirectoryMissingError(path)
    self.tahoeutil.mkdir(self.dircap, path)
def logic(self, context):
    """Write file contents to a destination filesystem, creating parent dirs."""
    params = self.get_parameters(context)
    # Touch the parameter — presumably forces lazy evaluation so errors
    # surface here rather than inside setcontents; confirm with callers.
    params.contents
    if self.has_parameter('fsobj'):
        dst_fs = params.fsobj
    else:
        dst_fs = self.archive.get_filesystem(params.fs)
    try:
        dst_fs.makedir(dirname(params.path), recursive=True, allow_recreate=True)
        dst_fs.setcontents(params.path, params.contents)
    except Exception as e:
        self.throw("set-contents.fail", "unable to set file contents ({})".format(e))
def remove(self, path):
    """Delete the file at *path* from GoogleDrive and drop its cached id."""
    if self.isdir(path):
        raise ResourceInvalidError(path)
    if not self.isfile(path):
        # Distinguish a merely missing file from a missing parent directory.
        if self.isdir(dirname(path)):
            raise ResourceNotFoundError(path)
        raise ParentDirectoryMissingError(path)
    service = self.client.auth.service
    service.files().delete(fileId=self._ids[path]).execute()
    self._ids.pop(path)
def compile_fs_template(fs, template_text, data=None, path=None):
    """Compile a fs template structure in to a filesystem object"""
    # Template format: a rendered line beginning with '@' declares an
    # output target as "@TYPE PATH"; subsequent lines are buffered and
    # flushed to that target when the next '@' line (or EOF) is reached.
    if data is None:
        data = {}
    template = Template(template_text)
    # Override the special-token regex so {{# ... #}} blocks are recognized.
    template.re_special = re.compile(r'\{\{\%((?:\".*?\"|\'.*?\'|.|\s)*?)\%\}\}|(\{\{\#)|(\#\}\})')
    context = Context({"data": data}, re_sub=r'\$\{\{(.*?)\}\}')
    with context.frame("data"):
        fs_template = template.render(context)
    out_type = None
    out_filename = None
    file_lines = []

    def write_file(filename, file_type):
        # Flush the buffered lines to `filename` according to `file_type`,
        # then clear the buffer (even when filename is falsy).
        if filename:
            if file_type.lower() == "text":
                with fs.open(filename, 'wt') as f:
                    f.write('\n'.join(file_lines) + '\n')
            elif file_type.lower() == "wraptext":
                import textwrap
                with fs.open(filename, 'wt') as f:
                    for line in file_lines:
                        f.write('\n'.join(textwrap.wrap(line, 79)) + '\n')
            elif file_type.lower() == "bin":
                # Each buffered line is a hex string; consecutive digit
                # pairs become bytes.
                # NOTE(review): b''.join of chr() results only works on
                # Python 2 (chr returns str there); under Python 3 this
                # raises TypeError — confirm the target interpreter.
                with fs.open(filename, 'wb') as f:
                    for line in file_lines:
                        chunk = b''.join(chr(int(a + b, 16)) for a, b in zip(line[::2], line[1::2]))
                        f.write(chunk)
        del file_lines[:]

    for line in fs_template.splitlines():
        line = line.rstrip()
        if line.startswith('@'):
            # A new target: flush whatever was buffered for the previous one.
            #out_path = out_filename
            write_file(out_filename, out_type)
            out_filename = None
            out_type, path_spec = line[1:].split(' ', 1)
            if path:
                # Re-root the declared path under the caller-supplied prefix.
                path_spec = join(path, relpath(path_spec))
            if path_spec.endswith('/'):
                # Trailing slash: directory-only entry, nothing to write.
                fs.makedir(path_spec, allow_recreate=True, recursive=True)
                out_filename = None
            else:
                fs.makedir(dirname(path_spec), allow_recreate=True, recursive=True)
                out_filename = path_spec
            continue
        if out_filename:
            file_lines.append(line)
    if out_filename:
        # Flush the final target after the loop ends.
        write_file(out_filename, out_type)
def open(self, path, mode="r"):
    """Open *path* over FTP and return a file-like ``_FTPFile``."""
    path = normpath(path)
    mode = mode.lower()
    if self.isdir(path):
        raise ResourceInvalidError(path)
    # Read and append modes require the file to already exist.
    if ("r" in mode or "a" in mode) and not self.isfile(path):
        raise ResourceNotFoundError(path)
    if any(flag in mode for flag in ("w", "a", "+")):
        # Writing invalidates the cached listing of the parent directory.
        self.refresh_dircache(dirname(path))
    ftp = self._open_ftp()
    return _FTPFile(self, ftp, normpath(path), mode)
def put_file(self, path, f, overwrite=False):
    """Upload file-like *f* to *path* via the Dropbox REST client.

    Raises OperationFailedError on a REST error response,
    ResourceInvalidError on a bad argument type, and
    RemoteConnectionError on any other (likely credential) failure.
    On success, invalidates the cached parent listing and returns *path*.
    """
    try:
        super(DropboxClient, self).put_file(path, f, overwrite=overwrite)
    except rest.ErrorResponse as e:
        raise OperationFailedError(opname='file_copy', msg=str(e))
    except TypeError:
        raise ResourceInvalidError("put_file", path)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt — narrowed to Exception.
        raise RemoteConnectionError("Most probable reasons: " + \
                                    "access token has expired " + \
                                    "or user credentials are invalid.")
    self.cache.pop(dirname(path), None)
    return path