Code example #1
    def _rename_file(self, src, dst):
        '''
        rename source file 'src' to destination file 'dst'
        '''
        srcdir = dirname(src)
        srcfname = basename(src)
        dstdir = dirname(dst)
        dstfname = basename(dst)
        #Make sure that the destination directory exists and the destination file
        #does not exist.
        dstdirid = self._get_dir_id(dstdir)
        if( dstdirid == None):
            raise ParentDirectoryMissingError(dst)
        dstfile_id = self._get_file_id(dstdirid, dstfname)
        if( dstfile_id != None):
            raise DestinationExistsError(dst)
        #All checks are done. Delete the entry for the source file.
        #Create an entry for the destination file.

        srcdir_id = self._get_dir_id(srcdir)
        assert(srcdir_id != None)
        srcfile_id = self._get_file_id(srcdir_id, srcfname)
        assert(srcfile_id != None)
        srccontent_id = self._get_file_contentid(srcfile_id)
        self._updatecur.execute('DELETE FROM FsFileMetaData where ROWID=?',(srcfile_id,))
        self._updatecur.execute("INSERT INTO FsFileMetaData(name, parent, fileid) \
                            VALUES(?,?,?)",(dstfname, dstdirid, srccontent_id))
Code example #2
File: sqlitefs.py (project: Liryna/pyfilesystem)
    def _rename_file(self, src, dst):
        '''
        rename source file 'src' to destination file 'dst'
        '''
        srcdir = dirname(src)
        srcfname = basename(src)
        dstdir = dirname(dst)
        dstfname = basename(dst)
        #Make sure that the destination directory exists and the destination file
        #does not exist.
        dstdirid = self._get_dir_id(dstdir)
        if( dstdirid == None):
            raise ParentDirectoryMissingError(dst)
        dstfile_id = self._get_file_id(dstdirid, dstfname)
        if( dstfile_id != None):
            raise DestinationExistsError(dst)
        #All checks are done. Delete the entry for the source file.
        #Create an entry for the destination file.

        srcdir_id = self._get_dir_id(srcdir)
        assert(srcdir_id != None)
        srcfile_id = self._get_file_id(srcdir_id, srcfname)
        assert(srcfile_id != None)
        srccontent_id = self._get_file_contentid(srcfile_id)
        self._updatecur.execute('DELETE FROM FsFileMetaData where ROWID=?',(srcfile_id,))
        self._updatecur.execute("INSERT INTO FsFileMetaData(name, parent, fileid) \
                            VALUES(?,?,?)",(dstfname, dstdirid, srccontent_id))
Code example #3
    def validate(self):
        validation_errors = []
        expected_files = set()

        expected_files.add('/' + path.basename(self.db_file))

        for item in self.all():
            # validate item
            item_errors, item_expected_files = self.validate_item(item)
            validation_errors.extend(item_errors)
            expected_files |= item_expected_files

        stored_files = set({
            f
            for f in self.remote_fs.walk.files()
            if not f.endswith('/' + path.basename(self.db_file) + "-journal")
        })

        if expected_files - stored_files:
            validation_errors.append(
                "missing files: ('%s')" %
                "', '".join(expected_files - stored_files))
        if stored_files - expected_files:
            validation_errors.append(
                "additional files: ('%s')" %
                "', '".join(stored_files - expected_files))

        return validation_errors
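A minimal stand-alone sketch of the set arithmetic used in the validation above: anything expected but not stored is reported as missing, anything stored but not expected as additional. The file names are hypothetical.

# Hypothetical file sets for illustration.
expected_files = {"/db.sqlite", "/items/a.bin"}
stored_files = {"/db.sqlite", "/items/b.bin"}

validation_errors = []
if expected_files - stored_files:
    validation_errors.append(
        "missing files: ('%s')" % "', '".join(expected_files - stored_files))
if stored_files - expected_files:
    validation_errors.append(
        "additional files: ('%s')" % "', '".join(stored_files - expected_files))

print(validation_errors)
# ["missing files: ('/items/a.bin')", "additional files: ('/items/b.bin')"]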
Code example #4
    def rename_pdf_trans_filename(self, filename):

        if filename.find("_") == 3:
            name, ext = path.splitext(path.basename(filename))

            return "%s%s%s%s" % (name[3:], "-", name[0:2], ext)
        else:
            return path.basename(filename)
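For illustration, the same renaming rule applied to hypothetical inputs, using only the standard library. Note that the slice name[3:] keeps the underscore when the prefix is reattached as a suffix.

from os import path

def rename_pdf_trans_filename(filename):
    # Same logic as above, as a free function.
    if filename.find("_") == 3:
        name, ext = path.splitext(path.basename(filename))
        return "%s%s%s%s" % (name[3:], "-", name[0:2], ext)
    return path.basename(filename)

print(rename_pdf_trans_filename("ABC_report.pdf"))  # '_report-AB.pdf'
print(rename_pdf_trans_filename("report.pdf"))      # 'report.pdf'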
Code example #5
File: hst_filename.py (project: SETI/pdart)
 def __init__(self, filename: str) -> None:
     self.filename = filename
     if len(basename(filename)) <= 6:
         raise ValueError("Filename must be at least six characters long.")
     basename2 = basename(filename)
     if basename2[0].lower() not in ACCEPTED_LETTER_CODES:
         raise ValueError(f"First char of filename {basename2!r} must be " +
                          f"in {ACCEPTED_LETTER_CODES!r}.")
Code example #6
    def move(self, src_path, dst_path, overwrite=False):
        _CheckPath(src_path)
        _CheckPath(dst_path)
        with self._lock:
            if not overwrite and self.exists(dst_path):
                raise DestinationExists(dst_path)
            driveItemResponse = self.session.get(_PathUrl(src_path, ""))
            if driveItemResponse.status_code == 404:
                raise ResourceNotFound(src_path)
            driveItemResponse.raise_for_status()
            driveItem = driveItemResponse.json()

            if "folder" in driveItem:
                raise FileExpected(src_path)

            itemUpdate = {}

            newFilename = basename(dst_path)
            if not self.isdir(dst_path) and newFilename != basename(src_path):
                itemUpdate["name"] = newFilename

            parentDir = dirname(dst_path)
            if parentDir != dirname(src_path):
                parentDirItem = self.session.get(_PathUrl(parentDir, ""))
                if parentDirItem.status_code == 404:
                    raise ResourceNotFound(parentDir)
                parentDirItem.raise_for_status()
                itemUpdate["parentReference"] = {
                    "id": parentDirItem.json()["id"]
                }

            itemId = driveItem["id"]
            response = self.session.patch(_ItemUrl(itemId, ""),
                                          json=itemUpdate)
            if response.status_code == 409 and overwrite is True:
                # delete the existing version and then try again
                response = self.session.delete(_PathUrl(dst_path, ""))
                response.raise_for_status()

                # try again
                response = self.session.patch(_ItemUrl(itemId, ""),
                                              json=itemUpdate)
                response.raise_for_status()
                return
            if response.status_code == 409 and overwrite is False:
                debug(
                    "Retrying move in case it's an erroneous error (see issue #7)"
                )
                response = self.session.patch(_ItemUrl(itemId, ""),
                                              json=itemUpdate)
                response.raise_for_status()
                return
            response.raise_for_status()
Code example #7
    def test_populate_from_bad_fits_file(self) -> None:
        fits_product_lidvid = "urn:nasa:pds:hst_09059:data_acs_raw:j6gp02lzq_raw::2.0"
        os_filepath = path_to_testfile("j6gp02lzq_raw.fits")

        populate_database_from_fits_file(self.db, os_filepath,
                                         fits_product_lidvid)

        self.assertFalse(
            self.db.fits_file_exists(basename(os_filepath),
                                     fits_product_lidvid))

        self.assertTrue(
            self.db.bad_fits_file_exists(basename(os_filepath),
                                         fits_product_lidvid))
Code example #8
    def copy(self, src, dst, overwrite=False, chunk_size=65536):
        if self.isdir(src):
            raise ResourceInvalidError(src)
        if not self.isfile(src):
            if not self.isdir(dirname(src)):
                raise ParentDirectoryMissingError(src)
            raise ResourceNotFoundError(src)

        if self.isdir(dst):
            raise ResourceInvalidError(dst)
        if self.isfile(dst):
            if overwrite:
                self.remove(dst)
            else:
                raise DestinationExistsError(dst)
        else:
            if not self.isdir(dirname(dst)):
                raise ParentDirectoryMissingError(dst)

        parent_path = self._ids[dirname(dst)]
        copy_fh = {'title': basename(dst), 'parents': [{'id': parent_path}]}
        copy_fh = self.client.auth.service.files() \
                                  .copy(fileId=self._ids[src], body=copy_fh) \
                                  .execute()
        self._ids[dst] = copy_fh['id']
Code example #9
    def serve_file(self, request):
        """Serve a file, guessing a mime-type"""
        path = request.path
        serving_file = None
        try:
            serving_file = self.serve_fs.open(path, 'rb')
        except Exception as e:
            if serving_file is not None:
                serving_file.close()
            return self.serve_500(request, str(e))

        mime_type = mimetypes.guess_type(basename(path))[0] or b'text/plain'
        file_size = self.serve_fs.getsize(path)
        headers = [(b'Content-Type', bytes(mime_type)),
                   (b'Content-Length', bytes(file_size))]

        def gen_file():
            chunk_size = self.chunk_size
            read = serving_file.read
            try:
                while 1:
                    data = read(chunk_size)
                    if not data:
                        break
                    yield data
            finally:
                serving_file.close()

        request.start_response(b'200 OK', headers)
        return gen_file()
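The mime-type guess with a plain-text fallback, shown on its own (paths are hypothetical):

import mimetypes
from os.path import basename

# guess_type returns (type, encoding); fall back to text/plain when the
# extension is unknown, mirroring the handler above.
print(mimetypes.guess_type(basename("/static/report.pdf"))[0] or "text/plain")
# 'application/pdf'
print(mimetypes.guess_type(basename("/static/NOTES"))[0] or "text/plain")
# 'text/plain'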
Code example #10
	def makedir(self, path, permissions=None, recreate=False):
		_CheckPath(path)
		with self._lock:
			_log.info(f"makedir: {path}, {permissions}, {recreate}")
			parentMetadata = self._itemFromPath(dirname(path))

			if parentMetadata is None:
				raise ResourceNotFound(path=path)

			childMetadata = self._childByName(parentMetadata["id"], basename(path))
			if childMetadata is not None:
				if recreate is False:
					raise DirectoryExists(path=path)
				return SubFS(self, path)

			return self._createSubdirectory(basename(path), path, [parentMetadata["id"]])
Code example #11
File: googledrivefs.py (project: msb/fs.googledrivefs)
    def add_parent(self, path, parent_dir):
        _log.info(f"add_parent: {path} -> {parent_dir}")
        _CheckPath(path)
        _CheckPath(parent_dir)
        with self._lock:
            targetPath = join(parent_dir, basename(path))
            idsFromPath = self._itemsFromPath(targetPath)

            # don't allow violation of our requirement to keep filename unique inside new directory
            if targetPath in idsFromPath:
                raise FileExists(targetPath)

            parentDirItem = idsFromPath.get(parent_dir)
            if parentDirItem is None:
                raise ResourceNotFound(parent_dir)

            if parentDirItem["mimeType"] != _folderMimeType:
                raise DirectoryExpected(parent_dir)

            sourceItem = self._itemFromPath(path)
            if sourceItem is None:
                raise ResourceNotFound(path)

            self.drive.files().update(
                fileId=sourceItem["id"],
                addParents=parentDirItem["id"],
                body={}).execute(num_retries=self.retryCount)
Code example #12
File: hidefs.py (project: pombreda/file-versioning)
 def walkfiles(self, path="/", wildcard=None, dir_wildcard=None,
               search="breadth", ignore_errors=False):
     if dir_wildcard is not None:
         #  If there is a dir_wildcard, fall back to the default impl
         #  that uses listdir().  Otherwise we run the risk of enumerating
         #  lots of directories that will just be thrown away.
         for item in super(HideFS, self).walkfiles(path, wildcard,
                                                   dir_wildcard, search,
                                                   ignore_errors):
             yield item
     #  Otherwise, the wrapped FS may provide a more efficient impl
     #  which we can use directly.
     else:
         if wildcard is not None and not callable(wildcard):
             wildcard_re = re.compile(fnmatch.translate(wildcard))
             wildcard = lambda fn: bool(wildcard_re.match(fn))
         walk = self.wrapped_fs.walkfiles(self._encode(path),
                                          search=search,
                                          ignore_errors=ignore_errors)
         for filepath in walk:
             filepath = abspath(self._decode(filepath))
             if wildcard is not None:
                 if not wildcard(basename(filepath)):
                     continue
             if self.is_hidden(filepath):
                 continue
             yield filepath
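How a glob-style wildcard string becomes the callable used above, shown in isolation with fnmatch and re from the standard library:

import fnmatch
import re

# Compile the glob pattern once, then test basenames against it.
wildcard_re = re.compile(fnmatch.translate("*.txt"))
wildcard = lambda fn: bool(wildcard_re.match(fn))

print(wildcard("notes.txt"))   # True
print(wildcard("notes.txt~"))  # False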
Code example #13
    def setcontents(self,
                    path,
                    data='',
                    encoding=None,
                    errors=None,
                    chunk_size=65536):
        if self.isdir(path):
            raise ResourceInvalidError(path)

        if hasattr(data, 'read'):
            data = data.read()

        if self.isfile(path):
            fh = self.client.CreateFile({'id': self._ids[path]})
            fh.SetContentString(data)
            fh.Upload()
        else:
            parent_path = self._ids[dirname(path)]
            fh = self.client.CreateFile({
                'title': basename(path),
                'parents': [{
                    'id': parent_path
                }]
            })
            fh.SetContentString(data)
            fh.Upload()
            self._ids[path] = fh['id']
Code example #14
    def getinfo(self, path, namespaces=None):
        self.check()
        namespaces = namespaces or ()
        _path = self.validatepath(path)
        _stat = self._fs.getinfo(_path)

        info = {
            "basic": {"name": basename(_path), "is_dir": stat.S_ISDIR(_stat["st_mode"])}
        }

        if "details" in namespaces:
            info["details"] = {
                "_write": ["accessed", "modified"],
                "accessed": _stat["st_atime"],
                "modified": _stat["st_mtime"],
                "size": _stat["st_size"],
                "type": int(
                    self.STAT_TO_RESOURCE_TYPE.get(
                        stat.S_IFMT(_stat["st_mode"]), ResourceType.unknown
                    )
                ),
            }
        if "stat" in namespaces:
            info["stat"] = _stat

        if "access" in namespaces:
            info["access"] = {
                "permissions": Permissions(mode=_stat["st_mode"]).dump(),
                "uid": 1000,  # TODO: fix
                "gid": 100,  # TODO: fix
            }

        return Info(info)
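The stat-module checks behind the "basic" namespace above, demonstrated directly on a real stat result:

import os
import stat

st = os.stat(".")
# S_ISDIR tests the file-type bits of st_mode; S_IFMT extracts those bits so
# they can be looked up in a table, as getinfo() above does.
print(stat.S_ISDIR(st.st_mode))                 # True for a directory
print(stat.S_IFMT(st.st_mode) == stat.S_IFDIR)  # equivalent check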
Code example #15
	def copy(self, src_path, dst_path, overwrite=False):
		info(f"copy: {src_path} -> {dst_path}, {overwrite}")
		_CheckPath(src_path)
		_CheckPath(dst_path)
		with self._lock:
			parentDir = dirname(dst_path)
			parentDirItem = self._itemFromPath(parentDir)

			if parentDirItem is None:
				raise ResourceNotFound(parentDir)

			dstItem = self._itemFromPath(dst_path)
			if overwrite is False and dstItem is not None:
				raise DestinationExists(dst_path)

			srcItem = self._itemFromPath(src_path)
			if srcItem is None:
				raise ResourceNotFound(src_path)

			if srcItem["mimeType"] == _folderMimeType:
				raise FileExpected(src_path)

			# TODO - we should really replace the contents of the existing file with the new contents, so that the history is correct
			if dstItem is not None:
				self.drive.files().delete(fileId=dstItem["id"]).execute(num_retries=self.retryCount)

			newMetadata = {"parents": [parentDirItem["id"]], "name": basename(dst_path)}
			self.drive.files().copy(fileId=srcItem["id"], body=newMetadata).execute(num_retries=self.retryCount)
Code example #16
    def gzip_file(self, target_path, html):
        """
        Zips up the provided HTML as a companion for the provided path.

        Intended to take advantage of the peculiarities of
        Amazon S3's GZIP service.

        The mtime option, which writes a timestamp into the output file,
        is set to 0 to avoid having s3cmd do unnecessary uploads because
        of differences in the timestamp.
        """
        logger.debug("Gzipping to {}{}".format(self.fs_name, target_path))

        # Write GZIP data to an in-memory buffer
        data_buffer = six.BytesIO()
        kwargs = dict(filename=path.basename(target_path),
                      mode='wb',
                      fileobj=data_buffer)
        if float(sys.version[:3]) >= 2.7:
            kwargs['mtime'] = 0
        with gzip.GzipFile(**kwargs) as f:
            f.write(six.binary_type(html))

        # Write that buffer out to the filesystem
        with self.fs.open(smart_text(target_path), 'wb') as outfile:
            outfile.write(data_buffer.getvalue())
            outfile.close()
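A minimal, framework-free sketch of the same trick: writing the gzip stream into an in-memory buffer with mtime=0 so repeated builds produce byte-identical output. The helper name is made up.

import gzip
import io

def gzip_bytes(name, payload):
    # mtime=0 pins the timestamp field in the gzip header, so identical
    # input always yields identical compressed bytes.
    buf = io.BytesIO()
    with gzip.GzipFile(filename=name, mode="wb", fileobj=buf, mtime=0) as gz:
        gz.write(payload)
    return buf.getvalue()

assert gzip_bytes("index.html", b"<html/>") == gzip_bytes("index.html", b"<html/>")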
Code example #17
    def _get_fs(self, create_dir=True):
        """."""
        filedir = dirname(self.fileurl)
        filename = basename(self.fileurl)

        return (opener.opendir(filedir, writeable=True,
                               create_dir=create_dir), filename)
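The dirname/basename split on its own, assuming the names come from fs.path (they behave like their os.path counterparts for a path like this); the URL below is hypothetical.

from fs.path import basename, dirname

fileurl = "/bucket/2024/report.pdf"
print(dirname(fileurl))   # '/bucket/2024'
print(basename(fileurl))  # 'report.pdf'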
Code example #18
File: hidefs.py (project: smartfile/file-versioning)
 def walkfiles(self, path="/", wildcard=None, dir_wildcard=None,
               search="breadth", ignore_errors=False):
     if dir_wildcard is not None:
         #  If there is a dir_wildcard, fall back to the default impl
         #  that uses listdir().  Otherwise we run the risk of enumerating
         #  lots of directories that will just be thrown away.
         for item in super(HideFS, self).walkfiles(path, wildcard,
                                                   dir_wildcard, search,
                                                   ignore_errors):
             yield item
     #  Otherwise, the wrapped FS may provide a more efficient impl
     #  which we can use directly.
     else:
         if wildcard is not None and not callable(wildcard):
             wildcard_re = re.compile(fnmatch.translate(wildcard))
             wildcard = lambda fn: bool(wildcard_re.match(fn))
         walk = self.wrapped_fs.walkfiles(self._encode(path),
                                          search=search,
                                          ignore_errors=ignore_errors)
         for filepath in walk:
             filepath = abspath(self._decode(filepath))
             if wildcard is not None:
                 if not wildcard(basename(filepath)):
                     continue
             if self.is_hidden(filepath):
                 continue
             yield filepath
Code example #19
    def __init__(self, fs, path, parsedMode):
        self.fs = fs
        self.path = path
        self.parentMetadata = self.fs._itemFromPath(dirname(self.path))
        self.thisMetadata = self.fs._itemFromPath(basename(
            self.path))  # may be None
        # keeping a parsed mode separate from the base class's mode member
        self.parsedMode = parsedMode
        fileHandle, self.localPath = mkstemp(
            prefix="pyfilesystem-googledrive-", text=False)
        close(fileHandle)

        if (self.parsedMode.reading
                or self.parsedMode.appending) and not self.parsedMode.truncate:
            if self.thisMetadata is not None:
                initialData = self.fs.drive.files().get_media(
                    fileId=self.thisMetadata["id"]).execute()
                with open(self.localPath, "wb") as f:
                    f.write(initialData)
        platformMode = self.parsedMode.to_platform()
        platformMode += ("b" if "b" not in platformMode else "")
        super().__init__(f=open(self.localPath, mode=platformMode))
        if self.parsedMode.appending:
            # seek to the end
            self.seek(0, SEEK_END)
Code example #20
 def _isfile(self, path):
     path = normpath(path)
     filedir = dirname(path)
     filename = basename(path)
     dirid = self._get_dir_id(filedir)
     return (dirid is not None
             and self._get_file_id(dirid, filename) is not None)
Code example #21
    def copy(self, src, dst, overwrite=False, chunk_size=65536):
        if self.isdir(src):
            raise ResourceInvalidError(src)
        if not self.isfile(src):
            if not self.isdir(dirname(src)):
                raise ParentDirectoryMissingError(src)
            raise ResourceNotFoundError(src)

        if self.isdir(dst):
            raise ResourceInvalidError(dst)
        if self.isfile(dst):
            if overwrite:
                self.remove(dst)
            else:
                raise DestinationExistsError(dst)
        else:
            if not self.isdir(dirname(dst)):
                raise ParentDirectoryMissingError(dst)

        parent_path = self._ids[dirname(dst)]
        copy_fh = {'title': basename(dst), 'parents': [{'id': parent_path}]}
        copy_fh = self.client.auth.service.files() \
                                  .copy(fileId=self._ids[src], body=copy_fh) \
                                  .execute()
        self._ids[dst] = copy_fh['id']
Code example #22
File: base.py (project: datadesk/django-bakery)
    def gzip_file(self, target_path, html):
        """
        Zips up the provided HTML as a companion for the provided path.

        Intended to take advantage of the peculiarities of
        Amazon S3's GZIP service.

        The mtime option, which writes a timestamp into the output file,
        is set to 0 to avoid having s3cmd do unnecessary uploads because
        of differences in the timestamp.
        """
        logger.debug("Gzipping to {}{}".format(self.fs_name, target_path))

        # Write GZIP data to an in-memory buffer
        data_buffer = six.BytesIO()
        kwargs = dict(
            filename=path.basename(target_path),
            mode='wb',
            fileobj=data_buffer
        )
        if float(sys.version[:3]) >= 2.7:
            kwargs['mtime'] = 0
        with gzip.GzipFile(**kwargs) as f:
            f.write(six.binary_type(html))

        # Write that buffer out to the filesystem
        with self.fs.open(smart_text(target_path), 'wb') as outfile:
            outfile.write(data_buffer.getvalue())
            outfile.close()
Code example #23
    def remove(self, path):
        self._initdb()
        path = normpath(path)
        if (self.isdir(path) == True):
            #path is actually a directory
            raise ResourceInvalidError(path)

        filedir = dirname(path)
        filename = basename(path)
        dirid = self._get_dir_id(filedir)
        fileid = self._get_file_id(dirid, filename)
        if (fileid == None):
            raise ResourceNotFoundError(path)

        content_id = self._get_file_contentid(fileid)

        self._updatecur.execute("DELETE FROM FsFileMetaData where ROWID=?",
                                (fileid, ))
        #Check whether any other file points to the same location. If not,
        #delete the content as well.
        self._querycur.execute(
            'SELECT count(*) FROM FsFileMetaData where fileid=?',
            (content_id, ))
        row = fetchone(self._querycur)
        if (row == None or row[0] == 0):
            self._updatecur.execute("DELETE FROM FsFileTable where ROWID=?",
                                    (content_id, ))
Code example #24
File: reader.py (project: esaye/moya)
    def read(self, path, app=None, mime_type=None):
        """Read a file"""
        if not path.startswith('/'):
            if app is None:
                raise RelativePathError("Can't use relative data paths with an application")
            path = pathjoin(app.data_directory, path)

        filename = basename(path)
        if mime_type is None:
            mime_type, encoding = mimetypes.guess_type(filename)

        _type, sub_type = mime_type.split('/', 1)

        if mime_type == "text/plain":
            data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
        elif mime_type == "application/json":
            with self.fs.open(path, 'rt', encoding="utf-8") as f:
                data = json.load(f)
        elif mime_type == "application/octet-stream":
            data = self.fs.getcontents(path, mode="rb")

        elif _type == "text":
            data = self.fs.getcontents(path, mode="rt", encoding="utf-8")

        else:
            raise UnknownFormat("Moya doesn't know how to read file '{}' (in {!r})".format(path, self.fs))

        return data
Code example #25
    def close(self):
        super().close()  # close the file so that it's readable for upload
        if self.parsedMode.writing:
            # google doesn't accept the fractional second part
            now = datetime.utcnow().replace(microsecond=0).isoformat() + "Z"
            onlineMetadata = {"modifiedTime": now}

            upload = MediaFileUpload(self.localPath, resumable=True)
            if self.thisMetadata is None:
                onlineMetadata.update({
                    "name": basename(self.path),
                    "parents": [self.parentMetadata["id"]],
                    "createdTime": now
                })
                request = self.fs.drive.files().create(body=onlineMetadata,
                                                       media_body=upload)
            else:
                request = self.fs.drive.files().update(
                    fileId=self.thisMetadata["id"], body={}, media_body=upload)

            response = None
            while response is None:
                status, response = request.next_chunk()
            # MediaFileUpload doesn't close its file handle, so we have to work around it
            upload._fd.close()
        remove(self.localPath)
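The timestamp format used above, on its own: an ISO 8601 string in UTC with the fractional seconds stripped, which is what the Drive metadata fields expect.

from datetime import datetime

# Drop microseconds, append "Z" to mark UTC.
now = datetime.utcnow().replace(microsecond=0).isoformat() + "Z"
print(now)  # e.g. '2024-05-01T12:34:56Z'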
Code example #26
    def test_get_time_coordinates(self) -> None:
        db = create_bundle_db_in_memory()
        db.create_tables()
        fits_product_lidvid = "urn:nasa:pds:hst_13012:data_acs_raw:jbz504eoq_raw::2.3"
        os_filepath = path_to_testfile("jbz504eoq_raw.fits")

        populate_database_from_fits_file(db, os_filepath, fits_product_lidvid)

        file_basename = basename(os_filepath)

        card_dicts = db.get_card_dictionaries(fits_product_lidvid,
                                              file_basename)

        nb = get_time_coordinates(
            get_start_stop_times(
                DictLookup("test_get_time_coordinates", card_dicts)))
        doc = xml.dom.getDOMImplementation().createDocument(None, None, None)
        str: bytes = nb(doc).toxml().encode()
        str = pretty_print(str)

        expected = b"""<?xml version="1.0"?>
<Time_Coordinates>
  <start_date_time>2012-09-27T20:23:28Z</start_date_time>
  <stop_date_time>2012-09-27T20:27:58Z</stop_date_time>
</Time_Coordinates>
"""
        self.assertEqual(expected, str)
Code example #27
    def _get_fs(self, create_dir=True):
        """Return tuple with filesystem and filename."""
        filedir = dirname(self.fileurl)
        filename = basename(self.fileurl)

        return (opener.opendir(filedir, writeable=True,
                               create_dir=create_dir), filename)
Code example #28
    def makedir(self, path, recursive=False, allow_recreate=False):
        """Creates a file with mimeType _folder_mimetype
        which acts as a folder in GoogleDrive."""
        if self.isdir(path):
            if allow_recreate:
                return
            else:
                raise DestinationExistsError(path)
        if self.isfile(path):
            raise ResourceInvalidError(path)
        if not recursive and not self.isdir(dirname(path)):
            raise ParentDirectoryMissingError(path)

        if recursive:
            self.makedir(dirname(path),
                         recursive=recursive,
                         allow_recreate=True)

        parent_id = self._ids[dirname(path)]
        fh = self.client.CreateFile({
            'title': basename(path),
            'mimeType': self._folder_mimetype,
            'parents': [{
                'id': parent_id
            }]
        })
        fh.Upload()
        self._ids[path] = fh['id']
Code example #29
File: __init__.py (project: Gianfranco753/fs-smb)
 def _listPath(self, path, list_contents=False):
     """ Path listing with SMB errors converted. """
     # Explicitly convert the SMB errors to be able to catch the
     # PyFilesystem error while listing the path.
     if list_contents:
         try:
             # List all contents of a directory.
             return _conv_smb_errors(self.conn.listPath)(
                 self.share, normpath(path))
         except ResourceNotFoundError:
             if self.isfile(path):
                 raise ResourceInvalidError(path)
             raise
     else:
         # List a specific path (file or directory) by listing the contents
         # of the containing directory and comparing the filename.
         pathdir = dirname(path)
         searchpath = basename(path)
         for i in _conv_smb_errors(self.conn.listPath)(self.share, pathdir):
             if i.filename == '..':
                 continue
             elif ((i.filename == '.' and searchpath == '') or
                   i.filename == searchpath):
                 return i
         raise ResourceNotFoundError(path)
Code example #30
 def _info_from_object(self, obj, namespaces):
     """Make an info dict from an s3 Object."""
     key = obj.key
     path = self._key_to_path(key)
     name = basename(path.rstrip('/'))
     is_dir = key.endswith(self.delimiter)
     info = {"basic": {"name": name, "is_dir": is_dir}}
     if 'details' in namespaces:
         _type = int(
             ResourceType.directory if is_dir else ResourceType.file)
         info['details'] = {
             'accessed': None,
             'modified': datetime_to_epoch(obj.last_modified),
             'size': obj.content_length,
             'type': _type
         }
     if 's3' in namespaces:
         s3info = info['s3'] = {}
         for name in self._object_attributes:
             value = getattr(obj, name, None)
             if isinstance(value, datetime):
                 value = datetime_to_epoch(value)
             s3info[name] = value
     if 'urls' in namespaces:
         url = self.client.generate_presigned_url(ClientMethod='get_object',
                                                  Params={
                                                      'Bucket':
                                                      self._bucket_name,
                                                      'Key': key
                                                  })
         info['urls'] = {'download': url}
     return info
Code example #31
File: _s3fs.py (project: ptzagk/s3fs)
    def _info_from_object(self, obj, namespaces):
        """Make an info dict from an s3 Object."""
        key = obj.key
        path = self._key_to_path(key)
        name = basename(path.rstrip('/'))
        is_dir = key.endswith(self.delimiter)
        info = {"basic": {"name": name, "is_dir": is_dir}}
        if 'details' in namespaces:
            _type = int(
                ResourceType.directory if is_dir else ResourceType.file)
            info['details'] = {
                'accessed': None,
                'modified': datetime_to_epoch(obj.last_modified),
                'size': obj.content_length,
                'type': _type
            }
        if 's3' in namespaces:
            s3info = info['s3'] = {}
            for name in self._object_attributes:
                value = getattr(obj, name, None)
                if isinstance(value, datetime):
                    value = datetime_to_epoch(value)
                s3info[name] = value

        return info
Code example #32
    def read(self, path, app=None, mime_type=None):
        """Read a file"""
        if not path.startswith('/'):
            if app is None:
                raise RelativePathError(
                    "Can't use relative data paths with an application")
            path = pathjoin(app.data_directory, path)

        filename = basename(path)
        if mime_type is None:
            mime_type, encoding = mimetypes.guess_type(filename)

        _type, sub_type = mime_type.split('/', 1)

        if mime_type == "text/plain":
            data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
        elif mime_type == "application/json":
            with self.fs.open(path, 'rb') as f:
                data = json.load(f)
        elif mime_type == "application/octet-stream":
            data = self.fs.getcontents(path, mode="rb")

        elif _type == "text":
            data = self.fs.getcontents(path, mode="rt", encoding="utf-8")

        else:
            raise UnknownFormat(
                "Moya doesn't know how to read file '{}' (in {!r})".format(
                    path, self.fs))

        return data
Code example #33
File: __init__.py (project: pombreda/fs-smb)
 def _listPath(self, path, list_contents=False):
     """ Path listing with SMB errors converted. """
     # Explicitly convert the SMB errors to be able to catch the
     # PyFilesystem error while listing the path.
     if list_contents:
         try:
             # List all contents of a directory.
             return _conv_smb_errors(self.conn.listPath)(self.share,
                                                         normpath(path))
         except ResourceNotFoundError:
             if self.isfile(path):
                 raise ResourceInvalidError(path)
             raise
     else:
         # List a specific path (file or directory) by listing the contents
         # of the containing directory and comparing the filename.
         pathdir = dirname(path)
         searchpath = basename(path)
         for i in _conv_smb_errors(self.conn.listPath)(self.share, pathdir):
             if i.filename == '..':
                 continue
             elif ((i.filename == '.' and searchpath == '')
                   or i.filename == searchpath):
                 return i
         raise ResourceNotFoundError(path)
Code example #34
    def listdir(self, path: Text) -> List[Text]:
        """ Get a list of resources in a directory. """
        npath = self.normalize_path(path)
        if not self.exists(npath):
            raise errors.ResourceNotFound(path)
        if not self.isdir(npath):
            raise errors.DirectoryExpected(path)

        qpath = npath + "/%"
        if npath == "/":
            qpath = "/%"

        cursor = self.connection.cursor()
        cursor.execute(
            "SELECT name FROM sqlar WHERE name LIKE ?",
            (qpath,)
        )
        rows = list(cursor.fetchall())
        cursor.close()

        children = []
        for row in rows:
            if row['name'] == npath or "/" in row['name'][len(npath):].strip("/"):
                continue
            children.append(basename(row['name']))

        return children
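A self-contained illustration of the LIKE-then-filter approach above, against a throwaway in-memory table; the table contents are hypothetical.

import sqlite3
from os.path import basename

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE sqlar (name TEXT)")
con.executemany("INSERT INTO sqlar VALUES (?)",
                [("/a",), ("/a/x.txt",), ("/a/b",), ("/a/b/y.txt",)])

npath = "/a"
rows = con.execute("SELECT name FROM sqlar WHERE name LIKE ?",
                   (npath + "/%",)).fetchall()

# Keep only direct children: reject rows whose remainder still contains "/".
children = [basename(name) for (name,) in rows
            if "/" not in name[len(npath):].strip("/")]
print(children)  # ['x.txt', 'b']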
Code example #35
    def collect_img(self, acron, issue_folder, pack_name):

        walker = Walker(filter=["*" + pack_name + "*"],
                        max_depth=2,
                        exclude_dirs=["html"])

        img_path = path.join(self.img_fs.root_path, acron, issue_folder)

        for img in walker.files(fs.open_fs(img_path)):

            img_path = path.join(acron, issue_folder, path.basename(img))

            target_img_path = path.join(acron, issue_folder, pack_name,
                                        path.basename(img))

            self.copy(img_path, target_img_path, src_fs=self.img_fs)
Code example #36
 def _info_from_object(self, obj, namespaces):
     """Make an info dict from an s3 Object."""
     key = obj.key
     path = self._key_to_path(key)
     name = basename(path.rstrip("/"))
     is_dir = key.endswith(self.delimiter)
     info = {"basic": {"name": name, "is_dir": is_dir}}
     if "details" in namespaces:
         _type = int(ResourceType.directory if is_dir else ResourceType.file)
         info["details"] = {
             "accessed": None,
             "modified": datetime_to_epoch(obj.last_modified),
             "size": obj.content_length,
             "type": _type,
         }
     if "s3" in namespaces:
         s3info = info["s3"] = {}
         for name in self._object_attributes:
             value = getattr(obj, name, None)
             if isinstance(value, datetime):
                 value = datetime_to_epoch(value)
             s3info[name] = value
     if "urls" in namespaces:
         url = self.client.generate_presigned_url(
             ClientMethod="get_object",
             Params={"Bucket": self._bucket_name, "Key": key},
         )
         info["urls"] = {"download": url}
     return info
Code example #37
File: pyfs.py (project: nharraud/invenio-files-rest)
    def _get_fs(self, create_dir=True):
        """Return tuple with filesystem and filename."""
        filedir = dirname(self.fileurl)
        filename = basename(self.fileurl)

        return (
            opener.opendir(filedir, writeable=True, create_dir=create_dir),
            filename
        )
Code example #38
File: pyfs.py (project: drjova/invenio-files-rest)
    def _get_fs(self, create_dir=True):
        """."""
        filedir = dirname(self.fileurl)
        filename = basename(self.fileurl)

        return (
            opener.opendir(filedir, writeable=True, create_dir=create_dir),
            filename
        )
Code example #39
File: partedfs.py (project: TrienDo/cuckoodrive)
 def listparts(self, path, full=True, absolute=False):
     """
     Return all parts for a given path.
     By default it will always return the full paths.
     :param path: Path to check for parts
      :returns: list of paths of the parts
     """
     return self.wrapped_fs.listdir(path=dirname(path),
                                    wildcard="{0}.part*".format(basename(path)),
                                    full=full, absolute=absolute, files_only=True)
Code example #40
File: image.py (project: chrmorais/moya)
    def logic(self, context):
        params = self.get_parameters(context)
        img = self.get_image(context, params)
        try:
            img.load()
        except Exception as e:
            self.throw("image.read-fail", "Failed to read image ({})".format(e))

        moya_image = MoyaImage(img, filename=basename(params.path or ''))
        self.set_context(context, params.dst, moya_image)
Code example #41
    def rename(self, src, dst):
        if not self.exists(src):
            raise ResourceNotFoundError(src)
        if self.exists(dst):
            raise DestinationExistsError(dst)
        if isprefix(src, dst):
            raise ResourceInvalidError(dst)

        fh = self.client.CreateFile({'id': self._ids[src],
                                     'title': basename(dst)})
        fh.Upload()
        self._ids[dst] = self._ids.pop(src)
Code example #42
File: __init__.py (project: dmitry-viskov/smbfs)
    def move(self, src, dst, overwrite=False, chunk_size=16384):
        if self.isfile(src):
            src = self._prepare_normpath(src)
            dst = self._prepare_normpath(dst)

            if self.isdir(dst):
                dst = '/'.join([dst, basename(src)])
            if not overwrite and self.exists(dst):
                raise DestinationExistsError(dst)

            self.rename(src, dst)
        else:
            raise ResourceInvalidError(src, msg="Source is not a file: %(path)s")
Code example #43
File: fstags.py (project: chrmorais/moya)
    def logic(self, context):
        params = self.get_parameters(context)
        if self.has_parameter('fsobj'):
            walk_fs = params.fsobj
        else:
            walk_fs = self.archive.get_filesystem(params.fs)

        wildcard = lambda name: params.files(context, name=basename(name)) if self.has_parameter('files') else lambda name: True
        dir_wildcard = lambda name: params.dirs(context, name=basename(name)) if self.has_parameter('dirs') else lambda name: True

        paths = []
        add_path = paths.append

        for dirname, dir_paths in walk_fs.walk(path=params.path,
                                               search=params.search,
                                               wildcard=wildcard,
                                               dir_wildcard=dir_wildcard,
                                               ignore_errors=True):

            for path in dir_paths:
                add_path(pathjoin(dirname, path))
        self.set_context(context, params.dst, paths)
Code example #44
File: sqlitefs.py (project: Liryna/pyfilesystem)
 def _rename_dir(self, src, dst):
     src = remove_end_slash(src)
     dst = remove_end_slash(dst)
     dstdirid = self._get_dir_id(dst)
     if( dstdirid != None):
         raise DestinationExistsError(dst)
     dstparent = dirname(dst)
     dstparentid = self._get_dir_id(dstparent)
     if(dstparentid == None):
         raise ParentDirectoryMissingError(dst)
     srcdirid = self._get_dir_id(src)
     assert(srcdirid != None)
     dstdname = basename(dst)
     self._updatecur.execute('UPDATE FsDirMetaData SET name=?, fullpath=?, \
                 parentid=? where ROWID=?',(dstdname, dst, dstparentid, srcdirid,))
Code example #45
    def getinfo(self, path):
        if self.isdir(path):
            raise ResourceInvalidError(path)
        if not self.isfile(path):
            if not self.isdir(dirname(path)):
                raise ParentDirectoryMissingError(path)
            raise ResourceNotFoundError(path)

        fh = self.client.CreateFile({'id': self._ids[path],
                                     'title': basename(path)})
        return {
            'size': int(fh['fileSize']),
            'created_time': fh['createdDate'],
            'accessed_time': fh['lastViewedByMeDate'],
            'modified_time': fh['modifiedDate']
        }
Code example #46
File: wsgi.py (project: anthonybishopric/pyboxfs)
 def serve_dir(self, request):
     """Serve an index page"""
     fs = self.serve_fs
     isdir = fs.isdir        
     path = request.path                     
     dirinfo = fs.listdirinfo(path, full=True, absolute=True)        
     entries = []
     
     for p, info in dirinfo:
         entry = {}
         entry['path'] = p
         entry['name'] = basename(p)
         entry['size'] = info.get('size', 'unknown')
         entry['created_time'] = info.get('created_time')                                                        
         if isdir(p):
             entry['type'] = 'dir'
         else:
             entry['type'] = 'file'                
             
         entries.append(entry)
         
     # Put dirs first, and sort by reverse created time order
     no_time = datetime(1970, 1, 1, 1, 0)
     entries.sort(key=lambda k:(k['type'] == 'dir', k.get('created_time') or no_time), reverse=True)
     
     # Turn datetime to text and tweak names
     for entry in entries:
         t = entry.get('created_time')
         if t and hasattr(t, 'ctime'):
             entry['created_time'] = t.ctime()
         if entry['type'] == 'dir':
             entry['name'] += '/'
 
     # Add an up dir link for non-root
     if path not in ('', '/'):
         entries.insert(0, dict(name='../', path='../', type="dir", size='', created_time='..'))
         
     # Render the mako template
     html = self.dir_template.render(**dict(fs=self.serve_fs,
                                            path=path,
                                            dirlist=entries))
     
     request.start_response('200 OK', [('Content-Type', 'text/html'),
                                       ('Content-Length', '%i' % len(html))])
     
     return [html]
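The two-part sort key used above, isolated: directories first, then newest first, with a 1970 placeholder for entries that have no timestamp. The entries are hypothetical.

from datetime import datetime

no_time = datetime(1970, 1, 1, 1, 0)
entries = [
    {"type": "file", "name": "a.txt", "created_time": datetime(2020, 1, 2)},
    {"type": "dir",  "name": "docs",  "created_time": datetime(2019, 5, 1)},
    {"type": "file", "name": "b.txt", "created_time": None},
]
entries.sort(key=lambda k: (k["type"] == "dir", k.get("created_time") or no_time),
             reverse=True)
print([e["name"] for e in entries])  # ['docs', 'a.txt', 'b.txt']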
Code example #47
File: partedfs.py (project: TrienDo/cuckoodrive)
 def walk(self, path="/", wildcard=None, dir_wildcard=None, search="breadth",
          ignore_errors=False):
     if dir_wildcard is not None:
         for item in super(WrapFS, self).walk(path, wildcard, dir_wildcard, search,
                                              ignore_errors):
             yield item
     else:
         if wildcard is not None and not callable(wildcard):
             wildcard_re = re.compile(fnmatch.translate(wildcard))
             wildcard = lambda fn: bool(wildcard_re.match(fn))
         for (dirpath, filepaths) in self.wrapped_fs.walk(path, search=search,
                                                          ignore_errors=ignore_errors):
             filepaths = [basename(self._decode(pathcombine(dirpath, p)))
                          for p in filepaths]
             if wildcard is not None:
                 filepaths = [p for p in filepaths if wildcard(p)]
             yield (dirpath, filepaths)
Code example #48
File: serve.py (project: chrmorais/moya)
def serve_file(req, fs, path, name=None):
    """Serve a file"""
    res = MoyaResponse()
    mime_type, encoding = mimetypes.guess_type(basename(path))
    if mime_type is None:
        mime_type = b"application/octet-stream" if PY2 else "application/octet-stream"

    if not path or not fs.isfile(path):
        raise logic.EndLogic(http.RespondNotFound())

    serve_file = None
    try:
        file_size = fs.getsize(path)
        info = fs.getinfokeys(path, "modified_time")
        serve_file = fs.open(path, "rb")
    except FSError:
        if serve_file is not None:
            serve_file.close()
        raise logic.EndLogic(http.RespondNotFound())
    else:
        mtime = info.get("modified_time", None)
        if mtime is None:
            mtime = time.time()
        else:
            mtime = datetime_to_epoch(mtime)
        res.date = datetime.utcnow()
        res.content_type = py2bytes(mime_type)
        res.last_modified = mtime
        res.etag = "%i-%i-%s" % (mtime, file_size, md5_hexdigest(path))
        res.server = "Moya/1.0"
        if name is not None:
            res.content_disposition = 'attachment; filename="{}"'.format(name)

        status304 = False
        if req.if_none_match and res.etag:
            status304 = res.etag in req.if_none_match
        elif req.if_modified_since and res.last_modified:
            status304 = res.last_modified <= req.if_modified_since
        if status304:
            res.status = 304
            serve_file.close()
        else:
            res.body_file = serve_file
        res.content_length = file_size
    raise logic.EndLogic(res)
Code example #49
File: sqlitefs.py (project: Liryna/pyfilesystem)
    def makedir(self, path, recursive=False, allow_recreate=False):
        self._initdb()
        path = remove_end_slash(normpath(path))

        if(self._isexist(path)==False):
            parentdir = dirname(path)
            dname = basename(path)

            parent_id = self._get_dir_id(parentdir)
            if( parent_id ==None):
                if( recursive == False):
                    raise ParentDirectoryMissingError(path)
                else:
                    self.makedir(parentdir, recursive,allow_recreate)
                    parent_id = self._get_dir_id(parentdir)
            self._makedir(parent_id,dname)
        else:
            raise DestinationExistsError(path)
Code example #50
File: dropboxfs.py (project: SCOAP3/invenio)
 def children(self, path):
     """Get children of a given path."""
     update = False
     hash_ = None
     item = self.cache.get(path)
     if item:
         if item.expired:
             update = True
         if item.metadata and item.children:
             hash_ = item.metadata['hash']
         else:
             if not item.metadata.get('is_dir'):
                 raise ResourceInvalidError(path)
         if not item.children:
             update = True
     else:
         update = True
     if update:
         try:
             metadata = super(
                 DropboxClient, self).metadata(
                 path, hash=hash_, include_deleted=False, list=True)
             children = []
             contents = metadata.pop('contents')
             for child in contents:
                 if child.get('is_deleted', False):
                     continue
                 children.append(basename(child['path']))
                 self.cache[child['path']] = CacheItem(child)
             item = self.cache[path] = CacheItem(metadata, children)
         except rest.ErrorResponse as e:
             if not item or e.status != 304:
                 raise OperationFailedError(opname='metadata', path=path,
                                            msg=str(e))
             # We have an item from cache (perhaps expired), but its
             # hash is still valid (as far as Dropbox is concerned),
             # so just renew it and keep using it.
             item.renew()
         except:
             raise RemoteConnectionError(
                 "Most probable reasons: access token has expired or user"
                 " credentials are invalid.")
     return item.children
Code example #51
    def setcontents(self, path, data='', encoding=None,
                    errors=None, chunk_size=65536):
        if self.isdir(path):
            raise ResourceInvalidError(path)

        if hasattr(data, 'read'):
            data = data.read()

        if self.isfile(path):
            fh = self.client.CreateFile({'id': self._ids[path]})
            fh.SetContentString(data)
            fh.Upload()
        else:
            parent_path = self._ids[dirname(path)]
            fh = self.client.CreateFile({'title': basename(path),
                                         'parents': [{'id': parent_path}]})
            fh.SetContentString(data)
            fh.Upload()
            self._ids[path] = fh['id']
Code example #52
File: sqlitefs.py (project: Liryna/pyfilesystem)
 def _get_file_info(self, path):
     filedir = dirname(path)
     filename = basename(path)
     dirid = self._get_dir_id(filedir)
     assert(dirid is not None)
     fileid = self._get_file_id(dirid, filename)
     assert(fileid is not None)
     contentid = self._get_file_contentid(fileid)
     assert(contentid is not None)
     self._querycur.execute('SELECT author, size, created, last_modified, last_accessed \
                     FROM FsFileTable where rowid=?',(contentid,))
     row = fetchone(self._querycur)
     assert(row != None)
     info = dict()
     info['author'] = row[0]
     info['size'] = row[1]
     info['created'] = row[2]
     info['last_modified'] = row[3]
     info['last_accessed'] = row[4]
     info['st_mode'] = 0666
     return(info)
Code example #53
File: __init__.py (project: anthonybishopric/pyboxfs)
    def rename(self, src, dst):
        if not src:
            raise PathError(src)

        src = normpath(src)
        item = self._get_item_by_path(src)
        if not item:
            raise ResourceNotFoundError(src)

        dst = normpath(dst)
        new_name = basename(dst)

        if item['type'] == _ITEM_TYPE_FILE:
            resource_name = 'files'
        else:
            resource_name = 'folders'
        self._api_request(
            'PUT',
            '{}/{}'.format(resource_name, item['id']),
            data={'name': new_name},
        )
Code example #54
File: sqlitefs.py (project: Liryna/pyfilesystem)
    def open(self, path, mode='r', **kwargs):
        self._initdb()
        path = normpath(path)
        filedir = dirname(path)
        filename = basename(path)

        dir_id = self._get_dir_id(filedir)
        if( dir_id == None):
            raise ResourceNotFoundError(filedir)

        file_id = self._get_file_id(dir_id, filename)
        if( self._islocked(file_id)):
                raise ResourceLockedError(path)

        sqfsfile=None
        if 'r' in mode:
            if file_id is None:
                raise ResourceNotFoundError(path)
            content_id = self._get_file_contentid(file_id)
            #make sure lock status is updated before the blob is opened
            self._lockfileentry(content_id, lock=True)
            blob_stream=self.dbcon.blobopen("main", "FsFileTable", "contents", file_id, False) # False opens the blob read-only
            sqfsfile = SqliteReadableFile(self, path, content_id, blob_stream)

        elif 'w' in mode or 'a' in mode:
            if( file_id is None):
                file_id= self._create_file_entry(dir_id, filename)
                assert(file_id != None)

            content_id = self._get_file_contentid(file_id)
            #file_dir_entry.accessed_time = datetime.datetime.now()
            self._lockfileentry(content_id, lock=True)
            sqfsfile = SqliteWritableFile(self, path, content_id)

        if( sqfsfile):
            self.open_files.append(sqfsfile)
            return sqfsfile

        raise ResourceNotFoundError(path)
Code example #55
File: reader.py (project: chrmorais/moya)
    def read(self, path, app=None, mime_type=None):
        """Read a file"""
        if not path.startswith('/'):
            if app is None:
                raise RelativePathError("Can't use relative data paths with an application")
            path = pathjoin(app.data_directory, path)

        filename = basename(path)
        if mime_type is None:
            mime_type, encoding = mimetypes.guess_type(filename)

        _type, sub_type = mime_type.split('/', 1)
        try:
            if mime_type == "text/plain":
                data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
            elif mime_type == "application/json":
                with self.fs.open(path, 'rt', encoding="utf-8") as f:
                    data = json.load(f)
            elif mime_type == "application/octet-stream":
                data = self.fs.getcontents(path, mode="rb")

            elif _type == "text":
                data = self.fs.getcontents(path, mode="rt", encoding="utf-8")

            else:
                raise UnknownFormat("Moya doesn't know how to read file '{}' (in {!r})".format(path, self.fs))
        except FSError as e:
            from .logic import MoyaException
            info = {
                "path": path,
                "mime_type": mime_type
            }
            raise MoyaException("data.read-fail",
                                "unable to read data from {path} ({e})".format(path=path, e=e),
                                diagnosis="check the data exists with **moya fs data --tree**",
                                info=info)

        return data
Code example #56
    def makedir(self, path, recursive=False, allow_recreate=False):
        """Creates a file with mimeType _folder_mimetype
        which acts as a folder in GoogleDrive."""
        if self.isdir(path):
            if allow_recreate:
                return
            else:
                raise DestinationExistsError(path)
        if self.isfile(path):
            raise ResourceInvalidError(path)
        if not recursive and not self.isdir(dirname(path)):
            raise ParentDirectoryMissingError(path)

        if recursive:
            self.makedir(dirname(path), recursive=recursive,
                         allow_recreate=True)

        parent_id = self._ids[dirname(path)]
        fh = self.client.CreateFile({'title': basename(path),
                                     'mimeType': self._folder_mimetype,
                                     'parents': [{'id': parent_id}]})
        fh.Upload()
        self._ids[path] = fh['id']
Code example #57
File: sqlitefs.py (project: Liryna/pyfilesystem)
    def remove(self, path):
        self._initdb()
        path = normpath(path)
        if( self.isdir(path)==True):
            #path is actually a directory
            raise ResourceInvalidError(path)

        filedir = dirname(path)
        filename = basename(path)
        dirid = self._get_dir_id(filedir)
        fileid = self._get_file_id(dirid, filename)
        if( fileid == None):
            raise ResourceNotFoundError(path)

        content_id = self._get_file_contentid(fileid)

        self._updatecur.execute("DELETE FROM FsFileMetaData where ROWID=?",(fileid,))
        #Check whether any other file points to the same location. If not,
        #delete the content as well.
        self._querycur.execute('SELECT count(*) FROM FsFileMetaData where fileid=?',
                    (content_id,))
        row = fetchone(self._querycur)
        if( row == None or row[0] == 0):
            self._updatecur.execute("DELETE FROM FsFileTable where ROWID=?",(content_id,))
Code example #58
File: xrdfile.py (project: jirikuncar/xrootdpyfs)
 def name(self):
     """Get filename."""
     return basename(self.path)
Code example #59
File: modifiers.py (project: pombreda/moya)
 def basename(self, context, v):
     return basename(v)
Code example #60
File: build.py (project: datadesk/django-bakery)
    def copyfile_and_gzip(self, source_path, target_path):
        """
        Copies the provided file to the provided target directory.

        Gzips JavaScript, CSS and HTML and other files along the way.
        """
        # And then where we want to copy it to.
        target_dir = path.dirname(target_path)
        if not self.fs.exists(target_dir):
            try:
                self.fs.makedirs(target_dir)
            except OSError:
                pass

        # determine the mimetype of the file
        guess = mimetypes.guess_type(source_path)
        content_type = guess[0]
        encoding = guess[1]

        # If it isn't a file want to gzip...
        if content_type not in self.gzip_file_match:
            # just copy it to the target.
            logger.debug("Copying {}{} to {}{} because its filetype isn't on the whitelist".format(
                "osfs://",
                source_path,
                self.fs_name,
                target_path
            ))
            copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))

        # If the file is already gzipped
        elif encoding == 'gzip':
            logger.debug("Copying {}{} to {}{} because it's already gzipped".format(
                "osfs://",
                source_path,
                self.fs_name,
                target_path
            ))
            copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))

        # If it is one we want to gzip...
        else:
            # ... let the world know ...
            logger.debug("Gzipping {}{} to {}{}".format(
                "osfs://",
                source_path,
                self.fs_name,
                target_path
            ))
            # Open up the source file from the OS
            with open(source_path, 'rb') as source_file:
                # Write GZIP data to an in-memory buffer
                data_buffer = six.BytesIO()
                kwargs = dict(
                    filename=path.basename(target_path),
                    mode='wb',
                    fileobj=data_buffer
                )
                if float(sys.version[:3]) >= 2.7:
                    kwargs['mtime'] = 0
                with gzip.GzipFile(**kwargs) as f:
                    f.write(six.binary_type(source_file.read()))

                # Write that buffer out to the filesystem
                with self.fs.open(smart_text(target_path), 'wb') as outfile:
                    outfile.write(data_buffer.getvalue())
                    outfile.close()