def truncate(self, path, length, fh=None):
    '''
    Truncate or extend the given file so that it is precisely length bytes
    long. See truncate(2) for details. This call is required for read/write
    filesystems, because recreating a file will first truncate it.
    '''
    # length must be non-negative
    if length < 0:
        raise FuseOSError(errno.EINVAL)
    try:
        orig_path = path
        path = path.lstrip('/')
        directory, filename = self._get_separated_path(path)
        with self.file_cache[orig_path].write_lock:
            self._files_service.resize_file(
                self._azure_file_share_name, directory, filename, length)
            self.file_cache[orig_path].max_size = length

            # Keep the cached directory listing in sync with the new length.
            cached = self._get_cached_dir(directory, False)
            if cached is not None:
                file = cached.get(filename)
                if file is not None:
                    file.properties.content_length = length
                else:
                    props = models.FileProperties()
                    props.content_length = length
                    cached[filename] = models.File(filename, None, props)
    except Exception as e:
        logger.exception(
            "truncate operation exception: path:%r length:%d fh:%r e:%s",
            path, length, fh, e)
        raise e
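# Illustrative sketch (not from the original sources): truncate() above keeps
# the cached directory listing in sync by storing a models.File entry built
# from a models.FileProperties object. This minimal standalone example shows
# that pattern; the file name and length are invented for demonstration.
from azure.storage.file import models as _models

_props = _models.FileProperties()
_props.content_length = 1024
_entry = _models.File("example.txt", None, _props)  # name, content, properties
assert _entry.properties.content_length == 1024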
def create(self, path, mode):
    '''
    Create a file at the specified path with the specified access mode (chmod).
    TODO: Mode is not respected at this time. Support could be added.
    '''
    path = path.lstrip('/')
    try:
        if not path:
            raise FuseOSError(errno.EINVAL)
        directory, filename = self._get_separated_path(path)
        self._files_service.create_file(
            self._azure_file_share_name, directory, filename, 0)

        # Add the new zero-length file to the cached directory listing.
        cached = self._get_cached_dir(directory, False)
        if cached is not None:
            props = models.FileProperties()
            props.content_length = 0
            cached[filename] = models.File(filename, None, props)
        return 0
    except Exception as e:
        logger.exception(
            "create operation exception: path:%r mode:%s exception:%s",
            path, mode, e)
        raise FuseOSError(ENOENT)
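# Illustrative sketch (not from the original sources): create() and the other
# operations rely on a private helper, _get_separated_path(), whose
# implementation is not shown here. Judging from its call sites it splits a
# share-relative path into (directory, filename); a plausible stand-in using
# posixpath is sketched below. The name split_share_path is hypothetical.
import posixpath

def split_share_path(path):
    directory, filename = posixpath.split(path.lstrip('/'))
    return directory, filename

assert split_share_path('/docs/reports/2020.txt') == ('docs/reports', '2020.txt')
assert split_share_path('readme.md') == ('', 'readme.md')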
def rename(self, old, new):
    """
    Rename a file or directory.
    TODO: Currently this implementation does not support renaming
    directories. Support needed.
    """
    try:
        old_orig_path = old
        old_path = old.strip('/')
        new_path = new.strip('/')

        if new_path == old_path:
            # A file already exists at this path; renaming would cause a
            # name collision.
            raise FuseOSError(errno.EALREADY)

        if new_path.lower() == old_path.lower():
            # Azure Files is case insensitive, but case preserving. Do the
            # rename by moving to an intermediate file so we can create a
            # file with different casing.
            temporary_path = "{}-rename-{}".format(old, uuid.uuid4())
            self.rename(old, temporary_path)
            self.rename(temporary_path, new)
            return

        with self.file_cache[old_orig_path].write_lock:
            new_length = self._rename(
                old_path, new_path, self._discover_item_type(old_path))
            self.file_cache[old_orig_path].max_size = 0
            if new_length is None:
                self._clear_dir_cache(
                    self._get_separated_path(old_path)[0], 'rename old')
                self._clear_dir_cache(
                    self._get_separated_path(new_path)[0], 'rename new')
            else:
                directory, filename = self._get_separated_path(old_path)
                cached = self._get_cached_dir(directory, False)
                if cached is not None:
                    with contextlib.suppress(KeyError):
                        del cached[filename]
                directory, filename = self._get_separated_path(new_path)
                cached = self._get_cached_dir(directory, False)
                if cached is not None:
                    with contextlib.suppress(KeyError):
                        if new_length is None:
                            cached[filename] = models.Directory(filename)
                        else:
                            props = models.FileProperties()
                            props.content_length = new_length
                            cached[filename] = models.File(
                                filename, None, props)
        return 0
    except Exception as e:
        logger.exception(
            "rename operation exception: old:%r new:%r exception:%s",
            old, new, e)
        raise e
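# Illustrative sketch (not from the original sources): a case-only rename on
# Azure Files has to pass through an intermediate name, because the service is
# case insensitive and the source and target would otherwise collide. The
# paths below are invented for demonstration; the "{old}-rename-{uuid}" scheme
# mirrors the one used in rename() above.
import uuid

old = '/docs/Report.txt'
new = '/docs/report.txt'
temporary_path = "{}-rename-{}".format(old, uuid.uuid4())
# Step 1: old -> temporary_path (different name, so no collision)
# Step 2: temporary_path -> new (recreates the file with the desired casing)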
def write(self):
    try:
        with self.files.file_cache[self.orig_path].append_write_lock:
            self.processing = True
            with self.files.file_cache[self.orig_path].write_lock:
                max_size = self.files.file_cache[self.orig_path].max_size
                data_length = len(self.data)
                computed_content_length = self.offset + data_length
                if max_size < computed_content_length:
                    # The write extends past the known size; grow the file if
                    # it is not already large enough.
                    f = self.files._files_service.get_file_properties(
                        self.files._azure_file_share_name,
                        self.directory, self.filename)
                    file_length = f.properties.content_length
                    if file_length < computed_content_length:
                        self.files._files_service.resize_file(
                            self.files._azure_file_share_name,
                            self.directory, self.filename,
                            computed_content_length)
                    self.files.file_cache[
                        self.orig_path].max_size = computed_content_length

                    # Keep the cached directory listing in sync.
                    cached = self.files._get_cached_dir(
                        self.directory, False)
                    if cached is not None:
                        file = cached.get(self.filename)
                        if file is not None:
                            logger.debug(
                                "Updating content length to computed length:%s",
                                computed_content_length)
                            file.properties.content_length = computed_content_length
                        else:
                            props = models.FileProperties()
                            props.content_length = computed_content_length
                            logger.debug(
                                "Updating cached content length:%s",
                                props.content_length)
                            cached[self.filename] = models.File(
                                self.filename, None, props)

                # update the range specified by this write.
                # logger.debug('updating %s range %d to %d', self.orig_path,
                #              self.offset, self.offset + data_length - 1)
                self.files._files_service.update_range(
                    self.files._azure_file_share_name, self.directory,
                    self.filename, self.data,
                    start_range=self.offset,
                    end_range=self.offset + data_length - 1)
    except AzureHttpError:
        self.files._prior_write_failure = True
        raise
    except Exception as e:
        logger.warning('error writing %s', str(e))
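# Illustrative sketch (not from the original sources): update_range() above is
# called with an inclusive end offset, so a write of len(data) bytes starting
# at `offset` covers [offset, offset + len(data) - 1]. The numbers here are
# made up to show the arithmetic.
offset = 4096
data = b"hello world"
start_range = offset
end_range = offset + len(data) - 1
assert end_range - start_range + 1 == len(data)  # range length equals the payload size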
def test_model_to_dict():
    """Test airfs.storage.azure._AzureBaseSystem._model_to_dict"""
    from datetime import datetime
    from airfs.storage.azure import _AzureBaseSystem
    from azure.storage.file import models  # type: ignore

    last_modified = datetime.now()
    props = models.FileProperties()
    props.etag = "etag"
    props.last_modified = last_modified
    file = models.File(props=props, metadata=dict(metadata1=0))

    assert _AzureBaseSystem._model_to_dict(file) == dict(
        etag="etag", last_modified=last_modified, metadata=dict(metadata1=0))
def test_model_to_dict():
    """Test pycosio.storage.azure._AzureBaseSystem._model_to_dict"""
    from datetime import datetime
    from pycosio.storage.azure import _AzureBaseSystem
    from azure.storage.file import models

    last_modified = datetime.now()
    props = models.FileProperties()
    props.etag = 'etag'
    props.last_modified = last_modified
    file = models.File(props=props, metadata=dict(metadata1=0))

    print(_AzureBaseSystem._model_to_dict(file))
    assert _AzureBaseSystem._model_to_dict(file) == dict(
        etag='etag', last_modified=last_modified, metadata=dict(metadata1=0))