def test_setcontents_async(self):
    """setcontents_async() should accept both strings and file-like
    objects, honouring the default and an explicit chunk_size."""
    cases = [
        ("world", "world", {}),
        (StringIO("to you, good sir!"), "to you, good sir!", {}),
        ("world", "world", {"chunk_size": 2}),
        (StringIO("to you, good sir!"), "to you, good sir!", {"chunk_size": 2}),
    ]
    for source, expected, kwargs in cases:
        # wait() blocks until the background write has completed.
        self.fs.setcontents_async("hello", source, **kwargs).wait()
        self.assertEquals(self.fs.getcontents("hello"), expected)
def xml_to_tree(path, data):
    """Decode the base64/zip-packed XML listing in data['tree'] and
    return a dict mapping absolute paths to info dicts."""
    tree = {}

    def node_info(parent, node):
        # Normalise the raw XML attributes into our info-dict layout.
        info = dict(node.attrib.items())
        try:
            info['name'] = info.pop('file_name')
        except KeyError:
            # No file_name attribute means this node is a folder.
            info['is_dir'] = True
        info['path'] = abspath(pathjoin(parent, info['name']))
        info['inode'] = info.pop('id')
        return info

    def walk(parent, node):
        # Folders recurse into their children; files are leaves.
        if hasattr(node, 'folders'):
            for child in node.folders[0].folder:
                info = node_info(parent, child)
                tree[info['path']] = info
                walk(info['path'], child)
        if hasattr(node, 'files'):
            for child in node.files[0].file:
                info = node_info(parent, child)
                tree[info['path']] = info

    # Base64 decode, then unzip and parse the XML payload.
    packed = StringIO(base64.b64decode(data['tree']))
    with zipfile.ZipFile(packed) as archive:
        document = archive.read(archive.infolist()[0])
    walk(path, boxdotnet.XMLNode.parseXML(document))
    return tree
def open(self, path, mode='r', **kwargs):
    """Open a remote file for reading over HTTP.

    Only read modes are supported; the HTTP backend cannot write.
    Transient network errors are retried up to three times.

    :raises UnsupportedError: if a write/append mode is requested
    :raises ResourceNotFoundError: on HTTP 404
    :raises RemoteConnectionError: if every attempt fails, or an
        unexpected status code is returned
    """
    if 'w' in mode or '+' in mode or 'a' in mode:
        logging.error('cannot use httpfs.open() in write mode: %s' % path)
        raise UnsupportedError('open', path=path)
    url = self._build_url(path)
    for attempt in (1, 2, 3):
        try:
            response = requests.get(url)
        except requests.RequestException as error:
            logging.warning('open attempt %d: %s %s' % (attempt, url, error))
        else:
            break
    else:
        # BUG FIX: this previously reported the failed operation as
        # 'getinfo' -- a copy/paste slip; the operation here is 'open'.
        raise RemoteConnectionError('open', path=path)
    if response.status_code == 200:
        return StringIO(response.content)
    elif response.status_code == 404:
        raise ResourceNotFoundError(path)
    else:
        # Anything other than 200/404 is treated as a transport-level
        # failure rather than a filesystem condition.
        logging.warning(
            'open status %d for %s assumed as connection error.'
            % (response.status_code, url))
        raise RemoteConnectionError('open', path=path)
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None,
         newline=None, line_buffering=False, **kwargs):
    """Open a remote file, buffering its contents locally.

    Returns a RemoteFileBuffer wrapping the (possibly pre-fetched)
    file contents.

    :raises ResourceInvalidError: if path is a directory
    :raises ResourceNotFoundError: when reading a missing file
    :raises ParentDirectoryMissingError: parent dir does not exist
    """
    if self.isdir(path):
        raise ResourceInvalidError(path)
    if 'r' in mode and not self.isfile(path):
        raise ResourceNotFoundError(path)
    # The parent directory must exist for every mode.  (BUG FIX: a
    # second, 'w'-only copy of this check was removed -- it was fully
    # subsumed by this unconditional one.)
    if not self.isdir(dirname(path)):
        raise ParentDirectoryMissingError(path)
    # Plain 'w' truncates: drop any existing file before writing.
    if 'w' in mode and '+' not in mode and self.isfile(path):
        self.remove(path)
    data = ''
    if 'r' in mode:
        data = self.getcontents(path, mode=mode, encoding=encoding,
                                errors=errors, newline=newline)
    rfile = StringIO(data=data, mode=mode)
    return RemoteFileBuffer(self, path, mode=mode, rfile=rfile)
def test_setcontents(self):
    """setcontents() accepts strings and file-like objects, with the
    default and an explicit chunk size."""
    text = b("to you, good sir!")
    for kwargs in ({}, {"chunk_size": 2}):
        # setcontents() should accept a plain byte string...
        self.fs.setcontents("hello", b("world"), **kwargs)
        self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
        # ...and a file-like object
        self.fs.setcontents("hello", StringIO(text), **kwargs)
        self.assertEquals(self.fs.getcontents("hello", "rb"), text)
def upload(self, path, f):
    """Upload a file to box.net via the multipart upload endpoint.

    :param path: destination path on the remote filesystem
    :param f: a string with the file contents, or a read()-able object
    :raises Exception: if the payload length cannot be determined, the
        target directory does not exist, or the server rejects the
        upload
    """
    # BUG FIX: removed a leftover "import pdb; pdb.set_trace()"
    # debugging breakpoint that froze every upload.
    if isinstance(f, basestring):
        # Upload the given string as the file's contents.
        f = StringIO(f)
    # Determine payload length: try len(), then fstat(), then seeking.
    # (Bare excepts narrowed to Exception so Ctrl-C still works.)
    try:
        l = len(f)
    except Exception:
        try:
            l = os.fstat(f.fileno()).st_size
        except Exception:
            try:
                f.seek(0, 2)
                l = f.tell()
                f.seek(0)
            except Exception:
                raise Exception('Could not determine length of file!')
    dirname, basename = pathsplit(path)
    # Uploads address the inode of the file itself (overwrite) or of
    # its parent directory (create).
    try:
        info = self.info(path)
    except Exception:
        try:
            info = self.info(dirname)
        except Exception:
            raise Exception('Cannot upload to non-existent directory!')
    url = '%s/%s/%s' % (ULURL, self.auth_token, info['inode'])
    host = urlparse.urlparse(url).hostname
    # NOTE(review): HTTPConnection on port 443 speaks plain HTTP over
    # the TLS port -- confirm whether HTTPSConnection was intended.
    conn = httplib.HTTPConnection(host, 443)
    boundary = mimetools.choose_boundary()
    fields = {
        'boundary': boundary,
        'mime': mimetypes.guess_type(basename)[0] or 'application/octet-stream',
        'name': basename,
    }
    head = MULTIPART_HEAD % fields
    tail = MULTIPART_TAIL % fields
    # The advertised length must include the multipart envelope.
    l += len(head) + len(tail)
    headers = {
        'Content-Length': l,
        'Content-Type': 'multipart/form-data; boundary=%s' % boundary,
    }
    conn.request('POST', url, '', headers)
    # Now stream the file to box.net in 4 KiB chunks.
    conn.send(head)
    while True:
        data = f.read(4096)
        if not data:
            break
        conn.send(data)
    conn.send(tail)
    r = conn.getresponse()
    if r.status != 200:
        raise Exception('Error uploading data!')
def __setstate__(self, state):
    """Restore pickled state, recreating the unpicklable members.

    Locks and file objects cannot survive pickling, so the lock is
    rebuilt for file entries and the raw contents (stored as a string
    in the pickle) are rewrapped in a fresh in-memory file.
    """
    self.__dict__.update(state)
    self.lock = threading.RLock() if self.type == 'file' else None
    data = self.mem_file
    if data is not None:
        buf = StringIO()
        buf.write(data)
        self.mem_file = buf
def open(self, path, mode="r"):
    """Open a file on the remote XML-RPC server as an in-memory file.

    The entire file is fetched eagerly into a StringIO; its flush,
    close and truncate methods are monkey-patched so that changes are
    pushed back to the server under self._lock.
    """
    # TODO: chunked transport of large files
    path = self.encode_path(path)
    if "w" in mode:
        # Plain write mode truncates the remote file immediately.
        self.proxy.set_contents(path, xmlrpclib.Binary(b("")))
    if "r" in mode or "a" in mode or "+" in mode:
        try:
            data = self.proxy.get_contents(path, "rb").data
        except IOError:
            if "w" not in mode and "a" not in mode:
                raise ResourceNotFoundError(path)
            if not self.isdir(dirname(path)):
                raise ParentDirectoryMissingError(path)
            # Create the missing file so later flushes have a target.
            # NOTE(review): on this recovery path 'data' is never
            # assigned, so StringIO(data) below would raise NameError
            # for 'a'/'+' modes -- upstream versions add data = b("")
            # here; confirm.
            self.proxy.set_contents(path, xmlrpclib.Binary(b("")))
    else:
        data = b("")
    f = StringIO(data)
    if "a" not in mode:
        # Read/write modes start at the beginning...
        f.seek(0, 0)
    else:
        # ...append mode starts at the end.
        f.seek(0, 2)
    # Keep the original methods so the patched versions can chain.
    oldflush = f.flush
    oldclose = f.close
    oldtruncate = f.truncate
    def newflush():
        # Push the current buffer contents back to the server.
        self._lock.acquire()
        try:
            oldflush()
            self.proxy.set_contents(path, xmlrpclib.Binary(f.getvalue()))
        finally:
            self._lock.release()
    def newclose():
        # Flush (and therefore upload) before closing the buffer.
        self._lock.acquire()
        try:
            f.flush()
            oldclose()
        finally:
            self._lock.release()
    def newtruncate(size=None):
        # Truncate locally, then re-upload via the patched flush.
        self._lock.acquire()
        try:
            oldtruncate(size)
            f.flush()
        finally:
            self._lock.release()
    f.flush = newflush
    f.close = newclose
    f.truncate = newtruncate
    return f
def setcontents(self, path, data, chunk_size=1024 * 64):
    """Set the contents of path from data.

    Byte strings are written directly into the entry's in-memory
    file; anything else is delegated to the base implementation,
    which copies in chunk_size pieces.

    :raises ResourceInvalidError: if path exists but is not a file
    """
    if not isinstance(data, six.binary_type):
        return super(MemoryFS, self).setcontents(path, data, chunk_size)
    if not self.exists(path):
        # Create an empty entry so there is a dir_entry to fill in.
        self.open(path, 'wb').close()
    dir_entry = self._get_dir_entry(path)
    if not dir_entry.isfile():
        # BUG FIX: the message previously read 'Not a directory', but
        # this branch fires when the entry is *not* a file.
        raise ResourceInvalidError('Not a file %(path)s', path)
    # Replace the backing file wholesale rather than truncating.
    new_mem_file = StringIO()
    new_mem_file.write(data)
    dir_entry.mem_file = new_mem_file
def open(self, path, mode='r', **kwargs):
    """Open a remote file, returning a RemoteFileBuffer.

    Write mode erases the remote contents immediately; any other mode
    fetches the current contents into a local spooled temp file.
    """
    if self.isdir(path):
        raise ResourceInvalidError(path)
    if 'w' in mode:
        # Erase the contents of the file upon write; no local copy is
        # needed, the buffer starts empty.
        self.setcontents(path, StringIO())
        return RemoteFileBuffer(self, path, mode, None)
    # Reading: pull the current remote contents down first.
    spooled = SpooledTemporaryFile()
    self._retrieveFile(path, spooled)
    return RemoteFileBuffer(self, path, mode, spooled)
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None,
         newline=None, line_buffering=False, **kwargs):
    """Open a file inside the zip archive.

    Reading requires the zip to have been opened in 'r' or 'a' mode;
    writing buffers the data in a temp filesystem and copies it into
    the archive when the returned file is closed.

    :raises OperationFailedError: requested mode conflicts with the
        mode the zip itself was opened in
    :raises ResourceNotFoundError: when reading a missing member
    :raises ValueError: if mode contains neither 'r' nor 'w'
    """
    path = normpath(relpath(path))
    if 'r' in mode:
        if self.zip_mode not in 'ra':
            # BUG FIX: dropped a stray "1 " debug prefix from the msg.
            raise OperationFailedError(
                "open file",
                path=path,
                msg="Zip file must be opened for reading ('r') or appending ('a')")
        try:
            if hasattr(self.zf, 'open') and self._zip_file_string:
                # ZipFile.open streams the member; 'U' requests
                # universal-newline translation.
                return self.zf.open(self._encode_path(path),
                                    'rU' if 'U' in mode else 'r')
            else:
                contents = self.zf.read(self._encode_path(path))
        except KeyError:
            raise ResourceNotFoundError(path)
        return StringIO(contents)
    if 'w' in mode:
        if self.zip_mode not in 'wa':
            # BUG FIX: dropped a stray "2 " debug prefix from the msg.
            raise OperationFailedError(
                "open file",
                path=path,
                msg="Zip file must be opened for writing ('w') or appending ('a')")
        dirname, _filename = pathsplit(path)
        if dirname:
            self.temp_fs.makedir(dirname, recursive=True, allow_recreate=True)
        self._add_resource(path)
        # Writes buffer in temp_fs; _on_write_close copies the file
        # into the archive when it is closed.
        return _TempWriteFile(self.temp_fs, path, self._on_write_close)
    # BUG FIX: grammar -- was "Mode must contain be 'r' or 'w'".
    raise ValueError("Mode must contain 'r' or 'w'")
def setcontents(self, path, data=b'', encoding=None, errors=None,
                chunk_size=1024 * 64):
    """Write data (a byte string or a read()-able object) to path.

    pysmb provides no way to truncate a file while writing, so any
    existing file is removed first; a missing file is not an error.
    """
    try:
        self.remove(path)
    except ResourceNotFoundError:
        pass
    source = data if hasattr(data, 'read') else StringIO(data)
    self.conn.storeFile(self.share, path, source)
def __init__(self, fs, path, mode, rfile=None, write_on_flush=True):
    """RemoteFileBuffer constructor.

    The owning filesystem, path and mode must be provided.  If the
    optional argument 'rfile' is provided, it must be a read()-able
    object or a string containing the initial file contents.
    """
    # Local copy of the remote data; spills from memory to disk once
    # it grows past max_size_in_memory (a class-level attribute).
    wrapped_file = SpooledTemporaryFile(max_size=self.max_size_in_memory)
    self.fs = fs
    self.path = path
    self.write_on_flush = write_on_flush
    self._changed = False
    self._readlen = 0   # How many bytes already loaded from rfile
    self._rfile = None  # Reference to remote file object
    self._eof = False   # Reached end of rfile?
    # Mirror the owning filesystem's lock class when it has one, so
    # this buffer uses the same locking flavour.
    if getattr(fs, "_lock", None) is not None:
        self._lock = fs._lock.__class__()
    else:
        self._lock = threading.RLock()
    if "r" in mode or "+" in mode or "a" in mode:
        if rfile is None:
            # File was just created, force to write anything
            self._changed = True
            self._eof = True
        if not hasattr(rfile, "read"):
            # Wrap a raw string in a file-like object.
            # NOTE(review): when rfile is None this becomes
            # StringIO(None) -- relies on StringIO treating None as
            # empty contents; confirm.
            #rfile = StringIO(unicode(rfile))
            rfile = StringIO(rfile)
        self._rfile = rfile
    else:
        # Write-only mode: remote contents are irrelevant, so do not
        # keep the remote file object around.
        self._eof = True
        self._rfile = None
        self._changed = True
        if rfile is not None and hasattr(rfile, "close"):
            rfile.close()
    super(RemoteFileBuffer, self).__init__(wrapped_file, mode)
    # FIXME: What if mode with position on eof?
    if "a" in mode:
        # Not good enough...
        self.seek(0, SEEK_END)
def __init__(self, type, name, contents=None):
    """Create a directory or file entry.

    type must be "dir" or "file".  Directory entries get a contents
    mapping; file entries get an in-memory data file plus an RLock
    serialising access to it.
    """
    assert type in ("dir", "file"), "Type must be dir or file!"
    self.type = type
    self.name = name
    self.contents = {} if (contents is None and type == "dir") else contents
    self.open_files = []
    # All three timestamps start out identical at creation time.
    now = datetime.datetime.now()
    self.created_time = now
    self.modified_time = now
    self.accessed_time = now
    self.xattrs = {}
    if type == 'file':
        self.mem_file = StringIO()
        self.lock = threading.RLock()
    else:
        self.mem_file = None
        self.lock = None
def setcontents(self, path, data=b'', encoding=None, errors=None,
                chunk_size=1024 * 64, **kwargs):
    """Set the contents of path.

    Byte strings are written straight into the entry's in-memory file
    and the number of bytes written is returned; any other data type
    is delegated to the base implementation.

    :raises ResourceInvalidError: if path exists but is not a file
    """
    if isinstance(data, six.binary_type):
        if not self.exists(path):
            # Create an empty entry so there is a dir_entry to fill.
            self.open(path, 'wb').close()
        dir_entry = self._get_dir_entry(path)
        if not dir_entry.isfile():
            # BUG FIX: message previously said 'Not a directory' even
            # though this branch means the entry is not a *file*.
            raise ResourceInvalidError('Not a file %(path)s', path)
        # Replace the backing file wholesale rather than truncating.
        new_mem_file = StringIO()
        new_mem_file.write(data)
        dir_entry.mem_file = new_mem_file
        return len(data)
    return super(MemoryFS, self).setcontents(path, data=data,
                                             encoding=encoding, errors=errors,
                                             chunk_size=chunk_size, **kwargs)
def decompress(self, g, wrapAsFile=True):
    """Decompress a stream in what appears to be EA's RefPack/QFS
    format (magic 0x10FB variants) read from file object *g*.

    Reads the 2-byte magic, the 3- or 4-byte uncompressed size, then
    interprets literal-copy / lookback opcodes until the terminator.

    :param g: a read()-able binary stream positioned at the header
    :param wrapAsFile: if True return a StringIO, else a raw string
    :returns: the decompressed payload; NOTE(review): if the magic
        check fails the function falls through and returns None --
        confirm callers handle that.
    """
    buf = g.read(2)
    magic = unpack(">H", buf)[0]
    if (magic & 0x3EFF) == 0x10FB:
        # it is compressed
        if magic & 0x8000:
            # bit15 set: sizes are stored as full 4-byte big-endian ints
            outputSize = unpack(">I", g.read(4))[0]
            if magic & 0x100:
                unknown1 = unpack(">I", g.read(4))[0]
        else:
            # sizes stored in 3 bytes; zero-pad to 4 so ">I" unpacks
            outputSize = unpack(">I", "\0" + g.read(3))[0]
            if magic & 0x100:
                unknown1 = unpack(">I", "\0" + g.read(3))[0]
        output = []
        while True:
            opcode = unpack("B", g.read(1))[0]
            if not (opcode & 0x80):
                # opcode: bit7==0 to get here
                # read second opcode
                opcode2 = unpack("B", g.read(1))[0]
                #print "0x80", toBits(opcode), toBits(opcode2), opcode & 0x03, (((opcode & 0x60) << 3) | opcode2) + Q, ((opcode & 0x1C) >> 2) + 2 + R
                # copy at most 3 bytes to output stream (lowest 2 bits of opcode)
                count = opcode & 0x03
                for i in range(count):
                    output.append(g.read(1))
                # you always have to look at least one byte, hence the +1
                # use bit6 and bit5 (bit7=0 to trigger the if-statement) of opcode, and 8 bits of opcode2 (10-bits)
                lookback = (((opcode & 0x60) << 3) | opcode2) + 1
                # use bit4..2 of opcode
                count = ((opcode & 0x1C) >> 2) + 3
                for i in range(count):
                    output.append(output[-lookback])
            elif not (opcode & 0x40):
                # opcode: bit7..6==10 to get here
                opcode2 = unpack("B", g.read(1))[0]
                opcode3 = unpack("B", g.read(1))[0]
                #print "0x40", toBits(opcode), toBits(opcode2), toBits(opcode3)
                # copy count bytes (upper 2 bits of opcode2)
                count = opcode2 >> 6
                for i in range(count):
                    output.append(g.read(1))
                # look back again (lower 6 bits of opcode2, all 8 bits of opcode3, total 14-bits)
                lookback = (((opcode2 & 0x3F) << 8) | opcode3) + 1
                # lower 6 bits of opcode are the count to copy
                count = (opcode & 0x3F) + 4
                for i in range(count):
                    output.append(output[-lookback])
            elif not (opcode & 0x20):
                # opcode: bit7..5=110 to get here
                opcode2 = unpack("B", g.read(1))[0]
                opcode3 = unpack("B", g.read(1))[0]
                opcode4 = unpack("B", g.read(1))[0]
                # copy at most 3 bytes to output stream (lowest 2 bits of opcode)
                count = opcode & 0x03
                for i in range(count):
                    output.append(g.read(1))
                # look back: bit4 of opcode, all bits of opcode2 and opcode3, total 17-bits
                # NOTE(review): '+' binds tighter than '|', so this
                # evaluates as ... | (opcode3 + 1) rather than
                # (... | opcode3) + 1 -- the lookback may be wrong for
                # some opcode3 values; verify against a reference
                # RefPack implementation.
                lookback = (((opcode & 0x10) >> 4) << 16) | (
                    opcode2 << 8) | (opcode3) + 1
                # bit3..2 of opcode and the whole of opcode4
                count = (((((opcode & 0x0C) >> 2) << 8)) | opcode4) + 5
                #print "0x20", toBits(opcode), toBits(opcode2), toBits(opcode3), toBits(opcode4), lookback, count
                for i in range(count):
                    output.append(output[-lookback])
            else:
                # opcode: bit7..5==1 to get here
                # use lowest 5 bits for count
                count = ((opcode & 0x1F) << 2) + 4
                if count > 0x70:
                    # this is end of input
                    # turn into a small-copy
                    count = opcode & 0x03
                    #print "0xEXITCOPY", count
                    for i in range(count):
                        output.append(g.read(1))
                    break
                # "big copy" operation: up to 112 bytes (minumum of 4, multiple of 4)
                for i in range(count):
                    output.append(g.read(1))
                #print "0xLO", toBits(opcode), count
        if wrapAsFile:
            return StringIO("".join(output))
        else:
            return "".join(output)
import threading import paramiko from fs.base import flags_to_mode from fs.path import * from fs.errors import * from fs.local_functools import wraps from fs.filelike import StringIO from fs.utils import isdir # Default host key used by BaseSFTPServer # DEFAULT_HOST_KEY = paramiko.RSAKey.from_private_key( StringIO( "-----BEGIN RSA PRIVATE KEY-----\nMIICXgIBAAKCAIEAl7sAF0x2O/HwLhG68b1uG8KHSOTqe3Cdlj5i/1RhO7E2BJ4B\n3jhKYDYtupRnMFbpu7fb21A24w3Y3W5gXzywBxR6dP2HgiSDVecoDg2uSYPjnlDk\nHrRuviSBG3XpJ/awn1DObxRIvJP4/sCqcMY8Ro/3qfmid5WmMpdCZ3EBeC0CAwEA\nAQKCAIBSGefUs5UOnr190C49/GiGMN6PPP78SFWdJKjgzEHI0P0PxofwPLlSEj7w\nRLkJWR4kazpWE7N/bNC6EK2pGueMN9Ag2GxdIRC5r1y8pdYbAkuFFwq9Tqa6j5B0\nGkkwEhrcFNBGx8UfzHESXe/uE16F+e8l6xBMcXLMJVo9Xjui6QJBAL9MsJEx93iO\nzwjoRpSNzWyZFhiHbcGJ0NahWzc3wASRU6L9M3JZ1VkabRuWwKNuEzEHNK8cLbRl\nTyH0mceWXcsCQQDLDEuWcOeoDteEpNhVJFkXJJfwZ4Rlxu42MDsQQ/paJCjt2ONU\nWBn/P6iYDTvxrt/8+CtLfYc+QQkrTnKn3cLnAkEAk3ixXR0h46Rj4j/9uSOfyyow\nqHQunlZ50hvNz8GAm4TU7v82m96449nFZtFObC69SLx/VsboTPsUh96idgRrBQJA\nQBfGeFt1VGAy+YTLYLzTfnGnoFQcv7+2i9ZXnn/Gs9N8M+/lekdBFYgzoKN0y4pG\n2+Q+Tlr2aNlAmrHtkT13+wJAJVgZATPI5X3UO0Wdf24f/w9+OY+QxKGl86tTQXzE\n4bwvYtUGufMIHiNeWP66i6fYCucXCMYtx6Xgu2hpdZZpFw==\n-----END RSA PRIVATE KEY-----\n" )) def report_sftp_errors(func): """Decorator to catch and report FS errors as SFTP error codes. Any FSError exceptions are caught and translated into an appropriate return code, while other exceptions are passed through untouched. """ @wraps(func) def wrapper(*args, **kwds): try: return func(*args, **kwds) except ResourceNotFoundError, e: return paramiko.SFTP_NO_SUCH_FILE