def __init__(self, filename, capture_diff=False, mode='rU'):
    BytesIO.__init__(self)
    self.name = filename
    self._capture_diff = capture_diff
    self.diff = None
    self.mode = mode
    self.force_update = False
def __init__(self, buf):
    self._progress = 0
    self._len = len(buf)
    self._bar = None
    if self._len > 4096:
        self._bar = progress.Bar(filled_char='=', every=4096)
    BytesIO.__init__(self, buf)
def __init__(self, buf=b'', callback=None, cb_args=(), cb_kwargs={}):
    self._callback = callback
    self._cb_args = cb_args
    self._cb_kwargs = cb_kwargs
    self._progress = 0
    self._len = len(buf)
    BytesIO.__init__(self, buf)
def __init__(self, name, parent=None, mode=0, storage=None, **kwarg):
    BytesIO.__init__(self)
    self.parent = parent or self
    self.storage = storage or parent.storage
    self.children = {}
    # if there is no transaction yet, create a blank one
    if not isinstance(self.cleanup, dict):
        self.cleanup = {}
    self.name = name
    self.type = 0
    self.dev = 0
    self.ctime = self.atime = self.mtime = int(time.time())
    self.uidnum = self.muidnum = os.getuid()
    self.gidnum = os.getgid()
    self.uid = self.muid = pwd.getpwuid(self.uidnum).pw_name
    self.gid = grp.getgrgid(self.gidnum).gr_name
    self.writelock = False
    # callbacks
    self.on_open = kwarg.get('on_open', None)
    self.on_sync = kwarg.get('on_sync', None)
    self.on_commit = kwarg.get('on_commit', None)
    self.on_destroy = kwarg.get('on_destroy', None)
    if not self.mode:
        if mode & stat.S_IFDIR:
            self.mode = stat.S_IFDIR | DEFAULT_DIR_MODE
            self.children["."] = self
            self.children[".."] = self.parent
        elif mode == stat.S_IFLNK:
            self.mode = mode
        else:
            self.mode = stat.S_IFREG | DEFAULT_FILE_MODE
def __init__(self, filename, capture_diff=False, dry_run=False, mode='rU'):
    BytesIO.__init__(self)
    self.name = filename
    self._capture_diff = capture_diff
    self._dry_run = dry_run
    self.diff = None
    self.mode = mode
def __init__(self, data=None, filename=None, big_endian=False):
    if filename is not None:
        with open(filename, 'rb') as file_handle:
            data = file_handle.read()
    BytesIO.__init__(self, data)
    self._types = {k: None for k in _TYPE_FMT}
    self.set_endian(big_endian)
def __init__(self, name):
    """
    @param name: Name of this file.
    @type name: L{str}
    """
    self.name = name
    BytesIO.__init__(self)
def __init__(self, stream, status=200, headers=None):
    self.status = status
    self.headers = headers or {}
    self.reason = requests.status_codes._codes.get(
        status, [''])[0].upper().replace('_', ' ')
    BytesIO.__init__(self, stream)
def __init__(self, *args):
    args_new = []
    for arg in args:
        if not isinstance(arg, (bytes, memoryview)):
            args_new.append(arg.encode())
        else:
            args_new.append(arg)
    BytesIO.__init__(self, *args_new)
def __init__(self, *args):
    args_new = []
    for arg in args:
        if not isinstance(arg, bytes):
            args_new.append(arg.encode())
        else:
            args_new.append(arg)
    BytesIO.__init__(self, *args_new)
def __init__(self, filename, fs, mode):
    BytesIO.__init__(self)
    self._id = None
    self.filename = filename
    self.fs = fs
    self.mode = mode
    self.file_type = "file"  # Set "file" as default file_type
    self.__initialize__()
def __init__(self, max_size=None, buffer=None):
    """
    max_size is the max size of the buffer in bytes
    """
    args = []
    if buffer is not None:
        args.append(buffer)
    BytesIO.__init__(self, *args)
    self.__max_size = max_size
def __init__(self, romFilePath):
    """Loads the ROM file's data."""
    BytesIO.__init__(self)
    v = self.readFromFile(romFilePath)
    if not v:
        return None
    if md5(self.getbuffer()).hexdigest() != MM_MD5:
        return None
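# The constructor above checks the loaded ROM image against a known digest
# (MM_MD5). A minimal, self-contained sketch of that verification pattern,
# with a hypothetical expected digest standing in for MM_MD5:
from hashlib import md5
from io import BytesIO

EXPECTED_MD5 = "d41d8cd98f00b204e9800998ecf8427e"  # md5 of an empty buffer

buf = BytesIO()                       # stand-in for the loaded ROM image
# buf.write(rom_bytes)                # readFromFile() would populate it like this
digest = md5(buf.getbuffer()).hexdigest()
print(digest == EXPECTED_MD5)         # True only when the data matches the digest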
def __init__(self, data):
    BytesIO.__init__(self, data)
    self.status = 200
    self.version = 'HTTP/1.1'
    self.reason = 'OK'
    if PY2:
        self.msg = HTTPMessage(BytesIO('Content-Type: application/x-compressed\r\n'))
    else:
        self.msg = HTTPMessage()
        self.msg.add_header('Content-Type', 'application/x-compressed')
def __init__(self, initial_buffer=b'', byte_size=1024):
    # default must be bytes, since the buffer is handed to BytesIO below
    initial_buffer = initial_buffer[-byte_size:]
    bytes_len = len(initial_buffer)
    BytesIO.__init__(self, initial_buffer)
    self._available = bytes_len
    self._limit = byte_size
    self._head = 0
    self._tail = self._available % self._limit
    self._set_head(bytes_len)
    self.__write_locked = False
def __init__(self):
    BytesIO.__init__(self)
    self.vars = self._init_vars()
    self.funcs = self._init_funcs()
    self.state = self.STATE_IDLE
    self.stack = []
    self.current_block = None
    self.state_blocks = {}
    self.path_stack = []
    self.blocks = []
def __init__(self, data):
    BytesIO.__init__(self, data)
    self.status = 200
    self.version = 'HTTP/1.1'
    self.reason = 'OK'
    if PY2:
        self.msg = HTTPMessage(BytesIO(b'Content-Type: application/x-compressed\r\n'))
    else:
        self.msg = HTTPMessage()
        self.msg.add_header('Content-Type', 'application/x-compressed')
def __init__(self, data, fname=None, mode="r"): _BytesIO.__init__(self, data) if "closed" not in dir(self): self.closed = False if fname is None: self.name = "fabioStream" else: self.name = fname self.mode = mode self.lock = threading.Semaphore() self.__size = None
def __init__(self, fp):
    """
    Adds buffering to a stream file object, in order to provide the **seek**
    and **tell** methods required by :func:`PIL.Image.open`. The stream
    object must implement **read** and **close** methods.

    :param fp: Stream file handle.

    .. seealso:: module :func:`PIL.Image.open`
    """
    data = fp.read()
    BytesIO.__init__(self, data)
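# A usage sketch for the wrapper above (hedged: the class name _BufferedStream
# is a stand-in, and Pillow is assumed to be installed). It shows why buffering
# matters: PIL.Image.open() needs seek()/tell(), which a plain read-only stream
# may not provide, so the wrapper slurps the data into memory first.
from io import BytesIO
from PIL import Image

class _BufferedStream(BytesIO):          # stand-in for the wrapper above
    def __init__(self, fp):
        BytesIO.__init__(self, fp.read())

src = BytesIO()                          # build a tiny in-memory PNG to read back
Image.new("RGB", (4, 4)).save(src, format="PNG")
src.seek(0)

img = Image.open(_BufferedStream(src))   # Image.open() can now seek over the data
print(img.size)                          # -> (4, 4)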
def __init__(self, filename, capture_diff=False, dry_run=False, readmode='rU'):
    BytesIO.__init__(self)
    self.name = filename
    assert type(capture_diff) == bool
    assert type(dry_run) == bool
    assert 'r' in readmode
    self._capture_diff = capture_diff
    self._write_to_file = not dry_run
    self.diff = None
    self.mode = readmode
def __init__(self, data_buffer, closing_boundary, filename, callback=None,
             chunk_size=0, file_part=0, signal_sender=None):
    """
    Class extending the standard BytesIO to read data directly from file instead
    of loading all file content in memory. It is initialized with all the data
    needed to build the full body of the POST request, encoded as
    multipart/form-data. It can also feed progress data and transfer rates.
    When uploading file chunks through several queries, the progress also takes
    into account the fact that this may be part XX of a larger file.

    :param data_buffer: beginning of the multipart data, up to the opening of
        the file content field
    :param closing_boundary: last data to append after the file content has been sent
    :param filename: path of the file on the filesystem
    :param callback: dict() that can be updated with progress data
    :param chunk_size: maximum size that can be posted at once
    :param file_part: if the file is bigger than chunk_size, can be 1, 2, 3, etc.
    :return:
    """
    self.callback = callback
    self.cursor = 0
    self.start = time.time()
    self.closing_boundary = closing_boundary
    self.data_buffer_length = len(data_buffer)
    self.file_length = os.stat(filename).st_size
    self.full_length = self.length = (self.data_buffer_length + self.file_length
                                      + len(closing_boundary))
    self.chunk_size = chunk_size
    self.file_part = file_part
    self._seek = 0
    self._signal_sender = signal_sender
    self.fd = open(filename, 'rb')
    if chunk_size and self.file_length > chunk_size:
        seek = file_part * chunk_size
        self._seek = seek
        self.fd.seek(seek)
        # recompute chunk_size for the last, shorter part
        if self.file_length - seek < chunk_size:
            self.chunk_size = self.file_length - seek
        self.length = self.chunk_size + self.data_buffer_length + len(closing_boundary)
    BytesIO.__init__(self, data_buffer)
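# To make the constructor above concrete, here is a hedged sketch of what the
# data_buffer / closing_boundary pair typically looks like for a single file
# field, and how full_length is derived from them. The boundary string and the
# form field names are illustrative only, not taken from the original code.
import os

boundary = b"----example-upload-boundary"
filename = __file__                      # any existing file works for the demo

data_buffer = (b"--" + boundary + b"\r\n"
               b'Content-Disposition: form-data; name="userfile"; filename="upload.bin"\r\n'
               b"Content-Type: application/octet-stream\r\n\r\n")
closing_boundary = b"\r\n--" + boundary + b"--\r\n"

file_length = os.stat(filename).st_size
full_length = len(data_buffer) + file_length + len(closing_boundary)
print(full_length)                       # total body size the request would advertise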
def __init__(self, **kwargs):
    buffer = kwargs.get("buffer", None)
    if buffer:
        BytesIO.__init__(self, buffer)
        self.pageId = kwargs.get("pageId", None)
        header = kwargs.get("header", None)
        schema = kwargs.get("schema", None)
        if self.pageId and header:
            self.header = header
        elif self.pageId:
            self.header = self.initializeHeader(**kwargs)
        else:
            raise ValueError("No page identifier provided to page constructor.")
    else:
        raise ValueError("No backing buffer provided to page constructor.")
def __init__(self, **kwargs):
    buffer = kwargs.get("buffer", None)
    if buffer:
        BytesIO.__init__(self, buffer)
        self.pageId = kwargs.get("pageId", None)
        header = kwargs.get("header", None)
        schema = kwargs.get("schema", None)
        if self.pageId and header:
            self.header = header
        elif self.pageId:
            self.header = self.initializeHeader(**kwargs)
        else:
            raise ValueError(
                "No page identifier provided to page constructor.")
    else:
        raise ValueError("No backing buffer provided to page constructor.")
def __init__(self, data_buffer, closing_boundary, filename, callback=None,
             chunk_size=0, file_part=0, signal_sender=None):
    """
    Class extending the standard BytesIO to read data directly from file instead
    of loading all file content in memory. It is initialized with all the data
    needed to build the full body of the POST request, encoded as
    multipart/form-data. It can also feed progress data and transfer rates.
    When uploading file chunks through several queries, the progress also takes
    into account the fact that this may be part XX of a larger file.

    :param data_buffer: beginning of the multipart data, up to the opening of
        the file content field
    :param closing_boundary: last data to append after the file content has been sent
    :param filename: path of the file on the filesystem
    :param callback: dict() that can be updated with progress data
    :param chunk_size: maximum size that can be posted at once
    :param file_part: if the file is bigger than chunk_size, can be 1, 2, 3, etc.
    :return:
    """
    self.callback = callback
    self.cursor = 0
    self.start = time.time()
    self.closing_boundary = closing_boundary
    self.data_buffer_length = len(data_buffer)
    self.file_length = os.stat(filename).st_size
    self.full_length = self.length = self.data_buffer_length + self.file_length + len(closing_boundary)
    self.chunk_size = chunk_size
    self.file_part = file_part
    self._seek = 0
    self._signal_sender = signal_sender
    self.fd = open(filename, 'rb')
    if chunk_size and self.file_length > chunk_size:
        seek = file_part * chunk_size
        self._seek = seek
        self.fd.seek(seek)
        # recompute chunk_size for the last, shorter part
        if self.file_length - seek < chunk_size:
            self.chunk_size = self.file_length - seek
        self.length = self.chunk_size + self.data_buffer_length + len(closing_boundary)
    BytesIO.__init__(self, data_buffer)
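# The chunking branch above recomputes the seek offset and the length of the
# last part. A small, self-contained sketch of that arithmetic (the function
# and parameter names are illustrative, not part of the original class):
def chunk_layout(file_length, chunk_size, file_part, overhead):
    """Return (seek_offset, part_size, body_length) for one uploaded part."""
    seek = file_part * chunk_size
    part_size = chunk_size
    if file_length - seek < chunk_size:      # the final part may be shorter
        part_size = file_length - seek
    return seek, part_size, part_size + overhead

# e.g. a 10 MiB file posted in 4 MiB chunks with 512 bytes of multipart overhead:
print(chunk_layout(10 * 1024 * 1024, 4 * 1024 * 1024, 2, 512))
# -> (8388608, 2097152, 2097664)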
def __init__(self, hosted_file, mode, content_type=None, filename=None):
    self.url = hosted_file._wadl_resource.url
    if mode == 'r':
        if content_type is not None:
            raise ValueError("Files opened for read access can't "
                             "specify content_type.")
        if filename is not None:
            raise ValueError("Files opened for read access can't "
                             "specify filename.")
        response, value = hosted_file._root._browser.get(
            self.url, return_response=True)
        content_type = response['content-type']
        last_modified = response.get('last-modified')
        # The Content-Location header contains the URL of the file
        # hosted by the web service. We happen to know that the
        # final component of the URL is the name of the uploaded
        # file.
        content_location = response['content-location']
        path = urlparse(content_location)[2]
        filename = unquote(path.split("/")[-1])
    elif mode == 'w':
        value = b''  # BytesIO below needs bytes, not str
        if content_type is None:
            raise ValueError("Files opened for write access must "
                             "specify content_type.")
        if filename is None:
            raise ValueError("Files opened for write access must "
                             "specify filename.")
        last_modified = None
    else:
        raise ValueError("Invalid mode. Supported modes are: r, w")
    self.hosted_file = hosted_file
    self.mode = mode
    self.content_type = content_type
    self.filename = filename
    self.last_modified = last_modified
    BytesIO.__init__(self, value)
def __init__(self, content, password):
    self.size = content.size
    BytesIO.__init__(self, Cryptographer.encrypted(password, content.file.read()))
def __init__(self):
    BytesIO.__init__(self)
    self.mode = None
def __init__(self, *args, **kwargs):
    BytesIO.__init__(self, *args, **kwargs)
def __init__(self):
    BytesIO.__init__(self)
    self._lock = threading.RLock()
def __init__(self, val): BytesIO.__init__(self, val) self.name = "image.png"
def __init__(self, buf=b'', callback=None):
    self._callback = callback
    self._progress = 0
    self._len = len(buf)
    BytesIO.__init__(self, buf)
def __init__(self, val): BytesIO.__init__(self, val) self.name = "david.png"
def __init__(self, dict, key):
    self.dict, self.key = dict, key
    BytesIO.__init__(self, dict[key])
def __init__(self, file):
    BytesIO.__init__(self)
    self.wrappedFile = file
def __init__(self, data_buffer=None, callback=None):
    self.callback = callback
    self.progress = 0
    self.length = len(data_buffer)
    BytesIO.__init__(self, data_buffer)
def __init__(self, compress=False):
    BytesIO.__init__(self)
    self.compress = compress
    self.filters = Array()
def __init__(self, filename):
    BytesIO.__init__(self)
    self.name = filename
def __init__(self, fn, uid, gid, mode, buf):
    BytesIO.__init__(self, buf)
    self.name = fn
    self.uid = uid
    self.gid = gid
    self.mode = mode
def __init__(self, compresslevel=9):
    BytesIO.__init__(self)
    self.__uncompressed_size = 0
    self.__compressor = bz2.BZ2Compressor(compresslevel)
def __init__(self, bytes=b'', packet_size=256*8, crc_attach=False):
    # default must be bytes, since it is passed straight to BytesIO
    BytesIO.__init__(self, bytes)
    self.packet_size = packet_size
    self.packets_get = 0
    self.crc_attach = crc_attach
def __init__(self, ext=None, mime='image/jpeg'):
    self.ext = ext
    self.mime = mime
    BytesIO.__init__(self)
def __init__(self, initial_bytes=None):
    if initial_bytes and type(initial_bytes) is not bytes:
        initial_bytes = initial_bytes.encode('ascii')
    BytesIO.__init__(self, initial_bytes)
def __init__(self, buf=b''):
    # default must be bytes, since it is passed straight to BytesIO
    self.stop = False
    BytesIO.__init__(self, buf)
def __init__(self):
    BytesIO.__init__(self)
    self.allow_read_past_eof = False
def __init__(self, initial_data=None, encoding='utf-8', errors='strict'):
    BytesIO.__init__(self)
    self._encoding_for_bytes = encoding
    self._errors = errors
    if initial_data is not None:
        self.write(initial_data)