Example #1
from tempfile import SpooledTemporaryFile
try:
    from urllib.parse import unquote_plus   # Python 3
except ImportError:
    from urllib import unquote_plus         # Python 2
import json
import requests

# splitkeyurl, checkServerVersion, progbar, SPOOL_SIZE and CHUNK_SIZE are
# helpers and constants defined elsewhere in the same module.
def get(url, ignoreVersion=False):
    '''Given a Send url, download and return the encrypted data and metadata'''
    prefix, urlid, key = splitkeyurl(url)

    if not checkServerVersion(prefix, ignoreVersion):
        raise Exception(
            'Potentially incompatible server version, use --ignore-version to disable version checks'
        )

    data = SpooledTemporaryFile(max_size=SPOOL_SIZE, mode='w+b')

    r = requests.get(prefix + 'api/download/' + urlid, stream=True)
    r.raise_for_status()
    content_length = int(r.headers['Content-length'])
    meta = json.loads(r.headers['X-File-Metadata'])
    filename = unquote_plus(meta['filename'])
    iv = meta['id']

    pbar = progbar(content_length)
    for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
        data.write(chunk)
        pbar.update(len(chunk))
    pbar.close()

    # The last 16 bytes / 128 bits of data is the GCM tag
    # https://www.w3.org/TR/WebCryptoAPI/#aes-gcm-operations :-
    # 7. Let ciphertext be equal to C | T, where '|' denotes concatenation.
    data.seek(-16, 2)
    tag = data.read()

    # now truncate the file to only contain encrypted data
    data.seek(-16, 2)
    data.truncate()

    data.seek(0)
    return data, filename, key, iv, tag
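
A minimal usage sketch for the function above; the URL is a hypothetical placeholder, and the AES-GCM decryption step that would consume key, iv and tag is not shown:

# Hypothetical example: the URL below is a placeholder, not a real share link.
data, filename, key, iv, tag = get('https://send.example.com/download/abc123/#keydata')
data.seek(0, 2)                                   # measure the encrypted payload
print('%s: %d encrypted bytes' % (filename, data.tell()))
data.seek(0)
# key, iv and tag would then be passed to the module's decryption routine.
data.close()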
Example #2
import os
import sys
from tempfile import SpooledTemporaryFile

# FileWrapper is the base class provided by the surrounding module; it supplies
# _check_mode, _fileobj, _bufsize, mode and the public file methods that call
# the _read/_write/_seek/_truncate hooks defined below.
class Buffer(FileWrapper):
    """Class implementing buffering of input and output streams.
    
    This class uses a separate buffer file to hold the contents of the
    underlying file while they are being manipulated.  As data is read
    it is duplicated into the buffer, and data is written from the buffer
    back to the file on close.
    """
    def __init__(self, fileobj, mode=None, max_size_in_memory=1024 * 8):
        """Buffered file wrapper constructor."""
        self._buffer = SpooledTemporaryFile(max_size=max_size_in_memory)
        self._in_eof = False
        self._in_pos = 0
        self._was_truncated = False
        super(Buffer, self).__init__(fileobj, mode)

    def _buffer_size(self):
        try:
            return len(self._buffer._file.getvalue())
        except AttributeError:
            return os.fstat(self._buffer.fileno()).st_size

    def _buffer_chunks(self):
        chunk = self._buffer.read(16 * 1024)
        if chunk == "":
            yield chunk
        else:
            while chunk != "":
                yield chunk
                chunk = self._buffer.read(16 * 1024)

    def _write_out_buffer(self):
        if self._check_mode("r"):
            self._read_rest()
            if "a" in self.mode:
                self._buffer.seek(self._in_pos)
                self._fileobj.seek(self._in_pos)
            else:
                self._fileobj.seek(0)
                self._buffer.seek(0)
        else:
            self._buffer.seek(0)
        if self._was_truncated:
            self._fileobj.truncate(0)
            self._was_truncated = False
        for chunk in self._buffer_chunks():
            self._fileobj.write(chunk)

    def flush(self):
        # flush the buffer; we only write to the underlying file on close
        self._buffer.flush()

    def close(self):
        if self.closed:
            return
        if self._check_mode("w"):
            self._write_out_buffer()
        super(Buffer, self).close()
        self._buffer.close()

    def _read(self, sizehint=-1):
        #  First return any data available from the buffer.
        #  Since we don't flush the buffer after every write, certain OSes
        #  (guess which!) will happily read junk data from the end of it.
        #  Instead, we explicitly read only up to self._in_pos.
        if not self._in_eof:
            buffered_size = self._in_pos - self._buffer.tell()
            if sizehint >= 0:
                buffered_size = min(sizehint, buffered_size)
        else:
            buffered_size = sizehint
        data = self._buffer.read(buffered_size)
        if data != "":
            return data
        # Then look for more data in the underlying file
        if self._in_eof:
            return None
        data = self._fileobj.read(sizehint)
        self._in_pos += len(data)
        self._buffer.write(data)
        if sizehint < 0 or len(data) < sizehint:
            self._in_eof = True
            self._buffer.flush()
        return data

    def _write(self, data, flushing=False):
        self._buffer.write(data)
        if self._check_mode("r") and not self._in_eof:
            diff = self._buffer.tell() - self._in_pos
            if diff > 0:
                junk = self._fileobj.read(diff)
                self._in_pos += len(junk)
                if len(junk) < diff:
                    self._in_eof = True
                    self._buffer.flush()

    def _seek(self, offset, whence):
        # Ensure we've read enough to simply do the seek on the buffer
        if self._check_mode("r") and not self._in_eof:
            if whence == 0:
                if offset > self._in_pos:
                    self._read_rest()
            if whence == 1:
                if self._buffer.tell() + offset > self._in_pos:
                    self._read_rest()
            if whence == 2:
                self._read_rest()
        # Then just do it on the buffer...
        self._buffer.seek(offset, whence)

    def _tell(self):
        return self._buffer.tell()

    def _truncate(self, size):
        if self._check_mode("r") and not self._in_eof:
            if size > self._in_pos:
                self._read_rest()
        self._in_eof = True
        try:
            self._buffer.truncate(size)
        except TypeError:
            et, ev, tb = sys.exc_info()
            # SpooledTemporaryFile.truncate() doesn't accept a size parameter.
            try:
                self._buffer._file.truncate(size)
            except Exception:
                raise et, ev, tb
        # StringIO objects don't truncate to larger size correctly.
        if hasattr(self._buffer, "_file"):
            _file = self._buffer._file
            if hasattr(_file, "getvalue"):
                if len(_file.getvalue()) != size:
                    curpos = _file.tell()
                    _file.seek(0, 2)
                    _file.write("\x00" * (size - len(_file.getvalue())))
                    _file.seek(curpos)
        self._was_truncated = True

    def _read_rest(self):
        """Read the rest of the input stream."""
        if self._in_eof:
            return
        pos = self._buffer.tell()
        self._buffer.seek(0, 2)
        data = self._fileobj.read(self._bufsize)
        while data:
            self._in_pos += len(data)
            self._buffer.write(data)
            data = self._fileobj.read(self._bufsize)
        self._in_eof = True
        self._buffer.flush()
        self._buffer.seek(pos)
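
A rough usage sketch, assuming FileWrapper exposes the usual read/write/seek/close methods that dispatch to the hooks above and that _check_mode accepts "r"/"w" checks for an "r+" mode; the file name is a placeholder:

# Hypothetical example: "data.txt" is a placeholder file opened for update.
f = open("data.txt", "r+b")
buf = Buffer(f, mode="r+")
head = buf.read(1024)        # data read is mirrored into the spooled buffer
buf.seek(0)
buf.write("NEWHEADER")       # writes land in the buffer, not the file
buf.close()                  # the buffer is written back to data.txt on close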