Example #1
0
class _UrlContextManager:
    def __init__(self, url, checksum_sha256):
        self.url = url
        self.checksum_sha256 = checksum_sha256
        self.handle = None

    def __enter__(self):
        session = requests.Session()
        retry_policy = retry.Retry(connect=10,
                                   read=10,
                                   status=10,
                                   backoff_factor=0.1,
                                   status_forcelist=RETRY_STATUS_CODES)
        adapter = HTTPAdapter(max_retries=retry_policy)
        session.mount("http://", adapter)
        session.mount("https://", adapter)

        resp = session.get(self.url)
        resp.raise_for_status()
        self.handle = BytesIO(resp.content)
        verify_checksum(self.handle, self.checksum_sha256)
        return self.handle.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            return self.handle.__exit__(exc_type, exc_val, exc_tb)
        finally:
            self.handle = None
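A minimal usage sketch for the context manager above. The URL and digest are hypothetical placeholders, and it assumes the surrounding module provides the imports and helpers used above (requests, HTTPAdapter, retry, BytesIO, RETRY_STATUS_CODES, verify_checksum):

def _example_url_download():
    sha256_hex = "0" * 64  # placeholder digest, not a real checksum
    with _UrlContextManager("https://example.org/data.bin", sha256_hex) as fh:
        return fh.read()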
Example #2
0
class _S3ContextManager:
    def __init__(self, s3_bucket, s3_key, checksum_sha256, s3_config):
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.checksum_sha256 = checksum_sha256
        self.s3_config = s3_config
        self.buffer = None

    def __enter__(self):
        unsigned_requests = self.s3_config.get(S3Backend.CONFIG_UNSIGNED_REQUESTS_KEY, False)

        if unsigned_requests:
            resource_config = Config(signature_version=UNSIGNED)
        else:
            resource_config = None

        session = boto3.session.Session()
        s3 = session.resource("s3", config=resource_config)
        bucket = s3.Bucket(self.s3_bucket)
        s3_obj = bucket.Object(self.s3_key)
        self.buffer = BytesIO()
        s3_obj.download_fileobj(self.buffer)
        self.buffer.seek(0)
        verify_checksum(self.buffer, self.checksum_sha256)
        return self.buffer.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            return self.buffer.__exit__(exc_type, exc_val, exc_tb)
        finally:
            self.buffer = None
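A hedged usage sketch for the S3 variant above; the bucket, key and digest are hypothetical, and boto3, Config, UNSIGNED, S3Backend and verify_checksum are assumed to be imported or defined in the surrounding module:

def _example_s3_download():
    cfg = {S3Backend.CONFIG_UNSIGNED_REQUESTS_KEY: True}  # anonymous requests
    with _S3ContextManager("example-bucket", "models/model.bin",
                           "0" * 64, cfg) as fh:
        return fh.read()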
Example #3
0
class _UrlContextManager(object):
    def __init__(self, url, checksum_sha256, seekable):
        self.url = url
        self.checksum_sha256 = checksum_sha256
        self.seekable = seekable
        self.handle = None

    def __enter__(self):
        if self.seekable:
            req = requests.get(self.url)
            self.handle = BytesIO(req.content)
        else:
            req = requests.get(self.url, stream=True)
            self.handle = req.raw
        return self.handle.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.handle is not None:
            return self.handle.__exit__(exc_type, exc_val, exc_tb)
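This variant buffers the whole response when seekable is True and hands back the raw urllib3 stream otherwise; a small hedged sketch with a hypothetical URL (the checksum argument is unused by this variant):

def _example_streaming_download():
    with _UrlContextManager("https://example.org/big.bin", None,
                            seekable=False) as raw:
        return raw.read(4096)  # first 4 KiB only, without buffering the rest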
Example #4
0
class AppendedFiles:
    """
    Two WormHoleFiles one after another.
    Takes 1 or 2 dict(s) as arguments; they're passed to WormHoleFiles'
    at the init.

    This is aimed at merging the TOC track starting at LBA45000 with
    the last one to mimic one big track at LBA0 with the files at the
    same LBA than the GD-ROM.
    """

    def __init__(self, wormfile1, wormfile2=None, *args, **kwargs):

        self._f1 = WormHoleFile(**wormfile1)

        self._f1.seek(0, 2)
        self._f1_len = self._f1.tell()
        self._f1.seek(0, 0)

        self._f2_len = 0
        if wormfile2:
            self._f2 = WormHoleFile(**wormfile2)

            self._f2.seek(0, 2)
            self._f2_len = self._f2.tell()
            self._f2.seek(0, 0)
        else:
            # So the rest of the code works for one or 2 files.
            self._f2 = BytesIO(b'')

        self.seek(0, 0)

    def seek(self, a, b=0):
        if b == 0:
            self.MetaPointer = a
        elif b == 1:
            self.MetaPointer += a
        elif b == 2:
            # The offset is interpreted as a distance back from the end.
            self.MetaPointer = self._f1_len + self._f2_len - a

        # Position the underlying files from the merged offset, not from the
        # raw argument, so relative and end-based seeks work too.
        if self.MetaPointer >= self._f1_len:
            self._f1.seek(0, 2)
            self._f2.seek(self.MetaPointer - self._f1_len, 0)
        else:
            self._f1.seek(self.MetaPointer, 0)
            self._f2.seek(0, 0)

    def read(self, length=None):
        if length is None:
            length = self._f1_len + self._f2_len - self.MetaPointer
        tmp = self.MetaPointer
        FutureOffset = self.MetaPointer + length
        if FutureOffset < self._f1_len:  # Read inside file1
            data = self._f1.read(length)
        elif tmp >= self._f1_len:       # Read inside file2
            data = self._f2.read(length)
        else:                           # Read end of file1 and start of file2
            data = self._f1.read(self._f1_len - tmp)
            data += self._f2.read(FutureOffset - self._f1_len)

        # Updating self.MetaPointer alone might be enough, but a full seek()
        # also repositions the underlying files, which is safer.
        self.seek(FutureOffset)
        return data

    def tell(self):
        return self.MetaPointer

    def __enter__(self):
        return self

    def __exit__(self, type=None, value=None, traceback=None):
        # This is required to close the files properly when using the with
        # statement. ISO9660 doesn't need it anymore, but it could be useful
        # for other uses, so it stays!
        self._f1.__exit__()
        if self._f2_len:
            self._f2.__exit__()
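A sketch of how AppendedFiles might be driven. track1_kwargs and track2_kwargs stand for dicts of whatever keyword arguments WormHoleFile actually accepts (its signature is not part of this excerpt), and the offsets are illustrative:

def _example_merged_read(track1_kwargs, track2_kwargs):
    with AppendedFiles(track1_kwargs, track2_kwargs) as merged:
        merged.seek(45000 * 2048)  # jump to the LBA 45000 area, 2048-byte sectors
        return merged.read(2048)   # read one sector from the merged view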
Example #5
0
class _BaseBinaryWrapper:
    def __init__(self, stream: Union[typing.BinaryIO, bytes, bytearray] = b""):
        if isinstance(stream, (bytes, bytearray)):
            self.stream = BytesIO(stream)
        else:
            self.stream = stream

    # Wrappings:
    def close(self) -> None:
        return self.stream.close()

    def flush(self) -> None:
        return self.stream.flush()

    def read(self, n: int = -1) -> bytes:
        return self.stream.read(n)

    def readable(self) -> bool:
        return self.stream.readable()

    def readline(self, limit: int = -1) -> bytes:
        return self.stream.readline(limit)

    def readlines(self, hint: int = -1) -> List[bytes]:
        return self.stream.readlines(hint)

    def write(self, s: Union[bytes, bytearray]) -> int:
        return self.stream.write(s)

    def writable(self) -> bool:
        return self.stream.writable()

    def writelines(self, lines: Iterable[bytes]) -> None:
        self.stream.writelines(lines)

    def seek(self, offset: int, whence: int = 0) -> int:
        return self.stream.seek(offset, whence)

    def seekable(self) -> bool:
        return self.stream.seekable()

    def tell(self) -> int:
        return self.stream.tell()

    def fileno(self) -> int:
        return self.stream.fileno()

    def __enter__(self):
        self.stream.__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stream.__exit__(exc_type, exc_val, exc_tb)

    # helper functions

    def readall(self):
        self.stream.seek(0)
        return self.stream.read()

    def getvalue(self):
        if isinstance(self.stream, BytesIO):
            return self.stream.getvalue()
        pos = self.stream.tell()
        ret = self.readall()
        self.stream.seek(pos)
        return ret

    def align(self, alignment=4):
        if offset := (self.tell() % alignment):
            self.seek(self.tell() + alignment - offset)
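A small, self-contained illustration of the wrapper above, in particular align(), which pads the current position forward to the next multiple of the requested alignment (the function name is just for the example):

def _example_alignment():
    buf = _BaseBinaryWrapper(b"")
    buf.write(b"\x01\x02\x03")  # position is now 3
    buf.align(4)                # position jumps to 4, the next multiple of 4
    buf.write(b"\xff")          # BytesIO zero-fills the skipped byte
    return buf.getvalue()       # b"\x01\x02\x03\x00\xff"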
Example #6
0
class VerifiableStream(BinaryIO):
    """A binary stream whose contents can be verified to not have changed.

    The stream does not accept an HMAC key, but generates one randomly as a nonce. While unusual,
    this is intentional -- these streams are meant to be used as part of model serialization,
    where their nonces and HMAC codes are stored in a cryptographically signed metadata file.
    In other words, the HMAC simply ensures that the stream's data has not changed, and does not
    guarantee the data's origin -- that's the metadata signature's job.

    The stream is meant to be used in the following sequence:
        - instantiate the stream
        - write all data to the stream (the stream is not readable yet!)
        - call "finalize()" on the stream, saving the returned nonce and HMAC code
        - read data from the stream (the stream is not writable any more!)
    """
    def __init__(self):
        """Create a new VerifiableStream with a random nonce."""
        self._finalized = False
        # this is bytes; be careful trying to add strings to it
        self._random_nonce = os.urandom(16)
        self._underlying_stream = BytesIO()
        self._hmac_state = hmac.new(self._random_nonce, digestmod=HASHER)

    def _ensure_finalized(self):
        """Raise an error if the stream has not already been finalized."""
        if not self._finalized:
            raise AssertionError(
                "Expected the stream to be finalized, but it was not!")

    def _ensure_not_finalized(self):
        """Raise an error if the stream has already been finalized."""
        if self._finalized:
            raise AssertionError(
                "Expected the stream to not be finalized, but it was!")

    def finalize(self):
        """Calculate the HMAC code for the stream, disable writing and enable reading.

        Returns:
            tuple (nonce, HMAC code)  (both of type string)
        """
        self._ensure_not_finalized()

        self._finalized = True

        nonce_string = _convert_base64_bytes_to_string(self._random_nonce)
        hmac_string = _convert_base64_bytes_to_string(
            self._hmac_state.digest())

        return nonce_string, hmac_string

    # methods for writing require that the stream not be finalized
    def writable(self) -> bool:
        """Return True if the stream is writable, and False otherwise."""
        if self._finalized:
            return False
        else:
            return self._underlying_stream.writable()

    @validate(b=bytes)
    def write(self, b: bytes) -> int:
        """Write the given binary data to the stream, and include it in the HMAC calculation."""
        self._ensure_not_finalized()
        num_bytes = self._underlying_stream.write(b)
        self._hmac_state.update(b)
        return num_bytes

    def writelines(self, lines: Iterable[bytes]) -> None:
        """Write lines to a stream"""
        # technically done by `write`, but it doesn't hurt to be safe
        self._ensure_not_finalized()
        for line in lines:
            self.write(line)
        return None

    # methods for reading require that the stream is finalized
    def readable(self) -> bool:
        """Return True if the stream is readable, and False otherwise."""
        if self._finalized:
            return self._underlying_stream.readable()
        else:
            return False

    def read(self, size=None) -> bytes:
        """Read bytes from stream"""
        self._ensure_finalized()
        return self._underlying_stream.read(size)

    def readall(self) -> bytes:
        """Read all bytes from the stream"""
        raise NotImplementedError(
            "`VerifiableStream` does not implement `readall` since the underlying BytesIO does not "
            "implement it.")

    def readline(self, size=None) -> bytes:
        """Read a line from stream"""
        self._ensure_finalized()
        return self._underlying_stream.readline(size)

    def readlines(self, size=None) -> List[bytes]:
        """Read lines from stream"""
        self._ensure_finalized()
        return self._underlying_stream.readlines(size)

    def read1(self, size) -> bytes:
        """Read bytes from stream"""
        self._ensure_finalized()
        return self._underlying_stream.read1(size)

    def readinto(self, b) -> Optional[int]:
        """Read bytes into another buffer"""
        self._ensure_finalized()
        return self._underlying_stream.readinto(b)

    def readinto1(self, b) -> Optional[int]:
        """Read bytes into another buffer"""
        self._ensure_finalized()
        return self._underlying_stream.readinto1(b)

    # seeking requires a finalized stream
    def seekable(self):
        """Return True if the read pointer in the stream can be moved, and False otherwise."""
        if self._finalized:
            return self._underlying_stream.seekable()
        else:
            return False

    def seek(self, *args, **kwargs) -> int:
        """Seek to a new position. Return the new position"""
        self._ensure_finalized()
        return self._underlying_stream.seek(*args, **kwargs)

    def truncate(self, size: Optional[int] = None) -> None:
        """Raise an error; truncation is not supported."""
        raise NotImplementedError(
            "`VerifiableStream` does not support truncation. It is too "
            "complicated to keep track of the HMAC digests")

    def close(self):
        """Close the stream, discarding its data. Will raise an error if not finalized yet."""
        if self._finalized:
            return self._underlying_stream.close()
        else:
            raise AssertionError(
                "Attempting to close an unfinalized VerifiableStream. This is "
                "almost certainly a bug.")

    # a bunch of attributes/methods that are always accessible
    def isatty(self) -> bool:
        """Determine whether this is a terminal"""
        return self._underlying_stream.isatty()

    @property
    def closed(self) -> bool:
        """Determine whether the stream is closed"""
        return self._underlying_stream.closed

    def fileno(self) -> int:
        """Return the underlying file descriptor"""
        # this will technically raise UnsupportedOperation, but better to let BytesIO do that
        return self._underlying_stream.fileno()

    def mode(self) -> str:
        """Return the file mode"""
        # this doesn't exist for the underlying stream
        raise AssertionError(
            "`VerifiableStream` does not have a mode. This is probably a bug in "
            "something assuming that the stream is backed by a file")

    def name(self) -> str:
        """Return the file name"""
        # this doesn't exist for the underlying stream
        raise AssertionError(
            "`VerifiableStream` does not have a name. This is probably a bug in "
            "something assuming the stream is a file descriptor")

    def flush(self) -> None:
        """Flush the underlying stream"""
        # this technically does nothing in BytesIO
        return self._underlying_stream.flush()

    def tell(self) -> int:
        """Tell the current position"""
        return self._underlying_stream.tell()

    # context manager methods
    def __enter__(self) -> "VerifiableStream":
        """Enter"""
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> bool:
        """Exit"""
        return self._underlying_stream.__exit__(exc_type, exc_val, exc_tb)
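A sketch of the write -> finalize -> read sequence described in the class docstring; the payload and variable names are illustrative only:

def _example_verifiable_roundtrip():
    stream = VerifiableStream()
    stream.write(b"serialized model bytes")
    nonce, code = stream.finalize()  # persist these in the signed metadata
    stream.seek(0)                   # reading is only allowed after finalize()
    return nonce, code, stream.read()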
Example #7
0
class BCBytesStream():
    """A class that provides additional serialization and deserialization methods
    over a base BufferedReader class.

    The BufferedReader object is class member _br and all unknown method
    calls are passed to _br"""
    def __init__(self, br=None):
        """Can be initialized with a BufferedReader, BytesIO or bytes object;
        defaults to an empty BytesIO."""
        if isinstance(br, (BufferedReader, BytesIO)):
            self._br = br
        elif isinstance(br, bytes):
            self._br = BytesIO(br)
        elif not br:
            self._br = BytesIO()
        else:
            raise TypeError(
                "BCBytesStream requires BufferedReader, BytesIO or bytes object, not {}"
                .format(type(br)))

    def __getattr__(self, name):
        return getattr(self._br, name)

    def __enter__(self, *args, **kwargs):
        self._br.__enter__(*args, **kwargs)
        return self

    def __exit__(self, *args, **kwargs):
        return self._br.__exit__(*args, **kwargs)

    def deser_boolean(self):
        return struct.unpack("?", self.read(1))[0]

    def ser_boolean(self, val):
        self.write(struct.pack("?", val))

    def deser_int8(self):
        return struct.unpack("<b", self.read(1))[0]

    def ser_int8(self, val):
        self.write(struct.pack("<b", val))

    def deser_uint8(self):
        return struct.unpack("<B", self.read(1))[0]

    def ser_uint8(self, val):
        self.write(struct.pack("<B", val))

    def deser_int16(self):
        return struct.unpack("<h", self.read(2))[0]

    def ser_int16(self, val):
        self.write(struct.pack("<h", val))

    def deser_uint16(self, big=False):
        fmt = ">" if big else "<"
        fmt += "H"
        return struct.unpack(fmt, self.read(2))[0]

    def ser_uint16(self, val):
        self.write(struct.pack("<H", val))

    def deser_int32(self):
        return struct.unpack("<i", self.read(4))[0]

    def ser_int32(self, val):
        self.write(struct.pack("<i", val))

    def deser_uint32(self):
        return struct.unpack("<I", self.read(4))[0]

    def ser_uint32(self, val):
        self.write(struct.pack("<I", val))

    def deser_int64(self):
        return struct.unpack("<q", self.read(8))[0]

    def ser_int64(self, val):
        self.write(struct.pack("<q", val))

    def deser_uint64(self):
        return struct.unpack("<Q", self.read(8))[0]

    def ser_uint64(self, val):
        self.write(struct.pack("<Q", val))

    def deser_double(self):
        return struct.unpack("<d", self.read(8))[0]

    def ser_double(self, val):
        self.write(struct.pack("<d", val))

    def deser_uint256(self):
        r = 0
        for i in range(8):
            t = struct.unpack("<I", self.read(4))[0]
            r += t << (i * 32)
        return r

    def ser_uint256(self, val):
        rs = b""
        for i in range(8):
            rs += struct.pack("<I", val & 0xFFFFFFFF)
            val >>= 32
        self.write(rs)

    def deser_compact_size(self):
        nit = struct.unpack("<B", self.read(1))[0]
        if nit == 253:
            nit = struct.unpack("<H", self.read(2))[0]
        elif nit == 254:
            nit = struct.unpack("<I", self.read(4))[0]
        elif nit == 255:
            nit = struct.unpack("<Q", self.read(8))[0]
        return nit

    def ser_compact_size(self, val):
        r = b""
        if val < 253:
            r = struct.pack("B", val)
        elif val < 0x10000:
            r = struct.pack("<BH", 253, val)
        elif val < 0x100000000:
            r = struct.pack("<BI", 254, val)
        else:
            r = struct.pack("<BQ", 255, val)
        self.write(r)

    def deser_string(self):
        nit = self.deser_compact_size()
        return self.read(nit).decode("utf-8")

    def ser_string(self, s):
        self.ser_compact_size(len(s))
        self.write(s)

    def deser_vector(self, c):
        nit = self.deser_compact_size()
        r = []
        for _ in range(nit):
            t = c()
            t.deserialize(self)
            r.append(t)
        return r

    def ser_vector(self, l, ser_function_name=None):
        """Serialize a vector object.

        ser_function_name: Allow for an alternate serialization function on the
        entries in the vector."""

        self.ser_compact_size(len(l))
        for i in l:
            if ser_function_name:
                getattr(i, ser_function_name)(self)
            else:
                self.write(i)

    def deser_uint256_vector(self):
        nit = self.deser_compact_size()
        r = []
        for _ in range(nit):
            t = self.deser_uint256()
            r.append(t)
        return r

    def ser_uint256_vector(self, l):
        self.ser_compact_size(len(l))
        for i in l:
            self.ser_uint256(i)

    def deser_string_vector(self):
        nit = self.deser_compact_size()
        r = []
        for i in range(nit):
            t = self.deser_string()
            r.append(t)
        return r

    def ser_string_vector(self, l):
        self.ser_compact_size(len(l))
        for sv in l:
            self.ser_string(sv)

    def peep_byte(self):
        pos = self.tell()
        r = self.read(1)
        self.seek(pos)
        return r

    def deserialize_magic(self):
        magic = self.read(4)
        if magic == b'\xf9\xbe\xb4\xd9':
            network = "mainnet"
        elif magic == b'\x0b\x11\x09\x07':
            network = "testnet"
        elif magic == b'\xfa\xbf\xb5\xda':
            network = "regtest"
        else:
            network = "unknown"

        return magic, network
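A short round-trip sketch for the class above, using an in-memory BytesIO and the Bitcoin-style compact-size encoding (struct and BytesIO are assumed to be imported as in the surrounding module):

def _example_bcbytes_roundtrip():
    s = BCBytesStream(BytesIO())
    s.ser_compact_size(300)  # 300 >= 253, so it is encoded as 0xfd + uint16
    s.ser_string(b"hello")   # compact size prefix followed by the raw bytes
    s.seek(0)
    assert s.deser_compact_size() == 300
    assert s.deser_string() == "hello"
    return s.getvalue()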