class WebWriter:
    def __init__(self, web):
        self.s = BytesIO()
        self.web = web

    def write(self, txt):
        if isinstance(txt, str):
            txt = txt.encode()
        s = self.s
        wdt = self.web.wdt
        wdt.feed()
        s.write(txt)
        wdt.feed()
        if s.tell() > 1200:
            wdt.feed()
            self.web.write(s.getvalue())
            wdt.feed()
            self.s = BytesIO()

    def flush(self):
        if self.s.tell() > 0:
            wdt = self.web.wdt
            wdt.feed()
            self.web.write(self.s.getvalue())
            wdt.feed()
            self.s = BytesIO()
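# A minimal, runnable sketch of how WebWriter batches output, using stub
# web/wdt objects invented here for illustration (the real web server and
# watchdog classes are not part of this snippet):

from io import BytesIO

class _StubWdt:
    def feed(self):
        pass  # the real watchdog reset is a no-op in this sketch

class _StubWeb:
    def __init__(self):
        self.wdt = _StubWdt()
        self.sent = []

    def write(self, data):
        self.sent.append(data)  # the real object would write to a socket

web = _StubWeb()
writer = WebWriter(web)
writer.write('x' * 2000)  # crosses the 1200-byte threshold, so it flushes
writer.flush()            # push any remainder
print(len(web.sent), 'chunk(s) sent')  # -> 1 chunk(s) sent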
def encode(cls, data):
    # Split data into GIF sub-blocks: a length byte (1-255) followed by
    # that many payload bytes; a zero-length block terminates the sequence.
    sub_blocks = []
    stream = BytesIO(data)
    while True:
        next_bytes = stream.read(255)
        byte_len = struct.pack("B", len(next_bytes))
        sub_blocks.append(cls(byte_len + next_bytes))
        if not len(next_bytes):
            break
    return sub_blocks
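# A quick standalone check of that sub-block layout (plain bytes, without
# the cls wrapper used above):

import struct
from io import BytesIO

def frame_sub_blocks(data):
    out, stream = [], BytesIO(data)
    while True:
        chunk = stream.read(255)
        out.append(struct.pack("B", len(chunk)) + chunk)
        if not chunk:
            break
    return out

blocks = frame_sub_blocks(b'a' * 300)
print([len(b) for b in blocks])  # [256, 46, 1]: 255- and 45-byte payloads, then the 0x00 terminator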
def test_concat(self):
    "Test that we can concatenate output and retrieve the objects back out."
    self._oso(self.test_objects)
    fob = StringIO()
    for ob in self.test_objects:
        dump(ob, fob)
    fob.seek(0)
    obs2 = []
    try:
        while True:
            obs2.append(load(fob))
    except EOFError:
        pass
    assert obs2 == self.test_objects
def draw_image(self, path=None, bytes=None, x=0, y=0, w=320, h=240):
    """Draw image from flash.

    Args:
        path (string): Image file path.
        bytes (bytes): Image bytes.
        x (int): X coordinate of image left.  Default is 0.
        y (int): Y coordinate of image top.  Default is 0.
        w (int): Width of image.  Default is 320.
        h (int): Height of image.  Default is 240.
    """
    x2 = x + w - 1
    y2 = y + h - 1
    if self.is_off_grid(x, y, x2, y2):
        return
    if (path is None and bytes is None) or (path is not None and bytes is not None):
        return
    with open(path, "rb") if path else BytesIO(bytes) as f:
        chunk_height = 1024 // w
        chunk_count, remainder = divmod(h, chunk_height)
        chunk_size = chunk_height * w * 2
        chunk_y = y
        if chunk_count:
            for c in range(0, chunk_count):
                buf = f.read(chunk_size)
                self.block(x, chunk_y, x2, chunk_y + chunk_height - 1, buf)
                chunk_y += chunk_height
        if remainder:
            buf = f.read(remainder * w * 2)
            self.block(x, chunk_y, x2, chunk_y + remainder - 1, buf)
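# Hypothetical usage sketch (the driver instance and file names here are
# assumptions, not part of this snippet). The raw data is read 2 bytes per
# pixel, matching the chunk_size math above; chunk_height = 1024 // w
# implies w must be 1024 or less, and exactly one of path or bytes may be
# given:
#
#     display.draw_image(path='images/sprite.raw', x=10, y=20, w=64, h=64)
#     display.draw_image(bytes=raw_pixels, w=32, h=32)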
def process(self):
    log('*** PAGE ***', self.page)
    if (self.page == '') or (self.last == self.page):
        self.page = 'index'
    try:
        page = __import__('web.{}'.format(self.page), None, None, ('page',), 0).page
    except ImportError:
        log('!!! Page {} not found !!!'.format(self.page))
        page = __import__('web.index', None, None, ('page',), 0).page
    if not self.last == self.page:
        self.last = self.page
        play((1047, 30), 120, (1568, 30))
    s = BytesIO()
    for p in page(self):
        if isinstance(p, WebServer.IndexDrawer):
            for q in __import__('web.index', None, None, ('page',), 0).page(self):
                self.wdt.feed()
                self.writer.write(q)
                self.wdt.feed()
        else:
            self.wdt.feed()
            self.writer.write(p)
            self.wdt.feed()
    self.wdt.feed()
    self.writer.flush()
    self.wdt.feed()
def loads(data):
    """
    Parse CBOR bytes and return Python objects.
    """
    if data is None:
        raise ValueError("got None for buffer to decode in loads")
    return _loads(BytesIO(data))
def pub_json(self, tpc, obj, **kwarg):
    gc_collect()
    with BytesIO() as json:
        json_dump(obj, json)
        gc_collect()
        ok = self.publish(tpc, json.getvalue(), **kwarg)
    gc_collect()
    return ok
def __init__(self, raw):
    is_buffered = False
    # is_buffered = isinstance(raw, IOBase) or isinstance(raw, file)
    self.io = raw if is_buffered else BytesIO(raw)
    self.header = ""
    self.screen_desc = ""
    self.global_color_table = ""
    self.application_extensions = []
    self.comment_extensions = []
    self.frames = []
    self.trailer = ""
    self.parse()
def pub_json(self, tpc, obj, **kwarg):
    gc_collect()
    print(tpc)
    print(obj)
    if wifi.is_connected() and self.mqtt:
        if type(tpc) != type(b''):
            tpc = tpc.encode(UTF8)
        with BytesIO() as json:
            json_dump(obj, json)
            gc_collect()
            self.mqtt.publish(tpc, json.getvalue(), **kwarg)
        sleep(0.5)
        gc_collect()
        return True
    return False
def sendto(self, bytes, *args, **kwargs):
    if self.type != socket.SOCK_DGRAM:
        return _BaseSocket.sendto(self, bytes, *args, **kwargs)
    if not self._proxyconn:
        self.bind(("", 0))

    address = args[-1]
    flags = args[:-1]

    header = BytesIO()
    RSV = b"\x00\x00"     # reserved bytes
    header.write(RSV)
    STANDALONE = b"\x00"  # fragment number 0 = standalone datagram
    header.write(STANDALONE)
    self._write_SOCKS5_address(address, header)

    sent = _BaseSocket.send(self, header.getvalue() + bytes, *flags, **kwargs)
    return sent - header.tell()
def recvfrom(self, bufsize, flags=0):
    if self.type != socket.SOCK_DGRAM:
        return _BaseSocket.recvfrom(self, bufsize, flags)
    if not self._proxyconn:
        self.bind(("", 0))

    buf = BytesIO(_BaseSocket.recv(self, bufsize + 1024, flags))
    buf.seek(2, 1)  # skip the two reserved RSV bytes
    frag = buf.read(1)
    if ord(frag):
        raise NotImplementedError("Received UDP packet fragment")
    fromhost, fromport = self._read_SOCKS5_address(buf)

    if self.proxy_peername:
        peerhost, peerport = self.proxy_peername
        if fromhost != peerhost or peerport not in (0, fromport):
            raise socket.error(EAGAIN, "Packet filtered")

    return (buf.read(bufsize), (fromhost, fromport))
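# For reference, the UDP request header that sendto() builds and recvfrom()
# skips over is defined by RFC 1928 section 7:
#
#     +----+------+------+----------+----------+----------+
#     |RSV | FRAG | ATYP | DST.ADDR | DST.PORT |   DATA   |
#     +----+------+------+----------+----------+----------+
#     | 2  |  1   |  1   | Variable |    2     | Variable |
#     +----+------+------+----------+----------+----------+
#
# A minimal sketch of building that header for an IPv4 address (standalone,
# not using the class's _write_SOCKS5_address helper):

import socket
import struct
from io import BytesIO

def udp_header_ipv4(host, port):
    hdr = BytesIO()
    hdr.write(b"\x00\x00")             # RSV, reserved
    hdr.write(b"\x00")                 # FRAG 0 = standalone datagram
    hdr.write(b"\x01")                 # ATYP 1 = IPv4
    hdr.write(socket.inet_aton(host))  # DST.ADDR
    hdr.write(struct.pack(">H", port)) # DST.PORT, network byte order
    return hdr.getvalue()

print(udp_header_ipv4("127.0.0.1", 5300).hex())  # 000000017f00000114b4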
try:
    from uio import BytesIO
    import umsgpack as msgpack
except ImportError:
    try:
        from io import BytesIO
        import msgpack
    except ImportError:
        print("SKIP")
        raise SystemExit

b = BytesIO()
msgpack.pack(False, b)
print(b.getvalue())

b = BytesIO()
msgpack.pack({"a": (-1, 0, 2, [3, None], 128)}, b)
print(b.getvalue())

# pack to a small-int not allowed
try:
    msgpack.pack(123, 1)
except (AttributeError, OSError):
    # CPython and uPy have different errors
    print("Exception")

# pack to an object not allowed
try:
    msgpack.pack(123, {})
except (AttributeError, OSError):
    # CPython and uPy have different errors
    print("Exception")
def load_xml_string(text, buffer_size=1024):
    stm = BytesIO(text.encode("utf8"))
    tokens = xml_parse_token(stm.read, buffer_size)
    return XMLNode(stm, tokens, 0)
async def sign_psbt_buf(psbt_buf):
    # sign a PSBT file found on a microSD card
    from uio import BytesIO
    from common import dis
    from sram4 import tmp_buf
    from utils import HexStreamer, Base64Streamer, HexWriter, Base64Writer

    UserAuthorizedAction.cleanup()

    # copy buffer into SPI Flash
    # - accepts hex or base64 encoding, but binary prefered
    with BytesIO(psbt_buf) as fd:
        dis.fullscreen('Reading...')

        # see how long it is
        psbt_len = fd.seek(0, 2)
        fd.seek(0)

        # determine encoding used, altho we prefer binary
        taste = fd.read(10)
        fd.seek(0)

        if taste[0:5] == b'psbt\xff':
            print('tastes like binary PSBT')
            decoder = None

            def output_encoder(x):
                return x
        elif taste[0:10] == b'70736274ff':
            print('tastes like hex PSBT')
            decoder = HexStreamer()
            output_encoder = HexWriter
            psbt_len //= 2
        elif taste[0:6] == b'cHNidP':
            print('tastes like Base64 PSBT')
            decoder = Base64Streamer()
            output_encoder = Base64Writer
            psbt_len = (psbt_len * 3 // 4) + 10
        else:
            return

        total = 0
        with SFFile(TXN_INPUT_OFFSET, max_size=psbt_len) as out:
            print('sign 1')
            # blank flash
            await out.erase()
            print('sign 2')

            while 1:
                n = fd.readinto(tmp_buf)
                print('sign copy to SPI flash 1: n={}'.format(n))
                if not n:
                    break

                if n == len(tmp_buf):
                    abuf = tmp_buf
                else:
                    abuf = memoryview(tmp_buf)[0:n]

                if not decoder:
                    out.write(abuf)
                    total += n
                else:
                    for here in decoder.more(abuf):
                        out.write(here)
                        total += len(here)

                print('sign copy to SPI flash 2: {}/{} = {}'.format(total, psbt_len, total/psbt_len))
                dis.progress_bar_show(total / psbt_len)

            print('sign 3')

        # might have been whitespace inflating initial estimate of PSBT size
        assert total <= psbt_len
        psbt_len = total
        print('sign 4')

    # Create a new BytesIO() to hold the result
    async def done(psbt):
        print('sign 5: done')
        signed_bytes = None
        with BytesIO() as bfd:
            with output_encoder(bfd) as fd:
                print('sign 6: done')
                if psbt.is_complete():
                    print('sign 7: done')
                    psbt.finalize(fd)
                    print('sign 8: done')
                else:
                    print('sign 9: done')
                    psbt.serialize(fd)
                    print('sign 10: done')

            bfd.seek(0)
            signed_bytes = bfd.read()
            print('signed_bytes={}'.format(signed_bytes))

        print('sign 11: done')
        gc.collect()

        from ur1.encode_ur import encode_ur
        from ubinascii import hexlify
        signed_str = hexlify(signed_bytes)
        print('signed_str={}'.format(signed_str))

        from ux import DisplayURCode
        o = DisplayURCode('Signed Txn', 'Scan to Wallet', signed_str)
        await o.interact_bare()

        UserAuthorizedAction.cleanup()

    print('sign 12: done')
    UserAuthorizedAction.active_request = ApproveTransaction(psbt_len, approved_cb=done)
    print('sign 13: done')

    # kill any menu stack, and put our thing at the top
    abort_and_goto(UserAuthorizedAction.active_request)
    print('sign 14: done')
cases = [
    # (earlier test cases precede this excerpt)
    ('301b0f000000000017a914e9c3dd0c07aac76179ebc76a6c78d4d67c6c160a87',
     'p2sh', False, 'e9c3dd0c07aac76179ebc76a6c78d4d67c6c160a'),

    # from testnet: a4c89e0ffb84d06a1e62f0f9f0f5974db250878caa1f71f9992a1f865b8ff2fa
    # via <https://github.com/bitcoinjs/bitcoinjs-lib/issues/856>
    ('b88201000000000017a914f0ca58dc8e539421a3cb4a9c22c059973075287c87',
     'p2sh', False, 'f0ca58dc8e539421a3cb4a9c22c059973075287c'),

    # XXX missing: P2SH segwit, 1of1 and N of M
    ('d0f13d0000000000160014f2369bac6d24ed11313fa65adda1971d10e17bff',
     'p2pkh', True, 'f2369bac6d24ed11313fa65adda1971d10e17bff'),
]

for raw_txo, expect_type, expect_sw, expect_hash in cases:
    expect_hash = a2b_hex(expect_hash)

    out = CTxOut()
    out.deserialize(BytesIO(a2b_hex(raw_txo)))

    print("Case: %s... " % raw_txo[0:30])
    addr_type, addr_or_pubkey, is_segwit = out.get_address()

    assert is_segwit == expect_sw, 'wrong segwit'
    assert addr_or_pubkey == expect_hash, 'wrong pubkey/addr'
    assert addr_type == expect_type, addr_type
class Packer(object):
    """
    MessagePack Packer

    usage:

        packer = Packer()
        astream.write(packer.pack(a))
        astream.write(packer.pack(b))

    Packer's constructor has some keyword arguments:

    :param callable default:
        Convert user type to builtin type that Packer supports.
        See also simplejson's document.

    :param bool use_single_float:
        Use single precision float type for float. (default: False)

    :param bool autoreset:
        Reset buffer after each pack and return its content as `bytes`. (default: True).
        If set this to false, use `bytes()` to get content and `.reset()` to clear buffer.

    :param bool use_bin_type:
        Use bin type introduced in msgpack spec 2.0 for bytes.
        It also enables str8 type for unicode.

    :param bool strict_types:
        If set to true, types will be checked to be exact. Derived classes
        from serializeable types will not be serialized and will be
        treated as unsupported type and forwarded to default.
        Additionally tuples will not be serialized as lists.
        This is useful when trying to implement accurate serialization
        for python types.

    :param str encoding:
        (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8')

    :param str unicode_errors:
        Error handler for encoding unicode. (default: 'strict')
    """
    def __init__(self, default=None, encoding=None, unicode_errors=None,
                 use_single_float=False, autoreset=True, use_bin_type=False,
                 strict_types=False):
        if encoding is None:
            encoding = 'utf_8'
        else:
            print("encoding is deprecated, Use raw=False instead.",
                  PendingDeprecationWarning)

        if unicode_errors is None:
            unicode_errors = 'strict'

        self._strict_types = strict_types
        self._use_float = use_single_float
        self._autoreset = autoreset
        self._use_bin_type = use_bin_type
        self._encoding = encoding
        self._unicode_errors = unicode_errors
        self._buffer = StringIO()
        if default is not None:
            if not callable(default):
                raise TypeError("default must be callable")
        self._default = default

    def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT,
              check=isinstance, check_type_strict=_check_type_strict):
        default_used = False
        if self._strict_types:
            check = check_type_strict
            list_types = list
        else:
            list_types = (list, tuple)
        while True:
            if nest_limit < 0:
                raise PackValueError("recursion limit exceeded")
            if obj is None:
                return self._buffer.write(b"\xc0")
            if check(obj, bool):
                if obj:
                    return self._buffer.write(b"\xc3")
                return self._buffer.write(b"\xc2")
            if check(obj, int_types):
                if 0 <= obj < 0x80:
                    return self._buffer.write(struct.pack("B", obj))
                if -0x20 <= obj < 0:
                    return self._buffer.write(struct.pack("b", obj))
                if 0x80 <= obj <= 0xff:
                    return self._buffer.write(struct.pack("BB", 0xcc, obj))
                if -0x80 <= obj < 0:
                    return self._buffer.write(struct.pack(">Bb", 0xd0, obj))
                if 0xff < obj <= 0xffff:
                    return self._buffer.write(struct.pack(">BH", 0xcd, obj))
                if -0x8000 <= obj < -0x80:
                    return self._buffer.write(struct.pack(">Bh", 0xd1, obj))
                if 0xffff < obj <= 0xffffffff:
                    return self._buffer.write(struct.pack(">BI", 0xce, obj))
                if -0x80000000 <= obj < -0x8000:
                    return self._buffer.write(struct.pack(">Bi", 0xd2, obj))
                if 0xffffffff < obj <= 0xffffffffffffffff:
                    return self._buffer.write(struct.pack(">BQ", 0xcf, obj))
                if -0x8000000000000000 <= obj < -0x80000000:
                    return self._buffer.write(struct.pack(">Bq", 0xd3, obj))
                if not default_used and self._default is not None:
                    obj = self._default(obj)
                    default_used = True
                    continue
                raise PackOverflowError("Integer value out of range")
            if check(obj, (bytes, bytearray)):
                n = len(obj)
                if n >= 2**32:
                    raise PackValueError("%s is too large" % type(obj).__name__)
                self._pack_bin_header(n)
                return self._buffer.write(obj)
            if check(obj, Unicode):
                if self._encoding is None:
                    raise TypeError("Can't encode unicode string: "
                                    "no encoding is specified")
                obj = obj.encode(self._encoding, self._unicode_errors)
                n = len(obj)
                if n >= 2**32:
                    raise PackValueError("String is too large")
                self._pack_raw_header(n)
                return self._buffer.write(obj)
            if check(obj, memoryview):
                n = len(obj) * obj.itemsize
                if n >= 2**32:
                    raise PackValueError("Memoryview is too large")
                self._pack_bin_header(n)
                return self._buffer.write(obj)
            if check(obj, float):
                if self._use_float:
                    return self._buffer.write(struct.pack(">Bf", 0xca, obj))
                return self._buffer.write(struct.pack(">Bd", 0xcb, obj))
            if check(obj, ExtType):
                code = obj.code
                data = obj.data
                assert isinstance(code, int)
                assert isinstance(data, bytes)
                L = len(data)
                if L == 1:
                    self._buffer.write(b'\xd4')
                elif L == 2:
                    self._buffer.write(b'\xd5')
                elif L == 4:
                    self._buffer.write(b'\xd6')
                elif L == 8:
                    self._buffer.write(b'\xd7')
                elif L == 16:
                    self._buffer.write(b'\xd8')
                elif L <= 0xff:
                    self._buffer.write(struct.pack(">BB", 0xc7, L))
                elif L <= 0xffff:
                    self._buffer.write(struct.pack(">BH", 0xc8, L))
                else:
                    self._buffer.write(struct.pack(">BI", 0xc9, L))
                self._buffer.write(struct.pack("b", code))
                self._buffer.write(data)
                return
            if check(obj, list_types):
                n = len(obj)
                self._pack_array_header(n)
                for i in xrange(n):
                    self._pack(obj[i], nest_limit - 1)
                return
            if check(obj, dict):
                return self._pack_map_pairs(len(obj), dict_iteritems(obj),
                                            nest_limit - 1)
            if not default_used and self._default is not None:
                obj = self._default(obj)
                default_used = 1
                continue
            raise TypeError("Cannot serialize %r" % (obj, ))

    def pack(self, obj):
        try:
            self._pack(obj)
        except:
            self._buffer = StringIO()  # force reset
            raise
        ret = self._buffer.getvalue()
        if self._autoreset:
            self._buffer = StringIO()
        elif USING_STRINGBUILDER:
            self._buffer = StringIO(ret)
        return ret

    def pack_map_pairs(self, pairs):
        self._pack_map_pairs(len(pairs), pairs)
        ret = self._buffer.getvalue()
        if self._autoreset:
            self._buffer = StringIO()
        elif USING_STRINGBUILDER:
            self._buffer = StringIO(ret)
        return ret

    def pack_array_header(self, n):
        if n >= 2**32:
            raise PackValueError
        self._pack_array_header(n)
        ret = self._buffer.getvalue()
        if self._autoreset:
            self._buffer = StringIO()
        elif USING_STRINGBUILDER:
            self._buffer = StringIO(ret)
        return ret

    def pack_map_header(self, n):
        if n >= 2**32:
            raise PackValueError
        self._pack_map_header(n)
        ret = self._buffer.getvalue()
        if self._autoreset:
            self._buffer = StringIO()
        elif USING_STRINGBUILDER:
            self._buffer = StringIO(ret)
        return ret

    def pack_ext_type(self, typecode, data):
        if not isinstance(typecode, int):
            raise TypeError("typecode must have int type.")
        if not 0 <= typecode <= 127:
            raise ValueError("typecode should be 0-127")
        if not isinstance(data, bytes):
            raise TypeError("data must have bytes type")
        L = len(data)
        if L > 0xffffffff:
            raise PackValueError("Too large data")
        if L == 1:
            self._buffer.write(b'\xd4')
        elif L == 2:
            self._buffer.write(b'\xd5')
        elif L == 4:
            self._buffer.write(b'\xd6')
        elif L == 8:
            self._buffer.write(b'\xd7')
        elif L == 16:
            self._buffer.write(b'\xd8')
        elif L <= 0xff:
            self._buffer.write(b'\xc7' + struct.pack('B', L))
        elif L <= 0xffff:
            self._buffer.write(b'\xc8' + struct.pack('>H', L))
        else:
            self._buffer.write(b'\xc9' + struct.pack('>I', L))
        self._buffer.write(struct.pack('B', typecode))
        self._buffer.write(data)

    def _pack_array_header(self, n):
        if n <= 0x0f:
            return self._buffer.write(struct.pack('B', 0x90 + n))
        if n <= 0xffff:
            return self._buffer.write(struct.pack(">BH", 0xdc, n))
        if n <= 0xffffffff:
            return self._buffer.write(struct.pack(">BI", 0xdd, n))
        raise PackValueError("Array is too large")

    def _pack_map_header(self, n):
        if n <= 0x0f:
            return self._buffer.write(struct.pack('B', 0x80 + n))
        if n <= 0xffff:
            return self._buffer.write(struct.pack(">BH", 0xde, n))
        if n <= 0xffffffff:
            return self._buffer.write(struct.pack(">BI", 0xdf, n))
        raise PackValueError("Dict is too large")

    def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
        self._pack_map_header(n)
        for (k, v) in pairs:
            self._pack(k, nest_limit - 1)
            self._pack(v, nest_limit - 1)

    def _pack_raw_header(self, n):
        if n <= 0x1f:
            self._buffer.write(struct.pack('B', 0xa0 + n))
        elif self._use_bin_type and n <= 0xff:
            self._buffer.write(struct.pack('>BB', 0xd9, n))
        elif n <= 0xffff:
            self._buffer.write(struct.pack(">BH", 0xda, n))
        elif n <= 0xffffffff:
            self._buffer.write(struct.pack(">BI", 0xdb, n))
        else:
            raise PackValueError('Raw is too large')

    def _pack_bin_header(self, n):
        if not self._use_bin_type:
            return self._pack_raw_header(n)
        elif n <= 0xff:
            return self._buffer.write(struct.pack('>BB', 0xc4, n))
        elif n <= 0xffff:
            return self._buffer.write(struct.pack(">BH", 0xc5, n))
        elif n <= 0xffffffff:
            return self._buffer.write(struct.pack(">BI", 0xc6, n))
        else:
            raise PackValueError('Bin is too large')

    def bytes(self):
        return self._buffer.getvalue()

    def reset(self):
        self._buffer = StringIO()
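# A short usage sketch matching the docstring above; assumes the rest of
# the msgpack fallback module (StringIO, struct helpers, type aliases) is
# loaded around the Packer class:

packer = Packer(use_bin_type=True)
print(packer.pack(None))       # b'\xc0'
print(packer.pack(42))         # b'*'  (positive fixint 0x2a)
print(packer.pack([1, 2, 3]))  # b'\x93\x01\x02\x03'  (fixarray)
print(packer.pack({'a': 1}))   # b'\x81\xa1a\x01'  (fixmap + fixstr)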
def load(self, dis=None):
    # Search all slots for any we can read, decrypt that,
    # and pick the newest one (in unlikely case of dups)

    # reset
    self.current.clear()
    self.overrides.clear()
    self.my_pos = 0
    self.is_dirty = 0
    self.capacity = 0

    # 4k, but last 32 bytes are a SHA (itself encrypted)
    global _tmp

    buf = bytearray(4)
    empty = 0
    for pos in SLOTS:
        if dis:
            dis.progress_bar_show((pos - SLOTS.start) / (SLOTS.stop - SLOTS.start))

        gc.collect()

        SF.read(pos, buf)
        if buf[0] == buf[1] == buf[2] == buf[3] == 0xff:
            # erased (probably)
            empty += 1
            continue

        # check if first 2 bytes makes sense for JSON
        aes = self.get_aes(pos)
        chk = aes.copy().cipher(b'{"')

        if chk != buf[0:2]:
            # doesn't look like JSON meant for me
            continue

        # probably good, read it
        chk = sha256()
        aes = aes.cipher
        expect = None

        with SFFile(pos, length=4096, pre_erased=True) as fd:
            for i in range(4096 // 32):
                b = aes(fd.read(32))
                if i != 127:
                    _tmp[i * 32:(i * 32) + 32] = b
                    chk.update(b)
                else:
                    expect = b

        try:
            # verify checksum in last 32 bytes
            assert expect == chk.digest()

            # loads() can't work from a byte array, and converting to
            # bytes here would copy it; better to use file emulation.
            fd = BytesIO(_tmp)
            d = ujson.load(fd)

            self.capacity = fd.seek(0, 1) / 4096  # .tell() is missing
        except:
            # One in 65k or so chance to come here w/ garbage decoded, so
            # not an error.
            continue

        got_age = d.get('_age', 0)
        if got_age > self.current.get('_age', -1):
            # likely winner
            self.current = d
            self.my_pos = pos
            #print("NV: data @ %d w/ age=%d" % (pos, got_age))
        else:
            # stale data seen; clean it up.
            assert self.current['_age'] > 0
            #print("NV: cleanup @ %d" % pos)
            SF.sector_erase(pos)
            SF.wait_done()

    # 4k is a large object, sigh, for us right now. cleanup
    gc.collect()

    # done, if we found something
    if self.my_pos:
        return

    # nothing found.
    self.my_pos = 0
    self.current = self.default_values()

    if empty == len(SLOTS):
        # Whole thing is blank. Bad for plausible deniability. Write 3 slots
        # with garbage. They will be wasted space until it fills.
        blks = list(SLOTS)
        shuffle(blks)

        for pos in blks[0:3]:
            for i in range(0, 4096, 256):
                h = ngu.random.bytes(256)
                SF.wait_done()
                SF.write(pos + i, h)
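# Slot layout as reconstructed from load() above: each 4096-byte slot holds
# 127 AES blocks (bytes 0-4063) of ciphertext for a JSON document, and the
# final 32-byte block (bytes 4064-4095) is the SHA-256 of the decrypted
# payload, itself encrypted. Block i == 127 is that checksum block, which
# is why it is diverted into `expect` instead of _tmp.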
def FromHex(obj, hex_string):
    obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
    return obj
from sffile import SFFile

# NOTE: not a psbt, just a txn
# - 2 ins, 2 outs
unsigned = a2b_hex('0100000002fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f0000000000eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac11000000')

fd = SFFile(0, max_size=65536)
list(fd.erase())
fd.write(b'psbt\xff\x01\x00' + bytes([len(unsigned)]) + unsigned + (b'\0'*8))
psbt_len = fd.tell()

rfd = SFFile(0, psbt_len)

p = psbtObject.read_psbt(rfd)
#p.validate()       # failed because no subpaths; don't care

amt = 600000000
sc = a2b_hex('1976a9141d0f172a0ecb48aee1be1f2687d2963ae33f71a188ac')
outpt2 = a2b_hex('ef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff')

replacement = CTxIn()
replacement.deserialize(BytesIO(outpt2))

digest = p.make_txn_segwit_sighash(0, replacement, amt, sc, 0x01)
print('Got: ' + b2a_hex(digest).decode('ascii'))

assert digest == a2b_hex('c37af31116d1b27caf68aae9e3ac82f1477929014d5b917657d0eb49478cb670')
def parse_section_hdr(self, hdr):
    # Read file name, unpacked size and crypto values out of a section header,
    # but assume we wrote it and don't be flexible or compliant or correct to standard.

    def BB(n):
        return a2b_hex(n.replace(' ', ''))

    fh = BytesIO(hdr)

    def patmatch(pattern, where):
        # search forward, return file obj right after pattern
        pat = BB(pattern)
        pos = where.find(pat)
        if pos == -1:
            raise KeyError(pattern)
        return BytesIO(where[pos+len(pat):])

    # find length part
    rv = patmatch('01 04 06 00 01 09', hdr)
    body_size = read_var64(rv)

    # skip forward to crypto details
    rv = patmatch('07 0b 01 00 01 24 ' + '06 f1 07 01', rv.getvalue())
    crypto_props_len = read_var64(rv)
    start_pos = rv.seek(0, 1)  # .tell() is missing

    first, second = rv.read(2)

    self.rounds_pow = first & 0x3f
    assert first & 0xc0 == 0xc0, "require salt+iv"

    salt_len = ((second >> 4) & 0xf) + 1
    iv_len = (second & 0xf) + 1

    assert salt_len >= 16
    assert iv_len >= 16

    self.salt = rv.read(salt_len)
    self.iv = rv.read(iv_len)
    end_pos = rv.seek(0, 1)  # .tell() is missing
    assert end_pos - start_pos == crypto_props_len, (end_pos, start_pos, crypto_props_len)

    rv = patmatch('01 00 0c', rv.getvalue())
    unpacked_size = read_var64(rv)
    assert rv.read(1) == b'\0'

    rv = patmatch('08 0a 01', rv.getvalue())
    expect_crc = unpack('<L', rv.read(4))[0]
    assert rv.read(1) == b'\0'

    rv = patmatch('05 01 11', rv.getvalue())
    fname_len = read_var64(rv) - 1
    assert rv.read(1) == b'\0'

    # remove also a null at end of string
    fname = decode_utf_16_le(rv.read(fname_len))[:-1]
    assert rv.read(2) == b'\0\0'

    return fname, body_size, unpacked_size, expect_crc
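# parse_section_hdr() leans on read_var64(), which is not shown in this
# snippet. A sketch of a decoder following the 7z variable-length number
# encoding, offered as an assumption about what read_var64 does rather than
# a copy of it: the high bits of the first byte say how many extra
# little-endian bytes follow.

def read_var64_sketch(fd):
    first = fd.read(1)[0]
    mask = 0x80
    value = 0
    for i in range(8):
        if not (first & mask):
            # remaining low bits of the first byte become the top bits
            value |= (first & (mask - 1)) << (8 * i)
            break
        value |= fd.read(1)[0] << (8 * i)
        mask >>= 1
    return value

# e.g. 0x7f stays one byte; 0x80 0xff decodes to 255
from io import BytesIO
print(read_var64_sketch(BytesIO(b'\x7f')))      # 127
print(read_var64_sketch(BytesIO(b'\x80\xff')))  # 255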