def add_global_chunck():
    # NOTE(review): this looks like a stray top-level duplicate of the
    # nested helper defined inside serialize_globals() — `nonlocal` is only
    # legal inside a nested function, so as a top-level def this is a
    # SyntaxError. Confirm whether this copy should simply be deleted.
    # Flush the accumulated global values into a new chunk:
    # chunk = [reused partial chunk] + [kind + 4-byte count header] + values.
    nonlocal current_chunk, kind, quantity_globals, vals_ser
    dbgprint(f'making chunk for #{quantity_globals} values')
    header = kind + int2bytes(quantity_globals, 4).hex()
    chunks.append(current_chunk + header + vals_ser)
    dbgprint(f'chunk idx {len(chunks) -1} chunk {len(chunks[-1])}')
    assert len(chunks[-1]) <= max_space, f'failed for chunk idx {len(chunks) -1 } #{len(chunks[-1])}'
def step_over(self, expr: Union[Expr, DebugSession, None] = None) -> DebugSession:
    """Step over the expression at the current (or given) program counter.

    Compound expressions (call/loop/block/if/else) are skipped by running
    the device until their end; anything else falls back to a single step.
    Returns the new DebugSession, or None when the device is not connected.
    """
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    # Resolve the expression to step over.
    if expr is None:
        expr = self.changes_handler.session.pc
    elif isinstance(expr, DebugSession):
        expr = expr.pc
    if expr.exp_type not in ('call', 'loop', 'block', 'if', 'else'):
        dbgprint("doing regular step")
        return self.step()
    # For a call, stop at the next instruction; otherwise at the
    # expression's matching end.
    end = expr.code.next_instr(expr) if expr.exp_type == 'call' else expr.code.end_expr(expr)
    dbgprint(f"end instruction of {expr} is {end}")
    if not self.device.step_until(end.addr):
        dbgprint(f'could not stepover {expr}')
    dbgprint(f'stepped until {end}')
    raw_state = self.device.get_execution_state()
    session = DebugSession.from_json(raw_state, self.module, self.device)
    self.changes_handler.add(session)
    return session
def serialize_first_msg(session):
    """Serialize the leading message of a session transfer: the headers
    describing the globals, table and memory sections (headers only, the
    payload data is sent in later messages)."""
    # -- globals header: kind + number of globals (4 bytes, hex) --
    gls = session['globals']
    kind_globals = KINDS['globalsState']
    quantity_globals = int2bytes(len(gls), 4).hex()
    globals_ser = kind_globals + quantity_globals
    dbgprint(f"serializeing globals {quantity_globals}")
    # -- table header: kind + init + max + element count (4 bytes each) --
    tbl = session['table']
    kind_table = KINDS['tblState']
    tblinit_ser = int2bytes(tbl.get('init', 0), 4).hex()
    tblmax_ser = int2bytes(tbl.get('max', 0), 4).hex()
    tbl_size = int2bytes(len(tbl['elements']), 4).hex()
    tbl_ser = kind_table + tblinit_ser + tblmax_ser + tbl_size
    # -- memory header: kind + max + init + current pages --
    # NOTE(review): the table header above is serialized init-then-max but
    # the memory header below is max-then-init; confirm against the wire
    # protocol that this asymmetry is intentional.
    kind_mem = KINDS['memState']
    mem = session['memory']
    minit_ser = int2bytes(mem.get('init', 0), 4).hex()
    mmax_ser = int2bytes(mem.get('max', 0), 4).hex()
    currentp_ser = int2bytes(mem['pages'], 4).hex()
    mem_ser = kind_mem + mmax_ser + minit_ser + currentp_ser
    return globals_ser + tbl_ser + mem_ser
def receive_rmvbp(wood, aMedium) -> bool:
    """Consume the device's 'BP <addr>!' acknowledgement after a
    breakpoint removal. Always returns True."""
    dbgprint("receive rmvbp")
    terminator = b'!\n'
    aMedium.recv_until(b'BP ')  # discard everything up to the marker
    payload = aMedium.recv_until(terminator)[:-len(terminator)]
    dbgprint(f"removed bp {payload.decode()}")
    return True
def recv_until(self,
               until: Union[List[bytes], bytes],
               event: bool = False,
               wait: bool = True,
               timeout: bool = False) -> bytes:
    """Receive bytes from the (event) socket until one of the delimiters is seen.

    :param until: a single delimiter or a list of delimiters to scan for.
    :param event: read from the event socket instead of the main socket.
    :param wait: keep reading until a delimiter arrives; when False only
                 already-buffered data is inspected.
    :param timeout: forwarded to the underlying socket recv call.
    :returns: the bytes up to and including the matched delimiter, or b''
              when the connection is closed or nothing matched without waiting.
    """
    aSocket = self.event_socket if event else self.socket
    _untils = [until] if isinstance(until, bytes) else until
    # First try to satisfy the request from data buffered earlier.
    if len(aSocket.recvbuff) > 0:
        for u in _untils:
            _bytes = aSocket.pop_until(u)
            if _bytes is not None:
                return _bytes
    while wait:
        if not aSocket.connected:
            return b''
        _bytes = aSocket.recv(self.recvbuff_size, timeout=timeout)
        if len(_bytes) == 0:
            # A zero-byte read means the peer closed the connection.
            print("closing connection")
            dbgprint("connection closed")
            aSocket.close()
            return b''
        aSocket.add_bytes(_bytes)
        for u in _untils:
            _bytes = aSocket.pop_until(u)
            if _bytes is not None:
                return _bytes
    # Bug fix: previously fell off the end and implicitly returned None
    # (violating the declared -> bytes) when wait=False and no delimiter
    # was found in the buffered data.
    return b''
def receive_until_ack(wood, aMedium) -> bool:
    """Consume the device's acknowledgement of an 'until <pc>' request,
    including the trailing 'STEP DONE!' line. Always returns True."""
    dbgprint("receive until pc")
    terminator = b'!\n'
    aMedium.recv_until(b'Until ')  # skip up to the marker
    pc_bytes = aMedium.recv_until(terminator)[:-len(terminator)]
    dbgprint(f"ack until pc {pc_bytes.decode()}")
    aMedium.recv_until(b'STEP DONE!\n')
    return True
def serialize_callstack(callstack, chunks, max_space):
    """Serialize the call stack into `chunks`, packing as many frames per
    chunk as `max_space` (in hex chars) allows. Each chunk is:
    kind + frame count (2 bytes) + serialized frames; the last existing
    chunk's free space is reused when possible."""
    dbgprint(f'serialzing #{len(callstack)} frames')
    kind = KINDS['callstackState']
    header_len = len(kind) + 4  # 4 chars for quantity stack values
    # Continue filling the previous chunk if it still has room.
    current_chunk = chunks.pop(-1) if chunks and len(chunks[-1]) < max_space else ""
    quantity = 0
    frames_ser = ""
    # dbgprint(f"chunck used #{len(current_chunk)} {current_chunk}")
    dbgframes = []

    def chunk_callstack():
        # Emit the frames accumulated so far as one chunk.
        nonlocal kind, quantity, frames_ser, current_chunk
        quantity_ser = int2bytes(quantity, 2).hex()
        header = kind + quantity_ser
        # dbgprint(f"chunk for #{quantity} frames")
        chunks.append(current_chunk + header + frames_ser)
        # for i,f in enumerate(dbgframes):
        #     dbgprint(f'i:{i} frame {f}')

    for frame in callstack:
        # Fixed part of a frame: type (1 byte) + sp/fp (signed, 4 bytes
        # each) + return-address pointer.
        type_ser = int2bytes(frame['type'], 1).hex()
        sp_ser = signedint2bytes(frame['sp'], 4).hex()
        fp_ser = signedint2bytes(frame['fp'], 4).hex()
        (retaddr_ser, _) = serialize_pointer(frame['ra'])
        frame_ser = type_ser + sp_ser + fp_ser + retaddr_ser
        if isfunc_type(frame):
            # Function frames additionally carry the function index.
            fid = int(frame['fidx'], 16)  #FIXME do we really get hex at this level?
            funcid_ser = int2bytes(fid, 4).hex()
            frame_ser += funcid_ser
        else:
            # Non-function frames carry the block address instead.
            (blockaddr_ser, _) = serialize_pointer(frame['block_key'])
            frame_ser += blockaddr_ser
        #dbgprint(f'frame {frame} - {frame_ser}')
        # Would adding this frame overflow the current chunk?
        if max_space < len(current_chunk) + header_len + len(frames_ser) + len(frame_ser):
            if frames_ser != '':
                # Flush the pending frames, then start a fresh chunk.
                # dbgprint("FRAMES_CARL in the TRue")
                chunk_callstack()
            else:
                # No pending frames: the reused partial chunk alone is full.
                # dbgprint("FRAMES_CARL in the else")
                chunks.append(current_chunk)
            quantity = 0
            frames_ser = ""
            current_chunk = ""
            dbgframes = []
        # dbgprint(f'adding one frame of #{len(frame_ser)}')
        dbgframes.append(frame)
        quantity += 1
        frames_ser += frame_ser

    # Flush whatever is left over after the loop.
    if frames_ser != "":
        # dbgprint("FRAMES_CARL in the outer true")
        chunk_callstack()
    elif current_chunk != "":
        # dbgprint("FRAMES_CARL in the outer elif")
        chunks.append(current_chunk)
def run(self) -> None:
    """Resume execution on the remote device."""
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    # self.__update_session() #TODO uncomment
    if not self.device.run():
        dbgprint(f'device {self.device.name} failed to run')
        return
    infoprint(f'`{self.device.name}` is running')
def pause(self) -> None:
    """Pause execution on the remote device."""
    #TODO ask for debug session
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    if not self.device.pause():
        dbgprint(f'device {self.device.name} failed to pause')
        return
    infoprint(f'`{self.device.name}` is paused')
def serialize_pc(pc_addr, chunks, max_space):
    """Serialize the program counter pointer and append it to `chunks`."""
    #TODO add padding to the pointer to make even chars
    #dbgprint(f"serialie_pc: {pc_addr}")
    dbgprint(f"serialize {pc_addr}")
    kind = KINDS['pcState']
    pointer_ser, _ = serialize_pointer(pc_addr)
    pc_ser = kind + pointer_ser
    dbgprint(f"pc_ser - {kind} {pointer_ser}")
    add_in_chunks(chunks, pc_ser, max_space)
def get_execution_state(self):
    """Request a full state dump from the device, track the module load
    offset, and return the dump converted to WA state."""
    dump_msg = AMessage(Interrupts['dump'] + '\n', receive_dump)
    _dumpjson = self.medium.send(dump_msg)
    dbgprint(f'the dumpjson {_dumpjson}')
    reported_offset = _dumpjson['start'][0]
    if self.offset != reported_offset:
        dbgprint('new offset')
        self.offset = reported_offset
    return wood_state_to_wa_state(_dumpjson)
def step(self, amount: int = 1) -> DebugSession:
    """Step the device `amount` instructions and record the new session.

    Returns the new DebugSession, or None when not connected or the
    device refuses to step.
    """
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    self.__update_session()
    if not self.device.step(amount):
        return
    raw_state = self.device.get_execution_state()
    session = DebugSession.from_json(raw_state, self.module, self.device)
    self.changes_handler.add(session)
    infoprint("stepped")
    return session
def add_breakpoint(self, expr: Union[Expr, int]) -> None:
    """Set a breakpoint at `expr` (an expression or a line number)."""
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    if isinstance(expr, int):
        # Resolve a line number to its (single) expression.
        [expr] = self.module.linenr(expr)
    if not self.device.add_breakpoint(expr.addr):
        dbgprint(f'failed to add breakpoint {expr}')
        return
    # infoprint(f"added breakpoint at {expr}")
    infoprint(f"added breakpoint")
    self.breakpoints.append(expr.copy())
def remove_breakpoint(self, inst: Union[Expr, int]) -> None:
    """Remove the breakpoint at `inst` (an expression or a line number)."""
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    if isinstance(inst, int):
        # Resolve a line number to its (single) expression.
        [inst] = self.module.linenr(inst)
    if not self.device.remove_breakpoint(inst.addr):
        infoprint(f"could not remove breakpoint {inst}")
        return
    infoprint(f'breakpoint {inst} removed')
    self.breakpoints = [bp for bp in self.breakpoints if bp.addr != inst.addr]
def __update_session(self, debugsess: Union[DebugSession, None] = None) -> None:
    """Push pending modifications of the given (or current) session to
    the device; no-op when there is nothing to update."""
    ds = debugsess if debugsess is not None else self.changes_handler.session
    if ds is None or not ds.modified:
        return
    upd = ds.get_update()
    if not upd.valid:
        dbgprint("invalid change")
        return
    self.changes_handler.add(upd)
    #TODO other update
    self.receive_session(upd)
def __handle_event(self, event: dict) -> None:
    """Dispatch a device event.

    Handles 'at bp' (capture a debug session and apply the configured
    breakpoint policies), 'disconnection' and 'error' events; anything
    else is reported via errprint.
    """
    ev = event['event']
    if ev == 'at bp':
        dbgprint(
            f"reached breakpoint {self.module.addr(event['breakpoint'])}")
        self.__reachedbp = True
        self.debug_session()
        if 'single-stop' in self.policies:
            # Fixed message typo: mismatched `single-stop' quote pair.
            infoprint(f"enforcing `single-stop` to `{self.device.name}`")
            # Iterate over a copy: remove_breakpoint rebinds self.breakpoints.
            for bp in list(self.breakpoints):
                self.remove_breakpoint(bp)
            self.run()
        elif 'remove-and-proceed' in self.policies:
            dbgprint(
                f"applying `remove-and-proceed` policy to `{self.device.name}`"
            )
            expr = self.module.addr(event['breakpoint'])
            dbgprint(f"the expr {expr}")
            self.remove_breakpoint(expr)
            self.run()
    elif ev == 'disconnection':
        dbgprint(f'device {self.device.name} disconnected')
    elif ev == 'error':
        # Fixed message typos: "occured a device" -> "occurred at device",
        # and the unbalanced backtick around the device name.
        infoprint(f"error occurred at device `{self.device.name}`")
        _sess = DebugSession.from_json(event['execution_state'],
                                       self.module, self.device)
        _sess.exception = event['msg']
        # end = event['time'].monotonic()
        # self.register_measure(event['start_time'], end, _sess)
        self.changes_handler.add(_sess)
    else:
        errprint('not understood event occurred')
def commit(self, mod: Union[WAModule, None] = None):
    """Compile and push a module update to the device.

    With `mod`, compiles that module and makes it current; otherwise
    commits the pending changes from the changes handler.
    """
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    if mod is None:
        wasm = self.__changeshandler.commit()
    else:
        wasm = mod.compile()
        self.module = mod
        self.__changeshandler.module = mod
    if self.device.commit(wasm):
        infoprint('Module Updated')
def receive_session(self, session: dict) -> bool:
    """Serialize `session` into WOOD state and send it to the device in
    chunks; returns the device's final reply (truthy on success)."""
    recv_int = Interrupts['receivesession']
    wood_state = wa_state_to_wood_state(session, self.offset)
    dbgprint(f"State to send {wood_state}")
    sers = encoder.serialize_session(wood_state, recv_int, self.max_bytes)
    l = len(sers)
    assert l >= 2, f'at least two expected got {l}'
    msgs = []
    for idx, content in enumerate(sers):
        # The last chunk expects the 'session done' reply; earlier ones an ack.
        handler = receive_done_session if (idx + 1) == l else receive_ack
        msgs.append(AMessage(content + '\n', handler))
    dbgprint(f"about to send #{len(msgs)}")
    replies = self.medium.send(msgs)
    return replies[-1]
def old_bp_addr_helper(offset, code_addr):
    """Compute a breakpoint address (offset + code address) and the
    hex-encoded byte length of that address, padded to two hex digits."""
    bp_addr = util.sum_hexs([offset, code_addr])  # remove '0x'
    amount_chars = math.floor(len(bp_addr[2:]) / 2)
    if amount_chars % 2 != 0:
        dbgprint("WARNING: breakpoint address is not even addr")
        dbgprint(
            f"offset {offset} code_addr {code_addr} chars {amount_chars} calculated addr: {bp_addr}"
        )
    _hex = hex(amount_chars)
    if int(_hex[2:], 16) < 16:
        # Pad the length to two hex digits.
        _hex = '0x0' + _hex[2:]
    return (_hex, bp_addr)
def upload(self, wasm: bytes, config: dict) -> None:
    """Upload a wasm module plus proxy `config` to the device, chunked
    into max_bytes-sized messages (stores config in the module-level
    proxy_config)."""
    global proxy_config
    proxy_config = config
    interrupt = Interrupts['updateModule']
    sers = encoder.serialize_wasm(interrupt, wasm, self.max_bytes)
    total = len(sers)
    msgs = [AMessage(interrupt + '\n', receive_ack)]
    for idx, content in enumerate(sers):
        # The last chunk expects the 'upload done' reply; earlier ones an ack.
        handler = receive_uploaddone if (idx + 1) == total else receive_ack
        dbgprint(f'#{len(content) + 1} Content {content}')
        msgs.append(AMessage(content + '\n', handler))
    for msg in msgs:
        self.medium.send(msg)
def receive_events(wood: WOODManager, aMedium: AMedium, callback: callable) -> None: import time #TODO remove at_start = b'AT ' at_end = b'!\n' err_start = b'{"error":' err_end = b'}\n' timeout = float(0.1) while True: if not aMedium.has_event(timeout): continue #input has been received _start = aMedium.recv_until([at_start, err_start], event=True) _end = aMedium.recv_until([at_end, err_end], event=True) if not aMedium.connected: wood.connected = False callback({'event': 'disconnection'}) break if _start.find(at_start) >= 0: # print("at bp ") _bp = _end[:-len(at_end)].decode() bp = hex(int(_bp, 16) - int(wood.offset, 16)) callback({'event': 'at bp', 'breakpoint': bp}) else: start = time.monotonic() _dump = receive_dump(wood, aMedium, ignore_prev_hash=False) if _dump is None: continue _bytes = err_start + _end[:-len(b'\n')] _obj = json.loads(_bytes.decode()) _event = { 'event': 'error', 'msg': _obj['error'], 'start_time': start, 'time': time } _dump['session_size'] = _dump['session_size'] + len( _bytes) # TODO remove _event['execution_state'] = wood_state_to_wa_state(_dump) callback(_event) dbgprint("stopping event thread") wood.stopEventThread()
def debug_session(self) -> DebugSession:
    """Capture the device's current execution state as a DebugSession,
    register the timing measurement and record the session."""
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    started = time.monotonic()
    raw_state = self.device.get_execution_state()
    session = DebugSession.from_json(raw_state, self.module, self.device)
    finished = time.monotonic()
    self.register_measure(started, finished, session)
    self.changes_handler.add(session)
    return session
def commit(self, wasm) -> bool:
    """Send a chunked module update to the device; on success, refresh
    the module load offset. Returns the device's final reply."""
    interrupt = Interrupts['updateModule']
    sers = encoder.serialize_wasm(interrupt, wasm, self.max_bytes)
    total = len(sers)
    msgs = [AMessage(interrupt + '\n', receive_ack)]
    for idx, content in enumerate(sers):
        # The last chunk expects the 'commit done' reply; earlier ones an ack.
        handler = receive_commitdone if (idx + 1) == total else receive_ack
        dbgprint(f'#{len(content) + 1} Content {content}')
        msgs.append(AMessage(content + '\n', handler))
    replies = self.medium.send(msgs)
    succ = replies[-1]
    if succ:
        # A new module image may be loaded at a different offset.
        self.offset = self.__ask_for_offset()
        dbgprint(f"new offset post commit {self.offset}")
    return succ
def serialize_memory(memory, chunks, max_space):
    """Serialize linear memory into `chunks`, splitting the byte range
    over as many chunks as needed to respect `max_space` (hex chars)."""
    #output of the form
    # Header                              |
    # ------------------------------------|
    # 1 byte | 4 bytes      | 4 bytes     | bytes size = end offset - begin offset + 1
    #| memory | begin offset | end offset | bytes ...
    dbgprint(f'serializing memory #{len(memory["bytes"])} bytes')
    #TODO replace total, with the use of pages
    header_bytes = 9
    header_len = header_bytes * 2  # header length in hex chars
    memcell_len = 1 * 2  #1 byte per memory cell
    if memory['pages'] == 0:
        # Nothing to serialize for a memory with zero pages.
        return
    current_chunk = ""
    if chunks and (len(chunks[-1]) + header_len + memcell_len) <= max_space:
        #space for at least 1 memory cells in previous chunk
        # dbgprint(f'using exiting chunck')
        current_chunk = chunks.pop(-1)
    begin_off = 0
    end_off = 0
    mem_bytes = memory['bytes']
    # mem_bytes = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x11\x11\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
    total_bytes = len(mem_bytes)
    # NOTE(review): assumes max_space always leaves room for at least one
    # memory cell per iteration; if free_space came out <= 0 this loop
    # would never terminate — confirm max_space is large enough upstream.
    while end_off < total_bytes:
        # How many whole cells fit in the remaining space of this chunk.
        free_space = (max_space - len(current_chunk) - header_len) // memcell_len
        end_off = end_off + free_space
        if end_off > total_bytes:
            end_off = total_bytes
        _bytes = mem_bytes[begin_off : end_off]
        bytes_ser = _bytes.hex()
        beg_ser = int2bytes(begin_off, quantity_bytes=4).hex()
        # end offset is inclusive, hence the -1.
        end_ser = int2bytes(begin_off + (len(_bytes) - 1), quantity_bytes=4).hex()
        header = KINDS['memState'] + beg_ser + end_ser
        chunks.append(current_chunk + header + bytes_ser)
        assert len(chunks[-1]) <= max_space
        current_chunk = ""
        begin_off = end_off
def upload_proxies(self, proxy: Union[None, dict, List[str]] = None) -> None:
    """Send the proxy configuration to the device.

    :param proxy: a full config dict, a list of proxy names (merged with
        the stored host/port), or None to resend the stored configuration.
        A no-op when nothing is configured or the device is disconnected.
    """
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    if proxy is None:
        proxy = self.__proxy_config
    if proxy is None:
        return  # nothing configured yet
    if isinstance(proxy, dict):
        cleaned = self.validate_proxyconfig(self.module, proxy)
        # Bug fix: send the validated config — previously the raw,
        # unvalidated dict was sent while the cleaned one was stored,
        # inconsistent with the list branch below.
        self.device.send_proxies(cleaned)
        self.__proxy_config = cleaned
    elif isinstance(proxy, list):
        # Merge the proxy list with the stored host/port.
        config = {
            'host': self.__proxy_config['host'],
            'port': self.__proxy_config['port'],
            'proxy': proxy,
        }
        cleaned = self.validate_proxyconfig(self.module, config)
        self.device.send_proxies(cleaned)
        self.__proxy_config = cleaned
def upload_module(self, mod: WAModule, config: Union[dict, None] = None, proxy: Union[List[str], None] = None) -> None:
    """Compile `mod` and upload it with a proxy configuration.

    With neither `config` nor `proxy` given, falls back to commit().
    When only `proxy` is given, it is merged with the stored host/port.
    """
    if not self.device.connected:
        dbgprint(f'First connect to {self.device.name}')
        return
    if config is None and proxy is None:
        return self.commit(mod)
    if config is None:
        config = {
            'host': self.__proxy_config['host'],
            'port': self.__proxy_config['port'],
            'proxy': [] if proxy is None else proxy,
        }
    cleaned_config = self.validate_proxyconfig(mod, config)
    wasm = mod.compile()
    self.device.upload(wasm, cleaned_config)
    self.__proxy_config = cleaned_config
def receive_dump_helper(sock, ignore_prev_hash=True):
    """Read a state dump from `sock` and parse it into a dict.

    The large 'elements', 'bytes' and 'labels' arrays are pulled out of
    the byte stream separately so the (smaller) remainder can be
    JSON-decoded; the extracted arrays are then spliced back in.
    Returns None when ignore_prev_hash is False and the dump hashes
    identically to the previously received one.
    Raises ValueError when the dump is not valid UTF-8.
    """
    global prev_h
    _noise = sock.recv_until(b'DUMP!\n')
    raw_end = b']}'
    re_len = len(raw_end)
    json_bytes = b''
    json_bytes += sock.recv_until(b'"elements":[') + raw_end
    elements = sock.recv_until(raw_end)[:-re_len]
    json_bytes += sock.recv_until(b'"bytes":[') + raw_end
    membytes = sock.recv_until(raw_end)[:-re_len]  # was a magic -2; use re_len
    json_bytes += sock.recv_until(b'"labels":[') + raw_end
    labels = sock.recv_until(raw_end)[:-re_len]
    json_bytes += sock.recv_until(b'\n')[:-len(b'\n')]
    try:
        dec = json_bytes.decode()
    except UnicodeDecodeError:  # was a bare except; narrowed to what decode raises
        print(f"failed for raw {json_bytes}")
        raise ValueError("something wrong")
    if not ignore_prev_hash:
        # Skip dumps identical to the previously received one.
        h = hash(json_bytes)
        if prev_h == h:
            dbgprint("Ignoring Received session")
            return None
        prev_h = h
    dbgprint(f'bytes {dec}')
    parsed = json.loads(dec)
    parsed['memory']['bytes'] = membytes
    parsed['table']['elements'] = bytes2int(elements)
    br_tbl = parsed['br_table']
    br_tbl['size'] = int(br_tbl['size'], 16)
    br_tbl['labels'] = bytes2int(labels)
    parsed['session_size'] = len(json_bytes)  # TODO remove
    return parsed
def same_signature(type1: Type, type2: Type) -> bool:
    """Return True when two function types have identical parameter and
    result lists (results may be None on either side)."""
    dbgprint(f"comparing {type1} with {type2}")
    if len(type1.parameters) != len(type2.parameters):
        return False
    if any(p1 != p2 for p1, p2 in zip(type1.parameters, type2.parameters)):
        return False
    if type1.results is None or type2.results is None:
        # They match only if both sides have no results.
        return type1.results is None and type2.results is None
    # Both result lists present: compare length, then element-wise.
    if len(type1.results) != len(type2.results):
        return False
    return all(r1 == r2 for r1, r2 in zip(type1.results, type2.results))
def serialize_breakpoints(bps, chunks, max_space):
    """Serialize the breakpoint list into `chunks`: each chunk is
    kind + breakpoint count (1 byte) + serialized pointers; the last
    existing chunk's free space is reused when possible."""
    dbgprint(f'serializing bps {bps}')
    kind = KINDS['bpsState']
    header_len = len(kind) + 2  # 2 chars needed to express quantity of breakpoints
    # Continue filling the previous chunk if it still has room.
    current_chunk = chunks.pop(-1) if chunks and len(chunks[-1]) < max_space else ""
    bps_ser = ""
    quantity_bps = 0

    def add_chunk():
        # Flush the accumulated breakpoints as one chunk.
        nonlocal kind, quantity_bps, bps_ser, current_chunk
        header = kind + int2bytes(quantity_bps, 1).hex()
        #dbgprint(f'making chunk for #{quantity_bps} bps')
        #dbgprint(f'header {header} - {bps_ser}')
        #dbgprint(f'current_chunk {current_chunk}')
        chunks.append(current_chunk + header + bps_ser)

    for bp in bps:
        (_serbp, _) = serialize_pointer(bp)
        # Would adding this breakpoint overflow the current chunk?
        if max_space < len(current_chunk) + header_len + len(bps_ser) + len(_serbp):
            #dbgprint(f'call whitin for add_chunk: bp not added yet {bp} with ser {_serbp}')
            if bps_ser != '':
                add_chunk()
            else:
                # No pending breakpoints: the reused partial chunk alone is full.
                #dbgprint("in the else")
                chunks.append(current_chunk)
            quantity_bps = 0
            bps_ser = ""
            current_chunk = ""
        quantity_bps += 1
        bps_ser += _serbp

    # Flush whatever is left over after the loop.
    if bps_ser != "":
        #dbgprint("breakpoints_ser: call from outsite for")
        add_chunk()
    elif current_chunk != "":
        chunks.append(current_chunk)
def serialize_globals(vals, chunks, max_space):
    """Serialize the global values into `chunks`: each chunk is
    kind + globals count (4 bytes) + serialized values; the last existing
    chunk's free space is reused when possible."""
    #'globals': [{'idx': 0, 'type': 'i32', 'value': 0}, {'idx': 1, 'type': 'i32', 'value': 0}, {'idx': 2, 'type': 'i32', 'value': 88}],
    # dbgprint(f'serializing globals #{len(vals)} - globals - {vals}')
    kind = KINDS['globalsState']
    header_len = len(kind) + 8  # 8 chars for quantity globals values
    # Continue filling the previous chunk if it still has room.
    current_chunk = chunks.pop(-1) if chunks and len(chunks[-1]) < max_space else ""
    quantity_globals = 0
    vals_ser = ""
    # dbgprint(f"chunck used #{len(current_chunk)}")

    def add_global_chunck():
        # Flush the accumulated global values as one chunk.
        nonlocal current_chunk, kind, quantity_globals, vals_ser
        dbgprint(f'making chunk for #{quantity_globals} values')
        header = kind + int2bytes(quantity_globals, 4).hex()
        chunks.append(current_chunk + header + vals_ser)
        dbgprint(f'chunk idx {len(chunks) -1} chunk {len(chunks[-1])}')
        assert len(chunks[-1]) <= max_space, f'failed for chunk idx {len(chunks) -1 } #{len(chunks[-1])}'

    for vobj in vals:
        _serval = serialize_stackValue(vobj)
        # Would adding this value overflow the current chunk?
        if max_space < len(current_chunk) + header_len + len(vals_ser) + len(_serval):
            if vals_ser != '':
                add_global_chunck()
            else:
                # No pending values: the reused partial chunk alone is full.
                dbgprint("in the else")
                chunks.append(current_chunk)
            quantity_globals = 0
            vals_ser = ""
            current_chunk = ""
        quantity_globals += 1
        vals_ser += _serval

    # Flush whatever is left over after the loop.
    if vals_ser != "":
        add_global_chunck()
    elif current_chunk != "":
        dbgprint("in the elif")
        chunks.append(current_chunk)