def qdump__arma__Mat(d, value):
    """Dumps an Armadillo matrix (arma::Mat<T>) as columns of elements.

    Shows n_cols children; each expands into up to 50 of its rows.
    """
    # BUG FIX: the original read value["mem_local”] with a trailing curly
    # quote instead of '"', which is a SyntaxError.
    array = value["mem_local"]
    cols = value["n_cols"]
    rows = value["n_rows"]
    maxDisplayItems = 50
    innerType = d.templateArgument(value.type, 0)
    p = gdb.Value(array.cast(innerType.pointer()))
    d.putItemCount(cols)
    d.putNumChild(cols)
    if d.isExpanded():
        numDisplayItems = min(maxDisplayItems, cols)
        # BUG FIX: addrStep was `p.dereference().__sizeof__` -- an uncalled
        # bound method, and __sizeof__ would report the Python wrapper's
        # size anyway.  Use the C element size, as the other helpers do.
        elemSize = innerType.size()
        with dumper.Children(d, numChild=cols, maxNumChild=numDisplayItems,
                             childType="<column>", addrBase=p,
                             addrStep=elemSize):
            for i in range(0, int(numDisplayItems)):
                with dumper.Children(d):
                    d.putItemCount(rows)
                    d.putNumChild(rows)
                    if d.isExpanded():
                        numDisplayItems = min(maxDisplayItems, rows)
                        with dumper.Children(d, numChild=rows,
                                             maxNumChild=numDisplayItems,
                                             childType=innerType,
                                             addrBase=p, addrStep=elemSize):
                            for j in range(0, int(numDisplayItems)):
                                d.putSubItem(j, p.dereference())
                                # Column-major storage: advance one element.
                                p += 1
def unordered_map_helper(d, value, is_set=False):
    """Renders the hash table that backs HashSet and HashMap."""
    count = value["__table_"]["__p2_"].split("pp")[0]
    d.putItemCount(count)
    key_t = value.type[0]
    val_t = value.type[1]
    pair_t = value.type[4][0]
    if not d.isExpanded():
        return
    head = value["__table_"]["__p1_"].split("pp")[0]
    node_fmt = "pp@{%s}" % pair_t.name
    pair_fmt = "{%s}@{%s}" % (key_t.name, val_t.name)

    def walk(node):
        # Follow the singly linked node chain, yielding (key, value) pairs.
        while node:
            parts = d.split(node_fmt, node)
            yield parts[3].split(pair_fmt)[::2]
            node = parts[0]

    with dumper.Children(d, count, childType=key_t, maxNumChild=1000):
        for idx, kv in zip(d.childRange(), walk(head)):
            if is_set:
                d.putSubItem(idx, kv[0])
            else:
                d.putPairItem(idx, kv, 'key', 'value')
def deque_helper_libstd(d, value, name_fn=None):
    """Dumps a libstdc++ std::deque.

    name_fn, if given, maps (index, size) to a child's display name;
    otherwise the index itself is used.
    """
    innerType = value.type[0]
    innerSize = innerType.size()
    # libstdc++ packs elements into fixed buffers of at most 512 bytes.
    bufsize = 1
    if innerSize < 512:
        bufsize = 512 // innerSize
    (mapptr, mapsize, startCur, startFirst, startLast, startNode,
     finishCur, finishFirst, finishLast, finishNode) = value.split("pppppppppp")
    # Whole interior buffers plus the partial first and last buffers.
    size = bufsize * ((finishNode - startNode) // d.ptrSize() - 1)
    size += (finishCur - finishFirst) // innerSize
    size += (startLast - startCur) // innerSize
    d.check(0 <= size and size <= 1000 * 1000 * 1000)
    d.putItemCount(size)
    if d.isExpanded():
        with dumper.Children(d, size, maxNumChild=2000, childType=innerType):
            pcur = startCur
            plast = startLast
            pnode = startNode
            for i in d.childRange():
                name = name_fn(i, size) if name_fn is not None else i
                d.putSubItem(name, d.createValue(pcur, innerType))
                pcur += innerSize
                if pcur == plast:
                    # Hop to the next buffer in the node map.
                    newnode = pnode + d.ptrSize()
                    pfirst = d.extractPointer(newnode)
                    # BUG FIX: a buffer holds `bufsize` *elements* of
                    # innerSize bytes, so its end is pfirst + bufsize *
                    # innerSize.  The old code used d.ptrSize() and missed
                    # the boundary whenever innerSize != pointer size,
                    # walking past the buffer into unrelated memory.
                    plast = pfirst + bufsize * innerSize
                    pcur = pfirst
                    pnode = newnode
def deque_helper_libstd(d, value, elem_fn):
    """Dumps the deque for containers of bools or Queue for libstdc++.

    elem_fn(d, index, size, element_value) is invoked for each element.
    """
    inner_type = value.type[0]
    inner_size = inner_type.size()
    # libstdc++ packs elements into fixed buffers of at most 512 bytes.
    buf_size = 1
    if inner_size < 512:
        buf_size = 512 // inner_size
    (mptr, msize, start_cur, start_first, start_last, start_node,
     finish_cur, finish_first, finish_last, finish_node) = value.split("pppppppppp")
    # Whole interior buffers plus the partial first and last buffers.
    size = buf_size * ((finish_node - start_node) // d.ptrSize() - 1)
    size += (finish_cur - finish_first) // inner_size
    size += (start_last - start_cur) // inner_size
    d.check(0 <= size and size <= 1000 * 1000 * 1000)
    d.putItemCount(size)
    if d.isExpanded():
        with dumper.Children(d, size, maxNumChild=2000, childType=inner_type):
            pcur = start_cur
            plast = start_last
            pnode = start_node
            for i in d.childRange():
                # Renamed from `value` to stop shadowing the parameter.
                elem = d.createValue(pcur, inner_type)
                elem_fn(d, i, size, elem)
                pcur += inner_size
                if pcur == plast:
                    # Hop to the next buffer in the node map.
                    newnode = pnode + d.ptrSize()
                    pfirst = d.extractPointer(newnode)
                    # BUG FIX: buffer end is pfirst + buf_size * inner_size
                    # (buf_size counts elements, not pointers); using
                    # d.ptrSize() mis-detected the boundary whenever
                    # inner_size != pointer size.
                    plast = pfirst + buf_size * inner_size
                    pcur = pfirst
                    pnode = newnode
def unordered_map_helper_libcpp(d, value, elem_fn):
    """Dumps the unordered_map for HashSet and HashMap for libc++."""
    count = value["__table_"]["__p2_"].split("pp")[0]
    d.putItemCount(count)
    key_t = value.type[0]
    val_t = value.type[1]
    pair_t = value.type[4][0]
    if not d.isExpanded():
        return
    head = value["__table_"]["__p1_"].split("pp")[0]
    node_fmt = "pp@{%s}" % pair_t.name
    pair_fmt = "{%s}@{%s}" % (key_t.name, val_t.name)

    def walk(node):
        # Follow the singly linked node chain, yielding (key, value) pairs.
        while node:
            parts = d.split(node_fmt, node)
            yield parts[3].split(pair_fmt)[::2]
            node = parts[0]

    with dumper.Children(d, count, childType=key_t, maxNumChild=1000):
        for idx, kv in zip(d.childRange(), walk(head)):
            elem_fn(d, idx, kv[0], kv[1])
def qdump__Hase(d, value):
    # NOTE(review): looks like scratch/diagnostic dumper code -- the loop
    # below only logs, and the commented-out call suggests element access
    # via operator[] was still being prototyped.  Confirm before relying
    # on this dumper.
    log("start")
    # Ask the inferior for the container's size via a method call.
    size = int(d.call('int', value, 'size'))
    log("size = {}".format(size))
    for pos in range(size):
        log("v = {}".format(pos))
        # d.call('int&', value, 'operator[]',pos)
    d.putValue("i = {}".format(value))
    d.putNumChild(1)
    if d.isExpanded():
        with dumper.Children(d):
            # Only the "numbers" member is exposed as a child.
            d.putSubItem("numbers[1]", value["numbers"])
    log("end")
def deque_helper_libcpp(d, value, elem_fn):
    """Dumps the deque for containers of bools or Queue for libc++."""
    elem_t = value.type[0]
    elem_sz = elem_t.size()
    # Layout: map ptr, map first, map begin, map end, start index, size.
    _mp, map_first, _mb, _me, first_idx, count = value.split("pppptt")
    d.check(0 <= count and count <= 1000 * 1000 * 1000)
    d.putItemCount(count)
    if not d.isExpanded():
        return
    word = d.ptrSize()
    # Small elements live in ~4 KiB buffers; large ones in 16-element ones.
    per_buf = 4096 // elem_sz if elem_sz < 256 else 16
    with dumper.Children(d, count, maxNumChild=2000, childType=elem_t):
        for i in d.childRange():
            block, offset = divmod(first_idx + i, per_buf)
            buf = d.extractPointer(map_first + block * word)
            elem_fn(d, i, count,
                    d.createValue(buf + offset * elem_sz, elem_t))
def deque_helper(d, value, name_fn=None):
    """Dumps the internal deque for Stack<bool> or Queue (libc++ layout).

    name_fn, if given, maps (index, size) to a child's display name.
    """
    elem_t = value.type[0]
    elem_sz = elem_t.size()
    # Layout: map ptr, map first, map begin, map end, start index, size.
    _mp, map_first, _mb, _me, first_idx, count = value.split("pppptt")
    d.check(0 <= count and count <= 1000 * 1000 * 1000)
    d.putItemCount(count)
    if not d.isExpanded():
        return
    word = d.ptrSize()
    # Small elements live in ~4 KiB buffers; large ones in 16-element ones.
    per_buf = 4096 // elem_sz if elem_sz < 256 else 16
    with dumper.Children(d, count, maxNumChild=2000, childType=elem_t):
        for i in d.childRange():
            block, offset = divmod(first_idx + i, per_buf)
            buf = d.extractPointer(map_first + block * word)
            label = i if name_fn is None else name_fn(i, count)
            d.putSubItem(label, d.createValue(buf + offset * elem_sz, elem_t))
def map_helper(d, value, is_set=False):
    """Dumps the internal tree backing Set or Map."""
    try:
        proxy, root, count = value.split("ppp")
        d.check(0 <= count and count <= 100 * 1000 * 1000)
    except RuntimeError:
        # Sometimes there is extra data at the front. Don't know why at the moment.
        _junk, proxy, root, count = value.split("pppp")
        d.check(0 <= count and count <= 100 * 1000 * 1000)
    d.putItemCount(count)
    if not d.isExpanded():
        return
    key_t = value.type[0]
    val_t = value.type[1]
    pair_t = value.type[3][0]
    node_fmt = "pppB@{%s}" % pair_t.name
    pair_fmt = "{%s}@{%s}" % (key_t.name, val_t.name)

    def visit(node):
        # In-order (left, node, right) walk yields pairs in key order.
        left, right, _parent, _color, _pad, pair = d.split(node_fmt, node)
        if left:
            yield from visit(left)
        yield pair.split(pair_fmt)[::2]
        if right:
            yield from visit(right)

    with dumper.Children(d, count, maxNumChild=1000):
        for idx, kv in zip(d.childRange(), visit(root)):
            if is_set:
                d.putSubItem(idx, kv[0])
            else:
                d.putPairItem(idx, kv, 'key', 'value')
def map_helper_libcpp(d, value, elem_fn):
    """Dumps the internal tree backing Set or Map for libc++."""
    try:
        proxy, root, count = value.split("ppp")
        d.check(0 <= count and count <= 100 * 1000 * 1000)
    except RuntimeError:
        # JEB sometimes there is extra data at the front (?)
        _junk, proxy, root, count = value.split("pppp")
        d.check(0 <= count and count <= 100 * 1000 * 1000)
    d.putItemCount(count)
    if not d.isExpanded():
        return
    key_t = value.type[0]
    val_t = value.type[1]
    pair_t = value.type[3][0]
    node_fmt = "pppB@{%s}" % pair_t.name
    pair_fmt = "{%s}@{%s}" % (key_t.name, val_t.name)

    def visit(node):
        # In-order (left, node, right) walk yields pairs in key order.
        left, right, _parent, _color, _pad, pair = d.split(node_fmt, node)
        if left:
            yield from visit(left)
        yield pair.split(pair_fmt)[::2]
        if right:
            yield from visit(right)

    with dumper.Children(d, count, maxNumChild=1000):
        for idx, kv in zip(d.childRange(), visit(root)):
            elem_fn(d, idx, kv[0], kv[1])
def vector_helper(d, value, name_fn=None):
    """Dumps the internal vector for Vector, Stack, PriorityQueue, and Grid.

    name_fn, if given, maps (index, size) to a child's display name.
    """
    innerType = value.type[0]
    isBool = innerType.name == 'bool'
    # Check if compiled with libstdc++ or libc++
    isLibCpp = not value.type.name.startswith('std::vector')
    if isBool:
        if isLibCpp:
            start = value["__begin_"].pointer()
            # BUG FIX: extract the integer value; the raw field object is
            # not an int (the sibling helper already did this).
            size = value["__size_"].integer()
            alloc = size
        else:
            start = value["_M_start"]["_M_p"].pointer()
            soffset = value["_M_start"]["_M_offset"].integer()
            finish = value["_M_finish"]["_M_p"].pointer()
            foffset = value["_M_finish"]["_M_offset"].integer()
            alloc = value["_M_end_of_storage"].pointer()
            size = (finish - start) * 8 + foffset - soffset  # 8 is CHAR_BIT.
    else:
        if isLibCpp:
            start = value["__begin_"].pointer()
            finish = value["__end_"].pointer()
            alloc = value["__end_cap_"].pointer()
        else:
            start = value["_M_start"].pointer()
            finish = value["_M_finish"].pointer()
            alloc = value["_M_end_of_storage"].pointer()
        size = int((finish - start) / innerType.size())
        d.check(finish <= alloc)
        if size > 0:
            d.checkPointer(start)
            d.checkPointer(finish)
            d.checkPointer(alloc)
    d.check(0 <= size and size <= 1000 * 1000 * 1000)
    d.putItemCount(size)
    if d.isExpanded():
        if isBool:
            # (Removed a redundant nested isExpanded() check.)
            with dumper.Children(d, size, maxNumChild=10000,
                                 childType=innerType):
                for i in d.childRange():
                    # Elements are packed one per bit.
                    q = start + int(i / 8)
                    name = name_fn(i, size) if name_fn is not None else i
                    # BUG FIX: honor name_fn for bool vectors too; the old
                    # code computed `name` but then passed `i` to SubItem.
                    with dumper.SubItem(d, name):
                        d.putValue((int(d.extractPointer(q)) >> (i % 8)) & 1)
                        d.putType("bool")
                        d.putNumChild(0)
        else:
            maxNumChild = 1000 * 1000
            d.checkIntType(start)
            d.checkIntType(size)
            addrBase = start
            innerSize = innerType.size()
            d.putNumChild(size)
            with dumper.Children(d, size, innerType, None, maxNumChild,
                                 addrBase=addrBase, addrStep=innerSize):
                for i in d.childRange():
                    name = name_fn(i, size) if name_fn is not None else i
                    d.putSubItem(name,
                                 d.createValue(addrBase + i * innerSize,
                                               innerType))
def qdump__cv__Mat(d, value):
    """Dumps a 2-D OpenCV cv::Mat, exposing header fields and per-row data."""
    # ptrSize = d.ptrSize()
    dims = value['dims'].integer()
    if dims != 2:
        # Only 2-D matrices are rendered in detail.
        d.putEmptyValue()
        d.putPlainChildren(value)
        return
    flags = value['flags'].integer()
    channels = 1 + (flags >> 3) & 63
    rows = int(value['rows'])
    cols = int(value['cols'])
    # The low 3 bits of `flags` encode the element depth (CV_8U .. CV_64F).
    depth = flags & 7
    if depth == 0:
        typeName = 'uchar'
        cvTypeName = 'CV_8U'
        elemSize = 1
    elif depth == 1:
        typeName = 'char'
        cvTypeName = 'CV_8S'
        elemSize = 1
    elif depth == 2:
        typeName = 'ushort'
        cvTypeName = 'CV_16U'
        elemSize = 2
    elif depth == 3:
        typeName = 'short'
        cvTypeName = 'CV_16S'
        elemSize = 2
    elif depth == 4:
        typeName = 'int'
        cvTypeName = 'CV_32S'
        elemSize = 4
    elif depth == 5:
        typeName = 'float'
        cvTypeName = 'CV_32F'
        elemSize = 4
    elif depth == 6:
        typeName = 'double'
        cvTypeName = 'CV_64F'
        elemSize = 8
    # NOTE(review): depth == 7 leaves typeName, cvTypeName and elemSize
    # unbound and the next line would raise NameError -- confirm the depth
    # range actually encountered before relying on this dumper.
    d.putValue('%dx%d %sC%d' % (rows, cols, cvTypeName, channels))
    address = value["data"].pointer()
    # Bytes per matrix row (cv::Mat::step).
    step = value['step']['p'].dereference().integer()
    d.putNumChild(1)
    if d.isExpanded():
        with dumper.Children(d):
            # d.putIntItem('width', width)
            d.putIntItem('cols', cols)
            d.putIntItem('channels', channels)
            # d.putIntItem('data', value['data'])
            d.putIntItem('dims', value['dims'])
            d.putIntItem('rows', rows)
            d.putIntItem('size', value['size']['p'].dereference())
            d.putIntItem('step', step)
            with dumper.SubItem(d, 'type'):
                d.putValue(cvTypeName)
                d.putNumChild(0)
            d.putIntItem('flags', flags)
            d.putIntItem('refcount', value['u']['refcount'])
            with dumper.SubItem(d, "data"):
                # if d.isExpanded():
                d.putValue("0x%x" % value["data"].integer())
                d.putNumChild(rows)
                # base = value["data"].dereference()
                with dumper.Children(d):
                    for i in range(rows):
                        if channels == 1:
                            # One array child per matrix row.
                            d.putArrayItem('[%d]' % i, address + i*step,
                                           cols, typeName)
                        else:
                            # Multi-channel: one child per pixel, holding
                            # its channel values.
                            with dumper.SubItem(d, '[%d]' % i):
                                with dumper.Children(d):
                                    for j in range(cols):
                                        d.putArrayItem(
                                            '[%d]' % j,
                                            address + i*step + j*channels*elemSize,
                                            channels, typeName)
    format = d.currentItemFormat()
    if format == dumper.SeparateFormat:
        # NOTE(review): dims was already verified to be 2 above, so this
        # inner check is always true here.
        if value['dims'].integer() == 2:
            # img = cv2.cv.CreateImageHeader((cols,rows), depth, channels)
            # bytes = value['step'] * value['rows']
            # cv2.cv.SetData(img, d.readMemory(value['data'], bytes))
            # if channels == 1:
            #     cv2.cv.CvtColor(img, img, cv2.cv.CV_GRAY2RGB)
            # d.putField("editformat", DisplayImageData)
            # d.put('editvalue="')
            # d.put('%08x%08x%08x%08x' % (cols, rows, byteSize, 13))
            # d.put(img.data)
            # d.put('",')
            d.putDisplay('imagedata:separate',
                         '%08x%08x%08x%08x' % (cols, rows, cols*rows, 1)
                         + d.readMemory(value["data"], cols*rows))
def vector_helper(d, value, elem_fn):
    """Dumps the internal vector for Vector, Stack, PriorityQueue, and Grid."""
    elem_t = value.type[0]
    bool_vec = elem_t.name == 'bool'
    # Field layout differs between libc++ and libstdc++.
    on_libcpp = is_lib_cpp(value)
    if bool_vec:
        if on_libcpp:
            start = value["__begin_"].pointer()
            size = value["__size_"].integer()
            alloc = size
        else:
            start = value["_M_start"]["_M_p"].pointer()
            first_off = value["_M_start"]["_M_offset"].integer()
            finish = value["_M_finish"]["_M_p"].pointer()
            last_off = value["_M_finish"]["_M_offset"].integer()
            alloc = value["_M_end_of_storage"].pointer()
            # 8 bits per byte (CHAR_BIT).
            size = (finish - start) * 8 + last_off - first_off
    else:
        if on_libcpp:
            start = value["__begin_"].pointer()
            finish = value["__end_"].pointer()
            alloc = value["__end_cap_"].pointer()
        else:
            start = value["_M_start"].pointer()
            finish = value["_M_finish"].pointer()
            alloc = value["_M_end_of_storage"].pointer()
        size = int((finish - start) / elem_t.size())
        d.check(finish <= alloc)
        if size > 0:
            d.checkPointer(start)
            d.checkPointer(finish)
            d.checkPointer(alloc)
    d.check(0 <= size and size <= 1000 * 1000 * 1000)
    d.putItemCount(size)
    if not d.isExpanded():
        return
    if bool_vec:
        if d.isExpanded():
            with dumper.Children(d, size, maxNumChild=10000, childType=elem_t):
                for i in d.childRange():
                    byte_addr = start + int(i / 8)
                    with dumper.SubItem(d, i):
                        # Elements are packed one per bit; report True/False.
                        bit = (int(d.extractPointer(byte_addr)) >> (i % 8)) & 1
                        d.putValue(bit != 0)
    else:
        limit = 1000 * 1000
        d.checkIntType(start)
        d.checkIntType(size)
        base = start
        step = elem_t.size()
        d.putNumChild(size)
        with dumper.Children(d, size, elem_t, None, limit,
                             addrBase=base, addrStep=step):
            for i in d.childRange():
                elem_fn(d, i, size, d.createValue(base + i * step, elem_t))