def import_to_structures(self, ask=False): """ Imports virtual tables and returns tid_t of new structure :return: idaapi.tid_t """ cdecl_typedef = idaapi.print_tinfo( None, 4, 5, idaapi.PRTYPE_MULTI | idaapi.PRTYPE_TYPE | idaapi.PRTYPE_SEMI, self.create_tinfo(), self.vtable_name, None) if ask: cdecl_typedef = idaapi.asktext( 0x10000, cdecl_typedef, "The following new type will be created") if not cdecl_typedef: return previous_ordinal = idaapi.get_type_ordinal(idaapi.cvar.idati, self.vtable_name) if previous_ordinal: idaapi.del_numbered_type(idaapi.cvar.idati, previous_ordinal) ordinal = idaapi.idc_set_local_type(previous_ordinal, cdecl_typedef, idaapi.PT_TYP) else: ordinal = idaapi.idc_set_local_type(-1, cdecl_typedef, idaapi.PT_TYP) if ordinal: print "[Info] Virtual table " + self.vtable_name + " added to Local Types" return idaapi.import_type(idaapi.cvar.idati, -1, self.vtable_name) else: print "[Error] Failed to create virtual table " + self.vtable_name print "*" * 100 print cdecl_typedef print "*" * 100
def pack(self, start=0, stop=None):
    """
    Packs items[start:stop] of the temporary structure into a real structure
    in Local Types (after letting the user review/edit its C declaration)
    and applies a pointer to the new type to every unique scanned variable.

    :param start: index of the first member to pack
    :param stop: index one past the last member to pack (None = to the end)
    :return: idaapi.tinfo_t of the created structure on success, else None
    """
    # Refuse to pack while overlapping members remain unresolved
    if self.collisions[start:stop].count(True):
        print "[Warning] Collisions detected"
        return

    final_tinfo = idaapi.tinfo_t()
    udt_data = idaapi.udt_type_data_t()
    # Member offsets are emitted relative to the first packed item
    origin = self.items[start].offset if start else 0
    offset = origin
    for item in filter(lambda x: x.enabled, self.items[start:stop]):    # Filter disabled members
        gap_size = item.offset - offset
        if gap_size:
            # Fill the hole up to this member with a padding field
            udt_data.push_back(TemporaryStructureModel.get_padding_member(offset - origin, gap_size))
        if item.is_array:
            array_size = self.calculate_array_size(bisect.bisect_left(self.items, item))
            if array_size:
                udt_data.push_back(item.get_udt_member(array_size, offset=origin))
                offset = item.offset + item.size * array_size
                continue
        udt_data.push_back(item.get_udt_member(offset=origin))
        offset = item.offset + item.size

    final_tinfo.create_udt(udt_data, idaapi.BTF_STRUCT)
    cdecl = idaapi.print_tinfo(None, 4, 5, idaapi.PRTYPE_MULTI | idaapi.PRTYPE_TYPE | idaapi.PRTYPE_SEMI,
                               final_tinfo, self.structure_name, None)
    # Let the user review/edit the declaration; pack(1) keeps the byte layout exact
    cdecl = idaapi.asktext(0x10000, '#pragma pack(push, 1)\n' + cdecl, "The following new type will be created")
    if cdecl:
        structure_name = idaapi.idc_parse_decl(idaapi.cvar.idati, cdecl, idaapi.PT_TYP)[0]
        previous_ordinal = idaapi.get_type_ordinal(idaapi.cvar.idati, structure_name)
        if previous_ordinal:
            # A type with this name already exists; ask before replacing it
            reply = QtGui.QMessageBox.question(
                None,
                "HexRaysPyTools",
                "Structure already exist. Do you want to overwrite it?",
                QtGui.QMessageBox.Yes | QtGui.QMessageBox.No
            )
            if reply == QtGui.QMessageBox.Yes:
                idaapi.del_numbered_type(idaapi.cvar.idati, previous_ordinal)
                ordinal = idaapi.idc_set_local_type(previous_ordinal, cdecl, idaapi.PT_TYP)
            else:
                return
        else:
            ordinal = idaapi.idc_set_local_type(-1, cdecl, idaapi.PT_TYP)
        if ordinal:
            print "[Info] New type {0} was added to Local Types".format(structure_name)
            tid = idaapi.import_type(idaapi.cvar.idati, -1, structure_name)
            if tid:
                tinfo = idaapi.create_typedef(structure_name)
                ptr_tinfo = idaapi.tinfo_t()
                ptr_tinfo.create_ptr(tinfo)
                # Retype every variable this structure was reconstructed from
                for scanned_var in self.get_unique_scanned_variables(origin):
                    scanned_var.apply_type(ptr_tinfo)
                return tinfo
        else:
            print "[ERROR] Structure {0} probably already exist".format(structure_name)
def pack(self, start=0, stop=None):
    """
    Packs items[start:stop] of the temporary structure into a new structure
    in Local Types (after letting the user review/edit its C declaration)
    and applies a pointer to the new type to every scanned variable.

    :param start: index of the first member to pack
    :param stop: index one past the last member to pack (None = to the end)
    :return: idaapi.tinfo_t of the created structure on success, else None
    """
    # Refuse to pack while overlapping members remain unresolved
    if self.collisions[start:stop].count(True):
        print "[Warning] Collisions detected"
        return

    final_tinfo = idaapi.tinfo_t()
    udt_data = idaapi.udt_type_data_t()
    # Member offsets are emitted relative to the first packed item
    origin = self.items[start].offset
    offset = origin
    for item in filter(lambda x: x.enabled, self.items[start:stop]):    # Filter disabled members
        gap_size = item.offset - offset
        if gap_size:
            # Fill the hole up to this member with a padding field
            udt_data.push_back(
                TemporaryStructureModel.get_padding_member(
                    offset - origin, gap_size))
        if item.is_array:
            array_size = self.calculate_array_size(
                bisect.bisect_left(self.items, item))
            if array_size:
                udt_data.push_back(
                    item.get_udt_member(array_size, offset=origin))
                offset = item.offset + item.size * array_size
                continue
        udt_data.push_back(item.get_udt_member(offset=origin))
        offset = item.offset + item.size

    final_tinfo.create_udt(udt_data, idaapi.BTF_STRUCT)
    cdecl = idaapi.print_tinfo(
        None, 4, 5,
        idaapi.PRTYPE_MULTI | idaapi.PRTYPE_TYPE | idaapi.PRTYPE_SEMI,
        final_tinfo, self.structure_name, None)
    # Let the user review/edit the declaration before it is created
    cdecl = idaapi.asktext(0x10000, cdecl, "The following new type will be created")
    if cdecl:
        structure_name = idaapi.idc_parse_decl(idaapi.cvar.idati, cdecl, idaapi.PT_TYP)[0]
        ordinal = idaapi.idc_set_local_type(-1, cdecl, idaapi.PT_TYP)
        if ordinal:
            print "[Info] New type {0} was added to Local Types".format(
                structure_name)
            tid = idaapi.import_type(idaapi.cvar.idati, -1, structure_name)
            if tid:
                tinfo = idaapi.create_typedef(structure_name)
                ptr_tinfo = idaapi.tinfo_t()
                ptr_tinfo.create_ptr(tinfo)
                # Retype every variable this structure was reconstructed from
                for scanned_var in self.get_scanned_variables(origin):
                    scanned_var.apply_type(ptr_tinfo)
                return tinfo
        else:
            print "[ERROR] Structure {0} probably already exist".format(
                structure_name)
    return None
def merge_attributes_callback(self, message_info, input_attribute1, input_attribute2):
    """
    Prompt the user to resolve an attribute merge conflict.

    Shows the local and remote values and lets the user edit the merged
    result in an IDA text box (pre-filled with the local value).

    NOTE(review): the original body contained unreachable statements after
    the return (returning ya.PROMPT_MERGE_CONFLICT_UNSOLVED / SOLVED); that
    dead code has been removed without changing behavior. If callers of
    this callback actually expect those status codes, the early return of
    the edited text is the real bug — confirm against the ya.Merger
    prompt-merge contract.

    :param message_info: description of the conflicting attribute
    :param input_attribute1: value from the local side
    :param input_attribute2: value from the remote side
    :return: the user-edited attribute value, or "" if the dialog was cancelled
    """
    message = "%s\n" % message_info
    message += "Value from local : %s\n" % input_attribute1
    message += "Value from remote : %s\n" % input_attribute2
    output_attribute_result = idaapi.asktext(4096, input_attribute1, message)
    if output_attribute_result is None:
        # Dialog cancelled: fall back to an empty attribute value
        output_attribute_result = ""
    return output_attribute_result
def repo_commit(self, commit_msg=None):
    """
    Stages every change under cache/ and commits it.

    When commit_msg is None, a message is assembled from self.auto_comments;
    the user may additionally edit the message when options.ask_commit_msg
    is set. self.auto_comments is cleared after a successful commit.

    :param commit_msg: commit message to use, or None to auto-generate one
    :return: True if a commit was made, False otherwise
    """
    status = False
    modified_files = self.repo.get_modified_objects_in_path("cache/")
    deleted_files = self.repo.get_deleted_objects_in_path("cache/")
    untracked_files = self.repo.get_untracked_objects_in_path("cache/")
    if len(untracked_files) > 0:
        logger.debug("adding %i untracked files..." % len(untracked_files))
        self.repo.add_files(untracked_files)
    if len(modified_files) > 0:
        logger.debug("adding %i modified files..." % len(modified_files))
        self.repo.add_files(modified_files)
    if len(deleted_files) > 0:
        logger.debug("removing %i deleted files..." % len(deleted_files))
        self.repo.remove_files(deleted_files)

    # check if we have to commit cache
    uncommited_cache = False
    if (len(untracked_files) + len(modified_files) + len(deleted_files)) > 0:
        uncommited_cache = True

    if uncommited_cache:
        # warning: in test mode idaapi.asktext will be overridden to return "dummy message"
        # Left-align the "[prefix]" column to the longest prefix
        max_prefix_len = 0
        for (prefix, text) in self.auto_comments:
            max_prefix_len = max(len(prefix), max_prefix_len)
        prefix_format = "[%-" + ("%d" % max_prefix_len) + "s] "
        sorted_comments = list()
        for (prefix, text) in self.auto_comments:
            sorted_comments.append((prefix_format % prefix) + text)
        if commit_msg is None:
            # No caller-supplied message: build one from the auto comments
            commit_msg = ""
            sorted_comments.sort()
            for msg in sorted_comments:
                commit_msg += msg + '\n'
        if len(commit_msg) > TRUNCATE_COMMIT_MESSAGE_LENGTH:
            commit_msg = commit_msg[:TRUNCATE_COMMIT_MESSAGE_LENGTH] + "\n...truncated"
        if self.options.ask_commit_msg is True:
            commit_msg = idaapi.asktext(len(commit_msg) * 2 + 256, commit_msg, "Commit message :")
        if commit_msg != "":
            self.repo.commit(commit_msg)
            self.auto_comments = set()
            status = True
    return status
def repo_commit(self, commit_msg=None):
    """
    Stages every change under cache/ and commits it.

    When commit_msg is None, a message is assembled from self.auto_comments;
    the user may additionally edit the message when options.ask_commit_msg
    is set. self.auto_comments is cleared after a successful commit.

    :param commit_msg: commit message to use, or None to auto-generate one
    :return: True if a commit was made, False otherwise
    """
    logger.info("committing changes")
    untracked_files = self.repo.get_untracked_objects_in_path("cache/")
    modified_files = self.repo.get_modified_objects_in_path("cache/")
    deleted_files = self.repo.get_deleted_objects_in_path("cache/")
    if not len(modified_files) and not len(deleted_files) and not len(
            untracked_files):
        # Nothing changed under cache/ — no commit needed
        return False
    for f in untracked_files:
        logger.info("added %s" % os.path.relpath(f, "cache"))
    for f in modified_files:
        logger.info("modified %s" % os.path.relpath(f, "cache"))
    for f in deleted_files:
        logger.info("deleted %s" % os.path.relpath(f, "cache"))
    self.repo.add_files(untracked_files)
    self.repo.add_files(modified_files)
    self.repo.remove_files(deleted_files)

    # warning: in test mode idaapi.asktext will be overridden to return "dummy message"
    # Left-align the "[prefix]" column to the longest prefix
    max_prefix_len = 0
    for (prefix, text) in self.auto_comments:
        max_prefix_len = max(len(prefix), max_prefix_len)
    prefix_format = "[%-" + ("%d" % max_prefix_len) + "s] "
    sorted_comments = list()
    for (prefix, text) in self.auto_comments:
        sorted_comments.append((prefix_format % prefix) + text)
    if commit_msg is None:
        # No caller-supplied message: build one from the auto comments
        commit_msg = ""
        sorted_comments.sort()
        for msg in sorted_comments:
            commit_msg += msg + '\n'
    if len(commit_msg) > TRUNCATE_COMMIT_MESSAGE_LENGTH:
        commit_msg = commit_msg[:TRUNCATE_COMMIT_MESSAGE_LENGTH] + "\n...truncated"
    if self.options.ask_commit_msg is True:
        commit_msg = idaapi.asktext(
            len(commit_msg) * 2 + 256, commit_msg, "Commit message :")
    if commit_msg != "":
        self.repo.commit(commit_msg)
        self.auto_comments = set()
        return True
    return False
def callback(self, input_file1, input_file2, output_file_result):
    """
    Resolve a merge conflict between two cache files.

    Non-XML files are accepted as-is. XML files are first run through the
    automatic ya.Merger; if that fails to produce an update, the user is
    asked to merge the content by hand (in an IDA text box, or in an
    external editor when the file is too large), looping until the result
    is well-formed XML or the user gives up.

    :param input_file1: path of the first (local) version
    :param input_file2: path of the second (remote) version
    :param output_file_result: path where the merged result must be written
    :return: True when output_file_result holds a valid merge result,
             False when the conflict remains unresolved
    """
    logger.debug("PythonResolveFileConflictCallback.callback(%s, %s, %s)" %
                 (input_file1, input_file2, output_file_result))
    if not output_file_result.endswith(".xml"):
        # Only XML cache files are merged here; anything else passes through
        return True
    merger_conflict = PythonGuiPromptMergeConflict()
    merger = ya.Merger(merger_conflict, ya.OBJECT_VERSION_MERGE_PROMPT)
    merge_flag = merger.smartMerge(input_file1, input_file2, output_file_result)
    if merge_flag == ya.OBJECT_MERGE_STATUS_NOT_UPDATED:
        # Automatic merge failed: fall back to a manual merge loop
        logger.error(
            "PythonResolveFileConflictCallback: callback: object version was not updated"
        )
        with open(output_file_result, 'r') as foutput:
            input_content = foutput.read()
        while True:
            if len(input_content) >= 65536:
                # Too big for the asktext dialog: user edits the file externally
                idc.Warning(
                    "[File too big to be edited, please edit manually %s then continue]"
                    % output_file_result)
                merged_content = open(output_file_result, 'r').read()
            else:
                merged_content = idaapi.asktext(
                    len(input_content) * 2, input_content, "manual merge stuff")
            if merged_content not in [None, ""]:
                # Reject syntactically invalid XML and ask again
                try:
                    xml.dom.minidom.parseString(merged_content)
                except:
                    logger.warning("invalid xml content")
                    logger.warning(traceback.format_exc())
                    idc.Warning("invalid xml content")
                    # loop again in while
                    continue
                with open(output_file_result, 'w') as foutput_:
                    foutput_.write(merged_content)
                # Everything worked : stop endless while
                break
            else:
                # User cancelled or supplied nothing: conflict stays unresolved
                return False
    # endif merge_flag == OBJECT_MERGE_STATUS_NOT_UPDATED:
    # Final sanity check: the written result must be parseable XML
    try:
        xml.dom.minidom.parse(output_file_result)
    except:
        logger.error(
            "invalid xml output generate by PythonResolveFileConflictCallback"
        )
        idaapi.msg(
            "invalid xml output generate by PythonResolveFileConflictCallback"
        )
        return False
    return True
def activate(self, ctx):
    """
    Action handler: create a new structure member at the gap under the
    cursor in the Hex-Rays view. Asks the user for a member declaration,
    then replaces the padding member with [leading pad][new field]
    [trailing pad] and rewrites the structure type in place.

    :param ctx: IDA action activation context (provides ctx.widget)
    """
    hx_view = idaapi.get_tform_vdui(ctx.widget)
    result = self.check(hx_view.cfunc, hx_view.item)
    if result is None:
        return
    struct_tinfo, offset, idx = result
    ordinal = struct_tinfo.get_ordinal()
    struct_name = struct_tinfo.dstr()

    # Suggest a placeholder type based on the field offset's alignment
    if (offset + idx) % 2:
        default_field_type = "_BYTE"
    elif (offset + idx) % 4:
        default_field_type = "_WORD"
    else:
        default_field_type = "_DWORD"

    declaration = idaapi.asktext(
        0x10000,
        "{0} field_{1:X}".format(default_field_type, offset + idx),
        "Enter new structure member:"
    )
    if declaration is None:
        return
    result = self.__parse_declaration(declaration)
    if result is None:
        return
    field_tinfo, field_name = result
    field_size = field_tinfo.get_size()
    udt_data = idaapi.udt_type_data_t()
    udt_member = idaapi.udt_member_t()
    struct_tinfo.get_udt_details(udt_data)
    udt_member.offset = offset * 8          # udt_member offsets are in bits
    struct_tinfo.find_udt_member(idaapi.STRMEM_OFFSET, udt_member)
    gap_size = udt_member.size // 8         # size of the gap member, in bytes
    gap_leftover = gap_size - idx - field_size
    if gap_leftover < 0:
        print "[ERROR] Too big size for the field. Type with maximum {0} bytes can be used".format(gap_size - idx)
        return
    # Remove the gap member and re-insert: trailing pad, new field, leading pad
    iterator = udt_data.find(udt_member)
    iterator = udt_data.erase(iterator)
    if gap_leftover > 0:
        udt_data.insert(iterator, TemporaryStructureModel.get_padding_member(offset + idx + field_size, gap_leftover))
    udt_member = idaapi.udt_member_t()
    # NOTE(review): idx (bytes) is added to a bit offset here; the same
    # expression appears in the sibling implementation — confirm intended.
    udt_member.offset = offset * 8 + idx
    udt_member.name = field_name
    udt_member.type = field_tinfo
    udt_member.size = field_size
    iterator = udt_data.insert(iterator, udt_member)
    if idx > 0:
        udt_data.insert(iterator, TemporaryStructureModel.get_padding_member(offset, idx))
    # Rebuild the struct and store it back under its original ordinal
    struct_tinfo.create_udt(udt_data, idaapi.BTF_STRUCT)
    struct_tinfo.set_numbered_type(idaapi.cvar.idati, ordinal, idaapi.BTF_STRUCT, struct_name)
    hx_view.refresh_view(True)
def activate(self, ctx):
    """
    Action handler: create a new structure member at the gap under the
    cursor in the Hex-Rays view. Derives the struct type and byte offset
    from the expression under the cursor (including an optional constant
    array index), asks the user for a member declaration, then replaces
    the padding member with [leading pad][new field][trailing pad] and
    rewrites the structure type in place.

    :param ctx: IDA action activation context (provides ctx.widget)
    """
    hx_view = idaapi.get_widget_vdui(ctx.widget)
    if not self.check(hx_view):
        return

    item = hx_view.item.it.to_specific_type
    parent = hx_view.cfunc.body.find_parent_of(item).to_specific_type
    # If the member access is indexed by a constant (x[N]), use N as the
    # byte offset inside the gap; otherwise start at the gap's beginning
    if parent.op != idaapi.cot_idx or parent.y.op != idaapi.cot_num:
        idx = 0
    else:
        idx = parent.y.numval()

    struct_tinfo = item.x.type
    struct_tinfo.remove_ptr_or_array()
    offset = item.m
    ordinal = struct_tinfo.get_ordinal()
    struct_name = struct_tinfo.dstr()

    # Suggest a placeholder type based on the field offset's alignment
    if (offset + idx) % 2:
        default_field_type = "_BYTE"
    elif (offset + idx) % 4:
        default_field_type = "_WORD"
    elif (offset + idx) % 8:
        default_field_type = "_DWORD"
    else:
        default_field_type = "_QWORD" if const.EA64 else "_DWORD"

    declaration = idaapi.asktext(
        0x10000, "{0} field_{1:X}".format(default_field_type, offset + idx),
        "Enter new structure member:")
    if declaration is None:
        return

    result = self.parse_declaration(declaration)
    if result is None:
        logger.warn("Bad member declaration")
        return

    field_tinfo, field_name = result
    field_size = field_tinfo.get_size()
    udt_data = idaapi.udt_type_data_t()
    udt_member = idaapi.udt_member_t()
    struct_tinfo.get_udt_details(udt_data)
    udt_member.offset = offset * 8          # udt_member offsets are in bits
    struct_tinfo.find_udt_member(idaapi.STRMEM_OFFSET, udt_member)
    gap_size = udt_member.size // 8         # size of the gap member, in bytes
    gap_leftover = gap_size - idx - field_size

    if gap_leftover < 0:
        logger.error("Too big size for the field. Type with maximum {0} bytes can be used".format(gap_size - idx))
        return

    # Remove the gap member and re-insert: trailing pad, new field, leading pad
    iterator = udt_data.find(udt_member)
    iterator = udt_data.erase(iterator)
    if gap_leftover > 0:
        udt_data.insert(
            iterator,
            helper.create_padding_udt_member(offset + idx + field_size, gap_leftover))
    udt_member = idaapi.udt_member_t()
    # NOTE(review): idx (bytes) is added to a bit offset here; the same
    # expression appears in the sibling implementation — confirm intended.
    udt_member.offset = offset * 8 + idx
    udt_member.name = field_name
    udt_member.type = field_tinfo
    udt_member.size = field_size
    iterator = udt_data.insert(iterator, udt_member)
    if idx > 0:
        udt_data.insert(iterator, helper.create_padding_udt_member(offset, idx))

    # Rebuild the struct and store it back under its original ordinal
    struct_tinfo.create_udt(udt_data, idaapi.BTF_STRUCT)
    struct_tinfo.set_numbered_type(idaapi.cvar.idati, ordinal, idaapi.BTF_STRUCT, struct_name)
    hx_view.refresh_view(True)
def showText(text, title="text"):
    """Display *text* in IDA's editable text dialog (the edited result is discarded)."""
    buffer_size = len(text) + 1024  # leave headroom so the user can extend the text
    idaapi.asktext(buffer_size, text, title)