def parse(g, f):
    """Parse a binary heap dump from stream f and register every record in graph g.

    Each record starts with a 16-byte header of four little-endian 32-bit words:
    address, length, backtrace length (or a special marker), and a data-attribute
    bitmask.  Returns the list of parsed HeapElement objects.
    """
    s = struct.Struct("<L")
    st = struct.Struct("<LLLL")
    generalList = []
    while True:
        buf = f.read(16)
        if not buf or len(buf) != 16:
            break
        addr, addrLen, backtraceLen, dataAttrib = st.unpack(buf)
        backtraces = None
        special = 0
        #printDebug("{0:08x}, {1:08x}, {2:08x}, {3:08x}".format(addr, addrLen, backtraceLen, dataAttrib))
        if (backtraceLen > 0) and ((backtraceLen & special_magic) == 0):
            # Ordinary allocation record: read backtraceLen return addresses.
            backtraces = []
            for i in range(backtraceLen):
                backtraceElementBuf = f.read(4)
                if not backtraceElementBuf or len(backtraceElementBuf) != 4:
                    raise ParseError()
                backtraceElement = s.unpack(backtraceElementBuf)
                backtraces.append(backtraceElement[0])
        else:
            # Thread data or global variable: the backtrace field carries a marker.
            special = backtraceLen
            if special:
                if special == thread_data:
                    printDebug("thread:{0:08x}-{1:08x} special = {2:08x}".format(addr, addr + addrLen, special))
                else:
                    printDebug("global:{0:08x}-{1:08x} special = {2:08x}".format(addr, addr + addrLen, special))

        userContent = None
        if (dataAttrib & DATA_ATTR_USER_CONTENT) != 0 and addrLen > 0:
            # The header is followed by a copy of the block's content.
            userContent = f.read(addrLen)
            if not userContent or len(userContent) != addrLen:
                printError("{0:08x}, {1}, {2}".format(addr, len(userContent), addrLen))
                raise ParseError()
        e = HeapElement(addr, addrLen, backtraces, userContent)
        if special:
            e.special = special
        e.dataAttrib = dataAttrib
        g.addElement(e)
        generalList.append(e)
    return generalList
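

# A minimal usage sketch (mirroring how the drivers further below call
# analyze.parse); "heap.dump" is a placeholder file name:
#
#     with open("heap.dump", "rb") as f:
#         g = HeapGraph()
#         generalList = parse(g, f)
#     generalList.sort()

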
def solve_reference(l, address):
    """Print every element in l whose userContent holds a pointer into the
    element that contains the given address."""
    global __s
    s = __s
    e_target = searchInListLoose(l, address)
    if not e_target:
        printError('failed to find the address')
        return
    start = e_target.addr
    end = start + e_target.size
    for e in l:
        if e.userContent and len(e.userContent) >= 4:
            # Scan the content as an array of 32-bit words; any word that
            # points into [start, end] counts as a reference.
            length = len(e.userContent) // 4
            for i in range(length):
                val = s.unpack_from(e.userContent, i * 4)[0]
                if start <= val <= end:
                    writeHeapElement(e, sys.stdout)
                    break
    # Method of the semantic-rule parser class (shown here outside its class body).
    def parseSemanticFile(self, stream):
        """Read semantic rules line by line; blank lines and lines starting
        with '#' are ignored."""
        while True:
            l = stream.readline()
            if not l:
                break
            l = l.strip()
            if not l or l[0] == '#':
                continue
            if not (self.handleObjectTypeTag(l) or self.handleFunctionMapTag(l)):
                printError('failed to parse semantic rule')
                raise WrongSemanticError()

        if not self.semanticArray:
            printError("No semantic string found")
            raise WrongSemanticError()


def printContent(content, numberDict, f):
    """Write content to f, replacing every hex address that was resolved in
    numberDict with its module, function and line information."""
    findit = hexRe.finditer(content)
    offset = 0
    for match in findit:
        address = int(match.group(1), 16)
        if address in numberDict:
            end = match.end()
            start = match.start()
            addrData = numberDict[address]
            f.write(content[offset:start])
            f.write("{4:08x}\t{0}/{1} --- {2} --- {3}".format(
                addrData.soPath, addrData.soName, addrData.funcName,
                addrData.lineInfo, addrData.relativeAddress))
            offset = end
    f.write(content[offset:])
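
# hexRe is defined elsewhere in this script; printContent only assumes that
# group(1) of each match holds the hexadecimal digits of an address.  A
# stand-in pattern (an assumption, not the original definition) could be:
#
#     hexRe = re.compile(r'\b([0-9a-fA-F]{8})\b')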

if __name__ == '__main__':
    if len(sys.argv) != 3:
        printError('usage: <maps file> <search path>')
        sys.exit(1)
    # The text to annotate (e.g. a report full of raw addresses) comes from stdin.
    content = sys.stdin.read()

    mapEntries = parseMap(sys.argv[1])
    numbers = getUniqueNumbers(content)
    numberDict = {}
    SoJob = {}
    for pair in generateMapEntryNumberPair(numbers, mapEntries):
        updateSoJobs(pair[1], pair[0], SoJob)
        updateNumberDict(pair[1], pair[0], numberDict)
    handleJobs(numberDict, SoJob.items(), sys.argv[2])
    printContent(content, numberDict, sys.stdout)
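

# Example shell invocation ("proc.maps", "symbols/" and "report.txt" are
# placeholder names; the ObjectAnalysis driver further below spawns the same
# command and feeds it text over stdin):
#
#     python AddressFilter.py proc.maps symbols/ < report.txt > resolved.txt
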
        l = analyze.parse(g, f)
    del g
    # Aggregate allocation sizes per unique backtrace.
    for outData in l:
        if outData.special or not outData.backtraces:
            continue
        bt = backtrace_element(outData.backtraces)
        if bt not in objDict:
            objDict[bt] = OldObjectDesc(outData.size, outData.backtraces)
        else:
            oldDesc = objDict[bt]
            oldDesc.allocations += outData.size


if __name__ == '__main__':
    if len(sys.argv) <= 2:
        printError("need files of heap dump")
        sys.exit(1)
    objDict = handleFiles(sys.argv[1], sys.argv[2])
    sortedList = list(objDict.values())
    sortedList.sort()
    for desc in sortedList:
        print("Delta: {0}".format(desc.allocations))
        print("BeginStacktrace:")
        for backtrace in desc.backtraces._backtraces:
            print("{0:08x}".format(backtrace))
        print("EndStacktrace:")
        print("")
        analyze.analyzeHeapElementMember(he, l, callbackFunc)
    #writeRefZeroAndNotSpecial(l)


if __name__ == '__main__':

    myoptparser = OptionParser()

    myoptparser.add_option("-m", "--map-file", help="assign map file",
                           action="store", type="string", dest="mapfile")
    myoptparser.add_option("-s", "--symbol-path", help="assign symbol path",
                           action="store", type="string", dest="sympath")
    myoptparser.add_option("-w", "--writer", help="set a writer by name",
                           action="store", type="string", dest="writer")

    myargTuple = myoptparser.parse_args()

    if not myargTuple[0].mapfile:
        printError("need to use -m to specify map file")
        sys.exit(1)

    if not myargTuple[0].sympath:
        printError("need to use -s to specify symbol path")
        sys.exit(1)

    #initialize generalList
    with open(myargTuple[1][0], "rb") as f:
        g = analyze.HeapGraph()
        generalList = analyze.parse(g, f)

    generalList.sort()
    del g
    #kick off the address filter to resolve line numbers for object analysis
    process = subprocess.Popen(("python", "AddressFilter.py",
                                myargTuple[0].mapfile, myargTuple[0].sympath),
                               stdout=subprocess.PIPE, stdin=subprocess.PIPE)

    def thread_call_back(returnList):
            # no effective semantic found
            printDebug('no effective semantic found' + str(self.lines_))
            #raise WrongSemanticError()
        if self.composedName_:
            self.IOMapping_[self.totalCount_ - 1] = (self.composedName_,
                                                     self.lines_)
            self.composedName_ = None

        self.currentAllocation_ = 0
        self.lines_ = []

if __name__ == '__main__':
    setDebug(True)
    myoptparser = OptionParser()
    myargTuple = myoptparser.parse_args()
    if not myargTuple[1]:
        printError("need a file to analyze")
        sys.exit(1)
    with open(myargTuple[1][0], 'r') as f:
        callBack = MyCallback()
        myParser = Parser()
        myParser.parseStream(f, callBack)

        def getHitCount(s):
            return s.hitCount_

        # uncomment the following statements to benchmark and optimize the semantic array order
        #for semantic in sorted(callBack.typeParser_.semanticArray, key=getHitCount, reverse=True):
        #    print('{0} : {1}'.format(semantic.pattern_, semantic.hitCount_))
        sortedObjectList = sorted(callBack.objs_.items(),
                                  key=operator.itemgetter(1),
                                  reverse=True)
        printCount = 0
        for o in sortedObjectList: