def run(self):
    """Main loop of the tweetout module.

    Loads configuration and API keys from the database, authenticates
    against the Twitter API via tweepy (OAuth 1.0a user context), then
    polls the tweet and tweet-media queues forever.  Each round it
    posts at most one text tweet and one media tweet, sleeps
    config["tweetdelay"] seconds, and reloads the config so database
    changes take effect live.  Runs until interrupted or an unhandled
    error occurs.
    """
    # load config from database
    self.load_config()
    # load apikeys from database
    self.load_apikeys()
    # initialize twitter api
    self.auth = tweepy.OAuthHandler(self.config["twitterconsumerkey"],
                                    self.config["twitterconsumersecret"])
    self.auth.set_access_token(self.config["twitteraccesskey"],
                               self.config["twitteraccesssecret"])
    self.api = tweepy.API(self.auth)
    # monitor tw queue
    utils.info("starting tweetout module (%s, %s)" % (self.config["tweetqueue"], self.config["tweetmediaqueue"]))
    try:
        while True:
            # drain one plain-text tweet, if any is queued
            if utils.queuecount(queuefile=self.config["tweetqueue"]) > 0:
                message = utils.dequeue(queuefile=self.config["tweetqueue"])
                self.send_tweet(message)
            # drain one media tweet, if any is queued
            if utils.queuecount(queuefile=self.config["tweetmediaqueue"]) > 0:
                imgdata = utils.dequeue(queuefile=self.config["tweetmediaqueue"])
                self.send_tweet_media(imgdata)
            time.sleep(self.config["tweetdelay"])
            # reload config from database
            self.load_config()
    except Exception:
        # Fix: was a bare `except:` + `pass`, which also swallowed
        # SystemExit/KeyboardInterrupt and made the process unstoppable.
        # Narrowed to Exception; still print the traceback before exiting.
        import traceback
        traceback.print_exc()
def run(self):
    """Main loop of the commandhandler module.

    Loads configuration and API keys from the database, authenticates
    against the Twitter API via tweepy, then polls the command queue
    forever: each round it dequeues and parses at most one message,
    sleeps config["queuemonitordelay"] seconds, and reloads the config
    so database changes take effect live.
    """
    # load config from database
    self.load_config()
    # load apikeys from database
    self.load_apikeys()
    # initialize twitter api
    self.auth = tweepy.OAuthHandler(self.config["twitterconsumerkey"], self.config["twitterconsumersecret"])
    self.auth.set_access_token(self.config["twitteraccesskey"], self.config["twitteraccesssecret"])
    self.api = tweepy.API(self.auth)
    # monitor cmd queue
    utils.info("starting commandhandler module (%s)" % (self.config["cmdqueue"]))
    # Fix: dropped a dead `count = utils.queuecount(...)` that ran once
    # before the loop; its value was never used and was immediately
    # recomputed on the first iteration.
    while True:
        count = utils.queuecount(queuefile=self.config["cmdqueue"])
        if count > 0:
            message = utils.dequeue(queuefile=self.config["cmdqueue"])
            self.parser(message)
        time.sleep(self.config["queuemonitordelay"])
        # reload config from database
        self.load_config()
def _reverse(self, context): log.info('[+] Reversing pointers in %s' % (context.heap)) # make structure lengths from interval between pointers lengths = self.makeLengths(context.heap, context._structures_addresses) # we really should be lazyloading structs.. t0 = time.time() tl = t0 loaded = 0 todo = sorted(set(context._structures_addresses) - set(context._structures.keys())) fromcache = len(context._structures_addresses) - len(todo) # build structs from pointers boundaries. and creates pointer fields if # possible. log.info('[+] Adding new raw structures from pointers boundaries') offsets = list(context._pointers_offsets) for i, ptr_value in enumerate(context._structures_addresses): # toh stoupid if ptr_value in todo: loaded += 1 size = lengths[i] # get offset of pointer fields offsets, my_pointers_addrs = utils.dequeue( offsets, ptr_value, ptr_value + size) # save the ref/struct type mystruct = structure.makeStructure(context, ptr_value, size) context._structures[ptr_value] = mystruct # mystruct.save() # get pointers addrs in start -> start+size log.debug( 'Adding %d pointer fields field ' % (len(my_pointers_addrs))) for p_addr in my_pointers_addrs: f = mystruct.addField( p_addr, fieldtypes.FieldType.POINTER, context.config.get_word_size(), False) #log.debug('Add field at %lx offset:%d'%( p_addr,p_addr-ptr_value)) if time.time() - tl > 10: # i>0 and i%10000 == 0: tl = time.time() # DEBUG... rate = ( (tl - t0) / (loaded)) if loaded else ((tl - t0) / (loaded + fromcache)) log.info( '%2.2f secondes to go (b:%d/c:%d)' % ((len(todo) - i) * rate, loaded, fromcache)) log.info( '[+] Extracted %d structures in %2.0f (b:%d/c:%d)' % (loaded + fromcache, time.time() - t0, loaded, fromcache)) context.parsed.add(str(self)) return
def _reverse(self, context):
    """Build raw structures from malloc_chunk allocation records.

    Walks (address, size) pairs from context._malloc_addresses /
    context._malloc_sizes, skipping addresses already in the structure
    cache, and creates a structure plus POINTER fields for each remaining
    allocation.  Each new structure is persisted via saveme().  Progress
    is logged every 10 seconds; the step is recorded in context.parsed.

    NOTE(review): mc1 is read from the heap but never used — the
    check_inuse() test it was read for is commented out and replaced by
    `if True:`.  prevLoaded and unused are also never updated.
    """
    log.info('[+] Reversing malloc_chunk in %s'%(context.heap))
    ## we really should be lazyloading structs..
    t0 = time.time()
    tl = t0  # timestamp of the last progress log line
    loaded = 0
    prevLoaded = 0  # never updated — dead counter
    unused = 0  # never updated — only echoed in the final log line
    #lengths = context._malloc_sizes
    doneStructs = context._structures.keys()
    # FIXME why is that a LIST ?????
    todo = sorted(set(context._malloc_addresses) - set(doneStructs))
    fromcache = len(context._malloc_addresses) - len(todo)
    offsets = list(context._pointers_offsets)
    # build structs from pointers boundaries. and creates pointer fields if possible.
    log.info('[+] Adding new raw structures from malloc_chunks contents - %d todo'%(len(todo)))
    #for i, ptr_value in enumerate(context.listStructuresAddresses()):
    for i, (ptr_value, size) in enumerate(zip(map(int,context._malloc_addresses), map(int,context._malloc_sizes))):
        # TODO if len(_structure.keys()) +/- 30% of _malloc, do malloc_addr - keys() ,
        # and use fsking utils.dequeue()
        if ptr_value in doneStructs: # FIXME TODO THAT IS SUCKY SUCKY
            # already cached: print a progress dot and move on
            sys.stdout.write('.')
            sys.stdout.flush()
            continue
        loaded += 1
        #size = lengths[i]
        # save the ref/struct type
        # chunk header sits two words before the user pointer
        chunk_addr = ptr_value-2*Config.WORDSIZE
        mc1 = context.heap.readStruct(chunk_addr, libc.ctypes_malloc.malloc_chunk)
        #if mc1.check_inuse(context.mappings, chunk_addr):
        if True:  # in-use check disabled; every chunk is reversed
            mystruct = structure.makeStructure(context, ptr_value, size)
            context._structures[ ptr_value ] = mystruct
            # add pointerFields: dequeue consumes pointer offsets that
            # fall inside [ptr_value, ptr_value+size)
            offsets, my_pointers_addrs = utils.dequeue(offsets, ptr_value, ptr_value+size)
            log.debug('Adding %d pointer fields field on struct of size %d'%( len(my_pointers_addrs), size) )
            # optimise insertion
            if len(my_pointers_addrs) > 0:
                mystruct.addFields(my_pointers_addrs, fieldtypes.FieldType.POINTER, Config.WORDSIZE, False)
            #cache to disk
            mystruct.saveme()
        # next
        if time.time()-tl > 10: #i>0 and i%10000 == 0:
            tl = time.time()
            # avg seconds per structure; cached fallback when loaded == 0
            rate = ((tl-t0)/(loaded)) if loaded else ((tl-t0)/(loaded+fromcache)) #DEBUG...
            log.info('%2.2f secondes to go (b:%d/c:%d)'%( (len(todo)-i)*rate, loaded, fromcache ) )
    log.info('[+] Extracted %d structures in %2.0f (b:%d/c:%d/u:%d)'%(loaded+ fromcache, time.time()-t0,loaded, fromcache, unused ) )
    context.parsed.add(str(self))
    return
def _reverse(self, context): log.info('[+] Reversing pointers in %s' % (context.heap)) # make structure lengths from interval between pointers lengths = self.makeLengths(context.heap, context._structures_addresses) ## we really should be lazyloading structs.. t0 = time.time() tl = t0 loaded = 0 todo = sorted( set(context._structures_addresses) - set(context._structures.keys())) fromcache = len(context._structures_addresses) - len(todo) # build structs from pointers boundaries. and creates pointer fields if possible. log.info('[+] Adding new raw structures from pointers boundaries') offsets = list(context._pointers_offsets) for i, ptr_value in enumerate(context._structures_addresses): # toh stoupid if ptr_value in todo: loaded += 1 size = lengths[i] # get offset of pointer fields offsets, my_pointers_addrs = utils.dequeue( offsets, ptr_value, ptr_value + size) # save the ref/struct type mystruct = structure.makeStructure(context, ptr_value, size) context._structures[ptr_value] = mystruct #mystruct.save() # get pointers addrs in start -> start+size log.debug('Adding %d pointer fields field ' % (len(my_pointers_addrs))) for p_addr in my_pointers_addrs: f = mystruct.addField(p_addr, fieldtypes.FieldType.POINTER, Config.WORDSIZE, False) #log.debug('Add field at %lx offset:%d'%( p_addr,p_addr-ptr_value)) if time.time() - tl > 10: #i>0 and i%10000 == 0: tl = time.time() rate = ((tl - t0) / (loaded)) if loaded else ( (tl - t0) / (loaded + fromcache)) #DEBUG... log.info('%2.2f secondes to go (b:%d/c:%d)' % ((len(todo) - i) * rate, loaded, fromcache)) log.info('[+] Extracted %d structures in %2.0f (b:%d/c:%d)' % (loaded + fromcache, time.time() - t0, loaded, fromcache)) context.parsed.add(str(self)) return
def run(self):
    """Main loop of the txparser module.

    Loads configuration from the database, then polls the task queue
    forever: each round it dequeues and parses at most one message,
    sleeps config["queuemonitordelay"] seconds, and reloads the config
    so database changes take effect live.
    """
    # load config from database
    self.load_config()
    # monitor task queue
    utils.info("txparser:run: starting txparser module (%s)" % (self.config["taskqueue"]))
    # Fix: dropped a dead `count = utils.queuecount(...)` that ran once
    # before the loop; its value was never used and was immediately
    # recomputed on the first iteration.
    while True:
        count = utils.queuecount(queuefile=self.config["taskqueue"])
        if count > 0:
            message = utils.dequeue(queuefile=self.config["taskqueue"])
            self.parser(message)
        time.sleep(self.config["queuemonitordelay"])
        # reload config from database
        self.load_config()
def refreshOne(context, ptr_value):
    """Rebuild the single structure starting at ptr_value.

    Derives the structure's size from the gap to the next aligned
    pointer (the last one runs to the end of the heap), registers a
    fresh structure in context.structures, attaches a POINTER field for
    every pointer offset inside it, resolves its pointers, and returns
    the new structure.
    """
    addr_list = context.structures_addresses
    # interval between consecutive aligned pointers gives each size
    sizes = [after - before for before, after in zip(addr_list, addr_list[1:])]
    sizes.append(context.heap.end - addr_list[-1])  # tail runs to heap end
    struct_size = sizes[addr_list.index(ptr_value)]
    remaining = list(context.pointers_offsets)
    remaining, pointer_addrs = utils.dequeue(
        remaining, ptr_value, ptr_value + struct_size)
    # register the rebuilt structure under its start address
    new_struct = structure.makeStructure(context, ptr_value, struct_size)
    context.structures[ptr_value] = new_struct
    for field_addr in pointer_addrs:
        new_struct.addField(
            field_addr, fieldtypes.FieldType.POINTER, Config.WORDSIZE, False)
    new_struct.resolvePointers()
    return new_struct
def refreshOne(context, ptr_value):
    """Rebuild the single structure starting at ptr_value.

    Variant that resolves pointers against the context's structure
    address list and structure collection.  The structure's size is the
    gap to the next aligned pointer (the last one runs to the end of the
    heap); the rebuilt structure is registered in context.structures and
    returned with one POINTER field per contained pointer offset.
    """
    addr_list = context.structures_addresses
    # interval between consecutive aligned pointers gives each size
    sizes = [after - before for before, after in zip(addr_list, addr_list[1:])]
    sizes.append(context.heap.end - addr_list[-1])  # tail runs to heap end
    struct_size = sizes[addr_list.index(ptr_value)]
    remaining = list(context.pointers_offsets)
    remaining, pointer_addrs = utils.dequeue(
        remaining, ptr_value, ptr_value + struct_size)
    # register the rebuilt structure under its start address
    new_struct = structure.makeStructure(context, ptr_value, struct_size)
    context.structures[ptr_value] = new_struct
    for field_addr in pointer_addrs:
        new_struct.addField(
            field_addr, fieldtypes.FieldType.POINTER, Config.WORDSIZE, False)
    new_struct.resolvePointers(
        context._structures_addresses, context.listStructures())
    return new_struct
def buildAnonymousStructs(mappings, heap, _aligned, not_aligned, p_addrs, structCache, reverse=False):
    '''Build anonymous structures between aligned pointer boundaries.

    values: ALIGNED pointer values

    NOTE(review): this function raises NotImplementedError on its first
    statement — it has been deliberately disabled pending a port to the
    context-based API.  Everything below the raise is dead code kept for
    reference.
    '''
    lengths = []
    ############3 kill me I need a context
    raise NotImplementedError(
        'kill me I need a context instead of a mapping. Who uses me anyway')
    # --- dead code below this point ---
    aligned = list(_aligned)
    # struct lengths = gaps between consecutive aligned pointers
    for i in range(len(aligned) - 1):
        lengths.append(aligned[i + 1] - aligned[i])
    lengths.append(heap.end - aligned[-1])  # add tail
    addrs = list(p_addrs)
    unaligned = list(not_aligned)
    if reverse:
        aligned.reverse()
        lengths.reverse()
        addrs.reverse()
        unaligned.reverse()
        #dequeue=dequeue_reverse
    # this is the list of built anon structs. it will grow towards p_addrs...
    # this is the optimised key list of structCache
    structs_addrs = numpy.array([])
    nbMembers = 0
    # make AnonymousStruct
    for i in range(len(aligned)):
        hasMembers = False
        start = aligned[i]
        size = lengths[i]
        ## debug: verbose logging only for watched addresses
        if start in DEBUG_ADDRS:
            logging.getLogger('progressive').setLevel(logging.DEBUG)
        else:
            logging.getLogger('progressive').setLevel(logging.INFO)
        # the pointers field address/offset
        addrs, my_pointers_addrs = utils.dequeue(
            addrs, start, start + size)  ### this is not reverse-compatible
        # the pointers values, that are not aligned
        unaligned, my_unaligned_addrs = utils.dequeue(unaligned, start, start + size)
        ### read the struct
        anon = structure.makeStructure(mappings, aligned[i], size)
        #save the ref/struct type
        structCache[anon.vaddr] = anon
        structs_addrs = numpy.append(structs_addrs, anon.vaddr)
        log.debug('Created a struct with %d pointers fields' % (len(my_pointers_addrs)))
        # get pointers addrs in start -> start+size
        for p_addr in my_pointers_addrs:
            f = anon.addField(p_addr, FieldType.POINTER, Config.WORDSIZE, False)
            log.debug('Add field at %lx offset:%d' % (p_addr, p_addr - start))
        ## set field for unaligned pointers, that sometimes gives good results ( char[][] )
        for p_addr in my_unaligned_addrs:
            log.debug('Guess field at %lx offset:%d' % (p_addr, p_addr - start))
            if anon.guessField(p_addr) is not None:  #, FieldType.UKNOWN):
                nbMembers += 1
                hasMembers = True
            # not added
        # try to decode fields
        log.debug('build: decoding fields')
        anon.decodeFields()
        # try to resolve pointers
        ##log.debug('build: resolve pointers')
        ##structs_addrs.sort()
        #what is the point ? most of them are not resolvable yet...
        ##anon.resolvePointers()
        # debug
        if hasMembers:
            for _f in anon.fields:
                if _f.size == -1:
                    log.debug('ERROR, %s ' % (_f))
            log.debug('Created a struct %s with %d fields' % (anon, len(anon.fields)))
            #log.debug(anon.toString())
        # yield (anon, structs_addrs)
    log.info('Typed %d stringfields' % (nbMembers))
    return
def buildAnonymousStructs(
        mappings, heap, _aligned, not_aligned, p_addrs, structCache, reverse=False):
    '''Build anonymous structures between aligned pointer boundaries.

    values: ALIGNED pointer values

    NOTE(review): this function raises NotImplementedError on its first
    statement — it has been deliberately disabled pending a port to the
    context-based API.  Everything below the raise is dead code kept for
    reference.
    '''
    lengths = []
    # 3 kill me I need a context
    raise NotImplementedError(
        'kill me I need a context instead of a mapping. Who uses me anyway')
    # --- dead code below this point ---
    aligned = list(_aligned)
    # struct lengths = gaps between consecutive aligned pointers
    for i in range(len(aligned) - 1):
        lengths.append(aligned[i + 1] - aligned[i])
    lengths.append(heap.end - aligned[-1])  # add tail
    addrs = list(p_addrs)
    unaligned = list(not_aligned)
    if reverse:
        aligned.reverse()
        lengths.reverse()
        addrs.reverse()
        unaligned.reverse()
        # dequeue=dequeue_reverse
    # this is the list of built anon structs. it will grow towards p_addrs...
    # this is the optimised key list of structCache
    structs_addrs = numpy.array([])
    nbMembers = 0
    # make AnonymousStruct
    for i in range(len(aligned)):
        hasMembers = False
        start = aligned[i]
        size = lengths[i]
        # debug: verbose logging only for watched addresses
        if start in DEBUG_ADDRS:
            logging.getLogger('progressive').setLevel(logging.DEBUG)
        else:
            logging.getLogger('progressive').setLevel(logging.INFO)
        # the pointers field address/offset
        addrs, my_pointers_addrs = utils.dequeue(
            addrs, start, start + size)  # this is not reverse-compatible
        # the pointers values, that are not aligned
        unaligned, my_unaligned_addrs = utils.dequeue(
            unaligned, start, start + size)
        # read the struct
        anon = structure.makeStructure(mappings, aligned[i], size)
        # save the ref/struct type
        structCache[anon.vaddr] = anon
        structs_addrs = numpy.append(structs_addrs, anon.vaddr)
        log.debug(
            'Created a struct with %d pointers fields' % (len(my_pointers_addrs)))
        # get pointers addrs in start -> start+size
        for p_addr in my_pointers_addrs:
            f = anon.addField(
                p_addr, FieldType.POINTER, Config.WORDSIZE, False)
            log.debug('Add field at %lx offset:%d' % (p_addr, p_addr - start))
        # set field for unaligned pointers, that sometimes gives good results (
        # char[][] )
        for p_addr in my_unaligned_addrs:
            log.debug(
                'Guess field at %lx offset:%d' % (p_addr, p_addr - start))
            if anon.guessField(p_addr) is not None:  # , FieldType.UKNOWN):
                nbMembers += 1
                hasMembers = True
            # not added
        # try to decode fields
        log.debug('build: decoding fields')
        anon.decodeFields()
        # try to resolve pointers
        ##log.debug('build: resolve pointers')
        # structs_addrs.sort()
        # what is the point ? most of them are not resolvable yet...
        # anon.resolvePointers()
        # debug
        if hasMembers:
            for _f in anon.fields:
                if _f.size == -1:
                    log.debug('ERROR, %s ' % (_f))
            log.debug(
                'Created a struct %s with %d fields' % (anon, len(anon.fields)))
            # log.debug(anon.toString())
        # yield (anon, structs_addrs)
    log.info('Typed %d stringfields' % (nbMembers))
    return