# Example #1 (score: 0)
    def _list_records(self):
        """Return the mapping of record start address -> record.

        If the in-memory cache is clean, return it unchanged. Otherwise
        reload all records lazily from the on-disk cache and, when some
        allocated addresses have no cached record, rebuild the missing
        ones from the allocations with a BasicCachingReverser.

        :return: dict mapping record address (long) to record instance
        """
        if not self._is_record_cache_dirty():
            return self._structures

        # otherwise cache Load
        log.debug('[+] Loading cached records list')
        # dict comprehension instead of dict([...]) (flake8-comprehensions C404)
        # NOTE(review): `long` is Python 2 only — kept because this code base
        # targets Python 2; use int on Python 3.
        self._structures = {long(vaddr): s for vaddr, s in structure.cache_load_all_lazy(self)}
        log.debug('[+] Loaded %d cached records addresses from disk', len(self._structures))

        # If we are missing some allocators from the cache loading
        # then recreated them in cache from Allocated memory
        nb_missing = len(self._structures_addresses) - len(self._structures)
        if nb_missing != 0:
            # local import to avoid a circular import at module load time
            from haystack.reverse.heuristics import reversers
            # lazy %-args instead of eager '%' formatting, consistent with
            # every other log call in this method
            log.debug('[+] Missing cached records %d', nb_missing)
            if nb_missing < 10:
                nb_unique = len(set(self._structures_addresses) - set(self._structures))
                log.warning('TO check missing:%d unique:%d', nb_missing, nb_unique)
            # use BasicCachingReverser to get user blocks
            cache_reverse = reversers.BasicCachingReverser(self.memory_handler)
            _ = cache_reverse.reverse_context(self)
            log.info('[+] Built %d/%d records from allocations',
                     len(self._structures),
                     len(self._structures_addresses))
        return self._structures
# Example #2 (score: 0)
def main():
    """Memory-profiling scratch script.

    Loads 10000 cached records from a skype memory dump, forces them into
    memory, then uses meliae heap dumps to inspect which object types
    retain the memory (the transcript of one run is kept inline below).
    """
    from haystack.reverse import context
    ctx = context.get_context('test/dumps/skype/skype.1/skype.1.f')
    from haystack.reverse import structure
    it = structure.cache_load_all_lazy(ctx)

    # pull the first 10000 (addr, record) pairs off the lazy iterator;
    # next(it) works on Python 2 and 3, it.next() was Python-2-only
    structs = []
    for i in range(10000):
        structs.append(next(it))

    # force every record to be fully loaded/stringified; done for the
    # side effect only, so a plain loop instead of a discarded list-comp
    for addr, s in structs:
        s.to_string()

    # 51 Mo

    # shrink the record cache and overwrite its slots so the CacheWrapper
    # entries can be released
    structure.CacheWrapper.refs.size = 5
    for i in range(5):
        structure.CacheWrapper.refs[i] = i

    # 51 Mo

    # dump the whole heap to disk for offline analysis
    from meliae import scanner
    scanner.dump_all_objects('filename.json')

    from meliae import loader
    om = loader.load('filename.json')
    s = om.summarize()
    s
    '''
  Total 206750 objects, 150 types, Total size = 27.2MiB (28495037 bytes)
   Index   Count   %      Size   % Cum     Max Kind
       0   75801  36   7529074  26  26   27683 str
       1   11507   5   6351864  22  48     552 Field
       2      16   0   5926913  20  69 2653328 numpy.ndarray
       3   10000   4   1680000   5  75     168 CacheWrapper
       4    2099   1   1158648   4  79     552 AnonymousStructInstance
       5    1182   0    857136   3  82   98440 dict
       6   18630   9    745200   2  85      40 weakref
       7   14136   6    633148   2  87   43812 list
  '''
    # clearly Field instances keep some place....
    # most 10000 Anonymous intances are not int memory now

    om.compute_referrers()

    # om[ addr].parents
    # om[ addr].children

    # get the biggest Field
    f_addr = s.summaries[1].max_address
    om[f_addr]

    # Field(179830860 552B 21refs 1par)

    om[f_addr].parents
    # [179834316]
    # >>> om[ 179834316 ]
    # list(179834316 132B 19refs 1par)  <- list of fields in Struct

    # walk up the referrer chain: Field -> list of fields -> owning struct
    l_addr = om[f_addr].parents[0]
    om[l_addr].parents
    # [179849516]
    # >>> om[ 179849516 ]
    # AnonymousStructInstance(179849516 552B 23refs 19par)

    anon_addr = om[l_addr].parents[0]
    om[anon_addr]
    # 179849516 is a anon struct
    import networkx
    import matplotlib.pyplot as plt

    # NOTE(review): graphme() is not defined in this file — presumably
    # provided elsewhere; confirm before running.
    graphme()
# Example #3 (score: 0)
def main():
    """Memory-profiling scratch script (duplicate of Example #2).

    Loads 10000 cached records from a skype memory dump, forces them into
    memory, then uses meliae heap dumps to inspect which object types
    retain the memory (the transcript of one run is kept inline below).
    """
    from haystack.reverse import context
    ctx = context.get_context('test/dumps/skype/skype.1/skype.1.f')
    from haystack.reverse import structure
    it = structure.cache_load_all_lazy(ctx)

    # pull the first 10000 (addr, record) pairs off the lazy iterator;
    # next(it) works on Python 2 and 3, it.next() was Python-2-only
    structs = []
    for i in range(10000):
        structs.append(next(it))

    # force every record to be fully loaded/stringified; done for the
    # side effect only, so a plain loop instead of a discarded list-comp
    for addr, s in structs:
        s.to_string()

    # 51 Mo

    # shrink the record cache and overwrite its slots so the CacheWrapper
    # entries can be released
    structure.CacheWrapper.refs.size = 5
    for i in range(5):
        structure.CacheWrapper.refs[i] = i

    # 51 Mo

    # dump the whole heap to disk for offline analysis
    from meliae import scanner
    scanner.dump_all_objects('filename.json')

    from meliae import loader
    om = loader.load('filename.json')
    s = om.summarize()
    s
    '''
  Total 206750 objects, 150 types, Total size = 27.2MiB (28495037 bytes)
   Index   Count   %      Size   % Cum     Max Kind
       0   75801  36   7529074  26  26   27683 str
       1   11507   5   6351864  22  48     552 Field
       2      16   0   5926913  20  69 2653328 numpy.ndarray
       3   10000   4   1680000   5  75     168 CacheWrapper
       4    2099   1   1158648   4  79     552 AnonymousStructInstance
       5    1182   0    857136   3  82   98440 dict
       6   18630   9    745200   2  85      40 weakref
       7   14136   6    633148   2  87   43812 list
  '''
    # clearly Field instances keep some place....
    # most 10000 Anonymous intances are not int memory now

    om.compute_referrers()

    # om[ addr].parents
    # om[ addr].children

    # get the biggest Field
    f_addr = s.summaries[1].max_address
    om[f_addr]

    # Field(179830860 552B 21refs 1par)

    om[f_addr].parents
    # [179834316]
    # >>> om[ 179834316 ]
    # list(179834316 132B 19refs 1par)  <- list of fields in Struct

    # walk up the referrer chain: Field -> list of fields -> owning struct
    l_addr = om[f_addr].parents[0]
    om[l_addr].parents
    # [179849516]
    # >>> om[ 179849516 ]
    # AnonymousStructInstance(179849516 552B 23refs 19par)

    anon_addr = om[l_addr].parents[0]
    om[anon_addr]
    # 179849516 is a anon struct
    import networkx
    import matplotlib.pyplot as plt

    # NOTE(review): graphme() is not defined in this file — presumably
    # provided elsewhere; confirm before running.
    graphme()