def command_memdump(self):
    """Dump all live Python objects to 'sockulf.dump' via meliae.

    Returns a human-readable status string; never raises.
    """
    try:
        from meliae import scanner
        scanner.dump_all_objects("sockulf.dump")
        return "Memory dumped!"
    # Was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt; Exception still covers ImportError and any
    # failure while writing the dump file.
    except Exception:
        return "Could not dump memory! (meliae not installed?)"
def dump_objects():
    """Thread target: write a meliae object dump to the configured path.

    The destination is built from PROFILING_OUTPUT_FMT and the current
    filename components returned by get_filename_fmt().
    """
    from meliae import scanner
    destination = PROFILING_OUTPUT_FMT % get_filename_fmt()
    scanner.dump_all_objects(destination)
def on_cmd(self, cmd): if cmd is None: return if cmd.startswith('play'): self.player.play() elif cmd.startswith('next'): self.player.play(self.player.get_next()) elif cmd.startswith('previous'): self.player.play(self.player.get_previous()) elif cmd.startswith('pause'): self.player.pause() elif cmd.startswith('list'): print '=====================================' for info in self.player.list: print '%s. %s' % (info[0], info[2]) print '=====================================' elif cmd.startswith('info'): print '=====================================' print '%s. %s' % (self.player.index, self.player.get_title()) print '=====================================' elif cmd.startswith('stop'): self.player.stop() sys.exit(0) elif cmd.startswith('dump'): from meliae import scanner scanner.dump_all_objects('./dump.txt') else: print '''=====================================
def launch_meliae_profiling():
    """Launch run() first, then gather memory profiling data with meliae.

    Returns a tuple (stat, statPieChart): a formatted text table of the
    top memory consumers, and a LaTeX pie-chart fragment for the top 10
    object kinds.
    """
    run()
    scanner.dump_all_objects(rawMemoryOutput)
    memRawStats = create_mem_stat(rawMemoryOutput)
    print_mem_stat_file(memRawStats, f=MemorySizeOutput)
    # BUG FIX: previously '.format()' bound only to the second literal of the
    # '+' concatenation, so '{0}', '{1}' and '{2:.1f}' in the first literal
    # were emitted verbatim.  Parenthesising the literals joins them into one
    # string *before* .format() is applied.
    stat = ("Total {0} objects, {1} types, Total size = {2:.1f}MiB "
            "({3} bytes)\n").format(memRawStats.total_count,
                                    len(memRawStats.summaries),
                                    memRawStats.total_size / 1024. / 1024,
                                    memRawStats.total_size)
    stat += " Index Count % Size % Cum Max Kind \n"
    statPercent = 0
    statPieChart = ""
    i = 0
    for l in get_mem_stats(memRawStats, 10):
        # Same fix as above: format the whole row template, not just its tail.
        stat += ("{0:6d} {1:8d} {2:6.2f} {3:8d} {4:6.2f} {5:6.2f} {6:8d} "
                 "{7:s}\n").format(l[0], l[1], l[2], l[3], l[4], l[5],
                                   l[6], l[7])
        if i < 10:
            # Escape underscores for LaTeX and wrap the kind in \texttt{}.
            statPieChart += "{0:6.2f}/ {1:s} ".format(
                l[4], string.replace("\\texttt{" + l[7] + "}", '_', '\_'))
            if i != 9:
                statPieChart += ","
        statPercent += l[4]
        i += 1
    #statPieChart += "{0:6.2f}/ {1:s}".format(100-statPercent, "\\texttt{Others}")
    return stat, statPieChart
def render_GET(self, request):
    """
    .. http:get:: /debug/memory/dump

    A GET request to this endpoint returns a Meliae-compatible dump of the memory contents.

    **Example request**:

    .. sourcecode:: none

        curl -X GET http://localhost:8085/debug/memory/dump

    **Example response**:

    The content of the memory dump file.
    """
    dump_file_path = os.path.join(self.session.config.get_state_dir(), 'memory_dump.json')
    scanner.dump_all_objects(dump_file_path)
    date_str = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    request.setHeader(b'content-type', 'application/json')
    request.setHeader(
        b'Content-Disposition',
        'attachment; filename=tribler_memory_dump_%s.json' % date_str)
    # FIX: the original 'return open(path).read()' leaked the file handle;
    # the with-statement guarantees it is closed.
    with open(dump_file_path) as dump_file:
        return dump_file.read()
async def get_memory_dump(self, request):
    """Return a meliae-compatible memory dump as an HTTP attachment.

    On Windows the dump goes through an in-memory buffer (older meliae
    versions segfault when writing straight to a file there); elsewhere
    it is written to disk and read back, which is much faster.
    """
    if sys.platform == "win32":
        # On Windows meliae (especially older versions) segfault on writing to file
        dump_buffer = MemoryDumpBuffer()
        try:
            scanner.dump_all_objects(dump_buffer)
        except OverflowError as e:
            # https://bugs.launchpad.net/meliae/+bug/569947
            logging.error(
                "meliae dump failed (your version may be too old): %s", str(e))
        content = dump_buffer.getvalue()
        dump_buffer.close()
    else:
        # On other platforms, simply writing to file is much faster
        dump_file_path = self.state_dir / 'memory_dump.json'
        scanner.dump_all_objects(dump_file_path)
        with open(dump_file_path) as dump_file:
            content = dump_file.read()
    date_str = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    # FIX: the headers argument was written with commas ('{a, b, c, d}'),
    # which builds a *set* of four strings, not a mapping of header name to
    # value.  It must be a dict.
    return RESTResponse(
        content,
        headers={
            'Content-Type': 'application/json',
            'Content-Disposition':
                'attachment; filename=tribler_memory_dump_%s.json' % date_str,
        })
def dump_objects():
    """Thread target: periodically dump all live objects with meliae."""
    # pylint: disable=E0401
    from meliae import scanner
    out_path = PROFILING_OUTPUT_FMT % get_filename_fmt()
    scanner.dump_all_objects(out_path)
def main():
    """Write a meliae dump of all live objects to 'dump.memory'."""
    from meliae import scanner
    filename = 'dump.memory'
    # FIX: use a with-statement so the handle is closed even if the dump
    # raises part-way through (the original close() was skipped on error).
    with open(filename, 'wb') as fh:
        scanner.dump_all_objects(fh)
    print('saved memory dump to: %r' % (filename,))
def start_memory_dumper():
    """ Initiates the memory profiler. """
    # Epoch reference so dump filenames carry seconds-since-start.
    start = time()
    from meliae import scanner
    # Periodic dump every MEMORY_DUMP_INTERVAL seconds, starting immediately.
    LoopingCall(lambda: scanner.dump_all_objects("memory-%d.out" % (time() - start))).start(MEMORY_DUMP_INTERVAL, now=True)
    # One final dump just before the reactor shuts down.
    reactor.addSystemEventTrigger("before", "shutdown", lambda: scanner.dump_all_objects("memory-%d-shutdown.out" % (time() - start)))
def bench_mem(timeout, filename="meliae-dump-"):
    """Sleep *timeout* seconds, then write a timestamped meliae dump.

    Silently does nothing when meliae is not installed.
    """
    try:
        from meliae import scanner
        import time
    except ImportError:
        pass
    else:
        gevent.sleep(timeout)
        # FIX: time.clock() was removed in Python 3.8; time.time() gives a
        # monotonically increasing stamp suitable for unique filenames.
        scanner.dump_all_objects("%s%d.json" % (filename, time.time()))
def bench_mem(timeout, filename='meliae-dump-'):
    """After waiting *timeout* seconds, dump all objects via meliae.

    A missing meliae installation is tolerated: the function simply
    returns without dumping anything.
    """
    try:
        from meliae import scanner
        import time
    except ImportError:
        pass
    else:
        gevent.sleep(timeout)
        dump_path = '%s%d.json' % (filename, time.clock())
        scanner.dump_all_objects(dump_path)
def print_leaks(self, prefix=''):
    """Print object-growth and a meliae leak summary, tagged with *prefix*."""
    # No-op unless the optional meliae/objgraph tooling was detected.
    if not USE_MELIAE:
        return
    objgraph.show_growth()
    # mkstemp returns (fd, path); only the path is needed here.
    # NOTE(review): the fd from mkstemp is never closed — possible leak.
    tmp = tempfile.mkstemp(prefix='pcp-test')[1]
    scanner.dump_all_objects(tmp)
    leakreporter = loader.load(tmp)
    summary = leakreporter.summarize()
    print('{0}: {1}'.format(prefix, summary))
def do_memory_dump():
    """Dump all objects to /tmp and return the path, JSON-encoded.

    Returns JSON 'null' when python-meliae is not installed.
    (Closure: uses 'response', 'logger', 'json' and 'self' from the
    enclosing scope.)
    """
    response.content_type = 'application/json'
    try:
        from meliae import scanner
    except ImportError:
        logger.error('Cannot run a memory dump, missing python-meliae')
        return json.dumps(None)
    dump_path = '/tmp/memory-%s' % self.name
    scanner.dump_all_objects(dump_path)
    return json.dumps(dump_path)
def test_sar(self):
    """Parses all the sar files and creates the pdf outputs"""
    for example in self.sar_files:
        print("Parsing: {0}".format(example))
        grapher = SarGrapher([example])
        stats = SarStats(grapher)
        # Resource usage snapshot after parsing, before graphing.
        usage = resource.getrusage(resource.RUSAGE_SELF)
        if USE_MELIAE:
            # Optional leak hunting: growth stats plus a meliae dump summary.
            objgraph.show_growth()
            tmp = tempfile.mkstemp(prefix='sar-test')[1]
            scanner.dump_all_objects(tmp)
            leakreporter = loader.load(tmp)
            summary = leakreporter.summarize()
        print(
            "SAR parsing: {0} usertime={1} systime={2} mem={3} MB".format(
                end_of_path(example), usage[0], usage[1],
                (usage[2] / 1024.0)))
        if USE_PROFILER:
            # Report cumulative-time hot spots of the parse phase.
            self.profile.disable()
            str_io = StringIO.StringIO()
            sortby = 'cumulative'
            pstat = pstats.Stats(self.profile, stream=str_io).sort_stats(sortby)
            pstat.print_stats(TOP_PROFILED_FUNCTIONS)
            print("\nProfiling of sar.parse()")
            print(str_io.getvalue())
            # Set up profiling for pdf generation
            self.profile.enable()
        out = "{0}.pdf".format(example)
        stats.graph(example, [], out)
        if USE_PROFILER:
            # Report hot spots of the graphing phase.
            self.profile.disable()
            str_io = StringIO.StringIO()
            sortby = 'cumulative'
            pstat = pstats.Stats(self.profile, stream=str_io).sort_stats(sortby)
            pstat.print_stats(TOP_PROFILED_FUNCTIONS)
            print("\nProfiling of sarstats.graph()")
            print(str_io.getvalue())
        print("Wrote: {0}".format(out))
        # Clean up the generated PDF and release the parser objects so the
        # next iteration's memory figures are comparable.
        os.remove(out)
        grapher.close()
        del grapher
        del stats
        usage = resource.getrusage(resource.RUSAGE_SELF)
        print(
            "SAR graphing: {0} usertime={1} systime={2} mem={3} MB".format(
                end_of_path(example), usage[0], usage[1],
                (usage[2] / 1024.0)))
def do_memdump():
    """Write a meliae memory dump through profile_output('meliae.json').

    Prints a note to stderr and returns early if meliae is missing.
    """
    try:
        from meliae import scanner
    except ImportError:
        sys.stderr.write("meliae module unavailable\n")
        return
    import gc
    # A full collection first makes the dump reflect only live objects,
    # which keeps results accurate and comparable between runs.
    gc.collect()
    with profile_output("meliae.json") as out:
        scanner.dump_all_objects(out)
def meliae_dump():
    """Dump memory using meliae.

    Writes a timestamped JSON dump into the configured log folder.
    Returns an error string if meliae is not installed, otherwise None.
    """
    try:
        from meliae import scanner
        dump_dir = config.general.log_folder
        filename = os.path.join(dump_dir, 'meliae-%s.json' % (
            datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S",)))
        # Collect first so the dump only contains live objects.
        gc.collect()
        scanner.dump_all_objects(filename)
    # FIX: 'except ImportError, e' is Python-2-only syntax; 'as' works on
    # both Python 2.6+ and Python 3.
    except ImportError as e:
        return "Meliae not available: %s" % (e,)
def meliae_dump():
    """Dump memory using meliae.

    Writes a timestamped JSON dump into the configured log folder.
    Returns an error string if meliae is not installed, otherwise None.
    """
    try:
        from meliae import scanner
        dump_dir = config.general.log_folder
        filename = os.path.join(
            dump_dir,
            'meliae-%s.json' %
            (datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S", )))
        # Collect first so the dump only contains live objects.
        gc.collect()
        scanner.dump_all_objects(filename)
    # FIX: 'except ImportError, e' is Python-2-only syntax; 'as' works on
    # both Python 2.6+ and Python 3.
    except ImportError as e:
        return "Meliae not available: %s" % (e, )
def on_memory_dump_button_clicked(self, dump_core):
    """Ask for a target directory, then dump core or GUI memory into it."""
    self.export_dir = QFileDialog.getExistingDirectory(self, "Please select the destination directory", "", QFileDialog.ShowDirsOnly)
    # Empty string means the user cancelled the dialog.
    if len(self.export_dir) > 0:
        filename = "tribler_mem_dump_%s_%s.json" % \
            ('core' if dump_core else 'gui', datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
        if dump_core:
            # Core dump: fetched asynchronously over the debug REST endpoint.
            self.request_mgr = TriblerRequestManager()
            self.request_mgr.download_file("debug/memory/dump", lambda data: self.on_memory_dump_data_available(filename, data))
        else:
            # GUI dump: meliae scans this (GUI) process directly.
            scanner.dump_all_objects(os.path.join(self.export_dir, filename))
def dump_memory_profile(self, msg):  # pragma: no cover
    '''Log memory profiling information.

    Get the memory profiling method from the dump-memory-profile
    setting, and log the results at DEBUG level. ``msg`` is a
    message the caller provides to identify at what point the
    profiling happens.
    '''
    kind = self.settings['dump-memory-profile']
    interval = self.settings['memory-dump-interval']
    if kind == 'none':
        return
    # Rate-limit: skip if the previous dump was less than 'interval' ago.
    now = time.time()
    if self.last_memory_dump + interval > now:
        return
    self.last_memory_dump = now
    # Log wall clock and CPU times for self, children.
    utime, stime, cutime, cstime, elapsed_time = os.times()
    duration = elapsed_time - self._started
    logging.debug('process duration: %s s' % duration)
    logging.debug('CPU time, in process: %s s' % utime)
    logging.debug('CPU time, in system: %s s' % stime)
    logging.debug('CPU time, in children: %s s' % cutime)
    logging.debug('CPU time, in system for children: %s s' % cstime)
    logging.debug('dumping memory profiling data: %s' % msg)
    logging.debug('VmRSS: %s KiB' % self._vmrss())
    if kind == 'simple':
        return
    # These are fairly expensive operations, so we only log them
    # if we're doing expensive stuff anyway.
    logging.debug('# objects: %d' % len(gc.get_objects()))
    logging.debug('# garbage: %d' % len(gc.garbage))
    if kind == 'heapy':
        from guppy import hpy
        h = hpy()
        logging.debug('memory profile:\n%s' % h.heap())
    elif kind == 'meliae':
        # Numbered files so successive dumps never overwrite each other.
        filename = 'obnam-%d.meliae' % self.memory_dump_counter
        logging.debug('memory profile: see %s' % filename)
        from meliae import scanner
        scanner.dump_all_objects(filename)
        self.memory_dump_counter += 1
def start_memory_dumper():
    """ Initiates the memory profiler. """
    msg("starting memory dump looping call")
    from meliae import scanner
    # Epoch reference so dump filenames carry elapsed seconds.
    start = time()
    # Per-process output directory: $OUTPUT_DIR/meliae/<pid>/
    meliae_out_dir = path.join(environ["OUTPUT_DIR"], "meliae", str(PID))
    makedirs(meliae_out_dir)
    meliae_out_file = path.join(meliae_out_dir, "memory-%s.out")
    # Periodic dump every PROFILE_MEMORY_INTERVAL seconds, starting now.
    LoopingCall(lambda: scanner.dump_all_objects(meliae_out_file % str(time() - start))).start(PROFILE_MEMORY_INTERVAL, now=True)
    # One last dump right before the reactor shuts down.
    reactor.addSystemEventTrigger("before", "shutdown", lambda: scanner.dump_all_objects(meliae_out_file % str(time() - start) + "-shutdown"))
def meliae_dump():
    """Dump memory using meliae."""
    # 'scanner' is resolved at module level; None means meliae is absent.
    if scanner is None:
        return "Meliae not available"
    try:
        dump_dir = settings.LOG_FOLDER
        target = os.path.join(
            dump_dir, 'meliae-%s.json' % (now().strftime("%Y%m%d%H%M%S", )))
        gc.collect()
        scanner.dump_all_objects(target)
    except Exception as e:
        return "Error while trying to dump memory: %s" % (e, )
    else:
        return 'Output written to: %s' % (target, )
def dump_memory():
    """Write one meliae dump, optionally preceded by objgraph back-reference
    chains for the configured types.

    Closure: uses start, types, objgraph_out_file, meliae_out_file and the
    PROFILE_MEMORY_* settings from the enclosing scope.
    """
    # Elapsed seconds since profiling started; embedded in filenames.
    now = time() - start
    if PROFILE_MEMORY_GRAPH_BACKREF_TYPES:
        for type_ in types:
            # Sample several random instances per type to get varied chains.
            for sample_number in xrange(
                    PROFILE_MEMORY_GRAPH_BACKREF_AMOUNT):
                objects = objgraph.by_type(type_)
                if objects:
                    objgraph.show_chain(objgraph.find_backref_chain(
                        random.choice(objects),
                        objgraph.is_proper_module),
                        filename=objgraph_out_file % (type_, now, sample_number))
                else:
                    logger.error("No objects of type %s found!", type_)
    scanner.dump_all_objects(meliae_out_file % now)
def write_memory_dump(): """Dump memory to a temporary filename with the meliae package. @return: JSON filename where memory dump has been written to @rtype: string """ # first do a full garbage collection run gc.collect() if gc.garbage: log.warn(LOG_CHECK, "Unreachabe objects: %s", pprint.pformat(gc.garbage)) from meliae import scanner fo, filename = get_temp_file(mode='wb', suffix='.json', prefix='lcdump_') try: scanner.dump_all_objects(fo) finally: fo.close() return filename
def meliae_dump():
    """Dump memory using meliae."""
    try:
        from meliae import scanner
        dump_dir = settings.LOG_FOLDER
        # One timestamped JSON file per dump, UTC to avoid DST ambiguity.
        target = os.path.join(dump_dir, 'meliae-%s.json' % (
            datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S",)))
        gc.collect()
        scanner.dump_all_objects(target)
    except ImportError as e:
        return "Meliae not available: %s" % (e,)
    except Exception as e:
        return "Error while trying to dump memory: %s" % (e,)
    else:
        return 'Output written to: %s' % (target,)
def debug_dump():
    """ Called when receiving a debug signal. Interrupt running process, and
    provide a python prompt for interactive debugging."""
    logger.warn("receive signal to dump memory")
    try:
        from meliae import scanner  # @UnresolvedImport
    # FIX: was a bare 'except:', which also hid SystemExit/KeyboardInterrupt;
    # only an import failure is expected here.
    except ImportError:
        logger.warn("can't dump memory, meliae is not available")
        return
    try:
        filename = tempfile.mktemp(suffix='.json', prefix='rdiff-dump-')
        logger.info("create memory dump: %s" % (filename,))
        scanner.dump_all_objects(filename)
    # FIX: narrowed from bare 'except:'; still best-effort (logged, not raised).
    except Exception:
        logger.warn("fail to dump memory", exc_info=True)
def on_memory_dump_button_clicked(self, dump_core):
    """Ask for a target directory, then dump core or GUI memory into it."""
    self.export_dir = QFileDialog.getExistingDirectory(self, "Please select the destination directory", "", QFileDialog.ShowDirsOnly)
    # Empty string means the user cancelled the dialog.
    if len(self.export_dir) > 0:
        filename = "tribler_mem_dump_%s_%s.json" % \
            ('core' if dump_core else 'gui', datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
        if dump_core:
            # Core dump: fetched asynchronously over the debug REST endpoint.
            self.request_mgr = TriblerRequestManager()
            self.request_mgr.download_file("debug/memory/dump", lambda data: self.on_memory_dump_data_available(filename, data))
        elif scanner:
            # GUI dump: meliae scans this (GUI) process directly.
            scanner.dump_all_objects(os.path.join(self.export_dir, filename))
        else:
            # 'scanner' is falsy when meliae could not be imported.
            ConfirmationDialog.show_error(self.window(), "Error when performing a memory dump", "meliae memory dumper is not compatible with Python 3")
def debug_dump_mem():
    """ Called when receiving a debug signal. Interrupt running process, and
    provide a python prompt for interactive debugging."""
    logger.warning("receive signal to dump memory")
    try:
        from meliae import scanner  # @UnresolvedImport
    # FIX: was a bare 'except:', which also hid SystemExit/KeyboardInterrupt;
    # only an import failure is expected here.
    except ImportError:
        logger.warning("can't dump memory, meliae is not available")
        return
    try:
        filename = tempfile.mktemp(suffix='.json', prefix='rdiff-dump-')
        logger.info("create memory dump: %s", filename)
        scanner.dump_all_objects(filename)
    # FIX: narrowed from bare 'except:'; still best-effort (logged, not raised).
    except Exception:
        logger.warning("fail to dump memory", exc_info=True)
def update_drinks(drinks):
    """Rebuild the module-level position->drink map from *drinks*.

    Positions are spaced by int(1000 / price_factor) per drink; the new
    map and upper bound are published atomically via tuple assignment.
    Optionally writes a numbered meliae dump when debug_memory is set.
    """
    global current_drinks
    global max_position
    rebuilt = {}
    offset = 0
    for drink in drinks:
        rebuilt[offset] = drink
        offset += int(1000.0 / drink.price_factor)
    current_drinks, max_position = rebuilt, offset
    # Dump memory map at this point
    if debug_memory:
        global mem_counter
        mem_counter += 1
        scanner.dump_all_objects('memory%04d.json' % mem_counter)
def dump_memory(signum, frame):
    """
    Dump memory stats for the current process to a temp directory.

    Uses the meliae output format. Writes one dump before garbage
    collection and one after collecting each generation.
    """
    timestamp = datetime.now().isoformat()
    format_str = '{}/meliae.{}.{}.{{}}.dump'.format(
        tempfile.gettempdir(), timestamp, os.getpid(),
    )
    scanner.dump_all_objects(format_str.format('pre-gc'))
    # force garbage collection
    # FIX: xrange is Python-2-only; range(3) behaves the same on both 2 and 3.
    for gen in range(3):
        gc.collect(gen)
        scanner.dump_all_objects(format_str.format("gc-gen-{}".format(gen)))
def render_GET(self, request):
    """
    .. http:get:: /debug/memory/dump

    A GET request to this endpoint returns a Meliae-compatible dump of the memory contents.

    **Example request**:

    .. sourcecode:: none

        curl -X GET http://localhost:8085/debug/memory/dump

    **Example response**:

    The content of the memory dump file.
    """
    content = ""
    if sys.platform == "win32":
        # Older meliae builds can segfault writing straight to a file on
        # Windows, so dump into an in-memory buffer instead.
        buf = MemoryDumpBuffer()
        try:
            scanner.dump_all_objects(buf)
        except OverflowError as exc:
            # https://bugs.launchpad.net/meliae/+bug/569947
            logging.error(
                "meliae dump failed (your version may be too old): %s",
                str(exc))
        content = buf.getvalue()
        buf.close()
    else:
        # Elsewhere, dumping directly to disk is much faster.
        dump_path = os.path.join(self.session.config.get_state_dir(),
                                 'memory_dump.json')
        scanner.dump_all_objects(dump_path)
        with open(dump_path, 'r') as fh:
            content = fh.read()
    stamp = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    request.setHeader(b'content-type', 'application/json')
    request.setHeader(
        b'Content-Disposition',
        'attachment; filename=tribler_memory_dump_%s.json' % stamp)
    return content
def search(args):
    """ Default function for the search command line option.
    Search a process's memory for a specific Structure.
    Returns findings in pickled or text format.
    See the command line --help .
    """
    log.debug('args: %s' % args)
    structType = getKlass(args.structName)
    if args.baseOffset:
        args.baseOffset = int(args.baseOffset, 16)
    mappings = MemoryMapper(args).getMappings()
    if args.fullscan:
        targetMapping = mappings
    else:
        if args.hint:
            log.debug('Looking for the mmap containing the hint addr.')
            m = mappings.getMmapForAddr(args.hint)
            if not m:
                log.error('This hint is not a valid addr (0x%x)' % (args.hint))
                return
            targetMapping = [m]
        else:
            # No hint: restrict the search to the heap mapping.
            targetMapping = [m for m in mappings if m.pathname == '[heap]']
        targetMapping = memory_mapping.Mappings(targetMapping, mappings.name)
    if len(targetMapping) == 0:
        log.warning('No memorymapping found. Searching everywhere.')
        targetMapping = mappings
    finder = StructFinder(mappings, targetMapping)
    try:
        outs = finder.find_struct(structType, hintOffset=args.hint,
                                  maxNum=args.maxnum)
    # FIX: 'except KeyboardInterrupt, e' is Python-2-only syntax.
    except KeyboardInterrupt as e:
        # Ctrl-C during a long scan: dump memory for post-mortem analysis.
        from meliae import scanner
        scanner.dump_all_objects('haystack-search.dump')
        if not args.debug:
            # FIX: bare raise preserves the original traceback ('raise e'
            # re-raised from here instead).
            raise
        import code
        code.interact(local=locals())
    return None
def meliae_dump(file_path):
    """ Memory analysis helpers (for inspecting the resulting dump):
    objs = om.objs
    ft=lambda tname: [o for o in objs.values() if o.type_str == tname]
    fp=lambda id: [objs.get(rid) for rid in objs.get(id).parents]
    fr=lambda id: [objs.get(rid) for rid in objs.get(id).children]
    #exec 'def fp1(id):\n obj = fo1(id)\n return fp(obj)'
    exec 'def fps(obj, rs=None):\n if rs is None:\n rs = []\n if len(rs) > 2000:\n return rs\n if obj is not None and obj not in rs:\n rs.append(obj)\n for p in fr(obj):\n fps(p, rs=rs)\n return rs'
    exec 'def fps1(obj, rs=None):\n if rs is None:\n rs = []\n if len(rs) > 2000:\n return rs\n if obj is not None and obj not in rs:\n if obj.num_parents == 0:\n rs.append(obj)\n for p in fp(obj):\n fps(p, rs=rs)\n return rs'
    fo=lambda id: objs.get(id)

    Runtime helpers (for inspecting the live process):
    import gc
    get_objs = lambda :dict([(id(o), o) for o in gc.get_objects()])
    fid = lambda oid: [o for o in gc.get_objects() if (id(o) == oid)]
    fr = lambda o: gc.get_referents(o)
    fp = lambda o: gc.get_referrers(o)
    """
    from meliae import scanner
    scanner.dump_all_objects(file_path)
def dump_memory(signum, frame):
    """
    Dump memory stats for the current process to a temp directory.

    Uses the meliae output format. Writes one dump before garbage
    collection and one after collecting each generation.
    """
    timestamp = datetime.now().isoformat()
    format_str = '{}/meliae.{}.{}.{{}}.dump'.format(
        tempfile.gettempdir(),
        timestamp,
        os.getpid(),
    )
    scanner.dump_all_objects(format_str.format('pre-gc'))
    # force garbage collection
    # FIX: xrange is Python-2-only; range(3) behaves the same on both 2 and 3.
    for gen in range(3):
        gc.collect(gen)
        scanner.dump_all_objects(
            format_str.format("gc-gen-{}".format(gen))
        )
def search(args):
    """ Default function for the search command line option.
    Search a process's memory for a specific Structure.
    Returns findings in pickled or text format.
    See the command line --help .
    """
    log.debug('args: %s' % args)
    structType = getKlass(args.structName)
    if args.baseOffset:
        args.baseOffset = int(args.baseOffset, 16)
    mappings = MemoryMapper(args).getMappings()
    if args.fullscan:
        targetMapping = mappings
    else:
        if args.hint:
            log.debug('Looking for the mmap containing the hint addr.')
            m = mappings.getMmapForAddr(args.hint)
            if not m:
                log.error('This hint is not a valid addr (0x%x)' % (args.hint))
                return
            targetMapping = [m]
        else:
            # No hint: restrict the search to the heap mapping.
            targetMapping = [m for m in mappings if m.pathname == '[heap]']
        targetMapping = memory_mapping.Mappings(targetMapping, mappings.name)
    if len(targetMapping) == 0:
        log.warning('No memorymapping found. Searching everywhere.')
        targetMapping = mappings
    finder = StructFinder(mappings, targetMapping)
    try:
        outs = finder.find_struct(structType, hintOffset=args.hint,
                                  maxNum=args.maxnum)
    # FIX: 'except KeyboardInterrupt, e' is Python-2-only syntax.
    except KeyboardInterrupt as e:
        # Ctrl-C during a long scan: dump memory for post-mortem analysis.
        from meliae import scanner
        scanner.dump_all_objects('haystack-search.dump')
        if not args.debug:
            # FIX: bare raise preserves the original traceback.
            raise
        import code
        code.interact(local=locals())
    return None
def _dump_memory_usage(self, *args):
    """Dump memory usage data to a file.

    Writes a meliae dump for the current process into a timestamped
    file named "<MEMORY_DUMP_FILE>.<pid>.<timestamp>".  If meliae is
    missing or anything else goes wrong, a companion
    "<MEMORY_DUMP_FILE>.error.<pid>.<timestamp>" file is written
    containing the traceback instead.
    """
    stamp = int(time.time())
    try:
        filename = "%s.%d.%d" % (MEMORY_DUMP_FILE, os.getpid(), stamp)
        from meliae import scanner
        scanner.dump_all_objects(filename)
    except Exception:
        filename = "%s.error.%d.%d" % (MEMORY_DUMP_FILE, os.getpid(), stamp)
        with open(filename, "w") as f:
            f.write("ERROR DUMPING MEMORY USAGE\n\n")
            traceback.print_exc(file=f)
def on_memory_dump_button_clicked(self, dump_core):
    """Ask for a target directory, then dump core or GUI memory into it."""
    self.export_dir = QFileDialog.getExistingDirectory(
        self, "Please select the destination directory", "", QFileDialog.ShowDirsOnly
    )
    # Empty string means the user cancelled the dialog.
    if len(self.export_dir) > 0:
        filename = "tribler_mem_dump_%s_%s.json" % (
            'core' if dump_core else 'gui',
            datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S"),
        )
        if dump_core:
            # Core dump: fetched asynchronously over the debug REST endpoint.
            self.rest_request = TriblerNetworkRequest(
                "debug/memory/dump",
                lambda data, _: self.on_memory_dump_data_available(filename, data)
            )
        elif scanner:
            # GUI dump: meliae scans this (GUI) process directly.
            scanner.dump_all_objects(os.path.join(self.export_dir, filename))
        else:
            # 'scanner' is falsy when meliae could not be imported.
            ConfirmationDialog.show_error(
                self.window(),
                "Error when performing a memory dump",
                "meliae memory dumper is not compatible with Python 3",
            )
def measure_memory(self, _id):
    """Dump, reload and summarize memory for test *_id* (Python 2 only)."""
    from meliae import scanner, loader
    # Write the dump, then immediately reload it to print a summary table.
    scanner.dump_all_objects(self.MEMORY_DUMP % _id)
    om = loader.load(self.MEMORY_DUMP % _id)
    om.remove_expensive_references()
    summary = om.summarize()
    print summary
    #print('runsnakemem %s' % self.MEMORY_DUMP)
    # Cross-check with OS-level accounting from getrusage.
    usage = resource.getrusage(resource.RUSAGE_SELF)
    print 'maximum resident set size', usage.ru_maxrss
    print 'shared memory size', usage.ru_ixrss
    print 'unshared memory size', usage.ru_idrss
    print 'unshared stack size', usage.ru_isrss
    # And with psutil's view of the same process.
    import psutil
    self_pid = psutil.Process()  # pylint: disable=E1101
    print self_pid.memory_info()
def render_GET(self, request):
    """
    .. http:get:: /debug/memory/dump

    A GET request to this endpoint returns a Meliae-compatible dump of the memory contents.

    **Example request**:

    .. sourcecode:: none

        curl -X GET http://localhost:8085/debug/memory/dump

    **Example response**:

    The content of the memory dump file.
    """
    content = ""
    if sys.platform == "win32":
        # On Windows meliae (especially older versions) segfault on writing to file
        dump_buffer = MemoryDumpBuffer()
        try:
            scanner.dump_all_objects(dump_buffer)
        except OverflowError as e:
            # https://bugs.launchpad.net/meliae/+bug/569947
            logging.error("meliae dump failed (your version may be too old): %s", str(e))
        content = dump_buffer.getvalue()
        dump_buffer.close()
    else:
        # On other platforms, simply writing to file is much faster
        dump_file_path = os.path.join(self.session.config.get_state_dir(), 'memory_dump.json')
        scanner.dump_all_objects(dump_file_path)
        with open(dump_file_path, 'r') as dump_file:
            content = dump_file.read()
    # Serve the dump as a timestamped JSON attachment.
    date_str = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    request.setHeader(b'content-type', 'application/json')
    request.setHeader(b'Content-Disposition', 'attachment; filename=tribler_memory_dump_%s.json' % date_str)
    return content
def start_memory_dumper():
    """ Initiates the memory profiler. """
    logger.info("starting memory dump looping call")
    from meliae import scanner
    # Setup the whole thing
    start = time()
    # Per-process output directory: $OUTPUT_DIR/memprof/<pid>/
    memdump_dir = path.join(environ["OUTPUT_DIR"], "memprof", str(PID))
    makedirs(memdump_dir)
    meliae_out_file = path.join(memdump_dir, "memory-%06.2f.out")
    objgraph_out_file = path.join(memdump_dir, "objgraph-%s-%06.2f-%d.png")
    # objgraph back-reference graphing is optional, driven by config.
    if PROFILE_MEMORY_GRAPH_BACKREF_TYPES:
        import objgraph
        import random
        types = PROFILE_MEMORY_GRAPH_BACKREF_TYPES.split()

    def dump_memory():
        # Elapsed seconds since start; embedded in every filename.
        now = time() - start
        if PROFILE_MEMORY_GRAPH_BACKREF_TYPES:
            for type_ in types:
                # Sample several random instances per type for varied chains.
                for sample_number in xrange(
                        PROFILE_MEMORY_GRAPH_BACKREF_AMOUNT):
                    objects = objgraph.by_type(type_)
                    if objects:
                        objgraph.show_chain(objgraph.find_backref_chain(
                            random.choice(objects),
                            objgraph.is_proper_module),
                            filename=objgraph_out_file % (type_, now, sample_number))
                    else:
                        logger.error("No objects of type %s found!", type_)
        scanner.dump_all_objects(meliae_out_file % now)

    # Periodic dump every PROFILE_MEMORY_INTERVAL seconds, starting now.
    LoopingCall(dump_memory).start(PROFILE_MEMORY_INTERVAL, now=True)
    # One final dump right before the reactor shuts down.
    reactor.addSystemEventTrigger(
        "before", "shutdown",
        lambda: scanner.dump_all_objects(
            path.join(memdump_dir, "memory-%06.2f-%s.out") % (time() - start, "-shutdown")))
def sigdumpmem_handler(signum, frame):
    """Signal handler: write a meliae dump of all live objects to DUMP_FILE."""
    scanner.dump_all_objects(DUMP_FILE)
def dump_memory(signum, frame):
    """Dump memory stats for the current process to a temp directory.
    Uses the meliae output format."""
    dump_path = '{}/meliae.{}.{}.dump'.format(
        tempfile.gettempdir(),
        datetime.now().isoformat(),
        os.getpid())
    scanner.dump_all_objects(dump_path)
def OnMemdump(self, event):
    """GUI event handler: write a meliae dump to 'memory-dump.out'."""
    from meliae import scanner
    target = "memory-dump.out"
    scanner.dump_all_objects(target)
def main(self):
    """Run the scheduled test sequence: normalize the time window, build the
    schedule, execute or reload each test, and save summaries/TSV/pickle
    results to the output directory."""
    u.memory_use_log()
    t_start = time.time()
    # Replaced with self.cur in __init__
    # db = db_glue.DB(self.args.database_file)
    # assert (db.metadata_get('schema_version') == '5')
    # normalize start and end times
    if (self.args.start is None):
        sql = 'SELECT min(created_at) AS st FROM {0};'.format(self.table)
        self.cur.execute(sql)
        self.args.start = self.cur.fetchone()[0]
    if (self.args.end is None):
        sql = 'SELECT max(created_at) AS et FROM {0};'.format(self.table)
        self.cur.execute(sql)
        # add one second because end time is exclusive
        self.args.end = self.cur.fetchone()[0] + timedelta(seconds=1)
    self.args.start = time_.as_utc(self.args.start)
    self.args.end = time_.as_utc(self.args.end)
    # print test sequence parameters
    self.log_parameters()
    # set up model parameters
    model_class = u.class_by_name(self.args.model)
    model_class.parms_init(self.args.model_parms, log_parms=True)
    # build schedule
    self.schedule_build(self.args.limit)
    l.info('scheduled %s tests (%s left over)'
           % (len(self.schedule), self.args.end - self.schedule[-1].end))
    if (not os.path.exists(self.args.output_dir)):
        os.mkdir(self.args.output_dir)
    l.info('results in %s' % (self.args.output_dir))
    # testing loop
    for (i, t) in enumerate(self.schedule):
        if (i+1 < self.args.start_test):
            l.info('using saved test %d per --start-test' % (i+1))
            l.warning('token and tweet counts will be incorrect')
            # FIXME: hack.....
            try:
                t.model = u.Deleted_To_Save_Memory()
                t.results = u.Deleted_To_Save_Memory()
                t.i = i
                t.train_tweet_ct = -1e6
                t.train_token_ct = -1e6
                t.test_tweet_ct = -1e6
                t.unshrink_from_disk(self.args.output_dir, results=True)
                t.attempted = True
            # FIX: was 'except (IOError, x):' — a broken py2->py3 port that
            # treats the undefined name 'x' as an exception class (NameError
            # at match time).  'as' binds the exception correctly.
            except IOError as x:
                # errno 2 == ENOENT: saved test simply absent; anything else
                # is a real I/O failure and must propagate.
                if (x.errno != 2):
                    raise
                t.attempted = False
        else:
            l.info('starting test %d of %d: %s' % (i+1, len(self.schedule), t))
            t.do_test(model_class, self.cur, self.args, i)
        t.summarize()
        if (t.attempted):
            if (self.args.profile_memory):
                # We dump a memory profile here because it's the high water
                # mark; we're about to reduce usage significantly.
                import meliae.scanner as ms
                filename = 'memory.%d.json' % (i)
                l.info('dumping memory profile %s' % (filename))
                ms.dump_all_objects('%s/%s' % (self.args.output_dir, filename))
            t.shrink_to_disk(self.args.output_dir)
            l.debug('result: %s' % (t.summary))
        u.memory_use_log()
    # done!
    l.debug('computing summary')
    self.summarize()
    l.debug('summary: %s' % (self.summary))
    l.debug('saving TSV results')
    test_indices = u.sl_union_fromtext(len(self.schedule), ':')
    self.tsv_save_tests('%s/%s' % (self.args.output_dir, 'tests.tsv'),
                        test_indices)
    l.debug('saving pickled summary')
    self.memory_use = u.memory_use()
    self.memory_use_peak = "Not implemented"
    self.time_use = time.time() - t_start
    u.pickle_dump('%s/%s' % (self.args.output_dir, 'summary'), self)
    u.memory_use_log()
    l.info('done in %s' % (u.fmt_seconds(self.time_use)))
# "meliae" provides a way to dump python memory usage information to a JSON # disk format, which can then be parsed into useful things like graph # representations. # # https://launchpad.net/meliae # http://jam-bazaar.blogspot.com/2009/11/memory-debugging-with-meliae.html from meliae import scanner scanner.dump_all_objects('objects.json')
pass

# Exercise the queue with a bound callback, then shut everything down so
# only leaked objects remain before the memory measurement below.
queue = Queue()
data = {'n_calls': 0}
func = bind(count_calls, data)
task = queue.run(['t1', 't2', 't3', 't4', 't5', 't6'], func)
task.wait()
queue.shutdown()
queue.destroy()
del func

# Test memory consumption.
from meliae import scanner
gc.collect()
scanner.dump_all_objects("test.dump")
from meliae import loader
om = loader.load('test.dump')
om.remove_expensive_references()
om.collapse_instance_dicts()
om.compute_referrers()
om.compute_total_size()
#print om.summarize()
from pprint import pprint as pp

# NOTE(review): 'larger' is defined twice — the second definition shadows
# the first (and compares objects, not ids). Probably leftover experiment.
def larger(x, y):
    return om[y].total_size - om[x].total_size

def larger(x, y):
    return int(y.total_size - x.total_size)
def dump(filename):
    """Write a meliae dump of all live objects to *filename*."""
    scanner.dump_all_objects(filename)
def main(self):
    """Run the scheduled test sequence against the tweet database and save
    per-test results to the output directory (Python 2 code)."""
    u.memory_use_log()
    t_start = time.time()
    db = db_glue.DB(self.args.database_file)
    l.info('opened database %s' % (self.args.database_file))
    assert (db.metadata_get('schema_version') == '5')
    # normalize start and end times
    if (self.args.start is None):
        sql = 'SELECT min(created_at) AS "st [timestamp]" FROM tweet'
        self.args.start = db.sql(sql)[0]['st']
    if (self.args.end is None):
        sql = 'SELECT max(created_at) AS "et [timestamp]" FROM tweet'
        # add one second because end time is exclusive
        self.args.end = db.sql(sql)[0]['et'] + timedelta(seconds=1)
    self.args.start = time_.as_utc(self.args.start)
    self.args.end = time_.as_utc(self.args.end)
    # print test sequence parameters
    self.log_parameters()
    # set up model parameters
    model_class = u.class_by_name(self.args.model)
    model_class.parms_init(self.args.model_parms, log_parms=True)
    # build schedule
    self.schedule_build(self.args.limit)
    l.info('scheduled %s tests (%s left over)'
           % (len(self.schedule), self.args.end - self.schedule[-1].end))
    if (not os.path.exists(self.args.output_dir)):
        os.mkdir(self.args.output_dir)
    l.info('results in %s' % (self.args.output_dir))
    # testing loop
    for (i, t) in enumerate(self.schedule):
        if (i+1 < self.args.start_test):
            # Tests before --start-test are reloaded from disk, not re-run.
            l.info('using saved test %d per --start-test' % (i+1))
            l.warning('token and tweet counts will be incorrect')
            # FIXME: hack.....
            try:
                t.model = u.Deleted_To_Save_Memory()
                t.results = u.Deleted_To_Save_Memory()
                t.i = i
                t.train_tweet_ct = -1e6
                t.train_token_ct = -1e6
                t.test_tweet_ct = -1e6
                t.unshrink_from_disk(self.args.output_dir, results=True)
                t.attempted = True
            except IOError, x:
                # errno 2 == ENOENT: saved test absent; anything else is a
                # real I/O failure and must propagate.
                if (x.errno != 2):
                    raise
                t.attempted = False
        else:
            l.info('starting test %d of %d: %s' % (i+1, len(self.schedule), t))
            t.do_test(model_class, db, self.args, i)
        t.summarize()
        if (t.attempted):
            if (self.args.profile_memory):
                # We dump a memory profile here because it's the high water
                # mark; we're about to reduce usage significantly.
                import meliae.scanner as ms
                filename = 'memory.%d.json' % (i)
                l.info('dumping memory profile %s' % (filename))
                ms.dump_all_objects('%s/%s' % (self.args.output_dir, filename))
            t.shrink_to_disk(self.args.output_dir)
            l.debug('result: %s' % (t.summary))
        u.memory_use_log()
def preform_memory_dump(fpath):
    """Write a meliae dump of every live object to *fpath*.

    NOTE(review): name looks like a typo for 'perform_memory_dump';
    kept unchanged for backward compatibility with existing callers.
    """
    from meliae import scanner
    scanner.dump_all_objects(fpath)
from meliae import scanner
from meliae import loader

# Dump the current process, then reload the dump and print a summary.
# NOTE(review): hard-coded user-specific path — one-off analysis script.
scanner.dump_all_objects('/home/wangq/dump.txt')
om = loader.load('/home/wangq/dump.txt')
om.compute_parents()
om.collapse_instance_dicts()
om.summarize()
def main():
    """Exploratory script: load cached reversed structures from a skype dump,
    then use meliae to find what keeps them alive in memory."""
    from haystack.reverse import context
    ctx = context.get_context('test/dumps/skype/skype.1/skype.1.f')
    from haystack.reverse import structure
    it = structure.cacheLoadAllLazy(ctx)
    structs = []
    for i in range(10000):
        structs.append(it.next())
    # Force rendering of every structure (materializes their fields).
    [s.toString() for addr, s in structs]
    #51 Mo
    # Shrink the cache so most instances become collectable.
    structure.CacheWrapper.refs.size = 5
    for i in range(5):
        structure.CacheWrapper.refs[i] = i
    #51 Mo
    from meliae import scanner
    scanner.dump_all_objects('filename.json')
    from meliae import loader
    om = loader.load('filename.json')
    s = om.summarize()
    s
    '''
    Total 206750 objects, 150 types, Total size = 27.2MiB (28495037 bytes)
    Index Count % Size % Cum Max Kind
    0 75801 36 7529074 26 26 27683 str
    1 11507 5 6351864 22 48 552 Field
    2 16 0 5926913 20 69 2653328 numpy.ndarray
    3 10000 4 1680000 5 75 168 CacheWrapper
    4 2099 1 1158648 4 79 552 AnonymousStructInstance
    5 1182 0 857136 3 82 98440 dict
    6 18630 9 745200 2 85 40 weakref
    7 14136 6 633148 2 87 43812 list
    '''
    # clearly Field instances keep some place....
    # most 10000 Anonymous intances are not int memory now
    om.compute_referrers()
    # om[ addr].parents
    # om[ addr].children
    # get the biggest Field
    f_addr = s.summaries[1].max_address
    om[f_addr]
    #Field(179830860 552B 21refs 1par)
    om[f_addr].parents
    # [179834316]
    # >>> om[ 179834316 ]
    # list(179834316 132B 19refs 1par) <- list of fields in Struct
    l_addr = om[f_addr].parents[0]
    om[l_addr].parents
    # [179849516]
    # >>> om[ 179849516 ]
    # AnonymousStructInstance(179849516 552B 23refs 19par)
    anon_addr = om[l_addr].parents[0]
    om[anon_addr]
    #179849516 is a anon struct
    import networkx
    import matplotlib.pyplot as plt
    graphme()
def get(self):
    """HTTP GET handler: write a timestamped meliae dump under /tmp and
    respond with a success message."""
    from meliae import scanner
    import time
    dump_path = '/tmp/dump%s.txt' % time.time()
    scanner.dump_all_objects(dump_path)
    self.write("success!!")