def main(**args):
    """Instantiate the requested plot class and run it over pickle files.

    Expected keys in ``args``: 'plot' (class name inside the ``plots``
    module), 'p' (process count), 'mode', 'header', 'prefix', 'o' (output
    directory), 'full', 'wide', and either 'jobids' (resolved through the
    batch accounting system) or 'files' (passed to
    ``tspl_utils.getfilelist`` as a fallback).  Optional overrides
    'batch_system', 'acct_path', 'host_name_ext' default to ``cfg``.
    """
    # Resolve the plot class dynamically by name from the plots module.
    plot_type = getattr(sys.modules[plots.__name__], args['plot'])
    plot = plot_type(processes=args['p'], mode=args['mode'],
                     header=args['header'], prefix=args['prefix'],
                     outdir=args['o'], aggregate=(not args['full']),
                     wide=args['wide'], save=True)
    try:
        batch_system = args.get('batch_system', cfg.batch_system)
        acct_path = args.get('acct_path', cfg.acct_path)
        host_name_ext = args.get('host_name_ext', cfg.host_name_ext)
        acct = batch_acct.factory(batch_system, acct_path, host_name_ext)
        reader = acct.find_jobids(args['jobids'])
        filelist = []
        # Renamed from 'acct': the original shadowed the accounting object
        # with the per-job record inside this loop.
        for job in reader:
            # Pickles live under pickles_dir/YYYY-MM-DD/<job id>, keyed by
            # the job's end time.
            date_dir = os.path.join(
                cfg.pickles_dir,
                datetime.fromtimestamp(job['end_time']).strftime('%Y-%m-%d'))
            filelist.append(os.path.join(date_dir, job['id']))
    except Exception:
        # Was a bare 'except:'; narrowed so SystemExit/KeyboardInterrupt
        # still propagate.  Any failure resolving job ids (including a
        # missing 'jobids' key) falls back to an explicit file list.
        filelist = tspl_utils.getfilelist(args['files'])
    plot.run(filelist)
def main(**args):
    """Build the named plot type and feed it a list of job pickle files.

    ``args`` must provide 'plot' (class name in the ``plots`` module) and
    the constructor options 'p', 'mode', 'header', 'prefix', 'o', 'full',
    'wide'.  File discovery tries 'jobids' via batch accounting first and
    falls back to 'files' via ``tspl_utils.getfilelist``.  'batch_system',
    'acct_path' and 'host_name_ext' default to the ``cfg`` module.
    """
    plot_cls = getattr(sys.modules[plots.__name__], args['plot'])
    plot = plot_cls(processes=args['p'], mode=args['mode'],
                    header=args['header'], prefix=args['prefix'],
                    outdir=args['o'], aggregate=(not args['full']),
                    wide=args['wide'], save=True)
    try:
        batch_system = args.get('batch_system', cfg.batch_system)
        acct_path = args.get('acct_path', cfg.acct_path)
        host_name_ext = args.get('host_name_ext', cfg.host_name_ext)
        acct = batch_acct.factory(batch_system, acct_path, host_name_ext)
        filelist = []
        # Renamed the loop variable from 'acct', which clobbered the
        # accounting object created just above.
        for record in acct.find_jobids(args['jobids']):
            # Pickle path layout: pickles_dir/YYYY-MM-DD/<job id>, with the
            # date taken from the job's end time.
            day = datetime.fromtimestamp(record['end_time']).strftime('%Y-%m-%d')
            filelist.append(os.path.join(cfg.pickles_dir, day, record['id']))
    except Exception:
        # Narrowed from a bare 'except:' so interpreter-exit exceptions
        # propagate; anything else means job-id lookup is unavailable and
        # we use the explicit file list instead.
        filelist = tspl_utils.getfilelist(args['files'])
    plot.run(filelist)
def main(**args): batch_system = args.get('batch_system',cfg.batch_system) acct_path = args.get('acct_path',cfg.acct_path) host_name_ext = args.get('host_name_ext',cfg.host_name_ext) acct = batch_acct.factory(batch_system, acct_path, host_name_ext) reader = acct.find_jobids(args['jobids']) filelist = [] for acct in reader: date_dir = os.path.join(cfg.pickles_dir, datetime.fromtimestamp(acct['end_time']).strftime('%Y-%m-%d')) filelist.append(os.path.join(date_dir,acct['id'])) for f in filelist: with open(f) as fd: if args['a']: data = pickle.load(fd) data = diff(data.aggregate_stats(args['type']))/diff(data.times) print data if not args['plot']: return plot_type = getattr(sys.modules[plots.__name__],args['plot']) plot = plot_type(processes=args['p'],mode=args['mode'], header=args['header'], prefix=args['prefix'],outdir=args['o'], aggregate=(not args['full']),wide=args['wide'], save=True)
def main(**args):
    """Print per-interval rates for the given job ids, then optionally plot.

    Expected keys in ``args``: 'jobids', 'a' (flag: print rates), 'type'
    (stat type whose rate is printed), 'plot' (plot class name, or falsy
    to skip plotting), plus the plot constructor options 'p', 'mode',
    'header', 'prefix', 'o', 'full', 'wide'.  Optional overrides
    'batch_system', 'acct_path', 'host_name_ext' default to ``cfg``.
    """
    batch_system = args.get("batch_system", cfg.batch_system)
    acct_path = args.get("acct_path", cfg.acct_path)
    host_name_ext = args.get("host_name_ext", cfg.host_name_ext)
    acct = batch_acct.factory(batch_system, acct_path, host_name_ext)
    reader = acct.find_jobids(args["jobids"])
    filelist = []
    # NOTE(review): the loop variable shadows the 'acct' object created
    # above; harmless here only because 'acct' is not used again.
    for acct in reader:
        # Pickles live under pickles_dir/YYYY-MM-DD/<job id>, keyed by the
        # job's end time.
        date_dir = os.path.join(cfg.pickles_dir, datetime.fromtimestamp(acct["end_time"]).strftime("%Y-%m-%d"))
        filelist.append(os.path.join(date_dir, acct["id"]))
    for f in filelist:
        with open(f) as fd:
            # Each file is opened even when 'a' is unset; the flag only
            # gates the load/print work.
            if args["a"]:
                data = pickle.load(fd)
                # Rate = delta(aggregated counter) / delta(timestamp).
                # 'diff' is not defined in this view -- presumably
                # numpy.diff; TODO confirm.
                data = diff(data.aggregate_stats(args["type"])) / diff(data.times)
                print data
    if not args["plot"]:
        return
    plot_type = getattr(sys.modules[plots.__name__], args["plot"])
    # NOTE(review): the plot object is constructed but never run here,
    # unlike the main() variants that call plot.run(filelist) -- confirm.
    plot = plot_type(
        processes=args["p"],
        mode=args["mode"],
        header=args["header"],
        prefix=args["prefix"],
        outdir=args["o"],
        aggregate=(not args["full"]),
        wide=args["wide"],
        save=True,
    )
def main(**args):
    """Dump the contents of a job pickle: hosts, stat types, and devices.

    Expected keys in ``args``: 'jobid' (resolve the pickle through batch
    accounting) or 'file' (explicit pickle path), plus optional 'host' and
    'type' filters that narrow the dump to one host / one stat type.
    """
    acct = batch_acct.factory(cfg.batch_system, cfg.acct_path, cfg.host_name_ext)
    if args['jobid']:
        # Take the first accounting record for the job id and derive the
        # pickle path: pickles_dir/YYYY-MM-DD/<job id>, keyed by end time.
        reader = acct.find_jobids(args['jobid']).next()
        date_dir = os.path.join(cfg.pickles_dir, datetime.fromtimestamp(reader['end_time']).strftime('%Y-%m-%d'))
        pickle_file = os.path.join(date_dir, reader['id'])
    else:
        pickle_file = args['file']
    with open(pickle_file) as fd:
        data = pickle.load(fd)
    print "Hosts:", data.hosts.keys()
    if not args['host']:
        pass
    elif args['host'] in data.hosts:
        # Narrow the dump to the single requested host.
        data.hosts = { args['host'] : data.hosts[args['host']] }
    else:
        # NOTE(review): the message cites args['file'] even on the jobid
        # path, where 'file' may be unset -- confirm the argparse defaults.
        print args['host'],"does not exist in", args['file']
        return
    for host_name, host in data.hosts.iteritems():
        print "Host:",host_name
        print "Types:",host.stats.keys()
        print host.marks
        if not args['type']:
            pass
        elif args['type'] in host.stats:
            # Narrow the dump to the single requested stat type.
            host.stats = { args['type'] : host.stats[args['type']] }
        else:
            # NOTE(review): same args['file'] caveat as the host branch.
            print args['type'],"does not exist in", args['file']
            return
        for type_name, type_device in host.stats.iteritems():
            print ''
            print "Type:", type_name
            print "Schema:", data.get_schema(type_name).keys()
            for device_name, device in type_device.iteritems():
                print "Device:",device_name
                print device
    # Leftover debug marker -- presumably safe to remove; kept as-is.
    print 'test'
def main(**args):
    """Dump the contents of a job pickle: hosts, stat types, and devices.

    Expected keys in ``args``: 'jobid' (resolve the pickle through batch
    accounting) or 'file' (explicit pickle path), plus optional 'host' and
    'type' filters that narrow the dump to one host / one stat type.
    """
    acct = batch_acct.factory(cfg.batch_system, cfg.acct_path, cfg.host_name_ext)
    if args['jobid']:
        # Take the first accounting record for the job id and derive the
        # pickle path: pickles_dir/YYYY-MM-DD/<job id>, keyed by end time.
        reader = acct.find_jobids(args['jobid']).next()
        date_dir = os.path.join(
            cfg.pickles_dir,
            datetime.fromtimestamp(reader['end_time']).strftime('%Y-%m-%d'))
        pickle_file = os.path.join(date_dir, reader['id'])
    else:
        pickle_file = args['file']
    with open(pickle_file) as fd:
        data = pickle.load(fd)
    print "Hosts:", data.hosts.keys()
    if not args['host']:
        pass
    elif args['host'] in data.hosts:
        # Narrow the dump to the single requested host.
        data.hosts = {args['host']: data.hosts[args['host']]}
    else:
        # NOTE(review): the message cites args['file'] even on the jobid
        # path, where 'file' may be unset -- confirm the argparse defaults.
        print args['host'], "does not exist in", args['file']
        return
    for host_name, host in data.hosts.iteritems():
        print "Host:", host_name
        print "Types:", host.stats.keys()
        print host.marks
        if not args['type']:
            pass
        elif args['type'] in host.stats:
            # Narrow the dump to the single requested stat type.
            host.stats = {args['type']: host.stats[args['type']]}
        else:
            # NOTE(review): same args['file'] caveat as the host branch.
            print args['type'], "does not exist in", args['file']
            return
        for type_name, type_device in host.stats.iteritems():
            print ''
            print "Type:", type_name
            print "Schema:", data.get_schema(type_name).keys()
            for device_name, device in type_device.iteritems():
                print "Device:", device_name
                print device
    # Leftover debug marker -- presumably safe to remove; kept as-is.
    print 'test'
def __init__(self, processes=1, **kwargs):
    """Configure a job pickler: time window, paths, and batch accounting.

    Accepts configuration through keyword arguments, falling back to the
    ``cfg`` module: 'processes', 'start'/'end' ('YYYY-MM-DD' strings or
    datetimes; default yesterday/now), 'pickle_dir', 'seek',
    'batch_system', 'acct_path', 'tacc_stats_home', 'host_list_dir',
    'host_name_ext'.  Converts start/end to epoch seconds.
    """
    # Honor the positional 'processes' argument: the original read only
    # kwargs and silently discarded it.
    self.processes = kwargs.get('processes', processes)
    self.start = kwargs.get('start', (datetime.now() - timedelta(days=1)))
    self.end = kwargs.get('end', datetime.now())
    self.pickles_dir = kwargs.get('pickle_dir', cfg.pickles_dir)
    self.seek = kwargs.get('seek', cfg.seek)
    # NOTE(review): hard-coded 'SLURM' default differs from sibling
    # variants that default to cfg.batch_system -- confirm intentional.
    self.batch_system = kwargs.get('batch_system', 'SLURM')
    self.acct_path = kwargs.get('acct_path', cfg.acct_path)
    self.tacc_stats_home = kwargs.get('tacc_stats_home', cfg.tacc_stats_home)
    self.host_list_dir = kwargs.get('host_list_dir', cfg.host_list_dir)
    self.host_name_ext = kwargs.get('host_name_ext', cfg.host_name_ext)
    self.pickle_prot = pickle.HIGHEST_PROTOCOL
    self.acct = batch_acct.factory(self.batch_system,
                                   self.acct_path,
                                   self.host_name_ext)
    # start/end may be 'YYYY-MM-DD' strings or datetime objects; narrowed
    # the original bare 'except:' to exactly what strptime raises
    # (TypeError for non-strings, ValueError for bad formats).
    try:
        self.start = datetime.strptime(self.start, '%Y-%m-%d')
        self.end = datetime.strptime(self.end, '%Y-%m-%d')
    except (TypeError, ValueError):
        pass
    # Store the window as epoch seconds.
    self.start = time.mktime(self.start.timetuple())
    self.end = time.mktime(self.end.timetuple())
def __init__(self, **kwargs):
    """Configure a job pickler and build its worker pool.

    Keyword arguments (falling back to the ``cfg`` module): 'processes',
    'pickle_dir', 'start'/'end' ('YYYY-MM-DD' strings or datetimes;
    default yesterday/tomorrow), 'seek', 'tacc_stats_home',
    'host_list_dir', 'batch_system', 'acct_path', 'host_name_ext'.
    Creates a multiprocessing pool and a partial of ``job_pickle`` bound
    to the configured paths and accounting object.
    """
    self.processes = kwargs.get('processes', 1)
    self.pickles_dir = kwargs.get('pickle_dir', cfg.pickles_dir)
    self.start = kwargs.get('start', None)
    self.end = kwargs.get('end', None)
    # Default window: yesterday through tomorrow.
    if not self.start:
        self.start = (datetime.now() - timedelta(days=1))
    if not self.end:
        self.end = (datetime.now() + timedelta(days=1))
    self.seek = kwargs.get('seek', cfg.seek)
    self.tacc_stats_home = kwargs.get('tacc_stats_home', cfg.tacc_stats_home)
    self.host_list_dir = kwargs.get('host_list_dir', cfg.host_list_dir)
    self.batch_system = kwargs.get('batch_system', cfg.batch_system)
    self.acct_path = kwargs.get('acct_path', cfg.acct_path)
    self.host_name_ext = kwargs.get('host_name_ext', cfg.host_name_ext)
    print(self.batch_system, self.acct_path, self.host_name_ext)
    self.acct = batch_acct.factory(self.batch_system,
                                   self.acct_path,
                                   self.host_name_ext)
    # start/end may be 'YYYY-MM-DD' strings or datetime objects; narrowed
    # the original bare 'except:' to exactly what strptime raises
    # (TypeError for non-strings, ValueError for bad formats).
    try:
        self.start = datetime.strptime(self.start, '%Y-%m-%d')
        self.end = datetime.strptime(self.end, '%Y-%m-%d')
    except (TypeError, ValueError):
        pass
    # Truncate to midnight, then store the window as epoch seconds.
    self.start = time.mktime(self.start.date().timetuple())
    self.end = time.mktime(self.end.date().timetuple())
    self.pool = multiprocessing.Pool(processes=self.processes)
    # Bind the fixed configuration so the pool only has to pass per-job
    # arguments to job_pickle.
    self.partial_pickle = functools.partial(job_pickle,
                                            pickle_dir=self.pickles_dir,
                                            tacc_stats_home=self.tacc_stats_home,
                                            host_list_dir=self.host_list_dir,
                                            acct=self.acct)
    print("Use", self.processes, "processes")
    print("Gather node-level data from", self.tacc_stats_home + "/archive/")
    print("Write pickle files to", self.pickles_dir)
def __init__(self, **kwargs):
    """Configure a job pickler (archive-dir variant) and its worker pool.

    Keyword arguments (falling back to the ``cfg`` module): 'processes',
    'pickle_dir', 'start'/'end' ('YYYY-MM-DD' strings or datetimes;
    default yesterday/tomorrow), 'archive_dir', 'host_list_dir',
    'batch_system', 'acct_path', 'host_name_ext'.  Creates a
    multiprocessing pool and a partial of ``job_pickle`` bound to the
    configured paths.
    """
    self.processes = kwargs.get('processes', 1)
    self.pickles_dir = kwargs.get('pickle_dir', cfg.pickles_dir)
    self.start = kwargs.get('start', None)
    self.end = kwargs.get('end', None)
    # Default window: yesterday through tomorrow.
    if not self.start:
        self.start = (datetime.now() - timedelta(days=1))
    if not self.end:
        self.end = (datetime.now() + timedelta(days=1))
    self.archive_dir = kwargs.get('archive_dir', cfg.archive_dir)
    self.host_list_dir = kwargs.get('host_list_dir', cfg.host_list_dir)
    self.batch_system = kwargs.get('batch_system', cfg.batch_system)
    self.acct_path = kwargs.get('acct_path', cfg.acct_path)
    self.host_name_ext = kwargs.get('host_name_ext', cfg.host_name_ext)
    print(self.batch_system, self.acct_path, self.host_name_ext)
    # NOTE(review): factory is called with two args here while sibling
    # variants pass host_name_ext as a third -- confirm this factory's
    # signature before changing.
    self.acct = batch_acct.factory(self.batch_system, self.acct_path)
    # start/end may be 'YYYY-MM-DD' strings or datetime objects; narrowed
    # the original bare 'except:' to exactly what strptime raises
    # (TypeError for non-strings, ValueError for bad formats).
    try:
        self.start = datetime.strptime(self.start, '%Y-%m-%d')
        self.end = datetime.strptime(self.end, '%Y-%m-%d')
    except (TypeError, ValueError):
        pass
    # Truncate to midnight, then store the window as epoch seconds.
    self.start = time.mktime(self.start.date().timetuple())
    self.end = time.mktime(self.end.date().timetuple())
    self.pool = multiprocessing.Pool(processes=self.processes)
    # Bind the fixed configuration so the pool only has to pass per-job
    # arguments to job_pickle.
    self.partial_pickle = functools.partial(
        job_pickle,
        pickle_dir=self.pickles_dir,
        archive_dir=self.archive_dir,
        host_list_dir=self.host_list_dir,
        host_name_ext=self.host_name_ext)
    print("Use", self.processes, "processes")
    print("Gather node-level data from", self.archive_dir + "/archive/")
    print("Write pickle files to", self.pickles_dir)