def job_pickle(reader_inst, pickles_dir, archive_dir):
    """Pickle the job described by *reader_inst* unless a valid pickle exists.

    An existing pickle is considered valid when every host in it carries both
    a 'begin <jobid>' and an 'end <jobid>' mark.  Invalid or missing pickles
    are rebuilt from the accounting data via job_stats.from_acct().

    Args:
        reader_inst: accounting record (mapping with at least 'id' and
            'end_time' keys — presumably an epoch timestamp; TODO confirm).
        pickles_dir: root directory for date-partitioned pickle output.
        archive_dir: stats archive passed through to job_stats.from_acct().
    """
    date_dir = os.path.join(
        pickles_dir,
        datetime.fromtimestamp(reader_inst['end_time']).strftime('%Y-%m-%d'))
    try:
        os.makedirs(date_dir)
    except OSError:
        # Directory usually exists already; ignore and carry on.
        pass
    pickle_file = os.path.join(date_dir, reader_inst['id'])
    validated = False
    if os.path.exists(pickle_file):
        validated = True
        # Pickle data must be read in binary mode (text mode corrupts it
        # on Python 3).
        with open(pickle_file, 'rb') as fd:
            try:
                job = pickle.load(fd)
                for host in job.hosts.values():
                    # dict.has_key() was removed in Python 3; use `in`.
                    if ('begin %s' % job.id) not in host.marks \
                            or ('end %s' % job.id) not in host.marks:
                        validated = False
                        break
            except Exception:
                # Unreadable/corrupt pickle: treat as not validated.
                validated = False
    if not validated:
        print(reader_inst['id'] + " is not validated: process")
        job = job_stats.from_acct(reader_inst, archive_dir, '', '')
        if job:
            # Only create/truncate the file once we actually have a job;
            # the original opened for write first, which left an empty
            # pickle behind whenever from_acct() produced nothing.
            with open(pickle_file, 'wb') as fd:
                pickle.dump(job, fd, pickle.HIGHEST_PROTOCOL)
    else:
        print(reader_inst['id'] + " is validated: do not process")
def job_pickle(reader_inst, pickles_dir, archive_dir, host_name_ext=cfg.host_name_ext):
    """Pickle the job for *reader_inst*, reusing an existing valid pickle.

    An existing pickle is accepted when test_job() approves it; otherwise the
    job is rebuilt from accounting data and re-pickled.

    Args:
        reader_inst: accounting record with at least 'id' and 'end_time'.
        pickles_dir: root directory for date-partitioned pickle output.
        archive_dir: stats archive passed to job_stats.from_acct().
        host_name_ext: host-name suffix forwarded to from_acct().

    Returns:
        (job_id, validated) tuple.
    """
    date_dir = os.path.join(
        pickles_dir,
        datetime.fromtimestamp(reader_inst['end_time']).strftime('%Y-%m-%d'))
    try:
        os.makedirs(date_dir)
    except OSError:
        # Directory usually exists already; ignore and carry on.
        pass
    pickle_file = os.path.join(date_dir, reader_inst['id'])
    validated = False
    if os.path.exists(pickle_file):
        try:
            # Binary mode: pickle streams are bytes, not text.
            with open(pickle_file, 'rb') as fd:
                job = pickle.load(fd)
            validated = test_job(job)
        except EOFError as e:
            # Truncated pickle (e.g. interrupted earlier run): rebuild below.
            print(e)
    if not validated:
        job = job_stats.from_acct(reader_inst, archive_dir, '', host_name_ext)
        if job and test_job(job):
            # Use a with-block so the handle is closed; the original did
            # pickle.dump(job, open(pickle_file, 'w'), ...) which leaked an
            # open text-mode file object.
            with open(pickle_file, 'wb') as fd:
                pickle.dump(job, fd, pickle.HIGHEST_PROTOCOL)
            validated = True
    return (reader_inst['id'], validated)
def job_pickle(reader_inst,
               pickle_dir='.',
               tacc_stats_home=cfg.tacc_stats_home,
               host_list_dir=cfg.host_list_dir,
               acct=None,
               pickle_prot=pickle.HIGHEST_PROTOCOL):
    """Build and pickle the job for *reader_inst* unless it already exists.

    Skips jobs with no end time (still running) and jobs whose pickle file is
    already present on disk.

    Args:
        reader_inst: accounting record with at least 'id' and 'end_time'.
        pickle_dir: root directory for date-partitioned pickle output.
        tacc_stats_home: stats installation root passed to from_acct().
        host_list_dir: host-list directory passed to from_acct().
        acct: optional accounting object passed to from_acct().
        pickle_prot: pickle protocol number to use when dumping.
    """
    print(reader_inst)
    if reader_inst['end_time'] == 0:
        # No end time recorded yet — the job hasn't finished; nothing to do.
        return
    date_dir = os.path.join(
        pickle_dir,
        datetime.fromtimestamp(reader_inst['end_time']).strftime('%Y-%m-%d'))
    try:
        os.makedirs(date_dir)
    except OSError:
        # Directory usually exists already; ignore and carry on.
        pass
    if os.path.exists(os.path.join(date_dir, reader_inst['id'])):
        print(reader_inst['id'] + " exists, don't reprocess")
        return
    job = job_stats.from_acct(reader_inst, tacc_stats_home, host_list_dir, acct)
    if not job:
        # from_acct() can apparently come back empty (sibling versions guard
        # with `if job:`); without this check job.id raised AttributeError.
        return
    with open(os.path.join(date_dir, job.id), 'wb') as pickle_file:
        pickle.dump(job, pickle_file, pickle_prot)
def job_pickle(reader_inst, pickles_dir, archive_dir, host_name_ext=cfg.host_name_ext):
    """Pickle the job for *reader_inst*, reusing an existing valid pickle.

    An existing pickle is loaded (with a latin1 fallback for pickles written
    by Python 2) and accepted if test_job() approves it; otherwise the job is
    rebuilt from accounting data and re-pickled.

    Args:
        reader_inst: accounting record with at least 'id' and 'end_time'.
        pickles_dir: root directory for date-partitioned pickle output.
        archive_dir: stats archive passed to job_stats.from_acct().
        host_name_ext: host-name suffix forwarded to from_acct().

    Returns:
        (job_id, validated) tuple.
    """
    date_dir = os.path.join(
        pickles_dir,
        datetime.fromtimestamp(reader_inst['end_time']).strftime('%Y-%m-%d'))
    try:
        os.makedirs(date_dir)
    except OSError:
        # Directory usually exists already; ignore and carry on.
        pass
    pickle_file = os.path.join(date_dir, reader_inst['id'])
    validated = False
    if os.path.exists(pickle_file):
        try:
            with open(pickle_file, 'rb') as fd:
                try:
                    job = p.load(fd)
                except MemoryError as e:
                    print(e)
                    return (reader_inst['id'], validated)
                except UnicodeDecodeError as e:
                    # Pickle written by Python 2: retry decoding as latin1.
                    try:
                        # Rewind first — the failed load already consumed
                        # part of the stream, so retrying from the current
                        # position could never succeed.
                        fd.seek(0)
                        job = p.load(fd, encoding="latin1")  # Python2 Compatibility
                    except Exception:
                        print(e)
                        return (reader_inst['id'], validated)
                except Exception:
                    # Any other load failure: report not validated.
                    return (reader_inst['id'], validated)
                validated = test_job(job)
        except EOFError as e:
            # Truncated pickle: fall through and rebuild below.
            print(e)
    if not validated:
        job = job_stats.from_acct(reader_inst, archive_dir, '', host_name_ext)
        print("processed jobid ", reader_inst['id'])
        if job and test_job(job):
            try:
                with open(pickle_file, 'wb') as fd:
                    p.dump(job, fd, protocol=p.HIGHEST_PROTOCOL)
            except MemoryError as e:
                print(e)
                return (reader_inst['id'], validated)
            validated = True
    return (reader_inst['id'], validated)
def job_pickle(reader_inst, pickles_dir, archive_dir, host_name_ext = cfg.host_name_ext):
    """Validate or (re)create the pickled job for *reader_inst*.

    Loads an existing pickle (falling back to latin1 decoding for pickles
    produced under Python 2) and keeps it when test_job() passes; otherwise
    rebuilds the job from accounting data and writes a fresh pickle.

    Args:
        reader_inst: accounting record with at least 'id' and 'end_time'.
        pickles_dir: root of the date-partitioned pickle tree.
        archive_dir: stats archive handed to job_stats.from_acct().
        host_name_ext: host-name suffix handed to from_acct().

    Returns:
        Tuple of (job id, whether a validated pickle now exists).
    """
    date_dir = os.path.join(
        pickles_dir,
        datetime.fromtimestamp(reader_inst['end_time']).strftime('%Y-%m-%d'))
    try:
        os.makedirs(date_dir)
    except OSError:
        # Best-effort mkdir: the date directory commonly already exists.
        pass
    pickle_file = os.path.join(date_dir, reader_inst['id'])
    validated = False
    if os.path.exists(pickle_file):
        try:
            with open(pickle_file, 'rb') as fd:
                try:
                    job = p.load(fd)
                except MemoryError as e:
                    print(e)
                    return (reader_inst['id'], validated)
                except UnicodeDecodeError as e:
                    try:
                        # Must rewind before retrying: the first load left
                        # the stream mid-file, so a retry without seek(0)
                        # was guaranteed to fail.
                        fd.seek(0)
                        job = p.load(fd, encoding = "latin1") # Python2 Compatibility
                    except Exception:
                        print(e)
                        return (reader_inst['id'], validated)
                except Exception:
                    # Unreadable pickle for any other reason.
                    return (reader_inst['id'], validated)
                validated = test_job(job)
        except EOFError as e:
            # Truncated pickle from an interrupted run: rebuild below.
            print(e)
    if not validated:
        job = job_stats.from_acct(reader_inst, archive_dir, '', host_name_ext)
        print("processed jobid ",reader_inst['id'])
        if job and test_job(job):
            try:
                with open(pickle_file, 'wb') as fd:
                    p.dump(job, fd, protocol = p.HIGHEST_PROTOCOL)
            except MemoryError as e:
                print(e)
                return (reader_inst['id'], validated)
            validated = True
    return (reader_inst['id'], validated)
def job_pickle(reader_inst,
               pickle_dir=cfg.pickles_dir,
               archive_dir=cfg.archive_dir,
               host_list_dir=cfg.host_list_dir,
               host_name_ext=cfg.host_name_ext):
    """Pickle the job for *reader_inst* unless a valid pickle already exists.

    A pickle is considered valid when every host carries both 'begin <jobid>'
    and 'end <jobid>' marks.  Missing or invalid pickles are rebuilt from the
    accounting data via job_stats.from_acct().

    Args:
        reader_inst: accounting record with at least 'id' and 'end_time'.
        pickle_dir: root of the date-partitioned pickle tree.
        archive_dir: stats archive passed to from_acct().
        host_list_dir: host-list directory passed to from_acct().
        host_name_ext: host-name suffix passed to from_acct().
    """
    if reader_inst['end_time'] == 0:
        # No end time yet — the job hasn't finished; nothing to pickle.
        return
    date_dir = os.path.join(
        pickle_dir,
        datetime.fromtimestamp(reader_inst['end_time']).strftime('%Y-%m-%d'))
    try:
        os.makedirs(date_dir)
    except OSError:
        # Directory usually exists already; ignore and carry on.
        pass
    pickle_file = os.path.join(date_dir, reader_inst['id'])
    validated = False
    if os.path.exists(pickle_file):
        validated = True
        # Binary mode: pickle streams are bytes, not text.
        with open(pickle_file, 'rb') as fd:
            try:
                job = pickle.load(fd)
                for host in job.hosts.values():
                    # dict.has_key() was removed in Python 3; use `in`.
                    if ('begin %s' % job.id) not in host.marks \
                            or ('end %s' % job.id) not in host.marks:
                        validated = False
                        break
            except Exception:
                # Corrupt or unreadable pickle: rebuild below.
                validated = False
    if not validated:
        print(reader_inst['id'] + " is not validated: process")
        job = job_stats.from_acct(reader_inst, archive_dir, host_list_dir,
                                  host_name_ext)
        if job:
            # Open for write only after a job exists; the original truncated
            # the file first, leaving an empty pickle when from_acct()
            # returned nothing.
            with open(pickle_file, 'wb') as fd:
                pickle.dump(job, fd, pickle.HIGHEST_PROTOCOL)
    else:
        print(reader_inst['id'] + " is validated: do not process")