def plotLabelledSet():
    """Render every instance of the heavy training set to an HTML report.

    Loads ``heavyTrainSet_noDS.npy``, sorts the instances, and writes one
    three-column row per instance (index | time-frequency plot | hand label)
    to ``otterHtmls/training_set_noDS.html``.  Prints the total page
    generation time when done.
    """
    # Creating a web report.
    report = otter.Otter("otterHtmls/training_set_noDS.html",
                         author="Gonghan Xu",
                         title="Training Set")
    dat = sorted(np.load("heavyTrainSet_noDS.npy"))
    # dat = dat[: 10]
    startLoadTime = time.time()
    numIns = len(dat)
    with report:
        # Putting the plots onto the web page.
        for idx, ins in enumerate(dat, start=1):
            if idx % 20 == 0:
                # Progress heartbeat every 20 images.
                # (Fix: old message said "{}st" for every index, e.g. "20st".)
                print("Generating image {}/{}".format(idx, numIns))
            fig, ax = ins.getPlot()
            # Creating a row with three columns: index | figure | label.
            row = bt.Row(3)
            row[0] + idx
            # Putting the figure into the (wider) middle cell.
            row[1].width = 6
            row[1] + fig
            # Fix: populate the label cell BEFORE appending the row to the
            # report (the original appended first, so the label could be
            # missing from the rendered row); this matches plotProbs().
            if ins.hasDoubleChirp:
                row[2] + "Double Chirp"
            else:
                row[2] + "Not Double Chirp"
            report + row
            # Free the figure so matplotlib does not accumulate open
            # figures across the whole set.
            plt.close(fig)
    loadTime = time.time() - startLoadTime
    print("Web page generation time: {} sec".format(loadTime))
def plotArbiAngles():
    """Write time-frequency maps over a grid of (iota, phi) angles to HTML.

    For every waveform in the module-level ``waveList``, sweeps 10 values
    of iota in [0, pi] and 10 values of phi in [0, 2*pi] (100 maps per
    waveform), regenerates each map, and appends it (unlabelled) to
    ``otterHtmls/GT0448_100SampleTfMaps.html``.
    """
    # Creating a web report.
    report = otter.Otter("otterHtmls/GT0448_100SampleTfMaps.html",
                         author="Gonghan Xu",
                         title="100 Time-frequency maps without labels")
    with report:
        # Putting the plots onto the web page.
        for waveName in waveList:
            plotNum = 0  # plot counter restarts for every waveform
            wavePath = PATH.join(WAVEFORM_DIR, waveName + ".h5")
            for iota in np.linspace(0, pi, 10, endpoint=True):
                for phi in np.linspace(0, 2 * pi, 10, endpoint=True):
                    iotaStr = utils.ang_to_str(iota)
                    phiStr = utils.ang_to_str(phi)
                    plotNum += 1
                    # Generate the downsampled time-freq map used for
                    # training.
                    wf_data = gen_waveform(wavePath, iota, phi)
                    tf_data = tf_decompose(wf_data['hp'],
                                           wf_data["sample_times"],
                                           motherFreq, maxScale)
                    wplane = tf_data["wplane"]
                    wfreqs = tf_data["wfreqs"]
                    sample_times = wf_data["sample_times"]
                    # Get the selected region around the merger.
                    wplane_sel, freqs_sel, times_sel = \
                        utils.select_wplane(wplane, wfreqs, sample_times,
                                            mid_t=0, xnum=500, ynum=350,
                                            left_t_window=-0.05,
                                            right_t_window=0.03,
                                            freq_window=500)
                    print("{}. {}, iota: {}, phi: {}".format(
                        plotNum, waveName, iotaStr, phiStr))
                    fig, ax = plt.subplots(figsize=fig_size)
                    ax.pcolormesh(times_sel, freqs_sel, wplane_sel,
                                  cmap="gray")
                    ax.set_xlabel("time (s)")
                    ax.set_ylabel("frequency (Hz)")
                    ax.set_title("{}. {}, iota: {}, phi: {}, "
                                 "mother freq: {:.2f}".format(
                                     plotNum, waveName, iotaStr, phiStr,
                                     motherFreq))
                    # Creating a row with three columns and putting the
                    # figure into the (wider) middle cell.
                    row = bt.Row(3)
                    row[1].width = 6
                    row[1] + fig
                    report + row
                    # Fix: close each figure; the original leaked one open
                    # matplotlib figure per map (100 per waveform).
                    plt.close(fig)
def plotInstances(dataSetName):
    """Render every labelled instance in a data set to an HTML report.

    Args:
        dataSetName: path to a ``.npy`` file of labelled instances; each
            instance carries the waveform name, angles, mother frequency,
            and a ``hasDoubleChirp`` hand label.

    Regenerates each instance's time-frequency map from its stored
    parameters and writes the plot plus its hand label to
    ``otterHtmls/GT0453_100SampleInstances.html``.
    """
    instances = np.load(dataSetName)
    instances.sort()
    report = otter.Otter("otterHtmls/GT0453_100SampleInstances.html",
                         author="Gonghan Xu",
                         title="100 time-frequency maps for "
                               "{} with manual label".format(
                                   instances[0].waveformName))
    with report:
        plotNum = 0
        for instance in instances:
            plotNum += 1
            # Loading parameters embedded in the instance.
            waveName = instance.waveformName
            wavePath = PATH.join(WAVEFORM_DIR, waveName + ".h5")
            iota = instance.iota
            phi = instance.phi
            hasDoubleChirp = instance.hasDoubleChirp
            motherFreq = instance.motherFreq
            iotaStr = utils.ang_to_str(iota)
            phiStr = utils.ang_to_str(phi)
            # Regenerate the down-sampled time-freq map used for training
            # from the embedded data parameters.
            wf_data = gen_waveform(wavePath, iota, phi)
            tf_data = tf_decompose(wf_data['hp'], wf_data["sample_times"],
                                   motherFreq, maxScale)
            wplane = tf_data["wplane"]
            wfreqs = tf_data["wfreqs"]
            sample_times = wf_data["sample_times"]
            # Get the selected region.
            wplane_sel, freqs_sel, times_sel = \
                utils.select_wplane(wplane, wfreqs, sample_times,
                                    mid_t=0, xnum=500, ynum=350,
                                    left_t_window=-0.05,
                                    right_t_window=0.03,
                                    freq_window=500)
            print("{}. {}, iota: {}, phi: {}, motherFreq: {:.2f}".format(
                plotNum, waveName, iotaStr, phiStr, motherFreq))
            fig, ax = plt.subplots(figsize=fig_size)
            ax.pcolormesh(times_sel, freqs_sel, wplane_sel, cmap="gray")
            ax.set_xlabel("time (s)")
            ax.set_ylabel("frequency (Hz)")
            ax.set_title("{}. {}, iota: {}, phi: {}, "
                         "mother freq: {:.2f}".format(plotNum, waveName,
                                                      iotaStr, phiStr,
                                                      motherFreq))
            # Row with the figure in the (wider) middle cell.
            row = bt.Row(3)
            row[1].width = 6
            row[1] + fig
            report + row
            # A second row carrying the hand label under the figure.
            row = bt.Row(3)
            row[1] + "({}) Double Chirp: {}".format(plotNum, hasDoubleChirp)
            report + row
            # Fix: close each figure; the original leaked every open
            # matplotlib figure across the whole data set.
            plt.close(fig)
def plotProbs():
    """Report cross-validated double-chirp probabilities for all instances.

    Runs 5-fold cross validation over ``heavyTrainSet_noDS.npy`` via
    ``getCrossValProbs`` and writes one four-column row per instance
    (counter | plot | hand label | predicted probability) to
    ``otterHtmls/probs.html``; finally prints the overall accuracy.
    """
    # Creating a web report.
    report = otter.Otter("otterHtmls/probs.html",
                         author="Gonghan Xu",
                         title="All Cases (Linear Kernel; "
                               "Using Original Image Size: 508 * 328)")
    dat = np.load("heavyTrainSet_noDS.npy")
    startLoadTime = time.time()
    numIns = len(dat)
    # probs/fails are indexed by shuffled position i; the matching
    # instance is dat[shuffIndices[i]].
    shuffIndices, probs, accus, fails, confMat \
        = getCrossValProbs(dat, nfolds=5)
    print("Start generating plots..")
    counter = 0
    with report:
        # Putting the plots onto the web page.
        # (Fix: replaced `for c, i in enumerate(range(numIns), start=1)`,
        # where c was always i + 1, with a plain index loop.)
        for i in range(numIns):
            if (i + 1) % 50 == 0:
                # Progress heartbeat; the old message said "{}st" for
                # every index, e.g. "50st".
                print("At image {}/{}".format(i + 1, numIns))
            counter += 1
            ins = dat[shuffIndices[i]]
            fig, ax = ins.getPlot()
            # Creating a row with four columns:
            # counter | figure | hand label | predicted probability.
            row = bt.Row(4)
            row[0] + counter
            # Putting the figure into the (wider) middle cell.
            row[1].width = 6
            row[1] + fig
            if ins.hasDoubleChirp:
                row[2] + "Hand Label: Double Chirp"
            else:
                row[2] + "Hand Label: Not Double Chirp"
            row[3] + "Predicted Prob (Double Chirp): {:.3f}".format(probs[i])
            report + row
            plt.close(fig)
    loadTime = time.time() - startLoadTime
    print("Web page generation time: {} sec".format(loadTime))
    print("Verifying classification accuracy: {:.5f}".format(
        1 - 1.0 * sum(fails) / numIns))
return cluster, status, job uber_repository = git.MetaRepository( "/home/daniel.williams/events/O3/o3a_catalog_events") events = gitlab.find_events(repository, milestone="PE: C01 Reruns") mattermost = mattermost.Mattermost() mattermost.send_message(":mega: The run supervising robot is running. :robot:", "@daniel-williams") report = otter.Otter( filename= "/home/daniel.williams/public_html/LVC/projects/O3/C01/summary.html", author="R. Daniel Williams", title="Asimov/Olivaw : Event supervision report") with report: report + "This report contains the latest updates on the run status of the various PE runs ongoing at CIT." report + "Supervisor last checked these events at " + str( datetime.datetime.now()) message = """# Run updates\n""" message += """| Event | Gitlab State | Run state | Production | Approx | Sampler | Status |\n""" message += """|---|---|---|---|---|---|\n""" for event in events: print(event.title) status = None
def main(eventflag):
    """Audit the "Preferred" result file of each gitlab-tracked PE event.

    Writes an HTML report listing, for every event (optionally filtered to
    the single event titled ``eventflag``), the posterior-sample labels of
    the Preferred PESummary metafile, and flags repositories where it is
    missing.  Also marks preferred productions for upload.

    NOTE(review): this body was reconstructed from whitespace-mangled
    source; statement nesting (especially the final upload try-block) is a
    best-effort reading — confirm against the original file.
    """
    report = otter.Otter(
        filename=
        "/home/daniel.williams/public_html/LVC/projects/O3/C01/audit_current.html",
        author="R. Daniel Williams",
        title="Asimov/Olivaw : Preferred run audit report")
    with report:
        report + "This report contains the latest updates on the run status of the various PE runs ongoing at CIT."
        report + "Supervisor last checked these events at " + str(
            datetime.datetime.now())
    # `events` is module-level state; when a single event title is given,
    # narrow the global list to just that event.
    global events
    if eventflag:
        events = [event for event in events if event.title == eventflag]
    for event in events:
        print(event.title)
        # if event.state == None:
        #     continue
        # if "Special" in event.labels:
        #     continue
        # if "Preferred cleaned" in event.labels:
        #     continue
        try:
            repo = uber_repository.get_repo(event.title)
        except:
            # Event has no repository in the uber-repo; skip it.
            print(f"{event.title} missing from the uberrepo")
            continue
        # Refresh the working copy, stashing any local changes first.
        repo.update(stash=True)
        with report:
            report + f"#{event.title}"
        # Canonical location of the preferred PESummary metafile.
        preferred_summary = os.path.join(repo.directory, "Preferred",
                                         "PESummary_metafile",
                                         "posterior_samples.h5")
        try:
            print(preferred_summary)
            data = read(preferred_summary)
            print(data.labels)
            with report:
                report + f"{data.labels}"
        except OSError:
            # File absent or unreadable: record that in the report.
            with report:
                report + f"There is no preferred file in this repository."
        try:
            event_prods = repo.find_prods(config.get("olivaw",
                                                     "run_category"))
        except:
            print(f"No runs in this repository")
            continue
        if event.state in ["Generating PSDs", "Productions running",
                           "Stuck"]:
            psds_dict = {}
            # Keys like "ProdN" in the event ledger identify productions.
            prod_keys = [
                key for key in event.data.keys() if "Prod" in key[0:5]
            ]
            n_productions = len(event_prods)
            event_psds_l = []
            pref_prod = []
            for prod in event_prods:
                # Strip any "_suffix" to get the bare production name.
                prod = prod.split("_")[0]
                if prod in event.data:
                    cluster = event.data[prod]
                    run_ini = os.path.join(event.data[f"{prod}_rundir"],
                                           "config.ini")
                    actual_config = RunConfiguration(run_ini)
                    engine_data = actual_config.get_engine()
                    # A "finalised" cluster entry marks the preferred
                    # production for this event.
                    if "finalised" in cluster.lower():
                        print(f"{prod} is the preferred production")
                        pref_prod.append(prod)
            try:
                if len(pref_prod) == 0:
                    continue
                upload_results(repo, event, pref_prod, report=report)
            except FileNotFoundError as e:
                print(e)
def main():
    """Audit running lalinference MCMC jobs and stage their posteriors.

    For each tracked event in an active state, locates the condor jobs that
    match each production's run directory, copies their ``.hdf5`` outputs
    out of the running jobs via ``condor_ssh_to_job``, stages them into the
    SEOBNRv4P rota-runs area, and prints a rewritten ``cbcBayesPostProc``
    command pointing at the staged file and a temporary HTML output path.

    NOTE(review): reconstructed from whitespace-mangled source; nesting of
    the innermost loops is a best-effort reading — confirm against the
    original file.
    """
    report = otter.Otter(
        filename=
        "/home/daniel.williams/public_html/LVC/projects/O3/C01/audit_mcmc.html",
        author="R. Daniel Williams",
        title="Asimov/Olivaw : Preferred run audit report")
    with report:
        report + "This report contains the latest updates on the run status of the various PE runs ongoing at CIT."
        report + "Supervisor last checked these events at " + str(
            datetime.datetime.now())
    # Condor job ids and their working directories, fetched once up front.
    all_ids, all_wds = get_all_jobs()
    for event in events:
        print(event.title)
        if event.state == None:
            continue
        if "Special" in event.labels:
            continue
        try:
            repo = uber_repository.get_repo(event.title)
        except:
            print(f"{event.title} missing from the uberrepo")
            continue
        # Refresh the working copy, stashing local changes.
        repo.update(stash=True)
        with report:
            report + f"#{event.title}"
        try:
            event_prods = repo.find_prods(config.get("olivaw",
                                                     "run_category"))
        except:
            print(f"No runs in this repository")
            continue
        if event.state in ["Generating PSDs", "Productions running",
                           "Stuck"]:
            psds_dict = {}
            prod_keys = [
                key for key in event.data.keys() if "Prod" in key[0:5]
            ]
            n_productions = len(event_prods)
            event_psds_l = []
            pref_prod = []
            for prod in event_prods:
                # Strip any "_suffix" to get the bare production name.
                prod = prod.split("_")[0]
                if prod in event.data:
                    # Productions marked "blocked" are skipped entirely.
                    if "blocked" in event.data[prod].lower():
                        continue
                    cluster = event.data[prod]
                    prod_rundir = event.data[f"{prod}_rundir"]
                    run_ini = os.path.join(prod_rundir, "config.ini")
                    actual_config = RunConfiguration(run_ini)
                    try:
                        engine_data = actual_config.get_engine()
                    except:
                        continue
                    # Only audit runs living under daniel.williams' home.
                    if not "daniel.williams" in prod_rundir:
                        continue
                    if actual_config.ini.get(
                            "analysis", "engine") == "lalinferencemcmc":
                        # Keep only job ids whose working directory
                        # matches this event and production.
                        job_ids = [
                            number
                            for number, directory in zip(all_ids, all_wds)
                            if (prod in directory) and (
                                event.title in directory)
                        ]
                        print(job_ids)
                        if len(job_ids) > 0:
                            report += job_ids
                        tmp = "tmp"
                        try:
                            os.makedirs(tmp)
                        except:
                            # Directory already exists; best-effort.
                            pass
                        try:
                            os.makedirs(f"{prod_rundir}/{tmp}/html")
                            #os.popen(f"rm -r /home/john.veitch/projects/O3/SEOBNRv4P_rota_runs/{event.title}/{prod}")
                            os.makedirs(
                                f"/home/john.veitch/projects/O3/SEOBNRv4P_rota_runs/{event.title}/{prod}-robot"
                            )
                        except:
                            pass
                        # Recover the original cbcBayesPostProc invocation
                        # from the generated lalinference shell script.
                        raw_pp_str = os.popen(
                            f'grep cbcBayesPostProc {prod_rundir}/lalinference*.sh'
                        ).read()
                        # The posterior path is the token following
                        # 'hdf5_snr.txt ' in that command line.
                        pspath0 = raw_pp_str.split('hdf5_snr.txt ')[-1].split(
                            ' ')[0]
                        # Pull the in-progress .hdf* files out of each
                        # running condor job into the tmp staging dir.
                        for job in job_ids:
                            os.system(
                                f'condor_ssh_to_job -ssh scp {job} remote:./*.hdf* {prod_rundir}/{tmp}'
                            )
                        for h5file in glob.glob(f"{prod_rundir}/{tmp}/*.hdf5"):
                            pspath1 = h5file
                            # os.path.join(prod_rundir, tmp,'*.hdf5')
                            # print(pspath1)
                            file = h5file.split("/")[-1]
                            # Stage a copy into the rota-runs area.
                            copy(
                                h5file,
                                f"/home/john.veitch/projects/O3/SEOBNRv4P_rota_runs/{event.title}/{prod}-robot/{file}"
                            )
                            # Rewrite the postproc command to use the
                            # staged posterior and a tmp web output path.
                            pspath = raw_pp_str.replace(pspath0, pspath1)
                            webpath = pspath.split("--outpath")[1].split()[0]
                            new_webpath = f"{prod_rundir}/{tmp}/html"
                            print(pspath.replace(webpath, new_webpath))
def html(event, webdir):
    """
    Return the ledger for a given event.

    If no event is specified then the entire production ledger is returned.

    Builds a top-level index report plus one per-event report page under
    ``webdir``; each production is listed with pipeline, progress, and a
    status-coloured badge, and event cards are paired into card decks on
    the index page.

    NOTE(review): reconstructed from whitespace-mangled source; nesting of
    the per-production `with event_report:` sections is a best-effort
    reading — confirm against the original file.
    """
    server, repository = connect_gitlab()
    if not webdir:
        webdir = config.get("report", "report_root")
    click.echo("Getting events...")
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                repo=False,
                                update=False)
    click.echo("Got events")
    # Use a local asimov.conf for report styling if one exists.
    if len(glob.glob("asimov.conf")) > 0:
        config_file = "asimov.conf"
    else:
        config_file = None
    report = otter.Otter(f"{webdir}/index.html",
                         author="Olivaw",
                         title="Olivaw PE Report",
                         author_email=config.get("report", "report_email"),
                         config_file=config_file)
    with report:
        navbar = bt.Navbar("Asimov", background="navbar-dark bg-primary")
        report + navbar
    with report:
        # NOTE(review): this local `time` shadows any `time` module import
        # for the rest of the function.
        time = bt.Container()
        time + f"Report generated at {str(datetime.now(tz))}"
        report + time
    cards = []
    container = bt.Container()
    container + "# All PE Productions"
    for event in events:
        click.secho(event.title, bold=True)
        # One standalone report page per event.
        event_report = otter.Otter(f"{webdir}/{event.title}.html",
                                   author="Olivaw",
                                   title=f"Olivaw PE Report | {event.title}",
                                   author_email="*****@*****.**",
                                   config_file=config_file)
        with event_report:
            navbar = bt.Navbar("Asimov", background="navbar-dark bg-primary")
            event_report + navbar
        # Index-page card linking to the event page.
        card = bt.Card(title=f"<a href='{event.title}.html'>{event.title}</a>")
        # Table of contents with anchors to each production section.
        toc = bt.Container()
        for production in event.productions:
            toc + f"* [{production.name}](#{production.name}) | {production.pipeline} |"
            # + bt.Badge({production.pipeline}, "info")
        with event_report:
            title_c = bt.Container()
            title_c + f"#{event.title}"
            event_report + title_c
            event_report + toc
        production_list = bt.ListGroup()
        for production in event.productions:
            click.echo(f"{event.title}\t{production.name}")
            # Only productions with a recognised pipeline get a section.
            if production.pipeline.lower() in known_pipelines:
                pipe = known_pipelines[production.pipeline.lower()](
                    production, "C01_offline")
                event_log = otter.Otter(
                    f"{webdir}/{event.title}-{production.name}.html",
                    author="Olivaw",
                    title=f"Olivaw PE Report | {event.title} | {production.name}",
                    author_email="*****@*****.**",
                    config_file=config_file)
                # Maps production status to a bootstrap context colour.
                status_map = {
                    "cancelled": "light",
                    "finished": "success",
                    "uploaded": "success",
                    "processing": "primary",
                    "running": "primary",
                    "stuck": "warning",
                    "restart": "secondary",
                    "ready": "secondary",
                    "wait": "light",
                    "stop": "danger",
                    "manual": "light",
                    "stopped": "light"
                }
                with event_report:
                    container = bt.Container()
                    container + f"## {production.name}"
                    container + f"<a id='{production.name}'/>"
                    container + "### Ledger"
                    container + production.meta
                # Only the bilby pipeline exposes per-process progress.
                if production.pipeline.lower() == "bilby":
                    container + f"### Progress"
                    progress_line = []
                    procs = pipe.check_progress()
                    for proc, val in procs.items():
                        container + f"- {proc.split('_')[-1]}\t{val[0]}\t{val[1]}"
                        progress_line.append(f"{val[1]}")
                else:
                    progress_line = []
                if production.status.lower() == "running":
                    progress = str(bt.Badge("|".join(progress_line)))
                else:
                    progress = ""
                if production.status.lower() == "uploaded":
                    # Uploaded runs link straight to the public results.
                    link = os.path.join(
                        "https://ldas-jobs.ligo.caltech.edu",
                        config.get('general', 'webroot').replace(
                            "/home/", "~").replace("public_html/", ""),
                        production.event.name, production.name, "results",
                        "home.html")
                    item_text = f"<a href='{link}'>{production.name}</a>"
                else:
                    item_text = f"<a href='{event.title}.html#{production.name}'>{production.name}</a>"
                production_list.add_item(
                    item_text + str(bt.Badge(f"{production.pipeline}", "info"))
                    + progress + str(bt.Badge(f"{production.status}")),
                    context=status_map[production.status])
                # logs = pipe.collect_logs()
                # container + f"### Log files"
                # container + f"<a href='{event.title}-{production.name}.html'>Log file page</a>"
                # with event_log:
                #     for log, message in logs.items():
                #         log_card = bt.Card(title=f"{log}")
                #         log_card.add_content("<div class='card-body'><pre>"+message+"</pre></div>")
                #         event_log + log_card
                with event_report:
                    event_report + container
        card.add_content(production_list)
        cards.append(card)
    with report:
        if len(cards) == 1:
            report + card
        else:
            # Pair cards into two-card decks on the index page.
            # NOTE(review): with an odd number of cards (> 1), the final
            # deck holding the last card is never appended to the report —
            # suspected bug; confirm intended behaviour.
            for i, card in enumerate(cards):
                if i % 2 == 0:
                    deck = bt.CardDeck()
                deck + card
                if i % 2 == 1:
                    report + deck