def status(event, production):
    """
    Show the review status of an event.

    Parameters
    ----------
    event : str or list
        The event name (or list of names) to report on.
    production : str, optional
        If given, only the production with this name is shown;
        otherwise every production on the event is shown.
    """
    if isinstance(event, str):
        event = [event]
    server, repository = connect_gitlab()
    gitlab_events = gitlab.find_events(repository,
                                       subset=event,
                                       update=False,
                                       repo=False)
    for event in gitlab_events:
        click.secho(event.title, bold=True)
        if production:
            productions = [prod for prod in event.productions
                           if prod.name == production]
        else:
            productions = event.productions
        # Use a distinct loop variable so the `production` filter argument
        # is not clobbered between events (the original reassigned it and
        # broke the name filter from the second event onwards).
        for prod in productions:
            click.secho(f"\t{prod.name}", bold=True)
            if "review" in prod.meta:
                click.echo(prod.meta['review'])
            else:
                click.secho(
                    "\t\tNo review information exists for this production.")
def calibration(event):
    """
    Check events for missing calibration information and attempt to add the
    calibration envelopes to each event's repository.

    Parameters
    ----------
    event : str or list, optional
        The event(s) to process; passed through to ``gitlab.find_events``.
    """
    gitlab_events = gitlab.find_events(repository, subset=event)
    # Update existing events
    for event in gitlab_events:
        # Skip events whose repository has been explicitly disabled.
        if event.event_object.meta.get('disable_repo', False):
            continue
        try:
            event.event_object._check_calibration()
        except DescriptionException:
            # Only events failing the calibration check need envelopes added.
            print(event.title)
            time = event.event_object.meta['event time']
            calibrations = find_calibrations(time)
            print(calibrations)
            for ifo, envelope in calibrations.items():
                description = f"Added calibration {envelope} for {ifo}."
                try:
                    event.event_object.repository.add_file(
                        os.path.join(
                            f"/home/cal/public_html/uncertainty/O3C01/{ifo}",
                            envelope),
                        f"C01_offline/calibration/{ifo}.dat",
                        commit_message=description)
                except GitCommandError as e:
                    # An empty commit is expected when the file is unchanged;
                    # any other git failure should propagate rather than be
                    # silently swallowed.
                    if "nothing to commit," not in e.stderr:
                        raise
                calibrations[ifo] = f"C01_offline/calibration/{ifo}.dat"
            envelopes = yaml.dump({"calibration": calibrations})
            event.add_note(CALIBRATION_NOTE.format(envelopes))
def resultslinks(event, update, root):
    """
    Find all available results for a given event and symlink them into a
    directory tree under ``root``.

    Parameters
    ----------
    event : str
        The event to process.
    update : bool
        Whether to refresh event data from the tracking repository.
    root : str
        Directory under which ``<event>/<production>/<file>`` links are made.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=update,
                                repo=False)
    for event in events:
        click.secho(f"{event.title}")
        logger = logging.AsimovLogger(event=event.event_object)
        for production in event.productions:
            try:
                for result, meta in production.results().items():
                    print(
                        f"{production.event.name}/{production.name}/{result}, {production.results(result)}"
                    )
                    pathlib.Path(
                        os.path.join(root, production.event.name,
                                     production.name)).mkdir(parents=True,
                                                             exist_ok=True)
                    try:
                        os.symlink(
                            f"{production.results(result)}",
                            f"{root}/{production.event.name}/{production.name}/{result.split('/')[-1]}"
                        )
                    except FileExistsError:
                        # Link was created on a previous run; leave it alone
                        # instead of crashing on re-execution.
                        pass
            except AttributeError:
                # Production exposes no results() yet — nothing to link.
                pass
def calibration(event, calibration):
    """
    Add calibration envelopes to a single event, either auto-discovered or
    supplied explicitly as ``IFO:path`` strings.

    Parameters
    ----------
    event : str or list
        The event to process (first match is used).
    calibration : sequence of str
        Explicit ``IFO:path`` calibration specifications; if the first entry
        is falsy the calibrations are discovered from the event time instead.
    """
    server, repository = connect_gitlab()
    event = gitlab.find_events(repository, subset=event)[0]
    try:
        event.event_object._check_calibration()
    except DescriptionException:
        # Calibration data is missing for this event; add it.
        print(event.title)
        time = event.event_object.meta['event time']
        if not calibration[0]:
            calibrations = find_calibrations(time)
        else:
            calibrations = {}
            for cal in calibration:
                calibrations[cal.split(":")[0]] = cal.split(":")[1]
        print(calibrations)
        for ifo, envelope in calibrations.items():
            description = f"Added calibration {envelope} for {ifo}."
            try:
                event.event_object.repository.add_file(
                    envelope,
                    f"C01_offline/calibration/{ifo}.dat",
                    commit_message=description)
            except GitCommandError as e:
                # Empty commits are fine; re-raise any other git failure so
                # problems are not silently swallowed.
                if "nothing to commit," not in e.stderr:
                    raise
            calibrations[ifo] = f"C01_offline/calibration/{ifo}.dat"
        envelopes = yaml.dump({"calibration": calibrations})
        event.add_note(CALIBRATION_NOTE.format(envelopes))
def status(event):
    """
    Provide a simple summary of the status of a given event.

    Arguments
    ---------
    name : str, optional
       The name of the event.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=False,
                                repo=False)
    for event in events:
        click.secho(f"{event.title:30}", bold=True)
        # List the productions recorded in the ledger for this event.
        productions = event.event_object.meta['productions']
        if len(productions) > 0:
            click.secho("\tProductions", bold=True)
            for production in productions:
                click.echo(f"\t\t{list(production.keys())[0]}")
        # List any jobs which are waiting to be run.
        waiting = event.event_object.get_all_latest()
        if len(waiting) > 0:
            click.secho("\tJobs waiting", bold=True)
            for awaiting in waiting:
                click.echo(f"\t\t{awaiting.name}\t{awaiting.status}")
def configurator(event, json_data=None):
    """Add data from the configurator."""
    server, repository = connect_gitlab()
    gitlab_event = gitlab.find_events(repository, subset=event)[0]

    if json_data:
        with open(json_data, "r") as datafile:
            data = json.load(datafile)

        def decide_fref(freq):
            # Frequencies in [5, 10) snap to 5 Hz; everything else rounds
            # down to the nearest multiple of 10 Hz.
            return 5 if 5 <= freq < 10 else floor(freq / 10) * 10

        seglen = int(data['seglen'])
        new_data = {
            "quality": {
                "sample-rate": int(data["srate"]),
                "lower-frequency": {},
                # Factor 0.875 to account for PSD roll off
                "upper-frequency": int(0.875 * data["srate"] / 2),
                "start-frequency": data['f_start'],
                "segment-length": seglen,
                "window-length": seglen,
                "psd-length": seglen,
                "reference-frequency": decide_fref(data['f_ref']),
            },
            "priors": {
                "amp order": data['amp_order'],
                "chirp-mass": [data["chirpmass_min"], data["chirpmass_max"]],
            },
        }
        update(gitlab_event.event_object.meta, new_data)
        gitlab_event.update_data()
def checkifo(event):
    # Determine which interferometers actually have analysable data around
    # each event's time (via the detector state vector) and update the
    # event ledger's interferometer list to match.
    server, repository = connect_gitlab()
    gitlab_events = gitlab.find_events(repository, subset=event)
    for event in gitlab_events:
        if "event time" not in event.event_object.meta:
            print(f"Time not found {event.event_object.name}")
        time = event.event_object.meta['event time']
        # A 20-minute window (±600 s) centred on the event time.
        gpsstart = time - 600
        gpsend = time + 600
        # The state-vector bits which must all be active for usable data.
        bits = ['Bit 0', 'Bit 1', 'Bit 2']
        active_ifo = []
        for ifo in ["L1", "H1", "V1"]:
            frametypes = event.event_object.meta['data']['frame-types']
            urls = find_urls(site=f"{ifo[0]}",
                             frametype=frametypes[ifo],
                             gpsstart=gpsstart,
                             gpsend=gpsend)
            datacache = Cache.from_urls(urls)
            if len(datacache) == 0:
                print(f"No {ifo} data found.")
                continue
            # Prefer a per-event channel override; fall back to the
            # configured default state-vector channel map.
            if "state vector" in event.meta:
                state_vector_channel = event.meta['state vector']
            else:
                state_vector_channel = ast.literal_eval(
                    config.get("data", "state-vector"))
            state = gwpy.timeseries.StateVector.read(
                datacache,
                state_vector_channel[ifo],
                start=gpsstart,
                end=gpsend,
                pad=0  # padding data so that errors are not raised even if found data are not continuous.
            )
            if not np.issubdtype(state.dtype, np.unsignedinteger):
                # if data are not unsigned integers, cast to them now so that
                # we can determine the bit content for the flags
                state = state.astype(
                    "uint32",
                    casting="unsafe",
                    subok=True,
                    copy=False,
                )
            flags = state.to_dqflags()
            # Intersect the active segments of all required bits: subtracting
            # the complement of each bit's active segments leaves only the
            # time spans where every bit was set.
            segments = flags[bits[0]].active
            for bit in bits:
                segments -= ~flags[bit].active
            if len(segments) > 0:
                active_ifo += [ifo]
        print(event.event_object.name)
        if event.event_object.meta['interferometers'] != active_ifo:
            print(f"Gitlab data\t{event.event_object.meta['interferometers']}")
            print(f"Recommended IFOS\t{active_ifo}")
        event.event_object.meta['interferometers'] = active_ifo
        event.update_data()
def audit(event):
    """
    Conduct an audit of the contents of production ini files
    against the production ledger.

    Parameters
    ----------
    event : str, optional
       The event to be checked.
       Optional; if the event isn't provided all events will be audited.
    """
    if isinstance(event, str):
        event = [event]
    _, repository = connect_gitlab()
    gitlab_events = gitlab.find_events(repository,
                                       subset=event,
                                       update=False,
                                       repo=True)
    # Audit every matched event — the original only examined
    # gitlab_events[0], so the documented "audit all events" case
    # silently ignored everything but the first event.
    for gitlab_event in gitlab_events:
        for production in gitlab_event.productions:
            category = config.get("general", "calibration_directory")
            config_file = os.path.join(
                production.event.repository.directory, category,
                f"{production.name}.ini")
            pipe = known_pipelines[production.pipeline.lower()](production,
                                                                category)
            click.echo(pipe.read_ini(config_file))
def results(event, production, file, hash=None):
    """
    Fetch or list the results of a production.
    """
    engine = config.get("ledger", "engine")
    if engine == "gitlab":
        _, repository = connect_gitlab()
        gitlab_event = gitlab.find_events(repository, subset=event)
        event = gitlab_event[0].event_object
    elif engine == "yamlfile":
        ledger = Ledger(config.get("ledger", "location"))
        event = ledger.get_event(event)

    # Resolve the production name to its Production object on the event.
    production = [prod for prod in event.productions
                  if prod.name == production][0]

    store = Store(root=config.get("storage", "results_store"))
    if not file:
        # No filename given: list every stored resource for the production.
        try:
            items = store.manifest.list_resources(event.name,
                                                  production.name).items()
            click.secho(f"{'Resource':30} {'Hash':32} {'UUID':32}")
            click.secho("-" * 96)
            for resource, details in items:
                click.secho(
                    f"{resource:30} {details['hash']:32} {details['uuid']:32}")
        except KeyError:
            click.secho("There are no results for this production.")
    else:
        # A filename was given: fetch that specific file from the store.
        try:
            click.echo(store.fetch_file(event, production, file, hash))
        except FileNotFoundError:
            click.secho(f"{file} could not be found for this production.")
def production(event, pipeline, family, comment, needs, template, status):
    """
    Add a new production to an event.

    Parameters
    ----------
    event : str
        The event the production is added to.
    pipeline : str
        The analysis pipeline for the new production.
    family : str
        The name family (e.g. "Prod"); the new name is the family plus the
        next unused index.
    comment, needs, template, status :
        Optional production metadata passed into the ledger entry.
    """
    gitlab_event = gitlab.find_events(repository, subset=event)
    event = gitlab_event[0].event_object
    #
    event_prods = event.productions
    names = [production.name for production in event_prods]
    family_entries = [int(name.split(family)[1])
                      for name in names if family in name]
    #
    if "bayeswave" in needs:
        # Depend on every existing bayeswave production for this event.
        bw_entries = [production.name for production in event_prods
                      if "bayeswave" in production.pipeline.lower()]
        needs = bw_entries
    #
    production = {"comment": comment, "pipeline": pipeline, "status": status}
    if needs:
        production['needs'] = needs
    if template:
        production['template'] = template
    # Guard the empty-family case: max() of an empty list raises ValueError,
    # which made it impossible to add the first production of a family.
    # (Mirrors the handling in `create`.)
    if len(family_entries) > 0:
        number = max(family_entries) + 1
    else:
        number = 0
    production_dict = {f"{family}{number}": production}
    production = Production.from_dict(production_dict, event=event)
    #
    click.echo(production)
    event.add_production(production)
    gitlab_event[0].update_data()
def notes(event):
    """Delete automated error-detection notes from the given events' issues."""
    gitlab_events = gitlab.find_events(repository, subset=event)
    # Collect every matching note first, then delete afterwards, so the
    # note list is never mutated while it is being iterated.
    doomed = [note
              for event in gitlab_events
              for note in event.issue_object.notes.list()
              if "error was detected" in note.body]
    for note in doomed:
        note.delete()
def add_data(event, ini_data=None):
    """
    Populate an event's quality and prior metadata from a lalinference-style
    ini file and push the merged metadata back to the ledger.

    Parameters
    ----------
    event : str
        The event to update.
    ini_data : str, optional
        Path to the ini file to read settings from.
    """
    gitlab_event = gitlab.find_events(repository, subset=event)

    def update(d, u):
        # Recursively merge mapping `u` into `d` (nested dicts are merged,
        # scalars are overwritten).
        for k, v in u.items():
            if isinstance(v, collections.abc.Mapping):
                d[k] = update(d.get(k, {}), v)
            else:
                d[k] = v
        return d

    ini = ConfigParser()
    # Preserve option-name case from the ini file.
    ini.optionxform = str
    ini.read(ini_data)

    new_data = {"quality": {}, "priors": {}}
    new_data["quality"]["sample-rate"] = int(ini.get("engine", "srate"))
    new_data["quality"]["lower-frequency"] = ast.literal_eval(
        ini.get("lalinference", "flow"))
    new_data["quality"]["segment-length"] = float(ini.get("engine", "seglen"))
    new_data["quality"]["window-length"] = float(ini.get("engine", "seglen"))
    new_data["quality"]["psd-length"] = float(ini.get("engine", "seglen"))
    new_data["quality"]["reference-frequency"] = float(
        ini.get("engine", "fref"))

    # new_data["priors"]["amp order"] = data['amp_order']
    try:
        ini.get("engine", "comp-max")
        new_data["priors"]["component"] = [
            float(ini.get("engine", "comp-min")),
            float(ini.get("engine", "comp-max"))
        ]
    except Exception:
        # Component-mass bounds are optional in the ini file. (Was a bare
        # `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        pass
    #new_data["priors"]["component"] = [float(ini.get("engine", "comp-min")),
    #                                   None]
    try:
        new_data["priors"]["chirp-mass"] = [
            float(ini.get("engine", "chirpmass-min")),
            float(ini.get("engine", "chirpmass-max"))
        ]
        # NOTE(review): this immediately overwrites the chirp-mass prior set
        # just above with a distance bound — it looks like the key should be
        # "distance" rather than "chirp-mass". Confirm intent before changing.
        new_data["priors"]["chirp-mass"] = [
            None,
            ini.get("engine", "distance-max")
        ]
    except Exception:
        # Chirp-mass / distance bounds are also optional.
        pass
    new_data["priors"]["q"] = [float(ini.get("engine", "q-min")), 1.0]

    update(gitlab_event[0].event_object.meta, new_data)
    print(gitlab_event[0].event_object.meta)
    gitlab_event[0].update_data()
def build(event):
    """
    Create the run configuration files for a given event for jobs which are
    ready to run.
    If no event is specified then all of the events will be processed.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=False)
    for event in events:
        click.echo(f"Working on {event.title}")
        logger = logging.AsimovLogger(event=event.event_object)
        ready_productions = event.event_object.get_all_latest()
        for production in ready_productions:
            click.echo(f"\tWorking on production {production.name}")
            # Skip productions which are already in-flight or terminal.
            if production.status in {
                    "running", "stuck", "wait", "finished", "uploaded",
                    "cancelled", "stopped"
            }:
                continue
            try:
                # If a configuration already exists there is nothing to do.
                configuration = production.get_configuration()
            except ValueError:
                # No configuration yet — generate one and commit it to the
                # event repository.
                try:
                    rundir = config.get("general", "rundir_default")
                    production.make_config(f"{production.name}.ini")
                    click.echo(f"Production config {production.name} created.")
                    logger.info("Run configuration created.",
                                production=production)
                    try:
                        event.event_object.repository.add_file(
                            f"{production.name}.ini",
                            os.path.join(f"{production.category}",
                                         f"{production.name}.ini"))
                        logger.info(
                            "Configuration committed to event repository.",
                            production=production)
                    except Exception as e:
                        # Commit failures are logged but do not stop the
                        # remaining productions from being processed.
                        logger.error(
                            f"Configuration could not be committed to repository.\n{e}",
                            production=production)
                except DescriptionException as e:
                    logger.error("Run configuration failed",
                                 production=production,
                                 channels=["file", "mattermost"])
def submit(event, update):
    """
    Submit the run configuration files for a given event for jobs which are
    ready to run.
    If no event is specified then all of the events will be processed.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=update)
    for event in events:
        logger = logging.AsimovLogger(event=event.event_object)
        ready_productions = event.event_object.get_all_latest()
        for production in ready_productions:
            # Skip productions which are already in-flight or terminal.
            if production.status.lower() in {
                    "running", "stuck", "wait", "processing", "uploaded",
                    "finished", "manual", "cancelled", "stopped"
            }:
                continue
            if production.status.lower() == "restart":
                # Restarting: clean the old working state, then resubmit the
                # existing DAG.
                if production.pipeline.lower() in known_pipelines:
                    pipe = known_pipelines[production.pipeline.lower()](
                        production, "C01_offline")
                    pipe.clean()
                    pipe.submit_dag()
            else:
                #try:
                #    configuration = production.get_configuration()
                #except ValueError as e:
                #    #build(event)
                #    logger.error(f"Error while trying to submit a configuration. {e}", production=production, channels="gitlab")
                # Fresh submission: build the DAG first, then submit it.
                if production.pipeline.lower() in known_pipelines:
                    pipe = known_pipelines[production.pipeline.lower()](
                        production, "C01_offline")
                    try:
                        pipe.build_dag()
                    except PipelineException:
                        logger.error(
                            "The pipeline failed to build a DAG file.",
                            production=production)
                    try:
                        pipe.submit_dag()
                        production.status = "running"
                    except PipelineException as e:
                        # Submission failed: flag the production as stuck so
                        # it is picked up for investigation.
                        production.status = "stuck"
                        logger.error(
                            f"The pipeline failed to submit the DAG file to the cluster. {e}",
                            production=production)
def create(event, pipeline, family, comment, needs, template, status, approximant):
    """
    Add a new production to an event.

    Parameters
    ----------
    event : str
        The event to add the production to.
    pipeline : str
        The analysis pipeline for the new production.
    family : str
        Name family; the new production is named family + next free index.
    comment, needs, template, status, approximant :
        Optional metadata stored on the new production.
    """
    # Resolve the event through whichever ledger backend is configured.
    if config.get("ledger", "engine") == "gitlab":
        _, repository = connect_gitlab()
        gitlab_event = gitlab.find_events(repository, subset=event)
        event = gitlab_event[0].event_object
    elif config.get("ledger", "engine") == "yamlfile":
        ledger = Ledger(config.get("ledger", "location"))
        event = ledger.get_event(event)
    #
    event_prods = event.productions
    names = [production.name for production in event_prods]
    # Existing indices within this name family, e.g. Prod3 -> 3.
    family_entries = [int(name.split(family)[1])
                      for name in names if family in name]
    #
    if "bayeswave" in needs:
        # Depend on all non-rejected bayeswave productions for this event.
        bw_entries = [production.name for production in event_prods
                      if ("bayeswave" in production.pipeline.lower())
                      and (production.review.status not in
                           {"REJECTED", "DEPRECATED"})]
        needs = bw_entries
    #
    production = {"comment": comment, "pipeline": pipeline, "status": status}
    if needs:
        production['needs'] = needs
    if template:
        production['template'] = template
    if approximant:
        production['approximant'] = approximant
    # First member of a family starts at 0; otherwise use the next index.
    if len(family_entries) > 0:
        number = max(family_entries) + 1
    else:
        number = 0
    production_dict = {f"{family}{number}": production}
    production = Production.from_dict(production_dict, event=event)
    #
    click.echo(production)
    event.add_production(production)
    # Persist through the same backend the event was loaded from.
    if config.get("ledger", "engine") == "gitlab":
        gitlab_event[0].update_data()
    elif config.get("ledger", "engine") == "yamlfile":
        ledger.events[event.name] = event.to_dict()
        ledger.save()
def add_data(event, yaml_data, json_data=None):
    """Merge metadata from a YAML file into an event's ledger entry."""
    gitlab_event = gitlab.find_events(repository, subset=event)

    def update(d, u):
        # Recursive dictionary merge: nested mappings are merged in place
        # rather than replaced wholesale.
        for key, value in u.items():
            if isinstance(value, collections.abc.Mapping):
                d[key] = update(d.get(key, {}), value)
            else:
                d[key] = value
        return d

    if yaml_data:
        with open(yaml_data, "r") as datafile:
            payload = yaml.safe_load(datafile.read())
        target = gitlab_event[0]
        target.event_object.meta = update(target.event_object.meta, payload)
        target.update_data()
        print(target.event_object.meta)
def load(event, data):
    """Merge metadata from a YAML file into a single event's ledger entry."""
    server, repository = connect_gitlab()
    gitlab_event = gitlab.find_events(repository, subset=event)[0]

    def update(d, u):
        # Recursive dictionary merge: nested mappings are merged in place
        # rather than replaced wholesale.
        for key, value in u.items():
            if isinstance(value, collections.abc.Mapping):
                d[key] = update(d.get(key, {}), value)
            else:
                d[key] = value
        return d

    if data:
        with open(data, "r") as datafile:
            payload = yaml.safe_load(datafile.read())
        gitlab_event.event_object.meta = update(gitlab_event.event_object.meta,
                                                payload)
        gitlab_event.update_data()
        print(gitlab_event.event_object.meta)
def results(event, update):
    """
    Find all available results for a given event.

    Parameters
    ----------
    event : str
        The event to list results for.
    update : bool
        Whether to refresh event data from the tracking repository.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=update,
                                repo=False)
    for event in events:
        click.secho(f"{event.title}")
        logger = logging.AsimovLogger(event=event.event_object)
        for production in event.productions:
            try:
                for result, meta in production.results().items():
                    print(
                        f"{production.event.name}/{production.name}/{result}, {production.results(result)}"
                    )
            except Exception:
                # Best-effort listing: productions without results are
                # skipped. (Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit.)
                pass
def populate(event, yaml, ini):
    """
    Populate an event ledger with data from ini or yaml files.

    Parameters
    ----------
    event : str
        The event to populate.
    yaml : str, optional
        Path to a YAML file of default data to merge into the event.
    ini : str, optional
        Path to an ini file (currently unused here).
    """
    gitlab_events = gitlab.find_events(repository, subset=[event])
    event = gitlab_events[0]
    event_o = event.event_object
    # Check the calibration files for this event
    click.echo("Check the calibration.")
    calibration(event_o.name)
    # Check the IFOs for this event
    click.echo("Check the IFO list")
    try:
        checkifo(event_o.name)
    except Exception:
        # IFO checking is best-effort; a failure (e.g. missing frame data)
        # should not abort population. (Was a bare `except:`, which also
        # swallowed KeyboardInterrupt/SystemExit.)
        pass
    # Add default data
    click.echo("Add in default channel data.")
    if yaml:
        add_data(event_o.name, yaml)
def add(event, production, status, message):
    """
    Add a review signoff or rejection to an event.

    Parameters
    ----------
    event : str
        The event whose production is being reviewed.
    production : str
        The name of the production the review message applies to.
    status : str
        The review status (e.g. approval or rejection).
    message : str
        The review message text.
    """
    server, repository = connect_gitlab()
    gitlab_events = gitlab.find_events(repository,
                                       subset=[event],
                                       update=False,
                                       repo=False)
    for event in gitlab_events:
        # Resolve the name into a distinct variable: the original rebound
        # `production` itself, so for any event after the first the name
        # filter compared against a Production object and always failed.
        production_o = [prod for prod in event.productions
                        if prod.name == production][0]
        click.secho(event.title, bold=True)
        message_o = ReviewMessage(message=message,
                                  production=production_o,
                                  status=status)
        production_o.review.add(message_o)
        if hasattr(event, "issue_object"):
            event.issue_object.update_data()
def configurator(event, json_data=None):
    """Merge configurator output from a JSON file into an event's metadata."""
    gitlab_event = gitlab.find_events(repository, subset=event)

    def update(d, u):
        # Recursive dictionary merge: nested mappings are merged in place
        # rather than replaced wholesale.
        for key, value in u.items():
            if isinstance(value, collections.abc.Mapping):
                d[key] = update(d.get(key, {}), value)
            else:
                d[key] = value
        return d

    if json_data:
        with open(json_data, "r") as datafile:
            data = json.load(datafile)

        def decide_fref(freq):
            # Frequencies in [5, 10) snap to 5 Hz; everything else rounds
            # down to the nearest multiple of 10 Hz.
            return 5 if 5 <= freq < 10 else floor(freq / 10) * 10

        new_data = {
            "quality": {
                "sample-rate": data["srate"],
                "lower-frequency": {},
                "upper-frequency": int(0.875 * data["srate"] / 2),
                "start-frequency": data['f_start'],
                "segment-length": data['seglen'],
                "window-length": data['seglen'],
                "psd-length": data['seglen'],
                "reference-frequency": decide_fref(data['f_ref']),
            },
            "priors": {
                "amp order": data['amp_order'],
                "chirp-mass": [data["chirpmass_min"], data["chirpmass_max"]],
            },
        }
        update(gitlab_event[0].event_object.meta, new_data)
        print(gitlab_event[0].event_object.meta)
        gitlab_event[0].update_data()
def ledger(event, yaml_f):
    """
    Return the ledger for a given event.
    If no event is specified then the entire production ledger is returned.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=False,
                                repo=False)
    # Round-trip each event through YAML so the dump below is plain data.
    total = [yaml.safe_load(ev.event_object.to_yaml()) for ev in events]
    click.echo(yaml.dump(total))
    if yaml_f:
        with open(yaml_f, "w") as f:
            f.write(yaml.dump(total))
from ligo.gracedb.rest import GraceDb, HTTPError client = GraceDb(service_url=config.get("gracedb", "url")) r = client.ping() superevent_iterator = client.superevents('O3B_CBC_CATALOG') superevent_ids = [ superevent['superevent_id'] for superevent in superevent_iterator ] server = gitlab.gitlab.Gitlab(config.get("gitlab", "url"), private_token=config.get("gitlab", "token")) repository = server.projects.get(config.get("olivaw", "tracking_repository")) gitlab_events = gitlab.find_events(repository) super_events = set(superevent_ids) - {event.title for event in gitlab_events} # Add the new events for superevent in list(super_events): data = client.superevent(superevent).json() event_data = client.event(data['preferred_event']).json() event_url = f"https://catalog-dev.ligo.org/events/{data['preferred_event']}/view/" event = Event(name=superevent, repository=f"[email protected]:pe/O3/{superevent}", gid=data['preferred_event'], gid_url=event_url,
def html(event, webdir):
    """
    Return the ledger for a given event. If no event is specified then the
    entire production ledger is returned.
    """
    server, repository = connect_gitlab()
    if not webdir:
        webdir = config.get("report", "report_root")
    click.echo("Getting events...")
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                repo=False,
                                update=False)
    click.echo("Got events")
    # Use a project-local asimov.conf for report styling when one exists.
    if len(glob.glob("asimov.conf")) > 0:
        config_file = "asimov.conf"
    else:
        config_file = None
    # Top-level index report page.
    report = otter.Otter(f"{webdir}/index.html",
                         author="Olivaw",
                         title="Olivaw PE Report",
                         author_email=config.get("report", "report_email"),
                         config_file=config_file)
    with report:
        navbar = bt.Navbar("Asimov", background="navbar-dark bg-primary")
        report + navbar
    with report:
        time = bt.Container()
        time + f"Report generated at {str(datetime.now(tz))}"
        report + time
    cards = []
    container = bt.Container()
    container + "# All PE Productions"
    for event in events:
        click.secho(event.title, bold=True)
        # One standalone report page per event, plus a card on the index.
        event_report = otter.Otter(f"{webdir}/{event.title}.html",
                                   author="Olivaw",
                                   title=f"Olivaw PE Report | {event.title}",
                                   author_email="*****@*****.**",
                                   config_file=config_file)
        with event_report:
            navbar = bt.Navbar("Asimov", background="navbar-dark bg-primary")
            event_report + navbar
        card = bt.Card(title=f"<a href='{event.title}.html'>{event.title}</a>")
        # Table of contents linking to each production's section.
        toc = bt.Container()
        for production in event.productions:
            toc + f"* [{production.name}](#{production.name}) | {production.pipeline} |"
            # + bt.Badge({production.pipeline}, "info")
        with event_report:
            title_c = bt.Container()
            title_c + f"#{event.title}"
            event_report + title_c
            event_report + toc
        production_list = bt.ListGroup()
        for production in event.productions:
            click.echo(f"{event.title}\t{production.name}")
            # NOTE(review): `pipe` is only (re)assigned when the pipeline is
            # recognised; the later pipe.check_progress() call appears to
            # rely on bilby always being a known pipeline — confirm.
            if production.pipeline.lower() in known_pipelines:
                pipe = known_pipelines[production.pipeline.lower()](
                    production, "C01_offline")
            event_log = otter.Otter(
                f"{webdir}/{event.title}-{production.name}.html",
                author="Olivaw",
                title=f"Olivaw PE Report | {event.title} | {production.name}",
                author_email="*****@*****.**",
                config_file=config_file)
            # Map production states to bootstrap contextual classes for the
            # list-group item colouring.
            status_map = {
                "cancelled": "light",
                "finished": "success",
                "uploaded": "success",
                "processing": "primary",
                "running": "primary",
                "stuck": "warning",
                "restart": "secondary",
                "ready": "secondary",
                "wait": "light",
                "stop": "danger",
                "manual": "light",
                "stopped": "light"
            }
            with event_report:
                container = bt.Container()
                container + f"## {production.name}"
                container + f"<a id='{production.name}'/>"
                container + "### Ledger"
                container + production.meta
            if production.pipeline.lower() == "bilby":
                # Bilby exposes per-sampler progress via the pipeline object.
                container + f"### Progress"
                progress_line = []
                procs = pipe.check_progress()
                for proc, val in procs.items():
                    container + f"- {proc.split('_')[-1]}\t{val[0]}\t{val[1]}"
                    progress_line.append(f"{val[1]}")
            else:
                progress_line = []
            if production.status.lower() == "running":
                progress = str(bt.Badge("|".join(progress_line)))
            else:
                progress = ""
            if production.status.lower() == "uploaded":
                # Uploaded results link directly to the pesummary home page.
                link = os.path.join(
                    "https://ldas-jobs.ligo.caltech.edu",
                    config.get('general', 'webroot').replace(
                        "/home/", "~").replace("public_html/", ""),
                    production.event.name, production.name, "results",
                    "home.html")
                item_text = f"<a href='{link}'>{production.name}</a>"
            else:
                item_text = f"<a href='{event.title}.html#{production.name}'>{production.name}</a>"
            production_list.add_item(
                item_text + str(bt.Badge(f"{production.pipeline}", "info")) +
                progress + str(bt.Badge(f"{production.status}")),
                context=status_map[production.status])
            # logs = pipe.collect_logs()
            # container + f"### Log files"
            # container + f"<a href='{event.title}-{production.name}.html'>Log file page</a>"
            # with event_log:
            #     for log, message in logs.items():
            #         log_card = bt.Card(title=f"{log}")
            #         log_card.add_content("<div class='card-body'><pre>"+message+"</pre></div>")
            #         event_log + log_card
            with event_report:
                event_report + container
        card.add_content(production_list)
        cards.append(card)
    with report:
        if len(cards) == 1:
            report + card
        else:
            # Lay cards out two per deck.
            # NOTE(review): with an odd number of cards the final deck
            # appears never to be added to the report — confirm.
            for i, card in enumerate(cards):
                if i % 2 == 0:
                    deck = bt.CardDeck()
                deck + card
                if i % 2 == 1:
                    report + deck
from asimov import condor, git
from asimov.ini import RunConfiguration

import os, glob, datetime
import otter
import click

# Module-level Gitlab connection, tracking repository, and event list shared
# by the routines in this module.
server = gitlab.gitlab.Gitlab('https://git.ligo.org',
                              private_token=config.get("gitlab", "token"))
repository = server.projects.get(config.get("olivaw", "tracking_repository"))

uber_repository = git.MetaRepository(config.get("olivaw", "metarepository"))

events = gitlab.find_events(repository,
                            milestone=config.get("olivaw", "milestone"))

mattermost = mattermost.Mattermost()

from pesummary.gw.file.read import read


def get_psds_rundir(rundir):
    """
    Return a mapping of detector name to the BayesWave median PSD file
    found under ``rundir``; detectors without a PSD file are omitted.
    """
    psds = {}
    dets = ['L1', 'H1', 'V1']
    for det in dets:
        asset = f"{rundir}/ROQdata/0/BayesWave_PSD_{det}/post/clean/glitch_median_PSD_forLI_{det}.dat"
        if os.path.exists(asset):
            psds[det] = asset
    return psds
def monitor(event, update, dry_run):
    """
    Monitor condor jobs' status, and collect logging information.
    """
    server, repository = connect_gitlab()
    events = gitlab.find_events(repository,
                                milestone=config.get("olivaw", "milestone"),
                                subset=[event],
                                update=update,
                                repo=True)
    for event in events:
        # Per-event counters used to decide the event's overall state below.
        stuck = 0
        running = 0
        ready = 0
        finish = 0
        click.secho(f"{event.title}", bold=True)
        on_deck = [
            production for production in event.productions
            if production.status.lower() in ACTIVE_STATES
        ]
        for production in on_deck:
            click.secho(f"\t{production.name}", bold=True)
            if not dry_run:
                logger = logging.AsimovLogger(event=event.event_object)
            else:
                logger = None
            # Deal with jobs which need to be stopped first
            if production.status.lower() == "stop":
                pipe = known_pipelines[production.pipeline.lower()](
                    production, "C01_offline")
                if not dry_run:
                    pipe.eject_job()
                    production.status = "stopped"
                    click.echo(f"\t\t{production.name} stopped")
                else:
                    # NOTE(review): missing f-prefix — this prints the
                    # literal "{production.name}" rather than the name.
                    click.echo("\t\t{production.name} --> stopped")
                continue
            # Get the condor jobs
            try:
                if "job id" in production.meta:
                    if not dry_run:
                        job = condor.CondorJob(production.meta['job id'])
                    else:
                        click.echo(f"\t\tRunning under condor")
                else:
                    raise ValueError  # Pass to the exception handler
                if not dry_run:
                    if job.status.lower() == "running":
                        pass
                    if job.status.lower() == "processing":
                        pass
                    # A stuck condor job marks both the production and, for
                    # the main run stage, the whole event as stuck.
                    if event.state == "running" and job.status.lower(
                    ) == "stuck":
                        click.echo("\t\tJob is stuck on condor")
                        event.state = "stuck"
                        production.status = "stuck"
                        stuck += 1
                        production.meta['stage'] = 'production'
                    elif event.state == "processing" and job.status.lower(
                    ) == "stuck":
                        click.echo("\t\tPost-processing is stuck on condor")
                        production.status = "stuck"
                        stuck += 1
                        production.meta['stage'] = "post"
                    else:
                        running += 1
            except ValueError as e:
                # Either no "job id" in the metadata or the condor lookup
                # failed: investigate the production via its pipeline.
                click.echo(e)
                click.echo(
                    f"\t\t{production.name}\t{production.status.lower()}")
                if production.pipeline.lower() in known_pipelines:
                    click.echo("Investigating...")
                    pipe = known_pipelines[production.pipeline.lower()](
                        production, "C01_offline")
                    if production.status.lower() == "stop":
                        pipe.eject_job()
                        production.status = "stopped"
                    elif production.status.lower() == "finished":
                        click.echo("Finished")
                        pipe.after_completion()
                    elif production.status.lower() == "processing":
                        # Need to check the upload has completed
                        try:
                            pipe.after_processing()
                        except ValueError as e:
                            click.echo(e)
                            #production.status = "stuck"
                            #stuck += 1
                            production.meta['stage'] = "after processing"
                    elif pipe.detect_completion() and production.status.lower(
                    ) == "running":
                        # The job has been completed, collect its assets
                        production.meta['job id'] = None
                        finish += 1
                        production.status = "finished"
                        pipe.after_completion()
                    else:
                        # It looks like the job has been evicted from the cluster
                        click.echo(f"Attempting to rescue {production.name}")
                        #event.state = "stuck"
                        #production.status = "stuck"
                        #production.meta['stage'] = 'production'
                        try:
                            pipe.resurrect()
                        # NOTE(review): bare except — swallows everything
                        # including KeyboardInterrupt; narrow if possible.
                        except:
                            production.status = "stuck"
                            production.meta['error'] = "resurrection error"
                if production.status == "stuck":
                    event.state = "stuck"
                production.event.issue_object.update_data()
        # Derive the event-level state from the per-production counters.
        if (running > 0) and (stuck == 0):
            event.state = "running"
        elif (stuck == 0) and (running == 0) and (finish > 0):
            event.state = "finished"
def clone(location):
    # Clone an existing asimov project (its configuration, results store,
    # and ledger) from `location` into a new directory under the current
    # working directory, rewriting paths in the copied asimov.conf.
    import venv
    import pathlib
    import shutil
    working = "working"
    results = "results"
    remote_config = os.path.join(location, "asimov.conf")
    config = configparser.ConfigParser()
    config.read([remote_config])
    click.echo(f'Cloning {config.get("project", "name")}')
    # Project root: CWD + slugified project name.
    root = os.path.join(
        os.getcwd(),
        config.get("project", "name").lower().replace(" ", "-"))
    pathlib.Path(root).mkdir(parents=True, exist_ok=True)
    os.chdir(root)
    config.set("project", "root", root)
    # Make the virtual environment
    #builder = venv.EnvBuilder(system_site_packages=False,
    #                          clear=False,
    #                          symlinks=False,
    #                          upgrade=False,
    #                          with_pip=True,
    #                          prompt=f"Asimov {project_name}")
    #builder.create("environment")
    #config.set("general", "environment", "environment")
    # Make the working directory
    #shutil.copytree(os.path.join(config.get("general", "rundir_default"), working)
    #config.set("general", "rundir_default", working)
    # Make the git directory
    #pathlib.Path(checkouts).mkdir(parents=True, exist_ok=True)
    #config.set("general", "git_default", checkouts)
    # Copy the results store
    #shutil.copyfile(os.path.join(location, config.get("storage", "results_store")), results)
    shutil.copytree(
        os.path.join(location, config.get("storage", "results_store")),
        results)
    config.set("storage", "results_store", results)
    # Make the ledger
    if config.get("ledger", "engine") == "yamlfile":
        shutil.copyfile(
            os.path.join(location, config.get("ledger", "location")),
            "ledger.yml")
    elif config.get("ledger", "engine") == "gitlab":
        # A gitlab-backed ledger is materialised into a local YAML ledger:
        # fetch every event and serialise them to ledger.yml, then switch
        # the cloned project's ledger engine to yamlfile.
        _, repository = connect_gitlab(config)
        events = gitlab.find_events(repository,
                                    update=False,
                                    subset=[None],
                                    label=config.get("gitlab", "event_label"),
                                    repo=False)
        total = []
        for event in events:
            total.append(yaml.safe_load(event.event_object.to_yaml()))
        with open("ledger.yml", "w") as f:
            f.write(yaml.dump(total))
        config.set("ledger", "engine", "yamlfile")
        config.set("ledger", "location", "ledger.yml")
    # Write the rewritten configuration into the cloned project.
    with open("asimov.conf", "w") as config_file:
        config.write(config_file)
def setUpClass(cls):
    """Connect to GitLab once and cache every tracked event for the tests."""
    repository = connect_gitlab()[1]
    cls.events = gitlab.find_events(repository)