def __init__(self, submitdir, raise_err=True):
    """Initialize a submit-directory wrapper.

    @param submitdir: path to the workflow submit directory
    @param raise_err: when True (default), raise SubmitDirException if
        submitdir is not a directory; when False, record
        submitdir_exists=False and return a partially initialized object
    @raises SubmitDirException: if submitdir is invalid and raise_err is True
    """
    self.submitdir = os.path.abspath(submitdir)

    self.submitdir_exists = True
    if not os.path.isdir(submitdir):
        self.submitdir_exists = False
        if raise_err is False:
            # Caller opted out of hard failure; leave the object marked
            # as pointing at a missing directory.
            return
        raise SubmitDirException("Invalid submit dir: %s" % submitdir)

    # Prefer the YAML braindump; fall back to the legacy text format.
    self.braindump_file = os.path.join(self.submitdir, "braindump.yml")
    if not os.path.isfile(self.braindump_file):
        self.braindump_file = os.path.join(self.submitdir, "braindump.txt")

    # Read the braindump file.
    # Fix: drop the redundant single-argument os.path.join() wrapper --
    # os.path.join(x) is just x.
    self.braindump = utils.slurp_braindb(self.submitdir)

    # Read some attributes from braindump file
    self.wf_uuid = self.braindump["wf_uuid"]
    self.root_wf_uuid = self.braindump["root_wf_uuid"]
    self.user = self.braindump["user"]

    self.archname = os.path.join(self.submitdir, "archive.tar.gz")
def setup(submit_dir, config_properties):
    """
    Setup the populate module
    @submit_dir submit directory path of the workflow run
    @config_properties path to the propery file
    """
    # global reference
    global global_base_submit_dir
    global global_braindb_submit_dir
    global global_db_url
    global global_top_wf_uuid

    global_base_submit_dir = submit_dir

    # Getting values from braindump file: newer braindumps record the
    # directory under 'submit_dir', older ones under 'run'.
    config = utils.slurp_braindb(submit_dir)
    if 'submit_dir' in config:
        dir_key = 'submit_dir'
    elif 'run' in config:
        dir_key = 'run'
    else:
        logger.error(
            "Submit directory cannot be found in the braindump.txt . ")
        sys.exit(1)
    global_braindb_submit_dir = os.path.abspath(config[dir_key])

    # Create the sqllite db url
    global_db_url = connection.url_by_submitdir(
        submit_dir, connection.DBType.WORKFLOW, config_properties)
    global_top_wf_uuid = connection.get_wf_uuid(submit_dir)

    if global_db_url is None:
        sys.exit(1)
def pegasus_remove(ctx, dag_id=None, verbose=False, submit_dir=None):
    """pegasus-remove helps you remove an entire workflow."""
    if not submit_dir and not dag_id:
        print(
            "You must provide either a dag_id or dagdirectory to remove a workflow."
        )
        ctx.exit(1)

    if submit_dir:
        cwd = os.getcwd()
        submit_dir = str(Path(submit_dir).resolve())
        try:
            os.chdir(submit_dir)
        except PermissionError:
            click.secho(
                click.style("Error: ", fg="red", bold=True)
                + "Cannot change to directory %s" % submit_dir)
            ctx.exit(1)

        config = slurp_braindb(submit_dir)
        if not config:
            click.secho(
                click.style("Error: ", fg="red", bold=True)
                + "%s is not a valid submit-dir" % submit_dir)
            ctx.exit(1)

        # Recover the DAGMan cluster id from the dagman.out log; the last
        # "STARTING UP" banner corresponds to the most recent run.
        dag_log_file = config["dag"] + ".dagman.out"
        pattern = re.compile(r"\.([0-9\.]+) \(CONDOR_DAGMAN\) STARTING UP")
        with open(dag_log_file) as fp:
            for line in fp.readlines():
                match = pattern.search(line)
                if match:
                    dag_id = match.group(1)
            else:
                # for/else: runs after the scan; bail out if no id was found
                if not dag_id:
                    click.secho(
                        click.style("Error: ", fg="red", bold=True)
                        + "You must provide either a dag-id or dag-directory to remove a workflow."
                    )
                    ctx.exit(1)
        os.chdir(cwd)

    if dag_id:
        condor_rm = shutil.which("condor_rm")
        # Fix: shutil.which() returns None when condor_rm is not on PATH;
        # previously this fell through to subprocess.run((None, dag_id))
        # and crashed with an opaque TypeError.
        if condor_rm is None:
            click.secho(
                click.style("Error: ", fg="red", bold=True)
                + "condor_rm not found in PATH")
            ctx.exit(1)
        cmd = (condor_rm, dag_id)
        rv = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if rv.returncode == 0:
            click.echo(rv.stdout.decode().strip())
            click.secho("✨ Success", fg="green")
        else:
            click.echo(rv.stderr.decode().strip())
            click.secho("Error ", fg="red", bold=True)
            ctx.exit(42)
def get_workflow_uuid(submit_dir):
    """Return (root_wf_uuid, wf_uuid) for the workflow in submit_dir.

    @param submit_dir: path to a workflow submit directory
    @raises ValueError: if the directory contains neither braindump.yml
        nor braindump.txt
    """
    bdump_yml = Path(submit_dir) / "braindump.yml"
    bdump_txt = Path(submit_dir) / "braindump.txt"

    # Idiom fix: test truthiness directly instead of comparing the
    # booleans with `is False`.
    if not (bdump_yml.exists() or bdump_txt.exists()):
        raise ValueError("Not a valid workflow submit directory: %r" % submit_dir)

    braindump = utils.slurp_braindb(submit_dir)
    return braindump["root_wf_uuid"], braindump["wf_uuid"]
def purge_wf_uuid_from_dashboard_database(rundir, output_db):
    """
    This function purges a workflow id from the output database.
    """
    # Parse the braindump file
    workflow_params = utils.slurp_braindb(rundir)

    uuid = workflow_params.get("wf_uuid", None)
    if uuid is None:
        # No workflow id recorded for this run directory; nothing to purge.
        return

    expunge.delete_dashboard_workflow(output_db, uuid)
def purge_wf_uuid_from_database(rundir, output_db):
    """
    This function purges a workflow id from the output database.
    """
    # PM-652 do nothing for sqlite
    # DB is already rotated in pegasus-monitord
    if output_db.lower().startswith("sqlite"):
        return

    # Parse the braindump file
    workflow_params = utils.slurp_braindb(rundir)

    uuid = workflow_params.get("wf_uuid", None)
    if uuid is None:
        # No workflow id recorded; nothing to delete.
        return

    expunge.delete_workflow(output_db, uuid)
def populate_chart(wf_uuid, expand=False):
    """
    Populates the workflow info object corresponding to the wf_uuid
    @param wf_uuid the workflow uuid
    @param expand expand workflow or not.
    """
    stampede_stats = get_wf_stats(wf_uuid, expand)
    info = populate_workflow_details(stampede_stats)

    sub_uuids = stampede_stats.get_sub_workflow_ids()
    info.sub_wf_id_uuids = sub_uuids
    if len(sub_uuids) > 0:
        # Map job instances to their sub-workflow uuids only when sub
        # workflows actually exist.
        info.job_instance_id_sub_wf_uuid_map = get_job_inst_sub_workflow_map(
            stampede_stats)

    # Read DAG/DAX locations out of the braindump file for this run.
    run_dir = rlb(info.submit_dir)
    config = utils.slurp_braindb(run_dir)

    if 'dag' in config:
        dag_name = config['dag']
        # Label is the dag file name with the ".dag" suffix stripped.
        info.dag_label = dag_name[:dag_name.find(".dag")]
        info.dag_file_path = os.path.join(run_dir, dag_name)

    if 'dax' in config:
        info.dax_file_path = config['dax']

    return stampede_stats, info
def pegasus_run(ctx, grid=False, json=False, verbose=0, submit_dir=None):
    """."""
    # Each -v step raises log verbosity by one level (capped at 3 steps).
    logging.basicConfig(level=logging.ERROR - (min(verbose, 3) * 10))
    os.umask(0o022)
    cwd = os.getcwd()
    config = slurp_braindb(submit_dir)
    submit_dir = str(Path(submit_dir).resolve())
    if not config:
        click.secho(
            click.style("Error: ", fg="red", bold=True)
            + "%s is not a valid submit-dir" % submit_dir)
        ctx.exit(1)
    try:
        os.chdir(submit_dir)
    except PermissionError:
        click.secho(
            click.style("Error: ", fg="red", bold=True)
            + "Cannot change to directory %s" % submit_dir)
        ctx.exit(1)
    if grid:
        # Verify the grid environment before submitting, if requested.
        try:
            grid_check()
        except (FileNotFoundError, PermissionError, ValueError) as e:
            click.secho(click.style("Error: ", fg="red", bold=True) + str(e))
            ctx.exit(1)
    if config["dag"]:
        try:
            # sanity check: Is the DAG file there?
            check_dag(config["dag"])
            # PM-870 we have already changed to the directory, don't prepend $run again
            dag_sub_file = config["dag"] + ".condor.sub"
            # PM-702: clean up .halt files from pegasus-halt
            halt_released = False
            if Path(config["dag"] + ".halt").exists():
                click.echo(
                    "Found a previously halted workflow. Releasing it now.")
                os.system("find . -name '*.dag.halt' -exec rm {} \\;")
                halt_released = True
            # After the switch from condor_submit_dag, we lost the check to see if
            # a workflow is already running. This replaces those checks.
            if Path("monitord.pid").exists():
                if halt_released:
                    # A halted workflow that is still running was just
                    # released; nothing more to do.
                    ctx.exit(0)
                else:
                    click.secho(
                        click.style("Error: ", fg="red", bold=True)
                        + "It looks like the workflow is already running! If you are sure\n"
                        + " that is not the case, please remove the monitord.pid file and try\n"
                        + " again.",
                        err=True,
                    )
                    ctx.exit(1)
            # PM-797 do condor_submit on dagman.condor.sub file if it exists
            exec_dag(dag_sub_file, config["condor_log"])
            log.debug("# dagman is running")
            if json:
                click.echo(dumps(config))
            else:
                click.secho("""
Your workflow has been started and is running in the base directory:

%(submit_dir)s

*** To monitor the workflow you can run ***

pegasus-status -l %(submit_dir)s

*** To remove your workflow run ***

pegasus-remove %(submit_dir)s""" % {"submit_dir": submit_dir})
        except (FileNotFoundError, PermissionError, ValueError) as e:
            click.secho(click.style("Error: ", fg="red", bold=True) + str(e))
            ctx.exit(1)
        except subprocess.CalledProcessError as e:
            # Propagate the child's exit code to the CLI caller.
            rc = e.returncode
            if rc != 0:
                click.secho(
                    click.style("Error: ", fg="red", bold=True)
                    + "Running %s failed with %d" % (e.cmd, rc))
                ctx.exit(rc)
    elif config["type"] == "shell":
        # Shell-code workflow: run the generated script directly.
        try:
            exec_script(config["script"])
            click.secho("✨ Success", fg="green")
        except (FileNotFoundError, PermissionError) as e:
            click.secho(click.style("Error: ", fg="red", bold=True) + str(e))
            ctx.exit(1)
        except subprocess.CalledProcessError as e:
            rc = e.returncode
            if rc != 0:
                click.secho(
                    click.style("Error: ", fg="red", bold=True)
                    + "Running %s failed with %d" % (config["script"], rc))
                ctx.exit(rc)
    os.chdir(cwd)