def set_project(self, project_config):
    # Load the project-level environment (KEY=VALUE settings) from the config file
    project_env = gb_m.loadEnv(project_config)
    if project_env["DESIGN_FILE"]:
        self.project_design_file = project_env["DESIGN_FILE"]
        self.project_reads_base = dirname(project_env["DESIGN_FILE"])
    if "ORIGINAL_READS_BASE" in project_env:
        self.project_reads_base = project_env["ORIGINAL_READS_BASE"]
    if project_env["PROJECT_TEAM_NAME"]:
        self.project_team_id = project_env["PROJECT_TEAM_NAME"]
    if project_env["PROJECT_NAME"]:
        self.project_name = project_env["PROJECT_NAME"]
    if project_env["LOG_BASE"]:
        self.biocore_log_base = project_env["LOG_BASE"]
    if project_env["CWL_SCRIPT"]:
        self.project_cwl_script = project_env["CWL_SCRIPT"]
    if project_env["RUN_ID"]:
        self.project_run_id = project_env["RUN_ID"]
    if project_env["READS_BASE"]:
        self.scratch_reads_base = project_env["READS_BASE"]
    if project_env["RESULTS_DIR"]:
        self.project_results_base = project_env["RESULTS_DIR"]
    if project_env["PATH2_JSON_FILES"]:
        self.bicore_pipelinejson_dir = project_env["PATH2_JSON_FILES"]
    if project_env["PIPELINE_META_BASE"]:
        self.bicore_pipelinemeta_dir = project_env["PIPELINE_META_BASE"]
    if project_env["AWS_ACCOUNT_ID"]:
        self.aws_account_id = project_env["AWS_ACCOUNT_ID"]
    if project_env["AWS_AGENT_ID"]:
        self.aws_onprem_agent_id = project_env["AWS_AGENT_ID"]
    if project_env["AWS_AGENT_IP"]:
        self.aws_onprem_agent_ip = project_env["AWS_AGENT_IP"]
    if project_env["JSON_TEMPLATE"]:
        self.json_template = project_env["JSON_TEMPLATE"]
        # Best-effort scan of the JSON template: collect every File path and
        # Directory location that actually exists on disk.
        try:
            with open(project_env["JSON_TEMPLATE"]) as f:
                json_temp_obj = json.load(f)
            for key in list(json_temp_obj.keys()):
                data = json_temp_obj[key]
                if isinstance(data, list):
                    for index, item in enumerate(data):
                        if isinstance(item, dict):
                            if "File" in item["class"]:
                                if isfile(str(item["path"])):
                                    self.json_template_files.append(item["path"])
                            elif "Directory" in item["class"]:
                                if isdir(str(item["location"])):
                                    self.json_template_directories.append(item["location"])
                elif isinstance(data, dict):
                    if "File" in data["class"]:
                        if isfile(str(data["path"])):
                            self.json_template_files.append(data["path"])
                    elif "Directory" in data["class"]:
                        if isdir(str(data["location"])):
                            self.json_template_directories.append(data["location"])
                else:
                    if isfile(str(data)):
                        self.json_template_files.append(data)
        except Exception:
            # The template scan is optional; ignore unreadable or malformed templates.
            pass
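# ---------------------------------------------------------------------------
# Illustration only (not part of the original source). set_project() assumes
# gb_m.loadEnv() returns a dict of KEY=VALUE pairs read from the project
# config file; a minimal stand-in parser under that assumption might look like
# the sketch below. The real gb_m.loadEnv() may behave differently.
def _load_env_sketch(config_path):
    env = {}
    with open(config_path) as cfg:
        for line in cfg:
            line = line.strip()
            # skip blanks, comments, and lines without an assignment
            if not line or line.startswith("#") or "=" not in line:
                continue
            key, _, value = line.partition("=")
            env[key.strip()] = value.strip().strip('"')
    return env
# ---------------------------------------------------------------------------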
sys.exit()

if not isdir(source_dir):
    print("ERROR: Bad source directory - see:%s " % (source_dir))
    prog_usage()
    sys.exit()
if not isdir(dest_dir):
    print("ERROR: Bad destination directory - see:%s " % (dest_dir))
    prog_usage()
    sys.exit()

## reformat the input: normalize trailing slashes
if not source_dir.endswith("/"):
    source_dir += "/"
if not dest_dir.endswith("/"):
    dest_dir += "/"
target_dir = dest_dir
if target_dir.endswith("/"):
    target_dir = target_dir[:-1]

project_env = gb.loadEnv("~/.bashrc")
log_file = basename(__file__) + ".log"
if isinstance(project_env, dict) and "LOGS_BASE" in project_env:
    if not isdir(project_env["LOGS_BASE"]):
        gb.mkdir_p(project_env["LOGS_BASE"])
    log_file = join(project_env["LOGS_BASE"],
                    basename(target_dir) + "." + basename(__file__) + ".log")

## rsync the content between the source and destination directories
log = open(log_file, 'w')
log.write("**********************************\n")
log.write("**********************************\n")
log.write("Date:%s\n" % (date.today()))
print("Date:%s\n" % (date.today()))
log.write("\n")
log.write("Log file:%s\n" % (log_file))
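# ---------------------------------------------------------------------------
# Illustration only (not part of the original source). The excerpt stops after
# the log file is opened; a sync step driven from Python would typically shell
# out to rsync along these lines. The flags shown are an assumption, not the
# script's actual command.
import subprocess

def _rsync_sketch(source_dir, dest_dir, log_handle):
    cmd = ["rsync", "-av", source_dir, dest_dir]
    result = subprocess.run(cmd, capture_output=True, text=True)
    log_handle.write(result.stdout)
    if result.returncode != 0:
        log_handle.write("rsync failed:%s\n" % (result.stderr))
    return result.returncode
# ---------------------------------------------------------------------------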
assert False, "unhandled option" if pipeline_config is None or not isfile(pipeline_config): msg="ERROR: pipeline.cfg missing" print("%s - Check %s"%(msg,pipeline_config)) prog_usage() sys.exit() #get project global environment variables # variables of interest for this step: # 1)LOG_BASE # 2)JSON_TEMPLATE # 3)PATH2_JSON_FILES # 4)DESIGN_FILE # 5)READS_BASE # 6)RUN_ID project_env=gb.loadEnv(pipeline_config) if not project_env["LOG_BASE"]: print("ERROR: Log directory missing - see:%s"%(project_env["LOG_BASE"])) print("create the above directory and try again.") sys.exit() if not project_env["PATH2_JSON_FILES"]: print("ERROR: Json files base directory missing - see:%s"%(project_env["PATH2_JSON_FILES"])) print("create the above directory and try again.") sys.exit() if not project_env["ORIGINAL_READS_BASE"]: print("ERROR: Path to Reads files is incorrect - see:%s"%(project_env["ORIGINAL_READS_BASE"])) sys.exit() if not isdir(project_env["LOG_BASE"]): gb.mkdir_p(project_env["LOG_BASE"]) log_file=join(project_env["LOG_BASE"],basename(__file__)+".log")