def func_run_job(str_job):
    """
    Run the command for the job on the commandline.

    * str_job : Command line string to execute.
    * return : Result of Commandline.func_CMD (run through bash).
    """
    cmdl_runner = Commandline.Commandline()
    return cmdl_runner.func_CMD(str_job, f_use_bash=True)
def test_func_cmd_for_piped_command(self):
    """ Test the case of send a piped command """
    # Scratch files and expected values for this test.
    str_file_in = "test_func_cmd_for_piped_command_1.txt"
    str_file_out = "test_func_cmd_for_piped_command_2.txt"
    str_answer = "hello"
    str_expected_answer = "he"
    # Start from a clean slate.
    self.func_remove_files([str_file_in, str_file_out])
    # First write the answer to a file, then pipe it through cut
    # using the answer's third character ('l') as the delimiter.
    str_setup_command = "echo \"" + str_answer + "\" > " + str_file_in
    str_command = ("cat " + str_file_in +
                   " | cut -f 1 -d " + str_answer[2] +
                   " > " + str_file_out)
    # Send both commands.
    cmdl_cur = Commandline.Commandline()
    cmdl_cur.func_CMD(str_command=str_setup_command)
    cmdl_cur.func_CMD(str_command=str_command)
    # Confirm the piped output file holds the cut result (strip newline).
    with open(str_file_out) as hndl_test:
        str_result = hndl_test.read()[:-1]
    self.func_test_equals(str_expected_answer, str_result)
    # Destroy environment.
    self.func_remove_files([str_file_in, str_file_out])
def test_rnaseq_mutation_pipeline_for_args_long(self):
    """
    Tests rnaseq_mutation_pipeline.py for a help call using the long
    argument form (--help).

    BUG FIX: the original docstring described this as the "short" args
    call, but the test name and the command use the long form.
    """
    # Create test environment
    str_command = "python rnaseq_mutation_pipeline.py --help"
    # Run command; --help should exit successfully.
    f_success = Commandline.Commandline().func_CMD(str_command)
    # Test error
    self.assertTrue(f_success, str_command)
def test_rnaseq_mutation_pipeline_for_no_args(self):
    """ Tests rnaseq_mutation_pipeline.py for no args call. """
    # The pipeline requires arguments, so a bare invocation must fail.
    str_command = "python rnaseq_mutation_pipeline.py"
    f_command_worked = Commandline.Commandline().func_CMD(str_command)
    self.assertFalse(f_command_worked, str_command)
def test_func_cmd_for_simple_command_return_exception(self):
    """
    Test the case of sending a simple command and having an exception
    throw in the call (failure)
    """
    # The helper script raises an exception, so func_CMD should
    # report failure (stringified for comparison).
    str_answer = str(False)
    str_command = "python sciedpiper/make_return_code.py -e"
    str_return = str(Commandline.Commandline().func_CMD(str_command=str_command))
    self.func_test_equals(str_answer, str_return)
def test_func_cmd_for_simple_command_return_positive(self):
    """
    Test the case of sending a simple command and getting the
    return value of 99 (failure)
    """
    # A nonzero exit code (99) must be reported as failure.
    str_answer = str(False)
    str_command = "python sciedpiper/make_return_code.py -r 99"
    str_return = str(Commandline.Commandline().func_CMD(str_command=str_command))
    self.func_test_equals(str_answer, str_return)
def test_func_cmd_for_simple_command_return_zero(self):
    """
    Test the case of sending a simple command and getting the
    return value of 0 (success)
    """
    # Set up environment
    str_answer = str(True)
    # BUG FIX: helper-script path made consistent with the sibling
    # return-code tests, which invoke sciedpiper/make_return_code.py;
    # the bare path only worked when run from inside the package dir.
    str_command = "python sciedpiper/make_return_code.py -r 0"
    cmdl_cur = Commandline.Commandline()
    str_return = str(cmdl_cur.func_CMD(str_command=str_command))
    self.func_test_equals(str_answer, str_return)
def test_func_cmd_for_test_mode(self):
    """
    Test that a command sent in test mode reports success but is not
    actually executed (the output file must not be created).
    """
    # Set up environment
    str_test_file = "test_func_cmd_for_simple_command.txt"
    str_answer = "hello"
    self.func_remove_files([str_test_file])
    str_command = "echo \"" + str_answer + "\" > " + str_test_file
    # Send command in test mode and get result
    cmdl_cur = Commandline.Commandline()
    f_success = cmdl_cur.func_CMD(str_command=str_command, f_test=True)
    # Success requires the call to report OK AND the file to be absent
    # (test mode should not actually run the command).
    f_success = f_success and not os.path.exists(str_test_file)
    # BUG FIX: the original computed f_success but never asserted it,
    # so this test could never fail.
    self.assertTrue(f_success, str_command)
    # Destroy environment
    self.func_remove_files([str_test_file])
def test_rnaseq_mutation_pipeline_for_test(self):
    """ Tests rnaseq_mutation_pipeline.py for test mode. """
    # Assemble the pipeline invocation in test mode.
    lstr_call_parts = [
        "python rnaseq_mutation_pipeline.py",
        "--alignment_mode STAR",
        "--variant_call_mode GATK",
        "--threads 8",
        "--plot",
        "--reference", self.str_reference_genome,
        "--left", self.str_left_file,
        "--right", self.str_right_file,
        "--test",
        "--out_dir", "_".join([self.str_testing_area, "vanilla_test"]),
        "--vcf", self.str_reference_vcf,
        self.str_update_command,
    ]
    str_command = " ".join(lstr_call_parts)
    # Run and confirm the pipeline reports success.
    f_success = Commandline.Commandline().func_CMD(str_command)
    self.assertTrue(f_success, str_command)
def test_func_cmd_for_simple_command(self):
    """ Test the case of send a simple command """
    # Scratch file and the value we expect to read back.
    str_scratch_file = "test_func_cmd_for_simple_command.txt"
    str_answer = "hello"
    # Start from a clean slate.
    self.func_remove_files([str_scratch_file])
    # Echo the answer into the scratch file through the shell.
    str_command = "echo \"" + str_answer + "\" > " + str_scratch_file
    cmdl_cur = Commandline.Commandline()
    cmdl_cur.func_CMD(str_command=str_command)
    # Confirm the file was written correctly (strip trailing newline).
    with open(str_scratch_file) as hndl_test:
        str_result = hndl_test.read()[:-1]
    self.func_test_equals(str_answer, str_result)
    # Destroy environment.
    self.func_remove_files([str_scratch_file])
def do_browser_login(self, dry_run=False):
    '''
    Authenticate through the browser via gcloud application-default login.

    :param dry_run: If true, will do a dry run with no actual execution
                    of functionality.
    :return: Authentication token
    '''
    print("BROWSER LOGIN")
    # Dry run: skip gcloud entirely and hand back a placeholder token.
    if dry_run:
        print("DRY_RUN:: Did not login")
        return ("DRY_RUN_TOKEN")
    # NOTE(review): these calls use 'command='/'stdout=' keywords, unlike
    # the 'str_command=' keyword used for func_CMD elsewhere — confirm
    # which Commandline API version is in scope here.
    cmdl_gcloud = Commandline.Commandline()
    cmdl_gcloud.func_CMD(command="gcloud auth application-default login")
    cmd_ret = cmdl_gcloud.func_CMD(
        command="gcloud auth application-default print-access-token",
        stdout=True)
    # Captured stdout is bytes; decode and trim the trailing newline.
    return (cmd_ret.decode("ASCII").strip(os.linesep))
def test_rnaseq_mutation_pipeline_for_gatk_call(self):
    """ Tests rnaseq_mutation_pipeline.py for gatk call. """
    # Create the output directory structure for this run.
    str_output_dir = os.path.join(self.str_testing_area, "vanilla_gatk")
    self.func_make_dummy_dirs([self.str_testing_area, str_output_dir])
    # Assemble the pipeline invocation with GATK variant filtering.
    lstr_call_parts = [
        "python rnaseq_mutation_pipeline.py",
        "--alignment_mode STAR",
        "--variant_call_mode GATK",
        "--threads 8",
        "--plot",
        "--reference", self.str_reference_genome,
        "--left", self.str_left_file,
        "--right", self.str_right_file,
        "--variant_filtering_mode GATK",
        "--out_dir", str_output_dir,
        "--vcf", self.str_reference_vcf,
        self.str_update_command,
    ]
    str_command = " ".join(lstr_call_parts)
    # Run and confirm the pipeline reports success.
    f_success = Commandline.Commandline().func_CMD(str_command)
    self.assertTrue(f_success, str_command)
def test_rnaseq_mutation_pipeline_for_move(self):
    """ Tests rnaseq_mutation_pipeline.py for moving files """
    # Directory the pipeline should move results into.
    str_move_dir = os.path.join(self.str_testing_area, "move_test_runs")
    # Assemble the pipeline invocation with the --move option.
    lstr_call_parts = [
        "python rnaseq_mutation_pipeline.py",
        "--alignment_mode STAR",
        "--variant_call_mode GATK",
        "--threads 8",
        "--plot",
        "--reference", self.str_reference_genome,
        "--left", self.str_left_file,
        "--right", self.str_right_file,
        "--out_dir", "_".join([self.str_testing_area, "vanilla_no_move"]),
        "--vcf", self.str_reference_vcf,
        self.str_update_command,
        "--move", str_move_dir,
    ]
    str_command = " ".join(lstr_call_parts)
    # Make sure the move target exists before running.
    if not os.path.exists(str_move_dir):
        os.mkdir(str_move_dir)
    # Run and confirm the pipeline reports success.
    f_success = Commandline.Commandline().func_CMD(str_command)
    self.assertTrue(f_success, str_command)
c_ACCESS_REMOVE = "Remove" c_PERMISSIONS = [c_ACCESS_EDIT, c_ACCESS_REVIEWER, c_ACCESS_VIEW, c_ACCESS_REMOVE] # SCP specific c_CLUSTER_FILE_TYPE = "Cluster" c_TEXT_TYPE = "text/plain" c_INVALID_STUDYDESC_CHAR = ["<",".","+","?",">"] c_VALID_STUDYNAME_CHAR = string.ascii_letters + string.digits + "".join([" ","-",".","/","(",")","+",",",":"]) # Matrix API specific c_MATRIX_API_OK = 200 c_MATRIX_REQUEST_API_OK = 202 c_MATRIX_BAD_FORMAT = 102 c_MATRIX_BAD_FORMAT_TEXT = "The requested format is not supported in the service." cmdline = Commandline.Commandline() class APIManager: ''' Base class for REST API interaction. Handles common operations. ''' def __init__(self): return def login(self, token=None, dry_run=False, api_base='https://portals.broadinstitute.org/single_cell/api/v1/'): """ Authenticates as user and get's token to perform actions on the user's behalf. :param token: User token to use with API
if parsed_args.validate and not hasattr(parsed_args, "summarize_list"): print("VALIDATE FILES") command = ["verify_portal_file.py"] if hasattr(parsed_args, "cluster_file"): command.extend(["--coordinates-file", parsed_args.cluster_file]) if hasattr(parsed_args, "expression_file"): command.extend(["--expression-files", parsed_args.expression_file]) if hasattr(parsed_args, "metadata_file"): command.extend(["--metadata-file", parsed_args.metadata_file]) if parsed_args.dry_run: print("TESTING:: no command executed." + os.linesep + "Would have executed: " + os.linesep + " ".join(command)) else: valid_code = Commandline.Commandline().func_CMD(" ".join(command)) print(valid_code) if not valid_code: print( "There was an error validating the files, did not upload. Code=" + str(valid_code)) exit(valid_code) ## Upload cluster file if hasattr(parsed_args, "cluster_file"): print("UPLOAD CLUSTER FILE") connection = login(manager=connection, dry_run=parsed_args.dry_run) ret = connection.upload_cluster( file=parsed_args.cluster_file, study_name=parsed_args.study_name, cluster_name=parsed_args.cluster_name,
def func_run_sample(self, lstr_sample_info):
    """
    Run the pipeline for one sample.

    * lstr_sample_info : Sample info handed to func_update_command to
                         decide whether a wrapper script is needed.
    * return : Result of running the script / commands, or True when
               only a JSON file was written.
    """
    # str_script is None indicates a bash script was not made and the raw
    # command can be run. This is a case of running a pipeline locally
    # without sample.txt files or config files that update aspects of the
    # pipeline that would require a script to encapsulate those changes,
    # for example a Path change.
    # Also a dispatch command outside of local dispatching will also
    # require a script to be ran,
    # Check to see if a script needs to be made and ran.
    # Happens on certain proccessing requirements
    # like updating environmental variables with pipeline
    # config files.
    str_script = self.func_update_command(lstr_sample_info)
    if str_script:
        # A wrapper script was produced; run it through bash.
        return(Commandline.Commandline().func_CMD(str_script,
                                                  f_use_bash=True))
    elif str_script is None:
        # No script needed: build and run the commands in-process.
        # Holds the commands to run
        lcmd_commands = []
        ## Output dir related
        # If the output dir is not specified then move and copy functions are disabled
        f_archive = True
        if(not hasattr(self.ns_arguments, Arguments.C_STR_OUTPUT_DIR)
           or not self.ns_arguments.str_out_dir):
            f_archive = False
        ## Make output directory
        PipelineRunner.func_make_output_dir(self.ns_arguments)
        # Make pipeline object and indicate Log file
        # (falls back to custom_log.txt in the output dir when no log
        # file argument is present).
        pline_cur = Pipeline.Pipeline(str_name=self.prog,
                                      str_log_to_file=self.ns_arguments.str_log_file if hasattr(self.ns_arguments, "str_log_file") else os.path.join(self.ns_arguments.str_out_dir, "custom_log.txt"),
                                      str_update_source_path=self.ns_arguments.str_update_classpath if hasattr(self.ns_arguments, "str_update_classpath") else None)
        # Update the logger with the arguments
        if self.version:
            str_version_log = "".join(["PipelineRunner.func_run_sample:: ",
                                       "Pipeline version:",
                                       str(self.version), "\n",
                                       "PipelineRunner.func_run_sample:: ",
                                       "The call to the pipeline was: ",
                                       " ".join(["\n"] + sys.argv + ["\n"]),
                                       "PipelineRunner.func_run_sample:: ",
                                       "This run was started with the ",
                                       "following arg.\n"])
            str_args_log = "\n".join([str(str_namespace_key) + " = " + str(str_namespace_value)
                                      for str_namespace_key, str_namespace_value
                                      in vars(self.ns_arguments).items()] + ["\n"])
            pline_cur.logr_logger.info(str_version_log)
            pline_cur.logr_logger.info(str_args_log)
        # Put pipeline in test mode if needed.
        if hasattr(self.ns_arguments, "f_Test") and self.ns_arguments.f_Test:
            pline_cur.func_test_mode()
        # Turn off archiving if output directory was not given
        if hasattr(self.ns_arguments, "f_archive") and not f_archive:
            pline_cur.logr_logger.warning("PipelineRunner.func_run_sample:: Turning off archiving, please specify an output directory if you want this feature enabled.")
            pline_cur.f_archive = False
        # Run the user based pipeline
        # If the commands are not existent (parsed from JSON)
        # then build them from script
        # Where variables are being used.
        #if self.ns_arguments.str_wdl:
        #    # If WDL is being output, switch the values of the arguments
        #    # with the name of the argument allowing us to track them,
        #    import inspect
        #    import copy
        #    ns_wdl_arguments = copy.deepcopy(self.ns_arguments)
        #    lstr_members = [member[0] for member in inspect.getmembers(ns_wdl_arguments)
        #                    if not (member[0].startswith("_") or member[0].endswith("_") or inspect.isroutine(member))]
        #    for str_member in lstr_members:
        #        setattr(ns_wdl_arguments, str_member, "${"+str_member+"}".encode('utf-8'))
        #    lcmd_commands = self.func_make_commands(args_parsed = ns_wdl_arguments, cur_pipeline = pline_cur)
        #else:
        lcmd_commands = self.func_make_commands(args_parsed=self.ns_arguments,
                                                cur_pipeline=pline_cur)
        # Write JSON file and return early; the commands are not run in
        # this mode.
        if hasattr(self.ns_arguments, "str_json_file_out") and self.ns_arguments.str_json_file_out:
            JSONManager.JSONManager.func_pipeline_to_json(lcmd_commands=lcmd_commands,
                                                          dict_args=vars(self.ns_arguments),
                                                          str_file=self.ns_arguments.str_json_file_out,
                                                          f_pretty=True)
            pline_cur.logr_logger.info("Writing JSON file to: " + self.ns_arguments.str_json_file_out)
            return(True)
        # Run commands
        # Default any optional namespace entries the runner reads below.
        if not hasattr(self.ns_arguments, "lstr_copy"):
            setattr(self.ns_arguments, "lstr_copy", None)
        if not hasattr(self.ns_arguments, "str_move_dir"):
            setattr(self.ns_arguments, "str_move_dir", None)
        if not hasattr(self.ns_arguments, "str_compress"):
            setattr(self.ns_arguments, "str_compress", "none")
        if not hasattr(self.ns_arguments, "f_clean"):
            setattr(self.ns_arguments, "f_clean", False)
        if not hasattr(self.ns_arguments, "i_time_stamp_diff"):
            setattr(self.ns_arguments, "i_time_stamp_diff", None)
        # Hand the commands to the pipeline for execution.
        return(pline_cur.func_run_commands(lcmd_commands=lcmd_commands,
                                           str_output_dir=self.ns_arguments.str_out_dir,
                                           f_clean=self.ns_arguments.f_clean,
                                           f_self_organize_commands=self.ns_arguments.f_graph_organize,
                                           li_wait=[int(str_wait) for str_wait in self.ns_arguments.lstr_wait.split(",")],
                                           lstr_copy=self.ns_arguments.lstr_copy if self.ns_arguments.lstr_copy else None,
                                           str_move=self.ns_arguments.str_move_dir if self.ns_arguments.str_move_dir else None,
                                           str_compression_mode=self.ns_arguments.str_compress,
                                           i_time_stamp_wiggle=self.ns_arguments.i_time_stamp_diff,
                                           #str_wdl=self.ns_arguments.str_wdl,
                                           str_dot_file=self.ns_arguments.str_dot_path,
                                           i_benchmark_secs=self.ns_arguments.i_mem_benchmark,
                                           args_original=None
                                           ))