def __push_flowcells_into_relevant_pipelines__(self,configs,mockdb): """ Provides the interface from which all post casava flowcell pipelines are run. """ if configs["system"].get("Logging","debug") is "True": print " Starting post casava flowcell pipelines for " + self.flowcell_key flowcell_dir_name = os.path.basename(self.output_dir) automation_parameters_config = MyConfigParser() automation_parameters_config.read(configs["system"].get("Filenames","automation_config")) running_location = "Speed" parsed = parse_sample_sheet(configs['system'],mockdb,self.output_dir) description = parsed['description'].replace(parsed['SampleID']+'_','') description_dict = parse_description_into_dictionary(description) if 'Pipeline' in description_dict: pipeline_key = description_dict['Pipeline'] else: description_pieces = parsed['description'].split('-') pipeline_key = description_pieces[-1] if pipeline_key.startswith('CCGL'): pipeline_key='CCGL' pipeline_name = automation_parameters_config.safe_get("Flowcell pipeline",pipeline_key) if pipeline_name is None: return 1 if configs["system"].get("Logging","debug") is "True": print "Starting " + pipeline_name pipeline_config = MyConfigParser() pipeline_config.read(configs["system"].get('Pipeline',pipeline_name)) pipeline = mockdb[pipeline_name].__new__(configs['system'],input_dir=self.output_dir,pipeline_key=pipeline_key,seq_run_key=self.seq_run_key,project=parsed['project_name'],flowcell_dir_name=flowcell_dir_name,pipeline_config=pipeline_config,**parsed) return 1
def things_to_do_if_initializing_flowcell_pipeline_with_input_directory(configs,storage_devices,mockdb,source_dir,pipeline_name=None,base_output_dir=None): """ Starts pipelines that read the entire flowcell data. """ if configs["system"].get("Logging","debug") is "True": print " Starting post casava flowcell pipelines" flowcell_dir_name = os.path.basename(source_dir) automation_parameters_config = MyConfigParser() automation_parameters_config.read(configs["system"].get("Filenames","automation_config")) running_location = "Speed" parsed = parse_sample_sheet(configs['system'],mockdb,source_dir) description = parsed['description'].replace(parsed['SampleID']+'_','') description_dict = parse_description_into_dictionary(description) if configs["system"].get("Logging","debug") is "True": print " Description = " + str(parsed['description']) if 'Pipeline' in description_dict: pipeline_key = description_dict['Pipeline'] else: description_pieces = parsed['description'].split('_') pipeline_key = description_pieces[-1] if pipeline_key.startswith('CCGL'): pipeline_key='CCGL' pipeline_name_check = automation_parameters_config.safe_get("Flowcell pipeline",pipeline_key) if pipeline_name_check != pipeline_name: return 1 if pipeline_name is None: return 1 if configs["system"].get("Logging","debug") is "True": print "Starting " + pipeline_name pipeline = mockdb[pipeline_name].__new__(configs['system'],input_dir=source_dir,pipeline_key=pipeline_key,seq_run_key=None,project=parsed['project_name'],flowcell_dir_name=flowcell_dir_name,running_location='Speed',pipeline_config=configs["pipeline"],**parsed) return 1
def __push_samples_into_relevant_pipelines__(self,configs,mockdb): """ Provides the interface from which all post casava sample pipelines are run. """ if configs["system"].get("Logging","debug") is "True": print " Starting post casava sample pipelines for " + self.flowcell_key print " Determining Sample dirs" sample_dirs = list_project_sample_dirs(self.output_dir.split(":")) if configs["system"].get("Logging","debug") is "True": print " Samples: " + str(sample_dirs) flowcell_dir_name = os.path.basename(self.output_dir) automation_parameters_config = MyConfigParser() automation_parameters_config.read(configs["system"].get("Filenames","automation_config")) fastqc_pipeline_config = MyConfigParser() fastqc_pipeline_config.read(configs["system"].get("Pipeline","FastQCPipeline")) for project in sample_dirs: for sample in sample_dirs[project]: #running_location = identify_running_location_with_most_currently_available(configs,storage_devices) running_location = "Speed" parsed = parse_sample_sheet(configs['system'],mockdb,sample_dirs[project][sample][0]) if configs["system"].get("Logging","debug") is "True": print " Pushing fastqc pipeline for " + sample fastqc_pipeline = mockdb["FastQCPipeline"].__new__(configs['system'],input_dir=sample_dirs[project][sample][0],flowcell_dir_name=flowcell_dir_name,project=parsed['project_name'],pipeline_config=fastqc_pipeline_config,seq_run_key=self.seq_run_key,**parsed) description_dict = parse_description_into_dictionary(parsed['description']) if 'Pipeline' in description_dict: pipeline_key = description_dict['Pipeline'] else: description_pieces = parsed['description'].split('-') pipeline_key = description_pieces[-1] pipeline_name = automation_parameters_config.safe_get("Pipeline",pipeline_key) if pipeline_name is None: continue if configs["system"].get("Logging","debug") is "True": print "Starting " + pipeline_name + " for " + sample pipeline = 
mockdb[pipeline_name].__new__(configs['system'],input_dir=sample_dirs[project][sample][0],pipeline_key=pipeline_key,seq_run_key=self.seq_run_key,project=parsed['project_name'],flowcell_dir_name=flowcell_dir_name,**parsed)
def things_to_do_if_initializing_pipeline_with_input_directory(configs, storage_devices, mockdb, source_dir, pipeline_name=None, base_output_dir=None, combine_projects=True):
    """
    Starts per-sample pipelines for the samples found under source_dir.

    For each sample whose description maps (via the automation config) to the
    requested pipeline_name, instantiates that pipeline in mockdb and then
    makes a best-effort attempt to register the sequencing run for the
    sample's flowcell if it is not already known.  Returns 1.
    """
    if combine_projects:  # BUGFIX: was `combine_project` -> NameError at runtime.
        # BUGFIX: sample_dirs was indexed before being created; build the dict
        # directly with all samples grouped under one dummy project.
        sample_dirs = {"dummy_project": list_sample_dirs(source_dir)}
    else:
        sample_dirs = list_project_sample_dirs(source_dir)
    # NOTE(review): target_config is read but never used below — kept for now;
    # confirm whether it can be removed.
    target_config = MyConfigParser()
    target_config.read(configs["system"].get("Filenames", "target_config"))
    for project in sample_dirs:
        for sample in sample_dirs[project]:
            running_location = identify_running_location_with_most_currently_available(configs, storage_devices)
            parsed = parse_sample_sheet(configs['system'], mockdb, sample_dirs[project][sample][0])
            if base_output_dir is None:
                base_output_dir = configs['pipeline'].get('Common_directories', 'archive_directory')
            automation_parameters_config = MyConfigParser()
            automation_parameters_config.read(configs["system"].get("Filenames", "automation_config"))
            description_dict = parse_description_into_dictionary(parsed['description'])
            if 'Pipeline' in description_dict:
                pipeline_key = description_dict['Pipeline']
            else:
                description_pieces = parsed['description'].split('_')
                pipeline_key = description_pieces[-1]
            pipeline_name_for_sample = automation_parameters_config.safe_get("Pipeline", pipeline_key)
            if pipeline_name_for_sample != pipeline_name:
                # This sample belongs to a different pipeline; skip it.
                continue
            mockdb[pipeline_name].__new__(
                configs['system'],
                input_dir=sample_dirs[project][sample][0],
                pipeline_config=configs["pipeline"],
                project=parsed['project_name'],
                pipeline_key=pipeline_key,
                **parsed)
            # BUGFIX: this lookup was duplicated verbatim in the original.
            flowcell_dict = mockdb['SequencingRun'].__attribute_value_to_object_dict__('flowcell_key')
            if parsed['flowcell'].key in flowcell_dict:
                # Sequencing run already registered for this flowcell.
                seq_run = flowcell_dict[parsed['flowcell'].key]
            else:
                try:
                    base_dir = get_sequencing_run_base_dir(source_dir)
                    [date, machine_key, run_number, side, flowcell_key] = parse_sequencing_run_dir(base_dir)
                    machine = mockdb['HiSeqMachine'].__get__(configs['system'], machine_key)
                    run_type = determine_run_type(base_dir)
                    # BUGFIX: original passed an undefined name `flowcell`;
                    # the parsed sample sheet's flowcell object is intended.
                    seq_run = mockdb['SequencingRun'].__new__(
                        configs['system'], parsed['flowcell'], machine, date, run_number,
                        output_dir=base_dir, side=side, run_type=run_type)
                    fill_demultiplex_stats(configs['system'], mockdb, seq_run.output_dir, parsed['flowcell'], machine)
                except Exception:
                    # Best-effort: failing to register the sequencing run must
                    # not abort pipeline creation for the remaining samples.
                    pass
    return 1