def main(demux_fcid_dir, restrict_to_projects=None, restrict_to_samples=None):
    """Launch flowcell-level analysis for a fixed batch of demultiplexed
    flowcells, then poll Charon forever, (re)launching sample-level
    analysis for every project that has data on the filesystem.

    NOTE(review): all three parameters are accepted but IGNORED -- every
    flowcell path below is hard-coded and process_demultiplexed_flowcell
    is always called with (path, None, None). The signature is kept
    unchanged for backward compatibility with existing callers.
    """
    # (flowcell INBOX path, human-readable note); processed strictly in order.
    flowcells = [
        ("/proj/a2014205/INBOX/140528_D00415_0049_BC423WACXX",
         "G.Grigelioniene_14_01"),
        ("/proj/a2014205/INBOX/140702_D00415_0052_AC41A2ANXX",
         "M.Kaller_14_06 sample P1171_102, P1171_104, P1171_106, P1171_108"),
        ("/proj/a2014205/INBOX/140905_D00415_0057_BC45KVANXX",
         "M.Kaller_14_06 sample P1171_102, P1171_104, P1171_106 ---- rerun"),
        ("/proj/a2014205/INBOX/140815_SN1025_0222_AC4HA6ACXX",
         "M.Kaller_14_05 sample P1170_101, P1170_103, P1170_105 / "
         "M.Kaller_14_08 sample P1272_101, P1272_104"),
        ("/proj/a2014205/INBOX/140815_SN1025_0223_BC4HAPACXX",
         "M.Kaller_14_05 sample P1170_101, P1170_103, P1170_105 / "
         "M.Kaller_14_08 sample P1272_101, P1272_104"),
        ("/proj/a2014205/INBOX/140919_SN1018_0203_BHA3THADXX",
         "M.Kaller_14_05 P1170_103, P1170_105 --- rerun"),
        # UPPSALA runs below.
        ("/proj/a2014205/INBOX/140821_D00458_0029_AC45JGANXX",
         "uppsala run"),
        ("/proj/a2014205/INBOX/140917_D00458_0034_AC4FF3ANXX",
         "-- rerun"),
    ]
    for fcid_dir, _note in flowcells:
        process_demultiplexed_flowcell(fcid_dir, None, None)
        time.sleep(60)  # wait one minute between flowcell launches

    # Give the freshly submitted jobs time to register before the first poll.
    time.sleep(3800)
    charon_session = CharonSession()
    while True:
        # Sync the local job DB and Charon with current job states.
        update_charon_with_local_jobs_status()
        # Grab all projects from Charon; launch analysis for any project
        # whose data directory exists locally.
        projects_dict = charon_session.projects_get_all()['projects']
        for project_charon in projects_dict:
            project_name = project_charon["name"]
            project_dir = os.path.join(
                "/proj/a2014205/nobackup/NGI/analysis_ready/DATA",
                project_name)
            if os.path.isdir(project_dir):
                projectObj = recreate_project_from_filesystem(project_dir, None)
                launch_analysis_for_samples([projectObj])
        time.sleep(3800)  # poll interval between Charon sweeps
def main(demux_fcid_dir, restrict_to_projects=None, restrict_to_samples=None):
    """Process a hard-coded batch of demultiplexed flowcells, then loop
    forever syncing job status with Charon and launching analysis for
    every project found under the local analysis_ready DATA directory.
    """
    def _run_flowcell(path):
        # Launch flowcell-level processing, then pause a minute before
        # the next submission.
        process_demultiplexed_flowcell(path, None, None)
        time.sleep(60)

    # G.Grigelioniene_14_01
    _run_flowcell("/proj/a2014205/INBOX/140528_D00415_0049_BC423WACXX")
    # M.Kaller_14_06 sample P1171_102, P1171_104, P1171_106, P1171_108
    _run_flowcell("/proj/a2014205/INBOX/140702_D00415_0052_AC41A2ANXX")
    # M.Kaller_14_06 sample P1171_102, P1171_104, P1171_106 ---- rerun
    _run_flowcell("/proj/a2014205/INBOX/140905_D00415_0057_BC45KVANXX")
    # M.Kaller_14_05 sample P1170_101, P1170_103, P1170_105
    # M.Kaller_14_08 sample P1272_101, P1272_104
    _run_flowcell("/proj/a2014205/INBOX/140815_SN1025_0222_AC4HA6ACXX")
    _run_flowcell("/proj/a2014205/INBOX/140815_SN1025_0223_BC4HAPACXX")
    # M.Kaller_14_05 P1170_103, P1170_105 --- rerun
    _run_flowcell("/proj/a2014205/INBOX/140919_SN1018_0203_BHA3THADXX")
    # UPPSALA
    _run_flowcell("/proj/a2014205/INBOX/140821_D00458_0029_AC45JGANXX")  # uppsala run
    _run_flowcell("/proj/a2014205/INBOX/140917_D00458_0034_AC4FF3ANXX")  # -- rerun

    # Let the submitted jobs settle before the first Charon status poll.
    time.sleep(3800)
    charon_session = CharonSession()
    while True:
        # This updates the local DB and Charon accordingly.
        update_charon_with_local_jobs_status()
        # Grab all projects from Charon and relaunch where data exists.
        for project_charon in charon_session.projects_get_all()['projects']:
            project_dir = os.path.join(
                "/proj/a2014205/nobackup/NGI/analysis_ready/DATA",
                project_charon["name"])
            if os.path.isdir(project_dir):
                project_obj = recreate_project_from_filesystem(project_dir, None)
                launch_analysis_for_samples([project_obj])
        time.sleep(3800)
import argparse
import os

from ngi_pipeline.conductor.flowcell import process_demultiplexed_flowcell

if __name__ == '__main__':
    # Command-line front end: collect optional project/sample restrictions
    # and the flowcell directory, then hand off to the flowcell processor.
    parser = argparse.ArgumentParser("Launch seqrun-level analysis.")
    parser.add_argument("-p", "--project",
                        dest="restrict_to_projects", action="append",
                        help=("Restrict processing to these projects. "
                              "Use flag multiple times for multiple projects."))
    parser.add_argument("-s", "--sample",
                        dest="restrict_to_samples", action="append",
                        help=("Restrict processing to these samples. "
                              "Use flag multiple times for multiple samples."))
    parser.add_argument("-f", "--restart-failed",
                        dest="restart_failed_jobs", action="store_true",
                        help=("Restart jobs marked as 'FAILED' in Charon"))
    parser.add_argument("demux_fcid_dir", nargs="?", action="store",
                        help=("The path to the Illumina demultiplexed fc directories "
                              "to process."))
    args_ns = parser.parse_args()
    process_demultiplexed_flowcell(args_ns.demux_fcid_dir,
                                   args_ns.restrict_to_projects,
                                   args_ns.restrict_to_samples,
                                   args_ns.restart_failed_jobs)
args = parser.parse_args() # The following option will be available only if the script has been called with the 'analyze' option if args.__dict__.get('restart_all_jobs'): args.restart_failed_jobs = True args.restart_finished_jobs = True args.restart_running_jobs = True # Finally execute corresponding functions ### TODO change to work with multiple flowcells if 'analyze_fc_dir' in args: LOG.info("Starting flowcell analysis in directory {}".format(args.analyze_fc_dir)) flowcell.process_demultiplexed_flowcell(args.analyze_fc_dir, args.restrict_to_projects, args.restrict_to_samples, args.restart_failed_jobs, args.restart_finished_jobs, args.restart_running_jobs, quiet=args.quiet, manual=True) ### TODO change to work with multiple projects elif 'analyze_project_dir' in args: project = recreate_project_from_filesystem(project_dir=args.analyze_project_dir, restrict_to_samples=args.restrict_to_samples) if project and os.path.split(project.base_path)[1] == "DATA": project.base_path = os.path.split(project.base_path)[0] launchers.launch_analysis([project], restart_failed_jobs=args.restart_failed_jobs, restart_finished_jobs=args.restart_finished_jobs, restart_running_jobs=args.restart_running_jobs, quiet=args.quiet,