def groups(self):
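		"""Return the scan group names for this subject, sorted alphabetically.

		The names are derived from the available functional preprocessing
		directories in the archive (the directory name up to "_preproc").
		"""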
		subject_info = ccf_subject.SubjectInfo(self.project, self.subject, self.classifier)
		preproc_dirs = self.archive.available_functional_preproc_dir_full_paths(subject_info)
		groupsA = []
		for preproc_dir in preproc_dirs:
			groupsA.append(preproc_dir[preproc_dir.rindex(os.sep)+1:preproc_dir.index("_preproc")])
		groupsA.sort()
		return groupsA
    def create_process_data_job_script(self):
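        """Write the PBS job script that runs Diffusion preprocessing.

        The script requests a GPU node (gpus=1:K20x), loads CUDA and the
        Singularity module, and invokes the QuNex run script inside the
        container with --hcppipelineprocess=DiffusionPreprocessing and a
        --boldlist built from self.groups.
        """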
        module_logger.debug(debug_utils.get_name())

        xnat_pbs_jobs_control_folder = os_utils.getenv_required(
            'XNAT_PBS_JOBS_CONTROL')

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier, self.scan)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        mem_limit_str = str(self.mem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN) + ':gpus=1:K20x'
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + mem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name
        load_cuda = "module load cuda-9.1"

        xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
        xnat_pbs_setup_singularity_process = 'singularity exec --nv -B ' + xnat_pbs_jobs_control_folder + ':/opt/xnat_pbs_jobs_control,' \
                 + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
                 + ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
                 + ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
        parameter_line = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
        studyfolder_line = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
        subject_line = '  --subjects=' + self.subject + '_' + self.classifier
        overwrite_line = '  --overwrite=yes'
        hcppipelineprocess_line = '  --hcppipelineprocess=DiffusionPreprocessing'

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(load_cuda + os.linesep)
            script.write(xnat_pbs_setup_singularity_load + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_process + ' \\' +
                         os.linesep)
            script.write(parameter_line + ' \\' + os.linesep)
            script.write(studyfolder_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(overwrite_line + ' \\' + os.linesep)
            self._group_list = []
            script.write('  --boldlist="' + self._expand(self.groups) +
                         '" \\' + os.linesep)
            script.write(hcppipelineprocess_line + os.linesep)
            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
Example #3
	def groups(self):
		subject_info = ccf_subject.SubjectInfo(self.project, self.subject, self.classifier)
		preproc_dirs = self.archive.available_functional_preproc_dir_full_paths(subject_info)
		groupsA = []
		for preproc_dir in preproc_dirs:
			groupsA.append(preproc_dir[preproc_dir.rindex(os.sep)+1:preproc_dir.index("_preproc")])
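		# Sort by a fixed scan-priority order (REST1 runs, task runs, REST2 runs)
		# rather than alphabetically; a name missing from the list raises ValueError.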
		def fmrisort(x):
			priority = [ "rfMRI_REST1_AP","rfMRI_REST1_PA","rfMRI_REST1a_PA","rfMRI_REST1a_AP","rfMRI_REST1b_PA","rfMRI_REST1b_AP",
				"tfMRI_GUESSING_PA","tfMRI_GUESSING_AP","tfMRI_VISMOTOR_PA","tfMRI_CARIT_PA","tfMRI_CARIT_AP","tfMRI_EMOTION_PA","tfMRI_FACENAME_PA",
				"rfMRI_REST2_AP","rfMRI_REST2_PA","rfMRI_REST2a_AP","rfMRI_REST2a_PA","rfMRI_REST2b_AP","rfMRI_REST2b_PA" ]
			return priority.index(x)
		groupsA = sorted(groupsA, key=fmrisort)
		return groupsA
    def groups(self):
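        """Return the scan group names for this subject.

        The names come from the available functional preprocessing directories
        (diffusion is not used here; see the commented-out call below), in the
        order returned by the archive.
        """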
        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier)
        # diffusion not working, commenting it out and using functional
        #preproc_dirs = self.archive.available_diffusion_preproc_dir_full_paths(subject_info)
        preproc_dirs = self.archive.available_functional_preproc_dir_full_paths(
            subject_info)
        groupsA = []
        for preproc_dir in preproc_dirs:
            groupsA.append(preproc_dir[preproc_dir.rindex(os.sep) +
                                       1:preproc_dir.index("_preproc")])

        #groupsA.sort()
        return groupsA
                        dest='output',
                        required=False,
                        type=str)
    parser.add_argument('-a',
                        '--check-all',
                        dest='check_all',
                        action='store_true',
                        required=False,
                        default=False)

    # parse the command line arguments
    args = parser.parse_args()

    # check the specified subject for structural preprocessing completion
    subject_info = ccf_subject.SubjectInfo(project='irrelevant',
                                           subject_id=args.subject,
                                           classifier=args.classifier)
    completion_checker = OneSubjectCompletionChecker()

    if args.output:
        processing_output = open(args.output, 'w')
    else:
        processing_output = sys.stdout

    if completion_checker.is_processing_complete(
            working_dir=args.working_dir,
            fieldmap=args.fieldmap,
            subject_info=subject_info,
            verbose=args.verbose,
            output=processing_output,
            short_circuit=not args.check_all):
Example #6
                        dest='output',
                        required=False,
                        type=str)
    parser.add_argument('-a',
                        '--check-all',
                        dest='check_all',
                        action='store_true',
                        required=False,
                        default=False)

    # parse the command line arguments
    args = parser.parse_args()

    # check the specified subject and scan for functional preprocessing completion
    #subject_info = ccf_subject.SubjectInfo(project=args.project,subject_id=args.subject,classifier=args.classifier,extra=args.scan)
    subject_info = ccf_subject.SubjectInfo('irrelevant', args.subject,
                                           args.classifier, args.scan)
    completion_checker = OneSubjectCompletionChecker()

    if args.output:
        processing_output = open(args.output, 'w')
    else:
        processing_output = sys.stdout

    if completion_checker.is_processing_complete(
            working_dir=args.working_dir,
            subject_info=subject_info,
            verbose=args.verbose,
            output=processing_output,
            short_circuit=not args.check_all):
        print("Exiting with 0 code - Completion Check Successful")
        exit(0)
		qstat_queued_cmd = 'qstat -u ' + USER
		qstat_queued_cmd += ' | grep ' + subject_info.subject_id + '.Struc'
		qstat_queued_cmd += ' | grep " Q "'
		
		qstat_stream = platform.popen(qstat_queued_cmd, "r")
		qstat_results = qstat_stream.readline()
		qstat_stream.close()

		if qstat_results:
			return 'Q'

		return None
		
if __name__ == "__main__":
	subject = ccf_subject.SubjectInfo(sys.argv[1], sys.argv[2], sys.argv[3])
	status_checker = OneSubjectRunStatusChecker()	
	if status_checker.get_queued_or_running(subject):
		print("-----")
		print("project: " + subject.project)
		print("subject: " + subject.subject_id)
		print("session classifier: " + subject.classifier)
		print("JOB IS ALREADY QUEUED OR RUNNING")
	else:
		print ("-----")		
		print("project: " + subject.project)
		print("subject: " + subject.subject_id)
		print("session classifier: " + subject.classifier)
		print("JOB IS NOT RUNNING")
  	
if __name__ == '__main__':
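    # Submit jobs for a single subject passed on the command line as a
    # colon-separated "project:subject:classifier:extra" string, or for every
    # subject listed in the subjects file for this script.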

    logging.config.fileConfig(
        file_utils.get_logging_config_file_name(__file__),
        disable_existing_loggers=False)

    # get Database credentials
    xnat_server = os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')
    userid, password = user_utils.get_credentials(xnat_server)

    # get list of subjects to process
    subjectArg = sys.argv[1] if len(
        sys.argv) > 1 and sys.argv[1].count(":") > 1 else None
    if (subjectArg):
        # Pull from first argument if passed
        print("Retrieving subject list passed argument: " + subjectArg)
        subject_list = []
        subjectArg = subjectArg.strip()
        (project, subject_id, classifier, extra) = subjectArg.split(":")
        subject_info = ccf_subject.SubjectInfo(project, subject_id, classifier,
                                               extra)
        subject_list.append(subject_info)
    else:
        # Otherwise, pull from file
        subject_file_name = file_utils.get_subjects_file_name(__file__)
        print("Retrieving subject list from: " + subject_file_name)
        subject_list = ccf_subject.read_subject_info_list(subject_file_name,
                                                          separator=":")

    do_submissions(userid, password, subject_list)
    def create_process_data_job_script(self):
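        """Write the PBS job script that runs Structural preprocessing.

        The working directory is temporarily moved to scratch space (to avoid
        "Cannot allocate memory" errors in the build space), the QuNex
        container is run via Singularity with
        --hcppipelineprocess=StructuralPreprocessing, and the processing
        directory is moved back afterwards.
        """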

        project_build_dir = self.build_home + os.sep + self.project
        pipeline_processing_dir = self.working_directory_name.replace(
            project_build_dir + os.sep, '')
        scratch_processing_dir = self._SCRATCH_PROCESSING_DIR + os.sep + self.project
        if not os.path.exists(scratch_processing_dir):
            os.mkdir(scratch_processing_dir)

        module_logger.debug(debug_utils.get_name())

        xnat_pbs_jobs_control_folder = os_utils.getenv_required(
            'XNAT_PBS_JOBS_CONTROL')

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN) + ':haswell'
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str
        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name
        xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
        xnat_pbs_setup_singularity_process = 'singularity exec -B ' \
                 + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
                 + ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
                 + ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
        parameter_line = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
        #studyfolder_line   = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
        studyfolder_line = '  --studyfolder=' + scratch_processing_dir + os.sep + pipeline_processing_dir + os.sep + self.subject + '_' + self.classifier
        subject_line = '  --subjects=' + self.subject + '_' + self.classifier
        overwrite_line = '  --overwrite=yes'
        hcppipelineprocess_line = '  --hcppipelineprocess=StructuralPreprocessing'
        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_load + os.linesep)
            script.write(os.linesep)
            script.write(
                '# TEMPORARILY MOVE PROCESSING DIRECTORY TO SCRATCH SPACE DUE TO "Cannot allocate memory" ERRORS IN BUILD SPACE'
                + os.linesep)
            script.write('mv ' + self.working_directory_name + " " +
                         scratch_processing_dir + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_process + ' \\' +
                         os.linesep)
            script.write(parameter_line + ' \\' + os.linesep)
            script.write(studyfolder_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(overwrite_line + ' \\' + os.linesep)
            script.write(hcppipelineprocess_line + os.linesep)
            script.write(os.linesep)
            script.write('# MOVE PROCESSING BACK' + os.linesep)
            script.write('mv ' + scratch_processing_dir + os.sep +
                         pipeline_processing_dir + ' ' + project_build_dir +
                         os.linesep)
            script.write(os.linesep)
            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
Example #10
    def create_process_data_job_script(self):
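        """Write the PBS job script for this pipeline's .XNAT_PROCESS script.

        The .XNAT_PROCESS script is copied into the working directory, and the
        job script runs the pipeline script inside the Singularity container
        with TOPUP distortion correction and Prisma 3T gradient coefficient
        options.
        """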
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_PROCESS script to the working directory
        processing_script_source_path = self.xnat_pbs_jobs_home
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += '.XNAT_PROCESS'

        processing_script_dest_path = self.working_directory_name
        processing_script_dest_path += os.sep + self.PIPELINE_NAME
        processing_script_dest_path += '.XNAT_PROCESS'

        shutil.copy(processing_script_source_path, processing_script_dest_path)
        os.chmod(processing_script_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the process data job script (that calls the .XNAT_PROCESS script)

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier, self.scan)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        xnat_pbs_setup_line = 'source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name()
        xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
        xnat_pbs_setup_singularity_process = 'singularity exec -B ' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
                 + ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
                 + ',' + self._get_xnat_pbs_setup_script_freesurfer_license_path() + ':/export/freesurfer_license' \
                 + ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + processing_script_source_path
        subject_line = '  --subject=' + self.subject
        scan_line = '  --scan=' + self.scan
        session_classifier_line = '  --classifier=' + self.classifier
        dcmethod_line = '  --dcmethod=TOPUP'
        topupconfig_line = '  --topupconfig=b02b0.cnf'
        gdcoeffs_line = '  --gdcoeffs=Prisma_3T_coeff_AS82.grad'
        wdir_line = '  --working-dir=' + self.working_directory_name

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_line + os.linesep)
            script.write(xnat_pbs_setup_singularity_load + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_process + ' \\' +
                         os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(scan_line + ' \\' + os.linesep)
            script.write(session_classifier_line + ' \\' + os.linesep)
            script.write(dcmethod_line + ' \\' + os.linesep)
            script.write(topupconfig_line + ' \\' + os.linesep)
            script.write(gdcoeffs_line + ' \\' + os.linesep)
            script.write(wdir_line + os.linesep)

            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
	def create_process_data_job_script(self):
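		"""Write the PBS job script that runs Multi-Run ICA-FIX processing.

		A per-subject scratch temp directory is created and bind-mounted as
		/tmp inside the Singularity container, and the QuNex run script is
		invoked with --hcppipelineprocess=MultiRunIcaFixProcessing.
		"""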
		module_logger.debug(debug_utils.get_name())

		xnat_pbs_jobs_control_folder = os_utils.getenv_required('XNAT_PBS_JOBS_CONTROL')

		subject_info = ccf_subject.SubjectInfo(self.project, self.subject, self.classifier)


		script_name = self.process_data_job_script_name

		with contextlib.suppress(FileNotFoundError):
			os.remove(script_name)

		walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
		## Using mem option instead of vmem for IcaFix	
		#vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'
		mem_limit_str = str(self.mem_limit_gbs) + 'gb'
		resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
		resources_line += ':ppn=' + str(self.WORK_PPN)
		## FIX shouldn't be limited to haswell cores
		#resources_line += ':haswell'
		resources_line += ',walltime=' + walltime_limit_str
		#resources_line += ',vmem=' + vmem_limit_str
		resources_line += ',mem=' + mem_limit_str
		stdout_line = '#PBS -o ' + self.working_directory_name
		stderr_line = '#PBS -e ' + self.working_directory_name
		scratch_tmpdir = '/scratch/' + os.getenv('USER') + '/singularity/tmp/' + self.subject + '_' + self.classifier
		xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
		make_scratch_tmpdir = 'mkdir -p ' + scratch_tmpdir
		xnat_pbs_setup_singularity_process = 'singularity exec -B ' + xnat_pbs_jobs_control_folder + ':/opt/xnat_pbs_jobs_control' \
										+ ',' + scratch_tmpdir + ':/tmp' \
											+ ',' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
										+ ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
										+ ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
		#xnat_pbs_setup_singularity_process = '/opt/xnat_pbs_jobs_control/run_qunexContainer.sh' 
		#xnat_pbs_setup_singularity_process = self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
		#container_line   = '  --containerpath=' + self._get_xnat_pbs_setup_script_singularity_container_path()
		## Per MH, parameterfolder is irrelevant to MR-FIX
		#parameter_line   = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
		studyfolder_line   = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
		subject_line   = '  --subjects=' + self.subject+ '_' + self.classifier
		overwrite_line = '  --overwrite=yes'
		hcppipelineprocess_line = '  --hcppipelineprocess=MultiRunIcaFixProcessing'

		with open(script_name, 'w') as script:
			script.write(resources_line + os.linesep)
			script.write(stdout_line + os.linesep)
			script.write(stderr_line + os.linesep)
			script.write(os.linesep)
			script.write(xnat_pbs_setup_singularity_load + os.linesep)
			script.write(make_scratch_tmpdir + os.linesep)
			script.write(os.linesep)
			script.write(xnat_pbs_setup_singularity_process+ ' \\' + os.linesep)
			## Per MH, parameterfolder is irrelevant to MR-FIX
			#script.write(parameter_line + ' \\' + os.linesep)
			script.write(studyfolder_line + ' \\' + os.linesep)
			script.write(subject_line + ' \\' + os.linesep)
			script.write(overwrite_line + ' \\' + os.linesep)
			#script.write(container_line + ' \\' + os.linesep)
			self._group_list = []
			script.write('  --boldlist="' + self._expand(self.groups) + '" \\' + os.linesep)
			script.write(hcppipelineprocess_line + os.linesep)
			os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
Example #12
    def create_process_data_job_script(self):
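        """Write the PBS job script that runs Structural preprocessing via the
        pipeline's .XNAT_PROCESS script.

        Field map type (SpinEcho vs. SiemensGradientEcho), first T1w/T2w
        locations, template files, and the gradient distortion coefficients
        file (chosen by project) are passed as command line options.
        """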
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_PROCESS script to the working directory
        processing_script_source_path = self.xnat_pbs_jobs_home
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += '.XNAT_PROCESS'

        processing_script_dest_path = self.working_directory_name
        processing_script_dest_path += os.sep + self.PIPELINE_NAME
        processing_script_dest_path += '.XNAT_PROCESS'

        shutil.copy(processing_script_source_path, processing_script_dest_path)
        os.chmod(processing_script_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the process data job script (that calls the .XNAT_PROCESS script)

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        xnat_pbs_setup_line = 'source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name()
        xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
        xnat_pbs_setup_singularity_process = 'singularity exec -B ' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
                 + ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
                 + ',' + self._get_xnat_pbs_setup_script_freesurfer_license_path() + ':/export/freesurfer_license' \
                 + ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + processing_script_source_path

        subject_line = '  --subject=' + self.subject
        session_classifier_line = '  --classifier=' + self.classifier

        if self._has_spin_echo_field_maps(subject_info):
            fieldmap_type_line = '  --fieldmap-type=' + 'SpinEcho'
        else:
            fieldmap_type_line = '  --fieldmap-type=' + 'SiemensGradientEcho'

        first_t1w_directory_name_line = '  --first-t1w-directory-name=' + self._get_first_t1w_directory_name(
            subject_info)
        first_t1w_file_name_line = '  --first-t1w-file-name=' + self._get_first_t1w_file_name(
            subject_info)
        first_t2w_directory_name_line = '  --first-t2w-directory-name=' + self._get_first_t2w_directory_name(
            subject_info)
        first_t2w_file_name_line = '  --first-t2w-file-name=' + self._get_first_t2w_file_name(
            subject_info)
        brain_size_line = '  --brainsize=' + str(self.brain_size)
        t1template_line = '  --t1template=' + self.T1W_TEMPLATE_NAME
        t1templatebrain_line = '  --t1templatebrain=' + self.T1W_TEMPLATE_BRAIN_NAME
        t1template2mm_line = '  --t1template2mm=' + self.T1W_TEMPLATE_2MM_NAME
        t2template_line = '  --t2template=' + self.T2W_TEMPLATE_NAME
        t2templatebrain_line = '  --t2templatebrain=' + self.T2W_TEMPLATE_BRAIN_NAME
        t2template2mm_line = '  --t2template2mm=' + self.T2W_TEMPLATE_2MM_NAME
        templatemask_line = '  --templatemask=' + self.TEMPLATE_MASK_NAME
        template2mmmask_line = '  --template2mmmask=' + self.TEMPLATE_2MM_MASK_NAME
        fnirtconfig_line = '  --fnirtconfig=' + self.FNIRT_CONFIG_FILE_NAME

        if subject_info.project in OneSubjectJobSubmitter._CONNECTOME_SKYRA_SCANNER_PROJECTS:
            gdcoeffs_line = '  --gdcoeffs=' + self.CONNECTOME_GDCOEFFS_FILE_NAME
        elif subject_info.project in OneSubjectJobSubmitter._PRISMA_3T_PROJECTS:
            gdcoeffs_line = '  --gdcoeffs=' + self.PRISMA_3T_GDCOEFFS_FILE_NAME
        else:
            raise ValueError(
                "Unrecognized project for setting gradient distortion coefficients file: "
                + subject_info.project)

        topupconfig_line = '  --topupconfig=' + self.TOPUP_CONFIG_FILE_NAME

        if self._has_spin_echo_field_maps(subject_info):
            se_phase_pos_line = '  --se-phase-pos=' + self._get_positive_spin_echo_file_name(
                subject_info)
            se_phase_neg_line = '  --se-phase-neg=' + self._get_negative_spin_echo_file_name(
                subject_info)
            # mag_line = None
            # phase_line = None
        else:
            se_phase_pos_line = None
            se_phase_neg_line = None
            # mag_line   = '  --fmapmag=' + self._get_fmap_mag_file_name(subject_info)
            # phase_line = '  --fmapphase=' + self._get_fmap_phase_file_name(subject_info)

        wdir_line = '  --working-dir=' + self.working_directory_name

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_line + os.linesep)
            script.write(xnat_pbs_setup_singularity_load + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_process + ' \\' +
                         os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(session_classifier_line + ' \\' + os.linesep)
            script.write(fieldmap_type_line + ' \\' + os.linesep)
            script.write(first_t1w_directory_name_line + ' \\' + os.linesep)
            script.write(first_t1w_file_name_line + ' \\' + os.linesep)
            script.write(first_t2w_directory_name_line + ' \\' + os.linesep)
            script.write(first_t2w_file_name_line + ' \\' + os.linesep)
            script.write(brain_size_line + ' \\' + os.linesep)
            script.write(t1template_line + ' \\' + os.linesep)
            script.write(t1templatebrain_line + ' \\' + os.linesep)
            script.write(t1template2mm_line + ' \\' + os.linesep)
            script.write(t2template_line + ' \\' + os.linesep)
            script.write(t2templatebrain_line + ' \\' + os.linesep)
            script.write(t2template2mm_line + ' \\' + os.linesep)
            script.write(templatemask_line + ' \\' + os.linesep)
            script.write(template2mmmask_line + ' \\' + os.linesep)
            script.write(fnirtconfig_line + ' \\' + os.linesep)
            script.write(gdcoeffs_line + ' \\' + os.linesep)
            script.write(topupconfig_line + ' \\' + os.linesep)

            if (se_phase_pos_line):
                script.write(se_phase_pos_line + ' \\' + os.linesep)
            if (se_phase_neg_line):
                script.write(se_phase_neg_line + ' \\' + os.linesep)
            # if (mag_line): script.write(mag_line + ' \\' + os.linesep)
            # if (phase_line): script.write(phase_line + ' \\' + os.linesep)

            script.write(wdir_line + os.linesep)

            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
Example #13
def main():
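    """Parse the command line arguments and retrieve the prerequisite data for
    the requested processing phase into the specified study directory.
    """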
    # create a parser object for getting the command line arguments
    parser = my_argparse.MyArgumentParser()

    # mandatory arguments
    parser.add_argument('-p',
                        '--project',
                        dest='project',
                        required=True,
                        type=str)
    parser.add_argument('-s',
                        '--subject',
                        dest='subject',
                        required=True,
                        type=str)
    parser.add_argument('-d',
                        '--study-dir',
                        dest='output_study_dir',
                        required=True,
                        type=str)

    # optional arguments
    parser.add_argument('-a',
                        '--scan',
                        dest='scan',
                        required=False,
                        type=str,
                        default=None)
    parser.add_argument('-c',
                        '--copy',
                        dest='copy',
                        action='store_true',
                        required=False,
                        default=False)
    parser.add_argument('-l',
                        '--log',
                        dest='log',
                        action='store_true',
                        required=False,
                        default=False)
    parser.add_argument('-r',
                        '--remove-non-subdirs',
                        dest='remove_non_subdirs',
                        action='store_true',
                        required=False,
                        default=False)

    phase_choices = [
        "STRUCT_PREPROC_PREREQS", "struct_preproc_prereqs",
        "STRUCT_PREPROC_HAND_EDIT_PREREQS", "struct_preproc_hand_edit_prereqs",
        "DIFF_PREPROC_PREREQS", "diff_preproc_prereqs", "FUNC_PREPROC_PREREQS",
        "func_preproc_prereqs", "MULTIRUNICAFIX_PREREQS",
        "multirunicafix_prereqs", "MSMALL_PREREQS", "msmall_prereqs",
        "DEDRIFTANDRESAMPLE_PREREQS", "dedriftandresample_prereqs",
        "REAPPLYFIX_PREREQS", "reapplyfix_prereqs"
    ]

    default_phase_choice = phase_choices[0]

    parser.add_argument('-ph',
                        '--phase',
                        dest='phase',
                        required=False,
                        choices=phase_choices,
                        default=default_phase_choice)

    parser.add_argument('-cl',
                        '--classifier',
                        dest='session_classifier',
                        required=False,
                        type=str,
                        default='3T')

    # parse the command line arguments
    args = parser.parse_args()

    # convert phase argument to uppercase
    args.phase = args.phase.upper()

    # show arguments
    module_logger.info("Arguments:")
    module_logger.info("            Project: " + args.project)
    module_logger.info("            Subject: " + args.subject)
    module_logger.info(" Session Classifier: " + args.session_classifier)
    module_logger.info("         Output Dir: " + args.output_study_dir)
    module_logger.info("              Phase: " + args.phase)
    if args.copy:
        module_logger.info("               Copy: " + str(args.copy))
    if args.log:
        module_logger.info("                Log: " + str(args.log))
    if args.remove_non_subdirs:
        module_logger.info(" Remove Non-Subdirs: " +
                           str(args.remove_non_subdirs))

    subject_info = ccf_subject.SubjectInfo(args.project, args.subject,
                                           args.session_classifier, args.scan)
    archive = ccf_archive.CcfArchive()

    data_retriever = DataRetriever(archive)
    data_retriever.copy = args.copy
    data_retriever.show_log = args.log

    # retrieve data based on phase requested
    if args.phase == "STRUCT_PREPROC_PREREQS":
        data_retriever.get_struct_preproc_prereqs(subject_info,
                                                  args.output_study_dir)

    elif args.phase == "STRUCT_PREPROC_HAND_EDIT_PREREQS":
        data_retriever.get_struct_preproc_hand_edit_prereqs(
            subject_info, args.output_study_dir)

    elif args.phase == "DIFF_PREPROC_PREREQS":
        data_retriever.get_diffusion_preproc_prereqs(subject_info,
                                                     args.output_study_dir)

    elif args.phase == "FUNC_PREPROC_PREREQS":
        data_retriever.get_functional_preproc_prereqs(subject_info,
                                                      args.output_study_dir)

    elif args.phase == "MULTIRUNICAFIX_PREREQS":
        data_retriever.get_multirunicafix_prereqs(subject_info,
                                                  args.output_study_dir)

    elif args.phase == "MSMALL_PREREQS":
        data_retriever.get_msmall_prereqs(subject_info, args.output_study_dir)

    elif args.phase == "DEDRIFTANDRESAMPLE_PREREQS":
        data_retriever.get_dedriftandresample_prereqs(subject_info,
                                                      args.output_study_dir)
        # Get the group average drift data
        # As of February 2017, the group average drift data has been moved from HCP_Staging to
        # HCP_1200
        data_retriever.get_msm_group_average_drift_data(
            "HCP_1200", args.output_study_dir)

    elif args.phase == "REAPPLYFIX_PREREQS":
        data_retriever.get_reapplyfix_prereqs(subject_info,
                                              args.output_study_dir)

    if args.remove_non_subdirs:
        # remove any non-subdirectory data at the output study directory level
        data_retriever.remove_non_subdirs(args.output_study_dir)
	def create_process_data_job_script(self):
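		"""Write the PBS job script that runs Structural preprocessing on hand-edited data.

		The working directory is temporarily moved to the hand-edit processing
		area, the QuNex container is run with
		--hcppipelineprocess=StructuralPreprocessingHandEdit and the extra
		recon-all arguments from FS_EXTRA_RECONALL, and the processing
		directory is moved back afterwards.
		"""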

		project_build_dir = self.build_home + os.sep + self.project
		pipeline_processing_dir = self.working_directory_name.replace(project_build_dir + os.sep, '')
		hand_edit_processing_dir = self._HAND_EDIT_PROCESSING_DIR + os.sep + self.project
		if not os.path.exists(hand_edit_processing_dir):
			os.mkdir(hand_edit_processing_dir)

		module_logger.debug(debug_utils.get_name())

		xnat_pbs_jobs_control_folder = os_utils.getenv_required('XNAT_PBS_JOBS_CONTROL')

		subject_info = ccf_subject.SubjectInfo(self.project, self.subject, self.classifier)

		script_name = self.process_data_job_script_name

		with contextlib.suppress(FileNotFoundError):
			os.remove(script_name)

		walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
		vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

		resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
		resources_line += ':ppn=' + str(self.WORK_PPN) + ':haswell'
		resources_line += ',walltime=' + walltime_limit_str
		resources_line += ',mem=' + vmem_limit_str

		stdout_line = '#PBS -o ' + self.working_directory_name
		stderr_line = '#PBS -e ' + self.working_directory_name

		xnat_pbs_setup_line = 'source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name()
		xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
		
		## PREVIOUS LINE IS TEMPORARY ##
		## TEMPORARY ##
		##+ '/opt/xnat_pbs_jobs_control/run_qunex.sh'
		xnat_pbs_setup_singularity_process = 'singularity exec -B ' \
											+ xnat_pbs_jobs_control_folder + ':/opt/xnat_pbs_jobs_control' \
											+ ',' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
											+ ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
											+ ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' \
											+ os_utils.getenv_required('SINGULARITY_QUNEXRUN_PATH')
		parameter_line   = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
		#studyfolder_line   = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
		studyfolder_line   = '  --studyfolder=' + hand_edit_processing_dir + os.sep + pipeline_processing_dir + os.sep + self.subject + '_' + self.classifier
		subject_line   = '  --subjects=' + self.subject+ '_' + self.classifier
		#hcpdatapath_line   = '  --hcpdatapath=' + self.working_directory_name
		#parameterfile_line   = '  --parameterfile=' + xnat_pbs_jobs_control_folder + '/batch_parameters.txt'
		#mapfile_line   = '  --mapfile=' + xnat_pbs_jobs_control_folder + '/hcp_mapping.txt'
		overwrite_line = '  --overwrite=yes'
		hcppipelineprocess_line = '  --hcppipelineprocess=StructuralPreprocessingHandEdit'
		fsextrareconall_line = '  --fs-extra-reconall=\'' +  os_utils.getenv_required('FS_EXTRA_RECONALL') + '\''
		
		with open(script_name, 'w') as script:
			script.write(resources_line + os.linesep)
			script.write(stdout_line + os.linesep)
			script.write(stderr_line + os.linesep)
			script.write(os.linesep)
			script.write(xnat_pbs_setup_line + os.linesep)
			script.write(xnat_pbs_setup_singularity_load + os.linesep)
			
			script.write(os.linesep)

			script.write('# TEMPORARILY MOVE PROCESSING DIRECTORY TO SCRATCH SPACE DUE TO "Cannot allocate memory" ERRORS IN BUILD SPACE' + os.linesep)
			script.write('mv ' + self.working_directory_name + " " + hand_edit_processing_dir + os.linesep)
			script.write(os.linesep)

			script.write(xnat_pbs_setup_singularity_process+ ' \\' + os.linesep)
			
			script.write(parameter_line + ' \\' + os.linesep)
			script.write(studyfolder_line + ' \\' + os.linesep)
			script.write(subject_line + ' \\' + os.linesep)
			#script.write(hcpdatapath_line + ' \\' + os.linesep)
			#script.write(parameterfile_line + ' \\' + os.linesep)
			#script.write(mapfile_line + ' \\' + os.linesep)
			script.write(overwrite_line + ' \\' + os.linesep)
			script.write(hcppipelineprocess_line + ' \\' + os.linesep)
			script.write(fsextrareconall_line + os.linesep)

			script.write(os.linesep)
			script.write('# MOVE PROCESSING BACK' + os.linesep)
			script.write('mv ' + hand_edit_processing_dir + os.sep + pipeline_processing_dir + ' ' + project_build_dir + os.linesep)
			script.write(os.linesep)

			os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
Example #15
	# optional arguments
	parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
						required=False, default=False)
	parser.add_argument('-o', '--output', dest='output', required=False, type=str)
	parser.add_argument('-a', '--check-all', dest='check_all', action='store_true',
						required=False, default=False)

	# parse the command line arguments
	args = parser.parse_args()

	# check the specified subject and scan for functional preprocessing completion
	archive = ccf_archive.CcfArchive()
	subject_info = ccf_subject.SubjectInfo(
		project=args.project,
		subject_id=args.subject,
		classifier=args.classifier,
		extra=args.scan)
	completion_checker = OneSubjectCompletionChecker()

	if args.output:
		processing_output = open(args.output, 'w')
	else:
		processing_output = sys.stdout

	if completion_checker.is_processing_complete(
			archive=archive,
			subject_info=subject_info,
			verbose=args.verbose,
			output=processing_output,
			short_circuit=not args.check_all):
                        dest='check_all',
                        action='store_true',
                        required=False,
                        default=False)
    parser.add_argument('-m',
                        '--marked',
                        dest='marked',
                        action='store_true',
                        required=False,
                        default=False)
    # parse the command line arguments
    args = parser.parse_args()

    # check the specified subject for structural preprocessing completion
    archive = ccf_archive.CcfArchive()
    subject_info = ccf_subject.SubjectInfo(args.project, args.subject,
                                           args.classifier)
    completion_checker = OneSubjectCompletionXnatChecker()

    if args.output:
        processing_output = open(args.output, 'w')
    else:
        processing_output = sys.stdout

    if args.marked:
        if completion_checker.is_processing_marked_complete(
                archive=archive, subject_info=subject_info):
            print("Exiting with 0 code - Marked Completion Check Successful")
            exit(0)
        else:
            print("Exiting with 1 code - Marked Completion Check Unsuccessful")
            exit(1)
Example #17
    def create_process_data_job_script(self):
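        """Write the PBS job script for this pipeline's .XNAT_PROCESS script.

        Unlike the container-based variants, the job script calls the copied
        .XNAT_PROCESS script directly, passing XNAT connection details,
        session/scan identifiers, TOPUP distortion correction options, and a
        setup script.
        """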
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_PROCESS script to the working directory
        processing_script_source_path = self.xnat_pbs_jobs_home
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += '.XNAT_PROCESS'

        processing_script_dest_path = self.working_directory_name
        processing_script_dest_path += os.sep + self.PIPELINE_NAME
        processing_script_dest_path += '.XNAT_PROCESS'

        shutil.copy(processing_script_source_path, processing_script_dest_path)
        os.chmod(processing_script_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the process data job script (that calls the .XNAT_PROCESS script)

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier, self.scan)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        xnat_pbs_setup_line = 'source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name()

        script_line = processing_script_dest_path
        # NOTE: the user and password value expressions are masked ("******") in this
        # listing; self.username and self.password below are assumed attribute names.
        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        scan_line = '  --scan=' + self.scan
        session_classifier_line = '  --session-classifier=' + self.classifier
        dcmethod_line = '  --dcmethod=TOPUP'
        topupconfig_line = '  --topupconfig=b02b0.cnf'
        gdcoeffs_line = '  --gdcoeffs=Prisma_3T_coeff_AS82.grad'

        wdir_line = '  --working-dir=' + self.working_directory_name
        setup_line = '  --setup-script=' + self.setup_file_name

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_line + os.linesep)
            script.write(os.linesep)
            script.write(script_line + ' \\' + os.linesep)
            script.write(user_line + ' \\' + os.linesep)
            script.write(password_line + ' \\' + os.linesep)
            script.write(server_line + ' \\' + os.linesep)
            script.write(project_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(session_line + ' \\' + os.linesep)
            script.write(scan_line + ' \\' + os.linesep)
            script.write(session_classifier_line + ' \\' + os.linesep)
            script.write(dcmethod_line + ' \\' + os.linesep)
            script.write(topupconfig_line + ' \\' + os.linesep)
            script.write(gdcoeffs_line + ' \\' + os.linesep)
            script.write(wdir_line + ' \\' + os.linesep)
            script.write(setup_line + os.linesep)

            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)