def __init__(self, archive, build_home):
        """
		Initialize a OneSubjectJobSubmitter
		"""
        self._archive = archive
        self._build_home = build_home

        self._xnat_pbs_jobs_home = os_utils.getenv_required('XNAT_PBS_JOBS')
        self._log_dir = os_utils.getenv_required('XNAT_PBS_JOBS_LOG_DIR')

        self._scan = None
        self._working_directory_name_prefix = None
Example #2
    def list_of_expected_files(self, working_dir, subject_info):

        hcp_run_utils = os_utils.getenv_required('HCP_RUN_UTILS')
        with open(hcp_run_utils + os.sep + 'FunctionalPreprocessing' + os.sep +
                  'ExpectedOutputFiles.CCF.txt') as f:
            list_from_file = f.readlines()

        expected_files = []

        for name in list_from_file:
            # remove any comments (anything after a # on a line)
            filename = name.split('#', 1)[0]
            # remove leading and trailing whitespace
            filename = filename.strip()
            if filename:
                # replace internal whitespace with separator '/' or '\'
                filename = os.sep.join(filename.split())
                # replace the scan placeholder with the actual scan name
                filename = filename.replace("{scan}", subject_info.extra)
                # prepend working directory and subject id directory
                filename = os.sep.join(
                    [working_dir, subject_info.subject_id, filename])
                expected_files.append(filename)

        return expected_files
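
    # A hypothetical usage sketch (not part of the pipeline itself): the
    # expected-file list built above can drive a simple completion check.
    # `submitter`, `working_dir`, and `subject_info` are assumed to exist.
    #
    #   expected = submitter.list_of_expected_files(working_dir, subject_info)
    #   missing = [f for f in expected if not os.path.isfile(f)]
    #   if missing:
    #       print(str(len(missing)) + " expected output files are missing")
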
    def create_process_data_job_script(self):
        module_logger.debug(debug_utils.get_name())

        xnat_pbs_jobs_control_folder = os_utils.getenv_required(
            'XNAT_PBS_JOBS_CONTROL')

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier, self.scan)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        mem_limit_str = str(self.mem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN) + ':gpus=1:K20x'
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + mem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name
        load_cuda = "module load cuda-9.1"

        xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
        xnat_pbs_setup_singularity_process = 'singularity exec --nv -B ' + xnat_pbs_jobs_control_folder + ':/opt/xnat_pbs_jobs_control,' \
                 + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
                 + ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
                 + ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
        parameter_line = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
        studyfolder_line = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
        subject_line = '  --subjects=' + self.subject + '_' + self.classifier
        overwrite_line = '  --overwrite=yes'
        hcppipelineprocess_line = '  --hcppipelineprocess=DiffusionPreprocessing'

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(load_cuda + os.linesep)
            script.write(xnat_pbs_setup_singularity_load + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_process + ' \\' +
                         os.linesep)
            script.write(parameter_line + ' \\' + os.linesep)
            script.write(studyfolder_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(overwrite_line + ' \\' + os.linesep)
            self._group_list = []
            script.write('  --boldlist="' + self._expand(self.groups) +
                         '" \\' + os.linesep)
            script.write(hcppipelineprocess_line + os.linesep)
            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
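
# For illustration, a script generated by create_process_data_job_script above
# would look roughly like this (walltime, memory, and bracketed values are
# placeholders, not actual pipeline values):
#
#   #PBS -l nodes=1:ppn=1:gpus=1:K20x,walltime=24:00:00,mem=32gb
#   #PBS -o <working_directory_name>
#   #PBS -e <working_directory_name>
#
#   module load cuda-9.1
#   module load <singularity module version>
#
#   singularity exec --nv -B <bind specs> <container path> <qunex run path> \
#     --parameterfolder=<qunex parameter folder> \
#     --studyfolder=<working dir>/<subject>_<classifier> \
#     --subjects=<subject>_<classifier> \
#     --overwrite=yes \
#     --boldlist="<expanded group list>" \
#     --hcppipelineprocess=DiffusionPreprocessing
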
def main():
    # create a parser object for getting the command line options
    parser = my_argparse.MyArgumentParser()

    # mandatory arguments
    parser.add_argument('-u', '--user', dest='user', required=True, type=str)
    parser.add_argument(dest='input_file')

    # optional arguments
    parser.add_argument('-ser',
                        '--server',
                        dest='server',
                        required=False,
                        default='https://' +
                        os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                        type=str)

    parser.add_argument('-p',
                        '--password',
                        dest='password',
                        required=False,
                        type=str)

    # parse the command line arguments
    args = parser.parse_args()

    if args.password:
        password = args.password
    else:
        password = getpass.getpass("Password: "******"Parsed arguments:")
    _inform("    Username: "******"    Password: "******"*** password mask ***")
    _inform("      Server: " + args.server)
    _inform("  Input File: " + args.input_file)

    _inform("")

    with open(args.input_file, 'r') as input_file:
        for line in input_file:
            line = str_utils.remove_ending_new_lines(line)
            line = line.strip()

            if line != '' and line[0] != '#':
                (project, subject, session, resource) = line.split('\t')
                _inform("")
                _inform("     Project: " + project)
                _inform("     Subject: " + subject)
                _inform("     Session: " + session)
                _inform("    Resource: " + resource)
                _inform("")

                delete_resource.delete_resource(args.user, password,
                                                args.server, project, subject,
                                                session, resource)
Example #5
    def submit_jobs(self, subject_list):

        # Read the configuration file
        config_file_name = file_utils.get_config_file_name(__file__)
        logger.info("Reading configuration from file: " + config_file_name)

        config = my_configparser.MyConfigParser()
        config.read(config_file_name)

        # Submit jobs for listed subjects
        for subject in subject_list:

            put_server = 'http://db-shadow' + str(self.get_and_inc_shadow_number()) + '.nrg.mir:8080'

            # get information for subject from the configuration file
            setup_file = xnat_pbs_jobs + os.sep + config.get_value(subject.subject_id, 'SetUpFile')
            clean_output_first = config.get_bool_value(subject.subject_id, 'CleanOutputFirst')
            walltime_limit_hrs = config.get_int_value(subject.subject_id, 'WalltimeLimit')
            vmem_limit_gbs = config.get_int_value(subject.subject_id, 'VmemLimit')
            processing_stage_str = config.get_value(subject.subject_id, 'ProcessingStage')
            processing_stage = one_subject_job_submitter.ProcessingStage.from_string(processing_stage_str)

            logger.info("")
            logger.info("--------------------------------------------------------------------------------")
            logger.info(" Submitting RestingStateStatsHCP7T jobs for:")
            logger.info("            project: " + subject.project)
            logger.info("         refproject: " + subject.structural_reference_project)
            logger.info("            subject: " + subject.subject_id)
            logger.info("         put_server: " + put_server)
            logger.info("         setup_file: " + setup_file)
            logger.info(" clean_output_first: " + str(clean_output_first))
            logger.debug("walltime_limit_hrs: " + str(walltime_limit_hrs))
            logger.debug("    vmem_limit_gbs: " + str(vmem_limit_gbs))
            logger.info("--------------------------------------------------------------------------------")

            submitter = one_subject_job_submitter.OneSubjectJobSubmitter(self._archive, self._archive.build_home)

            submitter.username = userid
            submitter.password = password
            submitter.server = 'https://' + os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')

            submitter.project = subject.project
            submitter.subject = subject.subject_id
            submitter.session = subject.subject_id + '_7T'

            submitter.structural_reference_project = subject.structural_reference_project
            submitter.structural_reference_session = subject.subject_id + '_3T'

            submitter.walltime_limit_hours = walltime_limit_hrs
            submitter.vmem_limit_gbs = vmem_limit_gbs

            submitter.setup_script = setup_file
            submitter.clean_output_resource_first = clean_output_first
            submitter.put_server = put_server

            submitter.submit_jobs(processing_stage)
    def __init__(self, archive, build_home):
        """Constructs a OneSubjectJobSubmitter.

        :param archive: Archive with which this submitter is to work.
        :type archive: HcpArchive

        :param build_home: path to build space
        :type build_home: str
        """
        self._archive = archive
        self._build_home = build_home

        # home = os_utils.getenv_required('HOME')
        # self._xnat_pbs_jobs_home = home + os.sep
        # self._xnat_pbs_jobs_home += 'pipeline_tools' + os.sep
        # self._xnat_pbs_jobs_home += 'xnat_pbs_jobs'

        self._xnat_pbs_jobs_home = os_utils.getenv_required('XNAT_PBS_JOBS')
        self._log_dir = os_utils.getenv_required('LOG_DIR')
Example #7
    def __init__(self, archive, build_home):
        """Constructs a OneSubjectJobSubmitter.

        :param archive: Archive with which this submitter is to work.
        :type archive: HcpArchive

        :param build_home: path to build space
        :type build_home: str
        """
        self._archive = archive
        self._build_home = build_home

        # home = os_utils.getenv_required('HOME')
        # self._xnat_pbs_jobs_home = home + os.sep
        # self._xnat_pbs_jobs_home += 'pipeline_tools' + os.sep
        # self._xnat_pbs_jobs_home += 'xnat_pbs_jobs'

        self._xnat_pbs_jobs_home = os_utils.getenv_required('XNAT_PBS_JOBS')
        self._log_dir = os_utils.getenv_required('XNAT_PBS_JOBS_LOG_DIR')
Example #8
    def get_shadow_prefix(self):
        xnat_server = os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')
        if xnat_server == 'db.humanconnectome.org':
            put_server_root = 'http://db-shadow'
        elif xnat_server == 'intradb.humanconnectome.org':
            put_server_root = 'http://intradb-shadow'
        else:
            raise ValueError("Unrecognized XNAT_PBS_JOBS_XNAT_SERVER: " + xnat_server)

        return put_server_root

    def _get_db_name(self):
        xnat_server = os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')
        if xnat_server == 'db.humanconnectome.org':
            db_name = 'connectomedb'
        elif xnat_server == 'intradb.humanconnectome.org':
            db_name = 'intradb'
        else:
            raise ValueError("Unrecognized XNAT_PBS_JOBS_XNAT_SERVER: " + xnat_server)

        return db_name
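
# A minimal sketch (assumed method names, taken from the examples above)
# showing how the shadow prefix is typically combined with a rotating shadow
# number to build a put_server value such as 'http://db-shadow1.nrg.mir:8080':
#
#   put_server = (self.get_shadow_prefix() +
#                 str(self.get_and_inc_shadow_number()) + '.nrg.mir:8080')
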
def main():
    # create a parser object for getting the command line arguments
    parser = my_argparse.MyArgumentParser(
        description="Program to delete all DB resources of a given name for all sessions in a given ConnectomeDB project.")

    # mandatory arguments
    parser.add_argument('-u', '--user', dest='user', required=True, type=str)
    parser.add_argument('-pw', '--password', dest='password', required=True, type=str)
    parser.add_argument('-pr', '--project', dest='project', required=True, type=str)
    parser.add_argument('-r', '--resource', dest='resource', required=True, type=str)

    # optional arguments
    parser.add_argument('-ser', '--server', dest='server', required=False,
                        default='https://' + os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                        type=str)
    parser.add_argument('-f', '--force', dest='force', action='store_true', required=False, default=False)

    # parse the command line arguments
    args = parser.parse_args()

    # show parsed arguments
    _inform("Parsed arguments:")
    _inform("  Username: "******"  Password: "******"*** password mask ***")
    _inform("    Server: " + args.server)
    _inform("   Project: " + args.project)
    _inform("  Resource: " + args.resource)
    _inform("     Force: " + str(args.force))

    # find all instances of the specified resource in the specified project

    my_xnat_archive = xnat_archive.XNAT_Archive()

    archive_root = my_xnat_archive.project_archive_root(args.project)

    dir_list = glob.glob(archive_root + os.sep + '*')
    for directory in sorted(dir_list):
        resource_dir_to_look_for = directory + os.sep + 'RESOURCES' + os.sep + args.resource

        if os.path.isdir(resource_dir_to_look_for):

            unprefixed = resource_dir_to_look_for.replace(archive_root + os.sep, "")
            sep_loc = unprefixed.find(os.sep)
            session = unprefixed[:sep_loc]

            underscore_loc = session.find('_')
            subject = session[:underscore_loc]

            _inform("Deleting resource: " + args.resource + " for session: " + session)

            delete_resource.delete_resource(args.user, args.password, args.server,
                                            args.project, subject, session,
                                            args.resource, args.force)
    def submit_jobs(self, subject_list, config):

        # submit jobs for the listed subjects
        for subject in subject_list:

            put_server = 'http://db-shadow' + str(
                self.get_and_inc_shadow_number()) + '.nrg.mir:8080'

            # get information for the subject from the configuration
            setup_file = config.get_value(subject.subject_id, 'SetUpFile')
            clean_output_first = config.get_bool_value(subject.subject_id,
                                                       'CleanOutputFirst')
            # walltime_limit_hours = config.get_int_value(subject.subject_id, 'WalltimeLimit')
            # vmem_limit_gbs = config.get_int_value(subject.subject_id, 'VmemLimit')
            processing_stage_str = config.get_value(subject.subject_id,
                                                    'ProcessingStage')
            processing_stage = one_subject_job_submitter.ProcessingStage.from_string(
                processing_stage_str)

            logger.info("-----")
            logger.info(" Submitting bedpostxHCP3T jobs for:")
            logger.info("              project: " + subject.project)
            logger.info("              subject: " + subject.subject_id)
            logger.info("           put_server: " + put_server)
            logger.info("           setup_file: " + setup_file)
            logger.info("   clean_output_first: " + str(clean_output_first))
            # logger.info(" walltime_limit_hours: " + str(walltime_limit_hours))
            # logger.info("       vmem_limit_gbs: " + str(vmem_limit_gbs))
            logger.info("     processing_stage: " + str(processing_stage))
            logger.info("-----")

            submitter = one_subject_job_submitter.OneSubjectJobSubmitter(
                self._archive, self._archive.build_home)

            submitter.username = userid
            submitter.password = password
            submitter.server = 'https://' + os_utils.getenv_required(
                'XNAT_PBS_JOBS_XNAT_SERVER')

            submitter.setup_script = setup_file

            submitter.project = subject.project
            submitter.subject = subject.subject_id
            submitter.session = subject.subject_id + '_3T'

            # submitter.walltime_limit_hours = walltime_limit_hours
            # submitter.vmem_limit_gbs = vmem_limit_gbs

            submitter.clean_output_resource_first = clean_output_first
            submitter.put_server = put_server

            submitter.submit_jobs(processing_stage)
def make_all_links_into_copies_ext(full_path):

    xnat_pbs_jobs = os_utils.getenv_required('XNAT_PBS_JOBS')
    command_str = xnat_pbs_jobs + os.sep + 'lib' + os.sep + 'utils' + os.sep + 'make_symlinks_into_copies.sh' + ' ' + full_path

    completed_subprocess = subprocess.run(command_str,
                                          shell=True,
                                          check=True,
                                          stdout=subprocess.PIPE,
                                          universal_newlines=True)
    output = str_utils.remove_ending_new_lines(completed_subprocess.stdout)

    print(output)
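
# A pure-Python sketch of the same idea, for environments where the
# make_symlinks_into_copies.sh helper is unavailable. This is an illustrative
# equivalent, not the pipeline's actual implementation; it only handles
# symlinks to regular files and leaves directory symlinks alone.
def make_all_links_into_copies_py(full_path):
    import os
    import shutil

    for dir_path, dir_names, file_names in os.walk(full_path):
        for name in file_names:
            path = os.path.join(dir_path, name)
            if os.path.islink(path):
                # resolve the link target, then replace the link with a copy
                target = os.path.realpath(path)
                os.remove(path)
                shutil.copy2(target, path)
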
Example #13
def delete_resource(user,
                    password,
                    server,
                    project,
                    subject,
                    session,
                    resource,
                    perform_delete=True):
    # get XNAT session id
    xnat_session_id = xnat_access.get_session_id(
        server=str_utils.get_server_name(server),
        username=user,
        password=password,
        project=project,
        subject=subject,
        session=session)

    resource_url = ''
    resource_url += 'https://' + str_utils.get_server_name(server)
    resource_url += '/REST/projects/' + project
    resource_url += '/subjects/' + subject
    resource_url += '/experiments/' + xnat_session_id
    resource_url += '/resources/' + resource

    variable_values = '?removeFiles=true'

    resource_uri = resource_url + variable_values

    pipeline_engine = os_utils.getenv_required('XNAT_PBS_JOBS_PIPELINE_ENGINE')

    delete_cmd = 'java -Xmx1024m -jar ' + pipeline_engine + os.sep + 'lib' + os.sep + 'xnat-data-client-1.6.4-SNAPSHOT-jar-with-dependencies.jar'
    delete_cmd += ' -u ' + user
    delete_cmd += ' -p ' + password
    delete_cmd += ' -m DELETE'
    delete_cmd += ' -r ' + resource_uri

    if perform_delete:
        _inform("Deleting")
        _inform("    Server: " + server)
        _inform("   Project: " + project)
        _inform("   Subject: " + subject)
        _inform("   Session: " + session)
        _inform("  Resource: " + resource)

        subprocess.run(delete_cmd, shell=True, check=True)

    else:
        _inform("delete_cmd: " + delete_cmd)
        _inform("Deletion not attempted")
    def list_of_expected_files(self, working_dir, fieldmap, subject_info):

        hcp_run_utils = os_utils.getenv_required('HCP_RUN_UTILS')
        template_path = (hcp_run_utils + os.sep + self.processing_name + os.sep +
                         self.expected_output_files_template_filename(fieldmap))
        if not os.path.isfile(template_path):
            xnat_pbs_jobs = os_utils.getenv_required('XNAT_PBS_JOBS')
            template_path = (xnat_pbs_jobs + os.sep + self.processing_name + os.sep +
                             self.expected_output_files_template_filename(fieldmap))

        root_dir = os.sep.join([
            working_dir,
            subject_info.subject_id + '_' + subject_info.classifier
        ])
        with open(template_path) as f:
            expected_files = file_utils.build_filename_list_from_file(
                f,
                root_dir,
                subjectid=subject_info.subject_id + '_' + subject_info.classifier,
                scan=subject_info.extra)
        return expected_files
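
    # For reference, the expected-output template files parsed here (and by the
    # FunctionalPreprocessing variant earlier) use a simple line format: '#'
    # starts a comment, remaining whitespace becomes a path separator, and
    # placeholders such as {scan} and {subjectid} are substituted per subject.
    # Illustrative entries (not actual pipeline content):
    #
    #   MNINonLinear Results {scan} {scan}.nii.gz  # one expected output file
    #   T1w T1w_acpc_dc_restore.nii.gz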
Example #15
def main():
    # create a parser object for getting the command line arguments
    parser = my_argparse.MyArgumentParser(
        description=
        "Batch mode submission of processing jobs for Structural Preprocessing"
    )

    # optional arguments
    #
    # The --force-job-submission or -f option tells this program to ignore
    # checking to see whether a set of jobs is already submitted for a
    # subject/session and to go ahead and submit the jobs anyhow.
    parser.add_argument('-f',
                        '--force-job-submission',
                        dest='force_job_submission',
                        action='store_true',
                        required=False,
                        default=False)

    # parse the command line arguments
    args = parser.parse_args()

    # get Database credentials
    xnat_server = os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')
    userid, password = user_utils.get_credentials(xnat_server)

    # get list of subjects to process
    subject_file_name = file_utils.get_subjects_file_name(__file__)
    print("Retrieving subject list from: " + subject_file_name)
    subject_list = ccf_subject.read_subject_info_list(subject_file_name,
                                                      separator=":")

    do_submissions(userid, password, subject_list, args.force_job_submission)
class OneSubjectJobSubmitter(one_subject_job_submitter.OneSubjectJobSubmitter):

    _SEVEN_MM_TEMPLATE_PROJECTS = os_utils.getenv_required(
        'SEVEN_MM_TEMPLATE_PROJECTS')
    _CONNECTOME_SKYRA_SCANNER_PROJECTS = os_utils.getenv_required(
        'CONNECTOME_SKYRA_SCANNER_PROJECTS')
    _PRISMA_3T_PROJECTS = os_utils.getenv_required('PRISMA_3T_PROJECTS')
    _SCRATCH_PROCESSING_DIR = os_utils.getenv_required(
        'SCRATCH_PROCESSING_DIR')
    _SUPPRESS_FREESURFER_ASSESSOR_JOB = True

    @classmethod
    def MY_PIPELINE_NAME(cls):
        return 'StructuralPreprocessing'

    def __init__(self, archive, build_home):
        super().__init__(archive, build_home)

    @property
    def PIPELINE_NAME(self):
        return OneSubjectJobSubmitter.MY_PIPELINE_NAME()

    @property
    def WORK_NODE_COUNT(self):
        return 1

    @property
    def WORK_PPN(self):
        return 1

    # @property
    # def FIELDMAP_TYPE_SPEC(self):
    #     return "SE"  # Spin Echo Field Maps

    # @property
    # def PHASE_ENCODING_DIR_SPEC(self):
    #     return "PA"  # Posterior-to-Anterior and Anterior-to-Posterior

    @property
    def use_prescan_normalized(self):
        return self._use_prescan_normalized

    @use_prescan_normalized.setter
    def use_prescan_normalized(self, value):
        self._use_prescan_normalized = value
        module_logger.debug(debug_utils.get_name() + ": set to " +
                            str(self._use_prescan_normalized))

    @property
    def brain_size(self):
        return self._brain_size

    @brain_size.setter
    def brain_size(self, value):
        self._brain_size = value
        module_logger.debug(debug_utils.get_name() + ": set to " +
                            str(self._brain_size))

    def _template_size_str(self):
        if self.project is None:
            raise ValueError(
                "project attribute must be set before template size can be determined"
            )

        if self.project in OneSubjectJobSubmitter._SEVEN_MM_TEMPLATE_PROJECTS:
            size_str = "0.7mm"
        else:
            size_str = "0.8mm"

        return size_str

    @property
    def T1W_TEMPLATE_NAME(self):
        return "MNI152_T1_" + self._template_size_str() + ".nii.gz"

    @property
    def T1W_TEMPLATE_BRAIN_NAME(self):
        return "MNI152_T1_" + self._template_size_str() + "_brain.nii.gz"

    @property
    def T1W_TEMPLATE_2MM_NAME(self):
        return "MNI152_T1_2mm.nii.gz"

    @property
    def T2W_TEMPLATE_NAME(self):
        return "MNI152_T2_" + self._template_size_str() + ".nii.gz"

    @property
    def T2W_TEMPLATE_BRAIN_NAME(self):
        return "MNI152_T2_" + self._template_size_str() + "_brain.nii.gz"

    @property
    def T2W_TEMPLATE_2MM_NAME(self):
        return "MNI152_T2_2mm.nii.gz"

    @property
    def TEMPLATE_MASK_NAME(self):
        return "MNI152_T1_" + self._template_size_str() + "_brain_mask.nii.gz"

    @property
    def TEMPLATE_2MM_MASK_NAME(self):
        return "MNI152_T1_2mm_brain_mask_dil.nii.gz"

    @property
    def FNIRT_CONFIG_FILE_NAME(self):
        return "T1_2_MNI152_2mm.cnf"

    @property
    def CONNECTOME_GDCOEFFS_FILE_NAME(self):
        return "coeff_SC72C_Skyra.grad"

    @property
    def PRISMA_3T_GDCOEFFS_FILE_NAME(self):
        return "Prisma_3T_coeff_AS82.grad"

    @property
    def TOPUP_CONFIG_FILE_NAME(self):
        return "b02b0.cnf"

    @property
    def freesurfer_assessor_script_name(self):
        module_logger.debug(debug_utils.get_name())
        return self.scripts_start_name + '.XNAT_CREATE_FREESURFER_ASSESSOR_job.sh'

    def create_get_data_job_script(self):
        """Create the script to be submitted to perform the get data job"""
        module_logger.debug(debug_utils.get_name())

        script_name = self.get_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        script = open(script_name, 'w')
        self._write_bash_header(script)
        script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,mem=4gb' +
                     os.linesep)
        script.write('#PBS -o ' + self.working_directory_name + os.linesep)
        script.write('#PBS -e ' + self.working_directory_name + os.linesep)
        script.write(os.linesep)
        script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' +
                     self._get_db_name() + os.linesep)
        script.write('module load ' +
                     self._get_xnat_pbs_setup_script_singularity_version() +
                     os.linesep)
        script.write(os.linesep)
        script.write(
            'singularity exec -B ' +
            self._get_xnat_pbs_setup_script_archive_root() + ',' +
            self._get_xnat_pbs_setup_script_singularity_bind_path() + ' ' +
            self._get_xnat_pbs_setup_script_singularity_container_xnat_path() +
            ' ' + self.get_data_program_path + ' \\' + os.linesep)
        script.write('  --project=' + self.project + ' \\' + os.linesep)
        script.write('  --subject=' + self.subject + ' \\' + os.linesep)
        script.write('  --classifier=' + self.classifier + ' \\' + os.linesep)
        if self.scan:
            script.write('  --scan=' + self.scan + ' \\' + os.linesep)
        script.write('  --working-dir=' + self.working_directory_name + ' \\' +
                     os.linesep)
        if self.use_prescan_normalized:
            script.write('  --use-prescan-normalized' + ' \\' + os.linesep)
        script.write('  --delay-seconds=120' + os.linesep)
        script.write(os.linesep)
        script.write('rm -rf ' + self.working_directory_name + os.sep +
                     self.subject + '_' + self.classifier +
                     '/unprocessed/T1w_MPR_vNav_4e_RMS' + os.linesep)
        script.close()
        os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)

    def _get_first_t1w_resource_fullpath(self, subject_info):
        t1w_resource_paths = self.archive.available_t1w_unproc_dir_full_paths(
            subject_info)
        if len(t1w_resource_paths) > 0:
            return t1w_resource_paths[0]
        else:
            raise RuntimeError("Session has no T1w resources")

    def _has_spin_echo_field_maps(self, subject_info):
        first_t1w_resource_path = self._get_first_t1w_resource_fullpath(
            subject_info)
        path_expr = first_t1w_resource_path + os.sep + '*SpinEchoFieldMap*' + '.nii.gz'
        spin_echo_file_list = glob.glob(path_expr)
        return len(spin_echo_file_list) > 0

    def _has_siemens_gradient_echo_field_maps(self, subject_info):
        first_t1w_resource_path = self._get_first_t1w_resource_fullpath(
            subject_info)
        path_expr_Magnitude = first_t1w_resource_path + os.sep + '*FieldMap_Magnitude*' + '.nii.gz'
        path_expr_Phase = first_t1w_resource_path + os.sep + '*FieldMap_Phase*' + '.nii.gz'
        siemens_gradient_echo_file_list = glob.glob(
            path_expr_Magnitude) + glob.glob(path_expr_Phase)
        return len(siemens_gradient_echo_file_list) > 1

    def _get_fmap_phase_file_path(self, subject_info):
        first_t1w_resource_path = self._get_first_t1w_resource_fullpath(
            subject_info)
        path_expr = first_t1w_resource_path + os.sep + '*FieldMap_Phase*' + '.nii.gz'
        fmap_phase_list = glob.glob(path_expr)

        if len(fmap_phase_list) > 0:
            fmap_phase_file = fmap_phase_list[0]
        else:
            raise RuntimeError("First T1w has no Phase FieldMap: " + path_expr)

        return fmap_phase_file

    def _get_fmap_phase_file_name(self, subject_info):
        full_path = self._get_fmap_phase_file_path(subject_info)
        basename = os.path.basename(full_path)
        return basename

    def _get_fmap_mag_file_path(self, subject_info):
        first_t1w_resource_path = self._get_first_t1w_resource_fullpath(
            subject_info)
        path_expr = first_t1w_resource_path + os.sep + '*FieldMap_Magnitude*' + '.nii.gz'
        fmap_mag_list = glob.glob(path_expr)

        if len(fmap_mag_list) > 0:
            fmap_mag_file = fmap_mag_list[0]
        else:
            raise RuntimeError("First T1w has no Magnitude FieldMap: " +
                               path_expr)

        return fmap_mag_file

    def _get_fmap_mag_file_name(self, subject_info):
        full_path = self._get_fmap_mag_file_path(subject_info)
        basename = os.path.basename(full_path)
        return basename

    def _get_positive_spin_echo_path(self, subject_info):
        first_t1w_resource_path = self._get_first_t1w_resource_fullpath(
            subject_info)
        path_expr = first_t1w_resource_path + os.sep + '*SpinEchoFieldMap*' + self.PAAP_POSITIVE_DIR + '.nii.gz'
        positive_spin_echo_file_list = glob.glob(path_expr)

        if len(positive_spin_echo_file_list) > 0:
            positive_spin_echo_file = positive_spin_echo_file_list[0]
        else:
            raise RuntimeError(
                "First T1w resource/scan has no positive spin echo field map")

        return positive_spin_echo_file

    def _get_positive_spin_echo_file_name(self, subject_info):
        full_path = self._get_positive_spin_echo_path(subject_info)
        basename = os.path.basename(full_path)
        return basename

    def _get_negative_spin_echo_path(self, subject_info):
        first_t1w_resource_path = self._get_first_t1w_resource_fullpath(
            subject_info)
        path_expr = first_t1w_resource_path + os.sep + '*SpinEchoFieldMap*' + self.PAAP_NEGATIVE_DIR + '.nii.gz'
        negative_spin_echo_file_list = glob.glob(path_expr)

        if len(negative_spin_echo_file_list) > 0:
            negative_spin_echo_file = negative_spin_echo_file_list[0]
        else:
            raise RuntimeError(
                "First T1w resource/scan has no negative spin echo field map")

        return negative_spin_echo_file

    def _get_negative_spin_echo_file_name(self, subject_info):
        full_path = self._get_negative_spin_echo_path(subject_info)
        basename = os.path.basename(full_path)
        return basename

    def _get_first_t1w_name(self, subject_info):
        t1w_unproc_names = self.archive.available_t1w_unproc_names(
            subject_info)
        if len(t1w_unproc_names) > 0:
            first_t1w_name = t1w_unproc_names[0]
        else:
            raise RuntimeError("Session has no available T1w scans")

        return first_t1w_name

    def _get_first_t1w_norm_name(self, subject_info):
        non_norm_name = self._get_first_t1w_name(subject_info)
        vNav_loc = non_norm_name.find('vNav')
        norm_name = (non_norm_name[:vNav_loc] + 'vNav_Norm' +
                     non_norm_name[vNav_loc + 4:])
        return norm_name

    def _get_first_t1w_directory_name(self, subject_info):
        first_t1w_name = self._get_first_t1w_name(subject_info)
        return first_t1w_name

    def _get_first_t1w_resource_name(self, subject_info):
        return self._get_first_t1w_name(
            subject_info
        ) + self.archive.NAME_DELIMITER + self.archive.UNPROC_SUFFIX

    def _get_first_t1w_file_name(self, subject_info):
        if self.use_prescan_normalized:
            return self.session + self.archive.NAME_DELIMITER + self._get_first_t1w_norm_name(
                subject_info) + '.nii.gz'
        else:
            return self.session + self.archive.NAME_DELIMITER + self._get_first_t1w_name(
                subject_info) + '.nii.gz'

    def _get_first_t2w_name(self, subject_info):
        t2w_unproc_names = self.archive.available_t2w_unproc_names(
            subject_info)
        if len(t2w_unproc_names) > 0:
            first_t2w_name = t2w_unproc_names[0]
        else:
            raise RuntimeError("Session has no available T2w scans")

        return first_t2w_name

    def _get_first_t2w_norm_name(self, subject_info):
        non_norm_name = self._get_first_t2w_name(subject_info)
        vNav_loc = non_norm_name.find('vNav')
        norm_name = (non_norm_name[:vNav_loc] + 'vNav_Norm' +
                     non_norm_name[vNav_loc + 4:])
        return norm_name

    def _get_first_t2w_directory_name(self, subject_info):
        first_t2w_name = self._get_first_t2w_name(subject_info)
        return first_t2w_name

    def _get_first_t2w_resource_name(self, subject_info):
        return self._get_first_t2w_name(
            subject_info
        ) + self.archive.NAME_DELIMITER + self.archive.UNPROC_SUFFIX

    def _get_first_t2w_file_name(self, subject_info):
        if self.use_prescan_normalized:
            return self.session + self.archive.NAME_DELIMITER + self._get_first_t2w_norm_name(
                subject_info) + '.nii.gz'
        else:
            return self.session + self.archive.NAME_DELIMITER + self._get_first_t2w_name(
                subject_info) + '.nii.gz'

    def create_process_data_job_script(self):

        project_build_dir = self.build_home + os.sep + self.project
        pipeline_processing_dir = self.working_directory_name.replace(
            project_build_dir + os.sep, '')
        scratch_processing_dir = self._SCRATCH_PROCESSING_DIR + os.sep + self.project
        if not os.path.exists(scratch_processing_dir):
            os.mkdir(scratch_processing_dir)

        module_logger.debug(debug_utils.get_name())

        xnat_pbs_jobs_control_folder = os_utils.getenv_required(
            'XNAT_PBS_JOBS_CONTROL')

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN) + ':haswell'
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str
        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name
        xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
        xnat_pbs_setup_singularity_process = 'singularity exec -B ' \
                 + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
                 + ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
                 + ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
        parameter_line = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
        #studyfolder_line   = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
        studyfolder_line = '  --studyfolder=' + scratch_processing_dir + os.sep + pipeline_processing_dir + os.sep + self.subject + '_' + self.classifier
        subject_line = '  --subjects=' + self.subject + '_' + self.classifier
        overwrite_line = '  --overwrite=yes'
        hcppipelineprocess_line = '  --hcppipelineprocess=StructuralPreprocessing'
        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_load + os.linesep)
            script.write(os.linesep)
            script.write(
                '# TEMPORARILY MOVE PROCESSING DIRECTORY TO SCRATCH SPACE DUE TO "Cannot allocate memory" ERRORS IN BUILD SPACE'
                + os.linesep)
            script.write('mv ' + self.working_directory_name + " " +
                         scratch_processing_dir + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_singularity_process + ' \\' +
                         os.linesep)
            script.write(parameter_line + ' \\' + os.linesep)
            script.write(studyfolder_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(overwrite_line + ' \\' + os.linesep)
            script.write(hcppipelineprocess_line + os.linesep)
            script.write(os.linesep)
            script.write('# MOVE PROCESSING BACK' + os.linesep)
            script.write('mv ' + scratch_processing_dir + os.sep +
                         pipeline_processing_dir + ' ' + project_build_dir +
                         os.linesep)
            script.write(os.linesep)
            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)

    def create_freesurfer_assessor_script(self):
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_CREATE_FREESURFER_ASSESSOR script to the working directory
        freesurfer_assessor_source_path = self.xnat_pbs_jobs_home
        freesurfer_assessor_source_path += os.sep + self.PIPELINE_NAME
        freesurfer_assessor_source_path += os.sep + self.PIPELINE_NAME
        freesurfer_assessor_source_path += '.XNAT_CREATE_FREESURFER_ASSESSOR'

        freesurfer_assessor_dest_path = self.working_directory_name
        freesurfer_assessor_dest_path += os.sep + self.PIPELINE_NAME
        freesurfer_assessor_dest_path += '.XNAT_CREATE_FREESURFER_ASSESSOR'

        shutil.copy(freesurfer_assessor_source_path,
                    freesurfer_assessor_dest_path)
        os.chmod(freesurfer_assessor_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the freesurfer assessor submission script (that calls the .XNAT_CREATE_FREESURFER_ASSESSOR script)

        script_name = self.freesurfer_assessor_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        script = open(script_name, 'w')

        self._write_bash_header(script)
        script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,mem=4gb' +
                     os.linesep)
        script.write('#PBS -o ' + self.working_directory_name + os.linesep)
        script.write('#PBS -e ' + self.working_directory_name + os.linesep)
        script.write(os.linesep)
        script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' +
                     self._get_db_name() + os.linesep)
        script.write(os.linesep)
        script_line = freesurfer_assessor_dest_path
        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        session_classifier_line = '  --session-classifier=' + self.classifier
        wdir_line = '  --working-dir=' + self.working_directory_name

        script.write(script_line + ' \\' + os.linesep)
        script.write(user_line + ' \\' + os.linesep)
        script.write(password_line + ' \\' + os.linesep)
        script.write(server_line + ' \\' + os.linesep)
        script.write(project_line + ' \\' + os.linesep)
        script.write(subject_line + ' \\' + os.linesep)
        script.write(session_line + ' \\' + os.linesep)
        script.write(session_classifier_line + ' \\' + os.linesep)
        script.write(wdir_line + os.linesep)

        script.close()
        os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)

    def create_scripts(self, stage):
        module_logger.debug(debug_utils.get_name())
        super().create_scripts(stage)

        if OneSubjectJobSubmitter._SUPPRESS_FREESURFER_ASSESSOR_JOB:
            return

        if stage >= ccf_processing_stage.ProcessingStage.PREPARE_SCRIPTS:
            self.create_freesurfer_assessor_script()

    def submit_process_data_jobs(self, stage, prior_job=None):
        module_logger.debug(debug_utils.get_name())

        # go ahead and submit the standard process data job and then
        # submit an additional freesurfer assessor job

        standard_process_data_jobno, all_process_data_jobs = \
            super().submit_process_data_jobs(stage, prior_job)

        if OneSubjectJobSubmitter._SUPPRESS_FREESURFER_ASSESSOR_JOB:
            module_logger.info(
                "freesufer assessor job not submitted because freesurfer assessor creation has been suppressed"
            )
            return standard_process_data_jobno, all_process_data_jobs

        if stage >= ccf_processing_stage.ProcessingStage.PROCESS_DATA:
            if standard_process_data_jobno:
                fs_submit_cmd = 'qsub -W depend=afterok:' + standard_process_data_jobno + ' ' + self.freesurfer_assessor_script_name
            else:
                fs_submit_cmd = 'qsub ' + self.freesurfer_assessor_script_name

            completed_submit_process = subprocess.run(fs_submit_cmd,
                                                      shell=True,
                                                      check=True,
                                                      stdout=subprocess.PIPE,
                                                      universal_newlines=True)
            fs_job_no = str_utils.remove_ending_new_lines(
                completed_submit_process.stdout)
            all_process_data_jobs.append(fs_job_no)
            return fs_job_no, all_process_data_jobs

        else:
            module_logger.info(
                "freesurfer assessor job not submitted because of requested processing stage"
            )
            return standard_process_data_jobno, all_process_data_jobs
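
    # The dependency chaining above follows the standard PBS/Torque pattern
    # (qsub -W depend=afterok:<jobid>). A hypothetical minimal helper, assuming
    # the subprocess/str_utils usage seen elsewhere in these examples:
    #
    #   def _submit_after(self, script_name, prior_job_no=None):
    #       cmd = 'qsub '
    #       if prior_job_no:
    #           cmd += '-W depend=afterok:' + prior_job_no + ' '
    #       cmd += script_name
    #       result = subprocess.run(cmd, shell=True, check=True,
    #                               stdout=subprocess.PIPE,
    #                               universal_newlines=True)
    #       return str_utils.remove_ending_new_lines(result.stdout)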

    def mark_running_status(self, stage):
        module_logger.debug(debug_utils.get_name())

        if stage > ccf_processing_stage.ProcessingStage.PREPARE_SCRIPTS:
            mark_cmd = self._xnat_pbs_jobs_home
            mark_cmd += os.sep + self.PIPELINE_NAME
            mark_cmd += os.sep + self.PIPELINE_NAME
            mark_cmd += '.XNAT_MARK_RUNNING_STATUS'
            mark_cmd += ' --user=' + self.username
            mark_cmd += ' --password=' + self.password
            mark_cmd += ' --server=' + str_utils.get_server_name(
                self.put_server)
            mark_cmd += ' --project=' + self.project
            mark_cmd += ' --subject=' + self.subject
            mark_cmd += ' --classifier=' + self.classifier
            mark_cmd += ' --resource=RunningStatus'
            mark_cmd += ' --queued'

            completed_mark_cmd_process = subprocess.run(
                mark_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            print(completed_mark_cmd_process.stdout)


if __name__ == "__main__":
    import ccf.structural_preprocessing.one_subject_run_status_checker as one_subject_run_status_checker

    xnat_server = os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')
    username, password = user_utils.get_credentials(xnat_server)
    archive = ccf_archive.CcfArchive()

    subject = ccf_subject.SubjectInfo(sys.argv[1], sys.argv[2], sys.argv[3])
    submitter = OneSubjectJobSubmitter(archive, archive.build_home)

    run_status_checker = one_subject_run_status_checker.OneSubjectRunStatusChecker()
    if run_status_checker.get_queued_or_running(subject):
        print("-----")
        print("NOT SUBMITTING JOBS FOR")
        print("project: " + subject.project)
        print("subject: " + subject.subject_id)
        print("session classifier: " + subject.classifier)
        print("JOBS ARE ALREADY QUEUED OR RUNNING")
Example #19
    def submit_jobs(
            self,
            username,
            password,
            server,
            project,
            subject,
            session,
            structural_reference_project,
            structural_reference_session,
            put_server,
            clean_output_resource_first,
            setup_script,
            incomplete_only,
            scan,
            walltime_limit_hours,
            mem_limit_gbs,  # UNUSED
            vmem_limit_gbs):

        subject_info = hcp7t_subject.Hcp7TSubjectInfo(
            project, structural_reference_project, subject)

        # determine names of preprocessed resting state scans that are
        # available for the subject
        preproc_resting_state_scan_names = self.archive.available_resting_state_preproc_names(
            subject_info)
        inform("Preprocessed resting state scans available for subject: " +
               str(preproc_resting_state_scan_names))

        # determine names of the preprocessed MOVIE task scans that are available for the subject
        preproc_movie_scan_names = self.archive.available_movie_preproc_names(
            subject_info)
        inform("Preprocessed movie scans available for subject " +
               str(preproc_movie_scan_names))

        # determine names of the FIX processed scans that are available for the subject
        fix_processed_scan_names = self.archive.available_FIX_processed_names(
            subject_info)
        inform("FIX processed scans available for subject " +
               str(fix_processed_scan_names))

        # build list of scans to process
        scan_list = []
        if scan is None:
            scan_list = fix_processed_scan_names
        else:
            scan_list.append(scan)

        # process specified scans
        for scan_name in scan_list:
            if incomplete_only:
                completion_checker = PostFixHCP7T_OneSubjectCompletionChecker.PostFixHCP7T_OneSubjectCompletionChecker()
                if completion_checker.is_processing_complete(
                        self.archive, subject_info, scan_name):
                    inform("scan: " + scan_name +
                           " has already completed PostFixHCP7T processing")
                    inform(
                        "Only submitting jobs for incomplete scans - skipping "
                        + scan_name)
                    continue

            inform("scan_name: " + scan_name)
            long_scan_name = self.archive.functional_scan_long_name(scan_name)
            output_resource_name = self.archive.PostFix_processed_resource_name(
                scan_name)

            inform("")
            inform("-------------------------------------------------")
            inform("Submitting jobs for scan: " + long_scan_name)
            inform("Output resource name: " + output_resource_name)
            inform("-------------------------------------------------")
            inform("")

            # make sure working directories don't have the same name based on the
            # same start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += '.' + subject
            working_directory_name += '.' + long_scan_name
            working_directory_name += '.' + str(current_seconds_since_epoch)

            # make the working directory
            inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=username,
                password=password)
            inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=username,
                password=password,
                project=project,
                subject=subject,
                session=session)

            inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(username, password, server,
                                                jsession_id)
            workflow_id = workflow_obj.create_workflow(xnat_session_id,
                                                       project,
                                                       self.PIPELINE_NAME,
                                                       'Queued')
            inform("workflow_id: " + workflow_id)

            # Clean the output resource if requested
            if clean_output_resource_first:
                inform("Deleting resource: " + output_resource_name + " for:")
                inform("  project: " + project)
                inform("  subject: " + subject)
                inform("  session: " + session)

                delete_resource.delete_resource(
                    username, password, str_utils.get_server_name(server),
                    project, subject, session, output_resource_name)

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + subject
            script_file_start_name += '.' + long_scan_name
            script_file_start_name += '.' + self.PIPELINE_NAME
            script_file_start_name += '.' + project
            script_file_start_name += '.' + session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + '.XNAT_PBS_job.sh'
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, 'w')

            nodes_spec = 'nodes=1:ppn=1'
            walltime_spec = 'walltime=' + str(walltime_limit_hours) + ':00:00'
            vmem_spec = 'vmem=' + str(vmem_limit_gbs) + 'gb'

            work_script.write('#PBS -l ' + nodes_spec + ',' + walltime_spec +
                              ',' + vmem_spec + os.linesep)
            work_script.write('#PBS -o ' + working_directory_name + os.linesep)
            work_script.write('#PBS -e ' + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(self.xnat_pbs_jobs_home + os.sep + '7T' +
                              os.sep + 'PostFixHCP7T' + os.sep +
                              'PostFixHCP7T.XNAT.sh \\' + os.linesep)
            work_script.write('  --user="' + username + '" \\' + os.linesep)
            work_script.write('  --password="' + password + '" \\' +
                              os.linesep)
            work_script.write('  --server="' +
                              str_utils.get_server_name(server) + '" \\' +
                              os.linesep)
            work_script.write('  --project="' + project + '" \\' + os.linesep)
            work_script.write('  --subject="' + subject + '" \\' + os.linesep)
            work_script.write('  --session="' + session + '" \\' + os.linesep)
            work_script.write('  --scan="' + long_scan_name + '" \\' +
                              os.linesep)
            work_script.write('  --working-dir="' + working_directory_name +
                              '" \\' + os.linesep)
            work_script.write('  --workflow-id="' + workflow_id + '" \\' +
                              os.linesep)
            work_script.write('  --setup-script=' + setup_script + os.linesep)

            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name, username, password,
                                   put_server, project, subject, session,
                                   working_directory_name,
                                   output_resource_name,
                                   scan_name + '_' + self.PIPELINE_NAME)

            # Submit the job to do the work
            work_submit_cmd = 'qsub ' + work_script_name
            inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(
                completed_work_submit_process.stdout)
            inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + put_script_name
            inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(
                completed_put_submit_process.stdout)
            inform("put_job_no: " + put_job_no)
Beispiel #20
    def submit_jobs(self, subject_list):

        # Read the configuration file
        config_file_name = file_utils.get_config_file_name(__file__)
        _inform("Reading configuration from file: " + config_file_name)

        config = my_configparser.MyConfigParser()
        config.read(config_file_name)

        # Submit jobs for listed subjects
        for subject in subject_list:

            put_server = 'http://db-shadow' + str(
                self.get_and_inc_shadow_number()) + '.nrg.mir:8080'

            # get information for subject from the configuration file
            setup_file = xnat_pbs_jobs_home + os.sep + config.get_value(
                subject.subject_id, 'SetUpFile')
            clean_output_first = config.get_bool_value(subject.subject_id,
                                                       'CleanOutputFirst')
            pre_eddy_walltime_limit_hrs = config.get_int_value(
                subject.subject_id, 'PreEddyWalltimeLimit')
            pre_eddy_vmem_limit_gbs = config.get_int_value(
                subject.subject_id, 'PreEddyVmemLimit')
            eddy_walltime_limit_hrs = config.get_int_value(
                subject.subject_id, 'EddyWalltimeLimit')
            post_eddy_walltime_limit_hrs = config.get_int_value(
                subject.subject_id, 'PostEddyWalltimeLimit')
            post_eddy_vmem_limit_gbs = config.get_int_value(
                subject.subject_id, 'PostEddyVmemLimit')

            _inform("")
            _inform(
                "--------------------------------------------------------------------------------"
            )
            _inform(" Submitting DiffusionPreprocessingHCP7T jobs for:")
            _inform("            project: " + subject.project)
            _inform("         refproject: " +
                    subject.structural_reference_project)
            _inform("            subject: " + subject.subject_id)
            _inform("         put_server: " + put_server)
            _inform("         setup_file: " + setup_file)
            _inform(" clean_output_first: " + str(clean_output_first))
            _inform(
                "--------------------------------------------------------------------------------"
            )

            submitter = one_subject_job_submitter.OneSubjectJobSubmitter(
                self._archive, self._archive.build_home)

            submitter.username = userid
            submitter.password = password
            submitter.server = 'https://' + os_utils.getenv_required(
                'XNAT_PBS_JOBS_XNAT_SERVER')

            submitter.project = subject.project
            submitter.subject = subject.subject_id
            submitter.session = subject.subject_id + '_7T'

            submitter.structural_reference_project = subject.structural_reference_project
            submitter.structural_reference_session = subject.subject_id + '_3T'

            submitter.pre_eddy_walltime_limit_hours = pre_eddy_walltime_limit_hrs
            submitter.pre_eddy_vmem_limit_gbs = pre_eddy_vmem_limit_gbs
            submitter.eddy_walltime_limit_hours = eddy_walltime_limit_hrs
            submitter.post_eddy_walltime_limit_hours = post_eddy_walltime_limit_hrs
            submitter.post_eddy_vmem_limit_gbs = post_eddy_vmem_limit_gbs

            submitter.setup_script = setup_file
            submitter.clean_output_resource_first = clean_output_first
            submitter.pe_dirs_spec = 'PAAP'
            submitter.put_server = put_server

            submitter.submit_jobs()
Beispiel #21
    def submit_jobs(self, subject_list):
        # read configuration file
        config_file_name = file_utils.get_config_file_name(__file__)

        _inform("")
        _inform(
            "--------------------------------------------------------------------------------"
        )
        _inform("Reading configuration from file: " + config_file_name)

        config = my_configparser.MyConfigParser()
        config.read(config_file_name)

        # submit jobs for listed subjects
        for subject in subject_list:

            put_server = 'http://db-shadow' + str(
                self._current_shadow_number) + '.nrg.mir:8080'

            setup_file = scripts_home + os.sep + config.get_value(
                subject.subject_id, 'SetUpFile')

            wall_time_limit = int(
                config.get_value(subject.subject_id, 'WalltimeLimit'))
            mem_limit = int(config.get_value(subject.subject_id, 'MemLimit'))
            vmem_limit = int(config.get_value(subject.subject_id, 'VmemLimit'))

            scan = subject.extra

            _inform("")
            _inform(
                "--------------------------------------------------------------------------------"
            )
            _inform(" Submitting RepairIcaFixProcessingHCP7T jobs for: ")
            _inform("            project: " + subject.project)
            _inform("         refproject: " +
                    subject.structural_reference_project)
            _inform("            subject: " + subject.subject_id)
            _inform("               scan: " + scan)
            _inform("         put_server: " + put_server)
            _inform("         setup_file: " + setup_file)
            _inform("    wall_time_limit: " + str(wall_time_limit))
            _inform("          mem_limit: " + str(mem_limit))
            _inform("         vmem_limit: " + str(vmem_limit))
            _inform(
                "--------------------------------------------------------------------------------"
            )

            # figure out the specification of the scan(s) to process and whether
            # to only process incomplete scans
            if scan == 'all':
                # want to run them all without regard to whether they are previously complete
                scan_spec = None
                incomplete_only = False
            elif scan == 'incomplete':
                # want to look at all of them and run only those that are incomplete
                scan_spec = None
                incomplete_only = True
            else:
                # want to run this specific one without regard to whether it is previously complete
                scan_spec = scan
                incomplete_only = False

            # Use the "one subject submitter" to submit the jobs for the current subject
            self._one_subject_submitter.submit_jobs(
                userid, password, 'https://' +
                os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                subject.project, subject.subject_id,
                subject.subject_id + '_7T',
                subject.structural_reference_project,
                subject.subject_id + '_3T', put_server, setup_file,
                incomplete_only, scan_spec, wall_time_limit, mem_limit,
                vmem_limit)

            self.increment_shadow_number()
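Beispiel #22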
    def submit_jobs(self,
                    username,
                    password,
                    subject_list,
                    config,
                    force_submission=False):

        # submit jobs for the listed subjects
        for subject in subject_list:

            run_status_checker = one_subject_run_status_checker.OneSubjectRunStatusChecker()
            if not force_submission and run_status_checker.get_queued_or_running(subject):
                print("-----")
                print("\t  NOT SUBMITTING JOBS FOR")
                print("\t                project: " + subject.project)
                print("\t                subject: " + subject.subject_id)
                print("\t                  extra: " + subject.extra)
                print("\t structural ref project: " +
                      subject.structural_reference_project)
                print("\t JOBS ARE ALREADY QUEUED OR RUNNING")
                continue

            submitter = one_subject_job_submitter.OneSubjectJobSubmitter(
                self._archive, self._archive.build_home)

            put_server = 'http://db-shadow' + str(
                self.get_and_inc_shadow_number()) + '.nrg.mir:8080'

            # get information for the subject from the configuration
            clean_output_first = config.get_bool_value(subject.subject_id,
                                                       'CleanOutputFirst')
            processing_stage_str = config.get_value(subject.subject_id,
                                                    'ProcessingStage')
            processing_stage = submitter.processing_stage_from_string(
                processing_stage_str)
            walltime_limit_hrs = config.get_value(subject.subject_id,
                                                  'WalltimeLimitHours')
            mem_limit_gbs = config.get_value(subject.subject_id, 'MemLimitGbs')
            vmem_limit_gbs = config.get_value(subject.subject_id,
                                              'VmemLimitGbs')
            output_resource_suffix = config.get_value(subject.subject_id,
                                                      'OutputResourceSuffix')

            print("-----")
            print("\tSubmitting", submitter.PIPELINE_NAME, "jobs for:")
            print("\t                project:", subject.project)
            print("\t      reference project:",
                  subject.structural_reference_project)
            print("\t                subject:", subject.subject_id)
            print("\t                  extra:", subject.extra)
            print("\t structural ref project:",
                  subject.structural_reference_project)
            print("\t             put_server:", put_server)
            print("\t     clean_output_first:", clean_output_first)
            print("\t       processing_stage:", processing_stage)
            print("\t     walltime_limit_hrs:", walltime_limit_hrs)
            print("\t          mem_limit_gbs:", mem_limit_gbs)
            print("\t         vmem_limit_gbs:", vmem_limit_gbs)
            print("\t output_resource_suffix:", output_resource_suffix)

            # user and server information
            submitter.username = username
            submitter.password = password
            submitter.server = 'https://' + os_utils.getenv_required(
                'XNAT_PBS_JOBS_XNAT_SERVER')

            # subject and project information
            submitter.project = subject.project
            submitter.structural_reference_project = subject.structural_reference_project
            submitter.subject = subject.subject_id
            submitter.session = subject.subject_id + '_7T'
            submitter.classifier = '7T'

            avail_retinotopy_task_names = self._archive.available_retinotopy_preproc_names(
                subject)
            avail_retinotopy_task_names = sorted(
                avail_retinotopy_task_names,
                key=one_subject_job_submitter.retinotopy_presentation_order_key
            )
            concat_spec = '_'.join(
                list(
                    map(one_subject_job_submitter.remove_scan_type,
                        avail_retinotopy_task_names)))
            submitter.scan = 'tfMRI_7T_' + concat_spec

            # job parameters
            submitter.clean_output_resource_first = clean_output_first
            submitter.put_server = put_server
            submitter.walltime_limit_hours = walltime_limit_hrs
            submitter.mem_limit_gbs = mem_limit_gbs
            submitter.vmem_limit_gbs = vmem_limit_gbs
            submitter.output_resource_suffix = output_resource_suffix

            # submit jobs
            submitted_job_list = submitter.submit_jobs(processing_stage)

            for job in submitted_job_list:
                print("\tsubmitted jobs: ", str(job))

            print("-----")
Beispiel #23
def main():
    # create a parser object for getting the command line options
    parser = my_argparse.MyArgumentParser(
        description="Program to delete a DB resource.")

    # mandatory arguments
    parser.add_argument('-u', '--user', dest='user', required=True, type=str)
    parser.add_argument('-pr',
                        '--project',
                        dest='project',
                        required=True,
                        type=str)
    parser.add_argument('-sub',
                        '--subject',
                        dest='subject',
                        required=True,
                        type=str)
    parser.add_argument('-ses',
                        '--session',
                        dest='session',
                        required=True,
                        type=str)
    parser.add_argument('-r',
                        '--resource',
                        dest='resource',
                        required=True,
                        type=str)

    # optional arguments
    parser.add_argument('-ser',
                        '--server',
                        dest='server',
                        required=False,
                        default='https://' +
                        os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                        type=str)
    parser.add_argument('-f',
                        '--force',
                        dest='force',
                        action="store_true",
                        required=False,
                        default=False)
    parser.add_argument('-pw',
                        '--password',
                        dest='password',
                        required=False,
                        type=str)

    # parse the command line arguments
    args = parser.parse_args()

    if args.password:
        password = args.password
    else:
        password = getpass.getpass("Password: ")

    _inform("Parsed arguments:")
    _inform("  Username: " + args.user)
    _inform("  Password: " + "*** password mask ***")
    _inform("    Server: " + args.server)
    _inform("   Project: " + args.project)
    _inform("   Subject: " + args.subject)
    _inform("   Session: " + args.session)
    _inform("  Resource: " + args.resource)
    _inform("     Force: " + str(args.force))

    if args.force:
        delete_it = True
    elif user_utils.should_proceed():
        delete_it = True
    else:
        delete_it = False

    delete_resource(args.user, password, args.server, args.project,
                    args.subject, args.session, args.resource, delete_it)
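
# Hypothetical invocation of this deletion utility (the script name and all
# argument values below are placeholders, not from the original source):
#
#   python delete_resource.py -u someuser -pr SomeProject \
#       -sub 100307 -ses 100307_3T -r SomeResource
#
# Without -f/--force, user_utils.should_proceed() is expected to prompt for
# interactive confirmation before delete_resource() removes anything.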
Beispiel #24
    def submit_jobs(self, username, password, subject_list, config):

        # submit jobs for the listed subjects
        for subject in subject_list:

            submitter = one_subject_job_submitter.OneSubjectJobSubmitter(
                self._archive, self._archive.build_home)

            put_server = 'http://db-shadow' + str(
                self.get_and_inc_shadow_number())
            put_server += '.nrg.mir:8080'

            # get information for the subject from the configuration
            setup_file = config.get_value(subject.subject_id, 'SetUpFile')
            clean_output_first = config.get_bool_value(subject.subject_id,
                                                       'CleanOutputFirst')
            processing_stage_str = config.get_value(subject.subject_id,
                                                    'ProcessingStage')
            processing_stage = submitter.processing_stage_from_string(
                processing_stage_str)
            walltime_limit_hrs = config.get_value(subject.subject_id,
                                                  'WalltimeLimitHours')
            vmem_limit_gbs = config.get_value(subject.subject_id,
                                              'VmemLimitGbs')
            reg_name = config.get_value(subject.subject_id, 'RegName')
            output_resource_suffix = config.get_value(subject.subject_id,
                                                      'OutputResourceSuffix')

            scan = subject.extra

            module_logger.info("-----")
            module_logger.info(" Submitting " + submitter.PIPELINE_NAME +
                               " jobs for:")
            module_logger.info("                project: " + subject.project)
            module_logger.info("                subject: " +
                               subject.subject_id)
            module_logger.info("     session classifier: " +
                               subject.classifier)
            module_logger.info("                   scan: " + scan)
            module_logger.info("             put_server: " + put_server)
            module_logger.info("             setup_file: " + setup_file)
            module_logger.info("     clean_output_first: " +
                               str(clean_output_first))
            module_logger.info("       processing_stage: " +
                               str(processing_stage))
            module_logger.info("     walltime_limit_hrs: " +
                               str(walltime_limit_hrs))
            module_logger.info("         vmem_limit_gbs: " +
                               str(vmem_limit_gbs))
            module_logger.info("               reg_name: " + str(reg_name))
            module_logger.info(" output_resource_suffix: " +
                               str(output_resource_suffix))
            module_logger.info("-----")

            # user and server information
            submitter.username = username
            submitter.password = password
            submitter.server = 'https://' + os_utils.getenv_required(
                'XNAT_PBS_JOBS_XNAT_SERVER')

            # subject and project information
            submitter.project = subject.project
            submitter.subject = subject.subject_id
            submitter.session = subject.subject_id + '_' + subject.classifier
            submitter.scan = scan
            submitter.reg_name = reg_name

            # job parameters
            submitter.setup_script = setup_file
            submitter.clean_output_resource_first = clean_output_first
            submitter.put_server = put_server
            submitter.walltime_limit_hours = walltime_limit_hrs
            submitter.vmem_limit_gbs = vmem_limit_gbs
            submitter.output_resource_suffix = output_resource_suffix

            # submit jobs
            submitter.submit_jobs(processing_stage)
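
# A minimal sketch (an assumption, not taken from the original source) of the
# processing_stage_from_string() helper used above: configuration files name a
# stage as a string, which is mapped onto an enum member ordered by pipeline step.
import enum

class ProcessingStage(enum.Enum):
    PREPARE_SCRIPTS = 0
    GET_DATA = 1
    PROCESS_DATA = 2
    CLEAN_DATA = 3
    PUT_DATA = 4

def processing_stage_from_string(stage_str):
    """Map e.g. 'PUT_DATA' to ProcessingStage.PUT_DATA; raise KeyError otherwise."""
    return ProcessingStage[stage_str]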
Beispiel #25
    def submit_jobs(self,
                    username,
                    password,
                    subject_list,
                    config,
                    force_job_submission=False):

        # submit jobs for the listed subjects
        for subject in subject_list:

            if not force_job_submission:
                run_status_checker = one_subject_run_status_checker.OneSubjectRunStatusChecker()
                if run_status_checker.get_queued_or_running(subject):
                    print("-----")
                    print("\t NOT SUBMITTING JOBS FOR")
                    print("\t            project: " + subject.project)
                    print("\t            subject: " + subject.subject_id)
                    print("\t session classifier: " + subject.classifier)
                    print("\t JOBS ARE ALREADY QUEUED OR RUNNING")
                    continue

            submitter = one_subject_job_submitter.OneSubjectJobSubmitter(
                self._archive, self._archive.build_home)

            put_server_name = os.environ.get(
                "XNAT_PBS_JOBS_PUT_SERVER_LIST").split(" ")
            put_server = random.choice(put_server_name)

            # get information for the subject from the configuration
            clean_output_first = config.get_bool_value(subject.subject_id,
                                                       'CleanOutputFirst')
            processing_stage_str = config.get_value(subject.subject_id,
                                                    'ProcessingStage')
            processing_stage = submitter.processing_stage_from_string(
                processing_stage_str)
            walltime_limit_hrs = config.get_value(subject.subject_id,
                                                  'WalltimeLimitHours')
            vmem_limit_gbs = config.get_value(subject.subject_id,
                                              'VmemLimitGbs')
            output_resource_suffix = config.get_value(subject.subject_id,
                                                      'OutputResourceSuffix')
            brain_size = config.get_value(subject.subject_id, 'BrainSize')
            use_prescan_normalized = config.get_bool_value(
                subject.subject_id, 'UsePrescanNormalized')

            print("-----")
            print("\tSubmitting", submitter.PIPELINE_NAME, "jobs for:")
            print("\t               project:", subject.project)
            print("\t               subject:", subject.subject_id)
            print("\t    session classifier:", subject.classifier)
            print("\t            put_server:", put_server)
            print("\t    clean_output_first:", clean_output_first)
            print("\t      processing_stage:", processing_stage)
            print("\t    walltime_limit_hrs:", walltime_limit_hrs)
            print("\t        vmem_limit_gbs:", vmem_limit_gbs)
            print("\toutput_resource_suffix:", output_resource_suffix)
            print("\t            brain_size:", brain_size)
            print("\tuse_prescan_normalized:", use_prescan_normalized)

            # configure one subject submitter

            # user and server information
            submitter.username = username
            submitter.password = password
            submitter.server = 'https://' + os_utils.getenv_required(
                'XNAT_PBS_JOBS_XNAT_SERVER')

            # subject and project information
            submitter.project = subject.project
            submitter.subject = subject.subject_id
            submitter.session = subject.subject_id + '_' + subject.classifier
            submitter.classifier = subject.classifier
            submitter.brain_size = brain_size
            submitter.use_prescan_normalized = use_prescan_normalized

            # job parameters
            submitter.clean_output_resource_first = clean_output_first
            submitter.put_server = put_server
            submitter.walltime_limit_hours = walltime_limit_hrs
            submitter.vmem_limit_gbs = vmem_limit_gbs
            submitter.output_resource_suffix = output_resource_suffix

            # submit jobs
            submitted_job_list = submitter.submit_jobs(processing_stage)

            for job in submitted_job_list:
                print("\tsubmitted jobs:", job)

            print("-----")
	def create_process_data_job_script(self):
		module_logger.debug(debug_utils.get_name())

		xnat_pbs_jobs_control_folder = os_utils.getenv_required('XNAT_PBS_JOBS_CONTROL')

		subject_info = ccf_subject.SubjectInfo(self.project, self.subject, self.classifier)


		script_name = self.process_data_job_script_name

		with contextlib.suppress(FileNotFoundError):
			os.remove(script_name)

		walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
		## Using mem option instead of vmem for IcaFix	
		#vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'
		mem_limit_str = str(self.mem_limit_gbs) + 'gb'
		resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
		resources_line += ':ppn=' + str(self.WORK_PPN)
		## FIX shouldn't be limited to haswell cores
		#resources_line += ':haswell'
		resources_line += ',walltime=' + walltime_limit_str
		#resources_line += ',vmem=' + vmem_limit_str
		resources_line += ',mem=' + mem_limit_str
		stdout_line = '#PBS -o ' + self.working_directory_name
		stderr_line = '#PBS -e ' + self.working_directory_name
		scratch_tmpdir = '/scratch/' + os.getenv('USER') + '/singularity/tmp/' + self.subject + '_' + self.classifier
		xnat_pbs_setup_singularity_load = 'module load ' + self._get_xnat_pbs_setup_script_singularity_version()
		make_scratch_tmpdir = 'mkdir -p ' + scratch_tmpdir
		xnat_pbs_setup_singularity_process = 'singularity exec -B ' + xnat_pbs_jobs_control_folder + ':/opt/xnat_pbs_jobs_control' \
			+ ',' + scratch_tmpdir + ':/tmp' \
			+ ',' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() \
			+ ',' + self._get_xnat_pbs_setup_script_gradient_coefficient_path() + ':/export/HCP/gradient_coefficient_files' \
			+ ' ' + self._get_xnat_pbs_setup_script_singularity_container_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
		#xnat_pbs_setup_singularity_process = '/opt/xnat_pbs_jobs_control/run_qunexContainer.sh' 
		#xnat_pbs_setup_singularity_process = self._get_xnat_pbs_setup_script_singularity_qunexrun_path()
		#container_line   = '  --containerpath=' + self._get_xnat_pbs_setup_script_singularity_container_path()
		## Per MH, parameterfolder is irrelevant to MR-FIX
		#parameter_line   = '  --parameterfolder=' + self._get_xnat_pbs_setup_script_singularity_qunexparameter_path()
		studyfolder_line = '  --studyfolder=' + self.working_directory_name + '/' + self.subject + '_' + self.classifier
		subject_line = '  --subjects=' + self.subject + '_' + self.classifier
		overwrite_line = '  --overwrite=yes'
		hcppipelineprocess_line = '  --hcppipelineprocess=MultiRunIcaFixProcessing'

		with open(script_name, 'w') as script:
			script.write(resources_line + os.linesep)
			script.write(stdout_line + os.linesep)
			script.write(stderr_line + os.linesep)
			script.write(os.linesep)
			script.write(xnat_pbs_setup_singularity_load + os.linesep)
			script.write(make_scratch_tmpdir + os.linesep)
			script.write(os.linesep)
			script.write(xnat_pbs_setup_singularity_process + ' \\' + os.linesep)
			## Per MH, parameterfolder is irrelevant to MR-FIX
			#script.write(parameter_line + ' \\' + os.linesep)
			script.write(studyfolder_line + ' \\' + os.linesep)
			script.write(subject_line + ' \\' + os.linesep)
			script.write(overwrite_line + ' \\' + os.linesep)
			#script.write(container_line + ' \\' + os.linesep)
			# reset the group list; _expand() presumably rebuilds it as a side effect
			self._group_list = []
			script.write('  --boldlist="' + self._expand(self.groups) + '" \\' + os.linesep)
			script.write(hcppipelineprocess_line + os.linesep)

		# the with-statement closes the script file; an explicit close() is unnecessary
		os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
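Beispiel #27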
    def submit_jobs(self):
        _debug("submit_jobs")

        if self.validate_parameters():

            # make sure working directories don't have the same name based on the same
            # start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + self.project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += '.' + self.subject
            working_directory_name += '.' + str(current_seconds_since_epoch)

            # make the working directory
            _inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=self.username,
                password=self.password)
            _inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=self.username,
                password=self.password,
                project=self.project,
                subject=self.subject,
                session=self.session)
            _inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(
                self.username, self.password, self.server, jsession_id)
            workflow_id = workflow_obj.create_workflow(
                xnat_session_id, self.project, self.PIPELINE_NAME, 'Queued')
            _inform("workflow_id: " + workflow_id)

            # Determine the output resource name
            output_resource_name = self.archive.DEDRIFT_AND_RESAMPLE_RESOURCE_NAME
            _inform("output_resource_name: " + output_resource_name)

            # Clean the output resource if requested
            if self.clean_output_resource_first:
                _inform("Deleting resource: " + output_resource_name + " for:")
                _inform("  project: " + self.project)
                _inform("  subject: " + self.subject)
                _inform("  session: " + self.session)

                delete_resource.delete_resource(
                    self.username, self.password, str_utils.get_server_name(self.server),
                    self.project, self.subject, self.session, output_resource_name, True)

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + self.subject
            script_file_start_name += '.' + self.PIPELINE_NAME
            script_file_start_name += '.' + self.project
            script_file_start_name += '.' + self.session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + '.XNAT_PBS_job.sh'
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, 'w')

            nodes_spec = 'nodes=1:ppn=1'
            walltime_spec = 'walltime=' + str(self.walltime_limit_hours) + ':00:00'
            vmem_spec = 'vmem=' + str(self.vmem_limit_gbs) + 'gb'
            mem_spec = 'mem=' + str(self.mem_limit_gbs) + 'gb'

            work_script.write('#PBS -l ' + nodes_spec + ',' + walltime_spec + ',' + vmem_spec + ',' + mem_spec + os.linesep)
            # work_script.write('#PBS -q HCPput' + os.linesep)
            work_script.write('#PBS -o ' + working_directory_name + os.linesep)
            work_script.write('#PBS -e ' + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(self.xnat_pbs_jobs_home + os.sep + '7T' + os.sep + 'DeDriftAndResampleHCP7T' + os.sep +
                              'DeDriftAndResampleHCP7T.XNAT.sh \\' + os.linesep)
            work_script.write('  --user="' + self.username + '" \\' + os.linesep)
            work_script.write('  --password="' + self.password + '" \\' + os.linesep)
            work_script.write('  --server="' + str_utils.get_server_name(self.server) + '" \\' + os.linesep)
            work_script.write('  --project="' + self.project + '" \\' + os.linesep)
            work_script.write('  --subject="' + self.subject + '" \\' + os.linesep)
            work_script.write('  --session="' + self.session + '" \\' + os.linesep)
            work_script.write('  --structural-reference-project="' +
                              self.structural_reference_project + '" \\' + os.linesep)
            work_script.write('  --structural-reference-session="' +
                              self.structural_reference_session + '" \\' + os.linesep)
            work_script.write('  --working-dir="' + working_directory_name + '" \\' + os.linesep)
            work_script.write('  --workflow-id="' + workflow_id + '" \\' + os.linesep)

            # work_script.write('  --keep-all' + ' \\' + os.linesep)
            # work_script.write('  --prevent-push' + ' \\' + os.linesep)

            work_script.write('  --setup-script=' + self.setup_script + os.linesep)
            
            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name,
                                   self.username, self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   working_directory_name, output_resource_name,
                                   self.PIPELINE_NAME)

            # Submit the job to do the work
            work_submit_cmd = 'qsub ' + work_script_name
            _inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(completed_work_submit_process.stdout)
            _inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + put_script_name
            _inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(completed_put_submit_process.stdout)
            _inform("put_job_no: " + put_job_no)

        else:
            _inform("Unable to submit jobs")
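Beispiel #28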
    @property
    def xnat_pbs_jobs_home(self):
        if not self._xnat_pbs_jobs_home:
            self._xnat_pbs_jobs_home = os_utils.getenv_required(
                'XNAT_PBS_JOBS')

        return self._xnat_pbs_jobs_home
Beispiel #29
    @property
    def MIN_SHADOW_NUMBER(self):
        """Minimum shadow server number."""
        # return 1
        min_shadow_str = os_utils.getenv_required("XNAT_PBS_JOBS_MIN_SHADOW")
        return int(min_shadow_str)
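Beispiel #30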
    def submit_jobs(self, subject_list):
        """Submit a batch of jobs."""

        # read configuration file
        config_file_name = file_utils.get_config_file_name(__file__)

        _inform("")
        _inform("--------------------------------------------------------------------------------")
        _inform("Reading configuration from file: " + config_file_name)

        config = my_configparser.MyConfigParser()
        config.read(config_file_name)

        # submit jobs for listed subjects
        for subject in subject_list:

            put_server = 'http://db-shadow' + str(self.shadow_number) + '.nrg.mir:8080'

            # get information for subject from configuration file
            setup_file = scripts_home + os.sep + config.get_value(subject.subject_id, 'SetUpFile')
            clean_output_first = config.get_bool_value(subject.subject_id, 'CleanOutputFirst')
            wall_time_limit = config.get_int_value(subject.subject_id, 'WalltimeLimit')
            vmem_limit = config.get_int_value(subject.subject_id, 'VmemLimit')
            mem_limit = config.get_int_value(subject.subject_id, 'MemLimit')

            _inform("")
            _inform("--------------------------------------------------------------------------------")
            _inform(" Submitting DeDriftAndResampleHCP7T_HighRes jobs for: ")
            _inform("            project: " + subject.project)
            _inform("         refproject: " + subject.structural_reference_project)
            _inform("            subject: " + subject.subject_id)
            _inform("         put_server: " + put_server)
            _inform("         setup_file: " + setup_file)
            _inform(" clean_output_first: " + str(clean_output_first))
            _inform("    wall_time_limit: " + str(wall_time_limit))
            _inform("         vmem_limit: " + str(vmem_limit))
            _inform("          mem_limit: " + str(mem_limit))
            _inform("--------------------------------------------------------------------------------")

            _debug("Create and configure an appropriate 'one subject submitter'")
            one_subject_submitter = DeDriftAndResampleHCP7T_HighRes_OneSubjectJobSubmitter.DeDriftAndResampleHCP7T_HighRes_OneSubjectJobSubmitter(
                self._archive, self._archive.build_home)
            _debug("one_subject_submitter: " + str(one_subject_submitter))

            one_subject_submitter.username = userid
            one_subject_submitter.password = password
            one_subject_submitter.server = 'https://' + os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER')
            one_subject_submitter.project = subject.project
            one_subject_submitter.subject = subject.subject_id
            one_subject_submitter.session = subject.subject_id + '_7T'
            one_subject_submitter.structural_reference_project = subject.structural_reference_project
            one_subject_submitter.structural_reference_session = subject.subject_id + '_3T'
            one_subject_submitter.put_server = put_server
            one_subject_submitter.clean_output_resource_first = clean_output_first
            one_subject_submitter.setup_script = setup_file
            one_subject_submitter.walltime_limit_hours = wall_time_limit
            one_subject_submitter.vmem_limit_gbs = vmem_limit
            one_subject_submitter.mem_limit_gbs = mem_limit

            _debug("Use the 'one subject submitter' to submit the jobs for the current subject")
            one_subject_submitter.submit_jobs()

            self.increment_shadow_number()

            time.sleep(60)
Beispiel #31
    @property
    def MAX_SHADOW_NUMBER(self):
        """Maximum shadow server number."""
        # return 2
        max_shadow_str = os_utils.getenv_required("XNAT_PBS_JOBS_MAX_SHADOW")
        return int(max_shadow_str)
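
# A minimal sketch (an assumption, not taken from the original source) of the
# shadow-number bookkeeping behind get_and_inc_shadow_number() and
# increment_shadow_number() in the submitters above: these would be methods on
# the batch submitter class, walking the shadow servers round-robin and
# wrapping from MAX_SHADOW_NUMBER back to MIN_SHADOW_NUMBER.
def increment_shadow_number(self):
    """Advance to the next shadow server, wrapping around at the maximum."""
    self._current_shadow_number += 1
    if self._current_shadow_number > self.MAX_SHADOW_NUMBER:
        self._current_shadow_number = self.MIN_SHADOW_NUMBER

def get_and_inc_shadow_number(self):
    """Return the current shadow number, then advance the counter."""
    current = self._current_shadow_number
    self.increment_shadow_number()
    return current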