def delete_resource(user, password, server, project, subject, session, resource, perform_delete=True):
    # get XNAT session id
    xnat_session_id = xnat_access.get_session_id(server=str_utils.get_server_name(server), username=user, password=password,
                                                 project=project, subject=subject, session=session)

    resource_url = ''
    resource_url += 'https://' + str_utils.get_server_name(server)
    resource_url += '/REST/projects/' + project
    resource_url += '/subjects/' + subject
    resource_url += '/experiments/' + xnat_session_id
    resource_url += '/resources/' + resource

    variable_values = '?removeFiles=true'

    resource_uri = resource_url + variable_values

    delete_cmd = 'java -Xmx1024m -jar /home/HCPpipeline/pipeline/lib/xnat-data-client-1.6.4-SNAPSHOT-jar-with-dependencies.jar'
    delete_cmd += ' -u ' + user
    delete_cmd += ' -p ' + password
    delete_cmd += ' -m DELETE'
    delete_cmd += ' -r ' + resource_uri

    if perform_delete:
        _inform("Deleting")
        _inform("    Server: " + server)
        _inform("   Project: " + project)
        _inform("   Subject: " + subject)
        _inform("   Session: " + session)
        _inform("  Resource: " + resource)

        completed_delete_process = subprocess.run(delete_cmd, shell=True, check=True)

    else:
        _inform("delete_cmd: " + delete_cmd)
        _inform("Deletion not attempted")
Example 2
def delete_resource(user,
                    password,
                    server,
                    project,
                    subject,
                    session,
                    resource,
                    perform_delete=True):
    # get XNAT session id
    xnat_session_id = xnat_access.get_session_id(
        server=str_utils.get_server_name(server),
        username=user,
        password=password,
        project=project,
        subject=subject,
        session=session)

    resource_url = ''
    resource_url += 'https://' + str_utils.get_server_name(server)
    resource_url += '/REST/projects/' + project
    resource_url += '/subjects/' + subject
    resource_url += '/experiments/' + xnat_session_id
    resource_url += '/resources/' + resource

    variable_values = '?removeFiles=true'

    resource_uri = resource_url + variable_values

    pipeline_engine = os_utils.getenv_required('XNAT_PBS_JOBS_PIPELINE_ENGINE')

    delete_cmd = 'java -Xmx1024m -jar ' + pipeline_engine + os.sep + 'lib' + os.sep + 'xnat-data-client-1.6.4-SNAPSHOT-jar-with-dependencies.jar'
    delete_cmd += ' -u ' + user
    delete_cmd += ' -p ' + password
    delete_cmd += ' -m DELETE'
    delete_cmd += ' -r ' + resource_uri

    if perform_delete:
        _inform("Deleting")
        _inform("    Server: " + server)
        _inform("   Project: " + project)
        _inform("   Subject: " + subject)
        _inform("   Session: " + session)
        _inform("  Resource: " + resource)

        completed_delete_process = subprocess.run(delete_cmd,
                                                  shell=True,
                                                  check=True)

    else:
        _inform("delete_cmd: " + delete_cmd)
        _inform("Deletion not attempted")
    def mark_running_status(self, stage):
        module_logger.debug(debug_utils.get_name())

        if stage > ccf_processing_stage.ProcessingStage.PREPARE_SCRIPTS:
            mark_cmd = self._xnat_pbs_jobs_home
            mark_cmd += os.sep + self.PIPELINE_NAME
            mark_cmd += os.sep + self.PIPELINE_NAME
            mark_cmd += '.XNAT_MARK_RUNNING_STATUS'
            mark_cmd += ' --user=' + self.username
            mark_cmd += ' --password=' + self.password
            mark_cmd += ' --server=' + str_utils.get_server_name(self.put_server)
            mark_cmd += ' --project=' + self.project
            mark_cmd += ' --subject=' + self.subject
            mark_cmd += ' --classifier=' + self.classifier
            mark_cmd += ' --resource=RunningStatus'
            mark_cmd += ' --queued'

            completed_mark_cmd_process = subprocess.run(
                mark_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            print(completed_mark_cmd_process.stdout)

            return
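
mark_running_status builds one long command string and runs it with shell=True, so a password containing shell metacharacters could break or alter the command. A hypothetical variant passes an argument list instead, which avoids shell quoting entirely; program_path and option_args below are stand-ins for the pieces concatenated into mark_cmd above:

import subprocess

def run_status_command(program_path, option_args):
    # option_args is a hypothetical list such as
    # ['--user=' + username, '--password=' + password, ..., '--queued']
    completed = subprocess.run([program_path] + option_args,
                               check=True,
                               stdout=subprocess.PIPE,
                               universal_newlines=True)
    print(completed.stdout)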
	def create_mark_no_longer_running_script(self):
		module_logger.debug(debug_utils.get_name())

		script_name = self.mark_no_longer_running_script_name

		with contextlib.suppress(FileNotFoundError):
			os.remove(script_name)

		script = open(script_name, 'w')

		self._write_bash_header(script)
		script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,mem=4gb' + os.linesep)
		script.write('#PBS -o ' + self.log_dir + os.linesep)
		script.write('#PBS -e ' + self.log_dir + os.linesep)
		script.write(os.linesep)
		script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name() + os.linesep)
		script.write('module load ' + self._get_xnat_pbs_setup_script_singularity_version()  + os.linesep)
		script.write(os.linesep)
		script.write('singularity exec -B ' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_container_xnat_path() + ' ' + self.mark_running_status_program_path   + ' \\' + os.linesep)
		script.write('  --user="******" \\' + os.linesep)
		script.write('  --password="******" \\' + os.linesep)
		script.write('  --server="' + str_utils.get_server_name(self.put_server) + '" \\' + os.linesep)
		script.write('  --project="' + self.project + '" \\' + os.linesep)
		script.write('  --subject="' + self.subject + '" \\' + os.linesep)
		script.write('  --classifier="' + self.classifier + '" \\' + os.linesep)
		if self.scan:
			script.write('  --scan="' + self.scan + '" \\' + os.linesep)
		script.write('  --resource="' + 'RunningStatus' + '" \\' + os.linesep)
		script.write('  --done' + os.linesep)
		script.write(os.linesep)
		script.write("rm -rf " + self.mark_completion_directory_name)
		
		script.close()
		os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
	def create_check_data_job_script(self):
		"""
		Create the script to be submitted as a job to perform the check data functionality.
		"""
		module_logger.debug(debug_utils.get_name())

		script_name = self.check_data_job_script_name

		with contextlib.suppress(FileNotFoundError):
			os.remove(script_name)

		script = open(script_name, 'w')

		self._write_bash_header(script)
		script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,mem=4gb' + os.linesep)
		script.write('#PBS -o ' + self.log_dir + os.linesep)
		script.write('#PBS -e ' + self.log_dir + os.linesep)
		script.write(os.linesep)
		script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name() + os.linesep)
		script.write('module load ' + self._get_xnat_pbs_setup_script_singularity_version() + os.linesep)
		script.write(os.linesep)
		script.write('singularity exec -B ' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_container_xnat_path() + ' ' + self.check_data_program_path  + ' \\' + os.linesep)
		script.write('  --user="******" \\' + os.linesep)
		script.write('  --password="******" \\' + os.linesep)
		script.write('  --server="' + str_utils.get_server_name(self.put_server) + '" \\' + os.linesep)
		script.write('  --project=' + self.project + ' \\' + os.linesep)
		script.write('  --subject=' + self.subject + ' \\' + os.linesep)
		script.write('  --classifier=' + self.classifier + ' \\' + os.linesep)
		if self.scan:
			script.write('  --scan=' + self.scan + ' \\' + os.linesep)
		script.write('  --working-dir=' + self.check_data_directory_name + os.linesep)

		script.close()
		os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
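
The script writers in this collection repeat the '  --name="value" \' pattern many times. None of the examples define a helper for it, but a small hypothetical one would centralize the quoting and the line-continuation bookkeeping:

import os

def write_option(script, name, value, last=False):
    # write one '  --name="value"' option line; interior lines get a
    # trailing backslash continuation, the final line does not
    continuation = '' if last else ' \\'
    script.write('  --' + name + '="' + value + '"' + continuation + os.linesep)

# usage sketch, mirroring the calls above:
#   write_option(script, 'project', self.project)
#   write_option(script, 'working-dir', self.check_data_directory_name, last=True)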
    def create_put_script(self, put_script_name, username, password, put_server, project, subject, session,
                          working_directory_name, output_resource_name, reason):

        """Create a script to put the working directory in the DB"""
        with contextlib.suppress(FileNotFoundError):
            os.remove(put_script_name)

        put_script = open(put_script_name, 'w')

        put_script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,vmem=12gb' + os.linesep)
        put_script.write('#PBS -q HCPput' + os.linesep)
        put_script.write('#PBS -o ' + self.log_dir + os.linesep)
        put_script.write('#PBS -e ' + self.log_dir + os.linesep)
        put_script.write(os.linesep)
        put_script.write(self.xnat_pbs_jobs_home + os.sep + 'WorkingDirPut' + os.sep + 'XNAT_working_dir_put.sh \\' + os.linesep)
        put_script.write('  --user="******" \\' + os.linesep)
        put_script.write('  --password="******" \\' + os.linesep)
        put_script.write('  --server="' + str_utils.get_server_name(put_server) + '" \\' + os.linesep)
        put_script.write('  --project="' + project + '" \\' + os.linesep)
        put_script.write('  --subject="' + subject + '" \\' + os.linesep)
        put_script.write('  --session="' + session + '" \\' + os.linesep)
        put_script.write('  --working-dir="' + working_directory_name + '" \\' + os.linesep)
        put_script.write('  --resource-suffix="' + output_resource_name + '" \\' + os.linesep)
        put_script.write('  --reason="' + reason + '"' + os.linesep)

        put_script.close()
        os.chmod(put_script_name, stat.S_IRWXU | stat.S_IRWXG)
    def create_freesurfer_assessor_script(self):
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_CREATE_FREESURFER_ASSESSOR script to the working directory
        freesurfer_assessor_source_path = self.xnat_pbs_jobs_home
        freesurfer_assessor_source_path += os.sep + self.PIPELINE_NAME
        freesurfer_assessor_source_path += os.sep + self.PIPELINE_NAME
        freesurfer_assessor_source_path += '.XNAT_CREATE_FREESURFER_ASSESSOR'

        freesurfer_assessor_dest_path = self.working_directory_name
        freesurfer_assessor_dest_path += os.sep + self.PIPELINE_NAME
        freesurfer_assessor_dest_path += '.XNAT_CREATE_FREESURFER_ASSESSOR'

        shutil.copy(freesurfer_assessor_source_path,
                    freesurfer_assessor_dest_path)
        os.chmod(freesurfer_assessor_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the freesurfer assessor submission script (that calls the .XNAT_CREATE_FREESURFER_ASSESSOR script)

        script_name = self.freesurfer_assessor_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        script = open(script_name, 'w')

        self._write_bash_header(script)
        script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,mem=4gb' +
                     os.linesep)
        script.write('#PBS -o ' + self.working_directory_name + os.linesep)
        script.write('#PBS -e ' + self.working_directory_name + os.linesep)
        script.write(os.linesep)
        script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' +
                     self._get_db_name() + os.linesep)
        script.write(os.linesep)
        script_line = freesurfer_assessor_dest_path
        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        session_classifier_line = '  --session-classifier=' + self.classifier
        wdir_line = '  --working-dir=' + self.working_directory_name

        script.write(script_line + ' \\' + os.linesep)
        script.write(user_line + ' \\' + os.linesep)
        script.write(password_line + ' \\' + os.linesep)
        script.write(server_line + ' \\' + os.linesep)
        script.write(project_line + ' \\' + os.linesep)
        script.write(subject_line + ' \\' + os.linesep)
        script.write(session_line + ' \\' + os.linesep)
        script.write(session_classifier_line + ' \\' + os.linesep)
        script.write(wdir_line + os.linesep)

        script.close()
        os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
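
Most writers here pair a bare open() with an explicit close(); the create_process_data_job_script example near the end of this collection uses a with block instead, which closes the file even if a write raises. The same change applied to the method above would look like this sketch:

with open(script_name, 'w') as script:
    self._write_bash_header(script)
    # ... the same script.write(...) calls as in the method above ...
os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)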
    def _create_pre_eddy_script(self):
        _debug("_create_pre_eddy_script")

        with contextlib.suppress(FileNotFoundError):
            os.remove(self._pre_eddy_script_name)

        walltime_limit = str(self.pre_eddy_walltime_limit_hours) + ':00:00'
        vmem_limit = str(self.pre_eddy_vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=1:ppn=1,walltime=' + walltime_limit
        resources_line += ',vmem=' + vmem_limit

        stdout_line = '#PBS -o ' + self._working_directory_name
        stderr_line = '#PBS -e ' + self._working_directory_name

        script_line = self.xnat_pbs_jobs_home + os.sep + '7T' + os.sep
        script_line += 'DiffusionPreprocessingHCP7T' + os.sep
        script_line += 'DiffusionPreprocessingHCP7T_PreEddy.XNAT.sh'

        user_line = '  --user="******"'
        password_line = '  --password="******"'
        server_line = '  --server="' + str_utils.get_server_name(
            self.server) + '"'
        project_line = '  --project="' + self.project + '"'
        subject_line = '  --subject="' + self.subject + '"'
        session_line = '  --session="' + self.session + '"'
        ref_proj_line = '  --structural-reference-project="'
        ref_proj_line += self.structural_reference_project + '"'
        ref_sess_line = '  --structural-reference-session="'
        ref_sess_line += self.structural_reference_session + '"'
        wdir_line = '  --working-dir="' + self._working_directory_name + '"'
        workflow_line = '  --workflow-id="' + self._workflow_id + '"'
        setup_line = '  --setup-script=' + self.setup_script
        pe_dirs_line = '  --phase-encoding-dirs=' + self.pe_dirs_spec

        pre_eddy_script = open(self._pre_eddy_script_name, 'w')

        futils.wl(pre_eddy_script, resources_line)
        futils.wl(pre_eddy_script, stdout_line)
        futils.wl(pre_eddy_script, stderr_line)
        futils.wl(pre_eddy_script, '')
        futils.wl(pre_eddy_script, script_line + self._continue)
        futils.wl(pre_eddy_script, user_line + self._continue)
        futils.wl(pre_eddy_script, password_line + self._continue)
        futils.wl(pre_eddy_script, server_line + self._continue)
        futils.wl(pre_eddy_script, project_line + self._continue)
        futils.wl(pre_eddy_script, subject_line + self._continue)
        futils.wl(pre_eddy_script, session_line + self._continue)
        futils.wl(pre_eddy_script, ref_proj_line + self._continue)
        futils.wl(pre_eddy_script, ref_sess_line + self._continue)
        futils.wl(pre_eddy_script, wdir_line + self._continue)
        futils.wl(pre_eddy_script, workflow_line + self._continue)
        futils.wl(pre_eddy_script, setup_line + self._continue)
        futils.wl(pre_eddy_script, pe_dirs_line)

        pre_eddy_script.close()
        os.chmod(self._pre_eddy_script_name, stat.S_IRWXU | stat.S_IRWXG)
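
This writer delegates line output to futils.wl, which is not shown in these examples. From its call sites it plainly writes one line followed by a line separator; a minimal stand-in consistent with that usage:

import os

def wl(file_obj, line):
    # write one line followed by the platform line separator
    file_obj.write(line + os.linesep)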
    def create_work_script(self):
        module_logger.debug(debug_utils.get_name())

        script_name = self.work_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',vmem=' + vmem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        script_line = self.xnat_pbs_jobs_home + os.sep
        script_line += self.PIPELINE_NAME + os.sep + self.PIPELINE_NAME + '.XNAT.sh'

        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        wdir_line = '  --working-dir=' + self.working_directory_name
        setup_line = '  --setup-script=' + self.xnat_pbs_jobs_home + os.sep + self.PIPELINE_NAME + os.sep + self.setup_script

        script = open(script_name, 'w')

        script.write(resources_line + os.linesep)
        script.write(stdout_line + os.linesep)
        script.write(stderr_line + os.linesep)
        script.write(os.linesep)
        script.write(script_line + ' \\' + os.linesep)
        script.write(user_line + ' \\' + os.linesep)
        script.write(password_line + ' \\' + os.linesep)
        script.write(server_line + ' \\' + os.linesep)
        script.write(project_line + ' \\' + os.linesep)
        script.write(subject_line + ' \\' + os.linesep)
        script.write(session_line + ' \\' + os.linesep)

        for group in self._group_list:
            script.write('  --group=' + group + ' \\' + os.linesep)

        for name in self._concat_name_list:
            script.write('  --concat-name=' + name + ' \\' + os.linesep)

        script.write(wdir_line + ' \\' + os.linesep)
        script.write(setup_line + os.linesep)

        script.close()
        os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
Example 11
    def _create_work_script(self, scan):
        logger.debug("_create_work_script - scan: " + scan)

        with contextlib.suppress(FileNotFoundError):
            os.remove(self._work_script_name(scan))

        walltime_limit = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=1:ppn=1,walltime=' + walltime_limit
        resources_line += ',vmem=' + vmem_limit

        stdout_line = '#PBS -o ' + self._working_directory_name
        stderr_line = '#PBS -e ' + self._working_directory_name

        script_line = self.xnat_pbs_jobs_home + os.sep + '7T' + os.sep
        script_line += self.PIPELINE_NAME + os.sep + self.PIPELINE_NAME + '.XNAT.sh'

        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        ref_proj_line = '  --structural-reference-project=' + self.structural_reference_project
        ref_sess_line = '  --structural-reference-session=' + self.structural_reference_session
        scan_line = '  --scan=' + scan
        wdir_line = '  --working-dir=' + self._working_directory_name
        workflow_line = '  --workflow-id=' + self._workflow_id
        setup_line = '  --setup-script=' + self.setup_script

        work_script = open(self._work_script_name(scan), 'w')

        futils.wl(work_script, resources_line)
        futils.wl(work_script, stdout_line)
        futils.wl(work_script, stderr_line)
        futils.wl(work_script, '')
        futils.wl(work_script, script_line + self._continue)
        futils.wl(work_script, user_line + self._continue)
        futils.wl(work_script, password_line + self._continue)
        futils.wl(work_script, server_line + self._continue)
        futils.wl(work_script, project_line + self._continue)
        futils.wl(work_script, subject_line + self._continue)
        futils.wl(work_script, session_line + self._continue)
        futils.wl(work_script, ref_proj_line + self._continue)
        futils.wl(work_script, ref_sess_line + self._continue)
        futils.wl(work_script, scan_line + self._continue)
        futils.wl(work_script, wdir_line + self._continue)
        futils.wl(work_script, workflow_line + self._continue)
        futils.wl(work_script, setup_line)

        work_script.close()
        os.chmod(self._work_script_name(scan), stat.S_IRWXU | stat.S_IRWXG)
	def create_put_data_script(self):
		module_logger.debug(debug_utils.get_name())

		script_name = self.put_data_script_name

		with contextlib.suppress(FileNotFoundError):
			os.remove(script_name)

		script = open(script_name, 'w')
		script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,mem=12gb' + os.linesep)
		script.write('#PBS -o ' + self.log_dir + os.linesep)
		script.write('#PBS -e ' + self.log_dir + os.linesep)
		script.write(os.linesep)
		script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name() + os.linesep)
		script.write('module load ' + self._get_xnat_pbs_setup_script_singularity_version() + os.linesep)
		script.write(os.linesep)
		script.write('mv ' + self.working_directory_name + os.path.sep + '*' + self.PIPELINE_NAME + '* ' + self.working_directory_name + os.path.sep + self.subject + '_' + self.classifier + os.path.sep + 'ProcessingInfo' + os.linesep)
		script.write(os.linesep)
		script.write('singularity exec -B ' + self._get_xnat_pbs_setup_script_archive_root() + ',' + self._get_xnat_pbs_setup_script_singularity_bind_path() + ' ' + self._get_xnat_pbs_setup_script_singularity_container_xnat_path() + ' ' + self.xnat_pbs_jobs_home + os.sep + 'WorkingDirPut' + os.sep + 'XNAT_working_dir_put.sh \\' + os.linesep)
		script.write('  --leave-subject-id-level \\' + os.linesep)
		script.write('  --user="******" \\' + os.linesep)
		script.write('  --password="******" \\' + os.linesep)
		script.write('  --server="' + str_utils.get_server_name(self.put_server) + '" \\' + os.linesep)
		script.write('  --project="' + self.project + '" \\' + os.linesep)
		script.write('  --subject="' + self.subject + '" \\' + os.linesep)
		script.write('  --session="' + self.session + '" \\' + os.linesep)
		script.write('  --working-dir="' + self.working_directory_name + '" \\' + os.linesep)
		if self.scan:
			script.write('  --scan="' + self.scan + '" \\' + os.linesep)
			script.write('  --resource-suffix="' + self.output_resource_suffix + '" \\' + os.linesep)
		else:
			script.write('  --resource-suffix="' + self.output_resource_name + '" \\' + os.linesep)	
		script.write('  --reason="' + self.PIPELINE_NAME + '"' + os.linesep)
		script.write(os.linesep)
		script.write('echo "Run structural QC on hand edited output"' + os.linesep)
		script.write('curl -n https://' + str_utils.get_server_name(self.put_server) + '/xapi/structuralQc/project/' + self.project + '/subject/' + self.subject + '/experiment/' + self.session + '/runStructuralQcHandEditingProcessing -X POST' + os.linesep)
		script.write('curl -n https://' + str_utils.get_server_name(self.put_server) + '/xapi/structuralQc/project/' + self.project + '/subject/' + self.subject + '/experiment/' + self.session + '/sendCompletionNotification -X POST' + os.linesep)
		script.write(os.linesep)
		script.close()
		os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
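
The two curl -n calls above POST to XNAT's structuralQc xapi endpoints to launch hand-editing QC and send a completion notification; -n makes curl read credentials from ~/.netrc. A sketch of the same calls from Python, where the parameter names stand in for the self attributes used above (with no auth argument, requests also falls back to ~/.netrc):

import requests

def run_structural_qc(server_name, project, subject, session):
    base = ('https://' + server_name
            + '/xapi/structuralQc/project/' + project
            + '/subject/' + subject
            + '/experiment/' + session)
    requests.post(base + '/runStructuralQcHandEditingProcessing').raise_for_status()
    requests.post(base + '/sendCompletionNotification').raise_for_status()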
Example 13
    def create_put_script(self,
                          put_script_name,
                          username,
                          password,
                          put_server,
                          project,
                          subject,
                          session,
                          working_directory_name,
                          output_resource_name,
                          reason,
                          leave_subject_id_level=False):

        with contextlib.suppress(FileNotFoundError):
            os.remove(put_script_name)

        put_script = open(put_script_name, 'w')

        put_script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,vmem=12gb' +
                         os.linesep)
        put_script.write('#PBS -q HCPput' + os.linesep)
        put_script.write('#PBS -o ' + self.log_dir + os.linesep)
        put_script.write('#PBS -e ' + self.log_dir + os.linesep)
        put_script.write(os.linesep)

        put_script.write(self.xnat_pbs_jobs_home + os.sep + 'WorkingDirPut' +
                         os.sep + 'XNAT_working_dir_files_put.sh \\' +
                         os.linesep)

        put_script.write('  --user="******" \\' + os.linesep)
        put_script.write('  --password="******" \\' + os.linesep)
        put_script.write('  --server="' +
                         str_utils.get_server_name(put_server) + '" \\' +
                         os.linesep)
        put_script.write('  --project="' + project + '" \\' + os.linesep)
        put_script.write('  --subject="' + subject + '" \\' + os.linesep)
        put_script.write('  --session="' + session + '" \\' + os.linesep)
        put_script.write('  --working-dir="' + working_directory_name +
                         '" \\' + os.linesep)
        put_script.write('  --resource-suffix="' + output_resource_name +
                         '" \\' + os.linesep)
        put_script.write('  --reason="' + self.PIPELINE_NAME + '" \\' +
                         os.linesep)

        if leave_subject_id_level:
            put_script.write('  --leave-subject-id-level' + os.linesep)
        else:
            put_script.write(os.linesep)

        put_script.close()
        os.chmod(put_script_name, stat.S_IRWXU | stat.S_IRWXG)
Example 14
    def _create_work_script(self):
        logger.debug(debug_utils.get_name())

        script_name = self._work_script_name()
        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=1:ppn=1,walltime=' + walltime_limit + ',vmem=' + vmem_limit
        stdout_line = '#PBS -o ' + self._working_directory_name
        stderr_line = '#PBS -e ' + self._working_directory_name

        script_line = self.xnat_pbs_jobs_home + os.sep + self.PIPELINE_NAME + os.sep + self.PIPELINE_NAME + '.XNAT.sh'

        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        scan_line = '  --scan=' + self.scan
        wdir_line = '  --working-dir=' + self._working_directory_name
        setup_line = '  --setup-script=' + self.xnat_pbs_jobs_home + os.sep + self.PIPELINE_NAME + os.sep + self.setup_script
        reg_name_line = '  --reg-name=' + self.reg_name

        work_script = open(self._work_script_name(), 'w')

        futils.wl(work_script, resources_line)
        futils.wl(work_script, stdout_line)
        futils.wl(work_script, stderr_line)
        futils.wl(work_script, '')
        futils.wl(work_script, script_line + self._continue)
        futils.wl(work_script, user_line + self._continue)
        futils.wl(work_script, password_line + self._continue)
        futils.wl(work_script, server_line + self._continue)
        futils.wl(work_script, project_line + self._continue)
        futils.wl(work_script, subject_line + self._continue)
        futils.wl(work_script, session_line + self._continue)
        futils.wl(work_script, scan_line + self._continue)
        futils.wl(work_script, wdir_line + self._continue)
        if self.reg_name != 'MSMSulc':
            futils.wl(work_script, reg_name_line + self._continue)
        futils.wl(work_script, setup_line)

        work_script.close()
        os.chmod(self._work_script_name(), stat.S_IRWXU | stat.S_IRWXG)
    def create_put_data_script(self):
        module_logger.debug(debug_utils.get_name())

        script_name = self.put_data_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        script = open(script_name, 'w')

        script.write('#PBS -l nodes=1:ppn=1,walltime=4:00:00,vmem=12gb' +
                     os.linesep)
        script.write('#PBS -q HCPput' + os.linesep)
        script.write('#PBS -o ' + self.log_dir + os.linesep)
        script.write('#PBS -e ' + self.log_dir + os.linesep)
        script.write(os.linesep)
        script.write('source ' + self._get_xnat_pbs_setup_script_path() + ' ' +
                     self._get_db_name() + os.linesep)
        script.write(os.linesep)
        script.write(self.xnat_pbs_jobs_home + os.sep + 'WorkingDirPut' +
                     os.sep + 'XNAT_working_dir_put.sh \\' + os.linesep)
        script.write('  --leave-subject-id-level \\' + os.linesep)
        script.write('  --user="******" \\' + os.linesep)
        script.write('  --password="******" \\' + os.linesep)
        script.write('  --server="' +
                     str_utils.get_server_name(self.put_server) + '" \\' +
                     os.linesep)
        script.write('  --project="' + self.project + '" \\' + os.linesep)
        script.write('  --subject="' + self.subject + '" \\' + os.linesep)
        script.write('  --session="' + self.session + '" \\' + os.linesep)
        script.write('  --working-dir="' + self.working_directory_name +
                     '" \\' + os.linesep)
        script.write('  --use-http' + ' \\' + os.linesep)

        if self.scan:
            script.write('  --scan="' + self.scan + '" \\' + os.linesep)
            script.write('  --resource-suffix="' +
                         self.output_resource_suffix + '" \\' + os.linesep)
        else:
            script.write('  --resource-suffix="' + self.output_resource_name +
                         '" \\' + os.linesep)

        script.write('  --reason="' + self.PIPELINE_NAME + '"' + os.linesep)

        script.close()
        os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
    def _create_eddy_script(self):
        _debug("_create_eddy_script")

        with contextlib.suppress(FileNotFoundError):
            os.remove(self._eddy_script_name)

        walltime_limit = str(self.eddy_walltime_limit_hours) + ':00:00'

        resources_line = '#PBS -l nodes=1:ppn=3:gpus=1,walltime=' + walltime_limit

        stdout_line = '#PBS -o ' + self._working_directory_name
        stderr_line = '#PBS -e ' + self._working_directory_name

        script_line = self.xnat_pbs_jobs_home + os.sep + '7T' + os.sep
        script_line += 'DiffusionPreprocessingHCP7T' + os.sep
        script_line += 'DiffusionPreprocessingHCP7T_Eddy.XNAT.sh'

        user_line = '  --user="******"'
        password_line = '  --password="******"'
        server_line = '  --server="' + str_utils.get_server_name(
            self.server) + '"'
        subject_line = '  --subject="' + self.subject + '"'
        wdir_line = '  --working-dir="' + self._working_directory_name + '"'
        workflow_line = '  --workflow-id="' + self._workflow_id + '"'
        setup_line = '  --setup-script=' + self.setup_script

        eddy_script = open(self._eddy_script_name, 'w')

        futils.wl(eddy_script, resources_line)
        futils.wl(eddy_script, stdout_line)
        futils.wl(eddy_script, stderr_line)
        futils.wl(eddy_script, '')
        futils.wl(eddy_script, script_line + self._continue)
        futils.wl(eddy_script, user_line + self._continue)
        futils.wl(eddy_script, password_line + self._continue)
        futils.wl(eddy_script, server_line + self._continue)
        futils.wl(eddy_script, subject_line + self._continue)
        futils.wl(eddy_script, wdir_line + self._continue)
        futils.wl(eddy_script, workflow_line + self._continue)
        futils.wl(eddy_script, setup_line)

        eddy_script.close()
        os.chmod(self._eddy_script_name, stat.S_IRWXU | stat.S_IRWXG)
    def submit_jobs(
            self,
            processing_stage=ccf_processing_stage.ProcessingStage.CHECK_DATA):
        module_logger.debug(debug_utils.get_name() + ": processing_stage: " +
                            str(processing_stage))

        module_logger.info("-----")

        module_logger.info("Submitting " + self.PIPELINE_NAME + " jobs for")
        module_logger.info("  Project: " + self.project)
        module_logger.info("  Subject: " + self.subject)
        module_logger.info("  Session: " + self.session)
        module_logger.info("	Stage: " + str(processing_stage))

        # make sure working directories do not have the same name based on
        # the same start time by sleeping a few seconds
        time.sleep(5)

        # make the needed working directories
        os.makedirs(name=self.working_directory_name)
        os.makedirs(name=self.check_data_directory_name)
        os.makedirs(name=self.mark_completion_directory_name)

        module_logger.info("Output Resource Name: " +
                           self.output_resource_name)

        # clean output resource if requested
        if self.clean_output_resource_first:
            module_logger.info("Deleting resource: " +
                               self.output_resource_name + " for:")
            module_logger.info("  project: " + self.project)
            module_logger.info("  subject: " + self.subject)
            module_logger.info("  session: " + self.session)

            delete_resource.delete_resource(
                self.username, self.password,
                str_utils.get_server_name(self.server), self.project,
                self.subject, self.session, self.output_resource_name)

        return self.do_job_submissions(processing_stage)
    def submit_jobs(self):
        _debug("submit_jobs")

        if self.validate_parameters():

            # make sure working directories don't have the same name based on the same
            # start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + self.project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += '.' + self.subject
            working_directory_name += '.' + str(current_seconds_since_epoch)

            # make the working directory
            _inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=self.username,
                password=self.password)
            _inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=self.username,
                password=self.password,
                project=self.project,
                subject=self.subject,
                session=self.session)
            _inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(
                self.username, self.password, self.server, jsession_id)
            workflow_id = workflow_obj.create_workflow(
                xnat_session_id, self.project, self.PIPELINE_NAME, 'Queued')
            _inform("workflow_id: " + workflow_id)

            # Determine the output resource name
            output_resource_name = self.archive.DEDRIFT_AND_RESAMPLE_RESOURCE_NAME
            _inform("output_resource_name: " + output_resource_name)

            # Clean the output resource if requested
            if self.clean_output_resource_first:
                _inform("Deleting resouce: " + output_resource_name + " for:")
                _inform("  project: " + self.project)
                _inform("  subject: " + self.subject)
                _inform("  session: " + self.session)

                delete_resource.delete_resource(
                    self.username, self.password, str_utils.get_server_name(self.server),
                    self.project, self.subject, self.session, output_resource_name, True)

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + self.subject
            script_file_start_name += '.' + self.PIPELINE_NAME
            script_file_start_name += '.' + self.project
            script_file_start_name += '.' + self.session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + '.XNAT_PBS_job.sh'
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, 'w')

            nodes_spec = 'nodes=1:ppn=1'
            walltime_spec = 'walltime=' + str(self.walltime_limit_hours) + ':00:00'
            vmem_spec = 'vmem=' + str(self.vmem_limit_gbs) + 'gb'
            mem_spec = 'mem=' + str(self.mem_limit_gbs) + 'gb'

            work_script.write('#PBS -l ' + nodes_spec + ',' + walltime_spec + ',' + vmem_spec + ',' + mem_spec + os.linesep)
            # work_script.write('#PBS -q HCPput' + os.linesep)
            work_script.write('#PBS -o ' + working_directory_name + os.linesep)
            work_script.write('#PBS -e ' + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(self.xnat_pbs_jobs_home + os.sep + '7T' + os.sep + 'DeDriftAndResampleHCP7T' + os.sep +
                              'DeDriftAndResampleHCP7T.XNAT.sh \\' + os.linesep)
            work_script.write('  --user="******" \\' + os.linesep)
            work_script.write('  --password="******" \\' + os.linesep)
            work_script.write('  --server="' + str_utils.get_server_name(self.server) + '" \\' + os.linesep)
            work_script.write('  --project="' + self.project + '" \\' + os.linesep)
            work_script.write('  --subject="' + self.subject + '" \\' + os.linesep)
            work_script.write('  --session="' + self.session + '" \\' + os.linesep)
            work_script.write('  --structural-reference-project="' +
                              self.structural_reference_project + '" \\' + os.linesep)
            work_script.write('  --structural-reference-session="' +
                              self.structural_reference_session + '" \\' + os.linesep)
            work_script.write('  --working-dir="' + working_directory_name + '" \\' + os.linesep)
            work_script.write('  --workflow-id="' + workflow_id + '" \\' + os.linesep)

            # work_script.write('  --keep-all' + ' \\' + os.linesep)
            # work_script.write('  --prevent-push' + ' \\' + os.linesep)

            work_script.write('  --setup-script=' + self.setup_script + os.linesep)
            
            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name,
                                   self.username, self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   working_directory_name, output_resource_name,
                                   self.PIPELINE_NAME)

            # Submit the job to do the work
            work_submit_cmd = 'qsub ' + work_script_name
            _inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(completed_work_submit_process.stdout)
            _inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + put_script_name
            _inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(completed_put_submit_process.stdout)
            _inform("put_job_no: " + put_job_no)

        else:
            _inform("Unable to submit jobs")
    def submit_jobs(self):
        _debug("submit_jobs")

        if self.validate_parameters():

            _inform("")
            _inform("--------------------------------------------------")
            _inform("Submitting " + self.PIPELINE_NAME + " jobs for")
            _inform("  Project: " + self.project)
            _inform("  Subject: " + self.subject)
            _inform("  Session: " + self.session)
            _inform("--------------------------------------------------")

            # make sure working directories don't have the same name based on
            # the same start time by sleeping a few seconds
            time.sleep(5)
            current_seconds_since_epoch = int(time.time())

            # build the working directory name
            self._working_directory_name = self.build_home
            self._working_directory_name += os.sep + self.project
            self._working_directory_name += os.sep + self.PIPELINE_NAME
            self._working_directory_name += '.' + self.subject
            self._working_directory_name += '.' + str(
                current_seconds_since_epoch)

            # make the working directory
            _inform("making working directory: " +
                    self._working_directory_name)
            os.makedirs(name=self._working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=self.username,
                password=self.password)
            _inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=self.username,
                password=self.password,
                project=self.project,
                subject=self.subject,
                session=self.session)
            _inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(
                self.username, self.password,
                os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                jsession_id)
            self._workflow_id = workflow_obj.create_workflow(
                xnat_session_id, self.project, self.PIPELINE_NAME, 'Queued')
            _inform("workflow_id: " + self._workflow_id)

            # determine output resource name
            self._output_resource_name = 'Diffusion_preproc'

            # clean the output resource if requested
            if self.clean_output_resource_first:
                _inform("Deleting resource: " + self._output_resource_name +
                        " for:")
                _inform("  project: " + self.project)
                _inform("  subject: " + self.subject)
                _inform("  session: " + self.session)

                delete_resource.delete_resource(
                    self.username, self.password,
                    str_utils.get_server_name(self.server), self.project,
                    self.subject, self.session, self._output_resource_name)

            # create script to do the PreEddy work
            self._create_pre_eddy_script()

            # create script to do the Eddy work
            self._create_eddy_script()

            # create script to do the PostEddy work
            self._create_post_eddy_script()

            # create script to put the results into the DB
            put_script_name = self._get_scripts_start_name() + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name, self.username,
                                   self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   self._working_directory_name,
                                   self._output_resource_name,
                                   self.PIPELINE_NAME)

            # Submit the job to do the Pre-Eddy work
            pre_eddy_submit_cmd = 'qsub ' + self._pre_eddy_script_name
            _inform("pre_eddy_submit_cmd: " + pre_eddy_submit_cmd)

            completed_pre_eddy_submit_process = subprocess.run(
                pre_eddy_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            pre_eddy_job_no = str_utils.remove_ending_new_lines(
                completed_pre_eddy_submit_process.stdout)
            _inform("pre_eddy_job_no: " + pre_eddy_job_no)

            # Submit the job to do the Eddy work
            eddy_submit_cmd = 'qsub -W depend=afterok:' + pre_eddy_job_no + ' ' + self._eddy_script_name
            _inform("eddy_submit_cmd: " + eddy_submit_cmd)

            completed_eddy_submit_process = subprocess.run(
                eddy_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            eddy_job_no = str_utils.remove_ending_new_lines(
                completed_eddy_submit_process.stdout)
            _inform("eddy_job_no: " + eddy_job_no)

            # Submit the job to do the Post-Eddy work
            post_eddy_submit_cmd = 'qsub -W depend=afterok:' + eddy_job_no + ' ' + self._post_eddy_script_name
            _inform("post_eddy_submit_cmd: " + post_eddy_submit_cmd)

            completed_post_eddy_submit_process = subprocess.run(
                post_eddy_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            post_eddy_job_no = str_utils.remove_ending_new_lines(
                completed_post_eddy_submit_process.stdout)
            _inform("post_eddy_job_no: " + post_eddy_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + post_eddy_job_no + ' ' + put_script_name
            _inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(
                completed_put_submit_process.stdout)
            _inform("put_job_no: " + put_job_no)

        else:
            _inform("Unable to submit jobs")
    def create_process_data_job_script(self):
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_PROCESS script to the working directory
        processing_script_source_path = self.xnat_pbs_jobs_home
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += '.XNAT_PROCESS'

        processing_script_dest_path = self.working_directory_name
        processing_script_dest_path += os.sep + self.PIPELINE_NAME
        processing_script_dest_path += '.XNAT_PROCESS'

        shutil.copy(processing_script_source_path, processing_script_dest_path)
        os.chmod(processing_script_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the process data job script (that calls the .XNAT_PROCESS script)

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        xnat_pbs_setup_line = 'source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name()

        script_line = processing_script_dest_path
        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        session_classifier_line = '  --session-classifier=' + self.classifier

        if self._has_spin_echo_field_maps(subject_info):
            fieldmap_type_line = '  --fieldmap-type=' + 'SpinEcho'
        else:
            fieldmap_type_line = '  --fieldmap-type=' + 'SiemensGradientEcho'

        first_t1w_directory_name_line = '  --first-t1w-directory-name=' + self._get_first_t1w_directory_name(
            subject_info)
        first_t1w_resource_name_line = '  --first-t1w-resource-name=' + self._get_first_t1w_resource_name(
            subject_info)
        first_t1w_file_name_line = '  --first-t1w-file-name=' + self._get_first_t1w_file_name(
            subject_info)

        first_t2w_directory_name_line = '  --first-t2w-directory-name=' + self._get_first_t2w_directory_name(
            subject_info)
        first_t2w_resource_name_line = '  --first-t2w-resource-name=' + self._get_first_t2w_resource_name(
            subject_info)
        first_t2w_file_name_line = '  --first-t2w-file-name=' + self._get_first_t2w_file_name(
            subject_info)

        brain_size_line = '  --brainsize=' + str(self.brain_size)

        t1template_line = '  --t1template=' + self.T1W_TEMPLATE_NAME
        t1templatebrain_line = '  --t1templatebrain=' + self.T1W_TEMPLATE_BRAIN_NAME
        t1template2mm_line = '  --t1template2mm=' + self.T1W_TEMPLATE_2MM_NAME
        t2template_line = '  --t2template=' + self.T2W_TEMPLATE_NAME
        t2templatebrain_line = '  --t2templatebrain=' + self.T2W_TEMPLATE_BRAIN_NAME
        t2template2mm_line = '  --t2template2mm=' + self.T2W_TEMPLATE_2MM_NAME
        templatemask_line = '  --templatemask=' + self.TEMPLATE_MASK_NAME
        template2mmmask_line = '  --template2mmmask=' + self.TEMPLATE_2MM_MASK_NAME

        fnirtconfig_line = '  --fnirtconfig=' + self.FNIRT_CONFIG_FILE_NAME

        if subject_info.project in OneSubjectJobSubmitter._CONNECTOME_SKYRA_SCANNER_PROJECTS:
            gdcoeffs_line = '  --gdcoeffs=' + self.CONNECTOME_GDCOEFFS_FILE_NAME
        elif subject_info.project in OneSubjectJobSubmitter._PRISMA_3T_PROJECTS:
            gdcoeffs_line = '  --gdcoeffs=' + self.PRISMA_3T_GDCOEFFS_FILE_NAME
        else:
            raise ValueError(
                "Unrecognized project for setting gradient distortion coefficients file: "
                + subject_info.project)

        topupconfig_line = '  --topupconfig=' + self.TOPUP_CONFIG_FILE_NAME

        if self._has_spin_echo_field_maps(subject_info):
            se_phase_pos_line = '  --se-phase-pos=' + self._get_positive_spin_echo_file_name(
                subject_info)
            se_phase_neg_line = '  --se-phase-neg=' + self._get_negative_spin_echo_file_name(
                subject_info)
            # mag_line = None
            # phase_line = None
        else:
            se_phase_pos_line = None
            se_phase_neg_line = None
            # mag_line   = '  --fmapmag=' + self._get_fmap_mag_file_name(subject_info)
            # phase_line = '  --fmapphase=' + self._get_fmap_phase_file_name(subject_info)

        wdir_line = '  --working-dir=' + self.working_directory_name
        setup_line = '  --setup-script=' + self.setup_file_name

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_line + os.linesep)
            script.write(os.linesep)
            script.write(script_line + ' \\' + os.linesep)
            script.write(user_line + ' \\' + os.linesep)
            script.write(password_line + ' \\' + os.linesep)
            script.write(server_line + ' \\' + os.linesep)
            script.write(project_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(session_line + ' \\' + os.linesep)
            script.write(session_classifier_line + ' \\' + os.linesep)
            script.write(fieldmap_type_line + ' \\' + os.linesep)
            script.write(first_t1w_directory_name_line + ' \\' + os.linesep)
            script.write(first_t1w_resource_name_line + ' \\' + os.linesep)
            script.write(first_t1w_file_name_line + ' \\' + os.linesep)
            script.write(first_t2w_directory_name_line + ' \\' + os.linesep)
            script.write(first_t2w_resource_name_line + ' \\' + os.linesep)
            script.write(first_t2w_file_name_line + ' \\' + os.linesep)
            script.write(brain_size_line + ' \\' + os.linesep)
            script.write(t1template_line + ' \\' + os.linesep)
            script.write(t1templatebrain_line + ' \\' + os.linesep)
            script.write(t1template2mm_line + ' \\' + os.linesep)
            script.write(t2template_line + ' \\' + os.linesep)
            script.write(t2templatebrain_line + ' \\' + os.linesep)
            script.write(t2template2mm_line + ' \\' + os.linesep)
            script.write(templatemask_line + ' \\' + os.linesep)
            script.write(template2mmmask_line + ' \\' + os.linesep)
            script.write(fnirtconfig_line + ' \\' + os.linesep)
            script.write(gdcoeffs_line + ' \\' + os.linesep)
            script.write(topupconfig_line + ' \\' + os.linesep)

            if se_phase_pos_line:
                script.write(se_phase_pos_line + ' \\' + os.linesep)
            if se_phase_neg_line:
                script.write(se_phase_neg_line + ' \\' + os.linesep)
            # if (mag_line): script.write(mag_line + ' \\' + os.linesep)
            # if (phase_line): script.write(phase_line + ' \\' + os.linesep)

            script.write(wdir_line + ' \\' + os.linesep)
            script.write(setup_line + os.linesep)

            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
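
A note on the generated file: the write calls above lay the job script out as PBS directives, a blank line, then the pipeline command with backslash-continued option lines. Below is a minimal, self-contained sketch of that layout; the paths and option names in it are hypothetical.

import os

def write_pbs_script_sketch(path, command, options):
    # Hypothetical, minimal version of the pattern above: PBS directives
    # first, a blank line, then the command with one '  --name=value'
    # continuation line per option (only the last line lacks a backslash).
    with open(path, 'w') as script:
        script.write('#PBS -l nodes=1:ppn=1,walltime=24:00:00' + os.linesep)
        script.write('#PBS -o /tmp/example_working_dir' + os.linesep)
        script.write('#PBS -e /tmp/example_working_dir' + os.linesep)
        script.write(os.linesep)
        lines = [command] + ['  --' + name + '=' + value
                             for name, value in options]
        script.write((' \\' + os.linesep).join(lines) + os.linesep)

# Hypothetical usage:
# write_pbs_script_sketch('/tmp/job.sh', '/tmp/Pipeline.XNAT_PROCESS',
#                         [('subject', '100307'), ('session', '100307_7T')])
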
    def create_process_data_job_script(self):
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT script to the working directory
        processing_script_source_name = self.xnat_pbs_jobs_home
        processing_script_source_name += os.sep + '7T'
        processing_script_source_name += os.sep + self.PIPELINE_NAME
        processing_script_source_name += os.sep + self.PIPELINE_NAME
        processing_script_source_name += '.XNAT_PROCESS'

        processing_script_dest_name = self.working_directory_name
        processing_script_dest_name += os.sep + self.PIPELINE_NAME
        processing_script_dest_name += '.XNAT_PROCESS'

        shutil.copy(processing_script_source_name, processing_script_dest_name)
        os.chmod(processing_script_dest_name, stat.S_IRWXU | stat.S_IRWXG)

        # write the process data job script (that calls the .XNAT script)

        subject_info = hcp7t_subject.Hcp7TSubjectInfo(project=self.project,
                                                      subject_id=self.subject)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        mem_limit_str = str(self.mem_limit_gbs) + 'gb'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + mem_limit_str
        resources_line += ',vmem=' + vmem_limit_str

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        script_line = processing_script_dest_name
        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.subject + '_7T'

        avail_retinotopy_task_names = self.archive.available_retinotopy_preproc_names(
            subject_info)

        # sort available retinotopy task names into the order the
        # tasks were presented to the subject
        avail_retinotopy_task_names = sorted(
            avail_retinotopy_task_names, key=retinotopy_presentation_order_key)

        # add the tesla spec to each element of the group
        group_names = list(map(add_tesla_spec, avail_retinotopy_task_names))

        group_spec = '@'.join(group_names)
        group_line = '  --group=' + group_spec

        concat_spec = '_'.join(
            list(map(remove_scan_type, avail_retinotopy_task_names)))
        concat_line = '  --concat-name=tfMRI_7T_' + concat_spec

        wdir_line = '  --working-dir=' + self.working_directory_name
        setup_line = '  --setup-script=' + self.setup_file_name

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)

            script.write(script_line + ' \\' + os.linesep)
            script.write(user_line + ' \\' + os.linesep)
            script.write(password_line + ' \\' + os.linesep)
            script.write(server_line + ' \\' + os.linesep)
            script.write(project_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(session_line + ' \\' + os.linesep)
            script.write(group_line + ' \\' + os.linesep)
            script.write(concat_line + ' \\' + os.linesep)
            script.write(wdir_line + ' \\' + os.linesep)
            script.write(setup_line + os.linesep)
            script.write(os.linesep)

            os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
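
The --group and --concat-name values built above depend on helpers defined elsewhere in this package (retinotopy_presentation_order_key, add_tesla_spec, remove_scan_type). The sketch below reproduces only the joining logic, with guessed stand-ins for those helpers, to make the '@'-joined group spec and '_'-joined concat name concrete.

# Stand-ins for the real helpers; names and scan-name format are assumptions.
PRESENTATION_ORDER = ['RETCCW', 'RETCW', 'RETEXP', 'RETCON', 'RETBAR1', 'RETBAR2']

def presentation_order_key(scan_name):
    # Rank a scan by where its task appears in the presentation order.
    for index, task in enumerate(PRESENTATION_ORDER):
        if task in scan_name:
            return index
    return len(PRESENTATION_ORDER)

def add_tesla_spec(scan_name):
    # e.g. 'tfMRI_RETCCW_AP' -> 'tfMRI_RETCCW_7T_AP' (assumed format)
    prefix, task, pe_dir = scan_name.split('_')
    return '_'.join([prefix, task, '7T', pe_dir])

def remove_scan_type(scan_name):
    # e.g. 'tfMRI_RETCCW_AP' -> 'RETCCW_AP'
    return scan_name.split('_', 1)[1]

scans = sorted(['tfMRI_RETBAR1_AP', 'tfMRI_RETCCW_AP', 'tfMRI_RETCW_PA'],
               key=presentation_order_key)
group_spec = '@'.join(map(add_tesla_spec, scans))
concat_name = 'tfMRI_7T_' + '_'.join(map(remove_scan_type, scans))
print(group_spec)   # tfMRI_RETCCW_7T_AP@tfMRI_RETCW_7T_PA@tfMRI_RETBAR1_7T_AP
print(concat_name)  # tfMRI_7T_RETCCW_AP_RETCW_PA_RETBAR1_AP
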
    def submit_jobs(
            self,
            processing_stage=one_subject_job_submitter.ProcessingStage.PUT_DATA
    ):
        module_logger.debug(debug_utils.get_name() + ": processing_stage: " +
                            str(processing_stage))

        module_logger.info("-----")
        module_logger.info("Submitting " + self.PIPELINE_NAME + " jobs for")
        module_logger.info("  Project: " + self.project)
        module_logger.info("  Subject: " + self.subject)
        module_logger.info("  Session: " + self.session)
        module_logger.info("    Stage: " + str(processing_stage))

        # make sure working directories do not have the same name based on
        # the same start time by sleeping a few seconds
        time.sleep(5)

        # build the working directory name
        os.makedirs(name=self.working_directory_name)

        # determine output resource name
        self._output_resource_name = self.output_resource_suffix
        module_logger.info("Output Resource Name: " +
                           self._output_resource_name)

        # clean output resource if requested
        if self.clean_output_resource_first:
            module_logger.info("Deleting resource: " +
                               self._output_resource_name + " for:")
            module_logger.info("  project: " + self.project)
            module_logger.info("  subject: " + self.subject)
            module_logger.info("  session: " + self.session)

            delete_resource.delete_resource(
                self.username, self.password,
                str_utils.get_server_name(self.server), self.project,
                self.subject, self.session, self._output_resource_name)

        # build list of groups
        self._group_list = []
        for group in self.groups:
            self._group_list.append(self._expand(group))

        module_logger.debug("self._group_list: " + str(self._group_list))

        # build list of concat names
        self._concat_name_list = []
        for group in self._group_list:
            self._concat_name_list.append(self._concat(group))

        # create scripts for various stages of processing
        if processing_stage >= one_subject_job_submitter.ProcessingStage.PREPARE_SCRIPTS:
            self.create_get_data_job_script()
            self.create_work_script()
            self.create_clean_data_script()
            self.create_put_data_script()

        # Submit the job to get the data
        if processing_stage >= one_subject_job_submitter.ProcessingStage.GET_DATA:
            get_data_job_no = self.submit_get_data_job()
            module_logger.info("get_data_job_no: " + str(get_data_job_no))
        else:
            module_logger.info("Get data job not submitted")

        # Submit the job to process the data (do the work)
        if processing_stage >= one_subject_job_submitter.ProcessingStage.PROCESS_DATA:
            work_job_no = self.submit_process_data_job(get_data_job_no)
            module_logger.info("work_job_no: " + str(work_job_no))
        else:
            module_logger.info("Process data job not submitted")

        # Submit job to clean the data
        if processing_stage >= one_subject_job_submitter.ProcessingStage.CLEAN_DATA:
            clean_job_no = self.submit_clean_data_job(work_job_no)
            module_logger.info("clean_job_no: " + str(clean_job_no))
        else:
            module_logger.info("Clean data job not submitted")

        # Submit job to put the resulting data in the DB
        if processing_stage >= one_subject_job_submitter.ProcessingStage.PUT_DATA:
            put_job_no = self.submit_put_data_job(clean_job_no)
            module_logger.info("put_job_no: " + str(put_job_no))
        else:
            module_logger.info("Put data job not submitted")
Example n. 24
    def submit_jobs(self, processing_stage=ProcessingStage.PUT_DATA):
        """
        processing_stage is the last processing stage for which to submit
        the corresponding job.
        GET_DATA means just get the data.
        PROCESS_DATA means get the data and do the processing.
        PUT_DATA means get the data, process it, and put the results
        back in the DB.
        """
        logger.debug("submit_jobs processing_stage: " + str(processing_stage))

        if self.validate_parameters():

            # determine what scans to run the RestingStateStats pipeline on for this subject
            # TBD: Does this get run on every scan for which the ICAFIX pipeline has been run,
            #      or does it only get run on every resting state scan that has been FIX processed?

            subject_info = hcp7t_subject.Hcp7TSubjectInfo(
                self.project, self.structural_reference_project, self.subject)

            fix_processed_scans = self.archive.available_FIX_processed_names(
                subject_info)
            fix_processed_scans_set = set(fix_processed_scans)
            logger.debug("fix_processed_scans_set = " +
                         str(fix_processed_scans_set))

            # resting_state_scans = self.archive.available_resting_state_preproc_names(subject_info)
            # resting_state_scans_set = set(resting_state_scans)
            # logger.debug("resting_state_scans_set = " + str(resting_state_scans_set))

            # scans_to_process_set = resting_state_scans_set & fix_processed_scans_set
            scans_to_process_set = fix_processed_scans_set
            scans_to_process = list(scans_to_process_set)
            scans_to_process.sort()
            logger.debug("scans_to_process: " + str(scans_to_process))

            incomplete_scans_to_process = list()
            for scan in scans_to_process:
                if not is_complete(self.archive, subject_info, scan):
                    incomplete_scans_to_process.append(scan)

            logger.debug("incomplete_scans_to_process: " +
                         str(incomplete_scans_to_process))
            print("incomplete_scans_to_process:", incomplete_scans_to_process)

            # for scan in scans_to_process:
            for scan in incomplete_scans_to_process:

                logger.info("")
                logger.info(
                    "--------------------------------------------------")
                logger.info("Submitting " + self.PIPELINE_NAME + " jobs for")
                logger.info("  Project: " + self.project)
                logger.info("  Subject: " + self.subject)
                logger.info("  Session: " + self.session)
                logger.info("  Structural Reference Project: " +
                            self.structural_reference_project)
                logger.info("  Structural Reference Session: " +
                            self.structural_reference_session)
                logger.info("     Scan: " + scan)
                logger.info("    Stage: " + str(processing_stage))
                logger.info(
                    "--------------------------------------------------")

                # make sure working directories do not have the same name based on
                # the same start time by sleeping a few seconds
                time.sleep(5)

                # build the working directory name
                self._working_directory_name = \
                    self.build_working_directory_name(self.project, self.PIPELINE_NAME, self.subject, scan)
                logger.info("Making working directory: " +
                            self._working_directory_name)
                os.makedirs(name=self._working_directory_name)

                # get JSESSION ID
                jsession_id = xnat_access.get_jsession_id(
                    server=os_utils.getenv_required(
                        'XNAT_PBS_JOBS_XNAT_SERVER'),
                    username=self.username,
                    password=self.password)
                logger.info("jsession_id: " + jsession_id)

                # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
                xnat_session_id = xnat_access.get_session_id(
                    server=os_utils.getenv_required(
                        'XNAT_PBS_JOBS_XNAT_SERVER'),
                    username=self.username,
                    password=self.password,
                    project=self.project,
                    subject=self.subject,
                    session=self.session)
                logger.info("xnat_session_id: " + xnat_session_id)

                # get XNAT Workflow ID
                workflow_obj = xnat_access.Workflow(
                    self.username, self.password, 'https://' +
                    os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                    jsession_id)
                self._workflow_id = workflow_obj.create_workflow(
                    xnat_session_id, self.project,
                    self.PIPELINE_NAME + '_' + scan, 'Queued')
                logger.info("workflow_id: " + self._workflow_id)

                # determine output resource name
                self._output_resource_name = scan + "_RSS"

                # clean output resource if requested
                if self.clean_output_resource_first:
                    logger.info("Deleting resource: " +
                                self._output_resource_name + " for:")
                    logger.info("  project: " + self.project)
                    logger.info("  subject: " + self.subject)
                    logger.info("  session: " + self.session)

                    delete_resource.delete_resource(
                        self.username, self.password,
                        str_utils.get_server_name(self.server), self.project,
                        self.subject, self.session, self._output_resource_name)

                # create scripts for various stages of processing
                if processing_stage >= ProcessingStage.PREPARE_SCRIPTS:
                    # create script to get data
                    self._create_get_data_script(scan)

                    # create script to do work
                    self._create_work_script(scan)

                    # create script to clean data
                    self._create_clean_data_script(scan)

                    # create script to put the results into the DB
                    put_script_name = self._put_data_script_name(scan)
                    self.create_put_script(put_script_name, self.username,
                                           self.password, self.put_server,
                                           self.project, self.subject,
                                           self.session,
                                           self._working_directory_name,
                                           self._output_resource_name,
                                           self.PIPELINE_NAME + '_' + scan)

                # submit job to get the data
                if processing_stage >= ProcessingStage.GET_DATA:

                    get_data_submit_cmd = 'qsub ' + self._get_data_script_name(
                        scan)
                    logger.info("get_data_submit_cmd: " + get_data_submit_cmd)

                    completed_get_data_submit_process = subprocess.run(
                        get_data_submit_cmd,
                        shell=True,
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True)
                    get_data_job_no = str_utils.remove_ending_new_lines(
                        completed_get_data_submit_process.stdout)
                    logger.info("get_data_job_no: " + get_data_job_no)

                else:
                    logger.info("Get data job not submitted")

                # submit job to process the data
                if processing_stage >= ProcessingStage.PROCESS_DATA:

                    work_submit_cmd = 'qsub -W depend=afterok:' + get_data_job_no + ' ' + self._work_script_name(
                        scan)
                    logger.info("work_submit_cmd: " + work_submit_cmd)

                    completed_work_submit_process = subprocess.run(
                        work_submit_cmd,
                        shell=True,
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True)
                    work_job_no = str_utils.remove_ending_new_lines(
                        completed_work_submit_process.stdout)
                    logger.info("work_job_no: " + work_job_no)

                else:
                    logger.info("Process data job not submitted")

                # submit job to clean the data
                if processing_stage >= ProcessingStage.CLEAN_DATA:

                    clean_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + self._clean_data_script_name(
                        scan)
                    logger.info("clean_submit_cmd: " + clean_submit_cmd)

                    completed_clean_submit_process = subprocess.run(
                        clean_submit_cmd,
                        shell=True,
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True)
                    clean_job_no = str_utils.remove_ending_new_lines(
                        completed_clean_submit_process.stdout)
                    logger.info("clean_job_no: " + clean_job_no)

                else:
                    logger.info("Clean data job not submitted")

                # submit job to put the resulting data in the DB
                if processing_stage >= ProcessingStage.PUT_DATA:

                    put_submit_cmd = 'qsub -W depend=afterok:' + clean_job_no + ' ' + put_script_name
                    logger.info("put_submit_cmd: " + put_submit_cmd)

                    completed_put_submit_process = subprocess.run(
                        put_submit_cmd,
                        shell=True,
                        check=True,
                        stdout=subprocess.PIPE,
                        universal_newlines=True)
                    put_job_no = str_utils.remove_ending_new_lines(
                        completed_put_submit_process.stdout)
                    logger.info("put_job_no: " + put_job_no)

                else:
                    logger.info("Put data job not submitted")

        else:
            logger.info("Unable to submit jobs")
Example n. 25
    def submit_jobs(
            self,
            username,
            password,
            server,
            project,
            subject,
            session,
            structural_reference_project,
            structural_reference_session,
            put_server,
            clean_output_resource_first,
            setup_script,
            incomplete_only,
            scan,
            walltime_limit_hours,
            mem_limit_gbs,  # UNUSED
            vmem_limit_gbs):

        subject_info = hcp7t_subject.Hcp7TSubjectInfo(
            project, structural_reference_project, subject)

        # determine names of preprocessed resting state scans that are
        # available for the subject
        preproc_resting_state_scan_names = self.archive.available_resting_state_preproc_names(
            subject_info)
        inform("Preprocessed resting state scans available for subject: " +
               str(preproc_resting_state_scan_names))

        # determine names of the preprocessed MOVIE task scans that are available for the subject
        preproc_movie_scan_names = self.archive.available_movie_preproc_names(
            subject_info)
        inform("Preprocessed movie scans available for subject " +
               str(preproc_movie_scan_names))

        # determine names of the FIX processed scans that are available for the subject
        fix_processed_scan_names = self.archive.available_FIX_processed_names(
            subject_info)
        inform("FIX processed scans available for subject " +
               str(fix_processed_scan_names))

        # build list of scans to process
        scan_list = []
        if scan is None:
            scan_list = fix_processed_scan_names
        else:
            scan_list.append(scan)

        # process specified scans
        for scan_name in scan_list:
            if incomplete_only:
                completion_checker = PostFixHCP7T_OneSubjectCompletionChecker.PostFixHCP7T_OneSubjectCompletionChecker(
                )
                if completion_checker.is_processing_complete(
                        self.archive, subject_info, scan_name):
                    inform("scan: " + scan_name +
                           " has already completed PostFixHCP7T processing")
                    inform(
                        "Only submitting jobs for incomplete scans - skipping "
                        + scan_name)
                    continue

            inform("scan_name: " + scan_name)
            long_scan_name = self.archive.functional_scan_long_name(scan_name)
            output_resource_name = self.archive.PostFix_processed_resource_name(
                scan_name)

            inform("")
            inform("-------------------------------------------------")
            inform("Submitting jobs for scan: " + long_scan_name)
            inform("Output resource name: " + output_resource_name)
            inform("-------------------------------------------------")
            inform("")

            # make sure working directories don't have the same name based on the
            # same start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += '.' + subject
            working_directory_name += '.' + long_scan_name
            working_directory_name += '.' + str(current_seconds_since_epoch)

            # make the working directory
            inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=username,
                password=password)
            inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
                username=username,
                password=password,
                project=project,
                subject=subject,
                session=session)

            inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(username, password, server,
                                                jsession_id)
            workflow_id = workflow_obj.create_workflow(xnat_session_id,
                                                       project,
                                                       self.PIPELINE_NAME,
                                                       'Queued')
            inform("workflow_id: " + workflow_id)

            # Clean the output resource if requested
            if clean_output_resource_first:
                inform("Deleting resource: " + output_resource_name + " for:")
                inform("  project: " + project)
                inform("  subject: " + subject)
                inform("  session: " + session)

                delete_resource.delete_resource(
                    username, password, str_utils.get_server_name(server),
                    project, subject, session, output_resource_name)

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + subject
            script_file_start_name += '.' + long_scan_name
            script_file_start_name += '.' + self.PIPELINE_NAME
            script_file_start_name += '.' + project
            script_file_start_name += '.' + session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + '.XNAT_PBS_job.sh'
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, 'w')

            nodes_spec = 'nodes=1:ppn=1'
            walltime_spec = 'walltime=' + str(walltime_limit_hours) + ':00:00'
            vmem_spec = 'vmem=' + str(vmem_limit_gbs) + 'gb'

            work_script.write('#PBS -l ' + nodes_spec + ',' + walltime_spec +
                              ',' + vmem_spec + os.linesep)
            work_script.write('#PBS -o ' + working_directory_name + os.linesep)
            work_script.write('#PBS -e ' + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(self.xnat_pbs_jobs_home + os.sep + '7T' +
                              os.sep + 'PostFixHCP7T' + os.sep +
                              'PostFixHCP7T.XNAT.sh \\' + os.linesep)
            work_script.write('  --user="' + username + '" \\' + os.linesep)
            work_script.write('  --password="' + password + '" \\' +
                              os.linesep)
            work_script.write('  --server="' +
                              str_utils.get_server_name(server) + '" \\' +
                              os.linesep)
            work_script.write('  --project="' + project + '" \\' + os.linesep)
            work_script.write('  --subject="' + subject + '" \\' + os.linesep)
            work_script.write('  --session="' + session + '" \\' + os.linesep)
            work_script.write('  --scan="' + long_scan_name + '" \\' +
                              os.linesep)
            work_script.write('  --working-dir="' + working_directory_name +
                              '" \\' + os.linesep)
            work_script.write('  --workflow-id="' + workflow_id + '" \\' +
                              os.linesep)
            work_script.write('  --setup-script=' + setup_script + os.linesep)

            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name, username, password,
                                   put_server, project, subject, session,
                                   working_directory_name,
                                   output_resource_name,
                                   scan_name + '_' + self.PIPELINE_NAME)

            # Submit the job to do the work
            work_submit_cmd = 'qsub ' + work_script_name
            inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(
                completed_work_submit_process.stdout)
            inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + put_script_name
            inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(
                completed_put_submit_process.stdout)
            inform("put_job_no: " + put_job_no)
    def submit_jobs(self):
        _debug("submit_jobs")

        if self.validate_parameters():
            # subject_info = hcp7t_subject.Hcp7TSubjectInfo(self.project,
            #                                               self.structural_reference_project,
            #                                               self.subject)

            # make sure working directories don't have the same name based on the same
            # start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + self.project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += '.' + self.subject
            working_directory_name += '.' + str(current_seconds_since_epoch)

            # make the working directory
            _inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server='db.humanconnectome.org',
                username=self.username,
                password=self.password)
            _inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server='db.humanconnectome.org',
                username=self.username,
                password=self.password,
                project=self.project,
                subject=self.subject,
                session=self.session)
            _inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(
                self.username, self.password, self.server, jsession_id)
            workflow_id = workflow_obj.create_workflow(
                xnat_session_id, self.project, self.PIPELINE_NAME, 'Queued')
            _inform("workflow_id: " + workflow_id)

            # Determine the output resource name
            output_resource_name = self.archive.DEDRIFT_AND_RESAMPLE_RESOURCE_NAME
            _inform("output_resource_name: " + output_resource_name)

            # Clean the output resource if requested
            if self.clean_output_resource_first:
                _inform("Deleting resouce: " + output_resource_name + " for:")
                _inform("  project: " + self.project)
                _inform("  subject: " + self.subject)
                _inform("  session: " + self.session)

                delete_resource.delete_resource(
                    self.username, self.password, str_utils.get_server_name(self.server),
                    self.project, self.subject, self.session, output_resource_name, True)

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + self.subject
            script_file_start_name += '.' + self.PIPELINE_NAME
            script_file_start_name += '.' + self.project
            script_file_start_name += '.' + self.session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + '.XNAT_PBS_job.sh'
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, 'w')

            nodes_spec = 'nodes=1:ppn=1'
            walltime_spec = 'walltime=' + str(self.walltime_limit_hours) + ':00:00'
            vmem_spec = 'vmem=' + str(self.vmem_limit_gbs) + 'gb'

            work_script.write('#PBS -l ' + nodes_spec + ',' + walltime_spec + ',' + vmem_spec + os.linesep)
            # work_script.write('#PBS -q HCPput' + os.linesep)
            work_script.write('#PBS -o ' + working_directory_name + os.linesep)
            work_script.write('#PBS -e ' + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(self.xnat_pbs_jobs_home + os.sep + '7T' + os.sep + 'DeDriftAndResampleHCP7T' + os.sep +
                              'DeDriftAndResampleHCP7T.XNAT.sh \\' + os.linesep)
            work_script.write('  --user="' + self.username + '" \\' + os.linesep)
            work_script.write('  --password="' + self.password + '" \\' + os.linesep)
            work_script.write('  --server="' + str_utils.get_server_name(self.server) + '" \\' + os.linesep)
            work_script.write('  --project="' + self.project + '" \\' + os.linesep)
            work_script.write('  --subject="' + self.subject + '" \\' + os.linesep)
            work_script.write('  --session="' + self.session + '" \\' + os.linesep)
            work_script.write('  --structural-reference-project="' +
                              self.structural_reference_project + '" \\' + os.linesep)
            work_script.write('  --structural-reference-session="' +
                              self.structural_reference_session + '" \\' + os.linesep)
            work_script.write('  --working-dir="' + working_directory_name + '" \\' + os.linesep)
            work_script.write('  --workflow-id="' + workflow_id + '" \\' + os.linesep)
            work_script.write('  --setup-script=' + self.setup_script + os.linesep)

            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name,
                                   self.username, self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   working_directory_name, output_resource_name,
                                   self.PIPELINE_NAME)

            # Submit the job to do the work
            work_submit_cmd = 'qsub ' + work_script_name
            _inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(completed_work_submit_process.stdout)
            _inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + put_script_name
            _inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(completed_put_submit_process.stdout)
            _inform("put_job_no: " + put_job_no)

        else:
            _inform("Unable to submit jobs")
Example n. 27
    def submit_jobs(self, processing_stage=ProcessingStage.PUT_DATA):
        logger.debug(debug_utils.get_name() + ": processing_stage: " +
                     str(processing_stage))

        logger.info("-----")
        logger.info("Submitting " + self.PIPELINE_NAME + " jobs for")
        logger.info("  Project: " + self.project)
        logger.info("  Subject: " + self.subject)
        logger.info("  Session: " + self.session)
        logger.info("     Scan: " + self.scan)
        logger.info("    Stage: " + str(processing_stage))

        # make sure working directories do not have the same name based on
        # the same start time by sleeping a few seconds
        time.sleep(5)

        # build the working directory name
        self._working_directory_name = \
            self.build_working_directory_name(self.project, self.PIPELINE_NAME, self.subject, self.scan)
        logger.info("Making working directory: " +
                    self._working_directory_name)
        os.makedirs(name=self._working_directory_name)

        # determine output resource name
        self._output_resource_name = self.scan + "_" + self.output_resource_suffix

        # clean output resource if requested
        if self.clean_output_resource_first:
            logger.info("Deleting resource: " + self._output_resource_name +
                        " for:")
            logger.info("  project: " + self.project)
            logger.info("  subject: " + self.subject)
            logger.info("  session: " + self.session)

            delete_resource.delete_resource(
                self.username, self.password,
                str_utils.get_server_name(self.server), self.project,
                self.subject, self.session, self._output_resource_name)

        # create scripts for various stages of processing
        if processing_stage >= ProcessingStage.PREPARE_SCRIPTS:
            self._create_get_data_script()
            self._create_work_script()
            self._create_clean_data_script()

            put_script_name = self._put_data_script_name()
            self.create_put_script(put_script_name,
                                   self.username,
                                   self.password,
                                   self.put_server,
                                   self.project,
                                   self.subject,
                                   self.session,
                                   self._working_directory_name,
                                   self._output_resource_name,
                                   self.PIPELINE_NAME,
                                   leave_subject_id_level=True)

        # Submit the job to get the data
        if processing_stage >= ProcessingStage.GET_DATA:

            get_data_submit_cmd = 'qsub ' + self._get_data_script_name()
            logger.info("get_data_submit_cmd: " + get_data_submit_cmd)

            completed_get_data_submit_process = subprocess.run(
                get_data_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            get_data_job_no = str_utils.remove_ending_new_lines(
                completed_get_data_submit_process.stdout)
            logger.info("get_data_job_no: " + get_data_job_no)

        else:
            logger.info("Get data job not submitted")

        # Submit the job to process the data (do the work)
        if processing_stage >= ProcessingStage.PROCESS_DATA:

            work_submit_cmd = 'qsub -W depend=afterok:' + get_data_job_no + ' ' + self._work_script_name(
            )
            logger.info("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(
                completed_work_submit_process.stdout)
            logger.info("work_job_no: " + work_job_no)

        else:
            logger.info("Process data job not submitted")

        # Submit job to clean the data
        if processing_stage >= ProcessingStage.CLEAN_DATA:

            clean_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + self._clean_data_script_name(
            )
            logger.info("clean_submit_cmd: " + clean_submit_cmd)

            completed_clean_submit_process = subprocess.run(
                clean_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            clean_job_no = str_utils.remove_ending_new_lines(
                completed_clean_submit_process.stdout)
            logger.info("clean_job_no: " + clean_job_no)

        else:
            logger.info("Clean data job not submitted")

        # Submit job to put the resulting data in the DB
        if processing_stage >= ProcessingStage.PUT_DATA:

            put_submit_cmd = 'qsub -W depend=afterok:' + clean_job_no + ' ' + put_script_name
            logger.info("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(
                completed_put_submit_process.stdout)
            logger.info("put_job_no: " + put_job_no)

        else:
            logger.info("Put data job not submitted")
    def submit_jobs(self):
        _debug("submit_jobs")

        if self.validate_parameters():

            _inform("")
            _inform("--------------------------------------------------")
            _inform("Submitting " + self.PIPELINE_NAME + " jobs for")
            _inform("  Project: " + self.project)
            _inform("  Subject: " + self.subject)
            _inform("  Session: " + self.session)
            _inform("--------------------------------------------------")

            # make sure working directories don't have the same name based on
            # the same start time by sleeping a few seconds
            time.sleep(5)
            current_seconds_since_epoch = int(time.time())

            # build the working directory name
            self._working_directory_name = self.build_home
            self._working_directory_name += os.sep + self.project
            self._working_directory_name += os.sep + self.PIPELINE_NAME
            self._working_directory_name += '.' + self.subject
            self._working_directory_name += '.' + str(current_seconds_since_epoch)

            # make the working directory
            _inform("making working directory: " + self._working_directory_name)
            os.makedirs(name=self._working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server='db.humanconnectome.org',
                username=self.username,
                password=self.password)
            _inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server='db.humanconnectome.org',
                username=self.username,
                password=self.password,
                project=self.project,
                subject=self.subject,
                session=self.session)
            _inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(self.username, self.password,
                                                'https://db.humanconnectome.org', jsession_id)
            self._workflow_id = workflow_obj.create_workflow(xnat_session_id,
                                                             self.project,
                                                             self.PIPELINE_NAME,
                                                             'Queued')
            _inform("workflow_id: " + self._workflow_id)

            # determine output resource name
            self._output_resource_name = 'Diffusion_preproc'

            # clean the output resource if requested
            if self.clean_output_resource_first:
                _inform("Deleting resource: " + self._output_resource_name + " for:")
                _inform("  project: " + self.project)
                _inform("  subject: " + self.subject)
                _inform("  session: " + self.session)

                delete_resource.delete_resource(
                    self.username, self.password,
                    str_utils.get_server_name(self.server),
                    self.project, self.subject, self.session,
                    self._output_resource_name)

            # create script to do the PreEddy work
            self._create_pre_eddy_script()

            # create script to do the Eddy work
            self._create_eddy_script()

            # create script to do the PostEddy work
            self._create_post_eddy_script()

            # create script to put the results into the DB
            put_script_name = self._get_scripts_start_name() + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name,
                                   self.username, self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   self._working_directory_name, self._output_resource_name,
                                   self.PIPELINE_NAME)

            # Submit the job to do the Pre-Eddy work
            pre_eddy_submit_cmd = 'qsub ' + self._pre_eddy_script_name
            _inform("pre_eddy_submit_cmd: " + pre_eddy_submit_cmd)

            completed_pre_eddy_submit_process = subprocess.run(
                pre_eddy_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            pre_eddy_job_no = str_utils.remove_ending_new_lines(completed_pre_eddy_submit_process.stdout)
            _inform("pre_eddy_job_no: " + pre_eddy_job_no)

            # Submit the job to do the Eddy work
            eddy_submit_cmd = 'qsub -W depend=afterok:' + pre_eddy_job_no + ' ' + self._eddy_script_name
            _inform("eddy_submit_cmd: " + eddy_submit_cmd)

            completed_eddy_submit_process = subprocess.run(
                eddy_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            eddy_job_no = str_utils.remove_ending_new_lines(completed_eddy_submit_process.stdout)
            _inform("eddy_job_no: " + eddy_job_no)

            # Submit the job to do the Post-Eddy work
            post_eddy_submit_cmd = 'qsub -W depend=afterok:' + eddy_job_no + ' ' + self._post_eddy_script_name
            _inform("post_eddy_submit_cmd: " + post_eddy_submit_cmd)

            completed_post_eddy_submit_process = subprocess.run(
                post_eddy_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            post_eddy_job_no = str_utils.remove_ending_new_lines(completed_post_eddy_submit_process.stdout)
            _inform("post_eddy_job_no: " + post_eddy_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + post_eddy_job_no + ' ' + put_script_name
            _inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(completed_put_submit_process.stdout)
            _inform("put_job_no: " + put_job_no)

        else:
            _inform("Unable to submit jobs")
Example n. 29
    def submit_jobs(self, username, password, server, project, subject,
                    session, structural_reference_project,
                    structural_reference_session, put_server, setup_script,
                    incomplete_only, scan, walltime_limit_hours, mem_limit_gbs,
                    vmem_limit_gbs):

        subject_info = hcp7t_subject.Hcp7TSubjectInfo(
            project, structural_reference_project, subject)

        # determine names of the preprocessed resting state scans that are
        # available for the subject
        resting_state_scan_names = self.archive.available_resting_state_preproc_names(
            subject_info)
        inform("Preprocessed resting state scans available for subject: " +
               str(resting_state_scan_names))

        # determine names of the preprocessed MOVIE task scans that are available for the subject
        movie_scan_names = self.archive.available_movie_preproc_names(
            subject_info)
        inform("Preprocessed movie scans available for subject " +
               str(movie_scan_names))

        # build list of scans to process
        scan_list = []
        if scan is None:
            scan_list = resting_state_scan_names + movie_scan_names
        else:
            scan_list.append(scan)

        # process the specified scans
        for scan_name in scan_list:
            if incomplete_only and self.archive.FIX_processing_repaired(
                    subject_info, scan_name):
                inform("scan: " + scan_name +
                       " FIX processing is already repaired")
                inform(
                    "Only submitting jobs for incomplete scans - skipping " +
                    scan_name)
                continue

            long_scan_name = self.archive.functional_scan_long_name(scan_name)
            output_resource_name = self.archive.FIX_processed_resource_name(
                scan_name)

            inform("")
            inform("-------------------------------------------------")
            inform("Submitting jobs for scan: " + long_scan_name)
            inform("Output resource name: " + output_resource_name)
            inform("")

            # make sure working directories don't have the same name based on the
            # same start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += '.' + subject
            working_directory_name += '.' + long_scan_name
            working_directory_name += '.' + str(current_seconds_since_epoch)

            inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + subject
            script_file_start_name += '.' + long_scan_name
            script_file_start_name += '.' + self.PIPELINE_NAME
            script_file_start_name += '.' + project
            script_file_start_name += '.' + session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + '.XNAT_PBS_job.sh'
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, 'w')

            work_script.write('#PBS -l nodes=1:ppn=1,walltime=' +
                              str(walltime_limit_hours) + ':00:00,mem=' +
                              str(mem_limit_gbs) + 'gb,vmem=' +
                              str(vmem_limit_gbs) + 'gb' + os.linesep)
            work_script.write('#PBS -o ' + working_directory_name + os.linesep)
            work_script.write('#PBS -e ' + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(self.xnat_pbs_jobs_home + os.sep + '7T' +
                              os.sep + self.PIPELINE_NAME + os.sep +
                              self.PIPELINE_NAME + '.XNAT.sh \\' + os.linesep)
            work_script.write('  --user="' + username + '" \\' + os.linesep)
            work_script.write('  --password="' + password + '" \\' +
                              os.linesep)
            work_script.write('  --server="' +
                              str_utils.get_server_name(server) + '" \\' +
                              os.linesep)
            work_script.write('  --project="' + project + '" \\' + os.linesep)
            work_script.write('  --subject="' + subject + '" \\' + os.linesep)
            work_script.write('  --session="' + session + '" \\' + os.linesep)
            work_script.write('  --structural-reference-project="' +
                              structural_reference_project + '" \\' +
                              os.linesep)
            work_script.write('  --structural-reference-session="' +
                              structural_reference_session + '" \\' +
                              os.linesep)
            work_script.write('  --scan="' + long_scan_name + '" \\' +
                              os.linesep)
            work_script.write('  --working-dir="' + working_directory_name +
                              '" \\' + os.linesep)
            work_script.write('  --setup-script=' + setup_script + os.linesep)

            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + '.XNAT_PBS_PUT_job.sh'
            self.create_put_script(put_script_name,
                                   username,
                                   password,
                                   put_server,
                                   project,
                                   subject,
                                   session,
                                   working_directory_name,
                                   output_resource_name,
                                   scan_name + '_' + self.PIPELINE_NAME,
                                   leave_subject_id_level=True)

            # Submit the job to do the work
            work_submit_cmd = 'qsub ' + work_script_name
            inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            work_job_no = str_utils.remove_ending_new_lines(
                completed_work_submit_process.stdout)
            inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = 'qsub -W depend=afterok:' + work_job_no + ' ' + put_script_name
            inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(
                completed_put_submit_process.stdout)
            inform("put_job_no: " + put_job_no)
    def submit_jobs(self, processing_stage=ProcessingStage.PUT_DATA):
        logger.debug("submit_jobs: processing_stage: " + str(processing_stage))

        logger.info("----------")
        logger.info("Submitting " + self.PIPELINE_NAME + " jobs for")
        logger.info(" Project: " + self.project)
        logger.info(" Subject: " + self.subject)
        logger.info(" Session: " + self.session)
        logger.info("   Stage: " + str(processing_stage))
        logger.info("----------")

        # make sure working directories do not have the same name based on
        # the same start time by sleeping a few seconds
        time.sleep(5)

        # build the working directory name
        self._working_directory_name = \
            self.build_working_directory_name(self.project, self.PIPELINE_NAME, self.subject)
        logger.info("Making working directory: " + self._working_directory_name)
        os.makedirs(name=self._working_directory_name)

        # # get JSESSION ID
        # jsession_id = xnat_access.get_jsession_id(
        #     server='db.humanconnectome.org',
        #     username=self.username,
        #     password=self.password)
        # logger.info("jsession_id: " + jsession_id)

        # # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
        # xnat_session_id = xnat_access.get_session_id(
        #     server='db.humanconnectome.org',
        #     username=self.username,
        #     password=self.password,
        #     project=self.project,
        #     subject=self.subject,
        #     session=self.session)
        # logger.info("xnat_session_id: " + xnat_session_id)

        # # get XNAT Workflow ID
        # workflow_obj = xnat_access.Workflow(self.username, self.password,
        #                                     'https://db.humanconnectome.org', jsession_id)
        # self._workflow_id = workflow_obj.create_workflow(xnat_session_id,
        #                                                  self.project,
        #                                                  self.PIPELINE_NAME,
        #                                                  'Queued')
        # logger.info("workflow_id: " + self._workflow_id)

        # determine output resource name
        self._output_resource_name = "Diffusion_bedpostx"

        # clean output resource if requested
        if self.clean_output_resource_first:
            logger.info("Deleting resource: " + self._output_resource_name + " for:")
            logger.info("  project: " + self.project)
            logger.info("  subject: " + self.subject)
            logger.info("  session: " + self.session)

            delete_resource.delete_resource(
                self.username, self.password,
                str_utils.get_server_name(self.server),
                self.project, self.subject, self.session,
                self._output_resource_name)

        # create scripts for various stages of processing
        if processing_stage >= ProcessingStage.PREPARE_SCRIPTS:
            # create the get data, process data, and clean data scripts
            self._create_get_data_script()
            self._create_process_script()
            self._create_clean_data_script()

            put_script_name = self._put_data_script_name()
            self.create_put_script(put_script_name,
                                   self.username, self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   self._working_directory_name,
                                   self._output_resource_name,
                                   self.PIPELINE_NAME)

        # run the script to get the data
        if processing_stage >= ProcessingStage.GET_DATA:

            stdout_file = open(self._get_data_script_name() + '.stdout', 'w')
            stderr_file = open(self._get_data_script_name() + '.stderr', 'w')

            logger.info("Running get data script")
            logger.info("  stdout: " + stdout_file.name)
            logger.info("  stderr: " + stderr_file.name)

            proc = subprocess.Popen(['bash', self._get_data_script_name()], 
                                    stdout=stdout_file, stderr=stderr_file)
            proc.communicate()

            logger.info("  return code: " + str(proc.returncode))

            stdout_file.close()
            stderr_file.close()

            if proc.returncode != 0:
                raise RuntimeError("get data script ended with non-zero return code")

        else:
            logger.info("Get data script not run")

        # run the script to submit processing jobs 
        if processing_stage >= ProcessingStage.PROCESS_DATA:

            stdout_file = open(self._process_script_name() + '.stdout', 'w')
            stderr_file = open(self._process_script_name() + '.stderr', 'w')

            logger.info("Running script to submit processing jobs")
            logger.info("  stdout: " + stdout_file.name)
            logger.info("  stderr: " + stderr_file.name)

            proc = subprocess.Popen(['bash', self._process_script_name()],
                                    stdout=stdout_file, stderr=stderr_file)
            proc.communicate()
            
            stdout_file.close()
            stderr_file.close()

            logger.info("  return code: " + str(proc.returncode))

            if proc.returncode != 0:
                raise RuntimeError("script to submit processing jobs ended with non-zero return code")

        else:
            logger.info("process data job not submitted")

        # submit the job to clean the data
        if processing_stage >= ProcessingStage.CLEAN_DATA:

            # figure out what the job id number is for the bedpostx postprocessing job 
            postproc_file_name = self._working_directory_name + os.sep 
            postproc_file_name += self.subject + os.sep
            postproc_file_name += 'T1w' + os.sep
            postproc_file_name += 'Diffusion.bedpostX' + os.sep
            postproc_file_name += 'logs' + os.sep
            postproc_file_name += 'postproc_ID'
            logger.info("Post-processing job ID file name: " + postproc_file_name)

            with open(postproc_file_name, 'r') as f:
                id_str = f.readline().rstrip()
            logger.info("Post-processing job ID: " + id_str)

            clean_submit_cmd = 'qsub -W depend=afterok:' + id_str + ' ' + self._clean_data_script_name()
            logger.info("clean_submit_cmd: " + clean_submit_cmd)

            completed_clean_submit_process = subprocess.run(
                clean_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            clean_job_no = str_utils.remove_ending_new_lines(completed_clean_submit_process.stdout)
            logger.info("clean_job_no: " + clean_job_no)

        else:
            logger.info("Clean data job not submitted")

        # submit the job to put the resulting data in the DB
        if processing_stage >= ProcessingStage.PUT_DATA:

            put_submit_cmd = 'qsub -W depend=afterok:' + clean_job_no + ' ' + put_script_name
            logger.info("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(completed_put_submit_process.stdout)
            logger.info("put_job_no: " + put_job_no)

        else:
            logger.info("Put data job not submitted")
    def submit_jobs(
        self,
        username,
        password,
        server,
        project,
        subject,
        session,
        structural_reference_project,
        structural_reference_session,
        put_server,
        clean_output_resource_first,
        setup_script,
        incomplete_only,
        scan,
        walltime_limit_hours,
        mem_limit_gbs,  # UNUSED
        vmem_limit_gbs,
    ):

        subject_info = hcp7t_subject.Hcp7TSubjectInfo(project, structural_reference_project, subject)

        # determine names of preprocessed resting state scans that are
        # available for the subject
        preproc_resting_state_scan_names = self.archive.available_resting_state_preproc_names(subject_info)
        inform("Preprocessed resting state scans available for subject: " + str(preproc_resting_state_scan_names))

        # determine names of the preprocessed MOVIE task scans that are available for the subject
        preproc_movie_scan_names = self.archive.available_movie_preproc_names(subject_info)
        inform("Preprocessed movie scans available for subject " + str(preproc_movie_scan_names))

        # determine names of the FIX processed scans that are available for the subject
        fix_processed_scan_names = self.archive.available_FIX_processed_names(subject_info)
        inform("FIX processed scans available for subject " + str(fix_processed_scan_names))

        # build list of scans to process
        scan_list = []
        if scan is None:
            scan_list = fix_processed_scan_names
        else:
            scan_list.append(scan)

        # process specified scans
        for scan_name in scan_list:
            if incomplete_only:
                completion_checker = PostFixHCP7T_OneSubjectCompletionChecker.PostFixHCP7T_OneSubjectCompletionChecker()
                if completion_checker.is_processing_complete(self.archive, subject_info, scan_name):
                    inform("scan: " + scan_name + " has already completed PostFixHCP7T processing")
                    inform("Only submitting jobs for incomplete scans - skipping " + scan_name)
                    continue

            long_scan_name = self.archive.functional_scan_long_name(scan_name)
            output_resource_name = self.archive.PostFix_processed_resource_name(scan_name)

            inform("")
            inform("-------------------------------------------------")
            inform("Submitting jobs for scan: " + long_scan_name)
            inform("Output resource name: " + output_resource_name)
            inform("-------------------------------------------------")
            inform("")

            # make sure working directories don't have the same name based on the
            # same start time by sleeping a few seconds
            time.sleep(5)

            current_seconds_since_epoch = int(time.time())

            working_directory_name = self.build_home
            working_directory_name += os.sep + project
            working_directory_name += os.sep + self.PIPELINE_NAME
            working_directory_name += "." + subject
            working_directory_name += "." + long_scan_name
            working_directory_name += "." + str(current_seconds_since_epoch)

            # make the working directory
            inform("Making working directory: " + working_directory_name)
            os.makedirs(name=working_directory_name)

            # get JSESSION ID
            jsession_id = xnat_access.get_jsession_id(
                server="db.humanconnectome.org", username=username, password=password
            )
            inform("jsession_id: " + jsession_id)

            # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
            xnat_session_id = xnat_access.get_session_id(
                server="db.humanconnectome.org",
                username=username,
                password=password,
                project=project,
                subject=subject,
                session=session,
            )

            inform("xnat_session_id: " + xnat_session_id)

            # get XNAT Workflow ID
            workflow_obj = xnat_access.Workflow(username, password, server, jsession_id)
            workflow_id = workflow_obj.create_workflow(xnat_session_id, project, self.PIPELINE_NAME, "Queued")
            inform("workflow_id: " + workflow_id)

            # Clean the output resource if requested
            if clean_output_resource_first:
                inform("Deleting resource: " + output_resource_name + " for:")
                inform("  project: " + project)
                inform("  subject: " + subject)
                inform("  session: " + session)

                delete_resource.delete_resource(
                    username,
                    password,
                    str_utils.get_server_name(server),
                    project,
                    subject,
                    session,
                    output_resource_name,
                )

            script_file_start_name = working_directory_name
            script_file_start_name += os.sep + subject
            script_file_start_name += "." + long_scan_name
            script_file_start_name += "." + self.PIPELINE_NAME
            script_file_start_name += "." + project
            script_file_start_name += "." + session

            # Create script to submit to do the actual work
            work_script_name = script_file_start_name + ".XNAT_PBS_job.sh"
            with contextlib.suppress(FileNotFoundError):
                os.remove(work_script_name)

            work_script = open(work_script_name, "w")

            nodes_spec = "nodes=1:ppn=1"
            walltime_spec = "walltime=" + str(walltime_limit_hours) + ":00:00"
            vmem_spec = "vmem=" + str(vmem_limit_gbs) + "gb"

            work_script.write("#PBS -l " + nodes_spec + "," + walltime_spec + "," + vmem_spec + os.linesep)
            work_script.write("#PBS -o " + working_directory_name + os.linesep)
            work_script.write("#PBS -e " + working_directory_name + os.linesep)
            work_script.write(os.linesep)
            work_script.write(
                self.xnat_pbs_jobs_home
                + os.sep
                + "7T"
                + os.sep
                + "PostFixHCP7T"
                + os.sep
                + "PostFixHCP7T.XNAT.sh \\"
                + os.linesep
            )
            work_script.write('  --user="******" \\' + os.linesep)
            work_script.write('  --password="******" \\' + os.linesep)
            work_script.write('  --server="' + str_utils.get_server_name(server) + '" \\' + os.linesep)
            work_script.write('  --project="' + project + '" \\' + os.linesep)
            work_script.write('  --subject="' + subject + '" \\' + os.linesep)
            work_script.write('  --session="' + session + '" \\' + os.linesep)
            work_script.write('  --scan="' + long_scan_name + '" \\' + os.linesep)
            work_script.write('  --working-dir="' + working_directory_name + '" \\' + os.linesep)
            work_script.write('  --workflow-id="' + workflow_id + '" \\' + os.linesep)
            work_script.write("  --setup-script=" + setup_script + os.linesep)

            work_script.close()
            os.chmod(work_script_name, stat.S_IRWXU | stat.S_IRWXG)

            # Create script to put the results into the DB
            put_script_name = script_file_start_name + ".XNAT_PBS_PUT_job.sh"
            self.create_put_script(
                put_script_name,
                username,
                password,
                put_server,
                project,
                subject,
                session,
                working_directory_name,
                output_resource_name,
                scan_name + "_" + self.PIPELINE_NAME,
            )

            # Submit the job to do the work
            work_submit_cmd = "qsub " + work_script_name
            inform("work_submit_cmd: " + work_submit_cmd)

            completed_work_submit_process = subprocess.run(
                work_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE, universal_newlines=True
            )
            work_job_no = str_utils.remove_ending_new_lines(completed_work_submit_process.stdout)
            inform("work_job_no: " + work_job_no)

            # Submit the job to put the results in the DB
            put_submit_cmd = "qsub -W depend=afterok:" + work_job_no + " " + put_script_name
            inform("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd, shell=True, check=True, stdout=subprocess.PIPE, universal_newlines=True
            )
            put_job_no = str_utils.remove_ending_new_lines(completed_put_submit_process.stdout)
            inform("put_job_no: " + put_job_no)
    def submit_jobs(self, processing_stage=ProcessingStage.PUT_DATA):
        logger.debug("submit_jobs: processing_stage: " + str(processing_stage))

        logger.info("----------")
        logger.info("Submitting " + self.PIPELINE_NAME + " jobs for")
        logger.info(" Project: " + self.project)
        logger.info(" Subject: " + self.subject)
        logger.info(" Session: " + self.session)
        logger.info("   Stage: " + str(processing_stage))
        logger.info("----------")

        # make sure working directories do not have the same name based on
        # the same start time by sleeping a few seconds
        time.sleep(5)

        # build the working directory name
        self._working_directory_name = \
            self.build_working_directory_name(self.project, self.PIPELINE_NAME, self.subject)
        logger.info("Making working directory: " +
                    self._working_directory_name)
        os.makedirs(name=self._working_directory_name)

        # # get JSESSION ID
        # jsession_id = xnat_access.get_jsession_id(
        #     server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
        #     username=self.username,
        #     password=self.password)
        # logger.info("jsession_id: " + jsession_id)

        # # get XNAT Session ID (a.k.a. the experiment ID, e.g. ConnectomeDB_E1234)
        # xnat_session_id = xnat_access.get_session_id(
        #     server=os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
        #     username=self.username,
        #     password=self.password,
        #     project=self.project,
        #     subject=self.subject,
        #     session=self.session)
        # logger.info("xnat_session_id: " + xnat_session_id)

        # # get XNAT Workflow ID
        # workflow_obj = xnat_access.Workflow(self.username, self.password,
        #                                     os_utils.getenv_required('XNAT_PBS_JOBS_XNAT_SERVER'),
        #                                     jsession_id)
        # self._workflow_id = workflow_obj.create_workflow(xnat_session_id,
        #                                                  self.project,
        #                                                  self.PIPELINE_NAME,
        #                                                  'Queued')
        # logger.info("workflow_id: " + self._workflow_id)

        # determine output resource name
        self._output_resource_name = "Diffusion_bedpostx"

        # clean output resource if requested
        if self.clean_output_resource_first:
            logger.info("Deleting resource: " + self._output_resource_name +
                        " for:")
            logger.info("  project: " + self.project)
            logger.info("  subject: " + self.subject)
            logger.info("  session: " + self.session)

            delete_resource.delete_resource(
                self.username, self.password,
                str_utils.get_server_name(self.server), self.project,
                self.subject, self.session, self._output_resource_name)

        # create scripts for various stages of processing
        if processing_stage >= ProcessingStage.PREPARE_SCRIPTS:
            # create the get data, process data, and clean data scripts
            self._create_get_data_script()
            self._create_process_script()
            self._create_clean_data_script()

            put_script_name = self._put_data_script_name()
            self.create_put_script(put_script_name, self.username,
                                   self.password, self.put_server,
                                   self.project, self.subject, self.session,
                                   self._working_directory_name,
                                   self._output_resource_name,
                                   self.PIPELINE_NAME)

        # run the script to get the data
        if processing_stage >= ProcessingStage.GET_DATA:

            stdout_file = open(self._get_data_script_name() + '.stdout', 'w')
            stderr_file = open(self._get_data_script_name() + '.stderr', 'w')

            logger.info("Running get data script")
            logger.info("  stdout: " + stdout_file.name)
            logger.info("  stderr: " + stderr_file.name)

            proc = subprocess.Popen(
                ['bash', self._get_data_script_name()],
                stdout=stdout_file,
                stderr=stderr_file)
            proc.communicate()

            logger.info("  return code: " + str(proc.returncode))

            stdout_file.close()
            stderr_file.close()

            if proc.returncode != 0:
                raise RuntimeError(
                    "get data script ended with non-zero return code")

        else:
            logger.info("Get data script not run")

        # run the script to submit processing jobs
        if processing_stage >= ProcessingStage.PROCESS_DATA:

            stdout_file = open(self._process_script_name() + '.stdout', 'w')
            stderr_file = open(self._process_script_name() + '.stderr', 'w')

            logger.info("Running script to submit processing jobs")
            logger.info("  stdout: " + stdout_file.name)
            logger.info("  stderr: " + stderr_file.name)

            proc = subprocess.Popen(
                ['bash', self._process_script_name()],
                stdout=stdout_file,
                stderr=stderr_file)
            proc.communicate()

            stdout_file.close()
            stderr_file.close()

            logger.info("  return code: " + str(proc.returncode))

            if proc.returncode != 0:
                raise RuntimeError(
                    "script to submit processing jobs ended with non-zero return code"
                )

        else:
            logger.info("process data job not submitted")

        # submit the job to clean the data
        if processing_stage >= ProcessingStage.CLEAN_DATA:

            # figure out what the job id number is for the bedpostx postprocessing job
            postproc_file_name = self._working_directory_name + os.sep
            postproc_file_name += self.subject + os.sep
            postproc_file_name += 'T1w' + os.sep
            postproc_file_name += 'Diffusion.bedpostX' + os.sep
            postproc_file_name += 'logs' + os.sep
            postproc_file_name += 'postproc_ID'
            logger.info("Post-processing job ID file name: " +
                        postproc_file_name)

            with open(postproc_file_name, 'r') as f:
                id_str = f.readline().rstrip()
            logger.info("Post-processing job ID: " + id_str)

            clean_submit_cmd = 'qsub -W depend=afterok:' + id_str + ' ' + self._clean_data_script_name()
            logger.info("clean_submit_cmd: " + clean_submit_cmd)

            completed_clean_submit_process = subprocess.run(
                clean_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            clean_job_no = str_utils.remove_ending_new_lines(
                completed_clean_submit_process.stdout)
            logger.info("clean_job_no: " + clean_job_no)

        else:
            logger.info("Clean data job not submitted")

        # submit the job to put the resulting data in the DB
        if processing_stage >= ProcessingStage.PUT_DATA:

            put_submit_cmd = 'qsub -W depend=afterok:' + clean_job_no + ' ' + put_script_name
            logger.info("put_submit_cmd: " + put_submit_cmd)

            completed_put_submit_process = subprocess.run(
                put_submit_cmd,
                shell=True,
                check=True,
                stdout=subprocess.PIPE,
                universal_newlines=True)
            put_job_no = str_utils.remove_ending_new_lines(
                completed_put_submit_process.stdout)
            logger.info("put_job_no: " + put_job_no)

        else:
            logger.info("Put data job not submitted")
Example no. 33
    def create_process_data_job_script(self):
        module_logger.debug(debug_utils.get_name())

        # copy the .XNAT_PROCESS script to the working directory
        processing_script_source_path = self.xnat_pbs_jobs_home
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += os.sep + self.PIPELINE_NAME
        processing_script_source_path += '.XNAT_PROCESS'

        processing_script_dest_path = self.working_directory_name
        processing_script_dest_path += os.sep + self.PIPELINE_NAME
        processing_script_dest_path += '.XNAT_PROCESS'

        shutil.copy(processing_script_source_path, processing_script_dest_path)
        os.chmod(processing_script_dest_path, stat.S_IRWXU | stat.S_IRWXG)

        # write the process data job script (that calls the .XNAT_PROCESS script)

        subject_info = ccf_subject.SubjectInfo(self.project, self.subject,
                                               self.classifier, self.scan)

        script_name = self.process_data_job_script_name

        with contextlib.suppress(FileNotFoundError):
            os.remove(script_name)

        walltime_limit_str = str(self.walltime_limit_hours) + ':00:00'
        vmem_limit_str = str(self.vmem_limit_gbs) + 'gb'

        resources_line = '#PBS -l nodes=' + str(self.WORK_NODE_COUNT)
        resources_line += ':ppn=' + str(self.WORK_PPN)
        resources_line += ',walltime=' + walltime_limit_str
        resources_line += ',mem=' + vmem_limit_str  # note: the mem request reuses the vmem limit

        stdout_line = '#PBS -o ' + self.working_directory_name
        stderr_line = '#PBS -e ' + self.working_directory_name

        xnat_pbs_setup_line = 'source ' + self._get_xnat_pbs_setup_script_path() + ' ' + self._get_db_name()

        script_line = processing_script_dest_path
        user_line = '  --user=' + self.username
        password_line = '  --password=' + self.password
        server_line = '  --server=' + str_utils.get_server_name(self.server)
        project_line = '  --project=' + self.project
        subject_line = '  --subject=' + self.subject
        session_line = '  --session=' + self.session
        scan_line = '  --scan=' + self.scan
        session_classifier_line = '  --session-classifier=' + self.classifier
        dcmethod_line = '  --dcmethod=TOPUP'
        topupconfig_line = '  --topupconfig=b02b0.cnf'
        gdcoeffs_line = '  --gdcoeffs=Prisma_3T_coeff_AS82.grad'

        wdir_line = '  --working-dir=' + self.working_directory_name
        setup_line = '  --setup-script=' + self.setup_file_name

        with open(script_name, 'w') as script:
            script.write(resources_line + os.linesep)
            script.write(stdout_line + os.linesep)
            script.write(stderr_line + os.linesep)
            script.write(os.linesep)
            script.write(xnat_pbs_setup_line + os.linesep)
            script.write(os.linesep)
            script.write(script_line + ' \\' + os.linesep)
            script.write(user_line + ' \\' + os.linesep)
            script.write(password_line + ' \\' + os.linesep)
            script.write(server_line + ' \\' + os.linesep)
            script.write(project_line + ' \\' + os.linesep)
            script.write(subject_line + ' \\' + os.linesep)
            script.write(session_line + ' \\' + os.linesep)
            script.write(scan_line + ' \\' + os.linesep)
            script.write(session_classifier_line + ' \\' + os.linesep)
            script.write(dcmethod_line + ' \\' + os.linesep)
            script.write(topupconfig_line + ' \\' + os.linesep)
            script.write(gdcoeffs_line + ' \\' + os.linesep)
            script.write(wdir_line + ' \\' + os.linesep)
            script.write(setup_line + os.linesep)

        os.chmod(script_name, stat.S_IRWXU | stat.S_IRWXG)
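
The long run of script.write calls above emits the command as one --flag=value option per line, every line but the last continued with a backslash. A generic sketch of that pattern (the helper is hypothetical, not the project's API):

import os

def write_continued_command(script, executable, options):
    # Write the executable and its options one per line, ending every
    # line except the last with a backslash continuation.
    lines = [executable] + list(options)
    for line in lines[:-1]:
        script.write(line + ' \\' + os.linesep)
    script.write(lines[-1] + os.linesep)
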