def get_project_open_tasks(self, xnat, project_id):
    """
    Get the list of open tasks for a project.

    :param xnat: pyxnat Interface object
    :param project_id: project ID on XNAT
    :return: list of Task objects for assessors with an open proc/qc status
    """
    task_list = []
    # Get lists of processors for this project
    sess_proc_list, scan_proc_list = processors.processors_by_type(
        self.project_process_dict[project_id])
    # Get lists of assessors for this project
    assr_list = XnatUtils.list_project_assessors(xnat, project_id)
    # Match each assessor to a processor, get a task, and add to list
    for assr_info in assr_list:
        # Skip assessors that are neither proc-open nor qc-open
        if assr_info['procstatus'] not in task.OPEN_STATUS_LIST \
           and assr_info['qcstatus'] not in task.OPEN_QC_LIST:
            continue
        task_proc = self.match_proc(xnat, assr_info, sess_proc_list,
                                    scan_proc_list)
        if task_proc is None:  # identity test instead of "== None"
            print('WARN:no matching processor found:'
                  + assr_info['assessor_label'])
            continue
        # Get a new task with the matched processor
        assr = XnatUtils.get_full_object(xnat, assr_info)
        cur_task = Task(task_proc, assr, self.upload_dir)
        task_list.append(cur_task)
    return task_list
def launch(self, jobdir, job_email=None, job_email_options='bae'):
    """
    Write a PBS script for this task and submit it to the cluster.

    :param jobdir: directory on the node where the job will run
    :param job_email: email address to notify about the job (None = none)
    :param job_email_options: PBS mail option string (default 'bae')
    :return: True if the job was submitted, False otherwise
    """
    cmds = self.commands(jobdir)
    pbsfile = self.pbs_path()
    outlog = self.outlog_path()
    pbs = PBS(pbsfile, outlog, cmds, self.processor.walltime_str,
              self.processor.memreq_mb, self.processor.ppn, job_email,
              job_email_options)
    pbs.write()
    jobid = pbs.submit()
    if jobid == '' or jobid == '0':
        # TODO: raise exception
        print('ERROR:failed to launch job on cluster')
        return False
    else:
        # Mark the task running and remember the cluster job id / start date
        self.set_status(JOB_RUNNING)
        self.set_jobid(jobid)
        self.set_jobstartdate_today()
        #save record on redcap for the job that has been launch
        project = self.assessor_label.split('-x-')[0]
        SM_name = self.get_processor_name()
        data, record_id = XnatUtils.create_record_redcap(project, SM_name)
        run = XnatUtils.save_job_redcap(data, record_id)
        if not run:
            # Best-effort: a REDCap failure does not fail the launch
            print(' ->ERROR: did not send the job to redcap for jobID <'
                  + str(jobid) + '>: ' + record_id)
        return True
def update_session(self, xnat, sess_info, sess_proc_list, scan_proc_list, sess_mod_list, scan_mod_list):
    """
    Update a session: update its scans, run session modules, and update
    session processor tasks.

    :param xnat: pyxnat Interface object
    :param sess_info: session dictionary (see XnatUtils.list_sessions)
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :param sess_mod_list: list of modules running on a session
    :param scan_mod_list: list of modules running on a scan
    :return: None
    """
    # Scans: only list them if something will use them
    if scan_proc_list or scan_mod_list:
        scan_list = XnatUtils.list_scans(xnat, sess_info['project'],
                                         sess_info['subject_ID'],
                                         sess_info['ID'])
        for scan_info in scan_list:
            # print() call form for consistency with the rest of the file
            # (was a Python-2-only print statement)
            print(' +SCAN: '+scan_info['scan_id'])
            self.update_scan(xnat, scan_info, scan_proc_list, scan_mod_list)
    # Modules
    for sess_mod in sess_mod_list:
        print(' * Module: '+sess_mod.getname())
        # NOTE(review): sess_obj is reset on every iteration, so the full
        # session object is re-fetched per module -- confirm whether it
        # should be hoisted above the loop and shared
        sess_obj = None
        if sess_mod.needs_run(sess_info, xnat):
            if sess_obj is None:  # identity test instead of "== None"
                sess_obj = XnatUtils.get_full_object(xnat, sess_info)
            sess_mod.run(sess_info, sess_obj)
    # Processors
    for sess_proc in sess_proc_list:
        if sess_proc.should_run(sess_info, xnat):
            sess_task = sess_proc.get_task(xnat, sess_info, self.upload_dir)
            print(' *Processor:'+sess_proc.name+':updating status:'+sess_task.assessor_label)
            sess_task.update_status()
def upload_update_date_redcap(project_list, type_update, start_end):
    """
    Upload the timestamp of when bin ran on a project (start and finish).

    :param project_list: List of projects that were updated
    :param type_update: What type of process ran: dax_build (1),
                        dax_update_tasks (2), dax_launch (3)
    :param start_end: starting timestamp (1) and ending timestamp (2)
    :return: None
    """
    logger = logging.getLogger('dax')
    if not (API_URL and API_KEY_DAX and REDCAP_VAR):
        # REDCap upload not configured: nothing to do
        return
    redcap_project = None
    try:
        redcap_project = redcap.Project(API_URL, API_KEY_DAX)
    except Exception:
        # Narrowed from a bare "except:"; deliberately best-effort
        logger.warn('Could not access redcap. Either wrong API_URL/API_KEY or redcap down.')
    if redcap_project:
        # Map the type_update code to the REDCap field prefix
        field_by_type = {1: 'dax_build', 2: 'dax_update_tasks', 3: 'dax_launch'}
        data = list()
        for project in project_list:
            to_upload = dict()
            to_upload[REDCAP_VAR['project']] = project
            if type_update in field_by_type:
                to_upload = set_variables_dax_manager(
                    to_upload, field_by_type[type_update], start_end)
            data.append(to_upload)
        XnatUtils.upload_list_records_redcap(redcap_project, data)
def lastupdated_subj2sess(self):
    """
    For subjects that are up to date, stamp any of their sessions that has
    never been updated so it is skipped until modified again.

    :return: None
    """
    print('DEBUG:check for up to date subjects, apply timestamp to session last_updated')
    # Fix: initialize before the try so the finally clause cannot hit an
    # unbound name if Interface() raises
    xnat = None
    try:
        print('Connecting to XNAT at '+self.xnat_host)
        xnat = Interface(self.xnat_host, self.xnat_user, self.xnat_pass)
        project_list = sorted(set(self.project_process_dict.keys() + self.project_modules_dict.keys()))
        # Update projects
        for project_id in project_list:
            print('===== PROJECT:'+project_id+' =====')
            for subj_info in XnatUtils.list_subjects(xnat, project_id):
                last_mod = datetime.strptime(subj_info['last_modified'][0:19], '%Y-%m-%d %H:%M:%S')
                last_up = self.get_lastupdated(subj_info)
                if last_up is not None and last_mod < last_up:
                    # Subject unchanged since last update: stamp sessions
                    # that have never been updated
                    print(' +Subject:'+subj_info['label']+', subject up to date:last_mod='+str(last_mod)+',last_up='+str(last_up))
                    for sess_info in XnatUtils.list_sessions(xnat, subj_info['project'], subj_info['ID']):
                        if sess_info['last_updated'] == '':
                            print(' +Session:'+sess_info['label']+': subject up to date, setting update time to now')
                            self.set_session_lastupdated(xnat, sess_info)
                else:
                    print(' +Subject:'+subj_info['label']+', skipping:last_mod='+str(last_mod)+',last_up='+str(last_up))
    finally:
        if xnat is not None:
            xnat.disconnect()
            print('Connection to XNAT closed')
def launch(self,jobdir,job_email=None,job_email_options='bae'):
    """
    Write a PBS script for this task and submit it to the cluster.

    :param jobdir: directory on the node where the job will run
    :param job_email: email address to notify about the job (None = none)
    :param job_email_options: PBS mail option string (default 'bae')
    :return: True if the job was submitted, False otherwise
    """
    cmds = self.commands(jobdir)
    pbsfile = self.pbs_path()
    outlog = self.outlog_path()
    pbs = PBS(pbsfile,outlog,cmds,self.processor.walltime_str,self.processor.memreq_mb,self.processor.ppn,job_email,job_email_options)
    pbs.write()
    jobid = pbs.submit()
    if jobid == '' or jobid == '0':
        # TODO: raise exception
        print('ERROR:failed to launch job on cluster')
        return False
    else:
        # Mark the task running and remember the cluster job id / start date
        self.set_status(JOB_RUNNING)
        self.set_jobid(jobid)
        self.set_jobstartdate_today()
        #save record on redcap for the job that has been launch
        project=self.assessor_label.split('-x-')[0]
        SM_name=self.get_processor_name()
        data,record_id=XnatUtils.create_record_redcap(project, SM_name)
        run=XnatUtils.save_job_redcap(data,record_id)
        if not run:
            # Best-effort: a REDCap failure does not fail the launch
            print(' ->ERROR: did not send the job to redcap for jobID <'+str(jobid)+'>: '+record_id)
        return True
def build(self, lockfile_prefix, project_local, sessions_local):
    """
    Main method to build the tasks and the sessions

    :param lockfile_prefix: prefix for flag file to lock the launcher
    :param project_local: project to run locally
    :param sessions_local: list of sessions to launch tasks
                           associated to the project locally
    :return: None
    """
    LOGGER.info('-------------- Build --------------\n')
    flagfile = os.path.join(RESULTS_DIR, 'FlagFiles', lockfile_prefix+'_'+BUILD_SUFFIX)
    project_list = self.init_script(flagfile, project_local, type_update=1, start_end=1)
    # Fix: initialize before the try so finish_script in the finally clause
    # cannot hit an unbound name if get_interface() raises
    xnat = None
    try:
        LOGGER.info('Connecting to XNAT at '+self.xnat_host)
        xnat = XnatUtils.get_interface(self.xnat_host, self.xnat_user, self.xnat_pass)
        if not XnatUtils.has_dax_datatypes(xnat):
            raise Exception('error: dax datatypes are not installed on your xnat <%s>' % (self.xnat_host))
        #Priority if set:
        if self.priority_project and not project_local:
            unique_list = set(self.project_process_dict.keys()+self.project_modules_dict.keys())
            project_list = self.get_project_list(list(unique_list))
        # Build projects
        for project_id in project_list:
            LOGGER.info('===== PROJECT:'+project_id+' =====')
            self.build_project(xnat, project_id, lockfile_prefix, sessions_local)
    finally:
        self.finish_script(xnat, flagfile, project_list, 1, 2, project_local)
def module_prerun(self, projectID, settings_filename=''):
    """
    Run the prerun step for every module of a project.

    :param projectID: project ID on XNAT
    :param settings_filename: settings file name forwarded to each module
    :return: None
    """
    for module in self.project_modules_dict[projectID]:
        # Save the module to the redcap project vuiis xnat job before
        # the prerun
        payload, rc_id = XnatUtils.create_record_redcap(projectID, module.getname())
        saved = XnatUtils.save_job_redcap(payload, rc_id)
        if not saved:
            print(' ->ERROR: did not send the job to redcap for <'+module.getname()+'> : '+rc_id)
        module.prerun(settings_filename)
def update_project(self, xnat, project_id, lockfile_prefix):
    """
    Update all sessions of a project: run modules and update processor tasks.

    :param xnat: pyxnat Interface object
    :param project_id: project ID on XNAT
    :param lockfile_prefix: prefix for the launcher flag file
    :return: None
    """
    #Modules prerun
    print(' *Modules Prerun')
    # NOTE(review): lockfile_prefix is passed where module_prerun expects a
    # settings filename -- confirm this is intentional
    self.module_prerun(project_id, lockfile_prefix)
    # Get lists of modules/processors per scan/exp for this project
    # (removed redundant pre-initialization of the four lists)
    exp_mod_list, scan_mod_list = modules.modules_by_type(self.project_modules_dict[project_id])
    exp_proc_list, scan_proc_list = processors.processors_by_type(self.project_process_dict[project_id])
    # Update each session
    for sess_info in XnatUtils.list_sessions(xnat, project_id):
        last_mod = datetime.strptime(sess_info['last_modified'][0:19], '%Y-%m-%d %H:%M:%S')
        last_up = self.get_lastupdated(sess_info)
        if last_up is not None and last_mod < last_up:  # was "!= None"
            print(' +Session:'+sess_info['label']+': skipping, last_mod='+str(last_mod)+',last_up='+str(last_up))
        else:
            print(' +Session:'+sess_info['label']+': updating...')
            # NOTE: we set update time here, so if the sess is changed
            # below it will be checked again
            self.set_session_lastupdated(xnat, sess_info)
            self.update_session(xnat, sess_info, exp_proc_list, scan_proc_list, exp_mod_list, scan_mod_list)
    # Modules after run
    print(' *Modules Afterrun')
    self.module_afterrun(xnat, project_id)
def get_sessions_list(xnat, project_id, slocal):
    """
    Get the sessions list from XNAT and sort it.
    Move the new sessions to the front.

    :param xnat: pyxnat.Interface object
    :param project_id: project ID on XNAT
    :param slocal: comma-separated session labels selected by user
    :return: list of sessions sorted for a project
    """
    list_sessions = XnatUtils.list_sessions(xnat, project_id)
    if slocal and slocal.lower() != 'all':
        # Keep only the sessions whose label was given by the user.
        # List comprehension instead of filter(): under Python 3 filter()
        # returns a lazy iterator, so the emptiness check below would never
        # fire and the list could not be iterated twice further down.
        wanted = slocal.split(',')
        list_sessions = [x for x in list_sessions if x['label'] in wanted]
        if not list_sessions:
            LOGGER.warn(
                'No session from XNAT matched the sessions given: '
                + slocal + ' .')
    #Sort sessions: first the new sessions that have never been updated
    sorted_list = [sess for sess in list_sessions if not sess['last_updated']]
    new_sessions_label = [sess['label'] for sess in sorted_list]
    for session in list_sessions:
        if session['label'] not in new_sessions_label:
            sorted_list.append(session)
    return sorted_list
def set_session_lastupdated(self, xnat, sess_info):
    """
    Stamp the session's last_updated field on XNAT.

    :param xnat: pyxnat Interface object
    :param sess_info: session dictionary with 'label' and 'xsiType' keys
    :return: None
    """
    # One minute ahead: writing the field itself bumps last_modified, so
    # the stamp must sort after that change
    stamp = (datetime.now() + timedelta(minutes=1)).strftime(UPDATE_FORMAT)
    print('DEBUG:setting last_updated for:'+sess_info['label']+' to '+stamp)
    session = XnatUtils.get_full_object(xnat, sess_info)
    session.attrs.set(sess_info['xsiType']+'/original', UPDATE_PREFIX+stamp)
def upload_update_date_redcap(project_list, type_update, start_end):
    """
    Upload the timestamp of when bin ran on a project (start and finish).

    :param project_list: List of projects that were updated
    :param type_update: What type of process ran: dax_build (1),
                        dax_update_tasks (2), dax_launch (3)
    :param start_end: starting timestamp (1) and ending timestamp (2)
    :return: None
    """
    logger = logging.getLogger('dax')
    # Hoist the settings lookups: each is used more than once below
    api_url = DAX_SETTINGS.get_api_url()
    api_key = DAX_SETTINGS.get_api_key_dax()
    dax_manager = DAX_SETTINGS.get_dax_manager_config()
    if not (api_url and api_key and dax_manager):
        # REDCap upload not configured: nothing to do
        return
    redcap_project = None
    try:
        redcap_project = redcap.Project(api_url, api_key)
    except Exception:
        # Narrowed from a bare "except:"; deliberately best-effort
        logger.warn(
            'Could not access redcap. Either wrong DAX_SETTINGS. API_URL/API_KEY or redcap down.'
        )
    if redcap_project:
        # Map the type_update code to the REDCap field prefix
        field_by_type = {1: 'dax_build', 2: 'dax_update_tasks', 3: 'dax_launch'}
        data = list()
        for project in project_list:
            to_upload = dict()
            to_upload[dax_manager['project']] = project
            if type_update in field_by_type:
                to_upload = set_variables_dax_manager(
                    to_upload, field_by_type[type_update], start_end)
            data.append(to_upload)
        XnatUtils.upload_list_records_redcap(redcap_project, data)
def update_tasks(self, lockfile_prefix, project_local, sessions_local):
    """
    Main method to Update the tasks

    :param lockfile_prefix: prefix for flag file to lock the launcher
    :param project_local: project to run locally
    :param sessions_local: list of sessions to update tasks associated
                           to the project locally
    :return: None
    """
    LOGGER.info('-------------- Update Tasks --------------\n')
    flagfile = os.path.join(RESULTS_DIR, 'FlagFiles', lockfile_prefix+'_'+UPDATE_SUFFIX)
    project_list = self.init_script(flagfile, project_local, type_update=2, start_end=1)
    # Fix: initialize before the try so finish_script in the finally clause
    # cannot hit an unbound name if get_interface() raises
    xnat = None
    try:
        LOGGER.info('Connecting to XNAT at '+self.xnat_host)
        xnat = XnatUtils.get_interface(self.xnat_host, self.xnat_user, self.xnat_pass)
        if not XnatUtils.has_dax_datatypes(xnat):
            raise Exception('error: dax datatypes are not installed on your xnat <%s>' % (self.xnat_host))
        LOGGER.info('Getting task list...')
        task_list = self.get_tasks(xnat, self.is_updatable_tasks, project_list, sessions_local)
        LOGGER.info(str(len(task_list))+' open tasks found')
        LOGGER.info('Updating tasks...')
        for cur_task in task_list:
            LOGGER.info(' Updating task:'+cur_task.assessor_label)
            cur_task.update_status()
    finally:
        self.finish_script(xnat, flagfile, project_list, 2, 2, project_local)
def launch_jobs(self, lockfile_prefix, project_local, sessions_local, writeonly=False, pbsdir=None):
    """
    Main Method to launch the tasks

    :param lockfile_prefix: prefix for flag file to lock the launcher
    :param project_local: project to run locally
    :param sessions_local: list of sessions to launch tasks associated
                           to the project locally
    :param writeonly: write the job files without submitting them
    :param pbsdir: folder to store the pbs file
    :return: None
    """
    LOGGER.info('-------------- Launch Tasks --------------\n')
    flagfile = os.path.join(RESULTS_DIR, 'FlagFiles', lockfile_prefix+'_'+LAUNCH_SUFFIX)
    project_list = self.init_script(flagfile, project_local, type_update=3, start_end=1)
    # Fix: initialize before the try so finish_script in the finally clause
    # cannot hit an unbound name if get_interface() raises
    xnat = None
    try:
        LOGGER.info('Connecting to XNAT at '+self.xnat_host)
        xnat = XnatUtils.get_interface(self.xnat_host, self.xnat_user, self.xnat_pass)
        if not XnatUtils.has_dax_datatypes(xnat):
            raise Exception('error: dax datatypes are not installed on your xnat <%s>' % (self.xnat_host))
        LOGGER.info('Getting launchable tasks list...')
        task_list = self.get_tasks(xnat, self.is_launchable_tasks, project_list, sessions_local)
        LOGGER.info(str(len(task_list))+' tasks that need to be launched found')
        #Launch the task that need to be launch
        self.launch_tasks(task_list, writeonly, pbsdir)
    finally:
        self.finish_script(xnat, flagfile, project_list, 3, 2, project_local)
def build_session(self, xnat, sess_info, sess_proc_list, scan_proc_list, sess_mod_list, scan_mod_list):
    """
    Build a session: run modules (retrying while they keep modifying the
    session, up to 3 passes), then build scan and session processors.

    :param xnat: pyxnat.Interface object
    :param sess_info: python dictionary from XnatUtils.list_sessions method
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :param sess_mod_list: list of modules running on a session
    :param scan_mod_list: list of modules running on a scan
    :return: None
    """
    csess = XnatUtils.CachedImageSession(xnat, sess_info['project_label'],
                                         sess_info['subject_label'],
                                         sess_info['session_label'])
    # NOTE(review): session_info and sess_obj appear unused in this
    # method -- confirm they can be removed
    session_info = csess.info()
    sess_obj = None

    # Modules: re-run while a module pass modified the session, at most
    # 3 passes, reloading the cached session between passes
    mod_count = 0
    while mod_count < 3:
        mess = """== Build modules (count:{count}) =="""
        LOGGER.debug(mess.format(count=mod_count))
        # NOTE: we keep starting time to check if something changes below
        start_time = datetime.now()
        if sess_mod_list:
            self.build_session_modules(xnat, csess, sess_mod_list)
        if scan_mod_list:
            for cscan in csess.scans():
                LOGGER.debug('+SCAN: ' + cscan.info()['scan_id'])
                self.build_scan_modules(xnat, cscan, scan_mod_list)
        if not sess_was_modified(xnat, sess_info, start_time):
            # Nothing changed during this pass: modules are done
            break
        csess.reload()
        mod_count += 1

    # Scan Processors
    LOGGER.debug('== Build scan processors ==')
    if scan_proc_list:
        for cscan in csess.scans():
            LOGGER.debug('+SCAN: ' + cscan.info()['scan_id'])
            self.build_scan_processors(xnat, cscan, scan_proc_list)

    # Session Processors
    LOGGER.debug('== Build session processors ==')
    if sess_proc_list:
        self.build_session_processors(xnat, csess, sess_proc_list)
def build_scan(self, xnat, cscan, scan_proc_list, scan_mod_list):
    """
    Build the scan: run scan modules, then create/update scan processor
    tasks.

    :param xnat: pyxnat.Interface object
    :param cscan: CachedImageScan from XnatUtils
    :param scan_proc_list: list of processors running on a scan
    :param scan_mod_list: list of modules running on a scan
    :return: None
    """
    scan_info = cscan.info()
    # Modules: fetch the full scan object lazily, shared by all modules
    scan_obj = None
    for scan_mod in scan_mod_list:
        LOGGER.debug('* Module: '+scan_mod.getname())
        if scan_mod.needs_run(cscan, xnat):
            if scan_obj is None:  # identity test instead of "== None"
                scan_obj = XnatUtils.get_full_object(xnat, scan_info)
            scan_mod.run(scan_info, scan_obj)
    # Processors
    for scan_proc in scan_proc_list:
        if not scan_proc.should_run(scan_info):
            continue
        assr_name = scan_proc.get_assessor_name(cscan)
        # Look for existing assessor
        proc_assr = None
        for assr in cscan.parent().assessors():
            if assr.info()['label'] == assr_name:
                proc_assr = assr
        if proc_assr is None or proc_assr.info()['procstatus'] == task.NEED_INPUTS:
            # Create it if it doesn't exist (or it still needs inputs)
            scan_task = scan_proc.get_task(xnat, cscan, RESULTS_DIR)
            self.log_updating_status(scan_proc.name, scan_task.assessor_label)
            has_inputs, qcstatus = scan_proc.has_inputs(cscan)
            if has_inputs == 1:
                scan_task.set_status(task.NEED_TO_RUN)
                scan_task.set_qcstatus(task.JOB_PENDING)
            elif has_inputs == -1:
                scan_task.set_status(task.NO_DATA)
                scan_task.set_qcstatus(qcstatus)
            else:
                scan_task.set_qcstatus(qcstatus)
        else:
            # Other statuses handled by dax_update_open_tasks
            pass
def get_task(self, intf, csess, upload_dir):
    """
    Return the Task object for this processor's assessor on a session.

    :param intf: XNAT interface see pyxnat.Interface
    :param csess: CachedImageSession from XnatUtils
    :param upload_dir: directory to put the data after run on the node
    :return: Task object of the assessor
    """
    assessor_label = self.get_assessor_name(csess)
    session_obj = XnatUtils.get_full_object(intf, csess.info())
    return task.Task(self, session_obj.assessor(assessor_label), upload_dir)
def __init__(self, mod_name, directory, email, text_report):
    """
    Entry point of the base Module class.

    :param mod_name: name of the module
    :param directory: temp directory to store data
    :param email: email address(es) to send the report to
    :param text_report: string written at the beginning of the report email
    :return: None
    """
    # Flag raised when there is something to report by email
    self.send_an_email = 0
    self.mod_name = mod_name
    self.directory = directory
    self.text_report = text_report
    # Normalize the email argument into a list (None stays None)
    self.email = XnatUtils.get_input_list(input_val=email, default_val=None)
def get_task(self, intf, cscan, upload_dir):
    """
    Get the Task object for this processor's assessor on a scan.

    :param intf: XNAT interface (pyxnat.Interface class)
    :param cscan: CachedImageScan object from XnatUtils
    :param upload_dir: the directory to put the processed data when the
                       process is done
    :return: Task object
    """
    assessor_label = self.get_assessor_name(cscan)
    scan_obj = XnatUtils.get_full_object(intf, cscan.info())
    assessor_obj = scan_obj.parent().assessor(assessor_label)
    return task.Task(self, assessor_obj, upload_dir)
def pi_from_project(project):
    """
    Get the last name of PI who owns the project on XNAT

    :param project: String of the ID of project on XNAT.
    :return: String of the PIs last name, '' if it cannot be retrieved
    """
    pi_name = ''
    # Fix: initialize before the try -- in the original, a failure inside
    # get_interface() left xnat unbound and the finally clause raised
    # NameError, masking the real error
    xnat = None
    try:
        xnat = XnatUtils.get_interface()
        proj = xnat.select.project(project)
        pi_name = proj.attrs.get('xnat:projectdata/pi/lastname')
    except Exception:
        # Narrowed from a bare "except:"; deliberately best-effort,
        # returning '' on any failure
        pass
    finally:
        if xnat is not None:
            xnat.disconnect()
    return pi_name
def update_scan(self, xnat, scan_info, scan_proc_list, scan_mod_list):
    """
    Update a scan: run scan modules and update scan processor tasks.

    :param xnat: pyxnat Interface object
    :param scan_info: scan dictionary (see XnatUtils.list_scans)
    :param scan_proc_list: list of processors running on a scan
    :param scan_mod_list: list of modules running on a scan
    :return: None
    """
    # Modules: fetch the full scan object lazily, shared by all modules
    scan_obj = None
    for scan_mod in scan_mod_list:
        # print() call form for consistency with the rest of the file
        # (was a Python-2-only print statement)
        print(' * Module: '+scan_mod.getname())
        if scan_mod.needs_run(scan_info, xnat):
            if scan_obj is None:  # identity test instead of "== None"
                scan_obj = XnatUtils.get_full_object(xnat, scan_info)
            scan_mod.run(scan_info, scan_obj)
    # Processors
    for scan_proc in scan_proc_list:
        if scan_proc.should_run(scan_info):
            scan_task = scan_proc.get_task(xnat, scan_info, self.upload_dir)
            print(' *Processor: '+scan_proc.name+':updating status:'+scan_task.assessor_label)
            scan_task.update_status()
def generate_task(self, xnat, assr_info, sess_proc_list, scan_proc_list):
    """
    Generate a task for the assessor in the info

    :param xnat: pyxnat.Interface object
    :param assr_info: dictionary containing the assessor info
                      (See XnatUtils.list_assessors)
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :return: task if processor and assessor match, None otherwise
    """
    task_proc = self.match_proc(assr_info, sess_proc_list, scan_proc_list)
    if task_proc is None:  # identity test instead of "== None"
        LOGGER.warn('no matching processor found:'+assr_info['assessor_label'])
        return None
    # Get a new task with the matched processor
    assr = XnatUtils.get_full_object(xnat, assr_info)
    return Task(task_proc, assr, RESULTS_DIR)
def get_assessors_list(xnat, project_id, slocal):
    """
    Get the assessor list from XNAT and filter it if necessary

    :param xnat: pyxnat.Interface object
    :param project_id: project ID on XNAT
    :param slocal: comma-separated session labels selected by user
    :return: list of assessors for a project
    """
    # Get lists of assessors for this project
    assr_list = XnatUtils.list_project_assessors(xnat, project_id)
    #filter the assessors to the sessions given as parameters if given
    if slocal and slocal.lower() != 'all':
        # List comprehension instead of filter(): under Python 3 filter()
        # returns a lazy iterator, so the emptiness check below would
        # never fire
        wanted = slocal.split(',')
        assr_list = [x for x in assr_list if x['session_label'] in wanted]
        if not assr_list:
            LOGGER.warn('No processes from XNAT matched the sessions given: '+slocal+' .')
            # Nothing to do for this run: stop the script
            sys.exit(1)
    return assr_list
def build_scan_modules(self, xnat, cscan, scan_mod_list):
    """
    Run the scan-level modules on a scan.

    :param xnat: pyxnat.Interface object
    :param cscan: CachedImageScan from XnatUtils
    :param scan_mod_list: list of modules running on a scan
    :return: None
    """
    scan_info = cscan.info()
    # Fetch the full scan object lazily, shared by all modules
    scan_obj = None
    # Modules
    for scan_mod in scan_mod_list:
        LOGGER.debug('* Module: ' + scan_mod.getname())
        if scan_mod.needs_run(cscan, xnat):
            if scan_obj is None:  # identity test instead of "== None"
                scan_obj = XnatUtils.get_full_object(xnat, scan_info)
            try:
                scan_mod.run(scan_info, scan_obj)
            except Exception as E:
                # Keep building the rest of the session on module failure
                LOGGER.critical(
                    'Caught exception building session scan module in session %s'
                    % scan_info['session_label'])
                # NOTE(review): E.message is Python-2-only -- confirm
                # before any Python 3 migration
                LOGGER.critical(
                    'Exception class %s caught with message %s'
                    % (E.__class__, E.message))
def generate_task(self, xnat, assr_info, sess_proc_list, scan_proc_list):
    """
    Generate a task for the assessor in the info

    :param xnat: pyxnat.Interface object
    :param assr_info: dictionary containing the assessor info
                      (See XnatUtils.list_assessors)
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :return: task if processor and assessor match, None otherwise
    """
    task_proc = self.match_proc(assr_info, sess_proc_list, scan_proc_list)
    if task_proc is None:  # identity test instead of "== None"
        LOGGER.warn('no matching processor found:' + assr_info['assessor_label'])
        return None
    # Get a new task with the matched processor
    assr = XnatUtils.get_full_object(xnat, assr_info)
    return Task(task_proc, assr, DAX_SETTINGS.get_results_dir())
def has_new_processors(xnat, project_id, sess_proc_list, scan_proc_list):
    """
    Check if has new processors

    :param xnat: pyxnat.Interface object
    :param project_id: project ID on XNAT
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :return: True if has new processors, False otherwise
    """
    # Proc types that already have assessors on XNAT
    existing_types = {a['proctype']
                      for a in XnatUtils.list_project_assessors(xnat, project_id)}
    # Proc types prescribed for the project
    prescribed_types = {p.name for p in sess_proc_list + scan_proc_list}
    # Any prescribed processor with no assessor yet?
    return bool(prescribed_types - existing_types)
def has_new_processors(xnat, project_id, sess_proc_list, scan_proc_list):
    """
    Check if has new processors

    :param xnat: pyxnat.Interface object
    :param project_id: project ID on XNAT
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :return: True if has new processors, False otherwise
    """
    # Collect the proc types prescribed for the project
    prescribed_names = set()
    for proc in sess_proc_list + scan_proc_list:
        prescribed_names.add(proc.name)
    # Proc types that already have assessors on XNAT
    assr_list = XnatUtils.list_project_assessors(xnat, project_id)
    known_types = set(info['proctype'] for info in assr_list)
    # True when at least one prescribed processor has no assessor yet
    return len(prescribed_names - known_types) > 0
def set_session_lastupdated(xnat, sess_info, update_start_time):
    """
    Set the last session update date on XNAT.

    :param xnat: pyxnat.Interface object
    :param sess_info: dictionary of session information
    :param update_start_time: datetime when the update started
    :return: False if the session changed since the update started
             (timestamp not written), True otherwise
    """
    sess_obj = XnatUtils.get_full_object(xnat, sess_info)
    xsi = sess_info['xsiType']
    modified_str = sess_obj.attrs.get(xsi + '/meta/last_modified')
    modified = datetime.strptime(modified_str[0:19], '%Y-%m-%d %H:%M:%S')
    if modified > update_start_time:
        # Session changed while we were working on it: do not stamp it
        return False
    # One minute ahead: writing the field itself bumps last_modified,
    # so the stamp must sort after that change
    stamp = (datetime.now() + timedelta(minutes=1)).strftime(UPDATE_FORMAT)
    LOGGER.debug('setting last_updated for:' + sess_info['label'] + ' to ' + stamp)
    sess_obj.attrs.set(xsi + '/original', UPDATE_PREFIX + stamp)
    return True
def set_session_lastupdated(xnat, sess_info, update_start_time):
    """
    Set the last session update date on XNAT.

    :param xnat: pyxnat.Interface object
    :param sess_info: dictionary of session information
    :param update_start_time: datetime when the update started
    :return: False if the session changed since the update started
             (timestamp not written), True otherwise
    """
    xsi_type = sess_info['xsiType']
    sess_obj = XnatUtils.get_full_object(xnat, sess_info)
    last_modified_xnat = sess_obj.attrs.get(xsi_type + '/meta/last_modified')
    last_mod = datetime.strptime(last_modified_xnat[0:19],
                                 '%Y-%m-%d %H:%M:%S')
    if last_mod > update_start_time:
        # Session changed while we were working on it: do not stamp it
        return False

    # format:
    update_str = (datetime.now() + timedelta(minutes=1)).strftime(UPDATE_FORMAT)
    # We set update to one minute into the future
    # since setting update field will change last modified time
    LOGGER.debug('setting last_updated for:' + sess_info['label'] + ' to ' +
                 update_str)
    try:
        sess_obj.attrs.set(xsi_type + '/original', UPDATE_PREFIX + update_str)
    except Exception as E:
        # Best-effort: a failed stamp is logged but still returns True
        # NOTE(review): other session dicts in this file use 'label';
        # confirm 'session_label' exists here
        LOGGER.critical(
            'Caught exception setting update timestamp for session %s'
            % sess_info['session_label'])
        # NOTE(review): E.message is Python-2-only -- confirm before any
        # Python 3 migration
        LOGGER.critical('Exception class %s caught with message %s' %
                        (E.__class__, E.message))
    return True
def get_assessors_list(xnat, project_id, slocal):
    """
    Get the assessor list from XNAT and filter it if necessary

    :param xnat: pyxnat.Interface object
    :param project_id: project ID on XNAT
    :param slocal: comma-separated session labels selected by user
    :return: list of assessors for a project
    """
    # Get lists of assessors for this project
    assr_list = XnatUtils.list_project_assessors(xnat, project_id)
    #filter the assessors to the sessions given as parameters if given
    if slocal and slocal.lower() != 'all':
        # List comprehension instead of filter(): under Python 3 filter()
        # returns a lazy iterator, so the emptiness check below would
        # never fire
        wanted = slocal.split(',')
        assr_list = [x for x in assr_list if x['session_label'] in wanted]
        if not assr_list:
            LOGGER.warn(
                'No processes from XNAT matched the sessions given: ' +
                slocal + ' .')
            # Nothing to do for this run: stop the script
            sys.exit(1)
    return assr_list
def get_sessions_list(xnat, project_id, slocal):
    """
    Get the sessions list from XNAT and sort it.
    Move the new sessions to the front.

    :param xnat: pyxnat.Interface object
    :param project_id: project ID on XNAT
    :param slocal: comma-separated session labels selected by user
    :return: list of sessions sorted for a project
    """
    list_sessions = XnatUtils.list_sessions(xnat, project_id)
    if slocal and slocal.lower() != 'all':
        # Keep only the sessions whose label was given by the user.
        # List comprehension instead of filter(): under Python 3 filter()
        # returns a lazy iterator, so the emptiness check below would never
        # fire and the list could not be iterated twice further down.
        wanted = slocal.split(',')
        list_sessions = [x for x in list_sessions if x['label'] in wanted]
        if not list_sessions:
            LOGGER.warn('No session from XNAT matched the sessions given: '+slocal+' .')
    #Sort sessions: first the new sessions that have never been updated
    sorted_list = [sess for sess in list_sessions if not sess['last_updated']]
    new_sessions_label = [sess['label'] for sess in sorted_list]
    for session in list_sessions:
        if session['label'] not in new_sessions_label:
            sorted_list.append(session)
    return sorted_list
def get_task(self, intf, scan_dict, upload_dir):
    """
    Get the Task object for this processor's assessor on a scan.

    :param intf: XNAT interface (pyxnat.Interface class)
    :param scan_dict: dictionary of scan information
    :param upload_dir: directory to put the processed data when done
    :return: Task object
    """
    scan_obj = XnatUtils.get_full_object(intf, scan_dict)
    assessor_label = self.get_assessor_name(scan_dict)
    assessor_obj = scan_obj.parent().assessor(assessor_label)
    return task.Task(self, assessor_obj, upload_dir)
def build_scan_processors(self, xnat, cscan, scan_proc_list):
    """
    Build the scan processor tasks for one scan, either through the disk
    queue (diskq launcher types) or directly on XNAT.

    :param xnat: pyxnat.Interface object
    :param cscan: CachedImageScan from XnatUtils
    :param scan_proc_list: list of processors running on a scan
    :return: None
    """
    scan_info = cscan.info()
    # Processors
    for scan_proc in scan_proc_list:
        if not scan_proc.should_run(scan_info):
            continue
        assr_name = scan_proc.get_assessor_name(cscan)
        # Look for existing assessor
        proc_assr = None
        for assr in cscan.parent().assessors():
            if assr.info()['label'] == assr_name:
                proc_assr = assr
        if self.launcher_type in ['diskq-xnat', 'diskq-combined']:
            # Disk-queue mode: build an XnatTask spooled in the DISKQ dir
            if proc_assr == None or proc_assr.info()['procstatus'] in [
                    task.NEED_INPUTS, task.NEED_TO_RUN
            ] or proc_assr.info()['qcstatus'] in [task.RERUN, task.REPROC]:
                # TODO: get session object directly
                scan = XnatUtils.get_full_object(xnat, scan_info)
                assessor = scan.parent().assessor(assr_name)
                xtask = XnatTask(
                    scan_proc, assessor, DAX_SETTINGS.get_results_dir(),
                    os.path.join(DAX_SETTINGS.get_results_dir(), 'DISKQ'))
                # Rerun/reproc requests must be acknowledged before building
                if proc_assr != None and proc_assr.info()['qcstatus'] in [
                        task.RERUN, task.REPROC
                ]:
                    xtask.update_status()
                LOGGER.debug('building task:' + assr_name)
                (proc_status, qc_status) = xtask.build_task(
                    cscan, self.root_job_dir, self.job_email,
                    self.job_email_options)
                LOGGER.debug('proc_status=' + proc_status + ', qc_status=' +
                             qc_status)
            else:
                # TODO: check that it actually exists in QUEUE
                LOGGER.debug('skipping, already built:' + assr_name)
        else:
            # XNAT mode: create/refresh the task status directly
            if proc_assr == None or proc_assr.info(
            )['procstatus'] == task.NEED_INPUTS:
                scan_task = scan_proc.get_task(
                    xnat, cscan, DAX_SETTINGS.get_results_dir())
                log_updating_status(scan_proc.name, scan_task.assessor_label)
                has_inputs, qcstatus = scan_proc.has_inputs(cscan)
                try:
                    if has_inputs == 1:
                        scan_task.set_status(task.NEED_TO_RUN)
                        scan_task.set_qcstatus(task.JOB_PENDING)
                    elif has_inputs == -1:
                        scan_task.set_status(task.NO_DATA)
                        scan_task.set_qcstatus(qcstatus)
                    else:
                        scan_task.set_qcstatus(qcstatus)
                except Exception as E:
                    # NOTE(review): E.message is Python-2-only -- confirm
                    # before any Python 3 migration
                    LOGGER.critical(
                        'Caught exception building sessions %s'
                        % scan_info['session_label'])
                    LOGGER.critical(
                        'Exception class %s caught with message %s'
                        % (E.__class__, E.message))
            else:
                # Other statuses handled by dax_update_open_tasks
                pass
def from_file(self, source):
    """
    Load this object's contents from a YAML file and record the source.

    :param source: path of the YAML file to read
    :return: self, to allow chaining
    """
    loaded = XnatUtils.read_yaml(source)
    self.source_type = "file"
    self.source_id = source
    self.contents = loaded
    return self
def build(self, lockfile_prefix, project_local, sessions_local, mod_delta=None):
    """
    Main method to build the tasks and the sessions

    :param lockfile_prefix: prefix for flag file to lock the launcher
    :param project_local: project to run locally
    :param sessions_local: list of sessions to launch tasks
                           associated to the project locally
    :param mod_delta: if set, only build sessions modified within this delta
    :return: None
    """
    if self.launcher_type == 'diskq-cluster':
        LOGGER.error('cannot build jobs with this launcher type:' +
                     self.launcher_type)
        return

    LOGGER.info('-------------- Build --------------\n')
    LOGGER.info('launcher_type = ' + self.launcher_type)
    LOGGER.info('mod delta=' + str(mod_delta))

    flagfile = os.path.join(
        os.path.join(DAX_SETTINGS.get_results_dir(), 'FlagFiles'),
        lockfile_prefix + '_' + BUILD_SUFFIX)
    project_list = self.init_script(flagfile, project_local,
                                    type_update=1, start_end=1)

    # Fix: initialize before the try so finish_script in the finally clause
    # cannot hit an unbound name if get_interface() raises (the sibling
    # update_tasks/launch_jobs methods already guard this way)
    xnat = None
    try:
        LOGGER.info('Connecting to XNAT at ' + self.xnat_host)
        xnat = XnatUtils.get_interface(self.xnat_host, self.xnat_user,
                                       self.xnat_pass)
        if not XnatUtils.has_dax_datatypes(xnat):
            raise Exception(
                'error: dax datatypes are not installed on your xnat <%s>'
                % (self.xnat_host))

        #Priority if set:
        if self.priority_project and not project_local:
            unique_list = set(self.project_process_dict.keys() +
                              self.project_modules_dict.keys())
            project_list = self.get_project_list(list(unique_list))

        # Build projects
        for project_id in project_list:
            LOGGER.info('===== PROJECT:' + project_id + ' =====')
            try:
                self.build_project(xnat, project_id, lockfile_prefix,
                                   sessions_local, mod_delta=mod_delta)
            except Exception as E:
                # Keep building the remaining projects on failure
                LOGGER.critical('Caught exception building project %s'
                                % project_id)
                LOGGER.critical('Exception class %s caught with message %s'
                                % (E.__class__, E.message))
    finally:
        self.finish_script(xnat, flagfile, project_list, 1, 2, project_local)
def update_tasks(self, lockfile_prefix, project_local, sessions_local):
    """Main method to update the tasks.

    :param lockfile_prefix: prefix for flag file to lock the launcher
    :param project_local: project to run locally
    :param sessions_local: list of sessions to update tasks
                           associated to the project locally
    :return: None
    """
    # Updating is a no-op for an XNAT-only disk-queue launcher.
    if self.launcher_type == 'diskq-xnat':
        LOGGER.error('cannot update jobs with this launcher type:' +
                     self.launcher_type)
        return

    LOGGER.info('-------------- Update Tasks --------------\n')
    LOGGER.info('launcher_type = ' + self.launcher_type)

    # xnat starts as None so the finally clause is safe even when the
    # disk-queue branch below never opens a connection.
    xnat = None
    # Flag file locks the launcher against concurrent update runs.
    flagfile = os.path.join(
        os.path.join(DAX_SETTINGS.get_results_dir(), 'FlagFiles'),
        lockfile_prefix + '_' + UPDATE_SUFFIX)
    project_list = self.init_script(flagfile, project_local,
                                    type_update=2, start_end=1)

    try:
        if self.launcher_type in ['diskq-cluster', 'diskq-combined']:
            # Disk-queue modes: tasks are loaded from the local DISKQ
            # directory, no XNAT connection required.
            LOGGER.info('Loading task queue from:' +
                        os.path.join(DAX_SETTINGS.get_results_dir(),
                                     'DISKQ'))
            task_list = load_task_queue()

            LOGGER.info(str(len(task_list)) + ' tasks found.')

            LOGGER.info('Updating tasks...')
            for cur_task in task_list:
                LOGGER.info('Updating task:' + cur_task.assessor_label)
                cur_task.update_status()
        else:
            # XNAT mode: query open tasks from the server and update each.
            LOGGER.info('Connecting to XNAT at ' + self.xnat_host)
            xnat = XnatUtils.get_interface(self.xnat_host, self.xnat_user,
                                          self.xnat_pass)

            if not XnatUtils.has_dax_datatypes(xnat):
                raise Exception(
                    'error: dax datatypes are not installed on your xnat <%s>'
                    % (self.xnat_host))

            LOGGER.info('Getting task list...')
            task_list = self.get_tasks(xnat, self.is_updatable_tasks,
                                       project_list, sessions_local)

            LOGGER.info(str(len(task_list)) + ' open tasks found')
            LOGGER.info('Updating tasks...')
            for cur_task in task_list:
                LOGGER.info(' Updating task:' + cur_task.assessor_label)
                cur_task.update_status()
    finally:
        # Always release the flag file; finish_script receives xnat=None
        # when no connection was opened.
        self.finish_script(xnat, flagfile, project_list, 2, 2,
                           project_local)
def launch_jobs(self, lockfile_prefix, project_local, sessions_local,
                writeonly=False, pbsdir=None):
    """Main method to launch the tasks.

    :param lockfile_prefix: prefix for flag file to lock the launcher
    :param project_local: project to run locally
    :param sessions_local: list of sessions to launch tasks
                           associated to the project locally
    :param writeonly: write the job files without submitting them
    :param pbsdir: folder to store the pbs file
    :return: None
    """
    # Launching is a no-op for an XNAT-only disk-queue launcher.
    if self.launcher_type == 'diskq-xnat':
        LOGGER.error('cannot launch jobs with this launcher type:' +
                     self.launcher_type)
        return

    LOGGER.info('-------------- Launch Tasks --------------\n')
    LOGGER.info('launcher_type = ' + self.launcher_type)

    # xnat starts as None so the finally clause is safe even when the
    # disk-queue branch below never opens a connection.
    xnat = None
    # Flag file locks the launcher against concurrent launch runs.
    flagfile = os.path.join(
        os.path.join(DAX_SETTINGS.get_results_dir(), 'FlagFiles'),
        lockfile_prefix + '_' + LAUNCH_SUFFIX)
    project_list = self.init_script(flagfile, project_local,
                                    type_update=3, start_end=1)

    try:
        if self.launcher_type in ['diskq-cluster', 'diskq-combined']:
            # Disk-queue modes: pull only tasks already flagged
            # NEED_TO_RUN from the local DISKQ directory.
            LOGGER.info('Loading task queue from:' +
                        os.path.join(DAX_SETTINGS.get_results_dir(),
                                     'DISKQ'))
            task_list = load_task_queue(status=task.NEED_TO_RUN)

            LOGGER.info(str(len(task_list)) +
                        ' tasks that need to be launched found')
            self.launch_tasks(task_list)
        else:
            # XNAT mode: query launchable tasks from the server.
            LOGGER.info('Connecting to XNAT at ' + self.xnat_host)
            xnat = XnatUtils.get_interface(self.xnat_host, self.xnat_user,
                                          self.xnat_pass)

            if not XnatUtils.has_dax_datatypes(xnat):
                raise Exception(
                    'error: dax datatypes are not installed on your xnat <%s>'
                    % (self.xnat_host))

            LOGGER.info('Getting launchable tasks list...')
            task_list = self.get_tasks(xnat, self.is_launchable_tasks,
                                       project_list, sessions_local)

            LOGGER.info(str(len(task_list)) +
                        ' tasks that need to be launched found')

            # Launch the task that need to be launch
            # (writeonly/pbsdir only apply in this XNAT branch)
            self.launch_tasks(task_list, writeonly, pbsdir)
    finally:
        # Always release the flag file; finish_script receives xnat=None
        # when no connection was opened.
        self.finish_script(xnat, flagfile, project_list, 3, 2,
                           project_local)
def get_sess_lastmod(xnat, sess_info):
    """Return the last-modified timestamp of a session on XNAT.

    :param xnat: pyxnat interface to query
    :param sess_info: session dictionary with 'xsiType' and the fields
                      needed by XnatUtils.get_full_object
    :return: datetime parsed from the session's meta/last_modified attribute
    """
    sess_obj = XnatUtils.get_full_object(xnat, sess_info)
    # XNAT returns a longer timestamp string; only the first 19 chars
    # ('YYYY-MM-DD HH:MM:SS') are parsed.
    stamp = sess_obj.attrs.get(sess_info['xsiType'] + '/meta/last_modified')
    return datetime.strptime(stamp[:19], '%Y-%m-%d %H:%M:%S')
def get_task(self, intf, session_dict, upload_dir):
    """Build the Task object for this processor on the given session.

    :param intf: pyxnat interface
    :param session_dict: session dictionary describing the XNAT session
    :param upload_dir: directory handed to the Task for uploads
    :return: task.Task bound to this processor's assessor on the session
    """
    label = self.get_assessor_name(session_dict)
    sess_obj = XnatUtils.get_full_object(intf, session_dict)
    return task.Task(self, sess_obj.assessor(label), upload_dir)
def build_session(self, xnat, sess_info, sess_proc_list,
                  scan_proc_list, sess_mod_list, scan_mod_list):
    """
    Build a session

    :param xnat: pyxnat.Interface object
    :param sess_info: python ditionary from XnatUtils.list_sessions method
    :param sess_proc_list: list of processors running on a session
    :param scan_proc_list: list of processors running on a scan
    :param sess_mod_list: list of modules running on a session
    :param scan_mod_list: list of modules running on a scan
    :return: None
    """
    # Cached view of the session so repeated info()/scans()/assessors()
    # reads do not each hit XNAT.
    csess = XnatUtils.CachedImageSession(xnat,
                                         sess_info['project_label'],
                                         sess_info['subject_label'],
                                         sess_info['session_label'])
    session_info = csess.info()
    # Full pyxnat object is fetched lazily, only if a module needs to run.
    sess_obj = None

    # Modules on session
    LOGGER.debug('== Build modules for session ==')
    for sess_mod in sess_mod_list:
        LOGGER.debug('* Module: '+sess_mod.getname())
        if sess_mod.needs_run(csess, xnat):
            if sess_obj == None:
                sess_obj = XnatUtils.get_full_object(xnat, session_info)
            sess_mod.run(session_info, sess_obj)

    # Scans: run scan-level modules/processors only if any are configured
    LOGGER.debug('== Build modules/processors for scans in session ==')
    if scan_proc_list or scan_mod_list:
        for cscan in csess.scans():
            LOGGER.debug('+SCAN: '+cscan.info()['scan_id'])
            self.build_scan(xnat, cscan, scan_proc_list, scan_mod_list)

    # Processors
    LOGGER.debug('== Build processors for session ==')
    for sess_proc in sess_proc_list:
        if sess_proc.should_run(session_info):
            assr_name = sess_proc.get_assessor_name(csess)

            # Look for existing assessor
            proc_assr = None
            for assr in csess.assessors():
                if assr.info()['label'] == assr_name:
                    proc_assr = assr

            if proc_assr == None:
                # Create it if it doesn't exist
                sess_task = sess_proc.get_task(xnat, csess, RESULTS_DIR)
                self.log_updating_status(sess_proc.name,
                                         sess_task.assessor_label)
                # has_inputs: 1 = ready to run, -1 = no data,
                # other values leave the new assessor untouched here
                has_inputs, qcstatus = sess_proc.has_inputs(csess)
                if has_inputs == 1:
                    sess_task.set_status(task.NEED_TO_RUN)
                    sess_task.set_qcstatus(task.JOB_PENDING)
                elif has_inputs == -1:
                    sess_task.set_status(task.NO_DATA)
                    sess_task.set_qcstatus(qcstatus)
            elif proc_assr.info()['procstatus'] == task.NEED_INPUTS:
                # Assessor exists but is waiting on inputs: re-check them
                has_inputs, qcstatus = sess_proc.has_inputs(csess)
                if has_inputs == 1:
                    sess_task = sess_proc.get_task(xnat, csess, RESULTS_DIR)
                    self.log_updating_status(sess_proc.name,
                                             sess_task.assessor_label)
                    sess_task.set_status(task.NEED_TO_RUN)
                    sess_task.set_qcstatus(task.JOB_PENDING)
                elif has_inputs == -1:
                    sess_task = sess_proc.get_task(xnat, csess, RESULTS_DIR)
                    self.log_updating_status(sess_proc.name,
                                             sess_task.assessor_label)
                    sess_task.set_status(task.NO_DATA)
                    sess_task.set_qcstatus(qcstatus)
                else:
                    # Leave as NEED_INPUTS
                    pass
            else:
                # Other statuses handled by dax_update_tasks
                pass