def main():
    """Run freesurfer for one subject, or submit a batch of new jobs."""
    global DRYRUN, PARALLEL, LOG_DIR

    args = docopt(__doc__)
    study = args['<study>']
    scanid = args['--subject']
    resubmit = args['--resubmit']
    PARALLEL = args['--parallel']
    DRYRUN = args['--dry-run']
    # NOTE: any new option that must be propagated to subject jobs after a
    # batch submit has to be added to create_command(); anything that must
    # reach recon-all itself goes in get_freesurfer_arguments().

    config = load_config(study)

    if args['--log-to-server']:
        add_server_handler(config)
    if args['--debug']:
        logger.setLevel(logging.DEBUG)

    logger.info('Starting')
    check_input_paths(config)

    # Subject QC metadata: maps each subject to its blacklisted series
    # (value may be empty when nothing has been blacklisted for it).
    qc_subjects = utils.get_subject_metadata(config)
    fs_path = config.get_path('freesurfer')
    LOG_DIR = make_error_log_dir(fs_path)

    if scanid:
        # Single subject mode.
        blacklisted_series = get_blacklist(qc_subjects, scanid)
        subject = dm_scan.Scan(scanid, config)
        if subject.is_phantom:
            sys.exit(
                'Subject {} is a phantom, cannot be analyzed'.format(scanid))
        run_freesurfer(subject, blacklisted_series, config, resubmit)
        return

    # Batch mode.
    update_aggregate_stats(config)
    destination = os.path.join(fs_path, 'freesurfer_aggregate_log.csv')
    update_aggregate_log(config, qc_subjects, destination)

    fs_subjects = get_new_subjects(config, qc_subjects)
    logger.info("Submitting {} new subjects".format(len(fs_subjects)))

    if resubmit:
        # Narrow the search space to subjects not already queued this run.
        remaining = (subj for subj in qc_subjects if subj not in fs_subjects)
        halted = get_halted_subjects(fs_path, remaining)
        logger.info("Resubmitting {} subjects".format(len(halted)))
        fs_subjects.extend(halted)

    submit_proc_freesurfer(fs_path, fs_subjects, args, config)
def process_subject(subject, cfg, be, bids_dir, rewrite):
    """
    Convert subject in DATMAN folder to BIDS-style

    Args:
        subject: DATMAN subject ID string (parseable by scanid.parse)
        cfg: datman config object for the study
        be: exporter providing construct_bids_name() — TODO confirm type
        bids_dir: root of the output BIDS directory
        rewrite: if True, re-transfer outputs that already exist

    Returns:
        None
    """
    ident = scanid.parse(subject)
    subscan = scan.Scan(subject, cfg)

    bids_sub = ident.get_bids_name()
    bids_ses = ident.timepoint
    exp_path = make_bids_template(bids_dir, "sub-" + bids_sub,
                                  "ses-" + bids_ses)

    dm_to_bids = []
    if dashboard.dash_found:
        db_subject = dashboard.get_subject(subject)
        db_subject.add_bids(bids_sub, bids_ses)

    # Construct initial BIDS transformation info
    scan_list = list(sort_by_series(subscan.niftis))
    for i, series in enumerate(scan_list):
        # Construct bids name
        logger.info("Processing {}".format(series))

        bids_dict = get_tag_bids_spec(cfg, series.tag, series.site)
        if not bids_dict:
            continue
        bids_dict.update({"sub": bids_sub, "ses": bids_ses})

        # Deal with reference scans: they inherit the task of the scan
        # immediately following them in series order.
        if bids_dict.get('is_ref', False):
            # BUG FIX: guard the lookahead. A reference scan that is the
            # last series (no i + 1), or whose target has no BIDS spec or
            # no 'task' key, used to raise IndexError/TypeError here.
            target_dict = None
            if i + 1 < len(scan_list):
                target_dict = get_tag_bids_spec(cfg, scan_list[i + 1].tag,
                                                series.site)
            if not target_dict or 'task' not in target_dict:
                logger.error(
                    "Cannot find target task for reference scan {}".format(
                        series))
                logger.warning("Skipping!")
                continue
            bids_dict.update({'task': target_dict['task']})

        bids_prefix = be.construct_bids_name(bids_dict)
        class_path = os.path.join(exp_path, bids_dict["class"])

        # Make dm2bids transformation file, update source if applicable
        bidsfiles = BIDSFile(bids_sub, bids_ses, series, class_path,
                             bids_prefix, bids_dict).update_source(cfg, be)
        if bidsfiles is None:
            logger.error("Cannot find derivative of {}".format(series))
            logger.warning("Skipping!")
            continue

        if isinstance(bidsfiles, list):
            dm_to_bids.extend(bidsfiles)
        else:
            dm_to_bids.append(bidsfiles)

    # Apply prioritization calls
    dm_to_bids = prioritize_scans(dm_to_bids)

    # Prepare fieldmap information (requires knowledge about all scans)
    dm_to_bids = prepare_fieldmaps(dm_to_bids)

    # Transfer files over
    for k in dm_to_bids:
        if os.path.exists(k.dest_nii) and not rewrite:
            logger.info("Output file {} already exists!".format(k.dest_nii))
            continue
        k.transfer_files()
        if dashboard.dash_found:
            db_series = dashboard.get_scan(k.series.path)
            db_series.add_bids(str(k))
    return