def main():
    """Initializes the main script from a command-line call to generate
    single-subject or multi-subject workflow(s)."""
    import os
    import gc
    import sys
    import json
    import itertools
    from types import SimpleNamespace
    from multiprocessing import set_start_method, Process, Manager

    import pkg_resources
    from colorama import Fore, Style

    from pynets.core.utils import build_args_from_config, flatten
    from pynets.cli.pynets_run import build_workflow

    try:
        import pynets
    except ImportError:
        print(
            "PyNets not installed! Ensure that you are referencing the"
            " correct site-packages and using Python 3.6+"
        )

    if len(sys.argv) < 2:
        print(
            "\nMissing command-line inputs! See help options with the -h"
            " flag.\n"
        )
        sys.exit()

    print(f"{Fore.LIGHTBLUE_EX}\nBIDS API\n")
    print(Style.RESET_ALL)
    print(f"{Fore.LIGHTGREEN_EX}Obtaining Derivatives Layout...")
    print(Style.RESET_ALL)

    modalities = ["func", "dwi"]
    space = "T1w"

    bids_args = get_bids_parser().parse_args()
    participant_label = bids_args.participant_label
    session_label = bids_args.session_label
    run = bids_args.run_label
    if isinstance(run, list):
        run = str(run[0]).zfill(2)
    modality = bids_args.modality
    bids_config = bids_args.config
    analysis_level = bids_args.analysis_level
    clean = bids_args.clean

    if analysis_level == "group" and participant_label is not None:
        raise ValueError(
            "Error: You have indicated a group analysis level run, but have"
            " specified a participant label!"
        )

    if analysis_level == "participant" and participant_label is None:
        raise ValueError(
            "Error: You have indicated a participant analysis level run, but"
            " have not specified a participant label!"
        )

    if bids_config:
        with open(bids_config, "r") as stream:
            arg_dict = json.load(stream)
    else:
        with open(
            pkg_resources.resource_filename(
                "pynets", "config/bids_config.json"
            ),
            "r",
        ) as stream:
            arg_dict = json.load(stream)
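    # The JSON config mirrors the CLI argument namespace: one sub-dictionary
    # of flag/value pairs per modality ("func", "dwi") plus a shared "gen"
    # section, with values stored as strings that are literal-eval'ed
    # downstream. Illustrative sketch only -- the specific keys shown are
    # assumptions; consult the packaged config/bids_config.json for the
    # authoritative defaults:
    #
    #   {
    #     "gen": {"min_thr": "0.20", "max_thr": "0.80"},
    #     "func": {"mod": "['partcorr']"},
    #     "dwi": {"mod": "['csd']"}
    #   }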
    # S3
    # Primary inputs
    s3 = bids_args.bids_dir.startswith("s3://")

    if not s3:
        bids_dir = bids_args.bids_dir

    # Secondary inputs
    sec_s3_objs = []
    if isinstance(bids_args.ua, list):
        for i in bids_args.ua:
            if i.startswith("s3://"):
                print("Downloading user atlas: ", i, " from S3...")
                sec_s3_objs.append(i)
    if isinstance(bids_args.cm, list):
        for i in bids_args.cm:
            if i.startswith("s3://"):
                print("Downloading clustering mask: ", i, " from S3...")
                sec_s3_objs.append(i)
    if isinstance(bids_args.roi, list):
        for i in bids_args.roi:
            if i.startswith("s3://"):
                print("Downloading ROI mask: ", i, " from S3...")
                sec_s3_objs.append(i)
    if isinstance(bids_args.way, list):
        for i in bids_args.way:
            if i.startswith("s3://"):
                print("Downloading tractography waymask: ", i, " from S3...")
                sec_s3_objs.append(i)
    if bids_args.ref:
        if bids_args.ref.startswith("s3://"):
            print(
                "Downloading atlas labeling reference file: ",
                bids_args.ref,
                " from S3...",
            )
            sec_s3_objs.append(bids_args.ref)

    if s3 or len(sec_s3_objs) > 0:
        from boto3.session import Session
        from pynets.core import cloud_utils
        from pynets.core.utils import as_directory

        home = os.path.expanduser("~")
        creds = bool(cloud_utils.get_credentials())

        if s3:
            buck, remo = cloud_utils.parse_path(bids_args.bids_dir)
            os.makedirs(f"{home}/.pynets", exist_ok=True)
            os.makedirs(f"{home}/.pynets/input", exist_ok=True)
            os.makedirs(f"{home}/.pynets/output", exist_ok=True)
            bids_dir = as_directory(f"{home}/.pynets/input", remove=False)
            if (not creds) and bids_args.push_location:
                raise AttributeError(
                    "No AWS credentials found, but `--push_location` flag"
                    " called. Pushing will most likely fail."
                )
            else:
                output_dir = as_directory(
                    f"{home}/.pynets/output", remove=False
                )

            # Get S3 input data if needed
            if analysis_level == "participant":
                for partic, ses in list(
                    itertools.product(participant_label, session_label)
                ):
                    if ses is not None:
                        info = "sub-" + partic + "/ses-" + ses
                    elif ses is None:
                        info = "sub-" + partic
                    cloud_utils.s3_get_data(
                        buck, remo, bids_dir, modality, info=info
                    )
            elif analysis_level == "group":
                if len(session_label) > 1 and session_label[0] != "None":
                    for ses in session_label:
                        info = "ses-" + ses
                        cloud_utils.s3_get_data(
                            buck, remo, bids_dir, modality, info=info
                        )
                else:
                    cloud_utils.s3_get_data(buck, remo, bids_dir, modality)

        if len(sec_s3_objs) > 0:
            [access_key, secret_key] = cloud_utils.get_credentials()

            session = Session(
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key,
            )

            s3_r = session.resource("s3")
            s3_c = cloud_utils.s3_client(service="s3")
            sec_dir = as_directory(
                home + "/.pynets/secondary_files", remove=False
            )
            for s3_obj in [i for i in sec_s3_objs if i is not None]:
                buck, remo = cloud_utils.parse_path(s3_obj)
                s3_c.download_file(
                    buck, remo, f"{sec_dir}/{os.path.basename(s3_obj)}"
                )

            # Remap any S3 URIs among the secondary inputs to their local
            # download paths
            if isinstance(bids_args.ua, list):
                local_ua = bids_args.ua.copy()
                for i in local_ua:
                    if i.startswith("s3://"):
                        local_ua[
                            local_ua.index(i)
                        ] = f"{sec_dir}/{os.path.basename(i)}"
                bids_args.ua = local_ua
            if isinstance(bids_args.cm, list):
                local_cm = bids_args.cm.copy()
                for i in bids_args.cm:
                    if i.startswith("s3://"):
                        local_cm[
                            local_cm.index(i)
                        ] = f"{sec_dir}/{os.path.basename(i)}"
                bids_args.cm = local_cm
            if isinstance(bids_args.roi, list):
                local_roi = bids_args.roi.copy()
                for i in bids_args.roi:
                    if i.startswith("s3://"):
                        local_roi[
                            local_roi.index(i)
                        ] = f"{sec_dir}/{os.path.basename(i)}"
                bids_args.roi = local_roi
            if isinstance(bids_args.way, list):
                local_way = bids_args.way.copy()
                for i in bids_args.way:
                    if i.startswith("s3://"):
                        local_way[
                            local_way.index(i)
                        ] = f"{sec_dir}/{os.path.basename(i)}"
                bids_args.way = local_way
            if bids_args.ref:
                if bids_args.ref.startswith("s3://"):
                    bids_args.ref = \
                        f"{sec_dir}/{os.path.basename(bids_args.ref)}"

    if not s3:
        # Local outputs. This also covers the case of a local BIDS directory
        # combined with S3-hosted secondary files, where `output_dir` would
        # otherwise never be assigned.
        output_dir = bids_args.output_dir
        if output_dir is None:
            raise ValueError("Must specify an output directory")

    intermodal_dict = {
        k: []
        for k in [
            "funcs",
            "confs",
            "dwis",
            "bvals",
            "bvecs",
            "anats",
            "masks",
            "subjs",
            "seshs",
        ]
    }
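    # When more than one modality is requested, the per-modality outputs of
    # sweep_directory() are accumulated in intermodal_dict and later
    # flattened/de-duplicated into a single set of file lists; for a single
    # modality, the dict is discarded and the sweep outputs are used directly.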
    if analysis_level == "group":
        if len(modality) > 1:
            i = 0
            for mod_ in modality:
                outs = sweep_directory(
                    bids_dir,
                    modality=mod_,
                    space=space,
                    sesh=session_label,
                    run=run,
                )
                if mod_ == "func":
                    if i == 0:
                        funcs, confs, _, _, _, anats, masks, subjs, seshs = \
                            outs
                    else:
                        funcs, confs, _, _, _, _, _, _, _ = outs
                    intermodal_dict["funcs"].append(funcs)
                    intermodal_dict["confs"].append(confs)
                elif mod_ == "dwi":
                    if i == 0:
                        _, _, dwis, bvals, bvecs, anats, masks, subjs, seshs \
                            = outs
                    else:
                        _, _, dwis, bvals, bvecs, _, _, _, _ = outs
                    intermodal_dict["dwis"].append(dwis)
                    intermodal_dict["bvals"].append(bvals)
                    intermodal_dict["bvecs"].append(bvecs)
                intermodal_dict["anats"].append(anats)
                intermodal_dict["masks"].append(masks)
                intermodal_dict["subjs"].append(subjs)
                intermodal_dict["seshs"].append(seshs)
                i += 1
        else:
            intermodal_dict = None
            outs = sweep_directory(
                bids_dir,
                modality=modality[0],
                space=space,
                sesh=session_label,
                run=run,
            )
            funcs, confs, dwis, bvals, bvecs, anats, masks, subjs, seshs = \
                outs
    elif analysis_level == "participant":
        if len(modality) > 1:
            i = 0
            for mod_ in modality:
                outs = sweep_directory(
                    bids_dir,
                    modality=mod_,
                    space=space,
                    subj=participant_label,
                    sesh=session_label,
                    run=run,
                )
                if mod_ == "func":
                    if i == 0:
                        funcs, confs, _, _, _, anats, masks, subjs, seshs = \
                            outs
                    else:
                        funcs, confs, _, _, _, _, _, _, _ = outs
                    intermodal_dict["funcs"].append(funcs)
                    intermodal_dict["confs"].append(confs)
                elif mod_ == "dwi":
                    if i == 0:
                        _, _, dwis, bvals, bvecs, anats, masks, subjs, seshs \
                            = outs
                    else:
                        _, _, dwis, bvals, bvecs, _, _, _, _ = outs
                    intermodal_dict["dwis"].append(dwis)
                    intermodal_dict["bvals"].append(bvals)
                    intermodal_dict["bvecs"].append(bvecs)
                intermodal_dict["anats"].append(anats)
                intermodal_dict["masks"].append(masks)
                intermodal_dict["subjs"].append(subjs)
                intermodal_dict["seshs"].append(seshs)
                i += 1
        else:
            intermodal_dict = None
            outs = sweep_directory(
                bids_dir,
                modality=modality[0],
                space=space,
                subj=participant_label,
                sesh=session_label,
                run=run,
            )
            funcs, confs, dwis, bvals, bvecs, anats, masks, subjs, seshs = \
                outs
    else:
        raise ValueError(
            "Analysis level invalid. Must be `participant` or `group`. See"
            " --help."
        )

    if intermodal_dict:
        funcs, confs, dwis, bvals, bvecs, anats, masks, subjs, seshs = [
            list(set(list(flatten(i)))) for i in intermodal_dict.values()
        ]

    args_dict_all = build_args_from_config(modality, arg_dict)

    id_list = []
    for i in sorted(list(set(subjs))):
        for ses in sorted(list(set(seshs))):
            id_list.append(i + "_" + ses)

    args_dict_all["work"] = bids_args.work
    args_dict_all["output_dir"] = output_dir
    args_dict_all["plug"] = bids_args.plug
    args_dict_all["pm"] = bids_args.pm
    args_dict_all["v"] = bids_args.v
    args_dict_all["clean"] = bids_args.clean
    if funcs is not None:
        args_dict_all["func"] = sorted(funcs)
    else:
        args_dict_all["func"] = None
    if confs is not None:
        args_dict_all["conf"] = sorted(confs)
    else:
        args_dict_all["conf"] = None
    if dwis is not None:
        args_dict_all["dwi"] = sorted(dwis)
        args_dict_all["bval"] = sorted(bvals)
        args_dict_all["bvec"] = sorted(bvecs)
    else:
        args_dict_all["dwi"] = None
        args_dict_all["bval"] = None
        args_dict_all["bvec"] = None
    if anats is not None:
        args_dict_all["anat"] = sorted(anats)
    else:
        args_dict_all["anat"] = None
    if masks is not None:
        args_dict_all["m"] = sorted(masks)
    else:
        args_dict_all["m"] = None
    args_dict_all["g"] = None
    if ("dwi" in modality) and (bids_args.way is not None):
        args_dict_all["way"] = bids_args.way
    else:
        args_dict_all["way"] = None
    args_dict_all["id"] = id_list
    args_dict_all["ua"] = bids_args.ua
    args_dict_all["ref"] = bids_args.ref
    args_dict_all["roi"] = bids_args.roi
    if ("func" in modality) and (bids_args.cm is not None):
        args_dict_all["cm"] = bids_args.cm
    else:
        args_dict_all["cm"] = None

    # Mimic argparse with a SimpleNamespace object
    args = SimpleNamespace(**args_dict_all)
    print(args)
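    # build_workflow is launched in a separate forkserver process; its status
    # and metadata (return code, workflow object, working directory,
    # plugin/execution settings, run UUID) are passed back to the parent
    # through the shared Manager dict `retval`.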
    set_start_method("forkserver")
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(args, retval))
        p.start()
        p.join()
        if p.is_alive():
            p.terminate()

        retcode = p.exitcode or retval.get("return_code", 0)

        pynets_wf = retval.get("workflow", None)
        work_dir = retval.get("work_dir")
        plugin_settings = retval.get("plugin_settings", None)
        execution_dict = retval.get("execution_dict", None)
        run_uuid = retval.get("run_uuid", None)

        retcode = retcode or int(pynets_wf is None)
        if retcode != 0:
            sys.exit(retcode)

        # Clean up master process before running workflow, which may create
        # forks
        gc.collect()
        mgr.shutdown()

    if bids_args.push_location:
        print(f"Pushing to s3 at {bids_args.push_location}.")
        push_buck, push_remo = cloud_utils.parse_path(
            bids_args.push_location
        )
        for id_ in id_list:
            cloud_utils.s3_push_data(
                push_buck,
                push_remo,
                output_dir,
                modality,
                subject=id_.split("_")[0],
                session=id_.split("_")[1],
                creds=creds,
            )

    sys.exit(0)

    return
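# Example invocation, for reference only. The console-script name and flag
# spellings below are assumptions; see get_bids_parser() for the
# authoritative CLI definition:
#
#   pynets_bids /data/bids_dataset /data/derivatives/pynets participant \
#       --modality func --participant_label 01 --session_label 01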