Example No. 1
def upload_tabular_file_wrapper(filename, project_label, group_id, api_key=None, create=False, raise_on=None,
                                update_values=False, create_empty_entry=False, subject_col="subject_id",
                                session_col="session_id"):
    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)

    project = handle_project(fw, project_label, group_id, create, raise_on)
    project_id = project["id"]

    upload_tabular_file(fw, filename, project_id, update_values, create_empty_entry, subject_col, session_col)
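A minimal sketch of how this wrapper might be invoked; the file name, project label, and group id below are placeholders, not values from the original source.

# Hypothetical call: upload a CSV of per-subject values into an existing project.
upload_tabular_file_wrapper(
    filename="participants.csv",   # placeholder path
    project_label="my_project",    # placeholder project label
    group_id="my_group",           # placeholder group id
    update_values=True,
)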
Example No. 2
def main():

    logger.info("{:=^70}\n".format(": fw-heudiconv exporter starting up :"))
    parser = get_parser()
    args = parser.parse_args()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        if args.api_key:
            fw = flywheel.Client(args.api_key)
        else:
            fw = flywheel.Client()
    assert fw, "Your Flywheel CLI credentials aren't set!"

    if args.path:
        destination = args.path
    else:
        destination = args.destination

    if not os.path.exists(destination):
        logger.info("Creating destination directory...")
        os.makedirs(destination)

    downloads = gather_bids(
        client=fw, project_label=args.project, session_labels=args.session,
        subject_labels=args.subject
        )

    if args.attachments is not None and args.verbose:
        logger.info("Filtering attachments...")
        logger.info(args.attachments)

    download_bids(
        client=fw, to_download=downloads, root_path=destination,
        folders_to_download=args.folders, dry_run=args.dry_run,
        attachments=args.attachments, name=args.directory_name
        )

    if args.dry_run:
        shutil.rmtree(Path(destination, args.directory_name))

    logger.info("Done!")
    logger.info("{:=^70}".format(": Exiting fw-heudiconv exporter :"))
def upload_subject_dir(path,
                       project_name,
                       permissions=None,
                       fw=None,
                       projectId=None):
    if permissions is None: permissions = []
    if fw is None: fw = flywheel.Client()
    #if fw is None: fw = Client(environ['FLYWHEEL_TOKEN'])
    if projectId is None:
        projectId = find_project_id(project_name, permissions, fw=fw)
    path = pathlib.Path(path)
    subjectId = {
        'code': path.name[4:],
        '_id': None,
    }
    for session in path.iterdir():
        if session.is_file():
            lg.warning(
                f'Top-level files are better uploaded inside a session; skipping {session}'
            )
            continue
        # This is roundabout: the first session is created to obtain the subject_id,
        # which is then reused for the remaining sessions.
        info = {
            'label': session.name,
            'project': projectId,
        }
        if subjectId['_id'] is not None:
            info['subject'] = subjectId

        sessionId = fw.add_session(info)
        if subjectId['_id'] is None:
            subjectId['_id'] = fw.get_session(sessionId)['subject']['_id']
            fw.modify_session(sessionId, {'subject': subjectId})
        for acquisition in session.iterdir():
            if acquisition.is_file():
                lg.warning(
                    f'Uploading file {acquisition.name} from {session} as attachment'
                )
                fw.upload_file_to_project(projectId, str(acquisition))
                continue
            acquisitionId = fw.add_acquisition({
                'label': acquisition.name,
                'session': sessionId,
            })
            for filepath in acquisition.iterdir():
                lg.debug(
                    f'Uploading {filepath.name} to {path.name}/{session.name}/{acquisition.name}'
                )
                if filepath.suffix in ('.vhdr', '.vmrk'):
                    continue
                #if filepath.suffix == '.eeg':
                #    _zip_and_upload(filepath, fw, acquisitionId)
                #    continue
                fw.upload_file_to_acquisition(acquisitionId, str(filepath))
Example No. 4
def test_handle_sessions_no_changes(group='scien',
                                    project='Nate-BIDS-pre-curate'):
    fw = flywheel.Client()
    proj = fw.lookup(f'{group}/{project}')
    path = Path(__file__).parents[
        1] / 'assets/project/session_labels_Nate-BIDS-pre-curate.csv'
    ses_df = pd.read_csv(path.resolve())
    handle_sessions(ses_df, fw, proj, dry_run=True)
    # This test only checks for errors, so if we get here, we can assert True
    # Integration testing checks for functionality
    assert True
Example No. 5
def delete_analyses(group_id, project_label, analysis_label, api_key=None):
    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)
    project = fw.lookup(f"{group_id}/{project_label}")

    for subject in project.subjects():
        print(subject.label)
        ana_list = fw.get_container_analyses(
            subject.id, filter=f'label="{analysis_label}"')
        for analysis in ana_list:
            print(analysis.id)
            fw.delete_container_analysis(subject.id, analysis.id)
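For illustration, a hedged example of calling this helper; the group, project, and analysis label are placeholders.

# Hypothetical: delete every subject-level analysis labelled "fmriprep"
# in the project my_group/my_project.
delete_analyses("my_group", "my_project", analysis_label="fmriprep")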
Example No. 6
def delete_canceled_analysis(group_id, project_label, api_key=None):
    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)
    project = fw.lookup(f"{group_id}/{project_label}")

    for subject in project.subjects():
        print(subject.label)
        ana_list = fw.get_container_analyses(subject.id)
        for analysis in ana_list:
            job = fw.get_job(analysis.job)
            if job.state in ("cancelled", "failed"):
                print(analysis.id)
                fw.delete_container_analysis(subject.id, analysis.id)
Example No. 7
def test_handle_subjects_no_changes(group='scien',
                                    project='Nate-BIDS-pre-curate'):
    fw = flywheel.Client()
    proj = fw.lookup(f'{group}/{project}')
    path = Path(__file__).parents[
        1] / 'assets/project/subject_codes_Nate-BIDS-pre-curate.csv'
    sub_df = pd.read_csv(path.resolve())
    with pytest.raises(flywheel.rest.ApiException) as api_info:
        handle_subjects(sub_df, fw, proj, dry_run=True)
    # The subject id in the CSV is stale, so the lookup is expected to fail.
    # Integration testing checks for functionality.
    assert "not found" in str(api_info.value)
Example No. 8
def upload_analysis(group_id, project_label, root_dir, api_key=None, level="subject", subjects=[],
                    search_strings_template=["{subject}*"], note="", check_ignored_files=True):
    """
    :param group_id:
    :param project_label:
    :param root_dir:
    :param api_key:
    :param level:
    :param subjects:
    :return:
    """
    assert level in ["subject", "project"], f'level needs to be "subject" or "project", not {level}'

    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)
    project = fw.lookup(f"{group_id}/{project_label}")

    root_dir = Path(root_dir)
    os.chdir(root_dir)
    analysis_label = root_dir.name

    files_uploaded = []
    if level == "subject":
        containers = get_subject_containers(project, subjects)
        print(f"Uploading {root_dir} into {project_label} for {len(containers)} subject.")
        cont()

        for container in containers:
            subject = container.label
            print(subject)
            search_strings = [s.format(subject=subject) for s in search_strings_template]
            files_uploaded += zip_and_upload_data(fw, container, root_dir, analysis_label,
                                                  search_strings=search_strings, note=note)
    elif level == "project":
        container = project
        search_strings = search_strings_template
        print(f"Uploading {root_dir} into {project_label} for group data.")
        cont()

        files_uploaded += zip_and_upload_data(fw, container, root_dir, analysis_label,
                                              search_strings=search_strings, note=note)

    print("Upload done")

    # check for files that have not been uploaded
    if check_ignored_files:
        all_files = set(list_files(root_dir, ["*"]))
        files_not_uploaded = all_files - set(files_uploaded)
        if files_not_uploaded:
            files_not_uploaded_ = [str(f) for f in files_not_uploaded]
            warn(f"\n\n\n{len(files_not_uploaded)} files not uploaded {files_not_uploaded_}")
Example No. 9
def init_gear(gear_name):
    """
    Initializes a gear from name. Returns the flywheel client and gear object.

    Args:
        gear_name (str): Name of gear in instance reference by API-Key.

    Returns:
        tuple: fw_client (flywheel.Client), gear (flywheel.GearDoc)
    """
    fw_client = flywheel.Client()
    gear = fw_client.gears.find_one(f'gear.name="{gear_name}"')

    return fw_client, gear
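A minimal usage sketch, assuming a gear with the given name exists on the instance; "dicom-send" is a placeholder.

# Hypothetical: look up a gear by name and report which gear was found.
fw_client, gear = init_gear("dicom-send")   # placeholder gear name
print(gear.gear.name)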
Example No. 10
def get_meica_data(context, output_directory="/flywheel/v0/output"):
    """
    For a given input dicom file, grab all of the nifti files from that acquisition.

    Return MEICA data which is a sorted list of file objects.
    """

    # Flywheel Object
    fw = flywheel.Client(context.get_input("api_key")["key"])

    # For this acquisition find each nifti file, download it and note its echo time
    acquisition = fw.get_acquisition(
        context.get_input("functional")["hierarchy"]["id"])
    nifti_files = [
        x for x in acquisition.files
        if x.type == "nifti" and "Functional" in x.classification["Intent"]
    ]
    log.info("Found %d Functional NIfTI files in %s" %
             (len(nifti_files), acquisition.label))

    # Compile meica_data structure
    meica_data = []
    repetition_time = ""
    for n in nifti_files:
        file_path = os.path.join(output_directory, n.name)
        log.info("Downloading %s" % (n.name))
        fw.download_file_from_acquisition(acquisition.id, n.name, file_path)
        echo_time = n.info.get("EchoTime")

        # TODO: Handle case where EchoTime is not here
        # or classification is not correct
        # Or not multi echo data
        # Or if coronavirus attacks

        meica_data.append({
            "path": n.name,
            "te": echo_time * 1000
        })  # Convert to ms

    # Generate prefix
    sub_code = (fw.get_session(
        acquisition.parents.session).subject.code.strip().replace(" ", ""))
    label = acquisition.label.strip().replace(" ", "")
    prefix = "%s_%s" % (sub_code, label)

    meica_data = sorted(meica_data, key=lambda k: k["te"])
    datasets = [Path(meica["path"]) for meica in meica_data]
    tes = [meica["te"] for meica in meica_data]

    return (datasets, tes)
Example No. 11
def test_dcm2niix_nifti():

    sd.clean_working_dir()

    input_file = sd.get_zipped_t1()
    work_dir = Path(sd.WORKING_DIR) / "work"
    work_dir.mkdir(exist_ok=True)
    output_dir = Path(sd.WORKING_DIR) / "output"
    output_dir.mkdir(exist_ok=True)

    fw = flywheel.Client()
    # Get configuration, acquisition, and file info
    file_id = "626037fe7cc96261fa295c75"
    file_obj = fw.get_file(file_id)

    # destination_type = "nrrd" if config.get("save_NRRD") else "nifti"
    input_file = Path(input_file)
    destination_type = "nifti"
    combine = False
    bitmask = False
    method = "dcm2niix"

    exporter = MeasurementExport(
        fw_client=fw,
        fw_file=file_obj,
        work_dir=work_dir,
        output_dir=output_dir,
        input_file_path=input_file,
        dest_file_type=destination_type,
        combine=combine,
        bitmask=bitmask,
        method=method,
    )

    ohifviewer_info, labels, affine = exporter.process_file()
    globpath = output_dir / "*"
    output_files = glob.glob(globpath.as_posix())
    assert len(output_files) == 3

    output_files = [os.path.basename(a) for a in output_files]
    doggy = [d for d in output_files if d.startswith("ROI_Doggy")]
    potato = [d for d in output_files if d.startswith("ROI_Potato")]
    lesion = [d for d in output_files if d.startswith("ROI_Lesion")]

    assert len(doggy) == 1
    assert len(potato) == 1
    assert len(lesion) == 1

    sd.clean_working_dir()
Example No. 12
def main():

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        fw = flywheel.Client()
        assert fw, "Your Flywheel CLI credentials aren't set!"

    parser = argparse.ArgumentParser(description=(
        "Use this to query Flywheel for the gears available to you, or get the config file for a gear."
    ))

    parser.add_argument("-name",
                        "--gear-name",
                        dest='name',
                        help="Shorthand name of the gear on Flywheel",
                        required=True,
                        default='all')
    parser.add_argument(
        "-config",
        "--output-config",
        dest='config',
        help="True/False; Whether to output configuration file for running",
        required=False,
        default='False')

    args = parser.parse_args()

    config = str2bool(args.config)

    if args.name == 'all':
        gears = fw.gears()
        gears_table = [nested_to_record(g.to_dict(), sep='_') for g in gears]
        df = pd.DataFrame(gears_table)
        df = df.filter(regex=r'gear_label$|gear_name$|^category$', axis=1)
        print(tabulate(
            df,
            headers='keys',
            tablefmt='psql',
        ))

    else:
        gear = find_gear(args.name, fw)
        config_file = collect_gear_config(gear['_id'], fw)
        if config:
            with open('gear_config.json', 'w') as outfile:
                json.dump(config_file, outfile)
            print("Config file written.")
        else:
            print(json.dumps(config_file, indent=4))
Example No. 13
def login(args):
    fw = flywheel.Client(args.api_key)

    try:
        # Get current user
        login_id = sdk_impl.get_login_id(fw)

        # Save credentials
        sdk_impl.save_api_key(args.api_key, root=user['root'])

        print('You are now logged in as: {}!'.format(login_id))
    except Exception as e:
        log.debug('Login error', exc_info=True)
        perror('Error logging in: {}'.format(str(e)))
        sys.exit(1)
Example No. 14
def find_project_id(project_name, permissions, fw=None):
    if fw is None: fw = flywheel.Client()
    #fw = Flywheel(environ['FLYWHEEL_TOKEN'])
    projects = [
        x['_id'] for x in fw.get_all_projects() if x['label'] == project_name
    ]
    if len(projects) == 1:
        projectId = projects[0]
        lg.debug(f'Adding data to project "{project_name}"')
    else:
        projectId = fw.add_project({'label': project_name, 'group': GROUPID})
        lg.debug(f'Creating project "{project_name}" in group "{GROUPID}"')
        for perm in permissions[1:]:
            fw.add_project_permission(projectId, perm)
    return projectId
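A sketch of how this lookup-or-create helper might be used; it assumes GROUPID and lg are defined at module level as in the snippet, and the project name is a placeholder.

# Hypothetical: reuse an existing client and find (or create) a project by label.
fw = flywheel.Client()
project_id = find_project_id("my_project", permissions=[], fw=fw)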
Example No. 15
def fix_session_labels(group="pennftdcenter",
                       projectLabel="HUP6",
                       matchString="BRAIN RESEARCH^GROSSMAN"):
    fw = flywheel.Client()
    proj = fw.lookup("{}/{}".format(group, projectLabel))
    must_fix = [s for s in proj.sessions() if matchString in s.label]
    for s in must_fix:
        # Create a new session label by piecing together year/month/day/hour/minute info
        # from the session timestamp.
        tstamp = s.timestamp
        lab = '{}{}{}x{}{}'.format(tstamp.year, f'{tstamp.month:02}',
                                   f'{tstamp.day:02}', f'{tstamp.hour:02}',
                                   f'{tstamp.minute:02}')
        # Update the session label using the update() method, whose input is a dictionary
        # of the fields to be changed and their new values.
        s.update({'label': lab})
Example No. 16
def generate_gear_args(gear_context):
    """Generate gear arguments."""
    log.info("Preparing arguments for dicom-send gear.")
    gear_kwargs = {
        "work_dir": gear_context.work_dir,
        "destination": gear_context.config["destination"],
        "called_ae": gear_context.config["called_ae"],
        "port": gear_context.config["port"],
        "calling_ae": gear_context.config["calling_ae"],
        "group": "0x0021",
        "identifier": "Flywheel",
        "tag_value": "DICOM Send",
        'api_key': gear_context.get_input("api_key")["key"]
    }

    fw = flywheel.Client(gear_kwargs['api_key'])

    # Input is a tgz or zip DICOM archive, or a single DICOM file
    try:
        infile = Path(gear_context.get_input_path("file"))
        download = not infile.is_file()
    except TypeError:
        download = True
        log.info(
            "No input provided. Will use files of type DICOM from session.")

    if download is False:
        gear_kwargs["infile"] = infile
        gear_kwargs["parent_acq"] = gear_context.get_input(
            "file")["hierarchy"].get("id")
        # When a file is provided as input, destination ID is the acquisition ID
        gear_kwargs['session_id'] = fw.get_acquisition(
            gear_kwargs["parent_acq"]).parents.session

    else:
        # Alternatively, if no input is provided, all DICOM files in the session are
        # downloaded and used as input
        # In this case the destination ID is the session ID.
        gear_kwargs['session_id'] = gear_context.destination["id"]
        gear_kwargs['input_dir'] = "/flywheel/v0/input"

    print_kwargs = dict(gear_kwargs)
    print_kwargs.pop('api_key')
    gear_args_formatted = pprint.pformat(print_kwargs)
    log.info(f"Prepared gear stage arguments: \n\n{gear_args_formatted}\n")

    return gear_kwargs, download
Example No. 17
def fix_timestamps(project_label, group_id, api_key=None):
    from datetime import datetime, timezone

    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)
    project = fw.lookup(f"{group_id}/{project_label}")

    print(f"Fixing timestamps for {group_id} {project_label}.")

    for subject in project.subjects():
        for session in subject.sessions():
            print(f"{subject.label} {session.label}")
            session_num = int(session.label.replace("ses-tp", ""))
            if not session_num:
                raise RuntimeError(f"Session cannot be determined: {session.label}")
            session.update({"timestamp": datetime(1900, 1, session_num, 0, 0, tzinfo=timezone.utc)})
    print("Done")
Example No. 18
def cancel_jobs(pending=True, running=True, api_key=None):
    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)

    searches = []
    if pending:
        searches.append('state=pending')
    if running:
        searches.append('state=running')

    for search_str in searches:
        jobs = fw.jobs.find(search_str)
        print(f"Cancelling {len(jobs)} jobs")

        for job in jobs:
            print(job.id)
            job.change_state('cancelled')
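For example, the helper could be restricted to pending jobs only; this is a hypothetical call, with the API key resolved by get_fw_api as above.

# Hypothetical: cancel queued jobs but leave running jobs alone.
cancel_jobs(pending=True, running=False)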
Example No. 19
def main():
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        fw = flywheel.Client()
    assert fw, "Your Flywheel CLI credentials aren't set!"
    parser = get_parser()
    args = parser.parse_args()

    # Print a lot if requested
    if args.verbose:
        logger.setLevel(logging.DEBUG)

    project_label = ' '.join(args.project)
    convert_to_bids(client=fw,
                    project_label=project_label,
                    heuristic_path=args.heuristic,
                    session_labels=args.session,
                    subject_labels=args.subject,
                    dry_run=args.dry_run)
Example No. 20
def gather_session_indices():

    # Use Flywheel to gather a dict of all session labels with their
    # corresponding index (ordered by timestamp) within each subject.

    # query subjects
    import flywheel

    fw = flywheel.Client()
    '''
    lg = fw.projects.find_first('label="{}"'.format("PNC_LG_810336"))
    lg_subjects = lg.subjects()
    lg_subject_labs = [int(x.label) for x in lg_subjects]
    '''
    cs = fw.projects.find_first('label="{}"'.format("PNC_CS_810336"))
    cs_subjects = cs.subjects()
    cs_subject_labs = [int(x.label) for x in cs_subjects]

    # initialise dict
    sess_dict = {}

    for x in range(len(cs_subjects)):
        '''
        # if they already have a session in the CS project
        if lg_subject_labs[x] in cs_subject_labs:
            n = 1
        else:
            n = 0
        '''
        # get a list of their sessions
        sess_list = cs_subjects[x].sessions()

        if sess_list:

            # sort that list by timestamp
            sess_list = sorted(sess_list, key=lambda x: x.timestamp)

            # loop through the sessions and assign the session label an index
            for i, y in enumerate(sess_list):
                sess_dict[y.label] = "PNC" + str(i + 1)

    return sess_dict
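A brief usage sketch; the session label below is a placeholder.

# Hypothetical: map every session label in PNC_CS_810336 to its "PNC<n>" index,
# then look up one label.
sess_dict = gather_session_indices()
print(sess_dict.get("some_session_label"))   # placeholder label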
Example No. 21
def test_data2csv_ses_duplicate(group='scien', project='Nate-BIDS-pre-curate'):
    fw = flywheel.Client()
    proj = fw.lookup(f'{group}/{project}')
    sess = [ses.to_dict() for ses in fw.get_project_sessions(proj.id)]
    path, df = data2csv(sess,
                        project,
                        keep_keys=['label'],
                        prefix='session_labels',
                        regex='[^A-Za-z0-9]',
                        column_rename=['existing_session_label'],
                        user_columns=['new_session_label'],
                        unique=['label'],
                        no_print=True)
    print(df.values)
    supposedly_unique = df.index.values
    unique = np.unique(
        pd.DataFrame.from_records(sess)['label'].values)
    #assert set(unique) == set(supposedly_unique)
    assert (set(df.columns) == set(
        ['existing_session_label', 'new_session_label']))
Example No. 22
def make_clients():
    api_key = None
    if init_db.SCITRAN_PERSISTENT_DB_URI:
        # Initialize database first
        init_db.init_db()

        site_url = urlparse(os.environ['SCITRAN_SITE_API_URL'])
        api_key = '{}:__force_insecure:{}'.format(
            site_url.netloc, init_db.SCITRAN_ADMIN_API_KEY)
    else:
        api_key = os.environ.get('SdkTestKey')

    if not api_key:
        print(
            'Could not initialize test case, no api_key. Try setting the SdkTestKey environment variable!'
        )
        exit(1)

    fw = flywheel.Flywheel(api_key)
    fw.enable_feature('beta')

    fw_root = flywheel.Flywheel(api_key, root=True)
    fw_root.enable_feature('beta')

    # Mock cli login
    home = os.environ['HOME']
    os.environ['HOME'] = tmp_path = tempfile.mkdtemp()
    cli_config_path = os.path.expanduser('~/.config/flywheel/')
    if not os.path.exists(cli_config_path):
        os.makedirs(cli_config_path)
    with open(os.path.join(cli_config_path, 'user.json'), 'w') as cli_config:
        json.dump({'key': api_key}, cli_config)

    client = flywheel.Client()
    client.enable_feature('beta')

    # Don't need the login anymore
    shutil.rmtree(tmp_path)
    os.environ['HOME'] = home

    return fw, fw_root, client
Example No. 23
def main():

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        fw = flywheel.Client()
    assert fw, "Your Flywheel CLI credentials aren't set!"
    parser = get_parser()

    args = parser.parse_args()
    project_label = ' '.join(args.project)
    assert os.path.exists(args.path), "Path does not exist!"
    downloads = gather_bids(client=fw,
                            project_label=project_label,
                            session_labels=args.session,
                            subject_labels=args.subject)

    download_bids(client=fw,
                  to_download=downloads,
                  root_path=args.path,
                  folders_to_download=args.folders,
                  dry_run=args.dry_run)
Example No. 24
def upload_project_dir(bids_root, project_name, permissions=None, fw=None):
    if permissions is None: permissions = []
    #if fw is None: fw = Flywheel(environ['FLYWHEEL_TOKEN'])
    if fw is None: fw = flywheel.Client()
    projectId = find_project_id(project_name, permissions, fw=fw)
    bids_root = pathlib.Path(bids_root)
    for extra in bids_root.iterdir():
        if extra.is_file():
            fw.upload_file_to_project(projectId, str(extra))
            continue

        if not extra.name.startswith('sub-'):
            lg.debug(f'Uploading directory {extra.name} as zipped file')
            _zip_and_upload(extra, fw, projectId)
            continue

        upload_subject_dir(extra,
                           project_name,
                           projectId=projectId,
                           fw=fw,
                           permissions=permissions)
Example No. 25
def main(context):

    #fw = flywheel.Client()
    config = context.config
    for inp in context.config_json["inputs"].values():
        if inp["base"] == "api-key" and inp["key"]:
            api_key = inp["key"]

    fw = flywheel.Client(api_key)

    # Setup basic logging and log the configuration for this job
    if config["gear_log_level"] == "INFO":
        context.init_logging("info")
    else:
        context.init_logging("debug")
    context.log_config()

    dry_run = config.get('dry-run', False)
    log.debug(f"dry_run is {dry_run}")

    try:

        destination_id = context.destination.get('id')
        dest_container = fw.get(destination_id)
        project_id = dest_container.parents.project
        project = fw.get_project(project_id)

        output_dict = ar.acquire_rois(fw, project)

        output_path = Path(
            context.output_dir
        ) / f"{project.label}_ROI-Export_{datetime.now().strftime('%d-%m-%Y_%H-%M-%S')}.csv"
        ar.save_csv(output_dict, output_path)

    except Exception as e:
        log.exception(e)
        return 1

    return 0
Example No. 26
def test_main():

    import os
    import flywheel

    fw = flywheel.Client(os.environ["FWGA_API"])
    # Get configuration, acquisition, and file info
    from pathlib import Path

    parent_acq = "621d449c559d4f2a0d1468e0"
    file_id = "621d449f332f06de9645fd7a"
    file_obj = fw.get_file(file_id)
    input_file = "/Users/davidparker/Documents/Flywheel/SSE/MyWork/Gears/roi2nix/tests/test_ax_cor_sag/Scans/T1_SE_AX.zip"
    # destination_type = "nrrd" if config.get("save_NRRD") else "nifti"
    work_dir = "/Users/davidparker/Documents/Flywheel/SSE/MyWork/Gears/roi2nix/tests/test_ax_cor_sag/work"
    output_dir = "/Users/davidparker/Documents/Flywheel/SSE/MyWork/Gears/roi2nix/tests/test_ax_cor_sag/output"
    input_file = Path(input_file)
    work_dir = Path(work_dir)
    output_dir = Path(output_dir)
    destination_type = "nifti"

    combine = False
    bitmask = False
    method = "dcm2niix"

    exporter = MeasurementExport(
        fw_client=fw,
        fw_file=file_obj,
        work_dir=work_dir,
        output_dir=output_dir,
        input_file_path=input_file,
        dest_file_type=destination_type,
        combine=combine,
        bitmask=bitmask,
        method=method,
    )

    ohifviewer_info, labels, affine = exporter.process_file()
Example No. 27
def make_clients():
    api_key = get_api_key()

    fw = flywheel.Flywheel(api_key)
    fw.enable_feature('beta')

    # Mock cli login
    home = os.environ['HOME']
    os.environ['HOME'] = tmp_path = tempfile.mkdtemp()
    cli_config_path = os.path.expanduser('~/.config/flywheel/')
    if not os.path.exists(cli_config_path):
        os.makedirs(cli_config_path)
    with open(os.path.join(cli_config_path, 'user.json'), 'w') as cli_config:
        json.dump({'key': api_key}, cli_config)

    client = flywheel.Client()
    client.enable_feature('beta')

    # Don't need the login anymore
    shutil.rmtree(tmp_path)
    os.environ['HOME'] = home

    return fw, client
Example No. 28
def main():

    fw = flywheel.Client()

    parser = argparse.ArgumentParser()
    parser.add_argument("-orig",
                        help="Path to the original flywheel query CSV",
                        dest="original",
                        required=True)
    parser.add_argument("-mod",
                        help="Path to the modified flywheel query CSV",
                        dest="modified",
                        required=True)

    args = parser.parse_args()

    # original df
    df_original = read_flywheel_csv(args.original)
    # edited df
    df_modified = read_flywheel_csv(args.modified)

    # check for equality of each cell between the original and modified
    unequal = get_unequal_cells(df_original, df_modified)
    # if any unequal, assess the validity of the modification
    res = validate_on_unequal_cells(unequal, df_modified)

    if len(ERROR_MESSAGES) == 0 and res is True:
        print("Changes appear to be valid! Uploading...")
        diff = df_modified.fillna(9999) != df_original.fillna(9999)
        #drop_downs = ['classification_Measurement', 'classification_Intent', 'classification_Features']
        #df_modified.loc[:, drop_downs] = df_modified.loc[:, drop_downs].applymap(relist_item)
        upload_to_flywheel(df_modified.loc[diff.any(axis=1), ], unequal, fw)
        print("Done!")
        sys.exit(0)
    else:
        print("Exiting...")
        sys.exit(0)
Example No. 29
def test_data2csv_acq_duplicate(group='scien', project='Nate-BIDS-pre-curate'):
    fw = flywheel.Client()
    proj = fw.lookup(f'{group}/{project}')
    acqs = [acq.to_dict() for acq in fw.get_project_acquisitions(proj.id)]
    path, df = data2csv(acqs,
                        project,
                        keep_keys=['label'],
                        prefix='acquisition_labels',
                        regex='[^A-Za-z0-9]',
                        column_rename=['existing_acquisition_label'],
                        user_columns=[
                            'new_acquisition_label', 'modality', 'task', 'run',
                            'ignore'
                        ],
                        unique=['label'],
                        no_print=True)
    supposedly_unique = np.sort(df['existing_acquisition_label'].values)
    unique = np.unique(
        pd.DataFrame.from_records(acqs)['label'].values)
    #  assert unique.shape == supposedly_unique.shape
    assert (df.columns == [
        'existing_acquisition_label', 'new_acquisition_label', 'modality',
        'task', 'run', 'ignore'
    ]).all()
Example No. 30
def delete_lhab_info(group_id, project_label, api_key=None, delete_subject_info_keys=["missing_info"],
                     delete_session_info_keys=['cognition', 'health', 'demographics', 'motorskills',
                                               'questionnaires']):
    """
    Removes keys from the info dict on subject and session levels
    Needs to be run in case variables are discontinued
    """
    api_key = get_fw_api(api_key)
    fw = flywheel.Client(api_key)
    project = fw.lookup(f"{group_id}/{project_label}")

    print(f"Deleting LHAB-related values (phenotype) in info dict for {group_id} {project_label}.")

    for subject in project.subjects():

        for k in delete_subject_info_keys:
            print(f"{subject.label} {k}")
            subject.delete_info(k)

        for session in subject.sessions():
            for k in delete_session_info_keys:
                print(f"{subject.label} {session.label} {k}")
                session.delete_info(k)
    print("DONE")