Example #1
def bidsmri2project(directory, args):
    #Parse dataset_description.json file in BIDS directory
    if (os.path.isdir(os.path.join(directory))):
        try:
            with open(os.path.join(directory,
                                   'dataset_description.json')) as data_file:
                dataset = json.load(data_file)
        except OSError:
            logging.critical(
                "Cannot find dataset_description.json file which is required in the BIDS spec"
            )
            exit("-1")
    else:
        logging.critical("Error: BIDS directory %s does not exist!" %
                         os.path.join(directory))
        exit("-1")

    #create project / nidm-exp doc
    project = Project()

    #add various attributes if they exist in BIDS dataset
    for key in dataset:
        #if key from dataset_description file is mapped to term in BIDS_Constants.py then add to NIDM object
        if key in BIDS_Constants.dataset_description:
            if type(dataset[key]) is list:
                project.add_attributes({
                    BIDS_Constants.dataset_description[key]:
                    "".join(dataset[key])
                })
            else:
                project.add_attributes(
                    {BIDS_Constants.dataset_description[key]: dataset[key]})
        #add absolute location of BIDS directory on disk for later finding of files which are stored relatively in NIDM document
        project.add_attributes({Constants.PROV['Location']: directory})

    #get BIDS layout
    bids_layout = BIDSLayout(directory)

    #create empty dictionaries for sessions and participants where the key is the subject id; used later to link scans to the same session as demographics
    session = {}
    participant = {}
    #cde graph is populated by map_variables_to_terms below; initialize it here so the final return works even when no variable-term mapping is done
    cde = None
    #Parse participants.tsv file in BIDS directory and create study and acquisition objects
    if os.path.isfile(os.path.join(directory, 'participants.tsv')):
        with open(os.path.join(directory, 'participants.tsv')) as csvfile:
            participants_data = csv.DictReader(csvfile, delimiter='\t')

            #logic to map variables to terms.#########################################################################################################

            #first iterate over variables in dataframe and check which ones are already mapped as BIDS constants and which are not.  For those that are not
            #we want to use the variable-term mapping functions to help the user do the mapping
            #iterate over columns
            mapping_list = []
            column_to_terms = {}
            for field in participants_data.fieldnames:

                #column is not in BIDS_Constants
                if not (field in BIDS_Constants.participants):
                    #add column to list for column_to_terms mapping
                    mapping_list.append(field)

            #do variable-term mappings
            if (args.json_map is not False) or (args.key is not None):

                #if user didn't supply a json mapping file but we're doing some variable-term mapping create an empty one for column_to_terms to use
                if args.json_map is False:
                    #defaults to participants.json because here we're mapping the participants.tsv file variables to terms
                    # if participants.json file doesn't exist then run without json mapping file
                    if not os.path.isfile(
                            os.path.join(directory, 'participants.json')):
                        #maps variables in CSV file to terms
                        temp = DataFrame(columns=mapping_list)

                        column_to_terms, cde = map_variables_to_terms(
                            directory=directory,
                            assessment_name='participants.tsv',
                            df=temp,
                            apikey=args.key,
                            output_file=os.path.join(directory,
                                                     'participants.json'))
                    else:
                        #maps variables in CSV file to terms
                        temp = DataFrame(columns=mapping_list)
                        column_to_terms, cde = map_variables_to_terms(
                            directory=directory,
                            assessment_name='participants.tsv',
                            df=temp,
                            apikey=args.key,
                            output_file=os.path.join(directory,
                                                     'participants.json'),
                            json_file=os.path.join(directory,
                                                   'participants.json'))

                else:
                    #maps variables in CSV file to terms
                    temp = DataFrame(columns=mapping_list)
                    column_to_terms, cde = map_variables_to_terms(
                        directory=directory,
                        assessment_name='participants.tsv',
                        df=temp,
                        apikey=args.key,
                        output_file=os.path.join(directory,
                                                 'participants.json'),
                        json_file=args.json_map)

            for row in participants_data:
                #create session object for subject to be used for participant metadata and image data
                #parse subject id from "sub-XXXX" string
                temp = row['participant_id'].split("-")
                #handle ambiguity in BIDS datasets: sometimes participant_id is "sub-XXXX" and other times it's just "XXXX"
                if len(temp) > 1:
                    subjid = temp[1]
                else:
                    subjid = temp[0]
                logging.info(subjid)
                session[subjid] = Session(project)

                #add acquisition object
                acq = AssessmentAcquisition(session=session[subjid])

                acq_entity = AssessmentObject(acquisition=acq)
                participant[subjid] = {}
                participant[subjid]['person'] = acq.add_person(
                    attributes=({
                        Constants.NIDM_SUBJECTID: row['participant_id']
                    }))

                #add qualified association of participant with acquisition activity
                acq.add_qualified_association(
                    person=participant[subjid]['person'],
                    role=Constants.NIDM_PARTICIPANT)
                print(acq)

                for key, value in row.items():
                    if not value:
                        continue
                    #for variables in the participants.tsv file that have term mappings in BIDS_Constants.py use those; add to json_map so we don't have to map these if the user
                    #supplied arguments to map variables
                    if key in BIDS_Constants.participants:

                        #if this was the participant_id, we already handled it above creating agent / qualified association
                        if not (BIDS_Constants.participants[key]
                                == Constants.NIDM_SUBJECTID):
                            acq_entity.add_attributes(
                                {BIDS_Constants.participants[key]: value})

                    #else if the user added the -mapvars flag to the command line then we'll use the variable-to-term mapping procedures to help the user map variables to terms (also used
                    # in CSV2NIDM.py)
                    else:

                        # WIP: trying to add new support for CDEs...
                        add_attributes_with_cde(prov_object=acq_entity,
                                                cde=cde,
                                                row_variable=key,
                                                value=value)
                        # if key in column_to_terms:
                        #    acq_entity.add_attributes({QualifiedName(provNamespace(Core.safe_string(None,string=str(key)), column_to_terms[key]["url"]), ""):value})
                        #else:

                        #    acq_entity.add_attributes({Constants.BIDS[key.replace(" ", "_")]:value})

    #create acquisition objects for each scan for each subject

    #loop through all subjects in dataset
    for subject_id in bids_layout.get_subjects():
        logging.info("Converting subject: %s" % subject_id)
        #skip .git directories...added to support datalad datasets
        if subject_id.startswith("."):
            continue

        #check if there's a session number.  If so, store it in the session activity
        session_dirs = bids_layout.get(target='session',
                                       subject=subject_id,
                                       return_type='dir')
        #if session_dirs has entries then get any metadata about session and store in session activity

        #bids_layout.get(subject=subject_id,type='session',extensions='.tsv')
        #bids_layout.get(subject=subject_id,type='scans',extensions='.tsv')
        #bids_layout.get(extensions='.tsv',return_type='obj')

        #check whether sessions have been created (i.e. was there a participants.tsv file?); if not, create one here
        if not (subject_id in session):
            session[subject_id] = Session(project)

        for file_tpl in bids_layout.get(subject=subject_id,
                                        extensions=['.nii', '.nii.gz']):
            #create an acquisition activity
            acq = MRAcquisition(session[subject_id])

            #check whether participant (i.e. agent) for this subject already exists (i.e. if participants.tsv file exists) else create one
            if not (subject_id in participant):
                participant[subject_id] = {}
                participant[subject_id]['person'] = acq.add_person(
                    attributes=({
                        Constants.NIDM_SUBJECTID: subject_id
                    }))

            #add qualified association with person
            acq.add_qualified_association(
                person=participant[subject_id]['person'],
                role=Constants.NIDM_PARTICIPANT)

            if file_tpl.entities['datatype'] == 'anat':
                #do something with anatomicals
                acq_obj = MRObject(acq)
                #add image contrast type
                if file_tpl.entities['suffix'] in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_CONTRAST_TYPE:
                        BIDS_Constants.scans[file_tpl.entities['suffix']]
                    })
                else:
                    logging.info(
                        "WARNING: No matching image contrast type found in BIDS_Constants.py for %s"
                        % file_tpl.entities['suffix'])

                #add image usage type
                if file_tpl.entities['datatype'] in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_USAGE_TYPE:
                        BIDS_Constants.scans[file_tpl.entities['datatype']]
                    })
                else:
                    logging.info(
                        "WARNING: No matching image usage type found in BIDS_Constants.py for %s"
                        % file_tpl.entities['datatype'])
                #add file link
                #make relative link to file within BIDS directory
                acq_obj.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(join(file_tpl.dirname, file_tpl.filename),
                                     directory)
                })
                #WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj.add_attributes({Constants.PROV['Location']: directory})

                #add sha512 sum
                if isfile(join(directory, file_tpl.dirname,
                               file_tpl.filename)):
                    acq_obj.add_attributes({
                        Constants.CRYPTO_SHA512:
                        getsha512(
                            join(directory, file_tpl.dirname,
                                 file_tpl.filename))
                    })
                else:
                    logging.info(
                        "WARNINGL file %s doesn't exist! No SHA512 sum stored in NIDM files..."
                        % join(directory, file_tpl.dirname, file_tpl.filename))
                #get associated JSON file if exists
                #There is T1w.json file with information
                json_data = (bids_layout.get(
                    suffix=file_tpl.entities['suffix'],
                    subject=subject_id))[0].metadata
                if len(json_data.info) > 0:
                    for key in json_data.info.keys():
                        if key in BIDS_Constants.json_keys:
                            if type(json_data.info[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    ''.join(
                                        str(e) for e in json_data.info[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    json_data.info[key]
                                })

                #Parse T1w.json file in BIDS directory to add the attributes contained inside
                if (os.path.isdir(os.path.join(directory))):
                    try:
                        with open(os.path.join(directory,
                                               'T1w.json')) as data_file:
                            dataset = json.load(data_file)
                    except OSError:
                        logging.critical(
                            "Cannot find T1w.json file which is required in the BIDS spec"
                        )
                        exit("-1")
                else:
                    logging.critical(
                        "Error: BIDS directory %s does not exist!" %
                        os.path.join(directory))
                    exit("-1")

                #add various attributes if they exist in BIDS dataset
                for key in dataset:
                    #if key from T1w.json file is mapped to term in BIDS_Constants.py then add to NIDM object
                    if key in BIDS_Constants.json_keys:
                        if type(dataset[key]) is list:
                            acq_obj.add_attributes({
                                BIDS_Constants.json_keys[key]:
                                "".join(dataset[key])
                            })
                        else:
                            acq_obj.add_attributes(
                                {BIDS_Constants.json_keys[key]: dataset[key]})

            elif file_tpl.entities['datatype'] == 'func':
                #do something with functionals
                acq_obj = MRObject(acq)
                #add image contrast type
                if file_tpl.entities['suffix'] in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_CONTRAST_TYPE:
                        BIDS_Constants.scans[file_tpl.entities['suffix']]
                    })
                else:
                    logging.info(
                        "WARNING: No matching image contrast type found in BIDS_Constants.py for %s"
                        % file_tpl.entities['suffix'])

                #add image usage type
                if file_tpl.entities['datatype'] in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_USAGE_TYPE:
                        BIDS_Constants.scans[file_tpl.entities['datatype']]
                    })
                else:
                    logging.info(
                        "WARNING: No matching image usage type found in BIDS_Constants.py for %s"
                        % file_tpl.entities['datatype'])
                #make relative link to file within BIDS directory
                acq_obj.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(join(file_tpl.dirname, file_tpl.filename),
                                     directory)
                })
                #WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj.add_attributes({Constants.PROV['Location']: directory})

                #add sha512 sum
                if isfile(join(directory, file_tpl.dirname,
                               file_tpl.filename)):
                    acq_obj.add_attributes({
                        Constants.CRYPTO_SHA512:
                        getsha512(
                            join(directory, file_tpl.dirname,
                                 file_tpl.filename))
                    })
                else:
                    logging.info(
                        "WARNINGL file %s doesn't exist! No SHA512 sum stored in NIDM files..."
                        % join(directory, file_tpl.dirname, file_tpl.filename))

                if 'run' in file_tpl.entities:
                    acq_obj.add_attributes({
                        BIDS_Constants.json_keys["run"]:
                        file_tpl.entities['run']
                    })

                #get associated JSON file if exists
                json_data = (bids_layout.get(
                    suffix=file_tpl.entities['suffix'],
                    subject=subject_id))[0].metadata

                if len(json_data.info) > 0:
                    for key in json_data.info.keys():
                        if key in BIDS_Constants.json_keys:
                            if type(json_data.info[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    ''.join(
                                        str(e) for e in json_data.info[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    json_data.info[key]
                                })
                #get associated events TSV file
                if 'run' in file_tpl.entities:
                    events_file = bids_layout.get(
                        subject=subject_id,
                        extensions=['.tsv'],
                        modality=file_tpl.entities['datatype'],
                        task=file_tpl.entities['task'],
                        run=file_tpl.entities['run'])
                else:
                    events_file = bids_layout.get(
                        subject=subject_id,
                        extensions=['.tsv'],
                        modality=file_tpl.entities['datatype'],
                        task=file_tpl.entities['task'])
                #if there is an events file then this is task-based so create an acquisition object for the task file and link
                if events_file:
                    #for now create acquisition object and link it to the associated scan
                    events_obj = AcquisitionObject(acq)
                    #add prov type, task name as prov:label, and link to filename of events file

                    events_obj.add_attributes({
                        PROV_TYPE:
                        Constants.NIDM_MRI_BOLD_EVENTS,
                        BIDS_Constants.json_keys["TaskName"]:
                        json_data["TaskName"],
                        Constants.NIDM_FILENAME:
                        getRelPathToBIDS(events_file[0].filename, directory)
                    })
                    #link it to appropriate MR acquisition entity
                    events_obj.wasAttributedTo(acq_obj)

                #Parse task-rest_bold.json file in BIDS directory to add the attributes contained inside
                if (os.path.isdir(os.path.join(directory))):
                    try:
                        with open(
                                os.path.join(
                                    directory,
                                    'task-rest_bold.json')) as data_file:
                            dataset = json.load(data_file)
                    except OSError:
                        logging.critical(
                            "Cannot find task-rest_bold.json file which is required in the BIDS spec"
                        )
                        exit("-1")
                else:
                    logging.critical(
                        "Error: BIDS directory %s does not exist!" %
                        os.path.join(directory))
                    exit("-1")

                #add various attributes if they exist in BIDS dataset
                for key in dataset:
                    #if key from task-rest_bold.json file is mapped to term in BIDS_Constants.py then add to NIDM object
                    if key in BIDS_Constants.json_keys:
                        if type(dataset[key]) is list:
                            acq_obj.add_attributes({
                                BIDS_Constants.json_keys[key]:
                                ",".join(map(str, dataset[key]))
                            })
                        else:
                            acq_obj.add_attributes(
                                {BIDS_Constants.json_keys[key]: dataset[key]})

            elif file_tpl.entities['datatype'] == 'dwi':
                #do stuff with dwi scans...
                acq_obj = MRObject(acq)
                #add image contrast type
                if file_tpl.entities['suffix'] in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_CONTRAST_TYPE:
                        BIDS_Constants.scans[file_tpl.entities['suffix']]
                    })
                else:
                    logging.info(
                        "WARNING: No matching image contrast type found in BIDS_Constants.py for %s"
                        % file_tpl.entities['suffix'])

                #add image usage type
                if file_tpl.entities['datatype'] in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_USAGE_TYPE:
                        BIDS_Constants.scans["dti"]
                    })
                else:
                    logging.info(
                        "WARNING: No matching image usage type found in BIDS_Constants.py for %s"
                        % file_tpl.entities['datatype'])
                #make relative link to file within BIDS directory
                acq_obj.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(join(file_tpl.dirname, file_tpl.filename),
                                     directory)
                })
                #add sha512 sum
                if isfile(join(directory, file_tpl.dirname,
                               file_tpl.filename)):
                    acq_obj.add_attributes({
                        Constants.CRYPTO_SHA512:
                        getsha512(
                            join(directory, file_tpl.dirname,
                                 file_tpl.filename))
                    })
                else:
                    logging.info(
                        "WARNINGL file %s doesn't exist! No SHA512 sum stored in NIDM files..."
                        % join(directory, file_tpl.dirname, file_tpl.filename))

                if 'run' in file_tpl.entities:
                    acq_obj.add_attributes(
                        {BIDS_Constants.json_keys["run"]: file_tpl.entities['run']})

                #get associated JSON file if exists
                json_data = (bids_layout.get(
                    suffix=file_tpl.entities['suffix'],
                    subject=subject_id))[0].metadata

                if len(json_data.info) > 0:
                    for key in json_data.info.keys():
                        if key in BIDS_Constants.json_keys:
                            if type(json_data.info[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    ''.join(
                                        str(e) for e in json_data.info[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    json_data.info[key]
                                })
                #for bval and bvec files, what to do with those?

                #for now, create new generic acquisition objects, link the files, and associate with the one for the DWI scan?
                acq_obj_bval = AcquisitionObject(acq)
                acq_obj_bval.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bval"]})
                #add file link to bval files
                acq_obj_bval.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(
                        join(file_tpl.dirname,
                             bids_layout.get_bval(file_tpl.filename)),
                        directory)
                })
                #WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj_bval.add_attributes(
                    {Constants.PROV['Location']: directory})

                #add sha512 sum
                if isfile(join(directory, file_tpl.dirname,
                               file_tpl.filename)):
                    acq_obj_bval.add_attributes({
                        Constants.CRYPTO_SHA512:
                        getsha512(
                            join(directory, file_tpl.dirname,
                                 file_tpl.filename))
                    })
                else:
                    logging.info(
                        "WARNINGL file %s doesn't exist! No SHA512 sum stored in NIDM files..."
                        % join(directory, file_tpl.dirname, file_tpl.filename))
                acq_obj_bvec = AcquisitionObject(acq)
                acq_obj_bvec.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bvec"]})
                #add file link to bvec files
                acq_obj_bvec.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(
                        join(file_tpl.dirname,
                             bids_layout.get_bvec(file_tpl.filename)),
                        directory)
                })
                #WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj_bvec.add_attributes(
                    {Constants.PROV['Location']: directory})

                if isfile(join(directory, file_tpl.dirname,
                               file_tpl.filename)):
                    #add sha512 sum
                    acq_obj_bvec.add_attributes({
                        Constants.CRYPTO_SHA512:
                        getsha512(
                            join(directory, file_tpl.dirname,
                                 file_tpl.filename))
                    })
                else:
                    logging.info(
                        "WARNINGL file %s doesn't exist! No SHA512 sum stored in NIDM files..."
                        % join(directory, file_tpl.dirname, file_tpl.filename))

                #link bval and bvec acquisition object entities together, or establish their association with the DWI scan...

        #Added temporarily to support phenotype files
        #for each *.tsv / *.json file pair in the phenotypes directory
        #WIP: ADD VARIABLE -> TERM MAPPING HERE
        for tsv_file in glob.glob(os.path.join(directory, "phenotype",
                                               "*.tsv")):
            #for now, open the TSV file, extract the row for this subject, store it in an acquisition object and link to
            #the associated JSON data dictionary file
            with open(tsv_file) as phenofile:
                pheno_data = csv.DictReader(phenofile, delimiter='\t')
                for row in pheno_data:
                    subjid = row['participant_id'].split("-")
                    if not subjid[1] == subject_id:
                        continue
                    else:
                        #add acquisition object
                        acq = AssessmentAcquisition(session=session[subjid[1]])
                        #add qualified association with person
                        acq.add_qualified_association(
                            person=participant[subject_id]['person'],
                            role=Constants.NIDM_PARTICIPANT)

                        acq_entity = AssessmentObject(acquisition=acq)

                        for key, value in row.items():
                            if not value:
                                continue
                            #we're using participant_id in NIDM in agent so don't add to assessment as a triple.
                            #BIDS phenotype files seem to have an index column with no column header variable name so skip those
                            if ((not key == "participant_id") and (key != "")):
                                #for now we're using a placeholder namespace for BIDS and simply the variable names as the concept IDs..
                                acq_entity.add_attributes(
                                    {Constants.BIDS[key]: value})

                        #link TSV file
                        acq_entity.add_attributes({
                            Constants.NIDM_FILENAME:
                            getRelPathToBIDS(tsv_file, directory)
                        })
                        #WIP: add absolute location of BIDS directory on disk for later finding of files
                        acq_entity.add_attributes(
                            {Constants.PROV['Location']: directory})

                        #link associated JSON file if it exists
                        data_dict = os.path.join(
                            directory, "phenotype",
                            os.path.splitext(os.path.basename(tsv_file))[0] +
                            ".json")
                        if os.path.isfile(data_dict):
                            acq_entity.add_attributes({
                                Constants.BIDS["data_dictionary"]:
                                getRelPathToBIDS(data_dict, directory)
                            })

    return project, cde
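
A minimal usage sketch for the converter above, assuming an argparse-style namespace carrying the json_map and key attributes the function reads; the flag names, the output path, and the driver itself are illustrative rather than part of the original tool, while Project.serializeTurtle() is the same call used in Example #3 below.

# Hypothetical driver for bidsmri2project(); option names and output path are illustrative.
import argparse

parser = argparse.ArgumentParser(description="Convert a BIDS MRI dataset to a NIDM document")
parser.add_argument("-d", "--directory", required=True, help="path to the BIDS dataset root")
parser.add_argument("-json_map", default=False, help="optional JSON variable-to-term mapping file")
parser.add_argument("-key", default=None, help="optional API key for variable-to-term lookups")
args = parser.parse_args()

project, cde = bidsmri2project(args.directory, args)

# serialize the resulting NIDM document to Turtle (same call shown in Example #3)
with open("nidm.ttl", "w") as f:
    f.write(project.serializeTurtle())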
Example #2
def addimagingsessions(bids_layout,subject_id,session,participant, directory,img_session=None):
    '''
    This function adds imaging acquisitions to the NIDM file and handles BIDS structures that may or may not have
    separate ses-* directories
    :param bids_layout:
    :param subject_id:
    :param session:
    :param participant:
    :param directory:
    :param img_session:
    :return:
    '''
    for file_tpl in bids_layout.get(subject=subject_id, session=img_session, extensions=['.nii', '.nii.gz']):
        # create an acquisition activity
        acq=MRAcquisition(session)

        # check whether participant (i.e. agent) for this subject already exists (i.e. if participants.tsv file exists) else create one
        if (not subject_id in participant) and (not subject_id.lstrip("0") in participant):
            participant[subject_id] = {}
            participant[subject_id]['person'] = acq.add_person(attributes=({Constants.NIDM_SUBJECTID:subject_id}))
            acq.add_qualified_association(person=participant[subject_id]['person'],role=Constants.NIDM_PARTICIPANT)

        # added to account for errors in BIDS datasets where participants.tsv may have no leading 0's but
        # subject directories do.  Since bidsmri2nidm starts with the participants.tsv file those are the IDs unless
        # there's a subject directory and no entry in participants.tsv...
        elif subject_id.lstrip("0") in participant:
            # then link acquisition to the agent with participant ID without leading 00's
            acq.add_qualified_association(person=participant[subject_id.lstrip("0")]['person'],role=Constants.NIDM_PARTICIPANT)
        else:
            # add qualified association with person
            acq.add_qualified_association(person=participant[subject_id]['person'],role=Constants.NIDM_PARTICIPANT)



        if file_tpl.entities['datatype']=='anat':
            # do something with anatomicals
            acq_obj = MRObject(acq)
            # add image contrast type
            if file_tpl.entities['suffix'] in BIDS_Constants.scans:
                acq_obj.add_attributes({Constants.NIDM_IMAGE_CONTRAST_TYPE:BIDS_Constants.scans[file_tpl.entities['suffix']]})
            else:
                logging.info("WARNING: No matching image contrast type found in BIDS_Constants.py for %s" % file_tpl.entities['suffix'])

            # add image usage type
            if file_tpl.entities['datatype'] in BIDS_Constants.scans:
                acq_obj.add_attributes({Constants.NIDM_IMAGE_USAGE_TYPE:BIDS_Constants.scans[file_tpl.entities['datatype']]})
            else:
                logging.info("WARNING: No matching image usage type found in BIDS_Constants.py for %s" % file_tpl.entities['datatype'])
            # add file link
            # make relative link to file within BIDS directory
            acq_obj.add_attributes({Constants.NIDM_FILENAME:getRelPathToBIDS(join(file_tpl.dirname,file_tpl.filename), directory)})

            # add git-annex info if exists
            num_sources = addGitAnnexSources(obj=acq_obj,filepath=join(file_tpl.dirname,file_tpl.filename),bids_root=directory)
            # if there aren't any git annex sources then just store the local directory information
            if num_sources == 0:
                # WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj.add_attributes({Constants.PROV['Location']:"file:/" + join(file_tpl.dirname,file_tpl.filename)})



            # add sha512 sum
            if isfile(join(directory,file_tpl.dirname,file_tpl.filename)):
                acq_obj.add_attributes({Constants.CRYPTO_SHA512:getsha512(join(directory,file_tpl.dirname,file_tpl.filename))})
            else:
                logging.info("WARNING file %s doesn't exist! No SHA512 sum stored in NIDM files..." %join(directory,file_tpl.dirname,file_tpl.filename))
            # get associated JSON file if exists
            # There is T1w.json file with information
            json_data = (bids_layout.get(suffix=file_tpl.entities['suffix'],subject=subject_id))[0].metadata
            if len(json_data.info)>0:
                for key in json_data.info.keys():
                    if key in BIDS_Constants.json_keys:
                        if type(json_data.info[key]) is list:
                            acq_obj.add_attributes({BIDS_Constants.json_keys[key.replace(" ", "_")]:''.join(str(e) for e in json_data.info[key])})
                        else:
                            acq_obj.add_attributes({BIDS_Constants.json_keys[key.replace(" ", "_")]:json_data.info[key]})

            # Parse T1w.json file in BIDS directory to add the attributes contained inside
            if (os.path.isdir(os.path.join(directory))):
                try:
                    with open(os.path.join(directory,'T1w.json')) as data_file:
                        dataset = json.load(data_file)
                except OSError:
                    logging.warning("Cannot find T1w.json file...looking for session-specific one")
                    try:
                        with open(os.path.join(directory,'ses-' + img_session + '_T1w.json')) as data_file:
                            dataset = json.load(data_file)
                    except OSError:
                        logging.warning("Cannot find session-specific T1w.json file which is required in the BIDS spec..continuing anyway")
                        dataset={}

            else:
                logging.critical("Error: BIDS directory %s does not exist!" %os.path.join(directory))
                exit(-1)

            # add various attributes if they exist in BIDS dataset
            for key in dataset:
                # if key from T1w.json file is mapped to term in BIDS_Constants.py then add to NIDM object
                if key in BIDS_Constants.json_keys:
                    if type(dataset[key]) is list:
                        acq_obj.add_attributes({BIDS_Constants.json_keys[key]:"".join(dataset[key])})
                    else:
                        acq_obj.add_attributes({BIDS_Constants.json_keys[key]:dataset[key]})

        elif file_tpl.entities['datatype'] == 'func':
            # do something with functionals
            acq_obj = MRObject(acq)
            # add image contrast type
            if file_tpl.entities['suffix'] in BIDS_Constants.scans:
                acq_obj.add_attributes({Constants.NIDM_IMAGE_CONTRAST_TYPE:BIDS_Constants.scans[file_tpl.entities['suffix']]})
            else:
                logging.info("WARNING: No matching image contrast type found in BIDS_Constants.py for %s" % file_tpl.entities['suffix'])

            # add image usage type
            if file_tpl.entities['datatype'] in BIDS_Constants.scans:
                acq_obj.add_attributes({Constants.NIDM_IMAGE_USAGE_TYPE:BIDS_Constants.scans[file_tpl.entities['datatype']]})
            else:
                logging.info("WARNING: No matching image usage type found in BIDS_Constants.py for %s" % file_tpl.entities['datatype'])
            # make relative link to file within BIDS directory
            acq_obj.add_attributes({Constants.NIDM_FILENAME:getRelPathToBIDS(join(file_tpl.dirname,file_tpl.filename), directory)})

            # add git-annex/datalad info if exists
            num_sources=addGitAnnexSources(obj=acq_obj,filepath=join(file_tpl.dirname,file_tpl.filename),bids_root=directory)

            # if there aren't any git annex sources then just store the local directory information
            if num_sources == 0:
                # WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj.add_attributes({Constants.PROV['Location']:"file:/" + join(file_tpl.dirname,file_tpl.filename)})



            # add sha512 sum
            if isfile(join(directory,file_tpl.dirname,file_tpl.filename)):
                acq_obj.add_attributes({Constants.CRYPTO_SHA512:getsha512(join(directory,file_tpl.dirname,file_tpl.filename))})
            else:
                logging.info("WARNINGL file %s doesn't exist! No SHA512 sum stored in NIDM files..." %join(directory,file_tpl.dirname,file_tpl.filename))

            if 'run' in file_tpl.entities:
                acq_obj.add_attributes({BIDS_Constants.json_keys["run"]:file_tpl.entities['run']})

            # get associated JSON file if exists
            json_data = (bids_layout.get(suffix=file_tpl.entities['suffix'],subject=subject_id))[0].metadata

            if len(json_data.info)>0:
                for key in json_data.info.keys():
                    if key in BIDS_Constants.json_keys:
                        if type(json_data.info[key]) is list:
                            acq_obj.add_attributes({BIDS_Constants.json_keys[key.replace(" ", "_")]:''.join(str(e) for e in json_data.info[key])})
                        else:
                            acq_obj.add_attributes({BIDS_Constants.json_keys[key.replace(" ", "_")]:json_data.info[key]})
            # get associated events TSV file
            if 'run' in file_tpl.entities:
                events_file = bids_layout.get(subject=subject_id, extensions=['.tsv'],modality=file_tpl.entities['datatype'],task=file_tpl.entities['task'],run=file_tpl.entities['run'])
            else:
                events_file = bids_layout.get(subject=subject_id, extensions=['.tsv'],modality=file_tpl.entities['datatype'],task=file_tpl.entities['task'])
            # if there is an events file then this is task-based so create an acquisition object for the task file and link
            if events_file:
                #for now create acquisition object and link it to the associated scan
                events_obj = AcquisitionObject(acq)
                #add prov type, task name as prov:label, and link to filename of events file

                events_obj.add_attributes({PROV_TYPE:Constants.NIDM_MRI_BOLD_EVENTS,BIDS_Constants.json_keys["TaskName"]: json_data["TaskName"], Constants.NIDM_FILENAME:getRelPathToBIDS(events_file[0].filename, directory)})
                #link it to appropriate MR acquisition entity
                events_obj.wasAttributedTo(acq_obj)

                # add source links for this file
                # add git-annex/datalad info if exists
                num_sources=addGitAnnexSources(obj=events_obj,filepath=events_file[0].filename,bids_root=directory)

                # if there aren't any git annex sources then just store the local directory information
                if num_sources == 0:
                    # WIP: add absolute location of BIDS directory on disk for later finding of files
                    events_obj.add_attributes({Constants.PROV['Location']:"file:/" + events_file})


            #Parse task-rest_bold.json file in BIDS directory to add the attributes contained inside
            if (os.path.isdir(os.path.join(directory))):
                try:
                    with open(os.path.join(directory,'task-rest_bold.json')) as data_file:
                        dataset = json.load(data_file)
                except OSError:
                    logging.warning("Cannot find task-rest_bold.json file looking for session-specific one")
                    try:
                        with open(os.path.join(directory,'ses-' + img_session +'_task-rest_bold.json')) as data_file:
                            dataset = json.load(data_file)
                    except OSError:
                        logging.warning("Cannot find session-specific task-rest_bold.json file which is required in the BIDS spec..continuing anyway")
                        dataset={}
            else:
                logging.critical("Error: BIDS directory %s does not exist!" %os.path.join(directory))
                exit(-1)

            #add various attributes if they exist in BIDS dataset
            for key in dataset:
                #if key from task-rest_bold.json file is mapped to term in BIDS_Constants.py then add to NIDM object
                if key in BIDS_Constants.json_keys:
                    if type(dataset[key]) is list:
                        acq_obj.add_attributes({BIDS_Constants.json_keys[key]:",".join(map(str,dataset[key]))})
                    else:
                        acq_obj.add_attributes({BIDS_Constants.json_keys[key]:dataset[key]})

        elif file_tpl.entities['datatype'] == 'dwi':
            #do stuff with dwi scans...
            acq_obj = MRObject(acq)
            #add image contrast type
            if file_tpl.entities['suffix'] in BIDS_Constants.scans:
                acq_obj.add_attributes({Constants.NIDM_IMAGE_CONTRAST_TYPE:BIDS_Constants.scans[file_tpl.entities['suffix']]})
            else:
                logging.info("WARNING: No matching image contrast type found in BIDS_Constants.py for %s" % file_tpl.entities['suffix'])

            #add image usage type
            if file_tpl.entities['datatype'] in BIDS_Constants.scans:
                acq_obj.add_attributes({Constants.NIDM_IMAGE_USAGE_TYPE:BIDS_Constants.scans["dti"]})
            else:
                logging.info("WARNING: No matching image usage type found in BIDS_Constants.py for %s" % file_tpl.entities['datatype'])
            #make relative link to
            acq_obj.add_attributes({Constants.NIDM_FILENAME:getRelPathToBIDS(join(file_tpl.dirname,file_tpl.filename), directory)})
            #add sha512 sum
            if isfile(join(directory,file_tpl.dirname,file_tpl.filename)):
                acq_obj.add_attributes({Constants.CRYPTO_SHA512:getsha512(join(directory,file_tpl.dirname,file_tpl.filename))})
            else:
                logging.info("WARNING file %s doesn't exist! No SHA512 sum stored in NIDM files..." %join(directory,file_tpl.dirname,file_tpl.filename))

            # add git-annex/datalad info if exists
            num_sources = addGitAnnexSources(obj=acq_obj,filepath=join(file_tpl.dirname,file_tpl.filename),bids_root=directory)

            if num_sources == 0:
                acq_obj.add_attributes({Constants.PROV['Location']: "file:/" + join(file_tpl.dirname,file_tpl.filename)})

            if 'run' in file_tpl.entities:
                acq_obj.add_attributes({BIDS_Constants.json_keys["run"]:file_tpl.run})

            #get associated JSON file if exists
            json_data = (bids_layout.get(suffix=file_tpl.entities['suffix'],subject=subject_id))[0].metadata

            if len(json_data.info)>0:
                for key in json_data.info.keys():
                    if key in BIDS_Constants.json_keys:
                        if type(json_data.info[key]) is list:
                            acq_obj.add_attributes({BIDS_Constants.json_keys[key.replace(" ", "_")]:''.join(str(e) for e in json_data.info[key])})
                        else:
                            acq_obj.add_attributes({BIDS_Constants.json_keys[key.replace(" ", "_")]:json_data.info[key]})
            #for bval and bvec files, what to do with those?

            # for now, create new generic acquisition objects, link the files, and associate with the one for the DWI scan?
            acq_obj_bval = AcquisitionObject(acq)
            acq_obj_bval.add_attributes({PROV_TYPE:BIDS_Constants.scans["bval"]})
            # add file link to bval files
            acq_obj_bval.add_attributes({Constants.NIDM_FILENAME:getRelPathToBIDS(join(file_tpl.dirname,bids_layout.get_bval(join(file_tpl.dirname,file_tpl.filename))),directory)})

            # add git-annex/datalad info if exists
            num_sources = addGitAnnexSources(obj=acq_obj_bval,filepath=join(file_tpl.dirname,bids_layout.get_bval(join(file_tpl.dirname,file_tpl.filename))),bids_root=directory)

            if num_sources == 0:
                # WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj_bval.add_attributes({Constants.PROV['Location']:"file:/" + join(file_tpl.dirname,bids_layout.get_bval(join(file_tpl.dirname,file_tpl.filename)))})

            # add sha512 sum
            if isfile(join(directory,file_tpl.dirname,file_tpl.filename)):
                acq_obj_bval.add_attributes({Constants.CRYPTO_SHA512:getsha512(join(directory,file_tpl.dirname,file_tpl.filename))})
            else:
                logging.info("WARNING file %s doesn't exist! No SHA512 sum stored in NIDM files..." %join(directory,file_tpl.dirname,file_tpl.filename))
            acq_obj_bvec = AcquisitionObject(acq)
            acq_obj_bvec.add_attributes({PROV_TYPE:BIDS_Constants.scans["bvec"]})
            #add file link to bvec files
            acq_obj_bvec.add_attributes({Constants.NIDM_FILENAME:getRelPathToBIDS(join(file_tpl.dirname,bids_layout.get_bvec(join(file_tpl.dirname,file_tpl.filename))),directory)})

            # add git-annex/datalad info if exists
            num_sources = addGitAnnexSources(obj=acq_obj_bvec,filepath=join(file_tpl.dirname,bids_layout.get_bvec(join(file_tpl.dirname,file_tpl.filename))),bids_root=directory)

            if num_sources == 0:
               #WIP: add absolute location of BIDS directory on disk for later finding of files
                acq_obj_bvec.add_attributes({Constants.PROV['Location']:"file:/" + join(file_tpl.dirname,bids_layout.get_bvec(join(file_tpl.dirname,file_tpl.filename)))})

            if isfile(join(directory,file_tpl.dirname,file_tpl.filename)):
                #add sha512 sum
                acq_obj_bvec.add_attributes({Constants.CRYPTO_SHA512:getsha512(join(directory,file_tpl.dirname,file_tpl.filename))})
            else:
                logging.info("WARNING file %s doesn't exist! No SHA512 sum stored in NIDM files..." %join(directory,file_tpl.dirname,file_tpl.filename))
Example #3
def main(argv):
    #create new nidm-experiment document with project
    kwargs={Constants.NIDM_PROJECT_NAME:"FBIRN_PhaseII",Constants.NIDM_PROJECT_IDENTIFIER:9610,Constants.NIDM_PROJECT_DESCRIPTION:"Test investigation"}
    project = Project(attributes=kwargs)
    
    #test add string attribute with existing namespace
    #nidm_doc.addLiteralAttribute("nidm","isFun","ForMe")
    project.add_attributes({Constants.NIDM["isFun"]:"ForMe"})

    #test adding string attribute with new namespace/term
    project.addLiteralAttribute("fred","notFound","in namespaces","www.fred.org/")

    #test add float attribute
    project.addLiteralAttribute("nidm", "float", float(2.34))

    #test adding attributes in bulk with mix of existing and new namespaces
    #nidm_doc.addAttributesWithNamespaces(nidm_doc.getProject(),[{"prefix":"nidm", "uri":nidm_doc.namespaces["nidm"], "term":"score", "value":int(15)}, \
        #                                              {"prefix":"dave", "uri":"http://www.davidkeator.com/", "term":"isAwesome", "value":"15"}, \
        #                                              {"prefix":"nidm", "uri":nidm_doc.namespaces["nidm"], "term":"value", "value":float(2.34)}])
    
    #nidm_doc.addAttributes(nidm_doc.getProject(),{"nidm:test":int(15), "ncit:isTerminology":"15","ncit:joker":float(1)})


    #test add PI to investigation
    project_PI = project.add_person(attributes={Constants.NIDM_FAMILY_NAME:"Keator", Constants.NIDM_GIVEN_NAME:"David"})

    #add qualified association of project PI to project activity
    project.add_qualified_association(person=project_PI,role=Constants.NIDM_PI)

    #test add session to graph and associate with project
    session = Session(project)
    session.add_attributes({Constants.NIDM:"test"})
    #project.add_sessions(session)

    #test add MR acquisition activity / entity to graph and associate with session
    acq_act = MRAcquisition(session=session)
    #test add acquisition object entity to graph associated with participant role NIDM_PARTICIPANT
    acq_entity = MRObject(acquisition=acq_act)

    #add person to graph
    person = acq_act.add_person(attributes={Constants.NIDM_GIVEN_NAME:"George"})
    #add qualified association of person with role NIDM_PARTICIPANT, and associated with acquisition activity
    acq_act.add_qualified_association(person=person, role=Constants.NIDM_PARTICIPANT)


    #test add Assessment acquisition activity / entity to graph and associate with session
    acq_act = AssessmentAcquisition(session=session)
    #test add acquisition object entity to graph associated with participant role NIDM_PARTICIPANT
    acq_entity = AssessmentObject(acquisition=acq_act)
    acq_entity.add_attributes({Constants.NIDM["Q1"]:"Q1 Answer",Constants.NIDM["Q2"]:"Q2 Answer" })
    #associate person as participant
    acq_act.add_qualified_association(person=person, role=Constants.NIDM_PARTICIPANT)


    #test add DemographicsAssessment acquisition activity / entity to graph and associate with session
    acq_act = AssessmentAcquisition(session=session)
    #test add acquisition object entity to graph associated with participant role NIDM_PARTICIPANT
    acq_entity = DemographicsObject(acquisition=acq_act)
    #add new person to graph
    person2 = acq_act.add_person(attributes={Constants.NIDM_FAMILY_NAME:"Doe", \
            Constants.NIDM_GIVEN_NAME:"John"})
    #associate person2 with assessment acquisition
    acq_act.add_qualified_association(person=person2, role=Constants.NIDM_PARTICIPANT)

    acq_entity.add_attributes({Constants.NIDM_AGE:60,Constants.NIDM_GENDER:"Male" })


    #save a turtle file
    with open("test.ttl",'w') as f:
        f.write(project.serializeTurtle())

    #save a DOT graph as PNG
    project.save_DotGraph("test.png",format="png")
Example #4
def bidsmri2project(directory):
    #Parse dataset_description.json file in BIDS directory
    if (os.path.isdir(os.path.join(directory))):
        with open(os.path.join(directory,
                               'dataset_description.json')) as data_file:
            dataset = json.load(data_file)
    else:
        print("Error: BIDS directory %s does not exist!" %
              os.path.join(directory))
        exit("-1")
    #print(dataset_data)

    #create project / nidm-exp doc
    project = Project()

    #add various attributes if they exist in BIDS dataset
    for key in dataset:
        #print(key)
        #if key from dataset_description file is mapped to term in BIDS_Constants.py then add to NIDM object
        if key in BIDS_Constants.dataset_description:
            if type(dataset[key]) is list:
                project.add_attributes({
                    BIDS_Constants.dataset_description[key]:
                    "".join(dataset[key])
                })
            else:
                project.add_attributes(
                    {BIDS_Constants.dataset_description[key]: dataset[key]})
        #add absolute location of BIDS directory on disk for later finding of files which are stored relatively in NIDM document
        project.add_attributes({Constants.PROV['Location']: directory})
    #create empty dictionary for sessions where key is subject id, used later to link scans to the same session as demographics
    session = {}
    participant = {}
    #Parse participants.tsv file in BIDS directory and create study and acquisition objects
    with open(os.path.join(directory, 'participants.tsv')) as csvfile:
        participants_data = csv.DictReader(csvfile, delimiter='\t')
        #print(participants_data.fieldnames)
        for row in participants_data:
            #create session object for subject to be used for participant metadata and image data
            #parse subject id from "sub-XXXX" string
            subjid = row['participant_id'].split("-")
            session[subjid[1]] = Session(project)

            #add acquisition object
            acq = AssessmentAcquisition(session=session[subjid[1]])

            acq_entity = AssessmentObject(acquisition=acq)
            participant[subjid[1]] = {}
            participant[subjid[1]]['person'] = acq.add_person(
                attributes=({
                    Constants.NIDM_SUBJECTID: row['participant_id']
                }))

            #add qualified association of participant with acquisition activity
            acq.add_qualified_association(
                person=participant[subjid[1]]['person'],
                role=Constants.NIDM_PARTICIPANT)

            for key, value in row.items():
                #for variables in the participants.tsv file that have term mappings in BIDS_Constants.py use those
                if key in BIDS_Constants.participants:
                    #if this was the participant_id, we already handled it above creating agent / qualified association
                    if not (BIDS_Constants.participants[key]
                            == Constants.NIDM_SUBJECTID):
                        acq_entity.add_attributes(
                            {BIDS_Constants.participants[key]: value})
                #else just put variables in bids namespace since we don't know what they mean
                else:
                    #acq_entity.add_attributes({Constants.BIDS[quote(key)]:value})
                    acq_entity.add_attributes(
                        {Constants.BIDS[key.replace(" ", "_")]: value})

    #get BIDS layout
    bids_layout = BIDSLayout(directory)

    #create acquisition objects for each scan for each subject

    #loop through all subjects in dataset
    for subject_id in bids_layout.get_subjects():
        print("Converting subject: %s" % subject_id)
        #skip .git directories...added to support datalad datasets
        if subject_id.startswith("."):
            continue

        #check if there's a session number.  If so, store it in the session activity
        session_dirs = bids_layout.get(target='session',
                                       subject=subject_id,
                                       return_type='dir')
        #TODO: if session_dirs has entries, get any session metadata and store it in the session activity

        for file_tpl in bids_layout.get(subject=subject_id,
                                        extensions=['.nii', '.nii.gz']):
            #create an acquisition activity
            acq = MRAcquisition(session[subject_id])

            #add qualified association with person
            acq.add_qualified_association(
                person=participant[subject_id]['person'],
                role=Constants.NIDM_PARTICIPANT)

            if file_tpl.modality == 'anat':
                #anatomical scan: create an MR object and attach contrast/usage metadata
                acq_obj = MRObject(acq)
                #add image contrast type
                if file_tpl.type in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_CONTRAST_TYPE:
                        BIDS_Constants.scans[file_tpl.type]
                    })
                else:
                    print(
                        "WARNING: No matching image contrast type found in BIDS_Constants.py for %s"
                        % file_tpl.type)

                #add image usage type
                if file_tpl.modality in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_USAGE_TYPE:
                        BIDS_Constants.scans[file_tpl.modality]
                    })
                else:
                    print(
                        "WARNING: No matching image usage type found in BIDS_Constants.py for %s"
                        % file_tpl.modality)
                #add file link (path stored relative to the BIDS root)
                acq_obj.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(file_tpl.filename, directory)
                })
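                #e.g. (hypothetical paths) getRelPathToBIDS('/data/ds001/sub-01/anat/sub-01_T1w.nii.gz', '/data/ds001') presumably returns 'sub-01/anat/sub-01_T1w.nii.gz'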
                #get associated JSON file if exists
                json_data = bids_layout.get_metadata(file_tpl.filename)
                if json_data:
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    ''.join(str(e) for e in json_data[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    json_data[key]
                                })
            elif file_tpl.modality == 'func':
                #functional scan: create an MR object and attach contrast/usage metadata plus task events
                acq_obj = MRObject(acq)
                #add image contrast type
                if file_tpl.type in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_CONTRAST_TYPE:
                        BIDS_Constants.scans[file_tpl.type]
                    })
                else:
                    print(
                        "WARNING: No matching image contrast type found in BIDS_Constants.py for %s"
                        % file_tpl.type)

                #add image usage type
                if file_tpl.modality in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_USAGE_TYPE:
                        BIDS_Constants.scans[file_tpl.modality]
                    })
                else:
                    print(
                        "WARNING: No matching image usage type found in BIDS_Constants.py for %s"
                        % file_tpl.modality)
                #add file link
                acq_obj.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(file_tpl.filename, directory)
                })
                if 'run' in file_tpl._fields:
                    acq_obj.add_attributes(
                        {BIDS_Constants.json_keys["run"]: file_tpl.run})

                #get associated JSON file if exists
                json_data = bids_layout.get_metadata(file_tpl.filename)

                if json_data:
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    ''.join(str(e) for e in json_data[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    json_data[key]
                                })

                #get associated events TSV file
                if 'run' in file_tpl._fields:
                    events_file = bids_layout.get(subject=subject_id,
                                                  extensions=['.tsv'],
                                                  modality=file_tpl.modality,
                                                  task=file_tpl.task,
                                                  run=file_tpl.run)
                else:
                    events_file = bids_layout.get(subject=subject_id,
                                                  extensions=['.tsv'],
                                                  modality=file_tpl.modality,
                                                  task=file_tpl.task)
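                #per the BIDS spec, events files are named sub-<label>_task-<label>[_run-<index>]_events.tsv and sit alongside the BOLD run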
                #if there is an events file then this is task-based so create an acquisition object for the task file and link
                if events_file:
                    #for now create acquisition object and link it to the associated scan
                    events_obj = AcquisitionObject(acq)
                    #add prov type, task name, and a link to the filename of the events file

                    events_obj.add_attributes({
                        PROV_TYPE:
                        Constants.NIDM_MRI_BOLD_EVENTS,
                        BIDS_Constants.json_keys["TaskName"]:
                        json_data["TaskName"],
                        Constants.NIDM_FILENAME:
                        getRelPathToBIDS(events_file[0].filename, directory)
                    })
                    #link it to appropriate MR acquisition entity
                    events_obj.wasAttributedTo(acq_obj)

            elif file_tpl.modality == 'dwi':
                #diffusion scan: create an MR object and attach metadata plus the bval/bvec files
                acq_obj = MRObject(acq)
                #add image contrast type
                if file_tpl.type in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_CONTRAST_TYPE:
                        BIDS_Constants.scans[file_tpl.type]
                    })
                else:
                    print(
                        "WARNING: No matching image contrast type found in BIDS_Constants.py for %s"
                        % file_tpl.type)

                #add image usage type
                if file_tpl.modality in BIDS_Constants.scans:
                    acq_obj.add_attributes({
                        Constants.NIDM_IMAGE_USAGE_TYPE:
                        BIDS_Constants.scans["dti"]
                    })
                else:
                    print(
                        "WARNING: No matching image usage type found in BIDS_Constants.py for %s"
                        % file_tpl.modality)
                #add file link
                acq_obj.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(file_tpl.filename, directory)
                })
                if 'run' in file_tpl._fields:
                    acq_obj.add_attributes(
                        {BIDS_Constants.json_keys["run"]: file_tpl.run})

                #get associated JSON file if exists
                json_data = bids_layout.get_metadata(file_tpl.filename)

                if json_data:
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    ''.join(str(e) for e in json_data[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key.replace(
                                        " ", "_")]:
                                    json_data[key]
                                })

                #for the bval and bvec files, create generic acquisition objects, link the files, and associate them with the DWI scan's acquisition
                acq_obj_bval = AcquisitionObject(acq)
                acq_obj_bval.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bval"]})
                #add file link to bval files
                acq_obj_bval.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(bids_layout.get_bval(file_tpl.filename),
                                     directory)
                })
                acq_obj_bvec = AcquisitionObject(acq)
                acq_obj_bvec.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bvec"]})
                #add file link to bvec files
                acq_obj_bvec.add_attributes({
                    Constants.NIDM_FILENAME:
                    getRelPathToBIDS(bids_layout.get_bvec(file_tpl.filename),
                                     directory)
                })
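                #BIDS stores diffusion gradient information in .bval/.bvec files sharing the DWI image's basename; pybids' get_bval()/get_bvec() return their paths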

                #TODO: decide whether to link the bval and bvec acquisition entities to each other or rely on their association with the DWI acquisition

        #Added temporarily to support phenotype files
        #for each *.tsv / *.json file pair in the phenotypes directory
        for tsv_file in glob.glob(os.path.join(directory, "phenotype",
                                               "*.tsv")):
            #for now, open the TSV file, extract the row for this subject, store it in an acquisition object, and link to the associated JSON data dictionary file
            with open(tsv_file) as phenofile:
                pheno_data = csv.DictReader(phenofile, delimiter='\t')
                for row in pheno_data:
                    subjid = row['participant_id'].split("-")
                    if subjid[1] != subject_id:
                        continue
                    else:
                        #add acquisition object
                        acq = AssessmentAcquisition(session=session[subjid[1]])
                        #add qualified association with person
                        acq.add_qualified_association(
                            person=participant[subject_id]['person'],
                            role=Constants.NIDM_PARTICIPANT)

                        acq_entity = AssessmentObject(acquisition=acq)

                        for key, value in row.items():
                            #participant_id is already stored on the NIDM agent, so don't add it to the assessment as a triple
                            #BIDS phenotype files seem to have an index column with no column header variable name so skip those
                            if ((not key == "participant_id") and (key != "")):
                                #for now, use a placeholder BIDS namespace with the variable names as the concept IDs
                                acq_entity.add_attributes(
                                    {Constants.BIDS[key]: value})

                        #link TSV file
                        acq_entity.add_attributes({
                            Constants.NIDM_FILENAME:
                            getRelPathToBIDS(tsv_file, directory)
                        })
                        #link associated JSON file if it exists
                        data_dict = os.path.join(
                            directory, "phenotype",
                            os.path.splitext(os.path.basename(tsv_file))[0] +
                            ".json")
                        if os.path.isfile(data_dict):
                            acq_entity.add_attributes({
                                Constants.BIDS["data_dictionary"]:
                                getRelPathToBIDS(data_dict, directory)
                            })

    return project