Example #1
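This test builds a small NIDM-Experiment graph (one project, one session, two acquisitions with one participant each), serializes it to Turtle, and asserts that Query.GetParticipantIDs returns both subject IDs. The excerpt omits its imports; a plausible header, assuming PyNIDM's usual nidm.experiment / nidm.core module layout, would be:

# Presumed imports for this excerpt (module paths assumed, not shown in the original)
from os import remove
from nidm.core import Constants
from nidm.experiment import Project, Session, Acquisition
from nidm.experiment import Query
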
def test_GetParticipantIDs():

    kwargs = {
        Constants.NIDM_PROJECT_NAME: "FBIRN_PhaseII",
        Constants.NIDM_PROJECT_IDENTIFIER: 9610,
        Constants.NIDM_PROJECT_DESCRIPTION: "Test investigation"
    }
    project = Project(uuid="_123456", attributes=kwargs)
    session = Session(uuid="_13579", project=project)
    acq = Acquisition(uuid="_15793", session=session)
    acq2 = Acquisition(uuid="_15795", session=session)

    person = acq.add_person(attributes=({Constants.NIDM_SUBJECTID: "9999"}))
    acq.add_qualified_association(person=person,
                                  role=Constants.NIDM_PARTICIPANT)

    person2 = acq2.add_person(attributes=({Constants.NIDM_SUBJECTID: "8888"}))
    acq2.add_qualified_association(person=person2,
                                   role=Constants.NIDM_PARTICIPANT)

    #save a turtle file
    with open("test.ttl", 'w') as f:
        f.write(project.serializeTurtle())

    participant_list = Query.GetParticipantIDs(["test.ttl"])

    remove("test.ttl")
    assert (participant_list['ID'].str.contains('9999').any())
    assert (participant_list['ID'].str.contains('8888').any())
Example #2
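This script exercises the Project API directly: it adds literal attributes with existing and new namespaces, attaches a PI, a session, an acquisition, and a participant, then writes a Turtle file and a DOT graph. Assuming the same PyNIDM layout as above (class names may differ across versions), the omitted imports would be roughly:

# Presumed imports (module paths and class locations assumed)
from nidm.core import Constants
from nidm.experiment import Project, Session, Acquisition, MRAcquisitionObject
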
def main(argv):
    #create new nidm-experiment document with project
    kwargs = {
        Constants.NIDM_PROJECT_NAME: "FBIRN_PhaseII",
        Constants.NIDM_PROJECT_IDENTIFIER: 9610,
        Constants.NIDM_PROJECT_DESCRIPTION: "Test investigation"
    }
    project = Project(attributes=kwargs)
    
    #test add string attribute with existing namespace
    #nidm_doc.addLiteralAttribute("nidm","isFun","ForMe")
    project.add_attributes({Constants.NIDM["isFun"]:"ForMe"})

    #test adding string attribute with new namespace/term
    project.addLiteralAttribute("fred","notFound","in namespaces","www.fred.org/")

    #test add float attribute
    project.addLiteralAttribute("nidm", "float", float(2.34))

    #test adding attributes in bulk with mix of existing and new namespaces
    #nidm_doc.addAttributesWithNamespaces(nidm_doc.getProject(),[{"prefix":"nidm", "uri":nidm_doc.namespaces["nidm"], "term":"score", "value":int(15)}, \
    #                                     {"prefix":"dave", "uri":"http://www.davidkeator.com/", "term":"isAwesome", "value":"15"}, \
    #                                     {"prefix":"nidm", "uri":nidm_doc.namespaces["nidm"], "term":"value", "value":float(2.34)}])

    #nidm_doc.addAttributes(nidm_doc.getProject(),{"nidm:test":int(15), "ncit:isTerminology":"15","ncit:joker":float(1)})


    #test add PI to investigation
    project_PI = project.add_person(role=Constants.NIDM_PI, attributes={Constants.NIDM_FAMILY_NAME: "Keator", Constants.NIDM_GIVEN_NAME: "David"})
    
    #test add session to graph and associate with project
    session = Session(project)
    project.add_sessions(session)

    #test add acquisition activity to graph and associate with session
    acq_act = Acquisition(session=session)
    #test add acquisition object entity to graph associated with participant role NIDM_PARTICIPANT
    acq_entity = MRAcquisitionObject(acquisition=acq_act)
    acq_entity.add_person(role=Constants.NIDM_PARTICIPANT,attributes={Constants.NIDM_GIVEN_NAME:"George"})

    #save a turtle file
    with open("test.ttl", 'w') as f:
        f.write(project.serializeTurtle())

    #save a DOT graph as PNG
    project.save_DotGraph("test.png", format="png")
Example #3
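makeTestFile builds two projects with several acquisitions and participants, attaches assessment data through an addData helper, merges the two serialized graphs with rdflib, and writes the result to the requested filename plus a copy named agent.ttl. addData and the test_* globals come from the surrounding test module; the remaining imports would be approximately:

# Presumed imports (addData is a helper from the surrounding test module, not shown here)
import os
from rdflib import Graph, util
from nidm.core import Constants
from nidm.experiment import Project, Session, Acquisition
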
def makeTestFile(filename, params):
    global test_person_uuid, test_p2_subject_uuids

    nidm_project_name = params.get('NIDM_PROJECT_NAME',
                                   False) or "Project_name_sample"
    nidm_project_identifier = params.get('NIDM_PROJECT_IDENTIFIER',
                                         False) or 9610
    nidm_project2_identifier = params.get('NIDM_PROJECT_IDENTIFIER',
                                          False) or 550
    nidm_project_description = params.get(
        'NIDM_PROJECT_DESCRIPTION', False) or "1234356 Test investigation"
    project_uuid = params.get('PROJECT_UUID', False) or "_proj1"
    project_uuid2 = params.get('PROJECT2_UUID', False) or "_proj2"
    session_uuid = params.get('SESSION_UUID', False) or "_ses1"
    session_uuid2 = params.get('SESSION2_UUID', False) or "_ses2"
    p1kwargs = {
        Constants.NIDM_PROJECT_NAME: nidm_project_name,
        Constants.NIDM_PROJECT_IDENTIFIER: nidm_project_identifier,
        Constants.NIDM_PROJECT_DESCRIPTION: nidm_project_description
    }
    p2kwargs = {
        Constants.NIDM_PROJECT_NAME: nidm_project_name,
        Constants.NIDM_PROJECT_IDENTIFIER: nidm_project2_identifier,
        Constants.NIDM_PROJECT_DESCRIPTION: nidm_project_description
    }

    project = Project(uuid=project_uuid, attributes=p1kwargs)
    session = Session(uuid=session_uuid, project=project)
    acq = Acquisition(uuid="_acq1", session=session)
    acq2 = Acquisition(uuid="_acq2", session=session)
    acq3 = Acquisition(uuid="_acq2", session=session)

    person = acq.add_person(attributes=({Constants.NIDM_SUBJECTID: "a1_9999"}))
    test_person_uuid = (str(person.identifier)).replace("niiri:", "")

    acq.add_qualified_association(person=person,
                                  role=Constants.NIDM_PARTICIPANT)

    person2 = acq2.add_person(attributes=({
        Constants.NIDM_SUBJECTID: "a1_8888"
    }))
    acq2.add_qualified_association(person=person2,
                                   role=Constants.NIDM_PARTICIPANT)
    person3 = acq3.add_person(attributes=({
        Constants.NIDM_SUBJECTID: "a2_7777"
    }))
    acq3.add_qualified_association(person=person3,
                                   role=Constants.NIDM_PARTICIPANT)

    project2 = Project(uuid=project_uuid2, attributes=p2kwargs)
    session2 = Session(uuid=session_uuid2, project=project2)
    acq4 = Acquisition(uuid="_acq3", session=session2)
    acq5 = Acquisition(uuid="_acq4", session=session2)

    person4 = acq4.add_person(attributes=({
        Constants.NIDM_SUBJECTID: "a3_6666"
    }))
    acq4.add_qualified_association(person=person4,
                                   role=Constants.NIDM_PARTICIPANT)
    person5 = acq5.add_person(attributes=({
        Constants.NIDM_SUBJECTID: "a4_5555"
    }))
    acq5.add_qualified_association(person=person5,
                                   role=Constants.NIDM_PARTICIPANT)

    # now add some assessment instrument data
    addData(
        acq, {
            Constants.NIDM_AGE: 9,
            Constants.NIDM_HANDEDNESS: "R",
            Constants.NIDM_DIAGNOSIS: "Anxiety"
        })
    addData(
        acq2, {
            Constants.NIDM_AGE: 8,
            Constants.NIDM_HANDEDNESS: "L",
            Constants.NIDM_DIAGNOSIS: "ADHD"
        })
    addData(
        acq4, {
            Constants.NIDM_AGE: 7,
            Constants.NIDM_HANDEDNESS: "A",
            Constants.NIDM_DIAGNOSIS: "Depression"
        })
    addData(
        acq5, {
            Constants.NIDM_AGE: 6,
            Constants.NIDM_HANDEDNESS: "R",
            Constants.NIDM_DIAGNOSIS: "Depression"
        })

    test_p2_subject_uuids.append(
        (str(person4.identifier)).replace("niiri:", ""))
    test_p2_subject_uuids.append(
        (str(person5.identifier)).replace("niiri:", ""))

    with open("a.ttl", 'w') as f:
        f.write(project.graph.serialize(None, format='rdf', rdf_format='ttl'))
    with open("b.ttl", 'w') as f:
        f.write(project2.graph.serialize(None, format='rdf', rdf_format='ttl'))

    #create empty graph
    graph = Graph()
    for nidm_file in ("a.ttl", "b.ttl"):
        tmp = Graph()
        graph = graph + tmp.parse(nidm_file,
                                  format=util.guess_format(nidm_file))

    graph.serialize(filename, format='turtle')

    os.unlink("a.ttl")
    os.unlink("b.ttl")

    with open(filename, "r") as f:
        x = f.read()

    with open("./agent.ttl", "w") as f:
        f.write(x)
Example #4
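This converter walks a BIDS dataset: dataset_description.json and participants.tsv populate the project and demographics, BIDSLayout locates the imaging files and their JSON sidecars, and phenotype TSV/JSON pairs become assessment acquisitions. The code targets an older pybids API (extensions=, file_tpl.modality), so the import header below is a best guess for that era:

# Presumed imports; BIDSLayout lived in bids.grabbids in older pybids releases (bids.layout today)
import os
import csv
import glob
import json
from argparse import ArgumentParser
from prov.model import PROV_TYPE
from bids.grabbids import BIDSLayout
from nidm.core import Constants, BIDS_Constants
from nidm.experiment import (Project, Session, Acquisition, AcquisitionObject,
                             MRAcquisitionObject, DemographicsAcquisitionObject,
                             AssessmentAcquisitionObject)
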
def main(argv):
    parser = ArgumentParser(
        description='This program will convert a BIDS MRI dataset to a NIDM-Experiment '
                    'RDF document. It will parse phenotype information and simply store '
                    'variables/values and link to the associated json data dictionary file.')

    parser.add_argument('-d',
                        dest='directory',
                        required=True,
                        help="Path to BIDS dataset directory")
    parser.add_argument('-o',
                        dest='outputfile',
                        default="nidm.ttl",
                        help="NIDM output turtle file")
    args = parser.parse_args()

    directory = args.directory
    outputfile = args.outputfile

    #importlib.reload(sys)
    #sys.setdefaultencoding('utf8')

    #Parse dataset_description.json file in BIDS directory
    with open(os.path.join(directory,
                           'dataset_description.json')) as data_file:
        dataset = json.load(data_file)
    #print(dataset)

    #create project / nidm-exp doc
    project = Project()

    #add various attributes if they exist in BIDS dataset
    for key in dataset:
        #print(key)
        #if key from dataset_description file is mapped to term in BIDS_Constants.py then add to NIDM object
        if key in BIDS_Constants.dataset_description:
            if type(dataset[key]) is list:
                project.add_attributes({
                    BIDS_Constants.dataset_description[key]:
                    "".join(dataset[key])
                })
            else:
                project.add_attributes(
                    {BIDS_Constants.dataset_description[key]: dataset[key]})

    #create an empty dictionary for sessions, keyed by subject id; used later to link scans to the same session as the demographics
    session = {}
    #Parse participants.tsv file in BIDS directory and create study and acquisition objects
    with open(os.path.join(directory, 'participants.tsv')) as csvfile:
        participants_data = csv.DictReader(csvfile, delimiter='\t')
        #print(participants_data.fieldnames)
        for row in participants_data:
            #create session object for subject to be used for participant metadata and image data
            #parse subject id from "sub-XXXX" string
            subjid = row['participant_id'].split("-")
            session[subjid[1]] = Session(project)

            #add acquisition object
            acq = Acquisition(session=session[subjid[1]])
            acq_entity = DemographicsAcquisitionObject(acquisition=acq)
            participant = acq.add_person(role=Constants.NIDM_PARTICIPANT,
                                         attributes=({
                                             Constants.NIDM_SUBJECTID:
                                             row['participant_id']
                                         }))

            for key, value in row.items():
                #for now only convert variables in participants.tsv file who have term mappings in BIDS_Constants.py
                if key in BIDS_Constants.participants:
                    acq_entity.add_attributes(
                        {BIDS_Constants.participants[key]: value})

    #get BIDS layout
    bids_layout = BIDSLayout(directory)

    #create acquisition objects for each scan for each subject

    #loop through all subjects in dataset
    for subject_id in bids_layout.get_subjects():
        #skip .git directories...added to support datalad datasets
        if subject_id.startswith("."):
            continue
        for file_tpl in bids_layout.get(subject=subject_id,
                                        extensions=['.nii', '.nii.gz']):
            #create an acquisition activity
            acq = Acquisition(session[subject_id])

            #print(file_tpl.type)
            if file_tpl.modality == 'anat':
                #do something with anatomicals
                acq_obj = MRAcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans[file_tpl.modality]})
                #add file link
                #make relative link to
                acq_obj.add_attributes(
                    {Constants.NIDM_FILENAME: file_tpl.filename})
                #get associated JSON file if exists
                json_data = bids_layout.get_metadata(file_tpl.filename)
                if json_data:
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    ''.join(str(e) for e in json_data[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    json_data[key]
                                })
            elif file_tpl.modality == 'func':
                #do something with functionals
                acq_obj = MRAcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans[file_tpl.modality]})
                #add file link
                acq_obj.add_attributes(
                    {Constants.NIDM_FILENAME: file_tpl.filename})
                if 'run' in file_tpl._fields:
                    acq_obj.add_attributes(
                        {BIDS_Constants.json_keys["run"]: file_tpl.run})

                #get associated JSON file if exists
                json_data = bids_layout.get_metadata(file_tpl.filename)

                if json_data:
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    ''.join(str(e) for e in json_data[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    json_data[key]
                                })

                #get associated events TSV file
                if 'run' in file_tpl._fields:
                    events_file = bids_layout.get(subject=subject_id,
                                                  extensions=['.tsv'],
                                                  modality=file_tpl.modality,
                                                  task=file_tpl.task,
                                                  run=file_tpl.run)
                else:
                    events_file = bids_layout.get(subject=subject_id,
                                                  extensions=['.tsv'],
                                                  modality=file_tpl.modality,
                                                  task=file_tpl.task)
                #if there is an events file then this is task-based so create an acquisition object for the task file and link
                if events_file:
                    #for now create acquisition object and link it to the associated scan
                    events_obj = AcquisitionObject(acq)
                    #add prov type, task name as prov:label, and link to filename of events file
                    events_obj.add_attributes({
                        PROV_TYPE:
                        Constants.NIDM_MRI_BOLD_EVENTS,
                        BIDS_Constants.json_keys["TaskName"]:
                        json_data["TaskName"],
                        Constants.NFO["filename"]:
                        events_file[0].filename
                    })
                    #link it to appropriate MR acquisition entity
                    events_obj.wasAttributedTo(acq_obj)

            elif file_tpl.modality == 'dwi':
                #do stuff with with dwi scans...
                acq_obj = MRAcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans[file_tpl.modality]})
                #add file link
                acq_obj.add_attributes(
                    {Constants.NIDM_FILENAME: file_tpl.filename})
                if 'run' in file_tpl._fields:
                    acq_obj.add_attributes(
                        {BIDS_Constants.json_keys["run"]: file_tpl.run})
                    #get associated JSON file if exists
                json_data = bids_layout.get_metadata(file_tpl.filename)

                if json_data:
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    ''.join(str(e) for e in json_data[key])
                                })
                            else:
                                acq_obj.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    json_data[key]
                                })

                #for bval and bvec files, what to do with those?
                #for now, create new generic acquisition objects, link the files, and associate with the one for the DWI scan?
                acq_obj_bval = AcquisitionObject(acq)
                acq_obj_bval.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bval"]})
                #add file link to bval files
                acq_obj_bval.add_attributes({
                    Constants.NIDM_FILENAME:
                    bids_layout.get_bval(file_tpl.filename)
                })
                acq_obj_bvec = AcquisitionObject(acq)
                acq_obj_bvec.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bvec"]})
                #add file link to bvec files
                acq_obj_bvec.add_attributes({
                    Constants.NIDM_FILENAME:
                    bids_layout.get_bvec(file_tpl.filename)
                })
                #link bval and bvec acquisition object entities together or is their association with enclosing activity enough?

        #Added temporarily to support phenotype files
        #for each *.tsv / *.json file pair in the phenotypes directory
        for tsv_file in glob.glob(os.path.join(directory, "phenotype",
                                               "*.tsv")):
            #for now, open the TSV file, extract the row for this subject, store it in an acquisition object and link to
            #the associated JSON data dictionary file
            with open(tsv_file) as phenofile:
                pheno_data = csv.DictReader(phenofile, delimiter='\t')
                for row in pheno_data:
                    subjid = row['participant_id'].split("-")
                    if not subjid[1] == subject_id:
                        continue
                    else:
                        #add acquisition object
                        acq = Acquisition(session=session[subjid[1]])
                        acq_entity = AssessmentAcquisitionObject(
                            acquisition=acq)
                        participant = acq.add_person(
                            role=Constants.NIDM_PARTICIPANT,
                            attributes=({
                                Constants.NIDM_SUBJECTID:
                                row['participant_id']
                            }))

                        for key, value in row.items():
                            if not key == "participant_id":
                                #for now we're using a placeholder namespace for BIDS and simply the variable names as the concept IDs..
                                acq_entity.add_attributes(
                                    {Constants.BIDS[key]: value})

                        #link TSV file
                        acq_entity.add_attributes(
                            {Constants.NIDM_FILENAME: tsv_file})
                        #link associated JSON file if it exists
                        data_dict = os.path.join(
                            directory, "phenotype",
                            os.path.splitext(os.path.basename(tsv_file))[0] +
                            ".json")
                        if os.path.isfile(data_dict):
                            acq_entity.add_attributes(
                                {Constants.BIDS["data_dictionary"]: data_dict})

    #serialize graph
    #print(project.graph.get_provn())
    with open(outputfile, 'w') as f:
        f.write(project.serializeTurtle())
        #f.write(project.graph.get_provn())
    #save a DOT graph as PNG
    project.save_DotGraph(str(outputfile + ".png"), format="png")
Example #5
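A close variant of Example #4 with the same BIDS-to-NIDM flow, but it reads the sidecar JSON files directly, stores filenames under Constants.NFO["filename"], and reuses a single acq_obj for the bval/bvec entities. The presumed imports mirror Example #4, minus glob and the assessment class:

# Presumed imports (same module-path assumptions as in Example #4)
import csv
import json
from argparse import ArgumentParser
from prov.model import PROV_TYPE
from bids.grabbids import BIDSLayout
from nidm.core import Constants, BIDS_Constants
from nidm.experiment import (Project, Session, Acquisition, AcquisitionObject,
                             MRAcquisitionObject, DemographicsAcquisitionObject)
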
def main(argv):
    parser = ArgumentParser()

    parser.add_argument('-d',
                        dest='directory',
                        required=True,
                        help="Path to BIDS dataset directory")
    parser.add_argument('-o',
                        dest='outputfile',
                        default="nidm.ttl",
                        help="NIDM output turtle file")
    args = parser.parse_args()

    directory = args.directory
    outputfile = args.outputfile

    #Parse dataset_description.json file in BIDS directory
    with open(directory + '/' + 'dataset_description.json') as data_file:
        dataset = json.load(data_file)
    #print(dataset)

    #create project / nidm-exp doc
    project = Project()

    #add various attributes if they exist in BIDS dataset
    for key in dataset:
        #print(key)
        #if key from dataset_description file is mapped to term in BIDS_Constants.py then add to NIDM object
        if key in BIDS_Constants.dataset_description:
            if type(dataset[key]) is list:
                project.add_attributes({
                    BIDS_Constants.dataset_description[key]:
                    "".join(dataset[key])
                })
            else:
                project.add_attributes(
                    {BIDS_Constants.dataset_description[key]: dataset[key]})

    #create an empty dictionary for sessions, keyed by subject id; used later to link scans to the same session as the demographics
    session = {}
    #Parse participants.tsv file in BIDS directory and create study and acquisition objects
    with open(directory + '/' + 'participants.tsv') as csvfile:
        participants_data = csv.DictReader(csvfile, delimiter='\t')
        #print(participants_data.fieldnames)
        for row in participants_data:
            #create session object for subject to be used for participant metadata and image data
            #parse subject id from "sub-XXXX" string
            subjid = row['participant_id'].split("-")
            session[subjid[1]] = Session(project)

            #add acquisition object
            acq = Acquisition(session=session[subjid[1]])
            acq_entity = DemographicsAcquisitionObject(acquisition=acq)
            participant = acq.add_person(role=Constants.NIDM_PARTICIPANT,
                                         attributes=({
                                             Constants.NIDM_SUBJECTID:
                                             row['participant_id']
                                         }))

            for key, value in row.items():
                #for now only convert variables in participants.tsv file who have term mappings in BIDS_Constants.py
                if key in BIDS_Constants.participants:
                    acq_entity.add_attributes(
                        {BIDS_Constants.participants[key]: value})

    #get BIDS layout
    bids_layout = BIDSLayout(directory)

    #create acquisition objects for each scan for each subject

    #loop through all subjects in dataset
    for subject_id in bids_layout.get_subjects():
        for file_tpl in bids_layout.get(subject=subject_id,
                                        extensions=['.nii', '.nii.gz']):
            #create an acquisition activity
            acq = Acquisition(session[subject_id])

            #print(file_tpl.type)
            if file_tpl.modality == 'anat':
                #do something with anatomicals
                acq_obj = MRAcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans[file_tpl.modality]})
                #add file link
                acq_obj.add_attributes(
                    {Constants.NFO["filename"]: file_tpl.filename})
                #get associated JSON file if exists
                for json_file in bids_layout.get(subject=subject_id,
                                                 extensions=['.json'],
                                                 modality=file_tpl.modality):
                    #open json file, grab key-value pairs, map them to terms and add to acquisition object
                    with open(json_file[0]) as data_file:
                        json_data = json.load(data_file)
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            if type(json_data[key]) is list:
                                project.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    "".join(json_data[key])
                                })
                            else:
                                project.add_attributes({
                                    BIDS_Constants.json_keys[key]:
                                    json_data[key]
                                })
                #if we want to do something further if T1w or t2, etc
                #if file_tpl.type == 'T1w':
                #elif file_tpl.type == 'inplaneT2':
            elif file_tpl.modality == 'func':
                #do something with functionals
                acq_obj = MRAcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans[file_tpl.modality]})
                #add file link
                acq_obj.add_attributes({
                    Constants.NFO["filename"]:
                    file_tpl.filename,
                    BIDS_Constants.json_keys["run"]:
                    file_tpl.run
                })
                #add attributes for task description keys from task JSON file
                for task_desc in bids_layout.get(extensions=['.json'],
                                                 task=file_tpl.task):
                    with open(task_desc[0]) as data_file:
                        json_data = json.load(data_file)
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            acq_obj.add_attributes({
                                BIDS_Constants.json_keys[key]:
                                json_data[key]
                            })
                    #get associated events TSV file
                    events_file = bids_layout.get(subject=subject_id,
                                                  extensions=['.tsv'],
                                                  modality=file_tpl.modality,
                                                  task=file_tpl.task,
                                                  run=file_tpl.run)
                    #for now create acquisition object and link it to the associated scan
                    events_obj = AcquisitionObject(acq)
                    #add prov type, task name as prov:label, and link to filename of events file
                    events_obj.add_attributes({
                        PROV_TYPE:
                        Constants.NIDM_MRI_BOLD_EVENTS,
                        BIDS_Constants.json_keys["TaskName"]:
                        json_data["TaskName"],
                        Constants.NFO["filename"]:
                        events_file[0].filename
                    })
                    #link it to appropriate MR acquisition entity
                    events_obj.wasAttributedTo(acq_obj)

            elif file_tpl.modality == 'dwi':
                #do stuff with with dwi scans...
                acq_obj = MRAcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans[file_tpl.modality]})
                #add file link
                acq_obj.add_attributes({
                    Constants.NFO["filename"]:
                    file_tpl.filename,
                    BIDS_Constants.json_keys["run"]:
                    file_tpl.run
                })
                #add attributes for task description keys from task JSON file
                for task_desc in bids_layout.get(extensions=['.json'],
                                                 task=file_tpl.task):
                    with open(task_desc[0]) as data_file:
                        json_data = json.load(data_file)
                    for key in json_data:
                        if key in BIDS_Constants.json_keys:
                            acq_obj.add_attributes({
                                BIDS_Constants.json_keys[key]:
                                json_data[key]
                            })
                #for bval and bvec files, what to do with those?
                #for now, create new generic acquisition objects, link the files, and associate with the one for the DWI scan?
                acq_obj = AcquisitionObject(acq)
                acq_obj.add_attributes(
                    {PROV_TYPE: BIDS_Constants.scans["bval"]})
                for bval in bids_layout.get(extensions=['.bval'],
                                            task=file_tpl.task):
                    #add file link
                    acq_obj.add_attributes(
                        {Constants.NFO["filename"]: bval.filename})
                for bvec in bids_layout.get(extensions=['.bvec'],
                                            task=file_tpl.task):
                    #add file link
                    acq_obj.add_attributes(
                        {Constants.NFO["filename"]: bvec.filename})

    #serialize graph
    #print(project.graph.get_provn())
    with open(outputfile, 'w') as f:
        f.write(project.serializeTurtle())
        #f.write(project.graph.get_provn())
    #save a DOT graph as PNG
    project.save_DotGraph(str(outputfile + ".png"), format="png")