Example #1
def uploaddicom(server, token, dicom_directory):
    """
Create zip archive of dicom files, upload to server. This will check if subject is in server, upload it if not.
Returns parsed xml for dicom object on server. If called on dicom already in server, will return same object as checkdicomexistence.
"""
    common.lout('Asked to upload dicom with path '+dicom_directory+' \n')
    dicom_image=getfirstdicomfile(dicom_directory)
    subject = DicomSubject(dicom_image)
    dicom_parsed_xml = checkdicomexistence(server, token, dicom_image.SeriesInstanceUID)
    subject_parsed_xml =uploadsubject(server, token, subject)
    subject_id = common.get_id_from_parsed_xml(subject_parsed_xml)
    
    if dicom_parsed_xml is None:
        common.lout('DICOM with path '+dicom_directory+' not in database, uploading\n')
        try :
            tmpdname=tempfile.mkdtemp()
            dicom_directory_base=os.path.basename(os.path.normpath(dicom_directory))
            tmpfname = os.sep.join([tmpdname,dicom_directory_base+'.zip'])
            zipfile, dicom_image = zipdicom(dicom_directory,tmpfname)
            # so, now, actually do checkin      
            dicom_doc=dicomdocument(dicom_directory,subject_id,zipfile.filename)
            client=poauth.OAuthClient(token)
            response=client.post(server+'/object.xml',
                                data={'object':dicom_doc.toxml()},
                                files={'file': open(zipfile.filename,'rb')} )
            dicom_parsed_xml=common.parse_xml_from_server(response.text)
        except Exception as e :
            print(e.__repr__())
        finally :
            shutil.rmtree(tmpdname,ignore_errors=True)
    else:
        common.lout('DICOM with path '+dicom_directory+' in database, not uploading\n')
    return dicom_parsed_xml
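A minimal usage sketch follows; the server URL, token helper, and DICOM directory are placeholders for illustration, not values from the original code:

# Hypothetical call to uploaddicom; server, token, and path are illustrative only.
server = 'https://example.org/imagedb'      # assumed server base URL
token = get_oauth_token()                   # assumed helper returning an OAuth token
dicom_xml = uploaddicom(server, token, '/data/subject01/mri_dicom')
if dicom_xml is not None:
    print('DICOM object id:', common.get_id_from_parsed_xml(dicom_xml))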
Example #2
def find_parent_braink(server,token,lfm_hdr_path) :
    parent_bk_files=['dipolesFileName1','dipolesFileName2','geometryFileName','normalsFileName','sensorsFileName']

    parsed_lfmhdr=parselfmheader(lfm_hdr_path)
    idsets=[]
    for p in parent_bk_files :
        bkhash=common.rawfilehash(parsed_lfmhdr[p])
        # now search on these tags ... how to do it ?
        # print bkhash
        querystring = ''.join(
            [server, 
             "/object.xml?key=artifacthash&value=",
             bkhash])
        idlist_curr=[common.get_id_from_parsed_xml(xml) for xml in
                     common.checkexistence(server, token, querystring) ]
        idsets.append( frozenset(idlist_curr) )
    # If I understand correctly, calling reduce with intersection
    # function on list of sets computes intersection of all the sets
    # reduce is a builtin on Python 2; on Python 3 it must be imported from functools
    from functools import reduce
    intersect_ids=reduce(frozenset.intersection,idsets)
    if len(intersect_ids)==0 :
        return None
    if len(intersect_ids)>1 :
        print('Warning : multiple matching BrainK objects found : ')
        for obj_id in intersect_ids :
            print(obj_id)
    return list(intersect_ids)[0]
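The reduce call above folds frozenset.intersection pairwise over the list of id sets, leaving only the ids common to every parent-file query. A self-contained illustration with made-up id values:

from functools import reduce

idsets = [frozenset({'3', '7', '9'}),
          frozenset({'7', '9', '12'}),
          frozenset({'5', '7', '9'})]
# ((s0 & s1) & s2): only ids present in every set survive
common_ids = reduce(frozenset.intersection, idsets)
print(sorted(common_ids))   # ['7', '9']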
Example #3
def upload_nii_with_dicom_reference(server,token,nii_path,reference_dicom_path) :
    """ Extract subject from dicom, upload nii file using this
    subject. Use with care! This routine cannot verify that the nii
    and dicom actually correspond"""   
    if not dicomobject.isvaliddicom(reference_dicom_path) :
        raise Exception('Invalid dicom path ' +reference_dicom_path+' given')

    subject=dicomobject.DicomSubject(dicomobject.getfirstdicomfile(reference_dicom_path))
    # this will add subject to database, if it isn't there already
    subject_parsed_xml=dicomobject.uploadsubject(server,token,subject)
    subject_id=common.get_id_from_parsed_xml(subject_parsed_xml)
    nii_parsed_xml=uploadnii(server,token,nii_path,subject_id)
    return nii_parsed_xml
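A sketch of how this might be called; both paths below are assumptions for illustration only:

# Hypothetical usage; the NIfTI and DICOM paths are placeholders.
nii_xml = upload_nii_with_dicom_reference(
    server, token,
    '/data/subject01/anat.nii.gz',       # NIfTI file to upload (assumed path)
    '/data/subject01/mri_dicom')         # DICOM directory that defines the subject (assumed path)
nii_id = common.get_id_from_parsed_xml(nii_xml)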
Example #4
def uploadbraink(server,token,bkstub,recursive_checkin=False):
    """ compute hash, check against server, if not found, create zipfile and upload"""
    if not isvalidbkstub(bkstub) :
        raise Exception('invalid brainkstub')

    bkhash=brainkhash(bkstub)

    common.lout('Asked to upload braink with stub '+bkstub+' bkhash '+bkhash+'\n')

    # How to handle recursive_checkin if the MRI, CT, and GPS objects do not
    # define a subject? Grab the subject id from the MRI, if possible.
    #
    # As of now, the code will not successfully infer the subject if the MRI
    # is not in the database and it is not a DICOM file.
    #
    # It is possible that the CT could be a DICOM and the subject could be
    # inferred from the CT, but I won't bother to handle this case. To deal
    # with it, the user would need to check in the non-DICOM MRI with some
    # tool that associates a subject id.
    
    braink_server_xml=checkbrainkexistence(server,token,bkhash)
    if braink_server_xml is not None :
        common.lout('braink with hash '+bkhash+' found on server, not uploading\n')
        return braink_server_xml
    else :
        # need to upload 
        common.lout('braink with hash '+bkhash+' not found on server, uploading\n')
        bkparam=parsebkstub(bkstub)
        parentdataids={}
        subject_id = None
        
        if 'MRI_file' in bkparam :
            mri_path = bkparam['MRI_file'][1]
            mri_parsed_xml = common.get_database_xml_from_path(server,token,mri_path)

            if mri_parsed_xml is None :
                if recursive_checkin :
                    # todo : this should be upload_path, not uploaddicom
                    # mri image may not be a dicom directory
                    mri_parsed_xml = dicomobject.upload_image_from_path(server,token,mri_path)
                    if mri_parsed_xml is None :
                        raise Exception('Failed to check in image '+mri_path)                    
                else :
                    raise Exception('MRI at path '+mri_path+' not in database : should check it in')

            mri_id = common.get_id_from_parsed_xml(mri_parsed_xml)
            subject_id = common.get_parents_from_parsed_xml(mri_parsed_xml)[0]
            parentdataids['MRI']=mri_id
                
        if 'CT_file' in bkparam :
            ct_path = bkparam['CT_file'][1]
            ct_parsed_xml= common.get_database_xml_from_path(server,token,ct_path)
            if ct_parsed_xml is None :
                if recursive_checkin :
                    ct_parsed_xml=dicomobject.upload_image_from_path(server,token,ct_path,parent_id=subject_id)
                else:
                    raise Exception('CT at path '+ct_path+' not in database : should check it in')
            ct_id=common.get_id_from_parsed_xml(ct_parsed_xml)
            parentdataids['CT']=ct_id
            
        # I have seen parameters of form GPS_sensor_file_(CT_2_MRI),
        # I will assume I can find a single key that starts with GPS_sensor_file
        GPS_key = [x for x in bkparam.keys() if x.startswith('GPS_sensor_file') and len(bkparam[x])==2]
        if len(GPS_key)>1 :
            raise Exception('Unexpected number of GPS_sensor_file parameters found')
        if len(GPS_key)>0 :
            gps_path = bkparam[GPS_key[0]][1]
            gps_parsed_xml = common.get_database_xml_from_path(server,token,gps_path)
            if gps_parsed_xml is None :
                if recursive_checkin :
                    gps_parsed_xml=gpsobject.uploadgps(server,token,gps_path,subject_id=subject_id)
                else :
                    raise Exception('GPS at path '+gps_path+' not in database : should check it in')
            gps_id=common.get_id_from_parsed_xml(gps_parsed_xml)
            parentdataids['GPS']=gps_id
        
        # todo :   
        # similar code for CT_file, GPS_file (or whatever they get called by BrainK)
        tmpdname=tempfile.mkdtemp()
        bkhead,bktail=os.path.split(bkstub)
        zipfilename = os.sep.join([tmpdname,bktail+'-bkout.zip'])
        try :
            print('creating braink zipfile ...', end='', flush=True)
            zfile=zipbkstub(bkstub,zipfilename)
            print()
            brainkdoc=brainkdocument(bkstub,parentdataids,zipfilename)
            client=poauth.OAuthClient(token)
            response=client.post(server+'/object.xml',
                             data={'object':brainkdoc.toxml()},
                             files={'file': open(zfile.filename,'rb')} )
            #return etree.fromstring(response.text)
            return common.parse_xml_from_server(response.text)
        finally :
            shutil.rmtree(tmpdname,ignore_errors=True)
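A sketch of driving the whole BrainK check-in; the stub path is an assumption, and recursive_checkin=True asks the routine to upload missing MRI/CT/GPS parents as described in the comments above:

# Hypothetical usage of uploadbraink; the stub path is a placeholder.
bk_xml = uploadbraink(server, token, '/data/subject01/braink/subject01',
                      recursive_checkin=True)
print('BrainK object id:', common.get_id_from_parsed_xml(bk_xml))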