def upload_results(cursor, url_path):
    '''
    Insert Freesurfer pipeline data from files into the
    abide_img_results table in miNDAR.

    Parameters
    ----------
    cursor : OracleCursor
        a cx_Oracle cursor object which is used to query and modify an
        Oracle database
    url_path : string (url)
        URL address of the file to parse and insert into miNDAR

    Returns
    -------
    None
        The function doesn't return any value, it uploads data to the
        database using the input cursor and url_path
    '''

    # Import packages
    import insert_utils
    import time

    # If it's a stats file, delegate to the stats extractor
    if url_path.endswith('.stats'):
        # NOTE(review): extract_upload_stats also takes a subs_dir
        # argument; this call passes only two -- confirm a default
        # value exists for subs_dir
        extract_upload_stats(cursor, url_path)
    # Otherwise, it's a surface file
    else:
        # Init variables
        # Next primary key to use for this Freesurfer derivative entry
        deriv_id = insert_utils.return_next_pk(cursor)
        # S3 path of the source file is stored alongside the entry
        s3_path = url_path
        # Get datasetid and guid from the subject id embedded in the filename
        fname = url_path.split('/')[-1]
        sub_id = insert_utils.find_subid(fname)
        datasetid, guid = insert_utils.return_datasetid_guid(cursor, sub_id)
        # Get the specific file's info
        print('Analyzing %s...' % url_path)
        beg, end, name, measure, template, units, split_str = \
            fetch_freesurfer_info(url_path)
        # Timestamp of the insert
        timestamp = str(time.ctime(time.time()))
        # ROI-level fields are left empty for surface files
        value = ''
        roi = ''
        roidesc = ''
        atlas = ''
        # Insert entry
        # NOTE(review): cmd, pname, ptype, ptools, pver, pdesc and
        # strategy are not defined in this function -- presumably
        # module-level globals; verify they exist in this module
        cursor.execute(cmd,
                       col_1=deriv_id, col_2=roi, col_3=pname,
                       col_4=ptype, col_5=ptools, col_6=pver,
                       col_7=pdesc, col_8=name, col_9=measure,
                       col_10=timestamp, col_11=s3_path,
                       col_12=template, col_13=guid, col_14=datasetid,
                       col_15=roidesc, col_16=strategy, col_17=atlas,
                       col_18=value, col_19=units)
        # Commit changes
        cursor.execute('commit')
        print(deriv_id)

    # Print done with that file and return
    print('Done!')
    return
def extract_upload_stats(cursor, url_path, subs_dir=None):
    '''
    Use the recon-stats python package to extract stats info from the
    Freesurfer-generated .stats files and upload each measure to the
    database.

    Note, for recon-stats to work properly, it needs the SUBJECTS_DIR
    variable to be set in the OS environment, as well as having all of
    the stats data organized as:
    $SUBJECTS_DIR/{sub_id}/stats/*.stats

    Parameters
    ----------
    cursor : OracleCursor
        a cx_Oracle cursor object which is used to query and modify an
        Oracle database
    url_path : string (url)
        URL address of the .stats file to parse and insert into miNDAR
    subs_dir : string (filepath), optional
        filepath to the subjects directory (Freesurfer directory
        structure); when provided it is exported as SUBJECTS_DIR,
        otherwise the existing environment value is used

    Returns
    -------
    None
        The function doesn't return any value, it uploads data to the
        database using the input cursor and url_path
    '''

    # Import packages
    import insert_utils
    import os
    import recon_stats
    import time

    # Init variables
    # Only override SUBJECTS_DIR when a directory was explicitly given,
    # so two-argument callers can rely on the pre-set environment
    if subs_dir is not None:
        os.environ['SUBJECTS_DIR'] = subs_dir

    # Get subject id from url_path
    fname = url_path.split('/')[-1]
    subid = insert_utils.find_subid(fname)
    subkey = fname.split(subid)[0] + subid
    # Get filename key (portion after the '_stats_' marker)
    fkey = fname.split('_stats_')[-1]

    # Use the subid to create a recon_stats.Subject object
    s = recon_stats.Subject(subkey)

    # Parse through the stats files and get a list of measures
    s.get_measures(fkey)
    mlist = s.measures

    # Get next derivative id to insert
    deriv_id = insert_utils.return_next_pk(cursor)

    # Map the stats filename to the atlas it was generated from;
    # most specific pattern is tested first
    if 'aparc.a2009s.stats' in fname:
        atlas = 'Destrieux Atlas'
    elif 'aparc.stats' in fname:
        atlas = 'Desikan-Killiany Atlas'
    elif 'BA.stats' in fname:
        atlas = 'Brodmann Atlas'
    else:
        atlas = ''

    # Now iterate through the measures list and insert into db
    for m in mlist:
        # Timestamp of the insert
        timestamp = str(time.ctime(time.time()))
        # Get measure info
        roi = ''
        roidesc = m.structure
        name = m.name()
        measure = m.measure
        s3_path = url_path
        datasetid, guid = insert_utils.return_datasetid_guid(cursor, subid)
        value = m.value
        units = m.units
        # Insert entry
        # NOTE(review): cmd, pname, ptype, ptools, pver, pdesc,
        # template and strategy are not defined in this function --
        # presumably module-level globals; verify they exist
        cursor.execute(cmd,
                       col_1=deriv_id, col_2=roi, col_3=pname,
                       col_4=ptype, col_5=ptools, col_6=pver,
                       col_7=pdesc, col_8=name, col_9=measure,
                       col_10=timestamp, col_11=s3_path,
                       col_12=template, col_13=guid, col_14=datasetid,
                       col_15=roidesc, col_16=strategy, col_17=atlas,
                       col_18=value, col_19=units)
        # Commit changes
        cursor.execute('commit')
        # Advance to the next unique pk id for the next measure
        deriv_id += 1
        print(deriv_id)
def transfer_table_entries(creds_path):
    '''
    Transfer all of the ABIDE subjects results in the
    DERIVATIVES_UNORMD and IMG_DERIVATIVES_UNORMD tables to the
    ABIDE_IMG_RESULTS table.

    Parameters
    ----------
    creds_path : string (filepath)
        path to the csv file with 'Access Key Id' as the header and the
        corresponding ASCII text for the key underneath; same with the
        'Secret Access Key' string and ASCII text

    Returns
    -------
    None
        This function does not return any value. It transfers table
        entries in an Oracle database.
    '''

    # Import packages
    import time
    import insert_utils
    import fetch_creds

    # Init variables
    # The cursor must be created before it can be used to look up the
    # next primary key (the original ordering used cursor before it
    # existed)
    cursor = fetch_creds.return_cursor(creds_path)
    deriv_id = insert_utils.return_next_pk(cursor, 'ABIDE_IMG_RESULTS')
    template = 'OASIS-30 Atropos Template'

    # Get ACT img derivatives from img_derivatives_unormd
    imgs_get = ''' select pipelinename, pipelinetype, pipelinetools, pipelineversion, pipelinedescription, name, measurename, guid, datasetid, roidescription, roi, template, s3_path, cfgfilelocation from img_derivatives_unormd where instr(datasetid, :arg_1) > 0 '''
    # Get ROI derivatives from DERIVATIVES_UNORMD
    rois_get = ''' select pipelinename, pipelinetype, pipelinetools, pipelineversion, pipelinedescription, derivativename, measurename, guid, datasetid, roidescription, roi, template, value, units, cfgfilelocation from derivatives_unormd where instr(datasetid, :arg_1) > 0 '''
    # Insert entries into ABIDE_IMG_RESULTS
    air_put = ''' insert into abide_img_results (id, pipelinename, pipelinetype, pipelinetools, pipelineversion, pipelinedescription, name, measurename, timestamp, guid, datasetid, roidescription, roi, atlas, value, units, s3_path, template, cfgfilelocation) values (:col_1, :col_2, :col_3, :col_4, :col_5, :col_6, :col_7, :col_8, :col_9, :col_10, :col_11, :col_12, :col_13, :col_14, :col_15, :col_16, :col_17, :col_18, :col_19) '''

    # Get abide results from derivatives_unormd (ABIDE id's have an 'a' in them)
    cursor.execute(rois_get, arg_1='a')
    roi_entries = cursor.fetchall()
    print('Found %d roi results, inserting into ABIDE table' % len(roi_entries))

    # For each ROI entry, copy its fields over to ABIDE_IMG_RESULTS
    for entry in roi_entries:
        # Extract field values from entry result
        pname = entry[0]
        ptype = entry[1]
        ptools = entry[2]
        pver = entry[3]
        pdesc = entry[4]
        dname = entry[5]
        mname = entry[6]
        guid = entry[7]
        datasetid = entry[8]
        roidesc = entry[9]
        roi = entry[10]
        # The source table's template column is stored as the atlas
        atlas = entry[11]
        value = entry[12]
        units = entry[13]
        cfgfile = entry[14]
        # Timestamp of the insert
        timestamp = str(time.ctime(time.time()))
        # Make s3 path (the ROI query does not return one)
        s3_path = make_roi_s3(cursor, datasetid)
        # And insert all of this into ABIDE_IMG_RESULTS
        # (col_12 uses the local roidesc; the original referenced the
        # undefined name 'roidescription')
        cursor.execute(air_put,
                       col_1=deriv_id, col_2=pname, col_3=ptype,
                       col_4=ptools, col_5=pver, col_6=pdesc,
                       col_7=dname, col_8=mname, col_9=timestamp,
                       col_10=guid, col_11=datasetid, col_12=roidesc,
                       col_13=roi, col_14=atlas, col_15=value,
                       col_16=units, col_17=s3_path, col_18=template,
                       col_19=cfgfile)
        # Commit changes
        cursor.execute('commit')
        # Increment to next unique pk id
        deriv_id += 1
        print(deriv_id)

    # Get abide results from img_derivatives_unormd (ABIDE id's have an 'a' in them)
    cursor.execute(imgs_get, arg_1='a')
    img_entries = cursor.fetchall()
    print('Found %d image results, inserting into ABIDE table' % len(img_entries))

    # For each IMG entry, copy its fields over to ABIDE_IMG_RESULTS
    for entry in img_entries:
        # Extract field values from entry result
        pname = entry[0]
        ptype = entry[1]
        ptools = entry[2]
        pver = entry[3]
        pdesc = entry[4]
        dname = entry[5]
        mname = entry[6]
        guid = entry[7]
        datasetid = entry[8]
        roidesc = entry[9]
        roi = entry[10]
        # Image entries carry their own template and s3_path columns
        template = entry[11]
        s3_path = entry[12]
        cfgfile = entry[13]
        # Timestamp of the insert
        timestamp = str(time.ctime(time.time()))
        # NOTE(review): the original overwrote the fetched s3_path with
        # make_roi_s3(cursor, datasetid) here -- that looks like a
        # copy-paste from the ROI loop (this query already selects
        # s3_path), so the fetched value is kept instead
        # And insert all of this into ABIDE_IMG_RESULTS; image entries
        # have no atlas/value/units
        cursor.execute(air_put,
                       col_1=deriv_id, col_2=pname, col_3=ptype,
                       col_4=ptools, col_5=pver, col_6=pdesc,
                       col_7=dname, col_8=mname, col_9=timestamp,
                       col_10=guid, col_11=datasetid, col_12=roidesc,
                       col_13=roi, col_14='', col_15='', col_16='',
                       col_17=s3_path, col_18=template, col_19=cfgfile)
        # Commit changes
        cursor.execute('commit')
        # Increment to next unique pk id
        deriv_id += 1
        print(deriv_id)