def extract_upload_stats(cursor, url_path, subs_dir=None):
    '''
    Extract stats info from FreeSurfer-generated *.stats files using the
    recon-stats python package and insert each measure into the database.

    Note, for recon-stats to work properly, it needs the SUBJECTS_DIR
    variable to be set in the OS environment, as well as having all of
    the stats data organized as:

        $SUBJECTS_DIR/{sub_id}/stats/*.stats

    Parameters
    ----------
    cursor : OracleCursor
        a cx_Oracle cursor object which is used to query and modify an
        Oracle database
    url_path : string (url)
        URL address of the .stats file to parse and insert into the
        database; the subject ID is extracted from its filename
    subs_dir : string, optional
        filepath to the subjects directory (FreeSurfer directory
        structure); when provided it is exported as $SUBJECTS_DIR, when
        None the pre-existing $SUBJECTS_DIR environment variable is
        relied upon as-is

    Returns
    -------
    None
        The function doesn't return any value, it uploads data to the
        database using the input cursor and url_path
    '''

    # Import packages
    import insert_utils
    import os
    import recon_stats
    import time

    # Point recon-stats at the FreeSurfer subjects directory; when no
    # directory is given, rely on the environment being pre-configured
    if subs_dir is not None:
        os.environ['SUBJECTS_DIR'] = subs_dir

    # Get subject id from url_path
    fname = url_path.split('/')[-1]
    subid = insert_utils.find_subid(fname)
    subkey = fname.split(subid)[0] + subid
    # Stats-file key: the filename portion after the '_stats_' marker
    fkey = fname.split('_stats_')[-1]

    # Use the subject key to create a recon_stats.Subject object
    s = recon_stats.Subject(subkey)
    # Parse through the stats files and get a list of measures
    s.get_measures(fkey)
    mlist = s.measures

    # Get next derivative id to insert
    deriv_id = insert_utils.return_next_pk(cursor)

    # Map the stats filename to its source atlas; check the more
    # specific 'aparc.a2009s.stats' pattern before the generic
    # 'aparc.stats' pattern so ordering can never mis-label a file
    if 'aparc.a2009s.stats' in fname:
        # Destrieux parcellation atlas
        atlas = 'Destrieux Atlas'
    elif 'aparc.stats' in fname:
        # Desikan-Killiany parcellation atlas
        atlas = 'Desikan-Killiany Atlas'
    elif 'BA.stats' in fname:
        # Brodmann areas
        atlas = 'Brodmann Atlas'
    else:
        atlas = ''

    # These are invariant across measures -- look them up once instead
    # of querying the database on every loop iteration
    s3_path = url_path
    datasetid, guid = insert_utils.return_datasetid_guid(cursor, subid)

    # Now iterate through the measures list and insert into db
    for m in mlist:
        # Timestamp
        timestamp = str(time.ctime(time.time()))
        # Get measure info
        roi = ''
        roidesc = m.structure
        name = m.name()
        measure = m.measure
        value = m.value
        units = m.units

        # Insert entry
        # NOTE(review): cmd, pname, ptype, ptools, pver, pdesc, template
        # and strategy are assumed to be module-level globals defined
        # elsewhere in this file -- confirm they exist
        cursor.execute(cmd,
                       col_1=deriv_id,
                       col_2=roi,
                       col_3=pname,
                       col_4=ptype,
                       col_5=ptools,
                       col_6=pver,
                       col_7=pdesc,
                       col_8=name,
                       col_9=measure,
                       col_10=timestamp,
                       col_11=s3_path,
                       col_12=template,
                       col_13=guid,
                       col_14=datasetid,
                       col_15=roidesc,
                       col_16=strategy,
                       col_17=atlas,
                       col_18=value,
                       col_19=units)
        # Commit changes
        cursor.execute('commit')
        deriv_id += 1
        print(deriv_id)
def upload_results(cursor, url_path):
    '''
    Method to insert Freesurfer pipeline data from files
    into the abide_img_results table in miNDAR

    Parameters
    ----------
    cursor : OracleCursor
        a cx_Oracle cursor object which is used to query and modify an
        Oracle database
    url_path : string (url)
        URL address of the .dat file to parse and insert into miNDAR

    Returns
    -------
    None
        The function doesn't return any value, it uploads data to the
        database using the input cursor and url_path
    '''

    # Import packages
    import insert_utils
    import os
    import time

    # If it's a stats file, parse it with recon-stats and upload
    if url_path.endswith('.stats'):
        # BUGFIX: extract_upload_stats requires a subjects-directory
        # argument; the original two-argument call raised a TypeError.
        # Pass the already-configured $SUBJECTS_DIR (recon-stats needs
        # it set in the environment regardless -- confirm callers
        # export it before reaching this point)
        extract_upload_stats(cursor, url_path, os.environ['SUBJECTS_DIR'])
    # Otherwise, it's a surface file
    else:
        # Init variables
        # Known field values for CIVET pipeline results
        deriv_id = insert_utils.return_next_pk(cursor)
        # S3 path
        s3_path = url_path
        # Get datasetid and guid
        fname = url_path.split('/')[-1]
        sub_id = insert_utils.find_subid(fname)
        datasetid, guid = insert_utils.return_datasetid_guid(cursor, sub_id)

        # Get the specific file's info
        print('Analyzing %s...' % url_path)
        beg, end, name, measure, template, units, split_str = fetch_freesurfer_info(url_path)
        # Timestamp
        timestamp = str(time.ctime(time.time()))
        # Surface files carry no per-ROI values -- leave those fields empty
        value = ''
        roi = ''
        roidesc = ''
        atlas = ''
        # Insert entry
        # NOTE(review): cmd, pname, ptype, ptools, pver, pdesc and
        # strategy are assumed to be module-level globals defined
        # elsewhere in this file -- confirm they exist
        cursor.execute(cmd,
                       col_1=deriv_id,
                       col_2=roi,
                       col_3=pname,
                       col_4=ptype,
                       col_5=ptools,
                       col_6=pver,
                       col_7=pdesc,
                       col_8=name,
                       col_9=measure,
                       col_10=timestamp,
                       col_11=s3_path,
                       col_12=template,
                       col_13=guid,
                       col_14=datasetid,
                       col_15=roidesc,
                       col_16=strategy,
                       col_17=atlas,
                       col_18=value,
                       col_19=units)
        # Commit changes
        cursor.execute('commit')
        print(deriv_id)

    # Print done with that file and return
    print('Done!')
    return