Example #1
0
def compute_func_values(cell, theta, model, fobj, f, options, istrain):
    '''
    Evaluate a fitted model: objective value, correlation, explained variance.

    Outputs
    -------
    fun (double):
        objective value

    cc (double):
        correlation coefficient

    ev (double):
        explained variance
    '''
    # Fetch data. For a test evaluation, force cross-validation off while
    # leaving the caller's options dictionary untouched (copy-with-override).
    if istrain:
        data = ccls.get_data(cell, model, options)
    else:
        no_crossval = {k: (False if k == 'crossval' else v)
                       for k, v in options.items()}
        data = ccls.get_data(cell, model, no_crossval)

    # NOTE: these two writes deliberately mutate the caller's options dict,
    # matching the original behavior (downstream code reads them back).
    options['basis'] = lnks.LinearFilterBasis_8param()
    options['stim'] = data[0] - np.mean(data[0])

    # Objective value; when gradients are enabled fobj also returns the
    # gradient, which is discarded here.
    if options['is_grad']:
        fun, _grad = fobj(theta, data[0], data[1], options)
    else:
        fun = fobj(theta, data[0], data[1], options)

    # Model prediction. The LNKS_MP model returns (membrane potential,
    # estimate) and its target lives at data[1][1]; others return the
    # estimate alone with the target at data[1].
    if model.lower() == 'lnks_mp':
        y = data[1][1]
        _mp, y_est = f(theta, data[0], options)
    else:
        y = data[1]
        y_est = f(theta, data[0], options)

    # Training statistics use the whole trace; test statistics only the
    # final 20000 samples.
    window = slice(None) if istrain else slice(-20000, None)
    cc = stats.corrcoef(y[window], y_est[window])
    ev = stats.variance_explained(y[window], y_est[window])

    return fun, cc, ev
Example #2
0
def fit(cell_num, model, objective, init_num, num_optims, options):
    '''
    Fit one cell

    Inputs
    ------
    cell_num (string) : cell number
    model (string) : type of model optimized
    objective (string): type of objective function optimized
    init_num (string) : initial parameter of model
    num_optims (int):
        Number of optimization repeated.
        One optimization is MAX_ITER iteration(step gradient).
        Total iteration would be Tot_Iter = MAX_ITER * num_optims
        This way, the optimization process can keep track of intermediate
        cost values, cross-validation(test values) values, and other intermediate
        statistics.
        For each optimization, results are saved
    options (dictionary)
        pathway (int) : number of pathways for LNK or LNKS model (1 or 2)
        crossval (bool): cross-validation 'True' or 'False'
        is_grad (bool): gradient On(True) or Off(False)

    Returns
    -------
    cell : the cell object, after optimization (cell.fit mutates it)

    Optimization
    ------------
        Using the objective function fobj, model function f, and initial parameter theta_init,
        optimize the model and get results, using the method of the cell class.
        cellclass.py module is assumed to be available and in the PYTHONPATH
        optimizationtools.py is assumed to be available and in the PYTHONPATH
        cell.fit: (function, method)
    '''

    # load cell data
    cell = ldt.loadcell(cell_num)

    # select type of model, objective, boundary function
    f = models[model]
    fobj = objectives[objective]
    bnds = bounds[model]
    bound = bnds(pathway=options['pathway'])

    # get initials
    theta_init = get_initial(model, init_num)

    # For the case of Spiking_est model(using LNK output as an input to Spiking model)
    # need to compute v_est in advance and put it in to the cell class.
    # crossval is temporarily disabled so v_est covers the full data, then restored.
    if model.lower() == 'spiking_est':
        theta_init_LNK = theta_init[0]

        crossval = options['crossval']
        options['crossval'] = False
        data = ccls.get_data(cell, 'LNK', options)
        cell.v_est = lnks.LNK_f(theta_init_LNK, data[0], options['pathway'])

        options['crossval'] = crossval
        theta_init = theta_init[1]

    # Row 0 holds the initial parameters; row i holds the result of the
    # i-th optimization round. (np.zeros already returns an ndarray; the
    # original's extra np.array() wrapper was redundant.)
    thetas = np.zeros((num_optims + 1, theta_init.size))
    thetas[0, :] = theta_init

    # compute initial objective value and correlation coefficient
    fun_train, cc_train, evar_train = compute_func_values(cell, theta_init, model, fobj, f, options, True)
    fun_test, cc_test, evar_test = compute_func_values(cell, theta_init, model, fobj, f, options, False)

    # Run Optimization
    print("%15s %17s %17s %17s %17s %17s %17s" %("Process(%)","Update-Time(sec)","funs",
                                            "corrcoef(train)","var-expl(train)",
                                            "corrcoef(test)", "var-expl(test)"))
    print("%15.2f %17.2f %17.5f %17.5f %17.5f %17.5f %17.5f" %(0, 0, fun_train,cc_train,evar_train,cc_test,evar_test))

    # Guard: if num_optims == 0 the loop never runs; the original would then
    # crash on an unbound `theta` at save_results. Warm-start from theta_init.
    theta = theta_init

    for i in range(1, num_optims+1):
        t0 = time.time()
        cell.fit(fobj, f, theta_init, model, bound, options)
        t1 = time.time()

        # train result
        theta, fun_train, cc_train, evar_train = get_results(cell)
        thetas[i, :] = theta
        # test result
        fun_test, cc_test, evar_test = compute_func_values(cell, theta, model, fobj, f, options, False)
        # warm-start the next optimization round from the latest estimate
        theta_init = theta

        print("%15.2f %17.2f %17.5f %17.5f %17.5f %17.5f %17.5f" %( (i/num_optims * 100),(t1-t0),fun_train,cc_train,evar_train,cc_test,evar_test))

    print("\n")
    save_results(cell, cell_num, model, init_num, theta, fun_train, cc_train, evar_train, fun_test, cc_test, evar_test)

    # BUG FIX: the original tested `model.lower in ['Spiking','Spiking_est']`,
    # comparing the *bound method object* (missing call parentheses) against
    # capitalized strings — always False, so the spiking branch never ran.
    # Call .lower() and compare against lowercase names, consistent with the
    # `model.lower() == 'spiking_est'` check above.
    if model.lower() in ('spiking', 'spiking_est'):
        np.savetxt(cell_num + '_' + init_num[1] + '_theta.csv', thetas, delimiter=",")
    else:
        np.savetxt(cell_num + '_' + init_num[0] + '_theta.csv', thetas, delimiter=",")

    return cell