def optimize_GLM( gtol=1e-6 , maxiter=500):
    """Fit the GLM's V1 filters by gradient-based optimization.

    Builds an objective from the global run data ``R`` with V2 fixed to
    zeros and U fixed to the identity, starts from the stacked STAs,
    and returns the optimized 'V1' parameter array.
    """
    obj = objV1_data(R, obj_V1, v2=np.zeros(Ncones), u=np.eye(Ncones))
    minimize = optimize.optimizer(obj)
    start = {'V1': np.vstack(R['statistics']['stimulus']['STA'])}
    flat = minimize(init_params=start, maxiter=maxiter, gtol=gtol)
    return obj.unflat(flat)['V1']
Exemple #2
0
def MaxLike(init_params,data):
    '''Maximum Likelihood optimization of parameters theta and M.

    init_params : list of per-RGC parameter dicts; init_params[0] also
                  serves as the template for compiling the objective.
    data        : list of per-RGC dicts of fixed arguments (STA, STC, ...)
                  bound into the objective via objective.where.
    Returns the list of optimized parameter dicts (unflattened).
    '''
    print
    print
    print 'Starting Max-Likelihood optimization in theta and M, compiling...'
    # Compile symbolic objectives into numerical objective.
    objective = kolia_theano.Objective(init_params=init_params[0],differentiate=['f'],
                                       callback=callback,
                                       f=quadratic_Poisson, barrier=eig_barrier)
    # Set fixed arguments STA and STC in optimizer object.
    optimizers = [ optimize.optimizer( objective.where(**dat) ) for dat in data ]
    
    # Maximize likelihood separately for each RGC; run the optimization twice just to be sure.
    params = init_params
    for i in range(2):
        params = [opt(init_params=par) for opt,par in zip(optimizers,params)]
        
    # Unpack flat parameters into meaningful dictionary.
    params = [objective.unflat(par) for par in params]
    
    # print norm of inferred and true theta
    # NOTE(review): U is a module-level global (true subunit matrix) --
    # confirm it is in scope when this runs.
    optU = [param['theta'] for param in params]
    print
    print 'true    ||theta||^2  : ', np.sum(U*U,axis=1)
    print 'optimal ||theta||^2  : ', [np.sum(optu*optu) for optu in optU]
    print
    print
    return params
Exemple #3
0
def optimize_u( v1, init_u, v2 , T, gtol=1e-7 , maxiter=500):
    """Optimize the shared cone weights 'u' across all RGCs.

    Sums one objU_data term per RGC (all over the same flat parameter
    'u') and minimizes the total with a gradient-based optimizer.

    v1, v2, T : fixed model quantities bound into each per-RGC term.
    init_u    : initial value of the 'u' parameter array.
    Returns the optimized 'u' array.
    """
    objective = kb.Sum_objective( [objU_data( ustats[rgc_type] , v1 , v2 , T , i )
                                   for i in range(NRGC)] )
    optimizer = optimize.optimizer( objective )
    # debug_here()
    params = optimizer(init_params={'u': init_u },maxiter=maxiter,gtol=gtol)
    # Fix: unflatten with the objective that was actually optimized, not the
    # undefined/unrelated `single_objective` (consistent with the other
    # optimize_* helpers in this file, which all call objective.unflat).
    opt_u = objective.unflat(params)
    return opt_u['u']
Exemple #4
0
def optimize_UV1( objective, v1,u,v2 ):
    """Jointly optimize V1 and U with V2 held fixed.

    Binds the run statistics (stacked STAs/STCs) and model constants into
    `objective`, minimizes starting from (v1, u), and returns the
    unflattened optimal parameter dict.
    """
    stas = np.vstack(R['statistics']['stimulus']['STA'])
    stcs = np.vstack([stc[np.newaxis, :] for stc in R['statistics']['stimulus']['STC']])
    fixed = {'STAs': stas, 'STCs': stcs,
             'V2': v2, 'N': NRGC, 'N_spikes': R['N_spikes'],
             'Ncones': Ncones, 'centers': centers, 'indices': indices, 'lam': lam}
    minimize = optimize.optimizer(objective.where(**fixed).with_callback(callback))
    flat = minimize(init_params={'V1': v1, 'U': u}, maxiter=10000, gtol=1.1e-7)
    return objective.unflat(flat)
Exemple #5
0
def optimize_U( objective, v1,init_U, v2  ):
    """Optimize U with V1 and V2 held fixed; return the optimal 'U' array."""
    stas = np.vstack(R['statistics']['stimulus']['STA'])
    stcs = np.vstack([stc[np.newaxis, :] for stc in R['statistics']['stimulus']['STC']])
    fixed = {'STAs': stas, 'STCs': stcs,
             'V2': v2, 'V1': v1, 'N': NRGC, 'N_spikes': R['N_spikes']}
    minimize = optimize.optimizer(objective.where(**fixed).with_callback(callback))
    flat = minimize(init_params={'U': init_U}, maxiter=2000, gtol=1.1e-6)
    return objective.unflat(flat)['U']
def optimize_V1( u,v2, T, init=V1 , gtol=1e-4 , maxiter=100):
    """Optimize V1 with u, V2 and T held fixed; return the optimal 'V1'.

    `init` defaults to the module-level V1 (captured at definition time).
    """
    stas = np.vstack(R['statistics']['stimulus']['STA'])
    stcs = np.vstack([stc[np.newaxis, :] for stc in R['statistics']['stimulus']['STC']])
    fixed = {'STAs': stas, 'STCs': stcs,
             'V2': v2, 'u': u, 'T': T, 'N': NRGC, 'N_spikes': R['N_spikes']}
    bound = obj_V1u.where(**fixed).with_callback(callback)
    minimize = optimize.optimizer(bound)
    flat = minimize(init_params={'V1': init}, maxiter=maxiter, gtol=gtol)
    return bound.unflat(flat)['V1']
Exemple #7
0
    def optimize_L2(X, Y, mu, **optimize_params):
        """Proximal step: minimize f(x) + ||x - Y||^2 / (2*mu), starting from X.

        Attaches L2-penalized value/gradient handles to the enclosing
        `objective` and runs the enclosing `optimizer` on them by name.
        `objective`, `optimizer`, `f`, `df` and `gtol` are closed over
        from the enclosing scope.
        """
        def L2_f(x, *_):
            # Objective value plus quadratic penalty tying x to Y.
            return getattr(objective, f)(x) + 0.5 * sum((x - Y) * (x - Y)) / mu

        def L2_df(x, *_):
            # Gradient of the penalized objective.
            return getattr(objective, df)(x) + (x - Y) / mu

        # Attribute names must match the f=/df= strings passed below,
        # since the optimizer looks the callables up by name.
        objective.L2_f = L2_f
        objective.L2_df = L2_df
        opter = optimizer(
            objective, init_params=X, f="L2_f", df="L2_df", maxiter=100, gtol=gtol, disp=0, **optimize_params
        )
        return opter()
Exemple #8
0
def main():
    """
    Main function (Python 2): read the review dataset, then run Gibbs-EM --
    E-step: Gibbs sampling over the rating matrix; M-step: repeated calls
    to the module-level optimizer -- and print predicted ratings.
    """
    # Download data for NLTK if not already done
    #nltk.download('all')

    # Read 
    imdb = Indexer()
    imdb_file = 'data/data.json'
    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
    logging.info('Reading file %s' % imdb_file)
    imdb.read_file(imdb_file)
    logging.info('File %s read' % imdb_file)
    (vocab_size, user_list, movie_list, \
    rating_matrix, review_matrix, review_map) = imdb.get_mappings()

    # Get number of users and movies
    Users = len(user_list)
    Movies = len(movie_list)
    logging.info('No. of users U = %d' % Users)
    logging.info('No. of movies M = %d' % Movies)

    # Run Gibbs EM
    # NOTE(review): MAX_ITER, MAX_OPT_ITER, A and optimizer are module-level
    # globals -- confirm they are defined before main() is called.
    for it in xrange(1,MAX_ITER+1):
        logging.info('Running iteration %d of Gibbs EM' % it)
        logging.info('Running E-Step - Gibbs Sampling')
        gibbs_sampler = GibbsSampler(5,A,2)
        gibbs_sampler.run(rating_matrix)
        logging.info('Running M-Step - Gradient Descent')
        for i in xrange(1,MAX_OPT_ITER+1):
            optimizer()

    # Output Predicted Ratings
    # NOTE(review): U and M here are globals, while the Users/Movies counts
    # computed above go unused -- these loops look like they should range
    # over Users/Movies; verify against the module definitions.
    for u in range(U):
        for m in range(M):
            pred_rate = predicted_rating(u, m)
            print "Predicted Rating of user " + str(u) + " and movie " + str(m) + ": " + str(pred_rate)
Exemple #9
0
def optimize_V1( objective, u,v2, lambdas=None , init=None , gtol=1e-4 , maxiter=100):
    """Optimize V1 with U, V2 (and per-subunit lambdas) held fixed.

    When `lambdas` is None a zero regularizer vector is used; when
    `init` is None, V1 starts from small positive random values.
    Returns the optimal 'V1' array.
    """
    if lambdas is None:
        lambdas = np.zeros(v2.size)
    stas = np.vstack(R['statistics']['stimulus']['STA'])
    stcs = np.vstack([stc[np.newaxis, :] for stc in R['statistics']['stimulus']['STC']])
    fixed = {'STAs': stas, 'STCs': stcs,
             'V2': v2, 'U': u, 'N': NRGC, 'N_spikes': R['N_spikes'],
             'Ncones': Ncones, 'centers': centers, 'indices': indices,
             'lam': lam, 'lambdas': lambdas}
    minimize = optimize.optimizer(objective.where(**fixed).with_callback(callback))
    if init is None:
        start = {'V1': 0.001 + 0.01 * Rand.rand(NRGC, u.shape[0])}
    else:
        start = {'V1': init}
    flat = minimize(init_params=start, maxiter=maxiter, gtol=gtol)
    return objective.unflat(flat)['V1']
Exemple #10
0
    def __init__(self,data,Nsub,prior=lambda U:0):
        """Compile Theano functions for the log-posterior of a subunit model.

        Builds a symbolic posterior over (U, V1, V2) -- with theta = U'V1
        and M = (V1*U')(V2*U')' -- plus gradients w.r.t. U, V1 and M, and
        compiles them into numerical functions stored on self.

        data : dataset; data[-2] sizes the model (N = len(data[-2][0]),
               NRGC = len(data[-2])).
        Nsub : number of subunits.
        prior: callable mapping symbolic U to a symbolic prior term
               (currently only used in the commented-out posterior term).
        """
        self.DATA  = data
        self.N     = len(data[-2][0])
        self.Nsub  = Nsub
        self.NRGC  = len(data[-2])
        self.prior = prior
        # Barrier location for the (commented-out) log-det barrier variant.
        self.mindet= -2.
        U    = Th.dmatrix()                   # SYMBOLIC variables     #
        V1   = Th.dvector()                                            #
        V2   = Th.dvector()                                            #
        STA  = Th.dvector()                                            #
        STC  = Th.dmatrix()                                            #
        theta= Th.dot( U.T , V1 )                                      #
        M    = Th.dot( V1 * U.T , (V2 * U.T).T )                       #
        # detM and invM are supplied numerically by the caller and treated
        # as constants when differentiating (see consider_constant below).
        detM = Th.dscalar()
        invM = Th.dmatrix()
        invMtheta = Th.as_tensor_variable(Th.dot(invM,theta),ndim=2)
        prior     = self.prior(U)                                      #

        # Log-posterior (up to constants); the 1/(log detM + 6)^2 term acts
        # as a soft barrier on detM.
        post = (  Th.log(detM) \
#                - 0.01 / (Th.log(detM)-self.mindet) \
                - 1./(Th.log(detM)+6)**2 \
                - Th.sum(invMtheta*theta) \
                + 2. * Th.sum( theta * STA ) \
                + Th.sum( M * (STC + Th.outer(STA,STA)) )) / 2. \
                #+ prior \
                #+ Th.sum(0.001*Th.log(V1)) #+ Th.sum(0.001*Th.log(U))
        # Hand-written derivative of post w.r.t. M (detM/invM held fixed).
        dpost_dM  = ( invM + invMtheta * invMtheta.T \
#                    + 0.01 * invM / ((Th.log(detM)-self.mindet)**2) \
                    + 2. * invM / (Th.log(detM)+6)**3 \
                    ) / 2.

        def dpost(dX):
            # Gradient of post w.r.t. dX minus the gradient flowing through M
            # weighted by dpost_dM, with all numeric inputs held constant.
            return Th.grad( cost = post                   , wrt = dX ,
                            consider_constant=[invM,detM,STC,STA] ) \
                 - Th.grad( cost = Th.sum( dpost_dM * M ) , wrt = dX , 
                            consider_constant=[dpost_dM,STA,STC,invM,invMtheta])

        # Compile the symbolic expressions into callable functions.
        self.M          = function( [    U,V2,V1]                  ,  M       ) #
        self.posterior  = function( [    U,V2,V1,invM,detM,STA,STC],  post    ) #
        self.dpost_dU   = function( [    U,V2,V1,invM,detM,STA,STC], dpost(U) ) #
        self.dpost_dV1  = function( [    U,V2,V1,invM,detM,STA,STC], dpost(V1)) #
#        self.dpost_dV2  = function( [    U,V2,V1,invM,detM,STA,STC], dpost(V2)) #

        self.dpost_dM   = function( [    U,V2,V1,invM,detM,STA,STC], dpost_dM) #

        self.optimize   = optimizer( self )
Exemple #11
0
def _test_LNP( rgc_type='off parasol' ):
    """Fit and evaluate an LNP model for one RGC type.

    Trains sv1 on statistics from run (5,0), evaluates the log-likelihood
    on run (-5,0), saves the model under 'LNP_<rgc_type>' and returns it.
    """
    vardict   = LNP( **thetaM( **linear_reparameterization()))
    init_LNP  = LNP_model( init_sv1(rgc_type) )
    indices = extract( linear_stats( rgc_type, (5,0) ), ['sparse_index', 'subunit_index'] )
    indices['N_subunits'] = len(cones)
    # Only sv1 is optimized; u and V2 are passed as fixed arguments.
    unknown = extract(init_LNP,['sv1'])
    train_LNP = global_objective( unknown, extract(init_LNP,['u','V2']), 
                                  vardict, run=linear_stats( rgc_type, (5,0) ),
                                  indices=indices)
    train_LNP.with_callback(callback)
    train_LNP.description = 'LNP'
    sv1 = optimize.optimizer( train_LNP )( init_params=unknown, maxiter=5000, gtol=1e-7 )
    model = LNP_model( train_LNP.unflat( sv1 )['sv1'] )
    # Held-out log-likelihood from a fresh objective on the test run (-5,0).
    model['LL'] = global_objective( unknown, extract(init_LNP,['u','V2']), 
                             vardict, run=linear_stats( rgc_type, (-5,0)), 
                             indices=indices).LL(sv1)
    save(model,'LNP_'+rgc_type)
    return model
Exemple #12
0
def optimize_u( v1, init_u, v2 , T, gtol=1e-7 , maxiter=500):
   """Optimize cone weights 'u' against the data in the global R."""
   optimizer = optimize.optimizer( objU_data( R , v1 , v2 , T ) )
   # debug_here()
   params = optimizer(init_params={'u': init_u },maxiter=maxiter,gtol=gtol)
   # NOTE(review): unflat is called on the module-level `obj_u`, not on the
   # objective built above -- verify they share the same flattening layout.
   opt_u = obj_u.unflat(params)
   return opt_u['u']
def optimize():
    """HTTP endpoint: run the optimizer on the posted JSON 'data' payload."""
    payload = request.json['data']
    return jsonify({"solution": optimizer(payload)})
Exemple #14
0
# Acquisition function and its parameter dict. This line was commented out,
# but aqfn/aqpara are assigned into and used below, so it is restored to
# avoid a NameError at import time.
aqfn, aqpara = acquisitions.EIMAP
# Search box and fixed evaluation noise.
aqpara["lb"] = [-1.0, -1.0]
aqpara["ub"] = [1.0, 1.0]
aqpara["ev"]["s"] = 1e-12
# Cost function and randomly drawn biased Matern-5/2 objective.
cfn = objectives.cfaexp(1.0, 0.2)
ojf, xmin, ymin = objectives.genbiasedmat52ojf(len(aqpara["lb"]), aqpara["lb"], aqpara["ub"], 1.0)
ojfn = objectives.costfnwrap(ojf, cfn)
ojfchar = {"dx": len(aqpara["lb"]), "dev": len(aqpara["ev"])}
aqpara["cfn"] = cfn
aqpara["xau"] = 1.0
aqpara["xal"] = 0.0


# cfn = objectives.cf42
# ojfn,xmin,ymin = objectives.genmat52ojf(len(aqpara['lb']),aqpara['lb'],aqpara['ub'])
# ojfchar = {'dx':len(aqpara['lb']),'dev':len(aqpara['ev'])}


# Stop after nmax objective evaluations.
stoppara = {"nmax": 20}
stopfn = optimize.nstopfn


# Recommender and its parameters.
reccfn, reccpara = reccomenders.gpasmap
reccpara["lb"] = aqpara["lb"]
reccpara["ub"] = aqpara["ub"]


# NOTE(review): `path` must be defined by the surrounding module/config.
O = optimize.optimizer(path, aqpara, aqfn, stoppara, stopfn, reccpara, reccfn, ojfn, ojfchar, checkrecc=True)

O.run()
Exemple #15
0
def optimize_V1( objective, true_run, u , v2, gtol=1e-4 , maxiter=500):
    """Optimize V1 from small random initial values shaped like the true V.

    Returns the unflattened optimal parameter dict.
    """
    shape = true_run['model']['V'].shape
    start = {'V1': 0.001 + 0.01 * Rand.rand(*shape)}
    flat = optimize.optimizer(objective)(init_params=start, maxiter=maxiter, gtol=gtol)
    return objective.unflat(flat)
Exemple #16
0
def main():
    """
    Main function: run Gibbs-EM for the aspect-based rating model.

    E-step: Gibbs sampling of aspect/sentiment counts from the reviews
    (skipped when `baseline` is True, reducing the model to a plain
    latent-factor recommender). M-step: gradient-based optimization of
    the flattened parameter vector. Parameters and test RMSE are saved
    to `path_to_save_results` after every optimizer step.
    """
    # Download data for NLTK if not already done
    # nltk.download('all')

    # Read
    np.random.seed(5)
    baseline = False  ## Make this true if you want to run the baseline, which is a simple latent factor model
    path_to_save_results = './test/'

    imdb = Indexer()
    imdb_file = 'data/clothing_data_small.json'  ## path to data file
    logging.basicConfig(format='%(levelname)s: %(message)s',
                        level=logging.INFO)
    logging.info('Reading file %s' % imdb_file)
    imdb.read_file(imdb_file)
    logging.info('File %s read' % imdb_file)

    (
        vocab_size,
        user_list,  # remove
        movie_list,
        review_matrix,
        review_map,
        user_dict,
        movie_dict,
        rating_list,
        t_mean,
        movie_reviews,
        word_dictionary,
        U,
        M,
        R,
        test_indices) = imdb.get_mappings(path_to_save_results)

    mul_factor = 0.1
    ## Initialize user-specific time coefficients for each parameter group.
    alpha_vu = np.random.normal(0, sigma_u, (U, K)) * mul_factor
    alpha_bu = np.random.normal(0, sigma_u, (U, 1)) * mul_factor
    alpha_tu = np.random.normal(0, sigma_u, (U, A)) * mul_factor

    # User
    v_u = np.random.normal(0, sigma_u,
                           (U, K)) * mul_factor  # Latent factor vector
    b_u = np.random.normal(0, sigma_bu,
                           (U, 1)) * mul_factor  # Common bias vector
    theta_u = np.random.normal(0, sigma_ua,
                               (U, A)) * mul_factor  # Aspect specific vector

    # Movie
    v_m = np.random.normal(0, sigma_m,
                           (M, K)) * mul_factor  # Latent factor vector
    b_m = np.random.normal(0, sigma_bm,
                           (M, 1)) * mul_factor  # Common bias vector
    theta_m = np.random.normal(0, sigma_ma,
                               (M, A)) * mul_factor  # Aspect specific vector

    # Common bias
    b_o = np.random.normal(0, sigma_b0) * mul_factor

    # Scaling Matrix
    M_a = np.random.normal(0, sigma_Ma, (A, K)) * mul_factor

    # Fix: use the `np` alias consistently (the bare name `numpy` is not
    # imported under that name anywhere else in this module).
    params = np.concatenate(
        (alpha_vu.flatten('F'), v_u.flatten('F'), alpha_bu.flatten('F'),
         b_u.flatten('F'), alpha_tu.flatten('F'), theta_u.flatten('F'),
         v_m.flatten('F'), b_m.flatten('F'), theta_m.flatten('F'),
         M_a.flatten('F'), np.array([b_o]).flatten('F')))

    save_test_rmse = []
    # Get number of users and movies
    Users = len(user_list)
    Movies = len(movie_list)
    logging.info('No. of users U = %d' % Users)
    logging.info('No. of movies M = %d' % Movies)

    # change gibbs sampler initialization
    gibbs_sampler = GibbsSampler(vocab_size, review_matrix, rating_list,
                                 movie_dict, user_dict, movie_reviews,
                                 word_dictionary, U, M, R, test_indices)

    # Run Gibbs EM
    for it in range(1, MAX_ITER + 1):
        print('Running iteration %d of Gibbs EM' % it)
        print('Running E-Step - Gibbs Sampling')

        if not baseline:  # idiomatic boolean test (was `baseline != True`)
            Nums, Numas, Numa = gibbs_sampler.run(vocab_size, review_matrix,
                                                  rating_list, user_dict,
                                                  movie_dict, movie_reviews,
                                                  word_dictionary, t_mean,
                                                  params, test_indices,
                                                  path_to_save_results)
        else:
            # Baseline: zero counts disable the aspect/sentiment terms.
            Nums = np.zeros((R, 2))
            Numas = np.zeros((R, A, 2))
            Numa = np.zeros((R, A))
        print('Running M-Step - Gradient Descent')
        for i in range(1, MAX_OPT_ITER + 1):
            params, save_test_rmse = optimizer(Nums, Numas, Numa, rating_list,
                                               t_mean, params, U, M, R,
                                               test_indices, save_test_rmse)
            np.save(path_to_save_results + 'params.npy', params)
            np.save(
                path_to_save_results +
                'performance_notime_medium_noreg_seed5.npy', save_test_rmse)
 def testPlanesEjecucion(self):
     """Smoke test: building an optimizer for a sample record must not raise.

     NOTE(review): the result is never asserted on -- `optima` is unused.
     """
     optima = optimize.optimizer({"@nombre": "S30", "@ciudad": "Sevilla"}, self.tabla)
Exemple #18
0
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.

import optimize
import acquisitions
import reccomenders
import objectives

import scipy as sp
import os
import sys

import logging
logging.basicConfig(level=logging.DEBUG)

sys.path.append('configs')
#import randomsh as optconfig
#import gridsh as optconfig
#import EIMLsh as optconfig
import PESbssh as optconfig
#import EIpumppid as optconfig
#import PESpump as optconfig
#import PBpump as optconfig

# Build the optimizer from the selected config module and run it.
O = optimize.optimizer(optconfig.path,
                       optconfig.aqpara,
                       optconfig.aqfn,
                       optconfig.stoppara,
                       optconfig.stopfn,
                       optconfig.reccpara,
                       optconfig.reccfn,
                       optconfig.ojf,
                       optconfig.ojfchar,
                       checkrecc=True)

O.run()
Exemple #19
0
def f(params,args):
    """Total objective: sum of per-cell f terms over parameter slices."""
    width = N * (N + 1)
    terms = [term.f(params[k * width:(k + 1) * width], args[k]) for k in range(iii)]
    return np.sum(terms, 0)

def barrier(params,args):
    """Total barrier: sum of per-cell barrier terms over parameter slices."""
    width = N * (N + 1)
    terms = [term.barrier(params[k * width:(k + 1) * width], args[k]) for k in range(iii)]
    return np.sum(terms, 0)

def df(params,args):
    """Total gradient: per-cell gradients concatenated in cell order."""
    width = N * (N + 1)
    grads = [term.df(params[k * width:(k + 1) * width], args[k]) for k in range(iii)]
    return np.concatenate(grads)

# Attach the summed objective, gradient and barrier to the shared object.
objective.f       = f
objective.df      = df
objective.barrier = barrier

# No-op per-iteration callback.
def callback_one(ip,d): pass

# NOTE(review): this rebinds the name `optimize` from the module to an
# optimizer instance -- any later optimize.optimizer(...) call would fail.
optimize  = optimize.optimizer( objective , callback=callback_one )


#true   = [{ 'theta' : np.dot( U.T , V1[i,:] ) , 'M' : 0.1*np.dot( U.T * V1[i,:] , U ) } for i in range(iii)]
#
#for t in true:
#    w,v = eig( np.eye(t['M'].shape[0]) - t['M'] )
#    print 'eig true M' , w.real
#
#trupar = true
#for i in range(5):
#    trupar = optimize(init_params=trupar,args=data)
#    callback_one(trupar,data)
#trupar = objective.inflate(trupar)

# Flatten the list of per-cell initial parameter dicts into one vector.
params = np.concatenate( [term.flatten(ip) for ip in init_params] )
Exemple #20
0
                                               aqpara['ub'], 1.)
# Wrap the objective (defined on the truncated lines above) with its cost fn.
ojfn = objectives.costfnwrap(ojf, cfn)
ojfchar = {'dx': len(aqpara['lb']), 'dev': len(aqpara['ev'])}
aqpara['cfn'] = cfn
aqpara['xau'] = 1.
aqpara['xal'] = 0.

#cfn = objectives.cf42
#ojfn,xmin,ymin = objectives.genmat52ojf(len(aqpara['lb']),aqpara['lb'],aqpara['ub'])
#ojfchar = {'dx':len(aqpara['lb']),'dev':len(aqpara['ev'])}

# Stop after nmax objective evaluations.
stoppara = {'nmax': 20}
stopfn = optimize.nstopfn

# Recommender and its parameters.
reccfn, reccpara = reccomenders.gpasmap
reccpara['lb'] = aqpara['lb']
reccpara['ub'] = aqpara['ub']

# NOTE(review): `path` and `aqfn` must come from the surrounding module.
O = optimize.optimizer(path,
                       aqpara,
                       aqfn,
                       stoppara,
                       stopfn,
                       reccpara,
                       reccfn,
                       ojfn,
                       ojfchar,
                       checkrecc=True)

O.run()
Exemple #21
0
def callback( objective , params ):
    """Print the current objective value and barrier (Python 2 print)."""
    print 'Objective: ' , objective.f(params) , '  barrier: ', objective.barrier(params)


# Ground-truth parameters and the fixed data bound into the objective.
true = {'U' : U , 'V1': V1 }
data = {'STAs':np.vstack(STA) , 'STCs':np.vstack([stc[np.newaxis,:] for stc in STC]), 
        'V2':V2*np.ones(Nsub) , 'N':NRGC }

# Symbolic targets: quadratic-Poisson likelihood plus eigenvalue barrier.
targets = { 'f':quadratic_Poisson, 'barrier':eig_barrier }

# Reparameterize the targets in terms of per-RGC (U, V1).
targets = kolia_theano.reparameterize(targets,UVs(NRGC))

objective = kolia_theano.Objective( init_params=true, differentiate=['f'], 
                          callback=callback, **targets )

optimizer = optimize.optimizer( objective.where(**data) )

# Sanity check: optimize starting from the true parameters (twice).
trupar = true
for i in range(2):
    trupar = optimizer(init_params=trupar)
trupar = objective.unflat(trupar)


# Small positive random initialization for the actual fit.
init_params = {'U' : 0.0001+0.05*R.random(size=U.shape ) ,
               'V1': 0.0001+0.05*R.random(size=V1.shape) }

params = init_params
for i in range(10):
    params = optimizer(init_params=params)
params = objective.unflat(params)