Example #1
import numpy as np

import utils  # assumed local helper module providing converter/listconverter/matr2vec/list2vec
# extend_gradient is assumed to be defined in the same module as this function.


def extend_callgrad(x, Q, P, M, L, r, mu, sigma, sm, maxdegree, vecsize, experiment, opt_var, small_k):
    # opt_var flags which variables are optimized, in this order:
    #   opt_var[0]: pwm (r)
    #   opt_var[1]: sm
    #   opt_var[2]: sigma
    #   opt_var[3]: mu
    # 0: variable held fixed, 1: variable optimized.
    # Example: opt_var = [1, 0, 1, 0] optimizes pwm and sigma only.
    
    # Unpack the flat optimization vector x into the active variables;
    # vecsize holds the cumulative boundaries of each block within x.
    if opt_var[0] == 1:
        r = utils.converter(x[:vecsize[0]], P, M)
    if opt_var[1] == 1:
        sm = utils.listconverter(x[vecsize[0]:vecsize[1]], P, M)
    if opt_var[2] == 1:
        sigma = utils.listconverter(x[vecsize[1]:vecsize[2]], P, M)
    if opt_var[3] == 1:
        mu = utils.listconverter(x[vecsize[2]:], P, M)
    
    # Compute the gradient of the objective with respect to each variable.
    gradmu, gradsig, gradpwm, gradsm = extend_gradient(Q, P, M, L, r, mu, sigma, sm, maxdegree, opt_var, small_k)
    
    # Flatten the gradient of every active variable and concatenate the
    # pieces in the same order used to unpack x above.
    gradient = []
    if opt_var[0] == 1:
        gradpwm, vecsize1 = utils.matr2vec(gradpwm, P, M)
        gradient = np.concatenate((gradient, gradpwm))
    if opt_var[1] == 1:
        gradsm, vecsize2 = utils.list2vec(gradsm, P, M)
        gradient = np.concatenate((gradient, gradsm))
    if opt_var[2] == 1:
        gradsig, vecsize3 = utils.list2vec(gradsig, P, M)
        gradient = np.concatenate((gradient, gradsig))
    if opt_var[3] == 1:
        gradmu, vecsize4 = utils.list2vec(gradmu, P, M)
        gradient = np.concatenate((gradient, gradmu))
    return gradient
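
For orientation, here is a minimal, self-contained sketch of the vector layout this function appears to assume: vecsize acts as a list of cumulative boundaries, so x[:vecsize[0]] is the pwm block, x[vecsize[0]:vecsize[1]] the sm block, and so on, and the gradient is reassembled in the same order. The block sizes below are hypothetical placeholders; the snippet only illustrates the slicing convention, not the actual utils converters or extend_gradient.

import numpy as np

# Hypothetical block sizes for pwm, sm, sigma and mu (illustration only).
sizes = [6, 4, 4, 4]
vecsize = np.cumsum(sizes)          # cumulative boundaries, here [6, 10, 14, 18]
x = np.arange(float(vecsize[-1]))   # dummy flat optimization vector

pwm_block = x[:vecsize[0]]
sm_block = x[vecsize[0]:vecsize[1]]
sigma_block = x[vecsize[1]:vecsize[2]]
mu_block = x[vecsize[2]:]

# Concatenating the blocks in the same order reproduces x; extend_callgrad
# relies on the same ordering when it assembles the returned gradient.
assert np.array_equal(np.concatenate((pwm_block, sm_block, sigma_block, mu_block)), x)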