Example #1
# Imports assumed by this example; the dogs package provides bounds,
# physical_bounds and the solver routines used below.
import os
import inspect

import numpy as np
from scipy import io

import dogs


def Initialize_IC():

    # The following lines generate the allpoints directory:
    current_path = os.path.dirname(
        os.path.abspath(inspect.getfile(inspect.currentframe())))
    apts = current_path + "/allpoints"
    if not os.path.exists(apts):
        os.makedirs(apts)

    n = 3  # Dimension of data
    K = 3  # Tuning parameter for continuous search function
    Nm = 8  # Initial mesh grid size
    L = 1  # Tuning parameter for discrete search function
    flag = 1  # Status flag for the evaluation/iteration state
    method = "NPS"  # Strategy for the regression function; choose NPS or MAPS
    user = '******'
    # fe_times = np.array([])  # Number of function evaluations at this point.

    # The following lines set up the initial points:
    # bnd1: lower bounds for the physical data
    # bnd2: upper bounds for the physical data
    # xE: initial points of interest
    # y0: estimated value of the minimum
    if n == 1:
        xE = np.array([[0.5, 0.75]])
        bnd2 = np.array([30])
        bnd1 = np.array([24])

    elif n == 2:
        xE = np.array([[0.5, 0.75, 0.5], [0.5, 0.5, 0.75]])
        bnd2 = np.array([30, 30])
        bnd1 = np.array([24, 24])
    elif n == 3:
        xE = np.array([[0.5, 0.5, 0.5, 0.75], [0.5, 0.5, 0.75, 0.5],
                       [0.5, 0.75, 0.5, 0.5]])
        bnd2 = np.array([30, 30, 30])
        bnd1 = np.array([24, 24, 24])

    xU = dogs.bounds(np.zeros([n, 1]), np.ones([n, 1]), n)

    xE = dogs.physical_bounds(xE, bnd1, bnd2)
    xU = dogs.physical_bounds(xU, bnd1, bnd2)

    k = 0  # iteration counter, starts at 0
    iter_max = 50  # maximum number of iterations
    idx = 0

    var_opt = {}
    var_opt['n'] = n
    var_opt['K'] = K
    var_opt['Nm'] = Nm
    var_opt['L'] = L
    var_opt['ub'] = bnd2
    var_opt['lb'] = bnd1
    var_opt['user'] = user
    var_opt['inter_par_method'] = method
    var_opt['xE'] = xE
    var_opt['xU'] = xU
    var_opt['num_point'] = idx
    var_opt['flag'] = flag
    var_opt['iter'] = k
    var_opt['iter_max'] = iter_max
    # var_opt['fe_times'] = fe_times
    io.savemat("allpoints/pre_opt_IC", var_opt)

    return
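The settings saved by Initialize_IC() are read back later with scipy.io.loadmat, which returns MATLAB-style 2-D arrays. A minimal sketch of that round trip, assuming the allpoints/pre_opt_IC.mat file written above:

from scipy import io

var_opt = io.loadmat("allpoints/pre_opt_IC")  # ".mat" is appended automatically
n = var_opt['n'][0, 0]       # scalars come back as 2-D arrays, hence the [0, 0]
k = var_opt['iter'][0, 0]    # current iteration counter
xE = var_opt['xE']           # initial points in physical coordinates
print(n, k, xE.shape)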
Example #2
# This fragment assumes var_opt and n have already been loaded, e.g. via
# var_opt = io.loadmat("allpoints/pre_opt_IC") and n = var_opt['n'][0, 0].
method = var_opt['inter_par_method']
xE = var_opt['xE']
xU = var_opt['xU']
if xU.shape[1] == 0:
    xU = xU.reshape(n, 0)

Data = io.loadmat("allpoints/Yall")
yE = Data['yE'][0]
SigmaT = Data['SigmaT'][0]


xE = np.array([[0.5, 0.5, 0.5, 0.75], [0.5, 0.5, 0.75, 0.5], [0.5, 0.75, 0.5, 0.5]])
bnd2 = np.array([30, 30, 30])
bnd1 = np.array([24, 24, 24])

xU = dogs.bounds(np.zeros([n, 1]), np.ones([n, 1]), n)

xE = dogs.physical_bounds(xE, bnd1, bnd2)
xU = dogs.physical_bounds(xU, bnd1, bnd2)

# Initialization
Ain = np.concatenate((np.identity(n), -np.identity(n)), axis=0)
Bin = np.concatenate((np.ones((n, 1)), np.zeros((n, 1))), axis=0)
# Recompute yE by evaluating the (project-specific) cost function mpl_cost
# at each initial point, i.e. at each column of xE.
yE = np.array([])
for ll in range(xE.shape[1]):
    J = mpl_cost(xE[:, ll])
    yE = np.hstack([yE, J])


inter_par = Inter_par(method='NPS')
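dogs.physical_bounds itself is not shown in these examples; judging from the normalized 0.5/0.75 coordinates and the 24-30 bounds above, it presumably maps each column of xE from the unit cube onto the physical box [bnd1, bnd2]. A minimal sketch of that rescaling, as an assumption rather than the package's actual code:

import numpy as np

def to_physical(x, lb, ub):
    # Map each column of x from [0, 1]^n to [lb, ub] componentwise (assumed behaviour).
    return lb[:, None] + x * (ub - lb)[:, None]

xE_norm = np.array([[0.5, 0.5, 0.5, 0.75],
                    [0.5, 0.5, 0.75, 0.5],
                    [0.5, 0.75, 0.5, 0.5]])
print(to_physical(xE_norm, np.array([24, 24, 24]), np.array([30, 30, 30])))
# first column -> [27. 27. 27.], last column -> [28.5 27. 27.]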
Example #3
# Imports assumed by this example; dogs, uq (uncertainty quantification) and
# tr (transient removal) are project-specific modules, so the exact import
# statements below are an assumption.
import os
import inspect
from pathlib import Path

import numpy as np
from scipy import io

import dogs
import tr
import uq


def main():

    n = 3  # Dimension of data
    K = 3  # Tuning parameter for continuous search function
    Nm = 8  # Initial mesh grid size
    L = 1  # Tuning parameter for discrete search function
    flag = 1  # Status flag for the evaluation/iteration state
    method = "NPS"  # Strategy for the regression function; choose NPS or MAPS
    user = '******'
    # fe_times = np.array([])  # Number of function evaluations at this point.

    # The following lines set up the initial points:
    # bnd1: lower bounds for the physical data
    # bnd2: upper bounds for the physical data
    # xE: initial points of interest
    # y0: estimated value of the minimum
    if n == 1:
        xE = np.array([[0.5, 0.75]])
        bnd2 = np.array([30])
        bnd1 = np.array([24])

    elif n == 2:
        xE = np.array([[0.5, 0.75, 0.5], [0.5, 0.5, 0.75]])
        bnd2 = np.array([30, 30])
        bnd1 = np.array([24, 24])
    elif n == 3:
        xE = np.array([[0.5, 0.5, 0.5, 0.75], [0.5, 0.5, 0.75, 0.5], [0.5, 0.75, 0.5, 0.5]])
        bnd2 = np.array([30, 30, 30])
        bnd1 = np.array([24, 24, 24])

    xU = dogs.bounds(np.zeros([n, 1]), np.ones([n, 1]), n)

    xE = dogs.physical_bounds(xE, bnd1, bnd2)
    xU = dogs.physical_bounds(xU, bnd1, bnd2)

    k = 0  # iteration counter, starts at 0
    iter_max = 50  # maximum number of iterations
    idx = 0

    print('==================================================')

    current_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))

    pre_opt = current_path + "/allpoints/pre_opt_IC.mat"
    pre_opt_path = Path(pre_opt)

    pre_Y = current_path + "/allpoints/Yall.mat"
    pre_Y_path = Path(pre_Y)

    pre_J = current_path + "/allpoints/surr_J_new.dat"
    pre_J_path = Path(pre_J)
    # Check whether this is the first iteration; if the optimization information file pre_opt_IC doesn't exist,
    # then generate that file:

    var_opt = io.loadmat("allpoints/pre_opt_IC")
    k = var_opt['iter'][0, 0]
    flag = var_opt['flag'][0, 0]

    # Initialization
    if k == 0:  # k is the iteration counter; k = 0 means the initialization is not yet finished.

        if pre_Y_path.is_file():  # The file 'Yall' exists.
            # Read from the previous points
            data = io.loadmat("allpoints/Yall")
            yE = data['yE'][0]
            SigmaT = data['SigmaT'][0]
            T = data['T'][0]

            # if sign == 1:
            zs = np.loadtxt("allpoints/surr_J_new.dat")
            xx = uq.data_moving_average(zs, 40).values
            ind = tr.transient_removal(xx)
            sig = np.sqrt(uq.stationary_statistical_learning_reduced(xx[ind:], 18)[0])
            t = len(zs)  # not needed for Alpha-DOGS  # TODO fix the length of data
            J = np.abs(np.mean(xx[ind:]))

            yE = np.hstack((yE, J))
            SigmaT = np.hstack((SigmaT, sig))
            T = np.hstack((T, t))

            data = {'yE': yE, 'SigmaT': SigmaT, 'T': T}
            io.savemat("allpoints/Yall", data)

            print(' len of yE = ', len(yE))

        else:
            if not pre_J_path.is_file():
                # The very first iteration, the file 'Yall' doesn't exist.
                yE = np.array([])
                print("The first time running the iteration")
                print(' len of yE = ', len(yE))
                print('iter k = ', k)
            else:
                # The second run of the algorithm.
                yE = np.array([])
                SigmaT = np.array([])
                T = np.array([])

                # Read from surr_J_new.
                zs = np.loadtxt("allpoints/surr_J_new.dat")

                xx = uq.data_moving_average(zs, 40).values
                ind = tr.transient_removal(xx)
                sig = np.sqrt(uq.stationary_statistical_learning_reduced(xx[ind:], 18)[0])
                t = len(zs)  # not needed for Alpha-DOGS
                J = np.abs(np.mean(xx[ind:]))

                yE = np.hstack((yE, J))
                SigmaT = np.hstack((SigmaT, sig))
                T = np.hstack((T, t))  # not needed for Alpha-DOGS
                data = {'yE': yE, 'SigmaT': SigmaT, 'T': T}
                io.savemat("allpoints/Yall", data)

                print("The second time running the iteration")
                print(' len of yE = ', len(yE))
                print('iter k = ', k)
                print('function evaluation at this iteration: ', J)

        # Read pre_opt_IC.mat. While len(yE) < n + 1 we remain in the initialization
        # phase; once all initial points are evaluated, k is set to 1.
        # var_opt = io.loadmat("allpoints/pre_opt_IC")
        # The following variables are needed for initialization:

        xE = var_opt['xE']
        n = var_opt['n'][0, 0]

        if len(yE) < xE.shape[1]:
            # Generate the point that we want to evaluate.
            xcurr = np.copy(xE[:, len(yE)])
            fout = open("allpoints/pts_to_eval.dat", 'w')
            keywords = ['Awin', 'lambdain', 'fanglein']
            fout.write(str('flagin') + '=' + str(int(flag)) + "\n")
            fout.write(str('IDin') + '=' + str(int(len(yE))) + "\n")
            for j in range(n):
                fout.write(keywords[j] + '=' + str(float(xcurr[j])) + "\n")
            fout.close()

            var_opt['iter'] = 0
            var_opt['num_point'] = len(yE)
            io.savemat("allpoints/pre_opt_IC", var_opt)

            print('point to eval at this iteration x = ', xcurr)
            print('len of yE = ', len(yE))

            return
        else:

            # Initialization complete
            var_opt['iter'] = 1
            io.savemat("allpoints/pre_opt_IC", var_opt)
            # Run one iteration after initialization.
            if pre_J_path.is_file():
                dogs.DOGS_standalone_IC()
                os.remove(current_path + "/allpoints/surr_J_new.dat")
                return
            else:
                return

    else:
        if pre_J_path.is_file():  # If surr_J_new exists, the function evaluation succeeded.
            dogs.DOGS_standalone_IC()
            var_opt = io.loadmat("allpoints/pre_opt_IC")
            flag = var_opt['flag'][0, 0]
            if flag != 2:  # If flag == 2 (mesh refinement), keep surr_J_new and perform one more iteration.
                os.remove(current_path + "/allpoints/surr_J_new.dat")
            return
        else:  # The function evaluation failed; re-run the function evaluation.
            return
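main() communicates with the external evaluation code through two plain-text files: it writes the next point to allpoints/pts_to_eval.dat as key=value lines (flagin, IDin, then one keyword per design variable: Awin, lambdain, fanglein for n = 3) and expects the resulting time series in allpoints/surr_J_new.dat. A small sketch of a reader for the pts_to_eval.dat format, mirroring the writer loop above:

def read_pts_to_eval(path="allpoints/pts_to_eval.dat"):
    # Parse the key=value lines written by main().
    values = {}
    with open(path) as fin:
        for line in fin:
            key, _, val = line.strip().partition('=')
            values[key] = float(val)
    return values

# Example usage: values['IDin'] tells the evaluator which initial point is requested.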
Example #4
def Initialize_IC():

    # The following (commented-out) lines would generate the allpoints directory:
    # current_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    # apts = current_path + "/allpoints"
    # if not os.path.exists(apts):
    #     os.makedirs(apts)

    n = 1  # Dimension of data
    K = 3  # Tuning parameter for continuous search function
    Nm = 8  # Initial mesh grid size
    L = 1  # Tuning parameter for discrete search function
    flag = 1  # Status flag for the evaluation/iteration state
    method = "NPS"  # Strategy for the regression function; choose NPS or MAPS
    user = '******'

    # The following lines set up the initial points:
    # bnd1: lower bounds for the physical data
    # bnd2: upper bounds for the physical data
    # xE: initial points of interest
    # y0: estimated value of the minimum
    if n == 1:
        xE = np.array([[0.5, 0.75]])
        y0 = np.array([23.5712])
        bnd2 = np.array([30])
        bnd1 = np.array([24])
    elif n == 2:
        xE = np.array([[0.5, 0.75, 0.5], [0.5, 0.5, 0.75]])
        y0 = np.array([23.5712, 23.5712])
        bnd2 = np.array([30, 30])
        bnd1 = np.array([24, 24])
    elif n == 3:
        xE = np.array([[0.5, 0.5, 0.5, 0.75], [0.5, 0.5, 0.75, 0.5],
                       [0.5, 0.75, 0.5, 0.5]])
        y0 = np.array([23.5712, 23.5712, 23.5712])
        bnd2 = np.array([30, 30, 30])
        bnd1 = np.array([24, 24, 24])

    xU = bounds(np.zeros([n, 1]), np.ones([n, 1]), n)

    xE = physical_bounds(xE, bnd1, bnd2)
    xU = physical_bounds(xU, bnd1, bnd2)

    k = 0  # iteration counter, starts at 0
    iter_max = 50  # maximum number of iterations
    T_lorenz = 5
    h_lorenz = 0.005
    idx = 0

    var_opt = {}
    var_opt['y0'] = y0
    var_opt['n'] = n
    var_opt['K'] = K
    var_opt['Nm'] = Nm
    var_opt['L'] = L
    var_opt['ub'] = bnd2
    var_opt['lb'] = bnd1
    var_opt['user'] = user
    var_opt['inter_par_method'] = method
    var_opt['xE'] = xE
    var_opt['xU'] = xU
    var_opt['num_point'] = idx
    var_opt['flag'] = flag
    var_opt['T_lorenz'] = T_lorenz
    var_opt['h_lorenz'] = h_lorenz
    var_opt['iter'] = k
    var_opt['iter_max'] = iter_max
    io.savemat("allpoints/pre_opt_IC", var_opt)

    # The following lines are for generating stop file.
    # fout_stop = open("allpoints/stop.dat", 'w')
    # fout_stop.write(str(0) + "\n")
    # fout_stop.close()

    return
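Taken together, the examples imply a file-driven outer loop: Initialize_IC() is run once to create allpoints/pre_opt_IC.mat, after which the external function evaluation and the main() routine from Example #3 alternate, coupled only through surr_J_new.dat and the .mat files. A hypothetical driver sketch (run_external_evaluation is a placeholder, not part of the original code):

import os
from scipy import io

if not os.path.exists("allpoints/pre_opt_IC.mat"):
    Initialize_IC()               # write the optimization settings once

iter_max = io.loadmat("allpoints/pre_opt_IC")['iter_max'][0, 0]
for _ in range(iter_max):
    run_external_evaluation()     # placeholder: must produce allpoints/surr_J_new.dat
    main()                        # consumes surr_J_new.dat and plans the next point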