from problems.arxiv.process_raw_data import ProcessRawData

import argparse

from stratified_bayesian_optimization.initializers.log import SBOLog

logger = SBOLog(__name__)

if __name__ == '__main__':
    # python -m problems.arxiv.scripts.run_year_data '1'
    parser = argparse.ArgumentParser()
    parser.add_argument('month', help='e.g. 1')
    args = parser.parse_args()
    month = args.month

    files = ProcessRawData.generate_filenames_month(2016, int(args.month))

    logger.info("Files to be processed: ")
    logger.info(files)

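    # Aggregate the month's raw files into a single processed-data JSON file.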
    ProcessRawData.get_click_data(
        files, "problems/arxiv/data/2016_%s_processed_data.json" % month)
Example #2
            # Log the running classification accuracy for this epoch
            # (`correct` and `total` are accumulated outside this snippet).
            logger.info('Accuracy in epoch %d is:' % epoch)
            logger.info(100. * correct / float(total))

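            # Standard PyTorch training step (legacy torch.autograd.Variable API):
            # forward pass, NLL loss, backprop, optimizer update.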
            data, target = train_dict[i]
            data, target = Variable(data), Variable(target)
            optimizer.zero_grad()
            output = model(data)
            loss = F.nll_loss(output, target)
            loss.backward()
            optimizer.step()

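        # Persist the recorded training results; `values` is accumulated earlier,
        # outside this snippet.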
        f_name = 'data/multi_start/neural_networks/training_results/'
        f_name += name_model
        JSONFile.write(values, f_name)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('nn', help='index of the neural network: an integer from 0 to 10')
    parser.add_argument('n_epochs', help='e.g. 20')
    args = parser.parse_args()

    nn_model = int(args.nn)
    n_epochs = int(args.n_epochs)
    model = Net()

    f_name = 'data/multi_start/neural_networks/nn_' + str(nn_model)
    model.load_state_dict(torch.load(f_name))
    logger.info('nn loaded successfully')
    train_nn(model, n_epochs=n_epochs, name_model=str(nn_model))
Example #3
import argparse
import os

import numpy as np

from stratified_bayesian_optimization.util.json_file import JSONFile
from stratified_bayesian_optimization.initializers.log import SBOLog

logger = SBOLog(__name__)

if __name__ == '__main__':
    # Example usage:
    # python -m problems.cnn_cifar10.scripts.maximum_runs 500 600

    parser = argparse.ArgumentParser()
    parser.add_argument('min_rs', help='e.g. 500')
    parser.add_argument('max_rs', help='e.g. 600')

    args = parser.parse_args()
    min_rs = int(args.min_rs)
    max_rs = int(args.max_rs)

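    # Gather the recorded test error from each random-seed run that wrote a results file.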
    max_values = []
    for i in range(min_rs, max_rs):
        file_name = 'problems/cnn_cifar10/runs_random_seeds/rs_%d.json' % i
        if not os.path.exists(file_name):
            continue
        data = JSONFile.read(file_name)
        max_values.append(data['test_error_images'])

    max_value = np.max(max_values)
    min_value = np.min(max_values)

    logger.info('max is: %f' % max_value)
    logger.info('min is: %f' % min_value)
Example #4
        # Snippet begins mid-branch: when bounds are supplied, lb and ub hold numeric
        # limits; these 'None' placeholders would fail in np.random.uniform below.
        bounds = None
        lb = ['None']
        ub = ['None']

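    # Sample a start point uniformly from [lb, ub] in each dimension, optionally
    # flipping its sign with probability 1/2 when choose_sign_st is set.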
    np.random.seed(random_seed)
    start = np.zeros(dimension)
    for i in range(dimension):
        start[i] = np.random.uniform(lb[i], ub[i], 1)
    sign = np.random.binomial(1, 0.5)

    if choose_sign_st and sign == 0:
        start = -1.0 * start

    logger.info('start')
    logger.info(start)

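    # Run stochastic gradient descent from the sampled start; name_model encodes
    # the run's hyperparameters so results can be matched to their settings.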
    results = SGD(start,
                  gradient,
                  batch_size,
                  objective,
                  maxepoch=n_epochs,
                  adam=False,
                  name_model='std_%f_rs_%d_lb_%f_ub_%f_lr_%f_%s' %
                  (std, random_seed, lb[0], ub[0], lr, method),
                  exact_gradient=exact_gradient,
                  learning_rate=lr,
                  method=method,
                  n_epochs=5,
                  n_samples=100)
Example #5
from stratified_bayesian_optimization.initializers.log import SBOLog
# The snippet omits its other imports: the kernel-name constants, SAME_CORRELATION,
# and ValidateGPService come from elsewhere in the stratified_bayesian_optimization package.

logger = SBOLog(__name__)

if __name__ == '__main__':
    # Example:
    # python -m scripts.run_validate_gp_model

    type_kernel = [PRODUCT_KERNELS_SEPARABLE, MATERN52_NAME, TASKS_KERNEL_NAME]
    n_training = 200
    problem_name = "arxiv"
    bounds_domain = [[0.01, 1.01], [0.1, 2.1], [1, 21], [1, 201], [0, 1, 2, 3, 4]]
    type_bounds = [0, 0, 0, 0, 1]
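    # type_bounds: 0 marks a continuous interval; 1 marks an explicit list of
    # discrete values (here the five task indices in the last bounds entry).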
    dimensions = [5, 4, 5]
    thinning = 5
    n_burning = 100
    max_steps_out = 1000
    random_seed = 5
    training_name = None
    points = None
    noise = False
    n_samples = 0
    cache = True
    kernel_params = {SAME_CORRELATION: True}
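
    # Cross-validate the GP model with the settings above; the service returns the
    # proportion of successful predictions ("Success proportion" below).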
    result = ValidateGPService.validate_gp_model(
        type_kernel, n_training, problem_name, bounds_domain, type_bounds, dimensions, thinning,
        n_burning, max_steps_out, random_seed, training_name, points, noise, n_samples, cache,
        **kernel_params)

    logger.info("Success proportion is: %f" % result)
Example #6
    # unittest method exercising SBOLog.info (the enclosing TestCase class is omitted).
    def test_info(self):
        logger = SBOLog(__name__)
        logger.info('testing')