import pickle

# create_hyperspace comes from the HyperSpace library (assumed import path)
from hyperspace import create_hyperspace


def create_spaces_and_args_pickles(args):
    # Pickle the parsed command-line arguments so the Cobalt batch jobs can reload them.
    with open(
            "/lus/theta-fs0/projects/CVD-Mol-AI/mzvyagin/tmp/hyperres_pickled_args",
            "wb") as f:
        pickle.dump(args, f)
    print(
        "Dumped arguments to /lus/theta-fs0/projects/CVD-Mol-AI/mzvyagin/tmp/hyperres_pickled_args, "
        "now creating hyperspaces.")
    # Define the hyperparameter search bounds; skopt dimensions are (low, high),
    # so the epsilon bounds must be ordered smallest-first.
    if args.model == "segmentation_cityscapes":
        hyperparameters = [
            (0.00001, 0.1),  # learning_rate
            (10, 100),  # epochs
            (8, 24),  # batch size
            (.00000001, 1)  # epsilon for Adam optimizer
        ]
    elif args.model == "segmentation_gis":
        hyperparameters = [
            (0.00001, 0.1),  # learning_rate
            (10, 100),  # epochs
            (100, 1000),  # batch size
            (.00000001, 1)  # epsilon for Adam optimizer
        ]
    else:
        hyperparameters = [
            (0.00001, 0.1),  # learning_rate
            (0.2, 0.9),  # dropout
            (10, 100),  # epochs
            (10, 500),  # batch size
            (.00000001, 1)  # epsilon for Adam optimizer
        ]
    # Partition the bounds into overlapping subspaces and pickle them for the batch jobs.
    space = create_hyperspace(hyperparameters)
    with open(
            "/lus/theta-fs0/projects/CVD-Mol-AI/mzvyagin/tmp/hyperres_pickled_spaces",
            "wb") as f:
        pickle.dump(space, f)
    print(
        "Dumped scikit-optimize spaces to /lus/theta-fs0/projects/CVD-Mol-AI/mzvyagin/tmp/hyperres_pickled_spaces... "
        "Submitting batch jobs to Cobalt now.")
    return space
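
# A minimal sketch of the consumer side, assuming each Cobalt batch job reloads
# the pickles written above. load_spaces_and_args is a hypothetical helper, not
# part of the original source; the paths mirror those used in the function.
def load_spaces_and_args():
    with open(
            "/lus/theta-fs0/projects/CVD-Mol-AI/mzvyagin/tmp/hyperres_pickled_args",
            "rb") as f:
        args = pickle.load(f)
    with open(
            "/lus/theta-fs0/projects/CVD-Mol-AI/mzvyagin/tmp/hyperres_pickled_spaces",
            "rb") as f:
        space = pickle.load(f)
    return space, args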
Example #2
from argparse import ArgumentParser

from hyperspace import create_hyperspace  # assumed import path
from ray import tune
from ray.tune.suggest.skopt import SkOptSearch  # Ray Tune's scikit-optimize integration
from skopt import Optimizer
from tqdm import tqdm

# multi_train, the Ray Tune trainable that runs each trial, is defined
# elsewhere in this project.

if __name__ == "__main__":
    parser = ArgumentParser(
        "Run AlexNet cross-framework tuning on PyTorch and TensorFlow.")
    parser.add_argument("--out",
                        "-o",
                        help="Specify the output csv filename.",
                        required=True)
    args = parser.parse_args()
    # Define the hyperparameter search bounds.
    hyperparameters = [
        (0.00001, 0.1),  # learning_rate
        (0.2, 0.9),  # dropout
        (10, 100),  # epochs
        (10, 1000)  # batch size
    ]
    space = create_hyperspace(hyperparameters)
    # Perform runs and aggregate results
    results = []
    for section in tqdm(space):
        # Wrap this subspace in a skopt Optimizer (Bayesian, GP-based by default)
        optimizer = Optimizer(section)
        search_algo = SkOptSearch(
            optimizer, ['learning_rate', 'dropout', 'epochs', 'batch_size'],
            metric='average_res',
            mode='max')
        analysis = tune.run(multi_train,
                            search_alg=search_algo,
                            num_samples=50,
                            resources_per_trial={'gpu': 1})
        results.append(analysis)
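
    # The --out flag is never used above; a minimal sketch of writing the
    # aggregated trial results to that CSV, assuming pandas is available and
    # using Ray Tune's analysis.dataframe() accessor:
    import pandas as pd
    all_results = pd.concat([a.dataframe() for a in results])
    all_results.to_csv(args.out)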