# Example 1
0
from deeplab import train, eval, vis, export_model
from sacred import Experiment
from labwatch.assistant import LabAssistant
from labwatch.optimizers import RandomSearch
import os

# Sacred experiment wired to a Labwatch assistant: the assistant samples
# hyperparameters via RandomSearch and stores results in the "labwatch"
# MongoDB database.  Constructed at import time (opens a DB connection).
ex = Experiment()
a = LabAssistant(experiment=ex,
                 database_name="labwatch",
                 optimizer=RandomSearch)


@ex.config
def config():
    """Default DeepLab training configuration.

    Sacred executes this function and captures its local variables as the
    experiment config; later variables may reference earlier ones as plain
    Python expressions.
    """
    num_iterations = 100           # total training iterations
    train_batch_size = 4           # images per training batch
    model_variants = "xception_65"
    atrous_rates_0 = 6             # ASPP atrous (dilation) rates
    atrous_rates_1 = 12
    atrous_rates_2 = 18
    output_stride = 16
    decoder_output_stride = 4

    crop_size = "42, 42"           # "height, width" crop string
    fine_tune_batch_norm = True

    exp_id = 0
    # BUG FIX: sacred performs no "${...}" string interpolation, so the
    # original "${OM_DIR}" placeholders (and the literal "exp_id" path
    # component) were never substituted.  Config scopes are ordinary Python,
    # so build the dependent paths explicitly instead.
    OM_DIR = "/om/user/amineh/exp/{}".format(exp_id)
    INIT_DIR = OM_DIR + "/deeplabv3_pascal_trainval/model.ckpt"
    TRAIN_LOGDIR = OM_DIR + "/train"
    VIS_LOGDIR = OM_DIR + "/vis"
import sacred
from sacred.stflow import LogFileWriter

from labwatch.assistant import LabAssistant
from labwatch.optimizers import RandomSearch
from labwatch.hyperparameters import UniformFloat, UniformNumber

from tensorflow.examples.tutorials.mnist import input_data

from hyperparam_opt.tensorflow.dnn_mnist import HyperParams, train

# MNIST experiment (note: name kept as the original's 'MINST' spelling to
# avoid breaking any existing DB records keyed on it).  The assistant logs
# to the 'labwatch-db' MongoDB database and proposes configs via RandomSearch.
ex = sacred.Experiment(name='MINST')
a = LabAssistant(ex,
                 database_name='labwatch-db',
                 optimizer=RandomSearch)


@ex.config
def cfg():
    """Default hyperparameters for the MNIST DNN experiment."""
    data_dir = '../../data/tmp/mnist/'  # where the MNIST data is cached
    log_dir = './logs/'                 # summary/log output directory
    lr = 1e-3                           # optimizer learning rate
    batch_size = 100                    # examples per training step
    n_hidden = 32                       # hidden-layer width
    keep_prob = 0.5                     # dropout keep probability


@a.search_space
def search_space():
    """Hyperparameter ranges explored by the Labwatch optimizer."""
    batch_size = UniformNumber(lower=8, upper=256, default=100, type=int)
    lr = UniformFloat(lower=0.0001, upper=0.01, default=0.001,
                      log_scale=True)
# Example 3
0
from keras.models import Sequential, Model
from keras.layers import Dropout, Flatten, Dense, GlobalAveragePooling2D
from keras import backend as K
from keras.callbacks import ModelCheckpoint, LearningRateScheduler, TensorBoard, EarlyStopping, TensorBoard

import os, os.path
import sys

from sacred import Experiment
from sacred.observers import MongoObserver
from labwatch.assistant import LabAssistant
from labwatch.optimizers.random_search import RandomSearch
from labwatch.hyperparameters import UniformNumber, UniformFloat

# Experiment plus Labwatch assistant using the default database and
# random-search hyperparameter proposals.
ex = Experiment()
a = LabAssistant(ex, optimizer=RandomSearch)


@a.search_space
def search_space():
    """Search ranges for the two dense-layer widths and the dropout rate."""
    # Layer widths are sampled on a log scale around the 1024 default.
    num_units_first_dense = UniformNumber(
        950, 1450, default=1024, type=int, log_scale=True)
    num_units_second_dense = UniformNumber(
        900, 1600, default=1024, type=int, log_scale=True)
    dropout_rate = UniformFloat(0.33, 0.55, default=0.45)
# Example 4
0
#!/usr/bin/env python
# coding=utf-8
from __future__ import division, print_function, unicode_literals

from sacred import Experiment
from labwatch.assistant import LabAssistant
from labwatch.hyperparameters import UniformFloat
import numpy as np


# Minimal setup: no optimizer argument, so the assistant falls back to its
# default behavior; database name also left at the default.
ex = Experiment()
a = LabAssistant(ex)


@ex.config
def cfg():
    """Default evaluation point (x1, x2) for the Branin function."""
    x = (0.0, 5.0)


@a.search_space
def search_space():
    """Independent uniform ranges for the two Branin inputs."""
    x = (
        UniformFloat(-5, 10),  # range for x1
        UniformFloat(0, 15),   # range for x2
    )


@ex.automain
def branin(x):
    """Evaluate the Branin benchmark function at ``x = (x1, x2)``.

    BUG FIX: the original computed ``y`` and then discarded it; the value
    must be returned so sacred records it as the experiment result (and the
    assistant can use it to drive the search).
    """
    x1, x2 = x
    print("{:.2f}, {:.2f}".format(x1, x2))
    y = (x2 - (5.1 / (4 * np.pi ** 2)) * x1 ** 2 + 5 * x1 / np.pi - 6) ** 2
    y += 10 * (1 - 1 / (8 * np.pi)) * np.cos(x1) + 10
    return y
# Example 5
0
#!/usr/bin/env python
# coding=utf-8
from __future__ import division, print_function, unicode_literals

from labwatch.optimizers.random_search import RandomSearch
from sacred import Experiment
from labwatch.assistant import LabAssistant
from labwatch.hyperparameters import UniformFloat
import numpy as np

# Same Branin experiment as above, but with an explicit results database
# ('branin') and explicit RandomSearch proposals.
ex = Experiment()
a = LabAssistant(ex, database_name='branin', optimizer=RandomSearch)


@ex.config
def cfg():
    """Default evaluation point (x1, x2) for the Branin function."""
    x = (0.0, 5.0)


@a.search_space
def search_space():
    """Independent uniform ranges for the two Branin inputs."""
    x = (
        UniformFloat(-5, 10),  # range for x1
        UniformFloat(0, 15),   # range for x2
    )


@ex.automain
def branin(x):
    """Evaluate the Branin benchmark function at ``x = (x1, x2)``.

    BUG FIX: the original computed ``y`` and then discarded it; the value
    must be returned so sacred records it as the experiment result (and the
    optimizer can minimize it).
    """
    x1, x2 = x
    print("{:.2f}, {:.2f}".format(x1, x2))
    y = (x2 - (5.1 / (4 * np.pi**2)) * x1**2 + 5 * x1 / np.pi - 6)**2
    y += 10 * (1 - 1 / (8 * np.pi)) * np.cos(x1) + 10
    return y
# Example 6
0
from __future__ import print_function

import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop

from sacred import Experiment
from labwatch.assistant import LabAssistant
from labwatch.hyperparameters import UniformInt, UniformFloat
from labwatch.optimizers.random_search import RandomSearch

# Keras MNIST demo: assistant logs to the "labwatch_demo_keras" database
# (positional second argument) and samples configs via RandomSearch.
ex = Experiment()
a = LabAssistant(ex, "labwatch_demo_keras", optimizer=RandomSearch)


@ex.config
def cfg():
    """Defaults for the two-layer Keras MNIST classifier."""
    learning_rate = 1e-3           # RMSprop learning rate
    batch_size = 128               # examples per training step
    num_units_first_layer = 512    # width of the first dense layer
    num_units_second_layer = 512   # width of the second dense layer
    dropout_first_layer = 0.2      # dropout rate after layer 1
    dropout_second_layer = 0.2     # dropout rate after layer 2


@a.search_space
def small_search_space():
    batch_size = UniformInt(lower=32, upper=64, default=32, log_scale=True)
    learning_rate = UniformFloat(lower=10e-3,