Code example #1
0
File: utils.py — Project: pzhokhov/FaderNetworks
def get_dump_path(params):
    """
    Create a directory to store the experiment.

    Resolution order:
      1. studio's ``fs_tracker`` model directory, when available;
      2. the ``MODELS_PATH`` environment variable, when set;
      3. a freshly created ``DEFAULT_MODELS_PATH/<params.name>/<random id>``.

    Args:
        params: experiment parameters; only ``params.name`` is read here.

    Returns:
        str: path of the directory the experiment should dump into.
    """
    if fs_tracker:
        return fs_tracker.get_model_directory()
    if os.environ.get('MODELS_PATH'):
        return os.environ.get('MODELS_PATH')

    MODELS_PATH = DEFAULT_MODELS_PATH
    assert os.path.isdir(MODELS_PATH)

    # create the sweep path if it does not exist
    # (os.makedirs replaces the original shell-out to `mkdir`, which was
    # open to shell injection via params.name and silently ignored failures)
    sweep_path = os.path.join(MODELS_PATH, params.name)
    if not os.path.isdir(sweep_path):
        os.makedirs(sweep_path)

    # draw random 10-character ids until an unused directory name is found
    chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
    while True:
        exp_id = ''.join(random.choice(chars) for _ in range(10))
        dump_path = os.path.join(sweep_path, exp_id)
        if not os.path.isdir(dump_path):
            break

    # create the dump folder (the loop above only exits when it does not
    # exist, so the original's second isdir check was redundant)
    os.makedirs(dump_path)
    return dump_path
Code example #2
0
    def test_get_model_directory_noargs(self):
        """get_model_directory() should honour the STUDIOML_EXPERIMENT env var."""
        testExperiment = 'testExperiment'
        # expected layout: ~/.studioml/experiments/<experiment>/modeldir
        testPath = os.path.join(os.path.expanduser('~'),
                                '.studioml/experiments', testExperiment,
                                'modeldir')

        os.environ['STUDIOML_EXPERIMENT'] = testExperiment
        # assertEqual reports both values on failure, unlike assertTrue(a == b)
        self.assertEqual(testPath, fs_tracker.get_model_directory())
Code example #3
0
 def test_get_model_directory_args(self):
     """get_model_directory(name) should build the per-experiment model path."""
     experimentName = 'testExperiment'
     modelDir = fs_tracker.get_model_directory(experimentName)
     # assertEqual reports both values on failure, unlike assertTrue(a == b)
     self.assertEqual(modelDir, os.path.join(
         os.path.expanduser('~'),
         '.studioml/experiments/testExperiment/modeldir'))
Code example #4
0
# Replicate the model across GPUs and compile it for classification.
# NOTE(review): make_parallel / no_gpus / batch_size are defined outside
# this snippet — confirm against the full script.
model = make_parallel(model, no_gpus)
model.compile(loss='categorical_crossentropy', optimizer='adam')

# Load MNIST: 60k training and 10k test 28x28 images.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Flatten each 28x28 image into a 784-vector and scale pixels to [0, 1].
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255

# convert class vectors to binary class matrices
y_train = to_categorical(y_train, 10)
y_test = to_categorical(y_test, 10)

# Write epoch-numbered checkpoints into studio's tracked model directory.
checkpointer = ModelCheckpoint(fs_tracker.get_model_directory() +
                               '/checkpoint.{epoch:02d}-{val_loss:.2f}.hdf')

# Send training curves to studio's TensorBoard directory.
tbcallback = TensorBoard(log_dir=fs_tracker.get_tensorboard_dir(),
                         histogram_freq=0,
                         write_graph=True,
                         write_images=False)

# Epoch count comes from the first CLI argument; the batch is scaled by
# the GPU count so each replica sees batch_size samples.
model.fit(x_train,
          y_train,
          validation_data=(x_test, y_test),
          epochs=int(sys.argv[1]),
          batch_size=batch_size * no_gpus,
          callbacks=[checkpointer, tbcallback])
Code example #5
0
# Let's train the model with a TensorFlow optimizer:

mnist_data = input_data.read_data_sets('MNIST_data', one_hot=True)

# global_step is incremented by the optimizer on every update so the saver
# writes sequentially numbered checkpoints.
global_step = tf.Variable(0, name='global_step', trainable=False)
train_step = tf.train.GradientDescentOptimizer(
    0.5).minimize(loss, global_step=global_step)
# Initialize all variables
init_op = tf.global_variables_initializer()
saver = tf.train.Saver()
sess.run(init_op)


# BUG FIX: the stdlib logging module exposes getLogger, not get_logger —
# the original logging.get_logger(...) raised AttributeError at runtime.
logger = logging.getLogger('train_mnist')
logger.setLevel(10)  # 10 == logging.DEBUG
# Run training loop
# NOTE(review): the loop has no exit condition — presumably the job is
# stopped externally; confirm against how this example is launched.
with sess.as_default():
    while True:
        batch = mnist_data.train.next_batch(50)
        train_step.run(feed_dict={img: batch[0],
                                  labels: batch[1]})

        sys.stdout.flush()
        # Checkpoint into studio's tracked model directory once per step.
        saver.save(
            sess,
            os.path.join(
                fs_tracker.get_model_directory(),
                "ckpt"),
            global_step=global_step)
        time.sleep(1)
Code example #6
0
File: save_model.py — Project: FrazerBayley/studio
from keras.layers import Dense
from keras.models import Sequential

from studio import fs_tracker
import os

# Build a minimal model: a single dense layer mapping 2 inputs to 2 outputs.
model = Sequential()
model.add(Dense(2, input_shape=(2, )))

# Set the kernel to 2 * identity.
# NOTE(review): a Dense layer with the default use_bias=True expects
# set_weights([kernel, bias]); passing only a kernel as done here raises a
# length-mismatch error on recent Keras versions — confirm which Keras
# version this example targets.
model.set_weights([[[2, 0], [0, 2]]])
# Persist the model where studio's fs_tracker collects artifacts.
model.save(os.path.join(fs_tracker.get_model_directory(), 'weights.h5'))
Code example #7
0
# Output layer: 10 class probabilities via softmax.
model.add(Dense(10, activation='softmax'))
model.summary()

# Hyperparameters; epoch count comes from the first CLI argument (default 10).
batch_size = 128
no_epochs = int(sys.argv[1]) if len(sys.argv) > 1 else 10
lr = 0.01

print('learning rate = {}'.format(lr))
print('batch size = {}'.format(batch_size))
print('no_epochs = {}'.format(no_epochs))

model.compile(loss='categorical_crossentropy',
              optimizer=optimizers.SGD(lr=lr),
              metrics=['accuracy'])

# Write epoch-numbered checkpoints into studio's tracked model directory.
print("Saving checkpoints to {}".format(fs_tracker.get_model_directory()))
checkpointer = ModelCheckpoint(fs_tracker.get_model_directory() +
                               '/checkpoint.{epoch:02d}-{val_loss:.2f}.hdf')

# Send training curves and layer images to studio's TensorBoard directory.
tbcallback = TensorBoard(log_dir=fs_tracker.get_tensorboard_dir(),
                         histogram_freq=0,
                         write_graph=True,
                         write_images=True)

# NOTE(review): x_train/x_test and the rest of the model are defined
# earlier in the full script, outside this snippet.
model.fit(x_train,
          y_train,
          validation_data=(x_test, y_test),
          epochs=no_epochs,
          callbacks=[checkpointer, tbcallback],
          batch_size=batch_size)
Code example #8
0
File: train_mnist.py — Project: vpomponiu/studio
# We define the placeholder for the labels, and the loss function we will use:

# One-hot labels for 10 classes; batch dimension left open.
labels = tf.placeholder(tf.float32, shape=(None, 10))

# NOTE(review): preds and img are defined earlier in the full script,
# outside this snippet.
loss = tf.reduce_mean(categorical_crossentropy(labels, preds))
# Let's train the model with a TensorFlow optimizer:

mnist_data = input_data.read_data_sets('MNIST_data', one_hot=True)

# global_step is incremented by the optimizer on every update so the saver
# writes sequentially numbered checkpoints.
global_step = tf.Variable(0, name='global_step', trainable=False)
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(
    loss, global_step=global_step)
# Initialize all variables
init_op = tf.global_variables_initializer()
saver = tf.train.Saver()
sess.run(init_op)

logger = logging.getLogger('train_mnist')
logger.setLevel(10)  # 10 == logging.DEBUG
# Run training loop
# NOTE(review): the loop has no exit condition — presumably the job is
# stopped externally; confirm against how this example is launched.
with sess.as_default():
    while True:
        batch = mnist_data.train.next_batch(50)
        train_step.run(feed_dict={img: batch[0], labels: batch[1]})

        sys.stdout.flush()
        # Checkpoint into studio's tracked model directory once per step.
        saver.save(sess,
                   os.path.join(fs_tracker.get_model_directory(), "ckpt"),
                   global_step=global_step)
        time.sleep(1)