Example 1
    # `broadcast` and `barrier` are assumed to come from kungfu.ops; this is
    # a method of a wrapper object that exposes `variables()` and holds a
    # `_save_model_op`.
    def distributed_initializer(self):
        # Broadcast every variable from the root peer so that all workers
        # start from identical weights.
        bcast_ops = []
        for v in self.variables():
            bcast_ops.append(tf.assign(v, broadcast(v)))

        # The returned op only releases the barrier after the broadcasts
        # and the model save have run, so no peer starts training early.
        with tf.control_dependencies(bcast_ops):
            with tf.control_dependencies([self._save_model_op]):
                return barrier()
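
A minimal usage sketch, assuming the method above lives on an optimizer-style wrapper here called `dist_opt` (a hypothetical name, not from the source) and a TensorFlow 1.x session:

# Hypothetical driver code: run the initializer once on every worker
# before training begins.
init_op = dist_opt.distributed_initializer()
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(init_op)  # broadcast weights, save the model, hit the barrier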
Example 2
import tensorflow as tf

# `save_variable`, `request_variable`, and `barrier` are assumed to be
# imported from kungfu.ops.
def test_save_and_request():
    global_step = tf.Variable(tf.constant(0, dtype=tf.int64))
    target = tf.Variable(tf.constant(0, dtype=tf.int32))  # rank of the peer to query

    x = tf.Variable(tf.zeros([10], dtype=tf.int32))

    inc_op = tf.assign_add(global_step, 1)
    update_op = tf.assign(x, x + 1)
    save_op = save_variable(x, global_step)  # publish x under the current step
    y = request_variable(target, global_step, x.name, x.shape, x.dtype)  # fetch the copy held by `target`

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())

        for _ in range(3):
            sess.run([inc_op, update_op])
            sess.run(save_op)
            sess.run(barrier())  # ensure every peer has saved before requesting
            v = sess.run(y)
            print(v)
        sess.run(barrier())  # final synchronization before the session closes
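
Reading of the test: each of the three iterations advances global_step, increments x, and publishes the new value via save_variable; the barrier inside the loop should guarantee every peer has saved before request_variable pulls the copy held by peer target. The trailing barrier keeps fast peers from tearing down their session while others are still serving requests.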
Example 3
def test_barrier():
    # `barrier` is assumed to come from kungfu.ops; running the op blocks
    # until every peer in the cluster has reached the barrier.
    with tf.Session() as sess:
        sess.run(barrier())
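
On its own this test returns immediately only in a single-peer cluster; with multiple peers it is meant to be launched on all of them at once, e.g. with KungFu's kungfu-run launcher.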
Example 4
# `applications` is assumed to be keras.applications and `args` the parsed
# command-line arguments; the model starts from randomly initialized weights.
model = getattr(applications, args.model)(weights=None)

opt = None
learning_rate = 0.01
if args.optimizer == 'sgd':
    opt = tf.train.GradientDescentOptimizer(learning_rate)
elif args.optimizer == 'adam':
    opt = tf.train.AdamOptimizer(learning_rate)
else:
    raise ValueError('Unknown optimizer option: %s' % args.optimizer)

barrier_op = None

if args.kungfu:
    from kungfu.ops import barrier
    barrier_op = barrier()  # run later to keep peers in step (see the sketch below)
    if args.kungfu == 'sync-sgd':
        from kungfu.optimizers import SyncSGDOptimizer
        opt = SyncSGDOptimizer(opt)  # synchronous all-reduce of gradients
    elif args.kungfu == 'async-sgd':
        from kungfu.optimizers import PeerModelAveragingOptimizer
        opt = PeerModelAveragingOptimizer(opt)  # asynchronous peer-to-peer model averaging
    elif args.kungfu == 'sync-sgd-nccl':
        from kungfu.optimizers import SyncSGDOptimizer
        opt = SyncSGDOptimizer(opt, nccl=True, nccl_fusion=True)  # all-reduce on GPU via NCCL
    elif args.kungfu == 'ada-sgd':
        from kungfu.optimizers import AdaptiveSGDOptimizer
        opt = AdaptiveSGDOptimizer(opt, 10)
    elif args.kungfu == 'sma-sgd':
        from kungfu.optimizers import SyncModelAveragingSGDOptimizer
        opt = SyncModelAveragingSGDOptimizer(opt)  # synchronous model averaging
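
The excerpt builds barrier_op but never runs it. A plausible continuation, sketched under the assumption of a plain TensorFlow 1.x training loop (`loss` and `num_steps` are hypothetical names, not from the original):

# Hypothetical continuation of the script above.
train_op = opt.minimize(loss)  # KungFu optimizers wrap the underlying one

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(num_steps):  # `num_steps` assumed defined elsewhere
        sess.run(train_op)
    if barrier_op is not None:
        sess.run(barrier_op)  # keep peers in step before shutdown

With the synchronous optimizers one would typically also broadcast the initial weights to all peers before training, along the lines of the distributed initializer in Example 1.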