Example #1
import struct
import sys

import deepwater_pb2

debug = False  # verbosity flag, assumed to be a module-level default


def read_cmd():
    # Read one length-prefixed Cmd from stdin; returns None on EOF.
    cmd = deepwater_pb2.Cmd()
    size_bytes = sys.stdin.buffer.read(4)
    if len(size_bytes) == 0:
        return None
    # The 4-byte prefix is a big-endian unsigned int giving the payload size.
    size = struct.unpack('>I', size_bytes)[0]
    data = sys.stdin.buffer.read(size)
    cmd.ParseFromString(data)
    if debug:
        print('Received', size, file=sys.stderr)
    return cmd
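The main loop in Example #4 also calls write_cmd(res), which these excerpts do not include. A minimal counterpart sketch, assuming replies are framed on stdout exactly like requests on stdin (a 4-byte big-endian length followed by the serialized Cmd); only the function name comes from the call site:

def write_cmd(cmd):
    # Frame the reply with the same 4-byte big-endian length prefix as read_cmd().
    data = cmd.SerializeToString()
    sys.stdout.buffer.write(struct.pack('>I', len(data)))
    sys.stdout.buffer.write(data)
    sys.stdout.buffer.flush()  # the caller blocks on the pipe until this reply arrives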
Example #2
#!/usr/bin/env python3

import numpy as np

import backend
import deepwater_pb2

# Run the backend in-process: a single worker, CPU only.
backend.size = 1
backend.gpus = []
res = deepwater_pb2.Cmd()  # reused to hold each reply

# Layer specification for a fully connected 784-1024-1024-512-10 network:
# ReLU hidden layers with per-layer dropout ratios and a final loss layer.
sizes = [784, 1024, 1024, 512, 10]
types = ['data', 'relu', 'relu', 'relu', 'loss']
drops = [.8, .5, .5, .5, 0]

# Build the Create command: batches of 64 flattened inputs (NCHW 64x1x1x784), SGD solver.
create = deepwater_pb2.Cmd()
create.type = deepwater_pb2.Create
create.input_shape.extend([64, 1, 1, sizes[0]])
create.solver_type = 'SGD'
create.sizes.extend(sizes)
create.types.extend(types)
create.dropout_ratios.extend(drops)
create.learning_rate = .01
create.momentum = .9
backend.message(create, res)

# Two training steps on all-zero placeholder batches.
for i in range(2):
    train = deepwater_pb2.Cmd()
    train.type = deepwater_pb2.Train
    train.input_shape.extend(create.input_shape)
    batch = np.zeros(train.input_shape, dtype=np.float32)
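The excerpt ends before the batch is sent to the backend. Judging from Example #4, where Cmd.data carries raw float32 bytes, the loop body would plausibly finish along these lines; the label shape and the use of a second data entry are assumptions, not confirmed by these excerpts:

    # Assumed continuation: ship the batch and per-example labels as raw bytes.
    label = np.zeros([train.input_shape[0], 1], dtype=np.float32)
    train.data.append(batch.tobytes())
    train.data.append(label.tobytes())
    backend.message(train, res)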
Example #3
#!/usr/bin/env python3

import sys

from multiprocessing import Pool
from solver import *
import numpy as np
import deepwater_pb2
cmd = deepwater_pb2.Cmd()

# Create the predefined 'lenet' graph: batches of 128 single-channel
# 28x28 images, trained with plain SGD.
cmd.type = deepwater_pb2.Create
cmd.graph = 'lenet'
cmd.input_shape.extend([128, 1, 28, 28])
cmd.solver_type = 'SGD'
cmd.learning_rate = .01
cmd.momentum = .9

# One worker process, CPU only; each worker builds its own solver state
# in the pool initializer, then start() brings it up.
size = 1
uid = None
gpus = []

pool = Pool(size, initializer=create, initargs=(cmd, uid, size, gpus))

pool.map(start, range(size))

for i in range(2):
    # All-zero placeholder inputs: an image batch and one label per image.
    batch = np.zeros(cmd.input_shape, dtype=np.float32)
    label = np.zeros([cmd.input_shape[0], 1], dtype=np.float32)

    if i % 10 == 0:
        print('map', (batch.shape, label.shape), file=sys.stderr)
    # Split the batch and labels into one (batch, label) chunk per worker.
    tmp = zip(np.split(batch, size), np.split(label, size))
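The excerpt stops before the per-worker chunks are dispatched. A plausible continuation hands each (batch, label) pair to the pool; the worker function name train is hypothetical (these excerpts only show the analogous predict path in Example #4):

    # Hypothetical dispatch: each worker receives one (batch, label) tuple.
    pool.map(train, tmp)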
Example #4
        # Inference branch: deserialize the float32 batch, split it across the
        # worker pool, run predict() in parallel, and return the result as bytes.
        batch = np.frombuffer(req.data[0], dtype=np.float32)
        batch = batch.reshape(req.input_shape)
        data = np.split(batch, size)
        data = pool.map(predict, data)
        data = np.concatenate(data)
        res.data.append(data.tobytes())
        res.type = deepwater_pb2.Success

    if req.type == deepwater_pb2.SaveGraph:
        print('Saving graph to', req.path, file=sys.stderr)
        pool.map(save_graph, [req.path])

    if req.type == deepwater_pb2.Save:
        print('Saving model to', req.path, file=sys.stderr)
        pool.map(save, [req.path])

    if req.type == deepwater_pb2.Load:
        print('Loading model from', req.path, file=sys.stderr)
        pool.map(load, [req.path])


if __name__ == '__main__':
    print('Started Caffe backend', file=sys.stderr)
    # Serve length-prefixed Cmd messages from stdin until the pipe closes,
    # dispatching each request and writing the reply back on stdout.
    while True:
        req = read_cmd()
        if req is None:
            break
        res = deepwater_pb2.Cmd()
        message(req, res)
        write_cmd(res)
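Taken together, the excerpts define a simple wire protocol: every message is a serialized Cmd preceded by a four-byte big-endian length. A small self-contained sketch of that framing, usable as a round-trip sanity check (frame and unframe are hypothetical helper names; only deepwater_pb2 is assumed to be importable):

import io
import struct

import deepwater_pb2


def frame(cmd):
    # Serialize a Cmd and prepend the 4-byte big-endian length prefix.
    payload = cmd.SerializeToString()
    return struct.pack('>I', len(payload)) + payload


def unframe(stream):
    # Inverse of frame(): read one length-prefixed Cmd from a binary stream.
    size = struct.unpack('>I', stream.read(4))[0]
    cmd = deepwater_pb2.Cmd()
    cmd.ParseFromString(stream.read(size))
    return cmd


if __name__ == '__main__':
    cmd = deepwater_pb2.Cmd()
    cmd.type = deepwater_pb2.Create
    cmd.graph = 'lenet'
    cmd.input_shape.extend([128, 1, 28, 28])

    echoed = unframe(io.BytesIO(frame(cmd)))
    assert echoed.graph == 'lenet'
    assert list(echoed.input_shape) == [128, 1, 28, 28]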