Example #1
from copy import deepcopy

import numpy as np
import yaml

import neon
import neon.optimizers
from neon import NervanaObject
from neon.layers import GeneralizedCost, Sequential
from neon.models import Model

def create_objects(root_yaml,
                   be_type='gpu',
                   batch_size=128,
                   rng_seed=None,
                   device_id=0,
                   default_dtype=np.float32,
                   stochastic_rounding=False):
    """
    Instantiate objects as per the given specifications.

    Arguments:
        root_yaml (dict): Model definition dictionary parse from YAML file

        be_type (str): backend either 'gpu', 'mgpu' or 'cpu'

        rng_seed (None or int): random number generator seed

        device_id (int): for GPU backends id of device to use

        default_dtype (type): numpy data format for default data types,

        stochastic_rounding (bool or int): number of bits for stochastic rounding
                                           use False for no rounding

    Returns:
        tuple: Contains model, cost and optimizer objects.
    """

    assert NervanaObject.be is not None, 'Must generate a backend before running this function'

    # root_yaml may be a filename or an already-parsed dictionary
    if type(root_yaml) is str:
        with open(root_yaml, 'r') as fid:
            root_yaml = yaml.safe_load(fid.read())

    # in case references were used
    root_yaml = deepcopy(root_yaml)

    # initialize layers
    yaml_layers = root_yaml['layers']

    # currently only sequential models are supported in YAML
    layer_dict = {'layers': yaml_layers}
    layers = Sequential.gen_class(layer_dict)

    # initialize model
    model = Model(layers=layers)

    # cost (before layers for shortcut derivs)
    cost_name = root_yaml['cost']
    cost = GeneralizedCost.gen_class({'costfunc': {'type': cost_name}})

    # create optimizer
    opt = None
    if 'optimizer' in root_yaml:
        yaml_opt = root_yaml['optimizer']
        typ = yaml_opt['type']
        opt = getattr(neon.optimizers, typ).gen_class(yaml_opt['config'])

    return model, cost, opt
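A minimal usage sketch (not part of the original source): it assumes a neon backend has already been generated via gen_backend and that 'model.yaml' is a hypothetical YAML file with 'layers', 'cost' and an optional 'optimizer' section.

from neon.backends import gen_backend

# generate a backend first; this satisfies the NervanaObject.be assertion
be = gen_backend(backend='cpu', batch_size=128, rng_seed=0)

# 'model.yaml' is an assumed, illustrative file name
model, cost, opt = create_objects('model.yaml', be_type='cpu',
                                  batch_size=128, rng_seed=0)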
Example #2
def test_gan_container(backend_default):
    """
    Set up a GenerativeAdversarial container and make sure generator
    and discriminator layers get configured correctly.
    """
    init_norm = Gaussian(loc=0.0, scale=0.01)
    # set up container and ensure layers get wired up correctly
    generator = Sequential([Affine(nout=10, init=init_norm), Affine(nout=100, init=init_norm)])
    discriminator = Sequential([Affine(nout=100, init=init_norm), Affine(nout=1, init=init_norm)])
    layers = GenerativeAdversarial(generator, discriminator)

    assert len(layers.layers) == 4
    assert layers.layers[0].nout == 10
    assert layers.layers[1].nout == 100
    assert layers.layers[2].nout == 100
    assert layers.layers[3].nout == 1
    assert layers.generator.layers == layers.layers[0:2]
    assert layers.discriminator.layers == layers.layers[2:4]
Example #3
def sub_handler(layers, flags, stacks, this_model):
    # Subtraction via negation: the right-hand operand is wrapped in
    # Normalizer(divisor=-1), so the outer MergeSum computes left - right.
    head, layers = split_merge_layers(layers)
    if len(layers) > 2:
        # more than two operands: sum all but the first, then negate that sum
        left = layers[0]
        right = sequential(
                layers=(
                    MergeSum(layers[1:]),
                    Activation(neon.transforms.Normalizer(divisor=-1))))
        network = Sequential(layers=head + (MergeSum(layers=(left, right)),))
    elif len(layers) == 2:
        # exactly two operands: negate the second one directly
        left = layers[0]
        right = sequential(
                layers=(
                    layers[1],
                    Activation(neon.transforms.Normalizer(divisor=-1))))
        network = Sequential(layers=head + (MergeSum(layers=(left, right)),))
    else:
        # single operand: nothing to subtract
        network = layers[0]
    return network
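The subtraction above relies on negating the right-hand branch before summing. A minimal standalone sketch of that pattern, assuming left and right are pre-built branch containers and that neon.transforms.Normalizer(divisor=-1) divides (and hence negates) its input:

# left and right are assumed, pre-built branch containers (not defined here)
negated = sequential(layers=(right,
                             Activation(neon.transforms.Normalizer(divisor=-1))))
diff = MergeSum(layers=(left, negated))   # element-wise left - right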
Example #4
def sequential(layers):
    # Flatten any nested Sequential containers into one flat Sequential.
    a = ()
    for t in layers:
        if type(t) == Sequential:
            a += tuple(t.layers)
        else:
            a += (t,)
    res = Sequential(layers=a)
    # print('in_shape:', a[0].in_shape)
    # res.configure(a[0].in_shape)
    return res
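A short illustrative sketch of the flattening behaviour (the Gaussian/Affine layers below are only assumptions for demonstration):

init = Gaussian(loc=0.0, scale=0.01)
inner = Sequential([Affine(nout=64, init=init), Affine(nout=32, init=init)])
flat = sequential((inner, Affine(nout=10, init=init)))
# flat.layers now contains inner's layers followed by the final Affine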
Example #5
def split_merge_layers(layers):
    # Separate each incoming branch into its BranchNode (if any) and the
    # layers that follow it.
    bs = []
    ls = []
    for layer in layers:
        if type(layer) == BranchNode:
            b = layer
            l = SkipNode()
        elif type(layer.layers[0]) == BranchNode:
            b = layer.layers[0]
            l = Sequential(tuple(layer.layers[1:]))
        else:
            b = None
            l = None
        bs += [b]
        ls += [l]
    # all branches must share at most one BranchNode
    bset = set(bs) - {None}
    if len(bset) > 1:
        print(bset)
    assert len(bset) <= 1
    if len(bset) == 1:
        # pull the single shared BranchNode out of the set
        for b in bset:
            pass
    print('bs:', bs)
    print('ls:', ls)

    # For branches without an explicit BranchNode, split their layer list
    # around the shared BranchNode; the part before it becomes the common head.
    head = ()
    for i, layer in enumerate(layers):
        if ls[i] is None:
            ll = split_list(layers[i].layers, b)
            assert len(ll) <= 2
            if len(ll) == 2:
                assert head == ()
                head, l = ll
                head = tuple(head)
                l = tuple(l)
                ls[i] = l
            else:
                ls[i] = layers[i].layers

    print('bs:', bs)
    print('head:', head)
    print('ls:', ls)

    return head, tuple(ls)
Example #6
def add_handler(layers, flags, stacks, this_model):
    # Element-wise addition: sum all branches with MergeSum.
    head, ls = split_merge_layers(layers)
    return Sequential(layers=head + (MergeSum(ls),))
Example #7
def concat_handler(layers, flags, stacks, this_model):
    # Concatenation: join the branches along the channel (depth) dimension.
    head, ls = split_merge_layers(layers)
    return Sequential(layers=head + (MergeBroadcast(ls, merge="depth"),))