def test_simple_mlp_creating():
    """Build a tiny MLP with a logistic-regression loss and visualize it.

    Constructs a 784->100 (tanh) hidden layer over a batch of 20 inputs,
    attaches a logistic regression against 10-way targets, writes the
    graphviz dot representation of the loss graph to ``bla.dot``, and then
    shows the same graph in an xdot/GTK window (blocks until closed).
    """
    inputs = cn.ParameterInput([20, 784], "X")
    targets = cn.ParameterInput([20, 10], "Y")
    hidden = cn.mlp_layer(inputs, 100, cn.mlp_layer_opts().tanh().group("hl"))
    logreg = cn.logistic_regression(hidden.output, targets, False)

    # Persist the dot description of the loss graph for offline inspection.
    with open("bla.dot", "w") as dotfile:
        dotfile.write(logreg.loss.dot(True))

    # Interactive display; gtk.main() does not return until the window closes.
    window = xdot.DotWindow()
    window.connect('destroy', gtk.main_quit)
    window.set_filter('dot')
    window.set_dotcode(logreg.loss.dot(True))
    gtk.main()
def __init__(self, args):
    """Build a ``cn.mlp_layer_opts`` configuration from a keyword mapping.

    Iterates over the non-None entries of *args* (via ``IterNotNone``) and
    forwards each recognized key to the corresponding ``mlp_layer_opts``
    setter.  ``size`` is stored on ``self.size`` instead; tuple values for
    ``group`` / ``with_bias`` are splatted as positional arguments.

    Parameters
    ----------
    args : mapping-like
        Layer configuration; accepted keys: size, group, verbose, dropout,
        nonlin (one of "linear", "rectified_linear", "tanh"), n_groups,
        maxout, lr_fact, with_bias, init_std.

    Raises
    ------
    RuntimeError
        On an unknown key or an unknown ``nonlin`` value.
    """
    self.args = args
    cfg = cn.mlp_layer_opts()
    for k, v in IterNotNone(args):
        if k == "size":
            self.size = int(v)
        elif k == "group":
            if isinstance(v, tuple):
                cfg.group(*v)
            else:
                cfg.group(v)
        elif k == "verbose":
            cfg.verbose(v)
        elif k == "dropout":
            cfg.dropout(v)
        elif k == "nonlin":
            if v == "linear":
                pass  # linear means "no non-linearity": nothing to configure
            elif v == "rectified_linear":
                cfg.rectified_linear()
            elif v == "tanh":
                cfg.tanh()
            else:
                # Bug fix: original referenced the undefined name `nonlin`
                # here, raising NameError instead of the intended RuntimeError.
                raise RuntimeError("Unknown non-linearity type `%s'" % str(v))
        elif k == "n_groups":
            cfg.n_groups(v)
        elif k == "maxout":
            cfg.maxout(v)
        elif k == "lr_fact":
            cfg.learnrate_factor(v)
        elif k == "with_bias":
            if isinstance(v, tuple):
                cfg.with_bias(*v)
            else:
                cfg.with_bias(v)
        elif k == "init_std":
            cfg.weight_init_std(v)
        else:
            raise RuntimeError("Unknown argument `%s'" % k)
    self.cfg = cfg