from nnvm import symbol as sym


def get_classifier(input_data, num_classes):
    """Get VGG classifier layers as fc layers."""
    flatten = sym.flatten(data=input_data, name="flatten")
    # NOTE: the reference VGG uses 4096-unit fc layers; 32 keeps this example small.
    fc1 = sym.dense(data=flatten, units=32, name="fc1")
    relu1 = sym.relu(data=fc1, name="relu1")
    drop1 = sym.dropout(data=relu1, rate=0.5, name="drop1")
    fc2 = sym.dense(data=drop1, units=32, name="fc2")
    relu2 = sym.relu(data=fc2, name="relu2")
    drop2 = sym.dropout(data=relu2, rate=0.5, name="drop2")
    fc3 = sym.dense(data=drop2, units=num_classes, name="fc3")
    return fc3
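
# Usage sketch (not from the original source): wiring the classifier onto an
# input variable and creating a graph. The "data" variable name and
# num_classes=10 are hypothetical choices for illustration.
import nnvm

data = sym.Variable("data")
classifier = get_classifier(data, num_classes=10)
graph = nnvm.graph.create(sym.softmax(classifier))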
import nnvm
from nnvm import symbol as sym
from nnvm.compiler import graph_attr, graph_util


def check(dim, axis, nstep):
    # simple_bn is a helper defined alongside this test that expands
    # batch_norm into its elementwise equivalent for comparison.
    eps = 0.01
    x = sym.Variable("x") + 1
    beta = sym.Variable("beta")
    gamma = sym.Variable("gamma")
    moving_var = sym.Variable("moving_var")
    moving_mean = sym.Variable("moving_mean")
    y1, y2 = x, sym.Variable("xx") + 1
    ishape = {"x": tuple(10 for i in range(dim))}
    for i in range(nstep):
        y1 = sym.batch_norm(y1 + 1, gamma, beta, moving_mean, moving_var,
                            epsilon=eps, axis=axis)
        y1 = sym.dropout(y1)
        y2 = simple_bn(y2 + 1, gamma, beta, moving_mean, moving_var,
                       epsilon=eps, axis=axis, shape=ishape["x"])
    g = nnvm.graph.create(y1)
    g2 = nnvm.graph.create(y2)
    graph_attr.set_shape_inputs(g, ishape)
    g1 = g.apply("InferShape").apply("SimplifyInference")
    # assert the simplified graph matches the expected expansion
    graph_util.check_graph_equal(g1, g2)
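
# Usage sketch (the dim/axis/nstep values below are illustrative assumptions,
# not taken from the original source): exercise the SimplifyInference pass
# over a few input ranks, normalization axes, and unroll depths.
def test_simplify_batchnorm():
    check(dim=2, axis=1, nstep=1)
    check(dim=4, axis=0, nstep=3)


if __name__ == "__main__":
    test_simplify_batchnorm()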
from nnvm import symbol as sym


def get_symbol(num_classes, version, **kwargs):
    """Get symbol of SqueezeNet

    Parameters
    ----------
    num_classes : int
        The number of classification results

    version : str, optional
        SqueezeNet version; only "1.1" is supported by this implementation
    """
    assert version == '1.1', ("Unsupported SqueezeNet version {version}: "
                              "1.1 expected".format(version=version))
    # _make_fire is a helper defined alongside this function that builds a
    # SqueezeNet fire module (a squeeze conv feeding 1x1/3x3 expand convs).
    net = sym.Variable("data")
    net = sym.conv2d(net, channels=64, kernel_size=(3, 3), strides=(2, 2))
    net = sym.relu(net)
    net = sym.max_pool2d(net, pool_size=(3, 3), strides=(2, 2))
    net = _make_fire(net, 16, 64, 64)
    net = _make_fire(net, 16, 64, 64)
    net = sym.max_pool2d(net, pool_size=(3, 3), strides=(2, 2))
    net = _make_fire(net, 32, 128, 128)
    net = _make_fire(net, 32, 128, 128)
    net = sym.max_pool2d(net, pool_size=(3, 3), strides=(2, 2))
    net = _make_fire(net, 48, 192, 192)
    net = _make_fire(net, 48, 192, 192)
    net = _make_fire(net, 64, 256, 256)
    net = _make_fire(net, 64, 256, 256)
    net = sym.dropout(net, rate=0.5)
    net = sym.conv2d(net, channels=num_classes, kernel_size=(1, 1))
    net = sym.relu(net)
    net = sym.global_avg_pool2d(net)
    return sym.softmax(net, axis=1)
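
# Usage sketch (the batch/input shape is a hypothetical ImageNet-style
# choice): build SqueezeNet 1.1 and run shape inference over the graph.
import nnvm
from nnvm.compiler import graph_attr

net = get_symbol(num_classes=1000, version='1.1')
g = nnvm.graph.create(net)
graph_attr.set_shape_inputs(g, {"data": (1, 3, 224, 224)})
g = g.apply("InferShape")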
def forward(self, inputs):
    return sym.dropout(data=inputs, rate=self._rate)
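
# Minimal sketch of the class this method would live in. The Dropout class
# name, its constructor, and the default rate are assumptions for
# illustration; only the forward body comes from the snippet above.
from nnvm import symbol as sym


class Dropout(object):
    def __init__(self, rate=0.5):
        # Fraction of input units to drop during training.
        self._rate = rate

    def forward(self, inputs):
        return sym.dropout(data=inputs, rate=self._rate)


# Example: drop = Dropout(rate=0.4); out = drop.forward(sym.Variable("x"))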