def conv_base(inputs, kernel, dilation=(1, 1), downsample=False):
    """Plain convolution; doubles the channels and halves the spatial size
    when downsampling."""
    filters = inputs.shape[1]
    if downsample:
        output = layers.conv(inputs, filters * 2, kernel, (2, 2))
    else:
        output = layers.conv(inputs, filters, kernel, dilation=dilation)
    return output
def pair_base(inputs, kernel, downsample=False):
    """Pair of 1 x k and k x 1 convolutions; doubles the channels with a
    1 x 1 convolution when downsampling."""
    filters = inputs.shape[1]
    if downsample:
        output = layers.conv(inputs, filters, (1, kernel), (1, 2))
        output = layers.conv(output, filters, (kernel, 1), (2, 1))
        output = layers.conv(output, filters * 2, (1, 1))
    else:
        output = layers.conv(inputs, filters, (1, kernel))
        output = layers.conv(output, filters, (kernel, 1))
    return output
def block(x, tokens, pre_activation=False, downsample=False):
    """Build a residual block from a group of op tokens."""
    if pre_activation:
        x = layers.bn_relu(x)
        res = x
    else:
        res = x
        x = layers.bn_relu(x)

    # the first op of the block is the only one that may downsample
    x = ops[tokens[0]](x, downsample)
    print("%s \t-> shape %s" % (ops[tokens[0]].__name__, x.shape))
    for token in tokens[1:]:
        x = layers.bn_relu(x)
        x = ops[token](x)
        print("%s \t-> shape %s" % (ops[token].__name__, x.shape))

    if downsample:
        # bring the skip connection in line with the downsampled main path
        filters = res.shape[1]
        if FLAGS.downsample == "conv":
            res = layers.conv(res, filters * 2, (1, 1), (2, 2))
        elif FLAGS.downsample == "pool":
            res = layers.avgpool(res, (2, 2), (2, 2))
            res = fluid.layers.pad(
                res, (0, 0, filters // 2, filters // 2, 0, 0, 0, 0))
        else:
            raise NotImplementedError

    return x + res
def maxpool_base(inputs, kernel, downsample=False):
    """Max pooling; a strided pool followed by a 1 x 1 convolution doubles
    the channels when downsampling."""
    if downsample:
        filters = inputs.shape[1]
        output = layers.maxpool(inputs, kernel, (2, 2))
        output = layers.conv(output, filters * 2, (1, 1))
    else:
        output = layers.maxpool(inputs, kernel)
    return output
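# The `ops` table indexed by the architecture tokens is defined elsewhere in
# the repository. A minimal sketch, assuming each entry simply binds one of
# the *_base primitives above to a fixed kernel, is shown below; the op set,
# names and kernel sizes are an assumption for illustration, not the actual
# search space.
def conv_3x3(inputs, downsample=False):
    return conv_base(inputs, (3, 3), downsample=downsample)


def dilated_3x3(inputs, downsample=False):
    return conv_base(inputs, (3, 3), dilation=(2, 2), downsample=downsample)


def pair_5(inputs, downsample=False):
    return pair_base(inputs, 5, downsample=downsample)


def maxpool_3x3(inputs, downsample=False):
    return maxpool_base(inputs, (3, 3), downsample=downsample)


# stand-in for the real `ops` list used by block() and net()
ops_sketch = [conv_3x3, dilated_3x3, pair_5, maxpool_3x3]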
def net(inputs, tokens):
    """Build the network with skip links."""
    x = layers.conv(inputs, FLAGS.width, (3, 3))

    num_ops = FLAGS.num_blocks * FLAGS.num_ops
    x = stage(x, tokens[:num_ops], pre_activation=True)
    for i in range(1, FLAGS.num_stages):
        x = stage(x, tokens[i * num_ops:(i + 1) * num_ops], downsample=True)

    x = layers.bn_relu(x)
    x = layers.global_avgpool(x)
    x = layers.dropout(x)
    logits = layers.fully_connected(x, num_classes)

    return fluid.layers.softmax(logits)
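# stage() is not shown in this section. A minimal sketch of its likely shape,
# assuming each stage chains FLAGS.num_blocks residual blocks of FLAGS.num_ops
# ops each and only the first block of a stage downsamples; this is an
# assumption for illustration, not the repository's actual implementation.
def stage_sketch(x, tokens, pre_activation=False, downsample=False):
    x = block(x, tokens[:FLAGS.num_ops], pre_activation, downsample)
    for i in range(1, FLAGS.num_blocks):
        x = block(x, tokens[i * FLAGS.num_ops:(i + 1) * FLAGS.num_ops])
    return x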
def net(inputs, tokens):
    """Build the network as a plain chain of ops (no skip links),
    downsampling at evenly spaced stage boundaries."""
    depth = len(tokens)
    # Split the stem conv plus the `depth` ops as evenly as possible into
    # FLAGS.num_stages stages; the index of the last op in each stage
    # (except the final one) becomes a downsampling step.
    q, r = divmod(depth + 1, FLAGS.num_stages)
    downsample_steps = [
        i * q + max(0, i + r - FLAGS.num_stages + 1) - 2
        for i in range(1, FLAGS.num_stages)
    ]

    x = layers.conv(inputs, FLAGS.width, (3, 3))
    x = layers.bn_relu(x)

    for i, token in enumerate(tokens):
        downsample = i in downsample_steps
        x = ops[token](x, downsample)
        print("%s \t-> shape %s" % (ops[token].__name__, x.shape))
        if downsample:
            print("=" * 12)

    x = layers.bn_relu(x)
    x = layers.global_avgpool(x)
    x = layers.dropout(x)
    logits = layers.fully_connected(x, num_classes)

    return fluid.layers.softmax(logits)
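# A quick, paddle-free check of the stage split computed in net() above
# (illustrative only): with 11 tokens and 3 stages, the stem conv plus the
# ops are split 4 / 4 / 4, so the feature map is halved after op 2 and op 6.
if __name__ == "__main__":
    depth, num_stages = 11, 3
    q, r = divmod(depth + 1, num_stages)
    steps = [
        i * q + max(0, i + r - num_stages + 1) - 2
        for i in range(1, num_stages)
    ]
    print(steps)  # -> [2, 6]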