# Example #1
0
# resnet_v1 101
# Build a ResNet-v1-101 backbone (TF-Slim) in inference mode and prepare a
# function that restores its pretrained weights from a checkpoint.
with slim.arg_scope(resnet_v1.resnet_arg_scope()):
    net, end_points = resnet_v1.resnet_v1_101(img,
                                              num_classes,
                                              is_training=False)
# Remove singleton dimensions from the backbone output to obtain flat logits.
net_logit = tf.squeeze(net)

# tensorflow operation for load pretrained weights
# Restore everything except the classification head scopes.
# NOTE(review): 'AuxLogits' is an Inception-style scope name — confirm it
# actually exists for resnet_v1_101 (likely a copy-paste leftover).
variables_to_restore = get_variables_to_restore(
    exclude=['resnet_v1_101/logits', 'resnet_v1_101/AuxLogits'])
init_fn = assign_from_checkpoint_fn('resnet_v1_101.ckpt', variables_to_restore)

# multiscale resnet_v1 101
# Fuse multi-scale visual features from the backbone's end_points with
# textual (tag) features from an MLP, concatenated along the feature axis.
visual_features, fusion_logit = multiscale_resnet101(end_points, num_classes,
                                                     is_training)
textual_features, textual_logit = mlp(tag, num_classes, is_training)
refined_features = tf.concat([visual_features, textual_features], 1)

# score is prediction score, and k is label quantity
score = multi_class_classification_model(refined_features, num_classes)
# Predicted number of labels per example, reshaped to a flat [batch_size]
# vector so it can be compared element-wise with ground-truth counts.
k = label_quantity_prediction_model(refined_features, keep_prob)
k = tf.reshape(k, shape=[batch_size])

# make trainable variable list
# Select trainable variables whose top-level scope name (first 3 characters)
# matches one of the configured layer-prefix sets.
var_list0 = [
    v for v in tf.trainable_variables()
    if v.name.split('/')[0][0:3] in train_layers0
]
var_list1_1 = [
    v for v in tf.trainable_variables()
    if v.name.split('/')[0][0:3] in train_layers1_1
]  # FIX: closing bracket was missing — the comprehension was unterminated
# Experiment bookkeeping: create this run's log directory and persist the
# full argument set as pretty-printed JSON.
args.log_dir = os.path.join(args.output_dir, args.exp_name, args.run_name)
os.makedirs(args.log_dir, exist_ok=True)
# NOTE(review): the file handle returned by open() is never closed
# explicitly; consider wrapping this in a `with` block.
json.dump(obj=vars(args),
          fp=open(os.path.join(args.log_dir, 'config.json'), 'w'),
          sort_keys=True,
          indent=4)

# initialize visdom
# Connect to a local Visdom server; the env name identifies experiment + run.
viz = Visdom(port=8000, env=f'{args.exp_name}_{args.run_name}')
# Echo the configuration into the dashboard for quick inspection.
viz.text(json.dumps(obj=vars(args), sort_keys=True, indent=4))

# build network
# Choose a stax-style (init_fn, apply_fn) network factory per dataset:
# an MLP for 1-D sinusoid regression, a conv net for Omniglot n-way
# classification. A fixed PRNG seed keeps initialization reproducible.
if args.dataset == 'sinusoid':
    net_init, f = mlp(n_output=1,
                      n_hidden_layer=args.n_hidden_layer,
                      n_hidden_unit=args.n_hidden_unit,
                      bias_coef=args.bias_coef,
                      activation=args.activation,
                      norm=args.norm)
    # Scalar input: shape (-1, 1).
    _, params = net_init(rng=random.PRNGKey(42), input_shape=(-1, 1))

elif args.dataset == 'omniglot':
    net_init, f = conv_net(n_output=args.n_way,
                           n_conv_layer=args.n_hidden_layer,
                           n_filter=args.n_hidden_unit,
                           bias_coef=args.bias_coef,
                           activation='relu',
                           norm='None')
    # 28x28 single-channel images.
    _, params = net_init(rng=random.PRNGKey(42), input_shape=(-1, 28, 28, 1))

elif args.dataset == 'circle':
    net_init, f = mlp(n_output=args.n_way,
                      # NOTE(review): this branch is cut off mid-call in the
                      # source — the remaining arguments are missing.
# Example #3
0
# Create the log directory and dump the run configuration to JSON.
os.makedirs(args.log_dir, exist_ok=True)

# NOTE(review): the file handle returned by open() is never closed
# explicitly; consider wrapping this in a `with` block.
json.dump(obj=vars(args),
          fp=open(os.path.join(args.log_dir, 'config.json'), 'w'),
          sort_keys=True,
          indent=4)

# Visdom dashboard for this run; env name identifies experiment + run.
viz = Visdom(port=8000, env=f'{args.exp_name}_{args.run_name}')
# Disabled wandb integration, kept for reference:
# if not args.wandb_sync:
#     os.environ['WANDB_MODE'] = 'dryrun'
# wandb.init(project='neural-tangents', dir=args.log_dir)
# wandb.config.update(args)

# Build the scalar-output MLP as a stax-style (init_fn, apply_fn) pair.
net_init, net_fn = mlp(n_output=1,
                       n_hidden_layer=args.n_hidden_layer,
                       n_hidden_unit=args.n_hidden_unit,
                       activation=args.activation,
                       norm=args.norm)

# Outer-loop optimizer factory; only Adam is supported for now.
if args.outer_opt_alg == 'adam':
    outer_opt = partial(optimizers.adam, step_size=args.outer_step_size)
else:
    # FIX: name the offending value instead of raising a bare ValueError,
    # so misconfiguration is diagnosable from the traceback alone.
    raise ValueError(f"unsupported outer_opt_alg: {args.outer_opt_alg!r}")

opt_init, opt_update, get_params = outer_opt()
# JIT-compile the update step; it is called once per outer iteration.
opt_update = jit(opt_update)


def loss(net_params, x, y):
    """Mean-squared-error loss of the network on a batch (x, y)."""
    residual = y - net_fn(net_params, x)
    return np.mean(residual ** 2)