import numpy as np
import tensorflow as tf  # FIX: tf was used throughout but never imported
import nin
import utils
import input  # NOTE(review): shadows the builtin `input` -- kept, may be used elsewhere
import time

# Timestamp format and dataset root for the Category-Attribute-Prediction task.
ISOTIMEFORMAT = '%Y-%m-%d %X'
tmp = '/ais/gobi4/fashion/data/Category-Attribute-Prediction/'

with tf.device('/gpu:0'):
    # allow_soft_placement lets TF fall back to CPU for ops with no GPU kernel.
    sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))

    # Inputs: 227x227 RGB images, 50-way one-hot labels, and a train/eval switch.
    images = tf.placeholder(tf.float32, [None, 227, 227, 3])
    true_out = tf.placeholder(tf.float32, [None, 50])
    train_mode = tf.placeholder(tf.bool)

    # NOTE(review): rebinding the module name `nin` to the model instance is
    # kept from the original, but it makes `nin.NIN` unreachable afterwards.
    nin = nin.NIN()
    nin.build(images, train_mode)

    # print number of variables used
    print(nin.get_var_count())  # FIX: print-function form (valid in py2 and py3)
    #sess.run(tf.initialize_all_variables())

    # test classification
    # FIX: pass logits/labels by keyword -- the positional order of this API
    # changed between TF releases, so keywords are the only safe form.
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=nin.final,
                                                labels=true_out))
    #loss = tf.reduce_mean(-tf.reduce_sum(true_out*tf.log(nin.prob),[1]))
    #loss = tf.reduce_mean(tf.reduce_sum((nin.prob-true_out)**2,[1]))
    #train = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
    # train = tf.train.AdamOptimizer(1e-4).minimize(loss)
    global_step = tf.Variable(0)
# NOTE(review): the statements below are the tail of a cropping helper whose
# `def` line is NOT present in this chunk -- `top`, `left`, `image`, `flip`,
# `random`, `model` and `mean_image` are all bound outside the visible code.
# Crop to (insize x insize), subtract the mean image, scale to [0, 1],
# and optionally mirror horizontally with probability 1/2.
bottom = model.insize + top
right = model.insize + left
image = image[:, top:bottom, left:right].astype(np.float32)
image -= mean_image[:, top:bottom, left:right]
image /= 255
if flip and random.randint(0, 1) == 0:
    return image[:, :, ::-1]  # horizontal flip along the width axis
else:
    return image

import nin

# assumes args.mean is a path to a pickled mean image -- TODO confirm caller.
# NOTE(review): the file handle from open() is never closed here.
mean_image = pickle.load(open(args.mean, 'rb'))
model = nin.NIN()
serializers.load_npz('tl.model', model)
# Margin available for cropping a 256-pixel image down to the model's insize.
cropwidth = 256 - model.insize
model.to_cpu()


def predict(net, x):
    """Run a forward pass through the NIN trunk of *net* and return class
    probabilities.

    Three conv+ReLU+max-pool stages, then a dropout + conv4 head,
    6x6 average pooling reshaped to (batch, 1024), and a final softmax.
    Assumes x is a chainer Variable with .data of shape (batch, C, H, W)
    -- TODO confirm.
    """
    h = F.max_pooling_2d(F.relu(net.conv1(x)), 3, stride=2)
    h = F.max_pooling_2d(F.relu(net.conv2(h)), 3, stride=2)
    h = F.max_pooling_2d(F.relu(net.conv3(h)), 3, stride=2)
    # NOTE(review): dropout is run with train=True even though this looks
    # like an inference path -- confirm this is intentional.
    h = net.conv4(F.dropout(h, train=True))
    h = F.reshape(F.average_pooling_2d(h, 6), (x.data.shape[0], 1024))
    return F.softmax(h)
#setattr(model, 'predict', predict)
def __init__(self, model_path='../based_imagenet/model_recent'):
    """Load a pretrained NIN model and wrap it in a simple classifier.

    Args:
        model_path: path to the serialized (npz) model snapshot.
            Defaults to the previously hard-coded ImageNet-based snapshot,
            so existing callers are unaffected.
    """
    print('Load model:', model_path, file=sys.stderr)
    model_obj = nin.NIN()
    chainer.serializers.load_npz(model_path, model_obj)
    # NOTE(review): the second classifier argument is None in the original;
    # its meaning depends on simple_classifier.classifier -- confirm.
    self.core = simple_classifier.classifier(model_obj, None)