def testEndpointsReuse(self):
    """Build xception_65 twice (second time with reuse=True) and check that
    both builds expose the same set of endpoint keys.

    NOTE(review): an identical ``testEndpointsReuse`` is defined again later
    in this file; in Python the later definition shadows this one, so this
    copy never runs — confirm and delete one of the duplicates.
    """
    inputs = create_test_input(2, 32, 32, 3)
    # First build creates the variables (reuse=False).
    with slim.arg_scope(xception.xception_arg_scope()):
        _, end_points0 = xception.xception_65(inputs, num_classes=10, reuse=False)
    # Second build reuses the variables created above (reuse=True).
    with slim.arg_scope(xception.xception_arg_scope()):
        _, end_points1 = xception.xception_65(inputs, num_classes=10, reuse=True)
    self.assertItemsEqual(end_points0.keys(), end_points1.keys())
def testEndpointsReuse(self):
    """Endpoint keys are identical whether the net is freshly built or reused."""
    test_images = create_test_input(2, 32, 32, 3)
    collected_end_points = []
    # Build once to create variables (reuse=False), then again sharing
    # them (reuse=True); both builds should report the same endpoints.
    for reuse_flag in (False, True):
        with slim.arg_scope(xception.xception_arg_scope()):
            _, points = xception.xception_65(
                test_images, num_classes=10, reuse=reuse_flag)
        collected_end_points.append(points)
    self.assertItemsEqual(collected_end_points[0].keys(),
                          collected_end_points[1].keys())
if __name__ == '__main__':
    # Demo: run a single inference-mode forward pass of Xception-65 on a
    # random image, dump the endpoint tensors, and write the graph for
    # TensorBoard inspection under ./logs.
    inputs = tf.random_normal([1, 224, 224, 3])
    with slim.arg_scope(xception.xception_arg_scope()):
        net, end_points = xception.xception_65(
            inputs,
            num_classes=100,
            is_training=False,           # inference mode
            global_pool=True,
            keep_prob=0.5,
            output_stride=None,
            regularize_depthwise=False,
            # NOTE(review): [12, 16, 18] is an unusual multi-grid setting;
            # the common choice is (1, 2, 4) — confirm this is intentional.
            multi_grid=[12, 16, 18],
            reuse=None,
            scope='xception_65')

    # Export the default graph so it can be visualized in TensorBoard.
    writer = tf.summary.FileWriter("./logs", graph=tf.get_default_graph())

    print("Layers")
    # Only the endpoint tensors are used, so iterate values() instead of
    # items() (the original discarded the key of every (k, v) pair).
    for v in end_points.values():
        print('name = {}, shape = {}'.format(v.name, v.get_shape()))

    # print("Parameters")
    # for v in slim.get_model_variables():
    #     print('name = {}, shape = {}'.format(v.name, v.get_shape()))