def _build_net(self, x, reuse=False, initializer=None, collections=None):
    """Assemble the network described by ``self.arch`` on top of input ``x``.

    Args:
        x: input tensor (byte-valued images; scaled to [0, 1] here).
        reuse: forwarded to each layer's variable scope.
        initializer: two-element list [weight_init, bias_init]; defaults to
            variance-scaling weights and constant (zero) biases.
        collections: optional extra variable collection; GLOBAL_VARIABLES is
            always included.

    Returns:
        Tuple of (final output tensor, list of per-layer variable lists,
        list of per-layer output tensors).
    """
    if not initializer:
        initializer = [
            tf.contrib.layers.variance_scaling_initializer(),
            tf.constant_initializer(),
        ]
    # Variables always land in GLOBAL_VARIABLES; the caller may add one
    # extra collection (e.g. for target-network parameter syncing).
    if collections:
        collections = [collections, tf.GraphKeys.GLOBAL_VARIABLES]
    else:
        collections = [tf.GraphKeys.GLOBAL_VARIABLES]
    shared_args = {
        'reuse': reuse,
        'initializer': initializer,
        'collections': collections,
    }

    # Inputs are byte images; normalize to [0, 1] floats.
    y = tf.cast(x, tf.float32) / 255.0
    weights = []
    outputs = []
    total_params = 0
    for layer_idx, layer_args in enumerate(self.arch):
        # NOTE: this mutates the dicts stored in self.arch in place,
        # matching the original behavior.
        layer_args.update(shared_args)
        builder = tf_layer[layer_args['layer']]
        y, layer_vars, n_params = builder(layer_idx, y, layer_args)
        weights.append(layer_vars)
        outputs.append(y)
        total_params += n_params
    main_logger.info("param: {}, memory size: {}B".format(
        total_params, pretty_num(total_params * 4, True)))
    return y, weights, outputs
def flatten(idx, x, args):
    """Flatten layer: collapse all non-batch dimensions of ``x``.

    Has no trainable variables; logs empty weight/bias shapes so its
    output stays uniform with the other layer builders.

    Returns:
        (flattened tensor, empty variable list, 0 parameters).
    """
    l_name = args['layer'] + "_%d" % idx
    main_logger.info("{}: w: {}, b: {}".format(l_name, [], []))
    with tf.variable_scope(l_name, reuse=args['reuse']):
        y = tf.contrib.layers.flatten(x)
    return y, [], 0
def save_model(self):
    """Save the current session checkpoint plus a small state JSON.

    The checkpoint is written to a timestamped directory under
    tflog/<algorithm>/<env>/; state.json records the score and the run
    arguments so load_model can report them later.
    """
    save_path = get_path('tflog/' + self.algorithm + '/' + self.args.env +
                         '/' +
                         datetime.datetime.now().strftime('%Y%m%d_%H%M%S'))
    main_logger.info("Save model at {} with score {:.2f}".format(
        save_path, self.score))
    self.saver.save(self.sess, save_path + '/model.ckpt')
    with open(save_path + '/state.json', 'w') as f:
        # Bug fix: self.args is accessed as an object (self.args.env), so it
        # is presumably an argparse.Namespace, which json.dump cannot
        # serialize (TypeError). Dump its __dict__ instead; plain dicts
        # pass through unchanged.
        args = vars(self.args) if hasattr(self.args, '__dict__') else self.args
        json.dump({'score': self.score, 'args': args}, f)
def fc(idx, x, args):
    """Fully-connected layer: y = x @ w + b.

    Args:
        idx: layer index, used to build a unique scope name.
        x: 2-D input tensor (batch, features).
        args: dict with keys 'layer', 'size', 'reuse',
            'initializer' ([weight_init, bias_init]) and 'collections'.

    Returns:
        (output tensor, [w, b], parameter count).
    """
    l_name = args['layer'] + "_%d" % idx
    in_dim = x.get_shape()[1].value
    out_dim = args['size']
    w_size = [in_dim, out_dim]
    b_size = [out_dim]
    # Parameter count: weight matrix plus bias vector.
    m_size = in_dim * out_dim + out_dim
    main_logger.info("{}: w: {}, b: {}".format(l_name, w_size, b_size))
    with tf.variable_scope(l_name, reuse=args['reuse']):
        w = tf.get_variable(
            "w", w_size,
            initializer=args['initializer'][0],
            collections=args['collections'])
        b = tf.get_variable(
            "b", b_size,
            initializer=args['initializer'][1],
            collections=args['collections'])
        y = tf.matmul(x, w) + b
    return y, [w, b], m_size
def conv2d(idx, x, args):
    """2-D convolution layer with VALID padding.

    Args:
        idx: layer index, used to build a unique scope name.
        x: NHWC input tensor.
        args: dict with keys 'layer', 'kernel_size' ([h, w]), 'input'
            (in channels), 'output' (out channels), 'stride' ([sh, sw]),
            'reuse', 'initializer' ([weight_init, bias_init]) and
            'collections'.

    Returns:
        (output tensor, [w, b], parameter count).
    """
    l_name = args['layer'] + "_%d" % idx
    in_ch = args['input']
    out_ch = args['output']
    w_size = args['kernel_size'] + [in_ch, out_ch]
    b_size = [out_ch]
    # Parameter count: kernel_h * kernel_w * in * out weights plus biases.
    m_size = (args['kernel_size'][0] * args['kernel_size'][1] *
              in_ch * out_ch + out_ch)
    main_logger.info("{}: w: {}, b: {}".format(l_name, w_size, b_size))
    with tf.variable_scope(l_name, reuse=args['reuse']):
        w = tf.get_variable(
            "w", w_size,
            initializer=args['initializer'][0],
            collections=args['collections'])
        b = tf.get_variable(
            "b", b_size,
            initializer=args['initializer'][1],
            collections=args['collections'])
        y = tf.nn.conv2d(
            x, w,
            strides=[1] + args['stride'] + [1],
            padding="VALID") + b
    return y, [w, b], m_size
def load_model(self):
    """Interactively restore a previously saved checkpoint, if any.

    Scans tflog/<algorithm>/<env>/ for saved-model subdirectories and, when
    at least one exists, asks on stdin whether (and which one) to load.
    On success sets self.score from the saved state.json and returns True;
    otherwise sets self.score to None and returns False.
    Also creates self.saver as a side effect, whether or not a model loads.
    """
    self.saver = tf.train.Saver(max_to_keep=50)
    model_path = get_path('tflog/' + self.algorithm + '/' + self.args.env)
    # First os.walk entry is (dirpath, dirnames, filenames); [1] gives the
    # immediate subdirectories, one per previously saved model.
    # NOTE(review): raises StopIteration if model_path does not exist —
    # presumably get_path creates it; confirm.
    subdir = next(os.walk(model_path))[1]
    if subdir:
        cmd = input(
            "Found {} saved model(s), do you want to load? [y/N]".format(
                len(subdir)))
        # Any answer containing 'y'/'Y' counts as yes; everything else
        # (including empty input) falls through to "no model loaded".
        if 'y' in cmd or 'Y' in cmd:
            if len(subdir) > 1:
                # Several candidates: show each with its recorded score and
                # let the user pick one by index.
                print("Choose one:")
                for i in range(len(subdir)):
                    state_fn = model_path + '/' + subdir[i] + '/state.json'
                    with open(state_fn, 'r') as f:
                        state = json.load(f)
                    print("[{}]: Score: {}, Path: {}".format(
                        i, state['score'], subdir[i]))
                # NOTE(review): non-integer or out-of-range input raises
                # here (ValueError/IndexError) rather than re-prompting.
                load_path = model_path + '/' + subdir[int(input("Index:"))]
            else:
                load_path = model_path + '/' + subdir[0]
            # Re-read the chosen model's state for the score report below.
            state_fn = load_path + '/state.json'
            with open(state_fn, 'r') as f:
                state = json.load(f)
            checkpoint = tf.train.get_checkpoint_state(load_path)
            if checkpoint and checkpoint.model_checkpoint_path:
                self.saver.restore(self.sess,
                                   checkpoint.model_checkpoint_path)
                main_logger.info(
                    "Successfully loaded model: Score: {}, Path: {}".
                    format(state['score'], checkpoint.model_checkpoint_path))
                self.score = state['score']
                return True
    # No saved model, user declined, or checkpoint file missing: fresh start.
    self.score = None
    main_logger.info("No model loaded")
    return False