def _instantiate_parameters(self, input_shape):
    """Create mean/var/scale/offset parameter variables if not yet present.

    ``mean`` and ``var`` are non-trainable statistics; ``scale`` and
    ``offset`` are trainable affine parameters initialized from
    ``self.args``. Also records ``self._axes``, the axes to reduce over.
    """
    n_dim = len(input_shape)
    fmt = luchador.get_nn_conv_format()
    # Channel axis: 1 when input is 2D or format is NCHW, else the last (3).
    channel_axis = 1 if n_dim == 2 or fmt == 'NCHW' else 3
    # Reduce over every axis except the channel axis.
    self._axes = tuple(i for i in range(n_dim) if i != channel_axis)
    shape = tuple(
        input_shape[i] for i in range(n_dim) if i == channel_axis)
    const_init = fetch_initializer('ConstantInitializer')
    # (name, initial-value factory, trainable). Factories defer the
    # ``self.args`` lookups until the variable is actually created.
    specs = [
        ('mean', lambda: 0, False),
        ('var', lambda: 1, False),
        ('scale', lambda: self.args['scale'], True),
        ('offset', lambda: self.args['offset'], True),
    ]
    for name, value_of, trainable in specs:
        if self.get_parameter_variable(name) is not None:
            continue
        variable = wrapper.make_variable(
            name=name, shape=shape,
            initializer=const_init(value_of()), trainable=trainable)
        self.set_parameter_variables(**{name: variable})
def _main():
    """Entry point: build, train and test a classifier on MNIST."""
    # NOTE(review): '_parase' looks like a typo of '_parse'; the helper is
    # defined elsewhere in this file under that name, so it is kept as-is.
    args = _parase_command_line_args()
    _initialize_logger(args.debug)
    fmt = luchador.get_nn_conv_format()
    n_batch = 32
    # Input layout follows the configured convolution data format.
    if fmt == 'NHWC':
        shape = [n_batch, 28, 28, 1]
    else:
        shape = [n_batch, 1, 28, 28]
    model = _build_model(args.model, shape, n_batch)
    data = _load_data(args.mnist, fmt)
    sess = nn.Session()
    sess.initialize()
    writer = nn.SummaryWriter(output_dir='tmp')
    if sess.graph:
        writer.add_graph(sess.graph)
    try:
        _train(sess, model, data['train'], n_batch)
        _test(sess, model, data['test'], n_batch)
    except KeyboardInterrupt:
        pass
def _summarize_layer_outputs(self):
    """Fetch layer outputs for a sampled batch and write them to summary."""
    batch, _ = self._sample()
    state = batch['state0']
    # Sampled states need transposing when the conv format is NHWC.
    if luchador.get_nn_conv_format() == 'NHWC':
        state = _transpose(state)
    outputs = self._ql.get_layer_outputs(state)
    self._summary_writer.summarize(
        global_step=self._n_train, dataset=outputs)
def _gen_model_def(self, n_actions):
    """Load the model config with ``input_shape`` matching the conv format.

    :param n_actions: Number of actions, forwarded into the model config.
    :return: Result of ``nn.get_model_config`` for the configured model file.
    """
    config = self.args['model_config']
    height = config['input_height']
    width = config['input_width']
    channel = config['input_channel']
    if luchador.get_nn_conv_format() == 'NHWC':
        input_shape = [None, height, width, channel]
    else:
        input_shape = [None, channel, height, width]
    return nn.get_model_config(
        config['model_file'],
        n_actions=n_actions, input_shape=input_shape)
def _train_network(self):
    """Run one training step on a sampled batch and record absolute errors.

    :return: Per-sample errors from ``self._ql.train``.
    """
    batch, indices = self._sample()
    # Both state tensors need transposing under NHWC.
    if luchador.get_nn_conv_format() == 'NHWC':
        for key in ('state0', 'state1'):
            batch[key] = _transpose(batch[key])
    errors = self._ql.train(**batch)
    self._recorder.update(indices, np.abs(errors))
    return errors
def _get_output_shape_from_arg(self): if not self.args.get('output_shape_format'): return self.args['output_shape'] _be = luchador.get_nn_conv_format() if _be == self.args['output_shape_format']: return self.args['output_shape'] if _be == 'NHWC': _LG.info(' * Converting `output_shape` to NHWC') return common.nchw2nhwc(self.args['output_shape']) _LG.info(' * Converting `output_shape` to NCHW') return common.nhwc2nchw(self.args['output_shape'])
def _main():
    """Train an MNIST autoencoder, periodically plotting reconstructions."""
    args = _parase_command_line_args()
    initialize_logger(args.debug)
    batch_size = 32
    fmt = luchador.get_nn_conv_format()
    model = _build_model(args.model, fmt, batch_size)
    dataset = load_mnist(args.dataset, data_format=fmt, mock=args.mock)
    sess = nn.Session()
    sess.initialize()
    if args.output:
        writer = nn.SummaryWriter(output_dir=args.output)
        if sess.graph is not None:
            writer.add_graph(sess.graph)

    def _train_ae():
        # One optimization step; returns the reconstruction error.
        images = dataset.train.next_batch(batch_size).data
        return sess.run(
            inputs={model.input: images},
            outputs=model.output['error'],
            updates=model.get_update_operations(),
            name='train_autoencoder',
        )

    def _plot_reconstruction(epoch):
        # Save side-by-side original/reconstruction grids, when output is set.
        if not args.output:
            return
        originals = dataset.test.next_batch(batch_size).data
        reconstructed = sess.run(
            inputs={model.input: originals},
            outputs=model.output['reconstruction'],
            name='reconstruct_images',
        )
        # Drop the channel axis (last for NHWC, second for NCHW) for plotting.
        channel_axis = 3 if fmt == 'NHWC' else 1
        originals = np.squeeze(originals, axis=channel_axis)
        reconstructed = np.squeeze(reconstructed, axis=channel_axis)
        prefix = os.path.join(args.output, '{:03}_'.format(epoch))
        plot_images(originals, prefix + 'orign.png')
        plot_images(reconstructed, prefix + 'recon.png')

    _train(_train_ae, _plot_reconstruction,
           n_iterations=args.n_iterations, n_epochs=args.n_epochs)
def _main():
    """Entry point: train an autoencoder on MNIST, saving per-epoch plots."""
    args = _parase_command_line_args()
    initialize_logger(args.debug)
    n_batch = 32
    conv_format = luchador.get_nn_conv_format()
    autoencoder = _build_model(args.model, conv_format, n_batch)
    dataset = load_mnist(args.dataset, data_format=conv_format, mock=args.mock)
    session = nn.Session()
    session.initialize()
    if args.output:
        writer = nn.SummaryWriter(output_dir=args.output)
        if session.graph is not None:
            writer.add_graph(session.graph)
    # Channel axis position depends on the convolution data format.
    squeeze_axis = 3 if conv_format == 'NHWC' else 1

    def _train_ae():
        # Single optimization step returning the reconstruction error.
        return session.run(
            inputs={autoencoder.input: dataset.train.next_batch(n_batch).data},
            outputs=autoencoder.output['error'],
            updates=autoencoder.get_update_operations(),
            name='train_autoencoder',
        )

    def _plot_reconstruction(epoch):
        # Write original vs. reconstructed image files for this epoch.
        if not args.output:
            return
        orig = dataset.test.next_batch(n_batch).data
        recon = session.run(
            inputs={autoencoder.input: orig},
            outputs=autoencoder.output['reconstruction'],
            name='reconstruct_images',
        )
        orig = np.squeeze(orig, axis=squeeze_axis)
        recon = np.squeeze(recon, axis=squeeze_axis)
        prefix = os.path.join(args.output, '{:03}_'.format(epoch))
        plot_images(orig, prefix + 'orign.png')
        plot_images(recon, prefix + 'recon.png')

    _train(_train_ae, _plot_reconstruction,
           n_iterations=args.n_iterations, n_epochs=args.n_epochs)
def _main():
    """Train and evaluate an MNIST classifier defined by a model file."""
    args = _parase_command_line_args()
    initialize_logger(args.debug)
    batch_size = 32
    fmt = luchador.get_nn_conv_format()
    model = _build_model(args.model, fmt)
    dataset = load_mnist(args.dataset, data_format=fmt, mock=args.mock)
    sess = nn.Session()
    sess.initialize()
    if args.output:
        writer = nn.SummaryWriter(output_dir=args.output)
        if sess.graph is not None:
            writer.add_graph(sess.graph)

    def _train_classifier():
        # One optimization step; returns the classification error.
        batch = dataset.train.next_batch(batch_size)
        feed = {
            model.input['data']: batch.data,
            model.input['label']: batch.label,
        }
        return sess.run(
            inputs=feed,
            outputs=model.output['error'],
            updates=model.get_update_operations(),
            name='train_classifier',
        )

    def _test_classifier():
        # Evaluate one test batch; returns the classification error.
        batch = dataset.test.next_batch(batch_size)
        feed = {
            model.input['data']: batch.data,
            model.input['label']: batch.label,
        }
        return sess.run(
            inputs=feed,
            outputs=model.output['error'],
            name='test_classifier',
        )

    _train(_train_classifier, _test_classifier,
           n_iterations=args.n_iterations, n_epochs=args.n_epochs)
def _main():
    """Entry point: train a classifier on MNIST and report test error."""
    args = _parase_command_line_args()
    initialize_logger(args.debug)
    batch_size = 32
    data_format = luchador.get_nn_conv_format()
    classifier = _build_model(args.model, data_format)
    dataset = load_mnist(args.dataset, data_format=data_format, mock=args.mock)
    session = nn.Session()
    session.initialize()
    if args.output:
        summary = nn.SummaryWriter(output_dir=args.output)
        if session.graph is not None:
            summary.add_graph(session.graph)

    def _feed(batch):
        # Map batch arrays onto the classifier's input variables.
        return {
            classifier.input['data']: batch.data,
            classifier.input['label']: batch.label,
        }

    def _train_classifier():
        # One training step; returns classification error.
        return session.run(
            inputs=_feed(dataset.train.next_batch(batch_size)),
            outputs=classifier.output['error'],
            updates=classifier.get_update_operations(),
            name='train_classifier',
        )

    def _test_classifier():
        # One evaluation step on the test split.
        return session.run(
            inputs=_feed(dataset.test.next_batch(batch_size)),
            outputs=classifier.output['error'],
            name='test_classifier',
        )

    _train(_train_classifier, _test_classifier,
           n_iterations=args.n_iterations, n_epochs=args.n_epochs)
def _write_data_to_file(file_, data):
    """Save arrays in ``data`` into ``file_`` (h5py-style), plus metadata.

    Existing datasets with the same key are replaced. Luchador environment
    metadata (backend, conv format, dtype, version) is written only when
    not already present. The file is flushed before returning.
    """
    for key, value in data.items():
        _LG.debug(' Saving: %10s %24s %s', value.dtype, value.shape, key)
        if key in file_:
            # Replace any dataset previously stored under this key.
            del file_[key]
        # Scalar-sized values are stored unchunked; others auto-chunked.
        file_.create_dataset(
            key, data=value, chunks=None if value.size == 1 else True)
    # (name, value getter, dtype) triples; getters are only invoked when the
    # entry is actually missing, preserving the original lazy evaluation.
    metadata = (
        ('LUCHADOR_NN_BACKEND', luchador.get_nn_backend, 'S10'),
        ('LUCHADOR_NN_CONV_FORMAT', luchador.get_nn_conv_format, 'S4'),
        ('LUCHADOR_NN_DTYPE', luchador.get_nn_dtype, 'S10'),
    )
    # NOTE(review): np.string_ is removed in NumPy 2.0; migrate to np.bytes_.
    for name, getter, dtype in metadata:
        if name not in file_:
            file_.create_dataset(name, data=np.string_(getter()), dtype=dtype)
    if 'LUCHADOR_VERSION' not in file_:
        file_.create_dataset(
            'LUCHADOR_VERSION', data=np.string_(luchador.__version__))
    file_.flush()
def _main():
    """Train an autoencoder on MNIST, then plot test reconstructions."""
    args = _parase_command_line_args()
    _initialize_logger(args.debug)
    fmt = luchador.get_nn_conv_format()
    n_batch = 32
    # Input layout follows the configured convolution data format.
    if fmt == 'NHWC':
        shape = [n_batch, 28, 28, 1]
    else:
        shape = [n_batch, 1, 28, 28]
    model = _build_model(args.model, shape)
    images = _load_data(args.mnist, fmt)
    sess = nn.Session()
    sess.initialize()
    writer = nn.SummaryWriter(output_dir='tmp')
    if sess.graph:
        writer.add_graph(sess.graph)
    try:
        _train(sess, model, images['train'], n_batch)
    except KeyboardInterrupt:
        pass
    # Reconstruct one batch of test images for visual comparison.
    test_batch = images['test'][:n_batch, ...]
    reconstructed = sess.run(
        outputs=model.output['reconstruction'],
        inputs={model.input: test_batch}
    )
    channel_axis = 3 if fmt == 'NHWC' else 1
    # Scale to 8-bit pixel range (pixels presumably in [0, 1] — confirm
    # against _load_data).
    shown_orig = 255 * np.squeeze(test_batch, axis=channel_axis)
    shown_recon = 255 * np.squeeze(reconstructed, axis=channel_axis)
    if not args.no_plot:
        _plot(shown_orig.astype('uint8'), shown_recon.astype('uint8'))
def _instantiate_parameters(self, input_shape):
    """Create mean/var/scale/offset parameter variables if not yet present.

    ``mean`` and ``var`` are non-trainable statistics; ``scale`` and
    ``offset`` are trainable affine parameters initialized from
    ``self.args``. Also records ``self._axes``, the axes to reduce over.
    """
    n_dim = len(input_shape)
    conv_format = luchador.get_nn_conv_format()
    # The channel axis is 1 for 2D input or NCHW format, otherwise 3 (NHWC).
    channel = 1 if n_dim == 2 or conv_format == 'NCHW' else 3
    # Reduce over every axis except the channel axis.
    self._axes = tuple(axis for axis in range(n_dim) if axis != channel)
    shape = tuple(
        input_shape[axis] for axis in range(n_dim) if axis == channel)
    const_init = get_initializer('ConstantInitializer')

    def _register(name, value, trainable):
        # Create a constant-initialized variable and register it by name.
        variable = wrapper.get_variable(
            name=name, shape=shape,
            initializer=const_init(value), trainable=trainable)
        self.set_parameter_variables(**{name: variable})

    if self.get_parameter_variable('mean') is None:
        _register('mean', 0, trainable=False)
    if self.get_parameter_variable('var') is None:
        _register('var', 1, trainable=False)
    if self.get_parameter_variable('scale') is None:
        _register('scale', self.args['scale'], trainable=True)
    if self.get_parameter_variable('offset') is None:
        _register('offset', self.args['offset'], trainable=True)
def _predict_q(self):
    """Predict action values for the most recently recorded state."""
    last_record = self._recorder.get_last_record()
    # Add a leading batch axis of size 1.
    batch = last_record['state1'][None, ...]
    if luchador.get_nn_conv_format() == 'NHWC':
        batch = _transpose(batch)
    return self._ql.predict_action_value(batch)[0]
"""Test Layer behaviors""" from __future__ import division from __future__ import absolute_import import numpy as np # import theano # theano.config.optimizer = 'None' # theano.config.exception_verbosity = 'high' import luchador from luchador import nn from tests.unit.fixture import TestCase _BE = luchador.get_nn_backend() _FMT = luchador.get_nn_conv_format() class Conv2DTransposeTest(TestCase): """Test for Conv2DTranspose class""" def _check(self, input_var, output_var): session = nn.Session() session.initialize() input_val = np.random.randn(*input_var.shape) output_val = session.run( outputs=output_var, inputs={input_var: input_val}) self.assertEqual(output_var.shape, input_var.shape) self.assertEqual(output_var.dtype, input_var.dtype) self.assertEqual(output_var.shape, output_val.shape)
def setUp(self):
    # Cache the globally configured convolution data format
    # (e.g. 'NHWC' or 'NCHW') for use within each test.
    self.conv_format = luchador.get_nn_conv_format()
def _get_format(data_format): return data_format or luchador.get_nn_conv_format()
"""Test Q-Learning module""" from __future__ import division from __future__ import absolute_import import numpy as np import luchador import luchador.nn as nn from luchador.nn import util from luchador.agent.rl.q_learning import DeepQLearning from tests.unit import fixture _CONV = luchador.get_nn_conv_format() def _make_dqn(discount_rate=0.9, min_reward=-1, max_reward=1, scale_reward=1.0, input_shape=None, n_actions=5, model_def=None): """Make DQN module for test If model_def is given, that model definition is used, otherwise, vanilla_dqn is used. """ dqn = DeepQLearning( q_learning_config={ 'discount_rate': discount_rate, 'min_reward': min_reward,
"""Test Layer behaviors""" from __future__ import division from __future__ import absolute_import import numpy as np # import theano # theano.config.optimizer = 'None' # theano.config.exception_verbosity = 'high' import luchador from luchador import nn from tests.unit.fixture import TestCase _BE = luchador.get_nn_backend() _FMT = luchador.get_nn_conv_format() class Conv2DTransposeTest(TestCase): """Test for Conv2DTranspose class""" def _check(self, input_var, output_var): session = nn.Session() session.initialize() input_val = np.random.randn(*input_var.shape) output_val = session.run(outputs=output_var, inputs={input_var: input_val}) self.assertEqual(output_var.shape, input_var.shape) self.assertEqual(output_var.dtype, input_var.dtype) self.assertEqual(output_var.shape, output_val.shape)
import luchador
from luchador.util import load_config, initialize_logger
from luchador import nn
from luchador.nn.saver import Saver
from luchador.agent.rl.q_learning import DeepQLearning

_LG = logging.getLogger('luchador')

# Network input dimensions and sizes used by this script.
WIDTH = 84
HEIGHT = 84
CHANNEL = 4
BATCH_SIZE = 32
N_ACTIONS = 6
# Batch shape ordered to match the configured convolution format.
SHAPE = ((BATCH_SIZE, HEIGHT, WIDTH, CHANNEL)
         if luchador.get_nn_conv_format() == 'NHWC' else
         (BATCH_SIZE, CHANNEL, HEIGHT, WIDTH))


def _parse_command_line_args():
    """Parse command line arguments.

    :return: Parsed arguments with ``model``, ``optimizer``, ``output``
        and ``input`` attributes.
    """
    from argparse import ArgumentParser as AP
    ap = AP(description=('Build Network model and optimization, '
                         'and serialize variables with Saver'))
    ap.add_argument('model', help='Model definition YAML file. ')
    ap.add_argument('optimizer', help='Optimizer configuration YAML file.')
    ap.add_argument('--output', help='File path to save parameters')
    ap.add_argument(
        '--input', help='Path to parameter file from which data is loaded')
    return ap.parse_args()
def _main():
    """Train a GAN on CelebA faces, alternating disc/gen updates.

    Builds the generator and discriminator from the model file, wires real
    and generated data paths into the discriminator, and trains both with
    the losses from ``_build_loss``. When ``--output`` is given, losses and
    sample images are written via the summary writer.
    """
    args = _parse_command_line_args()
    initialize_logger(args.debug)
    batch_size = 32
    format_ = luchador.get_nn_conv_format()
    dataset = load_celeba_face(
        args.dataset, data_format=format_, mock=args.mock)
    model = _build_models(args.model)
    discriminator, generator = model['discriminator'], model['generator']
    # The generator consumes random seed vectors of length ``n_seeds``.
    input_gen = nn.Input(shape=(None, args.n_seeds), name='GeneratorInput')
    data_shape = (None, ) + dataset.train.shape[1:]
    data_real = nn.Input(shape=data_shape, name='InputData')
    _LG.info('Building Generator')
    data_fake = generator(input_gen)
    _LG.info('Building fake discriminator')
    logit_fake = discriminator(data_fake)
    _LG.info('Building real discriminator')
    logit_real = discriminator(data_real)
    gen_loss, disc_loss = _build_loss(logit_real, logit_fake)
    opt_gen, opt_disc = _build_optimization(
        generator, gen_loss, discriminator, disc_loss)
    sess = nn.Session()
    sess.initialize()
    _summary_writer = None
    if args.output:
        _summary_writer = nn.SummaryWriter(output_dir=args.output)
        if sess.graph is not None:
            _summary_writer.add_graph(sess.graph)

    def _train_disc():
        """One discriminator update on a real batch plus fresh seeds."""
        return sess.run(
            inputs={
                input_gen: _sample_seed(batch_size, args.n_seeds),
                data_real: dataset.train.next_batch(batch_size).data
            },
            outputs=disc_loss,
            updates=discriminator.get_update_operations() + [opt_disc],
            name='train_discriminator',
        )

    def _train_gen():
        """One generator update on fresh seeds."""
        return sess.run(
            inputs={
                input_gen: _sample_seed(batch_size, args.n_seeds),
            },
            outputs=gen_loss,
            updates=generator.get_update_operations() + [opt_gen],
            name='train_generator',
        )

    # Fixed seed batch so generated samples are comparable across epochs.
    random_seed = _sample_seed(batch_size, args.n_seeds)

    def _summarize(epoch, losses=None):
        """Write losses and a grid of generated images for this epoch."""
        if not args.output:
            return
        if losses:
            _summary_writer.summarize(
                summary_type='scalar', global_step=epoch,
                dataset={
                    'Generator/Loss': losses[0],
                    'Discriminator/Loss': losses[1],
                },
            )
        images = sess.run(
            inputs={
                input_gen: random_seed,
            },
            outputs=data_fake,
            name='generate_samples',
        )
        # Convert NCHW -> NHWC before writing image summaries.
        if format_ == 'NCHW':
            images = images.transpose(0, 2, 3, 1)
        images = (255 * images).astype(np.uint8)
        _summary_writer.summarize(
            summary_type='image', global_step=epoch,
            dataset={'Genearated/epoch_{:02d}'.format(epoch): images},
            max_outputs=10,
        )

    _train(
        _train_disc, _train_gen, _summarize,
        args.n_iterations, args.n_epochs,
    )
from __future__ import absolute_import import os import logging import h5py import numpy as np import luchador import luchador.util from luchador import nn _LG = logging.getLogger('luchador') _BE = luchador.get_nn_backend() _CONV = luchador.get_nn_conv_format() def _parse_command_line_args(): from argparse import ArgumentParser as AP ap = AP( description='Feed batch data to layer and save the output to file' ) ap.add_argument( 'config', help='File contains layer and run config.' ) ap.add_argument( '--output', help='Output data file.' )