Example #1
    def __init__(self,
                 game_state,
                 resized_height,
                 resized_width,
                 phi_length,
                 name,
                 replay_memory,
                 terminate_loss_of_life=False,
                 folder='',
                 sample_num=0):
        """ Initialize collection of demo """
        assert sample_num > 0
        self.file_num = sample_num
        self.game_state = game_state
        self.resized_h = resized_height
        self.resized_w = resized_width
        self.phi_length = phi_length
        self.name = name
        self.D = replay_memory
        self.terminate_loss_of_life = terminate_loss_of_life

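        # Assumed intent: if the wrapped Atari environment does not skip
        # frames itself (frameskip == 1), repeat each action manually.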
        self._skip = 1
        if self.game_state._env.frameskip == 1:
            self._skip = 4

        self.state_input = np.zeros(
            (1, self.resized_h, self.resized_w, self.phi_length),
            dtype=np.uint8)
        self.folder = folder + '/{n:03d}/'.format(n=self.file_num)
        prepare_dir(self.folder, empty=True)
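Every example on this page calls a project-local prepare_dir helper rather than anything from the standard library, and its signature varies between projects: Example #1 passes empty=True, Example #3 passes only a path, and Examples #4-#7 call util.prepare_dir with a hard flag. A minimal sketch of what such a helper typically does, assuming the flag means "wipe any existing contents first" (the name, flag, and behavior are assumptions, not the original code):

import os
import shutil

def prepare_dir(path, empty=False):
    # Hypothetical reconstruction of the prepare_dir helper used in the
    # examples; the real projects may name the flag differently ('hard')
    # or behave differently.
    if empty and os.path.isdir(path):
        shutil.rmtree(path)  # wipe any existing contents
    if not os.path.isdir(path):
        os.makedirs(path)    # create the directory (and parents)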

Example #2
    def __init__(self,
                 autoencoder_test_data,
                 classifier_train_json,
                 classifier_test_json,
                 classifier_validation_json,
                 index=None):

        self.autoencoder_test_data = autoencoder_test_data
        self.classifier_train_json = classifier_train_json
        self.classifier_test_json = classifier_test_json
        self.classifier_validation_json = classifier_validation_json
        
        self.compressed_set = set()
        for img in autoencoder_test_data:
            self.compressed_set.add(draw_util.compress_bits(img))
        self.classifier_accuracy_all = []
        self.cost_all = []
        self.reconstruction_loss_all = []
        self.kl_divergence_all = []
        self.reconstruction_accuracy_all = []

        results_dir = util.get_results_dir()
        util.prepare_dir(results_dir, hard=False)

        self.index = constants.run_index() if index is None else index
        self.classifier_summary_file = os.path.join(results_dir,
            'classifier_accuracy_summary_' + str(self.index) + '.txt')
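Example #2 deduplicates the autoencoder test images by turning each one into a hashable key with draw_util.compress_bits. That helper is project-specific and not shown; a plausible stand-in, assuming the images are binary arrays, packs the bits into bytes:

import numpy as np

def compress_bits(img):
    # Hypothetical stand-in for draw_util.compress_bits: pack a binary
    # image into a compact, hashable bytes key for set membership.
    return np.packbits(np.asarray(img, dtype=np.uint8).ravel()).tobytes()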
Example #3
def create_test_from_file(fl, name, group, policy):
  txt = fl.read()
  fl.close()

  appdir = os.path.join(TESTS_DIR, group, name)
  if os.path.exists(appdir):
    if OVERWRITE:
      if not os.path.isdir(appdir):
        fatal("Unable to overwrite file: %s" % appdir)
      warn("Creating in existing directory: %s" % appdir)
    else:
      fatal("Not overwriting existing directory: %s" % appdir)
  prepare_dir(appdir)

  inputdir = os.path.join(appdir, 'source-input')
  if os.path.exists(inputdir):
    assert OVERWRITE
    if not os.path.isdir(inputdir):
      fatal("Unable to overwrite non-directory: %s" % inputdir)
  else:
    os.makedirs(inputdir)

  tgtfile = "%s.js" % name
  tgtpath = os.path.join(inputdir, tgtfile)
  with open(tgtpath, 'w') as tgtfl:
    tgtfl.write(txt)
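A hedged usage sketch for create_test_from_file: it expects an already-open file object and closes it itself, so the caller must not reuse the handle afterwards. The path, test name, and group below are illustrative, not from the original test suite:

# Hypothetical invocation; 'fixtures/widget.js', 'widget', and 'dom'
# are made-up names for illustration.
fl = open('fixtures/widget.js', 'r')
create_test_from_file(fl, 'widget', 'dom', policy=None)
# The source now lives at TESTS_DIR/dom/widget/source-input/widget.js.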

Example #4
def gen_classifier_data(index=None):
    target_dir = util.get_classifier_data_dir()
    util.prepare_dir(target_dir, hard=False)
    suffix = str(constants.run_index()) if index is None else str(index)

    if os.path.isfile(os.path.join(target_dir, 'test-' + suffix + '.txt')) and \
        os.path.isfile(os.path.join(target_dir, 'validation-' + suffix + '.txt')) and \
        os.path.isfile(os.path.join(target_dir, 'train-' + suffix + '.txt')):
        return

    TOTAL = 50000
    with open(os.path.join(target_dir, 'train-' + suffix + '.txt'), 'w') \
        as outfile:
        json.dump(create_balanced_dataset(int(TOTAL * TRAIN_RATIO / 100)),
                  outfile)
        logger.info('Train data written.')
    with open(os.path.join(target_dir, 'test-' + suffix + '.txt'), 'w') \
        as outfile:
        json.dump(create_balanced_dataset(int(TOTAL * TEST_RATIO / 100)),
                  outfile)
        logger.info('Test data written.')
    with open(os.path.join(target_dir, 'validation-' + suffix + '.txt'), 'w') \
        as outfile:
        json.dump(create_balanced_dataset(int(TOTAL * VALIDATION_RATIO / 100)),
                  outfile)
        logger.info('Validation data written.')
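The files written by gen_classifier_data are plain JSON despite the .txt extension, so reading a split back is symmetric. A minimal sketch, assuming the same util helper and a run index of 0:

import json
import os

# Hypothetical read-back of a generated split; 'train-0.txt' assumes
# index 0 was used above.
data_dir = util.get_classifier_data_dir()
with open(os.path.join(data_dir, 'train-0.txt')) as infile:
    train_set = json.load(infile)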

Example #5
def gen_autoencoder_data(gen_unique=True, reduced=False):
    target_dir = util.get_autoencoder_data_dir()
    suffix = ('' if not gen_unique else 'unique-') + str(constants.run_index())
    if os.path.isfile(os.path.join(target_dir, 'test-' + suffix + '.npy')) and \
        os.path.isfile(os.path.join(target_dir, 'validation-' + suffix + '.npy')) and \
        os.path.isfile(os.path.join(target_dir, 'train-' + suffix + '.npy')):
        return
    util.prepare_dir(target_dir, hard=False)

    train, test, validation = [], [], []
    tot = -1
    shapes = ['square', 'ellipse', 'triangle'] if not reduced else ['square']
    for shape in shapes:
        _train, _test, _validation = split_for_shape(shape, tot, gen_unique,
                                                     reduced)

        if tot == -1:
            tot = len(_train) + len(_test) + len(_validation)

        random.shuffle(_train)
        random.shuffle(_test)
        random.shuffle(_validation)

        train.extend(_train)
        test.extend(_test)
        validation.extend(_validation)

        random.shuffle(train)
        random.shuffle(test)
        random.shuffle(validation)

    statistics(train, test, validation)

    logger.info('Separation done.')

    np_test = np.array([
        draw_util.encoded_image_to_flattened_bits(encoded) for encoded in test
    ])
    np_test = np.random.permutation(np_test)
    np.save(os.path.join(target_dir, 'test-' + suffix), np_test)
    logger.info('Test dataset converted.')

    np_validation = np.array([
        draw_util.encoded_image_to_flattened_bits(encoded)
        for encoded in validation
    ])
    np_validation = np.random.permutation(np_validation)
    np.save(os.path.join(target_dir, 'validation-' + suffix), np_validation)
    logger.info('Validation dataset converted.')

    np_train = np.array([
        draw_util.encoded_image_to_flattened_bits(encoded) for encoded in train
    ])
    np_train = np.random.permutation(np_train)
    np.save(os.path.join(target_dir, 'train-' + suffix), np_train)
    logger.info('Train dataset converted.')
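Because the splits are written with np.save, which appends the .npy extension that the existence checks at the top of Example #5 already expect, they come back with np.load. A minimal sketch, assuming gen_unique=True and run index 0, i.e. suffix 'unique-0':

import os
import numpy as np

# Hypothetical read-back of a generated split.
target_dir = util.get_autoencoder_data_dir()
np_train = np.load(os.path.join(target_dir, 'train-unique-0.npy'))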

Example #6
    def __init__(
            self,
            image_dimension,  # only squared images assumed
            code_dimension,
            beta=None,
            encoder_activation_fn=tf.nn.relu,
            decoder_activation_fn=tf.tanh,
            learning_rate=None,
            experiment_name=None,
            denoising=False):
        print('Construct convolutional autoencoder:')
        print('Image dimension: {0}'.format(image_dimension))
        print('Code dimension: {0}'.format(code_dimension))
        print('Encoder activation function: {0}'.format(encoder_activation_fn))
        print('Decoder activation function: {0}'.format(decoder_activation_fn))
        print('Beta = {0}'.format(beta))
        print('Experiment name: {0}'.format(experiment_name))
        print('Is trainable: {0}'.format(learning_rate is not None))
        print('Is denoising: {0}'.format(denoising))
        print('Learning rate: {0}'.format(learning_rate))
        print('Logs dir: {0}'.format(
            os.path.join(util.get_logs_dir(), experiment_name)))

        self.image_dimension = image_dimension
        self.code_dimension = code_dimension
        self.beta = beta
        self.encoder_activation_fn = encoder_activation_fn
        self.decoder_activation_fn = decoder_activation_fn
        self.is_training = (learning_rate is not None)
        self.is_denoising = denoising
        self.optimizer = tf.train.AdamOptimizer(learning_rate if learning_rate >= 0 else 0.001) \
            if self.is_training else None
        self.logs_dir = os.path.join(util.get_logs_dir(), experiment_name)

        if self.is_training:
            util.prepare_dir(self.logs_dir)
        else:
            assert os.path.exists(self.logs_dir)

        self.encoder_get_weights = util.get_weights_he \
            if 'elu' in encoder_activation_fn.__name__ else util.get_weights_xavier
        self.decoder_get_weights = util.get_weights_he \
            if 'elu' in decoder_activation_fn.__name__ else util.get_weights_xavier

        self._describe_autoencoder()
        self._define_loss_function()

        init_op = tf.global_variables_initializer()
        self.saver = tf.train.Saver(max_to_keep=1)

        self.sess = tf.InteractiveSession()
        self.sess.run(init_op)
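Examples #6 and #7 choose a weight initializer from the activation function's name: the substring test 'elu' in fn.__name__ matches both relu and elu, so He initialization is used for the ReLU/ELU family and Xavier/Glorot otherwise. The util helpers themselves are not shown; a minimal TensorFlow 1.x sketch of that pairing (function names and signatures are assumptions):

import tensorflow as tf

def get_weights_he(name, shape):
    # Variance scaling with scale=2.0 is He initialization,
    # appropriate for ReLU/ELU-family activations.
    return tf.get_variable(
        name, shape, initializer=tf.variance_scaling_initializer(scale=2.0))

def get_weights_xavier(name, shape):
    # Glorot/Xavier initialization, appropriate for tanh/sigmoid.
    return tf.get_variable(
        name, shape, initializer=tf.glorot_uniform_initializer())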

Example #7
    def __init__(self,
            encoder_layers_size=[4096, 1200, 1200, 10],
            decoder_layers_size=[10, 1200, 1200, 1200, 4096],
            beta=None,
            encoder_activation_fn=tf.nn.relu,
            decoder_activation_fn=tf.tanh,
            learning_rate=None,
            seq_index=None,
            denoising=False):
        print('Construct fully connected autoencoder:')
        print('Encoder layers: {0}'.format(encoder_layers_size))
        print('Decoder layers: {0}'.format(decoder_layers_size))
        print('Encoder activation function: {0}'.format(encoder_activation_fn))
        print('Decoder activation function: {0}'.format(decoder_activation_fn))
        print('Beta = {0}'.format(beta))
        print('Seq index = {0}'.format(seq_index))
        print('Is trainable: {0}'.format(learning_rate is not None))
        print('Is denoising: {0}'.format(denoising))
        print('Learning rate: {0}'.format(learning_rate))
        print('Logs dir: {0}'.format(
            os.path.join(util.get_logs_dir(), str(seq_index))))

        self.encoder_layers_size = encoder_layers_size
        self.decoder_layers_size = decoder_layers_size
        self.code_dimension = encoder_layers_size[-1]
        self.beta = beta
        self.encoder_activation_fn = encoder_activation_fn
        self.decoder_activation_fn = decoder_activation_fn
        self.is_training = (learning_rate is not None)
        self.is_denoising = denoising
        self.optimizer = tf.train.AdamOptimizer(learning_rate) if self.is_training else None
        self.logs_dir = os.path.join(util.get_logs_dir(), str(seq_index))

        if self.is_training:
            util.prepare_dir(self.logs_dir)
        else:
            assert os.path.exists(self.logs_dir)

        self.encoder_get_weights = util.get_weights_he \
            if 'elu' in encoder_activation_fn.__name__ else util.get_weights_xavier
        self.decoder_get_weights = util.get_weights_he \
            if 'elu' in decoder_activation_fn.__name__ else util.get_weights_xavier

        self._build_network()
        self._define_loss_function()

        init_op = tf.global_variables_initializer()
        self.merged_summary_op = tf.summary.merge_all()
        self.saver = tf.train.Saver(max_to_keep=1)

        self.sess = tf.InteractiveSession()
        self.sess.run(init_op)