Пример #1
0
def main():
    """Build the vocabulary, load the training data, and collect the
    length-10 sequence pairs before creating the model placeholders.

    Relies on module-level names defined elsewhere in this file:
    ``our_reader``, ``cornell_reader``, ``cfg`` and ``model``.
    """
    reader1 = our_reader.Reader(cfg['vocab_size'], cfg['buckets'])
    # NOTE(review): reader2 is constructed but never used in this function;
    # kept in case Reader() has required side effects -- confirm.
    reader2 = cornell_reader.Reader(cfg)

    reader1.build_dict(cfg['dictionary_name'], cfg['reversed_dictionary_name'], cfg['path']['train'])
    reader1.read_data(cfg['path']['train'])

    # BUG FIX: the original filtered `reader.dataset_*`, but no `reader` is
    # defined in this scope -- the data was loaded into `reader1` above.
    encoder_inputs_10 = [i for i in reader1.dataset_enc if len(i) == 10]
    decoder_inputs_10 = [i for i in reader1.dataset_dec if len(i) == 10]
    encoder_inputs__toks_10 = [i for i in reader1.dataset_enc_tok if len(i) == 10]
    decoder_inputs__toks_10 = [i for i in reader1.dataset_dec_tok if len(i) == 10]

    print("done")

    model.create_placeholders()
Пример #2
0
    # Load the experiment configuration and its matching dataset.
    identifier = 'mnistfull'
    settings = utils.load_settings_from_file(identifier)

    samples, pdf, labels = data_utils.get_samples_and_labels(settings)

    # Inject every settings key into the local namespace.
    # NOTE(review): inside a function, locals().update() does not create real
    # locals in CPython -- the bare names used below (batch_size, seq_length,
    # ...) presumably resolve at module scope; verify against the full file.
    locals().update(settings)
    # json.dump(settings, open('./experiments/settings/' + identifier + '.txt', 'w'), indent=0)

    # Persist the training data under the experiment identifier.
    data_path = './experiments/data/' + identifier + '.data.npy'
    np.save(data_path, {'samples': samples, 'pdf': pdf, 'labels': labels})
    print('Saved training data to', data_path)

    # --- build model --- #

    Z, X, CG, CD, CS = model.create_placeholders(batch_size, seq_length,
                                                 latent_dim, num_signals,
                                                 cond_dim)

    # Per-network hyperparameter subsets, sliced out of the flat settings.
    discriminator_vars = [
        'hidden_units_d', 'seq_length', 'cond_dim', 'batch_size', 'batch_mean'
    ]
    discriminator_settings = {key: settings[key] for key in discriminator_vars}
    generator_vars = [
        'hidden_units_g', 'seq_length', 'batch_size', 'num_generated_features',
        'cond_dim', 'learn_scale'
    ]
    generator_settings = {key: settings[key] for key in generator_vars}

    # Conditional GAN whenever a conditioning dimension is configured.
    CGAN = cond_dim > 0
    print(CGAN)
    # NOTE(review): this statement is truncated in this excerpt -- the
    # remaining GAN_loss arguments continue beyond the visible source.
    D_loss, G_loss, accuracy = model.GAN_loss(Z,
Пример #3
0
# The number of signal channels is the trailing axis of the sample array.
num_variables = samples.shape[2]
print('num_variables:', num_variables)

# --- save settings, data --- #
print('Ready to run with settings:')
for (k, v) in settings.items():
    print(v, '\t', k)

# add the settings to local environment
# WARNING: at this point a lot of variables appear (one global per key)
locals().update(settings)

# FIX: the original passed a bare open() to json.dump and leaked the file
# handle; the with-block closes it deterministically.
with open('./experiments/settings/' + identifier + '.txt', 'w') as settings_file:
    json.dump(settings, settings_file, indent=0)

# --- build model --- #
# preparation: data placeholders and model parameters
Z, X, T = model.create_placeholders(batch_size, seq_length, latent_dim,
                                    num_variables)

# Slice the per-network hyperparameters out of the flat settings dict.
discriminator_vars = [
    'hidden_units_d', 'seq_length', 'batch_size', 'batch_mean'
]
discriminator_settings = dict((k, settings[k]) for k in discriminator_vars)
generator_vars = ['hidden_units_g', 'seq_length', 'batch_size', 'learn_scale']
generator_settings = dict((k, settings[k]) for k in generator_vars)
# The generator must emit as many signals as the data provides.
generator_settings['num_signals'] = num_variables

# model: GAN losses
D_loss, G_loss = model.GAN_loss(Z, X, generator_settings,
                                discriminator_settings)
# NOTE(review): this GAN_solvers call is truncated in this excerpt; its
# argument list continues beyond the visible source.
D_solver, G_solver, priv_accountant = model.GAN_solvers(
    D_loss,
    G_loss,
    learning_rate,
Пример #4
0
# --- training sample --- #
# --- save settings, data --- #
print('Ready to run with settings:')
for (k, v) in settings.items():
    print(v, '\t', k)

# add the settings to local environment
# WARNING: at this point a lot of variables appear (one global per key)
locals().update(settings)

# FIX: the original handed a bare open() to json.dump and never closed the
# file; the with-block releases the handle deterministically.
with open('./experiments/settings/' + identifier + '.txt', 'w') as settings_file:
    json.dump(settings, settings_file, indent=0)

# --- build model --- #

Z, X, T = model.create_placeholders(batch_size, seq_length, latent_dim,
                                    num_signals)

# Per-network hyperparameter subsets taken from the flat settings dict.
discriminator_vars = [
    'hidden_units_d', 'seq_length', 'batch_size', 'batch_mean'
]
discriminator_settings = dict((k, settings[k]) for k in discriminator_vars)

generator_vars = [
    'hidden_units_g', 'seq_length', 'batch_size', 'num_generated_features',
    'learn_scale'
]
generator_settings = dict((k, settings[k]) for k in generator_vars)

D_loss, G_loss = model.GAN_loss(Z, X, generator_settings,
                                discriminator_settings)
# NOTE(review): truncated in this excerpt -- the GAN_solvers argument list
# continues beyond the visible source.
D_solver, G_solver, priv_accountant = model.GAN_solvers(
Пример #5
0
# add the settings to the module namespace
# WARNING: at this point a lot of variables appear (one global per key)
locals().update(settings)

# FIX: the original passed a bare open() to json.dump and leaked the file
# handle; the with-block closes it deterministically.
with open('./experiments/settings/' + identifier + '.txt', 'w') as settings_file:
    json.dump(settings, settings_file, indent=0)
epoch = 150

#if not data == 'load':
#    data_path = './experiments/data/' + identifier + '.data.npy'
#    np.save(data_path, {'samples': samples, 'pdf': pdf, 'labels': labels})
#    print('Saved training data to', data_path)

# --- build model --- #

# Placeholders: Z (latent input), X (data) and the conditioning inputs
# CG/CD/CS -- presumably generator/discriminator/sampling conditions;
# confirm against model.create_placeholders.
Z, X, CG, CD, CS = model.create_placeholders(batch_size, seq_length,
                                             latent_dim,
                                             num_generated_features, cond_dim)

# Project the flat settings dict onto the subsets each sub-model takes.
discriminator_vars = [
    'hidden_units_d', 'seq_length', 'cond_dim', 'batch_size', 'batch_mean',
    'latent_dim'
]
discriminator_settings = {k: settings[k] for k in discriminator_vars}
generator_vars = [
    'hidden_units_g', 'seq_length', 'batch_size', 'latent_dim',
    'num_generated_features', 'cond_dim', 'learn_scale'
]
generator_settings = {k: settings[k] for k in generator_vars}

# Conditional GAN iff a conditioning dimension is configured; conditioning
# is asserted to be incompatible with label prediction.
CGAN = cond_dim > 0
if CGAN:
    assert not predict_labels
Пример #6
0
        # Tail of a save block whose opening statement is outside this
        # excerpt: persist the conditioning samples for this identifier.
        cond_path = './experiments/cond_data/' + identifier + '.data.npy'
        np.save(cond_path, {'cond_samples': cond_samples_train})
        print('Saved cond images to ', cond_path)

# --- build model --- #

# Evaluation-only shortcut: render the sine plots for this run and stop.
# NOTE(review): `eval` here must be a project module; it shadows the builtin.
if generate_test:
    eval.sine_plot(identifier, 250)
    exit()

# A positive conditioning dimension forces a single output signal.
if cond_dim > 0:
    num_signals = 1

# Build the graph placeholders; `info` runs additionally get a latent code
# input (presumably InfoGAN-style -- confirm against model.create_placeholders).
latent_C = None
if info:
    latent_C, Z, X, CG, CD, CS, cond_sine = model.create_placeholders(
        batch_size, seq_length, latent_dim, num_signals, cond_dim, info,
        latent_C_dim)
else:
    Z, X, CG, CD, CS, cond_sine = model.create_placeholders(
        batch_size, seq_length, latent_dim, num_signals, cond_dim, info)

# Slice out the per-network hyperparameter dictionaries.
discriminator_vars = [
    'hidden_units_d', 'seq_length', 'cond_dim', 'batch_size', 'batch_mean'
]
discriminator_settings = {key: settings[key] for key in discriminator_vars}
generator_vars = [
    'hidden_units_g', 'seq_length', 'batch_size', 'num_generated_features',
    'cond_dim', 'learn_scale'
]
generator_settings = {key: settings[key] for key in generator_vars}