Example 1
                        metavar='TD',
                        help='name of saved model (default: '
                        ')')

    args = parser.parse_args()

    batch_loader = BatchLoader('',
                               custom_index=True,
                               train_data_name=args.sample_data)
    parameters = Parameters(batch_loader.max_word_len,
                            batch_loader.max_seq_len,
                            batch_loader.words_vocab_size,
                            batch_loader.chars_vocab_size)

    rvae = RVAE(parameters)
    rvae.load_state_dict(t.load('./trained_model/{}'.format(args.model_name)))
    if args.use_cuda:
        rvae = rvae.cuda()

    sampler = rvae.latent_sampler(batch_loader)

    zs = {}
    for i in range(
            0,
            int(batch_loader.total_lines('train') / args.batch_size) + 1):
        indexes = np.array(
            range(
                i * args.batch_size,
                min((i + 1) * args.batch_size,
                    batch_loader.total_lines('train'))))
        if len(indexes) > 0:
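
Example 1 is cut off inside its batching loop. The pattern it uses, chunking row indexes into batch-sized numpy arrays with a guard for the possibly empty tail chunk, can be isolated as a small helper; the following is a hypothetical sketch, not code from the repo:

    import numpy as np

    def batch_indexes(total_lines, batch_size):
        """Yield index arrays covering [0, total_lines) in chunks of batch_size."""
        for i in range(total_lines // batch_size + 1):
            indexes = np.arange(i * batch_size,
                                min((i + 1) * batch_size, total_lines))
            if len(indexes) > 0:  # last chunk is empty when batch_size divides total_lines
                yield indexes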
Example 2
                        type=bool,
                        default=True,
                        metavar='CUDA',
                        help='use cuda (default: True)')
    parser.add_argument('--num-sample',
                        type=int,
                        default=100,
                        metavar='NS',
                        help='num samplings (default: 100)')

    args = parser.parse_args()

    batch_loader = BatchLoader('')
    parameters = Parameters(batch_loader.max_word_len,
                            batch_loader.max_seq_len,
                            batch_loader.words_vocab_size,
                            batch_loader.chars_vocab_size)

    rvae = RVAE(parameters)
    rvae.load_state_dict(t.load('trained_RVAE_code'))
    if args.use_cuda:
        rvae = rvae.cuda()

    with open("code_sampling_100.txt", 'w') as cs:
        for iteration in range(args.num_sample):
            seed = np.random.normal(size=[1, parameters.latent_variable_size])
            result = rvae.sample(batch_loader, 50, seed, args.use_cuda)
            # print(result)
            # print()
            cs.write(result + '\n')
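
A caveat on the --use-cuda argument above (Example 7 declares it the same way): argparse's type=bool is a known pitfall, because bool('False') is True, so any non-empty value on the command line enables CUDA. A hypothetical rewrite that avoids it, not the repo's actual code:

    # bool('False') == True, so type=bool cannot turn the flag off from
    # the command line. A store-true flag sidesteps the problem:
    parser.add_argument('--use-cuda',
                        action='store_true',
                        help='use cuda (default: False)')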
Example 3
    tensor_files = [['data/super/train_word_tensor_2.npy'],
                    ['data/super/train_character_tensor_2.npy']]
    batch_loader_2 = BatchLoader(data_files, idx_files, tensor_files)
    parameters_2 = Parameters(batch_loader_2.max_word_len,
                            batch_loader_2.max_seq_len,
                            batch_loader_2.words_vocab_size,
                            batch_loader_2.chars_vocab_size)


    '''======================================== RVAE loading ==================================================
    '''
    print('Started loading')
    start_time = time.time()
    rvae = RVAE(parameters, parameters_2)
    rvae.load_state_dict(t.load(args.save_model))
    if args.use_cuda:
        rvae = rvae.cuda()
    loading_time = time.time() - start_time
    print('Time elapsed in loading model =', loading_time)
    print('Finished loading')

    ''' ==================================== Parameters Initialising ===========================================
    '''
    n_best = args.beam_top
    beam_size = args.beam_size

    assert n_best <= beam_size
    use_cuda = args.use_cuda

    if args.use_file:
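
Example 3 stops at the if args.use_file branch. The assert above it encodes the standard beam-search invariant: a beam that keeps beam_size hypotheses alive per step can return at most beam_size ranked outputs, so n_best may not exceed it. A trivial illustration with assumed values:

    beam_size = 5   # hypotheses kept alive at each decoding step
    n_best = 3      # ranked outputs requested from the finished beam
    assert n_best <= beam_size  # top-3 of a 5-wide beam is well defined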
Example 4
    batch_loader = BatchLoader(path='',
                               custom_index=False,
                               train_data_name=args.train_data)

    parameters = Parameters(batch_loader.max_word_len,
                            batch_loader.max_seq_len,
                            batch_loader.words_vocab_size,
                            batch_loader.chars_vocab_size)

    rvae = RVAE(parameters)
    optimizer = Adam(rvae.learnable_parameters(), args.learning_rate)

    if args.use_trained:
        rvae.load_state_dict(
            t.load('./trained_model/{}_trained_{}'.format(
                args.train_data.split('.')[0], args.start_epoch)))
        optimizer.load_state_dict(
            t.load('./trained_model/{}_trained_optimizer_{}'.format(
                args.train_data.split('.')[0], args.start_epoch)))

    if args.use_cuda:
        rvae = rvae.cuda()

    train_step = rvae.trainer(optimizer, batch_loader)
    validate = rvae.validater(batch_loader)

    ce_result = []
    kld_result = []

    for iteration in range(args.start_epoch, args.num_iterations):
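
The resume branch in Example 4 loads both model and optimizer state from per-epoch files, which implies the (truncated) training loop saves both under the same naming scheme. A hypothetical sketch of that save side, assuming the path convention the load calls expect:

    # Hypothetical counterpart to the load calls above; the paths mirror
    # the '{data}_trained_{epoch}' convention used when resuming.
    prefix = args.train_data.split('.')[0]
    t.save(rvae.state_dict(),
           './trained_model/{}_trained_{}'.format(prefix, iteration))
    t.save(optimizer.state_dict(),
           './trained_model/{}_trained_optimizer_{}'.format(prefix, iteration))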
Example 5
                        help='name of model to save (default: '
                        ')')
    args = parser.parse_args()

    batch_loader = BatchLoader('')
    parameters = Parameters(batch_loader.max_word_len,
                            batch_loader.max_seq_len,
                            batch_loader.words_vocab_size,
                            batch_loader.chars_vocab_size)

    rvae = RVAE(parameters)
    ce_result = []
    kld_result = []

    if args.use_trained:
        rvae.load_state_dict(
            t.load('saved_models/trained_RVAE_' + args.model_name))
        ce_result = list(
            np.load('saved_models/ce_result_{}.npy'.format(args.model_name)))
        kld_result = list(
            np.load('saved_models/kld_result_npy_{}.npy'.format(
                args.model_name)))

    if args.use_cuda:
        rvae = rvae.cuda()

    optimizer = Adam(rvae.learnable_parameters(), args.learning_rate)

    train_step = rvae.trainer(optimizer, batch_loader)
    validate, validation_sample = rvae.validater(batch_loader)

    for iteration in range(args.num_iterations):
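
Example 5 reloads ce_result and kld_result from .npy files when resuming, so the (truncated) training loop presumably persists them as it goes. A hypothetical sketch of that step, matching the filenames used by the load calls:

    # Assumed save-side counterpart of the np.load calls above.
    np.save('saved_models/ce_result_{}.npy'.format(args.model_name),
            np.array(ce_result))
    np.save('saved_models/kld_result_npy_{}.npy'.format(args.model_name),
            np.array(kld_result))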
Example 6
                        path + 'super/train_character_tensor_2.npy',
                        path + 'super/valid_character_tensor_2.npy'
                    ]]
    batch_loader_2 = BatchLoader(data_files, idx_files, tensor_files, path)
    parameters_2 = Parameters(batch_loader_2.max_word_len,
                              batch_loader_2.max_seq_len,
                              batch_loader_2.words_vocab_size,
                              batch_loader_2.chars_vocab_size, path)
    '''=================================================================================================
    '''

    rvae = RVAE(parameters, parameters_2)
    if args.use_trained != '':
        trainedModelName = os.path.join(args.use_trained, 'trained_RVAE')
        rvae.load_state_dict(t.load(trainedModelName))
    if args.use_cuda:
        print("Using cuda")
        rvae = rvae.cuda()

    optimizer = Adam(rvae.learnable_parameters(), args.learning_rate)

    train_step = rvae.trainer(optimizer, batch_loader, batch_loader_2)
    validate = rvae.validater(batch_loader, batch_loader_2)

    loss_tr_result = ["loss_train"]
    ce_result = ["cross_entropy_train"]
    kld_result = ["kld_train"]
    coef_result = ["coef_train"]
    it = ["iteration"]
    loss_val_result = ["loss_val"]
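
The metric lists in Example 6 are seeded with header strings ('iteration', 'loss_train', ...), which suggests they are later zipped column-wise and written out as CSV. A hypothetical sketch of that export; the filename is an assumption:

    import csv

    # Each list starts with its header string, so zipping them yields a
    # header row followed by one row per logged iteration.
    with open('training_log.csv', 'w') as f:
        writer = csv.writer(f)
        for row in zip(it, loss_tr_result, ce_result, kld_result,
                       coef_result, loss_val_result):
            writer.writerow(row)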
Example 7
                        default=True,
                        metavar='CUDA',
                        help='use cuda (default: True)')
    # parser.add_argument('--num-sample', type=int, default=10, metavar='NS',
    #                     help='num samplings (default: 10)')

    args = parser.parse_args()

    batch_loader = BatchLoader('')
    parameters = Parameters(batch_loader.max_word_len,
                            batch_loader.max_seq_len,
                            batch_loader.words_vocab_size,
                            batch_loader.chars_vocab_size)

    rvae = RVAE(parameters)
    rvae.load_state_dict(torch.load('trained_RVAE'))
    if args.use_cuda:
        rvae = rvae.cuda()

    seq_len = 50
    seed = np.random.normal(size=[1, parameters.latent_variable_size])

    data = [["how are you ?"], ["how are you doing"]]
    data_words = [[line.split() for line in target] for target in data]
    word_tensor = np.array(
        [[list(map(batch_loader.word_to_idx.get, line)) for line in target]
         for target in data_words])

    character_tensor = np.array(
        [[list(map(batch_loader.encode_characters, line)) for line in target]
         for target in data_words])
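
One caveat about the tensor construction in Example 7: dict.get returns None for any word missing from word_to_idx, which would poison the resulting arrays. A hypothetical safer variant; the '<unk>' token and the 0 fallback are assumptions about the vocabulary, not repo facts:

    # Map out-of-vocabulary words to an unknown-token index instead of None.
    unk_idx = batch_loader.word_to_idx.get('<unk>', 0)
    word_tensor = np.array(
        [[[batch_loader.word_to_idx.get(w, unk_idx) for w in line]
          for line in target]
         for target in data_words])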