Code example #1
def load(dset):
    # Polyphonic-music datasets are returned as per-split tensors plus masks.
    if dset in ['jsb', 'nottingham', 'musedata', 'piano']:
        musicdata = loadDataset(dset)
        dataset = {}
        for k in ['train', 'valid', 'test']:
            dataset[k] = {}
            dataset[k]['tensor'] = musicdata[k]
            dataset[k]['mask'] = musicdata['mask_' + k]
        dataset['data_type'] = musicdata['data_type']
        dataset['dim_observations'] = musicdata['dim_observations']
    elif dset == 'synthetic':
        dataset = loadSyntheticData()
    else:
        raise ValueError('Invalid dataset: ' + dset)
    return dataset
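
A minimal usage sketch (assuming the underlying loaders are importable and return NumPy arrays), just to show the shape of the dict that load builds:

# Hypothetical usage: load a music dataset and inspect its layout.
dataset = load('jsb')
print(dataset['data_type'], dataset['dim_observations'])
for split in ['train', 'valid', 'test']:
    # Each split pairs a data tensor with a mask marking valid timesteps.
    print(split, dataset[split]['tensor'].shape, dataset[split]['mask'].shape)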
Code example #2
import os, time, sys
sys.path.append('../')
import numpy as np
from datasets.load import loadDataset
from parse_args_dkf import params
from utils.misc import removeIfExists, createIfAbsent, mapPrint, saveHDF5, displayTime

params['dim_stochastic'] = 1

if params['dataset'] == '':
    params['dataset'] = 'synthetic9'
dataset = loadDataset(params['dataset'])
params['savedir'] += '-' + params['dataset']
createIfAbsent(params['savedir'])

#Store dataset parameters into params
for k in ['dim_observations', 'dim_actions', 'data_type']:
    params[k] = dataset[k]
mapPrint('Options: ', params)

#Setup VAE Model (or reload from existing savefile)
start_time = time.time()
from stinfmodel.dkf import DKF
import stinfmodel.evaluate as DKF_evaluate
import stinfmodel.learning as DKF_learn
displayTime('import DKF', start_time, time.time())
dkf = None

#Remove from params
start_time = time.time()
removeIfExists('./NOSUCHFILE')
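
The script relies on two small filesystem helpers from utils.misc; their bodies are not shown here, so the following is a hedged sketch of what the names suggest they do:

import os

def createIfAbsent(path):
    # Create the save directory if it does not already exist.
    if not os.path.exists(path):
        os.makedirs(path)

def removeIfExists(path):
    # Delete a file if present; silently do nothing otherwise.
    if os.path.exists(path):
        os.remove(path)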
Code example #3
File: train.py Project: rahulk90/vae_sparse
import os, time, sys
sys.path.append('../')
import numpy as np
from datasets.load import loadDataset
from optvaedatasets.load import loadDataset as loadDataset_OVAE
from optvaeutils.parse_args import params
from utils.misc import removeIfExists, createIfAbsent, mapPrint, saveHDF5, displayTime, getLowestError
from sklearn.feature_extraction.text import TfidfTransformer

dataset = params['dataset']
params['savedir'] += '-' + dataset + '-' + params['opt_type']
createIfAbsent(params['savedir'])
if 'mnist' in dataset:
    dataset = loadDataset(dataset)
    if 'binarized' not in params['dataset']:  # check the dataset name, not the loaded dict
        dataset['train'] = (dataset['train'] > 0.5) * 1.
        dataset['valid'] = (dataset['valid'] > 0.5) * 1.
        dataset['test'] = (dataset['test'] > 0.5) * 1.
else:
    dataset = loadDataset_OVAE(dataset)
#Store dataset parameters into params
for k in ['dim_observations', 'data_type']:
    params[k] = dataset[k]
if params['data_type'] == 'bow':
    params['max_word_count'] = dataset['train'].max()
mapPrint('Options: ', params)
#Setup VAE Model (or reload from existing savefile)
start_time = time.time()
from optvaemodels.vae import VAE as Model
import optvaemodels.vae_learn as Learn
import optvaemodels.vae_evaluate as Evaluate
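
The MNIST branch above applies a fixed 0.5 threshold to grey levels; here is a self-contained sketch of that binarization, with random data standing in for the real arrays:

import numpy as np

rng = np.random.RandomState(0)
train = rng.rand(5, 784)           # stand-in for dataset['train'], values in [0, 1]
train_binary = (train > 0.5) * 1.  # same fixed-threshold binarization as the snippet
assert set(np.unique(train_binary)) <= {0., 1.}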
Code example #4
                                                        lr = lr,  
                                                        grad_noise = self.params['grad_noise'],
                                                        rng = self.srng)
            self._p('# additional updates: ' + str(len(self.updates)))
            optimizer_up += anneal_update + self.updates
            fxn_inputs      = [X]
            self.train      = theano.function(fxn_inputs, [elbo, norm_list[0], norm_list[1], norm_list[2], anneal.sum(), lr.sum()],
                                              updates = optimizer_up, name = 'Train')
        else:
            assert False, 'Invalid optimization type: ' + self.params['opt_type']
        self._p('Done creating functions for training')
    
if __name__ == '__main__':
    print('Initializing VAE')
    pfile = './tmp'
    from optvaeutils.parse_args_vae import params
    params['dim_observations'] = 2000
    params['data_type']   = 'bow'
    params['opt_type']    = 'none'
    params['opt_method']  = 'adam'
    params['anneal_finopt_rate'] = 100
    params['GRADONLY'] = True
    vae   = VAE(params, paramFile=pfile)
    from datasets.load import loadDataset
    dataset = loadDataset('binarized_mnist')
    np.random.seed(1)
    idxlist = np.random.permutation(dataset['train'].shape[0])
    X = dataset['train'][idxlist[:200]].astype('float32')
    os.remove(pfile)
    import ipdb; ipdb.set_trace()
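
The compiled self.train above follows the usual Theano pattern: theano.function takes symbolic inputs, outputs, and an updates list that is applied on every call. A minimal, self-contained sketch of that pattern (not the project's model):

import numpy as np
import theano
import theano.tensor as T

# A toy loss over a shared weight matrix that the updates modify in place.
X = T.fmatrix('X')
W = theano.shared(np.zeros((3, 3), dtype='float32'), name='W')
loss = T.sum((T.dot(X, W) - X) ** 2)
lr = np.float32(0.01)

# Compiling with updates=... makes every call apply one SGD step.
train = theano.function([X], loss, updates=[(W, W - lr * T.grad(loss, W))], name='Train')
batch = np.random.rand(4, 3).astype('float32')
print(train(batch))  # repeated calls drive the loss toward zero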
Code example #5
File: logistic.py Project: vroomzel/theanomodels
            Y = labels[st_idx:end_idx][:, None].astype(config.floatX)
            batch_nll = self.evaluate(X=X, Y=Y)
            nll += batch_nll
            self._p(('\tBnum:%d, Batch Bound: %.4f') %
                    (bnum, batch_nll / float(X.shape[0])))
        nll /= float(labels.shape[0])  # normalize over all evaluated examples, not just the last batch
        end_time = time.time()
        self._p(('(Evaluation) NLL: %.4f [Took %.4f seconds]') %
                (nll, end_time - start_time))
        return nll


if __name__ == '__main__':
    print('Starting Logistic Regression')
    from datasets.load import loadDataset
    mnist = loadDataset('mnist')
    # Binary task: separate digits >= 5 from digits < 5.
    labels_train = (mnist['train_y'] >= 5.) * 1.
    labels_test = (mnist['test_y'] >= 5.) * 1.
    from utils.parse_args_vae import params
    params['dim_observations'] = 784
    params['validate_only'] = False
    pfile = 'tmp'
    LR = LogisticRegression(params, paramFile=pfile)
    results = LR.learn(mnist['train'],
                       labels_train,
                       epoch_start=0,
                       epoch_end=100,
                       savefreq=10,
                       batch_size=2000,
                       dataset_eval=mnist['test'],
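
The evaluation loop in this example accumulates a summed per-batch NLL and then normalizes by the number of evaluated examples. A NumPy-only sketch of that bookkeeping, with nll_of_batch as a stand-in for self.evaluate:

import numpy as np

def nll_of_batch(X, Y):
    # Stand-in for self.evaluate(X=X, Y=Y): a summed Bernoulli NLL at p = 0.5.
    p = np.full(Y.shape, 0.5)
    return -np.sum(Y * np.log(p) + (1. - Y) * np.log(1. - p))

data = np.random.rand(10, 4)
labels = (np.random.rand(10) > 0.5) * 1.
nll, batch_size = 0., 4
for st_idx in range(0, data.shape[0], batch_size):
    X = data[st_idx:st_idx + batch_size]
    Y = labels[st_idx:st_idx + batch_size]
    nll += nll_of_batch(X, Y)
nll /= float(labels.shape[0])  # average per example over the whole set
print(nll)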