print k, opt_params[k].shape opt_params = np.load( './chkpt-ipython/DMM_lr-0_0008-dh-40-ds-2-nl-relu-bs-200-ep-40-rs-80-rd-0_1-infm-R-tl-2-el-2-ar-2_0-use_p-approx-rc-lstm-uid-EP30-params.npz' ) for k in opt_params: print k, opt_params[k].shape import glob, os, sys, time sys.path.append('../') from utils.misc import getConfigFile, readPickle, displayTime start_time = time.time() from model_th.dmm import DMM import model_th.learning as DMM_learn import model_th.evaluate as DMM_evaluate displayTime('importing DMM', start_time, time.time()) #This is the prefix we will use DIR = './chkpt-ipython/' prefix = 'DMM_lr-0_0008-dh-40-ds-2-nl-relu-bs-200-ep-40-rs-80-rd-0_1-infm-R-tl-2-el-2-ar-2_0-use_p-approx-rc-lstm-uid' pfile = os.path.join(DIR, prefix + '-config.pkl') print 'Hyperparameters in: ', pfile, 'Found: ', os.path.exists(pfile) #The hyperparameters are saved in a pickle file - lets load them here params = readPickle(pfile, quiet=True)[0] #Reload the model at Epoch 30 EP = '-EP30' #File containing model paramters reloadFile = os.path.join(DIR, prefix + EP + '-params.npz') print 'Model parameters in: ', reloadFile
""" Add dataset and NADE parameters to "params" which will become part of the model """ for k in ['dim_observations', 'data_type']: params[k] = dataset[k] mapPrint('Options: ', params) if params['use_nade']: params['data_type'] = 'binary_nade' """ import DKF + learn/evaluate functions """ start_time = time.time() from stinfmodel.dkf import DKF import stinfmodel.learning as DKF_learn import stinfmodel.evaluate as DKF_evaluate displayTime('import DKF', start_time, time.time()) dkf = None #Remove from params start_time = time.time() removeIfExists('./NOSUCHFILE') reloadFile = params.pop('reloadFile') """ Reload parameters if reloadFile exists otherwise setup model from scratch and initialize parameters randomly. """ if os.path.exists(reloadFile): pfile = params.pop('paramFile') """ paramFile is set inside the BaseClass in theanomodels to point to the pickle file containing params""" assert os.path.exists(pfile), pfile + ' not found. Need paramfile' print 'Reloading trained model from : ', reloadFile
#Setup VAE Model (or reload from existing savefile) start_time = time.time() from optvaemodels.vae import VAE as Model import optvaemodels.vae_learn as Learn import optvaemodels.vae_evaluate as Evaluate import optvaemodels.evaluate_vecs as EVECS additional_attrs = {} if params['data_type'] == 'bow': additional_attrs = {} tfidf = TfidfTransformer(norm=None) tfidf.fit(dataset['train']) #Get normalized idf vectors additional_attrs['idf'] = tfidf.idf_ displayTime('import vae', start_time, time.time()) vae = None #Remove from params start_time = time.time() removeIfExists('./NOSUCHFILE') reloadFile = params.pop('reloadFile') if os.path.exists(reloadFile): pfile = params.pop('paramFile') assert os.path.exists(pfile), pfile + ' not found. Need paramfile' print 'Reloading trained model from : ', reloadFile print 'Assuming ', pfile, ' corresponds to model' model = Model(params, paramFile=pfile, reloadFile=reloadFile, additional_attrs=additional_attrs) else:
# Load the dataset named in the hyperparameters and derive a per-dataset save
# directory. NOTE(review): `load`, `createIfAbsent`, `mapPrint` and
# `removeIfExists` are defined outside this excerpt — confirm their origin.
dataset = load(params['dataset'])
params['savedir'] += '-' + params['dataset']
createIfAbsent(params['savedir'])
# Copy dataset-derived settings into the model hyperparameters.
for k in ['dim_observations', 'data_type']:
    params[k] = dataset[k]
mapPrint('Options: ', params)

""" Import files for learning """
start_time = time.time()
from model_th.dmm import DMM
import model_th.learning as DMM_learn
import model_th.evaluate as DMM_evaluate
displayTime('import DMM', start_time, time.time())

dmm = None
""" Reload from savefile or train new model """
start_time = time.time()
removeIfExists('./NOSUCHFILE')
# `reloadFile` is popped so it does not persist inside the saved model config.
reloadFile = params.pop('reloadFile')
if os.path.exists(reloadFile):
    pfile = params.pop('paramFile')
    assert os.path.exists(pfile), pfile + ' not found. Need paramfile'
    print 'Reloading trained model from : ', reloadFile
    print 'Assuming ', pfile, ' corresponds to model'
    dmm = DMM(params, paramFile=pfile, reloadFile=reloadFile)
else:
    # Train-from-scratch path: the config is pickled under a unique id.
    # NOTE(review): the remainder of this branch lies beyond this excerpt.
    pfile = params['savedir'] + '/' + params['unique_id'] + '-config.pkl'
# Default to the JSB chorales dataset when none was specified on the command
# line / in the config.
if params['dataset'] == '':
    params['dataset'] = 'jsb'
# NOTE(review): `loadDataset`, `createIfAbsent`, `mapPrint` and
# `removeIfExists` are defined outside this excerpt — confirm their origin.
dataset = loadDataset(params['dataset'])
params['savedir'] += '-' + params['dataset']
createIfAbsent(params['savedir'])

#Saving/loading
# Copy dataset-derived settings into the model hyperparameters.
for k in ['dim_observations', 'data_type']:
    params[k] = dataset[k]
mapPrint('Options: ', params)

#Setup VAE Model (or reload from existing savefile)
start_time = time.time()
from models.lstm import LSTM
displayTime('import LSTM', start_time, time.time())

lstm = None
#Remove from params
start_time = time.time()
removeIfExists('./NOSUCHFILE')
# `reloadFile` is popped so it does not persist inside the saved model config.
reloadFile = params.pop('reloadFile')
if os.path.exists(reloadFile):
    pfile = params.pop('paramFile')
    assert os.path.exists(pfile), pfile + ' not found. Need paramfile'
    print 'Reloading trained model from : ', reloadFile
    print 'Assuming ', pfile, ' corresponds to model'
    lstm = LSTM(params, paramFile=pfile, reloadFile=reloadFile)
else:
    # Train-from-scratch path: the config is pickled under a unique id.
    # NOTE(review): the remainder of this branch lies beyond this excerpt.
    pfile = params['savedir'] + '/' + params['unique_id'] + '-config.pkl'
    print 'Training model from scratch. Parameters in: ', pfile