# --- Experiment hyper-parameters -------------------------------------------
# NOTE(review): "numHiddentUnits_decoder" is misspelt ("Hiddent"), but the
# name is read elsewhere in this file (checkpoint0), so it is kept as-is.
numHiddentUnits_decoder = 400
numberOfInducingPoints = 500
learning_rate = 1e-3
numTestSamples = 100
numHiddenLayers_decoder = 1

# print() works identically under Python 2 (parenthesised expression) and 3.
print("Initialising")

# Build the variational auto-encoder / sparse-GP model.
# NOTE(review): batchSize, dimX, dimZ, x_train, kernelType, the encoder-type
# settings, Xu_optimise, numHiddenUnits_encoder and VA itself must be defined
# earlier in this file (not visible in this chunk).
va = VA(
    numberOfInducingPoints,  # Number of inducing points in sparse GP
    batchSize,               # Size of mini batch
    dimX,                    # Dimensionality of the latent co-ordinates
    dimZ,                    # Dimensionality of the latent variables
    x_train,                 # [NxP] matrix of observations
    kernelType=kernelType,
    encoderType_qX=encoderType_qX,  # 'FreeForm1', 'FreeForm2', 'MLP', 'Kernel'.
    encoderType_rX=encoderType_rX,  # 'MLP', 'Kernel', 'NoEncoding'.
    Xu_optimise=Xu_optimise,
    numHiddenUnits_encoder=numHiddenUnits_encoder,
    numHiddenUnits_decoder=numHiddentUnits_decoder,
    numHiddenLayers_decoder=numHiddenLayers_decoder,
    continuous=True,
)

# Randomise model parameters, then seed the inducing inputs from the
# (randomised) latent co-ordinates.
va.randomise()
# va.Xz.set_value()
va.init_Xu_from_Xz()

# One kernel hyper-parameter per latent dimension plus one, all set to 10.
theta = np.ones((1, dimX + 1)) * 10
va.setKernelParameters(theta)
def checkpoint0(dataset):
    """Build, configure, and randomise a VA model for *dataset*.

    Parameters
    ----------
    dataset : str
        Name of the dataset to load. Only 'MNIST' is implemented.

    Returns
    -------
    tuple
        ``(model, srng)`` — the initialised VA model and a shared
        random-number stream from ``utils.srng()``.

    Raises
    ------
    RuntimeError
        If *dataset* is anything other than 'MNIST'.
    """
    if dataset == 'MNIST':
        continuous = True
    else:
        # BUG FIX: the exception was previously constructed but never raised,
        # so unsupported datasets fell through with `continuous` undefined.
        raise RuntimeError('Case not implemented')

    load_dataset_from_name(dataset)

    model = VA(
        numberOfInducingPoints,  # Number of inducing points in sparse GP
        batchSize,               # Size of mini batch
        dimX,                    # Dimensionality of the latent co-ordinates
        dimZ,                    # Dimensionality of the latent variables
        x_train,                 # [NxP] matrix of observations
        kernelType=kernelType,
        encoderType_qX=encoderType_qX,  # 'FreeForm', 'MLP', 'Kernel'.
        encoderType_rX=encoderType_rX,  # 'FreeForm', 'MLP', 'Kernel', 'NoEncoding'.
        encoderType_ru=encoderType_ru,  # 'FreeForm', 'MLP', 'NoEncoding'
        Xu_optimise=Xu_optimise,
        numHiddenUnits_encoder=numHiddenUnits_encoder,
        # NOTE(review): this keyword was spelt "numHiddentUnits_decoder" here
        # but "numHiddenUnits_decoder" at the other VA(...) call site in this
        # file; made consistent with that site — confirm against VA.__init__.
        numHiddenUnits_decoder=numHiddentUnits_decoder,
        continuous=continuous,
    )

    model.construct_L_using_r()
    model.setKernelParameters(
        0.01,
        5 * np.ones((2,)),
        1e-100,
        0.5,
        [1e-10, 1e-10],
        [10, 10],
    )
    model.randomise()
    model.constructUpdateFunction()
    # BUG FIX: the original then did `model = model.randomise()`, rebinding
    # `model` to randomise()'s return value — which appears to be None,
    # judging by the bare `model.randomise()` call above — thereby clobbering
    # the model just before returning it. randomise() has already been called.
    srng = utils.srng()
    return model, srng