arch['flows'] = [LogisticFlowLayer]
arch['batch_norm'] = False
model_dict['z1y->z2'] = OrderedDict([('arch', arch)])

arch = OrderedDict()
arch['hidden'] = [20]
arch['gain'] = np.sqrt(2)
arch['nonlin'] = lasagne.nonlinearities.tanh
arch['num_output'] = num_features
arch['sample_layer'] = SampleLayer
arch['flows'] = []
arch['batch_norm'] = False
model_dict['yz2->_z1'] = OrderedDict([('arch', arch)])

# set result directory path
res_out = 'examples/results/crism/' + timeStamp().format("")

# construct the semi^2-supervised deep generative model
m = SSDGM(num_features, num_output, variational=True, model_dict=model_dict,
          prior_x=prior_x, prior_y=prior_y, prior_z2=prior_z2,
          loss_x=L2, loss_y=KL,
          coeff_x=1, coeff_y=1e-2, coeff_x_dis=1, coeff_y_dis=1,
arch['flows'] = [LogisticFlowLayer]
arch['batch_norm'] = False
model_dict['z1y->z2'] = OrderedDict([('arch', arch)])

arch = OrderedDict()
arch['hidden'] = [20]
arch['gain'] = np.sqrt(2)
arch['nonlin'] = lasagne.nonlinearities.tanh
arch['num_output'] = num_features
arch['sample_layer'] = SampleLayer
arch['flows'] = []
arch['batch_norm'] = False
model_dict['yz2->_z1'] = OrderedDict([('arch', arch)])

# set result directory path
res_out = 'examples/results/gridsearch/' + timeStamp().format("")

# construct the semi^2-supervised deep generative model
m = SSDGM(num_features, num_output, model_dict=model_dict, variational=True,
          prior_x=prior_x, prior_y=prior_y, prior_z2=prior_z2,
          loss_x=L2, loss_y=KL,
          coeff_x=1, coeff_y=1e-2, coeff_x_dis=1, coeff_y_dis=1e3,
arch['flows'] = [LogisticFlowLayer]
arch['batch_norm'] = False
model_dict['z1y->z2'] = OrderedDict([('arch', arch)])

arch = OrderedDict()
arch['hidden'] = [250, 500]
arch['gain'] = np.sqrt(2)
arch['nonlin'] = lasagne.nonlinearities.tanh
arch['num_output'] = num_features
arch['sample_layer'] = BernoulliSampleLayer
arch['flows'] = []
arch['batch_norm'] = False
model_dict['yz2->_z1'] = OrderedDict([('arch', arch)])

# set result directory path
res_out = 'examples/results/mnist/' + timeStamp().format("")

# construct the semi^2-supervised deep generative model
m = SSDGM(num_features, num_output, variational=True, model_dict=model_dict,
          eq_samples=1, iw_samples=1,
          prior_x=prior_x, prior_y=prior_y, prior_z2=prior_z2,
          loss_x=L2, loss_y=KL,
          coeff_x=1e-1, coeff_y=1e-1, coeff_x_dis=1, coeff_y_dis=1e-2,
          coeff_x_prob=1e-1, coeff_y_prob=0,
          num_epochs=1000, eval_freq=100, lr=1e-2,
          batch_size_Xy_train=10000, batch_size_X__train=10000, batch_size__y_train=10000,
          batch_size_Xy_eval=10000, batch_size_X__eval=10000, batch_size__y_eval=10000,
          res_out=res_out)

# fit the model
m.fit(verbose=True, debug=True, **data)

# auto-set title and plot results (saved to res_out)
title = 'M2'
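# --- Illustrative sketch only ---
# The examples pass a `data` dict into m.fit(**data) without showing how it is built.
# The key names below are assumptions inferred from the batch_size_* arguments
# (Xy = labelled pairs, X_ = inputs without labels, _y = labels without inputs) and
# are NOT taken from this snippet; check the SSDGM.fit signature in the repository
# for the keywords it actually expects.
import numpy as np

num_features, num_output = 28 * 28, 10          # e.g. flattened MNIST digits, 10 classes
n_labelled, n_unlabelled = 1000, 9000
data = {
    'X_train':  np.random.rand(n_labelled, num_features).astype('float32'),    # labelled inputs (assumed key)
    'y_train':  np.random.rand(n_labelled, num_output).astype('float32'),      # matching targets (assumed key)
    'X__train': np.random.rand(n_unlabelled, num_features).astype('float32'),  # unlabelled inputs (assumed key)
    '_y_train': np.random.rand(n_unlabelled, num_output).astype('float32'),    # unpaired targets (assumed key)
    'X_eval':   np.random.rand(n_labelled, num_features).astype('float32'),    # evaluation inputs (assumed key)
    'y_eval':   np.random.rand(n_labelled, num_output).astype('float32'),      # evaluation targets (assumed key)
}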
arch['flows'] = [LogisticFlowLayer]
arch['batch_norm'] = False
model_dict['z1y->z2'] = OrderedDict([('arch', arch)])

arch = OrderedDict()
arch['hidden'] = [50]
arch['gain'] = np.sqrt(2)
arch['nonlin'] = lasagne.nonlinearities.tanh
arch['num_output'] = num_features
arch['sample_layer'] = SampleLayer
arch['flows'] = [SoftplusFlowLayer]
arch['batch_norm'] = False
model_dict['yz2->_z1'] = OrderedDict([('arch', arch)])

# set result directory path
res_out = 'examples/results/raman/' + timeStamp().format("")

# construct the semi^2-supervised deep generative model
m = SSDGM(num_features, num_output, variational=True, model_dict=model_dict,
          prior_x=prior_x, prior_y=prior_y, prior_z2=prior_z2,
          loss_x=L2, loss_y=KL,
          coeff_x=1e-2, coeff_y=0, coeff_x_dis=1, coeff_y_dis=0,
arch['flows'] = [LogisticFlowLayer]
arch['batch_norm'] = False
model_dict['z1y->z2'] = OrderedDict([('arch', arch)])

arch = OrderedDict()
arch['hidden'] = [50]
arch['gain'] = np.sqrt(2)
arch['nonlin'] = lasagne.nonlinearities.tanh
arch['num_output'] = num_features
arch['sample_layer'] = SampleLayer
arch['flows'] = [SoftplusFlowLayer]
arch['batch_norm'] = False
model_dict['yz2->_z1'] = OrderedDict([('arch', arch)])

# set result directory path
res_out = 'examples/results/libs/' + timeStamp().format("")

# construct the semi^2-supervised deep generative model
m = SSDGM(num_features, num_output, model_dict=model_dict,
          prior_x=prior_x, prior_y=prior_y, prior_z2=prior_z2,
          loss_x=L2, loss_y=KL,
          coeff_x=1e-2, coeff_y=1e-4, coeff_x_dis=10, coeff_y_dis=1e-4,
          coeff_x_prob=0,