def test_bivariate_model():
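	"""Smoke test: feed ten generated (X, Y) batches through a BatchBivariateLearner."""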
	spamsDict = {
		"lambda1": 0.1,
		"it0": 3,
		"max_it": 10,
		"intercept": True,
		"compute_gram": True,
		"loss": "square",
		"regul": "l1l2",
	}
	learner = BatchBivariateLearner(**spamsDict)
	gen = BillMatlabGenerator(MATLAB_FILE_LOC,98,True)

	for i in range(10):
		X,Y = gen.generate()
		learner.process(X,Y)
def runFoldExperiment():
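	"""Fold experiment: train on a fold's training split, then report sparsity,
	bias, loss and per-task predictions on the corresponding test split."""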
	spamsDict = {
		"numThreads": 1,
		"L0": 0.1,
		"lambda1": 0.001,
		"it0": 100,
		"max_it": 500,
		"tol": 1e-3,
		"intercept": True,
		"bivar_it0": 3,
		"bivar_max_it": 10,
		"compute_gram": True,
		"loss": "square",
		"regul": "l1l2",
	}
	learner = BatchBivariateLearner(**spamsDict)
	gen = BillMatlabGenerator(MATLAB_DATA,98,True)
	evaluator = RootMeanSumSquareEval(learner)
	for foldN, fold in enumerate(gen.folds):
		logger.info("Performing fold: %d"%foldN)
		X,Y = gen.fromFold(fold['training'])
		learner.process(X,Y)
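		# evaluate on the held-out split once both factor matrices have been learnt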
		if learner.w is not None and learner.u is not None:
			Xtest,Ytest = gen.fromFold(fold['test'])
			
			logger.info("W sparcity: %2.2f"%learner._wsparcity())
			logger.info("U sparcity: %2.2f"%learner._usparcity())
			logger.info("Bias: %s"%learner.bias)
			loss = evaluator.evaluate(Xtest,Ytest)
			logger.info("Loss: %2.5f"%loss)
			logger.info("The predictions:")
			for i in range(len(Xtest)):
				for t in range(Ytest.shape[1]):
					# bilinear prediction for task t: u_t' Xtest[i] w_t, plus the per-task bias if fitted
					dotproduct = learner.u[:,t:t+1].T.dot(Xtest[i].T).dot(learner.w[:,t:t+1])[0,0]
					withoutbias = dotproduct
					if learner.bias is not None: dotproduct += learner.bias[0,t]
					logger.info("task=%d,i=%d,y=%2.5f,v=%2.5f,vb=%2.5f,se=%3.5f"%(
						t,i,Ytest[i,t],dotproduct,withoutbias,
						pow(Ytest[i,t]-dotproduct,2)
					))
		break # only the first fold is run
def runExperiment():
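	"""Online experiment: evaluate the current model on each newly generated
	(X, Y) item both before and after that item is added to the learner."""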
	spamsDict = {
		"numThreads": 1,
		"lambda1": 0.0001,
		"bivar_it0": 3,
		"bivar_max_it": 10,
		"max_it": 500,
		"tol": 1e-3,
		"intercept": True,
		"compute_gram": True,
		"loss": "square",
		"regul": "l1l2",
	}
	learner = BatchBivariateLearner(**spamsDict)
	gen = BillMatlabGenerator(MATLAB_DATA,98,True)
	evaluator = RootMeanSumSquareEval(learner)
	for i in range(35):
		logger.info("New Item Seen: %d"%i)
		X,Y = gen.generate()
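		# score the incoming item with the current model before it is added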
		if learner.w is not None and learner.u is not None:
			loss = evaluator.evaluate(X,Y)
			logger.info("Loss: %2.5f"%loss)
			logger.info("W sparcity: %2.2f"%learner._wsparcity())
			logger.info("U sparcity: %2.2f"%learner._usparcity())
			logger.info("Bias: %s"%learner.bias)
			logger.info("The predictions:")
			for t in range(Y.shape[1]):
				# bilinear prediction for task t: u_t' X w_t, plus the per-task bias if fitted
				dotproduct = learner.u[:,t:t+1].T.dot(X.T).dot(learner.w[:,t:t+1])[0,0]
				withoutbias = dotproduct
				if learner.bias is not None: dotproduct += learner.bias[0,t]
				logger.info("task=%d,y=%2.5f,v=%2.5f,vb=%2.5f,rse=%2.5f"%(
					t,Y[0,t],dotproduct,withoutbias,
					sqrt(pow(Y[0,t]-dotproduct,2))
				))
		learner.process(X,Y)
		loss = evaluator.evaluate(X,Y)
		logger.info("Loss (post addition): %2.5f"%loss)