# (loop header reconstructed: load each training sample into the CE one at a time, timing each load)
for i in range(numTrainingSamples):
    firstTS = time.time()
    algorithmTest.addSingleObservation(inDataTrain[i], outDataTrain[i])
    secondTS = time.time()
    timestamps["load" + str(i)] = secondTS - firstTS
# training CE using the added data, while the training time is measured
firstTS = time.time()
algorithmTest.train()
secondTS = time.time()
timestamps["train"] = secondTS - firstTS
print "Done: loading and training"
print "Beginning execution"
runningTotal = 0
for executeSample in range(testRecStop - testRecStart + 1):
    # computing output of test data using trained CE (time measured)
    # Saving error for each test data.
    firstTS = time.time()
    theor = algorithmTest.execute(list(inDataTest[executeSample]))
    secondTS = time.time()
    timestamps["test" + str(executeSample)] = secondTS - firstTS
    timestamps["delta" + str(executeSample)] = \
            abs(outDataTest[executeSample] - theor)
    runningTotal += outDataTest[executeSample]
print "Done: execution"
# computing average of the output test data
avgActual = runningTotal/(1.0*numExecuteSamples)
# calculating the loading time of the whole training dataset
netLoadingTime = 0
for i in range(numTrainingSamples):
    netLoadingTime += timestamps["load" + str(i)]
# calculating the execution time and Normalized Mean Absolute Error
netExecuteTime = 0
runningNMAE = 0.0
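# (the rest of this example was cut off; the loop below mirrors the MAE loop in the
#  later examples, and normalizing by avgActual to obtain an NMAE is an assumption)
for i in range(numExecuteSamples):
    netExecuteTime += timestamps["test" + str(i)]
    runningNMAE += timestamps["delta" + str(i)]
runningNMAE /= (numExecuteSamples * avgActual)
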
Example #2
    date = date.replace("/", " ")
    date = date.replace(":", " ")
    t = strptime(date, "%d %m %Y %H %M %S")
    weekday = t[6]
    #range [0, 6], Monday is 0
    hour = t[3]
    minute = t[4]
    sec = t[5]
    time = (t[3] * 3600 + t[4] * 60 + t[5]) / (24 * 3600.0)
    x_real.append([time])
    y_real.append(energy)
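
# (not in the original fragment: the trainer used below is assumed to be a Knn
#  regressor fitted on the parsed samples, mirroring the later examples on this page)
trainer = Knn(complexity=0, numInputs=1, discreteOutputs=0, discreteInputs=0)
trainer.addBatchObservations(x_real, y_real)
trainer.train()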

T = np.linspace(0, 1, 96)
y_predict = np.empty([0])

for num in T:
    x_predict = [num]
    result = trainer.execute(x_predict)
    y_predict = np.concatenate((y_predict, result))
# print(y_predict)

plt.subplot(1, 1, 1)
plt.scatter(x_real, y_real, c='r', label='data')
plt.plot(T, y_predict, c='g', label='prediction')
plt.axis('tight')
plt.legend()
plt.title("KNeighborsRegressor (k = %i, weights = '%s')" % (2, "uniform"))

plt.show()
np.random.seed(0)
X = np.sort(5 * np.random.rand(40, 1), axis=0)
T = np.linspace(0, 5, 500)[:, np.newaxis]
y = np.sin(X).ravel()

# Add noise to targets
y[::5] += 1 * (0.5 - np.random.rand(8))
#y = y.ravel();

###############################################################################
# Fit regression model
trainer = Knn(complexity=0, numInputs=1, discreteOutputs=0, discreteInputs=0)
trainer.addBatchObservations(X, y)
trainer.train()


y_predict = np.empty([0])

for i in range(T.shape[0]):
    result = trainer.execute(T[i])
    y_predict = np.concatenate((y_predict, result))

plt.subplot(1, 1, 1)
plt.scatter(X, y, c='k', label='data')
plt.plot(T, y_predict, c='g', label='prediction')
plt.axis('tight')
plt.legend()
plt.title("KNeighborsRegressor (k = %i, weights = '%s')" % (2,
	"uniform"))

plt.show()
teslaTimestamps["train"] = secondTS - firstTS;

runningTotal = 0

for i in range(numRow*day_predict,numRow*(day_predict+1)):
	row = csv[i]
	date=row[0]
	date_predict = csv[i+1][0]
	output=round(csv[i+1][3],4)
	date=date.replace("/"," ")
	date=date.replace(":"," ")
	t=strptime(date, "%d %m %Y %H %M %S")
	time = (t[3]*3600+t[4]*60+t[5])/(24*3600.0)
	x_predict=[time_predict, row[2], row[4], row[5], row[6], row[7], row[8]];
	firstTS = time.time();
	theor = algorithmTest.execute(x_predict);
	secondTS = time.time();
	knnTimestamps["test" + str(i)] = secondTS - firstTS;
	knnTimestamps["delta" + str(i)] = abs(output - theor);
	runningTotal += output;

avgActual = runningTotal / (1.0 * numExecuteSamples)
netLoadingTime = 0
for i in range(numTrainingSamples):
    netLoadingTime += knnTimestamps["load" + str(i)]

netExecuteTime = 0
runningMAE = 0.0
for i in range(numExecuteSamples):
    netExecuteTime += knnTimestamps["test" + str(i)]
    runningMAE += knnTimestamps["delta" + str(i)]
Example #5
teslaTimestamps["train"] = secondTS - firstTS

runningTotal = 0

for i in range(numRow * day_predict, numRow * (day_predict + 1)):
    row = csv[i]
    date = row[0]
    date_predict = csv[i + 1][0]
    output = round(csv[i + 1][3], 4)
    date = date.replace("/", " ")
    date = date.replace(":", " ")
    t = strptime(date, "%d %m %Y %H %M %S")
    # time-of-day as a fraction of the day (named to avoid shadowing the time module)
    time_of_day = (t[3] * 3600 + t[4] * 60 + t[5]) / (24 * 3600.0)
    # (assumed: time_predict is derived from date_predict the same way, as in the
    #  companion example on this page -- the original fragment never computed it)
    date_predict = date_predict.replace("/", " ").replace(":", " ")
    t_predict = strptime(date_predict, "%d %m %Y %H %M %S")
    time_predict = (t_predict[3] * 3600 + t_predict[4] * 60 + t_predict[5]) / (24 * 3600.0)
    x_predict = [time_predict, row[2], row[4], row[5], row[6], row[7], row[8]]
    firstTS = time.time()
    theor = algorithmTest.execute(x_predict)
    secondTS = time.time()
    knnTimestamps["test" + str(i)] = secondTS - firstTS
    knnTimestamps["delta" + str(i)] = abs(output - theor)
    runningTotal += output

avgActual = runningTotal / (1.0 * numExecuteSamples)
netLoadingTime = 0
for i in range(numTrainingSamples):
    netLoadingTime += knnTimestamps["load" + str(i)]

netExecuteTime = 0
runningMAE = 0.0
for i in range(numExecuteSamples):
    netExecuteTime += knnTimestamps["test" + str(i)]
    runningMAE += knnTimestamps["delta" + str(i)]
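
# (not in the original: summary metrics derived from the accumulated totals;
#  treating the mean delta divided by avgActual as an NMAE is an assumption)
avgExecuteTime = netExecuteTime / numExecuteSamples
meanAbsError = runningMAE / numExecuteSamples
normMeanAbsError = meanAbsError / avgActual
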
# (the start of this fragment was cut off; the loop header below is reconstructed
#  from the companion examples on this page)
for i in range(numRow * day_predict, numRow * (day_predict + 1)):
    row = csv[i]
    date = row[0]
    date_predict = csv[i + 1][0]
    dishwasher = round(csv[i + 1][3], 4)
    date = date.replace("/", " ")
    date = date.replace(":", " ")
    date_predict = date_predict.replace("/", " ")
    date_predict = date_predict.replace(":", " ")
    t = strptime(date, "%m %d %Y %H %M")
    t_predict = strptime(date_predict, "%m %d %Y %H %M")
    weekday = t[6]  # range [0, 6], Monday is 0
    hour = t[3]
    minute = t[4]
    sec = t[5]
    # time-of-day as a fraction of the day (named to avoid shadowing the time module)
    time_of_day = (t[3] * 3600 + t[4] * 60 + t[5]) / (24 * 3600.0)
    time_predict = (t_predict[3] * 3600 + t_predict[4] * 60 + t_predict[5]) / (24 * 3600.0)
    x_predict = [time_predict, row[2], row[4], row[5], row[6], row[7], row[8]]
    # (T is assumed to alias the time module; perf_counter() is used because clock()
    #  no longer exists in current Python)
    testStart = T.perf_counter()
    result = trainer.execute(x_predict)
    result = round(result, 4)
    testEnd = T.perf_counter()
    testLatency += testEnd - testStart
    # y_predict = np.concatenate((y_predict, result))
    y_predict.append(result)
    x_real.append([time_predict])
    y_real.append(dishwasher)
    # write result to output file
    writer.writerow({'real_power': dishwasher, 'predict_power': result})

    # MAE
    MAE += abs(dishwasher - result)
    MBE += dishwasher - result
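
# (not in the original fragment: average the accumulated errors over the numRow
#  predictions made in the loop above)
MAE /= numRow
MBE /= numRow
print("MAE = %.4f, MBE = %.4f" % (MAE, MBE))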

Example #7
import numpy as np
import matplotlib.pyplot as plt

from Knn import Knn

np.random.seed(0)
X = np.sort(5 * np.random.rand(40, 1), axis=0)
T = np.linspace(0, 5, 500)[:, np.newaxis]
y = np.sin(X).ravel()

# Add noise to targets
y[::5] += 1 * (0.5 - np.random.rand(8))
#y = y.ravel();

###############################################################################
# Fit regression model
trainer = Knn(complexity=0, numInputs=1, discreteOutputs=0, discreteInputs=0)
trainer.addBatchObservations(X, y)
trainer.train()

y_predict = np.empty([0])

for i in range(T.shape[0]):
    result = trainer.execute(T[i])
    y_predict = np.concatenate((y_predict, result))

plt.subplot(1, 1, 1)
plt.scatter(X, y, c='k', label='data')
plt.plot(T, y_predict, c='g', label='prediction')
plt.axis('tight')
plt.legend()
plt.title("KNeighborsRegressor (k = %i, weights = '%s')" % (2, "uniform"))

plt.show()