Example #1
"""
GRADIENT DESCENT CHECK
"""

import numpy as np

# confirmed_features, average_death_rate and the GradientDescent class are assumed
# to be defined/imported earlier in the script (this snippet starts after that setup).

# prepare features and split into training and testing data
average_death_rate = np.reshape(average_death_rate, (-1, 1))  # target as a column vector

# option 1: normalize features (normalize is assumed to be sklearn.preprocessing.normalize)
#confirmed_features = normalize(confirmed_features, axis=1)

# option 2: sign-of-change inputs (-1, 0, +1)
gradients = np.gradient(confirmed_features)  # one gradient array per axis of the 2-D feature matrix
confirmed_features = -np.sign(gradients[0])  # keep only the negated sign of the change along axis 0
print(confirmed_features.shape)

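# stack features and targets so rows stay aligned through the shuffle,
# then split on a fixed row index (212 rows for training, the rest for testing)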
data = np.hstack((confirmed_features, average_death_rate))
np.random.shuffle(data)
training_data = data[0:212, :]
testing_data = data[212:, :]
training_features = training_data[:, :-1]
training_targets = training_data[:, -1]
testing_features = testing_data[:, :-1]
testing_targets = testing_data[:, -1]

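# GradientDescent is assumed to be this project's own class: 'squared' names the loss,
# regularization='l1' adds an L1 penalty of strength reg_param, and confidence() is
# assumed to return real-valued predictions for the given features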
model = GradientDescent('squared', regularization='l1', reg_param=.5)
model.fit(training_features.astype(float), training_targets.astype(float))
prediction = model.confidence(testing_features.astype(float))

error = ((testing_targets - prediction)**2).mean()
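print(error)  # report the mean squared error on the held-out test set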

r = np.corrcoef(prediction, testing_targets)[0, 1]  # Pearson correlation between predictions and targets
print(r)