def test_quantile_regression_list2(self):
        """Quantile regression on noisy linear data: a badly shaped
        target must raise ValueError, and a valid fit must differ from
        plain least squares."""
        # Noisy linear data: small noise on 900 points, large on 100.
        X = random(1000)
        small_noise = (random(900) - 0.5) * 0.1
        large_noise = random(100) * 2
        noise = numpy.hstack([small_noise, large_noise])
        X = X.reshape((1000, 1))  # pylint: disable=E1101

        # Broadcasting the 2D X against the 1D noise yields a 2D target,
        # which fit must reject.
        Y = X * 3.4 + 5.6 + noise
        clq = QuantileLinearRegression(verbose=False, fit_intercept=True)
        self.assertRaise(lambda: clq.fit(X, Y), ValueError)

        # Properly shaped 1D target.
        Y = X.ravel() * 3.4 + 5.6 + noise
        clq = QuantileLinearRegression(verbose=False, fit_intercept=True)
        clq.fit(X, Y)

        clr = LinearRegression(fit_intercept=True)
        clr.fit(X, Y)

        # Both fits converge with non-trivial intercepts, but the
        # quantile solution must not coincide with least squares.
        self.assertNotEqual(clr.intercept_, 0)
        self.assertNotEqual(clq.intercept_, 0)
        self.assertNotEqualArray(clr.coef_, clq.coef_)
        self.assertNotEqualArray(clr.intercept_, clq.intercept_)
        self.assertLesser(clq.n_iter_, 10)

        pred_ols = clr.predict(X)
        pred_quant = clq.predict(X)
        self.assertEqual(pred_ols.shape, pred_quant.shape)
 def test_quantile_regression_quantile_check(self):
     """Check that the fitted line sits at the requested quantile:
     a low quantile leaves most points above the prediction, a high
     quantile leaves most points below it."""
     # Smooth, nearly linear data on [0, 1).
     n = 100
     X = (numpy.arange(n) / n)
     Y = X + X * X / n
     X = X.reshape((n, 1))
     for tau in [0.1, 0.5, 0.9]:
         model = QuantileLinearRegression(verbose=False,
                                          fit_intercept=True,
                                          quantile=tau,
                                          max_iter=10)
         model.fit(X, Y)
         residual = model.predict(X) - Y
         signs = numpy.sign(residual)  # pylint: disable=E1111
         above = (signs > 0).sum()  # pylint: disable=W0143
         below = (signs < 0).sum()  # pylint: disable=W0143
         if tau < 0.5:
             # Line near the bottom: negative residuals dominate.
             self.assertGreater(below, above * 4)
         elif tau > 0.5:
             # Line near the top: positive residuals dominate.
             self.assertLesser(below * 7, above)
# Scatter/line of the raw series.
plt.plot(X, y)

# Ordinary least-squares trend for comparison.
ols_model = lm.LinearRegression()
ols_model.fit(X, y)

ols_trend = ols_model.predict(X)

# Slope and total rise of the OLS trend over the sample.
print(ols_model.coef_)
print(ols_trend[-1] - ols_trend[0])

plt.plot(X, ols_trend, color="r")

# Least-absolute-deviation (median quantile) trend on the call-center data.
X_lad = np.array(call_center_data.index).reshape(-1, 1)
y_lad = np.array(call_center_data["calls"])

# print(X_lad)

lad_model = QuantileLinearRegression(verbose=True)

lad_model.fit(X_lad, y_lad)

lad_trend = lad_model.predict(X_lad)

# Slope and total rise of the LAD trend over the sample.
print(lad_model.coef_)
print(lad_trend[-1] - lad_trend[0])

# BUG FIX: lad_trend is computed from X_lad, so it must be plotted
# against X_lad (the original plotted it against X, pairing the LAD
# predictions with the wrong x-axis values).
plt.plot(X_lad, lad_trend, color="g")

plt.show()
})
# Scatter of predicted vs. actual prices for the fitted model.
df_results.plot('Actual MSRP', 'Predicted MSRP', kind='scatter')

# In[ ]:

#from mlinsights.mlmodel import PiecewiseRegressor
#from sklearn.tree import DecisionTreeRegressor

# Fit one quantile regression per quantile level and keep them keyed by
# the quantile so they can be compared/plotted later.
clqs = {}
for qu in [0.25, 0.5, 0.85]:
    clq = QuantileLinearRegression(quantile=qu)
    clq.fit(X_tr, Y_tr)
    clqs['q=%1.2f' % qu] = clq
    print(clq)
    # NOTE(review): the first label says "Mean Absolute Error" but the
    # second jumps to "Mean Squared Error" on the test set — confirm the
    # mix of metrics/labels is intentional.
    print('Training Mean Absolute Error:',
          metrics.mean_absolute_error(Y_tr, clq.predict(X_tr)))
    print('Testing Mean Squared Error:',
          metrics.mean_squared_error(Y_te, clq.predict(X_te)))
    print('Training Root Mean Squared Error:',
          np.sqrt(metrics.mean_squared_error(Y_tr, clq.predict(X_tr))))
    print('Testing Root Mean Squared Error:',
          np.sqrt(metrics.mean_squared_error(Y_te, clq.predict(X_te))))

    # R^2 on train and test sets for this quantile model.
    R2_tr = r2_score(Y_tr, clq.predict(X_tr))
    print(R2_tr)

    R2 = r2_score(Y_te, clq.predict(X_te))
    print(R2)

    # Let's look at the residuals as well:
    matplotlib.rcParams['figure.figsize'] = (6.0, 6.0)