from numpy import array, mat, shape, arange
import matplotlib.pyplot as plt

def plot(data, weights):
    # Split the two features by class label so each class gets its own colour.
    data_mat = array(data[['density', 'radio_suger']].values)
    label_mat = mat(data['label'].values).transpose()
    m = shape(data_mat)[0]
    xcord1 = []
    ycord1 = []
    xcord2 = []
    ycord2 = []
    for i in range(m):
        if label_mat[i] == 1:
            xcord1.append(data_mat[i, 0])
            ycord1.append(data_mat[i, 1])
        else:
            xcord2.append(data_mat[i, 0])
            ycord2.append(data_mat[i, 1])
    plt.figure(1)
    ax = plt.subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    # Line through the origin along the LDA projection direction w.
    x = arange(-0.2, 0.8, 0.1)
    y = array((-weights[0, 0] * x) / weights[0, 1])
    print(shape(x))
    print(shape(y))
    plt.sca(ax)
    plt.plot(x, y)
    plt.xlabel('density')
    plt.ylabel('radio_suger')
    plt.title('LDA')
    plt.show()
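# Hypothetical usage sketch (not from the original source): assumes a pandas
# DataFrame with 'density', 'radio_suger' and 'label' columns and a 1x2 LDA
# direction vector; all values below are invented for illustration.
import pandas as pd

df = pd.DataFrame({
    'density':     [0.697, 0.774, 0.634, 0.243, 0.245],
    'radio_suger': [0.460, 0.376, 0.264, 0.267, 0.057],
    'label':       [1, 1, 1, 0, 0],
})
w = mat([[0.5, -1.2]])   # made-up projection direction for demonstration
plot(df, w)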
import numpy as np
from sklearn.svm import SVR

def predict_price(dates, prices, x):
    # scikit-learn expects a 2-D feature array: one column, one row per date.
    dates = np.reshape(dates, (len(dates), 1))

    svr_lin = SVR(kernel='linear', C=1e3)
    svr_poly = SVR(kernel='poly', C=1e3, degree=2)
    svr_rbf = SVR(kernel='rbf', C=1e3, gamma=0.1)
    svr_lin.fit(dates, prices)
    svr_poly.fit(dates, prices)
    svr_rbf.fit(dates, prices)

    plt.scatter(dates, prices, color='black', label='Data')
    plt.plot(dates, svr_rbf.predict(dates), color='red', label='RBF model')
    plt.plot(dates, svr_lin.predict(dates), color='green', label='Linear model')
    plt.plot(dates, svr_poly.predict(dates), color='blue', label='Polynomial model')
    plt.xlabel('Date')
    plt.ylabel('Price')
    plt.title('Support Vector Regression')
    plt.legend()
    plt.show()

    # Predict the price for the query point x (also reshaped to 2-D).
    x = np.reshape([x], (1, 1))
    return svr_rbf.predict(x)[0], svr_lin.predict(x)[0], svr_poly.predict(x)[0]
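# Hypothetical call (illustration only): day indices as the single feature and
# made-up prices; the query day 11 and every value here are invented.
sample_dates = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
sample_prices = [21.5, 22.1, 21.8, 23.0, 23.4, 24.1, 23.8, 24.6, 25.0, 25.3]
rbf_pred, lin_pred, poly_pred = predict_price(sample_dates, sample_prices, 11)
print(rbf_pred, lin_pred, poly_pred)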
def plot_the_loss_curve(epochs, mae_training, mae_validation):
  """Plot a curve of loss (mean absolute error) vs. epoch."""

  plt.figure()
  plt.xlabel("Epoch")
  plt.ylabel("Mean Absolute Error")

  # Skip the first epoch: its loss is often substantially greater than the
  # loss for the remaining epochs and would distort the y-axis scale.
  plt.plot(epochs[1:], mae_training[1:], label="Training Loss")
  plt.plot(epochs[1:], mae_validation[1:], label="Validation Loss")
  plt.legend()

  merged_mae_lists = mae_training[1:] + mae_validation[1:]
  highest_loss = max(merged_mae_lists)
  lowest_loss = min(merged_mae_lists)
  delta = highest_loss - lowest_loss

  top_of_y_axis = highest_loss + (delta * 0.05)
  bottom_of_y_axis = lowest_loss - (delta * 0.05)

  plt.ylim([bottom_of_y_axis, top_of_y_axis])
  plt.show()
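# Hypothetical usage sketch (all values invented for illustration): the
# function only needs parallel lists of epoch numbers and per-epoch MAE
# values, such as those collected from a Keras History object.
example_epochs = list(range(10))
example_mae_training = [5.0, 1.2, 1.0, 0.9, 0.85, 0.8, 0.78, 0.76, 0.75, 0.74]
example_mae_validation = [5.5, 1.4, 1.2, 1.1, 1.05, 1.0, 0.98, 0.97, 0.96, 0.95]
plot_the_loss_curve(example_epochs, example_mae_training, example_mae_validation)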
# Example #4: safely evaluating a Python literal with ast.literal_eval.
import ast

code = "(1, 2, {'foo': 'bar'})"
obj = ast.literal_eval(code)
print(obj)          # (1, 2, {'foo': 'bar'})
print(type(obj))    # <class 'tuple'>
# Data Visualization with Python.
import numpy as np
import seaborn as sns
data = np.random.randn(1000)
sns.distplot(data, kde=True, rug=True)
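# Note: on newer seaborn releases (0.11+), distplot is deprecated; an
# equivalent sketch (assuming seaborn >= 0.11) would be:
# sns.histplot(data, kde=True)
# sns.rugplot(data)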

import matplotlib.pyplot as plt
x = [0, 1, 2, 3, 4, 5, 6]
y = [i**2 for i in x]
plt.scatter(x, y, c='blue', marker='x', s=100)
plt.plot(x, y, color='red', linewidth=2)
plt.xlabel('x data')
plt.ylabel('y data')
plt.title('An example plot')
plt.show()

# MayaVI: Boy's surface, coloured by sin(u).
from numpy import sin, cos, mgrid, pi, sqrt
from mayavi import mlab

mlab.figure(fgcolor=(0, 0, 0), bgcolor=(1, 1, 1))
u, v = mgrid[-0.035:pi:0.01, -0.035:pi:0.01]
X = 2 / 3. * (cos(u) * cos(2 * v) + sqrt(2) * sin(u) * cos(v)) * cos(u) / (
    sqrt(2) - sin(2 * u) * sin(3 * v))
Y = 2 / 3. * (cos(u) * sin(2 * v) - sqrt(2) * sin(u) * sin(v)) * cos(u) / (
    sqrt(2) - sin(2 * u) * sin(3 * v))
Z = -sqrt(2) * cos(u) * cos(u) / (sqrt(2) - sin(2 * u) * sin(3 * v))
S = sin(u)
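# The snippet above only builds the surface arrays; to actually render them
# with Mayavi, a call along these lines works (the colormap choice here is
# just an example):
mlab.mesh(X, Y, Z, scalars=S, colormap='YlGnBu')
mlab.show()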