def display_ij(self, n_dimensions, X_fs, X_val, Y_val, X_test, Y_test, i, j, classifieur_model):
    """Fit a PCA on X_fs, project the validation/test sets, and draw a single
    plot of principal components (i, j) via self.plotij.

    Parameters
    ----------
    n_dimensions : int
        Number of principal components kept by the PCA.
    X_fs : array-like
        Data the PCA is fitted on.
    X_val, Y_val : array-like
        Validation samples and their labels.
    X_test, Y_test : array-like
        Test samples and their labels.
    i, j : int
        Indices of the two principal components to display.
    classifieur_model : object
        Classifier forwarded to self.plotij for decision-region drawing.
    """
    ACP = PCA(n_components=n_dimensions)
    ACP.fit(X_fs)
    # BUG FIX: was ACP.tranform(...) — a typo that raises AttributeError.
    X_valr = ACP.transform(X_val)
    X_testr = ACP.transform(X_test)
    # Per-component axis bounds with a 0.5 margin, covering both datasets.
    # Loop variable renamed from `i` to `d`: the original shadowed the `i`
    # parameter used below (an actual leak under Python 2 comprehensions).
    index_min = [min(X_valr[:, d].min(), X_testr[:, d].min()) - .5 for d in range(n_dimensions)]
    index_max = [max(X_valr[:, d].max(), X_testr[:, d].max()) + .5 for d in range(n_dimensions)]
    plt.figure(figsize=self.figsize)
    ax = plt.subplot(1, 1, 1)
    self.plotij(n_dimensions, i, j, ax, X_fs, X_val, X_valr, Y_val,
                X_test, X_testr, Y_test, ACP, index_min, index_max, classifieur_model)
    plt.show()
def plot_square_piece_simple():
    """simple demo - makes more sense for a 'cut' to be the next level of
    abstraction after an 'edge' though"""
    params = get_default_nub_parameters()

    def fresh_edge():
        # Re-randomize the shared parameter set before generating each edge.
        params.randomize()
        xy, _ = create_puzzle_piece_edge(params)
        return xy

    # Four independently randomized edges of the unit-square piece.
    bottom = fresh_edge()
    top = fresh_edge()
    left = fresh_edge()
    right = fresh_edge()

    fig = plt.figure()
    fig.add_subplot(111, aspect='equal')
    # Bottom/top edges run horizontally; left/right are the same curves with
    # coordinates swapped, each nub flipped in or out by a random sign.
    plt.plot(bottom[:, 0], random_sign() * bottom[:, 1], 'k-')
    plt.plot(top[:, 0], random_sign() * top[:, 1] + 1, 'k-')
    plt.plot(random_sign() * left[:, 1], left[:, 0], 'k-')
    plt.plot(random_sign() * right[:, 1] + 1, right[:, 0], 'k-')
    plt.show()
def getPredictedPriceNormalized(self):
    """Predict normalized stock opening prices with the stored model and plot
    them against the actual normalized prices over the test window.

    Returns
    -------
    The model's predictions for self.X_test (still in normalized scale).
    """
    # get model first
    # BUG FIX: was self.getModelFromFilePath(self, self.file) — passing `self`
    # explicitly on a bound-method call shifts every argument by one.
    self.getModelFromFilePath(self.file)
    input_features = self.df.iloc[:, [2, 3]].values
    input_data = input_features
    predicted_value = self.model.predict(self.X_test)
    plt.figure(figsize=(100, 40))
    plt.plot(predicted_value, color='red')
    # Actual (normalized) opening prices aligned with the prediction window.
    plt.plot(input_data[self.lookback:self.test_size + (2 * self.lookback), 1], color='green')
    plt.title("Opening price of stocks sold")
    plt.xlabel("Time (latest-> oldest)")
    plt.ylabel("Stock Opening Price")
    plt.show()
    # NOTE(review): the inverse_transform result is discarded, so this call has
    # no effect — confirm whether the de-normalized values were meant to be
    # returned or plotted instead.
    self.sc.inverse_transform(input_features[self.lookback:self.test_size + (2 * self.lookback)])
    return predicted_value
def display_XY(self, n_dimensions, X_fs, X_val, Y_val, X_test, Y_test, classifieur_model):
    """Fit a PCA on X_fs, project the validation/test sets, and draw the full
    grid of pairwise principal-component plots via self.plotij.

    Parameters
    ----------
    n_dimensions : int
        Number of principal components kept by the PCA.
    X_fs : array-like
        Data the PCA is fitted on.
    X_val, Y_val : array-like
        Validation samples and their labels.
    X_test, Y_test : array-like
        Test samples and their labels.
    classifieur_model : object
        Classifier forwarded to self.plotij for decision-region drawing.
    """
    ACP = PCA(n_components=n_dimensions)
    ACP.fit(X_fs)
    # BUG FIX: was ACP.tranform(...) — a typo that raises AttributeError.
    X_valr = ACP.transform(X_val)
    X_testr = ACP.transform(X_test)
    # Per-component axis bounds with a 0.5 margin, covering both datasets.
    index_min = [min(X_valr[:, d].min(), X_testr[:, d].min()) - .5 for d in range(n_dimensions)]
    index_max = [max(X_valr[:, d].max(), X_testr[:, d].max()) + .5 for d in range(n_dimensions)]
    plt.figure(figsize=self.figsize)
    for i in range(n_dimensions - 1):
        for j in range(i + 1, n_dimensions):
            ax = plt.subplot(n_dimensions - 1, n_dimensions - 1,
                             i + 1 + ((n_dimensions - 1) * (j - 1)))
            # CONSISTENCY FIX: the sibling display_ij passes the projected
            # arrays X_valr/X_testr to plotij; this call omitted them, which
            # would make the argument lists disagree with plotij's signature.
            self.plotij(n_dimensions, i, j, ax, X_fs, X_val, X_valr, Y_val,
                        X_test, X_testr, Y_test, ACP, index_min, index_max, classifieur_model)
    plt.tight_layout()
    plt.show()
# Now we predict on X rather than X_test allPredicts = [] models = [joblib.load(m) for m in modelFiles] for i, (m, modelName) in enumerate(zip(models, modelNames)): print i, pred = m.predict_proba(X)[:, 1] allPredicts.append(pred) allPredicts = np.array(allPredicts).T meanPreds = allPredicts.mean(axis=1) temp.append(ROC(y, meanPreds)) totalS.append( temp ) totalS = np.array( totalS ) plt.imgshow(totalS) plt.show() # tmStr = dt.now().strftime('%Y-%m-%d-%H-%M-%S') # createSubmission(meanPreds, fileName='predictions/mean_%s.csv'%tmStr) print 'done'
import csv

# Read the July 2018 Sitka daily high temperatures (column 5, "TMAX")
# from the CSV export. Using `with` so the file handle is always closed
# (the original left it open).
with open("sitka_weather_07-2018_simple.csv", "r") as open_file:
    csv_file = csv.reader(open_file, delimiter=",")
    header_row = next(csv_file)  # skip the header line
    highs = [int(row[5]) for row in csv_file]
print(highs)

# BUG FIX: was `import matplotlib.pyploy as plt` — a typo that raises
# ImportError before anything is plotted.
import matplotlib.pyplot as plt

plt.plot(highs, c="red")
plt.title("Daily High Temp, July 2018", fontsize=16)
plt.xlabel("")
plt.ylabel("Temperature (F)", fontsize=16)
plt.tick_params(axis="both", which="major", labelsize=16)
plt.show()