def data():
    """Load the train/test split and print the shape of each array."""
    X_train, X_test, Y_train, Y_test = dp.get_data()
    splits = {
        'X_train': X_train,
        'X_test': X_test,
        'Y_train': Y_train,
        'Y_test': Y_test,
    }
    for name, array in splits.items():
        print(f'{name}: {array.shape}')
def gradients():
    """Run one forward/backward pass on the training data and print the
    shape of every gradient array."""
    X_train, _, Y_train, _ = dp.get_data()
    n_features = X_train.shape[1]
    weights = nn.init_weights(n_features, n_features)
    forward_info, _ = nn.forward_loss(X_train, Y_train, weights)
    grads = nn.loss_gradients(forward_info, weights)
    for name, grad in grads.items():
        print(f'gradients[{name}].shape: {grad.shape}')
def forward():
    """Run a forward pass and print every intermediate shape plus the loss."""
    X_train, _, Y_train, _ = dp.get_data()
    n_features = X_train.shape[1]
    weights = nn.init_weights(n_features, n_features)
    forward_info, loss = nn.forward_loss(X_train, Y_train, weights)
    for name, value in forward_info.items():
        print(f'forward_info[{name}].shape: {value.shape}')
    print(f'loss: {loss}')
def train():
    """Build a two-hidden-layer network and fit it with SGD on the dataset."""
    X_train, X_test, Y_train, Y_test = dp.get_data()
    seed = 80718
    layers = [
        Dense(n_neurons=13, activation=Sigmoid()),
        Dense(n_neurons=13, activation=Sigmoid()),
        Dense(n_neurons=1, activation=Linear()),
    ]
    deep_net = NeuralNetwork(layers=layers, loss=MeanSquaredError(), seed=seed)
    trainer = Trainer(deep_net, SGD(learning_rate=0.01))
    trainer.train(X_train, Y_train, X_test, Y_test,
                  epochs=1_000, eval_period=100, batch_size=23, seed=seed)
def main():
    """Train the network, report validation scores, and plot the loss curve."""
    X_train, X_test, Y_train, Y_test = dp.get_data()
    train_info = nn.train(X_train, Y_train, X_test, Y_test,
                          n_iter=1_000, test_every=100, learning_rate=0.001,
                          hidden_size=13, batch_size=23,
                          return_losses=True, return_weights=True,
                          return_scores=True, seed=80718)
    # train() packs its results positionally: losses, weights, val scores.
    losses, weights, val_scores = train_info[0], train_info[1], train_info[2]
    rounded = [round(score, 2) for score in val_scores]
    print(f'val_scores: {rounded}')
    plt.xlabel('iteration')
    plt.ylabel('loss (RMSE)')
    plt.plot(losses)
    plt.show()
def setUp(self):
    """Load a 25-sample dataset, split it, and prepare model-ready inputs
    and one-hot targets for both the training and validation folds."""

    def prepare(x, y):
        # Add a trailing singleton channel axis for the model input.
        x_reshaped = x.reshape(x.shape[0], x.shape[1], 1)
        # Shift labels down by 20; values that land on -21 are remapped to 0.
        y_adjusted = y - 20
        y_adjusted[np.where(y_adjusted == -21)] = 0
        y_one_hot = to_categorical(y_adjusted, num_classes=89, dtype='float32')
        return x_reshaped, y_adjusted, y_one_hot

    self.x, self.y = get_data(25)
    self.x_train, self.y_train, self.x_val, self.y_val = split(
        self.x, self.y)
    (self.x_train_reshaped,
     self.y_train_adjusted,
     self.y_train_one_hot) = prepare(self.x_train, self.y_train)
    (self.x_val_reshaped,
     self.y_val_adjusted,
     self.y_val_one_hot) = prepare(self.x_val, self.y_val)
def next_batch(self, k):
    """Return the k-th mini-batch as ``(x, y)`` numpy arrays.

    Each record in ``self.data`` is a sequence whose first three fields are
    the model inputs and whose fourth field is the label.

    Args:
        k: zero-based batch index.

    Returns:
        x: array of shape (batch_size, 3) holding the input fields.
        y: array of the corresponding labels.
    """
    # Materialize self.data exactly once. The original rebuilt
    # list(self.data) on every loop iteration (twice per record),
    # which is accidentally O(n^2) in the dataset size.
    records = list(self.data)
    start = k * self.batch_size
    batch = records[start:start + self.batch_size]
    # Note: a final partial batch is returned as-is rather than raising
    # IndexError as the per-index version did.
    x = np.array([record[:3] for record in batch])
    y = np.array([record[3] for record in batch])
    return x, y


if __name__ == '__main__':
    bert_root = './bert_model_chinese'
    bert_vocab_file = os.path.join(bert_root, 'vocab.txt')
    train_input, eval_input, test_input = get_data('./data', bert_vocab_file, 64)
    data = Data_loader(train_input, 4)
    for i in range(1):
        x, y = data.next_batch(i)
        print(x[:, 0])
        print(x[:, 1])
        print(x[:, 2])
        print('***' * 8)
        print(y)
recurrent_initializer="glorot_uniform", unit_forget_bias=True)) model.add(Dense(nb_classes)) model.add(Activation('softmax')) rmsprop = RMSprop(clipnorm=5.0) model.compile(loss='categorical_crossentropy', optimizer=rmsprop) return model def train_model(model, X_train, Y_train, X_test=None, Y_test=None, epochs=100, batch_size=10, save_model=False): model.fit(X_train, Y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(X_test, Y_test)) if __name__ == '__main__': import data_processor as dp (X_train, Y_train, X_test, Y_test) = dp.get_data() print Y_train.shape model = keras_model(X_train, Y_train, 10, X_test, Y_test) train_model(model, X_train, Y_train, X_test, Y_test)
from dash.dependencies import Input, Output from app import app import pandas as pd import data_processor import plotly.graph_objs as go debug = True def debug_print(statement): if debug: print(statement) debug_print('Loading trail counter readings...') trail_counter_readings_df = data_processor.get_data('trail_counter_readings') debug_print('Loading trail counter info...') trail_counter_info_df = data_processor.get_data('trail_counter_info') def list_for_dropdown(df): dropdown_options = [] for index, row in df.iterrows(): option = {'label': row['LOCATION'], 'value': row['ID']} dropdown_options.append(option) return dropdown_options def name_lookup(df, id, key_field, name_field): return df.loc[df[key_field] == id][name_field].values[0] @app.callback( [Output('traffic_graph', 'figure'), Output('traffic_summary', 'figure'),