    return K.mean(K.square(difference * weights), axis=-1)


# Build a small fully connected network: 21 inputs -> 11 hidden units -> 7 outputs.
inputLayer = Input(shape=(21,))
hiddenLayer1 = Dense(11)(inputLayer)
outputLayer = Dense(7)(hiddenLayer1)

model = Model(inputs=inputLayer, outputs=outputLayer)
model.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy'])

# Parse the training data from the files given on the command line.
p = Parser()
dataFileTrain = sys.argv[1]
dataFileTest = sys.argv[2]
inputDataTrain = array(p.Parse(dataFileTrain))
print(inputDataTrain.shape)
outputDataTrain = array(p.ParseSpine(dataFileTrain))
print(outputDataTrain.shape)

# Train for 2000 epochs with a batch size of 32.
history = model.fit(inputDataTrain, outputDataTrain, batch_size=32, epochs=2000)

print(model.outputs)
# Print every node name in the underlying TensorFlow graph
# (useful for finding the output node name when freezing the graph below).
for n in K.get_session().graph.as_graph_def().node:
    print(n.name)

# Summarize history for loss.
plt.plot(history.history['loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
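# Sketch (not part of the original script): the weighted-MSE expression returned
# above is never wired into compile(), which uses the built-in 'mean_squared_error'.
# A custom Keras loss is just a function of (y_true, y_pred) passed to compile();
# 'weighted_mse' and the example weights below are assumptions for illustration only.
#
# def weighted_mse(y_true, y_pred):
#     weights = K.constant([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])  # assumed per-output weights
#     difference = y_true - y_pred
#     return K.mean(K.square(difference * weights), axis=-1)
#
# model.compile(optimizer='adam', loss=weighted_mse, metrics=['accuracy'])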
# Optional export of the trained model as a frozen TensorFlow graph (commented out).
# tf.train.Saver().save(K.get_session(), export_path + '/checkpoint.ckpt')
# tf.train.write_graph(K.get_session().graph.as_graph_def(),
#                      export_path, 'graph.pbtxt', as_text=True)
# tf.train.write_graph(K.get_session().graph.as_graph_def(),
#                      export_path, 'graph.pb', as_text=False)
# freeze_graph.freeze_graph(input_graph=export_path + '/graph.pbtxt',
#                           input_binary=False,
#                           input_checkpoint=export_path + '/checkpoint.ckpt',
#                           output_node_names="dense_2/BiasAdd",
#                           output_graph=export_path + '/model.bytes',
#                           clear_devices=True, initializer_nodes="", input_saver="",
#                           restore_op_name="save/restore_all",
#                           filename_tensor_name="save/Const:0")

# Parse the test data.
inputDataTest = array(p.Parse(dataFileTest))
outputDataTest = array(p.ParseSpineRotation(dataFileTest))

# loss_and_metrics = model.evaluate(inputDataTest, outputDataTest)
# print(loss_and_metrics)

# Test the model: use the estimator to get the previous-frame values for the first frame.
test = []
tolist = list(inputDataTest[0])
e = Estimator()
estimate = e.Estimate([inputDataTest[0]])
print(estimate[0][0])
print(estimate[0][1])
print(estimate[0][2])
tolist.append(estimate[0][0])
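# Sketch (not part of the original script): a minimal sanity check of the trained
# network on the first test frame, using only objects already defined above;
# 'prediction' is a hypothetical local name.
#
# prediction = model.predict(array([inputDataTest[0]]))
# print('network output :', prediction[0])
# print('expected output:', outputDataTest[0])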