def train_autokeras(self):
    """Run the Auto-Keras search, export the best model, record statistics.

    Loads the resized train/test images listed in the project CSV files,
    searches for an architecture within ``self.TIME`` seconds, exports the
    winner to ``self.MODEL_DIR`` and stores search statistics in
    ``self.traininfo``.
    """
    # Load images; labels come from the CSV files.
    train_data, train_labels = load_image_dataset(
        csv_file_path=self.TRAIN_CSV_DIR,
        images_path=self.RESIZE_TRAIN_IMG_DIR)
    test_data, test_labels = load_image_dataset(
        csv_file_path=self.TEST_CSV_DIR,
        images_path=self.RESIZE_TEST_IMG_DIR)

    # Scale pixel values into [0, 1].
    train_data = train_data.astype('float32') / 255
    test_data = test_data.astype('float32') / 255
    print("Train data shape:", train_data.shape)

    clf = ImageClassifier(verbose=True, path=self.TEMP_DIR, resume=False)
    clf.fit(train_data, train_labels, time_limit=self.TIME)  # architecture search
    # Retrain the best architecture from scratch on the full data.
    clf.final_fit(train_data, train_labels, test_data, test_labels,
                  retrain=True)
    evaluate_value = clf.evaluate(test_data, test_labels)
    print("Evaluate:", evaluate_value)

    # Earlier export variants, kept for reference:
    # clf.load_searcher().load_best_model().produce_keras_model().save(MODEL_DIR)
    # clf.export_keras_model(MODEL_DIR)
    clf.export_autokeras_model(self.MODEL_DIR)

    # Collect training statistics for later display.
    stats = {}
    ishape = clf.cnn.searcher.input_shape
    stats['n_train'] = train_data.shape[0]  # total training images used
    stats['n_classes'] = clf.cnn.searcher.n_classes
    stats['input_shape'] = str(ishape[0]) + 'x' + str(ishape[1]) + 'x' + str(ishape[2])
    stats['history'] = clf.cnn.searcher.history
    stats['model_count'] = clf.cnn.searcher.model_count
    stats['best_model'] = clf.cnn.searcher.get_best_model_id()
    matches = [entry for entry in stats['history']
               if entry['model_id'] == stats['best_model']]
    if len(matches) > 0:
        stats['loss'] = matches[0]['loss']
        stats['metric_value'] = matches[0]['metric_value']
    stats['evaluate_value'] = evaluate_value
    self.traininfo = stats
def train_autokeras(RESIZE_TRAIN_IMG_DIR, RESIZE_TEST_IMG_DIR, TRAIN_CSV_DIR, TEST_CSV_DIR, TIME):
    """Search for an image classifier with Auto-Keras and save the winner.

    Args:
        RESIZE_TRAIN_IMG_DIR: directory containing the resized training images.
        RESIZE_TEST_IMG_DIR: directory containing the resized test images.
        TRAIN_CSV_DIR: CSV file mapping training image names to labels.
        TEST_CSV_DIR: CSV file mapping test image names to labels.
        TIME: search budget, in seconds.

    NOTE(review): ``MODEL_DIR`` is read from module scope rather than passed
    in — confirm the surrounding file defines it.
    """
    # Load the datasets; labels come from the CSV files.
    train_data, train_labels = load_image_dataset(
        csv_file_path=TRAIN_CSV_DIR, images_path=RESIZE_TRAIN_IMG_DIR)
    test_data, test_labels = load_image_dataset(
        csv_file_path=TEST_CSV_DIR, images_path=RESIZE_TEST_IMG_DIR)

    # Scale pixel values into [0, 1].
    train_data = train_data.astype('float32') / 255
    test_data = test_data.astype('float32') / 255

    clf = ImageClassifier(verbose=True)
    clf.fit(train_data, train_labels, time_limit=TIME)  # architecture search
    # Retrain the best architecture from scratch.
    clf.final_fit(train_data, train_labels, test_data, test_labels,
                  retrain=True)

    y = clf.evaluate(test_data, test_labels)
    print("测试集精确度:", y)
    score = clf.evaluate(train_data, train_labels)
    print("训练集精确度:", score)
    clf.export_keras_model(MODEL_DIR)  # save the best model
def train_autokeras(RESIZE_TRAIN_IMG_DIR, TRAIN_CSV_DIR, RESIZE_TEST_IMG_DIR, TEST_CSV_DIR, TIME):
    """Search for an image classifier, test a single prediction, save model.

    Args:
        RESIZE_TRAIN_IMG_DIR: directory containing the resized training images.
        TRAIN_CSV_DIR: CSV file mapping training image names to labels.
        RESIZE_TEST_IMG_DIR: directory containing the resized test images.
        TEST_CSV_DIR: CSV file mapping test image names to labels.
        TIME: search budget, in seconds.

    NOTE(review): ``PREDICT_IMG_PATH``, ``RESIZE``, ``MODEL_DIR`` and
    ``MODEL_PNG`` are read from module scope — confirm the surrounding file
    defines them.
    """
    # Load images; labels come from the CSV files.
    train_data, train_labels = load_image_dataset(
        csv_file_path=TRAIN_CSV_DIR, images_path=RESIZE_TRAIN_IMG_DIR)
    test_data, test_labels = load_image_dataset(
        csv_file_path=TEST_CSV_DIR, images_path=RESIZE_TEST_IMG_DIR)

    # Scale pixel values into [0, 1].
    train_data = train_data.astype('float32') / 255
    test_data = test_data.astype('float32') / 255
    print("Train data shape:", train_data.shape)

    clf = ImageClassifier(verbose=True)
    clf.fit(train_data, train_labels, time_limit=TIME)  # architecture search
    clf.final_fit(train_data, train_labels, test_data, test_labels,
                  retrain=True)
    y = clf.evaluate(test_data, test_labels)
    print("Evaluate:", y)

    # Predict the category of a single sample image.
    img = load_img(PREDICT_IMG_PATH)
    x = img_to_array(img)
    x = x.astype('float32') / 255
    x = np.reshape(x, (1, RESIZE, RESIZE, 3))  # batch of one RGB image
    print("x shape:", x.shape)
    y = clf.predict(x)
    print("predict:", y)

    # Save the best model as a plain Keras model.
    clf.load_searcher().load_best_model().produce_keras_model().save(MODEL_DIR)

    # Save a diagram of the model architecture.
    model = load_model(MODEL_DIR)
    plot_model(model, to_file=MODEL_PNG)
def train_autokeras(self):
    """Run the Auto-Keras search for this project and return statistics.

    Loads the resized train/test images listed in the project's label CSVs,
    searches within ``self.projectinfo['parameter_time']`` seconds, exports
    the best model to ``self.project_mod_path``, and returns a dict of
    search statistics.
    """
    time_limit = self.projectinfo['parameter_time']

    # Load images; labels come from the CSV files.
    train_data, train_labels = load_image_dataset(
        csv_file_path=self.project_train_labels_csv,
        images_path=self.project_resize_train_dir)
    test_data, test_labels = load_image_dataset(
        csv_file_path=self.project_test_labels_csv,
        images_path=self.project_resize_test_dir)

    # Scale pixel values into [0, 1].
    train_data = train_data.astype('float32') / 255
    test_data = test_data.astype('float32') / 255
    self.log.info("Train data shape: %d" % train_data.shape[0])

    clf = ImageClassifier(verbose=True, path=self.project_tmp_dir,
                          resume=False)
    clf.fit(train_data, train_labels, time_limit=time_limit)  # search
    # Retrain the best architecture from scratch on the full data.
    clf.final_fit(train_data, train_labels, test_data, test_labels,
                  retrain=True)
    evaluate_value = clf.evaluate(test_data, test_labels)
    self.log.info("Evaluate: %f" % evaluate_value)
    clf.export_autokeras_model(self.project_mod_path)

    # Collect training statistics to return to the caller.
    stats = {}
    ishape = clf.cnn.searcher.input_shape
    stats['n_train'] = train_data.shape[0]  # total training images used
    stats['n_classes'] = clf.cnn.searcher.n_classes
    stats['input_shape'] = str(ishape[0]) + 'x' + str(ishape[1]) + 'x' + str(ishape[2])
    stats['history'] = clf.cnn.searcher.history
    stats['model_count'] = clf.cnn.searcher.model_count
    stats['best_model'] = clf.cnn.searcher.get_best_model_id()
    matches = [entry for entry in stats['history']
               if entry['model_id'] == stats['best_model']]
    if len(matches) > 0:
        stats['loss'] = matches[0]['loss']
        stats['metric_value'] = matches[0]['metric_value']
    stats['evaluate_value'] = evaluate_value
    return stats
from autokeras.image.image_supervised import ImageClassifier, load_image_dataset

# All images were copied into ./all beforehand.
train_path = '../data/all'
train_labels = '../data/labels_train.csv'

x_train, y_train = load_image_dataset(csv_file_path=train_labels,
                                      images_path=train_path)
# x_val, y_val = load_image_dataset(csv_file_path=validation_labels, images_path=validation_path)

clf = ImageClassifier(verbose=True)
clf.fit(x_train, y_train, time_limit=4 * 60 * 60)  # 4-hour search budget

# Export the best model and save it as a plain Keras HDF5 file.
# NOTE(review): export_keras_model() is called with no path and
# produce_keras_model('asdf') gets an odd argument — verify this against
# the Auto-Keras version in use.
best_model = clf.export_keras_model()
keras_model = best_model.produce_keras_model('asdf')
keras_model.summary()
keras_model.save('best.hdf5')

# clf.final_fit(x_train, y_train, x_val, y_val, retrain=True, trainer_args={'max_iter_num': 10})
# print(clf.evaluate(x_val, y_val))
# In[ ]:
# Load the training split; labels come from the CSV file.
x_train, y_train = load_image_dataset(
    csv_file_path="split_data/train/label.csv",
    images_path="split_data/resized-train/")
print(x_train.shape)
print(y_train.shape)

# In[ ]:
from autokeras.image.image_supervised import ImageClassifier

# In[ ]:
# Create a directory for search checkpoints, then run the search.
# NOTE(review): hours_for_training is defined outside this view.
get_ipython().system(u'mkdir models')
clf = ImageClassifier(path="models/", verbose=True)
clf.fit(x_train, y_train, time_limit=hours_for_training * 60 * 60)

# In[ ]:
# Load the validation split.
x_val, y_val = load_image_dataset(csv_file_path="split_data/val/label.csv",
                                  images_path="split_data/resized-val/")
print(x_val.shape)
print(y_val.shape)

# In[ ]:
# Retrain the best architecture from scratch, then score it.
clf.final_fit(x_train, y_train, x_val, y_val, retrain=True)
y = clf.evaluate(x_val, y_val)
print(y)
to_pdf(graph, os.path.join(path, str(model_id))) if __name__ == '__main__': # 需要把数据放到 ~/.keras/dataset 中,不然下载会报错 (x_train, y_train), (x_test, y_test) = mnist.load_data() print(x_train.shape) # (60000, 28, 28) print('增加一个维度,以符合格式要求') x_train = x_train.reshape(x_train.shape + (1, )) print(x_train.shape) # (60000, 28, 28, 1) x_test = x_test.reshape(x_test.shape + (1, )) # 指定模型更新路径 clf = ImageClassifier(path="automodels/", verbose=True) # 限制为 4 个小时 # 搜索部分 gap = 6 clf.fit(x_train[::gap], y_train[::gap], time_limit=4 * 60 * 60) # 用表现最好的再训练一次 clf.final_fit(x_train[::gap], y_train[::gap], x_test, y_train, retrain=True) y = clf.evaluate(x_test, y_test) print(y) print("导出训练好的模型") clf.export_autokeras_model("automodels/auto_mnist_model")
from keras.datasets import mnist
from autokeras.image.image_supervised import ImageClassifier

(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Add a trailing channel axis: (N, 28, 28) -> (N, 28, 28, 1).
x_train = x_train.reshape(x_train.shape + (1, ))
x_test = x_test.reshape(x_test.shape + (1, ))

clf = ImageClassifier(verbose=True)
# A short budget (e.g. 300 s) fails with "Search Time too short. No model
# was found during the search time"; allow 10 hours here.
# clf.fit(x_train, y_train, time_limit=12 * 60 * 60)
clf.fit(x_train, y_train, time_limit=10 * 60 * 60)
clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
y = clf.evaluate(x_test, y_test)
print(y)
# Tail of a data-loading loop that starts above this view.
# NOTE(review): this append labels the sample 0 — verify it matches the
# class of the loop it belongs to (cut off above).
x_train.append(img)
y_train.append(0)
x_train = np.array(x_train)
y_train = np.array(y_train)

# Load the test set: "normal" is class 0, "anomaly" is class 1.
for file_name in os.listdir("test/normal"):
    img = cv2.imread("test/normal/" + file_name)
    x_test.append(img)
    y_test.append(0)  # normal class

for file_name in os.listdir("test/anomaly"):
    img = cv2.imread("test/anomaly/" + file_name)
    x_test.append(img)
    # BUG FIX: anomaly samples were labelled 0, identical to the normal
    # class (copy-paste error); label them 1 so the two classes differ.
    y_test.append(1)

x_test = np.array(x_test)
y_test = np.array(y_test)
print(x_train.shape)
print(y_train.shape)

clf = ImageClassifier(verbose=True)
clf.fit(x_train, y_train, time_limit=12 * 60 * 60)  # 12-hour search budget
clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
clf.export_autokeras_model("./autokeras_model.bin")  # loadable by Auto-Keras
clf.export_keras_model("./keras_model.bin")  # loadable by plain Keras
acc = clf.evaluate(x_test, y_test)
from autokeras.image.image_supervised import load_image_dataset
from autokeras.image.image_supervised import ImageClassifier

# Load the local MNIST-style splits; labels come from the CSV files.
x_train, y_train = load_image_dataset(csv_file_path="../data-mnist/train_label.csv",
                                      images_path="../data-mnist/train")
print(len(x_train))

x_test, y_test = load_image_dataset(csv_file_path="../data-mnist/test_label.csv",
                                    images_path="../data-mnist/test")

clf = ImageClassifier(verbose=True)
clf.fit(x_train, y_train, time_limit=14 * 60 * 60)  # 14-hour search budget
clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
y = clf.evaluate(x_test, y_test)
print(y)

# NOTE(review): the export name says "1 hour" but the search above ran
# for 14 hours — consider renaming.
model_file_name = "mnist_1_hour"
clf.export_autokeras_model(model_file_name)
# Tail of the data-loading section that starts above this view.
print("the key hole:", x_test_keyhole.shape)
print(y_test_keyhole.shape)

x_test_no_keyhole, y_test_no_keyhole = load_image_dataset(
    csv_file_path="deal-data/test/test_no_keyhole.csv",
    images_path="deal-data/test/no_keyhole")
print(x_test_no_keyhole.shape)
print(y_test_no_keyhole.shape)

# Stack both classes into a single test set.
x_test, y_test = np.vstack((x_test_keyhole, x_test_no_keyhole)), np.hstack(
    (y_test_keyhole, y_test_no_keyhole))
print("x_test:", x_test.shape)
print("y_test:", y_test.shape)

from keras.datasets import mnist
from autokeras.image.image_supervised import ImageClassifier

if __name__ == '__main__':
    # MNIST variant of this experiment, kept for reference:
    # (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # x_train = x_train.reshape(x_train.shape + (1,))
    # x_test = x_test.reshape(x_test.shape + (1,))
    # NOTE(review): x_train/y_train are defined above this view.
    model_file_name = "./deal-data/model/autokeras.h5"
    clf = ImageClassifier(path="./deal-data/show_net", verbose=True,
                          augment=False)
    clf.fit(x_train, y_train, time_limit=12 * 60 * 60)  # 12-hour search
    clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
    clf.export_autokeras_model(model_file_name)
    y = clf.evaluate(x_test, y_test)
    print(y)
IMAGE_SIZE = 28

if __name__ == '__main__':
    # Load the local images into numpy arrays; labels come from the CSVs.
    train_data, train_labels = load_image_dataset(csv_file_path=TRAIN_CSV_DIR,
                                                  images_path=TRAIN_IMG_DIR)
    test_data, test_labels = load_image_dataset(csv_file_path=TEST_CSV_DIR,
                                                images_path=TEST_IMG_DIR)

    # Scale pixel values into [0, 1].
    train_data = train_data.astype('float32') / 255
    test_data = test_data.astype('float32') / 255
    print("train data shape:", train_data.shape)

    # Let Auto-Keras search for the best network; the one-minute budget
    # makes this a smoke test rather than a real search.
    clf = ImageClassifier(verbose=True)
    clf.fit(train_data, train_labels, time_limit=1 * 60)
    # Retrain the best architecture from scratch and score it.
    clf.final_fit(train_data, train_labels, test_data, test_labels,
                  retrain=True)
    y = clf.evaluate(test_data, test_labels)
    print("evaluate:", y)

    # Try predicting a single image (continues past this view).
    img = load_img(PREDICT_IMG_PATH)
    x = img_to_array(img)
# Capped-iteration invocation, kept for reference:
# x_train = x_train.reshape(x_train.shape + (1,))
# x_test = x_test.reshape(x_test.shape + (1,))
# clf = ImageClassifier(path='output/', verbose=True, searcher_args={
#     'trainer_args': {'max_iter_num': 1, 'max_no_improvement_num': 1}})
# clf.fit(x_train, y_train, time_limit=1 * 60 * 30)
# clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
# y = clf.evaluate(x_test, y_test)
# print(y)

(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Add a trailing channel axis: (N, 28, 28) -> (N, 28, 28, 1).
x_train = x_train.reshape(x_train.shape + (1,))
x_test = x_test.reshape(x_test.shape + (1,))

clf = ImageClassifier(verbose=True,
                      searcher_args={'trainer_args': {'max_iter_num': 7}})
clf.fit(x_train, y_train, time_limit=12 * 60 * 60)  # 12-hour search budget
clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
y = clf.evaluate(x_test, y_test)
print(y)

clf.export_autokeras_model('output/auto_mnist_model')

# Alternative export: pickle the best model object directly.
best_model = clf.cnn.best_model.produce_model()
pickle_to_file(best_model, 'output/auto_mnist_best_model')
print(best_model)

# Step 2: after training completes, run examples/visualize.py, passing
# the same path as parameter.
# if __name__ == '__main__':
#     visualize('~/automodels/')
from keras.datasets import mnist
from autokeras.image.image_supervised import ImageClassifier

# Gather the data and add a trailing channel axis: (N, 28, 28, 1).
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape + (1, ))
x_test = x_test.reshape(x_test.shape + (1, ))

# Search for 15 minutes, then fit the best architecture without
# retraining its weights from scratch (retrain=False).
model = ImageClassifier(verbose=True)
model.fit(x_train, y_train, time_limit=15 * 60)
model.final_fit(x_train, y_train, x_test, y_test, retrain=False)
y = model.evaluate(x_test, y_test)
print(y)
num_images = 47

# Load and resize the training images (images/1.jpg .. images/47.jpg).
x_train = []
for idx in range(1, num_images + 1):
    img = cv2.imread("images/" + str(idx) + ".jpg")
    resized_image = cv2.resize(img, (100, 100))
    x_train.append(resized_image)
print(len(x_train))
x_train = np.array(x_train)

# Hand-assigned scores (0-10), one per image, in file order.
y_train = [5, 5, 5, 4, 7, 8, 6, 6, 8, 10, 9, 7, 7, 8, 8, 9, 6, 9, 4, 4, 9, 3,
           8, 5, 1, 3, 2, 6, 7, 4, 7, 6, 6, 2, 3, 8, 3, 7, 8, 4, 6, 9, 5, 10,
           1, 0, 10]

# Use the last image as a single-sample smoke test.
x_test = []
img = cv2.imread("images/" + str(num_images) + ".jpg")
resized_image = cv2.resize(img, (100, 100))
x_test.append(resized_image)
x_test = np.array(x_test)
# BUG FIX: the test label was [num_images] (= 47), which is not one of the
# 0-10 score classes; use the score of that same image from y_train.
y_test = [y_train[num_images - 1]]

# BUG FIX: the loaded images are 3-channel colour arrays of shape
# (N, 100, 100, 3); appending an extra axis (the grayscale-MNIST idiom)
# produced invalid 5-D input, so the two reshape calls were removed.

clf = ImageClassifier(verbose=True)
clf.fit(x_train, y_train, time_limit=2 * 60)  # two-minute search budget
'''
clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
y = clf.evaluate(x_test, y_test)
clf.load_searcher().load_best_model().produce_keras_model().save('test_save.h5')
'''
y_test = []
base_path = "../data-deep-fashion-women/img/"

# Read the test split listed in the labels CSV into memory.
df = pd.read_csv('../data-deep-fashion-women/img/WOMEN/labels_test.csv')
print(len(df))
for index, row in df.iterrows():
    img_path = base_path + row[0]  # first column: relative image path
    img = image.load_img(img_path, target_size=(224, 224))
    arr = image.img_to_array(img)
    x_test.append(np.array(arr))
    y_test.append(row[1])  # second column: label

from autokeras.image.image_supervised import load_image_dataset
from autokeras.image.image_supervised import ImageClassifier

# NOTE(review): x_train/y_train and the x_test list initialisation come
# from code above this view.
clf = ImageClassifier(verbose=True)
clf.fit(x_train, y_train, time_limit=10 * 60 * 60)  # 10-hour search budget
clf.final_fit(x_train, y_train, x_test, y_test, retrain=True)
y = clf.evaluate(x_test, y_test)
print(y)

# Save the result in three forms: Auto-Keras model, Keras model, and the
# best searcher model converted to Keras.
clf.export_autokeras_model('./_models/nas_1.h5')
clf.export_keras_model('./_models/nas_2.h5')
clf.load_searcher().load_best_model().produce_keras_model().save(
    './_models/nas_3.h5')