def datadealing(City):
    """Handle a data request for *City* and return the result as JSON.

    The query string must carry a ``Mode`` parameter.  When it equals
    ``"rawJson"`` the processed payload is wrapped under a ``rawJson`` key;
    otherwise the payload is returned directly.
    """
    mode = request.args["Mode"]
    payload = dataProcess(mode, City)
    if mode == u"rawJson":
        return jsonify({u"rawJson": payload})
    return jsonify(payload)
def load_data(self, npy_dir, is_train=True):
    """Load images and labels from *npy_dir*.

    Returns ``(images, labels)`` from the training set when *is_train*
    is true, otherwise from the test set.
    """
    source = dataProcess(self.img_rows, self.img_cols)
    loader = source.load_train_data if is_train else source.load_test_data
    images, labels = loader(npy_dir)
    return images, labels
def load_train_data(self):
    """Load the training images and masks, scaled for the network."""
    source = dataProcess(self.img_rows, self.img_cols)
    images, masks = source.load_train_data()
    # Scale both arrays; masks get the mask-specific treatment.
    images = DataProcess.scale(images)
    masks = DataProcess.scale(masks, flag_mask=True)
    return images, masks
def load_train_data(self):
    """Load and preprocess the training images and masks."""
    source = dataProcess()
    images, masks = source.load_train_data()
    # Resize/normalise to the configured geometry; masks are flagged so the
    # preprocessor can apply mask-specific handling.
    images = DataProcess.preprocess(images, self.img_rows, self.img_cols)
    masks = DataProcess.preprocess(masks, self.img_rows, self.img_cols, flag_mask=True)
    return images, masks
def predict(self, image, model):
    """Run *model* on a single *image* and return the predicted mask as an image."""
    source = dataProcess(self.img_rows, self.img_cols)
    test_batch = source.creat_and_load_single_test_image_data(image)
    print("loading data done")
    print('predict test data')
    predicted_masks = model.predict(test_batch, batch_size=1, verbose=1)
    # The batch holds exactly one image, so take the first (and only) mask.
    return array_to_img(predicted_masks[0])
def addLocation(City, StationName, MarsLat, MarsLon):
    """Attach Mars coordinates to *StationName* in *City*'s station cache.

    Updates every station whose ``Name`` matches *StationName*, rewrites
    the cached ``allStations`` JSON file for the city, and returns the
    refreshed per-station data from ``getStationPP``.
    """
    a = dataProcess(City=City, Mode="allStations")
    for station in a[u"Stations"]:
        if station["Name"] == StationName:
            station["MarsLat"] = MarsLat
            station["MarsLon"] = MarsLon
    # Use a context manager so the cache file is closed even if the write
    # raises (the original open/write/close leaked the handle on error).
    with open(u"./cache/{City}_allStations.json".format(City=City), "w") as cache_file:
        cache_file.write(json.dumps(a))
    return getStationPP(City, StationName)
def predict(self, model):
    """Predict masks for the whole test set, save the array, and export images."""
    print('load test data')
    source = dataProcess(self.img_rows, self.img_cols)
    test_images = source.load_test_data()
    # Apply the same scaling used at training time.
    test_images = DataProcess.scale(test_images)
    print('predict test data')
    predicted_masks = model.predict(test_images, batch_size=1, verbose=1)
    np.save(self.test_result_name(), predicted_masks)
    print('save results to jpeg files')
    self.save_img()
def load_model_and_predict(self, st_num, end_num):
    """Predict and save masks for test images ``st_num`` .. ``end_num - 1``.

    Builds the U-Net, restores the saved weights once, then for each image
    index in the range creates its single-image test set, predicts the
    mask, saves the raw array, and exports the result image.
    """
    model = self.get_unet()
    # Restore the previously trained weights (was: 加载保存的模型权重).
    model.load_weights('unet.hdf5')
    # The loader only depends on the image geometry, which is loop-invariant;
    # build it once instead of re-constructing it on every iteration.
    mydata = dataProcess(self.img_rows, self.img_cols)
    for i in range(st_num, end_num):
        mydata.create_single_test_data(i)
        imgs_test = self.load_test_data()
        print("loading data done")
        print('predict test data')
        imgs_mask_test = model.predict(imgs_test, batch_size=1, verbose=1)
        np.save('../results/imgs_mask_test.npy', imgs_mask_test)
        self.save_single_img(i)
def use_saved_model(self, num):
    """Predict the mask for test image *num* using the saved U-Net weights."""
    source = dataProcess(self.img_rows, self.img_cols)
    source.create_single_test_data(num)
    imgs_test = self.load_test_data()
    print("loading data done")
    model = self.get_unet()
    # Restore previously trained weights instead of retraining.
    model.load_weights('unet.hdf5')
    print('predict test data')
    predicted = model.predict(imgs_test, batch_size=1, verbose=1)
    np.save('../results/imgs_mask_test.npy', predicted)
def load_test_data(self):
    """Return the test images loaded via the data processor."""
    return dataProcess(self.img_rows, self.img_cols).load_test_data()
def load_data(self):
    """Return ``(train_images, train_masks, test_images)``."""
    source = dataProcess(self.img_rows, self.img_cols)
    train_images, train_masks = source.load_train_data()
    test_images = source.load_test_data()
    return train_images, train_masks, test_images
# NOTE(review): fragment — the enclosing network-builder function is cut off
# before this point (the first line below is the tail of a Conv2DTranspose
# call) and the final ``instru.fit(...)`` call is truncated mid-arguments.
# Tokens are preserved verbatim; only formatting and comments were added.
activation='relu')(model)
# Decoder: upsample, concatenate with the matching encoder skip connection,
# then convolve back down.
model = concatenate([model_conn4, model_tr], axis=3)
model = Conv2D(32, (3, 3), activation='relu', padding='same')(model)
model_tr = Conv2DTranspose(32, (3, 3), strides=2, padding='same', activation='relu')(model)
model = concatenate([model_conn5, model_tr], axis=3)
model = Conv2D(32, (3, 3), activation='relu', padding='same')(model)
# Final 1x1 sigmoid convolution producing a single-channel mask.
model = Conv2D(1, (1, 1), activation='sigmoid', padding='same')(model)
instru = Model(input=inputs, output=model)
instru.compile(optimizer=Adam(lr=1e-4), loss=IOU_calc_loss, metrics=[IOU_calc])
# Load the 1024x1280 training data.
mydata = dataProcess(1024, 1280)
imgs_train, imgs_mask_train = mydata.load_train_data()
print(imgs_mask_train.shape)
print('Loading done')
# Checkpoint on best training loss, then resume from the saved weights.
model_checkpoint = ModelCheckpoint('../drive/checkpoints/instru.hdf5', monitor='loss', verbose=1, save_best_only=True)
instru.load_weights('../drive/checkpoints/instru.hdf5')
# NOTE(review): call truncated in source — the remaining arguments
# (e.g. callbacks) are not visible here.
instru.fit(imgs_train, imgs_mask_train, batch_size=2, nb_epoch=3, verbose=1, validation_split=0.2, shuffle=True,
from keras.callbacks import ModelCheckpoint
import data
import model

# Build the training set on disk once, then load it into memory.
dataproc = data.dataProcess(299, 299)
dataproc.create_train_data()
imgs, mask = dataproc.load_train_data()

# The original bound the network to the name ``model``, shadowing the
# imported ``model`` module; use a distinct name so the module stays usable.
net = model.get_model()
# Checkpoint every 50 epochs, file name tagged with epoch and val_loss.
model_checkpoint = ModelCheckpoint('model.{epoch:02d}-{val_loss:.2f}.hdf5', verbose=0, period=50)
print('Fitting model...')
net.fit(imgs, mask, batch_size=4, epochs=10000, verbose=1, validation_split=0.2, shuffle=True, callbacks=[model_checkpoint])
def City(City):
    """Render the city page, flashing any attention notices for *City*.

    Flashes each entry under the ``Attention`` key (when present) as a
    warning, then renders ``city.html``.
    """
    # Fetch once and reuse; the original called dataProcess twice with the
    # same arguments for the same payload.
    raw = dataProcess("rawJson", City)
    if "Attention" in raw:
        for attention in raw["Attention"]:
            flash(attention, "warning")
    return render_template('city.html', lista=List, City=City)
def load_test_data(self):
    """Return ``(test_images, test_labels)`` for the configured mode."""
    source = dataProcess(self.img_rows, self.img_cols, mode=self.mode)
    images, labels = source.load_test_data()
    return images, labels