# --- Training setup (script fragment) ---
# NOTE(review): `parser`, `myDataGenerator`, `myUnet`, `CategoricalCrossentropy`
# and `Adadelta` are defined/imported outside the visible portion of this file.

args = parser.parse_args()

# Training generator; the number of segmentation classes is inferred from the
# training masks via getClsnum().
train_data = myDataGenerator(args.timgs, args.tmasks, batch=args.batch, imgsize=args.size)
numcls = train_data.getClsnum()

# The validation generator is given the training class count explicitly so both
# generators encode masks with the same number of classes.
valid_data = myDataGenerator(args.vimgs, args.vmasks, numcls, batch=args.batch, imgsize=args.size)

# Build the U-Net entirely from command-line hyperparameters.
model = myUnet(numcls, args.filters, args.droprate, args.convkernel,
               args.transkernel, args.convstride, args.transstride,
               args.convpadding, args.transpadding, args.activation,
               args.batchnorm)

# Optionally resume from a checkpoint; any load failure aborts the script.
if args.checkpoint != '':
    try:
        model.load(args.checkpoint)
    except Exception as e:
        print(e)
        exit()

lossobj = CategoricalCrossentropy()

# Optimizer selected by (case-insensitive) name. The fragment is cut off here;
# presumably further elif branches for other optimizer names follow — confirm
# against the full file.
optimizer = None
if args.optimizer.lower() == 'adadelta':
    optimizer = Adadelta(args.learningrate)
# --- Inference preprocessing (PyTorch script fragment) ---
from tqdm import tqdm
from sklearn.preprocessing import OneHotEncoder
from model import myUnet
from datapro import loaddata
from evaluation import Jaccard_eval
from repress import watershed_process, kluster_proess
from datapro import loadpro_img

result_path = 'dataset1/test_RES'  # output directory for predictions
model_path = 'unet1params3.pkl'  # trained weights file
test_path = 'dataset1/test/'
repressmethod = 'connectedComponents'  # post-processing method; alternatives: 'watershed', 'cluster'
imgsize = 628

# Load the trained model onto the GPU.
# NOTE(review): `torch`, `os` and `np` must be imported earlier in the file —
# they are not visible in this fragment.
model = myUnet().cuda()
model.load_state_dict(torch.load(model_path))

# Collect test images in deterministic (sorted) order.
test_list = sorted(
    [os.path.join(test_path, img) for img in os.listdir(test_path)])
test_data = loadpro_img(test_list)

# Mirror-pad each image by 92 px on every side (U-Net overlap-tile border),
# then scale [0, 255] -> [0, 1] -> [-1, 1].
# assumes each element of test_data is a 2-D grayscale array of shape
# (imgsize, imgsize), so the padded result broadcasts into the (1, H, W)
# channel slot — TODO confirm against loadpro_img.
test_datapro = np.zeros(
    (len(test_data), 1, imgsize + 92 * 2, imgsize + 92 * 2))
for i in range(len(test_data)):
    test_datapro[i] = np.pad(test_data[i], ((92, 92)), 'symmetric')
    test_datapro[i] = test_datapro[i] / 255.0
    test_datapro[i] = (test_datapro[i] - 0.5) / 0.5
test_datapro = torch.from_numpy(test_datapro)

# Inference state; the prediction loop continues past the visible source.
model.eval()
pred_all = []
i = 0
# --- Inference setup (Keras script fragment) ---
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
from PIL import Image
from model import myUnet
import cv2
import glob
import data
import math
import matplotlib.image as mpimg

# Directory layout (Windows paths) for train/validation/test data and results.
Train_DIR = "E:\\Experiment data\\data\\Landsat\\Train\\"
Val_DIR = "E:\\Experiment data\\data\\Landsat\\Val\\"
Test_DIR = "E:\\Experiment data\\data\\Net3\\Test\\masked\\"
Result_DIR = "E:\\Experiment data\\data\\Net3\\Test\\resultMSCN\\"

# Build the U-Net and restore pretrained weights from an absolute path.
model = myUnet()
model.load(r"G:\Detection\Net1\MSCN\weight\1000_weights_2018-11-16-17-16-07.h5")

# Whole test scenes, processed as 512x512x3 patches.
# NOTE(review): `np` must be imported earlier in the file — not visible here.
imgsname = glob.glob(Test_DIR + "whole\\*.tif")
imgdatas = np.ndarray((1, 512, 512, 3), dtype=np.float32)
num = 0
patch_size = 512


def max_img(img):
    # Per-pixel pass over a (patch_size, patch_size, 3) prediction patch.
    # NOTE(review): the function body is truncated at the visible source
    # boundary — the inner-loop logic continues past this fragment.
    x = np.zeros((patch_size, patch_size, 3))
    # cloud = np.ndarray((patch_size, patch_size))
    # shadow = np.ndarray((patch_size, patch_size))
    # background = np.ndarray((patch_size, patch_size))
    for i in range(0, patch_size):
        for j in range(0, patch_size):