config.display()

# Prepare the training dataset
dataset_train = ShapesDataset()
dataset_train.load_shapes(len(train_imglist), img_floder, mask_floder, train_imglist, yaml_floder)
dataset_train.prepare()

# Prepare the validation dataset
dataset_val = ShapesDataset()
dataset_val.load_shapes(len(val_imglist), img_floder, mask_floder, val_imglist, yaml_floder)
dataset_val.prepare()

# Build the training model and load the COCO pretrained weights
model = get_train_model(config)
model.summary()
model.load_weights(COCO_MODEL_PATH, by_name=True, skip_mismatch=True)

# Data generators
train_generator = data_generator(dataset_train, config, shuffle=True, batch_size=config.BATCH_SIZE)
val_generator   = data_generator(dataset_val, config, shuffle=True, batch_size=config.BATCH_SIZE)

# Callback functions: the weights are saved after every training epoch
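The comment above mentions callbacks that save the weights after every epoch but does not show them. Below is a minimal sketch of how such callbacks and the training call could be wired up with standard Keras; the log directory, learning-rate schedule, epoch count, and the assumption that the Mask R-CNN losses are already attached inside get_train_model via add_loss (so compile needs no explicit loss) are illustrative, not the repository's exact settings.

from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, TensorBoard
from keras.optimizers import Adam

# Log training curves and save a weight file after every epoch (paths are placeholders)
logging    = TensorBoard(log_dir="logs")
checkpoint = ModelCheckpoint("logs/ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5",
                             monitor="val_loss", save_weights_only=True, verbose=1)
reduce_lr  = ReduceLROnPlateau(monitor="val_loss", factor=0.5, patience=2, verbose=1)

# Assumes the Mask R-CNN losses were added inside get_train_model via add_loss,
# so no explicit loss is attached to the outputs here
model.compile(optimizer=Adam(lr=1e-4), loss=[None] * len(model.outputs))
model.fit_generator(train_generator,
                    steps_per_epoch=max(1, len(train_imglist) // config.BATCH_SIZE),
                    validation_data=val_generator,
                    validation_steps=max(1, len(val_imglist) // config.BATCH_SIZE),
                    epochs=50,
                    callbacks=[logging, checkpoint, reduce_lr])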
    # Attribute overrides inside the TrainConfig class body
    NUM_CLASSES       = num_classes
    RPN_ANCHOR_SCALES = RPN_ANCHOR_SCALES
    IMAGE_MAX_DIM     = IMAGE_MAX_DIM

config = TrainConfig()
config.display()

# Compute the backbone feature-map shapes and generate the pyramid anchors
backbone_shapes = compute_backbone_shapes(config, config.IMAGE_SHAPE)
anchors = generate_pyramid_anchors(config.RPN_ANCHOR_SCALES, config.RPN_ANCHOR_RATIOS,
                                   backbone_shapes, config.BACKBONE_STRIDES, config.RPN_ANCHOR_STRIDE)

model_body = get_train_model(config)
if model_path != "":
    #------------------------------------------------------#
    #   Load pretrained weights
    #------------------------------------------------------#
    print('Load weights {}.'.format(model_path))
    model_body.load_weights(model_path, by_name=True, skip_mismatch=True)

if ngpus_per_node > 1:
    model = ParallelModel(model_body, ngpus_per_node)
else:
    model = model_body

#---------------------------#
#   Read the txt files that describe the dataset
#---------------------------#
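As a rough sketch of the step announced by the last comment, the train/validation splits could be read from plain txt files as follows; the variable names train_annotation_path and val_annotation_path and the file names are placeholders, not names taken from the repository.

# Hypothetical paths; the actual txt files come from the dataset-annotation step
train_annotation_path = "train.txt"
val_annotation_path   = "val.txt"

with open(train_annotation_path, encoding='utf-8') as f:
    train_lines = f.readlines()
with open(val_annotation_path, encoding='utf-8') as f:
    val_lines = f.readlines()

num_train = len(train_lines)
num_val   = len(val_lines)
print("Train on {} samples, validate on {} samples.".format(num_train, num_val))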