def inference_ssd_vgg16(config, args):
    """Build an inference-mode SSD VGG16 model and its input preprocessor.

    Args:
        config: parsed model/training configuration dict.
        args: mapping with "label_maps" (passed through to SSD_VGG16) and
            "num_predictions" (max detections to keep) entries.

    Returns:
        Tuple of (model, process_input_fn).
    """
    ssd_model = SSD_VGG16(
        config,
        args["label_maps"],
        is_training=False,
        num_predictions=args["num_predictions"],
    )
    return ssd_model, vgg16.preprocess_input
def _load_label_maps(label_maps_path):
    """Read a label maps file (one class name per line) into a list."""
    with open(label_maps_path, "r") as label_map_file:
        return [line.strip("\n") for line in label_map_file.readlines()]


def get_model(config, args):
    """Construct a training-mode model instance named by config["model"]["name"].

    Args:
        config: dict parsed from the JSON config file; must contain "model".
        args: parsed CLI namespace; args.label_maps is the path to the label
            maps file (required by every variant except tbpp_vgg16).

    Returns:
        A model instance built with is_training=True.

    Exits the process (SystemExit) when the configured model name is not
    implemented — original behavior, preserved.
    """
    name = config["model"]["name"]
    if name == "ssd_vgg16":
        return SSD_VGG16(
            config=config,
            label_maps=_load_label_maps(args.label_maps),
            is_training=True)
    elif name == "ssd_mobilenetv1":
        return SSD_MOBILENET(
            config=config,
            label_maps=_load_label_maps(args.label_maps),
            is_training=True)
    elif name == "ssd_mobilenetv2":
        return SSD_MOBILENETV2(
            config=config,
            label_maps=_load_label_maps(args.label_maps),
            is_training=True)
    elif name == "tbpp_vgg16":
        # TBPP is a text detector; it takes no label maps.
        return TBPP_VGG16(config=config, is_training=True)
    elif name == "qssd_vgg16":
        return QSSD_VGG16(
            config=config,
            label_maps=_load_label_maps(args.label_maps),
            is_training=True)
    elif name == "qssd_mobilenetv2":
        return QSSD_MOBILENETV2(
            config=config,
            label_maps=_load_label_maps(args.label_maps),
            is_training=True)
    else:
        # Bug fix: removed stray "$" (a JavaScript template-literal leftover)
        # from the f-string message.
        print(f"model with name {name} has not been implemented yet")
        exit()
def get_model(config, label_maps):
    """Build a model selected by config["model"]["name"].

    Args:
        config: parsed configuration dict containing a "model" section.
        label_maps: list of class names, already loaded by the caller.

    Returns:
        A model instance.

    Exits the process (SystemExit) when the configured model name is not
    implemented — original behavior, preserved.
    """
    model_config = config["model"]
    if model_config["name"] == "ssd_vgg16":
        return SSD_VGG16(config=config, label_maps=label_maps, is_training=True)
    elif model_config["name"] == "ssd_vgg19":
        # NOTE(review): is_training=False kept from the original, but it is
        # inconsistent with the ssd_vgg16 branch above — confirm intent.
        return SSD_VGG19(config, label_maps, is_training=False)
    else:
        # Bug fix: removed stray "$" (a JavaScript template-literal leftover)
        # from the f-string message.
        print(
            f"model with name {model_config['name']} has not been implemented yet"
        )
        exit()
def ssd_vgg16(config, args):
    """Build an inference-mode SSD VGG16 model plus its input preprocessor.

    Args:
        config: parsed configuration dict.
        args: parsed CLI namespace; args.label_maps is a path to a file with
            one class name per line, args.num_predictions caps detections.

    Returns:
        Tuple of (model, process_input_fn, label_maps).
    """
    assert args.label_maps is not None, "please specify a label map file"
    assert os.path.exists(args.label_maps), "label_maps file does not exist"
    class_names = []
    with open(args.label_maps, "r") as fp:
        for raw_line in fp.readlines():
            class_names.append(raw_line.strip("\n"))
    ssd_model = SSD_VGG16(
        config,
        class_names,
        is_training=False,
        num_predictions=args.num_predictions,
    )
    return ssd_model, vgg16.preprocess_input, class_names
def inference_ssd_vgg16(config, args):
    """Build an inference-mode SSD VGG16 model and load the input image.

    Args:
        config: parsed configuration dict.
        args: parsed CLI namespace; args.label_maps is a path to a file with
            one class name per line, args.input_image is the image to read.

    Returns:
        Tuple of (model, label_maps, process_input_fn, image) where image is
        the uint8 BGR array read from args.input_image.
    """
    assert args.label_maps is not None, "please specify a label map file"
    assert os.path.exists(args.label_maps), "label_maps file does not exist"
    with open(args.label_maps, "r") as file:
        label_maps = [line.strip("\n") for line in file.readlines()]
    model = SSD_VGG16(
        config,
        label_maps,
        is_training=False,
        num_predictions=args.num_predictions,
    )
    process_input_fn = vgg16.preprocess_input
    image = cv2.imread(args.input_image)  # read image in bgr format
    # Bug fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # np.float64 is the dtype the old alias resolved to.
    image = np.array(image, dtype=np.float64)
    return model, label_maps, process_input_fn, np.uint8(image)
def inference_ssd_vgg16(config, args):
    """Build an inference-mode SSD VGG16 model and read one labelled sample.

    Args:
        config: parsed configuration dict.
        args: parsed CLI namespace; uses label_maps, num_predictions,
            input_image and label_file.

    Returns:
        Tuple of (model, label_maps, process_input_fn, image, bboxes, classes)
        where image is the uint8 array returned by ssd_utils.read_sample.
    """
    assert args.label_maps is not None, "please specify a label map file"
    assert os.path.exists(args.label_maps), "label_maps file does not exist"
    class_names = []
    with open(args.label_maps, "r") as fp:
        for raw_line in fp.readlines():
            class_names.append(raw_line.strip("\n"))
    ssd_model = SSD_VGG16(
        config,
        class_names,
        is_training=False,
        num_predictions=args.num_predictions,
    )
    sample_image, bboxes, classes = ssd_utils.read_sample(
        image_path=args.input_image,
        label_path=args.label_file,
    )
    return (
        ssd_model,
        class_names,
        vgg16.preprocess_input,
        np.uint8(sample_image),
        bboxes,
        classes,
    )
def train_ssd_vgg16(config, args):
    """Assemble a compiled SSD VGG16 model and its training data generator.

    Args:
        config: parsed configuration dict; config["training"] supplies the
            SSD loss hyper-parameters.
        args: parsed CLI namespace (label_maps, training_split, images_dir,
            labels_dir, batch_size, learning_rate, shuffle, augment).

    Returns:
        Tuple of (model, generator, optimizer, loss, training_samples).
    """
    assert args.label_maps is not None, "please specify a label maps file for this model"
    assert os.path.exists(args.label_maps), "label_maps file does not exist"
    with open(args.label_maps, "r") as file:
        label_maps = [line.strip("\n") for line in file.readlines()]
    assert args.training_split is not None, "please specify a training split file for this model"
    assert os.path.exists(
        args.training_split), "training_split file does not exist"
    training_samples = data_utils.get_samples_from_split(
        split_file=args.training_split,
        images_dir=args.images_dir,
        labels_dir=args.labels_dir)
    assert args.batch_size <= len(
        training_samples
    ), "batch_size less than or equal to len(training_samples)"
    training_config = config["training"]
    # Bug fix: label_maps was read above but never handed to the model, even
    # though every other SSD_VGG16 construction site in this file passes it.
    model = SSD_VGG16(config=config, label_maps=label_maps)
    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])
    # learning_rate= is the current Keras spelling (lr= is the deprecated
    # alias) and matches the other optimizer constructions in this file.
    optimizer = SGD(learning_rate=args.learning_rate,
                    momentum=0.9,
                    decay=0.0005,
                    nesterov=False)
    generator = SSD_DATA_GENERATOR(samples=training_samples,
                                   label_maps=label_maps,
                                   config=config,
                                   shuffle=args.shuffle,
                                   batch_size=args.batch_size,
                                   augment=args.augment,
                                   process_input_fn=vgg16.preprocess_input)
    model.compile(optimizer=optimizer, loss=loss.compute)
    return model, generator, optimizer, loss, training_samples
# NOTE(review): top-level script fragment, collapsed onto one physical line and
# truncated mid-expression at the trailing SSD_LOSS(...) call — the chunk
# continues outside this view, so the code is left byte-identical. It loads
# label_maps, ensures the output dir exists, parses the JSON config, then
# builds a (model, loss, optimizer) triple per model_config["name"].
# NOTE(review): SGD(lr=...) uses the deprecated Keras alias; other code in
# this project uses learning_rate= — confirm and align when the full chunk
# is in view.
with open(args.label_maps, "r") as file: label_maps = [line.strip("\n") for line in file.readlines()] if not os.path.exists(args.output_dir): os.makedirs(args.output_dir) with open(args.config, "r") as config_file: config = json.load(config_file) model_config = config["model"] training_config = config["training"] if model_config["name"] == "ssd_vgg16": process_input_fn = vgg16.preprocess_input model = SSD_VGG16(config=config, label_maps=label_maps) loss = SSD_LOSS( alpha=training_config["alpha"], min_negative_boxes=training_config["min_negative_boxes"], negative_boxes_ratio=training_config["negative_boxes_ratio"]) optimizer = SGD(lr=args.learning_rate, momentum=0.9, decay=0.0005, nesterov=False) model.compile(optimizer=optimizer, loss=loss.compute) elif model_config["name"] == "ssd_mobilenetv1": process_input_fn = mobilenet.preprocess_input model = SSD_MOBILENET(config=config, label_maps=label_maps) loss = SSD_LOSS( alpha=training_config["alpha"], min_negative_boxes=training_config["min_negative_boxes"],
# NOTE(review): top-level script fragment, collapsed onto one physical line and
# truncated mid-call at the trailing SSD_MOBILENETV2(...) — the chunk continues
# outside this view, so the code is left byte-identical. It validates CLI
# arguments (num_predictions, output_type against SUPPORTED_TYPES), ensures the
# output dir exists, parses the JSON config, then builds an inference-mode
# model per model_config["name"], re-reading the label maps file per branch.
assert args.num_predictions > 0, "num_predictions must be larger than zero" assert args.output_type in SUPPORTED_TYPES, f"{args.output_type} is not supported yet. Please choose one of type {SUPPORTED_TYPES}" if not os.path.exists(args.output_dir): os.makedirs(args.output_dir) with open(args.config, "r") as config_file: config = json.load(config_file) model_config = config["model"] if model_config["name"] == "ssd_vgg16": with open(args.label_maps, "r") as file: label_maps = [line.strip("\n") for line in file.readlines()] model = SSD_VGG16(config, label_maps, is_training=False, num_predictions=args.num_predictions) elif model_config["name"] == "ssd_mobilenetv1": with open(args.label_maps, "r") as file: label_maps = [line.strip("\n") for line in file.readlines()] model = SSD_MOBILENET(config, label_maps, is_training=False, num_predictions=args.num_predictions) elif model_config["name"] == "ssd_mobilenetv2": with open(args.label_maps, "r") as file: label_maps = [line.strip("\n") for line in file.readlines()] model = SSD_MOBILENETV2(config, label_maps, is_training=False,
def _read_label_maps_file(path):
    """Load class names, one per line, from a label maps file."""
    with open(path, "r") as label_map_file:
        return [i.strip("\n") for i in label_map_file.readlines()]


def _make_optimizer(optimizer_config, learning_rate):
    """Create the configured optimizer; unknown names fall back to Adam."""
    if optimizer_config["name"] == "adam":
        return Adam(learning_rate=learning_rate,
                    beta_1=optimizer_config["beta_1"],
                    beta_2=optimizer_config["beta_2"],
                    epsilon=optimizer_config["epsilon"],
                    decay=optimizer_config["decay"])
    if optimizer_config["name"] == "sgd":
        return SGD(learning_rate=learning_rate,
                   momentum=optimizer_config["momentum"],
                   decay=optimizer_config["decay"],
                   nesterov=optimizer_config["nesterov"])
    # Original behavior: any other name silently defaults to Adam with the
    # standard Keras hyper-parameters.
    return Adam(learning_rate=learning_rate,
                beta_1=0.9,
                beta_2=0.999,
                epsilon=1e-08,
                decay=0.0)


def ssd_vgg16(config, args, callbacks):
    """Train an SSD VGG16 model end-to-end and save its weights.

    Builds training (and optional validation) data generators, constructs the
    model/loss/optimizer from config, optionally resumes from a checkpoint,
    runs model.fit with the given callbacks, and writes model.h5 into
    args.output_dir.

    Args:
        config: parsed configuration dict; config["training"] supplies loss
            and optimizer hyper-parameters.
        args: parsed CLI namespace (label_maps, training/validation splits,
            images_dir, labels_dir, batch_size, learning_rate, shuffle,
            augment, show_network_structure, checkpoint, epochs,
            initial_epoch, output_dir).
        callbacks: list of Keras callbacks forwarded to model.fit.
    """
    training_config = config["training"]
    label_maps = _read_label_maps_file(args.label_maps)
    training_samples = data_utils.get_samples_from_split(
        split_file=args.training_split,
        images_dir=args.images_dir,
        labels_dir=args.labels_dir)
    training_data_generator = SSD_DATA_GENERATOR(
        samples=training_samples,
        config=config,
        label_maps=label_maps,
        shuffle=args.shuffle,
        batch_size=args.batch_size,
        augment=args.augment,
        process_input_fn=preprocess_input)
    # Consolidated the two identical validation_split guards from the
    # original into a single block; behavior is unchanged.
    validation_data_generator = None
    if args.validation_split is not None:
        print("-- validation split specified")
        validation_samples = data_utils.get_samples_from_split(
            split_file=args.validation_split,
            images_dir=args.images_dir,
            labels_dir=args.labels_dir)
        validation_data_generator = SSD_DATA_GENERATOR(
            samples=validation_samples,
            config=config,
            label_maps=label_maps,
            shuffle=args.shuffle,
            batch_size=args.batch_size,
            augment=False,  # validation data is never augmented
            process_input_fn=preprocess_input)
    loss = SSD_LOSS(
        alpha=training_config["alpha"],
        min_negative_boxes=training_config["min_negative_boxes"],
        negative_boxes_ratio=training_config["negative_boxes_ratio"])
    optimizer = _make_optimizer(training_config["optimizer"],
                                args.learning_rate)
    model = SSD_VGG16(config=config, label_maps=label_maps, is_training=True)
    if args.show_network_structure:
        model.summary()
    model.compile(optimizer=optimizer, loss=loss.compute)
    if args.checkpoint is not None:
        assert os.path.exists(args.checkpoint), "checkpoint does not exist"
        model.load_weights(args.checkpoint, by_name=True)
    model.fit(
        x=training_data_generator,
        validation_data=validation_data_generator,
        batch_size=args.batch_size,
        validation_batch_size=args.batch_size,
        epochs=args.epochs,
        initial_epoch=args.initial_epoch,
        callbacks=callbacks,
    )
    model.save_weights(os.path.join(args.output_dir, "model.h5"))