Example #1
File: eval.py  Project: zjuwyz/hyperpose
                        type=int,
                        default=10000,
                        help='number of images to evaluate')
    parser.add_argument('--vis_num',
                        type=int,
                        default=60,
                        help='number of evaluation results to visualize')
    # NOTE: argparse does not convert command-line strings to bool reliably, so expose this as a flag
    parser.add_argument('--multiscale',
                        action='store_true',
                        help='enable multiscale search')

    args = parser.parse_args()
    Config.set_model_name(args.model_name)
    Config.set_model_type(Config.MODEL[args.model_type])
    Config.set_model_backbone(Config.BACKBONE[args.model_backbone])
    Config.set_dataset_type(Config.DATA[args.dataset_type])
    Config.set_dataset_path(args.dataset_path)
    Config.set_dataset_version(args.dataset_version)

    config = Config.get_config()
    model = Model.get_model(config)
    evaluate = Model.get_evaluate(config)
    dataset = Dataset.get_dataset(config)

    evaluate(model,
             dataset,
             vis_num=args.vis_num,
             total_eval_num=args.eval_num,
             enable_multiscale_search=args.multiscale)
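The Config.set_* calls above read several flags (--model_name, --model_type, --model_backbone, --dataset_type, --dataset_path, --dataset_version) whose add_argument definitions fall in the truncated top of eval.py. A minimal sketch of what those definitions presumably look like; every default and help text here is an assumption, not taken from the project:

    # Sketch only: flag names come from the args.* accesses above; defaults and help texts are assumed
    parser.add_argument('--model_type', type=str, default='Openpose',
                        help='(assumed) model type, a key of Config.MODEL')
    parser.add_argument('--model_backbone', type=str, default='Default',
                        help='(assumed) model backbone, a key of Config.BACKBONE')
    parser.add_argument('--model_name', type=str, default='default_name',
                        help='(assumed) model name, used to locate the model directory')
    parser.add_argument('--dataset_type', type=str, default='MSCOCO',
                        help='(assumed) dataset type, a key of Config.DATA')
    parser.add_argument('--dataset_path', type=str, default='./data',
                        help='(assumed) path of the dataset')
    parser.add_argument('--dataset_version', type=str, default='2017',
                        help='(assumed) dataset version')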
Example #2
                        help='learning rate')
    parser.add_argument('--batch_size', type=int, default=8, help='batch_size')
    parser.add_argument('--kf_optimizer',
                        type=str,
                        default='Sync_avg',
                        help='KungFu parallel optimizer, available options: Sync_sgd, Sync_avg, Pair_avg')

    args = parser.parse_args()
    #config model
    Config.set_model_name(args.model_name)
    Config.set_model_type(Config.MODEL[args.model_type])
    Config.set_model_backbone(Config.BACKBONE[args.model_backbone])
    #config train
    Config.set_train_type(Config.TRAIN[args.train_type])
    Config.set_learning_rate(args.learning_rate)
    Config.set_batch_size(args.batch_size)
    Config.set_kungfu_option(Config.KUNGFU[args.kf_optimizer])
    #config dataset
    Config.set_dataset_type(Config.DATA[args.dataset_type])
    Config.set_dataset_path(args.dataset_path)

    #train
    config = Config.get_config()
    model = Model.get_model(config)
    train = Model.get_train(config)
    dataset = Dataset.get_dataset(config)
    train(model, dataset)
Example #3
                        default='Pair_avg',
                        help='KungFu parallel optimizer, available options: Sync_sgd, Sync_avg, Pair_avg')
    parser.add_argument('--test_num',
                        type=int,
                        default=100000,
                        help='number of images to test')
    parser.add_argument('--vis_num',
                        type=int,
                        default=60,
                        help='number of test results to visualize')
    # NOTE: argparse does not convert command-line strings to bool reliably, so expose this as a flag
    parser.add_argument('--multiscale',
                        action='store_true',
                        help='enable multiscale search')

    args = parser.parse_args()
    Config.set_model_name(args.model_name)
    Config.set_model_type(Config.MODEL[args.model_type])
    Config.set_model_backbone(Config.BACKBONE[args.model_backbone])
    Config.set_dataset_type(Config.DATA[args.dataset_type])
    Config.set_dataset_path(args.dataset_path)
    Config.set_dataset_version(args.dataset_version)
    
    config = Config.get_config()
    model = Model.get_model(config)
    test = Model.get_test(config)
    dataset = Dataset.get_dataset(config)

    test(model,
         dataset,
         vis_num=args.vis_num,
         total_test_num=args.test_num,
         enable_multiscale_search=args.multiscale)
Example #4
    parser.add_argument('--kf_optimizer',
                        type=str,
                        default='Sma',
                        help='KungFu parallel optimizer, available options: Sync_sgd, Async_sgd, Sma')
    parser.add_argument("--output_dir",
                        type=str,
                        default="save_dir",
                        help="which dir to output the exported pb model")
    
    
    args = parser.parse_args()
    Config.set_model_name(args.model_name)
    Config.set_model_type(Config.MODEL[args.model_type])
    Config.set_model_backbone(Config.BACKBONE[args.model_backbone])
    config = Config.get_config()
    export_model = Model.get_model(config)

    input_path = f"{config.model.model_dir}/newest_model.npz"
    output_dir = f"{args.output_dir}/{config.model.model_name}"
    output_path = f"{output_dir}/frozen_{config.model.model_name}.pb"
    print(f"exporting model {config.model.model_name} from {input_path}...")
    if not os.path.exists(output_dir):
        print("creating output_dir...")
        os.mkdir(output_dir)
    if not os.path.exists(input_path):
        print("input model file doesn't exist!")
        print("conversion aborted!")
    else:
        export_model.load_weights(input_path)
        export_model.eval()
        if export_model.data_format == "channels_last":
Example #5
import cv2
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from hyperpose import Config, Model, Dataset
from hyperpose.Dataset import imread_rgb_float, imwrite_rgb_float

Config.set_model_name("openpose")
Config.set_model_type(Config.MODEL.Openpose)
config = Config.get_config()

# get and load the model
model = Model.get_model(config)
weight_path = f"{config.model.model_dir}/newest_model.npz"
model.load_weights(weight_path)

# infer on a single image
ori_image = cv2.cvtColor(cv2.imread("./sample.jpg"), cv2.COLOR_BGR2RGB)
input_image = ori_image.astype(np.float32) / 255.0
if model.data_format == "channels_first":
    input_image = np.transpose(input_image, [2, 0, 1])
    img_c, img_h, img_w = input_image.shape
else:
    img_h, img_w, img_c = input_image.shape

conf_map, paf_map = model.infer(input_image[np.newaxis, :, :, :])

# get the visualize function, which renders part and limb heatmap images from the inferred heatmaps
visualize = Model.get_visualize(Config.MODEL.Openpose)
vis_parts_heatmap, vis_limbs_heatmap = visualize(input_image,
                                                 conf_map[0],
                                                 paf_map[0],
                                                 save_tofile=False)

# get the postprocess function, which assembles the detected parts from the inferred heatmaps into humans
postprocess = Model.get_postprocess(Config.MODEL.Openpose)
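The excerpt ends before postprocess is actually called. A hedged sketch of how the returned callable might be applied, assuming it takes the single-image heatmaps, the input size, and the model's part/limb definitions (the exact signature is an assumption, not shown in the excerpt):

# ASSUMPTION: the argument order of the postprocess callable is inferred, not taken from the excerpt
humans = postprocess(conf_map[0], paf_map[0], img_h, img_w,
                     model.parts, model.limbs, data_format=model.data_format)
for human in humans:
    print(human)  # each human is a set of body parts assembled from the heatmaps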
Example #6
import cv2
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from hyperpose import Config, Model, Dataset
from hyperpose.Dataset import imread_rgb_float, imwrite_rgb_float
Config.set_model_name("new_opps")
Config.set_model_type(Config.MODEL.Openpose)
config = Config.get_config()

# get and load the model
model = Model.get_model(config)
weight_path = f"{config.model.model_dir}/newest_model.npz"
model.load_weights(weight_path)

# infer on a single image
ori_image = cv2.cvtColor(cv2.imread("./sample.jpeg"), cv2.COLOR_BGR2RGB)
input_image = ori_image.astype(np.float32) / 255.0
if model.data_format == "channels_first":
    input_image = np.transpose(input_image, [2, 0, 1])
    img_c, img_h, img_w = input_image.shape
else:
    img_h, img_w, img_c = input_image.shape

conf_map, paf_map = model.infer(input_image[np.newaxis, :, :, :])

# get the visualize function, which renders part and limb heatmap images from the inferred heatmaps
visualize = Model.get_visualize(Config.MODEL.Openpose)
vis_parts_heatmap, vis_limbs_heatmap = visualize(input_image,
                                                 conf_map[0],
                                                 paf_map[0],
                                                 save_tofile=False)
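matplotlib is imported with the Agg backend but not used in the visible part of this snippet; presumably the heatmap images returned by visualize get written to disk afterwards. A minimal sketch, assuming vis_parts_heatmap and vis_limbs_heatmap are plain image arrays that matplotlib can render:

# ASSUMPTION: the visualize outputs are image arrays; names and figure layout below are illustrative
fig, axes = plt.subplots(1, 2, figsize=(12, 6))
axes[0].imshow(vis_parts_heatmap)
axes[0].set_title("part heatmaps")
axes[1].imshow(vis_limbs_heatmap)
axes[1].set_title("limb heatmaps")
fig.savefig("./heatmaps.png")  # the Agg backend writes to file instead of opening a window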
Example #7
        type=str,
        default="Default",
        help=
        "model backbone, available options: Mobilenet, Vggtiny, Vgg19, Resnet18, Resnet50"
    )
    parser.add_argument(
        "--model_name",
        type=str,
        default="default_name",
        help="model name, used to distinguish the model and determine the model directory")
    parser.add_argument(
        "--dataset_path",
        type=str,
        default="./data",
        help="dataset path, determines where the dataset is loaded from")

    args = parser.parse_args()
    #config model
    Config.set_model_name(args.model_name)
    Config.set_model_type(Config.MODEL[args.model_type])
    Config.set_model_backbone(Config.BACKBONE[args.model_backbone])
    Config.set_pretrain(True)
    #config dataset
    Config.set_pretrain_dataset_path(args.dataset_path)
    config = Config.get_config()
    #train
    model = Model.get_model(config)
    pretrain = Model.get_pretrain(config)
    dataset = Dataset.get_pretrain_dataset(config)
    pretrain(model, dataset)
Example #8
                        type=str,
                        default="./save_dir/example_dir/output_dir",
                        help="output directory for the model forwarding results")

args = parser.parse_args()
# config model
Config.set_model_name(args.model_name)
Config.set_model_type(Config.MODEL[args.model_type])
Config.set_dataset_type(Config.DATA[args.dataset_type])
Config.set_model_backbone(Config.BACKBONE[args.model_backbone])
config = Config.get_config()
output_dir = os.path.join(args.output_dir, args.model_name)
os.makedirs(output_dir, exist_ok=True)

# construct model and processors
model = Model.get_model(config)
# visualizer
VisualizerClass = Model.get_visualizer(config)
visualizer = VisualizerClass(save_dir=output_dir,
                             parts=model.parts,
                             limbs=model.limbs)
# post processor
PostProcessorClass = Model.get_postprocessor(config)
post_processor = PostProcessorClass(parts=model.parts,
                                    limbs=model.limbs,
                                    hin=model.hin,
                                    win=model.win,
                                    hout=model.hout,
                                    wout=model.wout,
                                    colors=model.colors)
# image processor