def from_opts(cls, opt) -> InferencePipeline:
    """Build an InferencePipeline from parsed options.

    Loads the three trained models named in ``opt`` (PINet generator,
    pose estimator, segmentation model) and wraps them in a pipeline.

    Args:
        opt: Parsed options object; must provide ``pose_estimator``,
            ``segmentation_model`` and ``gpu_ids`` attributes (plus
            whatever ``create_model`` consumes).

    Returns:
        InferencePipeline: pipeline holding the loaded models and ``opt``.
    """
    import copy  # local import keeps this fix self-contained

    # PINet generator, switched to eval mode for inference.
    pinet = create_model(opt).eval()

    # BUG FIX: the original did `args = DEFAULT_ARGS` and then mutated
    # `args.opts`, which clobbered the shared module-level DEFAULT_ARGS
    # object for every subsequent caller. Deep-copy before mutating so
    # the global default stays pristine.
    args = copy.deepcopy(DEFAULT_ARGS)
    args.opts = ['TEST.MODEL_FILE', opt.pose_estimator]

    # Non-empty gpu_ids means "run on GPU" for both auxiliary models.
    pose_estimator = PoseEstimator(args, opt.gpu_ids != [])
    segmentator = SegmentationModel(opt.segmentation_model, bool(opt.gpu_ids))

    return cls(pose_estimator, pinet, segmentator, opt)
import os  # BUG FIX: os.path.join is used below but `os` was never imported
import time

from options.test_options import TestOptions
from data.data_loader import CreateDataLoader
from models.PINet20 import create_model
from util.visualizer import Visualizer
from util import html

# Parse test-time options and pin the settings the test loader requires.
opt = TestOptions().parse()
opt.nThreads = 1        # test code only supports nThreads = 1
opt.batchSize = 1       # test code only supports batchSize = 1
opt.serial_batches = True  # no shuffle
opt.no_flip = True      # no flip

# Build the dataset and the model under test.
data_loader = CreateDataLoader(opt)
dataset = data_loader.load_data()
model = create_model(opt)
visualizer = Visualizer(opt)

# Create the HTML results website for this experiment/phase/epoch.
web_dir = os.path.join(opt.results_dir, opt.name,
                       '%s_%s' % (opt.phase, opt.which_epoch))
webpage = html.HTML(
    web_dir,
    'Experiment = %s, Phase = %s, Epoch = %s' %
    (opt.name, opt.phase, opt.which_epoch))

print(opt.how_many)
print(len(dataset))

# Switch to eval mode for inference; print confirms training flag is off.
model = model.eval()
print(model.training)