Example #1
def infer():
    config.load_cfg_fom_args("Infer a metric model.")
    config.assert_and_infer_cfg()
    cfg.freeze()

    model = MetricModel()
    print(model)
    #model.load_state_dict(torch.load(cfg.CONVERT_MODEL_FROM)['model_state'], strict=False)
    load_checkpoint(cfg.TRAIN.WEIGHTS, model)
    model.eval()

    new_input = np.ones((1, 3, 224, 224), dtype='float32')
    new_input = torch.from_numpy(new_input)
    #new_input = process(new_input)
    '''
    img = np.ones([224,224,3])
    b = np.ones([224,224])
    g = b * 2
    r = b * 3
    img[:, :, 0]  = b
    img[:, :, 1]  = g
    img[:, :, 2]  = r
    new_input = process(img)
    '''
    fea = model(new_input)
    fea_numpy = fea.detach().numpy()
    print(fea_numpy[0][:10])
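
The call to process() is commented out above and its implementation is not part of this snippet. As a rough illustration only, a minimal preprocessing helper of that shape might look like the sketch below; the ImageNet mean/std constants and the exact layout conversion are assumptions, not taken from this codebase.

import numpy as np
import torch

# Assumed normalization constants (standard ImageNet values, not from this repo).
_MEAN = np.array([0.485, 0.456, 0.406], dtype='float32')
_STD = np.array([0.229, 0.224, 0.225], dtype='float32')

def process(img):
    # HWC image in [0, 255] -> normalized 1x3x224x224 float32 tensor.
    img = np.asarray(img, dtype='float32') / 255.0
    img = (img - _MEAN) / _STD
    img = np.ascontiguousarray(img.transpose(2, 0, 1))  # HWC -> CHW
    return torch.from_numpy(img).unsqueeze(0)           # add batch dimension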
Example #2
def export():
    config.load_cfg_fom_args("Convert a metric model.")
    config.assert_and_infer_cfg()
    cfg.freeze()

    model = MetricModel()
    print(model)
    #model.load_state_dict(torch.load(cfg.CONVERT_MODEL_FROM)['model_state'], strict=False)
    load_checkpoint(cfg.TRAIN.WEIGHTS, model)
    model.eval()
    dummy_inputs = torch.randn(1, 3, 224, 224)
    print(dummy_inputs.size())
    fea = model(dummy_inputs)
    print(fea.size())
    export_tf_model(model, dummy_inputs)
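
export_tf_model() is defined elsewhere in this project and is not shown here. The sketch below illustrates the same traced-export idea using torch.onnx.export; the output path, tensor names, and opset version are assumptions.

import torch

def export_onnx(model, dummy_inputs, path='metric_model.onnx'):
    # Trace the model with the dummy input and serialize an ONNX graph.
    model.eval()
    torch.onnx.export(
        model,
        dummy_inputs,
        path,
        input_names=['input'],
        output_names=['feature'],
        opset_version=11,
    )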
Example #3
def main():
    config.load_cfg_fom_args("Train a metric model.")
    config.assert_and_infer_cfg()
    cfg.freeze()
    dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer.train_model)
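
dist.multi_proc_run() belongs to this codebase and its body is not shown. The sketch below only illustrates the usual pattern such a launcher wraps (one worker process per GPU, each joining a process group before calling the training function); the backend and rendezvous address are assumptions.

import torch
import torch.distributed
import torch.multiprocessing as mp

def _worker(rank, num_proc, fun):
    # Join the process group for this rank, pin its GPU, then run the job.
    torch.distributed.init_process_group(
        backend='nccl',
        init_method='tcp://127.0.0.1:29500',  # assumed local rendezvous address
        world_size=num_proc,
        rank=rank,
    )
    torch.cuda.set_device(rank)
    fun()

def multi_proc_run(num_proc, fun):
    # Launch fun in num_proc worker processes (run inline if only one).
    if num_proc > 1:
        mp.spawn(_worker, args=(num_proc, fun), nprocs=num_proc)
    else:
        fun()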
Example #4
              format(len(model_dict), len(state_dict)))
        print('{} pretrain keys load successfully.'.format(
            len(pretrained_dict)))
        not_loaded_keys = [
            k for k in state_dict.keys() if k not in pretrained_dict.keys()
        ]
        # Join with a separator; the old '%s'-repetition form crashed on an empty list.
        print(', '.join(not_loaded_keys))
    model_dict.update(pretrained_dict)
    ms.load_state_dict(model_dict)
    #ms.load_state_dict(checkpoint["model_state"])
    # Load the optimizer state (commonly not done when fine-tuning)
    if optimizer:
        optimizer.load_state_dict(checkpoint["optimizer_state"])
    #return checkpoint["epoch"]
    return checkpoint


if __name__ == '__main__':
    print(sys.argv)
    config.load_cfg_fom_args("Extract feature.")
    config.assert_and_infer_cfg()
    cfg.freeze()
    total_card = cfg.INFER.TOTAL_NUM
    assert total_card > 0, 'cfg.INFER.TOTAL_NUM should be larger than 0.'
    assert cfg.INFER.CUT_NUM <= total_card, "cfg.INFER.CUT_NUM should be <= cfg.INFER.TOTAL_NUM."
    if total_card == 1:
        main(INFER_DIR)
    else:
        main_multicard(INFER_DIR, cfg.INFER.CUT_NUM, cfg.INFER.TOTAL_NUM)
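
Example #4 is the tail of a checkpoint-loading helper (load_checkpoint). The core pattern it implements, keeping only the checkpoint entries whose keys also exist in the model and then loading the merged dict, can be sketched on its own as follows. The 'model_state' and 'optimizer_state' keys follow the snippet; the function name load_checkpoint_partial and the shape check are assumptions.

import torch

def load_checkpoint_partial(path, model, optimizer=None):
    # Load a checkpoint and keep only the entries the current model can accept.
    checkpoint = torch.load(path, map_location='cpu')
    state_dict = checkpoint['model_state']
    model_dict = model.state_dict()
    pretrained_dict = {
        k: v for k, v in state_dict.items()
        if k in model_dict and v.size() == model_dict[k].size()
    }
    model_dict.update(pretrained_dict)
    model.load_state_dict(model_dict)
    # Optimizer state is commonly skipped when fine-tuning.
    if optimizer is not None and 'optimizer_state' in checkpoint:
        optimizer.load_state_dict(checkpoint['optimizer_state'])
    return checkpoint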