Example #1
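Builds an AnalysisConfig for Paddle's native inference API: it loads the model from separate __model__/__params__ files, bypasses the feed/fetch ops, refers to inputs by name, and enables the built-in profiler.
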
from paddle.fluid.core import AnalysisConfig  # Paddle 1.x native inference API


def set_config():
    config = AnalysisConfig("")
    # Point the config at the serialized program and parameter files.
    config.set_model("model/__model__", "model/__params__")
    # Bypass the feed/fetch ops (required for the zero-copy input/output API).
    config.switch_use_feed_fetch_ops(False)
    # Refer to inputs by name rather than by position.
    config.switch_specify_input_names(True)
    # Turn on the built-in profiler.
    config.enable_profile()

    return config
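
A minimal usage sketch for the returned config, assuming the Paddle 1.x paddle.fluid.core API that Example #2 below also uses:

from paddle.fluid.core import create_paddle_predictor

config = set_config()
predictor = create_paddle_predictor(config)  # builds a predictor from the prepared config
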
Example #2
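End-to-end CPU inference for a TDM-style model (TdmInferNet): build the config, create the predictor, prepare the first-layer node ids and mask for the tree, then stream batches from the test files through the predictor and log each batch's output.
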
import os

import numpy as np

from paddle.fluid.core import AnalysisConfig, create_paddle_predictor  # Paddle 1.x native inference API

# parse_args, TdmInferNet, TDMDataset, data2tensor, tdm_input and logger are
# helpers defined elsewhere in the project.


def main():
    """Predictor main"""
    args = parse_args()

    # CPU-only inference with profiling enabled; MKL-DNN is left disabled.
    config = AnalysisConfig(args.model_files_path)
    config.disable_gpu()
    config.enable_profile()
    # config.enable_mkldnn()
    config.set_cpu_math_library_num_threads(args.cpu_num)

    predictor = create_paddle_predictor(config)

    # Tile the first-layer node ids of the TDM tree to batch size and pair
    # them with an all-zero mask of the same shape.
    tdm_model = TdmInferNet(args)
    first_layer_node = tdm_model.first_layer_node
    first_layer_nums = len(first_layer_node)
    first_layer_node = np.array(first_layer_node)
    first_layer_node = first_layer_node.reshape((1, -1)).astype('int64')
    first_layer_node = first_layer_node.repeat(args.batch_size, axis=0)
    first_layer_mask = (np.zeros(
        (args.batch_size, first_layer_nums))).astype('int64')

    # Batched reader over every file in the test directory.
    file_list = [
        str(args.test_files_path) + "/%s" % x
        for x in os.listdir(args.test_files_path)
    ]
    test_reader = TDMDataset().infer_reader(file_list, args.batch_size)

    for batch_id, data in enumerate(test_reader()):
        # Convert the batch to the input embedding tensor, assemble the
        # predictor inputs, and run inference.
        input_emb = data2tensor(data)
        inputs = tdm_input(input_emb, first_layer_node, first_layer_mask)
        outputs = predictor.run(inputs)

        # The first output tensor holds the inferred items for this batch.
        output = outputs[0]
        output_data = output.as_ndarray()

        logger.info("TEST --> batch: {} infer_item {}".format(
            batch_id, output_data))
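
When run as a script, the entry point would typically be guarded in the usual way (a sketch, assuming this function is the module entry point):

if __name__ == "__main__":
    main()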