def test_model_inference():
    """
    Test model end2end inference.

    Runs classification over the full prepared data set, writes the
    predictions to a CSV file, and checks that the output file exists and
    has one row per input data row. Cleans up the model and output files.
    """
    data_path = path.join(get_data_real_path(),
                          'np_semantic_segmentation_prepared_data.csv')
    output_path = path.join(get_data_real_path(),
                            'np_semantic_segmentation_output.csv')
    model_path = path.join(get_data_real_path(),
                           'np_semantic_segmentation.prm')
    num_epochs = 200
    callback_args = {}
    # backend registers itself globally; the returned handle is not needed
    gen_backend(batch_size=10)
    print_stats = False
    # ratio of 1 -> the entire file is used for inference
    data_set = NpSemanticSegData(data_path, train_to_test_ratio=1)
    results = classify_collocation(data_set, model_path, num_epochs,
                                   callback_args)
    if print_stats and (data_set.is_y_labels is not None):
        y_labels = extract_y_labels(data_path)
        print_evaluation(y_labels, results.argmax(1))
    write_results(results.argmax(1), output_path)
    # the inference output file must have been written
    assert path.isfile(output_path)
    # output has one row per input row (input includes a header line)
    input_reader_list = read_csv_file_data(data_path)
    output_reader_list = read_csv_file_data(output_path)
    assert len(output_reader_list) == len(input_reader_list) - 1
    # clean up artifacts so repeated runs start fresh
    os.remove(model_path)
    os.remove(output_path)
def test_model_training():
    """
    Test model end2end training.

    Trains the MLP classifier on the prepared data (80/20 train/test
    split) and checks that the serialized model file was produced.
    """
    data_path = path.join(get_data_real_path(),
                          'np_semantic_segmentation_prepared_data.csv')
    model_path = path.join(get_data_real_path(),
                           'np_semantic_segmentation.prm')
    num_epochs = 200
    # backend registers itself globally; the returned handle is not needed
    gen_backend(batch_size=64)
    # load data sets from file
    data_set = NpSemanticSegData(data_path, train_to_test_ratio=0.8)
    # train the mlp classifier
    train_mlp_classifier(data_set, model_path, num_epochs, {})
    # the trained model must have been serialized to disk
    assert path.isfile(model_path)
def test_model_inference():
    """
    Test model end2end inference.

    Runs classification over the train split (full file, ratio 1) on a
    CPU backend, writes the predictions to a CSV file, and checks that
    the output exists and has one row per input data row. Cleans up the
    model and output files.
    """
    data_path = path.join(get_data_real_path(),
                          'np_semantic_segmentation_prepared_data.csv')
    output_path = path.join(get_data_real_path(),
                            'np_semantic_segmentation_output.csv')
    model_path = path.join(get_data_real_path(),
                           'np_semantic_segmentation.prm')
    num_epochs = 200
    callback_args = {}
    gen_backend(batch_size=64, backend='cpu')
    # ratio of 1 -> the entire file lands in the train split
    data_set = NpSemanticSegData(data_path, train_to_test_ratio=1)
    results = classify_collocation(data_set.train_set, model_path, num_epochs,
                                   callback_args)
    write_results(results, output_path)
    # the inference output file must have been written
    assert path.isfile(output_path)
    # output has one row per input row (input includes a header line)
    input_reader_list = read_csv_file_data(data_path)
    output_reader_list = read_csv_file_data(output_path)
    assert len(output_reader_list) == len(input_reader_list) - 1
    # clean up artifacts so repeated runs start fresh
    os.remove(model_path)
    os.remove(output_path)
# Example #4
# 0
if __name__ == "__main__":
    # build and run the command line parser
    arg_parser = argparse.ArgumentParser()
    arg_parser.set_defaults(epochs=200)
    arg_parser.add_argument(
        '--data', help='prepared data CSV file path',
        type=validate_existing_filepath)
    arg_parser.add_argument(
        '--model', help='path to the trained model file',
        type=validate_existing_filepath)
    arg_parser.add_argument(
        '--print_stats', action='store_true', default=False,
        help='print evaluation stats for the model predictions - if '
        'your data has tagging')
    arg_parser.add_argument(
        '--output', help='path to location for inference output file',
        type=validate_parent_exists)
    cli_args = arg_parser.parse_args()
    # resolve file locations from the parsed arguments
    csv_path = absolute_path(cli_args.data)
    trained_model_path = absolute_path(cli_args.model)
    results_path = absolute_path(cli_args.output)
    # classify the held-out test split of the data set
    corpus = NpSemanticSegData(csv_path)
    predictions = classify_collocation(corpus.test_set, trained_model_path,
                                       cli_args.epochs)
    # optionally report evaluation metrics when gold labels exist
    if cli_args.print_stats and (corpus.is_y_labels is not None):
        print_evaluation(corpus.test_set_y, predictions)
    write_results(predictions, results_path)
# Example #5
# 0
            writer.writerow([result])
    print("Results of inference saved in {0}".format(output))


if __name__ == "__main__":
    # build and run the command line parser (neon-style parser also
    # provides backend/callback arguments such as callback_args)
    cli = NeonArgparser()
    cli.set_defaults(epochs=200)
    cli.add_argument('--data', help='prepared data CSV file path',
                     type=validate_existing_filepath)
    cli.add_argument('--model', help='path to the trained model file',
                     type=validate_existing_filepath)
    cli.add_argument('--print_stats', action='store_true', default=False,
                     help='print evaluation stats for the model predictions - if '
                     'your data has tagging')
    cli.add_argument('--output', help='path to location for inference output file',
                     type=validate_parent_exists)
    cli_args = cli.parse_args()
    # resolve file locations from the parsed arguments
    csv_path = absolute_path(cli_args.data)
    trained_model_path = absolute_path(cli_args.model)
    results_path = absolute_path(cli_args.output)
    # generate backend
    gen_backend(batch_size=10)
    # ratio of 1 -> run inference over the entire file
    corpus = NpSemanticSegData(csv_path, train_to_test_ratio=1)
    predictions = classify_collocation(corpus, trained_model_path,
                                       cli_args.epochs, cli_args.callback_args)
    # optionally report evaluation metrics when gold labels exist
    if cli_args.print_stats and (corpus.is_y_labels is not None):
        print_evaluation(extract_y_labels(csv_path), predictions.argmax(1))
    write_results(predictions.argmax(1), results_path)