Example #1
def test_fake_pings():
    """Another similarly-fragile test.
    It generates C++ for pings_test.yaml, comparing it byte-for-byte
    with an expected output C++ file `pings_test_output_cpp`.
    Expect it to be fragile.
    To generate a new expected output file, edit t/c/g/metrics_index.py,
    comment out all other ping yamls, and add one for
    t/c/g/pytest/pings_test.yaml. Run `mach build` (it'll fail). Copy
    objdir/t/c/g/GleanPings.h over pings_test_output_cpp.
    (Don't forget to undo your edits to t/c/g/metrics_index.py)
    """

    options = {"allow_reserved": False}
    input_files = [Path(path.join(path.dirname(__file__), "pings_test.yaml"))]

    all_objs = parser.parse_objects(input_files, options)
    assert not util.report_validation_errors(all_objs)
    assert not lint.lint_metrics(all_objs.value, options)

    output_fd = io.StringIO()
    cpp.output_cpp(all_objs.value, output_fd, options)

    with open(path.join(path.dirname(__file__), "pings_test_output_cpp"), "r") as file:
        EXPECTED_CPP = file.read()
    assert output_fd.getvalue() == EXPECTED_CPP


def test_all_metric_types():
    """Honestly, this is a pretty bad test.
    It generates C++ for a given test metrics.yaml and compares it byte-for-byte
    with an expected output C++ file.
    Expect it to be fragile.
    To generate new expected output files, set `UPDATE_EXPECT=1` when running the test suite:

    UPDATE_EXPECT=1 mach test toolkit/components/glean/pytest
    """

    options = {"allow_reserved": False}
    input_files = [
        Path(path.join(path.dirname(__file__), "metrics_test.yaml"))
    ]

    all_objs, options = run_glean_parser.parse_with_options(
        input_files, options)

    output_fd = io.StringIO()
    cpp.output_cpp(all_objs, output_fd, options)

    expect(
        path.join(path.dirname(__file__), "metrics_test_output_cpp"),
        output_fd.getvalue(),
    )
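
The `expect` helper used above is not shown in this snippet. A minimal sketch of what such a helper might look like, assuming only the `UPDATE_EXPECT=1` behaviour described in the docstring (the actual in-tree helper may differ):

import os


def expect(expected_path, actual):
    # When UPDATE_EXPECT=1 is set, rewrite the expected-output file in place
    # instead of comparing against it.
    if os.environ.get("UPDATE_EXPECT") == "1":
        with open(expected_path, "w") as file:
            file.write(actual)
    with open(expected_path, "r") as file:
        assert actual == file.read()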
Example #3
def test_all_metric_types():
    """Honestly, this is a pretty bad test.
    It generates C++ for a given test metrics.yaml and compares it byte-for-byte
    with an expected output C++ file.
    Expect it to be fragile.
    To generate a new expected output file, copy the test yaml over the one in t/c/g,
    run mach build, then copy the C++ output from objdir/t/c/g/.
    """

    options = {"allow_reserved": False}
    input_files = [
        Path(path.join(path.dirname(__file__), "metrics_test.yaml"))
    ]

    all_objs = parser.parse_objects(input_files, options)
    assert not util.report_validation_errors(all_objs)
    assert not lint.lint_metrics(all_objs.value, options)

    output_fd = io.StringIO()
    cpp.output_cpp(all_objs.value, output_fd, options)

    with open(path.join(path.dirname(__file__), "metrics_test_output_cpp"),
              "r") as file:
        EXPECTED_CPP = file.read()
    assert output_fd.getvalue() == EXPECTED_CPP


def test_fake_pings():
    """Another similarly-fragile test.
    It generates C++ for pings_test.yaml, comparing it byte-for-byte
    with an expected output C++ file `pings_test_output_cpp`.
    Expect it to be fragile.
    To generate new expected output files, set `UPDATE_EXPECT=1` when running the test suite:

    UPDATE_EXPECT=1 mach test toolkit/components/glean/pytest
    """

    options = {"allow_reserved": False}
    input_files = [Path(path.join(path.dirname(__file__), "pings_test.yaml"))]

    all_objs, options = run_glean_parser.parse_with_options(
        input_files, options)

    output_fd = io.StringIO()
    cpp.output_cpp(all_objs, output_fd, options)

    expect(path.join(path.dirname(__file__), "pings_test_output_cpp"),
           output_fd.getvalue())
Example #5
def cpp_metrics(output_fd, _metrics_index, *args):
    # Parse the metrics/pings yamls named in `args` and write the generated
    # C++ to `output_fd`; the metrics index argument is unused here.
    all_objs, options = parse(args)
    cpp.output_cpp(all_objs, output_fd, options)
Example #6
def cpp_metrics(output_fd, *args):
    # Skip the leading DEPS_LEN arguments, keeping only the input yaml paths,
    # then parse those inputs and write the generated C++ to `output_fd`.
    args = args[DEPS_LEN:]
    all_objs, options = parse(args)
    cpp.output_cpp(all_objs, output_fd, options)
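
A hedged usage sketch for the wrapper above, assuming (as the slicing suggests) that `DEPS_LEN` counts leading dependency arguments the build system prepends before the input yaml paths; the dependency names, the yaml path, and the implied DEPS_LEN of 2 are purely illustrative:

import io
from pathlib import Path

output_fd = io.StringIO()
# Illustrative call: two leading dependency entries (assuming DEPS_LEN == 2)
# followed by the actual input yaml path.
cpp_metrics(output_fd, "dep_1", "dep_2", Path("metrics.yaml"))
print(output_fd.getvalue())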