Example #1
def test_all_metric_types():
    """Honestly, this is a pretty bad test.
    It generates C++ for a given test metrics.yaml and compares it byte-for-byte
    with an expected output C++ file.
    Expect it to be fragile.
    To generate a new expected output file, copy the test yaml over the one in t/c/g,
    run mach build, then copy the C++ output from objdir/t/c/g/.
    """

    options = {"allow_reserved": False}
    input_files = [
        Path(path.join(path.dirname(__file__), "metrics_test.yaml"))
    ]

    all_objs = parser.parse_objects(input_files, options)
    assert not util.report_validation_errors(all_objs)
    assert not lint.lint_metrics(all_objs.value, options)

    output_fd = io.StringIO()
    js.output_js(all_objs.value, output_fd, options)

    with open(path.join(path.dirname(__file__), "metrics_test_output_js"),
              "r") as file:
        EXPECTED_JS = file.read()
    assert output_fd.getvalue() == EXPECTED_JS
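These listings omit their imports. A plausible header for Examples #1 and #2, assuming the glean_parser library plus an in-tree `js` output module (the module layout is an assumption; it is not shown in the source), would be:

import io
from os import path
from pathlib import Path

from glean_parser import lint, parser, util

import js  # assumed: the in-tree backend that emits the C++ for the JS API

Examples #3 through #6 additionally assume `run_glean_parser`, `expect`, `parse`, and `DEPS_LEN` from the surrounding build scripts.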
Example #2
def test_fake_pings():
    """Another similarly-fragile test.
    It generates C++ for pings_test.yaml, comparing it byte-for-byte
    with an expected output C++ file `pings_test_output_js`.
    Expect it to be fragile.
    To generate a new expected output file, edit t/c/g/metrics_index.py,
    comment out all other ping yamls, and add one for
    t/c/g/pytest/pings_test.yaml. Run `mach build` (it'll fail). Copy
    objdir/t/c/g/GleanJSPingsLookup.h over pings_test_output_js.
    (Don't forget to undo your edits to t/c/g/metrics_index.py)
    """

    options = {"allow_reserved": False}
    input_files = [Path(path.join(path.dirname(__file__), "pings_test.yaml"))]

    all_objs = parser.parse_objects(input_files, options)
    assert not util.report_validation_errors(all_objs)
    assert not lint.lint_metrics(all_objs.value, options)

    output_fd = io.StringIO()
    js.output_js(all_objs.value, output_fd, options)

    with open(path.join(path.dirname(__file__), "pings_test_output_js"),
              "r") as file:
        EXPECTED_JS = file.read()
    assert output_fd.getvalue() == EXPECTED_JS
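The pings_test.yaml input itself isn't shown in this listing. For orientation only, a minimal ping definition under the glean_parser pings schema looks roughly like the following; the ping name, bug number, and URLs are illustrative stand-ins, not the test's actual contents:

# Illustrative stand-in for pings_test.yaml (hypothetical contents).
PINGS_TEST_YAML = """
$schema: moz://mozilla.org/schemas/glean/pings/2-0-0

test-ping:
  description: An illustrative test ping.
  include_client_id: false
  bugs:
    - https://bugzilla.mozilla.org/show_bug.cgi?id=1234567
  data_reviews:
    - https://example.com/data-review
  notification_emails:
    - nobody@example.com
"""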
Example #3
def test_fake_pings():
    """Another similarly-fragile test.
    It generates C++ for pings_test.yaml, comparing it byte-for-byte
    with an expected output C++ file `pings_test_output_js`.
    Expect it to be fragile.
    To generate new expected output files, set `UPDATE_EXPECT=1` when running the test suite:

    UPDATE_EXPECT=1 mach test toolkit/components/glean/pytest
    """

    options = {"allow_reserved": False}
    input_files = [Path(path.join(path.dirname(__file__), "pings_test.yaml"))]

    all_objs, options = run_glean_parser.parse_with_options(input_files, options)

    output_fd = io.StringIO()
    js.output_js(all_objs, output_fd, options)

    expect(
        path.join(path.dirname(__file__), "pings_test_output_js"), output_fd.getvalue()
    )
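The `expect` helper used above isn't shown in this listing. A minimal sketch of how such a helper could implement the `UPDATE_EXPECT=1` flow the docstring describes, assuming a byte-for-byte comparison and that the helper rewrites the expected file on demand (this is not the actual in-tree implementation):

import os

def expect(file_path, actual):
    # When UPDATE_EXPECT=1 is set, regenerate the expected-output file
    # instead of failing the comparison.
    if os.environ.get("UPDATE_EXPECT") == "1":
        with open(file_path, "w") as file:
            file.write(actual)
    # Compare byte-for-byte against the (possibly just-updated) file.
    with open(file_path, "r") as file:
        assert actual == file.read()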
Example #4
def test_all_metric_types():
    """Honestly, this is a pretty bad test.
    It generates C++ for a given test metrics.yaml and compares it byte-for-byte
    with an expected output C++ file.
    Expect it to be fragile.
    To generate new expected output files, set `UPDATE_EXPECT=1` when running the test suite:

    UPDATE_EXPECT=1 mach test toolkit/components/glean/pytest
    """

    options = {"allow_reserved": False}
    input_files = [Path(path.join(path.dirname(__file__), "metrics_test.yaml"))]

    all_objs, options = run_glean_parser.parse_with_options(input_files, options)

    output_fd = io.StringIO()
    js.output_js(all_objs, output_fd, options)

    expect(
        path.join(path.dirname(__file__), "metrics_test_output_js"),
        output_fd.getvalue(),
    )
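`run_glean_parser.parse_with_options` is likewise not shown here. Comparing Examples #3 and #4 with Examples #1 and #2 suggests it bundles the parse, validate, and lint steps; a sketch under that assumption (not the actual in-tree code):

def parse_with_options(input_files, options):
    # Parse the yaml inputs, then fail fast on validation or lint errors,
    # mirroring the explicit steps in Examples #1 and #2.
    all_objs = parser.parse_objects(input_files, options)
    assert not util.report_validation_errors(all_objs)
    assert not lint.lint_metrics(all_objs.value, options)
    return all_objs.value, options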
Example #5
def js_metrics(output_fd, _metrics_index, *args):
    # Parse the input files in `args`, then write the generated C++ for
    # the JS API to `output_fd`. `_metrics_index` is accepted but unused,
    # as its leading underscore signals.
    all_objs, options = parse(args)
    js.output_js(all_objs, output_fd, options)
Example #6
def js_metrics(output_fd, *args):
    # The leading DEPS_LEN entries of `args` are presumably dependency
    # entries prepended by the build system rather than inputs; strip
    # them so only the input files reach `parse`.
    args = args[DEPS_LEN:]
    all_objs, options = parse(args)
    js.output_js(all_objs, output_fd, options)
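For context, a hypothetical invocation of the Example #6 form. `DEPS_LEN` and every file name below are illustrative; in reality the build system supplies these values:

import io

DEPS_LEN = 2  # illustrative: number of leading dependency arguments

output_fd = io.StringIO()
# The first DEPS_LEN arguments are tracked dependencies; the remainder
# are the yaml inputs that `parse` consumes.
js_metrics(output_fd, "run_glean_parser.py", "metrics_index.py",
           "metrics.yaml", "pings.yaml")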