Example #1
def fix_coverage(from_path, to_path):
    coverage_data = CoverageData()
    # Keep the original data file as a backup while it is rewritten.
    os.rename('.coverage', '.coverage.orig')
    coverage_data.read_file('.coverage.orig')
    merge_coverage(coverage_data, from_path, to_path)
    coverage_data.write_file('.coverage')
    os.remove('.coverage.orig')
Example #2
    def test_read_write_lines(self):
        covdata1 = CoverageData()
        covdata1.add_lines(LINES_1)
        covdata1.write_file("lines.dat")

        covdata2 = CoverageData()
        covdata2.read_file("lines.dat")
        self.assert_lines1_data(covdata2)
Example #3
    def test_read_write_arcs(self):
        covdata1 = CoverageData()
        covdata1.add_arcs(ARCS_3)
        covdata1.write_file("arcs.dat")

        covdata2 = CoverageData()
        covdata2.read_file("arcs.dat")
        self.assert_arcs3_data(covdata2)
Example #4
    def test_read_write_arcs(self):
        covdata1 = CoverageData()
        covdata1.add_arcs(ARCS_3)
        covdata1.write_file("arcs.dat")

        covdata2 = CoverageData()
        covdata2.read_file("arcs.dat")
        self.assert_arcs3_data(covdata2)
Example #5
    def test_read_write_lines(self):
        covdata1 = CoverageData()
        covdata1.add_lines(LINES_1)
        covdata1.write_file("lines.dat")

        covdata2 = CoverageData()
        covdata2.read_file("lines.dat")
        self.assert_lines1_data(covdata2)
Example #6
    def test_read_write_lines(self):
        covdata1 = CoverageData()
        covdata1.set_lines(LINES_1)
        covdata1.write_file("lines.dat")

        covdata2 = CoverageData()
        covdata2.read_file("lines.dat")
        self.assert_line_counts(covdata2, SUMMARY_1)
        self.assert_measured_files(covdata2, MEASURED_FILES_1)
        self.assertCountEqual(covdata2.lines("a.py"), A_PY_LINES_1)
        self.assertEqual(covdata2.run_infos(), [])
Example #7
def fix_paths(site_pkg_dir, cov_data_file):
    site_pkg_dir = os.path.abspath(site_pkg_dir)

    # Map files measured under the site-packages directory back to the
    # current working directory.
    paths = PathAliases()
    paths.add(site_pkg_dir, '.')

    old_coverage_data = CoverageData()
    old_coverage_data.read_file(cov_data_file)

    new_coverage_data = CoverageData()
    new_coverage_data.update(old_coverage_data, paths)

    new_coverage_data.write_file(cov_data_file)
Example #8
    def test_read_write_arcs(self):
        covdata1 = CoverageData()
        covdata1.set_arcs(ARCS_3)
        covdata1.write_file("arcs.dat")

        covdata2 = CoverageData()
        covdata2.read_file("arcs.dat")
        self.assert_line_counts(covdata2, SUMMARY_3)
        self.assert_measured_files(covdata2, MEASURED_FILES_3)
        self.assertCountEqual(covdata2.lines("x.py"), X_PY_LINES_3)
        self.assertCountEqual(covdata2.arcs("x.py"), X_PY_ARCS_3)
        self.assertCountEqual(covdata2.lines("y.py"), Y_PY_LINES_3)
        self.assertCountEqual(covdata2.arcs("y.py"), Y_PY_ARCS_3)
        self.assertEqual(covdata2.run_infos(), [])
Example #9
def pickle2json(infile, outfile):
    """Convert a coverage.py 3.x pickle data file to a 4.x JSON data file."""
    try:
        old_read_raw_data = CoverageData._read_raw_data
        CoverageData._read_raw_data = pickle_read_raw_data

        covdata = CoverageData()

        with open(infile, 'rb') as inf:
            covdata.read(inf)

        covdata.write_file(outfile)
    finally:
        CoverageData._read_raw_data = old_read_raw_data
Example #10
    def test_combining_from_files(self):
        covdata1 = CoverageData()
        covdata1.add_lines(LINES_1)
        os.makedirs('cov1')
        covdata1.write_file('cov1/.coverage.1')

        covdata2 = CoverageData()
        covdata2.add_lines(LINES_2)
        os.makedirs('cov2')
        covdata2.write_file('cov2/.coverage.2')

        # This data won't be included.
        covdata_xxx = CoverageData()
        covdata_xxx.add_arcs(ARCS_3)
        covdata_xxx.write_file('.coverage.xxx')
        covdata_xxx.write_file('cov2/.coverage.xxx')

        covdata3 = CoverageData()
        self.data_files.combine_parallel_data(
            covdata3, data_paths=['cov1', 'cov2/.coverage.2'])

        self.assert_line_counts(covdata3, SUMMARY_1_2)
        self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
        self.assert_doesnt_exist("cov1/.coverage.1")
        self.assert_doesnt_exist("cov2/.coverage.2")
        self.assert_exists(".coverage.xxx")
        self.assert_exists("cov2/.coverage.xxx")
Example #11
    def test_combining_from_files(self):
        covdata1 = CoverageData()
        covdata1.add_lines(LINES_1)
        os.makedirs('cov1')
        covdata1.write_file('cov1/.coverage.1')

        covdata2 = CoverageData()
        covdata2.add_lines(LINES_2)
        os.makedirs('cov2')
        covdata2.write_file('cov2/.coverage.2')

        # This data won't be included.
        covdata_xxx = CoverageData()
        covdata_xxx.add_arcs(ARCS_3)
        covdata_xxx.write_file('.coverage.xxx')
        covdata_xxx.write_file('cov2/.coverage.xxx')

        covdata3 = CoverageData()
        self.data_files.combine_parallel_data(covdata3, data_paths=['cov1', 'cov2/.coverage.2'])

        self.assert_line_counts(covdata3, SUMMARY_1_2)
        self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
        self.assert_doesnt_exist("cov1/.coverage.1")
        self.assert_doesnt_exist("cov2/.coverage.2")
        self.assert_exists(".coverage.xxx")
        self.assert_exists("cov2/.coverage.xxx")
Example #12
    def test_combining_from_different_directories(self):
        covdata1 = CoverageData()
        covdata1.add_line_data(DATA_1)
        os.makedirs('cov1')
        covdata1.write_file('cov1/.coverage.1')

        covdata2 = CoverageData()
        covdata2.add_line_data(DATA_2)
        os.makedirs('cov2')
        covdata2.write_file('cov2/.coverage.2')

        covdata3 = CoverageData()
        covdata3.combine_parallel_data(data_dirs=[
            'cov1/',
            'cov2/',
            ])

        self.assert_summary(covdata3, SUMMARY_1_2)
        self.assert_measured_files(covdata3, MEASURED_FILES_1_2)
Example #13
def combine(data_paths, output_file):
    try:
        if CoverageData.combine_parallel_data:
            # use the old API, version 3.6 and 3.7.X
            data = CoverageData(output_file)
            data = coverage3x_combine(data_paths, data)
            data.write()
    except AttributeError:
        # Newer versions have better support for combining files,
        # and the method combine_parallel_data() has been moved
        # to the new CoverageDataFiles class.
        # See https://bitbucket.org/ned/coveragepy/pull-requests/62
        from coverage.data import CoverageDataFiles

        data = CoverageData()

        dataf = CoverageDataFiles()
        dataf.combine_parallel_data(data, data_paths=data_paths)

        data.write_file(output_file)
Example #14
    def test_debug_main(self):
        covdata1 = CoverageData()
        covdata1.add_lines(LINES_1)
        covdata1.write_file(".coverage")
        debug_main([])

        covdata2 = CoverageData()
        covdata2.add_arcs(ARCS_3)
        covdata2.add_file_tracers({"y.py": "magic_plugin"})
        covdata2.add_run_info(version="v3.14", chunks=["z", "a"])
        covdata2.write_file("arcs.dat")

        covdata3 = CoverageData()
        covdata3.write_file("empty.dat")
        debug_main(["arcs.dat", "empty.dat"])

        expected = {
            ".coverage": {
                "lines": {
                    "a.py": [1, 2],
                    "b.py": [3],
                },
            },
            "arcs.dat": {
                "arcs": {
                    "x.py": [[-1, 1], [1, 2], [2, 3], [3, -1]],
                    "y.py": [[-1, 17], [17, 23], [23, -1]],
                },
                "file_tracers": {
                    "y.py": "magic_plugin"
                },
                "runs": [
                    {
                        "chunks": ["z", "a"],
                        "version": "v3.14",
                    },
                ],
            },
            "empty.dat": {},
        }
        pieces = re.split(r"(?m)-+ ([\w.]+) -+$", self.stdout())
        for name, json_out in zip(pieces[1::2], pieces[2::2]):
            json_got = json.loads(json_out)
            canonicalize_json_data(json_got)
            self.assertEqual(expected[name], json_got)
Example #15
    def test_debug_main(self):
        covdata1 = CoverageData()
        covdata1.set_lines(LINES_1)
        covdata1.write_file(".coverage")
        debug_main([])

        covdata2 = CoverageData()
        covdata2.set_arcs(ARCS_3)
        covdata2.set_file_tracers({"y.py": "magic_plugin"})
        covdata2.add_run_info(version="v3.14", chunks=["z", "a"])
        covdata2.write_file("arcs.dat")

        covdata3 = CoverageData()
        covdata3.write_file("empty.dat")
        debug_main(["arcs.dat", "empty.dat"])

        expected = {
            ".coverage": {
                "lines": {
                    "a.py": [1, 2],
                    "b.py": [3],
                },
            },
            "arcs.dat": {
                "arcs": {
                    "x.py": [[-1, 1], [1, 2], [2, 3], [3, -1]],
                    "y.py": [[-1, 17], [17, 23], [23, -1]],
                },
                "file_tracers": {"y.py": "magic_plugin"},
                "runs": [
                    {
                        "chunks": ["z", "a"],
                        "version": "v3.14",
                    },
                ],
            },
            "empty.dat": {"lines": {}},
        }
        pieces = re.split(r"(?m)-+ ([\w.]+) -+$", self.stdout())
        for name, json_out in zip(pieces[1::2], pieces[2::2]):
            json_got = json.loads(json_out)
            canonicalize_json_data(json_got)
            self.assertEqual(expected[name], json_got)
Example #16
#!/usr/bin/env python
# By Danilo J. S. Bellini
"""
Script to update the file paths stored in a single coverage data file

Syntax: python fixpath.py DATA_FILE OLD_PATH NEW_PATH
"""
import sys, os
from coverage.data import CoverageData, PathAliases

coverage_file_name, old_path, new_path = sys.argv[1:]

pa = PathAliases()
pa.add(old_path, new_path)

old_cd = CoverageData()
old_cd.read_file(coverage_file_name)

new_cd = CoverageData()
try:
    new_cd.update(old_cd, pa)
except AttributeError: # Coverage 3.7.1 (CPython 3.2)
    namer = lambda f: os.path.abspath(os.path.expanduser(pa.map(f)))
    new_cd.lines = dict((namer(f), d) for f, d in old_cd.lines.items())
    new_cd.arcs = dict((namer(f), d) for f, d in old_cd.arcs.items())
new_cd.write_file(coverage_file_name)
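
A hedged usage note for the script above, following the syntax given in its docstring (the data file and paths below are purely illustrative):

    python fixpath.py .coverage /old/build/dir /home/user/project

The script rewrites the data file in place, so keep a copy of the original file if you still need the old paths.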
Example #17
def fix_coverage(from_path, to_path):
    coverage_data = CoverageData()
    os.rename('.coverage', '.coverage.orig')
    coverage_data.read_file('.coverage.orig')
    merge_coverage(coverage_data, from_path, to_path)
    coverage_data.write_file('.coverage')
Example #18
def fix_coverage(from_path, to_path):
    coverage_data = CoverageData()
    os.rename(".coverage", ".coverage.orig")
    coverage_data.read_file(".coverage.orig")
    merge_coverage(coverage_data, from_path, to_path)
    coverage_data.write_file(".coverage")