def test_determine_stable_flow(self, tmdata):
    """Walk the stable-determination pipeline end to end:
    mtime check -> checksum check -> fingerprint check.

    Fix: the hit halves of the checksum and fingerprint splits were bound
    to named locals (`checksum_hits`, `hits`) but never used — bind to `_`.
    """
    tmdata.write("test_1.py::test_1", {"test_1.py": encode_lines(["FINGERPRINT1"])})
    filenames_fingerprints = tmdata.filenames_fingerprints
    assert tuple(filenames_fingerprints[0]) == ("test_1.py", 1.0, "100", 1, 0)
    # Everything misses the mtime check, so all records flow to the checksum stage.
    _, mtime_misses = split_filter(
        tmdata.source_tree, check_mtime, filenames_fingerprints
    )
    _, checksum_misses = split_filter(
        tmdata.source_tree, check_checksum, mtime_misses
    )
    # Index 3 of each miss record is the fingerprint/file id.
    changed_files = {checksum_miss[3] for checksum_miss in checksum_misses}
    assert changed_files == {1}
    changed_file_data = tmdata.db.get_changed_file_data(changed_files)
    assert changed_file_data == [
        ("test_1.py", "test_1.py::test_1", encode_lines(["FINGERPRINT1"]), 1, 0)
    ]
    # The file content did not change, so the fingerprint check must miss too.
    _, misses = split_filter(tmdata.source_tree, check_fingerprint, changed_file_data)
    assert misses == changed_file_data
def test_write_data2(self, tmdata):
    """Fingerprints written per node must round-trip through
    db.get_changed_file_data, including after one node is overwritten."""
    tmdata.determine_stable()
    node_data = {
        "test_1.py::test_1": {
            "test_1.py": encode_lines(["F1"]),
            "a.py": encode_lines(["FA"]),
        },
        "test_1.py::test_2": {
            "test_1.py": encode_lines(["F1"]),
            "a.py": encode_lines(["FA2"]),
        },
        "test_1.py::test_3": {"a.py": encode_lines(["FA"])},
    }
    tmdata.sync_db_fs_nodes(set(node_data.keys()))
    for nodeid, fingerprints in node_data.items():
        tmdata.write(nodeid, fingerprints)

    # Collect everything the DB reports as changed into {node: {file: fp}}.
    observed = defaultdict(dict)
    for filename, node_name, fingerprint, _, _ in tmdata.db.get_changed_file_data(
        set(range(10))
    ):
        observed[node_name][filename] = fingerprint
    assert observed == node_data

    # Overwrite one node with a changed fingerprint for a.py and re-check;
    # the same accumulator is reused on purpose, mirroring a second sweep.
    updated = {
        "a.py": encode_lines(["FA2"]),
        "test_1.py": encode_lines(["F1"]),
    }
    node_data["test_1.py::test_1"] = updated
    tmdata.write("test_1.py::test_1", dict(updated))
    for filename, node_name, fingerprint, _, _ in tmdata.db.get_changed_file_data(
        set(range(10))
    ):
        observed[node_name][filename] = fingerprint
    assert observed == node_data
def test_write_get_changed_file_data(self, tmdata):
    """A single failed write comes back as exactly one ChangedFileData row."""
    fingerprint = encode_lines(["FINGERPRINT1"])
    tmdata.write("test_1.py::test_1", {"test_1.py": fingerprint}, failed=1)
    expected = ChangedFileData(
        "test_1.py", "test_1.py::test_1", fingerprint, 1, 1
    )
    assert tmdata.db.get_changed_file_data({1}) == [expected]
def test_one_failed_in_fingerprints(self, tmdata):
    """sum(failed) aggregates per file: one failing + one passing node == 1."""
    outcomes = {"test_1.py::test_1": True, "test_1.py::test_2": False}
    for nodeid, failed in outcomes.items():
        tmdata.write(
            nodeid,
            {"test_1.py": encode_lines(["FINGERPRINT1"])},
            failed=failed,
        )
    assert tmdata.filenames_fingerprints[0]["sum(failed)"] == 1
def sync_db_fs_nodes(self, retain):
    """Reconcile stored nodes with the currently collected set.

    Newly collected nodes living in python files get a fake "0match"
    record; nodes no longer collected are deleted from the DB.
    """
    keep = retain.union(set(self.stable_nodeids))
    with self.connection as con:
        # Insert placeholder records for nodes the DB has never seen.
        for nodeid in keep - set(self.all_nodes):
            if not is_python_file(home_file(nodeid)):
                continue
            nodedata = self.make_nodedata(
                {home_file(nodeid): None}, encode_lines(["0match"])
            )
            self.write_node_data(nodeid, nodedata, fake=True)
        # Drop every node that was neither retained nor stable.
        con.executemany(
            """
            DELETE FROM node
            WHERE environment = ? AND name = ?""",
            [
                (self.environment, nodeid)
                for nodeid in set(self.all_nodes) - keep
            ],
        )
def test_garbage_retain_stable(self, tmdata):
    """Stable nodes survive garbage collection even with an empty retain set."""
    tmdata.write("test_1.py::test_1", {"test_1.py": encode_lines(["FINGERPRINT1"])})
    tmdata.determine_stable()
    tmdata.sync_db_fs_nodes(retain=set())
    remaining = set(tmdata.all_nodes)
    assert remaining == {"test_1.py::test_1"}
def test_double_dedent(self):
    """A gap mark must swallow a doubly-nested body at end of file.

    NOTE(review): the indentation inside the fixture strings appears
    collapsed to single spaces in the checked-in text, which would flatten
    the nesting this test is meant to exercise; reconstructed as 4/8-space
    nesting — confirm against upstream.
    """
    lines = [
        "def a():",
        "    def b():",
        "        1",
        "        2",
    ]
    fingerprints = ["def a():", GAP_MARKS[0]]
    assert file_has_lines(lines, encode_lines(fingerprints))
def test_remove_unused_fingerprints(self, tmdata):
    """Fingerprints orphaned by node deletion are purged from the DB."""
    tmdata.write("n1", {"test_a.py": encode_lines(["1"])})
    # Pretend the source file vanished, making the node unstable.
    tmdata.source_tree.cache["test_a.py"] = None
    tmdata.determine_stable()
    tmdata.sync_db_fs_nodes(set())
    tmdata.db.remove_unused_fingerprints()
    rows = tmdata.connection.execute("SELECT * FROM fingerprint").fetchall()
    assert rows == []
def make_nodedata(self, measured_files, default=None):
    """Build a ``{filename: fingerprint}`` mapping for *measured_files*.

    When *default* is provided it is used verbatim for every file;
    otherwise the fingerprint is computed from the file's source, and
    files missing under ``self.rootdir`` are silently skipped.

    Fix: the original tested ``if default:`` — a falsy-but-provided
    default (e.g. ``[]`` or ``""``) would wrongly fall through to the
    computed-fingerprint branch. Test explicitly against ``None``.
    """
    result = {}
    for filename, covered in measured_files.items():
        if default is not None:
            result[filename] = default
        elif os.path.exists(os.path.join(self.rootdir, filename)):
            module = self.source_tree.get_file(filename)
            result[filename] = encode_lines(
                create_fingerprints(module.lines, module.special_blocks, covered)
            )
    return result
def test_filenames_fingerprints(self, tmdata):
    """filenames_fingerprints aggregates per-file stats incl. sum(failed).

    Fix: ``encode_lines`` takes an iterable of lines; the original passed
    the bare string "FINGERPRINT1", encoding it character by character —
    every sibling test wraps the line in a list.
    """
    tmdata.write(
        "test_1.py::test_1",
        {"test_1.py": encode_lines(["FINGERPRINT1"])},
        failed=1,
    )
    fps = tuple(tmdata.filenames_fingerprints[0])
    assert fps == (
        "test_1.py",
        1.0,
        "100",
        1,
        1,
    )
def test_collect_garbage(self, tmdata):
    """After the only source file changes, garbage collection drops all nodes."""
    tmdata.write("test_1", {"test_1.py": encode_lines(["FINGERPRINT1"])})
    # Replace the cached module with a different mtime/checksum/fingerprint,
    # so the previously written node is no longer stable.
    replacement = Module(source_code="")
    replacement.mtime = 1100.0
    replacement.checksum = 600
    replacement.fingerprint = "FINGERPRINT2"
    tmdata.source_tree.cache["test_1.py"] = replacement
    tmdata.determine_stable()
    assert set(tmdata.all_nodes)
    tmdata.sync_db_fs_nodes(retain=set())
    tmdata.close_connection()
    # A fresh data instance must see an empty node set.
    reopened = CoreTestmonData("")
    reopened.determine_stable()
    assert set(reopened.all_nodes) == set()
def create_report(self, phases_count, duration, node_name, node_module, node_class=None):
    """Fabricate a report containing the first *phases_count* of the
    setup/call/teardown phases, splitting *duration* evenly across them,
    and write it for the node."""
    phase_names = ["setup", "call", "teardown"]
    qualified = f"{node_class}.{node_name}" if node_class else node_name
    location = (node_module, 1, qualified)
    phase_duration = duration / phases_count
    result = {
        phase_names[i]: self.create_report_phase(phase_duration, location)
        for i in range(phases_count)
    }
    self.write(node_name, {node_module: encode_lines([""])}, result)
def sync_db_fs_nodes(self, retain):
    """Reconcile stored nodes with the currently collected set: insert a
    fake "0match" fingerprint record for each new python-file node and
    delete nodes that are no longer collected.

    Fix: ``encode_lines`` was given the bare string "0match", which
    encodes it character by character; the sibling implementation of this
    method passes ``["0match"]`` — made consistent.
    """
    collected = retain.union(set(self.stable_nodeids))
    with self.db as db:
        add = collected - set(self.all_nodes)
        for nodeid in add:
            if is_python_file(home_file(nodeid)):
                db.insert_node_fingerprints(
                    nodeid=nodeid,
                    fingerprint_records=(
                        {
                            "filename": home_file(nodeid),
                            "fingerprint": checksums_to_blob(
                                encode_lines(["0match"])
                            ),
                            "mtime": None,
                            "checksum": None,
                        },
                    ),
                )
        db.delete_nodes(set(self.all_nodes) - collected)
def stop_and_save(self, testmon_data: TestmonData, nodeid, result):
    """Stop coverage collection and persist the node's fingerprint
    records, appending the synthetic libraries record."""
    self.stop()
    # Sub-process coverage files, when present, must be merged first.
    if hasattr(self, "sub_cov_file"):
        self.cov.combine()
    covered = get_measured_relfiles(self.rootdir, self.cov, home_file(nodeid))
    records = testmon_data.node_data2records(
        testmon_data.node_data_from_cov(covered)
    )
    libraries_record = {
        "filename": LIBRARIES_KEY,
        "checksum": testmon_data.libraries,
        "mtime": None,
        "fingerprint": checksums_to_blob(
            encode_lines([testmon_data.libraries])
        ),
    }
    records.append(libraries_record)
    testmon_data.db.insert_node_fingerprints(nodeid, records, result)
def test_double_dedent_with_remainder(self):
    """Gap mark swallows the nested body; the trailing top-level line
    must still be matched explicitly.

    NOTE(review): fixture-string indentation appears collapsed to single
    spaces in the checked-in text; reconstructed as 4/8-space nesting —
    confirm against upstream.
    """
    lines = [
        "def a():",
        "    def b():",
        "        1",
        "        2",
        "3",
    ]
    fingerprints = ["def a():", GAP_MARKS[0], "3"]
    assert file_has_lines(lines, encode_lines(fingerprints))
def test_indent_eof1(self):
    """A gap mark matches an indented body running to end of file.

    NOTE(review): fixture-string indentation appears collapsed to a single
    space in the checked-in text; reconstructed as 4 spaces — confirm
    against upstream.
    """
    lines = ["def a():", "    2"]
    fingerprints = ["def a():", GAP_MARKS[0]]
    assert file_has_lines(lines, encode_lines(fingerprints))
def test_2line_dedent(self):
    """A gap mark swallows a two-line indented body before a dedent.

    NOTE(review): fixture-string indentation appears collapsed to single
    spaces in the checked-in text; reconstructed as 4 spaces — confirm
    against upstream.
    """
    lines = ["def a():", "    2", "    2.5", "3"]
    fingerprints = ["def a():", GAP_MARKS[0], "3"]
    assert file_has_lines(lines, encode_lines(fingerprints))
def test_identical(self):
    """A file trivially matches a fingerprint that is exactly its own lines."""
    content = ["1"]
    assert file_has_lines(content, encode_lines(["1"]))
def test_indent_eof2(self):
    """The last gap mark may match remaining top-level lines."""
    source = ["raise Exception()", "print(1)"]
    pattern = ["raise Exception()", GAP_MARKS[-1]]
    assert file_has_lines(source, encode_lines(pattern))
def test_write_read_nodedata(self, tmdata):
    """A written node becomes visible via all_nodes and all_files."""
    tmdata.write("test_a.py::n1", {"test_a.py": encode_lines(["1"])})
    assert tmdata.all_nodes == {"test_a.py::n1": {}}
    assert tmdata.all_files == {"test_a.py"}