def test_mismatch(self):
    """
    Check mismatching base and mask data cancel out.
    """
    blank = from_io({
        "version": self.version,
    })
    base = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1],
        "builds": [
            {"id": "origin:1", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
        ],
        "tests": [
            {"id": "origin:1", "origin": "origin", "build_id": "origin:1"},
        ],
    })
    mask = from_io({
        "version": self.version,
        "revisions": [IO_REVISION2],
        "builds": [
            {"id": "origin:2", "origin": "origin",
             "revision_id": IO_REVISION2["id"]},
        ],
        "tests": [
            {"id": "origin:2", "origin": "origin", "build_id": "origin:2"},
        ],
    })
    # Nothing overlaps between base and mask, so no objects should remain
    self.assertEqual(apply_mask(base, mask), blank)
def test_root_object(self):
    """
    Check root objects are not removed.
    """
    # Root (revision) objects have no parents and must always survive,
    # whether there is one of them or several
    for revision_list in ([{"id": "origin:1"}],
                          [{"id": "origin:1"}, {"id": "origin:2"}]):
        input_data = from_io({
            "version": self.version,
            "revisions": revision_list,
        })
        expected_output_data = {
            "version": self.version,
            "revisions": input_data["revisions"].copy(),
        }
        self.assertEqual(remove_orphans(input_data), expected_output_data)
def test_empty(self):
    """
    Check removing orphans from empty data is a noop.
    """
    empty = {
        "version": self.version,
    }
    # from_io() is called separately for the input and the expected value
    # so the two cannot alias each other
    self.assertEqual(remove_orphans(from_io(empty)), from_io(empty))
def match_new_io(base_io, new_io, match_map=None, copy=True):
    """
    Generate notifications for new I/O data being added to base I/O data.

    Args:
        base_io:    The existing (base) I/O data being added to, and
                    possibly referred to by the arriving (new) I/O data.
                    Used to complete the data being notified about. May
                    already contain the new I/O data, which will be
                    considered "new" regardless.
        new_io:     The arriving (new) I/O data being added to the existing
                    (base) data. May refer to the existing I/O data, and may
                    already be part of it - anything in the new data is
                    considered "new" regardless.
        match_map:  The map of subscription match functions: a dictionary
                    with OO data object list names and a list of tuples,
                    each containing the name of the subscription and a match
                    function. Each function must accept an object from the
                    corresponding object list in OO data, and return an
                    iterable producing
                    kcidb.monitor.output.NotificationMessage objects, or
                    None, which is equivalent to an empty iterable. The
                    default is a dictionary of matching functions from all
                    kcidb.subscriptions.* modules, where each is called
                    "match_<OBJ_NAME>", where "<OBJ_NAME>" is an object list
                    name without the "s" ending.
        copy:       True, if the data should be copied before
                    referencing/modifying. False, if the data could be
                    referenced and modified in-place. Optional, default is
                    True.

    Returns:
        The list of notifications: kcidb.monitor.output.Notification
        objects.
    """
    assert LIGHT_ASSERTS or io.schema.is_valid(base_io)
    assert LIGHT_ASSERTS or io.schema.is_valid(new_io)
    # Merge the new data into the base (*copy* the new data - needed below)
    combined_io = io.merge(base_io, [new_io],
                           copy_target=copy, copy_sources=True)
    # Convert the merged and the new data to OO representation
    combined_oo = oo.from_io(combined_io, copy=False)
    new_oo = oo.from_io(new_io, copy=copy)
    # Drop objects whose parents are missing from the merged data
    rooted_oo = oo.remove_orphans(combined_oo)
    # Delist everything except loaded or modified objects, keep references
    masked_oo = oo.apply_mask(rooted_oo, new_oo)
    # Produce notifications for what's left
    return match_oo(masked_oo, match_map)
def test_match(self):
    """
    Check fully-matching base and mask data stay unchanged.
    """
    def make_io():
        # A fresh copy of the I/O data each time, so the two conversions
        # cannot share (and possibly alias) anything
        return {
            "version": self.version,
            "revisions": [
                {"id": "origin:1"},
            ],
            "builds": [
                {"id": "origin:1", "revision_id": "origin:1"},
            ],
            "tests": [
                {"id": "origin:1", "build_id": "origin:1"},
            ],
        }
    base = from_io(make_io())
    mask = from_io(make_io())
    # A fully-matching mask must leave the base untouched
    self.assertEqual(apply_mask(base, mask), base)
def describe_main():
    """Execute the kcidb-describe command-line tool"""
    parser = misc.ArgumentParser(
        description='kcidb-describe - Output descriptions of report objects'
    )
    parser.add_argument(
        'obj_list_name',
        metavar='LIST',
        choices={n for n in io.schema.LATEST.tree if n},
        help='Name of the object list to output (%(choices)s)'
    )
    parser.add_argument(
        'ids',
        metavar='ID',
        nargs='*',
        default=[],
        help='ID of the object to limit output to'
    )
    args = parser.parse_args()
    # Upgrade stdin data to the latest schema and convert to OO form
    oo_data = oo.from_io(io.schema.upgrade(json.load(sys.stdin), copy=False))
    objects = oo_data.get(args.obj_list_name, {})
    # With no IDs given, describe every object in the list
    for requested_id in args.ids or objects:
        if requested_id not in objects:
            continue
        sys.stdout.write(objects[requested_id].describe())
        # NUL-terminate each description so consumers can split them
        sys.stdout.write("\x00")
    return 0
def describe_main():
    """Execute the kcidb-describe command-line tool"""
    sys.excepthook = misc.log_and_print_excepthook
    parser = misc.ArgumentParser(
        description='kcidb-describe - Output descriptions of report objects'
    )
    parser.add_argument(
        'obj_list_name', metavar='LIST',
        choices={n for n in io.schema.LATEST.tree if n},
        help='Name of the object list to output (%(choices)s)'
    )
    parser.add_argument(
        'ids', metavar='ID', nargs='*', default=[],
        help='ID of the object to limit output to'
    )
    args = parser.parse_args()
    # Process each JSON document arriving on standard input
    for io_data in misc.json_load_stream_fd(sys.stdin.fileno()):
        io_data = io.schema.upgrade(io.schema.validate(io_data), copy=False)
        objects = oo.from_io(io_data).get(args.obj_list_name, {})
        # With no IDs given, describe every object in the list
        for requested_id in args.ids or objects:
            if requested_id not in objects:
                continue
            sys.stdout.write(objects[requested_id].describe())
            # NUL-terminate each description so consumers can split them
            sys.stdout.write("\x00")
        # Push the descriptions out before waiting for more input
        sys.stdout.flush()
def test_build(self):
    """
    Check single-build I/O data is converted correctly.
    """
    def make_build_attrs():
        # A fresh copy of the build's attributes each time, so the input
        # and the expected output cannot share (and alias) any data
        return {
            "architecture": "aarch64",
            "command": "make -j30 INSTALL_MOD_STRIP=1 targz-pkg",
            "compiler": "aarch64-linux-gnu-gcc (GCC) 9.2.1 20190827 (Red Hat Cross 9.2.1-1)",
            "config_name": "fedora",
            "duration": 237.0,
            "id": "redhat:678223",
            "origin": "redhat",
            "input_files": [],
            "log_url": "https://cki-artifacts.s3.amazonaws.com/datawarehouse/2020/03/03/469720/build_aarch64.log",
            "misc": {
                "job_id": 678223,
                "pipeline_id": 469720
            },
            "output_files": [],
            "revision_id": "1254e88b4fc1470d152f494c3590bb6a33ab33eb",
            "start_time": "2020-03-03T17:52:02.370000+00:00",
            "valid": True,
        }

    io_data = {
        "version": self.version,
        "builds": [make_build_attrs()],
    }
    # The OO object carries the same attributes plus (unlinked) references
    expected_oo_data = {
        "version": self.version,
        "builds": {
            "redhat:678223": Build({}, dict(make_build_attrs(),
                                            revision_=None, tests_={})),
        },
    }
    self.assertEqual(from_io(io_data), expected_oo_data)
def test_empty_non_empty(self):
    """
    Check empty/non-empty base/mask are handled correctly.
    """
    blank = from_io({
        "version": self.version,
    })
    loaded = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1],
    })
    # An empty base or an empty mask always yields empty output
    self.assertEqual(apply_mask(blank, blank), blank)
    self.assertEqual(apply_mask(loaded, blank), blank)
    self.assertEqual(apply_mask(blank, loaded), blank)
    # A fully-matching mask leaves the base unchanged
    self.assertEqual(apply_mask(loaded, loaded), loaded)
def test_empty(self):
    """
    Check empty I/O data is converted correctly.
    """
    # Only the version should pass through unchanged
    converted = from_io({"version": self.version})
    self.assertEqual(converted, {"version": self.version})
def test_mixed(self):
    """
    Check mixed orphaned and linked objects are handled correctly.
    """
    # IDs mentioning "X" refer to (or descend from) missing parents
    input_data = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1],
        "builds": [
            {"id": "origin:1-1", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
            {"id": "origin:1-2", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
            {"id": "origin:X-1", "origin": "origin",
             "revision_id": IO_REVISION3["id"]},
            {"id": "origin:X-2", "origin": "origin",
             "revision_id": IO_REVISION3["id"]},
        ],
        "tests": [
            {"id": "origin:1-1-1", "origin": "origin",
             "build_id": "origin:1-1"},
            {"id": "origin:1-1-2", "origin": "origin",
             "build_id": "origin:1-1"},
            {"id": "origin:1-2-1", "origin": "origin",
             "build_id": "origin:1-2"},
            {"id": "origin:1-2-2", "origin": "origin",
             "build_id": "origin:1-2"},
            {"id": "origin:X-1-1", "origin": "origin",
             "build_id": "origin:X-1"},
            {"id": "origin:X-1-2", "origin": "origin",
             "build_id": "origin:X-1"},
            {"id": "origin:X-X-1", "origin": "origin",
             "build_id": "origin:X-X"},
            {"id": "origin:X-X-2", "origin": "origin",
             "build_id": "origin:X-X"},
        ],
    })
    # Only objects whose ancestry doesn't involve an "X" should survive
    expected_output_data = {"version": self.version}
    for obj_list_name in ("revisions", "builds", "tests"):
        expected_output_data[obj_list_name] = {
            obj_id: obj
            for obj_id, obj in input_data[obj_list_name].items()
            if "X" not in obj_id
        }
    self.assertEqual(remove_orphans(input_data), expected_output_data)
def test_revision(self):
    """
    Check single-revision I/O data is converted correctly.
    """
    # Note: the docstring previously said "single-build" - a copy-paste
    # error from test_build()
    rev_id = ("redhat:git://git.kernel.org/pub/scm/linux/kernel/git/rdma/"
              "rdma.git@5e29d1443c46b6ca70a4c940a67e8c09f05dcb7e")

    def make_revision_attrs():
        # A fresh copy of the revision's attributes each time, so the
        # input and the expected output cannot share (and alias) any data
        return {
            "contacts": ["*****@*****.**"],
            "discovery_time": "2020-03-02T15:16:15.790000+00:00",
            "git_repository_branch": "wip/jgg-for-next",
            "git_repository_commit_hash":
                "5e29d1443c46b6ca70a4c940a67e8c09f05dcb7e",
            "git_repository_url": "git://git.kernel.org/pub/scm/linux/kernel/git/rdma/rdma.git",
            "misc": {
                "pipeline_id": 467715
            },
            "id": rev_id,
            "patch_mboxes": [],
            "valid": True,
        }

    io_data = {
        "version": self.version,
        "revisions": [make_revision_attrs()],
    }
    # The OO object carries the same attributes plus an empty build map
    expected_oo_data = {
        "version": self.version,
        "revisions": {
            rev_id: Revision({}, dict(make_revision_attrs(), builds_={})),
        },
    }
    self.assertEqual(from_io(io_data), expected_oo_data)
def test_empty(self):
    """
    Check empty I/O data is converted correctly.
    """
    def make_version():
        # Fresh copies keep the input and the expected value independent
        return {"major": schema.LATEST.major, "minor": schema.LATEST.minor}

    self.assertEqual(from_io({"version": make_version()}),
                     {"version": make_version()})
def test_root_object(self):
    """
    Check root objects are not removed.
    """
    # Root (revision) objects have no parents and must always survive,
    # whether there is one of them or several
    for revision_list in ([IO_REVISION1], [IO_REVISION1, IO_REVISION2]):
        input_data = from_io({
            "version": self.version,
            "revisions": revision_list,
        })
        expected_output_data = {
            "version": self.version,
            "revisions": input_data["revisions"].copy(),
        }
        self.assertEqual(remove_orphans(input_data), expected_output_data)
def test_min(self):
    """Check minimal Notification functionality"""
    oo_data = oo.from_io({
        "version": self.version,
        "revisions": [
            {
                "contacts": ["*****@*****.**"],
                "discovery_time": "2020-03-02T15:16:15.790000+00:00",
                "git_repository_branch": "wip/jgg-for-next",
                "git_commit_hash":
                    "5e29d1443c46b6ca70a4c940a67e8c09f05dcb7e",
                "git_repository_url": "git://git.kernel.org/pub/scm/linux/kernel/git/rdma/rdma.git",
                "misc": {
                    "pipeline_id": 467715
                },
                "id": "5e29d1443c46b6ca70a4c940a67e8c09f05dcb7e",
                "origin": "origin",
                "patch_mboxes": [],
                "valid": True,
            },
        ],
    })
    revision = next(iter(oo_data["revisions"].values()))
    notification_message = NotificationMessage(
        ["*****@*****.**", "*****@*****.**"],
        "Revision detected: ",
        "We detected a new revision!\n\n",
        "id"
    )
    notification = Notification("revisions", revision,
                                "subscription", notification_message)
    # The ID encodes the subscription, list name, object ID and message ID
    self.assertEqual(
        notification.id,
        "subscription:revisions:"
        "NWUyOWQxNDQzYzQ2YjZjYTcwYTRjOTQwYTY3ZThjMDlmMDVkY2"
        "I3ZQ==:aWQ="
    )
    message = notification.render()
    self.assertIsInstance(message, email.message.EmailMessage)
    self.assertIsNone(message['From'])
    self.assertEqual(message['To'], "[email protected], [email protected]")
    self.assertEqual(message['X-KCIDB-Notification-ID'], notification.id)
    self.assertEqual(message['X-KCIDB-Notification-Message-ID'], "id")
    self.assertIn("Revision detected: ", message['Subject'])
    self.assertIn("We detected a new revision!", message.get_payload())
def test_partial_match(self):
    """
    Check partially-matching base and mask data work correctly.
    """
    def make_build(build_id, revision_id):
        # A minimal build object
        return {"id": build_id, "origin": "origin",
                "revision_id": revision_id}

    def make_test(test_id, build_id):
        # A minimal test object
        return {"id": test_id, "origin": "origin", "build_id": build_id}

    def pick(obj_map, *obj_ids):
        # Select a subset of an object map by ID
        return {obj_id: obj_map[obj_id] for obj_id in obj_ids}

    first = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1, IO_REVISION2],
        "builds": [
            make_build("origin:1-1", IO_REVISION1["id"]),
            make_build("origin:1-2", IO_REVISION1["id"]),
            make_build("origin:2-1", IO_REVISION2["id"]),
            make_build("origin:2-2", IO_REVISION2["id"]),
        ],
        # Two tests under every build
        "tests": [
            make_test(f"origin:{i}-{j}-{k}", f"origin:{i}-{j}")
            for i in (1, 2) for j in (1, 2) for k in (1, 2)
        ],
    })
    second = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1, IO_REVISION3],
        "builds": [
            make_build("origin:1-1", IO_REVISION1["id"]),
            make_build("origin:1-X", IO_REVISION1["id"]),
            make_build("origin:2-1", IO_REVISION3["id"]),
            make_build("origin:2-X", IO_REVISION3["id"]),
        ],
        # One matching ("-1") and one mismatching ("-X") test per build
        "tests": [
            make_test(f"origin:{i}-{j}-{k}", f"origin:{i}-{j}")
            for i in (1, 2) for j in (1, 2) for k in (1, "X")
        ],
    })
    # Matching objects and their ancestors survive
    expected_output = {
        "version": first["version"],
        "revisions": pick(first["revisions"],
                          IO_REVISION1["id"], IO_REVISION2["id"]),
        "builds": pick(first["builds"],
                       "origin:1-1", "origin:1-2",
                       "origin:2-1", "origin:2-2"),
        "tests": pick(first["tests"],
                      "origin:1-1-1", "origin:1-2-1",
                      "origin:2-1-1", "origin:2-2-1"),
    }
    self.assertEqual(apply_mask(first, second), expected_output)
    # Without tests, only the directly-matching builds remain
    del first["tests"]
    del second["tests"]
    expected_output = {
        "version": first["version"],
        "revisions": pick(first["revisions"],
                          IO_REVISION1["id"], IO_REVISION2["id"]),
        "builds": pick(first["builds"], "origin:1-1", "origin:2-1"),
    }
    self.assertEqual(apply_mask(first, second), expected_output)
    # Without builds, only the directly-matching revision remains
    del first["builds"]
    del second["builds"]
    expected_output = {
        "version": first["version"],
        "revisions": pick(first["revisions"], IO_REVISION1["id"]),
    }
    self.assertEqual(apply_mask(first, second), expected_output)
def test_min(self):
    """Check minimal matching"""
    artifacts = "https://cki-artifacts.s3.amazonaws.com/datawarehouse"

    def make_revision(origin, commit_hash):
        # A valid revision with the given origin, identified by its hash
        return {
            "contacts": ["*****@*****.**"],
            "discovery_time": "2020-03-02T15:16:15.790000+00:00",
            "git_repository_branch": "wip/jgg-for-next",
            "git_commit_hash": commit_hash,
            "git_repository_url": "git://git.kernel.org/pub/scm/linux/kernel/git/rdma/rdma.git",
            "misc": {
                "pipeline_id": 467715
            },
            "id": commit_hash,
            "origin": origin,
            "patch_mboxes": [],
            "valid": True,
        }

    def make_build(origin, revision_id):
        # A valid build with the given origin, under the given revision
        return {
            "architecture": "aarch64",
            "command": "make -j30 INSTALL_MOD_STRIP=1 targz-pkg",
            "compiler": "aarch64-linux-gnu-gcc (GCC) 9.2.1 20190827 (Red Hat Cross 9.2.1-1)",
            "config_name": "fedora",
            "duration": 237.0,
            "id": origin + ":1",
            "origin": origin,
            "input_files": [],
            "log_url": artifacts + "/2020/03/03/469720/build_aarch64.log",
            "misc": {
                "job_id": 678223,
                "pipeline_id": 469720
            },
            "output_files": [],
            "revision_id": revision_id,
            "start_time": "2020-03-03T17:52:02.370000+00:00",
            "valid": True,
        }

    def make_test(origin):
        # A valid test run with the given origin
        return {
            "build_id": "redhat:679936",
            "description": "IOMMU boot test",
            "duration": 1847.0,
            "id": origin + ":1",
            "origin": origin,
            "output_files": [
                {"name": name,
                 "url": artifacts + "/2020/03/04/471145/" + name}
                for name in (
                    "x86_64_4_console.log",
                    "x86_64_4_IOMMU_boot_test_dmesg.log",
                    "x86_64_4_IOMMU_boot_test_resultoutputfile.log",
                    "x86_64_4_IOMMU_boot_test_taskout.log",
                )
            ],
            "environment": {
                "description": "meson-gxl-s905d-p230 in lab-baylibre",
                "misc": {
                    "device": "meson-gxl-s905d-p230",
                    "instance": "meson-gxl-s905d-p230-sea",
                    "lab": "lab-baylibre",
                    "mach": "amlogic",
                    "rootfs_url": "https://storage.kernelci.org/images/rootfs/buildroot/kci-2019.02-9-g25091c539382/arm64/baseline/rootfs.cpio.gz"
                }
            },
            "path": "redhat_iommu_boot",
            "start_time": "2020-03-04T21:30:57+00:00",
            "status": "ERROR",
            "waived": True,
        }

    # One object per list from the "test" origin (should match) and one
    # from the "non_test" origin (should not)
    oo_data = oo.from_io({
        "version": self.version,
        "revisions": [
            make_revision("non_test",
                          "5e29d1443c46b6ca70a4c940a67e8c09f05dcb7e"),
            make_revision("test",
                          "1254e88b4fc1470d152f494c3590bb6a33ab33eb"),
        ],
        "builds": [
            make_build("non_test",
                       "5e29d1443c46b6ca70a4c940a67e8c09f05dcb7e"),
            make_build("test",
                       "1254e88b4fc1470d152f494c3590bb6a33ab33eb"),
        ],
        "tests": [
            make_test("non_test"),
            make_test("test"),
        ],
    })
    notifications = subscriptions.match_oo(oo_data)
    # One notification per object list, for the "test" origin objects only
    self.assertEqual(len(notifications), 3)
    for notification in notifications:
        obj_list_name = notification.obj_list_name
        assert obj_list_name.endswith("s")
        obj_name = obj_list_name[:-1]
        self.assertIsInstance(notification, Notification)
        message = notification.render()
        self.assertIsNone(message['From'])
        self.assertEqual(message['To'], "*****@*****.**")
        self.assertEqual(message['X-KCIDB-Notification-Message-ID'],
                         obj_name)
        self.assertIn(f"Test {obj_name}: ", message['Subject'])
        self.assertIn(f"Test {obj_name} detected!\n\n",
                      message.get_payload())
def test_test(self):
    """
    Check single-test I/O data is converted correctly.
    """
    file_names = (
        "x86_64_4_console.log",
        "x86_64_4_IOMMU_boot_test_dmesg.log",
        "x86_64_4_IOMMU_boot_test_resultoutputfile.log",
        "x86_64_4_IOMMU_boot_test_taskout.log",
    )

    def make_output_file(name):
        # An output file entry pointing into the artifact store
        return {
            "name": name,
            "url": "https://cki-artifacts.s3.amazonaws.com/datawarehouse"
                   "/2020/03/04/471145/" + name,
        }

    def make_environment():
        # The environment the test ran in
        return {
            "description": "meson-gxl-s905d-p230 in lab-baylibre",
            "misc": {
                "device": "meson-gxl-s905d-p230",
                "instance": "meson-gxl-s905d-p230-sea",
                "lab": "lab-baylibre",
                "mach": "amlogic",
                "rootfs_url": "https://storage.kernelci.org/images/rootfs/buildroot/kci-2019.02-9-g25091c539382/arm64/baseline/rootfs.cpio.gz"
            }
        }

    def make_common_attrs():
        # Scalar attributes shared by the I/O and OO representations,
        # copied fresh each time to avoid aliasing
        return {
            "build_id": "redhat:679936",
            "description": "IOMMU boot test",
            "duration": 1847.0,
            "id": "redhat:107205807",
            "origin": "redhat",
            "path": "redhat_iommu_boot",
            "start_time": "2020-03-04T21:30:57+00:00",
            "status": "ERROR",
            "waived": True,
        }

    io_data = {
        "version": self.version,
        "tests": [
            dict(make_common_attrs(),
                 output_files=[make_output_file(n) for n in file_names],
                 environment=make_environment()),
        ],
    }
    # The OO object wraps the nested structures and adds an (unlinked)
    # build reference
    expected_oo_data = {
        "version": self.version,
        "tests": {
            "redhat:107205807": Test({}, dict(
                make_common_attrs(),
                output_files=[Node({}, make_output_file(n))
                              for n in file_names],
                environment=TestEnvironment({}, make_environment()),
                build_=None,
            )),
        },
    }
    self.assertEqual(from_io(io_data), expected_oo_data)
def test_linking(self):
    """
    Check objects are linked correctly.
    """
    io_data = {
        "version": self.version,
        "revisions": [IO_REVISION1, IO_REVISION2],
        "builds": [
            # Build under an existing revision
            {"revision_id": IO_REVISION1["id"],
             "id": "origin:1", "origin": "origin"},
            # Build under a missing revision
            {"revision_id": IO_REVISION3["id"],
             "id": "origin:2", "origin": "origin"},
        ],
        "tests": [
            # Test under an existing build
            {"build_id": "origin:1", "id": "origin:1", "origin": "origin"},
            # Test under a missing build
            {"build_id": "origin:non-existent",
             "id": "origin:2", "origin": "origin"},
        ],
    }
    # Build the expected objects with their "*_" link attributes
    revision1 = Revision({}, dict(IO_REVISION1, builds_={}))
    revision2 = Revision({}, dict(IO_REVISION2, builds_={}))
    build1 = Build({}, {"revision_id": IO_REVISION1["id"],
                        "id": "origin:1", "origin": "origin",
                        "revision_": revision1, "tests_": {}})
    build2 = Build({}, {"revision_id": IO_REVISION3["id"],
                        "id": "origin:2", "origin": "origin",
                        "revision_": None, "tests_": {}})
    revision1.builds_[build1.id] = build1
    test1 = Test({}, {"build_id": "origin:1", "build_": build1,
                      "id": "origin:1", "origin": "origin"})
    test2 = Test({}, {"build_id": "origin:non-existent", "build_": None,
                      "id": "origin:2", "origin": "origin"})
    build1.tests_[test1.id] = test1
    expected_oo_data = {
        "version": self.version,
        "revisions": {
            IO_REVISION1["id"]: revision1,
            IO_REVISION2["id"]: revision2,
        },
        "builds": {
            "origin:1": build1,
            "origin:2": build2,
        },
        "tests": {
            "origin:1": test1,
            "origin:2": test2,
        },
    }
    self.assertEqual(from_io(io_data), expected_oo_data)
def test_non_root_orphans(self):
    """
    Check non-root orphans are removed.
    """
    def check(io_data, expected):
        # Verify orphan removal produces the expected output
        self.assertEqual(remove_orphans(from_io(io_data)), expected)

    # Builds without their parent revisions must all be removed
    no_builds = {"version": self.version, "builds": {}}
    # Single build
    check({
        "version": self.version,
        "builds": [
            {"id": "origin:1", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
        ],
    }, no_builds)
    # Two builds
    check({
        "version": self.version,
        "builds": [
            {"id": "origin:1", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
            {"id": "origin:2", "origin": "origin",
             "revision_id": IO_REVISION2["id"]},
        ],
    }, no_builds)

    # Tests without their parent builds must all be removed
    no_tests = {"version": self.version, "tests": {}}
    # Single test
    check({
        "version": self.version,
        "tests": [
            {"id": "origin:1", "origin": "origin", "build_id": "origin:1"},
        ],
    }, no_tests)
    # Two tests
    check({
        "version": self.version,
        "tests": [
            {"id": "origin:1", "origin": "origin", "build_id": "origin:1"},
            {"id": "origin:2", "origin": "origin", "build_id": "origin:2"},
        ],
    }, no_tests)

    # Builds and tests together must all be removed too
    no_builds_or_tests = {
        "version": self.version,
        "builds": {},
        "tests": {},
    }
    # Tests disconnected from the builds
    check({
        "version": self.version,
        "builds": [
            {"id": "origin:1", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
            {"id": "origin:2", "origin": "origin",
             "revision_id": IO_REVISION2["id"]},
        ],
        "tests": [
            {"id": "origin:1", "origin": "origin", "build_id": "origin:3"},
            {"id": "origin:2", "origin": "origin", "build_id": "origin:4"},
        ],
    }, no_builds_or_tests)
    # Tests connected to the (orphaned) builds
    check({
        "version": self.version,
        "builds": [
            {"id": "origin:1", "origin": "origin",
             "revision_id": IO_REVISION1["id"]},
            {"id": "origin:2", "origin": "origin",
             "revision_id": IO_REVISION2["id"]},
        ],
        "tests": [
            {"id": "origin:1", "origin": "origin", "build_id": "origin:1"},
            {"id": "origin:2", "origin": "origin", "build_id": "origin:2"},
        ],
    }, no_builds_or_tests)
def test_non_root_linked(self):
    """
    Check non-root linked objects are preserved.
    """
    def make_build(build_id, revision_id):
        # A minimal build object
        return {"id": build_id, "origin": "origin",
                "revision_id": revision_id}

    def make_test(test_id, build_id):
        # A minimal test object
        return {"id": test_id, "origin": "origin", "build_id": build_id}

    # Revisions with linked builds only
    input_data = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1, IO_REVISION2],
        "builds": [
            make_build("origin:1-1", IO_REVISION1["id"]),
            make_build("origin:1-2", IO_REVISION1["id"]),
            make_build("origin:2-1", IO_REVISION2["id"]),
            make_build("origin:2-2", IO_REVISION2["id"]),
        ],
    })
    expected_output_data = {
        "version": self.version,
        "revisions": input_data["revisions"].copy(),
        "builds": input_data["builds"].copy(),
    }
    self.assertEqual(remove_orphans(input_data), expected_output_data)

    # Revisions with linked builds and tests (two tests per build)
    input_data = from_io({
        "version": self.version,
        "revisions": [IO_REVISION1, IO_REVISION2],
        "builds": [
            make_build("origin:1-1", IO_REVISION1["id"]),
            make_build("origin:1-2", IO_REVISION1["id"]),
            make_build("origin:2-1", IO_REVISION2["id"]),
            make_build("origin:2-2", IO_REVISION2["id"]),
        ],
        "tests": [
            make_test(f"origin:{i}-{j}-{k}", f"origin:{i}-{j}")
            for i in (1, 2) for j in (1, 2) for k in (1, 2)
        ],
    })
    expected_output_data = {
        "version": self.version,
        "revisions": input_data["revisions"].copy(),
        "builds": input_data["builds"].copy(),
        "tests": input_data["tests"].copy(),
    }
    self.assertEqual(remove_orphans(input_data), expected_output_data)