def test_tarballs_pre_extracted_tech_specific(self) -> None:
    """
    Test that tarballs that are pre-extracted and specified using a
    tech-specific setting work.

    The generic vlsi.technology.extracted_tarballs_dir is deliberately set
    to a bogus path so the assertion proves the tech-specific
    technology.dummy28.extracted_tarballs_dir setting takes precedence.
    """
    import hammer_config
    tech_dir, tech_dir_base = HammerToolTestHelpers.create_tech_dir("dummy28")
    tech_json_filename = os.path.join(tech_dir, "dummy28.tech.json")
    # Add defaults to specify tarball_dir.
    with open(os.path.join(tech_dir, "defaults.json"), "w") as f:
        f.write(json.dumps({
            "technology.dummy28.tarball_dir": tech_dir,
            "vlsi.technology.extracted_tarballs_dir": "/should/not/be/used",
            "technology.dummy28.extracted_tarballs_dir": tech_dir_base
        }))
    HammerToolTestHelpers.write_tech_json(tech_json_filename, self.add_tarballs)
    tech = self.get_tech(hammer_tech.HammerTechnology.load_from_dir("dummy28", tech_dir))
    tech.cache_dir = tech_dir
    database = hammer_config.HammerDatabase()
    database.update_technology(tech.get_config())
    HammerVLSISettings.load_builtins_and_core(database)
    tech.set_database(database)
    # output_func parameter renamed from "str" to avoid shadowing the builtin.
    outputs = tech.process_library_filter(pre_filts=[],
                                          filt=hammer_tech.filters.gds_filter,
                                          must_exist=False,
                                          output_func=lambda path, _: [path])
    # The GDS must be resolved under the tech-specific extraction dir.
    self.assertEqual(outputs, ["{0}/foobar.tar.gz/test.gds".format(tech_dir_base)])
    # Cleanup
    shutil.rmtree(tech_dir_base)
def test_tarballs_not_extracted(self) -> None:
    """
    Test that tarballs that are not pre-extracted work fine.

    With only tarball_dir configured (no extracted_tarballs_dir), the
    contents are expected under <tech_dir>/extracted/.
    """
    import hammer_config
    tech_dir, tech_dir_base = HammerToolTestHelpers.create_tech_dir(
        "dummy28")
    tech_json_filename = os.path.join(tech_dir, "dummy28.tech.json")
    # Add defaults to specify tarball_dir.
    with open(os.path.join(tech_dir, "defaults.json"), "w") as f:
        f.write(json.dumps({"technology.dummy28.tarball_dir": tech_dir}))
    HammerToolTestHelpers.write_tech_json(tech_json_filename, self.add_tarballs)
    tech = self.get_tech(
        hammer_tech.HammerTechnology.load_from_dir("dummy28", tech_dir))
    tech.cache_dir = tech_dir
    database = hammer_config.HammerDatabase()
    database.update_technology(tech.get_config())
    HammerVLSISettings.load_builtins_and_core(database)
    tech.set_database(database)
    # output_func parameter renamed from "str" to avoid shadowing the builtin.
    outputs = tech.process_library_filter(
        pre_filts=[],
        filt=hammer_tech.filters.gds_filter,
        must_exist=False,
        output_func=lambda path, _: [path])
    self.assertEqual(
        outputs,
        ["{0}/extracted/foobar.tar.gz/test.gds".format(tech_dir)])
    # Cleanup
    shutil.rmtree(tech_dir_base)
def test_installs_in_cache_dir(self) -> None:
    """
    Test that we can access files in the tech cache dir.
    Use case: A PDK file needs to be hacked by post_install_script
    """
    import hammer_config
    tech_dir, tech_dir_base = HammerToolTestHelpers.create_tech_dir(
        "dummy28")
    tech_json_filename = os.path.join(tech_dir, "dummy28.tech.json")
    # Declare an install rooted in the cache dir and a LEF library inside it.
    tech_json = {
        "name": "dummy",
        "installs": [{
            "path": "tech-dummy28-cache",
            "base var": ""  # means relative to tech dir
        }],
        "libraries": [{
            "lef file": "tech-dummy28-cache/tech.lef",
            "provides": [{
                "lib_type": "technology"
            }]
        }]
    }  # type: Dict[str, Any]
    with open(tech_json_filename, "w") as f:  # pylint: disable=invalid-name
        f.write(json.dumps(tech_json, cls=HammerJSONEncoder, indent=4))
    tech = self.get_tech(
        hammer_tech.HammerTechnology.load_from_dir("dummy28", tech_dir))
    tech.cache_dir = tech_dir
    database = hammer_config.HammerDatabase()
    database.update_technology(tech.get_config())
    HammerVLSISettings.load_builtins_and_core(database)
    tech.set_database(database)
    # output_func parameter renamed from "str" to avoid shadowing the builtin.
    outputs = tech.process_library_filter(
        pre_filts=[],
        filt=hammer_tech.filters.lef_filter,
        must_exist=False,
        output_func=lambda path, _: [path])
    # The cache-relative LEF path must resolve into the cache dir.
    self.assertEqual(outputs, ["{0}/tech.lef".format(tech.cache_dir)])
    # Cleanup
    shutil.rmtree(tech_dir_base)
def test_flat_makefile(self) -> None:
    """
    Test that a Makefile for a flat design is generated correctly.
    """
    tmpdir = tempfile.mkdtemp()
    proj_config = os.path.join(tmpdir, "config.json")
    settings = {
        "vlsi.core.technology": "nop",
        "vlsi.core.build_system": "make",
        "synthesis.inputs.top_module": "TopMod"
    }
    with open(proj_config, "w") as f:
        f.write(json.dumps(settings, cls=HammerJSONEncoder, indent=4))
    options = HammerDriverOptions(environment_configs=[],
                                  project_configs=[proj_config],
                                  log_file=os.path.join(tmpdir, "log.txt"),
                                  obj_dir=tmpdir)
    self.assertTrue(
        HammerVLSISettings.set_hammer_vlsi_path_from_environment(),
        "hammer_vlsi_path must exist")
    driver = HammerDriver(options)
    CLIDriver.generate_build_inputs(driver, lambda x: None)
    d_file = os.path.join(driver.obj_dir, "hammer.d")
    self.assertTrue(os.path.exists(d_file))
    with open(d_file, "r") as f:
        contents = f.readlines()
    targets = self._read_targets_from_makefile(contents)
    tasks = {"pcb", "syn", "par", "drc", "lvs"}
    expected_targets = tasks.copy()
    # Fixed: was `x is not "pcb"` — identity comparison against a string
    # literal, which only worked due to CPython string interning and raises
    # a SyntaxWarning on Python 3.8+. Use equality instead.
    expected_targets.update({"redo-" + x for x in tasks if x != "pcb"})
    expected_targets.update({
        os.path.join(tmpdir, x + "-rundir", x + "-output-full.json")
        for x in tasks
    })
    # syn and pcb have no generated input JSON.
    expected_targets.update({
        os.path.join(tmpdir, x + "-input.json")
        for x in tasks if x not in {"syn", "pcb"}
    })
    self.assertEqual(set(targets.keys()), set(expected_targets))
    # TODO at some point we should add more tests
    # Cleanup
    shutil.rmtree(tmpdir)
def setUp(self) -> None:
    # Every test in this case requires the HAMMER_VLSI path to be
    # resolvable from the environment; fail fast if it is not.
    path_configured = HammerVLSISettings.set_hammer_vlsi_path_from_environment()
    self.assertTrue(path_configured)
def setUp(self) -> None:
    # Verify the HAMMER_VLSI path is configured before each test runs.
    is_set = HammerVLSISettings.set_hammer_vlsi_path_from_environment()
    self.assertTrue(is_set)
def test_hier_makefile(self) -> None:
    """
    Test that a Makefile for a hierarchical design is generated correctly.
    """
    tmpdir = tempfile.mkdtemp()
    proj_config = os.path.join(tmpdir, "config.json")
    settings = {
        "vlsi.core.technology": "nop",
        "vlsi.core.build_system": "make",
        "vlsi.inputs.hierarchical.mode": "hierarchical",
        "vlsi.inputs.hierarchical.top_module": "TopMod",
        "vlsi.inputs.hierarchical.config_source": "manual",
        "vlsi.inputs.hierarchical.manual_modules": [{
            "TopMod": ["SubModA", "SubModB"]
        }],
        "vlsi.inputs.hierarchical.manual_placement_constraints": [
            {"TopMod": [
                {"path": "top", "type": "toplevel", "x": 0, "y": 0,
                 "width": 1234, "height": 7890,
                 "margins": {"left": 1, "top": 2, "right": 3, "bottom": 4}},
                {"path": "top/C", "type": "placement", "x": 2, "y": 102,
                 "width": 30, "height": 40},
                {"path": "top/B", "type": "hierarchical", "x": 10, "y": 30,
                 "master": "SubModB"},
                {"path": "top/A", "type": "hierarchical", "x": 200, "y": 120,
                 "master": "SubModA"}]},
            {"SubModA": [
                {"path": "a", "type": "toplevel", "x": 0, "y": 0,
                 "width": 100, "height": 200,
                 "margins": {"left": 0, "top": 0, "right": 0, "bottom": 0}}]},
            {"SubModB": [
                {"path": "b", "type": "toplevel", "x": 0, "y": 0,
                 "width": 340, "height": 160,
                 "margins": {"left": 0, "top": 0, "right": 0, "bottom": 0}}]}
        ]
    }
    with open(proj_config, "w") as f:
        f.write(json.dumps(settings, cls=HammerJSONEncoder, indent=4))
    options = HammerDriverOptions(environment_configs=[],
                                  project_configs=[proj_config],
                                  log_file=os.path.join(tmpdir, "log.txt"),
                                  obj_dir=tmpdir)
    self.assertTrue(
        HammerVLSISettings.set_hammer_vlsi_path_from_environment(),
        "hammer_vlsi_path must exist")
    driver = HammerDriver(options)
    CLIDriver.generate_build_inputs(driver, lambda x: None)
    d_file = os.path.join(driver.obj_dir, "hammer.d")
    self.assertTrue(os.path.exists(d_file))
    with open(d_file, "r") as f:
        contents = f.readlines()
    targets = self._read_targets_from_makefile(contents)
    mods = {"TopMod", "SubModA", "SubModB"}
    # pcb is a flat (non-per-module) action.
    expected_targets = {
        "pcb",
        os.path.join(tmpdir, "pcb-rundir", "pcb-output-full.json")
    }
    # Every per-module action gets a phony target, a redo- target and an
    # output JSON in its per-module rundir. The output file name keeps only
    # the leading tool name (e.g. sim-rtl -> sim-output-full.json,
    # power-par -> power-output-full.json).
    per_mod_actions = ["sim-rtl", "syn", "sim-syn", "par", "sim-par",
                       "power-par", "lvs", "drc"]
    for action in per_mod_actions:
        output_json = action.split("-")[0] + "-output-full.json"
        for mod in mods:
            expected_targets.add(action + "-" + mod)
            expected_targets.add("redo-" + action + "-" + mod)
            expected_targets.add(
                os.path.join(tmpdir, action + "-" + mod, output_json))
    # Only non-leafs get a syn-*-input.json target
    expected_targets.update({
        os.path.join(tmpdir, "syn-" + mod + "-input.json")
        for mod in mods if mod in {"TopMod"}
    })
    # All modules get input JSONs for the downstream actions.
    input_actions = ["sim-syn", "par", "sim-par", "power-par",
                     "power-sim-par", "lvs", "drc"]
    for action in input_actions:
        for mod in mods:
            expected_targets.add(
                os.path.join(tmpdir, action + "-" + mod + "-input.json"))
    self.assertEqual(set(targets.keys()), expected_targets)
    # TODO at some point we should add more tests
    # Cleanup
    shutil.rmtree(tmpdir)