def test_hierarchies(self):
    """Verify the class and file hierarchies."""
    # include/c_maths.h declares two free functions and no classes, so the
    # class hierarchy must compare equal to an empty one.
    expected_files = file_hierarchy({
        directory("include"): {
            file("c_maths.h"): {
                function("int", "cm_add"): parameters("int", "int"),
                function("int", "cm_sub"): parameters("int", "int")
            }
        }
    })
    compare_file_hierarchy(self, expected_files)
    compare_class_hierarchy(self, class_hierarchy({}))
class CPPWithSpaces(ExhaleTestCase):
    """
    Primary test class for project ``cpp with spaces``.
    """

    test_project = "cpp with spaces"
    """.. testproject:: cpp with spaces"""

    # A single header underneath directories whose names contain spaces --
    # exercising such paths is the entire point of this project.
    file_hierarchy_dict = {
        directory("include"): {
            directory("with spaces"): {
                file("with spaces.hpp"): {
                    namespace("with_spaces"): {
                        function("int", "value"): parameters()
                    }
                }
            }
        }
    }
    """The file hierarchy for this project."""

    def test_hierarchies(self):
        """
        Verify the file hierarchy.

        .. todo::

            Class hierarchy not tested here, need to revisit that part of the
            testing framework **as well as** stop emitting a "Class Hierarchy"
            on the root api page when it is empty.
        """
        expected = file_hierarchy(self.file_hierarchy_dict)
        compare_file_hierarchy(self, expected)

    @no_cleanup
    def test_build(self):
        """Verify that the sphinx build (with spaces in filenames) succeeds."""
        self.app.build()
class CPPNesting(ExhaleTestCase):
    """
    Primary test class for project ``cpp_nesting``.
    """

    test_project = "cpp_nesting"
    """.. testproject:: cpp_nesting"""

    # Expected file hierarchy: one top-level struct plus structs nested one
    # and two namespaces deep, each carrying a nested params struct / union.
    file_hierarchy_dict = {
        directory("include"): {
            file("top_level.hpp"): {
                clike("struct", "top_level"): {}
            },
            directory("nested"): {
                directory("one"): {
                    file("one.hpp"): {
                        namespace("nested"): {
                            clike("struct", "one"): {
                                clike("struct", "params"): {
                                    union("four_bytes"): {}
                                }
                            }
                        }
                    },
                },
                directory("two"): {
                    file("two.hpp"): {
                        namespace("nested"): {
                            clike("struct", "two"): {
                                clike("struct", "params"): {
                                    union("four_bytes"): {}
                                }
                            },
                            union("four_bytes"): {}
                        }
                    }
                },
                directory("dual_nested"): {
                    directory("one"): {
                        file("one.hpp"): {
                            namespace("nested"): {
                                namespace("dual_nested"): {
                                    clike("struct", "one"): {
                                        clike("struct", "params"): {
                                            union("four_bytes"): {}
                                        }
                                    }
                                }
                            }
                        }
                    },
                    directory("two"): {
                        file("two.hpp"): {
                            namespace("nested"): {
                                namespace("dual_nested"): {
                                    clike("struct", "two"): {
                                        clike("struct", "params"): {
                                            union("four_bytes"): {}
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    # Expected class hierarchy: the same compounds as above, organized by
    # namespace rather than by file.
    class_hierarchy_dict = {
        clike("struct", "top_level"): {},
        namespace("nested"): {
            clike("struct", "one"): {
                clike("struct", "params"): {
                    union("four_bytes"): {}
                }
            },
            clike("struct", "two"): {
                clike("struct", "params"): {
                    union("four_bytes"): {}
                }
            },
            union("four_bytes"): {},
            namespace("dual_nested"): {
                clike("struct", "one"): {
                    clike("struct", "params"): {
                        union("four_bytes"): {}
                    }
                },
                clike("struct", "two"): {
                    clike("struct", "params"): {
                        union("four_bytes"): {}
                    }
                }
            }
        }
    }

    def test_hierarchies(self):
        """Verify the class and file hierarchies."""
        compare_file_hierarchy(self, file_hierarchy(self.file_hierarchy_dict))
        compare_class_hierarchy(self, class_hierarchy(self.class_hierarchy_dict))

    @confoverrides(exhale_args={"doxygenStripFromPath": "../include"})
    def test_hierarchies_stripped(self):
        """
        Verify the class and file hierarchies with ``doxygenStripFromPath=../include``.

        .. todo:: this test is not supported yet
        """
        return  # TODO: Exhale should remove the include/ directory
        # Unreachable until the todo above is resolved.  Pop off the first
        # (and only) include/ directory without needing to know the actual
        # object that is its key.
        no_include = next(iter(self.file_hierarchy_dict.values()))
        compare_file_hierarchy(self, file_hierarchy(no_include))
        compare_class_hierarchy(self, class_hierarchy(self.class_hierarchy_dict))
def make_file_hierarchy_dict():
    """
    Return the :class:`python:dict` representing the file hierarchy.

    If :data:`RUN_ABSURD_TEST` is ``True``, :data:`ABSURD_DIRECTORY_PATH` will be
    incorporated in the returned dictionary.
    """
    absurdly_long_names_hpp_contents = {
        define("MAKE_IT_BIG"): {},
        clike("class", make_it_big("class")): {},
        clike("struct", make_it_big("struct")): {},
        function("std::string", make_it_big("function")): parameters(),
        enum(make_it_big("enum")): {},  # TODO: values("first", "second", "third"),
        namespace(make_it_big("namespace")): {
            variable("int", "value"): {}
        },
        define(make_it_big("define").upper()): {},
        variable("int", make_it_big("variable")): {},
        typedef(make_it_big("typedef"), "float"): {},
        union(make_it_big("union")): {}
    }
    if RUN_ABSURD_TEST:
        # The directory chain below is deliberately longer than 255
        # characters.  Build it from the inside out with a loop rather than
        # hand-writing ~50 levels of literal nesting; the resulting dict is
        # identical, component order matters and is preserved.
        absurd_component_names = (
            "structure", "that", "is", "longer", "than", "two", "hundred",
            "and", "fifty", "five", "characters", "long", "which", "is",
            "an", "absolutely", "and", "completely", "ridiculous", "thing",
            "to", "do", "and", "if", "you", "did", "this", "in", "the",
            "real", "world", "you", "put", "yourself", "comfortably", "in",
            "a", "position", "to", "be", "downsized", "and", "outta",
            "here", "as", "soul", "position", "would", "explain", "to",
            "you",
        )
        # Innermost payload: a single file declaring one function.
        absurd_directory_structure = {
            file("a_file.hpp"): {
                function("std::string", "extremely_nested"): parameters()
            }
        }
        for name in reversed(absurd_component_names):
            absurd_directory_structure = {
                directory(name): absurd_directory_structure
            }
        return {
            directory("include"): {
                file("absurdly_long_names.hpp"): absurdly_long_names_hpp_contents,
                directory("directory"): absurd_directory_structure
            }
        }
    return {
        directory("include"): {
            file("absurdly_long_names.hpp"): absurdly_long_names_hpp_contents
        }
    }
class CPPFortranMixed(ExhaleTestCase):
    """
    Primary test class for project ``cpp_fortran_mixed``.

    This test class exists to test explicit language lexer mappings via
    :data:`~exhale.configs.lexerMapping`.  Note that the C++ code and namespace
    ``convert`` is definitively chosen to be different than the name of the
    fortran module name (``conversions``).  If they are the same, some
    interesting things happen on the Doxygen side, because Doxygen will combine
    the namespaces.

    .. todo::

        These tests are incomplete (``from __future__ import config_objects``).
        The module level storage creates conflicts between the test cases when
        specifying the regular expressions :/  Currently there is only one test
        for a custom lexer mapping, and it only actually works because the name
        is alphabetically sorted after the default test (meaning pytest will run
        it after).

    .. todo::

        The full function validation does not support templates (yet).
        Templates at large in the testing framework need to be reworked,
        currently it is all boiler plate code with no actual validation...
    """

    test_project = "cpp_fortran_mixed"
    """.. testproject:: cpp_fortran_mixed"""

    file_hierarchy_dict = {
        directory("include"): {
            directory("convert"): {
                file("convert.hpp"): {
                    namespace("convert"): {
                        function("T", "to_degrees", template=["typename T"]):
                            parameters("T"),
                        function("T", "to_radians", template=["typename T"]):
                            parameters("T")
                    }
                }
            }
        },
        directory("src"): {
            file("conversions.f90"): {
                namespace("conversions"): {
                    variable("real(c_float)", "pi_s"): {},
                    variable("real(c_double)", "pi_d"): {},
                    variable("real(c_float)", "s_180"): {},
                    variable("real(c_double)", "d_180"): {},
                    # NOTE: function parameters in fortran are a little weird.
                    # 1. <type> has 'function', e.g. 'real(c_float) function'
                    # 2. Parameters are names, not types?
                    function("real(c_float) function", "degrees_to_radians_s"):
                        parameters("degrees_s"),
                    function("real(c_double) function", "degrees_to_radians_d"):
                        parameters("degrees_d"),
                    function("real(c_float) function", "radians_to_degrees_s"):
                        parameters("radians_s"),
                    function("real(c_double) function", "radians_to_degrees_d"):
                        parameters("radians_d")
                }
            }
        }
    }
    """The file hierarchy for this project."""

    def test_hierarchies(self):
        """
        Validate the class and file hierarchies.

        **Not tested on Windows**, maybe I'll care to fix it one day.
        `But it is not this day`__!

        __ https://youtu.be/EXGUNvIFTQw

        .. todo::

            Too much shared code in hierarchy comparisons, the class hierarchy
            for this project should be **empty**.  Need to enable asserting
            this fact (same for the ``cpp_nesting`` project).
        """
        # Guard clause: skip the comparison entirely on Windows.
        if platform.system() == "Windows":
            return
        compare_file_hierarchy(self, file_hierarchy(self.file_hierarchy_dict))
        # compare_class_hierarchy(self, class_hierarchy({}))

    def validate_pygments_lexers(self, exhale_root, node_map):
        """
        Validate nodes have the expected pygments lexer in their program listing file.

        **Parameters**

        ``exhale_root`` (:class:`exhale.graph.ExhaleRoot`)
            The graph root object for this project.

        ``node_map`` (:class:`python:dict`)
            A map of :class:`exhale.graph.ExhaleNode` objects to string values.
            The keys must be the nodes extracted from the ``exhale_root`` (as
            opposed to a testing type).  Each value should be a pygments lexer,
            such as ``"cpp"`` or ``"fortran"``.

            The generated program listing file will be parsed and an assert
            statement performed for equality to these specified values.
        """
        lexer_regex = re.compile(r"^.. code-block:: (.*)$")
        for file_node, expected_lexer in node_map.items():
            listing_path = os.path.join(
                exhale_root.root_directory, file_node.program_file)
            # Scan the generated listing for its first code-block directive.
            found_lexer = None
            with open(listing_path) as listing:
                for line in listing:
                    lexer_match = lexer_regex.match(line)
                    if lexer_match:
                        found_lexer = lexer_match.groups()[0]
                        break
            # Make sure we actually found a directive to check for this file.
            self.assertTrue(
                found_lexer is not None,
                "Did not find '.. code-block:: xxxx' in [{0}]".format(
                    listing_path))
            self.assertTrue(
                found_lexer == expected_lexer,
                "{0}: expected '{1}' but got '{2}' language lexer."
                .format(file_node.location, expected_lexer, found_lexer))

    def get_hpp_and_f90_nodes(self, exhale_root):
        """
        Return the two :class:`~exhale.graph.ExhaleNode` objects for this project.

        **Parameters**

        ``exhale_root`` (:class:`~exhale.graph.ExhaleRoot`)
            The graph root object for this project.

        **Returns**

        :class:`python:tuple`
            A length two tuple of :class:`~exhale.graph.ExhaleNode` objects,
            ordered as ``(convert_hpp, conversions_f90)``.  These represent the
            files ``include/convert/convert.hpp`` and ``src/conversions.f90``,
            respectively.
        """
        hpp_location = os.path.join("include", "convert", "convert.hpp")
        f90_location = os.path.join("src", "conversions.f90")
        convert_hpp = None
        conversions_f90 = None
        for node in exhale_root.files:
            if node.location == hpp_location:
                convert_hpp = node
            elif node.location == f90_location:
                conversions_f90 = node
        self.assertTrue(
            convert_hpp is not None,
            "Could not find ExhaleNode with file location 'include/convert/convert.hpp'."
        )
        self.assertTrue(
            conversions_f90 is not None,
            "Could not find ExhaleNode with file location 'src/conversions.f90'."
        )
        return (convert_hpp, conversions_f90)

    def test_default_lexers(self):
        """
        Verify ``convert.hpp`` maps to ``"cpp"`` and ``conversions.f90`` to ``"fortran"``.
        """
        exhale_root = get_exhale_root(self)
        convert_hpp, conversions_f90 = self.get_hpp_and_f90_nodes(exhale_root)
        expected = {convert_hpp: "cpp", conversions_f90: "fortran"}
        self.validate_pygments_lexers(exhale_root, expected)

    @confoverrides(exhale_args={"lexerMapping": {r".*\.f90": "python"}})
    def test_modified_fortran(self):
        """
        Verify regular expression overload of ``*.f90`` files map to ``"python"``.
        """
        exhale_root = get_exhale_root(self)
        convert_hpp, conversions_f90 = self.get_hpp_and_f90_nodes(exhale_root)
        expected = {convert_hpp: "cpp", conversions_f90: "python"}
        self.validate_pygments_lexers(exhale_root, expected)