def test_supplement_output_html(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Chop up the HTML into rough sections.
    output_parts = output.split('<h2')
    [status_section] = [x for x in output_parts if 'id="common-properties-Status"' in x]
    [endpoint_section] = [x for x in output_parts if 'id="Endpoint"' in x]

    # Chop into rows. We just want to find the Oem rows.
    output_rows = output.split('<tr')
    oem_rows = [x for x in output_rows if "<b>Oem</b>" in x]

    # These assertions target strings the supplement provided:
    assert 'SUPPLEMENT-SUPPLIED DESCRIPTION for Status' in status_section, "Referenced Object (Status) output is missing supplement-supplied description"
    assert 'SUPPLEMENT-SUPPLIED INTRO for Status' in status_section, "Referenced Object (Status) output is missing supplement-supplied intro"
    assert 'SUPPLEMENT-SUPPLIED JSON for Status' in status_section, "Referenced Object (Status) output is missing supplement-supplied json payload"

    assert 'SUPPLEMENT-SUPPLIED DESCRIPTION for Endpoint' in endpoint_section, "Schema Object (Endpoint) output is missing supplement-supplied description"
    assert 'SUPPLEMENT-SUPPLIED INTRO for Endpoint' in endpoint_section, "Schema Object (Endpoint) output is missing supplement-supplied intro"
    assert 'SUPPLEMENT-SUPPLIED JSON for Endpoint' in endpoint_section, "Schema Object (Endpoint) output is missing supplement-supplied json payload"

    oem_failed_overrides = [x for x in oem_rows if "This is a description override for the Oem object." not in x]
    assert len(oem_failed_overrides) == 0, "Property description override failed for " + str(len(oem_failed_overrides)) + " mentions of Oem"
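
# The examples on this page rely on shared test scaffolding that is not shown here: a
# "mockRequest" pytest fixture that keeps DocGenerator off the network, plus module-level
# "base_config" and "testcase_path" values. The block below is only a minimal sketch of what
# that scaffolding might look like -- the patch target, directory name, and config keys are
# assumptions, not the project's actual conftest.
import copy
import os
from unittest.mock import patch

import pytest

testcase_path = os.path.join(os.path.dirname(__file__), 'sample_inputs')  # hypothetical path
base_config = {
    'output_format': 'markdown',
    'uri_to_local': {},
    'local_to_uri': {},
    'property_index_config': {},
}

@pytest.fixture
def mockRequest():
    # Prevent real HTTP fetches of remote schema files while the tests run.
    with patch('urllib.request.urlopen') as mocked:
        yield mocked
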
def test_properties_with_same_name_md(mockRequest):
    """ Tests an example schema with two properties with the same name """
    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'manager'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    prop_details = docGen.generator.this_section.get('property_details', {})
    # Property details should have been collected for the following (and only these):
    expected_prop_details = [
        'ConnectTypesSupported', 'ManagerType', 'PowerState', 'ResetType'
    ]
    found_prop_details = sorted(prop_details.keys())
    assert found_prop_details == expected_prop_details

    # The following number of definitions should be captured for each:
    assert len(prop_details['ResetType'].keys()) == 2
    assert len(prop_details['ConnectTypesSupported'].keys()) == 3
    assert len(prop_details['ManagerType'].keys()) == 1
    assert len(prop_details['PowerState'].keys()) == 1

    # Spot-check the output as well. These are the headings that should appear for ConnectTypesSupported:
    assert "### ConnectTypesSupported" in output
    assert "In CommandShell:" in output
    assert "In GraphicalConsole:" in output
    assert "In SerialConsole:" in output
def test_html_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    dirpath = os.path.abspath(os.path.join(testcase_path))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    expected_output_main = open(
        os.path.join(dirpath, 'expected_output',
                     'MessagesProperty.html')).read().strip()
    expected_output_details = open(
        os.path.join(dirpath, 'expected_output',
                     'MessagesPropertyDetails.html')).read().strip()

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()
    pattern1 = r'[A-Za-z0-9]+'
    pattern2 = r'^([a-zA-Z_][a-zA-Z0-9_]*)?@(odata|Redfish|Message)\.[a-zA-Z_][a-zA-Z0-9_.]+$'

    # These are redundant, in that the third assertion would always fail if either of the first two fails.
    # The first is more focused, though.
    assert pattern1 in output, "Expected pattern " + pattern1 + " not found in output"
    assert pattern2 in output, "Expected pattern " + pattern2 + " not in output. Was its backslash-escape changed by markdown-to-html conversion?"
    assert expected_output_main in output
    assert expected_output_details in output
def test_markdown_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    dirpath = os.path.abspath(os.path.join(testcase_path))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    expected_output_main = open(
        os.path.join(dirpath, 'expected_output',
                     'MessagesProperty.md')).read().strip()
    expected_output_details = open(
        os.path.join(dirpath, 'expected_output',
                     'MessagesPropertyDetails.md')).read().strip()

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()
    pattern1 = r'\[A\-Za\-z0\-9\]\+'
    pattern2 = r'^([a-zA-Z_][a-zA-Z0-9_]*)?@(odata|Redfish|Message)\.[a-zA-Z_][a-zA-Z0-9_.]+$'
    pattern3 = r'^\(\[a\-zA\-Z\_\]\[a\-zA\-Z0\-9\_\]\*\)?@\(odata\|Redfish\|Message\)\\\.\[a\-zA\-Z\_\]\[a\-zA\-Z0\-9\_\.\]\+$'

    # These are redundant, in that the third assertion would always fail if either of the first two fails.
    # The first is more focused, though.
    assert pattern1 in output, "Expected pattern " + pattern1 + " not found in output"
    assert pattern3 in output, "Expected pattern " + pattern3 + " not in output. Was " + pattern2 + " escaped properly?"

    assert expected_output_main in output
    assert expected_output_details in output
def test_normative_html_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'
    config['normative'] = True

    dirname = 'normative'
    name = 'Normative'

    dirpath = os.path.abspath(os.path.join(testcase_path, dirname))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    expected_output = open(os.path.join(dirpath, 'expected_output', 'index.html')).read().strip()

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    output = output.strip()

    expected_output = _strip_styles(expected_output)
    output = _strip_styles(output)

    assert output == expected_output, "Failed on: " + name
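
# _strip_styles is called above but never defined in these examples. A plausible minimal
# implementation (an assumption, not necessarily the project's helper) strips embedded
# <style> blocks so the comparison ignores CSS and only checks the structural HTML:
import re

def _strip_styles(html):
    # Drop any <style>...</style> blocks before comparing documents.
    return re.sub(r'<style.*?</style>', '', html, flags=re.DOTALL)
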
def test_version_added_output_Chassis(mockRequest):
    """ Verify markdown output contains expected version_added info.

    The Chassis example gave us some distinct scenarios.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_added', 'Chassis'))

    expected_version_strings = ['| **Actions** { |', '| **Links** { |',  # string to match property without version
                                '**PowerState** *(v1.1+)*']


    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    discrepancies = DiscrepancyList()

    for expected in expected_version_strings:
        if expected not in output:
            discrepancies.append('"' + expected + '" not found')

    assert [] == discrepancies
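
# DiscrepancyList is used throughout these tests but never shown. A minimal sketch
# (an assumption): a list subclass whose repr makes "assert [] == discrepancies" failures
# print each missing string on its own line.
class DiscrepancyList(list):
    def __repr__(self):
        if not self:
            return '[]'
        return '\n'.join(['Discrepancies found:'] + ['  ' + str(item) for item in self])
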
def test_uri_capture(mockRequest):

    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    event_properties = docGen.property_data.get(
        'redfish.dmtf.org/schemas/v1/Event.json')
    logentry_properties = docGen.property_data.get(
        'redfish.dmtf.org/schemas/v1/LogEntry.json')
    logentrycollection_properties = docGen.property_data.get(
        'redfish.dmtf.org/schemas/v1/LogEntryCollection.json')

    assert event_properties['uris'] == []
    assert sorted(logentry_properties['uris']) == sorted([
        "/redfish/v1/Managers/{ManagerId}/LogServices/{LogServiceId}/Entries/{LogEntryId}",
        "/redfish/v1/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/Entries/{LogEntryId}",
        "/redfish/v1/CompositionService/ResourceBlocks/{ResourceBlockId}/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/Entries/{LogEntryId}"
    ])
    assert sorted(logentrycollection_properties['uris']) == sorted([
        "/redfish/v1/Managers/{ManagerId}/LogServices/{LogServiceId}/STUBCollection",
        "/redfish/v1/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/STUBCollection",
        "/redfish/v1/CompositionService/ResourceBlocks/{ResourceBlockId}/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/STUBCollection"
    ])
def test_property_index_config_overrides(mockRequest):
    """ Test that overrides are applied. """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    override_desc = "This is an override description for NetDevFuncCapbilities, a string."

    config['property_index_config']['DescriptionOverrides'] = {
        "NetDevFuncCapabilities": [
            {
                "overrideDescription": override_desc,
                "type": "array",
                "globalOverride": True
            },
        ],
    }

    dirpath = os.path.abspath(os.path.join(testcase_path, 'general'))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    lines = [x for x in output.split('\n') if '*NetDevFuncCapabilities*' in x]

    assert len(lines) and len([x for x in lines if override_desc in x]) == len(lines)

    updated_config = docGen.generator.generate_updated_config()
def test_version_deprecated_enum_metadata(mockRequest):
    """ Verify metadata contains expected version_deprecated info for enum properties.
    Note that there is an additional step, after generating this metadata, for generating metadata
    within property data ... so possibly this test should be replaced.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_deprecated', 'Chassis'))

    # This is a partial list of versions that should be detected.
    expected_versions = {
        'definitions': {
            'IndicatorLED': {
                'enum': {
                    "Lit": { },
                    "Unknown": { 'version_deprecated': '1.5.0' },
                    },
                },
            }
        }

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    meta = docGen.property_data['redfish.dmtf.org/schemas/v1/Chassis.json']['doc_generator_meta']

    discrepancies = DiscrepancyList()
    for name, data in expected_versions.items():
        if name == 'version': continue
        _version_compare(meta, name, data, discrepancies, [])

    assert [] == discrepancies
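
# _version_compare is another helper these metadata tests assume. Roughly, it walks the
# expected structure against the collected doc_generator_meta and records any mismatches.
# A simplified sketch of that behavior (an assumption, not the project's implementation):
def _version_compare(meta, name, expected, discrepancies, context):
    path = '/'.join(context + [name])
    actual = meta.get(name)
    if not isinstance(actual, dict):
        discrepancies.append(path + ' not found in metadata')
        return
    for key, value in expected.items():
        if isinstance(value, dict):
            # Recurse into nested property and enum metadata.
            _version_compare(actual, key, value, discrepancies, context + [name])
        elif actual.get(key) != value:
            discrepancies.append(path + ': expected ' + key + '=' + str(value)
                                 + ', found ' + str(actual.get(key)))
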
def test_schema_deprecated_earlier_output_markdown(mockRequest):
    """ Verify markdown output contains expected deprecation info. Deprecation starts at previous version."""

    input_dir = os.path.abspath(os.path.join(testcase_path, 'earlier_deprecated', 'input'))

    config = copy.deepcopy(base_config)
    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}
    config['output_format'] = 'markdown'

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    discrepancies = DiscrepancyList()

    expected_strings = [
        'Power 1.6.0 (deprecated)',
        'v1.6 Deprecated',
        'v1.5 Deprecated',
        'This schema has been deprecated and use in new implementations is discouraged except to retain compatibility with existing products.',
        'This schema has been deprecated because absolute power corrupts absolutely.',
        ]

    for expected in expected_strings:
        if expected not in output:
            discrepancies.append('"' + expected + '" not found')

    assert [] == discrepancies
def test_markdown_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    dirpath = os.path.abspath(os.path.join(testcase_path))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    expected_output_main = open(os.path.join(dirpath, 'expected_output', 'MessagesProperty.md')).read().strip()
    expected_output_details = open(os.path.join(dirpath, 'expected_output', 'MessagesPropertyDetails.md')).read().strip()

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    pattern1 = r'\[A\-Za\-z0\-9\]\+'
    pattern2 = r'^([a-zA-Z_][a-zA-Z0-9_]*)?@(odata|Redfish|Message)\.[a-zA-Z_][a-zA-Z0-9_.]+$'
    pattern3 = r'^\(\[a\-zA\-Z\_\]\[a\-zA\-Z0\-9\_\]\*\)?@\(odata\|Redfish\|Message\)\\\.\[a\-zA\-Z\_\]\[a\-zA\-Z0\-9\_\.\]\+$'

    # These are redundant, in that the third assertion would always fail if either of the first two fails.
    # The first is more focused, though.
    assert pattern1 in output, "Expected pattern " + pattern1 + " not found in output"
    assert pattern3 in output, "Expected pattern " + pattern3 + " not in output. Was " + pattern2 + " escaped properly?"

    assert expected_output_main in output
    assert expected_output_details in output
def test_version_deprecated_output_Chassis(mockRequest):
    """ Verify markdown output contains expected version_deprecated info.
    The Chassis example gave us some distinct scenarios.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_deprecated', 'Chassis'))

    expected_version_strings = ['| Lit |', # string to match property without version
                                'Unknown *(deprecated v1.5)* |',
                                ]


    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    discrepancies = DiscrepancyList()

    for expected in expected_version_strings:
        if expected not in output:
            discrepancies.append('"' + expected + '" not found')

    assert [] == discrepancies
def test_html_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    dirpath = os.path.abspath(os.path.join(testcase_path))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    expected_output_main = open(os.path.join(dirpath, 'expected_output', 'MessagesProperty.html')).read().strip()
    expected_output_details = open(os.path.join(dirpath, 'expected_output', 'MessagesPropertyDetails.html')).read().strip()

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    pattern1 = r'[A-Za-z0-9]+'
    pattern2 = r'^([a-zA-Z_][a-zA-Z0-9_]*)?@(odata|Redfish|Message)\.[a-zA-Z_][a-zA-Z0-9_.]+$'

    # These are redundant, in that the third assertion would always fail if either of the first two fails.
    # The first is more focused, though.
    assert pattern1 in output, "Expected pattern " + pattern1 + " not found in output"
    assert pattern2 in output, "Expected pattern " + pattern2 + " not in output. Was its backslash-escape changed by markdown-to-html conversion?"
    assert expected_output_main in output
    assert expected_output_details in output
def test_supplement_output_from_files_with_action_details(mockRequest):
    """ Test of supplementary blocks for Action Details pulled in from markdown file(s) """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(
        os.path.join(testcase_path, 'certificate_service'))
    supplement_md_dir = os.path.abspath(
        os.path.join(testcase_path, 'md_supplements'))

    config['supplement_md_dir'] = supplement_md_dir

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # This test verifies the text was picked up, but not that it was placed correctly.
    expected_strings = [
        'ACTION DETAILS for GenerateCSR',
        'ACTION DETAILS for ReplaceCertificate',
    ]
    for es in expected_strings:
        assert es in output
def test_supplement_output_from_files(mockRequest):
    """ Test of supplementary blocks pulled in from markdown file(s) rather than schema_supplement """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))
    supplement_md_dir = os.path.abspath(
        os.path.join(testcase_path, 'md_supplements'))

    config['supplement_md_dir'] = supplement_md_dir

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # This test verifies the text was picked up, but not that it was placed correctly.
    expected_strings = [
        "INTRO FOR Endpoint schema.\n\nThis one happens to be multiple lines.",
        '```json\n{ "payload": "A chunk of JSON from the md supplement" }\n```',
        "PROPERTY DETAILS for HostReservationMemoryBytes",
        '#### HostReservationMemoryBytes:',  # This is a Property Details heading
    ]
    for es in expected_strings:
        assert es in output
def test_action_uris(mockRequest):
    """ Action URIs are based on URIs and action names,
    and should be output as part of the Actions section in HTML and markdown output.
    """
    config = copy.deepcopy(base_config)
    config['excluded_schemas_by_match'] = ['Collection']
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'actions'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # Links should appear with asterisks around {} path parts to highlight them.
    expected_strings = [
        "/redfish/v1/CompositionService/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ChangePassword",
        "/redfish/v1/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ChangePassword",
        "/redfish/v1/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ChangePassword",
        "/redfish/v1/CompositionService/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ResetBios",
        "/redfish/v1/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ResetBios",
        "/redfish/v1/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ResetBios",
    ]

    for x in expected_strings:
        assert x in output
def test_version_added_output_AccountService(mockRequest):
    """ Verify markdown output contains expected version_added info.
    This means pulling the correct version strings from the metadata
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_added', 'AccountService'))

    expected_version_strings = [ '**LDAP** *(v1.3+)*', '**LDAPService** {',
                                 '**LocalAccountAuth** *(v1.3+)*',
                                 '**PrivilegeMap** *(v1.1+)*', '**Actions** *(v1.2+)*'
                                 ]


    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    discrepancies = DiscrepancyList()

    for expected in expected_version_strings:
        if expected not in output:
            discrepancies.append('"' + expected + '" not found')

    assert [] == discrepancies
def test_version_added_output_Chassis(mockRequest):
    """ Verify markdown output contains expected version_added info.
    This means pulling the correct version strings from the metadata.
    The Chassis example gave us some distinct scenarios.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_added', 'Chassis'))

    expected_version_strings = ['| **Actions** { |', '| **Links** { |',  # string to match property without version
                                # WORKAROUND for properties incorrectly included in errata versions:
                                # '**PowerState** *(v1.0.1+)*'
                                '**PowerState** *(v1.1+)*']


    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    discrepancies = DiscrepancyList()

    for expected in expected_version_strings:
        if expected not in output:
            discrepancies.append('"' + expected + '" not found')

    assert [] == discrepancies
def test_subset_mode_issue_271_warn_on_inappropriate_spec(mockRequest):
    """ Warn when a profile specifies requirements directly on the Resource, IPAddress, Redundancy, or Settings schemas """

    config = copy.deepcopy(base_config)

    config['output_format'] = 'html'

    input_dir = os.path.abspath(
        os.path.join(testcase_path, 'subset_mode', 'json-schema'))
    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    subset_config = os.path.abspath(
        os.path.join(testcase_path, 'subset_mode', 'bad_spec.json'))
    config['profile_mode'] = 'subset'
    config['profile_doc'] = subset_config

    with pytest.warns(UserWarning) as record:
        docGen = DocGenerator([input_dir], '/dev/null', config)
        output = docGen.generate_docs()

    warning_msgs = [x.message.args[0] for x in record]
    expected_msgs = [
        'Profiles should not specify requirements directly on the "Resource" schema.',
        'Profiles should not specify requirements directly on the "IPAddresses" schema.',
        'Profiles should not specify requirements directly on the "Redundancy" schema.',
        'Profiles should not specify requirements directly on the "Settings" schema.',
    ]
    for m in expected_msgs:
        assert m in warning_msgs
def test_profile_basic_req_props(mockRequest):
    """ Test that the required properties are all listed. """

    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(
        os.path.join(testcase_path, 'basic', 'NetworkPort'))
    profile_dir = os.path.abspath(
        os.path.join(testcase_path, 'basic', 'profiles'))
    profile_json = os.path.abspath(
        os.path.join(profile_dir, 'BasicInstanceProfile.v1_0_0.json'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}
    config['profile_doc'] = profile_json
    config['profile_uri_to_local'] = {'redfish.dmtf.org/profiles': profile_dir}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    expected_props = [
        'AssignablePhysicalPorts', 'Description', 'Ethernet', 'FibreChannel',
        'PhysicalPortAssignment', 'DeviceEnabled'
    ]
    for prop_name in expected_props:
        assert '| **' + prop_name + '** ' in output
def test_identifier_versioning(mockRequest):
    """ Checks a formerly buggy case wherein the DurableName property showed version 1.1+, but
    DurableNameFormat lacked a version notation. """

    config = copy.deepcopy(base_config)
    # config['output_format'] = 'markdown'
    config['output_format'] = 'html'
    config['supplemental'] = {'Introduction': "# Common Objects\n\n[insert_common_objects]\n"}

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # common_properties['http://redfish.dmtf.org/schemas/v1/Resource.json#/definitions/Identifier'] should
    # contain properties DurableName and DurableNameFormat, both of which should have version: 1.1.0.
    common_properties = docGen.generator.common_properties
    identifier_properties = common_properties.get('http://redfish.dmtf.org/schemas/v1/Resource.json#/definitions/Identifier', {}).get('properties', {})
    assert 'DurableName' in identifier_properties
    assert 'DurableNameFormat' in identifier_properties
    DurableName_meta = identifier_properties.get('DurableName', {}).get('_doc_generator_meta', {})
    DurableNameFormat_meta = identifier_properties.get('DurableNameFormat', {}).get('_doc_generator_meta', {})
    assert DurableName_meta.get('version') == "1.1.0"
    assert DurableNameFormat_meta.get('version') == "1.1.0"

    # The above should result in the following strings in the HTML output:
    assert "<b>DurableName</b> <i>(v1.1+)</i>" in output
    assert "<b>DurableNameFormat</b> <i>(v1.1+)</i>" in output
def test_required_attribute_output_markdown(mockRequest):
    """ Check for correct "required" output in markdown. Includes some properties expanded by $ref. """
    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'required_attributes', 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    lines = output.split(os.linesep)

    # Check a couple of properties that are directly included in property_data.
    context_lines = [ x for x in lines if '**Context**' in x ]
    for x in context_lines:
        assert 'required' not in x
    events_lines = [ x for x in lines if '**Events**' in x ]
    for x in events_lines:
        assert 'required' in x

    # And some properties of an object from the "definitions" section.
    eventtype_lines = [ x for x in lines if '**EventType**' in x ]
    for x in eventtype_lines:
        assert 'required' in x
    messageid_lines = [ x for x in lines if '**MessageId**' in x ]
    for x in messageid_lines:
        assert 'required' in x
    severity_lines = [ x for x in lines if '**Severity**' in x ]
    for x in severity_lines:
        assert 'required' not in x

    # Make sure we actually found the lines to test:
    assert len(context_lines) and len(events_lines) and len(eventtype_lines) and len(messageid_lines) and len(severity_lines)
def test_csv_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'csv'

    dirname = 'general'
    name = 'CSV'

    dirpath = os.path.abspath(os.path.join(testcase_path, dirname))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    expected_output = open(os.path.join(dirpath, 'expected_output',
                                        'output.csv'),
                           newline=None).read().strip()

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # "Universal newline" mode replaced '\r\n' with '\n' in the expected output.
    output = output.replace('\r\n', '\n').strip()

    assert output == expected_output, "Failed on: " + name
def test_supplement_output_action_details_in_html(mockRequest):
    """ Test of action_details in supplement config. We happen to exercise HTML output here. """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    input_dir = os.path.abspath(
        os.path.join(testcase_path, 'certificate_service'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    config['schema_supplement'] = {
        'CertificateService': {
            'action_details': {
                'ReplaceCertificate': "ACTION DETAILS for ReplaceCertificate",
                "GenerateCSR": "ACTION DETAILS for GenerateCSR",
            }
        }
    }

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # This test verifies the text was picked up, but not that it was placed correctly.
    expected_strings = [
        '<p>ACTION DETAILS for GenerateCSR</p>',
        '<p>ACTION DETAILS for ReplaceCertificate</p>',
    ]
    for es in expected_strings:
        assert es in output
def test_version_order(mockRequest):
    """ Verify correct order is determined from the unversioned json data, which provides
    versions out of order.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_order'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)

    files_to_process = docGen.get_files(docGen.import_from)
    grouped_files, schema_data = docGen.group_files(files_to_process)

    # Check order of grouped_files. (We don't care about the order of files_to_process.)
    cos_group = grouped_files[
        'redfish.dmtf.org/schemas/v1/ClassOfService.json']
    cos_filenames = [x['filename'] for x in cos_group]
    assert cos_filenames == [
        'ClassOfService.v1_0_0.json', 'ClassOfService.v1_0_1.json',
        'ClassOfService.v1_0_2.json', 'ClassOfService.v1_1_0.json',
        'ClassOfService.v1_1_1.json'
    ]
def test_localized_schemas_default(mockRequest):
    """ Verify a few expected strings are output in the default way when no locale is specified.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(testcase_path)

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)

    files_to_process = docGen.get_files(docGen.import_from)
    output = docGen.generate_docs()

    expected_strings = [
        # Descriptions
        'The ComputerSystem schema represents a computer or system instance',
        'The BootOptionReference of the Boot Option to perform a one-time boot from when BootSourceOverrideTarget is `UefiBootNext`.',
        'The name of the boot order property that the system uses for the persistent boot order. *For the possible property values, see BootOrderPropertySelection in Property details.*',
        '| AliasBootOrder | The system uses the AliasBootOrder property to specify the persistent boot order. |',
        # enum that is annotated in the TEST locale (but not here):
        '| Continuous |',
        # property name that is annotated in the TEST locale (but not here):
        '| **AssetTag** |',
        # Action parameter that is annotated in the TEST locale (but not here):
        '**ResetType** |',
    ]

    for x in expected_strings:
        assert x in output
def test_localized_schemas_normative_default(mockRequest):
    """ Verify a few expected strings are output in the default way when no locale is specified.
    Same as test_localized_schemas_default, but with normative output.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(testcase_path)

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}
    config['normative'] = True

    docGen = DocGenerator([input_dir], '/dev/null', config)

    files_to_process = docGen.get_files(docGen.import_from)
    output = docGen.generate_docs()

    expected_strings = [
        # Descriptions
        'This resource shall represent a computing system in the Redfish Specification.',
        'This property shall contain the BootOptionReference of the UEFI boot option for one time boot, as defined by the UEFI Specification.  The valid values for this property are specified in the values of the BootOrder array.',
        'This property shall indicate which boot order property the system uses for the persistent boot order. *For the possible property values, see BootOrderPropertySelection in Property details.*',
        '| AliasBootOrder | The system uses the AliasBootOrder property to specify the persistent boot order. |',
        # enum that is annotated in the TEST locale (but not here):
        '| Continuous |',
        # property name that is annotated in the TEST locale (but not here):
        '| **AssetTag** |',
        # Action parameter that is annotated in the TEST locale (but not here):
        '**ResetType** |',
    ]

    for x in expected_strings:
        assert x in output
def test_localized_schemas_TEST_htmlmode(mockRequest):
    """ Verify that the test strings are output correctly when TEST is specified for the locale, in HTML output mode.
    """

    config = copy.deepcopy(base_config)
    config['locale'] = 'TEST'
    config['output_format'] = 'html'
    input_dir = os.path.abspath(testcase_path)

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)

    files_to_process = docGen.get_files(docGen.import_from)
    output = docGen.generate_docs()

    expected_strings = [
        # Examples of descriptions:
        'THE COMPUTERSYSTEM SCHEMA REPRESENTS A COMPUTER OR SYSTEM INSTANCE',
        'THE BOOTOPTIONREFERENCE OF THE BOOT OPTION TO PERFORM A ONE-TIME BOOT FROM WHEN BOOTSOURCEOVERRIDETARGET IS <code>UEFIBOOTNEXT</code>.',
        'THE NAME OF THE BOOT ORDER PROPERTY THAT THE SYSTEM USES FOR THE PERSISTENT BOOT ORDER.<br><i>FOR THE POSSIBLE PROPERTY VALUES, SEE <a href="#redfish.dmtf.org/schemas/v1/ComputerSystem.json|details|BootOrderPropertySelection">BootOrderPropertySelection</a> IN PROPERTY DETAILS.',
        '<td>AliasBootOrder</td><td>THE SYSTEM USES THE ALIASBOOTORDER PROPERTY TO SPECIFY THE PERSISTENT BOOT ORDER.</td>',
        # Example of enumTranslations:
        '<td>Continuous (CONTINUOUS)</td>',
        # property name with a translation annotation:
        '<td><nobr><b>AssetTag</b> <i>(Its Mine)</i></nobr></td>',
        # Action parameter with a translation annotation:
        '<nobr><b>ResetType</b> <i>(YOU WANNA RESET THIS THING)</i></nobr>',
    ]

    for x in expected_strings:
        assert x in output
def test_localized_schemas_normative_TEST(mockRequest):
    """ Verify a few expected strings are output translated when TEST locale is specified.
    Same as test_localized_schemas_TEST, but with normative output.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(testcase_path)

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}
    config['locale'] = 'TEST'
    config['normative'] = True

    docGen = DocGenerator([input_dir], '/dev/null', config)

    files_to_process = docGen.get_files(docGen.import_from)
    output = docGen.generate_docs()

    expected_strings = [
        # Descriptions
        'THIS RESOURCE SHALL REPRESENT A COMPUTING SYSTEM IN THE REDFISH SPECIFICATION.',
        'THIS PROPERTY SHALL CONTAIN THE BOOTOPTIONREFERENCE OF THE UEFI BOOT OPTION FOR ONE TIME BOOT, AS DEFINED BY THE UEFI SPECIFICATION.  THE VALID VALUES FOR THIS PROPERTY ARE SPECIFIED IN THE VALUES OF THE BOOTORDER ARRAY.',
        'THIS PROPERTY SHALL INDICATE WHICH BOOT ORDER PROPERTY THE SYSTEM USES FOR THE PERSISTENT BOOT ORDER. *FOR THE POSSIBLE PROPERTY VALUES, SEE BootOrderPropertySelection IN PROPERTY DETAILS.*',
        '| AliasBootOrder | THE SYSTEM USES THE ALIASBOOTORDER PROPERTY TO SPECIFY THE PERSISTENT BOOT ORDER. |',
    ]

    for x in expected_strings:
        assert x in output
def test_localized_schemas_TEST(mockRequest):
    """ Verify that the test strings are output correctly when TEST is specified for the locale.
    """

    config = copy.deepcopy(base_config)
    config['locale'] = 'TEST'
    input_dir = os.path.abspath(testcase_path)

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)

    files_to_process = docGen.get_files(docGen.import_from)
    output = docGen.generate_docs()

    expected_strings = [
        # Examples of descriptions:
        'THE COMPUTERSYSTEM SCHEMA REPRESENTS A COMPUTER OR SYSTEM INSTANCE',
        'THE BOOTOPTIONREFERENCE OF THE BOOT OPTION TO PERFORM A ONE-TIME BOOT FROM WHEN BOOTSOURCEOVERRIDETARGET IS `UEFIBOOTNEXT`.',
        'THE NAME OF THE BOOT ORDER PROPERTY THAT THE SYSTEM USES FOR THE PERSISTENT BOOT ORDER. *FOR THE POSSIBLE PROPERTY VALUES, SEE BootOrderPropertySelection IN PROPERTY DETAILS.*',
        '| AliasBootOrder | THE SYSTEM USES THE ALIASBOOTORDER PROPERTY TO SPECIFY THE PERSISTENT BOOT ORDER. |',
    ]

    for x in expected_strings:
        assert x in output
def test_identifier_versioning(mockRequest):
    """ Checks a formerly buggy case wherein the DurableName property showed version 1.1+, but
    DurableNameFormat lacked a version notation.

    This test originated when the doc generator compared versioned schemas to generate version
    data based on when properties appeared in a schema. Now that we have version annotations in the
    schemas and use those, this is much less likely to break.
    """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'
    config['supplemental'] = {
        'Introduction': "# Common Objects\n\n[insert_common_objects]\n"
    }

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # The above should result in the following strings in the HTML output:
    assert "<b>DurableName</b> <i>(v1.1+)</i>" in output
    assert "<b>DurableNameFormat</b> <i>(v1.1+)</i>" in output
def test_uris_in_regular_schema_html_output(mockRequest):
    """ HTML output is more complex and subject to change.

    In addition to highlighting Id placeholders, they are linked to schema documentation. For this test, several schemas
    are not documented, so the link will go to the schema file itself. """

    config = copy.deepcopy(base_config)
    config['excluded_schemas_by_match'] = ['Collection']
    config['output_format'] = 'html'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # Links should appear with asterisks around {} path parts to highlight them.
    expected_strings = [
        '/​redfish/​v1/​Managers/​<i>{ManagerId}</i>/​LogServices/​<i>{LogServiceId}</i>/​Entries/​<i><a href="#LogEntry">{LogEntryId}</a></i>',
        '/​redfish/​v1/​Systems/​<i>{ComputerSystemId}</i>/​LogServices/​<i>{LogServiceId}</i>/​Entries/​<i><a href="#LogEntry">{LogEntryId}</a></i>',
        '/​redfish/​v1/​CompositionService/​ResourceBlocks/​<i>{ResourceBlockId}</i>/​Systems/​<i>{ComputerSystemId}</i>/​LogServices/​<i>{LogServiceId}</i>/​Entries/​<i><a href="#LogEntry">{LogEntryId}</a></i>'
    ]

    for x in expected_strings:
        assert x in output
def test_property_index_config_overrides(mockRequest):
    """ Test that overrides are applied. """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    override_desc = "This is an override description for NetDevFuncCapbilities, a string."

    config['property_index_config']['DescriptionOverrides'] = {
        "NetDevFuncCapabilities": [
            {
                "overrideDescription": override_desc,
                "type": "array",
                "globalOverride": True
            },
        ],
    }

    dirpath = os.path.abspath(os.path.join(testcase_path, 'general'))
    input_dir = os.path.join(dirpath, 'input')

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    lines = [x for x in output.split('\n') if '*NetDevFuncCapabilities*' in x]

    assert len(lines) and len([x for x in lines if override_desc in x
                               ]) == len(lines)

    updated_config = docGen.generator.generate_updated_config()
def test_excerpt_circuit(mockRequest):
    """ The Circuit schema contains many references to excerpts """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'circuit'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Looking for the generated description line is an easy way to check that excerpts were detected:
    description1 = "This object is an excerpt of the *Sensor* resource located at the URI shown in DataSourceUri."

    # Verify one of the expanded excerpts was output.
    expected_excerpt = """| **Current** *(v0.9+)* { | object<br>(excerpt) | The current sensor for this circuit. This object is an excerpt of the *Sensor* resource located at the URI shown in DataSourceUri. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**DataSourceUri** | string<br><br>*read-only<br>(null)* | A link to the resource that provides the data for this object. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**Name** | string<br><br>*read-only required* | The name of the resource or array element. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**PeakReading** | number<br><br>*read-only<br>(null)* | The peak reading value for this sensor. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**PhysicalContext** | string<br>(enum)<br><br>*read-only<br>(null)* | Describes the area or device to which this sensor measurement applies. *See PhysicalContext in Property Details, below, for the possible values of this property.* |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**PhysicalSubContext** | string<br>(enum)<br><br>*read-only<br>(null)* | Describes the usage or location within a device to which this sensor measurement applies. *See PhysicalSubContext in Property Details, below, for the possible values of this property.* |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**Reading** | number<br><br>*read-only<br>(null)* | The present value for this Sensor. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**ReadingUnits** | string<br><br>*read-only<br>(null)* | Units in which the reading and thresholds are measured. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**Status** {} | object | This property describes the status and health of the resource and its children. See the *Resource* schema for details on this property. |
| } |   |   |"""

    assert output.count(description1) == 56
    assert expected_excerpt in output
def test_uris_in_collection_schema_markdown_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['excluded_schemas_by_match'] = ['Collection']
    config['output_format'] = 'markdown'
    config['supplemental'] = {
        'Introduction': "# Redfish Collections\n\n[insert_collections]\n"
    }

    input_dir = os.path.abspath(os.path.join(testcase_path, 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    expected_strings = [
        "/redfish/v1/Managers/*{ManagerId}*/LogServices/*{LogServiceId}*/STUBCollection",
        "/redfish/v1/Systems/*{ComputerSystemId}*/LogServices/*{LogServiceId}*/STUBCollection",
        "/redfish/v1/CompositionService/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/LogServices/*{LogServiceId}*/STUBCollection"
    ]

    for x in expected_strings:
        assert x in output
def test_uri_capture(mockRequest):

    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    event_properties = docGen.property_data.get('redfish.dmtf.org/schemas/v1/Event.json')
    logentry_properties = docGen.property_data.get('redfish.dmtf.org/schemas/v1/LogEntry.json')
    logentrycollection_properties = docGen.property_data.get('redfish.dmtf.org/schemas/v1/LogEntryCollection.json')

    assert event_properties['uris'] == []
    assert sorted(logentry_properties['uris']) == sorted([
        "/redfish/v1/Managers/{ManagerId}/LogServices/{LogServiceId}/Entries/{LogEntryId}",
        "/redfish/v1/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/Entries/{LogEntryId}",
        "/redfish/v1/CompositionService/ResourceBlocks/{ResourceBlockId}/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/Entries/{LogEntryId}"
        ])
    assert sorted(logentrycollection_properties['uris']) == sorted([
        "/redfish/v1/Managers/{ManagerId}/LogServices/{LogServiceId}/STUBCollection",
        "/redfish/v1/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/STUBCollection",
        "/redfish/v1/CompositionService/ResourceBlocks/{ResourceBlockId}/Systems/{ComputerSystemId}/LogServices/{LogServiceId}/STUBCollection"
        ])
def test_action_uris(mockRequest):
    """ Action URIs are based on URIs and action names,
    and should be output as part of the Actions section in HTML and markdown output.
    """
    config = copy.deepcopy(base_config)
    config['excluded_schemas_by_match'] = [ 'Collection' ]
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'actions'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Links should appear with asterisks around {} path parts to highlight them.
    expected_strings = [
        "/redfish/v1/CompositionService/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ChangePassword",
        "/redfish/v1/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ChangePassword",
        "/redfish/v1/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ChangePassword",
        "/redfish/v1/CompositionService/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ResetBios",
        "/redfish/v1/ResourceBlocks/*{ResourceBlockId}*/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ResetBios",
        "/redfish/v1/Systems/*{ComputerSystemId}*/Bios/Actions/Bios.ResetBios",
        ]

    for x in expected_strings:
        assert x in output
def test_uris_in_regular_schema_html_output(mockRequest):
    """ HTML output is more complex and subject to change.

    In addition to highlighting Id placeholders, they are linked to schema documentation. For this test, several schemas
    are not documented, so the link will go to the schema file itself. """

    config = copy.deepcopy(base_config)
    config['excluded_schemas_by_match'] = [ 'Collection' ]
    config['output_format'] = 'html'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Links should appear with asterisks around {} path parts to highlight them.
    expected_strings = [
        '/redfish/v1/Managers/<i>{ManagerId}</i>/LogServices/<i>{LogServiceId}</i>/Entries/<i><a href="#LogEntry">{LogEntryId}</a></i>',
        '/redfish/v1/Systems/<i>{ComputerSystemId}</i>/LogServices/<i>{LogServiceId}</i>/Entries/<i><a href="#LogEntry">{LogEntryId}</a></i>',
        '/redfish/v1/CompositionService/ResourceBlocks/<i>{ResourceBlockId}</i>/Systems/<i>{ComputerSystemId}</i>/LogServices/<i>{LogServiceId}</i>/Entries/<i><a href="#LogEntry">{LogEntryId}</a></i>'
        ]

    for x in expected_strings:
        assert x in output
def test_version_added_output_AccountService(mockRequest):
    """ Verify markdown output contains expected version_added info.
    This means pulling the correct version strings from the metadata
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(
        os.path.join(testcase_path, 'version_added', 'AccountService'))

    expected_version_strings = [
        '**LDAP** *(v1.3+)*', '**LDAPService** {',
        '**LocalAccountAuth** *(v1.3+)*', '**PrivilegeMap** *(v1.1+)*',
        '**Actions** *(v1.2+)*'
    ]

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()
    discrepancies = DiscrepancyList()

    for expected in expected_version_strings:
        if expected not in output:
            discrepancies.append('"' + expected + '" not found')

    assert [] == discrepancies
def test_action_for_rekey_markdown(mockRequest):
    """ This is the initial example, from the Certificate schema """
    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'certificate'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = {input_dir: 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    expected_output = '''
**Response Payload**

|     |     |     |     |
| --- | --- | --- | --- |
| { |  |  |  |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**Certificate** *(v1.1+)* { | object<br><br>* required* | The link to the certificate being rekeyed. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**@odata.id** | string<br><br>*read-only* | Link to another Certificate resource. |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;} |   |   |
| &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;**CSRString** *(v1.1+)* | string<br><br>*read-only required* | The string for the certificate signing request. |
| } |  |  |  |
'''

    assert expected_output in output
def test_version_added_metadata(mockRequest):
    """ Verify metadata contains expected version_added info.
    Note that there is an additional step, after generating this metadata, for generating metadata
    within property data ... so possibly this test should be replaced.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'version_added', 'AccountService'))

    # This is a partial list of versions that should be detected.
    expected_versions = {
        'AccountLockoutThreshold': {},
        'LDAP': {'version': '1.3.0'},
        'LocalAccountAuth': {'version': '1.3.0'},
        'PrivilegeMap': {'version': '1.1.0'},
        'Actions': {'version': '1.2.0',
                    'Oem': { 'version': '1.2.0'},
                    },
        'AdditionalExternalAccountProviders': { 'version': '1.3.0' },
        'definitions': { 'AccountProviderTypes': {'enum': {'ActiveDirectoryService': {'version': '1.3.0'},
                                                           'RedfishService': {'version': '1.3.0'},
                                                           'OEM': {'version': '1.3.0'},
                                                           'LDAPService': {'version': '1.3.0'},
                                                           },
                                                  'version': '1.3.0',
                                                  },
                         # WORKAROUND for properties incorrectly included in errata versions:
                         # 'Actions': { 'version': '1.2.2',
                         #              'Oem': { 'version': '1.2.2' },
                         #              },
                         'Actions': { 'version': '1.3.0',
                                      'Oem': { 'version': '1.3.0' },
                                      },
                         'LDAPSearchSettings': { 'version': '1.3.0',
                                                 'BaseDistinguishedNames': {'version': '1.3.0'},
                                                 },
                         'AccountService': { 'LDAP': { 'version': '1.3.0' },
                                             'LocalAccountAuth': { 'version': '1.3.0' },
                                             'AccountLockoutThreshold': { },
                                             'PrivilegeMap': { 'version': '1.1.0'},
                                             'Actions': { 'version': '1.2.0' }
                                             },
                         }
        }

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    meta = docGen.property_data['redfish.dmtf.org/schemas/v1/AccountService.json']['doc_generator_meta']

    discrepancies = DiscrepancyList()
    for name, data in expected_versions.items():
        if name == 'version': continue
        _version_compare(meta, name, data, discrepancies, [])

    assert [] == discrepancies
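# _version_compare and DiscrepancyList above are helpers defined elsewhere in this test module.
# The sketch below is only an illustration (an assumption, not the suite's actual implementation)
# of the recursive walk that the nested expected_versions structure implies.
class _DiscrepancyListSketch(list):
    """ List-like collector of mismatch messages; compares equal to [] when empty. """

def _version_compare_sketch(meta, name, expected, discrepancies, context):
    """ Recursively check expected 'version' values against the collected metadata (sketch). """
    actual = meta.get(name) if isinstance(meta, dict) else None
    if not isinstance(actual, dict):
        actual = {}
    path = context + [name]
    for key, value in expected.items():
        if key == 'version':
            if actual.get('version') != value:
                discrepancies.append('.'.join(path) + ': expected version ' + str(value) +
                                     ', found ' + str(actual.get('version')))
        elif isinstance(value, dict):
            _version_compare_sketch(actual, key, value, discrepancies, path)

# e.g.:
# sketch_discrepancies = _DiscrepancyListSketch()
# _version_compare_sketch(meta, 'LDAP', {'version': '1.3.0'}, sketch_discrepancies, [])
# assert sketch_discrepancies == []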
Example #43
def test_html_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    # This test used to compare a snippet, but with Pygments involved, the test became fragile.
    assert 'Placeholder for REQUEST' in output
    assert 'Placeholder for RESPONSE' in output
Example #44
def test_slate_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'slate'

    expected_output = open(os.path.join(output_dir, 'slate.md')).read().strip()

    docGen = DocGenerator([input_dir], '/dev/null', config)
    output = docGen.generate_docs()

    assert expected_output in output
def test_supplement_description_vs_full_html (mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    config['property_description_overrides'] = {
        "IPv4Address": "This is a description override for the IPv4Address object.",
        "DeviceId": "This is a description override for DeviceId, which is not a top-level property.",
    }

    config['property_fulldescription_overrides'] = {
        "IPv6Address": "This is a full description override for the IPv6Address object.",
        "SubsystemId": "This is a description override for SubsystemId, which is not a top-level property.",
    }

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Chop into rows. We just want to find the IPv4Address and IPv6Address rows.
    output_rows = output.split('<tr')
    ipv4_rows = [x for x in output_rows if "<b>IPv4Address</b>" in x]
    ipv6_rows = [x for x in output_rows if "<b>IPv6Address</b>" in x]
    deviceId_rows = [x for x in output_rows if "<b>DeviceId</b>" in x]
    subsystemId_rows = [x for x in output_rows if "<b>SubsystemId</b>" in x]

    # Verify that the overrides were used:
    ipv4_failed_overrides = [x for x in ipv4_rows if "This is a description override for the IPv4Address object." not in x]
    assert len(ipv4_failed_overrides) == 0, "Property description override failed for " + str(len(ipv4_failed_overrides)) + " mentions of IPv4Address"

    ipv6_failed_overrides = [x for x in ipv6_rows if "This is a full description override for the IPv6Address object." not in x]
    assert len(ipv6_failed_overrides) == 0, "Property full description override failed for " + str(len(ipv6_failed_overrides)) + " mentions of IPv6Address"

    deviceId_failed_overrides = [x for x in deviceId_rows if "This is a description override for DeviceId" not in x]
    assert len(deviceId_failed_overrides) == 0, "Property description override failed for " + str(len(deviceId_failed_overrides)) + " mentions of DeviceId"

    subsystemId_failed_overrides = [x for x in subsystemId_rows if "This is a description override for SubsystemId" not in x]
    assert len(subsystemId_failed_overrides) == 0, "Property description override failed for " + str(len(subsystemId_failed_overrides)) + " mentions of SubsystemId"

    # Verify that the description overrides retained the reference to the common property:
    ipv4_failed_overrides = [x for x in ipv4_rows if "for details on this property" not in x]
    assert len(ipv4_failed_overrides) == 0, "Property description override failed to include reference to common property for " + str(len(ipv4_failed_overrides)) + " mentions of IPv4Address"

    # Verify that the full description overrides DID NOT retain the reference to the common property:
    ipv6_failed_overrides = [x for x in ipv6_rows if "for details on this property" in x]
    assert len(ipv6_failed_overrides) == 0, "Property full description override incorrectly included reference to common property for " + str(len(ipv6_failed_overrides)) + " mentions of IPv6Address"
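# The checks above repeat one pattern: split the rendered HTML on '<tr', keep the rows that
# mention a property in bold, and require that every such row carries the expected text. A
# hypothetical helper (an illustration, not part of this test suite) capturing that pattern:
def rows_missing_text(html_output, prop_name, expected_text):
    """ Return the '<tr' chunks that mention <b>prop_name</b> but lack expected_text (sketch). """
    rows = [x for x in html_output.split('<tr') if '<b>' + prop_name + '</b>' in x]
    return [x for x in rows if expected_text not in x]

# e.g., equivalent to the IPv4Address assertion above:
# assert not rows_missing_text(output, 'IPv4Address',
#                              'This is a description override for the IPv4Address object.')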
Example #46
    def execute(self, dbname, revision):
        """
        Generate HTML documentation for the given database and revision by rendering its schema
        with DocGenerator.
        """
        print(self.__class__.__name__ + " BEGIN")

        db_schema = self._db_schema(dbname, revision)
        doc_generator_ = DocGenerator(db_schema)
        html = doc_generator_.generate()

        print(self.__class__.__name__ + " FINISH")

        return html
Example #47
def test_csv_basic_output(mockRequest):
    """ This initial test doesn't do much; it's mainly here to make sure that unimplemented
    functionality doesn't introduce errors. """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'input'))
    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    lines = output.split('\n')

    assert lines[0].startswith('Schema Name')
    assert len(lines) == 10
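# If the CSV checks above ever need to go beyond line counts, the standard-library csv module
# can parse the generated text directly. A minimal sketch (the helper name is an illustration,
# not part of the suite):
import csv
import io

def csv_rows(csv_output):
    """ Parse generated CSV text into a list of rows (sketch). """
    return list(csv.reader(io.StringIO(csv_output)))

# e.g., a header check along the lines of the startswith() assertion above:
# assert csv_rows(output)[0][0] == 'Schema Name'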
Example #48
def test_actions_not_suppressed (mockRequest):
    """ By default, "Actions" appear in property tables. """
    config = copy.deepcopy(base_config)
    config['excluded_schemas_by_match'] = [ 'Collection' ]
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'actions'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # This is what the start of an "Actions" object looks like in a markdown table.
    assert "| **Actions** { | object |" in output
def test_gather(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'network_sample'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    common_properties = docGen.generator.common_properties
    assert 'http://redfish.dmtf.org/schemas/v1/Resource.json#/definitions/Oem' in common_properties
    assert 'http://redfish.dmtf.org/schemas/v1/Resource.json#/definitions/Status' in common_properties
    assert len(common_properties) == 2
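# Note: common_properties is keyed by the full $ref URI (the schema URI plus its
# '#/definitions/<Name>' fragment), which is why the assertions above use complete references.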
def test_markdown_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'
    config['supplemental'] = {'Introduction': "# Common Objects\n\n[insert_common_objects]\n"}

    input_dir = os.path.abspath(os.path.join(testcase_path, 'network_sample'))
    expected_output = open(os.path.join(testcase_path, 'network_sample_output/', 'output.md')).read().strip()

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    assert expected_output in output, "Failed on: Markdown output of Referenced Objects"
def test_supplement_output_markdown (mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Check for ~pagebreak~ converted to <p style="page-break-before: always">
    pbrk_location = output[output.find('text1') : output.find('text2')]
    assert '<p style="page-break-before: always"></p>' in pbrk_location, "Markdown output lacked expected page-break markup"
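# The slice between 'text1' and 'text2' above assumes both markers are present in the output.
# A hypothetical helper (an illustration, not part of the suite) that makes the same check a
# little more defensive about missing markers:
def find_between(text, start_marker, end_marker):
    """ Return the substring between two markers, or None if either marker is absent (sketch). """
    start, end = text.find(start_marker), text.find(end_marker)
    if start == -1 or end == -1:
        return None
    return text[start:end]

# e.g.:
# section = find_between(output, 'text1', 'text2')
# assert section is not None and '<p style="page-break-before: always"></p>' in section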
def test_release_history_data_collection(mockRequest):
    """ Verify that the correct release data is collected in the DocGenerator property_data.

    This data includes full versions (including errata).
    """

    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'release_history', 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    assert docGen.property_data['redfish.dmtf.org/schemas/v1/Storage.json'].get('release_history') == expected_release_history
def test_release_history_output_markdown(mockRequest):
    """ Verify that the release history output is correct.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'release_history', 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    expected_output = """|     |     |     |     |     |     |     |
| --- | --- | --- | --- | --- | --- | --- |
| *v1.6* | *v1.5* | *v1.4* | *v1.3* | *v1.2* | *v1.1* | *v1.0* |
| 2018.3 | 2018.2 | 2017.3 | 2017.2 | 2017.1 | 2016.2 | 2016.1 |"""

    assert expected_output in output
Example #54
def test_ipaddresses (mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'
    # The following is needed only if we want to inspect the output:
    config['supplemental'] = { 'Introduction': "# Common Objects\n\n[insert_common_objects]\n" }

    input_dir = os.path.abspath(os.path.join(testcase_path, 'ipaddresses'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()
    common_properties = docGen.generator.common_properties

    assert 'http://redfish.dmtf.org/schemas/v1/IPAddresses.json#/definitions/IPv4Address' in common_properties
    assert 'http://redfish.dmtf.org/schemas/v1/IPAddresses.json#/definitions/IPv6Address' in common_properties
Example #55
def test_excerpt_html_links(mockRequest):
    """ Markdown doesn't include links to the excerpted schema, so we need to test this in HTML """

    config = copy.deepcopy(base_config)
    config['output_format'] = 'html'

    input_dir = os.path.abspath(os.path.join(testcase_path, 'circuit'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Looking for the generated description line is an easy way to check that excerpts were detected:
    description1 = 'This object is an excerpt of the <a href="#Sensor">Sensor</a> resource located at the URI shown in DataSourceUri.'

    assert output.count(description1) == 56
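# Note: the expected count of 56 is presumably tied to the number of excerpt-typed properties in
# the 'circuit' test data, so it will need updating whenever that fixture changes.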
def test_release_history_output_html(mockRequest):
    """ Verify that the release history output is correct.
    """

    config = copy.deepcopy(base_config)
    input_dir = os.path.abspath(os.path.join(testcase_path, 'release_history', 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}
    config['output_format'] = 'html'

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    expected_output = "<table><tbody><tr><td><i>v1.6</i></td><td><i>v1.5</i></td><td><i>v1.4</i></td><td><i>v1.3</i></td><td><i>v1.2</i></td><td><i>v1.1</i></td><td><i>v1.0</i></td></tr><tr><td>2018.3</td><td>2018.2</td><td>2017.3</td><td>2017.2</td><td>2017.1</td><td>2016.2</td><td>2016.1</td></tr></tbody></table>"

    output = output.replace('\n', '')

    assert expected_output in output
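# Stripping newlines works here because the expected table is compared as one contiguous block.
# If the HTML formatter ever indents table rows, a broader normalization (a sketch, with an
# illustrative helper name) could keep the comparison stable:
import re

def normalize_markup_ws(html_text):
    """ Collapse whitespace between tags so markup comparisons ignore layout (sketch). """
    return re.sub(r'>\s+<', '><', html_text)

# e.g.:
# assert expected_output in normalize_markup_ws(output)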
Example #57
def test_markdown_output(mockRequest):

    config = copy.deepcopy(base_config)
    config['output_format'] = 'markdown'

    for dirname, name in cases.items():
        dirpath = os.path.abspath(os.path.join(testcase_path, dirname))
        input_dir = os.path.join(dirpath, 'input')

        config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
        config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

        expected_output = open(os.path.join(dirpath, 'expected_output', 'output.md')).read().strip()

        docGen = DocGenerator([ input_dir ], '/dev/null', config)
        output = docGen.generate_docs()
        output = output.strip()

        assert output == expected_output, "Failed on: " + name
def test_required_attribute_collection(mockRequest):
    """ Check for presence of correct prop_required in property data. """
    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'required_attributes', 'input'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    # Check a couple of properties that are directly included in property_data.
    # Note: when we follow a $ref, we apply "required" annotations on the fly, so the only way to
    # inspect them is by looking at formatted output, unfortunately.
    eventData = docGen.property_data['redfish.dmtf.org/schemas/v1/Event.json']
    eventProperties = eventData['properties']
    assert eventProperties['Events']['prop_required'] is True
    assert eventProperties['Id']['prop_required'] is True
    assert eventProperties['Context']['prop_required'] is False
Example #59
def test_profile_basic_req_props (mockRequest):
    """ Test that the required properties are all listed. """

    config = copy.deepcopy(base_config)

    input_dir = os.path.abspath(os.path.join(testcase_path, 'basic', 'NetworkPort'))
    profile_dir = os.path.abspath(os.path.join(testcase_path, 'basic', 'profiles'))
    profile_json = os.path.abspath(os.path.join(profile_dir, 'BasicInstanceProfile.v1_0_0.json'))

    config['uri_to_local'] = {'redfish.dmtf.org/schemas/v1': input_dir}
    config['local_to_uri'] = { input_dir : 'redfish.dmtf.org/schemas/v1'}
    config['profile_doc'] = profile_json
    config['profile_uri_to_local'] = { 'redfish.dmtf.org/profiles': profile_dir }

    docGen = DocGenerator([ input_dir ], '/dev/null', config)
    output = docGen.generate_docs()

    expected_props = ['AssignablePhysicalPorts', 'Description', 'Ethernet', 'FibreChannel',
                      'PhysicalPortAssignment', 'DeviceEnabled']
    for prop_name in expected_props:
        assert '| **' + prop_name + '** ' in output
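# The loop above stops at the first missing property with a bare AssertionError. A hedged
# variant (an illustration, not part of the suite) that reports every missing property at once:
def missing_property_rows(markdown_output, prop_names):
    """ Return the names whose '| **Name** ' table cell is absent from the output (sketch). """
    return [p for p in prop_names if '| **' + p + '** ' not in markdown_output]

# e.g.:
# missing = missing_property_rows(output, expected_props)
# assert not missing, "Properties missing from output: " + ', '.join(missing)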