def row_generator_fn(
    configuration=None,
    result=None,
    language=None,
    runtime_configuration=None,
    **kwargs,
):
    """Render one validation-results table row for a single EVR.

    Builds three cells from the result's expectation type:
    a status icon, the expectation string (augmented with any
    unexpected-values diagnostics), and the observed value.

    Returns:
        A one-element list containing the concatenated row cells.
    """
    expectation = result.expectation_config
    expectation_string_cell = expectation_string_fn(
        configuration=expectation, runtime_configuration=runtime_configuration
    )

    status_icon_renderer = get_renderer_impl(
        object_name=expectation_type,
        renderer_type="renderer.diagnostic.status_icon",
    )
    status_cell = (
        [status_icon_renderer[1](result=result)] if status_icon_renderer else []
    )

    # NOTE: removed the unnecessary f-prefix — the template has no placeholders.
    data_docs_exception_message = """\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
    """

    def _render_diagnostic(renderer_type, fallback):
        # One shared implementation of the lookup / invoke / log-on-error
        # pattern that was previously copy-pasted three times. Returns
        # `fallback` both when no renderer is registered and when rendering
        # raises, matching the original behavior.
        try:
            renderer = get_renderer_impl(
                object_name=expectation_type,
                renderer_type=renderer_type,
            )
            return renderer[1](result=result) if renderer else fallback
        except Exception as e:
            exception_traceback = traceback.format_exc()
            exception_message = (
                data_docs_exception_message
                + f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
            )
            logger.error(exception_message)
            return fallback

    unexpected_statement = _render_diagnostic(
        "renderer.diagnostic.unexpected_statement", []
    )
    unexpected_table = _render_diagnostic(
        "renderer.diagnostic.unexpected_table", None
    )
    observed_value = [
        _render_diagnostic("renderer.diagnostic.observed_value", "--")
    ]

    # If the expectation has some unexpected values, attach the diagnostics
    # to the expectation-string cell.
    if unexpected_statement:
        expectation_string_cell += unexpected_statement
    if unexpected_table:
        expectation_string_cell.append(unexpected_table)

    # A multi-part expectation cell is nested one level deeper so it renders
    # as a single table cell.
    if len(expectation_string_cell) > 1:
        return [status_cell + [expectation_string_cell] + observed_value]
    return [status_cell + expectation_string_cell + observed_value]
def test_ValidationResultsTableContentBlockRenderer_get_status_cell(
    evr_failed_with_exception, evr_success, evr_failed
):
    """The status_icon diagnostic renderer picks the right icon per EVR outcome."""

    def _status_icon(evr):
        # Resolve the status_icon renderer for the EVR's expectation type
        # and invoke it.
        renderer = get_renderer_impl(
            object_name=evr.expectation_config.expectation_type,
            renderer_type="renderer.diagnostic.status_icon",
        )
        return renderer[1](result=evr)

    # Failed EVR that raised an exception -> warning triangle.
    assert _status_icon(evr_failed_with_exception).to_json_dict() == {
        "content_block_type": "string_template",
        "string_template": {
            "template": "$icon",
            "params": {"icon": "", "markdown_status_icon": "❗"},
            "styling": {
                "params": {
                    "icon": {
                        "classes": ["fas", "fa-exclamation-triangle", "text-warning"],
                        "tag": "i",
                    }
                }
            },
        },
    }

    # Succeeded EVR -> green check, tagged so it can be hidden when the
    # "hide succeeded" toggle is active.
    assert _status_icon(evr_success).to_json_dict() == {
        "content_block_type": "string_template",
        "string_template": {
            "template": "$icon",
            "params": {"icon": "", "markdown_status_icon": "✅"},
            "styling": {
                "params": {
                    "icon": {
                        "classes": ["fas", "fa-check-circle", "text-success"],
                        "tag": "i",
                    }
                }
            },
        },
        "styling": {"parent": {"classes": ["hide-succeeded-validation-target-child"]}},
    }

    # Plain failed EVR -> red cross.
    assert _status_icon(evr_failed).to_json_dict() == {
        "content_block_type": "string_template",
        "string_template": {
            "template": "$icon",
            "params": {"icon": "", "markdown_status_icon": "❌"},
            "styling": {
                "params": {
                    "icon": {"tag": "i", "classes": ["fas", "fa-times", "text-danger"]}
                }
            },
        },
    }
def row_generator_fn(
    configuration=None,
    result=None,
    language=None,
    runtime_configuration=None,
    **kwargs,
):
    """Render one validation-results table row for a single EVR.

    Builds a status cell, the expectation string (augmented with any
    unexpected-values diagnostics), the observed value, and any configured
    meta-property columns.

    Returns:
        A one-element list containing the concatenated row cells.
    """
    eval_param_value_dict = kwargs.get("evaluation_parameters", None)
    # Loading into evaluation parameters to be passed onto prescriptive renderer.
    if eval_param_value_dict is not None:
        # BUGFIX: runtime_configuration defaults to None; previously this
        # item-assignment raised TypeError for callers that omitted it.
        if runtime_configuration is None:
            runtime_configuration = {}
        runtime_configuration["evaluation_parameters"] = eval_param_value_dict

    expectation = result.expectation_config
    expectation_string_cell = expectation_string_fn(
        configuration=expectation, runtime_configuration=runtime_configuration
    )

    status_icon_renderer = get_renderer_impl(
        object_name=expectation_type,
        renderer_type="renderer.diagnostic.status_icon",
    )
    # Fall back to the class-level diagnostic icon renderer when no
    # expectation-specific renderer is registered. (Direct attribute access
    # replaces the redundant getattr-with-literal-name call.)
    status_cell = (
        [status_icon_renderer[1](result=result)]
        if status_icon_renderer
        else [cls._diagnostic_status_icon_renderer(result=result)]
    )

    unexpected_statement = []
    unexpected_table = None
    observed_value = ["--"]

    data_docs_exception_message = """\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
    """
    try:
        unexpected_statement_renderer = get_renderer_impl(
            object_name=expectation_type,
            renderer_type="renderer.diagnostic.unexpected_statement",
        )
        unexpected_statement = (
            unexpected_statement_renderer[1](result=result)
            if unexpected_statement_renderer
            else []
        )
    except Exception as e:
        exception_traceback = traceback.format_exc()
        exception_message = (
            data_docs_exception_message
            + f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
        )
        logger.error(exception_message)
    try:
        unexpected_table_renderer = get_renderer_impl(
            object_name=expectation_type,
            renderer_type="renderer.diagnostic.unexpected_table",
        )
        unexpected_table = (
            unexpected_table_renderer[1](result=result)
            if unexpected_table_renderer
            else None
        )
    except Exception as e:
        exception_traceback = traceback.format_exc()
        exception_message = (
            data_docs_exception_message
            + f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
        )
        logger.error(exception_message)
    try:
        observed_value_renderer = get_renderer_impl(
            object_name=expectation_type,
            renderer_type="renderer.diagnostic.observed_value",
        )
        observed_value = [
            observed_value_renderer[1](result=result)
            if observed_value_renderer
            # Legacy V2-API expectations may still provide an observed value.
            else (
                cls._get_legacy_v2_api_observed_value(expectation_string_fn, result)
                or "--"
            )
        ]
    except Exception as e:
        exception_traceback = traceback.format_exc()
        exception_message = (
            data_docs_exception_message
            + f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
        )
        logger.error(exception_message)

    # If the expectation has some unexpected values, attach the diagnostics
    # to the expectation-string cell.
    if unexpected_statement:
        expectation_string_cell += unexpected_statement
    if unexpected_table:
        expectation_string_cell.append(unexpected_table)

    if len(expectation_string_cell) > 1:
        output_row = [status_cell + [expectation_string_cell] + observed_value]
    else:
        output_row = [status_cell + expectation_string_cell + observed_value]

    # Append any configured meta-property columns to the row.
    meta_properties_renderer = get_renderer_impl(
        object_name=expectation_type,
        renderer_type="renderer.diagnostic.meta_properties",
    )
    if meta_properties_renderer:
        output_row[0] += meta_properties_renderer[1](result=result)

    return output_row
def test_ValidationResultsTableContentBlockRenderer_get_unexpected_table(evr_success):
    """The unexpected_table renderer only yields a table when a failed EVR
    carries a partial unexpected list (or counts)."""

    def _failed_in_set_evr(result=None):
        # All failure fixtures share the same expectation configuration and
        # a clean exception_info payload; only `result` varies.
        return ExpectationValidationResult(
            success=False,
            result=result,
            exception_info={
                "raised_exception": False,
                "exception_message": None,
                "exception_traceback": None,
            },
            expectation_config=ExpectationConfiguration(
                expectation_type="expect_column_values_to_be_in_set",
                kwargs={
                    "column": "Unnamed: 0",
                    "value_set": [],
                    "result_format": "SUMMARY",
                },
            ),
        )

    base_result = {
        "element_count": 1313,
        "missing_count": 0,
        "missing_percent": 0.0,
        "unexpected_count": 1313,
        "unexpected_percent": 100.0,
        "unexpected_percent_nonmissing": 100.0,
    }

    evr_failed_no_result = _failed_in_set_evr()
    evr_failed_no_unexpected_list_or_counts = _failed_in_set_evr(dict(base_result))
    evr_failed_partial_unexpected_list = _failed_in_set_evr(
        {**base_result, "partial_unexpected_list": list(range(1, 21))}
    )
    evr_failed_partial_unexpected_counts = _failed_in_set_evr(
        {
            **base_result,
            "partial_unexpected_list": list(range(1, 21)),
            "partial_unexpected_index_list": list(range(20)),
            "partial_unexpected_counts": [
                {"value": value, "count": 1} for value in range(1, 21)
            ],
        }
    )

    def _render(evr):
        # Resolve and invoke the unexpected_table renderer for the EVR.
        return get_renderer_impl(
            object_name=evr.expectation_config.expectation_type,
            renderer_type="renderer.diagnostic.unexpected_table",
        )[1](result=evr)

    # Succeeded EVR: no table.
    assert _render(evr_success) is None
    # Failed EVR without a "result" payload: no table.
    assert _render(evr_failed_no_result) is None
    # Failed EVR lacking both the unexpected list and counts: no table.
    assert _render(evr_failed_no_unexpected_list_or_counts) is None

    expected_table_json = {
        "content_block_type": "table",
        "table": [[value] for value in range(1, 21)],
        "header_row": ["Sampled Unexpected Values"],
        "styling": {"body": {"classes": ["table-bordered", "table-sm", "mt-3"]}},
    }

    # Partial unexpected list -> sampled-values table.
    assert (
        _render(evr_failed_partial_unexpected_list).to_json_dict()
        == expected_table_json
    )
    # Partial unexpected counts -> same sampled-values table.
    assert (
        _render(evr_failed_partial_unexpected_counts).to_json_dict()
        == expected_table_json
    )
def test_ValidationResultsTableContentBlockRenderer_get_unexpected_statement(
    evr_success, evr_failed
):
    """The unexpected_statement renderer summarizes unexpected counts and,
    for EVRs that raised, renders the exception message plus a collapsible
    traceback block; otherwise it returns an empty list."""
    # Succeeded EVR with no "result" payload at all.
    evr_no_result = ExpectationValidationResult(
        success=True,
        exception_info={
            "raised_exception": False,
            "exception_message": None,
            "exception_traceback": None,
        },
        expectation_config=ExpectationConfiguration(
            expectation_type="expect_table_row_count_to_be_between",
            kwargs={"min_value": 0, "max_value": None, "result_format": "SUMMARY"},
        ),
    )
    # Failed EVR whose result has percentages/lists but no "unexpected_count".
    evr_failed_no_unexpected_count = ExpectationValidationResult(
        success=False,
        result={
            "element_count": 1313,
            "missing_count": 0,
            "missing_percent": 0.0,
            "unexpected_percent": 0.2284843869002285,
            "unexpected_percent_nonmissing": 0.2284843869002285,
            "partial_unexpected_list": [
                "Daly, Mr Peter Denis ",
                "Barber, Ms ",
                "Geiger, Miss Emily ",
            ],
            "partial_unexpected_index_list": [77, 289, 303],
            "partial_unexpected_counts": [
                {"value": "Barber, Ms ", "count": 1},
                {"value": "Daly, Mr Peter Denis ", "count": 1},
                {"value": "Geiger, Miss Emily ", "count": 1},
            ],
        },
        exception_info={
            "raised_exception": False,
            "exception_message": None,
            "exception_traceback": None,
        },
        expectation_config=ExpectationConfiguration(
            expectation_type="expect_column_values_to_not_match_regex",
            kwargs={
                "column": "Name",
                "regex": "^\\s+|\\s+$",
                "result_format": "SUMMARY",
            },
        ),
    )

    # test for succeeded evr: nothing to report.
    output_1 = get_renderer_impl(
        object_name=evr_success.expectation_config.expectation_type,
        renderer_type="renderer.diagnostic.unexpected_statement",
    )[1](result=evr_success)
    assert output_1 == []

    # test for failed evr: one "N unexpected values found" statement.
    output_2 = get_renderer_impl(
        object_name=evr_failed.expectation_config.expectation_type,
        renderer_type="renderer.diagnostic.unexpected_statement",
    )[1](result=evr_failed)
    assert output_2 == [
        RenderedStringTemplateContent(
            **{
                "content_block_type": "string_template",
                "string_template": {
                    "template": "\n\n$unexpected_count unexpected values found. $unexpected_percent of $element_count total rows.",
                    "params": {
                        "unexpected_count": "3",
                        "unexpected_percent": "≈0.2285%",
                        "element_count": "1,313",
                    },
                    "tag": "strong",
                    "styling": {"classes": ["text-danger"]},
                },
            }
        )
    ]

    # test for evr with no "result" key: nothing to report.
    output_3 = get_renderer_impl(
        object_name=evr_no_result.expectation_config.expectation_type,
        renderer_type="renderer.diagnostic.unexpected_statement",
    )[1](result=evr_no_result)
    print(json.dumps(output_3, indent=2))
    assert output_3 == []

    # test for evr with no unexpected count: nothing to report.
    output_4 = get_renderer_impl(
        object_name=evr_failed_no_unexpected_count.expectation_config.expectation_type,
        renderer_type="renderer.diagnostic.unexpected_statement",
    )[1](result=evr_failed_no_unexpected_count)
    print(output_4)
    assert output_4 == []

    # test for evr with exception: message block + collapsible traceback.
    evr_failed_exception = ExpectationValidationResult(
        success=False,
        exception_info={
            "raised_exception": True,
            "exception_message": "Unrecognized column: not_a_real_column",
            "exception_traceback": "Traceback (most recent call last):\n...more_traceback...",
        },
        expectation_config=ExpectationConfiguration(
            expectation_type="expect_column_values_to_not_match_regex",
            kwargs={
                "column": "Name",
                "regex": "^\\s+|\\s+$",
                "result_format": "SUMMARY",
            },
        ),
    )
    output_5 = get_renderer_impl(
        object_name=evr_failed_exception.expectation_config.expectation_type,
        renderer_type="renderer.diagnostic.unexpected_statement",
    )[1](result=evr_failed_exception)
    output_5 = [content.to_json_dict() for content in output_5]
    expected_output_5 = [
        {
            "content_block_type": "string_template",
            "string_template": {
                "template": "\n\n$expectation_type raised an exception:\n$exception_message",
                "params": {
                    "expectation_type": "expect_column_values_to_not_match_regex",
                    "exception_message": "Unrecognized column: not_a_real_column",
                },
                "tag": "strong",
                "styling": {
                    "classes": ["text-danger"],
                    "params": {
                        "exception_message": {"tag": "code"},
                        "expectation_type": {
                            "classes": ["badge", "badge-danger", "mb-2"]
                        },
                    },
                },
            },
        },
        {
            "content_block_type": "collapse",
            "collapse_toggle_link": "Show exception traceback...",
            "collapse": [
                {
                    "content_block_type": "string_template",
                    "string_template": {
                        "template": "Traceback (most recent call last):\n...more_traceback...",
                        "tag": "code",
                    },
                }
            ],
            "inline_link": False,
        },
    ]
    assert output_5 == expected_output_5
def test_ValidationResultsTableContentBlockRenderer_get_observed_value(evr_success):
    """The observed_value diagnostic renderer formats each EVR's result."""

    def _evr(expectation_type, kwargs, result=None):
        # All fixtures are successful EVRs with a clean exception_info;
        # only the expectation config and result payload vary.
        return ExpectationValidationResult(
            success=True,
            result=result,
            exception_info={
                "raised_exception": False,
                "exception_message": None,
                "exception_traceback": None,
            },
            expectation_config=ExpectationConfiguration(
                expectation_type=expectation_type, kwargs=kwargs
            ),
        )

    row_count_kwargs = {"min_value": 0, "max_value": None, "result_format": "SUMMARY"}
    column_kwargs = {"column": "Unnamed: 0", "mostly": 0.5, "result_format": "SUMMARY"}

    evr_no_result_key = _evr("expect_table_row_count_to_be_between", row_count_kwargs)
    evr_expect_column_values_to_not_be_null = _evr(
        "expect_column_values_to_not_be_null",
        column_kwargs,
        result={
            "element_count": 1313,
            "unexpected_count": 1050,
            "unexpected_percent": 79.96953541508,
            "partial_unexpected_list": [],
        },
    )
    evr_expect_column_values_to_be_null = _evr(
        "expect_column_values_to_be_null",
        column_kwargs,
        result={
            "element_count": 1313,
            "unexpected_count": 0,
            "unexpected_percent": 0.0,
            "partial_unexpected_list": [],
        },
    )
    evr_success_zero = _evr(
        "expect_table_row_count_to_be_between",
        row_count_kwargs,
        result={"observed_value": 0},
    )

    def _observed(evr):
        # Resolve and invoke the observed_value renderer for the EVR's type.
        return get_renderer_impl(
            object_name=evr.expectation_config.expectation_type,
            renderer_type="renderer.diagnostic.observed_value",
        )[1](result=evr)

    # evr.result["observed_value"] exists -> formatted number.
    assert _observed(evr_success) == "1,313"
    # evr.result missing entirely -> placeholder.
    assert _observed(evr_no_result_key) == "--"
    # expect_column_values_to_not_be_null -> percent not null.
    assert _observed(evr_expect_column_values_to_not_be_null) == "≈20.03% not null"
    # expect_column_values_to_be_null -> percent null.
    assert _observed(evr_expect_column_values_to_be_null) == "100% null"
    # Zero observed value is rendered as "0", not as a placeholder.
    assert _observed(evr_success_zero) == "0"
def _get_content_block_fn(cls, expectation_type):
    """Return the prescriptive renderer callable registered for the given
    expectation type, or None when no such renderer exists."""
    renderer_tuple = get_renderer_impl(
        object_name=expectation_type,
        renderer_type="renderer.prescriptive",
    )
    if not renderer_tuple:
        return None
    # get_renderer_impl returns a pair; index 1 holds the callable.
    return renderer_tuple[1]
def render(cls, render_object, **kwargs):
    """Render one ExpectationConfiguration/EVR, or a list of them, into this
    class's rendered content-block type.

    For a list, each item is rendered with its prescriptive renderer (falling
    back to the class's "missing content block" renderer on error or when no
    renderer is registered) and the results are wrapped in
    cls._rendered_component_type. For a single object, the rendered result
    list is returned directly (or None if nothing was produced).
    """
    cls.validate_input(render_object)
    exception_list_content_block = kwargs.get("exception_list_content_block")
    data_docs_exception_message = f"""\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
    """
    runtime_configuration = {
        "styling": cls._get_element_styling(),
        "include_column_name": kwargs.pop("include_column_name", None),
    }
    if isinstance(render_object, list):
        blocks = []
        # has_failed_evr is tri-state: None when rendering configurations
        # (not validation results), otherwise True/False.
        has_failed_evr = (
            False
            if isinstance(render_object[0], ExpectationValidationResult)
            else None
        )
        for obj_ in render_object:
            expectation_type = cls._get_expectation_type(obj_)
            content_block_fn = cls._get_content_block_fn(expectation_type)
            if isinstance(obj_, ExpectationValidationResult) and not obj_.success:
                has_failed_evr = True
            if content_block_fn is not None and not exception_list_content_block:
                try:
                    if isinstance(obj_, ExpectationValidationResult):
                        expectation_config = obj_.expectation_config
                        result = content_block_fn(
                            configuration=expectation_config,
                            result=obj_,
                            runtime_configuration=runtime_configuration,
                            **kwargs,
                        )
                    else:
                        result = content_block_fn(
                            configuration=obj_,
                            runtime_configuration=runtime_configuration,
                            **kwargs,
                        )
                except Exception as e:
                    # Rendering failures are logged, then the item is re-rendered
                    # with the "missing content block" fallback so data docs
                    # still show something for it.
                    exception_traceback = traceback.format_exc()
                    exception_message = (
                        data_docs_exception_message
                        + f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
                    )
                    logger.error(exception_message)
                    if isinstance(obj_, ExpectationValidationResult):
                        content_block_fn = cls._get_content_block_fn(
                            "_missing_content_block_fn"
                        )
                        expectation_config = obj_.expectation_config
                        result = content_block_fn(
                            configuration=expectation_config,
                            result=obj_,
                            runtime_configuration=runtime_configuration,
                            **kwargs,
                        )
                    else:
                        content_block_fn = cls._missing_content_block_fn
                        result = content_block_fn(
                            configuration=obj_,
                            runtime_configuration=runtime_configuration,
                            **kwargs,
                        )
            else:
                # No registered renderer (or exception-list mode): use the
                # missing-content-block fallback directly.
                if isinstance(obj_, ExpectationValidationResult):
                    content_block_fn = (
                        cls._get_content_block_fn("_missing_content_block_fn")
                        if not exception_list_content_block
                        else cls._missing_content_block_fn
                    )
                    expectation_config = obj_.expectation_config
                    result = content_block_fn(
                        configuration=expectation_config,
                        result=obj_,
                        runtime_configuration=runtime_configuration,
                        **kwargs,
                    )
                else:
                    content_block_fn = cls._missing_content_block_fn
                    result = content_block_fn(
                        configuration=obj_,
                        runtime_configuration=runtime_configuration,
                        **kwargs,
                    )
            if result is not None:
                # NOTE(review): nesting below reconstructed from flattened
                # source — the horizontal rule appears to be appended for every
                # ExpectationConfiguration, not only when meta notes exist.
                if isinstance(obj_, ExpectationConfiguration):
                    expectation_meta_notes = cls._render_expectation_meta_notes(
                        obj_
                    )
                    if expectation_meta_notes:
                        # this adds collapse content block to expectation string
                        result[0] = [result[0], expectation_meta_notes]
                    horizontal_rule = RenderedStringTemplateContent(
                        **{
                            "content_block_type": "string_template",
                            "string_template": {
                                "template": "",
                                "tag": "hr",
                                "styling": {
                                    "classes": ["mt-1", "mb-1"],
                                },
                            },
                            "styling": {
                                "parent": {"styles": {"list-style-type": "none"}}
                            },
                        }
                    )
                    result.append(horizontal_rule)
                blocks += result
        if len(blocks) > 0:
            rendered_component_type_init_kwargs = {
                cls._content_block_type: blocks,
                "styling": cls._get_content_block_styling(),
            }
            # Merge in any class-level default constructor kwargs.
            rendered_component_type_default_init_kwargs = getattr(
                cls, "_rendered_component_default_init_kwargs", {}
            )
            rendered_component_type_init_kwargs.update(
                rendered_component_type_default_init_kwargs
            )
            content_block = cls._rendered_component_type(
                **rendered_component_type_init_kwargs
            )
            cls._process_content_block(content_block, has_failed_evr=has_failed_evr)
            return content_block
        else:
            return None
    else:
        # Single-object path: same render/fallback logic as above, but the
        # result list is returned directly instead of being wrapped.
        expectation_type = cls._get_expectation_type(render_object)
        content_block_fn = get_renderer_impl(
            object_name=expectation_type, renderer_type="renderer.prescriptive"
        )
        content_block_fn = content_block_fn[1] if content_block_fn else None
        if content_block_fn is not None and not exception_list_content_block:
            try:
                if isinstance(render_object, ExpectationValidationResult):
                    result = content_block_fn(
                        result=render_object,
                        runtime_configuration=runtime_configuration,
                        **kwargs,
                    )
                else:
                    result = content_block_fn(
                        configuration=render_object,
                        runtime_configuration=runtime_configuration,
                        **kwargs,
                    )
            except Exception as e:
                exception_traceback = traceback.format_exc()
                exception_message = (
                    data_docs_exception_message
                    + f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
                )
                logger.error(exception_message)
                if isinstance(render_object, ExpectationValidationResult):
                    content_block_fn = cls._get_content_block_fn(
                        "_missing_content_block_fn"
                    )
                    result = content_block_fn(
                        result=render_object,
                        runtime_configuration=runtime_configuration,
                        **kwargs,
                    )
                else:
                    content_block_fn = cls._missing_content_block_fn
                    result = content_block_fn(
                        configuration=render_object,
                        runtime_configuration=runtime_configuration,
                        **kwargs,
                    )
        else:
            if isinstance(render_object, ExpectationValidationResult):
                content_block_fn = (
                    cls._get_content_block_fn("_missing_content_block_fn")
                    if not exception_list_content_block
                    else cls._missing_content_block_fn
                )
                result = content_block_fn(
                    result=render_object,
                    runtime_configuration=runtime_configuration,
                    **kwargs,
                )
            else:
                content_block_fn = cls._missing_content_block_fn
                result = content_block_fn(
                    configuration=render_object,
                    runtime_configuration=runtime_configuration,
                    **kwargs,
                )
        if result is not None:
            if isinstance(render_object, ExpectationConfiguration):
                expectation_meta_notes = cls._render_expectation_meta_notes(
                    render_object
                )
                if expectation_meta_notes:
                    result.append(expectation_meta_notes)
        return result