def test_generate_secret_access_token_wrong_param_name_gives_null(self):
    """Asking for a token on a param name the step has no secret for gives token=None."""
    user = User.objects.create()
    workflow = Workflow.create_and_init(owner=user)
    create_module_zipfile(
        "googlesheets", spec_kwargs={"parameters": [TestGoogleSecret]}
    )
    # The step stores a secret under "google_credentials"...
    step = workflow.tabs.first().steps.create(
        module_id_name="googlesheets",
        order=0,
        slug="step-1",
        secrets={"google_credentials": {"name": "a", "secret": "hello"}},
    )
    # ...but we request a token for "twitter_credentials" — a name the
    # module spec doesn't declare — so the handler returns a null token.
    response = self.run_handler(
        generate_secret_access_token,
        user=user,
        workflow=workflow,
        stepId=step.id,
        param="twitter_credentials",
    )
    self.assertResponse(response, data={"token": None})
def test_load_simple(self):
    """load_database_objects returns step + zipfile and empty/None fetch inputs."""
    workflow = Workflow.create_and_init()
    module_zipfile = create_module_zipfile("foo")
    step = workflow.tabs.first().steps.create(
        order=0, slug="step-1", module_id_name="foo"
    )
    # migrate_params logs through "cjwstate.params"; capture so logs don't leak.
    with self.assertLogs("cjwstate.params", level=logging.INFO):
        result = self.run_with_async_db(
            fetch.load_database_objects(workflow.id, step.id)
        )
    self.assertEqual(result.step, step)
    self.assertEqual(result.module_zipfile, module_zipfile)
    self.assertEqual(result.migrated_params_or_error, {})
    # No StoredObject and no upstream cached render result were created.
    self.assertIsNone(result.stored_object)
    self.assertIsNone(result.input_cached_render_result)
def test_add_module_param_values_not_object(self):
    """add_module rejects a paramValues argument that is not a JSON object."""
    user = User.objects.create(username="******", email="*****@*****.**")
    workflow = Workflow.create_and_init(owner=user)  # tab-1
    create_module_zipfile(
        "amodule",
        spec_kwargs={"parameters": [{"id_name": "foo", "type": "string"}]},
    )
    # paramValues is a string, not an Object -> BadRequest
    response = self.run_handler(
        add_module,
        user=user,
        workflow=workflow,
        tabSlug="tab-1",
        slug="step-1",
        position=3,
        moduleIdName="amodule",
        paramValues="foobar",
    )
    self.assertResponse(response, error="BadRequest: paramValues must be an Object")
def test_email_no_delta_when_not_changed(self, email):
    """No notification email when a re-render produces the same table as cached."""
    workflow = Workflow.objects.create()
    tab = workflow.tabs.create(position=0)
    create_module_zipfile(
        "mod",
        spec_kwargs={"loads_data": True},
        python_code='import pandas as pd\ndef render(table, params): return pd.DataFrame({"A": [1]})',
    )
    # notifications=True: a *changed* result would trigger an email.
    step = tab.steps.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=2,
        module_id_name="mod",
        notifications=True,
    )
    # stale, same result
    write_to_rendercache(workflow, step, 1, make_table(make_column("A", [1])))
    self._execute(workflow)
    email.assert_not_called()
def test_fetch_result_happy_path(self):
    """render() receives the step's fetch_errors and stored fetch file."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    wf_module = tab.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
        fetch_errors=[
            RenderError(I18nMessage("foo", {}, "module")),
            RenderError(I18nMessage("bar", {"x": "y"}, "cjwmodule")),
        ],
    )
    # Store a Parquet file and point the step's stored_data_version at it.
    with parquet_file({"A": [1]}) as path:
        so = create_stored_object(workflow.id, wf_module.id, path)
    wf_module.stored_data_version = so.stored_at
    wf_module.save(update_fields=["stored_data_version"])
    # The module's own render() asserts it sees the fetch errors and data.
    # NOTE(review): `pa.parquet.read_table` returns a pyarrow.Table, which
    # `assert_frame_equal` compares against a DataFrame — confirm this module
    # code actually passes in the sandbox, or the test fails inside render().
    module_zipfile = create_module_zipfile(
        "x",
        python_code=textwrap.dedent(
            """
            import pyarrow as pa
            import pandas as pd
            from pandas.testing import assert_frame_equal
            from cjwkernel.types import RenderError, I18nMessage

            def render(table, params, *, fetch_result, **kwargs):
                assert fetch_result.errors == [
                    RenderError(I18nMessage("foo", {}, "module")),
                    RenderError(I18nMessage("bar", {"x": "y"}, "cjwmodule")),
                ]
                fetch_dataframe = pa.parquet.read_table(str(fetch_result.path))
                assert_frame_equal(fetch_dataframe, pd.DataFrame({"A": [1]}))
                return pd.DataFrame()
            """
        ),
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_wfmodule(
                self.chroot_context,
                workflow,
                wf_module,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
def test_fetch_initial_workflow(self, render, fetch):
    """Opening a lesson queues a fetch (step busy) and does not render yet."""
    render.side_effect = async_noop
    fetch.side_effect = async_noop
    create_module_zipfile(
        "amodule",
        spec_kwargs=dict(
            parameters=[{"id_name": "foo", "type": "string"}], loads_data=True
        ),
    )
    # Identity migrate_params: pass params through unchanged.
    self.kernel.migrate_params.side_effect = lambda m, p: p
    self.log_in()
    with self.assertLogs("cjwstate.params"):
        response = self.client.get("/lessons/en/a-lesson")
    state = response.context_data["initState"]
    steps = state["steps"]
    step1 = list(steps.values())[0]
    self.assertEqual(step1["is_busy"], True)  # because we sent a fetch
    # We should be rendering the modules
    fetch.assert_called_with(state["workflow"]["id"], step1["id"])
    render.assert_not_called()
def test_resume_without_rerunning_unneeded_renders(self):
    """A step with a fresh cached result is not re-rendered; only step2 runs."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    create_module_zipfile(
        # If this runs on step1, it'll return pd.DataFrame().
        # If this runs on step2, it'll return step1-output * 2.
        # ... step2's output depends on whether we run this on
        # step1.
        "mod",
        spec_kwargs={"loads_data": True},
        python_code="def render(table, params): return table * 2",
    )
    # step1: has a valid, cached result
    step1 = tab.steps.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=1,
        module_id_name="mod",
    )
    write_to_rendercache(workflow, step1, 1, make_table(make_column("A", [1])))
    # step2: has no cached result (must be rendered)
    step2 = tab.steps.create(
        order=1,
        slug="step-2",
        last_relevant_delta_id=1,
        module_id_name="mod",
    )
    self._execute(workflow)
    step2.refresh_from_db()
    # [2] == [1] * 2 proves step2 consumed step1's *cached* output.
    with open_cached_render_result(step2.cached_render_result) as actual:
        assert_arrow_table_equals(actual.table, make_table(make_column("A", [2])))
def test_report_module_error(self):
    """A render() that raises is reported as a user-visible bug.

    The step's render returns no columns, writes an empty output file, and
    caches a user_visible_bug_during_render error carrying the sandbox's
    exit message.
    """
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
    )
    # render() calls an undefined name, so the module process dies.
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        python_code="def render(table, params):\n undefined()",
    )
    with self.assertLogs(level=logging.INFO):
        result = self.run_with_async_db(
            execute_step(
                chroot_context=self.chroot_context,
                workflow=workflow,
                step=step,
                module_zipfile=module_zipfile,
                params={},
                tab_name=tab.name,
                input_path=self.empty_table_path,
                input_table_columns=[],
                tab_results={},
                output_path=self.output_path,
            )
        )
    # Fixed: was the deprecated `assertEquals` alias; use assertEqual,
    # matching every other assertion in this file.
    self.assertEqual(result.columns, [])
    self.assertEqual(self.output_path.read_bytes(), b"")
    step.refresh_from_db()
    self.assertEqual(
        step.cached_render_result.errors,
        [
            RenderError(
                I18nMessage(
                    "py.renderer.execute.step.user_visible_bug_during_render",
                    {
                        "message": "exit code 1: NameError: name 'undefined' is not defined"
                    },
                    None,
                )
            )
        ],
    )
def test_delete_deltas_without_init_delta(self):
    """Deleting a workflow with a delta chain but no init delta must not crash."""
    workflow = Workflow.objects.create(name="A")
    tab = workflow.tabs.create(position=0)
    # Build a short delta chain: rename, add a module, rename again.
    self.run_with_async_db(
        commands.do(
            ChangeWorkflowTitleCommand, workflow_id=workflow.id, new_value="B"
        )
    )
    create_module_zipfile("x")
    self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=workflow.id,
            tab=tab,
            slug="step-1",
            module_id_name="x",
            position=0,
            param_values={},
        )
    )
    self.run_with_async_db(
        commands.do(
            ChangeWorkflowTitleCommand, workflow_id=workflow.id, new_value="C"
        )
    )
    # Cascade-deletes all deltas; the assertion only documents "no crash".
    workflow.delete()
    self.assertTrue(True)  # no crash
def test_migrated_params_is_error(self):
    """When migrated-params is already a ModuleExitedError, fetch wraps it as a bug."""
    with self.assertLogs("fetcher.fetch", level=logging.ERROR):
        result = fetch.fetch_or_wrap_error(
            self.ctx,
            self.chroot_context,
            self.basedir,
            "mod",
            create_module_zipfile("mod"),
            # Passed in place of migrated params: a module crash from earlier.
            ModuleExitedError("mod", 1, "Traceback:\n\n\nRuntimeError: bad"),
            {},
            None,
            None,
            self.output_path,
        )
    # Only the final traceback line is surfaced in the bug message.
    self.assertEqual(result, self._bug_err("exit code 1: RuntimeError: bad"))
def test_load_migrate_params_raise_module_error(self):
    """A migrate_params() that raises becomes a ModuleExitedError in the result."""
    workflow = Workflow.create_and_init()
    create_module_zipfile(
        "mod",
        spec_kwargs={"parameters": [{"id_name": "a", "type": "string"}]},
        python_code=textwrap.dedent(
            """
            def migrate_params(params):
                raise RuntimeError("bad")
            """
        ),
    )
    step = workflow.tabs.first().steps.create(
        order=0, slug="step-1", module_id_name="mod", params={"a": "b"}
    )
    with self.assertLogs("cjwstate.params", level=logging.INFO):
        result = self.run_with_async_db(
            fetch.load_database_objects(workflow.id, step.id)
        )
    # The error is returned, not raised: callers decide how to report it.
    self.assertIsInstance(result.migrated_params_or_error, ModuleExitedError)
    self.assertRegex(result.migrated_params_or_error.log, ".*RuntimeError: bad")
def test_get_cached(self):
    """A current cached_migrated_params entry is served without invoking the kernel."""
    workflow = Workflow.create_and_init()
    param_spec = {"id_name": "foo", "type": "string"}
    module_zipfile = create_module_zipfile(
        "yay", spec_kwargs={"parameters": [param_spec]}
    )
    cached = {"foo": "bar"}
    # Cache version matches the module version, so the cache is "fresh".
    step = workflow.tabs.first().steps.create(
        order=0,
        module_id_name="yay",
        params={},
        cached_migrated_params=cached,
        cached_migrated_params_module_version=module_zipfile.version,
    )
    result = get_migrated_params(step)
    self.assertEqual(result, cached)
    # Cache hit: migrate_params must not have been sandboxed at all.
    self.kernel.migrate_params.assert_not_called()
def test_execute_cache_hit(self):
    """Execution skips rendering entirely when every step's cache is fresh."""
    workflow = Workflow.objects.create()
    create_module_zipfile("mod")
    tab = workflow.tabs.create(position=0)
    step1 = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="mod",
        last_relevant_delta_id=2,
    )
    # Cached at delta 2 == last_relevant_delta_id -> fresh.
    cache_render_result(workflow, step1, 2, RenderResult(arrow_table({"A": [1]})))
    step2 = tab.steps.create(
        order=1,
        slug="step-2",
        module_id_name="mod",
        last_relevant_delta_id=1,
    )
    cache_render_result(workflow, step2, 1, RenderResult(arrow_table({"B": [2]})))
    # If execution tried to render anything, this mock would record a call.
    with patch.object(Kernel, "render", return_value=None):
        self._execute(workflow)
        Kernel.render.assert_not_called()
def test_generate_secret_access_token_no_service_gives_error(self):
    """Requesting a token for a service with no token support yields AuthError."""
    owner = User.objects.create()
    workflow = Workflow.create_and_init(owner=owner)
    create_module_zipfile(
        "googlesheets", spec_kwargs={"parameters": [TestGoogleSecret]}
    )
    stored_secret = {"name": "a", "secret": "hello"}
    step = workflow.tabs.first().steps.create(
        module_id_name="googlesheets",
        order=0,
        slug="step-1",
        secrets={"google_credentials": stored_secret},
    )
    # The param exists and has a secret, but only twitter tokens are supported.
    response = self.run_handler(
        generate_secret_access_token,
        user=owner,
        workflow=workflow,
        stepId=step.id,
        param="google_credentials",
    )
    self.assertResponse(response, error="AuthError: we only support twitter")
def test_execute_cache_hit(self):
    """Execution skips rendering entirely when every step's cache is fresh."""
    workflow = Workflow.objects.create()
    create_module_zipfile("mod")
    tab = workflow.tabs.create(position=0)
    step1 = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="mod",
        last_relevant_delta_id=2,
    )
    # Cached at delta 2 == last_relevant_delta_id -> fresh.
    write_to_rendercache(workflow, step1, 2, make_table(make_column("A", ["a"])))
    step2 = tab.steps.create(
        order=1,
        slug="step-2",
        module_id_name="mod",
        last_relevant_delta_id=1,
    )
    write_to_rendercache(workflow, step2, 1, make_table(make_column("B", ["b"])))
    # If execution tried to render anything, this mock would record a call.
    with patch.object(Kernel, "render", return_value=None):
        self._execute(workflow)
        Kernel.render.assert_not_called()
def test_add_module_default_params(self):
    """AddStep fills in spec defaults when param_values is empty."""
    workflow = Workflow.create_and_init()
    create_module_zipfile(
        "blah",
        spec_kwargs={
            "parameters": [
                {"id_name": "a", "type": "string", "default": "x"},
                {"id_name": "c", "type": "checkbox", "name": "C", "default": True},
            ]
        },
    )
    cmd = self.run_with_async_db(
        commands.do(
            AddStep,
            workflow_id=workflow.id,
            tab=workflow.tabs.first(),
            slug="step-1",
            module_id_name="blah",
            position=0,
            param_values={},  # empty -> every param falls back to its default
        )
    )
    self.assertEqual(cmd.step.params, {"a": "x", "c": True})
def test_email_delta_when_errors_change(self, email_delta):
    """A change from one render *error* to a different error still emails the owner."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id - 1,
        notifications=True,
    )
    # We need to actually populate the cache to set up the test. The code
    # under test will only try to open the render result if the database
    # says there's something there.
    rendercache.cache_render_result(
        workflow,
        step,
        workflow.last_delta_id - 1,
        RenderResult(
            errors=[
                RenderError(
                    I18nMessage("py.renderer.execute.step.noModule", {}, None)
                )
            ]
        ),
    )
    # Bump last_relevant_delta_id so the cached result is stale.
    step.last_relevant_delta_id = workflow.last_delta_id
    step.save(update_fields=["last_relevant_delta_id"])
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        # returns different error
        python_code='import pandas as pd\ndef render(table, params): return [{"id": "err"}]',
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_step(
                self.chroot_context,
                workflow,
                step,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
    email_delta.assert_called()  # there's new data
def test_email_delta_when_errors_change(self, email_delta):
    """A change from one render *error* to a different error still emails the owner."""
    user = create_test_user()
    workflow = Workflow.create_and_init(owner_id=user.id)
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
        notifications=True,
    )
    # We need to actually populate the cache to set up the test. The code
    # under test will only try to open the render result if the database
    # says there's something there.
    write_to_rendercache(
        workflow,
        step,
        workflow.last_delta_id - 1,  # stale
        table=make_table(),
        errors=[
            RenderError(I18nMessage("py.renderer.execute.step.noModule", {}, None))
        ],
    )
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        # returns different error
        python_code='import pandas as pd\ndef render(table, params): return [{"id": "err"}]',
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_step(
                chroot_context=self.chroot_context,
                workflow=workflow,
                step=step,
                module_zipfile=module_zipfile,
                params={},
                tab_name=tab.name,
                input_path=self.empty_table_path,
                input_table_columns=[],
                tab_results={},
                output_path=self.output_path,
            )
        )
    email_delta.assert_called()  # there's new data
def test_fetch_module_error(self):
    """A crashing fetch() sandbox becomes a bug-error result, not an exception."""
    self.kernel.fetch.side_effect = ModuleExitedError("mod", 1, "RuntimeError: bad")
    with self.assertLogs(level=logging.ERROR):
        result = fetch.fetch_or_wrap_error(
            self.ctx,
            self.chroot_context,
            self.basedir,
            "mod",
            create_module_zipfile("mod"),
            {},
            {},
            None,
            None,
            self.output_path,
        )
    self.assertEqual(result, self._bug_err("exit code 1: RuntimeError: bad"))
def test_email_delta(self, email_delta):
    """A re-render with different output emails a delta (old vs. new result)."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    wf_module = tab.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id - 1,
        notifications=True,
    )
    # Cache an "old" result, then bump last_relevant_delta_id to stale it.
    rendercache.cache_render_result(
        workflow,
        wf_module,
        workflow.last_delta_id - 1,
        RenderResult(arrow_table({"A": [1]})),
    )
    wf_module.last_relevant_delta_id = workflow.last_delta_id
    wf_module.save(update_fields=["last_relevant_delta_id"])
    # The module now renders [2] -- different from the cached [1].
    module_zipfile = create_module_zipfile(
        "x",
        python_code='import pandas as pd\ndef render(table, params): return pd.DataFrame({"A": [2]})',
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_wfmodule(
                self.chroot_context,
                workflow,
                wf_module,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
    email_delta.assert_called()
    # The delta passed to the mailer carries both old and new results.
    delta = email_delta.call_args[0][0]
    self.assertEqual(delta.user, workflow.owner)
    self.assertEqual(delta.workflow, workflow)
    self.assertEqual(delta.wf_module, wf_module)
    self.assertEqual(delta.old_result, RenderResult(arrow_table({"A": [1]})))
    self.assertEqual(delta.new_result, RenderResult(arrow_table({"A": [2]})))
def test_email_delta_when_stale_crr_is_unreachable(self, email_delta, read_cache):
    """If the stale cached result has no readable table, skip reading it but still email."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id - 1,
        notifications=True,
    )
    # We need to actually populate the cache to set up the test. The code
    # under test will only try to open the render result if the database
    # says there's something there.
    rendercache.cache_render_result(
        workflow,
        step,
        workflow.last_delta_id - 1,
        RenderResult(arrow_table({})),  # does not write a Parquet file
    )
    # Bump last_relevant_delta_id so the cached result is stale.
    step.last_relevant_delta_id = workflow.last_delta_id
    step.save(update_fields=["last_relevant_delta_id"])
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        # returns different data
        python_code='import pandas as pd\ndef render(table, params): return pd.DataFrame({"A": [2]})',
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_step(
                self.chroot_context,
                workflow,
                step,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
    read_cache.assert_not_called()  # it would give CorruptCacheError
    email_delta.assert_called()  # there's new data
def test_email_delta_ignore_corrupt_cache_error(self, email_delta, read_cache):
    """A CorruptCacheError while reading the old result suppresses the delta email."""
    read_cache.side_effect = rendercache.CorruptCacheError
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    wf_module = tab.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id - 1,
        notifications=True,
    )
    # We need to actually populate the cache to set up the test. The code
    # under test will only try to open the render result if the database
    # says there's something there.
    rendercache.cache_render_result(
        workflow,
        wf_module,
        workflow.last_delta_id - 1,
        RenderResult(arrow_table({"A": [1]})),
    )
    wf_module.last_relevant_delta_id = workflow.last_delta_id
    wf_module.save(update_fields=["last_relevant_delta_id"])
    module_zipfile = create_module_zipfile(
        "x",
        # returns different data -- but CorruptCacheError means we won't care.
        python_code='import pandas as pd\ndef render(table, params): return pd.DataFrame({"A": [2]})',
    )
    with self.assertLogs(level=logging.ERROR):
        self.run_with_async_db(
            execute_wfmodule(
                self.chroot_context,
                workflow,
                wf_module,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
    email_delta.assert_not_called()
def test_email_delta_ignore_corrupt_cache_error(self, email_delta, read_cache):
    """A CorruptCacheError while reading the old result suppresses the delta email."""
    user = create_test_user()
    workflow = Workflow.create_and_init(owner_id=user.id)
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
        notifications=True,
    )
    # We need to actually populate the cache to set up the test. The code
    # under test will only try to open the render result if the database
    # says there's something there.
    write_to_rendercache(
        workflow,
        step,
        workflow.last_delta_id - 1,  # stale
        make_table(make_column("A", [1])),
    )
    read_cache.side_effect = rendercache.CorruptCacheError
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        # returns different data -- but CorruptCacheError means we won't care.
        python_code='import pandas as pd\ndef render(table, params): return pd.DataFrame({"A": [2]})',
    )
    with self.assertLogs(level=logging.ERROR):
        self.run_with_async_db(
            execute_step(
                chroot_context=self.chroot_context,
                workflow=workflow,
                step=step,
                module_zipfile=module_zipfile,
                params={},
                tab_name=tab.name,
                input_path=self.empty_table_path,
                input_table_columns=[],
                tab_results={},
                output_path=self.output_path,
            )
        )
    email_delta.assert_not_called()
def setUp(self):
    """Create a workflow/tab/module fixture; forbid accidental migrate_params calls."""
    super().setUp()
    self.workflow = Workflow.objects.create()
    self.tab = self.workflow.tabs.create(position=0)
    self.module_zipfile = create_module_zipfile(
        "loadsomething",
        spec_kwargs={"parameters": [{"id_name": "url", "type": "string"}]},
    )
    # Any test that reaches the kernel's migrate_params is a bug: params
    # should come from the cache, so make the call blow up loudly.
    self.kernel.migrate_params.side_effect = RuntimeError(
        "AddModuleCommand and tests should cache migrated params correctly"
    )
    self.delta = InitWorkflowCommand.create(self.workflow)
def test_input_crr(self, downloaded_parquet_file, clean_value):
    """fetch downloads the input cached render result and passes its metadata on."""

    def do_fetch(
        compiled_module,
        chroot_context,
        basedir,
        params,
        secrets,
        last_fetch_result,
        input_parquet_filename,
        output_filename,
    ):
        # Fake kernel fetch: just copy the input file to the output slot.
        shutil.copy(basedir / input_parquet_filename, basedir / output_filename)
        return FetchResult(basedir / output_filename)

    self.kernel.fetch.side_effect = do_fetch
    clean_value.return_value = {}
    with tempfile_context(dir=self.basedir, suffix=".parquet") as parquet_path:
        # Sentinel bytes let us recognize this exact file in the result.
        parquet_path.write_bytes(b"abc123")
        downloaded_parquet_file.return_value = parquet_path
        input_metadata = TableMetadata(3, [Column("A", ColumnType.Text())])
        input_crr = CachedRenderResult(1, 2, 3, "ok", [], {}, input_metadata)
        with self.assertLogs("fetcher.fetch", level=logging.INFO):
            result = fetch.fetch_or_wrap_error(
                self.ctx,
                self.chroot_context,
                self.basedir,
                "mod",
                create_module_zipfile("mod"),
                {},
                {},
                None,
                input_crr,
                self.output_path,
            )
        # Passed file is downloaded from rendercache
        self.assertEqual(result.path.read_bytes(), b"abc123")
        # clean_value() is called with input metadata from CachedRenderResult
        clean_value.assert_called()
        self.assertEqual(clean_value.call_args[0][2], input_metadata)
def test_deleted_step_race(self):
    """Migrating params on an already-deleted step still works on the in-memory object."""
    workflow = Workflow.create_and_init()
    module_zipfile = create_module_zipfile(
        module_id="yay",
        spec_kwargs={"parameters": [{"id_name": "foo", "type": "string"}]},
    )
    step = workflow.tabs.first().steps.create(
        order=0, module_id_name="yay", params={}
    )
    # Simulate the race: the DB row vanishes before get_migrated_params runs.
    step.delete()
    self.kernel.migrate_params.return_value = {"foo": "bar"}
    with self.assertLogs(level=logging.INFO):
        self.assertEqual(get_migrated_params(step), {"foo": "bar"})
    # The in-memory step object is still updated with the cache fields.
    self.assertEqual(step.cached_migrated_params, {"foo": "bar"})
    self.assertEqual(
        step.cached_migrated_params_module_version, module_zipfile.version
    )
def test_email_delta(self, email_delta):
    """A re-render with different output emails a delta to the owner."""
    user = create_test_user()
    workflow = Workflow.create_and_init(owner_id=user.id)
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
        notifications=True,
    )
    write_to_rendercache(
        workflow,
        step,
        workflow.last_delta_id - 1,  # stale
        make_table(make_column("A", [1])),
    )
    # The module now renders [2] -- different from the cached [1].
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        python_code='import pandas as pd\ndef render(table, params): return pd.DataFrame({"A": [2]})',
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_step(
                chroot_context=self.chroot_context,
                workflow=workflow,
                step=step,
                module_zipfile=module_zipfile,
                params={},
                tab_name=tab.name,
                input_path=self.empty_table_path,
                input_table_columns=[],
                tab_results={},
                output_path=self.output_path,
            )
        )
    email_delta.assert_called()
    # The delta passed to the mailer identifies the owner, workflow and step.
    delta = email_delta.call_args[0][0]
    self.assertEqual(delta.user, workflow.owner)
    self.assertEqual(delta.workflow, workflow)
    self.assertEqual(delta.step, step)
def test_fetch_result_deleted_file_means_none(self):
    """When the stored fetch file is missing from storage, render gets fetch_result=None."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
    )
    with parquet_file({"A": [1]}) as path:
        so = create_stored_object(workflow.id, step.id, path)
    step.stored_data_version = so.stored_at
    step.save(update_fields=["stored_data_version"])
    # Now delete the file on S3 -- but leave the DB pointing to it.
    s3.remove(s3.StoredObjectsBucket, so.key)
    # The module itself asserts it received fetch_result=None.
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        python_code=textwrap.dedent(
            """
            import pandas as pd


            def render(table, params, *, fetch_result, **kwargs):
                assert fetch_result is None
                return pd.DataFrame()
            """
        ),
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_step(
                chroot_context=self.chroot_context,
                workflow=workflow,
                step=step,
                module_zipfile=module_zipfile,
                params={},
                tab_name=tab.name,
                input_path=self.empty_table_path,
                input_table_columns=[],
                tab_results={},
                output_path=self.output_path,
            )
        )
def test_fetch_result_deleted_file_means_none(self):
    """When the stored fetch file is missing from storage, render gets fetch_result=None.

    Fix: removed a dead local ``render(*args, fetch_result, **kwargs)`` stub
    that was defined but never referenced — the module's actual behavior is
    the ``python_code`` string passed to create_module_zipfile below, which
    performs the same ``fetch_result is None`` assertion inside the sandbox.
    """
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    wf_module = tab.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
    )
    with parquet_file({"A": [1]}) as path:
        so = create_stored_object(workflow.id, wf_module.id, path)
    wf_module.stored_data_version = so.stored_at
    wf_module.save(update_fields=["stored_data_version"])
    # Now delete the file on S3 -- but leave the DB pointing to it.
    minio.remove(minio.StoredObjectsBucket, so.key)
    # The module itself asserts it received fetch_result=None.
    module_zipfile = create_module_zipfile(
        "x",
        python_code=textwrap.dedent(
            """
            import pandas as pd


            def render(table, params, *, fetch_result, **kwargs):
                assert fetch_result is None
                return pd.DataFrame()
            """
        ),
    )
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(
            execute_wfmodule(
                self.chroot_context,
                workflow,
                wf_module,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
def test_report_module_error(self):
    """A render() that raises is reported as a user-visible bug error."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
    )
    # render() calls an undefined name, so the module process dies.
    module_zipfile = create_module_zipfile(
        "x",
        spec_kwargs={"loads_data": True},
        python_code="def render(table, params):\n undefined()",
    )
    with self.assertLogs(level=logging.INFO):
        result = self.run_with_async_db(
            execute_step(
                self.chroot_context,
                workflow,
                step,
                module_zipfile,
                {},
                Tab(tab.slug, tab.name),
                RenderResult(),
                {},
                self.output_path,
            )
        )
    # The sandbox's NameError surfaces as a translated "bug in module" error.
    assert_render_result_equals(
        result,
        RenderResult(
            errors=[
                RenderError(
                    I18nMessage(
                        "py.renderer.execute.step.user_visible_bug_during_render",
                        {
                            "message": "exit code 1: NameError: name 'undefined' is not defined"
                        },
                        None,
                    )
                )
            ]
        ),
    )