def test_list_autofetches_ignore_wrong_user(self):
    """Autofetches belonging to a different user must not be listed."""
    requester = User.objects.create(username='******', email='*****@*****.**')
    Workflow.create_and_init(owner=requester)  # requester owns a workflow, too
    other = User.objects.create(username='******', email='*****@*****.**')
    other_workflow = Workflow.create_and_init(owner=other)
    # The only autofetch in the system belongs to `other`, not `requester`.
    other_workflow.tabs.first().wf_modules.create(
        order=0,
        module_id_name='loadurl',
        auto_update_data=True,
        next_update=timezone.now(),
        update_interval=600,
    )
    result = list_autofetches_json({'user': requester, 'session': None})
    self.assertEqual(result['autofetches'], [])
def test_try_set_autofetch_happy_path(self):
    """Enabling autofetch stores the interval and schedules the next update."""
    user = User.objects.create(username="******", email="*****@*****.**")
    workflow = Workflow.create_and_init(owner=user)
    step = workflow.tabs.first().wf_modules.create(order=0, slug="step-1")
    response = self.run_handler(
        try_set_autofetch,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        isAutofetch=True,
        fetchInterval=1200,
    )
    self.assertResponse(
        response, data={"isAutofetch": True, "fetchInterval": 1200}
    )
    step.refresh_from_db()
    self.assertEqual(step.auto_update_data, True)
    self.assertEqual(step.update_interval, 1200)
    # next_update should be ~1200s from now (allow a couple seconds of slop)
    self.assertLess(
        step.next_update, timezone.now() + datetime.timedelta(seconds=1202)
    )
    self.assertGreater(
        step.next_update, timezone.now() + datetime.timedelta(seconds=1198)
    )
def test_try_set_autofetch_exceed_quota(self):
    """Requesting more fetches than the user's quota must be refused."""
    user = User.objects.create(username="******", email="*****@*****.**")
    user.user_profile.max_fetches_per_day = 10
    user.user_profile.save()
    workflow = Workflow.create_and_init(owner=user)
    step = workflow.tabs.first().wf_modules.create(order=0, slug="step-1")
    # 300s interval => 288 fetches/day, far over the 10/day quota.
    response = self.run_handler(
        try_set_autofetch,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        isAutofetch=True,
        fetchInterval=300,
    )
    self.assertEqual(response.error, "")
    quota = response.data["quotaExceeded"]
    self.assertEqual(quota["maxFetchesPerDay"], 10)
    self.assertEqual(quota["nFetchesPerDay"], 288)
    self.assertEqual(quota["autofetches"][0]["workflow"]["id"], workflow.id)
    # The setting must not have been applied.
    step.refresh_from_db()
    self.assertEqual(step.auto_update_data, False)
def test_add_module_param_values_not_object(self):
    """add_module must reject a paramValues that is not a JSON Object."""
    user = User.objects.create(username='******', email='*****@*****.**')
    workflow = Workflow.create_and_init(owner=user)  # creates tab-1
    ModuleVersion.create_or_replace_from_spec({
        'id_name': 'amodule',
        'name': 'A Module',
        'category': 'Clean',
        'parameters': [{'id_name': 'foo', 'type': 'string'}],
    })
    response = self.run_handler(
        add_module,
        user=user,
        workflow=workflow,
        tabSlug='tab-1',
        position=3,
        moduleIdName='amodule',
        paramValues='foobar',  # a str, not an Object
    )
    self.assertResponse(
        response, error='BadRequest: paramValues must be an Object'
    )
def test_set_params_invalid_params(self):
    """set_params must reject values whose keys don't match the module spec."""
    user = User.objects.create(username="******", email="*****@*****.**")
    workflow = Workflow.create_and_init(owner=user)
    step = workflow.tabs.first().wf_modules.create(
        order=0, slug="step-1", module_id_name="x"
    )
    ModuleVersion.create_or_replace_from_spec({
        "id_name": "x",
        "name": "x",
        "category": "Clean",
        "parameters": [{"id_name": "foo", "type": "string"}],
    })
    # "foo1" is not a declared parameter name.
    response = self.run_handler(
        set_params,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        values={"foo1": "bar"},
    )
    self.assertResponse(
        response,
        error=("ValueError: Value {'foo1': 'bar'} has wrong names: "
               "expected names {'foo'}"),
    )
def test_delete_scopes_tab_delete_by_workflow(self):
    """Orphan cleanup on one workflow must not touch another workflow's tabs."""
    workflow = Workflow.create_and_init()
    workflow2 = Workflow.create_and_init()
    # A tab on workflow2 that nothing references. Cleanup of workflow must
    # leave it alone.
    other_tab = workflow2.tabs.create(position=1)
    # Create and undo a delta on workflow, then delete it and its orphans.
    delta = self.run_with_async_db(
        ChangeWorkflowTitleCommand.create(workflow=workflow, new_value="1")
    )
    self.run_with_async_db(delta.backward())  # fix workflow.last_delta_id
    delta.delete_with_successors()
    workflow.delete_orphan_soft_deleted_models()
    # no DoesNotExist: workflow2 was left alone
    other_tab.refresh_from_db()
def test_add_module(self):
    """add_module creates an AddModuleCommand with position, params and tab.

    Fix: the original used the deprecated ``assertEquals`` alias; use
    ``assertEqual`` (``assertEquals`` emits a DeprecationWarning and is
    removed in newer Python versions).
    """
    user = User.objects.create(username='******', email='*****@*****.**')
    workflow = Workflow.create_and_init(owner=user)  # with tab-1
    module_version = ModuleVersion.create_or_replace_from_spec({
        'id_name': 'amodule',
        'name': 'A Module',
        'category': 'Clean',
        'parameters': [{'id_name': 'foo', 'type': 'string'}],
    })
    response = self.run_handler(
        add_module,
        user=user,
        workflow=workflow,
        tabSlug='tab-1',
        position=3,
        moduleIdName='amodule',
        paramValues={'foo': 'bar'},
    )
    self.assertResponse(response, data=None)
    command = AddModuleCommand.objects.first()
    self.assertEqual(command.wf_module.order, 3)
    self.assertEqual(command.wf_module.module_version, module_version)
    self.assertEqual(command.wf_module.get_params()['foo'], 'bar')
    self.assertEqual(command.wf_module.tab.slug, 'tab-1')
    self.assertEqual(command.workflow_id, workflow.id)
def test_clean_tab_wf_module_changed_raises_unneededexecution(self):
    """
    If a user changes tabs' output during render, raise UnneededExecution.

    It doesn't really matter _what_ the return value is, since the render()
    result will never be saved if this WfModule's delta has changed.
    UnneededExecution seems like the simplest contract to enforce.
    """
    # tab_output is what 'render' _thinks_ the output should be
    tab_output = ProcessResult(pd.DataFrame({'A': [1, 2]}))
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.wf_modules.create(
        order=0,
        last_relevant_delta_id=workflow.last_delta_id,
    )
    step.cache_render_result(workflow.last_delta_id, tab_output)
    # Simulate reality: the step's last_relevant_delta_id changes mid-render
    step.last_relevant_delta_id += 1
    step.save(update_fields=['last_relevant_delta_id'])
    shapes = {tab.slug: StepResultShape('ok', tab_output.table_shape)}
    context = RenderContext(workflow.id, None, None, shapes, None)
    with self.assertRaises(UnneededExecution):
        clean_value(ParamDType.Tab(), tab.slug, context)
def test_clean_file_no_uploaded_file(self):
    """A File param whose UUID matches no UploadedFile cleans to None."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.wf_modules.create(module_id_name='uploadfile', order=0)
    context = RenderContext(workflow.id, step.id, None, None, None)
    # Random UUID: no UploadedFile row exists for it.
    cleaned = clean_value(ParamDType.File(), str(uuid.uuid4()), context)
    self.assertIsNone(cleaned)
def test_clean_multicolumn_from_other_tab(self):
    """Multicolumn with tab_parameter filters against the _other_ tab's columns."""
    tab_output = ProcessResult(pd.DataFrame({'A-from-tab-2': [1, 2]}))
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.wf_modules.create(
        order=0,
        last_relevant_delta_id=workflow.last_delta_id,
    )
    step.cache_render_result(workflow.last_delta_id, tab_output)
    schema = ParamDType.Dict({
        'tab': ParamDType.Tab(),
        'columns': ParamDType.Multicolumn(tab_parameter='tab'),
    })
    param_values = {
        'tab': tab.slug,
        'columns': ['A-from-tab-1', 'A-from-tab-2'],
    }
    params = Params(schema, param_values, {})
    input_shape = TableShape(3, [Column('A-from-tab-1', ColumnType.NUMBER())])
    shapes = {tab.slug: StepResultShape('ok', tab_output.table_shape)}
    context = RenderContext(workflow.id, None, input_shape, shapes, params)
    result = clean_value(schema, param_values, context)
    # result['tab'] is not what we're testing here: only the column filter.
    self.assertEqual(result['columns'], ['A-from-tab-2'])
def test_clean_tab_tab_delete_race_raises_unneededexecution(self):
    """
    If a user deletes the tab during render, raise UnneededExecution.

    It doesn't really matter _what_ the return value is, since the render()
    result will never be saved if this WfModule's delta has changed.
    UnneededExecution just seems like the quickest way out of this mess:
    it's an error the caller is meant to raise anyway, unlike
    `Tab.DoesNotExist`.
    """
    # tab_output is what 'render' _thinks_ the output should be
    tab_output = ProcessResult(pd.DataFrame({'A': [1, 2]}))
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.wf_modules.create(
        order=0,
        last_relevant_delta_id=workflow.last_delta_id,
    )
    step.cache_render_result(workflow.last_delta_id, tab_output)
    # Race: the tab gets soft-deleted while render is in flight
    tab.is_deleted = True
    tab.save(update_fields=['is_deleted'])
    # Simulate reality: the step's last_relevant_delta_id changes too
    step.last_relevant_delta_id += 1
    step.save(update_fields=['last_relevant_delta_id'])
    shapes = {tab.slug: StepResultShape('ok', tab_output.table_shape)}
    context = RenderContext(workflow.id, None, None, shapes, None)
    with self.assertRaises(UnneededExecution):
        clean_value(ParamDType.Tab(), tab.slug, context)
def test_change_notes(self):
    """ChangeWfModuleNotesCommand applies, undoes and redoes a notes edit."""
    workflow = Workflow.create_and_init()
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        notes='text1',
        last_relevant_delta_id=workflow.last_delta_id,
    )

    def assert_notes(expected):
        # In-memory object is updated immediately; DB must agree after reload.
        self.assertEqual(step.notes, expected)
        step.refresh_from_db()
        self.assertEqual(step.notes, expected)

    # do
    cmd = self.run_with_async_db(
        ChangeWfModuleNotesCommand.create(
            workflow=workflow, wf_module=step, new_value='text2'
        )
    )
    assert_notes('text2')
    # undo
    self.run_with_async_db(cmd.backward())
    assert_notes('text1')
    # redo
    self.run_with_async_db(cmd.forward())
    assert_notes('text2')
def test_clean_multicolumn_from_other_tab(self):
    """Multicolumn with tab_parameter keeps only the other tab's columns."""
    tab_output = ProcessResult(pd.DataFrame({"A-from-tab-2": [1, 2]}))
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    step = tab.wf_modules.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=workflow.last_delta_id,
    )
    step.cache_render_result(workflow.last_delta_id, tab_output)
    schema = ParamDType.Dict({
        "tab": ParamDType.Tab(),
        "columns": ParamDType.Multicolumn(tab_parameter="tab"),
    })
    params = {"tab": tab.slug, "columns": ["A-from-tab-1", "A-from-tab-2"]}
    input_shape = TableShape(3, [Column("A-from-tab-1", ColumnType.NUMBER())])
    context = RenderContext(
        workflow.id,
        None,
        input_shape,
        {tab.slug: StepResultShape("ok", tab_output.table_shape)},
        params,
    )
    result = clean_value(schema, params, context)
    # result['tab'] is not what we're testing here: only the column filter.
    self.assertEqual(result["columns"], ["A-from-tab-2"])
def test_clean_file_happy_path(self):
    """A File param cleans to a local temp Path mirroring the S3 object.

    Fixes from the original:
    - the S3 key was built with JavaScript-style ``${...}`` inside a
      Python f-string, yielding keys like ``wf-$1/wfm-$2/$<uuid>``; use
      plain ``{...}`` so the key matches the ``wf-<id>/wfm-<id>/<uuid>``
      convention the other upload tests use;
    - the local ``id`` shadowed the builtin; renamed to ``file_uuid``.
    """
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    wfm = tab.wf_modules.create(
        module_id_name="uploadfile", order=0, slug="step-1"
    )
    file_uuid = str(uuid.uuid4())
    key = f"wf-{workflow.id}/wfm-{wfm.id}/{file_uuid}"
    minio.put_bytes(minio.UserFilesBucket, key, b"1234")
    UploadedFile.objects.create(
        wf_module=wfm,
        name="x.csv.gz",
        size=4,
        uuid=file_uuid,
        bucket=minio.UserFilesBucket,
        key=key,
    )
    context = RenderContext(workflow.id, wfm.id, None, None, None)
    result = clean_value(ParamDType.File(), file_uuid, context)
    self.assertIsInstance(result, pathlib.Path)
    self.assertEqual(result.read_bytes(), b"1234")
    self.assertEqual(result.suffixes, [".csv", ".gz"])
    # Assert that once `path` goes out of scope, the temp file is deleted
    str_path = str(result)  # get the filesystem path
    del result  # should finalize, deleting the file on the filesystem
    with self.assertRaises(FileNotFoundError):
        os.open(str_path, 0)
def test_complete_upload_happy_path(self, send_delta):
    """complete_upload clears in-progress state and records an UploadedFile."""
    user = User.objects.create(username='******', email='*****@*****.**')
    workflow = Workflow.create_and_init(owner=user)
    uuid = str(uuidgen.uuid4())
    key = f'wf-123/wfm-234/{uuid}.csv'
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        module_id_name='x',
        inprogress_file_upload_id=None,
        inprogress_file_upload_key=key,
        inprogress_file_upload_last_accessed_at=timezone.now(),
    )
    # The user needs to write the file to S3 before calling complete_upload
    minio.put_bytes(
        minio.UserFilesBucket,
        key,
        b'1234567',
        ContentDisposition="attachment; filename*=UTF-8''file.csv",
    )
    send_delta.side_effect = async_noop
    response = self.run_handler(
        complete_upload,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        key=key,
    )
    self.assertEqual(response.error, '')
    self.assertEqual(response.data, {'uuid': uuid})
    # In-progress bookkeeping must be cleared...
    step.refresh_from_db()
    self.assertIsNone(step.inprogress_file_upload_id)
    self.assertIsNone(step.inprogress_file_upload_key)
    self.assertIsNone(step.inprogress_file_upload_last_accessed_at)
    # ...and an UploadedFile must describe the stored object.
    uploaded_file: UploadedFile = step.uploaded_files.first()
    self.assertEqual(uploaded_file.name, 'file.csv')
    self.assertEqual(uploaded_file.uuid, uuid)
    self.assertEqual(uploaded_file.size, 7)
    self.assertEqual(uploaded_file.bucket, minio.UserFilesBucket)
    self.assertEqual(uploaded_file.key, key)
def test_change_dependent_wf_modules(self):
    """Reordering tabs bumps last_relevant_delta_id of steps that depend on them."""
    # tab slug: tab-1
    workflow = Workflow.create_and_init(selected_tab_position=2)
    workflow.tabs.create(position=1, slug='tab-2')
    workflow.tabs.create(position=2, slug='tab-3')
    # Create a step depending on tabs 2+3 (and their order)
    ModuleVersion.create_or_replace_from_spec({
        'id_name': 'x',
        'name': 'X',
        'category': 'Clean',
        'parameters': [{'id_name': 'tabs', 'type': 'multitab'}],
    })
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        module_id_name='x',
        params={'tabs': ['tab-2', 'tab-3']},
        last_relevant_delta_id=workflow.last_delta_id,
    )
    cmd = self.run_with_async_db(
        ReorderTabsCommand.create(
            workflow=workflow, new_order=['tab-3', 'tab-1', 'tab-2']
        )
    )
    step.refresh_from_db()
    self.assertEqual(step.last_relevant_delta_id, cmd.id)
def test_abort_upload(self):
    """abort_upload clears in-progress state and deletes partial S3 data."""
    user = User.objects.create(username='******', email='*****@*****.**')
    workflow = Workflow.create_and_init(owner=user)
    uuid = str(uuidgen.uuid4())
    key = f'wf-123/wfm-234/{uuid}.csv'
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        module_id_name='x',
        inprogress_file_upload_id=None,
        inprogress_file_upload_key=key,
        inprogress_file_upload_last_accessed_at=timezone.now(),
    )
    # let's pretend the user has uploaded at least partial data.
    minio.put_bytes(
        minio.UserFilesBucket,
        key,
        b'1234567',
        ContentDisposition="attachment; filename*=UTF-8''file.csv",
    )
    response = self.run_handler(
        abort_upload,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        key=key,
    )
    self.assertResponse(response, data=None)
    step.refresh_from_db()
    self.assertIsNone(step.inprogress_file_upload_id)
    self.assertIsNone(step.inprogress_file_upload_key)
    self.assertIsNone(step.inprogress_file_upload_last_accessed_at)
    # Ensure the file is deleted from S3
    self.assertFalse(minio.exists(minio.UserFilesBucket, key))
def test_resume_without_rerunning_unneeded_renders(self, fake_load_module):
    """Execution skips steps whose cached render result is still valid."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    delta_id = workflow.last_delta_id
    # step1: has a valid, cached result -- render() must NOT run for it
    step1 = tab.wf_modules.create(order=0, last_relevant_delta_id=delta_id)
    cached = ProcessResult(pd.DataFrame({'A': [1]}))
    step1.cache_render_result(delta_id, cached)
    # step2: has no cached result (must be rendered)
    step2 = tab.wf_modules.create(order=1, last_relevant_delta_id=delta_id)
    fake_loaded_module = Mock(LoadedModule)
    fake_load_module.return_value = fake_loaded_module
    rendered = ProcessResult(pd.DataFrame({'A': [2]}))
    fake_loaded_module.render.return_value = rendered
    self._execute(workflow)
    step2.refresh_from_db()
    self.assertEqual(step2.cached_render_result.result, rendered)
    # only one call: step2 (step1 was served from cache)
    fake_loaded_module.render.assert_called_once()
def test_delete_orphans_does_not_delete_new_tab(self):
    """
    Don't delete a new AddTabCommand's new orphan Tab during creation.

    We delete orphans Deltas during creation, and we should delete their
    Tabs/WfModules. But we shouldn't delete _new_ Tabs/WfModules. (We need
    to order creation and deletion carefully to avoid doing so.)
    """
    workflow = Workflow.create_and_init()
    # Create a soft-deleted Tab in an orphan Delta (via AddTabCommand)
    orphan_delta = self.run_with_async_db(
        AddTabCommand.create(workflow=workflow, slug="tab-2", name="name-2")
    )
    self.run_with_async_db(orphan_delta.backward())
    # Now create a new Tab in a new Delta. This will delete orphan_delta,
    # and it _should_ delete `tab-2`.
    self.run_with_async_db(
        AddTabCommand.create(workflow=workflow, slug="tab-3", name="name-3")
    )
    with self.assertRaises(Tab.DoesNotExist):
        orphan_delta.tab.refresh_from_db()  # orphan tab was deleted
    with self.assertRaises(Delta.DoesNotExist):
        orphan_delta.refresh_from_db()
def test_convert_to_uploaded_file_happy_path(self):
    """convert_to_uploaded_file copies to the final key and completes the IPU."""
    workflow = Workflow.create_and_init()
    step = workflow.tabs.first().wf_modules.create(
        order=0, slug="step-1", module_id_name="x"
    )
    ipu = step.in_progress_uploads.create()
    minio.put_bytes(ipu.Bucket, ipu.get_upload_key(), b"1234567")
    uploaded_file = ipu.convert_to_uploaded_file("test sheet.xlsx")
    self.assertEqual(uploaded_file.uuid, str(ipu.id))
    final_key = step.uploaded_file_prefix + str(ipu.id) + ".xlsx"
    # New file on S3 has the right bytes and metadata
    body = minio.get_object_with_data(minio.UserFilesBucket, final_key)["Body"]
    self.assertEqual(body, b"1234567")
    head = minio.client.head_object(Bucket=minio.UserFilesBucket,
                                    Key=final_key)
    self.assertEqual(
        head["ContentDisposition"],
        "attachment; filename*=UTF-8''test%20sheet.xlsx",
    )
    # InProgressUpload is completed, in memory and on DB
    self.assertEqual(ipu.is_completed, True)
    ipu.refresh_from_db()
    self.assertEqual(ipu.is_completed, True)
    # Uploaded (temporary) file is deleted
    self.assertFalse(minio.exists(minio.UserFilesBucket, ipu.get_upload_key()))
def test_add_module_viewer_access_denied(self):
    """An anonymous viewer of a public workflow cannot add a module."""
    workflow = Workflow.create_and_init(public=True)  # tab-1
    ModuleVersion.create_or_replace_from_spec({
        'id_name': 'amodule',
        'name': 'A Module',
        'category': 'Clean',
        'parameters': [{'id_name': 'foo', 'type': 'string'}],
    })
    # No user= kwarg: caller is an anonymous viewer.
    response = self.run_handler(
        add_module,
        workflow=workflow,
        tabSlug='tab-1',
        position=3,
        moduleIdName='amodule',
        paramValues={'foo': 'bar'},
    )
    self.assertResponse(
        response, error='AuthError: no write access to workflow'
    )
def test_add_module_default_params(self):
    """AddModuleCommand fills omitted params with the spec's defaults."""
    workflow = Workflow.create_and_init()
    module_version = ModuleVersion.create_or_replace_from_spec(
        {
            'id_name': 'blah',
            'name': 'Blah',
            'category': 'Clean',
            'parameters': [
                {'id_name': 'a', 'type': 'string', 'default': 'x'},
                {'id_name': 'c', 'type': 'checkbox', 'name': 'C',
                 'default': True},
            ],
        },
        source_version_hash='1.0',
    )
    # Pass no param values at all: both defaults must be applied.
    cmd = self.run_with_async_db(
        AddModuleCommand.create(
            workflow=workflow,
            tab=workflow.tabs.first(),
            module_id_name=module_version.id_name,
            position=0,
            param_values={},
        )
    )
    self.assertEqual(cmd.wf_module.params, {'a': 'x', 'c': True})
def test_set_secret_writer_access_denied(self):
    """Even a can_edit collaborator may not set secrets: owner-only."""
    editor = User.objects.create(email="*****@*****.**")
    workflow = Workflow.create_and_init(public=True)
    workflow.acl.create(email=editor.email, can_edit=True)
    ModuleVersion.create_or_replace_from_spec({
        "id_name": "g",
        "name": "g",
        "category": "Clean",
        "parameters": [TestStringSecret],
    })
    step = workflow.tabs.first().wf_modules.create(
        module_id_name="g", order=0, slug="step-1"
    )
    response = self.run_handler(
        set_secret,
        user=editor,
        workflow=workflow,
        wfModuleId=step.id,
        param="string_secret",
        secret="foo",
    )
    self.assertResponse(
        response, error="AuthError: no owner access to workflow"
    )
def setUp(self):
    """Create a workflow with a single step for each test to use."""
    super().setUp()
    self.workflow = Workflow.create_and_init()
    first_tab = self.workflow.tabs.first()
    self.wf_module = first_tab.wf_modules.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=self.workflow.last_delta_id,
    )
def test_set_params_null_byte_in_json(self):
    """set_params strips NUL bytes from string values before saving.

    Fix: the original used the deprecated ``assertEquals`` alias; use
    ``assertEqual``.
    """
    user = User.objects.create(username="******", email="*****@*****.**")
    workflow = Workflow.create_and_init(owner=user)
    wf_module = workflow.tabs.first().wf_modules.create(
        order=0, slug="step-1", module_id_name="x"
    )
    ModuleVersion.create_or_replace_from_spec({
        "id_name": "x",
        "name": "x",
        "category": "Clean",
        "parameters": [{"id_name": "foo", "type": "string"}],
    })
    response = self.run_handler(
        set_params,
        user=user,
        workflow=workflow,
        wfModuleId=wf_module.id,
        values={"foo": "b\x00\x00r"},  # NUL bytes are invalid in Postgres text
    )
    self.assertResponse(response, data=None)
    command = ChangeParametersCommand.objects.first()
    self.assertEqual(command.new_values, {"foo": "br"})
def test_fetch_wf_module(self, save_result, load_module):
    """fetch_wf_module saves the result and reschedules the next update."""
    result = ProcessResult(pd.DataFrame({'A': [1]}), error='hi')

    async def fake_fetch(*args, **kwargs):
        return result

    fake_module = Mock(LoadedModule)
    load_module.return_value = fake_module
    fake_module.fetch.side_effect = fake_fetch

    workflow = Workflow.create_and_init()
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        next_update=parser.parse('Aug 28 1999 2:24PM UTC'),
        update_interval=600,
    )

    now = parser.parse('Aug 28 1999 2:24:02PM UTC')
    # next_update + one 600s interval (not now + 600s)
    due_for_update = parser.parse('Aug 28 1999 2:34PM UTC')

    with self.assertLogs(fetch.__name__, logging.DEBUG):
        self.run_with_async_db(
            fetch.fetch_wf_module(workflow.id, step, now)
        )

    save_result.assert_called_with(workflow.id, step, result)
    step.refresh_from_db()
    self.assertEqual(step.last_update_check, now)
    self.assertEqual(step.next_update, due_for_update)
def test_try_set_autofetch_disable_autofetch(self):
    """Disabling autofetch clears next_update and stores the new interval."""
    user = User.objects.create(username="******", email="*****@*****.**")
    workflow = Workflow.create_and_init(owner=user)
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        slug="step-1",
        auto_update_data=True,
        update_interval=1200,
        next_update=timezone.now(),
    )
    response = self.run_handler(
        try_set_autofetch,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        isAutofetch=False,
        fetchInterval=300,
    )
    self.assertResponse(
        response, data={"isAutofetch": False, "fetchInterval": 300}
    )
    step.refresh_from_db()
    self.assertEqual(step.auto_update_data, False)
    self.assertEqual(step.update_interval, 300)
    self.assertIsNone(step.next_update)  # no fetch scheduled any more
def test_prepare_upload_happy_path(self):
    """prepare_upload returns a presigned URL+headers that actually work."""
    user = User.objects.create(username='******', email='*****@*****.**')
    workflow = Workflow.create_and_init(owner=user)
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        module_id_name='x',
    )
    data = b'1234567'
    md5sum = _base64_md5sum(data)
    response = self.run_handler(
        prepare_upload,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        filename='abc.csv',
        nBytes=len(data),
        base64Md5sum=md5sum,
    )
    self.assertEqual(response.error, '')
    step.refresh_from_db()
    self.assertIsNone(step.inprogress_file_upload_id)
    # The key lives under this step's prefix, and the URL points at it.
    self.assertTrue(
        response.data['key'].startswith(step.uploaded_file_prefix)
    )
    self.assertTrue(response.data['key'] in response.data['url'])
    # Actually PUT the bytes: the presigned URL+headers must be accepted.
    http = urllib3.PoolManager()
    put_response = http.request(
        'PUT',
        response.data['url'],
        body=data,
        headers=response.data['headers'],
    )
    self.assertEqual(put_response.status, 200)  # the URL+headers work
def test_try_set_autofetch_allow_exceed_quota_when_reducing(self):
    """Lowering fetch frequency is allowed even while over quota."""
    user = User.objects.create(username="******", email="*****@*****.**")
    user.user_profile.max_fetches_per_day = 10
    user.user_profile.save()
    workflow = Workflow.create_and_init(owner=user)
    # Already autofetching every 300s -- over the 10/day quota.
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        slug="step-1",
        auto_update_data=True,
        update_interval=300,
        next_update=timezone.now(),
    )
    # 600s is still over quota, but it's a _reduction_, so it's allowed.
    response = self.run_handler(
        try_set_autofetch,
        user=user,
        workflow=workflow,
        wfModuleId=step.id,
        isAutofetch=True,
        fetchInterval=600,
    )
    self.assertResponse(
        response, data={"isAutofetch": True, "fetchInterval": 600}
    )
    step.refresh_from_db()
    self.assertEqual(step.update_interval, 600)
def test_duplicate_nonempty_rendered_tab(self, ws_notify, queue_render):
    """Duplicating a tab whose steps have cached results needs no re-render."""
    ws_notify.side_effect = async_noop
    workflow = Workflow.create_and_init()
    init_delta_id = workflow.last_delta_id
    tab = workflow.tabs.first()
    # wfm1 and wfm2 have not yet been rendered. (But while we're
    # duplicating, conceivably a render could be running; so when we
    # duplicate them, we need to queue a render.)
    step = tab.wf_modules.create(
        order=0,
        module_id_name='x',
        params={'p': 's1'},
        last_relevant_delta_id=init_delta_id,
    )
    step.cache_render_result(
        init_delta_id, ProcessResult(error='simplest ctor')
    )
    self.run_with_async_db(
        DuplicateTabCommand.create(
            workflow=workflow,
            from_tab=tab,
            slug='tab-2',
            name='Tab 2',
        )
    )
    # No need to render: the result is already cached
    queue_render.assert_not_called()