def test_change_parameters_on_soft_deleted_tab(self):
    """Changing params of a step on a soft-deleted tab yields no delta."""
    workflow = Workflow.objects.create()
    delta = InitWorkflowCommand.create(workflow)
    deleted_tab = workflow.tabs.create(position=0, is_deleted=True)
    step = deleted_tab.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="loadsomething",
        last_relevant_delta_id=delta.id,
        params={"url": ""},
    )
    result = self.run_with_async_db(
        commands.do(
            ChangeParametersCommand,
            workflow_id=workflow.id,
            wf_module=step,
            new_values={"url": "https://example.com"},
        )
    )
    # commands.do() returns None when there is nothing to apply
    self.assertIsNone(result)
def test_change_data_version(self):
    """SetStepDataVersion switches the stored version; undo/redo restore it."""
    # Create two data versions; start out pointing at the second one.
    version_a = self._store_fetched_table(stored_at=datetime.datetime(2021, 6, 24))
    version_b = self._store_fetched_table(stored_at=datetime.datetime(2021, 6, 23))
    self.step.stored_data_version = version_b
    self.step.save(update_fields=["stored_data_version"])

    self.workflow.refresh_from_db()
    delta_id_before = self.workflow.last_delta_id

    # Switch back to the first version.
    cmd = self.run_with_async_db(
        commands.do(
            SetStepDataVersion,
            workflow_id=self.workflow.id,
            step=self.step,
            new_version=isoparse("2021-06-24T00:00:00.000000Z"),
        )
    )
    self.assertEqual(self.step.stored_data_version, version_a)

    self.workflow.refresh_from_db()
    delta_id_after = cmd.id
    # The step's last_relevant_delta_id must track the new delta.
    self.step.refresh_from_db()
    self.assertEqual(self.step.last_relevant_delta_id, delta_id_after)

    # Undo: back to the second version.
    self.run_with_async_db(commands.undo(self.workflow.id))
    self.step.refresh_from_db()
    self.assertEqual(self.step.last_relevant_delta_id, delta_id_before)
    self.assertEqual(self.step.stored_data_version, version_b)

    # Redo: forward to the first version again.
    self.run_with_async_db(commands.redo(self.workflow.id))
    self.step.refresh_from_db()
    self.assertEqual(self.step.last_relevant_delta_id, delta_id_after)
    self.assertEqual(self.step.stored_data_version, version_a)
def test_clientside_update(self, send_delta):
    """Renaming a tab sends a TabUpdate; undo sends the old name back."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    tab.name = "foo"
    tab.save(update_fields=["name"])

    send_delta.return_value = async_noop()
    cmd = self.run_with_async_db(
        commands.do(
            SetTabNameCommand, workflow_id=workflow.id, tab=tab, new_name="bar"
        )
    )
    send_delta.assert_called()
    forward_update = send_delta.call_args[0][1]
    self.assertEqual(forward_update.tabs[tab.slug], clientside.TabUpdate(name="bar"))

    send_delta.return_value = async_noop()
    self.run_with_async_db(commands.undo(cmd))
    backward_update = send_delta.call_args[0][1]
    self.assertEqual(backward_update.tabs[tab.slug], clientside.TabUpdate(name="foo"))
def test_add_to_empty_tab_affects_dependent_tab_wf_modules(self):
    """Adding a module to a tab bumps steps on other tabs that depend on it."""
    ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "tabby",
            "name": "Tabby",
            "category": "Clean",
            "parameters": [{"id_name": "tab", "type": "tab"}],
        }
    )
    dependent_step = self.workflow.tabs.first().wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="tabby",
        last_relevant_delta_id=self.workflow.last_delta_id,
        params={"tab": "tab-2"},
    )
    tab2 = self.workflow.tabs.create(position=1, slug="tab-2")

    # Add a module to tab2.
    cmd = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=self.workflow.id,
            tab=tab2,
            slug="step-2",
            module_id_name=self.module_version.id_name,
            position=0,
            param_values={"url": "https://x.com"},
        )
    )

    # Tab 1's "tabby" step depends on tab2, so its delta ID must update.
    dependent_step.refresh_from_db()
    self.assertEqual(dependent_step.last_relevant_delta_id, cmd.id)
def test_adjust_selected_tab_position(self):
    """Reordering tabs moves selected_tab_position with the selected tab."""
    # The auto-created tab has slug "tab-1".
    workflow = Workflow.create_and_init(selected_tab_position=2)
    workflow.tabs.create(position=1, slug="tab-2")
    workflow.tabs.create(position=2, slug="tab-3")

    cmd = self.run_with_async_db(
        commands.do(
            ReorderTabsCommand,
            workflow_id=workflow.id,
            new_order=["tab-3", "tab-1", "tab-2"],
        )
    )
    workflow.refresh_from_db()
    # tab-3 (previously selected, at position 2) moved to position 0.
    self.assertEqual(workflow.selected_tab_position, 0)

    self.run_with_async_db(commands.undo(cmd))
    workflow.refresh_from_db()
    self.assertEqual(workflow.selected_tab_position, 2)

    self.run_with_async_db(commands.redo(cmd))
    workflow.refresh_from_db()
    self.assertEqual(workflow.selected_tab_position, 0)
def test_delete_deletes_soft_deleted_tab(self):
    """delete_orphan_soft_deleted_models() purges soft-deleted tabs and steps."""
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.create(position=1, is_deleted=True)
    # Create a step on the soft-deleted tab -- it must be purged, too!
    step = tab.steps.create(
        order=0, slug="step-1", module_id_name="foo", is_deleted=True
    )
    self.run_with_async_db(
        commands.do(SetWorkflowTitle, workflow_id=workflow.id, new_value="1")
    )
    self.run_with_async_db(
        commands.undo(workflow.id)
    )  # fix workflow.last_delta_id
    workflow.deltas.all().delete()

    workflow.delete_orphan_soft_deleted_models()

    with self.assertRaises(Step.DoesNotExist):
        step.refresh_from_db()
    with self.assertRaises(Tab.DoesNotExist):
        tab.refresh_from_db()
def test_change_version_queue_render_if_listening_and_no_notification(
    self, queue_render, queue_render_if_listening
):
    """Without notifications, a version change only renders if clients listen."""
    queue_render_if_listening.return_value = future_none
    old_version = self._store_fetched_table()
    new_version = self._store_fetched_table()
    self.step.notifications = False
    self.step.stored_data_version = old_version
    self.step.save()

    delta = self.run_with_async_db(
        commands.do(
            SetStepDataVersion,
            workflow_id=self.workflow.id,
            step=self.step,
            new_version=new_version,
        )
    )

    # No unconditional render; only a render-if-someone-is-listening.
    queue_render.assert_not_called()
    queue_render_if_listening.assert_called_with(self.step.workflow_id, delta.id)
def test_change_parameters_on_hard_deleted_wf_module(self):
    """Changing params of a hard-deleted module yields no delta."""
    workflow = Workflow.create_and_init()
    create_module_zipfile("loadsomething")
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="loadsomething",
        last_relevant_delta_id=workflow.last_delta_id,
        params={"url": ""},
    )
    step.delete()  # hard delete, not soft delete

    result = self.run_with_async_db(
        commands.do(
            ChangeParametersCommand,
            workflow_id=workflow.id,
            wf_module=step,
            new_values={"url": "https://example.com"},
        )
    )
    self.assertIsNone(result)
def test_reorder_blocks_on_custom_report(self, send_update):
    """ReorderBlocks rewrites block positions and reports new slug order."""
    future_none = asyncio.Future()
    future_none.set_result(None)
    send_update.return_value = future_none

    workflow = Workflow.create_and_init(has_custom_report=True)
    for position, slug in enumerate(["block-1", "block-2", "block-3"]):
        workflow.blocks.create(
            position=position, slug=slug, block_type="Text", text_markdown="1"
        )

    self.run_with_async_db(
        commands.do(
            ReorderBlocks,
            workflow_id=workflow.id,
            slugs=["block-2", "block-3", "block-1"],
        )
    )
    self.assertEqual(
        list(workflow.blocks.values_list("slug", "position")),
        [("block-2", 0), ("block-3", 1), ("block-1", 2)],
    )
    forward_update = send_update.call_args[0][1]
    # has_custom_report did not change, so the update omits it.
    self.assertIsNone(forward_update.workflow.has_custom_report)
    self.assertEqual(
        forward_update.workflow.block_slugs, ["block-2", "block-3", "block-1"]
    )

    self.run_with_async_db(commands.undo(workflow.id))
    self.assertEqual(
        list(workflow.blocks.values_list("slug", "position")),
        [("block-1", 0), ("block-2", 1), ("block-3", 2)],
    )
    backward_update = send_update.call_args[0][1]
    self.assertIsNone(backward_update.workflow.has_custom_report)
    self.assertEqual(
        backward_update.workflow.block_slugs, ["block-1", "block-2", "block-3"]
    )
def test_change_parameters_on_hard_deleted_step(self):
    """SetStepParams on a hard-deleted step yields no delta."""
    workflow = Workflow.create_and_init()
    create_module_zipfile("loadurl")
    step = workflow.tabs.first().steps.create(
        order=0,
        slug="step-1",
        module_id_name="loadurl",
        last_relevant_delta_id=workflow.last_delta_id,
        params={"url": ""},
    )
    step.delete()  # hard delete, not soft delete

    result = self.run_with_async_db(
        commands.do(
            SetStepParams,
            workflow_id=workflow.id,
            step=step,
            new_values={"url": "https://example.com"},
        )
    )
    self.assertIsNone(result)
def test_add_module_default_params(self):
    """AddModuleCommand fills unspecified params with their spec defaults."""
    workflow = Workflow.create_and_init()
    module_version = ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "blah",
            "name": "Blah",
            "category": "Clean",
            "parameters": [
                {"id_name": "a", "type": "string", "default": "x"},
                {"id_name": "c", "type": "checkbox", "name": "C", "default": True},
            ],
        },
        source_version_hash="1.0",
    )
    cmd = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=workflow.id,
            tab=workflow.tabs.first(),
            slug="step-1",
            module_id_name=module_version.id_name,
            position=0,
            param_values={},  # no overrides: expect pure defaults
        )
    )
    self.assertEqual(cmd.wf_module.params, {"a": "x", "c": True})
def test_update_workflow_fetches_per_day(self, send_user_update):
    """Deleting an auto-fetching step lowers fetches_per_day; undo does not restore it."""
    send_user_update.side_effect = async_noop
    user = User.objects.create(email="*****@*****.**")
    workflow = Workflow.create_and_init(owner_id=user.id, fetches_per_day=3.0)
    tab = workflow.tabs.first()
    tab.steps.create(
        slug="step-1",
        order=0,
        auto_update_data=True,
        update_interval=86400,  # 1 fetch/day
        next_update=datetime.datetime.now(),
    )
    step2 = tab.steps.create(
        slug="step-2",
        order=1,
        auto_update_data=True,
        update_interval=43200,  # 2 fetches/day
        next_update=datetime.datetime.now(),
    )

    self.run_with_async_db(
        commands.do(DeleteStep, workflow_id=workflow.id, step=step2)
    )
    workflow.refresh_from_db()
    self.assertEqual(workflow.fetches_per_day, 1.0)
    send_user_update.assert_called_with(
        user.id, clientside.UserUpdate(usage=UserUsage(1.0))
    )
    send_user_update.reset_mock()

    # Undo doesn't increase usage (user might not expect it to)
    self.run_with_async_db(commands.undo(workflow.id))
    send_user_update.assert_not_called()
    workflow.refresh_from_db()
    self.assertEqual(workflow.fetches_per_day, 1.0)
    step2.refresh_from_db()
    self.assertEqual(step2.auto_update_data, False)
def test_change_data_version(self):
    """SetStepDataVersion switches the stored version; undo/redo restore it."""
    # Create two data versions; start out pointing at the second one.
    version_a = self._store_fetched_table()
    version_b = self._store_fetched_table()
    self.step.stored_data_version = version_b
    self.step.save()

    self.workflow.refresh_from_db()
    delta_id_before = self.workflow.last_delta_id

    # Switch back to the first version.
    cmd = self.run_with_async_db(
        commands.do(
            SetStepDataVersion,
            workflow_id=self.workflow.id,
            step=self.step,
            new_version=version_a,
        )
    )
    self.assertEqual(self.step.stored_data_version, version_a)

    self.workflow.refresh_from_db()
    delta_id_after = cmd.id
    # The step's last_relevant_delta_id must track the new delta.
    self.step.refresh_from_db()
    self.assertEqual(self.step.last_relevant_delta_id, delta_id_after)

    # Undo: back to the second version.
    self.run_with_async_db(commands.undo(self.workflow.id))
    self.step.refresh_from_db()
    self.assertEqual(self.step.last_relevant_delta_id, delta_id_before)
    self.assertEqual(self.step.stored_data_version, version_b)

    # Redo: forward to the first version again.
    self.run_with_async_db(commands.redo(self.workflow.id))
    self.step.refresh_from_db()
    self.assertEqual(self.step.last_relevant_delta_id, delta_id_after)
    self.assertEqual(self.step.stored_data_version, version_a)
def test_delete_custom_report_table(self, send_update):
    """Deleting a tab removes its Table block; undo recreates the block."""
    future_none = asyncio.Future()
    future_none.set_result(None)
    send_update.return_value = future_none

    workflow = Workflow.create_and_init(
        selected_tab_position=0, has_custom_report=True
    )  # auto-creates tab-1
    tab2 = workflow.tabs.create(position=1, slug="tab-2")
    workflow.blocks.create(
        position=0, slug="block-1", block_type="Text", text_markdown="1"
    )
    table_block = workflow.blocks.create(
        position=1, slug="block-2", block_type="Table", tab_id=tab2.id
    )
    text_block = workflow.blocks.create(
        position=2, slug="block-3", block_type="Text", text_markdown="3"
    )

    self.run_with_async_db(
        commands.do(DeleteTab, workflow_id=workflow.id, tab=tab2)
    )
    forward_update = send_update.call_args[0][1]
    self.assertEqual(forward_update.workflow.block_slugs, ["block-1", "block-3"])
    self.assertEqual(forward_update.blocks, {})
    self.assertEqual(forward_update.clear_block_slugs, frozenset(["block-2"]))
    with self.assertRaises(Block.DoesNotExist):
        table_block.refresh_from_db()
    # Later blocks shift up to fill the gap.
    text_block.refresh_from_db()
    self.assertEqual(text_block.position, 1)

    self.run_with_async_db(commands.undo(workflow.id))
    backward_update = send_update.call_args[0][1]
    self.assertEqual(
        backward_update.workflow.block_slugs, ["block-1", "block-2", "block-3"]
    )
    self.assertEqual(
        backward_update.blocks, {"block-2": clientside.TableBlock("tab-2")}
    )
    self.assertEqual(backward_update.clear_block_slugs, frozenset())
def test_duplicate_nonempty_rendered_tab(self, send_update, queue_render):
    """Duplicating a tab never copies render results; it queues a render."""
    send_update.side_effect = async_noop
    queue_render.side_effect = async_noop
    workflow = Workflow.create_and_init()
    init_delta_id = workflow.last_delta_id
    tab = workflow.tabs.first()
    step1 = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        params={"p": "s1"},
        last_relevant_delta_id=init_delta_id,
    )
    # Give step1 a cached render result, so we can prove the duplicate
    # does NOT inherit it.
    write_to_rendercache(
        workflow, step1, init_delta_id, make_table(make_column("A", [1]))
    )

    cmd = self.run_with_async_db(
        commands.do(
            DuplicateTab,
            workflow_id=workflow.id,
            from_tab=tab,
            slug="tab-2",
            name="Tab 2",
        )
    )
    new_tab = workflow.tabs.last()
    self.assertNotEqual(new_tab.id, tab.id)
    duplicated_step = new_tab.steps.last()
    # We need to render: render() for Steps in the second Tab is called
    # with a different `tab_name` than in the first Tab, so output may
    # differ. Hence no cached result, and a queued render.
    self.assertIsNone(duplicated_step.cached_render_result)
    queue_render.assert_called_with(workflow.id, cmd.id)
def test_duplicate_nonempty_rendered_tab(self, ws_notify, queue_render):
    """Duplicating a tab never copies render results; it queues a render."""
    ws_notify.side_effect = async_noop
    queue_render.side_effect = async_noop
    workflow = Workflow.create_and_init()
    init_delta_id = workflow.last_delta_id
    tab = workflow.tabs.first()
    wfm1 = tab.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        params={"p": "s1"},
        last_relevant_delta_id=init_delta_id,
    )
    # Give wfm1 a cached render result, so we can prove the duplicate
    # does NOT inherit it.
    render_result = RenderResult(arrow_table({"A": [1]}))
    cache_render_result(workflow, wfm1, init_delta_id, render_result)

    cmd = self.run_with_async_db(
        commands.do(
            DuplicateTabCommand,
            workflow_id=workflow.id,
            from_tab=tab,
            slug="tab-2",
            name="Tab 2",
        )
    )
    new_tab = workflow.tabs.last()
    self.assertNotEqual(new_tab.id, tab.id)
    duplicated_module = new_tab.wf_modules.last()
    # We need to render: render() for Steps in the second Tab is called
    # with a different `tab_name` than in the first Tab, so output may
    # differ. Hence no cached result, and a queued render.
    self.assertIsNone(duplicated_module.cached_render_result)
    queue_render.assert_called_with(workflow.id, cmd.id)
def test_change_last_relevant_delta_ids_of_dependent_wf_modules(self):
    """Renaming a tab bumps delta IDs of modules that reference that tab."""
    workflow = Workflow.create_and_init()
    initial_delta_id = workflow.last_delta_id
    tab1 = workflow.tabs.first()
    tab2 = workflow.tabs.create(position=1, slug="tab-2", name="Tab 2")

    # On tab2, add a module whose "tab" param points at tab1.
    ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "x",
            "name": "x",
            "category": "Clean",
            "parameters": [{"id_name": "tab", "type": "tab"}],
        }
    )
    dependent_module = tab2.wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        params={"tab": tab1.slug},
        last_relevant_delta_id=initial_delta_id,
    )

    cmd = self.run_with_async_db(
        commands.do(
            SetTabNameCommand,
            workflow_id=workflow.id,
            tab=tab1,
            new_name=tab1.name + "X",
        )
    )
    dependent_module.refresh_from_db()
    self.assertEqual(dependent_module.last_relevant_delta_id, cmd.id)
def test_add_module_default_params(self):
    """AddStep fills unspecified params with their spec defaults."""
    workflow = Workflow.create_and_init()
    create_module_zipfile(
        "blah",
        spec_kwargs={
            "parameters": [
                {"id_name": "a", "type": "string", "default": "x"},
                {"id_name": "c", "type": "checkbox", "name": "C", "default": True},
            ]
        },
    )
    cmd = self.run_with_async_db(
        commands.do(
            AddStep,
            workflow_id=workflow.id,
            tab=workflow.tabs.first(),
            slug="step-1",
            module_id_name="blah",
            position=0,
            param_values={},  # no overrides: expect pure defaults
        )
    )
    self.assertEqual(cmd.step.params, {"a": "x", "c": True})
def test_change_parameters(self):
    """ChangeParametersCommand merges new values; undo/redo round-trip."""
    # Setup: workflow with loadurl module
    #
    # loadurl is a good choice because it has three parameters, two of
    # which are useful.
    workflow = Workflow.create_and_init()
    ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "loadurl",
            "name": "loadurl",
            "category": "Clean",
            "parameters": [
                {"id_name": "url", "type": "string"},
                {"id_name": "has_header", "type": "checkbox", "name": "HH"},
                {"id_name": "version_select", "type": "custom"},
            ],
        }
    )
    original_params = {
        "url": "http://example.org",
        "has_header": True,
        "version_select": "",
    }
    step = workflow.tabs.first().wf_modules.create(
        module_id_name="loadurl",
        order=0,
        slug="step-1",
        last_relevant_delta_id=workflow.last_delta_id,
        params=original_params,
    )

    # Create and apply delta. It should change params.
    cmd = self.run_with_async_db(
        commands.do(
            ChangeParametersCommand,
            workflow_id=workflow.id,
            wf_module=step,
            new_values={"url": "http://example.com/foo", "has_header": False},
        )
    )
    step.refresh_from_db()
    changed_params = {
        "url": "http://example.com/foo",
        "has_header": False,
        "version_select": "",  # untouched param survives the merge
    }
    self.assertEqual(step.params, changed_params)

    # undo
    self.run_with_async_db(commands.undo(cmd))
    step.refresh_from_db()
    self.assertEqual(step.params, original_params)

    # redo
    self.run_with_async_db(commands.redo(cmd))
    step.refresh_from_db()
    self.assertEqual(step.params, changed_params)
def test_add_module(self):
    """AddModuleCommand inserts a module; undo/redo and deletion clean up."""
    existing_module = self.tab.wf_modules.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=self.delta.id,
        params={"url": ""},
    )
    all_modules = self.tab.live_wf_modules
    self.workflow.refresh_from_db()
    initial_delta_id = self.workflow.last_delta_id

    # Add a module, inserted before the existing one; check it went there
    # and the old one comes after.
    cmd = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-2",
            module_id_name=self.module_version.id_name,
            position=0,
            param_values={"url": "https://x.com"},
        )
    )
    self.assertEqual(all_modules.count(), 2)
    added_module = all_modules.get(order=0)
    self.assertNotEqual(added_module, existing_module)
    # The supplied param value is written
    self.assertEqual(added_module.params["url"], "https://x.com")
    bumped_module = all_modules.get(order=1)
    self.assertEqual(bumped_module, existing_module)

    # The workflow revision was incremented
    self.workflow.refresh_from_db()
    self.assertGreater(self.workflow.last_delta_id, initial_delta_id)

    # Check the delta chain (short, but should be sweet)
    self.workflow.refresh_from_db()
    self.assertEqual(self.workflow.last_delta, cmd)
    self.assertEqual(cmd.prev_delta_id, self.delta.id)
    with self.assertRaises(Delta.DoesNotExist):
        cmd.next_delta

    # undo! undo! ahhhhh everything is on fire! undo!
    self.run_with_async_db(commands.undo(cmd))
    self.assertEqual(all_modules.count(), 1)
    self.assertEqual(all_modules.first(), existing_module)

    # wait no, we wanted that module
    self.run_with_async_db(commands.redo(cmd))
    self.assertEqual(all_modules.count(), 2)
    added_module = all_modules.get(order=0)
    self.assertNotEqual(added_module, existing_module)
    bumped_module = all_modules.get(order=1)
    self.assertEqual(bumped_module, existing_module)

    # Undo, then delete the un-applied command. It should delete the
    # dangling WfModule too.
    self.run_with_async_db(commands.undo(cmd))
    self.assertEqual(all_modules.count(), 1)
    self.assertEqual(all_modules.first(), existing_module)
    cmd.delete_with_successors()
    with self.assertRaises(WfModule.DoesNotExist):
        all_modules.get(pk=added_module.id)  # should be gone
def test_change_parameters_across_module_versions(self, load_module):
    """New values are applied atop migrate_params() output; undo restores raw params."""
    workflow = Workflow.create_and_init()

    # Initialize a WfModule that used module 'x' version '1' (which we
    # don't need to write in code -- after all, that version might be long
    # gone when ChangeParametersCommand is called).
    step = workflow.tabs.first().wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
        params={"version": "v1", "x": 1},  # version-'1' params
    )

    # Now install version '2' of module 'x'.
    #
    # Version '2''s migrate_params() could do anything; in this test, it
    # simply changes 'version' from 'v1' to 'v2'.
    ModuleVersion.create_or_replace_from_spec(
        {
            "id_name": "x",
            "name": "x",
            "category": "Clean",
            "parameters": [
                {"id_name": "version", "type": "string"},
                {"id_name": "x", "type": "integer"},
            ],
        },
        source_version_hash="2",
    )
    load_module.return_value.param_dtype = ParamDType.Dict(
        {"version": ParamDType.String(), "x": ParamDType.Integer()}
    )
    load_module.return_value.migrate_params = lambda params: {
        **params,
        "version": "v2",
    }

    # Now the user requests to change params.
    #
    # The user was _viewing_ version '2' of module 'x', though
    # `step.params` was at version 1. (Workbench ran `migrate_params()`
    # without saving the result when it presented `params` to the user.)
    # So the changes should apply atop _migrated_ params.
    self.run_with_async_db(
        commands.do(
            ChangeParametersCommand,
            workflow_id=workflow.id,
            wf_module=step,
            new_values={"x": 2},
        )
    )
    self.assertEqual(
        step.params,
        {
            "version": "v2",  # migrate_params() ran
            "x": 2,  # and we applied changes on top of its output
        },
    )

    # NOTE(review): the original stored the command and undid via
    # commands.undo(cmd); preserved here.
    cmd = workflow.last_delta
    self.run_with_async_db(commands.undo(cmd))
    self.assertEqual(
        step.params, {"version": "v1", "x": 1}  # exactly what we had before
    )
def test_add_many_modules(self):
    """Insert modules at head, tail and middle; verify versions, delta chain, undo.

    Defect fixed: the original assigned ``v2 = cmd1.id`` twice in a row —
    the second assignment was dead code and is removed.
    """
    existing_module = self.tab.wf_modules.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=self.delta.id,
        params={"url": ""},
    )
    self.workflow.refresh_from_db()
    v1 = self.workflow.last_delta_id

    # beginning state: one WfModule
    all_modules = self.tab.live_wf_modules

    # Insert at beginning
    cmd1 = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-2",
            module_id_name=self.module_version.id_name,
            position=0,
            param_values={},
        )
    )
    v2 = cmd1.id
    self.assertEqual(all_modules.count(), 2)
    self.assertEqual(cmd1.wf_module.order, 0)
    self.assertNotEqual(cmd1.wf_module, existing_module)
    self.assertWfModuleVersions([v2, v2])

    # Insert at end
    cmd2 = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-3",
            module_id_name=self.module_version.id_name,
            position=2,
            param_values={},
        )
    )
    v3 = cmd2.id
    self.assertEqual(all_modules.count(), 3)
    self.assertEqual(cmd2.wf_module.order, 2)
    self.assertWfModuleVersions([v2, v2, v3])

    # Insert in between two modules
    cmd3 = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-4",
            module_id_name=self.module_version.id_name,
            position=2,
            param_values={},
        )
    )
    v4 = cmd3.id
    self.assertEqual(all_modules.count(), 4)
    self.assertEqual(cmd3.wf_module.order, 2)
    self.assertWfModuleVersions([v2, v2, v4, v4])

    # Check the delta chain, should be 1 <-> 2 <-> 3
    self.workflow.refresh_from_db()
    cmd1.refresh_from_db()
    cmd2.refresh_from_db()
    cmd3.refresh_from_db()
    self.assertEqual(self.workflow.last_delta, cmd3)
    with self.assertRaises(Delta.DoesNotExist):
        cmd3.next_delta
    self.assertEqual(cmd3.prev_delta, cmd2)
    self.assertEqual(cmd2.prev_delta, cmd1)
    self.assertEqual(cmd1.prev_delta_id, self.delta.id)

    # We should be able to go all the way back
    self.run_with_async_db(commands.undo(cmd3))
    self.assertWfModuleVersions([v2, v2, v3])
    self.run_with_async_db(commands.undo(cmd2))
    self.assertWfModuleVersions([v2, v2])
    self.run_with_async_db(commands.undo(cmd1))
    self.assertWfModuleVersions([v1])
    self.assertEqual(
        list(all_modules.values_list("id", flat=True)), [existing_module.id]
    )
def test_add_block_to_automatically_generated_report(self, send_update):
    """AddBlock on an auto report materializes it as a custom report; undo reverts."""
    future_none = asyncio.Future()
    future_none.set_result(None)
    send_update.return_value = future_none

    create_module_zipfile("chart", spec_kwargs={"html_output": True})
    workflow = Workflow.create_and_init(has_custom_report=False)
    tab = workflow.tabs.first()
    tab.steps.create(order=0, slug="step-1", module_id_name="nochart")
    chart_step_a = tab.steps.create(order=1, slug="step-2", module_id_name="chart")
    chart_step_b = tab.steps.create(order=2, slug="step-3", module_id_name="chart")

    self.run_with_async_db(
        commands.do(
            AddBlock,
            workflow_id=workflow.id,
            position=1,
            slug="block-1",
            block_type="Text",
            text_markdown="hi!",
        )
    )
    workflow.refresh_from_db()
    # The auto-report (one Chart block per html_output step) was
    # materialized, with the new Text block spliced in at position 1.
    self.assertEqual(workflow.has_custom_report, True)
    self.assertEqual(
        list(
            workflow.blocks.values_list(
                "position", "slug", "block_type", "text_markdown", "step_id"
            )
        ),
        [
            (0, "block-auto-step-2", "Chart", "", chart_step_a.id),
            (1, "block-1", "Text", "hi!", None),
            (2, "block-auto-step-3", "Chart", "", chart_step_b.id),
        ],
    )
    forward_update = send_update.call_args[0][1]
    self.assertEqual(forward_update.workflow.has_custom_report, True)
    self.assertEqual(
        forward_update.workflow.block_slugs,
        ["block-auto-step-2", "block-1", "block-auto-step-3"],
    )
    self.assertEqual(
        forward_update.blocks,
        {
            "block-auto-step-2": clientside.ChartBlock("step-2"),
            "block-1": clientside.TextBlock("hi!"),
            "block-auto-step-3": clientside.ChartBlock("step-3"),
        },
    )

    self.run_with_async_db(commands.undo(workflow.id))
    workflow.refresh_from_db()
    # Back to the automatic report: no stored blocks at all.
    self.assertEqual(workflow.has_custom_report, False)
    self.assertEqual(list(workflow.blocks.values_list("slug", "position")), [])
    backward_update = send_update.call_args[0][1]
    self.assertEqual(backward_update.workflow.has_custom_report, False)
    self.assertEqual(backward_update.workflow.block_slugs, [])
    self.assertEqual(
        backward_update.clear_block_slugs,
        frozenset(["block-auto-step-2", "block-1", "block-auto-step-3"]),
    )
    self.assertEqual(backward_update.blocks, {})
def test_duplicate_empty_tab(self, send_update, queue_render):
    """Duplicating an empty tab sends updates but never queues a render."""
    send_update.side_effect = async_noop
    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()

    cmd = self.run_with_async_db(
        commands.do(
            DuplicateTab,
            workflow_id=workflow.id,
            from_tab=tab,
            slug="tab-2",
            name="Tab 2",
        )
    )

    # Forward: the new tab exists
    cmd.tab.refresh_from_db()
    self.assertFalse(cmd.tab.is_deleted)
    self.assertEqual(cmd.tab.slug, "tab-2")
    self.assertEqual(cmd.tab.name, "Tab 2")
    workflow.refresh_from_db()
    send_update.assert_called_with(
        workflow.id,
        clientside.Update(
            workflow=clientside.WorkflowUpdate(
                updated_at=workflow.updated_at, tab_slugs=["tab-1", "tab-2"]
            ),
            tabs={
                "tab-2": clientside.TabUpdate(
                    slug="tab-2",
                    name="Tab 2",
                    step_ids=[],
                    selected_step_index=None,
                )
            },
        ),
    )

    # Backward: should delete tab
    self.run_with_async_db(commands.undo(workflow.id))
    cmd.tab.refresh_from_db()
    self.assertTrue(cmd.tab.is_deleted)
    workflow.refresh_from_db()
    send_update.assert_called_with(
        workflow.id,
        clientside.Update(
            workflow=clientside.WorkflowUpdate(
                updated_at=workflow.updated_at, tab_slugs=["tab-1"]
            ),
            clear_tab_slugs=frozenset(["tab-2"]),
        ),
    )

    # Forward again: should bring us back
    self.run_with_async_db(commands.redo(workflow.id))
    cmd.tab.refresh_from_db()
    self.assertFalse(cmd.tab.is_deleted)
    workflow.refresh_from_db()
    send_update.assert_called_with(
        workflow.id,
        clientside.Update(
            workflow=clientside.WorkflowUpdate(
                updated_at=workflow.updated_at, tab_slugs=["tab-1", "tab-2"]
            ),
            tabs={
                "tab-2": clientside.TabUpdate(
                    slug="tab-2",
                    name="Tab 2",
                    step_ids=[],
                    selected_step_index=None,
                )
            },
        ),
    )

    # There should never be a render: we aren't changing any module
    # outputs.
    queue_render.assert_not_called()
def test_delete_custom_report_blocks(self, send_update):
    """Deleting a step deletes its report blocks; undo recreates them (new IDs)."""
    future_none = asyncio.Future()
    future_none.set_result(None)
    send_update.return_value = future_none

    workflow = Workflow.create_and_init(has_custom_report=True)  # tab-1
    tab1 = workflow.tabs.first()
    step1 = tab1.steps.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=workflow.last_delta_id,
        params={"url": ""},
    )
    step2 = tab1.steps.create(
        order=0,
        slug="step-2",
        last_relevant_delta_id=workflow.last_delta_id,
        params={"url": ""},
    )
    # The report includes step1 twice, plus another step's block that must
    # not be touched.
    block1 = workflow.blocks.create(
        position=0, slug="block-step-1-1", block_type="Chart", step=step1
    )
    block2 = workflow.blocks.create(
        position=1, slug="block-step-2", block_type="Chart", step=step2
    )
    block3 = workflow.blocks.create(
        position=2, slug="block-step-1-2", block_type="Chart", step=step1
    )

    self.run_with_async_db(
        commands.do(DeleteStep, workflow_id=workflow.id, step=step1)
    )
    with self.assertRaises(Block.DoesNotExist):
        block1.refresh_from_db()
    with self.assertRaises(Block.DoesNotExist):
        block3.refresh_from_db()
    block2.refresh_from_db()
    self.assertEqual(block2.position, 0)
    send_update.assert_called()
    update = send_update.call_args[0][1]
    self.assertEqual(update.workflow.block_slugs, ["block-step-2"])
    self.assertEqual(
        update.tabs, {"tab-1": clientside.TabUpdate(step_ids=[step2.id])}
    )
    self.assertEqual(update.clear_step_ids, frozenset([step1.id]))
    self.assertEqual(update.blocks, {})

    self.run_with_async_db(commands.undo(workflow.id))
    # The old blocks stay deleted. We expect new blocks with new IDs.
    with self.assertRaises(Block.DoesNotExist):
        block1.refresh_from_db()
    with self.assertRaises(Block.DoesNotExist):
        block3.refresh_from_db()
    new_block1 = workflow.blocks.get(slug=block1.slug)
    new_block3 = workflow.blocks.get(slug=block3.slug)
    self.assertEqual(new_block1.step_id, step1.id)
    self.assertEqual(new_block3.step_id, step1.id)
    block2.refresh_from_db()
    self.assertEqual(new_block1.position, 0)
    self.assertEqual(block2.position, 1)
    self.assertEqual(new_block3.position, 2)
    send_update.assert_called()
    update = send_update.call_args[0][1]
    self.assertEqual(
        update.workflow.block_slugs,
        ["block-step-1-1", "block-step-2", "block-step-1-2"],
    )
    self.assertEqual(
        update.tabs, {"tab-1": clientside.TabUpdate(step_ids=[step1.id, step2.id])}
    )
    self.assertEqual(
        update.blocks,
        {
            "block-step-1-1": clientside.ChartBlock("step-1"),
            "block-step-1-2": clientside.ChartBlock("step-1"),
        },
    )

    self.run_with_async_db(commands.redo(workflow.id))
    block2.refresh_from_db()
    self.assertEqual(block2.position, 0)
def test_undo_redo(self):
    """Exercise the whole undo/redo stack, including redo-stack pruning.

    Defect fixed: a trailing comment claimed ``# v1, v4 deleted`` where the
    asserted set is ``{v0, v1, v5}`` — v1 is still present; only v4 was
    deleted (v2 and v3 were already pruned earlier). The comment now matches
    the assertion.
    """
    mz = create_module_zipfile(
        "loadsomething",
        spec_kwargs={"parameters": [{"id_name": "csv", "type": "string"}]},
    )
    self.kernel.migrate_params.side_effect = lambda m, p: p

    workflow = Workflow.create_and_init()
    tab = workflow.tabs.first()
    all_modules = tab.live_wf_modules

    # beginning state: nothing
    v0 = workflow.last_delta_id

    # Test undoing nothing at all. Should NOP
    self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(all_modules.count(), 0)
    self.assertEqual(workflow.last_delta_id, v0)

    # Add a module
    cmd1 = self.run_with_async_db(
        commands.do(
            AddModuleCommand,
            workflow_id=workflow.id,
            tab=tab,
            slug="step-1",
            module_id_name="loadsomething",
            position=0,
            param_values={},
        )
    )
    v1 = cmd1.id
    workflow.refresh_from_db()
    self.assertEqual(all_modules.count(), 1)
    self.assertGreater(v1, v0)
    self.assertEqual(workflow.last_delta_id, v1)
    self.assertWfModuleVersions(tab, [v1])

    # Undo, ensure we are back at start
    self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(all_modules.count(), 0)
    self.assertEqual(workflow.last_delta_id, v0)
    self.assertWfModuleVersions(tab, [])

    # Redo, ensure we are back at v1
    self.run_with_async_db(WorkflowRedo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(all_modules.count(), 1)
    self.assertEqual(workflow.last_delta_id, v1)
    self.assertWfModuleVersions(tab, [v1])

    # Change a parameter
    with self.assertLogs(level=logging.INFO):
        cmd2 = self.run_with_async_db(
            commands.do(
                ChangeParametersCommand,
                workflow_id=workflow.id,
                wf_module=tab.live_wf_modules.first(),
                new_values={"csv": "some value"},
            )
        )
    v2 = cmd2.id
    workflow.refresh_from_db()
    self.assertEqual(tab.live_wf_modules.first().params["csv"], "some value")
    self.assertEqual(workflow.last_delta_id, v2)
    self.assertGreater(v2, v1)
    self.assertWfModuleVersions(tab, [v2])

    # Undo parameter change
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta_id, v1)
    self.assertEqual(tab.live_wf_modules.first().params["csv"], "")
    self.assertWfModuleVersions(tab, [v1])

    # Redo
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(WorkflowRedo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta_id, v2)
    self.assertEqual(tab.live_wf_modules.first().params["csv"], "some value")
    self.assertWfModuleVersions(tab, [v2])

    # Redo again should do nothing
    self.run_with_async_db(WorkflowRedo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta_id, v2)
    self.assertEqual(tab.live_wf_modules.first().params["csv"], "some value")
    self.assertWfModuleVersions(tab, [v2])

    # Add one more command so the stack is 3 deep
    cmd3 = self.run_with_async_db(
        commands.do(
            ChangeWorkflowTitleCommand,
            workflow_id=workflow.id,
            new_value="New Title",
        )
    )
    v3 = cmd3.id
    self.assertGreater(v3, v2)
    self.assertWfModuleVersions(tab, [v2])

    # Undo twice
    self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta, cmd2)
    self.assertWfModuleVersions(tab, [v2])
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta, cmd1)
    self.assertWfModuleVersions(tab, [v1])

    # Redo twice
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(WorkflowRedo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta, cmd2)
    self.assertWfModuleVersions(tab, [v2])
    self.run_with_async_db(WorkflowRedo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta, cmd3)
    self.assertWfModuleVersions(tab, [v2])

    # Undo again to get to a place where we have two commands to redo
    self.run_with_async_db(WorkflowUndo(workflow.id))
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta, cmd1)

    # Now add a new command. It should remove cmd2, cmd3 from the redo
    # stack and delete them from the db
    step = all_modules.first()
    cmd4 = self.run_with_async_db(
        commands.do(
            ChangeWfModuleNotesCommand,
            workflow_id=workflow.id,
            wf_module=step,
            new_value="Note of no note",
        )
    )
    v4 = cmd4.id
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta_id, v4)
    self.assertEqual(
        set(Delta.objects.values_list("id", flat=True)), {v0, v1, v4}
    )  # v2, v3 deleted

    # Undo to v1, then add a command; ensure it deletes the dangling cmd4
    # (tests an edge case in Delta.save)
    self.run_with_async_db(WorkflowUndo(workflow.id))
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta_id, v1)
    cmd5 = self.run_with_async_db(
        commands.do(
            ChangeWfModuleNotesCommand,
            workflow_id=workflow.id,
            wf_module=cmd1.wf_module,
            new_value="Note of some note",
        )
    )
    v5 = cmd5.id
    workflow.refresh_from_db()
    self.assertEqual(workflow.last_delta_id, v5)
    self.assertEqual(
        set(Delta.objects.values_list("id", flat=True)), {v0, v1, v5}
    )  # v4 deleted (v2, v3 were already gone)
    self.assertWfModuleVersions(tab, [v1])
def test_add_many_modules(self):
    """Insert Steps at the beginning, end and middle, then undo all three.

    Verifies each insertion's position, the last_relevant_delta_id
    bookkeeping (via assertStepVersions) and that undoing everything
    restores the original single-Step tab.
    """
    v1 = 1
    existing_module = self.tab.steps.create(
        order=0,
        slug="step-1",
        last_relevant_delta_id=v1,  # keep in sync with v1 above
        params={"url": ""},
    )
    # beginning state: one Step
    all_modules = self.tab.live_steps

    # Insert at beginning
    cmd1 = self.run_with_async_db(
        commands.do(
            AddStep,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-2",
            module_id_name=self.module_zipfile.module_id,
            position=0,
            param_values={},
        )
    )
    v2 = cmd1.id  # (original assigned this twice; once is enough)
    self.assertEqual(all_modules.count(), 2)
    self.assertEqual(cmd1.step.order, 0)
    self.assertNotEqual(cmd1.step, existing_module)
    self.assertStepVersions([v2, v2])

    # Insert at end
    cmd2 = self.run_with_async_db(
        commands.do(
            AddStep,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-3",
            module_id_name=self.module_zipfile.module_id,
            position=2,
            param_values={},
        )
    )
    v3 = cmd2.id
    self.assertEqual(all_modules.count(), 3)
    self.assertEqual(cmd2.step.order, 2)
    self.assertStepVersions([v2, v2, v3])

    # Insert in between two modules
    cmd3 = self.run_with_async_db(
        commands.do(
            AddStep,
            workflow_id=self.workflow.id,
            tab=self.workflow.tabs.first(),
            slug="step-4",
            module_id_name=self.module_zipfile.module_id,
            position=2,
            param_values={},
        )
    )
    v4 = cmd3.id
    self.assertEqual(all_modules.count(), 4)
    self.assertEqual(cmd3.step.order, 2)
    self.assertStepVersions([v2, v2, v4, v4])

    # We should be able to go all the way back
    self.run_with_async_db(commands.undo(self.workflow.id))
    self.assertStepVersions([v2, v2, v3])
    self.run_with_async_db(commands.undo(self.workflow.id))
    self.assertStepVersions([v2, v2])
    self.run_with_async_db(commands.undo(self.workflow.id))
    self.assertStepVersions([v1])
    self.assertEqual(
        list(all_modules.values_list("id", flat=True)), [existing_module.id]
    )
def test_duplicate_nonempty_unrendered_tab(self, send_update, queue_render):
    """Duplicating a tab with unrendered steps must queue a render.

    The duplicates carry their sources' last_relevant_delta_id, so a
    fresh render is needed after do and redo -- but not after undo.
    """
    send_update.side_effect = async_noop
    queue_render.side_effect = async_noop

    workflow = Workflow.create_and_init()
    init_delta_id = workflow.last_delta_id
    tab = workflow.tabs.first()
    tab.selected_step_position = 1
    tab.save(update_fields=["selected_step_position"])

    # Neither step has been rendered yet. (A render could conceivably be
    # running while we duplicate, so the duplicates must queue a render.)
    source1 = tab.steps.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        params={"p": "s1"},
        last_relevant_delta_id=init_delta_id,
    )
    tab.steps.create(
        order=1,
        slug="step-2",
        module_id_name="y",
        params={"p": "s2"},
        last_relevant_delta_id=init_delta_id,
    )

    cmd = self.run_with_async_db(
        commands.do(
            DuplicateTab,
            workflow_id=workflow.id,
            from_tab=tab,
            slug="tab-2",
            name="Tab 2",
        )
    )

    # The command created a new, live tab holding copies of both steps.
    cmd.tab.refresh_from_db()
    dup1, dup2 = cmd.tab.live_steps.all()
    self.assertFalse(cmd.tab.is_deleted)
    self.assertEqual(cmd.tab.slug, "tab-2")
    self.assertEqual(cmd.tab.name, "Tab 2")
    self.assertEqual(cmd.tab.selected_step_position, 1)
    self.assertEqual(dup1.order, 0)
    self.assertEqual(dup1.module_id_name, "x")
    self.assertEqual(dup1.params, {"p": "s1"})
    # `cmd.id` would be the intuitive delta ID here, but that would be
    # hard to implement (and we assume we don't need to): Duplicate also
    # duplicates _cache values_, which makes tweaking the source step's
    # delta ID expensive.
    self.assertEqual(dup1.last_relevant_delta_id, source1.last_relevant_delta_id)
    self.assertEqual(dup2.order, 1)
    self.assertEqual(dup2.module_id_name, "y")
    self.assertEqual(dup2.params, {"p": "s2"})
    self.assertNotEqual(dup1.id, source1.id)

    update1 = send_update.mock_calls[0][1][1]
    self.assertEqual(update1.tabs["tab-2"].step_ids, [dup1.id, dup2.id])
    self.assertEqual(set(update1.steps.keys()), {dup1.id, dup2.id})
    self.assertEqual(
        update1.steps[dup1.id].last_relevant_delta_id,
        source1.last_relevant_delta_id,
    )

    # We don't know whether a render is already queued, and the new steps
    # need one -- so a render must be requested.
    queue_render.assert_called_with(workflow.id, cmd.id)
    queue_render.reset_mock()  # so we can assert next time

    # undo: tab is soft-deleted and the client clears it. No render: the
    # steps can't possibly have changed, and nobody cares what's in their
    # cache.
    self.run_with_async_db(commands.undo(workflow.id))
    cmd.tab.refresh_from_db()
    self.assertTrue(cmd.tab.is_deleted)
    update2 = send_update.mock_calls[1][1][1]
    self.assertEqual(update2.clear_tab_slugs, frozenset(["tab-2"]))
    self.assertEqual(update2.clear_step_ids, frozenset([dup1.id, dup2.id]))
    queue_render.assert_not_called()

    # redo: the restored steps are still out-of-date -- render again.
    self.run_with_async_db(commands.redo(workflow.id))
    queue_render.assert_called_with(workflow.id, cmd.id)
def test_change_parameters(self):
    """Apply ChangeParametersCommand, then undo and redo it.

    loadurl is a good module choice because it has three parameters, two
    of which are useful.
    """
    workflow = Workflow.create_and_init()
    module_zipfile = create_module_zipfile(
        "loadsomething",
        spec_kwargs={
            "parameters": [
                {"id_name": "url", "type": "string"},
                {"id_name": "has_header", "type": "checkbox", "name": "HH"},
                {"id_name": "version_select", "type": "custom"},
            ]
        },
    )
    old_params = {
        "url": "http://example.org",
        "has_header": True,
        "version_select": "",
    }
    wf_module = workflow.tabs.first().wf_modules.create(
        module_id_name="loadurl",
        order=0,
        slug="step-1",
        last_relevant_delta_id=workflow.last_delta_id,
        params=old_params,
        cached_migrated_params=old_params,
        cached_migrated_params_module_version=module_zipfile.version,
    )

    # Create and apply the delta. It should change params.
    self.kernel.migrate_params.side_effect = lambda m, p: p
    with self.assertLogs(level=logging.INFO):
        cmd = self.run_with_async_db(
            commands.do(
                ChangeParametersCommand,
                workflow_id=workflow.id,
                wf_module=wf_module,
                new_values={"url": "http://example.com/foo", "has_header": False},
            )
        )
    new_params = {
        "url": "http://example.com/foo",
        "has_header": False,
        "version_select": "",
    }
    wf_module.refresh_from_db()
    self.assertEqual(wf_module.params, new_params)

    # undo. Building clientside.Update calls migrate_params(), so we must
    # capture logs.
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(commands.undo(cmd))
    wf_module.refresh_from_db()
    self.assertEqual(wf_module.params, old_params)

    # redo. Building clientside.Update calls migrate_params(), so we must
    # capture logs.
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(commands.redo(cmd))
    wf_module.refresh_from_db()
    self.assertEqual(wf_module.params, new_params)
def test_change_parameters_across_module_versions(self):
    """New values land atop migrate_params() output, not the stored params."""
    workflow = Workflow.create_and_init()

    # This Step was written by module 'x' version '1' -- a version we never
    # need to write in code; it might be long gone by the time
    # ChangeParametersCommand is called.
    wf_module = workflow.tabs.first().wf_modules.create(
        order=0,
        slug="step-1",
        module_id_name="x",
        last_relevant_delta_id=workflow.last_delta_id,
        params={"version": "v1", "x": 1},  # version-'1' params
        cached_migrated_params={"version": "v1", "x": 1},
        cached_migrated_params_module_version="v1",
    )

    # Install version '2' of module 'x'. Version '2''s migrate_params()
    # could do anything; in this test it simply rewrites 'version' from
    # 'v1' to 'v2'.
    create_module_zipfile(
        "x",
        spec_kwargs={
            "parameters": [
                {"id_name": "version", "type": "string"},
                {"id_name": "x", "type": "integer"},
            ]
        },
    )
    self.kernel.migrate_params.side_effect = lambda m, p: {**p, "version": "v2"}

    # The user was _viewing_ version '2' of module 'x', though
    # `wf_module.params` was still at version 1. (Workbench ran
    # migrate_params() without saving the result when presenting `params`
    # to the user.) So the changes must apply atop the _migrated_ params.
    with self.assertLogs(level=logging.INFO):
        cmd = self.run_with_async_db(
            commands.do(
                ChangeParametersCommand,
                workflow_id=workflow.id,
                wf_module=wf_module,
                new_values={"x": 2},
            )
        )
    self.assertEqual(
        wf_module.params,
        # migrate_params() ran, and we applied changes on top of its output
        {"version": "v2", "x": 2},
    )

    # undo. Building clientside.Update calls migrate_params(), so we must
    # capture logs.
    with self.assertLogs(level=logging.INFO):
        self.run_with_async_db(commands.undo(cmd))
    # Exactly what we had before the command.
    self.assertEqual(wf_module.params, {"version": "v1", "x": 1})