def test_execute_mark_unreachable(self, send_update):
        future_none = asyncio.Future()
        future_none.set_result(None)
        send_update.return_value = future_none

        workflow = Workflow.create_and_init()
        tab = workflow.tabs.first()
        delta_id = workflow.last_delta_id
        create_module_zipfile(
            "mod",
            python_code='def render(table, params): return "error, not warning"'
        )
        wf_module1 = tab.wf_modules.create(
            order=0,
            slug="step-1",
            last_relevant_delta_id=delta_id,
            module_id_name="mod",
        )
        wf_module2 = tab.wf_modules.create(
            order=1,
            slug="step-2",
            last_relevant_delta_id=delta_id,
            module_id_name="mod",
        )
        wf_module3 = tab.wf_modules.create(
            order=2,
            slug="step-3",
            last_relevant_delta_id=delta_id,
            module_id_name="mod",
        )

        error_result = RenderResult(
            errors=[RenderError(I18nMessage.TODO_i18n("error, not warning"))])

        self._execute(workflow)

        wf_module1.refresh_from_db()
        self.assertEqual(wf_module1.cached_render_result.status, "error")
        with open_cached_render_result(
                wf_module1.cached_render_result) as result:
            assert_render_result_equals(result, error_result)

        wf_module2.refresh_from_db()
        self.assertEqual(wf_module2.cached_render_result.status, "unreachable")
        with open_cached_render_result(
                wf_module2.cached_render_result) as result:
            assert_render_result_equals(result, RenderResult())

        wf_module3.refresh_from_db()
        self.assertEqual(wf_module3.cached_render_result.status, "unreachable")
        with open_cached_render_result(
                wf_module3.cached_render_result) as result:
            assert_render_result_equals(result, RenderResult())

        send_update.assert_called_with(
            workflow.id,
            clientside.Update(
                steps={
                    wf_module3.id:
                    clientside.StepUpdate(
                        render_result=wf_module3.cached_render_result,
                        module_slug="mod")
                }),
        )
Example #2
 def test_list_autofetches_session_gets_default_max_fetches_per_day(self):
     user = AnonymousUser()
     session = Session(session_key="foo")
     Workflow.create_and_init(anonymous_owner_session_key="foo")
     result = list_autofetches_json({"user": user, "session": session})
     self.assertEqual(result["maxFetchesPerDay"], 500)
Example #3
 def test_execute_empty_tab(self):
     workflow = Workflow.create_and_init()
     tab = workflow.tabs.first()
     tab_flow = TabFlow(tab.to_arrow(), [])
     with self._execute(workflow, tab_flow, {}) as result:
         assert_render_result_equals(result, RenderResult())
Example #4
 def test_auth_owner_owner(self):
     user = User()
     ret = self.run_handler(handle_owner,
                            user=user,
                            workflow=Workflow(owner=user))
     self.assertHandlerResponse(ret, {"role": "owner"})
Example #5
    def setUp(self):
        super().setUp()

        self.workflow = Workflow.create_and_init()
        self.step1 = self.workflow.tabs.first().wf_modules.create(
            order=0, slug="step-1")
Example #6
def _load_workflow_and_step_sync(
    request: HttpRequest,
    workflow_id_or_secret_id: Union[int, str],
    step_slug: str,
    accessing: Literal["all", "chart", "table"],
) -> Tuple[Workflow, Step]:
    """Load (Workflow, Step) from database, or raise Http404 or PermissionDenied.

    `Step.tab` will be loaded. (`Step.tab.workflow_id` is needed to access the render
    cache.)

    To avoid PermissionDenied:

    * The workflow must be public; OR
    * The user must be workflow owner, editor or viewer; OR
    * The user must be workflow report-viewer and the step must be a chart or
      table in the report.
    """
    try:
        if isinstance(workflow_id_or_secret_id, int):
            search = {"id": workflow_id_or_secret_id}
            has_secret = False
        else:
            search = {"secret_id": workflow_id_or_secret_id}
            has_secret = True

        with Workflow.lookup_and_cooperative_lock(**search) as workflow:
            if (has_secret or workflow.public
                    or workflow.request_authorized_owner(request)):
                need_report_auth = False
            elif request.user is None or request.user.is_anonymous:
                raise PermissionDenied()
            else:
                try:
                    acl_entry = workflow.acl.filter(
                        email=request.user.email).get()
                except AclEntry.DoesNotExist:
                    raise PermissionDenied()
                if acl_entry.role in {Role.VIEWER, Role.EDITOR}:
                    need_report_auth = False
                elif acl_entry.role == Role.REPORT_VIEWER:
                    need_report_auth = True
                else:
                    raise PermissionDenied()  # role we don't handle yet

            # raises Step.DoesNotExist if there's no such live step
            step = (
                Step.live_in_workflow(workflow.id)
                .select_related("tab")
                .get(slug=step_slug)
            )

            if need_report_auth:  # user is report-viewer
                if workflow.has_custom_report:
                    if (accessing == "chart" and
                            workflow.blocks.filter(step_id=step.id).exists()):
                        pass  # the step is a chart
                    elif (accessing == "table" and
                          workflow.blocks.filter(tab_id=step.tab_id).exists()
                          and not step.tab.live_steps.filter(
                              order__gt=step.order)):
                        pass  # step is a table (last step of a report-included tab)
                    else:
                        raise PermissionDenied()
                else:
                    # Auto-report: all Charts are allowed; everything else is not
                    try:
                        if accessing == "chart" and (MODULE_REGISTRY.latest(
                                step.module_id_name).get_spec().html_output):
                            pass
                        else:
                            raise PermissionDenied()
                    except KeyError:  # not a module
                        raise PermissionDenied()

            return workflow, step
    except (Workflow.DoesNotExist, Step.DoesNotExist):
        raise Http404()
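
A minimal usage sketch, not from the source: the view name, URL parameters and
response shape are assumptions. Http404 and PermissionDenied propagate to
Django's handlers, and the returned step arrives with step.tab pre-loaded:

from django.http import JsonResponse

def step_metadata(request: HttpRequest, workflow_id: int,
                  step_slug: str) -> JsonResponse:
    # Hypothetical read-only view built on the helper above.
    workflow, step = _load_workflow_and_step_sync(
        request, workflow_id, step_slug, accessing="table")
    # step.tab was select_related()-ed, so reading it costs no extra query.
    return JsonResponse({"workflowId": workflow.id, "tabSlug": step.tab.slug})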
Example #7
 def test_auth_read_public(self):
     ret = self.run_handler(handle_read,
                            workflow=Workflow(owner=User(), public=True))
     self.assertHandlerResponse(ret, {"role": "read"})
Example #8
def _first_forward_and_save_returning_clientside_update(
    cls, workflow_id: int, **kwargs
) -> Tuple[Optional[Delta], Optional[clientside.Update], Optional[int]]:
    """
    Create and execute `cls` command; return `(Delta, WebSocket data, render?)`.

    If `amend_create_kwargs()` returns `None`, return `(None, None)` here.

    All this, in a cooperative lock.

    Return `(None, None, None)` if `cls.amend_create_kwargs()` returns `None`.
    This is how `cls.amend_create_kwargs()` suggests the Delta should not be
    created at all.
    """
    now = datetime.datetime.now()
    command = NAME_TO_COMMAND[cls.__name__]
    try:
        # raise Workflow.DoesNotExist
        with Workflow.lookup_and_cooperative_lock(
                id=workflow_id) as workflow_lock:
            workflow = workflow_lock.workflow
            create_kwargs = command.amend_create_kwargs(workflow=workflow,
                                                        **kwargs)
            if not create_kwargs:
                return None, None, None

            # Look up unapplied deltas to delete: that's the linked list that
            # comes _after_ `workflow.last_delta_id`.
            n_deltas_deleted, _ = workflow.deltas.filter(
                id__gt=workflow.last_delta_id).delete()

            # prev_delta is None when we're at the start of the undo stack
            prev_delta = workflow.deltas.filter(
                id=workflow.last_delta_id).first()

            # Delta.objects.create() and command.forward() may raise unexpected errors
            # Defer delete_orphan_soft_deleted_models(), to reduce the risk of this
            # race: 1. Delete DB objects; 2. Delete S3 files; 3. ROLLBACK. (We aren't
            # avoiding the race _entirely_ here, but we're at least avoiding causing
            # the race through errors in Delta or Command.)
            delta = Delta.objects.create(
                command_name=cls.__name__,
                prev_delta=prev_delta,
                last_applied_at=now,
                **create_kwargs,
            )
            command.forward(delta)

            # Point workflow to us
            workflow.last_delta_id = delta.id
            workflow.updated_at = datetime.datetime.now()
            workflow.save(update_fields=["last_delta_id", "updated_at"])

            if n_deltas_deleted:
                # We just deleted deltas; now we can garbage-collect Tabs and
                # Steps that are soft-deleted and have no deltas referring
                # to them.
                workflow.delete_orphan_soft_deleted_models()

            return (
                delta,
                command.load_clientside_update(delta),
                delta.id
                if command.get_modifies_render_output(delta) else None,
            )
    except Workflow.DoesNotExist:
        return None, None, None
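
A hedged sketch of a caller consuming the returned triple. The do() wrapper is
an assumption modeled on the commands.do(...) calls in the tests; send_update
and queue_render are the functions those tests patch:

from channels.db import database_sync_to_async

async def do(cls, *, workflow_id: int, **kwargs):
    # Hypothetical async wrapper: create the Delta, notify clients, and
    # queue a render only when the command modified render output.
    delta, update, render_delta_id = await database_sync_to_async(
        _first_forward_and_save_returning_clientside_update
    )(cls, workflow_id, **kwargs)
    if delta is None:
        return None  # amend_create_kwargs() vetoed creating the Delta
    await send_update(workflow_id, update)
    if render_delta_id is not None:
        await queue_render(workflow_id, render_delta_id)
    return delta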
Example #9
    def test_change_parameters_across_module_versions(self):
        workflow = Workflow.create_and_init()

        # Initialize a WfModule that used module 'x' version '1'. (We don't
        # need to write that version's code -- after all, it might be long
        # gone by the time ChangeParametersCommand is called.)
        wf_module = workflow.tabs.first().wf_modules.create(
            order=0,
            slug="step-1",
            module_id_name="x",
            last_relevant_delta_id=workflow.last_delta_id,
            params={"version": "v1", "x": 1},  # version-'1' params
            cached_migrated_params={"version": "v1", "x": 1},
            cached_migrated_params_module_version="v1",
        )

        # Now install version '2' of module 'x'.
        #
        # Version '2's migrate_params() could do anything; in this test, it
        # simply changes 'version' from 'v1' to 'v2'.
        create_module_zipfile(
            "x",
            spec_kwargs={
                "parameters": [
                    {"id_name": "version", "type": "string"},
                    {"id_name": "x", "type": "integer"},
                ]
            },
        )
        self.kernel.migrate_params.side_effect = lambda m, p: {**p, "version": "v2"}

        # Now the user requests to change params.
        #
        # The user was _viewing_ version '2' of module 'x', though
        # `wf_module.params` was at version 1. (Workbench ran
        # `migrate_params()` without saving the result when it
        # presented `params` to the user.) So the changes should apply atop
        # _migrated_ params.
        with self.assertLogs(level=logging.INFO):
            cmd = self.run_with_async_db(
                commands.do(
                    ChangeParametersCommand,
                    workflow_id=workflow.id,
                    wf_module=wf_module,
                    new_values={"x": 2},
                )
            )
        self.assertEqual(
            wf_module.params,
            {
                "version": "v2",  # migrate_params() ran
                "x": 2,  # and we applied changes on top of its output
            },
        )

        with self.assertLogs(level=logging.INFO):
            # building clientside.Update will migrate_params(), so we need
            # to capture logs.
            self.run_with_async_db(commands.undo(cmd))
        self.assertEqual(
            wf_module.params, {"version": "v1", "x": 1}  # exactly what we had before
        )
Example #10
    def test_duplicate_empty_tab(self, send_update, queue_render):
        send_update.side_effect = async_noop
        workflow = Workflow.create_and_init()
        tab = workflow.tabs.first()

        cmd = self.run_with_async_db(
            commands.do(
                DuplicateTab,
                workflow_id=workflow.id,
                from_tab=tab,
                slug="tab-2",
                name="Tab 2",
            ))

        # Adds new tab
        cmd.tab.refresh_from_db()
        self.assertFalse(cmd.tab.is_deleted)
        self.assertEqual(cmd.tab.slug, "tab-2")
        self.assertEqual(cmd.tab.name, "Tab 2")
        workflow.refresh_from_db()
        send_update.assert_called_with(
            workflow.id,
            clientside.Update(
                workflow=clientside.WorkflowUpdate(
                    updated_at=workflow.updated_at,
                    tab_slugs=["tab-1", "tab-2"]),
                tabs={
                    "tab-2":
                    clientside.TabUpdate(
                        slug="tab-2",
                        name="Tab 2",
                        step_ids=[],
                        selected_step_index=None,
                    )
                },
            ),
        )

        # Backward: should delete tab
        self.run_with_async_db(commands.undo(workflow.id))
        cmd.tab.refresh_from_db()
        self.assertTrue(cmd.tab.is_deleted)
        workflow.refresh_from_db()
        send_update.assert_called_with(
            workflow.id,
            clientside.Update(
                workflow=clientside.WorkflowUpdate(
                    updated_at=workflow.updated_at, tab_slugs=["tab-1"]),
                clear_tab_slugs=frozenset(["tab-2"]),
            ),
        )

        # Forward: should bring us back
        self.run_with_async_db(commands.redo(workflow.id))
        cmd.tab.refresh_from_db()
        self.assertFalse(cmd.tab.is_deleted)
        workflow.refresh_from_db()
        send_update.assert_called_with(
            workflow.id,
            clientside.Update(
                workflow=clientside.WorkflowUpdate(
                    updated_at=workflow.updated_at,
                    tab_slugs=["tab-1", "tab-2"]),
                tabs={
                    "tab-2":
                    clientside.TabUpdate(
                        slug="tab-2",
                        name="Tab 2",
                        step_ids=[],
                        selected_step_index=None,
                    )
                },
            ),
        )

        # There should never be a render: we aren't changing any module
        # outputs.
        queue_render.assert_not_called()
Example #11
    def test_duplicate_nonempty_unrendered_tab(self, send_update,
                                               queue_render):
        send_update.side_effect = async_noop
        queue_render.side_effect = async_noop
        workflow = Workflow.create_and_init()
        init_delta_id = workflow.last_delta_id
        tab = workflow.tabs.first()
        tab.selected_step_position = 1
        tab.save(update_fields=["selected_step_position"])
        # step1 and step2 have not yet been rendered. (But while we're
        # duplicating, conceivably a render could be running; so when we
        # duplicate them, we need to queue a render.)
        step1 = tab.steps.create(
            order=0,
            slug="step-1",
            module_id_name="x",
            params={"p": "s1"},
            last_relevant_delta_id=init_delta_id,
        )
        tab.steps.create(
            order=1,
            slug="step-2",
            module_id_name="y",
            params={"p": "s2"},
            last_relevant_delta_id=init_delta_id,
        )

        cmd = self.run_with_async_db(
            commands.do(
                DuplicateTab,
                workflow_id=workflow.id,
                from_tab=tab,
                slug="tab-2",
                name="Tab 2",
            ))

        # Adds new tab
        cmd.tab.refresh_from_db()
        [step1dup, step2dup] = list(cmd.tab.live_steps.all())
        self.assertFalse(cmd.tab.is_deleted)
        self.assertEqual(cmd.tab.slug, "tab-2")
        self.assertEqual(cmd.tab.name, "Tab 2")
        self.assertEqual(cmd.tab.selected_step_position, 1)
        self.assertEqual(step1dup.order, 0)
        self.assertEqual(step1dup.module_id_name, "x")
        self.assertEqual(step1dup.params, {"p": "s1"})
        self.assertEqual(
            step1dup.last_relevant_delta_id,
            # `cmd.id` would be intuitive, but that would be hard
            # to implement (and we assume we don't need to).
            # (Duplicate also duplicates _cache values_, which
            # means it's expensive to tweak step1's delta ID.)
            step1.last_relevant_delta_id,
        )
        self.assertEqual(step2dup.order, 1)
        self.assertEqual(step2dup.module_id_name, "y")
        self.assertEqual(step2dup.params, {"p": "s2"})
        self.assertNotEqual(step1dup.id, step1.id)
        delta = send_update.mock_calls[0][1][1]
        self.assertEqual(delta.tabs["tab-2"].step_ids,
                         [step1dup.id, step2dup.id])
        self.assertEqual(set(delta.steps.keys()),
                         set([step1dup.id, step2dup.id]))
        step1update = delta.steps[step1dup.id]
        self.assertEqual(step1update.last_relevant_delta_id,
                         step1.last_relevant_delta_id)
        # We should call render: we don't know whether there's a render queued;
        # and these new steps are in need of render.
        queue_render.assert_called_with(workflow.id, cmd.id)
        queue_render.reset_mock()  # so we can assert next time

        # undo
        self.run_with_async_db(commands.undo(workflow.id))
        cmd.tab.refresh_from_db()
        self.assertTrue(cmd.tab.is_deleted)
        delta = send_update.mock_calls[1][1][1]
        self.assertEqual(delta.clear_tab_slugs, frozenset(["tab-2"]))
        self.assertEqual(delta.clear_step_ids,
                         frozenset([step1dup.id, step2dup.id]))
        # No need to call render(): these modules can't possibly have changed,
        # and nobody cares what's in their cache.
        queue_render.assert_not_called()

        # redo
        self.run_with_async_db(commands.redo(workflow.id))
        # Need to call render() again -- these modules are still out-of-date
        queue_render.assert_called_with(workflow.id, cmd.id)
Example #12
 def init_db():
     return Workflow.create_and_init(
         anonymous_owner_session_key="some-other-key")
Example #13
 def init_db():
     return Workflow.create_and_init(
         owner=None, anonymous_owner_session_key="a-key")
Example #14
 def init_db():
     return Workflow.create_and_init(
         name="Workflow 2",
         owner=User.objects.create(username="******",
                                   email="*****@*****.**"),
     )
Example #15
def authorized_write(workflow: Workflow, request: HttpRequest,
                     using_secret: bool) -> Tuple[bool, bool]:
    return Workflow.request_authorized_write(workflow, request), False
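
Note this helper returns a pair; the first element is the authorization
decision. A hedged consumer sketch (the dispatch code is hypothetical, and the
second element is ignored here):

allowed, _flag = authorized_write(workflow, request, using_secret=False)
if not allowed:
    raise PermissionDenied()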
Example #16
    def test_change_parameters(self):
        # Setup: workflow with a loadurl-like module.
        #
        # It is a good fixture because it has three parameters, two of
        # which are useful.
        workflow = Workflow.create_and_init()

        module_zipfile = create_module_zipfile(
            "loadsomething",
            spec_kwargs={
                "parameters": [
                    {"id_name": "url", "type": "string"},
                    {"id_name": "has_header", "type": "checkbox", "name": "HH"},
                    {"id_name": "version_select", "type": "custom"},
                ]
            },
        )

        params1 = {
            "url": "http://example.org",
            "has_header": True,
            "version_select": "",
        }

        wf_module = workflow.tabs.first().wf_modules.create(
            module_id_name="loadurl",
            order=0,
            slug="step-1",
            last_relevant_delta_id=workflow.last_delta_id,
            params=params1,
            cached_migrated_params=params1,
            cached_migrated_params_module_version=module_zipfile.version,
        )

        # Create and apply delta. It should change params.
        self.kernel.migrate_params.side_effect = lambda m, p: p
        with self.assertLogs(level=logging.INFO):
            cmd = self.run_with_async_db(
                commands.do(
                    ChangeParametersCommand,
                    workflow_id=workflow.id,
                    wf_module=wf_module,
                    new_values={"url": "http://example.com/foo", "has_header": False},
                )
            )
        wf_module.refresh_from_db()

        params2 = {
            "url": "http://example.com/foo",
            "has_header": False,
            "version_select": "",
        }
        self.assertEqual(wf_module.params, params2)

        # undo
        with self.assertLogs(level=logging.INFO):
            # building clientside.Update will migrate_params(), so we need
            # to capture logs.
            self.run_with_async_db(commands.undo(cmd))
        wf_module.refresh_from_db()
        self.assertEqual(wf_module.params, params1)

        # redo
        with self.assertLogs(level=logging.INFO):
            # building clientside.Update will migrate_params(), so we need
            # to capture logs.
            self.run_with_async_db(commands.redo(cmd))
        wf_module.refresh_from_db()
        self.assertEqual(wf_module.params, params2)
Example #17
def authorized_report_viewer(workflow: Workflow, request: HttpRequest,
                             using_secret: bool) -> Tuple[bool, bool]:
    user_allowed = Workflow.request_authorized_report_viewer(workflow, request)
    return (user_allowed or using_secret, user_allowed and using_secret)
Example #18
 def test_get_with_existing(self):
     self.log_in()
     Workflow.create_and_init(owner=self.user,
                              lesson_slug="load-public-data")
     self.client.get("/lessons/en/load-public-data")
     self.assertEqual(Workflow.objects.count(), 1)  # don't create duplicate

Example #19
@decorators.websockets_handler(role="write")
async def handle_write(workflow, **kwargs):
    return {"role": "write"}


@decorators.websockets_handler(role="owner")
async def handle_owner(workflow, **kwargs):
    return {"role": "owner"}


DefaultKwargs = {
    "user": AnonymousUser(),
    "session": Session(),
    "workflow": Workflow(),
    "path": "path",
    "arguments": {},
}


class WebsocketsHandlerDecoratorTest(HandlerTestCase):
    def handle(self, **kwargs):
        """handlers.handle(), synchronous."""
        request = self.build_request(**kwargs)
        return async_to_sync(handlers.handle)(request)

    def assertHandlerResponse(self, response, data=None, error=""):
        self.assertEqual(
            {"data": response.data, "error": response.error},
            {"data": data, "error": error},
        )
Example #20
    def test_pre_finish_enforce_storage_limits(self, send_update):
        send_update.side_effect = async_noop

        _init_module("x")
        self.kernel.migrate_params.side_effect = lambda m, p: p
        workflow = Workflow.create_and_init()
        step = workflow.tabs.first().steps.create(
            order=0,
            slug="step-123",
            module_id_name="x",
            file_upload_api_token="abc123",
            params={"file": None},
        )
        s3.put_bytes(s3.UserFilesBucket, "foo/1.txt", b"1")
        step.uploaded_files.create(
            created_at=datetime.datetime(2020, 1, 1),
            name="file1.txt",
            size=1,
            uuid="df46244d-268a-0001-9b47-360502dd9b32",
            key="foo/1.txt",
        )
        s3.put_bytes(s3.UserFilesBucket, "foo/2.txt", b"22")
        step.uploaded_files.create(
            created_at=datetime.datetime(2020, 1, 2),
            name="file2.txt",
            size=2,
            uuid="df46244d-268a-0002-9b47-360502dd9b32",
            key="foo/2.txt",
        )
        s3.put_bytes(s3.UserFilesBucket, "foo/3.txt", b"333")
        step.uploaded_files.create(
            created_at=datetime.datetime(2020, 1, 3),
            name="file3.txt",
            size=3,
            uuid="df46244d-268a-0003-9b47-360502dd9b32",
            key="foo/3.txt",
        )

        # Upload the new file, "file4.txt"
        s3.put_bytes(s3.TusUploadBucket, "new-key", b"4444")
        with self.assertLogs(level=logging.INFO):
            # Logs SetStepParams's migrate_params()
            response = self.client.post(
                f"/tusd-hooks",
                {
                    "Upload": {
                        "MetaData": {
                            "filename": "file4.txt",
                            "workflowId": str(workflow.id),
                            "stepSlug": step.slug,
                            "apiToken": "abc123",
                        },
                        "Size": 7,
                        "Storage": {
                            "Bucket": s3.TusUploadBucket,
                            "Key": "new-key"
                        },
                    }
                },
                HTTP_HOOK_NAME="pre-finish",
                content_type="application/json",
            )
        self.assertEqual(response.status_code, 200)

        # Test excess uploaded files were deleted
        self.assertEqual(
            list(
                step.uploaded_files.order_by("id").values_list("name",
                                                               flat=True)),
            ["file3.txt", "file4.txt"],
        )
        self.assertFalse(s3.exists(s3.UserFilesBucket, "foo/1.txt"))
        self.assertFalse(s3.exists(s3.UserFilesBucket, "foo/2.txt"))

        # Test delta nixes old files from clients' browsers
        send_update.assert_called()
        uploaded_file = step.uploaded_files.get(name="file4.txt")
        self.assertEqual(
            send_update.mock_calls[0][1][1].steps[step.id].files,
            [
                clientside.UploadedFile(
                    name="file4.txt",
                    uuid=uploaded_file.uuid,
                    size=7,
                    created_at=uploaded_file.created_at,
                ),
                clientside.UploadedFile(
                    name="file3.txt",
                    uuid="df46244d-268a-0003-9b47-360502dd9b32",
                    size=3,
                    created_at=datetime.datetime(2020, 1, 3),
                ),
            ],
        )
Example #21
 def test_auth_write_deny_public(self):
     ret = self.run_handler(handle_write,
                            workflow=Workflow(owner=User(), public=True))
     self.assertHandlerResponse(
         ret, error=("AuthError: no write access to workflow"))
Example #22
 def test_pre_finish_happy_path(self, queue_render, send_update):
     send_update.side_effect = async_noop
     queue_render.side_effect = async_noop
     _init_module("x")
     self.kernel.migrate_params.side_effect = lambda m, p: p
     workflow = Workflow.create_and_init()
     step = workflow.tabs.first().steps.create(
         order=0,
         slug="step-123",
         module_id_name="x",
         file_upload_api_token="abc123",
         params={"file": None},
     )
     s3.put_bytes(s3.TusUploadBucket, "data", b"1234567")
     with self.assertLogs(level=logging.INFO):
         # Logs SetStepParams's migrate_params()
         response = self.client.post(
             f"/tusd-hooks",
             {
                 "Upload": {
                     "MetaData": {
                         "filename": "foo.csv",
                         "workflowId": str(workflow.id),
                         "stepSlug": step.slug,
                         "apiToken": "abc123",
                     },
                     "Size": 7,
                     "Storage": {
                         "Bucket": s3.TusUploadBucket,
                         "Key": "data"
                     },
                 }
             },
             HTTP_HOOK_NAME="pre-finish",
             content_type="application/json",
         )
     self.assertEqual(response.status_code, 200)
     self.assertEqual(response.json(), {})
     # File was created
     uploaded_file = step.uploaded_files.first()
     self.assertRegex(
         uploaded_file.key,
         f"^wf-{workflow.id}/wfm-{step.id}/[-0-9a-f]{{36}}\\.csv$")
     self.assertEqual(
         get_s3_object_with_data(s3.UserFilesBucket,
                                 uploaded_file.key)["Body"],
         b"1234567",
     )
     self.assertEqual(uploaded_file.name, "foo.csv")
     # SetStepParams ran
     uuid = uploaded_file.key[-40:-4]
     step.refresh_from_db()
     self.assertEqual(step.params, {"file": uuid})
     # Send deltas
     send_update.assert_called()
     self.assertEqual(
         send_update.mock_calls[0][1][1].steps[step.id].files,
         [
             clientside.UploadedFile(
                 name="foo.csv",
                 uuid=uuid,
                 size=7,
                 created_at=uploaded_file.created_at,
             )
         ],
     )
     queue_render.assert_called()
Example #23
 def test_auth_owner_deny_non_owner(self):
     ret = self.run_handler(handle_owner, workflow=Workflow(owner=User()))
     self.assertHandlerResponse(
         ret, error=("AuthError: no owner access to workflow"))
Example #24
    def test_set_name_viewer_access_denied(self):
        workflow = Workflow.create_and_init(public=True)

        response = self.run_handler(set_name, workflow=workflow, name="B")
        self.assertResponse(response, error="AuthError: no write access to workflow")
Example #25
 def post(self, request: HttpRequest):
     """Create a new workflow."""
     workflow = Workflow.create_and_init(name="Untitled Workflow",
                                         owner=request.user,
                                         selected_tab_position=0)
     return redirect("/workflows/%d/" % workflow.id)
Example #26
 def test_index_ignore_lesson(self):
     Workflow.create_and_init(name="Hers", owner=self.user, lesson_slug="a-lesson")
     self.client.force_login(self.user)
     response = self.client.get("/workflows")
     self.assertEqual(response.context_data["initState"]["workflows"], [])
Example #27
    def test_list_autofetches_two_workflows(self):
        user = User.objects.create(username="******", email="*****@*****.**")
        workflow = Workflow.create_and_init(owner=user,
                                            name="W1",
                                            last_viewed_at=IsoDate1)
        step1 = workflow.tabs.first().wf_modules.create(
            order=0,
            slug="step-1",
            module_id_name="loadurl",
            auto_update_data=True,
            next_update=timezone.now(),
            update_interval=600,
        )
        workflow2 = Workflow.create_and_init(owner=user,
                                             name="W2",
                                             last_viewed_at=IsoDate2)
        step2 = workflow2.tabs.first().wf_modules.create(
            order=0,
            slug="step-1",
            module_id_name="loadurl",
            auto_update_data=True,
            next_update=timezone.now(),
            update_interval=1200,
        )

        result = list_autofetches_json({"user": user, "session": None})
        self.assertEqual(
            result,
            {
                "maxFetchesPerDay":
                500,
                "nFetchesPerDay":
                216,
                "autofetches": [
                    {
                        "workflow": {
                            "id": workflow.id,
                            "name": "W1",
                            "createdAt": isoformat(workflow.creation_date),
                            "lastViewedAt": IsoDate1,
                        },
                        "tab": {
                            "slug": "tab-1",
                            "name": "Tab 1"
                        },
                        "wfModule": {
                            "id": step1.id,
                            "fetchInterval": 600,
                            "order": 0
                        },
                    },
                    {
                        "workflow": {
                            "id": workflow2.id,
                            "name": "W2",
                            "createdAt": isoformat(workflow2.creation_date),
                            "lastViewedAt": IsoDate2,
                        },
                        "tab": {
                            "slug": "tab-1",
                            "name": "Tab 1"
                        },
                        "wfModule": {
                            "id": step2.id,
                            "fetchInterval": 1200,
                            "order": 0
                        },
                    },
                ],
            },
        )
Example #28
    def test_undo_redo(self):
        mz = create_module_zipfile(
            "loadsomething",
            spec_kwargs={"parameters": [{"id_name": "csv", "type": "string"}]},
        )
        self.kernel.migrate_params.side_effect = lambda m, p: p

        workflow = Workflow.create_and_init()
        tab = workflow.tabs.first()

        all_modules = tab.live_wf_modules  # beginning state: nothing

        v0 = workflow.last_delta_id

        # Test undoing nothing at all. Should NOP
        self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(all_modules.count(), 0)
        self.assertEqual(workflow.last_delta_id, v0)

        # Add a module
        cmd1 = self.run_with_async_db(
            commands.do(
                AddModuleCommand,
                workflow_id=workflow.id,
                tab=tab,
                slug="step-1",
                module_id_name="loadsomething",
                position=0,
                param_values={},
            )
        )
        v1 = cmd1.id
        workflow.refresh_from_db()
        self.assertEqual(all_modules.count(), 1)
        self.assertGreater(v1, v0)
        self.assertEqual(workflow.last_delta_id, v1)
        self.assertWfModuleVersions(tab, [v1])

        # Undo, ensure we are back at start
        self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(all_modules.count(), 0)
        self.assertEqual(workflow.last_delta_id, v0)
        self.assertWfModuleVersions(tab, [])

        # Redo, ensure we are back at v1
        self.run_with_async_db(WorkflowRedo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(all_modules.count(), 1)
        self.assertEqual(workflow.last_delta_id, v1)
        self.assertWfModuleVersions(tab, [v1])

        # Change a parameter
        with self.assertLogs(level=logging.INFO):
            cmd2 = self.run_with_async_db(
                commands.do(
                    ChangeParametersCommand,
                    workflow_id=workflow.id,
                    wf_module=tab.live_wf_modules.first(),
                    new_values={"csv": "some value"},
                )
            )
        v2 = cmd2.id
        workflow.refresh_from_db()
        self.assertEqual(tab.live_wf_modules.first().params["csv"], "some value")
        self.assertEqual(workflow.last_delta_id, v2)
        self.assertGreater(v2, v1)
        self.assertWfModuleVersions(tab, [v2])

        # Undo parameter change
        with self.assertLogs(level=logging.INFO):
            self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta_id, v1)
        self.assertEqual(tab.live_wf_modules.first().params["csv"], "")
        self.assertWfModuleVersions(tab, [v1])

        # Redo
        with self.assertLogs(level=logging.INFO):
            self.run_with_async_db(WorkflowRedo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta_id, v2)
        self.assertEqual(tab.live_wf_modules.first().params["csv"], "some value")
        self.assertWfModuleVersions(tab, [v2])

        # Redo again should do nothing
        self.run_with_async_db(WorkflowRedo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta_id, v2)
        self.assertEqual(tab.live_wf_modules.first().params["csv"], "some value")
        self.assertWfModuleVersions(tab, [v2])

        # Add one more command so the stack is 3 deep
        cmd3 = self.run_with_async_db(
            commands.do(
                ChangeWorkflowTitleCommand,
                workflow_id=workflow.id,
                new_value="New Title",
            )
        )
        v3 = cmd3.id
        self.assertGreater(v3, v2)
        self.assertWfModuleVersions(tab, [v2])

        # Undo twice
        self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta, cmd2)
        self.assertWfModuleVersions(tab, [v2])
        with self.assertLogs(level=logging.INFO):
            self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta, cmd1)
        self.assertWfModuleVersions(tab, [v1])

        # Redo twice
        with self.assertLogs(level=logging.INFO):
            self.run_with_async_db(WorkflowRedo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta, cmd2)
        self.assertWfModuleVersions(tab, [v2])
        self.run_with_async_db(WorkflowRedo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta, cmd3)
        self.assertWfModuleVersions(tab, [v2])

        # Undo again to get to a place where we have two commands to redo
        self.run_with_async_db(WorkflowUndo(workflow.id))
        with self.assertLogs(level=logging.INFO):
            self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta, cmd1)

        # Now add a new command. It should remove cmd2, cmd3 from the redo
        # stack and delete them from the db
        step = all_modules.first()
        cmd4 = self.run_with_async_db(
            commands.do(
                ChangeWfModuleNotesCommand,
                workflow_id=workflow.id,
                wf_module=step,
                new_value="Note of no note",
            )
        )
        v4 = cmd4.id
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta_id, v4)
        self.assertEqual(
            set(Delta.objects.values_list("id", flat=True)), {v0, v1, v4}
        )  # v2, v3 deleted

        # Undo once more (back to v1), then add a command; ensure it deletes
        # dangling commands (tests an edge case in Delta.save)
        self.run_with_async_db(WorkflowUndo(workflow.id))
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta_id, v1)
        cmd5 = self.run_with_async_db(
            commands.do(
                ChangeWfModuleNotesCommand,
                workflow_id=workflow.id,
                wf_module=cmd1.wf_module,
                new_value="Note of some note",
            )
        )
        v5 = cmd5.id
        workflow.refresh_from_db()
        self.assertEqual(workflow.last_delta_id, v5)
        self.assertEqual(
            set(Delta.objects.values_list("id", flat=True)), {v0, v1, v5}
        )  # v4 deleted
        self.assertWfModuleVersions(tab, [v1])
Example #29
    def test_execute_mark_unreachable(self, send_update, fake_load_module):
        future_none = asyncio.Future()
        future_none.set_result(None)
        send_update.return_value = future_none

        workflow = Workflow.create_and_init()
        tab = workflow.tabs.first()
        delta_id = workflow.last_delta_id
        ModuleVersion.create_or_replace_from_spec({
            "id_name": "mod",
            "name": "Mod",
            "category": "Clean",
            "parameters": []
        })
        wf_module1 = tab.wf_modules.create(
            order=0,
            slug="step-1",
            last_relevant_delta_id=delta_id,
            module_id_name="mod",
        )
        wf_module2 = tab.wf_modules.create(
            order=1,
            slug="step-2",
            last_relevant_delta_id=delta_id,
            module_id_name="mod",
        )
        wf_module3 = tab.wf_modules.create(
            order=2,
            slug="step-3",
            last_relevant_delta_id=delta_id,
            module_id_name="mod",
        )

        fake_module = Mock(LoadedModule)
        fake_load_module.return_value = fake_module
        fake_module.migrate_params.return_value = {}
        error_result = RenderResult(
            errors=[RenderError(I18nMessage.TODO_i18n("error, not warning"))])
        fake_module.render.return_value = error_result

        self._execute(workflow)

        wf_module1.refresh_from_db()
        self.assertEqual(wf_module1.cached_render_result.status, "error")
        with open_cached_render_result(
                wf_module1.cached_render_result) as result:
            assert_render_result_equals(result, error_result)

        wf_module2.refresh_from_db()
        self.assertEqual(wf_module2.cached_render_result.status, "unreachable")
        with open_cached_render_result(
                wf_module2.cached_render_result) as result:
            assert_render_result_equals(result, RenderResult())

        wf_module3.refresh_from_db()
        self.assertEqual(wf_module3.cached_render_result.status, "unreachable")
        with open_cached_render_result(
                wf_module3.cached_render_result) as result:
            assert_render_result_equals(result, RenderResult())

        send_update.assert_called_with(
            workflow.id,
            clientside.Update(
                steps={
                    wf_module3.id:
                    clientside.StepUpdate(
                        render_result=wf_module3.cached_render_result)
                }),
        )
Example #30
    def test_fetch_viewer_access_denied(self):
        workflow = Workflow.create_and_init(public=True)
        wf_module = workflow.tabs.first().wf_modules.create(order=0, slug="step-1")

        response = self.run_handler(fetch, workflow=workflow, wfModuleId=wf_module.id)
        self.assertResponse(response, error="AuthError: no write access to workflow")