Example #1
0
 def get_clientside_files(self) -> List[clientside.UploadedFile]:
     """Return this step's uploaded files, newest first, as clientside objects."""
     rows = self.uploaded_files.order_by("-created_at").values_list(
         "name", "uuid", "size", "created_at"
     )
     return [
         clientside.UploadedFile(name=n, uuid=u, size=s, created_at=c)
         for n, u, s, c in rows
     ]
Example #2
0
 def _get_clientside_files(
     self, module_zipfile: Optional[ModuleZipfile]
 ) -> List[clientside.UploadedFile]:
     """Return this step's uploaded files, newest first, as clientside objects.

     Return [] without querying the database when the module has no "file"
     param (or there is no module at all) -- no file can be relevant then.
     """
     if not module_zipfile:
         # Skip the database query
         return []
     spec = module_zipfile.get_spec()
     if not any(field.type == "file" for field in spec.param_fields):
         # Skip the database query
         return []
     rows = self.uploaded_files.order_by("-created_at").values_list(
         "name", "uuid", "size", "created_at"
     )
     return [
         clientside.UploadedFile(name=n, uuid=u, size=s, created_at=c)
         for n, u, s, c in rows
     ]
    def to_clientside(self) -> clientside.StepUpdate:
        """Build a clientside.StepUpdate snapshot of this Step.

        Gathers the step's migrated params, cached render result, uploaded
        files, fetch settings and stored-data versions into one update object
        for transmission to browsers.

        May raise ModuleError (from get_migrated_params()).
        """
        # params
        from cjwstate.models.module_registry import MODULE_REGISTRY

        try:
            module_zipfile = MODULE_REGISTRY.latest(self.module_id_name)
        except KeyError:
            # Module is unknown/deleted: there is no schema to migrate
            # params against, so send empty params.
            module_zipfile = None

        if module_zipfile is None:
            params = {}
        else:
            from cjwstate.params import get_migrated_params

            module_spec = module_zipfile.get_spec()
            param_schema = module_spec.get_param_schema()
            # raise ModuleError
            params = get_migrated_params(self, module_zipfile=module_zipfile)
            try:
                param_schema.validate(params)
            except ValueError:
                # migrate_params() misbehaved; log it and coerce the values
                # so clients still receive schema-valid params.
                logger.exception(
                    "%s.migrate_params() gave invalid output: %r",
                    self.module_id_name,
                    params,
                )
                params = param_schema.coerce(params)

        # clientside.Null (not None) is the wire sentinel for "no cached
        # render result".
        crr = self._build_cached_render_result_fresh_or_not()
        if crr is None:
            crr = clientside.Null

        return clientside.StepUpdate(
            id=self.id,
            slug=self.slug,
            module_slug=self.module_id_name,
            tab_slug=self.tab_slug,
            is_busy=self.is_busy,
            render_result=crr,
            # Uploaded files, newest first
            files=[
                clientside.UploadedFile(
                    name=name, uuid=uuid, size=size, created_at=created_at
                )
                for name, uuid, size, created_at in self.uploaded_files.order_by(
                    "-created_at"
                ).values_list("name", "uuid", "size", "created_at")
            ],
            params=params,
            secrets=self.secret_metadata,
            is_collapsed=self.is_collapsed,
            notes=self.notes,
            is_auto_fetch=self.auto_update_data,
            fetch_interval=self.update_interval,
            last_fetched_at=self.last_update_check,
            is_notify_on_change=self.notifications,
            has_unseen_notification=self.has_unseen_notification,
            last_relevant_delta_id=self.last_relevant_delta_id,
            # Fetched ("stored") data versions, newest first
            versions=clientside.FetchedVersionList(
                versions=[
                    clientside.FetchedVersion(created_at=created_at, is_seen=is_seen)
                    for created_at, is_seen in self.stored_objects.order_by(
                        "-stored_at"
                    ).values_list("stored_at", "read")
                ],
                selected=self.stored_data_version,
            ),
        )
Example #4
0
 def test_pre_finish_happy_path(self, queue_render, send_update):
     """Happy path: file stored, params set, delta sent, render queued."""
     queue_render.side_effect = async_noop
     send_update.side_effect = async_noop
     _init_module("x")
     self.kernel.migrate_params.side_effect = lambda m, p: p
     workflow = Workflow.create_and_init()
     step = workflow.tabs.first().steps.create(
         order=0,
         slug="step-123",
         module_id_name="x",
         file_upload_api_token="abc123",
         params={"file": None},
     )
     s3.put_bytes(s3.TusUploadBucket, "data", b"1234567")
     tusd_body = {
         "Upload": {
             "MetaData": {
                 "filename": "foo.csv",
                 "workflowId": str(workflow.id),
                 "stepSlug": step.slug,
                 "apiToken": "abc123",
             },
             "Size": 7,
             "Storage": {"Bucket": s3.TusUploadBucket, "Key": "data"},
         }
     }
     with self.assertLogs(level=logging.INFO):
         # Logs SetStepParams's migrate_params()
         response = self.client.post(
             f"/tusd-hooks",
             tusd_body,
             HTTP_HOOK_NAME="pre-finish",
             content_type="application/json",
         )
     self.assertEqual(response.status_code, 200)
     self.assertEqual(response.json(), {})
     # File was created
     uploaded_file = step.uploaded_files.first()
     key_pattern = f"^wf-{workflow.id}/wfm-{step.id}/[-0-9a-f]{{36}}\\.csv$"
     self.assertRegex(uploaded_file.key, key_pattern)
     stored_body = get_s3_object_with_data(s3.UserFilesBucket, uploaded_file.key)[
         "Body"
     ]
     self.assertEqual(stored_body, b"1234567")
     self.assertEqual(uploaded_file.name, "foo.csv")
     # SetStepParams ran
     uuid = uploaded_file.key[-40:-4]
     step.refresh_from_db()
     self.assertEqual(step.params, {"file": uuid})
     # Send deltas
     send_update.assert_called()
     update = send_update.mock_calls[0][1][1]
     self.assertEqual(
         update.steps[step.id].files,
         [
             clientside.UploadedFile(
                 name="foo.csv",
                 uuid=uuid,
                 size=7,
                 created_at=uploaded_file.created_at,
             )
         ],
     )
     queue_render.assert_called()
Example #5
0
    def test_pre_finish_enforce_storage_limits(self, send_update):
        """A new upload evicts the oldest stored files and tells clients."""
        send_update.side_effect = async_noop

        _init_module("x")
        self.kernel.migrate_params.side_effect = lambda m, p: p
        workflow = Workflow.create_and_init()
        step = workflow.tabs.first().steps.create(
            order=0,
            slug="step-123",
            module_id_name="x",
            file_upload_api_token="abc123",
            params={"file": None},
        )
        # Seed three pre-existing uploads, oldest first: (key, body, name,
        # size, uuid, created_at)
        preexisting = [
            (
                "foo/1.txt",
                b"1",
                "file1.txt",
                1,
                "df46244d-268a-0001-9b47-360502dd9b32",
                datetime.datetime(2020, 1, 1),
            ),
            (
                "foo/2.txt",
                b"22",
                "file2.txt",
                2,
                "df46244d-268a-0002-9b47-360502dd9b32",
                datetime.datetime(2020, 1, 2),
            ),
            (
                "foo/3.txt",
                b"333",
                "file3.txt",
                3,
                "df46244d-268a-0003-9b47-360502dd9b32",
                datetime.datetime(2020, 1, 3),
            ),
        ]
        for key, body, name, size, uuid, created_at in preexisting:
            s3.put_bytes(s3.UserFilesBucket, key, body)
            step.uploaded_files.create(
                created_at=created_at, name=name, size=size, uuid=uuid, key=key
            )

        # Upload the new file, "file4.txt"
        s3.put_bytes(s3.TusUploadBucket, "new-key", b"4444")
        tusd_body = {
            "Upload": {
                "MetaData": {
                    "filename": "file4.txt",
                    "workflowId": str(workflow.id),
                    "stepSlug": step.slug,
                    "apiToken": "abc123",
                },
                "Size": 7,
                "Storage": {"Bucket": s3.TusUploadBucket, "Key": "new-key"},
            }
        }
        with self.assertLogs(level=logging.INFO):
            # Logs SetStepParams's migrate_params()
            response = self.client.post(
                f"/tusd-hooks",
                tusd_body,
                HTTP_HOOK_NAME="pre-finish",
                content_type="application/json",
            )
        self.assertEqual(response.status_code, 200)

        # Test excess uploaded files were deleted
        remaining_names = list(
            step.uploaded_files.order_by("id").values_list("name", flat=True)
        )
        self.assertEqual(remaining_names, ["file3.txt", "file4.txt"])
        self.assertFalse(s3.exists(s3.UserFilesBucket, "foo/1.txt"))
        self.assertFalse(s3.exists(s3.UserFilesBucket, "foo/2.txt"))

        # Test delta nixes old files from clients' browsers
        send_update.assert_called()
        uploaded_file = step.uploaded_files.get(name="file4.txt")
        self.assertEqual(
            send_update.mock_calls[0][1][1].steps[step.id].files,
            [
                clientside.UploadedFile(
                    name="file4.txt",
                    uuid=uploaded_file.uuid,
                    size=7,
                    created_at=uploaded_file.created_at,
                ),
                clientside.UploadedFile(
                    name="file3.txt",
                    uuid="df46244d-268a-0003-9b47-360502dd9b32",
                    size=3,
                    created_at=datetime.datetime(2020, 1, 3),
                ),
            ],
        )