Example #1
    def test_convert_to_uploaded_file_happy_path(self):
        workflow = Workflow.create_and_init()
        wf_module = workflow.tabs.first().wf_modules.create(order=0,
                                                            slug="step-1",
                                                            module_id_name="x")
        ipu = wf_module.in_progress_uploads.create()
        minio.put_bytes(ipu.Bucket, ipu.get_upload_key(), b"1234567")
        uploaded_file = ipu.convert_to_uploaded_file("test sheet.xlsx")
        self.assertEqual(uploaded_file.uuid, str(ipu.id))
        final_key = wf_module.uploaded_file_prefix + str(ipu.id) + ".xlsx"
        # New file on S3 has the right bytes and metadata
        self.assertEqual(
            minio.get_object_with_data(minio.UserFilesBucket,
                                       final_key)["Body"],
            b"1234567",
        )
        self.assertEqual(
            minio.client.head_object(Bucket=minio.UserFilesBucket,
                                     Key=final_key)["ContentDisposition"],
            "attachment; filename*=UTF-8''test%20sheet.xlsx",
        )
        # InProgressUpload is completed
        self.assertEqual(ipu.is_completed, True)
        ipu.refresh_from_db()
        self.assertEqual(ipu.is_completed, True)  # also on DB
        # Uploaded file is deleted
        self.assertFalse(
            minio.exists(minio.UserFilesBucket, ipu.get_upload_key()))
Example #2
    def test_assume_role_to_write(self):
        client = self._assume_role_session_client_with_write_access(
            Bucket, "key")
        data = b"1234567"
        client.upload_fileobj(io.BytesIO(data), Bucket, "key")
        self.assertEqual(
            minio.get_object_with_data(Bucket, "key")["Body"], data)
Example #3
    def test_wf_module_duplicate(self):
        workflow = Workflow.create_and_init()
        step1 = workflow.tabs.first().wf_modules.create(order=0, slug="step-1")

        # store data to test that it is duplicated
        with tempfile_context() as path1:
            path1.write_bytes(b"12345")
            create_stored_object(workflow.id, step1.id, path1)
        with tempfile_context() as path2:
            path1.write_bytes(b"23456")
            so2 = create_stored_object(workflow.id, step1.id, path2)
        step1.secrets = {"do not copy": {"name": "evil", "secret": "evil"}}
        step1.stored_data_version = so2.stored_at
        step1.save(update_fields=["stored_data_version"])

        # duplicate into another workflow, as we would do when duplicating a workflow
        workflow2 = Workflow.create_and_init()
        tab2 = workflow2.tabs.first()
        step1d = step1.duplicate_into_new_workflow(tab2)
        step1d.refresh_from_db()  # test what we actually have in the db

        self.assertEqual(step1d.slug, "step-1")
        self.assertEqual(step1d.workflow, workflow2)
        self.assertEqual(step1d.module_id_name, step1.module_id_name)
        self.assertEqual(step1d.order, step1.order)
        self.assertEqual(step1d.notes, step1.notes)
        self.assertEqual(step1d.last_update_check, step1.last_update_check)
        self.assertEqual(step1d.is_collapsed, step1.is_collapsed)
        self.assertEqual(step1d.params, step1.params)
        self.assertEqual(step1d.secrets, {})

        # Stored data should contain a clone of content only, not complete version history
        self.assertEqual(step1d.stored_objects.count(), 1)
        self.assertEqual(step1d.stored_data_version, step1.stored_data_version)
        so2d = step1d.stored_objects.first()
        # The StoredObject was copied byte for byte into a different file
        self.assertNotEqual(so2d.key, so2.key)
        self.assertEqual(
            minio.get_object_with_data(minio.StoredObjectsBucket,
                                       so2d.key)["Body"],
            minio.get_object_with_data(minio.StoredObjectsBucket,
                                       so2.key)["Body"],
        )
Example #4
def _external_module_get_html_bytes(id_name: str,
                                    version: str) -> Optional[bytes]:
    prefix = "%s/%s/" % (id_name, version)
    all_keys = minio.list_file_keys(minio.ExternalModulesBucket, prefix)
    try:
        html_key = next(k for k in all_keys if k.endswith(".html"))
    except StopIteration:
        return None  # there is no HTML file

    return minio.get_object_with_data(minio.ExternalModulesBucket,
                                      html_key)["Body"]
Example #5
    def test_complete_happy_path(self, queue_render, send_update):
        send_update.side_effect = async_noop
        queue_render.side_effect = async_noop
        _init_module("x")
        self.kernel.migrate_params.side_effect = lambda m, p: p
        workflow = Workflow.create_and_init()
        wf_module = workflow.tabs.first().wf_modules.create(
            order=0,
            slug="step-123",
            module_id_name="x",
            file_upload_api_token="abc123",
            params={"file": None},
        )
        upload = wf_module.in_progress_uploads.create()
        uuid = str(upload.id)
        key = upload.get_upload_key()
        minio.put_bytes(upload.Bucket, key, b"1234567")
        with self.assertLogs(level=logging.INFO):
            # Logs ChangeParametersCommand's migrate_params()
            response = self.client.post(
                f"/api/v1/workflows/{workflow.id}/steps/step-123/uploads/{upload.id}",
                {"filename": "test.csv"},
                content_type="application/json",
                HTTP_AUTHORIZATION="Bearer abc123",
            )
        self.assertEqual(response.status_code, 200)
        # Upload and its S3 data were deleted
        self.assertFalse(minio.exists(upload.Bucket, key))
        upload.refresh_from_db()
        self.assertTrue(upload.is_completed)
        # Final upload was created
        uploaded_file = wf_module.uploaded_files.first()
        self.assertEqual(uploaded_file.key,
                         f"wf-{workflow.id}/wfm-{wf_module.id}/{uuid}.csv")
        self.assertEqual(
            minio.get_object_with_data(minio.UserFilesBucket,
                                       uploaded_file.key)["Body"],
            b"1234567",
        )
        self.assertEqual(uploaded_file.name, "test.csv")
        # Return value includes uuid
        data = json.loads(response.content)
        self.assertEqual(data["uuid"], uuid)
        self.assertEqual(data["name"], "test.csv")
        self.assertEqual(data["size"], 7)
        # ChangeParametersCommand ran
        wf_module.refresh_from_db()
        self.assertEqual(wf_module.params, {"file": uuid})
        # Send deltas
        send_update.assert_called()
        queue_render.assert_called()
Example #6
    def test_assume_role_to_write_multipart(self):
        client = self._assume_role_session_client_with_write_access(
            Bucket, "key")
        from boto3.s3.transfer import TransferConfig

        data = b"1234567" * 1024 * 1024  # 7MB => 5MB+2MB parts
        client.upload_fileobj(
            io.BytesIO(data),
            Bucket,
            "key",
            Config=TransferConfig(multipart_threshold=5 * 1024 * 1024),
        )
        self.assertEqual(
            minio.get_object_with_data(Bucket, "key")["Body"], data)
Example #7
    def test_duplicate_bytes(self):
        key = f"{self.workflow.id}/{self.step1.id}/{uuid1()}"
        minio.put_bytes(minio.StoredObjectsBucket, key, b"12345")
        self.step2 = self.step1.tab.wf_modules.create(order=1, slug="step-2")
        so1 = self.step1.stored_objects.create(
            bucket=minio.StoredObjectsBucket, key=key, size=5)
        so2 = so1.duplicate(self.step2)

        # new StoredObject should have same time,
        # different file with same contents
        self.assertEqual(so1.stored_at, so2.stored_at)
        self.assertEqual(so1.size, so2.size)
        self.assertEqual(so1.bucket, so2.bucket)
        self.assertNotEqual(so1.key, so2.key)
        self.assertEqual(
            minio.get_object_with_data(so2.bucket, so2.key)["Body"], b"12345")
Example #8
    def test_finish_upload_happy_path(self, send_update):
        user = User.objects.create(username="******", email="*****@*****.**")
        workflow = Workflow.create_and_init(owner=user)
        wf_module = workflow.tabs.first().wf_modules.create(order=0,
                                                            slug="step-1",
                                                            module_id_name="x")
        in_progress_upload = wf_module.in_progress_uploads.create(
            id="147a9f5d-5b3e-41c3-a968-a84a5a9d587f")
        key = in_progress_upload.get_upload_key()
        minio.put_bytes(in_progress_upload.Bucket, key, b"1234567")
        send_update.side_effect = async_noop
        response = self.run_handler(
            finish_upload,
            user=user,
            workflow=workflow,
            wfModuleId=wf_module.id,
            key=key,
            filename="test sheet.csv",
        )
        self.assertResponse(
            response, data={"uuid": "147a9f5d-5b3e-41c3-a968-a84a5a9d587f"})
        # The uploaded file is deleted
        self.assertFalse(minio.exists(in_progress_upload.Bucket, key))
        # A new upload is created
        uploaded_file = wf_module.uploaded_files.first()
        self.assertEqual(uploaded_file.name, "test sheet.csv")
        self.assertEqual(uploaded_file.size, 7)
        self.assertEqual(uploaded_file.uuid,
                         "147a9f5d-5b3e-41c3-a968-a84a5a9d587f")
        self.assertEqual(uploaded_file.bucket, in_progress_upload.Bucket)
        final_key = f"wf-{workflow.id}/wfm-{wf_module.id}/147a9f5d-5b3e-41c3-a968-a84a5a9d587f.csv"
        self.assertEqual(uploaded_file.key, final_key)
        # The file has the right bytes and metadata
        self.assertEqual(
            minio.get_object_with_data(minio.UserFilesBucket,
                                       final_key)["Body"],
            b"1234567",
        )
        self.assertEqual(
            minio.client.head_object(Bucket=minio.UserFilesBucket,
                                     Key=final_key)["ContentDisposition"],
            "attachment; filename*=UTF-8''test%20sheet.csv",
        )
        # wf_module is updated
        send_update.assert_called()
Example #9
    def test_wf_module_duplicate_copy_uploaded_file(self):
        workflow = Workflow.create_and_init()
        tab = workflow.tabs.first()
        wf_module = tab.wf_modules.create(order=0,
                                          slug="step-1",
                                          module_id_name="upload")
        uuid = str(uuidgen.uuid4())
        key = f"{wf_module.uploaded_file_prefix}{uuid}.csv"
        minio.put_bytes(minio.UserFilesBucket, key, b"1234567")
        # Write the uuid to the old module -- we'll check the new module points
        # to a valid file
        wf_module.params = {"file": uuid, "has_header": True}
        wf_module.save(update_fields=["params"])
        uploaded_file = wf_module.uploaded_files.create(
            name="t.csv",
            uuid=uuid,
            bucket=minio.UserFilesBucket,
            key=key,
            size=7)

        workflow2 = Workflow.create_and_init()
        tab2 = workflow2.tabs.first()
        wf_module2 = wf_module.duplicate_into_new_workflow(tab2)

        uploaded_file2 = wf_module2.uploaded_files.first()
        self.assertIsNotNone(uploaded_file2)
        # New file gets same uuid -- because it's the same file and we don't
        # want to edit params during copy
        self.assertEqual(uploaded_file2.uuid, uuid)
        self.assertEqual(wf_module2.params["file"], uuid)
        self.assertTrue(
            # The new file should be in a different path
            uploaded_file2.key.startswith(wf_module2.uploaded_file_prefix))
        self.assertEqual(uploaded_file2.name, "t.csv")
        self.assertEqual(uploaded_file2.size, 7)
        self.assertEqual(uploaded_file2.created_at, uploaded_file.created_at)
        self.assertEqual(
            minio.get_object_with_data(uploaded_file2.bucket,
                                       uploaded_file2.key)["Body"],
            b"1234567",
        )
Example #10
    def test_integration_happy_path(self):
        workflow = Workflow.create_and_init()
        wf_module = workflow.tabs.first().wf_modules.create(order=0,
                                                            slug="step-1",
                                                            module_id_name="x")
        ipu = wf_module.in_progress_uploads.create()
        updated_at1 = ipu.updated_at
        time.sleep(0.000001)  # so updated_at changes
        params = ipu.generate_upload_parameters()
        ipu.refresh_from_db()  # ensure we wrote updated_at
        updated_at2 = ipu.updated_at
        self.assertGreater(updated_at2, updated_at1)

        # Upload using a separate S3 client
        # Import _after_ we've imported minio -- so cjwstate.minio's monkey-patch
        # takes effect.
        import boto3

        credentials = params["credentials"]
        session = boto3.session.Session(
            aws_access_key_id=credentials["accessKeyId"],
            aws_secret_access_key=credentials["secretAccessKey"],
            aws_session_token=credentials["sessionToken"],
            region_name=params["region"],
        )
        client = session.client("s3", endpoint_url=params["endpoint"])
        client.put_object(Bucket=ipu.Bucket,
                          Key=ipu.get_upload_key(),
                          Body=b"1234567")

        # Complete the upload
        uploaded_file = ipu.convert_to_uploaded_file("test.csv")
        self.assertEqual(
            minio.get_object_with_data(uploaded_file.bucket,
                                       uploaded_file.key)["Body"],
            b"1234567",
        )
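Every example above relies on the same small wrapper interface: put_bytes() writes raw bytes to a bucket/key, and get_object_with_data(bucket, key) fetches an object and returns a dict whose "Body" entry holds the object's bytes. The wrapper's real implementation is not shown on this page; the snippet below is only a minimal sketch of how such helpers could be written over boto3, assuming an S3 client configured elsewhere (the endpoint URL here is an illustrative placeholder, not the project's actual configuration).

import boto3

# Illustrative client only -- the real project configures its S3 client
# (endpoint, credentials, monkey-patches) inside cjwstate.minio.
client = boto3.client("s3", endpoint_url="http://localhost:9000")


def put_bytes(bucket: str, key: str, data: bytes) -> None:
    """Write raw bytes to bucket/key."""
    client.put_object(Bucket=bucket, Key=key, Body=data)


def get_object_with_data(bucket: str, key: str) -> dict:
    """Return the GetObject response with "Body" read into bytes."""
    response = client.get_object(Bucket=bucket, Key=key)
    try:
        body = response["Body"].read()  # drain the StreamingBody into memory
    finally:
        response["Body"].close()
    return {**response, "Body": body}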