Code example #1
    def test_step_duplicate(self):
        workflow = Workflow.create_and_init()
        step1 = workflow.tabs.first().steps.create(order=0, slug="step-1")

        # store data to test that it is duplicated
        with tempfile_context() as path1:
            path1.write_bytes(b"12345")
            create_stored_object(workflow.id, step1.id, path1)
        with tempfile_context() as path2:
            path1.write_bytes(b"23456")
            so2 = create_stored_object(workflow.id, step1.id, path2)
        step1.secrets = {"do not copy": {"name": "evil", "secret": "evil"}}
        step1.stored_data_version = so2.stored_at
        step1.save(update_fields=["secrets", "stored_data_version"])

        # duplicate into another workflow, as we would do when duplicating a workflow
        workflow2 = Workflow.create_and_init()
        tab2 = workflow2.tabs.first()
        step1d = step1.duplicate_into_new_workflow(tab2)
        step1d.refresh_from_db()  # test what we actually have in the db

        self.assertEqual(step1d.slug, "step-1")
        self.assertEqual(step1d.workflow, workflow2)
        self.assertEqual(step1d.module_id_name, step1.module_id_name)
        self.assertEqual(step1d.order, step1.order)
        self.assertEqual(step1d.notes, step1.notes)
        self.assertEqual(step1d.last_update_check, step1.last_update_check)
        self.assertEqual(step1d.is_collapsed, step1.is_collapsed)
        self.assertEqual(step1d.params, step1.params)
        self.assertEqual(step1d.secrets, {})

        # Stored data should contain a clone of content only, not complete version history
        self.assertEqual(step1d.stored_objects.count(), 1)
        self.assertEqual(step1d.stored_data_version, step1.stored_data_version)
        so2d = step1d.stored_objects.first()
        # The StoredObject was copied byte for byte into a different file
        self.assertNotEqual(so2d.key, so2.key)
        self.assertEqual(
            get_s3_object_with_data(s3.StoredObjectsBucket, so2d.key)["Body"],
            get_s3_object_with_data(s3.StoredObjectsBucket, so2.key)["Body"],
        )
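
The assertions above pin down the duplication contract: the copy keeps exactly one StoredObject (the selected version, not the full history), the same stored_data_version, and an empty secrets dict. A minimal sketch of that contract, reusing the duplicate() helper exercised in code example #2 below; duplicate_current_stored_data is a hypothetical name, not the project's actual method:

    def duplicate_current_stored_data(step, new_step):
        # Hypothetical helper -- in the real code this logic would live
        # inside duplicate_into_new_workflow(). Copy only the selected
        # version; the test asserts stored_objects.count() == 1, so the
        # rest of the version history stays behind.
        selected = step.stored_objects.get(stored_at=step.stored_data_version)
        copy = selected.duplicate(new_step)  # byte-for-byte copy, new key
        new_step.stored_data_version = copy.stored_at
        new_step.secrets = {}  # secrets are deliberately never copied
        new_step.save(update_fields=["stored_data_version", "secrets"])
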
Code example #2
    def test_duplicate_bytes(self):
        key = f"{self.workflow.id}/{self.step1.id}/{uuid1()}"
        s3.put_bytes(s3.StoredObjectsBucket, key, b"12345")
        self.step2 = self.step1.tab.steps.create(order=1, slug="step-2")
        so1 = self.step1.stored_objects.create(key=key, size=5)
        so2 = so1.duplicate(self.step2)

        # new StoredObject should have same time,
        # different file with same contents
        self.assertEqual(so2.stored_at, so1.stored_at)
        self.assertEqual(so2.size, so1.size)
        self.assertNotEqual(so2.key, so1.key)
        self.assertEqual(
            get_s3_object_with_data(s3.StoredObjectsBucket, so2.key)["Body"],
            b"12345",
        )
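
A duplicate() along these lines can be implemented as a server-side S3 copy. This is a minimal sketch assuming a boto3 client and assignable stored_at/size fields; duplicate_stored_object is a hypothetical stand-in for the model method, not the project's actual implementation:

    import uuid

    import boto3

    s3_client = boto3.client("s3")

    def duplicate_stored_object(so, target_step, bucket):
        # Hypothetical sketch. Same bytes under a fresh key scoped to the
        # target step -- the test asserts the keys differ but the bodies
        # match.
        new_key = f"{target_step.workflow_id}/{target_step.id}/{uuid.uuid1()}"
        s3_client.copy_object(
            Bucket=bucket,
            Key=new_key,
            CopySource={"Bucket": bucket, "Key": so.key},
        )
        # Preserve stored_at and size (assumes stored_at is a plain
        # DateTimeField, not auto_now_add) -- the test asserts both match.
        return target_step.stored_objects.create(
            key=new_key, size=so.size, stored_at=so.stored_at
        )
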
Code example #3
    def test_step_duplicate_copy_uploaded_file(self):
        workflow = Workflow.create_and_init()
        tab = workflow.tabs.first()
        step = tab.steps.create(order=0,
                                slug="step-1",
                                module_id_name="upload")
        uuid = str(uuidgen.uuid4())
        key = f"{step.uploaded_file_prefix}{uuid}.csv"
        s3.put_bytes(s3.UserFilesBucket, key, b"1234567")
        # Write the uuid to the old module -- we'll check the new module points
        # to a valid file
        step.params = {"file": uuid, "has_header": True}
        step.save(update_fields=["params"])
        uploaded_file = step.uploaded_files.create(name="t.csv",
                                                   uuid=uuid,
                                                   key=key,
                                                   size=7)

        workflow2 = Workflow.create_and_init()
        tab2 = workflow2.tabs.first()
        step2 = step.duplicate_into_new_workflow(tab2)

        uploaded_file2 = step2.uploaded_files.first()
        self.assertIsNotNone(uploaded_file2)
        # New file gets same uuid -- because it's the same file and we don't
        # want to edit params during copy
        self.assertEqual(uploaded_file2.uuid, uuid)
        self.assertEqual(step2.params["file"], uuid)
        self.assertTrue(
            # The new file should be in a different path
            uploaded_file2.key.startswith(step2.uploaded_file_prefix))
        self.assertEqual(uploaded_file2.name, "t.csv")
        self.assertEqual(uploaded_file2.size, 7)
        self.assertEqual(uploaded_file2.created_at, uploaded_file.created_at)
        self.assertEqual(
            get_s3_object_with_data(s3.UserFilesBucket,
                                    uploaded_file2.key)["Body"],
            b"1234567",
        )
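
For reference, the copy this test exercises could look like the sketch below. It assumes boto3, an assignable created_at field, and a hypothetical duplicate_uploaded_files helper; the key point is that the uuid (and therefore params["file"]) survives unchanged, while only the S3 key moves under the new step's prefix:

    import boto3

    def duplicate_uploaded_files(step, step2, bucket):
        # Hypothetical sketch, not the project's actual implementation.
        client = boto3.client("s3")
        for uf in step.uploaded_files.all():
            # key is "<prefix><uuid>.csv"; keep the "<uuid>.csv" suffix
            suffix = uf.key[len(step.uploaded_file_prefix):]
            new_key = step2.uploaded_file_prefix + suffix
            client.copy_object(
                Bucket=bucket,
                Key=new_key,
                CopySource={"Bucket": bucket, "Key": uf.key},
            )
            # Same uuid, name, size and created_at (assumes created_at is
            # assignable); only the key differs, as the test asserts.
            step2.uploaded_files.create(
                name=uf.name,
                uuid=uf.uuid,
                key=new_key,
                size=uf.size,
                created_at=uf.created_at,
            )
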
Code example #4
    def test_pre_finish_happy_path(self, queue_render, send_update):
        send_update.side_effect = async_noop
        queue_render.side_effect = async_noop
        _init_module("x")
        self.kernel.migrate_params.side_effect = lambda m, p: p
        workflow = Workflow.create_and_init()
        step = workflow.tabs.first().steps.create(
            order=0,
            slug="step-123",
            module_id_name="x",
            file_upload_api_token="abc123",
            params={"file": None},
        )
        s3.put_bytes(s3.TusUploadBucket, "data", b"1234567")
        with self.assertLogs(level=logging.INFO):
            # Logs SetStepParams's migrate_params()
            response = self.client.post(
                "/tusd-hooks",
                {
                    "Upload": {
                        "MetaData": {
                            "filename": "foo.csv",
                            "workflowId": str(workflow.id),
                            "stepSlug": step.slug,
                            "apiToken": "abc123",
                        },
                        "Size": 7,
                        "Storage": {
                            "Bucket": s3.TusUploadBucket,
                            "Key": "data",
                        },
                    }
                },
                HTTP_HOOK_NAME="pre-finish",
                content_type="application/json",
            )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {})
        # File was created
        uploaded_file = step.uploaded_files.first()
        self.assertRegex(
            uploaded_file.key,
            f"^wf-{workflow.id}/wfm-{step.id}/[-0-9a-f]{{36}}\\.csv$")
        self.assertEqual(
            get_s3_object_with_data(s3.UserFilesBucket,
                                    uploaded_file.key)["Body"],
            b"1234567",
        )
        self.assertEqual(uploaded_file.name, "foo.csv")
        # SetStepParams ran
        uuid = uploaded_file.key[-40:-4]
        step.refresh_from_db()
        self.assertEqual(step.params, {"file": uuid})
        # Send deltas
        send_update.assert_called()
        self.assertEqual(
            send_update.mock_calls[0][1][1].steps[step.id].files,
            [
                clientside.UploadedFile(
                    name="foo.csv",
                    uuid=uuid,
                    size=7,
                    created_at=uploaded_file.created_at,
                )
            ],
        )
        queue_render.assert_called()
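
The slice uploaded_file.key[-40:-4] recovers the uuid because, per the assertRegex above, every key ends with a 36-character UUID followed by the 4-character ".csv" extension. A quick standalone check (the literal key is illustrative only):

    # wf-<workflow.id>/wfm-<step.id>/<36-char uuid>.csv
    key = "wf-1/wfm-2/0f859887-e637-4f0e-ad4c-6a54a78c6a4f.csv"
    uuid = key[-40:-4]  # drop the 4-char ".csv", keep the 36-char uuid
    assert uuid == "0f859887-e637-4f0e-ad4c-6a54a78c6a4f"
    assert len(uuid) == 36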