def test_duplicate_copies_fresh_cache(self):
    # The cache's filename depends on workflow_id and step_id.
    # Duplicating it would need more complex code :).
    source_table = make_table(make_column("A", [1], format="${:,.2f}"))
    write_to_rendercache(
        self.workflow,
        self.step,
        1,
        table=source_table,
        errors=[RenderError(I18nMessage("X", {}, None))],
        json={"foo": "bar"},
    )

    other_workflow = Workflow.objects.create()
    other_tab = other_workflow.tabs.create(position=0)
    duplicate = self.step.duplicate_into_new_workflow(other_tab)
    duplicated_crr = duplicate.cached_render_result

    # The duplicate's cache entry matches the original's except for its
    # identifiers; delta_id is reset to 0 on the new workflow.
    expected_crr = replace(
        self.step.cached_render_result,
        workflow_id=other_workflow.id,
        step_id=duplicate.id,
        delta_id=0,
    )
    self.assertEqual(duplicated_crr, expected_crr)

    # The cached data itself must have been copied, not merely referenced.
    with open_cached_render_result(duplicated_crr) as loaded:
        assert_arrow_table_equals(loaded.table, source_table)
        self.assertEqual(loaded.errors, [RenderError(I18nMessage("X", {}, None))])
        self.assertEqual(loaded.json, {"foo": "bar"})
def test_invalid_parquet_is_corrupt_cache_error(self):
    # A cache entry whose stored bytes are not valid Parquet must fail
    # loudly with CorruptCacheError instead of returning garbage.
    with arrow_table_context(make_column("A", ["x"])) as (path, table):
        result = LoadedRenderResult(
            path=path,
            table=table,
            columns=[Column("A", ColumnType.Text())],
            errors=[],
            json={},
        )
        cache_render_result(self.workflow, self.step, 1, result)
        crr = self.step.cached_render_result
        # Corrupt the stored object in place: overwrite the cached file
        # with bytes no Parquet reader can parse.
        s3.put_bytes(BUCKET, crr_parquet_key(crr), b"NOT PARQUET")
        # NOTE(review): dropped the original's `with tempfile_context() as
        # arrow_path:` wrapper — `arrow_path` was never used, so the temp
        # file was pure dead weight.
        with self.assertRaises(CorruptCacheError):
            with open_cached_render_result(crr):
                pass
def test_duplicate_copies_fresh_cache(self):
    # The cache's filename depends on workflow_id and wf_module_id.
    # Duplicating it would need more complex code :).
    expected = RenderResult(
        arrow_table({"A": [1]}),
        [RenderError(I18nMessage("X", []), [])],
        {},
    )
    cache_render_result(self.workflow, self.wf_module, self.delta.id, expected)

    other_workflow = Workflow.objects.create()
    other_tab = other_workflow.tabs.create(position=0)
    InitWorkflowCommand.create(other_workflow)
    duplicate = self.wf_module.duplicate_into_new_workflow(other_tab)

    # The duplicated module must serve the very same cached render result.
    with open_cached_render_result(duplicate.cached_render_result) as loaded:
        self.assertEqual(loaded, expected)
def test_cache_render_result(self):
    # Cache a render result, then verify it round-trips three ways: via the
    # in-memory Step, via a completely fresh DB read, and via the stored
    # Arrow table itself.
    number_column = Column("A", ColumnType.Number(format="{:,}"))
    with arrow_table_context(make_column("A", [1])) as (table_path, table):
        render_result = LoadedRenderResult(
            path=table_path,
            table=table,
            columns=[number_column],
            errors=[
                RenderError(
                    I18nMessage("e1", {"text": "hi"}, None),
                    [
                        QuickFix(
                            I18nMessage("q1", {"var": 2}, None),
                            QuickFixAction.PrependStep("filter", {"a": "x"}),
                        )
                    ],
                ),
                RenderError(I18nMessage("e2", {}, None), []),
            ],
            json={"foo": "bar"},
        )
        cache_render_result(self.workflow, self.step, 1, render_result)

        first_read = self.step.cached_render_result
        self.assertEqual(first_read.step_id, self.step.id)
        self.assertEqual(first_read.delta_id, 1)
        expected_key = f"wf-{self.workflow.id}/wfm-{self.step.id}/delta-1.dat"
        self.assertEqual(crr_parquet_key(first_read), expected_key)

        # Reading completely freshly from the DB should give the same thing
        fresh_step = Step.objects.get(id=self.step.id)
        fresh_read = fresh_step.cached_render_result
        self.assertEqual(fresh_read, first_read)

        with open_cached_render_result(fresh_read) as loaded:
            assert_arrow_table_equals(
                loaded.table, make_table(make_column("A", [1], format="{:,}"))
            )
            self.assertEqual(loaded.columns, [number_column])
def test_cache_render_result(self):
    # Cache a render result (with errors, quick fixes and JSON payload),
    # then verify it round-trips via the in-memory module, a fresh DB read,
    # and the stored table data.
    expected = RenderResult(
        arrow_table({"A": [1]}),
        [
            RenderError(
                I18nMessage("e1", [1, "x"]),
                [
                    QuickFix(
                        I18nMessage("q1", []),
                        QuickFixAction.PrependStep("filter", {"a": "x"}),
                    )
                ],
            ),
            RenderError(I18nMessage("e2", []), []),
        ],
        {"foo": "bar"},
    )
    cache_render_result(self.workflow, self.wf_module, self.delta.id, expected)

    cached = self.wf_module.cached_render_result
    self.assertEqual(cached.wf_module_id, self.wf_module.id)
    self.assertEqual(cached.delta_id, self.delta.id)
    self.assertEqual(
        crr_parquet_key(cached),
        f"wf-{self.workflow.id}/wfm-{self.wf_module.id}/delta-{self.delta.id}.dat",
    )

    # Reading completely freshly from the DB should give the same thing
    reloaded_module = WfModule.objects.get(id=self.wf_module.id)
    reloaded = reloaded_module.cached_render_result
    self.assertEqual(reloaded, cached)

    with open_cached_render_result(reloaded) as loaded:
        assert_render_result_equals(loaded, expected)
def test_duplicate_copies_fresh_cache(self):
    # The cache's filename depends on workflow_id and step_id.
    # Duplicating it would need more complex code :).
    expected = RenderResult(
        arrow_table({"A": [1]}),
        [RenderError(I18nMessage("X", {}, None), [])],
        {},
    )
    cache_render_result(self.workflow, self.step, 1, expected)

    other_workflow = Workflow.objects.create()
    other_tab = other_workflow.tabs.create(position=0)
    duplicate = self.step.duplicate_into_new_workflow(other_tab)
    duplicated_crr = duplicate.cached_render_result

    # Same cache metadata, re-keyed to the new workflow/step; delta_id
    # resets to 0 on the duplicate.
    self.assertEqual(
        duplicated_crr,
        replace(
            self.step.cached_render_result,
            workflow_id=other_workflow.id,
            step_id=duplicate.id,
            delta_id=0,
        ),
    )
    with open_cached_render_result(duplicated_crr) as loaded:
        assert_render_result_equals(loaded, expected)