def _call_backward_and_load_clientside_update(
    workflow_id: int,
) -> Tuple[Optional[Delta], Optional[clientside.Update], Optional[int]]:
    """Undo the workflow's most recently applied delta, under cooperative lock.

    Return `(delta, clientside_update, render_delta_id)`. `render_delta_id`
    is `None` when the undone command does not modify render output.
    Return `(None, None, None)` when the workflow does not exist or there is
    nothing left to undo (we're at the `InitWorkflow` start of the chain).
    """
    applied_at = datetime.datetime.now()
    try:
        with Workflow.lookup_and_cooperative_lock(id=workflow_id) as workflow:
            # Delta.DoesNotExist here means we're at the beginning of the
            # undo chain (only the InitWorkflow delta remains).
            delta = (
                workflow.deltas
                .exclude(command_name=InitWorkflow.__name__)
                .get(id=workflow.last_delta_id)
            )
            command = NAME_TO_COMMAND[delta.command_name]
            command.backward(delta)

            # Rewind the workflow pointer to the previous delta. Save only
            # these two columns: backward() may have edited others.
            workflow.last_delta_id = delta.prev_delta_id or 0
            workflow.updated_at = applied_at
            workflow.save(update_fields=["last_delta_id", "updated_at"])

            delta.last_applied_at = applied_at
            delta.save(update_fields=["last_applied_at"])

            needs_render = command.get_modifies_render_output(delta)
            return (
                delta,
                command.load_clientside_update(delta),
                workflow.last_delta_id if needs_render else None,
            )
    except (Workflow.DoesNotExist, Delta.DoesNotExist):
        return None, None, None
def _call_forward_and_load_clientside_update(
    workflow_id: int,
) -> Tuple[Optional[Delta], Optional[clientside.Update], Optional[int]]:
    """Redo the first unapplied delta, under cooperative lock.

    Return `(delta, clientside_update, render_delta_id)`. `render_delta_id`
    is `None` when the redone command does not modify render output.
    Return `(None, None, None)` when the workflow does not exist or there is
    nothing to redo (we're at the end of the delta chain).
    """
    applied_at = datetime.datetime.now()
    try:
        with Workflow.lookup_and_cooperative_lock(id=workflow_id) as workflow:
            delta = workflow.deltas.filter(id__gt=workflow.last_delta_id).first()
            if delta is None:
                # Nothing after last_delta_id: already at the end of the chain
                return None, None, None

            command = NAME_TO_COMMAND[delta.command_name]
            command.forward(delta)

            # Advance the workflow pointer. Save only these two columns:
            # forward() may have edited others.
            workflow.last_delta_id = delta.id
            workflow.updated_at = applied_at
            workflow.save(update_fields=["last_delta_id", "updated_at"])

            delta.last_applied_at = applied_at
            delta.save(update_fields=["last_applied_at"])

            needs_render = command.get_modifies_render_output(delta)
            return (
                delta,
                command.load_clientside_update(delta),
                delta.id if needs_render else None,
            )
    except Workflow.DoesNotExist:
        return None, None, None
def delete_workflow_stale_deltas(
    workflow_id: int, min_last_applied_at: datetime.datetime
) -> None:
    """Delete the workflow's deltas that were last applied before `min_last_applied_at`.

    Two ranges are deleted, keyed off the workflow's `last_delta_id` pointer:
    everything at-or-below the newest stale delta in the applied (undo) side
    of the chain, and everything at-or-above the oldest stale delta in the
    unapplied (redo) side. This keeps each remaining chain contiguous.

    After deleting, the new first delta's `prev_delta_id` is nulled so the
    linked list has a valid head, and orphaned soft-deleted Tabs/Steps are
    garbage-collected.

    NOTE(review): `min_last_applied_at.replace(tzinfo=...)` assumes the
    caller passes a naive datetime representing UTC — confirm at call sites.
    """
    from cjwstate.models.workflow import Workflow

    try:
        with Workflow.lookup_and_cooperative_lock(id=workflow_id) as workflow_lock:
            workflow = workflow_lock.workflow
            with django.db.connections["default"].cursor() as cursor:
                # Delete stale deltas on both sides of last_delta_id in one
                # statement, so the operation is atomic within the lock.
                cursor.execute(
                    """
                    DELETE FROM delta
                    WHERE workflow_id = %(workflow_id)s
                    AND (
                        id <= (
                            SELECT MAX(delta.id)
                            FROM delta
                            WHERE last_applied_at < %(min_last_applied_at)s
                            AND workflow_id = %(workflow_id)s
                            AND delta.id <= (SELECT last_delta_id FROM workflow WHERE id = %(workflow_id)s)
                        )
                        OR id >= (
                            SELECT MIN(delta.id)
                            FROM delta
                            WHERE last_applied_at < %(min_last_applied_at)s
                            AND workflow_id = %(workflow_id)s
                            AND delta.id > (SELECT last_delta_id FROM workflow WHERE id = %(workflow_id)s)
                        )
                    )
                    """,
                    dict(
                        workflow_id=workflow_id,
                        min_last_applied_at=min_last_applied_at.replace(
                            tzinfo=datetime.timezone.utc
                        ),
                    ),
                )
                # Set the first delta's prev_delta_id to NULL. (The foreign-key
                # constraint is DEFERRABLE INITIALLY DEFERRED.)
                cursor.execute(
                    # HACK: work around Postgres picking the wrong index.
                    # [2021-02-02] on production the inner SELECT chose the
                    # delta.id index (id IS NOT NULL) instead of the
                    # delta.workflow_id index (workflow_id = X). Maybe VACUUM
                    # would fix this? Meh. "+ 0" disqualifies the delta.id
                    # index, forcing a better choice.
                    """
                    UPDATE delta
                    SET prev_delta_id = NULL
                    WHERE id = (
                        SELECT MIN(id + 0)
                        FROM delta
                        WHERE workflow_id = %(workflow_id)s
                    )
                    """,
                    dict(workflow_id=workflow_id),
                )
            # Deltas are gone; Tabs/Steps they soft-deleted can now be purged.
            workflow.delete_orphan_soft_deleted_models()
    except Workflow.DoesNotExist:
        pass  # Race: I guess there aren't any deltas after all.
def _lookup_requested_workflow_with_auth_and_cooperative_lock(
    self,
) -> ContextManager[Workflow]:
    """Either yield the requested workflow, or raise Workflow.DoesNotExist.

    Workflow.DoesNotExist means "permission denied" or "workflow does not
    exist".
    """
    route_kwargs = self.scope["url_route"]["kwargs"]
    workflow_id_or_secret_id = route_kwargs["workflow_id_or_secret_id"]
    if not isinstance(workflow_id_or_secret_id, int):
        # A secret ID grants access by itself: no user/session auth check.
        return Workflow.lookup_and_cooperative_lock(
            secret_id=workflow_id_or_secret_id
        )  # raise Workflow.DoesNotExist
    # A numeric ID requires a "read"-authorized user or session.
    return Workflow.authorized_lookup_and_cooperative_lock(
        "read",
        self.scope["user"],
        self.scope["session"],
        id=workflow_id_or_secret_id,
    )  # raise Workflow.DoesNotExist
def locked_and_loaded_step(
    workflow_id: int, step_slug: str
) -> ContextManager[Tuple[DbObjectCooperativeLock, Step, str]]:
    """Yield `WorkflowLock`, `step` and `file_param_id_name`.

    SECURITY: the caller may want to test the Step's `file_upload_api_token`.

    Raise UploadError(404, "workflow-not-found") on missing/deleted Workflow.
    Raise UploadError(404, "step-not-found") on missing/deleted Step.
    Raise UploadError(400, "step-module-deleted") on code-less Step.
    Raise UploadError(400, "step-has-no-file-param") on a Step with no File
    param.
    """
    try:
        with Workflow.lookup_and_cooperative_lock(id=workflow_id) as workflow_lock:
            workflow = workflow_lock.workflow

            try:
                step = Step.live_in_workflow(workflow).get(slug=step_slug)
            except Step.DoesNotExist:
                raise UploadError(404, "step-not-found")

            try:
                module_zipfile = MODULE_REGISTRY.latest(step.module_id_name)
            except KeyError:
                raise UploadError(400, "step-module-deleted")

            # The upload targets the module's first "file"-typed param.
            file_param_id_names = [
                pf.id_name
                for pf in module_zipfile.get_spec().param_fields
                if pf.type == "file"
            ]
            if not file_param_id_names:
                raise UploadError(400, "step-has-no-file-param")

            yield workflow_lock, step, file_param_id_names[0]
    except Workflow.DoesNotExist:
        raise UploadError(404, "workflow-not-found")
def _first_forward_and_save_returning_clientside_updates(
    cls, workflow_id: int, **kwargs
) -> Tuple[
    Optional[Delta],
    Optional[clientside.Update],
    Optional[PendingOwnerUpdate],
    Optional[int],
]:
    """Create and apply a `cls` delta, all in one cooperative lock.

    Return `(delta, clientside_update, pending_owner_update, render_delta_id)`:

    * `delta`: the newly created (and applied) Delta.
    * `clientside_update`: data to send over WebSockets.
    * `pending_owner_update`: owner-usage data to send, when `cls` modifies
      owner usage and the workflow has an owner; else `None`.
    * `render_delta_id`: `delta.id` when a render is needed; else `None`.

    Return `(None, None, None, None)` when the Workflow does not exist, or
    when `cls.amend_create_kwargs()` returns `None` — that is how
    `amend_create_kwargs()` signals the Delta should not be created at all.
    """
    now = datetime.datetime.now()
    command = NAME_TO_COMMAND[cls.__name__]
    try:
        # raise Workflow.DoesNotExist
        with Workflow.lookup_and_cooperative_lock(id=workflow_id) as workflow:
            create_kwargs = command.amend_create_kwargs(workflow=workflow, **kwargs)
            if not create_kwargs:
                return None, None, None, None

            # Delete unapplied ("redo") deltas: the linked list that comes
            # _after_ `workflow.last_delta_id`. Creating a new delta forks
            # history, so the old future is discarded.
            n_deltas_deleted, _ = workflow.deltas.filter(
                id__gt=workflow.last_delta_id
            ).delete()

            # prev_delta is None when we're at the start of the undo stack
            prev_delta = workflow.deltas.filter(id=workflow.last_delta_id).first()

            # Delta.objects.create() and command.forward() may raise unexpected
            # errors. Defer delete_orphan_soft_deleted_models(), to reduce the
            # risk of this race: 1. Delete DB objects; 2. Delete S3 files;
            # 3. ROLLBACK. (We aren't avoiding the race _entirely_ here, but
            # we're at least avoiding causing the race through errors in Delta
            # or Command.)
            delta = Delta.objects.create(
                command_name=cls.__name__,
                prev_delta=prev_delta,
                last_applied_at=now,
                **create_kwargs,
            )
            command.forward(delta)

            # Point workflow to us. Reuse `now` so `workflow.updated_at`
            # matches `delta.last_applied_at`, consistent with the undo/redo
            # helpers.
            workflow.last_delta_id = delta.id
            workflow.updated_at = now
            workflow.save(update_fields=["last_delta_id", "updated_at"])

            if n_deltas_deleted:
                # We just deleted deltas; now we can garbage-collect Tabs and
                # Steps that are soft-deleted and have no deltas referring
                # to them.
                workflow.delete_orphan_soft_deleted_models()

            if cls.modifies_owner_usage and workflow.owner_id:
                # We lock after running the command, but it's still correct.
                # DB commits are atomic: nothing is written yet.
                lock_user_by_id(workflow.owner_id, for_write=True)
                pending_owner_update = PendingOwnerUpdate(
                    user_id=workflow.owner_id,
                    user_update=clientside.UserUpdate(
                        usage=query_user_usage(workflow.owner_id)
                    ),
                )
            else:
                pending_owner_update = None

            return (
                delta,
                command.load_clientside_update(delta),
                pending_owner_update,
                delta.id if command.get_modifies_render_output(delta) else None,
            )
    except Workflow.DoesNotExist:
        return None, None, None, None