class Execution(mb.MistralSecureModelBase):
    """Abstract execution object.

    Polymorphic base row for the v2 execution hierarchy: concrete
    subclasses are discriminated by the ``type`` column (see
    ``__mapper_args__``).
    """

    __tablename__ = 'executions_v2'

    __table_args__ = (
        # Secondary indexes on the columns the API commonly filters/sorts by.
        sa.Index('%s_project_id' % __tablename__, 'project_id'),
        sa.Index('%s_scope' % __tablename__, 'scope'),
        sa.Index('%s_state' % __tablename__, 'state'),
        sa.Index('%s_type' % __tablename__, 'type'),
        sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
    )

    # Polymorphic discriminator column.
    type = sa.Column(sa.String(50))

    __mapper_args__ = {
        'polymorphic_on': type,
        'polymorphic_identity': 'execution'
    }

    # Main properties.
    id = mb.id_column()
    name = sa.Column(sa.String(80))
    description = sa.Column(sa.String(255), nullable=True)
    workflow_name = sa.Column(sa.String(80))
    workflow_id = sa.Column(sa.String(80))
    # Parsed specification; st.JsonDictType is a project-defined column type
    # (presumably JSON-serialized dict — confirm in the types module).
    spec = sa.Column(st.JsonDictType())
    state = sa.Column(sa.String(20))
    state_info = sa.Column(sa.Text(), nullable=True)
    tags = sa.Column(st.JsonListType())

    # Runtime context like iteration_no of a repeater.
    # Effectively internal engine properties which will be used to determine
    # execution of a task.
    runtime_context = sa.Column(st.JsonLongDictType())
class Task(mb.MistralBase):
    """Contains info about particular task.

    Legacy (v1) task model: ties a task to its workbook and execution by
    name/id strings rather than by foreign-key relationships.
    """

    __tablename__ = 'tasks'

    # _id_column() is a module-level helper defined elsewhere in this file;
    # presumably yields a string primary-key column — confirm at its definition.
    id = _id_column()
    name = sa.Column(sa.String(80))
    requires = sa.Column(st.JsonDictType())
    workbook_name = sa.Column(sa.String(80))
    # Plain string reference; no ForeignKey constraint is declared here.
    execution_id = sa.Column(sa.String(36))
    description = sa.Column(sa.String(200))
    task_spec = sa.Column(st.JsonDictType())
    action_spec = sa.Column(st.JsonDictType())
    state = sa.Column(sa.String(20))
    tags = sa.Column(st.JsonListType())

    # Data Flow properties.
    in_context = sa.Column(st.JsonDictType())
    parameters = sa.Column(st.JsonDictType())
    output = sa.Column(st.JsonDictType())

    # Runtime context like iteration_no of a repeater.
    # Effectively internal engine properties which will be used to determine
    # execution of a task.
    task_runtime_context = sa.Column(st.JsonDictType())
class Definition(mb.MistralSecureModelBase):
    """Abstract base for definition objects (workbooks, workflows, actions).

    Holds the raw DSL text (``definition``) alongside its parsed form
    (``spec``). Concrete subclasses supply ``__tablename__``.
    """

    __abstract__ = True

    id = mb.id_column()
    name = sa.Column(sa.String(80))
    # Raw DSL source text as provided by the user.
    definition = sa.Column(sa.Text(), nullable=True)
    # Parsed representation of ``definition``.
    spec = sa.Column(st.JsonDictType())
    tags = sa.Column(st.JsonListType())
class Definition(mb.MistralSecureModelBase):
    """Abstract base for definition objects (workbooks, workflows, actions).

    Newer revision: wider ``name`` column, medium-sized text/JSON storage
    for large definitions, and a flag distinguishing system-provided
    definitions from user-created ones.
    """

    __abstract__ = True

    id = mb.id_column()
    name = sa.Column(sa.String(255))
    # Raw DSL source text; st.MediumText is a project-defined type
    # (presumably maps to MEDIUMTEXT on MySQL — confirm in the types module).
    definition = sa.Column(st.MediumText(), nullable=True)
    # Parsed representation of ``definition``.
    spec = sa.Column(st.JsonMediumDictType())
    tags = sa.Column(st.JsonListType())
    # True for definitions shipped with the system (not user-created).
    is_system = sa.Column(sa.Boolean())
class TaskExecution(Execution):
    """Contains task runtime information.

    Concrete subclass of :class:`Execution` storing per-task state,
    data-flow results and engine bookkeeping flags.
    """

    __tablename__ = 'task_executions_v2'

    __table_args__ = (
        sa.Index('%s_project_id' % __tablename__, 'project_id'),
        sa.Index('%s_scope' % __tablename__, 'scope'),
        sa.Index('%s_state' % __tablename__, 'state'),
        sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
        sa.UniqueConstraint('unique_key')
    )

    # Main properties.
    # Deferred so the (potentially large) spec is only loaded on access.
    spec = sa.orm.deferred(sa.Column(st.JsonMediumDictType()))
    action_spec = sa.Column(st.JsonLongDictType())
    unique_key = sa.Column(sa.String(255), nullable=True)
    type = sa.Column(sa.String(10))
    started_at = sa.Column(sa.DateTime, nullable=True)
    finished_at = sa.Column(sa.DateTime, nullable=True)

    # Whether the task is fully processed (publishing and calculating commands
    # after it). It allows to simplify workflow controller implementations
    # significantly.
    # NOTE: was sa.BOOLEAN; changed to the generic sa.Boolean for
    # consistency with the other boolean columns in this class
    # (has_next_tasks, error_handled). Both render the same DDL.
    processed = sa.Column(sa.Boolean, default=False)

    # Set to True if the completion of the task led to starting new
    # tasks.
    # The value of this property should be ignored if the task
    # is not completed.
    has_next_tasks = sa.Column(sa.Boolean, default=False)

    # The names of the next tasks.
    # [(task_name, event)]
    next_tasks = sa.Column(st.JsonListType())

    # Set to True if the task finished with an error and the error
    # is handled (e.g. with 'on-error' clause for direct workflows)
    # so that the error shouldn't bubble up to the workflow level.
    # The value of this property should be ignored if the task
    # is not completed.
    error_handled = sa.Column(sa.Boolean, default=False)

    # Data Flow properties.
    in_context = sa.Column(st.JsonLongDictType())
    published = sa.Column(st.JsonLongDictType())

    @property
    def executions(self):
        """Child executions: action executions, or workflow executions
        when the task spec describes a (sub)workflow.
        """
        return (
            self.action_executions
            if not self.spec.get('workflow')
            else self.workflow_executions
        )

    def to_dict(self):
        """Return the dict form with datetimes rendered as strings."""
        d = super(TaskExecution, self).to_dict()

        utils.datetime_to_str_in_dict(d, 'started_at')
        utils.datetime_to_str_in_dict(d, 'finished_at')

        return d
class Workbook(mb.MistralBase):
    """Contains info about workbook (including definition in Mistral DSL).

    Legacy (v1) workbook model; workbook names are globally unique.
    """

    __tablename__ = 'workbooks'

    __table_args__ = (sa.UniqueConstraint('name'), )

    # _id_column() is a module-level helper defined elsewhere in this file.
    id = _id_column()
    # NOTE(review): 'name' is declared primary_key=True in addition to the
    # id column — if _id_column() is also a primary key this forms a
    # composite primary key; confirm at _id_column's definition.
    name = sa.Column(sa.String(80), primary_key=True)
    # Raw Mistral DSL text.
    definition = sa.Column(sa.Text(), nullable=True)
    description = sa.Column(sa.String(200))
    tags = sa.Column(st.JsonListType())
    # Multitenancy / visibility properties.
    scope = sa.Column(sa.String(80))
    project_id = sa.Column(sa.String(80))
    trust_id = sa.Column(sa.String(80))
class Execution(mb.MistralSecureModelBase): __abstract__ = True # Common properties. id = mb.id_column() name = sa.Column(sa.String(255)) description = sa.Column(sa.String(255), nullable=True) workflow_name = sa.Column(sa.String(255)) workflow_namespace = sa.Column(sa.String(255)) workflow_id = sa.Column(sa.String(80)) state = sa.Column(sa.String(20)) state_info = sa.Column(sa.Text(), nullable=True) tags = sa.Column(st.JsonListType()) # Internal properties which can be used by engine. runtime_context = sa.Column(st.JsonLongDictType())
class CodeSource(mb.MistralSecureModelBase):
    """Contains info about registered CodeSources.

    A code source is unique per (name, namespace, project).
    """

    __tablename__ = 'code_sources'

    __table_args__ = (
        sa.UniqueConstraint('name', 'namespace', 'project_id'),
        sa.Index('%s_project_id' % __tablename__, 'project_id'),
        sa.Index('%s_scope' % __tablename__, 'scope'),
    )

    # Main properties.
    id = mb.id_column()
    name = sa.Column(sa.String(255))
    # Raw source content (presumably Python module text — confirm with
    # the code-sources API).
    content = sa.Column(sa.Text())
    # Monotonically tracked revision of the content.
    version = sa.Column(sa.Integer())
    namespace = sa.Column(sa.String(255), nullable=True)
    tags = sa.Column(st.JsonListType())
def upgrade():
    """Create the v2 execution tables and widen v2 definition columns.

    Order matters: all three execution tables are created first, then the
    cross-table foreign keys are added, then existing v2 definition tables
    are altered to the wider column types.
    """
    op.create_table(
        'action_executions_v2',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('scope', sa.String(length=80), nullable=True),
        sa.Column('project_id', sa.String(length=80), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('workflow_name', sa.String(length=255), nullable=True),
        sa.Column('workflow_id', sa.String(length=80), nullable=True),
        sa.Column('spec', st.JsonMediumDictType(), nullable=True),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('state_info', sa.TEXT(), nullable=True),
        sa.Column('tags', st.JsonListType(), nullable=True),
        sa.Column('runtime_context', st.JsonLongDictType(), nullable=True),
        sa.Column('accepted', sa.Boolean(), nullable=True),
        sa.Column('input', st.JsonLongDictType(), nullable=True),
        sa.Column('output', st.JsonLongDictType(), nullable=True),
        # FK constraint to task_executions_v2 is added later, after that
        # table exists.
        sa.Column('task_execution_id', sa.String(length=36), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.Index(
            'action_executions_v2_project_id',
            'project_id'
        ),
        sa.Index(
            'action_executions_v2_scope',
            'scope'
        ),
        sa.Index(
            'action_executions_v2_state',
            'state'
        ),
        sa.Index(
            'action_executions_v2_updated_at',
            'updated_at'
        ),
    )

    op.create_table(
        'workflow_executions_v2',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('scope', sa.String(length=80), nullable=True),
        sa.Column('project_id', sa.String(length=80), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('workflow_name', sa.String(length=255), nullable=True),
        sa.Column('workflow_id', sa.String(length=80), nullable=True),
        sa.Column('spec', st.JsonMediumDictType(),
                  nullable=True),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('state_info', sa.TEXT(), nullable=True),
        sa.Column('tags', st.JsonListType(), nullable=True),
        sa.Column('runtime_context', st.JsonLongDictType(), nullable=True),
        sa.Column('accepted', sa.Boolean(), nullable=True),
        sa.Column('input', st.JsonLongDictType(), nullable=True),
        sa.Column('output', st.JsonLongDictType(), nullable=True),
        sa.Column('params', st.JsonLongDictType(), nullable=True),
        sa.Column('context', st.JsonLongDictType(), nullable=True),
        # Link back to the spawning task execution (for sub-workflows);
        # FK constraint added below.
        sa.Column('task_execution_id', sa.String(length=36), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.Index(
            'workflow_executions_v2_project_id',
            'project_id'
        ),
        sa.Index(
            'workflow_executions_v2_scope',
            'scope'
        ),
        sa.Index(
            'workflow_executions_v2_state',
            'state'
        ),
        sa.Index(
            'workflow_executions_v2_updated_at',
            'updated_at'
        ),
    )

    op.create_table(
        'task_executions_v2',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('scope', sa.String(length=80), nullable=True),
        sa.Column('project_id', sa.String(length=80), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('workflow_name', sa.String(length=255), nullable=True),
        sa.Column('workflow_id', sa.String(length=80), nullable=True),
        sa.Column('spec', st.JsonMediumDictType(), nullable=True),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('state_info', sa.TEXT(), nullable=True),
        sa.Column('tags', st.JsonListType(), nullable=True),
        sa.Column('runtime_context', st.JsonLongDictType(), nullable=True),
        sa.Column('action_spec', st.JsonLongDictType(), nullable=True),
        sa.Column('processed', sa.Boolean(), nullable=True),
        sa.Column('in_context', st.JsonLongDictType(), nullable=True),
        sa.Column('published', st.JsonLongDictType(), nullable=True),
        sa.Column(
            'workflow_execution_id',
            sa.String(length=36),
            nullable=True
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.Index(
            'task_executions_v2_project_id',
            'project_id'
        ),
        sa.Index(
            'task_executions_v2_scope',
            'scope'
        ),
        sa.Index(
            'task_executions_v2_state',
            'state'
        ),
        sa.Index(
            'task_executions_v2_updated_at',
            'updated_at'
        ),
        sa.Index(
            'task_executions_v2_workflow_execution_id',
            'workflow_execution_id'
        ),
        # workflow_executions_v2 already exists, so this FK can be declared
        # inline with the table.
        sa.ForeignKeyConstraint(
            ['workflow_execution_id'],
            [u'workflow_executions_v2.id'],
            ondelete='CASCADE'
        ),
    )

    # 2 foreign keys are added here because all 3 tables are dependent.
    op.create_foreign_key(
        None,
        'action_executions_v2',
        'task_executions_v2',
        ['task_execution_id'],
        ['id'],
        ondelete='CASCADE'
    )

    op.create_foreign_key(
        None,
        'workflow_executions_v2',
        'task_executions_v2',
        ['task_execution_id'],
        ['id'],
        ondelete='CASCADE'
    )

    # Widen/upgrade columns on the pre-existing v2 definition tables to
    # match the newer model revisions (String(255) names, medium text/JSON).
    op.alter_column(
        'workbooks_v2',
        'name',
        type_=sa.String(length=255)
    )

    op.alter_column(
        'workbooks_v2',
        'definition',
        type_=st.MediumText()
    )

    op.alter_column(
        'workbooks_v2',
        'spec',
        type_=st.JsonMediumDictType()
    )

    op.alter_column(
        'workflow_definitions_v2',
        'name',
        type_=sa.String(length=255)
    )

    op.alter_column(
        'workflow_definitions_v2',
        'definition',
        type_=st.MediumText()
    )

    op.alter_column(
        'workflow_definitions_v2',
        'spec',
        type_=st.JsonMediumDictType()
    )

    op.alter_column(
        'action_definitions_v2',
        'name',
        type_=sa.String(length=255)
    )

    op.alter_column(
        'action_definitions_v2',
        'definition',
        type_=st.MediumText()
    )

    op.alter_column(
        'action_definitions_v2',
        'spec',
        type_=st.JsonMediumDictType()
    )

    op.alter_column(
        'cron_triggers_v2',
        'workflow_name',
        type_=sa.String(length=255)
    )