class WorkflowExecution(Execution):
    """Contains workflow execution information."""

    __tablename__ = 'workflow_executions_v2'

    __table_args__ = (
        sa.Index('%s_project_id' % __tablename__, 'project_id'),
        sa.Index('%s_scope' % __tablename__, 'scope'),
        sa.Index('%s_state' % __tablename__, 'state'),
        sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
    )

    # Main properties.
    # Potentially large JSON columns are wrapped in sa.orm.deferred() so
    # they are loaded lazily, only when actually accessed.
    spec = sa.orm.deferred(sa.Column(st.JsonMediumDictType()))
    accepted = sa.Column(sa.Boolean(), default=False)
    input = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))
    output = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))
    params = sa.orm.deferred(sa.Column(st.JsonLongDictType()))

    # Initial workflow context containing workflow variables, environment,
    # openstack security context etc.
    # NOTES:
    #  * Data stored in this structure should not be copied into inbound
    #    contexts of tasks. No need to duplicate it.
    #  * This structure does not contain workflow input.
    context = sa.orm.deferred(sa.Column(st.JsonLongDictType()))
class Definition(mb.MistralSecureModelBase):
    """Abstract base model for named definition objects.

    Holds the columns shared by concrete definition models (raw definition
    text plus its parsed specification).
    """

    __abstract__ = True

    id = mb.id_column()
    name = sa.Column(sa.String(255))

    # Raw definition text; MediumText allows large documents.
    definition = sa.Column(st.MediumText(), nullable=True)

    # Parsed specification of the definition, stored as JSON.
    spec = sa.Column(st.JsonMediumDictType())

    tags = sa.Column(st.JsonListType())

    # True for definitions shipped with the system (as opposed to
    # user-created ones) — presumably; confirm against callers.
    is_system = sa.Column(sa.Boolean())
class TaskExecution(Execution):
    """Contains task runtime information."""

    __tablename__ = 'task_executions_v2'

    __table_args__ = (
        sa.Index('%s_project_id' % __tablename__, 'project_id'),
        sa.Index('%s_scope' % __tablename__, 'scope'),
        sa.Index('%s_state' % __tablename__, 'state'),
        sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
        sa.UniqueConstraint('unique_key')
    )

    # Main properties.
    # 'spec' may be large, so it is deferred (loaded only on access).
    spec = sa.orm.deferred(sa.Column(st.JsonMediumDictType()))
    action_spec = sa.Column(st.JsonLongDictType())
    unique_key = sa.Column(sa.String(255), nullable=True)
    type = sa.Column(sa.String(10))

    started_at = sa.Column(sa.DateTime, nullable=True)
    finished_at = sa.Column(sa.DateTime, nullable=True)

    # Whether the task is fully processed (publishing and calculating commands
    # after it). It allows to simplify workflow controller implementations
    # significantly.
    # NOTE: sa.Boolean is used (instead of the previous sa.BOOLEAN) for
    # consistency with every other boolean column in this module; the two
    # types are equivalent.
    processed = sa.Column(sa.Boolean, default=False)

    # Set to True if the completion of the task led to starting new
    # tasks.
    # The value of this property should be ignored if the task
    # is not completed.
    has_next_tasks = sa.Column(sa.Boolean, default=False)

    # The names of the next tasks.
    # [(task_name, event)]
    next_tasks = sa.Column(st.JsonListType())

    # Set to True if the task finished with an error and the error
    # is handled (e.g. with 'on-error' clause for direct workflows)
    # so that the error shouldn't bubble up to the workflow level.
    # The value of this property should be ignored if the task
    # is not completed.
    error_handled = sa.Column(sa.Boolean, default=False)

    # Data Flow properties.
    in_context = sa.Column(st.JsonLongDictType())
    published = sa.Column(st.JsonLongDictType())

    @property
    def executions(self):
        """Child executions of this task.

        Returns workflow executions if the task spec declares a 'workflow',
        otherwise action executions.
        """
        return (
            self.action_executions
            if not self.spec.get('workflow')
            else self.workflow_executions
        )

    def to_dict(self):
        """Return a dict representation with datetimes rendered as strings."""
        d = super(TaskExecution, self).to_dict()

        utils.datetime_to_str_in_dict(d, 'started_at')
        utils.datetime_to_str_in_dict(d, 'finished_at')

        return d
class DelayedCall(mb.MistralModelBase):
    """Contains info about delayed calls."""

    __tablename__ = 'delayed_calls_v2'

    id = mb.id_column()

    # Dotted path to a factory used to build the call target; optional.
    factory_method_path = sa.Column(sa.String(200), nullable=True)

    # Name of the method to invoke on the target; required.
    target_method_name = sa.Column(sa.String(80), nullable=False)

    # Serialized keyword arguments for the target method.
    method_arguments = sa.Column(st.JsonDictType())
    serializers = sa.Column(st.JsonDictType())

    key = sa.Column(sa.String(250), nullable=True)

    # Security/auth context captured at scheduling time.
    auth_context = sa.Column(st.JsonMediumDictType())

    # When the call is due to be executed.
    execution_time = sa.Column(sa.DateTime, nullable=False)

    # Flag marking a call as currently being processed (claimed by a worker
    # — presumably; confirm against the scheduler code).
    processing = sa.Column(sa.Boolean, default=False, nullable=False)
class Execution(mb.MistralSecureModelBase): __abstract__ = True # Common properties. id = mb.id_column() name = sa.Column(sa.String(255)) description = sa.Column(sa.String(255), nullable=True) workflow_name = sa.Column(sa.String(255)) workflow_id = sa.Column(sa.String(80)) spec = sa.Column(st.JsonMediumDictType()) state = sa.Column(sa.String(20)) state_info = sa.Column(sa.Text(), nullable=True) tags = sa.Column(st.JsonListType()) # Internal properties which can be used by engine. runtime_context = sa.Column(st.JsonLongDictType())
class ActionExecution(Execution):
    """Contains action execution information."""

    __tablename__ = 'action_executions_v2'

    __table_args__ = (
        sa.Index('%s_project_id' % __tablename__, 'project_id'),
        sa.Index('%s_scope' % __tablename__, 'scope'),
        sa.Index('%s_state' % __tablename__, 'state'),
        sa.Index('%s_updated_at' % __tablename__, 'updated_at')
    )

    # Main properties.
    spec = sa.Column(st.JsonMediumDictType())
    accepted = sa.Column(sa.Boolean(), default=False)
    input = sa.Column(st.JsonLongDictType(), nullable=True)

    # 'output' may be large, so it is deferred (loaded only on access).
    output = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))

    # Initialized to now + first_heartbeat_timeout so a freshly created
    # execution is not considered stale before its first heartbeat.
    last_heartbeat = sa.Column(
        sa.DateTime,
        default=lambda: utils.utc_now_sec() + datetime.timedelta(
            seconds=CONF.action_heartbeat.first_heartbeat_timeout
        )
    )
    is_sync = sa.Column(sa.Boolean(), default=None, nullable=True)
def upgrade():
    """Create the v2 execution tables and widen several existing columns.

    Creates action_executions_v2, workflow_executions_v2 and
    task_executions_v2 (with their indexes), wires up the foreign keys
    between them, then enlarges name/definition/spec columns on the
    pre-existing definition tables.
    """
    op.create_table(
        'action_executions_v2',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('scope', sa.String(length=80), nullable=True),
        sa.Column('project_id', sa.String(length=80), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('workflow_name', sa.String(length=255), nullable=True),
        sa.Column('workflow_id', sa.String(length=80), nullable=True),
        sa.Column('spec', st.JsonMediumDictType(), nullable=True),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('state_info', sa.TEXT(), nullable=True),
        sa.Column('tags', st.JsonListType(), nullable=True),
        sa.Column('runtime_context', st.JsonLongDictType(), nullable=True),
        sa.Column('accepted', sa.Boolean(), nullable=True),
        sa.Column('input', st.JsonLongDictType(), nullable=True),
        sa.Column('output', st.JsonLongDictType(), nullable=True),
        sa.Column('task_execution_id', sa.String(length=36), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.Index('action_executions_v2_project_id', 'project_id'),
        sa.Index('action_executions_v2_scope', 'scope'),
        sa.Index('action_executions_v2_state', 'state'),
        sa.Index('action_executions_v2_updated_at', 'updated_at'),
    )

    op.create_table(
        'workflow_executions_v2',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('scope', sa.String(length=80), nullable=True),
        sa.Column('project_id', sa.String(length=80), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('workflow_name', sa.String(length=255), nullable=True),
        sa.Column('workflow_id', sa.String(length=80), nullable=True),
        sa.Column('spec', st.JsonMediumDictType(), nullable=True),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('state_info', sa.TEXT(), nullable=True),
        sa.Column('tags', st.JsonListType(), nullable=True),
        sa.Column('runtime_context', st.JsonLongDictType(), nullable=True),
        sa.Column('accepted', sa.Boolean(), nullable=True),
        sa.Column('input', st.JsonLongDictType(), nullable=True),
        sa.Column('output', st.JsonLongDictType(), nullable=True),
        sa.Column('params', st.JsonLongDictType(), nullable=True),
        sa.Column('context', st.JsonLongDictType(), nullable=True),
        sa.Column('task_execution_id', sa.String(length=36), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.Index('workflow_executions_v2_project_id', 'project_id'),
        sa.Index('workflow_executions_v2_scope', 'scope'),
        sa.Index('workflow_executions_v2_state', 'state'),
        sa.Index('workflow_executions_v2_updated_at', 'updated_at'),
    )

    op.create_table(
        'task_executions_v2',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('scope', sa.String(length=80), nullable=True),
        sa.Column('project_id', sa.String(length=80), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('workflow_name', sa.String(length=255), nullable=True),
        sa.Column('workflow_id', sa.String(length=80), nullable=True),
        sa.Column('spec', st.JsonMediumDictType(), nullable=True),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('state_info', sa.TEXT(), nullable=True),
        sa.Column('tags', st.JsonListType(), nullable=True),
        sa.Column('runtime_context', st.JsonLongDictType(), nullable=True),
        sa.Column('action_spec', st.JsonLongDictType(), nullable=True),
        sa.Column('processed', sa.Boolean(), nullable=True),
        sa.Column('in_context', st.JsonLongDictType(), nullable=True),
        sa.Column('published', st.JsonLongDictType(), nullable=True),
        sa.Column(
            'workflow_execution_id', sa.String(length=36), nullable=True
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.Index('task_executions_v2_project_id', 'project_id'),
        sa.Index('task_executions_v2_scope', 'scope'),
        sa.Index('task_executions_v2_state', 'state'),
        sa.Index('task_executions_v2_updated_at', 'updated_at'),
        sa.Index(
            'task_executions_v2_workflow_execution_id',
            'workflow_execution_id'
        ),
        sa.ForeignKeyConstraint(
            ['workflow_execution_id'],
            [u'workflow_executions_v2.id'],
            ondelete='CASCADE'
        ),
    )

    # 2 foreign keys are added here because all 3 tables are dependent.
    # They cannot be declared inline since the referenced table
    # (task_executions_v2) is created after the referencing tables.
    op.create_foreign_key(
        None,
        'action_executions_v2',
        'task_executions_v2',
        ['task_execution_id'],
        ['id'],
        ondelete='CASCADE'
    )

    op.create_foreign_key(
        None,
        'workflow_executions_v2',
        'task_executions_v2',
        ['task_execution_id'],
        ['id'],
        ondelete='CASCADE'
    )

    # Widen name/definition/spec columns on the existing definition tables.
    op.alter_column(
        'workbooks_v2',
        'name',
        type_=sa.String(length=255)
    )
    op.alter_column(
        'workbooks_v2',
        'definition',
        type_=st.MediumText()
    )
    op.alter_column(
        'workbooks_v2',
        'spec',
        type_=st.JsonMediumDictType()
    )

    op.alter_column(
        'workflow_definitions_v2',
        'name',
        type_=sa.String(length=255)
    )
    op.alter_column(
        'workflow_definitions_v2',
        'definition',
        type_=st.MediumText()
    )
    op.alter_column(
        'workflow_definitions_v2',
        'spec',
        type_=st.JsonMediumDictType()
    )

    op.alter_column(
        'action_definitions_v2',
        'name',
        type_=sa.String(length=255)
    )
    op.alter_column(
        'action_definitions_v2',
        'definition',
        type_=st.MediumText()
    )
    op.alter_column(
        'action_definitions_v2',
        'spec',
        type_=st.JsonMediumDictType()
    )

    op.alter_column(
        'cron_triggers_v2',
        'workflow_name',
        type_=sa.String(length=255)
    )
# Alembic migration 020: add the 'type' column to task_executions_v2.
revision = '020'
down_revision = '019'

from alembic import op
from mistral.db.sqlalchemy import types as st
import sqlalchemy as sa

# A simple model of the task executions table with only the fields needed for
# the migration.
task_executions = sa.Table(
    'task_executions_v2',
    sa.MetaData(),
    sa.Column('id', sa.String(36), nullable=False),
    sa.Column(
        'spec',
        st.JsonMediumDictType()
    ),
    sa.Column('type', sa.String(10), nullable=True)
)


def upgrade():
    op.add_column(
        'task_executions_v2',
        sa.Column('type', sa.String(length=10), nullable=True)
    )

    # Open a session on the migration connection to read existing rows.
    session = sa.orm.Session(bind=op.get_bind())

    # NOTE(review): 'values' presumably accumulates per-row updates that
    # backfill the new 'type' column; the rest of this function is not
    # visible in this chunk — confirm against the full migration script.
    values = []
def upgrade():
    """Enlarge the auth_context column of delayed_calls_v2."""
    # Changing column type from JsonDictType to JsonMediumDictType.
    # (The previous comment said JsonLongDictType, but the code applies
    # JsonMediumDictType, matching the DelayedCall model declaration.)
    op.alter_column('delayed_calls_v2', 'auth_context',
                    type_=st.JsonMediumDictType())