def get_controller(wf_ex, wf_spec=None):
    """Gets a workflow controller instance by given workflow execution object.

    :param wf_ex: Workflow execution object.
    :param wf_spec: Workflow specification object. If passed, the method
        works faster.
    :returns: Workflow controller instance.
    """
    if not wf_spec:
        wf_spec = spec_parser.get_workflow_spec(wf_ex['spec'])

    wf_type = wf_spec.get_type()

    ctrl_cls = None

    for cls in u.iter_subclasses(WorkflowController):
        if cls.__workflow_type__ == wf_type:
            ctrl_cls = cls
            break

    if not ctrl_cls:
        raise exc.NotFoundException(
            'Failed to find a workflow controller [type=%s]' % wf_type
        )

    return ctrl_cls(wf_ex, wf_spec)
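# A minimal, self-contained sketch of the lookup pattern used above: a base
# class advertises a '__workflow_type__' attribute, each concrete subclass
# overrides it, and a controller class is resolved from a plain string.
# The class names below are hypothetical and exist only for illustration;
# they are not part of the Mistral code base.
class ExampleController(object):
    __workflow_type__ = None


class ExampleDirectController(ExampleController):
    __workflow_type__ = 'direct'


class ExampleReverseController(ExampleController):
    __workflow_type__ = 'reverse'


def _find_example_controller(wf_type):
    # __subclasses__() is enough here because the hierarchy is only one
    # level deep; iter_subclasses() generalizes this to deeper hierarchies.
    for cls in ExampleController.__subclasses__():
        if cls.__workflow_type__ == wf_type:
            return cls

    raise ValueError('No controller for type %s' % wf_type)


assert _find_example_controller('reverse') is ExampleReverseController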
def register_length_validator(attr_name):
    """Register an event listener on the attribute.

    The listener validates the value size every time a 'set' occurs.
    """
    for cls in utils.iter_subclasses(Execution):
        if hasattr(cls, attr_name):
            event.listen(
                getattr(cls, attr_name),
                'set',
                # Bind 'cls' as a default argument: a plain closure would
                # late-bind it and every listener would end up referring to
                # the last class yielded by the loop.
                lambda t, v, o, i, cls=cls: validate_long_type_length(
                    cls, attr_name, v
                )
            )
def register_secure_model_hooks():
    # Make sure 'project_id' is always properly set.
    for sec_model_class in utils.iter_subclasses(MistralSecureModelBase):
        if '__abstract__' not in sec_model_class.__dict__:
            event.listen(
                sec_model_class.project_id,
                'set',
                _set_project_id,
                retval=True
            )
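# A hedged sketch of the SQLAlchemy attribute-event mechanism the hooks above
# rely on: listening for 'set' on a mapped column and, because retval=True,
# replacing the value before it is written. The 'Note' model and _trim_text
# listener are hypothetical and only illustrate the API; the import path
# assumes SQLAlchemy 1.4+ (older releases expose declarative_base from
# sqlalchemy.ext.declarative).
import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Note(Base):
    __tablename__ = 'note'

    id = sa.Column(sa.Integer, primary_key=True)
    text = sa.Column(sa.String(10))


def _trim_text(target, value, oldvalue, initiator):
    # The returned value replaces the one being set because retval=True.
    return value[:10] if value is not None else value


event.listen(Note.text, 'set', _trim_text, retval=True)

note = Note()
note.text = 'x' * 50

assert note.text == 'x' * 10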
def _get_class(wf_type):
    """Gets a workflow controller class by given workflow type.

    :param wf_type: Workflow type.
    :returns: Workflow controller class.
    """
    for wf_ctrl_cls in u.iter_subclasses(WorkflowController):
        if wf_type == wf_ctrl_cls.__workflow_type__:
            return wf_ctrl_cls

    msg = 'Failed to find a workflow controller [type=%s]' % wf_type

    raise exc.NotFoundException(msg)
def get_class(wf_type):
    """Gets a task specification list class by given workflow type.

    :param wf_type: Workflow type.
    :returns: Task specification list class.
    """
    for spec_list_cls in utils.iter_subclasses(TaskSpecList):
        if wf_type == spec_list_cls.__type__:
            return spec_list_cls

    msg = ("Cannot find a task list specification with workflow type:"
           " %s" % wf_type)

    raise exc.NotFoundException(msg)
def test_itersubclasses(self):
    class A(object):
        pass

    class B(A):
        pass

    class C(A):
        pass

    class D(C):
        pass

    self.assertEqual([B, C, D], list(utils.iter_subclasses(A)))
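# The test above pins down the traversal order. A minimal sketch of what
# utils.iter_subclasses() is assumed to do is a depth-first walk over
# __subclasses__(), yielding each class once; this illustrates the expected
# behaviour rather than the actual Mistral implementation.
def iter_subclasses(cls, _seen=None):
    if _seen is None:
        _seen = set()

    for sub in cls.__subclasses__():
        if sub not in _seen:
            _seen.add(sub)

            yield sub

            # Recurse so grandchildren (D below C in the test) are included.
            for sub_sub in iter_subclasses(sub, _seen):
                yield sub_sub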
def instantiate_spec(spec_cls, data):
    """Instantiates a specification, accounting for specification hierarchies.

    :param spec_cls: Specification concrete or base class. If a base class
        (i.e. a hierarchy) is given, this method relies on the
        _polymorphic_key and _polymorphic_value attributes to find the
        concrete class that needs to be instantiated.
    :param data: Raw specification data as a dictionary.
    """
    if issubclass(spec_cls, BaseSpecList):
        # Ignore polymorphic search for specification lists because
        # it doesn't make sense for them.
        return spec_cls(data)

    if not hasattr(spec_cls, '_polymorphic_key'):
        spec = spec_cls(data)

        spec.validate_semantics()

        return spec

    key = spec_cls._polymorphic_key

    if not isinstance(key, tuple):
        key_name = key
        key_default = None
    else:
        key_name = key[0]
        key_default = key[1]

    for cls in utils.iter_subclasses(spec_cls):
        if not hasattr(cls, '_polymorphic_value'):
            raise exc.DSLParsingException(
                "Class '%s' is expected to have attribute"
                " '_polymorphic_value' because it's a part of the"
                " specification hierarchy inherited from class '%s'."
                % (cls, spec_cls)
            )

        if cls._polymorphic_value == data.get(key_name, key_default):
            spec = cls(data)

            spec.validate_semantics()

            # Return the spec that was just created and validated instead of
            # instantiating the class a second time.
            return spec

    raise exc.DSLParsingException(
        'Failed to find a specification class to instantiate '
        '[spec_cls=%s, data=%s]' % (spec_cls, data)
    )
def instantiate_spec(spec_cls, data):
    """Instantiates a specification, accounting for specification hierarchies.

    :param spec_cls: Specification concrete or base class. If a base class
        (i.e. a hierarchy) is given, this method relies on the
        _polymorphic_key and _polymorphic_value attributes to find the
        concrete class that needs to be instantiated.
    :param data: Raw specification data as a dictionary.
    """
    if issubclass(spec_cls, BaseSpecList):
        # Ignore polymorphic search for specification lists because
        # it doesn't make sense for them.
        return spec_cls(data)

    if not hasattr(spec_cls, '_polymorphic_key'):
        spec = spec_cls(data)

        spec.validate_semantics()

        return spec

    key = spec_cls._polymorphic_key

    if not isinstance(key, tuple):
        key_name = key
        key_default = None
    else:
        key_name = key[0]
        key_default = key[1]

    for cls in utils.iter_subclasses(spec_cls):
        if not hasattr(cls, '_polymorphic_value'):
            raise exc.DSLParsingException(
                "Class '%s' is expected to have attribute"
                " '_polymorphic_value' because it's a part of the"
                " specification hierarchy inherited from class '%s'."
                % (cls, spec_cls)
            )

        if cls._polymorphic_value == data.get(key_name, key_default):
            spec = cls(data)

            spec.validate_semantics()

            return spec

    raise exc.DSLParsingException(
        'Failed to find a specification class to instantiate '
        '[spec_cls=%s, data=%s]' % (spec_cls, data)
    )
    }

    # Main properties.
    action_spec = sa.Column(st.JsonDictType())

    # Whether the task is fully processed (publishing and calculating
    # commands after it). This allows simplifying workflow controller
    # implementations significantly.
    processed = sa.Column(sa.BOOLEAN, default=False)

    # Data Flow properties.
    in_context = sa.Column(st.JsonLongDictType())
    published = sa.Column(st.JsonDictType())


for cls in utils.iter_subclasses(Execution):
    event.listen(
        # Catch and trim Execution.state_info to always fit allocated size.
        cls.state_info,
        'set',
        lambda t, v, o, i: utils.cut(v, 1020),
        retval=True
    )

# Many-to-one for 'Execution' and 'TaskExecution'.
Execution.task_execution_id = sa.Column(
    sa.String(36),
    sa.ForeignKey(TaskExecution.id),
    nullable=True
)
def instantiate_spec(spec_cls, data, validate=False):
    """Instantiates a specification, accounting for specification hierarchies.

    :param spec_cls: Specification concrete or base class. If a base class
        (i.e. a hierarchy) is given, this method relies on the
        _polymorphic_key and _polymorphic_value attributes to find the
        concrete class that needs to be instantiated.
    :param data: Raw specification data as a dictionary.
    :type data: dict
    :param validate: If False, semantics and schema validation are skipped.
    :type validate: bool
    """
    if issubclass(spec_cls, BaseSpecList):
        # Ignore polymorphic search for specification lists because
        # it doesn't make sense for them.
        return spec_cls(data, validate)

    if not hasattr(spec_cls, '_polymorphic_key'):
        spec = spec_cls(data, validate)

        if validate:
            spec.validate_semantics()

        return spec

    # In order to do polymorphic search we need to make sure that
    # a spec is backed by a dictionary. Otherwise we can't extract
    # a polymorphic key.
    if not isinstance(data, dict):
        raise exc.InvalidModelException(
            "A specification with a polymorphic key must be backed by"
            " a dictionary [spec_cls=%s, data=%s]" % (spec_cls, data)
        )

    key = spec_cls._polymorphic_key

    if not isinstance(key, tuple):
        key_name = key
        key_default = None
    else:
        key_name = key[0]
        key_default = key[1]

    for cls in utils.iter_subclasses(spec_cls):
        if not hasattr(cls, '_polymorphic_value'):
            raise exc.DSLParsingException(
                "Class '%s' is expected to have attribute"
                " '_polymorphic_value' because it's a part of the"
                " specification hierarchy inherited from class '%s'."
                % (cls, spec_cls)
            )

        if cls._polymorphic_value == data.get(key_name, key_default):
            spec = cls(data, validate)

            if validate:
                spec.validate_semantics()

            return spec

    raise exc.DSLParsingException(
        'Failed to find a specification class to instantiate '
        '[spec_cls=%s, data=%s]' % (spec_cls, data)
    )
    processed = sa.Column(sa.BOOLEAN, default=False)

    # Data Flow properties.
    in_context = sa.Column(st.JsonLongDictType())
    published = sa.Column(st.JsonLongDictType())

    @property
    def executions(self):
        return (
            self.action_executions
            if not self.spec.get('workflow')
            else self.workflow_executions
        )


for cls in utils.iter_subclasses(Execution):
    event.listen(
        # Catch and trim Execution.state_info to always fit allocated size.
        # Note that the limit is 65500, which is less than 65535 (2^16 - 1).
        # The reason is that utils.cut() is not exactly accurate when the
        # value is not a string but, for example, a dictionary. If the limit
        # were exactly 65535, the result could once in a while go slightly
        # beyond the allowed maximum size, depending on the order of keys in
        # the string representation and other details hidden inside
        # utils.cut_dict().
        cls.state_info,
        'set',
        lambda t, v, o, i: utils.cut(v, 65500),
        retval=True
    )
def instantiate_spec(spec_cls, data, validate=False):
    """Instantiates a specification, accounting for specification hierarchies.

    :param spec_cls: Specification concrete or base class. If a base class
        (i.e. a hierarchy) is given, this method relies on the
        _polymorphic_key and _polymorphic_value attributes to find the
        concrete class that needs to be instantiated.
    :param data: Raw specification data as a dictionary.
    :type data: dict
    :param validate: If False, semantics and schema validation are skipped.
    :type validate: bool
    """
    if issubclass(spec_cls, BaseSpecList):
        # Ignore polymorphic search for specification lists because
        # it doesn't make sense for them.
        return spec_cls(data, validate)

    if not hasattr(spec_cls, '_polymorphic_key'):
        spec = spec_cls(data, validate)

        if validate:
            spec.validate_semantics()

        return spec

    # In order to do polymorphic search we need to make sure that
    # a spec is backed by a dictionary. Otherwise we can't extract
    # a polymorphic key.
    if not isinstance(data, dict):
        raise exc.InvalidModelException(
            "A specification with a polymorphic key must be backed by"
            " a dictionary [spec_cls=%s, data=%s]" % (spec_cls, data)
        )

    key = spec_cls._polymorphic_key

    if not isinstance(key, tuple):
        key_name = key
        key_default = None
    else:
        key_name = key[0]
        key_default = key[1]

    for cls in utils.iter_subclasses(spec_cls):
        if not hasattr(cls, '_polymorphic_value'):
            raise exc.DSLParsingException(
                "Class '%s' is expected to have attribute"
                " '_polymorphic_value' because it's a part of the"
                " specification hierarchy inherited from class '%s'."
                % (cls, spec_cls)
            )

        if cls._polymorphic_value == data.get(key_name, key_default):
            spec = cls(data, validate)

            if validate:
                spec.validate_semantics()

            return spec

    raise exc.DSLParsingException(
        'Failed to find a specification class to instantiate '
        '[spec_cls=%s, data=%s]' % (spec_cls, data)
    )
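# A hedged illustration of the polymorphic dispatch performed by
# instantiate_spec(). The spec classes and the 'type' key below are
# hypothetical; only the _polymorphic_key / _polymorphic_value convention
# mirrors the code above.
class ExampleTaskSpec(object):
    # (key name, default used when the key is absent from the data)
    _polymorphic_key = ('type', 'direct')

    def __init__(self, data):
        self.data = data


class ExampleDirectTaskSpec(ExampleTaskSpec):
    _polymorphic_value = 'direct'


class ExampleReverseTaskSpec(ExampleTaskSpec):
    _polymorphic_value = 'reverse'


def _instantiate_example_spec(data):
    key_name, key_default = ExampleTaskSpec._polymorphic_key

    for cls in ExampleTaskSpec.__subclasses__():
        if cls._polymorphic_value == data.get(key_name, key_default):
            return cls(data)

    raise ValueError('No spec class for data: %s' % data)


# The default kicks in when the key is missing, exactly as in the tuple
# form of _polymorphic_key handled above.
assert isinstance(_instantiate_example_spec({}), ExampleDirectTaskSpec)
assert isinstance(
    _instantiate_example_spec({'type': 'reverse'}), ExampleReverseTaskSpec
)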