class Risk(Roleable, HasObjectState, mixins.CustomAttributable, Relatable, Personable, PublicDocumentable, mixins.LastDeprecatedTimeboxed, mixins.BusinessObject, Indexed, db.Model): __tablename__ = 'risks' # Overriding mixin to make mandatory @declared_attr def description(cls): # pylint: disable=no-self-argument return deferred(db.Column(db.Text, nullable=False), cls.__name__) risk_objects = db.relationship('RiskObject', backref='risk', cascade='all, delete-orphan') objects = association_proxy('risk_objects', 'object', 'RiskObject') _api_attrs = reflection.ApiAttributes( 'risk_objects', reflection.Attribute('objects', create=False, update=False), ) _aliases = { "document_url": None, "document_evidence": None, "status": { "display_name": "State", "mandatory": False, "description": "Options are: \n {}".format('\n'.join( mixins.BusinessObject.VALID_STATES)) } }
class Risk(Roleable, HasObjectState, mixins.CustomAttributable,
           mixins.Stateful, Relatable, mixins.Described, Ownable, Personable,
           mixins.Titled, mixins.Timeboxed, mixins.Noted, mixins.Hyperlinked,
           mixins.Slugged, Indexed, db.Model):
  """Risk model.

  A stateful business object; states are restricted to VALID_STATES below
  (enforced by the Stateful mixin).
  """

  __tablename__ = 'risks'

  # Allowed values for the `status` column (consumed by mixins.Stateful).
  VALID_STATES = ['Draft', 'Deprecated', 'Active']

  # Overriding mixin to make mandatory
  @declared_attr
  def description(cls):  # pylint: disable=no-self-argument
    return deferred(db.Column(db.Text, nullable=False), cls.__name__)

  # Join objects linking this risk to arbitrary mapped objects; deleting the
  # risk deletes the mappings (delete-orphan cascade).
  risk_objects = db.relationship(
      'RiskObject', backref='risk', cascade='all, delete-orphan')
  # Convenience proxy exposing the mapped objects directly.
  objects = association_proxy('risk_objects', 'object', 'RiskObject')

  # REST API surface: `objects` is publish-only (read-only, derived).
  _publish_attrs = [
      'risk_objects',
      PublishOnly('objects'),
  ]

  # Import/export column aliases.
  _aliases = {
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are: \n {}".format('\n'.join(VALID_STATES))
      }
  }
class Risk(HasObjectState, mixins.CustomAttributable, mixins.Stateful,
           Relatable, mixins.Described, Ownable, Personable,
           mixins.WithContact, mixins.Titled, mixins.Timeboxed,
           mixins.Slugged, mixins.Noted, mixins.Hyperlinked, mixins.Base,
           db.Model):
  """Risk model.

  Variant with contact fields (via mixins.WithContact); states restricted
  to VALID_STATES below.
  """

  __tablename__ = 'risks'

  # Allowed values for the `status` column (consumed by mixins.Stateful).
  VALID_STATES = ['Draft', 'Deprecated', 'Active']

  # Overriding mixin to make mandatory
  @declared_attr
  def description(cls):  # pylint: disable=no-self-argument
    return deferred(db.Column(db.Text, nullable=False), cls.__name__)

  # Join objects linking this risk to arbitrary mapped objects; deleting the
  # risk deletes the mappings (delete-orphan cascade).
  risk_objects = db.relationship(
      'RiskObject', backref='risk', cascade='all, delete-orphan')
  # Convenience proxy exposing the mapped objects directly.
  objects = association_proxy('risk_objects', 'object', 'RiskObject')

  # REST API surface: `objects` is publish-only (read-only, derived).
  _publish_attrs = [
      'risk_objects',
      PublishOnly('objects'),
  ]

  # Import/export column aliases; secondary_contact suppressed.
  _aliases = {
      "contact": {
          "display_name": "Contact",
          "filter_by": "_filter_by_contact",
      },
      "secondary_contact": None,
  }
class Workflow(mixins.CustomAttributable, HasOwnContext, mixins.Timeboxed,
               mixins.Described, mixins.Titled, mixins.Notifiable,
               mixins.Stateful, mixins.Slugged, mixins.Folderable,
               Indexed, db.Model):
  """Basic Workflow first class object.

  A workflow owns task groups and cycles; recurrence is described by the
  (`unit`, `repeat_every`) pair instead of the legacy `frequency` column.
  """

  __tablename__ = 'workflows'
  _title_uniqueness = False

  # Workflow lifecycle states; new workflows start in DRAFT.
  DRAFT = u"Draft"
  ACTIVE = u"Active"
  INACTIVE = u"Inactive"
  VALID_STATES = [DRAFT, ACTIVE, INACTIVE]

  @classmethod
  def default_status(cls):
    """Return the state a freshly created workflow starts in."""
    return cls.DRAFT

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=False, default=u""), 'Workflow')
  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  workflow_people = db.relationship(
      'WorkflowPerson', backref='workflow', cascade='all, delete-orphan')
  # Convenience proxy exposing Person objects behind workflow_people.
  people = association_proxy(
      'workflow_people', 'person', 'WorkflowPerson')

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')
  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)
  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # this is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean, default=IS_VERIFICATION_NEEDED_DEFAULT, nullable=False)

  # Recurrence setup: `repeat_every` counts `unit`s between cycles;
  # both NULL means a one-time workflow.
  repeat_every = deferred(
      db.Column(db.Integer, nullable=True, default=None), 'Workflow')
  DAY_UNIT = 'day'
  WEEK_UNIT = 'week'
  MONTH_UNIT = 'month'
  VALID_UNITS = (DAY_UNIT, WEEK_UNIT, MONTH_UNIT)
  unit = deferred(
      db.Column(db.Enum(*VALID_UNITS), nullable=True, default=None),
      'Workflow')
  # Number of completed repetitions; multiplied with repeat_every to get the
  # total offset from the original setup date.
  repeat_multiplier = deferred(
      db.Column(db.Integer, nullable=False, default=0), 'Workflow')

  # Maps `unit` values to the legacy "frequency" strings used by SearchAPI.
  UNIT_FREQ_MAPPING = {
      None: "one_time",
      DAY_UNIT: "daily",
      WEEK_UNIT: "weekly",
      MONTH_UNIT: "monthly"
  }

  @hybrid.hybrid_property
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return self.UNIT_FREQ_MAPPING[self.unit]

  @frequency.expression
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return case([
        (self.unit.is_(None), self.UNIT_FREQ_MAPPING[None]),
        (self.unit == self.DAY_UNIT,
         self.UNIT_FREQ_MAPPING[self.DAY_UNIT]),
        (self.unit == self.WEEK_UNIT,
         self.UNIT_FREQ_MAPPING[self.WEEK_UNIT]),
        (self.unit == self.MONTH_UNIT,
         self.UNIT_FREQ_MAPPING[self.MONTH_UNIT]),
    ])

  @property
  def tasks(self):
    """All TaskGroupTasks across all task groups of this workflow."""
    return list(itertools.chain(*[t.task_group_tasks
                                  for t in self.task_groups]))

  @property
  def min_task_start_date(self):
    """Fetches non adjusted setup cycle start date based on TGT user's setup.

    Returns:
      Date when the first cycle should be started based on user's setup,
      or None if the workflow has no tasks.
    """
    min_date = None
    for task in self.tasks:
      min_date = min(task.start_date, min_date or task.start_date)
    return min_date

  WORK_WEEK_LEN = 5

  @classmethod
  def first_work_day(cls, day):
    """Shift `day` backwards to the nearest working, non-holiday day."""
    holidays = google_holidays.GoogleHolidays()
    while day.isoweekday() > cls.WORK_WEEK_LEN or day in holidays:
      day -= relativedelta.relativedelta(days=1)
    return day

  def calc_next_adjusted_date(self, setup_date):
    """Calculates adjusted date which are expected in next cycle.

    Args:
      setup_date: Date which was setup by user.

    Returns:
      Adjusted date which are expected to be in next Workflow cycle.

    Raises:
      ValueError: if `unit` holds a value outside VALID_UNITS.
    """
    if self.repeat_every is None or self.unit is None:
      return self.first_work_day(setup_date)
    try:
      key = {
          self.WEEK_UNIT: "weeks",
          self.MONTH_UNIT: "months",
          self.DAY_UNIT: "days",
      }[self.unit]
    except KeyError:
      raise ValueError("Invalid Workflow unit")
    repeater = self.repeat_every * self.repeat_multiplier
    if self.unit == self.DAY_UNIT:
      # Day-based repetition counts working days only: split the offset
      # into full work weeks plus leftover days.
      weeks = repeater / self.WORK_WEEK_LEN
      days = repeater % self.WORK_WEEK_LEN
      # append weekends if it's needed
      days += ((setup_date.isoweekday() + days) > self.WORK_WEEK_LEN) * 2
      return setup_date + relativedelta.relativedelta(
          setup_date, weeks=weeks, days=days)
    calc_date = setup_date + relativedelta.relativedelta(
        setup_date,
        **{key: repeater}
    )
    if self.unit == self.MONTH_UNIT:
      # check if setup date is the last day of the month
      # and if it is then calc_date should be the last day of the month too
      setup_day = calendar.monthrange(setup_date.year, setup_date.month)[1]
      if setup_day == setup_date.day:
        calc_date = datetime.date(
            calc_date.year,
            calc_date.month,
            calendar.monthrange(calc_date.year, calc_date.month)[1])
    return self.first_work_day(calc_date)

  @orm.validates('repeat_every')
  def validate_repeat_every(self, _, value):
    """Validate repeat_every field for Workflow.

    repeat_every shouldn't have 0 value.
    """
    if value is not None and not isinstance(value, (int, long)):
      raise ValueError("'repeat_every' should be integer or 'null'")
    if value is not None and value <= 0:
      raise ValueError("'repeat_every' should be strictly greater than 0")
    return value

  @orm.validates('unit')
  def validate_unit(self, _, value):
    """Validate unit field for Workflow.

    Unit should have one of the value from VALID_UNITS list or None.
    """
    if value is not None and value not in self.VALID_UNITS:
      raise ValueError("'unit' field should be one of the "
                       "value: null, {}".format(", ".join(self.VALID_UNITS)))
    return value

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, _, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    It's not allowed to change is_verification_needed flag after creation
    (once the workflow has left DRAFT state).
    If is_verification_needed doesn't send,
    then is_verification_needed flag is True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if self.status != self.DRAFT and value != self.is_verification_needed:
      raise ValueError("is_verification_needed value isn't changeble "
                       "on workflow with '{}' status".format(self.status))
    return value

  @builder.simple_property
  def workflow_state(self):
    # Derived overall state computed from this workflow's cycles.
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  # REST API surface; derived/internal fields are read-only.
  _api_attrs = reflection.ApiAttributes(
      'workflow_people',
      reflection.Attribute('people', create=False, update=False),
      'task_groups',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      'is_verification_needed',
      'repeat_every',
      'unit',
      reflection.Attribute('next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('non_adjusted_next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('workflow_state',
                           create=False, update=False),
      reflection.Attribute('kind',
                           create=False, update=False),
  )

  # Import/export column aliases.
  _aliases = {
      "repeat_every": {
          "display_name": "Repeat Every",
          "description": "'Repeat Every' value\nmust fall into\nthe range 1~30"
                         "\nor '-' for None",
      },
      "unit": {
          "display_name": "Unit",
          "description": "Allowed values for\n'Unit' are:\n{}"
                         "\nor '-' for None".format("\n".join(VALID_UNITS)),
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
          "description": "This field is not changeable\nafter creation.",
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": {
          "display_name": "Force real-time email updates",
          "mandatory": False,
      },
      "workflow_owner": {
          "display_name": "Manager",
          "mandatory": True,
          "filter_by": "_filter_by_workflow_owner",
      },
      "workflow_member": {
          "display_name": "Member",
          "filter_by": "_filter_by_workflow_member",
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  @classmethod
  def _filter_by_workflow_owner(cls, predicate):
    return cls._filter_by_role("WorkflowOwner", predicate)

  @classmethod
  def _filter_by_workflow_member(cls, predicate):
    return cls._filter_by_role("WorkflowMember", predicate)

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    columns = ['title', 'description', 'notify_on_change',
               'notify_custom_message', 'end_date', 'start_date',
               'repeat_every', 'unit', 'is_verification_needed']
    target = self.copy_into(_other, columns, **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True)
        )

    return target

  @classmethod
  def eager_query(cls):
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups').undefer_group('TaskGroup_complete'),
        orm.subqueryload(
            'task_groups'
        ).subqueryload(
            "task_group_tasks"
        ).undefer_group(
            'TaskGroupTask_complete'
        ),
        orm.subqueryload('workflow_people'),
    )

  @classmethod
  def indexed_query(cls):
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Workflow_complete",
        ),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.
    If such workflow does not exist it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists.
    # NOTE: `Workflow.unit is None` (Python identity test on a Column) was
    # always False here, so the filter could never match and a duplicate
    # backlog workflow was created on every call; the SQL NULL test must be
    # expressed with `.is_(None)`.
    backlog_workflows = Workflow.query.filter(
        and_(Workflow.kind == "Backlog",
             # the following means one_time wf
             Workflow.unit.is_(None))
    ).all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    db.session.flush(backlog_workflow)
    # create a cycle
    backlog_cycle = cycle.Cycle(description="Backlog workflow",
                                title="Backlog (one time)",
                                is_current=1,
                                status="Assigned",
                                start_date=None,
                                end_date=None,
                                context=backlog_workflow
                                .get_or_create_object_context(),
                                workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group\
        .CycleTaskGroup(description="Backlog workflow taskgroup",
                        title="Backlog TaskGroup",
                        cycle=backlog_cycle,
                        status="InProgress",
                        start_date=None,
                        end_date=None,
                        context=backlog_workflow
                        .get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    indexer = get_indexer()
    indexer.create_record(indexer.fts_record_for(backlog_workflow))
    return "Backlog workflow created"
class TaskGroup(WithContact, Timeboxed, Described, Titled, Slugged, Indexed,
                db.Model):
  """Workflow TaskGroup model."""

  __tablename__ = 'task_groups'
  _title_uniqueness = False

  # Owning workflow; DB-level cascade removes task groups with the workflow.
  workflow_id = db.Column(
      db.Integer,
      db.ForeignKey('workflows.id', ondelete="CASCADE"),
      nullable=False,
  )
  lock_task_order = db.Column(db.Boolean(), nullable=True)

  # Join objects mapping arbitrary model instances into this task group.
  task_group_objects = db.relationship(
      'TaskGroupObject', backref='task_group', cascade='all, delete-orphan')

  # Convenience proxy exposing the mapped objects directly.
  objects = association_proxy(
      'task_group_objects', 'object', 'TaskGroupObject')

  task_group_tasks = db.relationship(
      'TaskGroupTask', backref='task_group', cascade='all, delete-orphan')

  # No delete-orphan: cycle task groups outlive their template task group.
  cycle_task_groups = db.relationship('CycleTaskGroup', backref='task_group')

  sort_index = db.Column(db.String(length=250), default="", nullable=False)

  # REST API surface: `objects` is publish-only (derived).
  _publish_attrs = [
      'workflow',
      'task_group_objects',
      PublishOnly('objects'),
      'task_group_tasks',
      'lock_task_order',
      'sort_index',
      # Intentionally do not include `cycle_task_groups`
      # 'cycle_task_groups',
  ]

  # Import/export column aliases.
  _aliases = {
      "title": "Summary",
      "description": "Details",
      "contact": {
          "display_name": "Assignee",
          "mandatory": True,
      },
      "secondary_contact": None,
      "start_date": None,
      "end_date": None,
      "workflow": {
          "display_name": "Workflow",
          "mandatory": True,
          "filter_by": "_filter_by_workflow",
      },
      "task_group_objects": {
          "display_name": "Objects",
          "type": AttributeInfo.Type.SPECIAL_MAPPING,
          "filter_by": "_filter_by_objects",
      },
  }

  def copy(self, _other=None, **kwargs):
    """Create a copy of this task group.

    Keyword args `clone_people`, `clone_objects` and `clone_tasks` control
    how much of the task group is duplicated; without `clone_people` the
    current user becomes the contact of the copy.
    """
    columns = [
        'title', 'description', 'workflow', 'sort_index', 'modified_by',
        'context'
    ]

    if kwargs.get('clone_people', False) and getattr(self, "contact"):
      columns.append("contact")
    else:
      kwargs["contact"] = get_current_user()

    target = self.copy_into(_other, columns, **kwargs)

    if kwargs.get('clone_objects', False):
      self.copy_objects(target, **kwargs)

    if kwargs.get('clone_tasks', False):
      self.copy_tasks(target, **kwargs)

    return target

  def copy_objects(self, target, **kwargs):
    """Copy all object mappings of this task group onto `target`."""
    # pylint: disable=unused-argument
    for task_group_object in self.task_group_objects:
      target.task_group_objects.append(task_group_object.copy(
          task_group=target,
          context=target.context,
      ))

    return target

  def copy_tasks(self, target, **kwargs):
    """Copy all tasks of this task group onto `target`."""
    for task_group_task in self.task_group_tasks:
      target.task_group_tasks.append(task_group_task.copy(
          None,
          task_group=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
      ))

    return target

  @classmethod
  def _filter_by_workflow(cls, predicate):
    """EXISTS subquery matching the owning workflow by slug or title."""
    from ggrc_workflows.models import Workflow
    return Workflow.query.filter(
        (Workflow.id == cls.workflow_id) &
        (predicate(Workflow.slug) | predicate(Workflow.title))
    ).exists()

  @classmethod
  def _filter_by_objects(cls, predicate):
    """EXISTS subquery matching any mapped object by slug (or email)."""
    parts = []
    for model_name in all_models.__all__:
      model = getattr(all_models, model_name)
      query = getattr(model, "query", None)
      # Prefer slug; fall back to email (e.g. Person has no slug).
      field = getattr(model, "slug", getattr(model, "email", None))
      if query is None or field is None or not hasattr(model, "id"):
        continue
      parts.append(query.filter(
          (TaskGroupObject.object_type == model_name) &
          (model.id == TaskGroupObject.object_id) &
          predicate(field)
      ).exists())
    return TaskGroupObject.query.filter(
        (TaskGroupObject.task_group_id == cls.id) &
        or_(*parts)
    ).exists()
class TaskGroup(roleable.Roleable, relationship.Relatable, WithContact,
                Timeboxed, Described, Titled, base.ContextRBAC, Slugged,
                Indexed, db.Model):
  """Workflow TaskGroup model."""

  __tablename__ = 'task_groups'
  _title_uniqueness = False

  # Owning workflow; DB-level cascade removes task groups with the workflow.
  workflow_id = db.Column(
      db.Integer,
      db.ForeignKey('workflows.id', ondelete="CASCADE"),
      nullable=False,
  )
  lock_task_order = db.Column(db.Boolean(), nullable=True)

  # Backref is `_task_group` because `workflow` (and hence `task_group`
  # naming symmetry) is wrapped by the hybrid property below.
  task_group_objects = db.relationship(
      'TaskGroupObject', backref='_task_group', cascade='all, delete-orphan')

  # Convenience proxy exposing the mapped objects directly.
  objects = association_proxy(
      'task_group_objects', 'object', 'TaskGroupObject')

  task_group_tasks = db.relationship(
      'TaskGroupTask', backref='_task_group', cascade='all, delete-orphan')

  # No delete-orphan: cycle task groups outlive their template task group.
  cycle_task_groups = db.relationship('CycleTaskGroup', backref='task_group')

  sort_index = db.Column(db.String(length=250), default="", nullable=False)

  # REST API surface: `objects` is read-only (derived).
  _api_attrs = reflection.ApiAttributes(
      'workflow',
      'task_group_objects',
      reflection.Attribute('objects', create=False, update=False),
      'task_group_tasks',
      'lock_task_order',
      'sort_index',
      # Intentionally do not include `cycle_task_groups`
      # 'cycle_task_groups',
  )

  # Import/export column aliases.
  _aliases = {
      "title": "Summary",
      "description": "Details",
      "contact": {
          "display_name": "Assignee",
          "mandatory": True,
          "description": ("One person could be added "
                          "as a Task Group assignee")
      },
      "secondary_contact": None,
      "start_date": None,
      "end_date": None,
      "workflow": {
          "display_name": "Workflow",
          "mandatory": True,
          "filter_by": "_filter_by_workflow",
      },
      "task_group_objects": {
          "display_name": "Objects",
          "type": AttributeInfo.Type.SPECIAL_MAPPING,
          "filter_by": "_filter_by_objects",
      },
  }

  # This parameter is overridden by workflow backref, but is here to ensure
  # pylint does not complain
  _workflow = None

  @hybrid.hybrid_property
  def workflow(self):
    """Getter for workflow foreign key."""
    return self._workflow

  @workflow.setter
  def workflow(self, workflow):
    """Setter for workflow foreign key."""
    # Create a Relationship on first assignment only; later assignments
    # just update the backref attribute.
    if not self._workflow and workflow:
      all_models.Relationship(source=workflow, destination=self)
    self._workflow = workflow

  def ensure_assignee_is_workflow_member(self):  # pylint: disable=invalid-name
    """Add Workflow Member role to user without role in scope of Workflow."""
    people_with_role_ids = (
        self.workflow.get_person_ids_for_rolename("Admin") +
        self.workflow.get_person_ids_for_rolename("Workflow Member"))
    if self.contact.id in people_with_role_ids:
      return
    self.workflow.add_person_with_role_name(self.contact, "Workflow Member")

  def copy(self, _other=None, **kwargs):
    """Create a copy of this task group.

    Keyword args `clone_people`, `clone_objects` and `clone_tasks` control
    how much of the task group is duplicated; without `clone_people` the
    current user becomes the contact of the copy. The copy's assignee is
    granted the Workflow Member role if needed.
    """
    columns = [
        'title', 'description', 'workflow', 'sort_index', 'modified_by',
        'context'
    ]

    if kwargs.get('clone_people', False) and getattr(self, "contact"):
      columns.append("contact")
    else:
      kwargs["contact"] = get_current_user()

    target = self.copy_into(_other, columns, **kwargs)

    target.ensure_assignee_is_workflow_member()

    if kwargs.get('clone_objects', False):
      self.copy_objects(target, **kwargs)

    if kwargs.get('clone_tasks', False):
      self.copy_tasks(target, **kwargs)

    return target

  def copy_objects(self, target, **kwargs):
    """Copy all object mappings of this task group onto `target`."""
    # pylint: disable=unused-argument
    for task_group_object in self.task_group_objects:
      target.task_group_objects.append(task_group_object.copy(
          task_group=target,
          context=target.context,
      ))

    return target

  def copy_tasks(self, target, **kwargs):
    """Copy all tasks of this task group onto `target`."""
    for task_group_task in self.task_group_tasks:
      target.task_group_tasks.append(task_group_task.copy(
          None,
          task_group=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
      ))

    return target

  @classmethod
  def eager_query(cls):
    """Eager-load mapped objects and tasks with the task group."""
    query = super(TaskGroup, cls).eager_query()
    return query.options(
        orm.Load(cls).subqueryload('task_group_objects'),
        orm.Load(cls).subqueryload('task_group_tasks'))

  @classmethod
  def _filter_by_workflow(cls, predicate):
    """EXISTS subquery matching the owning workflow by slug or title."""
    from ggrc_workflows.models import Workflow
    return Workflow.query.filter(
        (Workflow.id == cls.workflow_id) &
        (predicate(Workflow.slug) | predicate(Workflow.title))
    ).exists()

  @classmethod
  def _filter_by_objects(cls, predicate):
    """EXISTS subquery matching any mapped object by slug (or email)."""
    parts = []
    for model_name in all_models.__all__:
      model = getattr(all_models, model_name)
      query = getattr(model, "query", None)
      # Prefer slug; fall back to email (e.g. Person has no slug).
      field = getattr(model, "slug", getattr(model, "email", None))
      if query is None or field is None or not hasattr(model, "id"):
        continue
      parts.append(query.filter(
          (TaskGroupObject.object_type == model_name) &
          (model.id == TaskGroupObject.object_id) &
          predicate(field)
      ).exists())
    return TaskGroupObject.query.filter(
        (TaskGroupObject.task_group_id == cls.id) &
        or_(*parts)
    ).exists()
class Workflow(mixins.CustomAttributable, HasOwnContext, mixins.Timeboxed,
               mixins.Described, mixins.Titled, mixins.Notifiable,
               mixins.Stateful, mixins.Slugged, Indexed, db.Model):
  """Basic Workflow first class object.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  VALID_STATES = [u"Draft", u"Active", u"Inactive"]

  # valid Frequency to user readable values mapping
  VALID_FREQUENCIES = {
      "one_time": "one time",
      "weekly": "weekly",
      "monthly": "monthly",
      "quarterly": "quarterly",
      "annually": "annually"
  }

  @classmethod
  def default_frequency(cls):
    """Frequency used when none is supplied."""
    return 'one_time'

  @orm.validates('frequency')
  def validate_frequency(self, _, value):
    """Make sure that value is listed in valid frequencies.

    Args:
      value: A string value for requested frequency

    Returns:
      default_frequency which is 'one_time' if the value is None, or the
      value itself.

    Raises:
      Value error, if the value is not in the VALID_FREQUENCIES
    """
    if value is None:
      value = self.default_frequency()
    if value not in self.VALID_FREQUENCIES:
      message = u"Invalid state '{}'".format(value)
      raise ValueError(message)
    return value

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=True), 'Workflow')
  frequency = deferred(
      db.Column(db.String, nullable=True, default=default_frequency),
      'Workflow'
  )
  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  workflow_people = db.relationship(
      'WorkflowPerson', backref='workflow', cascade='all, delete-orphan')
  # Convenience proxy exposing Person objects behind workflow_people.
  people = association_proxy(
      'workflow_people', 'person', 'WorkflowPerson')

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')
  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)
  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # this is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean, default=IS_VERIFICATION_NEEDED_DEFAULT, nullable=False)

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, key, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    It's not allowed to change is_verification_needed flag after creation.
    If is_verification_needed doesn't send, then is_verification_needed flag
    is True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if value != self.is_verification_needed:
      raise ValueError("is_verification_needed value isn't changeble")
    return value

  @builder.simple_property
  def workflow_state(self):
    # Derived overall state computed from this workflow's cycles.
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  # REST API surface: derived/internal fields are publish-only.
  _publish_attrs = [
      'workflow_people',
      reflection.PublishOnly('people'),
      'task_groups',
      'frequency',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      'is_verification_needed',
      reflection.PublishOnly('next_cycle_start_date'),
      reflection.PublishOnly('non_adjusted_next_cycle_start_date'),
      reflection.PublishOnly('workflow_state'),
      reflection.PublishOnly('kind'),
  ]

  # Index the human-readable frequency string instead of the raw value.
  _fulltext_attrs = [
      ValueMapFullTextAttr(
          "frequency",
          "frequency",
          value_map=VALID_FREQUENCIES,
      )
  ]

  # Import/export column aliases.
  _aliases = {
      "frequency": {
          "display_name": "Frequency",
          "mandatory": True,
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": "Force real-time email updates",
      "workflow_owner": {
          "display_name": "Manager",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "mandatory": True,
          "filter_by": "_filter_by_workflow_owner",
      },
      "workflow_member": {
          "display_name": "Member",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "filter_by": "_filter_by_workflow_member",
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  @classmethod
  def _filter_by_workflow_owner(cls, predicate):
    return cls._filter_by_role("WorkflowOwner", predicate)

  @classmethod
  def _filter_by_workflow_member(cls, predicate):
    return cls._filter_by_role("WorkflowMember", predicate)

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    columns = [
        'title', 'description', 'notify_on_change', 'notify_custom_message',
        'frequency', 'end_date', 'start_date'
    ]
    target = self.copy_into(_other, columns, **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True)
        )

    return target

  @classmethod
  def eager_query(cls):
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups'),
        orm.subqueryload('workflow_people'),
    )

  @classmethod
  def indexed_query(cls):
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Workflow_complete",
        ),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.
    If such workflow does not exist it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists
    backlog_workflows = Workflow.query\
        .filter(and_
                (Workflow.kind == "Backlog",
                 Workflow.frequency == "one_time"))\
        .all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                frequency="one_time",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    # NOTE(review): session.flush expects an iterable for its `objects`
    # argument; passing the instance directly looks suspect — confirm.
    db.session.flush(backlog_workflow)
    # create a cycle
    backlog_cycle = cycle.Cycle(description="Backlog workflow",
                                title="Backlog (one time)",
                                is_current=1,
                                status="Assigned",
                                start_date=None,
                                end_date=None,
                                context=backlog_workflow
                                .get_or_create_object_context(),
                                workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group\
        .CycleTaskGroup(description="Backlog workflow taskgroup",
                        title="Backlog TaskGroup",
                        cycle=backlog_cycle,
                        status="InProgress",
                        start_date=None,
                        end_date=None,
                        context=backlog_workflow
                        .get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    indexer = get_indexer()
    indexer.create_record(indexer.fts_record_for(backlog_workflow))
    return "Backlog workflow created"
class Workflow(mixins.CustomAttributable, HasOwnContext, mixins.Timeboxed,
               mixins.Described, mixins.Titled, mixins.Slugged,
               mixins.Stateful, mixins.Base, db.Model):
  """Basic Workflow first class object.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  # Valid lifecycle states for a workflow (used by mixins.Stateful).
  VALID_STATES = [u"Draft", u"Active", u"Inactive"]
  # Valid values for the `frequency` column.
  VALID_FREQUENCIES = [
      "one_time",
      "weekly",
      "monthly",
      "quarterly",
      "annually"
  ]

  @classmethod
  def default_frequency(cls):
    """Frequency used when none is supplied."""
    return 'one_time'

  @orm.validates('frequency')
  def validate_frequency(self, _, value):
    """Make sure that value is listed in valid frequencies.

    Args:
      value: A string value for requested frequency

    Returns:
      default_frequency which is 'one_time' if the value is None, or the
      value itself.

    Raises:
      ValueError: if the value is not None and not in VALID_FREQUENCIES.
    """
    if value is None:
      value = self.default_frequency()
    if value not in self.VALID_FREQUENCIES:
      # BUG FIX: the message previously said "Invalid state" even though
      # this validator checks the frequency value.
      message = u"Invalid frequency '{}'".format(value)
      raise ValueError(message)
    return value

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=True), 'Workflow')
  # Note: `default_frequency` must already be defined above so it can be
  # used as the column default here.
  frequency = deferred(
      db.Column(db.String, nullable=True, default=default_frequency),
      'Workflow')
  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')

  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  workflow_people = db.relationship(
      'WorkflowPerson', backref='workflow', cascade='all, delete-orphan')
  people = association_proxy('workflow_people', 'person', 'WorkflowPerson')

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')

  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)
  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # This is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks.
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet.
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  @computed_property
  def workflow_state(self):
    """Aggregate state computed from this workflow's cycles."""
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  _publish_attrs = [
      'workflow_people',
      reflection.PublishOnly('people'),
      'task_groups',
      'frequency',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      reflection.PublishOnly('next_cycle_start_date'),
      reflection.PublishOnly('non_adjusted_next_cycle_start_date'),
      reflection.PublishOnly('workflow_state'),
      reflection.PublishOnly('kind'),
  ]

  _aliases = {
      "frequency": {
          "display_name": "Frequency",
          "mandatory": True,
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": "Force real-time email updates",
      "workflow_owner": {
          "display_name": "Manager",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "mandatory": True,
          "filter_by": "_filter_by_workflow_owner",
      },
      "workflow_member": {
          "display_name": "Member",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "filter_by": "_filter_by_workflow_member",
      },
      "workflow_mapped": {
          "display_name": "No Access",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "filter_by": "_filter_by_no_access",
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  @classmethod
  def _filter_by_workflow_owner(cls, predicate):
    """Filter workflows by people holding the WorkflowOwner role."""
    return cls._filter_by_role("WorkflowOwner", predicate)

  @classmethod
  def _filter_by_workflow_member(cls, predicate):
    """Filter workflows by people holding the WorkflowMember role."""
    return cls._filter_by_role("WorkflowMember", predicate)

  @classmethod
  def _filter_by_no_access(cls, predicate):
    """Get query that filters workflows with mapped users.

    Args:
      predicate: lambda function that accepts a single parameter and returns
        true or false.

    Returns:
      An sqlalchemy query that evaluates to true or false and can be used in
      filtering workflows by no_access users.
    """
    # A mapped person has "no access" when there is no UserRole for them in
    # this workflow's context.
    is_no_access = not_(
        UserRole.query.filter(
            (UserRole.person_id == Person.id) &
            (UserRole.context_id ==
             workflow_person.WorkflowPerson.context_id)).exists())
    return workflow_person.WorkflowPerson.query.filter(
        (cls.id == workflow_person.WorkflowPerson.workflow_id) &
        is_no_access).join(Person).filter(
        (predicate(Person.name) | predicate(Person.email))).exists()

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    # Only these scalar attributes are carried over; cycles, task groups and
    # people mappings are copied separately via copy_task_groups.
    columns = [
        'title', 'description', 'notify_on_change', 'notify_custom_message',
        'frequency', 'end_date', 'start_date'
    ]
    target = self.copy_into(_other, columns, **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.

    Args:
      target: the workflow copy that receives the cloned task groups.
      **kwargs: clone_people / clone_objects / clone_tasks flags that control
        how much of each task group is cloned.

    Returns:
      The same target workflow with the cloned task groups appended.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        # NOTE: clone_objects defaults to True here but to False above, so
        # tasks are copied together with their mapped objects by default.
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True))

    return target

  @classmethod
  def eager_query(cls):
    # Eagerly load cycles (with their tasks), task groups and workflow
    # people in subqueries to avoid N+1 lazy loads when serializing.
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete').
        subqueryload("cycle_task_group_object_tasks").undefer_group(
            "CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups'),
        orm.subqueryload('workflow_people'),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.

    If such workflow does not exist it creates one.

    Returns:
      A short status string describing whether a new workflow was created.
    """

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      return any(cur_cycle.is_current
                 for workflow in workflows
                 for cur_cycle in workflow.cycles)

    # Check if an active backlog workflow already exists.
    backlog_workflows = Workflow.query.filter(
        and_(Workflow.kind == "Backlog",
             Workflow.frequency == "one_time")).all()

    if backlog_workflows and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"

    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                frequency="one_time",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # Create the workflow context (context=1 is the context the new one is
    # derived from).
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    # BUG FIX: Session.flush takes a sequence of objects; previously a bare
    # instance was passed, which flush() would try to iterate.
    db.session.flush([backlog_workflow])

    # Create a cycle
    backlog_cycle = cycle.Cycle(
        description="Backlog workflow",
        title="Backlog (one time)",
        is_current=1,
        status="Assigned",
        start_date=None,
        end_date=None,
        context=backlog_workflow.get_or_create_object_context(),
        workflow=backlog_workflow)

    # Create a cycle task group
    backlog_ctg = cycle_task_group.CycleTaskGroup(
        description="Backlog workflow taskgroup",
        title="Backlog TaskGroup",
        cycle=backlog_cycle,
        status="InProgress",
        start_date=None,
        end_date=None,
        context=backlog_workflow.get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # Add fulltext entries; reuse a single indexer instance instead of
    # calling get_indexer() once per record.
    indexer = get_indexer()
    indexer.create_record(fts_record_for(backlog_workflow))
    indexer.create_record(fts_record_for(backlog_cycle))
    indexer.create_record(fts_record_for(backlog_ctg))
    return "Backlog workflow created"