def created_at(cls):  # pylint: disable=no-self-argument
  """Date of creation; defaults to the database's current_timestamp."""
  column = db.Column(
      db.DateTime,
      nullable=False,
      default=db.text('current_timestamp'),
  )
  # deferred: column is loaded lazily, grouped under the declaring class name
  return deferred(column, cls.__name__)
def created_at(cls):  # pylint: disable=no-self-argument
  """Date of creation. Set to current time on object creation."""
  return deferred(
      db.Column(
          db.DateTime,
          nullable=False,
          default=db.text('current_timestamp'),
      ),
      cls.__name__,
  )
def status(cls):  # pylint: disable=no-self-argument
  """Status enum column restricted to the declaring model's VALID_STATES."""
  status_column = db.Column(
      db.Enum(*cls.VALID_STATES),
      nullable=False,
      default=cls.default_status(),
  )
  return deferred(status_column, cls.__name__)
def folder(cls):  # pylint: disable=no-self-argument
  """Folder text column; empty string by default."""
  folder_column = db.Column(db.Text, nullable=False, default="")
  return deferred(folder_column, cls.__name__)
class Assessment(Assignable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, TestPlanned, CustomAttributable,
                 WithEvidence, Commentable, Personable,
                 reminderable.Reminderable, Relatable,
                 LastDeprecatedTimeboxed, WithSimilarityScore, FinishedDate,
                 VerifiedDate, Notifiable, WithAction, labeled.Labeled,
                 with_last_comment.WithLastComment,
                 issue_tracker_mixins.IssueTrackedWithUrl, base.ContextRBAC,
                 BusinessObject, Indexed, db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not certain
  conditions were met for that object.
  """

  __tablename__ = 'assessments'
  _title_uniqueness = False

  REWORK_NEEDED = u"Rework Needed"
  # "Rework Needed" is treated as a not-done state on top of the mixin's set.
  NOT_DONE_STATES = statusable.Statusable.NOT_DONE_STATES | {
      REWORK_NEEDED,
  }
  VALID_STATES = tuple(NOT_DONE_STATES | statusable.Statusable.DONE_STATES |
                       statusable.Statusable.INACTIVE_STATES)

  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assignees",
              "In Progress": "Assignees"
          },
          "reminders": {
              "assessment_assignees_reminder",
          }
      }
  }

  design = deferred(db.Column(db.String, nullable=False, default=""),
                    "Assessment")
  operationally = deferred(db.Column(db.String, nullable=False, default=""),
                           "Assessment")
  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')
  assessment_type = deferred(
      db.Column(db.String, nullable=False, server_default="Control"),
      "Assessment")
  # whether to use the object test plan on snapshot mapping
  test_plan_procedure = db.Column(db.Boolean, nullable=False, default=True)

  @declared_attr
  def object_level_definitions(cls):  # pylint: disable=no-self-argument
    """Set up a backref so that we can create an object level custom
    attribute definition without the need to do a flush to get the
    assessment id.

    This is used in the relate_ca method in hooks/assessment.py.
    """
    return db.relationship(
        'CustomAttributeDefinition',
        primaryjoin=lambda: and_(
            remote(CustomAttributeDefinition.definition_id) == cls.id,
            remote(CustomAttributeDefinition.definition_type) ==
            "assessment"),
        foreign_keys=[
            CustomAttributeDefinition.definition_id,
            CustomAttributeDefinition.definition_type
        ],
        backref='assessment_definition',
        cascade='all, delete-orphan')

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = frozenset(
      ["Effective", "Ineffective", "Needs improvement", "Not Applicable"])

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'design',
      'operationally',
      'audit',
      'assessment_type',
      'test_plan_procedure',
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('folder', create=False, update=False),
      reflection.Attribute('object', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'design',
      'operationally',
      'folder',
  ]

  # Reindex this assessment's fulltext entries when its audit's "archived"
  # flag changes.
  AUTO_REINDEX_RULES = [
      mixin.ReindexRule("Audit", lambda x: x.assessments, ["archived"]),
  ]

  _custom_publish = {
      'audit': audit.build_audit_stub,
  }

  @classmethod
  def _populate_query(cls, query):
    # Eagerly load the full assessment, its audit, and the audit's
    # issue-tracker record to avoid N+1 queries.
    return query.options(
        orm.Load(cls).undefer_group("Assessment_complete"),
        orm.Load(cls).joinedload("audit").undefer_group("Audit_complete"),
        orm.Load(cls).joinedload("audit").joinedload(
            audit.Audit.issuetracker_issue),
    )

  @classmethod
  def eager_query(cls):
    return cls._populate_query(super(Assessment, cls).eager_query())

  @classmethod
  def indexed_query(cls):
    return super(Assessment, cls).indexed_query().options(
        orm.Load(cls).load_only(
            "id",
            "design",
            "operationally",
            "audit_id",
        ),
        orm.Load(cls).joinedload("audit").load_only("archived", "folder"),
    )

  def log_json(self):
    """Add the audit-derived folder to the standard revision JSON."""
    out_json = super(Assessment, self).log_json()
    out_json["folder"] = self.folder
    return out_json

  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "assessment_type": {
          "display_name": "Assessment Type",
          "mandatory": False,
      },
      "design": "Conclusion: Design",
      "operationally": "Conclusion: Operation",
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
      },
      "test_plan": "Assessment Procedure",
      # Currently we decided to have 'Due Date' alias for start_date,
      # but it can be changed in future
      "start_date": "Due Date",
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      },
      "issue_tracker": {
          "display_name": "Ticket Tracker",
          "mandatory": False,
          "view_only": True,
      },
  }

  @simple_property
  def archived(self):
    """Returns a boolean whether assessment is archived or not."""
    return self.audit.archived if self.audit else False

  @simple_property
  def folder(self):
    """Folder inherited from the parent audit; "" when there is no audit."""
    return self.audit.folder if self.audit else ""

  def validate_conclusion(self, value):
    """Return value when it is a valid conclusion, "" otherwise."""
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("status")
  def validate_status(self, key, value):
    """Restrict transitions out of the "Rework Needed" state."""
    value = super(Assessment, self).validate_status(key, value)
    # pylint: disable=unused-argument
    if self.status == value:
      return value
    if self.status == self.REWORK_NEEDED:
      valid_states = [self.DONE_STATE, self.FINAL_STATE, self.DEPRECATED]
      if value not in valid_states:
        # skip_rework_validation is an escape hatch set by callers that
        # need to bypass this check.
        if not getattr(self, "skip_rework_validation", False):
          raise ValueError("Assessment in `Rework Needed` "
                           "state can be only moved to: [{}]".format(
                               ",".join(valid_states)))
    return value

  @validates("operationally")
  def validate_opperationally(self, key, value):
    """Validate assessment operationally by validating conclusion"""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate assessment design by validating conclusion"""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("assessment_type")
  def validate_assessment_type(self, key, value):
    """Validate assessment type to be the same as existing model name"""
    # pylint: disable=unused-argument
    # pylint: disable=no-self-use
    from ggrc.snapshotter.rules import Types
    if value and value not in Types.all:
      raise ValueError(
          "Assessment type '{}' is not snapshotable".format(value))
    return value

  @classmethod
  def _ignore_filter(cls, _):
    # Import filter for assessment_template is a no-op by design.
    return None
def notes(cls):  # pylint: disable=no-self-argument
  """Free-form notes text column."""
  notes_column = db.Column(db.Text)
  return deferred(notes_column, cls.__name__)
def parent_id(cls):  # pylint: disable=no-self-argument
  """Self-referential foreign key to a row of the same table."""
  fk_target = '{0}.id'.format(cls.__tablename__)
  return deferred(
      db.Column(db.Integer, db.ForeignKey(fk_target)),
      cls.__name__)
def secondary_contact_id(cls):  # pylint: disable=no-self-argument
  """Foreign key to the secondary contact (people table)."""
  return deferred(db.Column(db.Integer, db.ForeignKey('people.id')),
                  cls.__name__)
def last_deprecated_date(cls):  # pylint: disable=no-self-argument
  """Nullable date column recording when the object was last deprecated."""
  return deferred(db.Column(db.Date), cls.__name__)
def categories(cls):  # pylint: disable=no-self-argument
  """Nullable string column for categories.

  NOTE(review): this calls ``deferred.deferred`` while sibling
  declarations call ``deferred`` directly — presumably ``deferred`` is
  imported as a module in this file; confirm against the imports.
  """
  return deferred.deferred(
      db.Column(db.String, nullable=True),
      cls.__name__
  )
def description(cls):  # pylint: disable=no-self-argument
  """Free-form description text column."""
  description_column = db.Column(db.Text)
  return deferred(description_column, cls.__name__)
def test_plan(cls):  # pylint: disable=no-self-argument
  """Test plan text column."""
  plan_column = db.Column(db.Text)
  return deferred(plan_column, cls.__name__)
def secondary_contact_id(cls):  # pylint: disable=no-self-argument
  """Foreign key to the secondary contact (people table)."""
  return deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), cls.__name__)
def slug(cls):  # pylint: disable=no-self-argument
  """Mandatory string column holding the object's unique code."""
  slug_column = db.Column(db.String, nullable=False)
  return deferred(slug_column, cls.__name__)
def verified_date(cls):  # pylint: disable=no-self-argument
  """Nullable date column recording when the object was verified."""
  date_column = db.Column(db.Date, nullable=True)
  return deferred(date_column, cls.__name__)
def status(cls):  # pylint: disable=no-self-argument
  """Status string column; default value supplied by cls.default_status."""
  status_column = db.Column(
      db.String,
      default=cls.default_status,
      nullable=False,
  )
  return deferred(status_column, cls.__name__)
def field_declaration(cls):  # pylint: disable=no-self-argument
  """Nullable datetime column.

  NOTE(review): name is generic; the semantics of this timestamp depend on
  the mixin user — confirm against the declaring class.
  """
  return deferred(db.Column(db.DateTime, nullable=True), cls.__name__)
def parent_id(cls):  # pylint: disable=no-self-argument
  """Self-referential foreign key to the parent row in the same table."""
  column = db.Column(
      db.Integer,
      db.ForeignKey('{0}.id'.format(cls.__tablename__)),
  )
  return deferred(column, cls.__name__)
def modified_by_id(cls):  # pylint: disable=no-self-argument
  """Id of user who did the last modification of the object."""
  modifier_column = db.Column(db.Integer)
  return deferred(modifier_column, cls.__name__)
def verified_date(cls):  # pylint: disable=no-self-argument
  """Nullable datetime recording when the object was verified."""
  column = db.Column(db.DateTime, nullable=True)
  return deferred(column, cls.__name__)
def end_date(cls):  # pylint: disable=no-self-argument
  """Nullable date column marking the end of the timeboxed period."""
  return deferred(db.Column(db.Date), cls.__name__)
def end_date(cls):  # pylint: disable=no-self-argument
  """Nullable date column marking the end of the timeboxed period."""
  return deferred(db.Column(db.Date), cls.__name__)
def secondary_contact_id(cls):  # pylint: disable=no-self-argument
  """Foreign key to the secondary contact (people table)."""
  contact_fk = db.Column(db.Integer, db.ForeignKey('people.id'))
  return deferred(contact_fk, cls.__name__)
def reference_url(cls):  # pylint: disable=no-self-argument
  """Nullable string column holding a reference URL."""
  return deferred(db.Column(db.String), cls.__name__)
def verified_date(cls):  # pylint: disable=no-self-argument
  """Nullable datetime recording when the object was verified."""
  verified_column = db.Column(db.DateTime, nullable=True)
  return deferred(verified_column, cls.__name__)
def reference_url(cls):  # pylint: disable=no-self-argument
  """Nullable string column holding a reference URL."""
  url_column = db.Column(db.String)
  return deferred(url_column, cls.__name__)
def os_state(cls):  # pylint: disable=no-self-argument
  """os_state attribute is used to track object review status"""
  state_column = db.Column(db.String, nullable=False, default='Unreviewed')
  return deferred(state_column, cls.__name__)
class Issue(Roleable, HasObjectState, TestPlanned, CustomAttributable,
            PublicDocumentable, Personable, LastDeprecatedTimeboxed,
            Relatable, Commentable, AuditRelationship, WithAction,
            BusinessObject, Indexed, db.Model):
  """Issue Model."""

  __tablename__ = 'issues'

  # Issue-specific terminal states on top of the standard business states.
  FIXED = "Fixed"
  FIXED_AND_VERIFIED = "Fixed and Verified"

  VALID_STATES = BusinessObject.VALID_STATES + (
      FIXED,
      FIXED_AND_VERIFIED,
  )

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      reflection.Attribute("audit", create=False, update=False),
      reflection.Attribute("allow_map_to_audit", create=False, update=False),
      reflection.Attribute("allow_unmap_from_audit",
                           create=False, update=False),
  )

  _aliases = {
      "test_plan": {
          "display_name": "Remediation Plan"
      },
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are: \n{} ".format('\n'.join(VALID_STATES))
      },
      "audit": None,
  }

  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=True),
      'Issue')

  @builder.simple_property
  def allow_map_to_audit(self):
    """False if self.audit or self.audit_id is set, True otherwise."""
    return self.audit_id is None and self.audit is None

  @builder.simple_property
  def allow_unmap_from_audit(self):
    """False if Issue is mapped to any Assessment/Snapshot, True otherwise."""
    from ggrc.models import all_models
    restricting_types = {all_models.Assessment, all_models.Snapshot}
    restricting_types = set(m.__name__.lower() for m in restricting_types)

    # Relationships already marked for deletion in this session do not
    # count as restricting mappings.
    # pylint: disable=not-an-iterable
    restricting_srcs = (rel.source_type.lower() in restricting_types
                        for rel in self.related_sources
                        if rel not in db.session.deleted)
    restricting_dsts = (rel.destination_type.lower() in restricting_types
                        for rel in self.related_destinations
                        if rel not in db.session.deleted)
    return not any(itertools.chain(restricting_srcs, restricting_dsts))
class RiskAssessment(Documentable, Slugged, Timeboxed, Noted, Described,
                     CustomAttributable, Titled, Relatable, Base, db.Model):
  """Risk assessment record tied to a program, with manager and counsel."""

  __tablename__ = 'risk_assessments'
  _title_uniqueness = False

  # Person responsible for managing the risk assessment.
  ra_manager_id = deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), 'RiskAssessment')
  ra_manager = db.relationship(
      'Person', uselist=False, foreign_keys='RiskAssessment.ra_manager_id')

  # Counsel person assigned to the risk assessment.
  ra_counsel_id = deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), 'RiskAssessment')
  ra_counsel = db.relationship(
      'Person', uselist=False, foreign_keys='RiskAssessment.ra_counsel_id')

  # Owning program; mandatory.
  program_id = deferred(
      db.Column(db.Integer, db.ForeignKey('programs.id'), nullable=False),
      'RiskAssessment')
  program = db.relationship(
      'Program',
      backref='risk_assessments',
      uselist=False,
      foreign_keys='RiskAssessment.program_id')

  _fulltext_attrs = []

  _publish_attrs = [
      'ra_manager',
      'ra_counsel',
      'program',
  ]

  _aliases = {
      "ra_manager": {
          "display_name": "Risk Manager",
          "filter_by": "_filter_by_risk_manager",
      },
      "ra_counsel": {
          "display_name": "Risk Counsel",
          "filter_by": "_filter_by_risk_counsel",
      },
      "start_date": {
          "display_name": "Start Date",
          "mandatory": True,
      },
      "end_date": {
          "display_name": "End Date",
          "mandatory": True,
      },
      "program": {
          "display_name": "Program",
          "mandatory": True,
          "filter_by": "_filter_by_program",
      }
  }

  @classmethod
  def _filter_by_program(cls, predicate):
    """EXISTS filter matching the owning program's slug or title."""
    return Program.query.filter(
        (Program.id == cls.program_id) &
        (predicate(Program.slug) | predicate(Program.title))
    ).exists()

  @classmethod
  def _filter_by_risk_manager(cls, predicate):
    """EXISTS filter matching the risk manager's name or email."""
    return Person.query.filter(
        (Person.id == cls.ra_manager_id) &
        (predicate(Person.name) | predicate(Person.email))
    ).exists()

  @classmethod
  def _filter_by_risk_counsel(cls, predicate):
    """EXISTS filter matching the risk counsel's name or email."""
    return Person.query.filter(
        (Person.id == cls.ra_counsel_id) &
        (predicate(Person.name) | predicate(Person.email))
    ).exists()
class Assessment(Assignable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, TestPlanned, CustomAttributable,
                 WithEvidence, Commentable, Personable,
                 reminderable.Reminderable, Relatable,
                 LastDeprecatedTimeboxed, WithSimilarityScore, FinishedDate,
                 VerifiedDate, Notifiable, WithAction, labeled.Labeled,
                 with_last_comment.WithLastComment,
                 issue_tracker_mixins.IssueTrackedWithUrl, base.ContextRBAC,
                 BusinessObject, with_sox_302.WithSOX302FlowReadOnly,
                 WithCustomRestrictions, Indexed, db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not certain
  conditions were met for that object.
  """

  __tablename__ = 'assessments'
  _title_uniqueness = False

  REWORK_NEEDED = u"Rework Needed"
  # "Rework Needed" is treated as a not-done state on top of the mixin's set.
  NOT_DONE_STATES = statusable.Statusable.NOT_DONE_STATES | {
      REWORK_NEEDED,
  }
  VALID_STATES = tuple(NOT_DONE_STATES | statusable.Statusable.DONE_STATES |
                       statusable.Statusable.INACTIVE_STATES)

  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assignees",
              "In Progress": "Assignees"
          },
          "reminders": {
              "assessment_assignees_reminder",
          }
      }
  }

  design = deferred(db.Column(db.String, nullable=False, default=""),
                    "Assessment")
  operationally = deferred(db.Column(db.String, nullable=False, default=""),
                           "Assessment")
  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')
  assessment_type = deferred(
      db.Column(db.String, nullable=False, server_default="Control"),
      "Assessment")
  # whether to use the object test plan on snapshot mapping
  test_plan_procedure = db.Column(db.Boolean, nullable=False, default=True)

  @declared_attr
  def object_level_definitions(cls):  # pylint: disable=no-self-argument
    """Set up a backref so that we can create an object level custom
    attribute definition without the need to do a flush to get the
    assessment id.

    This is used in the relate_ca method in hooks/assessment.py.
    """
    cad = custom_attribute_definition.CustomAttributeDefinition
    current_type = cls.__name__

    def join_expr():
      return sa.and_(
          orm.foreign(orm.remote(cad.definition_id)) == cls.id,
          cad.definition_type == utils.underscore_from_camelcase(
              current_type),
      )

    # Since there is some kind of generic relationship on CAD side, correct
    # join expression for backref should be provided. If default, every call
    # of "{}_definition".format(definition_type) on CAD will produce a lot of
    # unnecessary DB queries returning nothing.
    def backref_join_expr():
      return orm.remote(cls.id) == orm.foreign(cad.definition_id)

    return db.relationship(
        "CustomAttributeDefinition",
        primaryjoin=join_expr,
        backref=db.backref(
            "{}_definition".format(
                utils.underscore_from_camelcase(current_type)),
            lazy="joined",
            primaryjoin=backref_join_expr,
        ),
        cascade="all, delete-orphan",
    )

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = ("Effective",
                       "Ineffective",
                       "Needs improvement",
                       "Not Applicable")

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'design',
      'operationally',
      'audit',
      'assessment_type',
      'test_plan_procedure',
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('folder', create=False, update=False),
      reflection.Attribute('object', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'design',
      'operationally',
      'folder',
  ]

  # Reindex this assessment's fulltext entries when its audit's "archived"
  # flag changes.
  AUTO_REINDEX_RULES = [
      mixin.ReindexRule("Audit", lambda x: x.assessments, ["archived"]),
  ]

  _custom_publish = {
      'audit': audit.build_audit_stub,
  }

  # Attributes that become read-only while the assessment is in progress
  # (see _restriction_condition below).
  _in_progress_restrictions = (
      "access_control_list",
      "description",
      "title",
      "labels",
      "test_plan",
      "assessment_type",
      "slug",
      "notes",
      "start_date",
      "design",
      "operationally",
      "reminderType",
      "issue_tracker",
      "global_custom_attributes_values",
      "map: Snapshot",
      "map: Issue",
  )

  # Done states additionally freeze custom attributes, evidence mapping and
  # status import.
  _done_state_restrictions = _in_progress_restrictions + (
      "custom_attributes_values",
      "map: Evidence",
      "import: status",
  )

  _restriction_condition = {
      "status": {
          (statusable.Statusable.START_STATE,
           statusable.Statusable.PROGRESS_STATE,
           REWORK_NEEDED,
           statusable.Statusable.DONE_STATE): _in_progress_restrictions,
          (statusable.Statusable.VERIFIED_STATE,
           statusable.Statusable.FINAL_STATE,
           statusable.Statusable.DEPRECATED): _done_state_restrictions
      }
  }

  @classmethod
  def _populate_query(cls, query):
    # Eagerly load the full assessment, its audit, and the audit's
    # issue-tracker record to avoid N+1 queries.
    return query.options(
        orm.Load(cls).undefer_group("Assessment_complete"),
        orm.Load(cls).joinedload("audit").undefer_group("Audit_complete"),
        orm.Load(cls).joinedload("audit").joinedload(
            audit.Audit.issuetracker_issue),
    )

  @classmethod
  def eager_query(cls, **kwargs):
    return cls._populate_query(
        super(Assessment, cls).eager_query(**kwargs))

  @classmethod
  def indexed_query(cls):
    return super(Assessment, cls).indexed_query().options(
        orm.Load(cls).load_only(
            "id",
            "design",
            "operationally",
            "audit_id",
        ),
        orm.Load(cls).joinedload("audit").load_only("archived", "folder"),
    )

  def log_json(self):
    """Add the audit-derived folder to the standard revision JSON."""
    out_json = super(Assessment, self).log_json()
    out_json["folder"] = self.folder
    return out_json

  ASSESSMENT_TYPE_OPTIONS = (
      "Access Groups",
      "Account Balances",
      "Data Assets",
      "Facilities",
      "Key Reports",
      "Markets",
      "Org Groups",
      "Processes",
      "Product Groups",
      "Products",
      "Systems",
      "Technology Environments",
      "Vendors",
      "Contracts",
      "Controls",
      "Objectives",
      "Policies",
      "Regulations",
      "Requirements",
      "Risks",
      "Standards",
      "Threats",
  )

  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "assessment_type": {
          "display_name": "Assessment Type",
          "mandatory": False,
          "description": "Allowed values are:\n{}".format(
              '\n'.join(ASSESSMENT_TYPE_OPTIONS)),
      },
      "design": {
          "display_name": "Conclusion: Design",
          "description":
              "Allowed values are:\n{}".format(
                  '\n'.join(VALID_CONCLUSIONS)),
      },
      "operationally": {
          "display_name": "Conclusion: Operation",
          "description":
              "Allowed values are:\n{}".format(
                  '\n'.join(VALID_CONCLUSIONS)),
      },
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
          "description": "Allowed values are:\nyes\nno"
      },
      "test_plan": "Assessment Procedure",
      # Currently we decided to have 'Due Date' alias for start_date,
      # but it can be changed in future
      "start_date": "Due Date",
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description":
              "Allowed values are:\n{}".format('\n'.join(VALID_STATES))
      },
      "issue_tracker": {
          "display_name": "Ticket Tracker",
          "mandatory": False,
          "view_only": True,
      },
  }

  @simple_property
  def archived(self):
    """Returns a boolean whether assessment is archived or not."""
    return self.audit.archived if self.audit else False

  @simple_property
  def folder(self):
    """Folder inherited from the parent audit; "" when there is no audit."""
    return self.audit.folder if self.audit else ""

  def validate_conclusion(self, value):
    """Return value when it is a valid conclusion, "" otherwise."""
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("status")
  def validate_status(self, key, value):
    """Restrict transitions out of the "Rework Needed" state."""
    value = super(Assessment, self).validate_status(key, value)
    # pylint: disable=unused-argument
    if self.status == value:
      return value
    if self.status == self.REWORK_NEEDED:
      valid_states = [self.DONE_STATE, self.FINAL_STATE, self.DEPRECATED]
      if value not in valid_states:
        # skip_rework_validation is an escape hatch set by callers that
        # need to bypass this check.
        if not getattr(self, "skip_rework_validation", False):
          raise ValueError("Assessment in `Rework Needed` "
                           "state can be only moved to: [{}]".format(
                               ",".join(valid_states)))
    return value

  @validates("operationally")
  def validate_opperationally(self, key, value):
    """Validate assessment operationally by validating conclusion"""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate assessment design by validating conclusion"""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("assessment_type")
  def validate_assessment_type(self, key, value):
    """Validate assessment type to be the same as existing model name"""
    # pylint: disable=unused-argument
    # pylint: disable=no-self-use
    from ggrc.snapshotter.rules import Types
    if value and value not in Types.all:
      raise ValueError(
          "Assessment type '{}' is not snapshotable".format(value))
    return value

  @classmethod
  def _ignore_filter(cls, _):
    # Import filter for assessment_template is a no-op by design.
    return None
class Person(CustomAttributable, CustomAttributeMapable, HasOwnContext,
             Relatable, base.ContextRBAC, Base, Indexed, db.Model):
  """Person model definition."""

  def __init__(self, *args, **kwargs):
    """Initialize profile relationship while creating Person instance"""
    super(Person, self).__init__(*args, **kwargs)
    self.profile = PersonProfile()
    self.build_object_context(
        context=1,
        name='Personal Context',
        description='')

  __tablename__ = 'people'

  email = deferred(db.Column(db.String, nullable=False), 'Person')
  name = deferred(db.Column(db.String), 'Person')
  language_id = deferred(db.Column(db.Integer), 'Person')
  company = deferred(db.Column(db.String), 'Person')

  object_people = db.relationship(
      'ObjectPerson', backref='person', cascade='all, delete-orphan')
  language = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Person.language_id) == Option.id, '
                  'Option.role == "person_language")',
      uselist=False,
  )
  profile = db.relationship(
      "PersonProfile",
      foreign_keys='PersonProfile.person_id',
      uselist=False,
      backref="person",
  )
  access_control_people = db.relationship(
      'AccessControlPerson',
      foreign_keys='AccessControlPerson.person_id',
      backref="person",
  )

  @staticmethod
  def _extra_table_args(_):
    return (
        db.Index('ix_people_name_email', 'name', 'email'),
        db.Index('uq_people_email', 'email', unique=True),
    )

  _fulltext_attrs = [
      'company',
      'email',
      'name',
  ]
  _api_attrs = reflection.ApiAttributes(
      'company',
      'email',
      'language',
      'name',
      reflection.Attribute('profile', create=False, update=False),
      reflection.Attribute('object_people', create=False, update=False),
      reflection.Attribute('system_wide_role', create=False, update=False),
  )
  _sanitize_html = [
      'company',
      'name',
  ]
  _include_links = []
  _aliases = {
      "name": "Name",
      "email": {
          "display_name": "Email",
          "unique": True,
      },
      "company": "Company",
      "user_role": {
          "display_name": "Role",
          "type": "user_role",
          "filter_by": "_filter_by_user_role",
      },
  }

  @classmethod
  def _filter_by_user_role(cls, predicate):
    """Custom filter by user roles."""
    from ggrc_basic_permissions.models import Role, UserRole
    # Only system-wide (context-less) roles are considered.
    return UserRole.query.join(
        Role).filter((UserRole.person_id == cls.id) &
                     (UserRole.context_id.is_(None)) &  # noqa
                     predicate(Role.name)).exists()

  # Methods required by Flask-Login
  # pylint: disable=no-self-use
  def is_authenticated(self):
    return self.system_wide_role != 'No Access'

  @property
  def user_name(self):
    # Local part of the email address.
    return self.email.split("@")[0]

  @property
  def title(self):
    return self.name or self.email

  def is_active(self):
    # pylint: disable=no-self-use
    return True  # self.active

  def is_anonymous(self):
    # pylint: disable=no-self-use
    return False

  def get_id(self):
    return unicode(self.id)  # noqa

  @validates('language')
  def validate_person_options(self, key, option):
    return validate_option(self.__class__.__name__, key, option,
                           'person_language')

  @validates('email')
  def validate_email(self, _, email):
    """Email property validator."""
    if not Person.is_valid_email(email):
      message = "Email address '{}' is invalid. Valid email must be provided"
      raise ValidationError(message.format(email))
    return email

  @staticmethod
  def is_valid_email(val):
    """Check for valid email. Borrowed from Django.

    Literal form, ipv4 address (SMTP 4.1.3).
    """
    email_re = re.compile(
        r'^[-!#$%&\'*+\\.\/0-9=?A-Z^_`{|}~]+@([-0-9A-Z]+\.)+([0-9A-Z]){2,4}$',
        re.IGNORECASE)
    return email_re.match(val) if val else False

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    # query = super(Person, cls).eager_query()
    # Completely overriding eager_query to avoid eager loading of the
    # modified_by relationship
    return super(Person, cls).eager_query().options(
        orm.joinedload('language'),
        orm.joinedload('profile'),
        orm.subqueryload('object_people'),
    )

  @classmethod
  def indexed_query(cls):
    from sqlalchemy import orm
    return super(Person, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Person_complete", ),
    )

  def _display_name(self):
    return self.email

  @builder.simple_property
  def system_wide_role(self):
    """For choosing the role string to show to the user; of all the roles
    in the system-wide context, it shows the highest ranked one (if there
    are multiple) or "No Access" if there are none.
    """
    if self.email in getattr(settings, "BOOTSTRAP_ADMIN_USERS", []):
      return SystemWideRoles.SUPERUSER

    from ggrc.utils.user_generator import is_external_app_user_email
    if is_external_app_user_email(self.email):
      return SystemWideRoles.SUPERUSER

    role_hierarchy = {
        SystemWideRoles.ADMINISTRATOR: 0,
        SystemWideRoles.EDITOR: 1,
        SystemWideRoles.READER: 2,
        SystemWideRoles.CREATOR: 3,
    }
    unique_roles = set([
        user_role.role.name
        for user_role in self.user_roles
        if user_role.role.name in role_hierarchy
    ])
    if not unique_roles:
      return u"No Access"

    # -1 as default to make items not in this list appear on top
    # and thus shown to the user
    sorted_roles = sorted(unique_roles,
                          key=lambda x: role_hierarchy.get(x, -1))
    return sorted_roles[0]
class SystemOrProcess(track_object_state.HasObjectState, Commentable,
                      TestPlanned, LastDeprecatedTimeboxed, BusinessObject,
                      db.Model):
  """Shared base for System and Process, polymorphic on is_biz_process."""

  # Override model_inflector
  _table_plural = 'systems_or_processes'
  __tablename__ = 'systems'

  infrastructure = deferred(db.Column(db.Boolean), 'SystemOrProcess')
  is_biz_process = db.Column(db.Boolean, default=False)
  version = deferred(db.Column(db.String), 'SystemOrProcess')
  network_zone_id = deferred(db.Column(db.Integer), 'SystemOrProcess')
  network_zone = db.relationship(
      'Option',
      primaryjoin='and_(foreign(SystemOrProcess.network_zone_id) =='
                  ' Option.id, Option.role == "network_zone")',
      uselist=False,
  )
  __mapper_args__ = {
      'polymorphic_on': is_biz_process
  }

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'infrastructure',
      'version',
      'network_zone',
      reflection.Attribute('is_biz_process', create=False, update=False),
  )
  _fulltext_attrs = [
      'infrastructure',
      'version',
      'network_zone',
  ]
  _sanitize_html = ['version']
  _aliases = {
      "document_url": None,
      "document_evidence": None,
      "network_zone": {
          "display_name": "Network Zone",
      },
  }

  @validates('network_zone')
  def validate_system_options(self, key, option):
    return validate_option(self.__class__.__name__, key, option,
                           'network_zone')

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    query = super(SystemOrProcess, cls).eager_query()
    return query.options(orm.joinedload('network_zone'))

  @classmethod
  def indexed_query(cls):
    from sqlalchemy import orm

    # NOTE(review): this calls super().eager_query() rather than
    # super().indexed_query() as sibling models do — confirm intentional.
    query = super(SystemOrProcess, cls).eager_query()
    return query.options(
        orm.joinedload('network_zone', ).undefer_group("Option_complete", ))

  @staticmethod
  def _extra_table_args(cls):
    return (db.Index('ix_{}_is_biz_process'.format(cls.__tablename__),
                     'is_biz_process'), )
def field_declaration(cls):  # pylint: disable=no-self-argument
  """Nullable foreign key to the people table.

  NOTE(review): name is generic; which person this references depends on
  the mixin user — confirm against the declaring class.
  """
  return deferred(
      db.Column(db.Integer, db.ForeignKey('people.id'), nullable=True),
      cls.__name__)
class Document(Roleable, Relatable, Base, Indexed, db.Model):
  """Document model: a URL, an evidence attachment, or a reference URL."""

  __tablename__ = 'documents'

  # TODO: inherit from Titled mixin (note: title is nullable here)
  title = deferred(db.Column(db.String), 'Document')
  link = deferred(db.Column(db.String), 'Document')
  description = deferred(db.Column(db.Text, nullable=False, default=u""),
                         'Document')
  kind_id = db.Column(db.Integer, db.ForeignKey('options.id'), nullable=True)
  year_id = db.Column(db.Integer, db.ForeignKey('options.id'), nullable=True)
  language_id = db.Column(db.Integer, db.ForeignKey('options.id'),
                          nullable=True)

  URL = "URL"
  ATTACHMENT = "EVIDENCE"
  REFERENCE_URL = "REFERENCE_URL"
  VALID_DOCUMENT_TYPES = [URL, ATTACHMENT, REFERENCE_URL]
  document_type = deferred(db.Column(db.Enum(*VALID_DOCUMENT_TYPES),
                                     default=URL,
                                     nullable=False),
                           'Document')

  kind = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Document.kind_id) == Option.id, '
                  'Option.role == "reference_type")',
      uselist=False,
      lazy="joined",
  )
  year = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Document.year_id) == Option.id, '
                  'Option.role == "document_year")',
      uselist=False,
      lazy="joined",
  )
  language = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Document.language_id) == Option.id, '
                  'Option.role == "language")',
      uselist=False,
      lazy="joined",
  )

  _fulltext_attrs = [
      'title',
      'link',
      'description',
      "document_type",
  ]
  _api_attrs = reflection.ApiAttributes(
      'title',
      'link',
      'description',
      'kind',
      'year',
      'language',
      "document_type",
  )
  _sanitize_html = [
      'title',
      'description',
  ]
  _aliases = {
      'title': "Title",
      'link': "Link",
      'description': "description",
  }

  @orm.validates('kind', 'year', 'language')
  def validate_document_options(self, key, option):
    """Returns correct option, otherwise rises an error"""
    # Map the attribute name to the Option.role it must carry.
    if key == 'year':
      desired_role = 'document_year'
    elif key == 'kind':
      desired_role = 'reference_type'
    else:
      desired_role = key
    return validate_option(self.__class__.__name__, key, option,
                           desired_role)

  @orm.validates('document_type')
  def validate_document_type(self, key, document_type):
    """Returns correct option, otherwise rises an error"""
    # None falls back to the default URL type.
    if document_type is None:
      document_type = self.URL
    if document_type not in self.VALID_DOCUMENT_TYPES:
      raise exceptions.ValidationError(
          "Invalid value for attribute {attr}. "
          "Expected options are `{url}`, `{attachment}`, `{reference_url}`"
          .format(attr=key,
                  url=self.URL,
                  attachment=self.ATTACHMENT,
                  reference_url=self.REFERENCE_URL))
    return document_type

  @classmethod
  def indexed_query(cls):
    return super(Document, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Document_complete", ),
    )

  @classmethod
  def eager_query(cls):
    return super(Document, cls).eager_query().options(
        orm.joinedload('kind'),
        orm.joinedload('year'),
        orm.joinedload('language'),
    )

  @hybrid_property
  def slug(self):
    """Python-side slug: the link for URL types, "link title" otherwise."""
    if self.document_type in (self.URL, self.REFERENCE_URL):
      return self.link
    return u"{} {}".format(self.link, self.title)

  # pylint: disable=no-self-argument
  @slug.expression
  def slug(cls):
    # SQL-side mirror of the Python slug property.
    return case([(cls.document_type == cls.ATTACHMENT,
                  func.concat(cls.link, ' ', cls.title))],
                else_=cls.link)

  def log_json(self):
    tmp = super(Document, self).log_json()
    tmp['type'] = "Document"
    return tmp
class Assessment(Roleable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, Assignable, HasObjectState,
                 TestPlanned, CustomAttributable, EvidenceURL, Commentable,
                 Personable, reminderable.Reminderable, Timeboxed, Relatable,
                 WithSimilarityScore, FinishedDate, VerifiedDate,
                 ValidateOnComplete, Notifiable, BusinessObject, Indexed,
                 db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not
  certain conditions were met for that object.
  """

  __tablename__ = 'assessments'
  _title_uniqueness = False

  # Roles a person can be assigned to on an assessment.
  ASSIGNEE_TYPES = (u"Creator", u"Assessor", u"Verifier")

  # Reminder notifications sent to assessors depending on the state.
  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assessor",
              "In Progress": "Assessor"
          },
          "reminders": {"assessment_assessor_reminder", }
      }
  }

  # Conclusion fields; validated against VALID_CONCLUSIONS below.
  design = deferred(db.Column(db.String), "Assessment")
  operationally = deferred(db.Column(db.String), "Assessment")

  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')

  @declared_attr
  def object_level_definitions(self):
    """Set up a backref so that we can create an object level custom
       attribute definition without the need to do a flush to get the
       assessment id.

    This is used in the relate_ca method in hooks/assessment.py.
    """
    return db.relationship(
        'CustomAttributeDefinition',
        primaryjoin=lambda: and_(
            remote(CustomAttributeDefinition.definition_id) == Assessment.id,
            remote(CustomAttributeDefinition.definition_type) ==
            "assessment"),
        foreign_keys=[
            CustomAttributeDefinition.definition_id,
            CustomAttributeDefinition.definition_type
        ],
        backref='assessment_definition',
        cascade='all, delete-orphan')

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = frozenset([
      "Effective",
      "Ineffective",
      "Needs improvement",
      "Not Applicable"
  ])

  # REST properties
  _publish_attrs = [
      'design',
      'operationally',
      'audit',
      PublishOnly('object')
  ]

  # Attributes indexed for full text search; the Multiple* entries index
  # subproperties of related people and documents.
  _fulltext_attrs = [
      'design',
      'operationally',
      MultipleSubpropertyFullTextAttr('related_assessors', 'assessors',
                                      ['user_name', 'email', 'name']),
      MultipleSubpropertyFullTextAttr('related_creators', 'creators',
                                      ['user_name', 'email', 'name']),
      MultipleSubpropertyFullTextAttr('related_verifiers', 'verifiers',
                                      ['user_name', 'email', 'name']),
      MultipleSubpropertyFullTextAttr('document_evidence',
                                      'document_evidence',
                                      ['title', 'link']),
      MultipleSubpropertyFullTextAttr('document_url', 'document_url',
                                      ['link']),
  ]

  @classmethod
  def indexed_query(cls):
    query = super(Assessment, cls).indexed_query()
    return query.options(
        orm.Load(cls).load_only(
            "design",
            "operationally",
        )
    )

  # Attributes whose changes are tracked for auto status change.
  _tracked_attrs = {
      'description',
      'design',
      'notes',
      'operationally',
      'reference_url',
      'test_plan',
      'title',
      'url',
      'start_date',
      'end_date'
  }

  # Import/export column aliases.
  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "url": "Assessment URL",
      "design": "Conclusion: Design",
      "operationally": "Conclusion: Operation",
      "related_creators": {
          "display_name": "Creators",
          "mandatory": True,
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "related_assessors": {
          "display_name": "Assignees",
          "mandatory": True,
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "related_verifiers": {
          "display_name": "Verifiers",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
  }

  AUTO_REINDEX_RULES = [
      ReindexRule("RelationshipAttr", reindex_by_relationship_attr),
      ReindexRule("Relationship", reindex_by_relationship)
  ]

  # Weights used by WithSimilarityScore to find similar assessments.
  similarity_options = {
      "relevant_types": {
          "Objective": {"weight": 2},
          "Control": {"weight": 2},
      },
      "threshold": 1,
  }

  @property
  def assessors(self):
    """Get the list of assessor assignees"""
    return self.assignees_by_type.get("Assessor", [])

  @property
  def creators(self):
    """Get the list of creator assignees"""
    return self.assignees_by_type.get("Creator", [])

  @property
  def verifiers(self):
    """Get the list of verifier assignees"""
    return self.assignees_by_type.get("Verifier", [])

  @property
  def document_evidence(self):
    return self.documents_by_type("document_evidence")

  @property
  def document_url(self):
    return self.documents_by_type("document_url")

  def validate_conclusion(self, value):
    """Return value if it is a valid conclusion, otherwise None."""
    return value if value in self.VALID_CONCLUSIONS else None

  # NOTE(review): method name has a typo ("opperationally"); kept as-is
  # since renaming is out of scope for a documentation-only pass.
  @validates("operationally")
  def validate_opperationally(self, key, value):
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @classmethod
  def _ignore_filter(cls, _):
    # used by the "assessment_template" alias to disable filtering
    return None
class Comment(Roleable, Relatable, Described, Notifiable,
              base.ContextRBAC, Base, Indexed, db.Model):
  """Basic comment model."""
  __tablename__ = "comments"

  assignee_type = db.Column(db.String, nullable=False, default=u"")
  # Revision of the custom attribute value this comment was left for, if any
  # (see custom_attribute_revision_upd below).
  revision_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('revisions.id', ondelete='SET NULL'),
          nullable=True,
      ),
      'Comment')
  revision = db.relationship(
      'Revision',
      uselist=False,
  )
  custom_attribute_definition_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('custom_attribute_definitions.id',
                        ondelete='SET NULL'),
          nullable=True,
      ),
      'Comment')
  custom_attribute_definition = db.relationship(
      'CustomAttributeDefinition',
      uselist=False,
  )

  # Polymorphic link to the object that initiated this comment
  # (e.g. a Proposal).
  initiator_instance_id = db.Column(db.Integer, nullable=True)
  initiator_instance_type = db.Column(db.String, nullable=True)
  INITIATOR_INSTANCE_TMPL = "{}_comment_initiated_by"

  initiator_instance = utils.PolymorphicRelationship(
      "initiator_instance_id",
      "initiator_instance_type",
      INITIATOR_INSTANCE_TMPL)

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      "assignee_type",
      reflection.Attribute("custom_attribute_revision",
                           create=False,
                           update=False),
      reflection.Attribute("custom_attribute_revision_upd",
                           read=False),
      reflection.Attribute("header_url_link",
                           create=False,
                           update=False),
  )

  _sanitize_html = [
      "description",
  ]

  def get_objects_to_reindex(self):
    """Return list required objects for reindex if comment C.U.D."""
    # collect all objects related to this comment in either direction
    source_qs = db.session.query(
        Relationship.destination_type,
        Relationship.destination_id).filter(
        Relationship.source_type == self.__class__.__name__,
        Relationship.source_id == self.id)
    destination_qs = db.session.query(
        Relationship.source_type,
        Relationship.source_id).filter(
        Relationship.destination_type == self.__class__.__name__,
        Relationship.destination_id == self.id)
    result_qs = source_qs.union(destination_qs)
    klass_dict = defaultdict(set)
    for klass, object_id in result_qs:
      klass_dict[klass].add(object_id)

    queries = []
    for klass, object_ids in klass_dict.iteritems():
      model = inflector.get_model(klass)
      if not model:
        continue
      # only indexed or commentable models need reindexing
      if issubclass(model, (Indexed, Commentable)):
        queries.append(
            model.query.filter(model.id.in_(list(object_ids))))
    return list(itertools.chain(*queries))

  AUTO_REINDEX_RULES = [
      ReindexRule("Comment", lambda x: x.get_objects_to_reindex()),
      ReindexRule("Relationship", reindex_by_relationship),
  ]

  @builder.simple_property
  def header_url_link(self):
    """Return header url link to comment if that comment related to proposal
    and that proposal is only proposed."""
    if self.initiator_instance_type != "Proposal":
      return ""
    proposed_status = self.initiator_instance.STATES.PROPOSED
    if self.initiator_instance.status == proposed_status:
      return "proposal_link"
    return ""

  @classmethod
  def eager_query(cls):
    query = super(Comment, cls).eager_query()
    return query.options(
        orm.joinedload('revision'),
        orm.joinedload('custom_attribute_definition').undefer_group(
            'CustomAttributeDefinition_complete'),
    )

  def log_json(self):
    """Log custom attribute revisions."""
    res = super(Comment, self).log_json()
    res["custom_attribute_revision"] = self.custom_attribute_revision
    return res

  @builder.simple_property
  def custom_attribute_revision(self):
    """Get the historical value of the relevant CA value."""
    if not self.revision:
      return None
    revision = self.revision.content
    cav_stored_value = revision['attribute_value']
    cad = self.custom_attribute_definition
    return {
        'custom_attribute': {
            'id': cad.id if cad else None,
            'title': cad.title if cad else 'DELETED DEFINITION',
        },
        'custom_attribute_stored_value': cav_stored_value,
    }

  def custom_attribute_revision_upd(self, value):
    """Create a Comment-CA mapping with current CA value stored."""
    ca_revision_dict = value.get('custom_attribute_revision_upd')
    if not ca_revision_dict:
      return
    ca_val_dict = self._get_ca_value(ca_revision_dict)
    ca_val_id = ca_val_dict['id']
    # pin the latest revision of the CA value at comment creation time
    ca_val_revision = Revision.query.filter_by(
        resource_type='CustomAttributeValue',
        resource_id=ca_val_id,
    ).order_by(Revision.created_at.desc(),
               ).limit(1).first()
    if not ca_val_revision:
      raise BadRequest(
          "No Revision found for CA value with id provided under "
          "'custom_attribute_value': {}".format(ca_val_dict))
    self.revision_id = ca_val_revision.id
    self.revision = ca_val_revision
    # Here *attribute*_id is assigned to *definition*_id, strange but,
    # as you can see in src/ggrc/models/custom_attribute_value.py
    # custom_attribute_id is link to custom_attribute_definitions.id
    # possible best way is use definition id from request:
    # ca_revision_dict["custom_attribute_definition"]["id"]
    # but needs to be checked that is always exist in request
    self.custom_attribute_definition_id = ca_val_revision.content.get(
        'custom_attribute_id',
    )
    self.custom_attribute_definition = CustomAttributeDefinition.query.get(
        self.custom_attribute_definition_id,
    )

  @staticmethod
  def _get_ca_value(ca_revision_dict):
    """Get CA value dict from json and do a basic validation."""
    ca_val_dict = ca_revision_dict.get('custom_attribute_value')
    if not ca_val_dict:
      raise ValueError(
          "CA value expected under "
          "'custom_attribute_value': {}".format(ca_revision_dict))
    if not ca_val_dict.get('id'):
      raise ValueError(
          "CA value id expected under 'id': {}".format(ca_val_dict))
    return ca_val_dict
class Workflow(roleable.Roleable,
               relationship.Relatable,
               mixins.CustomAttributable,
               HasOwnContext,
               mixins.Timeboxed,
               mixins.Described,
               mixins.Titled,
               mixins.Notifiable,
               mixins.Stateful,
               base.ContextRBAC,
               mixins.Slugged,
               mixins.Folderable,
               Indexed,
               db.Model):
  """Basic Workflow first class object.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  # Workflow lifecycle states.
  DRAFT = u"Draft"
  ACTIVE = u"Active"
  INACTIVE = u"Inactive"
  VALID_STATES = [DRAFT, ACTIVE, INACTIVE]

  @classmethod
  def default_status(cls):
    # a new workflow starts in Draft
    return cls.DRAFT

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=False, default=u""), 'Workflow')
  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  task_groups = db.relationship(
      'TaskGroup', backref='_workflow', cascade='all, delete-orphan')
  cycles = db.relationship(
      'Cycle', backref='_workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)
  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # this is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean,
      default=IS_VERIFICATION_NEEDED_DEFAULT,
      nullable=False)

  # recurrence setup: repeat every `repeat_every` `unit`s
  repeat_every = deferred(db.Column(db.Integer, nullable=True, default=None),
                          'Workflow')
  DAY_UNIT = 'day'
  WEEK_UNIT = 'week'
  MONTH_UNIT = 'month'
  VALID_UNITS = (DAY_UNIT, WEEK_UNIT, MONTH_UNIT)
  unit = deferred(db.Column(db.Enum(*VALID_UNITS), nullable=True,
                            default=None), 'Workflow')
  repeat_multiplier = deferred(db.Column(db.Integer, nullable=False,
                                         default=0), 'Workflow')

  # Mapping of unit to the legacy "frequency" values.
  UNIT_FREQ_MAPPING = {
      None: "one_time",
      DAY_UNIT: "daily",
      WEEK_UNIT: "weekly",
      MONTH_UNIT: "monthly"
  }

  # Builders for the human readable "repeat" representation; each one takes
  # a prefix (the repeat count) and a suffix (plural "s").
  # pylint: disable=unnecessary-lambda
  REPEAT_MAPPING = {
      None: lambda px, sx: "off",
      DAY_UNIT: lambda px, sx: "every {}weekday{}".format(px, sx),
      WEEK_UNIT: lambda px, sx: "every {}week{}".format(px, sx),
      MONTH_UNIT: lambda px, sx: "every {}month{}".format(px, sx)
  }
  REPEAT_ORDER_MAPPING = {
      None: 0,
      DAY_UNIT: 1,
      WEEK_UNIT: 2,
      MONTH_UNIT: 3
  }

  @hybrid.hybrid_property
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return self.UNIT_FREQ_MAPPING[self.unit]

  @frequency.expression
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return case([
        (self.unit.is_(None),
         self.UNIT_FREQ_MAPPING[None]),
        (self.unit == self.DAY_UNIT,
         self.UNIT_FREQ_MAPPING[self.DAY_UNIT]),
        (self.unit == self.WEEK_UNIT,
         self.UNIT_FREQ_MAPPING[self.WEEK_UNIT]),
        (self.unit == self.MONTH_UNIT,
         self.UNIT_FREQ_MAPPING[self.MONTH_UNIT]),
    ])

  @classmethod
  def _get_repeat(cls, unit, repeat_every):
    """Return repeat field representation for QueryAPI"""
    if repeat_every is None or repeat_every == 1:
      prefix, suffix = "", ""
    else:
      prefix, suffix = "{} ".format(repeat_every), "s"
    func = cls.REPEAT_MAPPING[unit]
    return func(prefix, suffix)

  @hybrid.hybrid_property
  def repeat(self):
    """Hybrid property for filtering in QueryAPI"""
    return self._get_repeat(self.unit, self.repeat_every)

  @repeat.expression
  def repeat(self):
    """Hybrid property for filtering in QueryAPI"""
    case_ = [(self.unit.is_(None), self.REPEAT_MAPPING[None](None, None))]
    # enumerate every supported (unit, repeat_every) combination;
    # repeat_every is restricted to the range 1~30 (see _aliases)
    case_.extend(((self.unit == unit) & (self.repeat_every == repeat_every),
                  self._get_repeat(unit, repeat_every))
                 for unit in self.VALID_UNITS
                 for repeat_every in xrange(1, 31))
    return case(case_)

  @property
  def repeat_order(self):
    """Property for ordering in QueryAPI"""
    unit_map = self.REPEAT_ORDER_MAPPING[self.unit]
    repeat_every_map = self.repeat_every or 0
    return u"{:0>4}_{:0>4}".format(unit_map, repeat_every_map)

  @builder.simple_property
  def can_start_cycle(self):
    """Can start cycle.

    Boolean property, returns True if all task groups have at least one
    task group task, False otherwise.
    """
    return not any(tg for tg in self.task_groups if not tg.task_group_tasks)

  @property
  def tasks(self):
    # all task group tasks across all task groups, flattened
    return list(itertools.chain(*[t.task_group_tasks
                                  for t in self.task_groups]))

  @property
  def min_task_start_date(self):
    """Fetches non adjusted setup cycle start date based on TGT user's setup.

    Args:
        self: Workflow instance.

    Returns:
        Date when first cycle should be started based on user's setup.
    """
    tasks = self.tasks
    min_date = None
    for task in tasks:
      min_date = min(task.start_date, min_date or task.start_date)
    return min_date

  WORK_WEEK_LEN = 5

  @classmethod
  def first_work_day(cls, day):
    """Roll day back to the closest preceding non-weekend, non-holiday day."""
    holidays = google_holidays.GoogleHolidays()
    while day.isoweekday() > cls.WORK_WEEK_LEN or day in holidays:
      day -= relativedelta.relativedelta(days=1)
    return day

  def calc_next_adjusted_date(self, setup_date):
    """Calculates adjusted date which are expected in next cycle.

    Args:
        setup_date: Date which was setup by user.

    Returns:
        Adjusted date which are expected to be in next Workflow cycle.
    """
    if self.repeat_every is None or self.unit is None:
      return self.first_work_day(setup_date)
    try:
      key = {
          self.WEEK_UNIT: "weeks",
          self.MONTH_UNIT: "months",
          self.DAY_UNIT: "days",
      }[self.unit]
    except KeyError:
      raise ValueError("Invalid Workflow unit")
    repeater = self.repeat_every * self.repeat_multiplier
    if self.unit == self.DAY_UNIT:
      weeks = repeater / self.WORK_WEEK_LEN
      days = repeater % self.WORK_WEEK_LEN
      # append weekends if it's needed
      days += ((setup_date.isoweekday() + days) > self.WORK_WEEK_LEN) * 2
      # NOTE(review): setup_date is passed positionally to relativedelta
      # (its dt1 argument) together with keyword deltas; presumably dateutil
      # ignores dt1 when dt2 is absent — confirm against the installed
      # dateutil version.
      return setup_date + relativedelta.relativedelta(
          setup_date, weeks=weeks, days=days)
    calc_date = setup_date + relativedelta.relativedelta(
        setup_date,
        **{key: repeater}
    )
    if self.unit == self.MONTH_UNIT:
      # check if setup date is the last day of the month
      # and if it is then calc_date should be the last day of the month too
      setup_day = calendar.monthrange(setup_date.year, setup_date.month)[1]
      if setup_day == setup_date.day:
        calc_date = datetime.date(
            calc_date.year,
            calc_date.month,
            calendar.monthrange(calc_date.year, calc_date.month)[1])
    return self.first_work_day(calc_date)

  @orm.validates('repeat_every')
  def validate_repeat_every(self, _, value):
    """Validate repeat_every field for Workflow.

    repeat_every shouldn't have 0 value.
    """
    if value is not None and not isinstance(value, (int, long)):
      raise ValueError("'repeat_every' should be integer or 'null'")
    if value is not None and value <= 0:
      raise ValueError("'repeat_every' should be strictly greater than 0")
    return value

  @orm.validates('unit')
  def validate_unit(self, _, value):
    """Validate unit field for Workflow.

    Unit should have one of the value from VALID_UNITS list or None.
    """
    if value is not None and value not in self.VALID_UNITS:
      raise ValueError("'unit' field should be one of the "
                       "value: null, {}".format(", ".join(self.VALID_UNITS)))
    return value

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, _, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    It's not allowed to change is_verification_needed flag after creation.
    If is_verification_needed doesn't send,
    then is_verification_needed flag is True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if self.status != self.DRAFT and value != self.is_verification_needed:
      raise ValueError("is_verification_needed value isn't changeble "
                       "on workflow with '{}' status".format(self.status))
    return value

  @builder.simple_property
  def workflow_state(self):
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  # Full text search indexes "repeat" with a custom sort order.
  _fulltext_attrs = [
      attributes.CustomOrderingFullTextAttr(
          'repeat',
          'repeat',
          order_prop_getter='repeat_order'
      )
  ]

  _api_attrs = reflection.ApiAttributes(
      'task_groups',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'recurrences',
      'is_verification_needed',
      'repeat_every',
      'unit',
      reflection.Attribute('object_approval', update=False),
      reflection.Attribute('next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('can_start_cycle', create=False, update=False),
      reflection.Attribute('non_adjusted_next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('workflow_state',
                           create=False, update=False),
      reflection.Attribute('kind',
                           create=False, update=False),
      reflection.Attribute('repeat', create=False, update=False)
  )

  # Import/export column aliases.
  _aliases = {
      "repeat_every": {
          "display_name": "Repeat Every",
          "description": "'Repeat Every' value\nmust fall into\nthe range "
                         "1~30"
                         "\nor '-' for None",
      },
      "unit": {
          "display_name": "Unit",
          "description": "Allowed values for\n'Unit' are:\n{}"
                         "\nor '-' for None".format("\n".join(VALID_UNITS)),
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
          "description": "This field is not changeable\nafter creation.",
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": {
          "display_name": "Force real-time email updates",
          "mandatory": False,
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    columns = ['title', 'description', 'notify_on_change',
               'notify_custom_message', 'end_date', 'start_date',
               'repeat_every', 'unit', 'is_verification_needed']
    if kwargs.get('clone_people', False):
      # carry over the existing access control entries
      access_control_list = [
          {
              "ac_role_id": acl.ac_role.id,
              "person": {"id": person.id}
          }
          for person, acl in self.access_control_list
      ]
    else:
      # otherwise the current user becomes the Admin of the copy
      role_id = {
          name: ind
          for (ind, name) in
          role.get_custom_roles_for(self.type).iteritems()
      }['Admin']
      access_control_list = [{"ac_role_id": role_id,
                              "person": {"id": get_current_user().id}}]
    target = self.copy_into(_other, columns,
                            access_control_list=access_control_list,
                            **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True)
        )

    return target

  @classmethod
  def eager_query(cls):
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups').undefer_group('TaskGroup_complete'),
        orm.subqueryload(
            'task_groups'
        ).subqueryload(
            "task_group_tasks"
        ).undefer_group(
            'TaskGroupTask_complete'
        ),
    )

  @classmethod
  def indexed_query(cls):
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Workflow_complete",
        ),
    )
def status(cls):  # pylint: disable=no-self-argument
  """Object status column, defaulting to the class's default status."""
  column = db.Column(
      db.String,
      nullable=False,
      default=cls.default_status,
  )
  return deferred(column, cls.__name__)
def is_verification_needed(cls):  # pylint: disable=no-self-argument
  """Verification flag column; new objects require verification by default."""
  column = db.Column(db.Boolean, default=True, nullable=False)
  return deferred.deferred(column, cls.__name__)
def title(cls):  # pylint: disable=no-self-argument
  """Mandatory title column."""
  title_column = db.Column(db.String, nullable=False)
  return deferred(title_column, cls.__name__)
class Workflow(mixins.CustomAttributable,
               HasOwnContext,
               mixins.Timeboxed,
               mixins.Described,
               mixins.Titled,
               mixins.Notifiable,
               mixins.Stateful,
               mixins.Slugged,
               mixins.Folderable,
               Indexed,
               db.Model):
  """Basic Workflow first class object.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  # Workflow lifecycle states.
  DRAFT = u"Draft"
  ACTIVE = u"Active"
  INACTIVE = u"Inactive"
  VALID_STATES = [DRAFT, ACTIVE, INACTIVE]

  @classmethod
  def default_status(cls):
    # a new workflow starts in Draft
    return cls.DRAFT

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=False, default=u""), 'Workflow')
  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  workflow_people = db.relationship(
      'WorkflowPerson', backref='workflow', cascade='all, delete-orphan')
  people = association_proxy(
      'workflow_people', 'person', 'WorkflowPerson')

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')
  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)
  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # this is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean,
      default=IS_VERIFICATION_NEEDED_DEFAULT,
      nullable=False)

  # recurrence setup: repeat every `repeat_every` `unit`s
  repeat_every = deferred(db.Column(db.Integer, nullable=True, default=None),
                          'Workflow')
  DAY_UNIT = 'day'
  WEEK_UNIT = 'week'
  MONTH_UNIT = 'month'
  VALID_UNITS = (DAY_UNIT, WEEK_UNIT, MONTH_UNIT)
  unit = deferred(db.Column(db.Enum(*VALID_UNITS), nullable=True,
                            default=None), 'Workflow')
  repeat_multiplier = deferred(db.Column(db.Integer, nullable=False,
                                         default=0), 'Workflow')

  # Mapping of unit to the legacy "frequency" values.
  UNIT_FREQ_MAPPING = {
      None: "one_time",
      DAY_UNIT: "daily",
      WEEK_UNIT: "weekly",
      MONTH_UNIT: "monthly"
  }

  @hybrid.hybrid_property
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return self.UNIT_FREQ_MAPPING[self.unit]

  @frequency.expression
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return case([
        (self.unit.is_(None),
         self.UNIT_FREQ_MAPPING[None]),
        (self.unit == self.DAY_UNIT,
         self.UNIT_FREQ_MAPPING[self.DAY_UNIT]),
        (self.unit == self.WEEK_UNIT,
         self.UNIT_FREQ_MAPPING[self.WEEK_UNIT]),
        (self.unit == self.MONTH_UNIT,
         self.UNIT_FREQ_MAPPING[self.MONTH_UNIT]),
    ])

  @property
  def tasks(self):
    # all task group tasks across all task groups, flattened
    return list(itertools.chain(*[t.task_group_tasks
                                  for t in self.task_groups]))

  @property
  def min_task_start_date(self):
    """Fetches non adjusted setup cycle start date based on TGT user's setup.

    Args:
        self: Workflow instance.

    Returns:
        Date when first cycle should be started based on user's setup.
    """
    tasks = self.tasks
    min_date = None
    for task in tasks:
      min_date = min(task.start_date, min_date or task.start_date)
    return min_date

  WORK_WEEK_LEN = 5

  @classmethod
  def first_work_day(cls, day):
    """Roll day back to the closest preceding non-weekend, non-holiday day."""
    holidays = google_holidays.GoogleHolidays()
    while day.isoweekday() > cls.WORK_WEEK_LEN or day in holidays:
      day -= relativedelta.relativedelta(days=1)
    return day

  def calc_next_adjusted_date(self, setup_date):
    """Calculates adjusted date which are expected in next cycle.

    Args:
        setup_date: Date which was setup by user.

    Returns:
        Adjusted date which are expected to be in next Workflow cycle.
    """
    if self.repeat_every is None or self.unit is None:
      return self.first_work_day(setup_date)
    try:
      key = {
          self.WEEK_UNIT: "weeks",
          self.MONTH_UNIT: "months",
          self.DAY_UNIT: "days",
      }[self.unit]
    except KeyError:
      raise ValueError("Invalid Workflow unit")
    repeater = self.repeat_every * self.repeat_multiplier
    if self.unit == self.DAY_UNIT:
      weeks = repeater / self.WORK_WEEK_LEN
      days = repeater % self.WORK_WEEK_LEN
      # append weekends if it's needed
      days += ((setup_date.isoweekday() + days) > self.WORK_WEEK_LEN) * 2
      return setup_date + relativedelta.relativedelta(
          setup_date, weeks=weeks, days=days)
    calc_date = setup_date + relativedelta.relativedelta(
        setup_date,
        **{key: repeater}
    )
    if self.unit == self.MONTH_UNIT:
      # check if setup date is the last day of the month
      # and if it is then calc_date should be the last day of the month too
      setup_day = calendar.monthrange(setup_date.year, setup_date.month)[1]
      if setup_day == setup_date.day:
        calc_date = datetime.date(
            calc_date.year,
            calc_date.month,
            calendar.monthrange(calc_date.year, calc_date.month)[1])
    return self.first_work_day(calc_date)

  @orm.validates('repeat_every')
  def validate_repeat_every(self, _, value):
    """Validate repeat_every field for Workflow.

    repeat_every shouldn't have 0 value.
    """
    if value is not None and not isinstance(value, (int, long)):
      raise ValueError("'repeat_every' should be integer or 'null'")
    if value is not None and value <= 0:
      raise ValueError("'repeat_every' should be strictly greater than 0")
    return value

  @orm.validates('unit')
  def validate_unit(self, _, value):
    """Validate unit field for Workflow.

    Unit should have one of the value from VALID_UNITS list or None.
    """
    if value is not None and value not in self.VALID_UNITS:
      raise ValueError("'unit' field should be one of the "
                       "value: null, {}".format(", ".join(self.VALID_UNITS)))
    return value

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, _, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    It's not allowed to change is_verification_needed flag after creation.
    If is_verification_needed doesn't send,
    then is_verification_needed flag is True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if self.status != self.DRAFT and value != self.is_verification_needed:
      raise ValueError("is_verification_needed value isn't changeble "
                       "on workflow with '{}' status".format(self.status))
    return value

  @builder.simple_property
  def workflow_state(self):
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  _api_attrs = reflection.ApiAttributes(
      'workflow_people',
      reflection.Attribute('people', create=False, update=False),
      'task_groups',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      'is_verification_needed',
      'repeat_every',
      'unit',
      reflection.Attribute('next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('non_adjusted_next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('workflow_state',
                           create=False, update=False),
      reflection.Attribute('kind',
                           create=False, update=False),
  )

  # Import/export column aliases.
  _aliases = {
      "repeat_every": {
          "display_name": "Repeat Every",
          "description": "'Repeat Every' value\nmust fall into\nthe range "
                         "1~30"
                         "\nor '-' for None",
      },
      "unit": {
          "display_name": "Unit",
          "description": "Allowed values for\n'Unit' are:\n{}"
                         "\nor '-' for None".format("\n".join(VALID_UNITS)),
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
          "description": "This field is not changeable\nafter creation.",
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": {
          "display_name": "Force real-time email updates",
          "mandatory": False,
      },
      "workflow_owner": {
          "display_name": "Manager",
          "mandatory": True,
          "filter_by": "_filter_by_workflow_owner",
      },
      "workflow_member": {
          "display_name": "Member",
          "filter_by": "_filter_by_workflow_member",
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  @classmethod
  def _filter_by_workflow_owner(cls, predicate):
    return cls._filter_by_role("WorkflowOwner", predicate)

  @classmethod
  def _filter_by_workflow_member(cls, predicate):
    return cls._filter_by_role("WorkflowMember", predicate)

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    columns = ['title', 'description', 'notify_on_change',
               'notify_custom_message', 'end_date', 'start_date',
               'repeat_every', 'unit', 'is_verification_needed']
    target = self.copy_into(_other, columns, **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True)
        )

    return target

  @classmethod
  def eager_query(cls):
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups').undefer_group('TaskGroup_complete'),
        orm.subqueryload(
            'task_groups'
        ).subqueryload(
            "task_group_tasks"
        ).undefer_group(
            'TaskGroupTask_complete'
        ),
        orm.subqueryload('workflow_people'),
    )

  @classmethod
  def indexed_query(cls):
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Workflow_complete",
        ),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.
    If such workflow does not exist it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists.
    # BUGFIX: the unit check must render as SQL "unit IS NULL" via .is_();
    # the previous `Workflow.unit is None` was a python identity test on the
    # Column object that always evaluated to False, so an existing backlog
    # workflow was never found and a duplicate was created on every call.
    backlog_workflows = Workflow.query.filter(
        and_(Workflow.kind == "Backlog",
             # the following means one_time wf
             Workflow.unit.is_(None))
    ).all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    # Session.flush expects a sequence of instances, not a bare instance.
    db.session.flush([backlog_workflow])
    # create a cycle
    backlog_cycle = cycle.Cycle(description="Backlog workflow",
                                title="Backlog (one time)",
                                is_current=1,
                                status="Assigned",
                                start_date=None,
                                end_date=None,
                                context=backlog_workflow
                                .get_or_create_object_context(),
                                workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group\
        .CycleTaskGroup(description="Backlog workflow taskgroup",
                        title="Backlog TaskGroup",
                        cycle=backlog_cycle,
                        status="InProgress",
                        start_date=None,
                        end_date=None,
                        context=backlog_workflow
                        .get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    indexer = get_indexer()
    indexer.create_record(indexer.fts_record_for(backlog_workflow))
    return "Backlog workflow created"
def test_plan(cls):  # pylint: disable=no-self-argument
  """Test plan text; stored non-null, empty string by default, deferred."""
  column = db.Column(db.Text, nullable=False, default=u"")
  return deferred(column, cls.__name__)
class Control(WithLastAssessmentDate, HasObjectState, Roleable, Relatable,
              mixins.CustomAttributable, Personable, ControlCategorized,
              PublicDocumentable, AssertionCategorized,
              mixins.LastDeprecatedTimeboxed, mixins.TestPlanned,
              Commentable, WithSimilarityScore, base.ContextRBAC,
              mixins.BusinessObject, Indexed, mixins.Folderable,
              proposal.Proposalable, db.Model):
  """Control model definition."""
  __tablename__ = 'controls'

  # Plain columns; all deferred so they load only when explicitly requested.
  company_control = deferred(db.Column(db.Boolean), 'Control')
  directive_id = deferred(
      db.Column(db.Integer, db.ForeignKey('directives.id')), 'Control')
  kind_id = deferred(db.Column(db.Integer), 'Control')
  means_id = deferred(db.Column(db.Integer), 'Control')
  version = deferred(db.Column(db.String), 'Control')
  verify_frequency_id = deferred(db.Column(db.Integer), 'Control')
  fraud_related = deferred(db.Column(db.Boolean), 'Control')
  key_control = deferred(db.Column(db.Boolean), 'Control')
  active = deferred(db.Column(db.Boolean), 'Control')

  # Option relationships: each *_id column points at an Option row whose
  # ``role`` equals the literal in the primaryjoin.
  kind = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.kind_id) == Option.id, '
                  'Option.role == "control_kind")',
      uselist=False)
  means = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.means_id) == Option.id, '
                  'Option.role == "control_means")',
      uselist=False)
  verify_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.verify_frequency_id) == Option.id, '
                  'Option.role == "verify_frequency")',
      uselist=False)

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'active',
      'company_control',
      'directive',
      'fraud_related',
      'key_control',
      'kind',
      'means',
      'verify_frequency',
      'version',
  )

  # Full-text index fields; booleans are mapped to human-readable tokens.
  _fulltext_attrs = [
      'active',
      'company_control',
      'directive',
      attributes.BooleanFullTextAttr('fraud_related', 'fraud_related',
                                     true_value="yes", false_value="no"),
      attributes.BooleanFullTextAttr('key_control', 'key_control',
                                     true_value="key", false_value="non-key"),
      'kind',
      'means',
      'verify_frequency',
      'version',
  ]

  _sanitize_html = [
      'version',
  ]

  @classmethod
  def indexed_query(cls):
    """Query used by the full-text indexer; undefers all related groups."""
    return super(Control, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Control_complete"),
        orm.Load(cls).joinedload("directive").undefer_group(
            "Directive_complete"),
        orm.Load(cls).joinedload(
            'kind',
        ).undefer_group("Option_complete"),
        orm.Load(cls).joinedload(
            'means',
        ).undefer_group("Option_complete"),
        orm.Load(cls).joinedload(
            'verify_frequency',
        ).undefer_group("Option_complete"),
    )

  _include_links = []

  # Import/export column aliases.
  _aliases = {
      "kind": "Kind/Nature",
      "means": "Type/Means",
      "verify_frequency": "Frequency",
      "fraud_related": "Fraud Related",
      "key_control": {
          "display_name": "Significance",
          "description": "Allowed values are:\nkey\nnon-key\n---",
      },
      "test_plan": "Assessment Procedure",
  }

  @validates('kind', 'means', 'verify_frequency')
  def validate_control_options(self, key, option):
    """Validate control 'kind', 'means', 'verify_frequency'"""
    # 'verify_frequency' options carry the key itself as role; the other
    # two use a "control_" prefix ("control_kind" / "control_means").
    desired_role = key if key == 'verify_frequency' else 'control_' + key
    return validate_option(self.__class__.__name__, key, option,
                           desired_role)

  @classmethod
  def eager_query(cls):
    """Query used by REST reads; joins directive and all Option relations."""
    query = super(Control, cls).eager_query()
    return cls.eager_inclusions(query, Control._include_links).options(
        orm.joinedload('directive'),
        orm.joinedload('kind'),
        orm.joinedload('means'),
        orm.joinedload('verify_frequency'),
    )

  def log_json(self):
    """Add the mapped directive's display name to the revision log entry."""
    out_json = super(Control, self).log_json()
    # so that event log can refer to deleted directive
    if self.directive:
      out_json["mapped_directive"] = self.directive.display_name
    return out_json
class Directive(HasObjectState, Timeboxed, BusinessObject, db.Model):
  """Directive model; polymorphic base keyed on the ``meta_kind`` column."""

  __tablename__ = 'directives'

  version = deferred(db.Column(db.String), 'Directive')
  organization = deferred(db.Column(db.String), 'Directive')
  scope = deferred(db.Column(db.Text), 'Directive')
  kind_id = deferred(db.Column(db.Integer), 'Directive')
  audit_start_date = deferred(db.Column(db.DateTime), 'Directive')
  audit_frequency_id = deferred(db.Column(db.Integer), 'Directive')
  audit_duration_id = deferred(db.Column(db.Integer), 'Directive')
  meta_kind = db.Column(db.String)
  kind = deferred(db.Column(db.String), 'Directive')

  # TODO: FIX jost!
  # sections = db.relationship(
  #     'Section', backref='directive',
  #     order_by='Section.slug', cascade='all, delete-orphan')
  controls = db.relationship(
      'Control', backref='directive', order_by='Control.slug')

  # Option relations keyed by the Option.role literal in the primaryjoin.
  audit_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_frequency_id) == Option.id, '
                  'Option.role == "audit_frequency")',
      uselist=False,
  )
  audit_duration = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_duration_id) == Option.id, '
                  'Option.role == "audit_duration")',
      uselist=False,
  )

  __mapper_args__ = {
      'polymorphic_on': meta_kind
  }

  _publish_attrs = [
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  ]

  _sanitize_html = [
      'organization',
      'scope',
      'version',
  ]

  _include_links = []

  _aliases = {'kind': "Kind/Type", }

  @validates('kind')
  def validate_kind(self, key, value):
    """Normalize empty values to None; reject values outside VALID_KINDS.

    VALID_KINDS is not defined in this class — presumably supplied by the
    polymorphic subclasses; confirm before relying on it here.
    """
    if not value:
      return None
    if value not in self.VALID_KINDS:
      message = "Invalid value '{}' for attribute {}.{}.".format(
          value, self.__class__.__name__, key)
      raise ValueError(message)
    return value

  @validates('audit_duration', 'audit_frequency')
  def validate_directive_options(self, key, option):
    # For these two fields the expected Option role equals the key itself.
    return validate_option(self.__class__.__name__, key, option, key)

  @classmethod
  def eager_query(cls):
    """Query for REST reads; joins options and preloads controls."""
    from sqlalchemy import orm

    query = super(Directive, cls).eager_query()
    return cls.eager_inclusions(query, Directive._include_links).options(
        orm.joinedload('audit_frequency'),
        orm.joinedload('audit_duration'),
        orm.subqueryload('controls'))

  @staticmethod
  def _extra_table_args(cls):
    # Index the polymorphic discriminator column.
    return (
        db.Index('ix_{}_meta_kind'.format(cls.__tablename__), 'meta_kind'),
    )
def field_declaration(cls):  # pylint: disable=no-self-argument
  """Nullable foreign key to ``people.id``, deferred per declaring class."""
  person_fk = db.Column(
      db.Integer, db.ForeignKey('people.id'), nullable=True)
  return deferred(person_fk, cls.__name__)
class Directive(mixins.LastDeprecatedTimeboxed, Commentable,
                mixins.TestPlanned, mixins.base.ContextRBAC,
                mixins.BusinessObject, mixins.Folderable, db.Model):
  """Directive model"""
  __tablename__ = 'directives'

  version = deferred(db.Column(db.String), 'Directive')
  organization = deferred(db.Column(db.String), 'Directive')
  scope = deferred(db.Column(db.Text, nullable=False, default=u""),
                   'Directive')
  kind_id = deferred(db.Column(db.Integer), 'Directive')
  audit_start_date = deferred(db.Column(db.DateTime), 'Directive')
  audit_frequency_id = deferred(db.Column(db.Integer), 'Directive')
  audit_duration_id = deferred(db.Column(db.Integer), 'Directive')
  # Polymorphic discriminator (see __mapper_args__ below).
  meta_kind = db.Column(db.String)
  kind = deferred(db.Column(db.String), 'Directive')

  # TODO: FIX jost!
  # requirements = db.relationship(
  #     'Requirement', backref='directive',
  #     order_by='Requirement.slug', cascade='all, delete-orphan')
  controls = db.relationship('Control',
                             backref='directive',
                             order_by='Control.slug')

  # Option relations keyed by the Option.role literal in the primaryjoin.
  audit_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_frequency_id) == Option.id, '
                  'Option.role == "audit_frequency")',
      uselist=False,
  )
  audit_duration = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_duration_id) == Option.id, '
                  'Option.role == "audit_duration")',
      uselist=False,
  )

  __mapper_args__ = {'polymorphic_on': meta_kind}

  _api_attrs = reflection.ApiAttributes(
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  )

  _fulltext_attrs = [
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  ]

  @classmethod
  def indexed_query(cls):
    """Query for the full-text indexer; loads only the indexed columns."""
    return super(Directive, cls).indexed_query().options(
        orm.Load(cls).joinedload('audit_frequency'),
        orm.Load(cls).joinedload('audit_duration'),
        orm.Load(cls).subqueryload('controls'),
        orm.Load(cls).load_only(
            'audit_start_date',
            'kind',
            'organization',
            'scope',
            'version',
        ),
    )

  _sanitize_html = [
      'organization',
      'scope',
      'version',
  ]

  _include_links = []

  _aliases = {
      'kind': "Kind/Type",
      "documents_file": None,
  }

  @validates('kind')
  def validate_kind(self, key, value):
    """Normalize empty values to None; reject values outside VALID_KINDS.

    VALID_KINDS is not defined here — presumably provided by the
    polymorphic subclasses; confirm before relying on it.
    """
    if not value:
      return None
    if value not in self.VALID_KINDS:
      message = "Invalid value '{}' for attribute {}.{}.".format(
          value, self.__class__.__name__, key)
      raise ValueError(message)
    return value

  @validates('audit_duration', 'audit_frequency')
  def validate_directive_options(self, key, option):
    # For these two fields the expected Option role equals the key itself.
    return validate_option(self.__class__.__name__, key, option, key)

  @classmethod
  def eager_query(cls):
    """Query for REST reads; joins options and preloads controls."""
    query = super(Directive, cls).eager_query()
    return cls.eager_inclusions(query, Directive._include_links).options(
        orm.joinedload('audit_frequency'),
        orm.joinedload('audit_duration'),
        orm.subqueryload('controls'))

  @staticmethod
  def _extra_table_args(cls):
    # Index the polymorphic discriminator column.
    return (db.Index('ix_{}_meta_kind'.format(cls.__tablename__),
                     'meta_kind'), )
def parent_id(cls):  # pylint: disable=no-self-argument
  """Self-referential FK column pointing at the declaring class's table."""
  self_fk = db.ForeignKey('{0}.id'.format(cls.__tablename__))
  return deferred(db.Column(db.Integer, self_fk), cls.__name__)
class Audit(Snapshotable, clonable.Clonable, CustomAttributable, Personable,
            HasOwnContext, Relatable, Timeboxed, Noted, Described,
            Hyperlinked, WithContact, Titled, Stateful, Slugged, Indexed,
            db.Model):
  """Audit model."""

  __tablename__ = 'audits'
  _slug_uniqueness = False

  VALID_STATES = (
      u'Planned', u'In Progress', u'Manager Review',
      u'Ready for External Review', u'Completed'
  )

  # Child types allowed to be copied along with the audit in clone().
  CLONEABLE_CHILDREN = {"AssessmentTemplate"}

  report_start_date = deferred(db.Column(db.Date), 'Audit')
  report_end_date = deferred(db.Column(db.Date), 'Audit')
  audit_firm_id = deferred(
      db.Column(db.Integer, db.ForeignKey('org_groups.id')), 'Audit')
  audit_firm = db.relationship('OrgGroup', uselist=False)
  gdrive_evidence_folder = deferred(db.Column(db.String), 'Audit')
  program_id = deferred(
      db.Column(db.Integer, db.ForeignKey('programs.id'), nullable=False),
      'Audit')
  audit_objects = db.relationship(
      'AuditObject', backref='audit', cascade='all, delete-orphan')
  object_type = db.Column(
      db.String(length=250), nullable=False, default='Control')

  assessments = db.relationship('Assessment', backref='audit')
  issues = db.relationship('Issue', backref='audit')

  _publish_attrs = [
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'status',
      'gdrive_evidence_folder',
      'program',
      'object_type',
      PublishOnly('audit_objects')
  ]

  _fulltext_attrs = [
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'status',
      'gdrive_evidence_folder',
  ]

  @classmethod
  def indexed_query(cls):
    """Query for the full-text indexer; loads only the indexed columns."""
    return super(Audit, cls).indexed_query().options(
        orm.Load(cls).joinedload("audit_firm"),
        orm.Load(cls).load_only(
            'report_start_date',
            'report_end_date',
            'status',
            'gdrive_evidence_folder',
        ),
    )

  _sanitize_html = [
      'gdrive_evidence_folder',
      'description',
  ]

  _include_links = []

  # Import/export column aliases; dict values carry extra metadata.
  _aliases = {
      "program": {
          "display_name": "Program",
          "filter_by": "_filter_by_program",
          "mandatory": True,
      },
      "user_role:Auditor": {
          "display_name": "Auditors",
          "type": AttributeInfo.Type.USER_ROLE,
          "filter_by": "_filter_by_auditor",
      },
      "start_date": "Planned Start Date",
      "end_date": "Planned End Date",
      "report_start_date": "Planned Report Period from",
      "report_end_date": "Planned Report Period to",
      "contact": {
          "display_name": "Internal Audit Lead",
          "mandatory": True,
      },
      "secondary_contact": None,
      "notes": None,
      "url": None,
      "reference_url": None,
      "status": {
          "display_name": "Status",
          "mandatory": True,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      }
  }

  def _clone(self, source_object):
    """Clone audit and all relevant attributes.

    Keeps the internals of actual audit cloning and everything that is
    related to audit itself (auditors, audit firm, context setting,
    custom attribute values, etc.)
    """
    from ggrc_basic_permissions import create_audit_context

    data = {
        "title": source_object.generate_attribute("title"),
        "description": source_object.description,
        "audit_firm": source_object.audit_firm,
        "start_date": source_object.start_date,
        "end_date": source_object.end_date,
        "program": source_object.program,
        # New clone always starts in the first valid state (u'Planned').
        "status": source_object.VALID_STATES[0],
        "report_start_date": source_object.report_start_date,
        "report_end_date": source_object.report_end_date,
        "contact": source_object.contact
    }

    self.update_attrs(data)
    db.session.flush()

    create_audit_context(self)
    self._clone_auditors(source_object)
    self.clone_custom_attribute_values(source_object)

  def _clone_auditors(self, audit):
    """Clone auditors of specified audit.

    Args:
      audit: Audit instance
    """
    from ggrc_basic_permissions.models import Role, UserRole

    role = Role.query.filter_by(name="Auditor").first()
    auditors = [ur.person for ur in UserRole.query.filter_by(
        role=role, context=audit.context).all()]

    for auditor in auditors:
      user_role = UserRole(
          context=self.context,
          person=auditor,
          role=role
      )
      db.session.add(user_role)
    db.session.flush()

  def clone(self, source_id, mapped_objects=None):
    """Clone audit with specified whitelisted children.

    Children that can be cloned should be specified in CLONEABLE_CHILDREN.

    Args:
      mapped_objects: A list of related objects that should also be copied
      and linked to a new audit.
    """
    if not mapped_objects:
      mapped_objects = []

    source_object = Audit.query.get(source_id)
    self._clone(source_object)

    if any(mapped_objects):
      related_children = source_object.related_objects(mapped_objects)

      for obj in related_children:
        obj.clone(self)

  @classmethod
  def _filter_by_program(cls, predicate):
    """EXISTS subquery matching the audit's program by slug or title."""
    return Program.query.filter(
        (Program.id == Audit.program_id) &
        (predicate(Program.slug) | predicate(Program.title))
    ).exists()

  @classmethod
  def _filter_by_auditor(cls, predicate):
    """EXISTS subquery matching an Auditor's name or email in this context."""
    from ggrc_basic_permissions.models import Role, UserRole
    return UserRole.query.join(Role, Person).filter(
        (Role.name == "Auditor") &
        (UserRole.context_id == cls.context_id) &
        (predicate(Person.name) | predicate(Person.email))
    ).exists()

  @classmethod
  def eager_query(cls):
    """Query for REST reads; preloads program, people and audit objects."""
    query = super(Audit, cls).eager_query()
    return query.options(
        orm.joinedload('program'),
        orm.subqueryload('object_people').joinedload('person'),
        orm.subqueryload('audit_objects'),
    )
def status(cls):  # pylint: disable=no-self-argument
  """Lifecycle state column.

  The default is the (class-resolved) ``cls.default_status`` callable.
  """
  state_column = db.Column(
      db.String,
      default=cls.default_status,
      nullable=False,
  )
  return deferred(state_column, cls.__name__)
class Audit(Snapshotable, clonable.SingleClonable, PublicDocumentable,
            mixins.CustomAttributable, Personable, HasOwnContext, Relatable,
            Roleable, issuetracker_issue.IssueTracked, WithLastDeprecatedDate,
            mixins.Timeboxed, mixins.BusinessObject, mixins.Folderable,
            Indexed, db.Model):
  """Audit model."""

  __tablename__ = 'audits'
  _slug_uniqueness = False

  VALID_STATES = (u'Planned', u'In Progress', u'Manager Review',
                  u'Ready for External Review', u'Completed', u'Deprecated')

  # Child types allowed to be copied along with the audit in clone().
  CLONEABLE_CHILDREN = {"AssessmentTemplate"}

  report_start_date = deferred(db.Column(db.Date), 'Audit')
  report_end_date = deferred(db.Column(db.Date), 'Audit')
  audit_firm_id = deferred(
      db.Column(db.Integer, db.ForeignKey('org_groups.id')), 'Audit')
  audit_firm = db.relationship('OrgGroup', uselist=False)
  gdrive_evidence_folder = deferred(db.Column(db.String), 'Audit')
  program_id = deferred(
      db.Column(db.Integer, db.ForeignKey('programs.id'), nullable=False),
      'Audit')
  audit_objects = db.relationship(
      'AuditObject', backref='audit', cascade='all, delete-orphan')
  object_type = db.Column(
      db.String(length=250), nullable=False, default='Control')

  assessments = db.relationship('Assessment', backref='audit')
  issues = db.relationship('Issue', backref='audit')
  archived = deferred(db.Column(db.Boolean,
                                nullable=False, default=False), 'Audit')

  assessment_templates = db.relationship('AssessmentTemplate',
                                         backref='audit')

  _api_attrs = reflection.ApiAttributes(
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'gdrive_evidence_folder',
      'program',
      'object_type',
      'archived',
      # read-only via the API:
      reflection.Attribute('issue_tracker', create=False, update=False),
      reflection.Attribute('audit_objects', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'gdrive_evidence_folder',
  ]

  @classmethod
  def indexed_query(cls):
    """Undefer all Audit columns for full-text indexing."""
    return super(Audit, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Audit_complete", ),
    )

  _sanitize_html = [
      'gdrive_evidence_folder',
      'description',
  ]

  _include_links = []

  # Import/export column aliases.
  _aliases = {
      "program": {
          "display_name": "Program",
          "filter_by": "_filter_by_program",
          "mandatory": True,
      },
      "start_date": "Planned Start Date",
      "end_date": "Planned End Date",
      "report_start_date": "Planned Report Period from",
      "report_end_date": "Planned Report Period to",
      "notes": None,
      "reference_url": None,
      "archived": {
          "display_name": "Archived",
          "mandatory": False
      },
      "status": {
          "display_name": "State",
          "mandatory": True,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      }
  }

  @simple_property
  def issue_tracker(self):
    """Returns representation of issue tracker related info as a dict."""
    issue_obj = issuetracker_issue.IssuetrackerIssue.get_issue(
        'Audit', self.id)
    return issue_obj.to_dict() if issue_obj is not None else {}

  def _clone(self, source_object):
    """Clone audit and all relevant attributes.

    Keeps the internals of actual audit cloning and everything that is
    related to audit itself (auditors, audit firm, context setting,
    custom attribute values, etc.)
    """
    from ggrc_basic_permissions import create_audit_context

    data = {
        "title": source_object.generate_attribute("title"),
        "description": source_object.description,
        "audit_firm": source_object.audit_firm,
        "start_date": source_object.start_date,
        "end_date": source_object.end_date,
        "last_deprecated_date": source_object.last_deprecated_date,
        "program": source_object.program,
        # New clone always starts in the first valid state (u'Planned').
        "status": source_object.VALID_STATES[0],
        "report_start_date": source_object.report_start_date,
        "report_end_date": source_object.report_end_date
    }

    self.update_attrs(data)
    db.session.flush()

    create_audit_context(self)
    self.clone_acls(source_object)
    self.clone_custom_attribute_values(source_object)

  def clone_acls(self, audit):
    """Clone acl roles like auditors and audit captains

    Args:
      audit: Audit instance
    """
    for acl in audit.access_control_list:
      data = {
          "person": acl.person,
          "ac_role": acl.ac_role,
          "object": self,
          "context": acl.context,
      }
      new_acl = AccessControlList(**data)
      db.session.add(new_acl)

  def clone(self, source_id, mapped_objects=None):
    """Clone audit with specified whitelisted children.

    Children that can be cloned should be specified in CLONEABLE_CHILDREN.

    Args:
      mapped_objects: A list of related objects that should also be copied
      and linked to a new audit.
    """
    if not mapped_objects:
      mapped_objects = []

    source_object = Audit.query.get(source_id)
    self._clone(source_object)

    if any(mapped_objects):
      related_children = source_object.related_objects(mapped_objects)

      for obj in related_children:
        obj.clone(self)

  @orm.validates("archived")
  def archived_check(self, _, value):
    """Only Admins and Program Managers are allowed to (un)archive Audit."""
    user = get_current_user()
    if getattr(user, 'system_wide_role', None) in SystemWideRoles.admins:
      return value

    # Non-admins may only flip the flag if they hold the "Program Managers"
    # role on the owning program.
    if self.archived is not None and self.archived != value and \
       not any(acl for acl in list(self.program.access_control_list)
               if acl.ac_role.name == "Program Managers" and
               acl.person.id == user.id):
      raise Forbidden()
    return value

  @classmethod
  def _filter_by_program(cls, predicate):
    """Helper for filtering by program"""
    return Program.query.filter(
        (Program.id == Audit.program_id) &
        (predicate(Program.slug) | predicate(Program.title))).exists()

  @classmethod
  def eager_query(cls):
    """Query for REST reads; preloads program, people and audit objects."""
    query = super(Audit, cls).eager_query()
    return query.options(
        orm.joinedload('program'),
        orm.subqueryload('object_people').joinedload('person'),
        orm.subqueryload('audit_objects'),
    )
def slug(cls):  # pylint: disable=no-self-argument
  """Required (non-null) slug column, deferred per declaring class."""
  slug_column = db.Column(db.String, nullable=False)
  return deferred(slug_column, cls.__name__)
class Workflow(mixins.CustomAttributable, HasOwnContext, mixins.Timeboxed,
               mixins.Described, mixins.Titled, mixins.Notifiable,
               mixins.Stateful, mixins.Slugged, Indexed, db.Model):
  """Basic Workflow first class object.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  VALID_STATES = [u"Draft", u"Active", u"Inactive"]

  # valid Frequency to user readable values mapping
  VALID_FREQUENCIES = {
      "one_time": "one time",
      "weekly": "weekly",
      "monthly": "monthly",
      "quarterly": "quarterly",
      "annually": "annually"
  }

  @classmethod
  def default_frequency(cls):
    """Frequency used when none is supplied."""
    return 'one_time'

  @orm.validates('frequency')
  def validate_frequency(self, _, value):
    """Make sure that value is listed in valid frequencies.

    Args:
      value: A string value for requested frequency

    Returns:
      default_frequency which is 'one_time' if the value is None, or the
      value itself.

    Raises:
      Value error, if the value is not in the VALID_FREQUENCIES
    """
    if value is None:
      value = self.default_frequency()
    if value not in self.VALID_FREQUENCIES:
      message = u"Invalid state '{}'".format(value)
      raise ValueError(message)
    return value

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=True), 'Workflow')

  frequency = deferred(
      db.Column(db.String, nullable=True, default=default_frequency),
      'Workflow'
  )

  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')

  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  workflow_people = db.relationship(
      'WorkflowPerson', backref='workflow', cascade='all, delete-orphan')
  people = association_proxy(
      'workflow_people', 'person', 'WorkflowPerson')

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')

  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)

  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # this is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean,
      default=IS_VERIFICATION_NEEDED_DEFAULT,
      nullable=False)

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, key, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    It's not allowed to change is_verification_needed flag after creation.
    If is_verification_needed doesn't send, then is_verification_needed
    flag is True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if value != self.is_verification_needed:
      # NOTE(review): typo "changeble" left untouched — the message text is
      # observable behavior.
      raise ValueError("is_verification_needed value isn't changeble")
    return value

  @builder.simple_property
  def workflow_state(self):
    """Aggregate state computed from this workflow's cycles."""
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  _publish_attrs = [
      'workflow_people',
      reflection.PublishOnly('people'),
      'task_groups',
      'frequency',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      'is_verification_needed',
      reflection.PublishOnly('next_cycle_start_date'),
      reflection.PublishOnly('non_adjusted_next_cycle_start_date'),
      reflection.PublishOnly('workflow_state'),
      reflection.PublishOnly('kind'),
  ]

  _fulltext_attrs = [
      ValueMapFullTextAttr(
          "frequency",
          "frequency",
          value_map=VALID_FREQUENCIES,
      )
  ]

  # Import/export column aliases.
  _aliases = {
      "frequency": {
          "display_name": "Frequency",
          "mandatory": True,
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": "Force real-time email updates",
      "workflow_owner": {
          "display_name": "Manager",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "mandatory": True,
          "filter_by": "_filter_by_workflow_owner",
      },
      "workflow_member": {
          "display_name": "Member",
          "type": reflection.AttributeInfo.Type.USER_ROLE,
          "filter_by": "_filter_by_workflow_member",
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  @classmethod
  def _filter_by_workflow_owner(cls, predicate):
    return cls._filter_by_role("WorkflowOwner", predicate)

  @classmethod
  def _filter_by_workflow_member(cls, predicate):
    return cls._filter_by_role("WorkflowMember", predicate)

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    columns = [
        'title', 'description', 'notify_on_change', 'notify_custom_message',
        'frequency', 'end_date', 'start_date'
    ]
    target = self.copy_into(_other, columns, **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True)
        )

    return target

  @classmethod
  def eager_query(cls):
    """Query for REST reads; preloads cycles, task groups and people."""
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups'),
        orm.subqueryload('workflow_people'),
    )

  @classmethod
  def indexed_query(cls):
    """Undefer all Workflow columns for full-text indexing."""
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Workflow_complete",
        ),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.

    If such workflow does not exist it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists
    backlog_workflows = Workflow.query\
        .filter(and_(Workflow.kind == "Backlog",
                     Workflow.frequency == "one_time"))\
        .all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                frequency="one_time",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    # NOTE(review): Session.flush takes an iterable ``objects`` argument;
    # passing the instance directly looks suspect — confirm.
    db.session.flush(backlog_workflow)
    # create a cycle
    backlog_cycle = cycle.Cycle(description="Backlog workflow",
                                title="Backlog (one time)",
                                is_current=1,
                                status="Assigned",
                                start_date=None,
                                end_date=None,
                                context=backlog_workflow
                                .get_or_create_object_context(),
                                workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group\
        .CycleTaskGroup(description="Backlog workflow taskgroup",
                        title="Backlog TaskGroup",
                        cycle=backlog_cycle,
                        status="InProgress",
                        start_date=None,
                        end_date=None,
                        context=backlog_workflow
                        .get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    indexer = get_indexer()
    indexer.create_record(indexer.fts_record_for(backlog_workflow))
    return "Backlog workflow created"
def test_plan(cls):  # pylint: disable=no-self-argument
  """Optional free-form test plan text, deferred per declaring class."""
  plan_column = db.Column(db.Text)
  return deferred(plan_column, cls.__name__)
class BackgroundTask(base.ContextRBAC, Base, Stateful, db.Model):
  """Background task model."""
  __tablename__ = 'background_tasks'

  VALID_STATES = [
      "Pending",
      "Running",
      "Success",
      "Failure"
  ]
  name = db.Column(db.String, nullable=False, unique=True)
  # CompressedType columns hold pickled/compressed payloads; all deferred.
  parameters = deferred(db.Column(CompressedType), 'BackgroundTask')
  payload = deferred(db.Column(CompressedType), 'BackgroundTask')
  result = deferred(db.Column(CompressedType), 'BackgroundTask')

  bg_operation = db.relationship(
      "BackgroundOperation",
      backref='bg_task',
      uselist=False
  )

  _api_attrs_complete = reflection.ApiAttributes("id", "status", "type")

  _aliases = {
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are: \n{}".format('\n'.join(VALID_STATES))
      }
  }

  def start(self):
    """Mark the current task as running."""
    self.status = "Running"
    db.session.add(self)
    db.session.commit()

  def finish(self, status, result):
    """Finish the current bg task."""
    # Ensure to not commit any not-yet-committed changes
    db.session.rollback()

    # Persist either a captured Flask Response or a plain value as the
    # task result triple (content, status code, headers).
    if isinstance(result, Response):
      self.result = {'content': result.response[0],
                     'status_code': result.status_code,
                     'headers': result.headers.items()}
    else:
      self.result = {'content': result,
                     'status_code': 200,
                     'headers': [('Content-Type', 'text/html')]}

    self.status = status
    db.session.add(self)
    db.session.commit()

  def make_response(self, default=None):
    """Create task status response."""
    if self.result is None:
      return default
    from ggrc.app import app
    return app.make_response((self.result['content'],
                              self.result['status_code'],
                              self.result['headers']))

  def task_scheduled_response(self):
    """Create success response with info about scheduled task."""
    from ggrc.app import app
    return self.make_response(
        app.make_response((
            json.dumps("scheduled %s" % self.name),
            200,
            [("Content-Type", "application/json")]
        ))
    )

  def get_content(self):
    """Get result content of the task."""
    try:
      # result['content'] is expected to be a JSON string; fall back to an
      # empty dict on any malformed or missing content.
      content_json = self.result.get("content", "{}") if self.result else "{}"
      content = json.loads(content_json)
    except (TypeError, ValueError):
      content = {}
    return content
def description(cls):  # pylint: disable=no-self-argument
  """Optional free-form description text, deferred per declaring class."""
  text_column = db.Column(db.Text)
  return deferred(text_column, cls.__name__)
def os_state(cls):  # pylint: disable=no-self-argument
  """os_state attribute is used to track object review status"""
  state_column = db.Column(db.String, nullable=False, default='Unreviewed')
  return deferred(state_column, cls.__name__)