class Attributes(base.Base, db.Model):
  """Attributes model.

  Generic attribute storage: a row holds a single attribute value
  (string, integer or datetime column — one of them is used per row) for
  the object identified by ``object_id``/``object_type``.
  """
  __tablename__ = 'attributes'

  attribute_id = db.Column(db.Integer, primary_key=True)
  # object id should eventually be a foreign key to objects table
  object_id = db.Column(db.Integer)
  object_type = db.Column(db.Unicode(250))
  attribute_definition_id = db.Column(
      db.Integer,
      db.ForeignKey('attribute_definitions.attribute_definition_id')
  )
  attribute_template_id = db.Column(
      db.Integer,
      db.ForeignKey('attribute_templates.attribute_template_id')
  )
  # value string is not nullable to avoid weird filtering behavior. This is
  # different than on data platform, but needed here because we decided to
  # make all text fields mandatory with default empty string
  value_string = db.Column(db.UnicodeText, nullable=False, default=u"")
  value_integer = db.Column(db.Integer)
  value_datetime = db.Column(db.DateTime)
  # ggrc specific code, needs to be added back to DP.
  source_type = db.Column(db.Unicode(250))
  source_id = db.Column(db.Integer)
  source_attr = db.Column(db.Unicode(250))
  namespace_id = db.Column(
      db.Integer, db.ForeignKey('namespaces.namespace_id'), nullable=True)
  deleted = db.Column(db.Boolean, default=False)
  version = db.Column(db.Integer)

  attribute_definition = orm.relationship(
      "AttributeDefinitions",
      backref="attributes"
  )
  attribute_template = orm.relationship(
      "AttributeTemplates",
      backref="attributes"
  )
  namespace = orm.relationship("Namespaces", backref="attributes")

  @declared_attr
  def __table_args__(cls):  # pylint: disable=no-self-argument
    # Indexes cover lookups by the source triple and by typed values; the
    # unique constraint guarantees at most one attribute row per
    # object/definition/template combination.
    return (
        db.Index("ix_source", "source_type", "source_id", "source_attr"),
        # db.Index("value_string"), not needed yet
        db.Index("ix_value_integer", "value_integer"),
        db.Index("ix_value_datetime", "value_datetime"),
        db.UniqueConstraint(
            "object_id",
            "object_type",
            "attribute_definition_id",
            "attribute_template_id",
            name="uq_attributes",
        ),
    )
def folder(cls):
  """Mandatory text column holding the object's folder id (empty default)."""
  folder_column = db.Column(db.Text, nullable=False, default="")
  return deferred(folder_column, cls.__name__)
def field_declaration(cls):  # pylint: disable=no-self-argument
  """Deferred nullable ``DateTime`` column, grouped under the class name."""
  datetime_column = db.Column(db.DateTime, nullable=True)
  return deferred(datetime_column, cls.__name__)
def verified_date(cls):  # pylint: disable=no-self-argument
  """Deferred nullable ``DateTime`` column, grouped under the class name."""
  verified_column = db.Column(db.DateTime, nullable=True)
  return deferred(verified_column, cls.__name__)
def title(cls):  # pylint: disable=no-self-argument
  """Mandatory string column for the object's title."""
  title_column = db.Column(db.String, nullable=False)
  return deferred(title_column, cls.__name__)
def modified_by_id(cls):  # pylint: disable=no-self-argument
  """Id of the user who did the last modification of the object."""
  modifier_id_column = db.Column(db.Integer)
  return modifier_id_column
def last_deprecated_date(cls):  # pylint: disable=no-self-argument
  """Deferred ``Date`` column, grouped under the class name."""
  deprecated_column = db.Column(db.Date)
  return deferred(deprecated_column, cls.__name__)
class Evidence(Roleable,
               Relatable,
               mixins.Titled,
               bfh.BeforeFlushHandleable,
               Statusable,
               mixins.WithLastDeprecatedDate,
               comment.Commentable,
               WithAutoDeprecation,
               base.ContextRBAC,
               mixins.Slugged,
               mixin.Indexed,
               db.Model):
  """Evidence (Audit-scope URLs, FILE's) model."""

  __tablename__ = "evidence"

  _title_uniqueness = False

  # Supported evidence kinds: a plain URL or a Google Drive file.
  URL = "URL"
  FILE = "FILE"
  VALID_EVIDENCE_KINDS = [URL, FILE]

  START_STATE = 'Active'
  DEPRECATED = 'Deprecated'

  VALID_STATES = (
      START_STATE,
      DEPRECATED,
  )

  kind = deferred(db.Column(db.Enum(*VALID_EVIDENCE_KINDS),
                            default=URL,
                            nullable=False),
                  "Evidence")
  # Id of the original gdrive file this evidence is copied from
  # (see exec_gdrive_file_copy_flow).
  source_gdrive_id = deferred(db.Column(db.String, nullable=False,
                                        default=u""),
                              "Evidence")
  # Id of the gdrive file the evidence points at (set after the copy).
  gdrive_id = deferred(db.Column(db.String, nullable=False, default=u""),
                       "Evidence")
  link = deferred(db.Column(db.String), "Evidence")
  description = deferred(db.Column(db.Text, nullable=False, default=u""),
                         "Evidence")
  # Override from Commentable mixin (can be removed after GGRC-5192)
  send_by_default = db.Column(db.Boolean, nullable=False, default=True)

  _api_attrs = reflection.ApiAttributes(
      "title",
      reflection.Attribute("link", update=False),
      reflection.Attribute("source_gdrive_id", update=False),
      "description",
      "status",
      reflection.Attribute("kind", update=False),
      reflection.Attribute("parent_obj", read=False, update=False),
      reflection.Attribute("archived", create=False, update=False),
      reflection.Attribute("is_uploaded", read=False, update=False),
  )

  _fulltext_attrs = [
      "title",
      "link",
      "description",
      "kind",
      "status",
      "archived"
  ]

  AUTO_REINDEX_RULES = [
      mixin.ReindexRule("Audit", lambda x: x.assessments, ["archived"]),
  ]

  _sanitize_html = [
      "title",
      "description",
  ]

  _aliases = {
      "title": "Title",
      "link": "Link",
      "description": "Description",
      "kind": "Type",
      "archived": {
          "display_name": "Archived",
          "mandatory": False
      },
  }

  _allowed_parents = {"Assessment", "Audit"}
  FILE_NAME_SEPARATOR = "_ggrc"

  @orm.validates("kind")
  def validate_kind(self, key, kind):
    """Returns correct option, otherwise raises an error.

    ``None`` falls back to the default ``URL`` kind.
    """
    if kind is None:
      kind = self.URL
    if kind not in self.VALID_EVIDENCE_KINDS:
      raise exceptions.ValidationError(
          "Invalid value for attribute {attr}. "
          "Expected options are `{url}`, `{file}`".format(
              attr=key,
              url=self.URL,
              file=self.FILE
          )
      )
    return kind

  @classmethod
  def _populate_query(cls, query):
    # Prefetch related assessment/audit (needed by `archived`) and undefer
    # all deferred Evidence columns in one pass.
    return query.options(
        orm.subqueryload(cls._related_assessment),
        orm.subqueryload(cls._related_audit).load_only("archived"),
        orm.Load(cls).undefer_group("Evidence_complete", ),
    )

  @classmethod
  def indexed_query(cls):
    return cls._populate_query(super(Evidence, cls).indexed_query())

  @classmethod
  def eager_query(cls):
    return cls._populate_query(super(Evidence, cls).eager_query())

  @simple_property
  def archived(self):
    """Evidence archived if related Assessment/Audit is archived"""
    # pylint: disable=unsubscriptable-object
    if self._related_assessment:
      return self._related_assessment.audit.archived
    elif self._related_audit:
      return self._related_audit.archived
    return False

  def log_json(self):
    tmp = super(Evidence, self).log_json()
    tmp["type"] = "Evidence"
    return tmp

  @simple_property
  def is_uploaded(self):
    """This flag is used to know if file uploaded from a local user folder.

    In that case we need just rename file, not copy.
    """
    return self._is_uploaded if hasattr(self, "_is_uploaded") else False

  @is_uploaded.setter
  def is_uploaded(self, value):
    # pylint: disable=attribute-defined-outside-init
    self._is_uploaded = value

  @simple_property
  def parent_obj(self):
    # Transient {"id": ..., "type": ...} dict posted through the API;
    # consumed by _map_parent() during before-flush handling.
    return self._parent_obj

  @parent_obj.setter
  def parent_obj(self, value):
    # pylint: disable=attribute-defined-outside-init
    self._parent_obj = value

  def _get_parent_obj(self):
    """Get parent object specified in the `parent_obj` payload.

    Raises:
      exceptions.ValidationError: if `id`/`type` are missing or the type
        is not in `_allowed_parents`.
      ValueError: if the referenced object does not exist.
    """
    if "id" not in self._parent_obj:
      raise exceptions.ValidationError("'id' is mandatory for parent_obj")
    if "type" not in self._parent_obj:
      raise exceptions.ValidationError(
          "'type' is mandatory for parent_obj")
    if self._parent_obj["type"] not in self._allowed_parents:
      raise exceptions.ValidationError("Allowed types are: {}.".format(
          ", ".join(self._allowed_parents)))

    parent_type = self._parent_obj["type"]
    parent_id = self._parent_obj["id"]
    obj = referenced_objects.get(parent_type, parent_id)

    if not obj:
      raise ValueError("Parent object not found: {type} {id}".format(
          type=parent_type, id=parent_id))

    return obj

  @staticmethod
  def _build_file_name_postfix(parent_obj):
    """Build postfix for given parent object"""
    postfix_parts = [Evidence.FILE_NAME_SEPARATOR, parent_obj.slug]

    related_snapshots = parent_obj.related_objects(_types=["Snapshot"])
    related_snapshots = sorted(related_snapshots, key=lambda it: it.id)

    # Only snapshots matching the parent's assessment_type contribute.
    slugs = (sn.revision.content["slug"] for sn in related_snapshots
             if sn.child_type == parent_obj.assessment_type)

    postfix_parts.extend(slugs)
    postfix_sting = "_".join(postfix_parts).lower()

    return postfix_sting

  def _build_relationship(self, parent_obj):
    """Build relationship between evidence and parent object"""
    from ggrc.models import all_models
    rel = all_models.Relationship(source=parent_obj, destination=self)
    db.session.add(rel)
    signals.Restful.model_put.send(rel.__class__, obj=rel, service=self)

  def _update_fields(self, response):
    """Update fields of evidence with values of the copied file"""
    self.gdrive_id = response["id"]
    self.link = response["webViewLink"]
    self.title = response["name"]
    self.kind = Evidence.FILE

  @staticmethod
  def _get_folder(parent):
    # Parent may not expose a folder; fall back to the empty (root) id.
    return parent.folder if hasattr(parent, "folder") else ""

  def _map_parent(self):
    """Maps evidence to parent object

    If Document.FILE and source_gdrive_id => copy file
    """
    if self.is_with_parent_obj():
      parent = self._get_parent_obj()
      if self.kind == Evidence.FILE and self.source_gdrive_id:
        self.exec_gdrive_file_copy_flow(parent)
      self._build_relationship(parent)
      self._parent_obj = None

  def exec_gdrive_file_copy_flow(self, parent):
    """Execute google gdrive file copy flow

    Build file name, destination folder and copy file to that folder.
    After coping fills evidence object fields with new gdrive URL
    """
    postfix = self._build_file_name_postfix(parent)
    folder_id = self._get_folder(parent)
    file_id = self.source_gdrive_id
    from ggrc.gdrive.file_actions import process_gdrive_file
    response = process_gdrive_file(file_id, folder_id, postfix,
                                   separator=Evidence.FILE_NAME_SEPARATOR,
                                   is_uploaded=self.is_uploaded)
    self._update_fields(response)

  def is_with_parent_obj(self):
    return bool(hasattr(self, "_parent_obj") and self._parent_obj)

  def add_admin_role(self):
    """Add current user as Evidence admin"""
    from ggrc.models import all_models
    admin_role = db.session.query(all_models.AccessControlRole).filter_by(
        name="Admin", object_type=self.type).one()
    self.extend_access_control_list([{
        "ac_role": admin_role,
        "person": login.get_current_user()
    }])

  def handle_before_flush(self):
    """Handler that called before SQLAlchemy flush event"""
    self._map_parent()

  @declared_attr
  def _related_audit(cls):  # pylint: disable=no-self-argument
    """Audits mapped to Evidence"""
    def primary_join_function():
      # Evidence may sit on either side of the Relationship row.
      return or_(
          and_(Relationship.source_id == cls.id,
               Relationship.source_type == cls.__name__,
               Relationship.destination_type == "Audit"),
          and_(Relationship.destination_id == cls.id,
               Relationship.destination_type == cls.__name__,
               Relationship.source_type == "Audit")
      )

    def secondary_join_function():
      from ggrc.models import all_models
      return or_(
          and_(
              all_models.Audit.id == Relationship.destination_id,
              Relationship.destination_type == "Audit",
          ),
          and_(
              all_models.Audit.id == Relationship.source_id,
              Relationship.source_type == "Audit",
          ))

    return db.relationship("Audit",
                           primaryjoin=primary_join_function,
                           secondary=Relationship.__table__,
                           secondaryjoin=secondary_join_function,
                           viewonly=True,
                           uselist=False)

  @declared_attr
  def _related_assessment(cls):  # pylint: disable=no-self-argument
    """Assessments mapped to Evidence"""
    def primary_join_function():
      # Evidence may sit on either side of the Relationship row.
      return or_(
          and_(Relationship.source_id == cls.id,
               Relationship.source_type == cls.__name__,
               Relationship.destination_type == "Assessment"),
          and_(Relationship.destination_id == cls.id,
               Relationship.destination_type == cls.__name__,
               Relationship.source_type == "Assessment")
      )

    def secondary_join_function():
      from ggrc.models import all_models
      return or_(
          and_(
              all_models.Assessment.id == Relationship.destination_id,
              Relationship.destination_type == "Assessment",
          ),
          and_(
              all_models.Assessment.id == Relationship.source_id,
              Relationship.source_type == "Assessment",
          ))

    return db.relationship("Assessment",
                           primaryjoin=primary_join_function,
                           secondary=Relationship.__table__,
                           secondaryjoin=secondary_join_function,
                           viewonly=True,
                           uselist=False)
class Relationship(base.ContextRBAC, Base, db.Model):
  """Relationship model.

  A generic typed link between any two objects, identified by the
  (source_type, source_id) and (destination_type, destination_id) pairs.
  """
  __tablename__ = 'relationships'
  source_id = db.Column(db.Integer, nullable=False)
  source_type = db.Column(db.String, nullable=False)
  destination_id = db.Column(db.Integer, nullable=False)
  destination_type = db.Column(db.String, nullable=False)
  parent_id = db.Column(
      db.Integer,
      db.ForeignKey('relationships.id', ondelete='SET NULL'),
      nullable=True,
  )
  parent = db.relationship(
      lambda: Relationship,
      remote_side=lambda: Relationship.id
  )
  automapping_id = db.Column(
      db.Integer,
      db.ForeignKey('automappings.id', ondelete='CASCADE'),
      nullable=True,
  )
  # Relationships created by the external application may only be deleted
  # by it (see validate_delete).
  is_external = db.Column(db.Boolean, nullable=False, default=False)

  def get_related_for(self, object_type):
    """Return related object for sent type."""
    if object_type == self.source_type:
      return self.destination
    if object_type == self.destination_type:
      return self.source

  @property
  def source_attr(self):
    # Polymorphic association attribute name, e.g. "Audit_source".
    return '{0}_source'.format(self.source_type)

  @property
  def source(self):
    """Source getter."""
    if not hasattr(self, self.source_attr):
      logger.warning(
          "Relationship source attr '%s' does not exist. "
          "This indicates invalid data in our database!",
          self.source_attr
      )
      return None
    return getattr(self, self.source_attr)

  @source.setter
  def source(self, value):
    self.source_id = getattr(value, 'id', None)
    self.source_type = getattr(value, 'type', None)
    self.validate_relatable_type("source", value)
    return setattr(self, self.source_attr, value)

  @property
  def destination_attr(self):
    # Polymorphic association attribute name, e.g. "Audit_destination".
    return '{0}_destination'.format(self.destination_type)

  @property
  def destination(self):
    """Destination getter."""
    if not hasattr(self, self.destination_attr):
      logger.warning(
          "Relationship destination attr '%s' does not exist. "
          "This indicates invalid data in our database!",
          self.destination_attr
      )
      return None
    return getattr(self, self.destination_attr)

  @destination.setter
  def destination(self, value):
    self.destination_id = getattr(value, 'id', None)
    self.destination_type = getattr(value, 'type', None)
    self.validate_relatable_type("destination", value)
    return setattr(self, self.destination_attr, value)

  @classmethod
  def find_related(cls, object1, object2):
    """Return the first relationship between the two objects, or None."""
    return cls.get_related_query(object1, object2).first()

  @classmethod
  def get_related_query_by_type_id(cls, type1, id1, type2, id2,
                                   strict_id=True):
    """Return query to find relationship(s)

    This function prepares query for the following cases:
    1) Find relationships between 2 objects. In this case strict_id=True
    2) Find relationships between one object and other objects of specified
       type. In this case strict_id=False

    :param type1: type of first object
    :param id1: id of first object
    :param type2: type of second object
    :param id2: id of second object
    :param strict_id: True if id must be specified, else False
    :return: prepared query
    """
    def predicate(src_type, src_id, dst_type, dst_id):
      """Build a filter for one (source, destination) orientation."""
      filters = [
          Relationship.source_type == src_type,
          Relationship.destination_type == dst_type
      ]
      if src_id is not None:
        filters.append(Relationship.source_id == src_id)
      if dst_id is not None:
        filters.append(Relationship.destination_id == dst_id)
      return and_(*filters)

    if (strict_id and None in (id1, id2)) or None in (type1, type2):
      # One of the following occurred:
      # 1) One of ids is None, but it's requested to have ids specified
      # 2) One of types is None
      # Make filter to return empty list
      return Relationship.query.filter(false())

    # A relationship may be stored in either orientation, so check both.
    return Relationship.query.filter(
        or_(predicate(type1, id1, type2, id2),
            predicate(type2, id2, type1, id1))
    )

  @classmethod
  def get_related_query(cls, object1, object2):
    """Return query for relationships between the two given objects."""
    return cls.get_related_query_by_type_id(
        type1=object1.type, id1=object1.id,
        type2=object2.type, id2=object2.id,
        strict_id=False)

  @staticmethod
  def _extra_table_args(cls):
    return (
        db.UniqueConstraint(
            'source_id', 'source_type', 'destination_id', 'destination_type'),
        db.Index('ix_relationships_source', 'source_type', 'source_id'),
        db.Index('ix_relationships_destination',
                 'destination_type', 'destination_id'),
    )

  _api_attrs = reflection.ApiAttributes(
      'source',
      'destination',
      reflection.Attribute('is_external', create=True, update=False,
                           read=True),
  )

  def _display_name(self):
    return "{}:{} <-> {}:{}".format(self.source_type, self.source_id,
                                    self.destination_type,
                                    self.destination_id)

  def validate_relatable_type(self, field, value):
    """Validate the object being assigned to `field` ("source"/"destination").

    Rejects None and non-Relatable values, and forbids mapping a Snapshot
    to the very object it is a snapshot of (in either orientation).

    Raises:
      ValidationError: on any of the violations above, or when the type
        pair itself is not allowed (via validate_relation_by_type).
    """
    if value is None:
      raise ValidationError(u"{}.{} can't be None."
                            .format(self.__class__.__name__, field))
    if not isinstance(value, Relatable):
      raise ValidationError(u"You are trying to create relationship with not "
                            u"Relatable type: {}".format(value.type))
    # The "target" is the opposite side of the one being assigned.
    tgt_type = self.source_type
    tgt_id = self.source_id
    self.validate_relation_by_type(self.source_type, self.destination_type)
    if field == "source":
      tgt_type = self.destination_type
      tgt_id = self.destination_id
    if value and getattr(value, "type") == "Snapshot":
      if not tgt_type:
        return
      if value.child_type == tgt_type and value.child_id == tgt_id:
        raise ValidationError(
            u"Invalid source-destination types pair for {}: "
            u"source_type={!r}, destination_type={!r}"
            .format(self.type, self.source_type, self.destination_type)
        )
    # else check if the opposite is a Snapshot
    elif tgt_type == "Snapshot":
      from ggrc.models import Snapshot
      snapshot = db.session.query(Snapshot).get(tgt_id)
      if snapshot.child_type == value.type and snapshot.child_id == value.id:
        raise ValidationError(
            u"Invalid source-destination types pair for {}: "
            u"source_type={!r}, destination_type={!r}"
            .format(self.type, self.source_type, self.destination_type)
        )

  @staticmethod
  def _check_relation_types_group(type1, type2, group1, group2):
    """Checks if 2 types belong to 2 groups

    Args:
      type1: name of model 1
      type2: name of model 2
      group1: Collection of model names which belong to group 1
      group2: Collection of model names which belong to group 2
    Return:
      True if types belong to different groups, else False
    """
    if (type1 in group1 and type2 in group2) or (type2 in group1 and
                                                 type1 in group2):
      return True
    return False

  # pylint:disable=unused-argument
  @classmethod
  def validate_delete(cls, mapper, connection, target):
    """Validates is delete of Relationship is allowed."""
    from ggrc.utils.user_generator import is_ext_app_request
    cls.validate_relation_by_type(target.source_type,
                                  target.destination_type)
    if is_ext_app_request() and not target.is_external:
      raise ValidationError(
          'External application can delete only external relationships.')

  @classmethod
  def validate_relation_by_type(cls, source_type, destination_type):
    """Checks if a mapping is allowed between given types."""
    if is_external_app_user():
      # external users can map and unmap scoping objects
      return
    from ggrc.models import all_models
    scoping_models_names = all_models.get_scope_model_names()

    # Check Regulation and Standard
    if cls._check_relation_types_group(source_type, destination_type,
                                       scoping_models_names,
                                       ("Regulation", "Standard")):
      raise ValidationError(
          u"You do not have the necessary permissions to map and unmap "
          u"scoping objects to directives in this application. Please "
          u"contact your administrator if you have any questions.")

    # Check Control
    control_external_only_mappings = set(scoping_models_names)
    control_external_only_mappings.update(("Regulation", "Standard", "Risk"))
    if cls._check_relation_types_group(source_type, destination_type,
                                       control_external_only_mappings,
                                       ("Control", )):
      raise ValidationError(
          u"You do not have the necessary permissions to map and unmap "
          u"controls to scoping objects, standards and regulations in this "
          u"application. Please contact your administrator "
          u"if you have any questions.")

    # Check Risk
    risk_external_only_mappings = set(scoping_models_names)
    risk_external_only_mappings.update(("Regulation", "Standard", "Control"))
    if cls._check_relation_types_group(source_type, destination_type,
                                       risk_external_only_mappings,
                                       ("Risk", )):
      # Bug fix: the adjacent literals previously concatenated to
      # "...application.Please contact..." (missing space).
      raise ValidationError(
          u"You do not have the necessary permissions to map and unmap "
          u"risks to scoping objects, controls, standards "
          u"and regulations in this application. "
          u"Please contact your administrator if you have any questions.")
class Comment(Relatable, Described, Ownable, Notifiable,
              Base, Indexed, db.Model):
  """Basic comment model."""
  __tablename__ = "comments"

  assignee_type = db.Column(db.String)
  # Revision of the custom attribute value this comment was left for
  # (see custom_attribute_revision_upd).
  revision_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('revisions.id', ondelete='SET NULL'),
          nullable=True,
      ),
      'Comment')
  revision = db.relationship(
      'Revision',
      uselist=False,
  )
  custom_attribute_definition_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('custom_attribute_definitions.id',
                        ondelete='SET NULL'),
          nullable=True,
      ),
      'Comment')
  custom_attribute_definition = db.relationship(
      'CustomAttributeDefinition',
      uselist=False,
  )

  # REST properties
  _publish_attrs = [
      "assignee_type",
      "custom_attribute_revision",
  ]
  _update_attrs = [
      "assignee_type",
      "custom_attribute_revision_upd",
  ]
  _sanitize_html = [
      "description",
  ]

  def get_objects_to_reindex(self):
    """Return list required objects for reindex if comment C.U.D."""
    # The comment may sit on either side of the Relationship row, so
    # collect counterparts from both orientations.
    source_qs = db.session.query(
        Relationship.destination_type, Relationship.destination_id
    ).filter(
        Relationship.source_type == self.__class__.__name__,
        Relationship.source_id == self.id
    )
    destination_qs = db.session.query(
        Relationship.source_type, Relationship.source_id
    ).filter(
        Relationship.destination_type == self.__class__.__name__,
        Relationship.destination_id == self.id
    )
    result_qs = source_qs.union(destination_qs)
    klass_dict = defaultdict(set)
    for klass, object_id in result_qs:
      klass_dict[klass].add(object_id)

    queries = []
    for klass, object_ids in klass_dict.iteritems():
      model = inflector.get_model(klass)
      if not model:
        continue
      # Only commentable, indexed models need reindexing.
      if issubclass(model, (Indexed, Commentable)):
        queries.append(
            model.query.filter(model.id.in_(list(object_ids))))
    return list(itertools.chain(*queries))

  AUTO_REINDEX_RULES = [
      ReindexRule("Comment", lambda x: x.get_objects_to_reindex()),
      ReindexRule("Relationship", reindex_by_relationship),
  ]

  @classmethod
  def eager_query(cls):
    query = super(Comment, cls).eager_query()
    return query.options(
        orm.joinedload('revision'),
        orm.joinedload('custom_attribute_definition').undefer_group(
            'CustomAttributeDefinition_complete'),
    )

  @computed_property
  def custom_attribute_revision(self):
    """Get the historical value of the relevant CA value."""
    if not self.revision:
      return None
    revision = self.revision.content
    cav_stored_value = revision['attribute_value']
    cad = self.custom_attribute_definition
    return {
        'custom_attribute': {
            'id': cad.id if cad else None,
            # The definition may have been deleted after the comment was made.
            'title': cad.title if cad else 'DELETED DEFINITION',
        },
        'custom_attribute_stored_value': cav_stored_value,
    }

  def custom_attribute_revision_upd(self, value):
    """Create a Comment-CA mapping with current CA value stored."""
    ca_revision_dict = value.get('custom_attribute_revision_upd')
    if not ca_revision_dict:
      return
    ca_val_dict = self._get_ca_value(ca_revision_dict)
    ca_val_id = ca_val_dict['id']
    # Pin the most recent revision of the CA value at comment time.
    ca_val_revision = Revision.query.filter_by(
        resource_type='CustomAttributeValue',
        resource_id=ca_val_id,
    ).order_by(
        Revision.created_at.desc(),
    ).limit(1).first()
    if not ca_val_revision:
      raise BadRequest(
          "No Revision found for CA value with id provided under "
          "'custom_attribute_value': {}".format(ca_val_dict))

    self.revision_id = ca_val_revision.id
    self.custom_attribute_definition_id = ca_val_revision.content.get(
        'custom_attribute_id',
    )

  @staticmethod
  def _get_ca_value(ca_revision_dict):
    """Get CA value dict from json and do a basic validation."""
    ca_val_dict = ca_revision_dict.get('custom_attribute_value')
    if not ca_val_dict:
      raise ValueError(
          "CA value expected under "
          "'custom_attribute_value': {}".format(ca_revision_dict))
    if not ca_val_dict.get('id'):
      raise ValueError(
          "CA value id expected under 'id': {}".format(ca_val_dict))
    return ca_val_dict
class Commentable(object):
  """Mixin for commentable objects.

  This is a mixin for adding default options to objects on which people
  can comment.

  recipients is used for setting who gets notified (Verifier, Requester,
  ...).

  send_by_default should be used for setting the "send notification" flag
  in the comment modal.
  """
  # pylint: disable=too-few-public-methods

  VALID_RECIPIENTS = frozenset([
      "Assessor",
      "Assignee",
      "Creator",
      "Requester",
      "Verifier",
  ])

  @validates("recipients")
  def validate_recipients(self, key, value):
    """
      Validate recipients list

      Args:
        value (string): Can be either empty, or
          list of comma separated `VALID_RECIPIENTS`
    """
    # pylint: disable=unused-argument
    if value:
      value = set(name for name in value.split(",") if name)

    if value and value.issubset(self.VALID_RECIPIENTS):
      # The validator is a bit more smart and also makes some filtering of the
      # given data - this is intended.
      return ",".join(value)
    elif not value:
      return None
    else:
      raise ValueError(value,
                       'Value should be either empty ' +
                       'or comma separated list of ' +
                       ', '.join(sorted(self.VALID_RECIPIENTS))
                       )

  recipients = db.Column(
      db.String,
      nullable=True,
      default=u"Assessor,Creator,Verifier")

  send_by_default = db.Column(db.Boolean, nullable=True, default=True)

  _publish_attrs = [
      "recipients",
      "send_by_default",
  ]
  _aliases = {
      "recipients": "Recipients",
      "send_by_default": "Send by default",
  }
  _fulltext_attrs = [
      MultipleSubpropertyFullTextAttr("comment", "comments", ["description"]),
  ]

  @declared_attr
  def comments(self):
    """Comments related to self via Relationship table."""
    # The Comment may be stored on either side of the Relationship row;
    # the `case` expressions pick the correct column for each orientation.
    comment_id = case(
        [(Relationship.destination_type == "Comment",
          Relationship.destination_id)],
        else_=Relationship.source_id,
    )
    commentable_id = case(
        [(Relationship.destination_type == "Comment",
          Relationship.source_id)],
        else_=Relationship.destination_id,
    )

    return db.relationship(
        Comment,
        primaryjoin=lambda: self.id == commentable_id,
        secondary=Relationship.__table__,
        secondaryjoin=lambda: Comment.id == comment_id,
        viewonly=True,
    )
class Revision(before_flush_handleable.BeforeFlushHandleable, synchronizable.ChangesSynchronized, filterable.Filterable, base.ContextRBAC, mixins.Base, db.Model): """Revision object holds a JSON snapshot of the object at a time.""" __tablename__ = 'revisions' resource_id = db.Column(db.Integer, nullable=False) resource_type = db.Column(db.String, nullable=False) event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False) action = db.Column(db.Enum(u'created', u'modified', u'deleted'), nullable=False) _content = db.Column('content', types.LongJsonType, nullable=False) resource_slug = db.Column(db.String, nullable=True) source_type = db.Column(db.String, nullable=True) source_id = db.Column(db.Integer, nullable=True) destination_type = db.Column(db.String, nullable=True) destination_id = db.Column(db.Integer, nullable=True) is_empty = db.Column(db.Boolean, nullable=False, default=False) @staticmethod def _extra_table_args(_): return ( db.Index("revisions_modified_by", "modified_by_id"), db.Index("ix_revisions_resource_action", "resource_type", "resource_id", "action"), db.Index("fk_revisions_source", "source_type", "source_id"), db.Index("fk_revisions_destination", "destination_type", "destination_id"), db.Index('ix_revisions_resource_slug', 'resource_slug'), ) _api_attrs = reflection.ApiAttributes( 'resource_id', 'resource_type', 'source_type', 'source_id', 'destination_type', 'destination_id', 'action', 'content', 'description', reflection.Attribute('diff_with_current', create=False, update=False), reflection.Attribute('meta', create=False, update=False), ) _filterable_attrs = [ 'action', 'resource_id', 'resource_type', 'source_type', 'source_id', 'destination_type', 'destination_id', ] @classmethod def eager_query(cls, **kwargs): from sqlalchemy import orm query = super(Revision, cls).eager_query(**kwargs) return query.options( orm.subqueryload('modified_by'), # Event's action is loaded here since it is used in description. 
orm.joinedload('event').load_only('action'), ) def __init__(self, obj, modified_by_id, action, content): self.resource_id = obj.id self.resource_type = obj.__class__.__name__ self.resource_slug = getattr(obj, "slug", None) self.modified_by_id = modified_by_id self.action = action if "access_control_list" in content and content["access_control_list"]: for acl in content["access_control_list"]: acl["person"] = { "id": acl["person_id"], "type": "Person", "href": "/api/people/{}".format(acl["person_id"]), } self._content = content for attr in [ "source_type", "source_id", "destination_type", "destination_id" ]: setattr(self, attr, getattr(obj, attr, None)) @builder.callable_property def diff_with_current(self): """Callable lazy property for revision.""" referenced_objects.mark_to_cache(self.resource_type, self.resource_id) revisions_diff.mark_for_latest_content(self.resource_type, self.resource_id) def lazy_loader(): """Lazy load diff for revisions.""" referenced_objects.rewarm_cache() revisions_diff.rewarm_latest_content() instance = referenced_objects.get(self.resource_type, self.resource_id) if instance: return revisions_diff.prepare(instance, self.content) # return empty diff object has already been removed return {} return lazy_loader @builder.callable_property def meta(self): """Callable lazy property for revision.""" referenced_objects.mark_to_cache(self.resource_type, self.resource_id) def lazy_loader(): """Lazy load diff for revisions.""" referenced_objects.rewarm_cache() instance = referenced_objects.get(self.resource_type, self.resource_id) meta_dict = {} if instance: instance_meta_info = meta_info.MetaInfo(instance) meta_dict["mandatory"] = instance_meta_info.mandatory return meta_dict return lazy_loader @builder.simple_property def description(self): """Compute a human readable description from action and content.""" if 'display_name' not in self._content: return '' display_name = self._content['display_name'] if not display_name: result = u"{0} 
{1}".format(self.resource_type, self.action) elif u'<->' in display_name: if self.action == 'created': msg = u"{destination} linked to {source}" elif self.action == 'deleted': msg = u"{destination} unlinked from {source}" else: msg = u"{display_name} {action}" source, destination = self._content['display_name'].split( '<->')[:2] result = msg.format(source=source, destination=destination, display_name=self._content['display_name'], action=self.action) elif 'mapped_directive' in self._content: # then this is a special case of combined map/creation # should happen only for Requirement and Control mapped_directive = self._content['mapped_directive'] if self.action == 'created': result = u"New {0}, {1}, created and mapped to {2}".format( self.resource_type, display_name, mapped_directive) elif self.action == 'deleted': result = u"{0} unmapped from {1} and deleted".format( display_name, mapped_directive) else: result = u"{0} {1}".format(display_name, self.action) else: # otherwise, it's a normal creation event result = u"{0} {1}".format(display_name, self.action) if self.event.action == "BULK": result += ", via bulk action" return result def populate_reference_url(self): """Add reference_url info for older revisions.""" if 'url' not in self._content: return {} reference_url_list = [] for key in ('url', 'reference_url'): link = self._content[key] # link might exist, but can be an empty string - we treat those values # as non-existing (empty) reference URLs if not link: continue # if creation/modification date is not available, we estimate it by # using the corresponding information from the Revision itself created_at = (self._content.get("created_at") or self.created_at.isoformat()) updated_at = (self._content.get("updated_at") or self.updated_at.isoformat()) reference_url_list.append({ "display_name": link, "kind": "REFERENCE_URL", "link": link, "title": link, "id": None, "created_at": created_at, "updated_at": updated_at, }) return {'reference_url': reference_url_list} 
@classmethod
def _filter_internal_acls(cls, access_control_list):
  """Remove internal access control list entries.

  This is needed due to bugs in older code that in some cases the revisions
  stored internal ACL entries. Due to possible role removal, the parent_id is
  the only true flag that we can use for filtering

  Args:
    access_control_list: list of dicts containing ACL entries.

  Returns:
    access_control_list but without any ACL entry that was generated from
    some other ACL entry.
  """
  return [
      acl
      for acl in access_control_list
      if acl.get("parent_id") is None
  ]

@classmethod
def _populate_acl_with_people(cls, access_control_list):
  """Add person property with person stub on access control list."""
  for acl in access_control_list:
    if "person" not in acl:
      # Older entries only stored person_id; frontend expects a person stub.
      acl["person"] = {"id": acl.get("person_id"), "type": "Person"}
  return access_control_list

def populate_acl(self):
  """Add access_control_list info for older revisions.

  Converts legacy single-person fields (assessors, contacts, owners) into
  ACL entries for the matching custom role, then filters out internal and
  no-longer-existing roles.
  """
  # pylint: disable=too-many-locals
  roles_dict = role.get_custom_roles_for(self.resource_type)
  # Reverse mapping: role name -> role id, used to resolve legacy fields.
  reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}
  access_control_list = self._content.get("access_control_list") or []
  map_field_to_role = {
      "principal_assessor": reverted_roles_dict.get("Principal Assignees"),
      "secondary_assessor": reverted_roles_dict.get("Secondary Assignees"),
      "contact": reverted_roles_dict.get("Primary Contacts"),
      "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
      "owners": reverted_roles_dict.get("Admin"),
  }
  is_control = bool(self.resource_type == "Control")
  is_control_snapshot = bool(
      self.resource_type == "Snapshot" and
      self._content["child_type"] == "Control")
  # for Control type we do not have Primary and Secondary Contacts roles.
  if is_control or is_control_snapshot:
    map_field_to_role.update({
        "contact": reverted_roles_dict.get("Control Operators"),
        "secondary_contact": reverted_roles_dict.get("Control Owners")
    })
  exists_roles = {i["ac_role_id"] for i in access_control_list}

  for field, role_id in map_field_to_role.items():
    # Skip roles already present in the stored ACL or not defined anymore.
    if role_id in exists_roles or role_id is None:
      continue
    if field not in self._content:
      continue
    field_content = self._content.get(field) or {}
    if not field_content:
      continue
    if not isinstance(field_content, list):
      field_content = [field_content]
    person_ids = {fc.get("id") for fc in field_content if fc.get("id")}
    for person_id in person_ids:
      access_control_list.append({
          "display_name": roles_dict[role_id],
          "ac_role_id": role_id,
          "context_id": None,
          "created_at": None,
          "object_type": self.resource_type,
          "updated_at": None,
          "object_id": self.resource_id,
          "modified_by_id": None,
          "person_id": person_id,
          # Frontend require data in such format
          "person": {
              "id": person_id,
              "type": "Person",
              "href": "/api/people/{}".format(person_id)
          },
          "modified_by": None,
          "id": None,
      })
  acl_with_people = self._populate_acl_with_people(access_control_list)
  filtered_acl = self._filter_internal_acls(acl_with_people)
  # Drop entries whose role no longer exists for this resource type.
  result_acl = [
      acl for acl in filtered_acl
      if acl["ac_role_id"] in roles_dict
  ]
  return {
      "access_control_list": result_acl,
  }

def populate_folder(self):
  """Add folder info for older revisions."""
  if "folder" in self._content:
    return {}
  # Older revisions stored a list of folders; only the first id is kept.
  folders = self._content.get("folders") or [{"id": ""}]
  return {"folder": folders[0]["id"]}

def populate_labels(self):
  """Add labels info for older revisions."""
  if "label" not in self._content:
    return {}
  label = self._content["label"]
  return {"labels": [{
      "id": None,
      "name": label
  }]} if label else {"labels": []}

def populate_status(self):
  """Update status for older revisions or add it if status does not exist."""
  workflow_models = {
      "Cycle",
      "CycleTaskGroup",
      "CycleTaskGroupObjectTask",
  }
  statuses_mapping = {"InProgress": "In Progress"}
  status = statuses_mapping.get(self._content.get("status"))
  if self.resource_type in workflow_models and status:
    return {"status": status}
  pop_models = {
      # ggrc
      "AccessGroup",
      "AccountBalance",
      "Control",
      "DataAsset",
      "Directive",
      "Facility",
      "Issue",
      "KeyReport",
      "Market",
      "Objective",
      "OrgGroup",
      "Product",
      "Program",
      "Project",
      "Requirement",
      "System",
      "Vendor",
      "Risk",
      "Threat",
  }
  if self.resource_type not in pop_models:
    return {}
  # Legacy statuses are collapsed to the current Active/Deprecated/Draft set.
  statuses_mapping = {
      "Active": "Active",
      "Deprecated": "Deprecated",
      "Effective": "Active",
      "Final": "Active",
      "In Scope": "Active",
      "Ineffective": "Active",
      "Launched": "Active",
  }
  return {
      "status": statuses_mapping.get(self._content.get("status"), "Draft")
  }

def populate_review_status(self):
  """Replace os_state with review state for old revisions"""
  from ggrc.models import review
  result = {}
  if "os_state" in self._content:
    if self._content["os_state"] is not None:
      result["review_status"] = self._content["os_state"]
    else:
      result["review_status"] = review.Review.STATES.UNREVIEWED
  return result

def populate_review_status_display_name(self, result):
  """Get review_status if review_status_display_name is not found"""
  # pylint: disable=invalid-name
  if self.resource_type in ("Control", "Risk"):
    if "review_status_display_name" in self._content:
      result["review_status_display_name"] = self._content[
          "review_status_display_name"]
    elif "review_status" in result:
      # Fall back to the plain review_status value for old revisions.
      result["review_status_display_name"] = result["review_status"]

def populate_readonly(self):
  """Add readonly=False to older revisions of WithReadOnlyAccess models"""
  from ggrc.models import all_models
  model = getattr(all_models, self.resource_type, None)
  if not model or not issubclass(model, wroa.WithReadOnlyAccess):
    return dict()
  if "readonly" in self._content:
    # revision has flag "readonly", use it
    return {"readonly": self._content["readonly"]}
  # no flag "readonly" in revision, use default value False
  return {"readonly": False}

def _document_evidence_hack(self):
  """Update display_name on evidences.

  Evidences have display names from links and titles, and until now they
  used slug property to calculate the display name. This hack is here since
  we must support older revisions with bad data, and to avoid using slug
  differently than everywhere else in the app.

  This function only modifies existing evidence entries on any given object.
  If an object does not have any document evidences then an empty dict is
  returned.

  Returns:
    dict with updated display name for each of the evidence entries if
    there are any.
  """
  if "document_evidence" not in self._content:
    return {}
  document_evidence = self._content.get("document_evidence")
  for evidence in document_evidence:
    evidence[u"display_name"] = u"{link} {title}".format(
        link=evidence.get("link"),
        title=evidence.get("title"),
    ).strip()
  return {u"documents_file": document_evidence}

def populate_categoies(self, key_name):
  """Return names of categories.

  Handles both serialized JSON strings and lists of dicts/strings stored
  under key_name in old revisions.
  """
  if self.resource_type != "Control":
    return {}
  result = []
  categories = self._content.get(key_name)
  if isinstance(categories, (str, unicode)) and categories:
    # Some revisions store categories as a JSON-encoded string.
    result = json.loads(categories)
  elif isinstance(categories, list):
    for category in categories:
      if isinstance(category, dict):
        result.append(category.get("name"))
      elif isinstance(category, (str, unicode)):
        result.append(category)
  return {key_name: result}

def _get_cavs(self):
  """Return cavs values from content."""
  if "custom_attribute_values" in self._content:
    return self._content["custom_attribute_values"]
  if "custom_attributes" in self._content:
    # Oldest revisions used the "custom_attributes" key instead.
    return self._content["custom_attributes"]
  return []

def _get_cads(self):
  """Return cads definitions from content and new CADs from db."""
  if "custom_attribute_definitions" in self._content:
    return self._content["custom_attribute_definitions"]
  return []

def populate_cavs(self):
  """Setup cads in cav list if they are not presented in content but now
  they are associated to instance."""
  cads = self._get_cads()
  cavs = {int(i["custom_attribute_id"]): i for i in self._get_cavs()}
  cads_ids = set()
  for cad in cads:
    custom_attribute_id = int(cad["id"])
    cads_ids.add(custom_attribute_id)
    if custom_attribute_id in cavs:
      # Old revisions can contain falsy values for a Checkbox
      if cad["attribute_type"] == "Checkbox" \
          and not cavs[custom_attribute_id]["attribute_value"]:
        cavs[custom_attribute_id]["attribute_value"] = cad[
            "default_value"]
      continue
    # CAV missing for this definition: synthesize a stub with the default.
    if cad["attribute_type"] == "Map:Person":
      value = "Person"
    else:
      value = cad["default_value"]
    cavs[custom_attribute_id] = {
        "attribute_value": value,
        "custom_attribute_id": custom_attribute_id,
        "attributable_id": self.resource_id,
        "attributable_type": self.resource_type,
        "display_name": "",
        "attribute_object": None,
        "type": "CustomAttributeValue",
        "context_id": None,
    }
  # Drop values whose definition is no longer present in content.
  cavs = {
      cad_id: value
      for cad_id, value in cavs.iteritems()
      if cad_id in cads_ids
  }
  return {
      "custom_attribute_values": cavs.values(),
      "custom_attribute_definitions": cads
  }

def populate_cad_default_values(self):
  """Setup default_value to CADs if it's needed."""
  from ggrc.models import all_models
  if "custom_attribute_definitions" not in self._content:
    return {}
  cads = []
  for cad in self._content["custom_attribute_definitions"]:
    if "default_value" not in cad:
      cad["default_value"] = (
          all_models.CustomAttributeDefinition.get_default_value_for(
              cad["attribute_type"]))
    cads.append(cad)
  return {"custom_attribute_definitions": cads}

def populate_requirements(self, populated_content):  # noqa pylint: disable=too-many-branches
  """Populates revision content for Requirement models and models with fields

  that can contain Requirement old names. These fields would be checked and
  updated where necessary
  """
  # change to add Requirement old names
  requirement_type = ["Section", "Clause"]
  # change to add models and fields that can contain Requirement old names
  affected_models = {
      "AccessControlList": [
          "object_type",
      ],
      "AccessControlRole": [
          "object_type",
      ],
      "Assessment": [
          "assessment_type",
      ],
      "AssessmentTemplate": [
          "template_object_type",
      ],
      "Automapping": [
          "source_type",
          "destination_type",
      ],
      "CustomAttributeValue": [
          "attributable_type",
      ],
      "ExternalCustomAttributeValue": [
          "attributable_type",
      ],
      "Event": [
          "resource_type",
      ],
      "ObjectPerson": [
          "personable_type",
      ],
      "Relationship": [
          "source_type",
          "destination_type",
      ],
      "Revision": [
          "resource_type",
      ],
      "Label": [
          "object_type",
      ],
      "Context": [
          "related_object_type",
      ],
      "IssuetrackerIssue": [
          "object_type",
      ],
      "ObjectLabel": [
          "object_type",
      ],
      "ObjectTemplates": [
          "name",
      ],
      "Proposal": [
          "instance_type",
      ],
      "Snapshot": [
          "child_type",
          "parent_type",
      ],
  }
  # change to add special values cases
  special_cases = {
      "CustomAttributeDefinition": {
          "fields": [
              "definition_type",
          ],
          "old_values": ["section", "clause"],
          "new_value": "requirement",
      }
  }

  obj_type = self.resource_type

  # populate fields if they contain old names
  if obj_type in affected_models.keys():
    for field in affected_models[obj_type]:
      if populated_content.get(field) in requirement_type:
        populated_content[field] = "Requirement"

  # populate fields for models that contain old names in special spelling
  if obj_type in special_cases.keys():
    for field in special_cases[obj_type]["fields"]:
      if populated_content[field] in special_cases[obj_type]["old_values"]:
        populated_content[field] = special_cases[obj_type]["new_value"]

  # populate Requirements revisions
  if obj_type == "Requirement":
    populated_content["type"] = "Requirement"

    acls = populated_content.get("access_control_list", {})
    if acls:
      for acl in acls:
        if acl.get("object_type") in requirement_type:
          acl["object_type"] = "Requirement"
      populated_content["access_control_list"] = acls

    cavs = populated_content.get("custom_attribute_values", {})
    if cavs:
      for cav in cavs:
        if cav.get("attributable_type") in requirement_type:
          cav["attributable_type"] = "Requirement"
      populated_content["custom_attribute_values"] = cavs

def populate_options(self, populated_content):
  """Update revisions for Sync models to have Option fields as string."""
  if self.resource_type == "Control":
    for attr in ["kind", "means", "verify_frequency"]:
      attr_value = populated_content.get(attr)
      if isinstance(attr_value, dict):
        # Old revisions stored the whole Option object; keep only the title.
        populated_content[attr] = attr_value.get("title")
      elif isinstance(attr_value, (str, unicode)):
        populated_content[attr] = attr_value
      else:
        populated_content[attr] = None

def populate_automappings(self):
  """Add automapping info in revisions.

  Populate Relationship revisions with automapping info to help FE show
  Change Log, but we should not show automapping info in case of deleted
  relationship"""
  if ("automapping_id" not in self._content or
      not self._content["automapping_id"] or
          self.action != "created"):
    return {}
  automapping_id = self._content["automapping_id"]
  # Per-request cache on flask.g avoids re-querying the same automapping.
  if not hasattr(flask.g, "automappings_cache"):
    flask.g.automappings_cache = dict()
  if automapping_id not in flask.g.automappings_cache:
    automapping_obj = automapping.Automapping.query.get(automapping_id)
    if automapping_obj is None:
      return {}
    automapping_json = automapping_obj.log_json()
    flask.g.automappings_cache[automapping_id] = automapping_json
  else:
    automapping_json = flask.g.automappings_cache[automapping_id]
  return {"automapping": automapping_json}

@builder.simple_property
def content(self):
  """Property. Contains the revision content dict.

  Updated by required values, generated from saved content dict."""
  # pylint: disable=too-many-locals
  populated_content = self._content.copy()
  populated_content.update(self.populate_acl())
  populated_content.update(self.populate_reference_url())
  populated_content.update(self.populate_folder())
  populated_content.update(self.populate_labels())
  populated_content.update(self.populate_status())
  populated_content.update(self.populate_review_status())
  populated_content.update(self._document_evidence_hack())
  populated_content.update(self.populate_categoies("categories"))
  populated_content.update(self.populate_categoies("assertions"))
  populated_content.update(self.populate_cad_default_values())
  populated_content.update(self.populate_cavs())
  populated_content.update(self.populate_readonly())
  populated_content.update(self.populate_automappings())
  populated_content["custom_attribute_definitions"] = sorted(
      populated_content["custom_attribute_definitions"],
      key=lambda x: x['id'])

  # These helpers mutate populated_content in place instead of returning.
  self.populate_requirements(populated_content)
  self.populate_options(populated_content)
  self.populate_review_status_display_name(populated_content)
  # remove custom_attributes,
  # it's old style interface and now it's not needed
  populated_content.pop("custom_attributes", None)

  # remove attribute_object_id not used by FE anymore
  for item in populated_content["custom_attribute_values"]:
    item.pop("attribute_object_id", None)

  return populated_content

@content.setter
def content(self, value):
  """ Setter for content property."""
  self._content = value

def _handle_if_empty(self):
  """Check if revision is empty and update is_empty flag if true."""
  # Check if new revision contains any changes in resource state. Revisions
  # created with "created" or "deleted" action are not considered empty.
  if self in db.session.new and self.action == u"modified":
    obj = referenced_objects.get(self.resource_type, self.resource_id)
    # Content serialization and deserialization is needed since content of
    # prev revision stored in DB was serialized before storing and due to
    # this couldn't be correctly compared to content of revision in hands.
    content = json.loads(utils.as_json(self.content))
    self.is_empty = bool(
        obj and not revisions_diff.changes_present(obj, content))

def handle_before_flush(self):
  """Handler that is called before SQLAlchemy flush event."""
  self._handle_if_empty()
class CustomAttributeValue(base.ContextRBAC, Base, Indexed, db.Model):
  """Custom attribute value model"""

  __tablename__ = 'custom_attribute_values'

  _api_attrs = reflection.ApiAttributes(
      'custom_attribute_id',
      'attributable_id',
      'attributable_type',
      'attribute_value',
      'attribute_object',
      reflection.Attribute('preconditions_failed',
                           create=False,
                           update=False),
  )
  _fulltext_attrs = ["attribute_value"]
  REQUIRED_GLOBAL_REINDEX = False

  _sanitize_html = [
      "attribute_value",
  ]

  custom_attribute_id = db.Column(
      db.Integer,
      db.ForeignKey('custom_attribute_definitions.id', ondelete="CASCADE"))
  attributable_id = db.Column(db.Integer)
  attributable_type = db.Column(db.String)
  attribute_value = db.Column(db.String, nullable=False, default=u"")

  # When the attribute is of a mapping type this will hold the id of the
  # mapped object while attribute_value will hold the type name.
  # For example an instance of attribute type Map:Person will have a person id
  # in attribute_object_id and string 'Person' in attribute_value.
  attribute_object_id = db.Column(db.Integer)

  # pylint: disable=protected-access
  # This is just a mapping for accessing local functions so protected access
  # warning is a false positive
  _validator_map = {
      "Text": lambda self: self._validate_text(),
      "Rich Text": lambda self: self._validate_rich_text(),
      "Date": lambda self: self._validate_date(),
      "Dropdown": lambda self: self._validate_dropdown(),
      "Map:Person": lambda self: self._validate_map_person(),
      "Checkbox": lambda self: self._validate_checkbox(),
  }

  @property
  def latest_revision(self):
    """Latest revision of CAV (used for comment precondition check)."""
    # TODO: make eager_query fetch only the first Revision
    return self._related_revisions[0]

  # NOTE(review): method name looks like a typo of "delete_record", but
  # renaming would break any existing callers — confirm before fixing.
  def delere_record(self):
    get_indexer().delete_record(self.attributable_id,
                                self.attributable_type,
                                False)

  def get_reindex_pair(self):
    """Return (type, id) pair of the attributable object for reindexing."""
    return (self.attributable_type, self.attributable_id)

  @declared_attr
  def _related_revisions(cls):  # pylint: disable=no-self-argument
    def join_function():
      """Function to join CAV to its latest revision."""
      resource_id = foreign(Revision.resource_id)
      resource_type = foreign(Revision.resource_type)
      return and_(resource_id == cls.id,
                  resource_type == "CustomAttributeValue")

    # Newest revision first, so _related_revisions[0] is the latest one.
    return db.relationship(
        Revision,
        primaryjoin=join_function,
        viewonly=True,
        order_by=Revision.created_at.desc(),
    )

  @classmethod
  def eager_query(cls):
    """Eager-load revisions and the attribute definition with the query."""
    query = super(CustomAttributeValue, cls).eager_query()
    query = query.options(
        orm.subqueryload('_related_revisions'),
        orm.joinedload('custom_attribute'),
    )
    return query

  @property
  def attributable_attr(self):
    # Name of the polymorphic backref to the attributable object,
    # e.g. "Control_custom_attributable".
    return '{0}_custom_attributable'.format(self.attributable_type)

  @property
  def attributable(self):
    return getattr(self, self.attributable_attr)

  @attributable.setter
  def attributable(self, value):
    # Keep the denormalized id/type columns in sync with the relationship.
    self.attributable_id = value.id if value is not None else None
    self.attributable_type = value.__class__.__name__ if value is not None \
        else None
    return setattr(self, self.attributable_attr, value)

  @property
  def attribute_object(self):
    """Fetch the object referred to by attribute_object_id.

    Use backrefs defined in CustomAttributeMapable.

    Returns:
        A model instance of type specified in attribute_value
    """
    return getattr(self, self._attribute_object_attr)

  @attribute_object.setter
  def attribute_object(self, value):
    """Set attribute_object_id via whole object.

    Args:
        value: model instance
    """
    if value is None:
      # We get here if "attribute_object" does not get resolved.
      # TODO: make sure None value can be set for removing CA attribute object
      # value
      return
    self.attribute_object_id = value.id
    return setattr(self, self._attribute_object_attr, value)

  @property
  def attribute_object_type(self):
    """Fetch the mapped object pointed to by attribute_object_id.

    Returns:
        A model of type referenced in attribute_value
    """
    attr_type = self.custom_attribute.attribute_type
    if not attr_type.startswith("Map:"):
      return None
    return self.attribute_object.__class__.__name__

  @property
  def _attribute_object_attr(self):
    """Compute the relationship property based on object type.

    Returns:
        Property name
    """
    attr_type = self.custom_attribute.attribute_type
    if not attr_type.startswith("Map:"):
      return None
    return 'attribute_{0}'.format(self.attribute_value)

  @classmethod
  def mk_filter_by_custom(cls, obj_class, custom_attribute_id):
    """Get filter for custom attributable object.

    This returns an exists filter for the given predicate, matching it to
    either a custom attribute value, or a value of the matched object.

    Args:
      obj_class: Class of the attributable object.
      custom_attribute_id: Id of the attribute definition.
    Returns:
      A function that will generate a filter for a given predicate.
    """
    from ggrc.models import all_models
    attr_def = all_models.CustomAttributeDefinition.query.filter_by(
        id=custom_attribute_id).first()
    if attr_def and attr_def.attribute_type.startswith("Map:"):
      # For mapping attributes, match the predicate against searchable
      # fields of the mapped object instead of the raw stored value.
      map_type = attr_def.attribute_type[4:]
      map_class = getattr(all_models, map_type, None)
      if map_class:
        fields = [getattr(map_class, name, None)
                  for name in ["email", "title", "slug"]]
        fields = [field for field in fields if field is not None]

        def filter_by_mapping(predicate):
          return cls.query.filter(
              (cls.custom_attribute_id == custom_attribute_id) &
              (cls.attributable_type == obj_class.__name__) &
              (cls.attributable_id == obj_class.id) &
              (map_class.query.filter(
                  (map_class.id == cls.attribute_object_id) &
                  or_(*[predicate(f) for f in fields])).exists())
          ).exists()
        return filter_by_mapping

    def filter_by_custom(predicate):
      return cls.query.filter(
          (cls.custom_attribute_id == custom_attribute_id) &
          (cls.attributable_type == obj_class.__name__) &
          (cls.attributable_id == obj_class.id) &
          predicate(cls.attribute_value)
      ).exists()
    return filter_by_custom

  def _clone(self, obj):
    """Clone a custom value to a new object."""
    data = {
        "custom_attribute_id": self.custom_attribute_id,
        "attributable_id": obj.id,
        "attributable_type": self.attributable_type,
        "attribute_value": self.attribute_value,
        "attribute_object_id": self.attribute_object_id
    }
    ca_value = CustomAttributeValue(**data)
    db.session.add(ca_value)
    db.session.flush()
    return ca_value

  @staticmethod
  def _extra_table_args(_):
    # One value per (object, attribute definition) pair.
    return (
        db.UniqueConstraint('attributable_id', 'custom_attribute_id'),
    )

  def _validate_map_person(self):
    """Validate and correct mapped person values

    Mapped person custom attribute is only valid if both attribute_value and
    attribute_object_id are set. To keep the custom attribute api consistent
    with other types, we allow setting the value to a string containing both
    in this way "attribute_value:attribute_object_id". This validator checks
    both scenarios and changes the string value to proper values needed by
    this custom attribute.

    Note: this validator does not check if id is a proper person id.
    """
    if self.attribute_value and ":" in self.attribute_value:
      value, id_ = self.attribute_value.split(":")
      self.attribute_value = value
      self.attribute_object_id = id_

  def _validate_dropdown(self):
    """Validate dropdown option."""
    valid_options = set(self.custom_attribute.multi_choice_options.split(","))
    if self.attribute_value:
      self.attribute_value = self.attribute_value.strip()
      if self.attribute_value not in valid_options:
        raise ValueError("Invalid custom attribute dropdown option: {v}, "
                         "expected one of {l}"
                         .format(v=self.attribute_value, l=valid_options))

  def _validate_date(self):
    """Convert date format."""
    if self.attribute_value:
      # Validate the date format by trying to parse it
      self.attribute_value = utils.convert_date_format(
          self.attribute_value,
          utils.DATE_FORMAT_ISO,
          utils.DATE_FORMAT_ISO,
      )

  def _validate_text(self):
    """Trim whitespaces."""
    if self.attribute_value:
      self.attribute_value = self.attribute_value.strip()

  def _validate_rich_text(self):
    """Add tags for links."""
    self.attribute_value = url_parser.parse(self.attribute_value)

  def _validate_checkbox(self):
    """Set falsy value to zero."""
    if not self.attribute_value:
      self.attribute_value = "0"

  def validate(self):
    """Validate custom attribute value.

    Raises:
      ValueError: if the attribute definition is missing or belongs to a
        different model than the attributable object.
    """
    # pylint: disable=protected-access
    attributable_type = self.attributable._inflector.table_singular
    if not self.custom_attribute:
      raise ValueError("Custom attribute definition not found: Can not "
                       "validate custom attribute value")
    if self.custom_attribute.definition_type != attributable_type:
      raise ValueError("Invalid custom attribute definition used.")
    validator = self._validator_map.get(self.custom_attribute.attribute_type)
    if validator:
      validator(self)

  @builder.simple_property
  def is_empty(self):
    """Return True if the CAV is empty or holds a logically empty value."""
    # The CAV is considered empty when:
    # - the value is empty
    if not self.attribute_value:
      return True
    # - the type is Checkbox and the value is 0
    if (self.custom_attribute.attribute_type ==
            self.custom_attribute.ValidTypes.CHECKBOX and
            str(self.attribute_value) == "0"):
      return True
    # - the type is a mapping and the object value id is empty
    if (self.attribute_object_type is not None and
            not self.attribute_object_id):
      return True
    # Otherwise the CAV is not empty
    return False

  @builder.simple_property
  def preconditions_failed(self):
    """A list of requirements self introduces that are unsatisfied.

    Returns:
      [str] - a list of unsatisfied requirements; possible items are: "value"
              - missing mandatory value, "comment" - missing mandatory
              comment, "evidence" - missing mandatory evidence.
    """
    failed_preconditions = []
    if self.custom_attribute.mandatory and self.is_empty:
      failed_preconditions += ["value"]
    if (self.custom_attribute.attribute_type ==
            self.custom_attribute.ValidTypes.DROPDOWN):
      failed_preconditions += self._check_dropdown_requirements()
    return failed_preconditions or None

  def _check_dropdown_requirements(self):
    """Check mandatory comment and mandatory evidence for dropdown CAV."""
    failed_preconditions = []
    options_to_flags = self.multi_choice_options_to_flags(
        self.custom_attribute,
    )
    flags = options_to_flags.get(self.attribute_value)
    if flags:
      for requirement in flags.keys():
        if not flags[requirement]:
          continue
        if requirement == "comment":
          failed_preconditions += self._check_mandatory_comment()
        else:
          # evidence/url requirements are checked by the attributable object.
          failed_preconditions += self.attributable \
                                      .check_mandatory_requirement(requirement)
    return failed_preconditions

  def _check_mandatory_comment(self):
    """Check presence of mandatory comment."""
    if hasattr(self.attributable, "comments"):
      # A comment satisfies the requirement only if it was left for this
      # attribute and for the current (latest) revision of the value.
      comment_found = any(
          self.custom_attribute_id == (comment
                                       .custom_attribute_definition_id) and
          self.latest_revision.id == comment.revision_id
          for comment in self.attributable.comments
      )
    else:
      comment_found = False
    if not comment_found:
      return ["comment"]
    return []

  @staticmethod
  def multi_choice_options_to_flags(cad):
    """Parse mandatory comment and evidence flags from dropdown CA definition.

    Args:
      cad - a CA definition object

    Returns:
      {option_value: Flags} - a dict from dropdown options values to dict
                              where keys "comment", "evidence" and "url"
                              corresponds to the values from
                              multi_choice_mandatory bitmasks
    """
    def make_flags(multi_choice_mandatory):
      flags_mask = int(multi_choice_mandatory)
      return {
          "comment": flags_mask & (cad
                                   .MultiChoiceMandatoryFlags
                                   .COMMENT_REQUIRED),
          "evidence": flags_mask & (cad
                                    .MultiChoiceMandatoryFlags
                                    .EVIDENCE_REQUIRED),
          "url": flags_mask & (cad
                               .MultiChoiceMandatoryFlags
                               .URL_REQUIRED),
      }

    if not cad.multi_choice_options or not cad.multi_choice_mandatory:
      return {}
    return dict(zip(
        cad.multi_choice_options.split(","),
        (make_flags(mask)
         for mask in cad.multi_choice_mandatory.split(",")),
    ))
class RiskAssessment(Documentable, Timeboxed, CustomAttributable, Relatable,
                     TestPlanned, base.ContextRBAC, BusinessObject, Indexed,
                     db.Model):
  """Risk Assessment model."""
  __tablename__ = 'risk_assessments'
  _title_uniqueness = False

  # Person responsible for managing the risk assessment.
  ra_manager_id = deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), 'RiskAssessment')
  ra_manager = db.relationship(
      'Person', uselist=False,
      foreign_keys='RiskAssessment.ra_manager_id')

  # Person acting as counsel for the risk assessment.
  ra_counsel_id = deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), 'RiskAssessment')
  ra_counsel = db.relationship(
      'Person', uselist=False,
      foreign_keys='RiskAssessment.ra_counsel_id')

  # Program this risk assessment belongs to (mandatory).
  program_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('programs.id'),
          nullable=False,
      ),
      'RiskAssessment')
  program = db.relationship(
      'Program',
      backref='risk_assessments',
      uselist=False,
      foreign_keys='RiskAssessment.program_id')

  _fulltext_attrs = []

  _api_attrs = reflection.ApiAttributes(
      'ra_manager',
      'ra_counsel',
      'program',
  )

  # Import/export column aliases and their custom filters.
  _aliases = {
      "ra_manager": {
          "display_name": "Risk Manager",
          "filter_by": "_filter_by_risk_manager",
      },
      "ra_counsel": {
          "display_name": "Risk Counsel",
          "filter_by": "_filter_by_risk_counsel",
      },
      "start_date": {
          "display_name": "Start Date",
          "mandatory": True,
      },
      "end_date": {
          "display_name": "End Date",
          "mandatory": True,
      },
      "program": {
          "display_name": "Program",
          "mandatory": True,
          "filter_by": "_filter_by_program",
      },
  }

  @classmethod
  def _filter_by_program(cls, predicate):
    """Exists-filter matching the related program by slug or title."""
    return Program.query.filter(
        (Program.id == cls.program_id) &
        (predicate(Program.slug) | predicate(Program.title))
    ).exists()

  @classmethod
  def _filter_by_risk_manager(cls, predicate):
    """Exists-filter matching the risk manager by name or email."""
    return Person.query.filter(
        (Person.id == cls.ra_manager_id) &
        (predicate(Person.name) | predicate(Person.email))
    ).exists()

  @classmethod
  def _filter_by_risk_counsel(cls, predicate):
    """Exists-filter matching the risk counsel by name or email."""
    return Person.query.filter(
        (Person.id == cls.ra_counsel_id) &
        (predicate(Person.name) | predicate(Person.email))
    ).exists()
def os_state(cls):
  """Deferred `os_state` column; new objects default to the draft state."""
  state_column = db.Column(
      db.String,
      nullable=False,
      default=ObjectStates.DRAFT,
  )
  return deferred(state_column, cls.__name__)
class Person(CustomAttributable, CustomAttributeMapable, HasOwnContext,
             Relatable, base.ContextRBAC, Base, Indexed, db.Model):
  """Person model definition."""

  def __init__(self, *args, **kwargs):
    """Initialize profile relationship while creating Person instance"""
    super(Person, self).__init__(*args, **kwargs)
    self.profile = PersonProfile()
    # Every person gets their own personal context for permissions.
    self.build_object_context(
        context=1,
        name='Personal Context',
        description='',
    )

  __tablename__ = 'people'

  email = deferred(db.Column(db.String, nullable=False), 'Person')
  name = deferred(db.Column(db.String), 'Person')
  language_id = deferred(db.Column(db.Integer), 'Person')
  company = deferred(db.Column(db.String), 'Person')

  object_people = db.relationship(
      'ObjectPerson', backref='person', cascade='all, delete-orphan')
  language = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Person.language_id) == Option.id, '
      'Option.role == "person_language")',
      uselist=False,
  )
  # One-to-one profile record created in __init__.
  profile = relationship(
      "PersonProfile",
      uselist=False,
      back_populates="person",
  )
  access_control_people = db.relationship(
      'AccessControlPerson',
      foreign_keys='AccessControlPerson.person_id',
      backref="person",
  )

  @staticmethod
  def _extra_table_args(_):
    return (
        db.Index('ix_people_name_email', 'name', 'email'),
        db.Index('uq_people_email', 'email', unique=True),
    )

  _fulltext_attrs = [
      'company',
      'email',
      'name',
  ]
  _api_attrs = reflection.ApiAttributes(
      'company',
      'email',
      'language',
      'name',
      reflection.Attribute('object_people', create=False, update=False),
      reflection.Attribute('system_wide_role', create=False, update=False),
  )
  _sanitize_html = [
      'company',
      'name',
  ]
  _include_links = []
  _aliases = {
      "name": "Name",
      "email": {
          "display_name": "Email",
          "unique": True,
      },
      "company": "Company",
      "user_role": {
          "display_name": "Role",
          "type": "user_role",
          "filter_by": "_filter_by_user_role",
      },
  }

  @classmethod
  def _filter_by_user_role(cls, predicate):
    """Custom filter by user roles."""
    from ggrc_basic_permissions.models import Role, UserRole
    # Only system-wide roles (context_id is NULL) are considered.
    return UserRole.query.join(Role).filter(
        (UserRole.person_id == cls.id) &
        (UserRole.context_id.is_(None)) &  # noqa
        predicate(Role.name)
    ).exists()

  # Methods required by Flask-Login
  # pylint: disable=no-self-use
  def is_authenticated(self):
    return self.system_wide_role != 'No Access'

  @property
  def user_name(self):
    # Local part of the email address, used as a short user name.
    return self.email.split("@")[0]

  def is_active(self):
    # pylint: disable=no-self-use
    return True  # self.active

  def is_anonymous(self):
    # pylint: disable=no-self-use
    return False

  def get_id(self):
    return unicode(self.id)  # noqa

  @validates('language')
  def validate_person_options(self, key, option):
    return validate_option(self.__class__.__name__, key, option,
                           'person_language')

  @validates('email')
  def validate_email(self, _, email):
    """Email property validator."""
    if not Person.is_valid_email(email):
      message = "Email address '{}' is invalid. Valid email must be provided"
      raise ValidationError(message.format(email))
    return email

  @staticmethod
  def is_valid_email(val):
    """Check for valid email.

    Borrowed from Django. Literal form, ipv4 address (SMTP 4.1.3).
    """
    email_re = re.compile(
        r'^[-!#$%&\'*+\\.\/0-9=?A-Z^_`{|}~]+@([-0-9A-Z]+\.)+([0-9A-Z]){2,4}$',
        re.IGNORECASE)
    return email_re.match(val) if val else False

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    # query = super(Person, cls).eager_query()
    # Completely overriding eager_query to avoid eager loading of the
    # modified_by relationship
    return super(Person, cls).eager_query().options(
        orm.joinedload('language'),
        orm.subqueryload('object_people'),
    )

  @classmethod
  def indexed_query(cls):
    from sqlalchemy import orm
    return super(Person, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Person_complete",
        ),
    )

  def _display_name(self):
    return self.email

  @builder.simple_property
  def system_wide_role(self):
    """For choosing the role string to show to the user; of all the roles in
    the system-wide context, it shows the highest ranked one (if there are
    multiple) or "No Access" if there are none.
    """
    # Bootstrap admins and external app users are superusers by definition.
    if self.email in getattr(settings, "BOOTSTRAP_ADMIN_USERS", []):
      return SystemWideRoles.SUPERUSER

    from ggrc.utils.user_generator import is_external_app_user_email
    if is_external_app_user_email(self.email):
      return SystemWideRoles.SUPERUSER

    role_hierarchy = {
        SystemWideRoles.ADMINISTRATOR: 0,
        SystemWideRoles.EDITOR: 1,
        SystemWideRoles.READER: 2,
        SystemWideRoles.CREATOR: 3,
    }
    unique_roles = set([
        user_role.role.name
        for user_role in self.user_roles
        if user_role.role.name in role_hierarchy
    ])
    if not unique_roles:
      return u"No Access"

    # -1 as default to make items not in this list appear on top
    # and thus shown to the user
    sorted_roles = sorted(unique_roles,
                          key=lambda x: role_hierarchy.get(x, -1))
    return sorted_roles[0]
def context_id(cls): # pylint: disable=no-self-argument return db.Column(db.Integer, db.ForeignKey('contexts.id'))
class Directive(HasObjectState, Timeboxed, BusinessObject, db.Model):
  """Polymorphic base model for directives.

  Concrete directive types are dispatched on the `meta_kind` column
  (see `__mapper_args__` below).
  """
  __tablename__ = 'directives'

  version = deferred(db.Column(db.String), 'Directive')
  organization = deferred(db.Column(db.String), 'Directive')
  scope = deferred(db.Column(db.Text), 'Directive')
  kind_id = deferred(db.Column(db.Integer), 'Directive')
  audit_start_date = deferred(db.Column(db.DateTime), 'Directive')
  audit_frequency_id = deferred(db.Column(db.Integer), 'Directive')
  audit_duration_id = deferred(db.Column(db.Integer), 'Directive')
  # Discriminator column for polymorphic inheritance; not deferred so it is
  # always loaded with the row.
  meta_kind = db.Column(db.String)
  kind = deferred(db.Column(db.String), 'Directive')

  # TODO: FIX jost!
  # sections = db.relationship(
  #     'Section', backref='directive',
  #     order_by='Section.slug', cascade='all, delete-orphan')
  controls = db.relationship(
      'Control', backref='directive', order_by='Control.slug')

  # Both option relationships join the generic Option table filtered by
  # Option.role, hence the explicit string primaryjoin with foreign().
  audit_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_frequency_id) == Option.id, '
                  'Option.role == "audit_frequency")',
      uselist=False,
  )
  audit_duration = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_duration_id) == Option.id, '
                  'Option.role == "audit_duration")',
      uselist=False,
  )

  __mapper_args__ = {
      'polymorphic_on': meta_kind
  }

  _publish_attrs = [
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  ]

  _fulltext_attrs = [
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  ]

  _sanitize_html = [
      'organization',
      'scope',
      'version',
  ]

  _include_links = []

  _aliases = {'kind': "Kind/Type", }

  @validates('kind')
  def validate_kind(self, key, value):
    """Reject `kind` values outside VALID_KINDS; falsy values become None.

    NOTE(review): VALID_KINDS is expected to be defined on concrete
    subclasses — confirm every subclass provides it.
    """
    if not value:
      return None
    if value not in self.VALID_KINDS:
      message = "Invalid value '{}' for attribute {}.{}.".format(
          value, self.__class__.__name__, key)
      raise ValueError(message)
    return value

  @validates('audit_duration', 'audit_frequency')
  def validate_directive_options(self, key, option):
    """Validate option columns; the Option role equals the attribute name."""
    return validate_option(self.__class__.__name__, key,
                           option, key)

  @classmethod
  def eager_query(cls):
    """Eager-load option relationships and controls for serialization."""
    from sqlalchemy import orm

    query = super(Directive, cls).eager_query()
    return cls.eager_inclusions(query, Directive._include_links).options(
        orm.joinedload('audit_frequency'),
        orm.joinedload('audit_duration'),
        orm.subqueryload('controls'))

  @staticmethod
  def _extra_table_args(cls):
    """Add an index on the polymorphic discriminator column."""
    return (
        db.Index('ix_{}_meta_kind'.format(cls.__tablename__), 'meta_kind'),
    )
def end_date(cls): return deferred(db.Column(db.Date), cls.__name__)
class Audit(Snapshotable,
            clonable.SingleClonable,
            WithEvidence,
            mixins.CustomAttributable,
            Personable,
            HasOwnContext,
            Relatable,
            Roleable,
            issue_tracker_mixins.IssueTrackedWithConfig,
            WithLastDeprecatedDate,
            mixins.Timeboxed,
            base.ContextRBAC,
            mixins.BusinessObject,
            mixins.Folderable,
            Indexed,
            db.Model):
  """Audit model."""

  __tablename__ = 'audits'
  _slug_uniqueness = False

  VALID_STATES = (
      u'Planned', u'In Progress', u'Manager Review',
      u'Ready for External Review', u'Completed', u'Deprecated'
  )

  # Child object types that `clone` is allowed to copy along with the audit.
  CLONEABLE_CHILDREN = {"AssessmentTemplate"}

  report_start_date = deferred(db.Column(db.Date), 'Audit')
  report_end_date = deferred(db.Column(db.Date), 'Audit')
  audit_firm_id = deferred(
      db.Column(db.Integer, db.ForeignKey('org_groups.id')), 'Audit')
  audit_firm = db.relationship('OrgGroup', uselist=False)
  gdrive_evidence_folder = deferred(db.Column(db.String), 'Audit')
  program_id = deferred(
      db.Column(db.Integer, db.ForeignKey('programs.id'), nullable=False),
      'Audit')
  object_type = db.Column(
      db.String(length=250), nullable=False, default='Control')

  assessments = db.relationship('Assessment', backref='audit')
  issues = db.relationship('Issue', backref='audit')
  snapshots = db.relationship('Snapshot', backref='audit')
  archived = deferred(db.Column(db.Boolean,
                                nullable=False, default=False), 'Audit')
  manual_snapshots = deferred(db.Column(db.Boolean,
                                        nullable=False, default=False),
                              'Audit')
  assessment_templates = db.relationship('AssessmentTemplate',
                                         backref='audit')

  _api_attrs = reflection.ApiAttributes(
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'gdrive_evidence_folder',
      'program',
      'object_type',
      'archived',
      'manual_snapshots',
  )

  _fulltext_attrs = [
      'archived',
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'gdrive_evidence_folder',
  ]

  @classmethod
  def indexed_query(cls):
    """Query with the full-text indexing column group undeferred."""
    return super(Audit, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Audit_complete",
        ),
    )

  _sanitize_html = [
      'gdrive_evidence_folder',
      'description',
  ]

  _include_links = []

  _aliases = {
      "program": {
          "display_name": "Program",
          "filter_by": "_filter_by_program",
          "mandatory": True,
      },
      "start_date": "Planned Start Date",
      "end_date": "Planned End Date",
      "report_start_date": "Planned Report Period from",
      "report_end_date": "Planned Report Period to",
      "notes": None,
      "archived": {
          "display_name": "Archived",
          "mandatory": False
      },
      "status": {
          "display_name": "State",
          "mandatory": True,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      }
  }

  def _clone(self, source_object):
    """Clone audit and all relevant attributes.

    Keeps the internals of actual audit cloning and everything that is
    related to audit itself (auditors, audit firm, context setting,
    custom attribute values, etc.)
    """
    from ggrc_basic_permissions import create_audit_context

    data = {
        "title": source_object.generate_attribute("title"),
        "description": source_object.description,
        "audit_firm": source_object.audit_firm,
        "start_date": source_object.start_date,
        "end_date": source_object.end_date,
        "last_deprecated_date": source_object.last_deprecated_date,
        "program": source_object.program,
        # New clone always starts in the first valid state ('Planned').
        "status": source_object.VALID_STATES[0],
        "report_start_date": source_object.report_start_date,
        "report_end_date": source_object.report_end_date
    }
    self.update_attrs(data)
    # Flush so the clone has an id before the context/ACL copies below.
    db.session.flush()

    create_audit_context(self)
    self.clone_acls(source_object)
    self.clone_custom_attribute_values(source_object)

  def clone_acls(self, audit):
    """Clone acl roles like auditors and audit captains

    Args:
      audit: Audit instance
    """
    for person, acl in audit.access_control_list:
      self.add_person_with_role(person, acl.ac_role)

  def clone(self, source_id, mapped_objects=None):
    """Clone audit with specified whitelisted children.

    Children that can be cloned should be specified in CLONEABLE_CHILDREN.

    Args:
      mapped_objects: A list of related objects that should also be copied
      and linked to a new audit.
    """
    if not mapped_objects:
      mapped_objects = []
    source_object = Audit.query.get(source_id)
    self._clone(source_object)
    if any(mapped_objects):
      related_children = source_object.related_objects(mapped_objects)
      for obj in related_children:
        obj.clone(self)

  @orm.validates("archived")
  def archived_check(self, _, value):
    """Only Admins and Program Managers are allowed to (un)archive Audit."""
    user = get_current_user()
    if getattr(user, 'system_wide_role', None) in SystemWideRoles.admins:
      return value

    # Non-admins may only flip the flag if they are a Program Manager on the
    # audit's program.
    if self.archived is not None and self.archived != value and \
       not any(acl for person, acl
               in list(self.program.access_control_list)
               if acl.ac_role.name == "Program Managers" and
               person.id == user.id):
      raise Forbidden()
    return value

  @classmethod
  def _filter_by_program(cls, predicate):
    """Helper for filtering by program"""
    return Program.query.filter(
        (Program.id == Audit.program_id) &
        (predicate(Program.slug) | predicate(Program.title))
    ).exists()

  @classmethod
  def eager_query(cls):
    """Eager-load program and object_people for serialization."""
    query = super(Audit, cls).eager_query()
    return query.options(
        orm.joinedload('program'),
        orm.subqueryload('object_people').joinedload('person'),
    )

  def get_evidences_from_assessments(self, objects=False):
    """Return all related evidences from assessments.

    audit <--> assessment -> evidence

    :param objects: bool. optional argument.
          If True object Evidence ORM objects return
    :return: sqlalchemy.Query or sqlalchemy.orm.query.Query objects
    """
    from ggrc.models.assessment import Assessment
    # Evidence may sit on either side of the Relationship row, so both
    # directions are queried and unioned.
    evid_as_dest = db.session.query(
        Relationship.destination_id.label("id"),
    ).join(
        Assessment,
        Assessment.id == Relationship.source_id,
    ).filter(
        Relationship.destination_type == Evidence.__name__,
        Relationship.source_type == Assessment.__name__,
        Assessment.audit_id == self.id,
    )
    evid_as_source = db.session.query(
        Relationship.source_id.label("id"),
    ).join(
        Assessment,
        Assessment.id == Relationship.destination_id,
    ).filter(
        Relationship.source_type == Evidence.__name__,
        Relationship.destination_type == Assessment.__name__,
        Assessment.audit_id == self.id,
    )
    evidence_assessment = evid_as_dest.union(evid_as_source)
    if objects:
      return db.session.query(Evidence).filter(
          Evidence.id.in_(evidence_assessment),
      )
    return evidence_assessment

  def get_evidences_from_audit(self, objects=False):
    """Return all related evidence. In relation audit <--> evidence

    :param objects: bool. optional argument.
          If True object Evidence ORM objects return
    :return: sqlalchemy.Query or sqlalchemy.orm.query.Query objects
    """
    evid_a_source = db.session.query(
        Relationship.source_id.label("id"),
    ).filter(
        Relationship.source_type == Evidence.__name__,
        Relationship.destination_type == Audit.__name__,
        Relationship.destination_id == self.id,
    )
    evid_a_dest = db.session.query(
        Relationship.destination_id.label("id"),
    ).filter(
        Relationship.destination_type == Evidence.__name__,
        Relationship.source_type == Audit.__name__,
        Relationship.source_id == self.id,
    )
    evidence_audit = evid_a_dest.union(evid_a_source)
    if objects:
      return db.session.query(Evidence).filter(
          Evidence.id.in_(evidence_audit),
      )
    return evidence_audit

  @simple_property
  def all_related_evidences(self):
    """Return all related evidences of audit"""
    evidence_assessment = self.get_evidences_from_assessments()
    evidence_audit = self.get_evidences_from_audit()
    evidence_ids = evidence_assessment.union(evidence_audit)
    return db.session.query(Evidence).filter(
        Evidence.id.in_(evidence_ids)
    )
def status(cls): # pylint: disable=no-self-argument return deferred(db.Column( db.String, default=cls.default_status, nullable=False), cls.__name__)
class CustomAttributeValue(CustomAttributeValueBase):
  """Custom attribute value model"""

  __tablename__ = 'custom_attribute_values'

  # When the attribute is of a mapping type this will hold the id of the
  # mapped object while attribute_value will hold the type name.
  # For example an instance of attribute type Map:Person will have a person
  # id in attribute_object_id and string 'Person' in attribute_value.
  attribute_object_id = db.Column(db.Integer)

  custom_attribute_id = db.Column(
      db.Integer,
      db.ForeignKey('custom_attribute_definitions.id', ondelete="CASCADE")
  )

  _api_attrs = reflection.ApiAttributes(
      'custom_attribute_id',
      'attributable_id',
      'attributable_type',
      'attribute_value',
      'attribute_object',
      reflection.Attribute('preconditions_failed',
                           create=False,
                           update=False),
  )

  # pylint: disable=protected-access
  # Extends the base validator map with the mapping and checkbox types.
  _validator_map = {
      "Text": lambda self: self._validate_text(),
      "Rich Text": lambda self: self._validate_rich_text(),
      "Date": lambda self: self._validate_date(),
      "Dropdown": lambda self: self._validate_dropdown(),
      "Multiselect": lambda self: self._validate_multiselect(),
      "Map:Person": lambda self: self._validate_map_object(),
      "Checkbox": lambda self: self._validate_checkbox(),
  }

  @property
  def attribute_object(self):
    """Fetch the object referred to by attribute_object_id.

    Use backrefs defined in CustomAttributeMapable.

    Returns:
        A model instance of type specified in attribute_value
    """
    try:
      return getattr(self, self._attribute_object_attr)
    except:  # pylint: disable=bare-except
      # Any failure (unresolved attr, bad type) is treated as "no object".
      return None

  @attribute_object.setter
  def attribute_object(self, value):
    """Set attribute_object_id via whole object.

    Args:
        value: model instance
    """
    if value is None:
      # We get here if "attribute_object" does not get resolved.
      # TODO: make sure None value can be set for removing CA attribute
      # object value
      return None
    self.attribute_object_id = value.id
    return setattr(self, self._attribute_object_attr, value)

  @property
  def attribute_object_type(self):
    """Fetch the mapped object pointed to by attribute_object_id.

    Returns:
        A model of type referenced in attribute_value
    """
    attr_type = self.custom_attribute.attribute_type
    if not attr_type.startswith("Map:"):
      return None
    return self.attribute_object.__class__.__name__

  @property
  def _attribute_object_attr(self):
    """Compute the relationship property based on object type.

    Returns:
        Property name
    """
    attr_type = self.custom_attribute.attribute_type
    if not attr_type.startswith("Map:"):
      return None
    return 'attribute_{0}'.format(self.attribute_value)

  @classmethod
  def mk_filter_by_custom(cls, obj_class, custom_attribute_id):
    """Get filter for custom attributable object.

    This returns an exists filter for the given predicate, matching it to
    either a custom attribute value, or a value of the matched object.

    Args:
      obj_class: Class of the attributable object.
      custom_attribute_id: Id of the attribute definition.
    Returns:
      A function that will generate a filter for a given predicate.
    """
    from ggrc.models import all_models
    attr_def = all_models.CustomAttributeDefinition.query.filter_by(
        id=custom_attribute_id
    ).first()
    if attr_def and attr_def.attribute_type.startswith("Map:"):
      map_type = attr_def.attribute_type[4:]
      map_class = getattr(all_models, map_type, None)
      if map_class:
        # Search the mapped object's human-readable fields, whichever exist.
        fields = [getattr(map_class, name, None)
                  for name in ["email", "title", "slug"]]
        fields = [field for field in fields if field is not None]

        def filter_by_mapping(predicate):
          """EXISTS filter matching the predicate against the mapped object."""
          return cls.query.filter(
              (cls.custom_attribute_id == custom_attribute_id) &
              (cls.attributable_type == obj_class.__name__) &
              (cls.attributable_id == obj_class.id) &
              (map_class.query.filter(
                  (map_class.id == cls.attribute_object_id) &
                  or_(*[predicate(f) for f in fields])).exists())
          ).exists()
        return filter_by_mapping

    def filter_by_custom(predicate):
      """EXISTS filter matching the predicate against the stored value."""
      return cls.query.filter(
          (cls.custom_attribute_id == custom_attribute_id) &
          (cls.attributable_type == obj_class.__name__) &
          (cls.attributable_id == obj_class.id) &
          predicate(cls.attribute_value)
      ).exists()
    return filter_by_custom

  def _validate_checkbox(self):
    """Set falsy value to zero."""
    if not self.attribute_value:
      self.attribute_value = "0"

  def _validate_map_object(self):
    """Validate and correct mapped object values

    Mapped object custom attribute is only valid if both attribute_value
    and attribute_object_id are set. To keep the custom attribute api
    consistent with other types, we allow setting the value to a string
    containing both in this way "attribute_value:attribute_object_id".
    This validator checks Both scenarios and changes the string value to
    proper values needed by this custom attribute.
    """
    self._extract_object_id_from_value()
    self._validate_map_type()
    self._validate_object_existence()

  def _extract_object_id_from_value(self):
    """Extract attribute_object_id from attribute_value"""
    if self.attribute_value and ":" in self.attribute_value:
      # NOTE(review): split(":") raises ValueError if the value contains more
      # than one colon — confirm whether rsplit(":", 1) is needed here.
      value, id_ = self.attribute_value.split(":")
      self.attribute_value = value
      self.attribute_object_id = id_

  def _validate_map_type(self):
    """Validate related CAD attribute_type and provided attribute_value

    Related custom attribute definition's attribute_type column must starts
    with "Map:".

    Example:
      "Map:Person" - for mapping with Person model

    Provided attribute_value should match to custom attribute definition's
    attribute_type. If definition have "Map:Person" attribute_type,
    attribute_value must be "Person".
    """
    from ggrc.models import all_models
    mapping_prefix = 'Map:'
    defined_type = self.custom_attribute.attribute_type
    if not defined_type.startswith(mapping_prefix):
      raise ValueError('Invalid definition type: %s expected mapping' %
                       defined_type)
    if not self.attribute_value:
      return
    try:
      expected_type = defined_type.split(mapping_prefix)[1]
    except IndexError:
      raise ValueError("Invalid definition type: mapping type didn't "
                       "provided")
    if self.attribute_value != expected_type:
      raise ValueError('Invalid attribute type: %s expected %s' %
                       (self.attribute_value, expected_type))
    related_model = getattr(all_models, self.attribute_value)
    if not related_model or not issubclass(related_model, db.Model):
      raise ValueError('Invalid attribute type: %s' % self.attribute_value)

  def _validate_object_existence(self):
    """Validate existence of provided attribute_object_id

    To verify that attribute type is correct, must be called after
    '_validate_map_type()' method.
    """
    from ggrc.models import all_models
    if not self.attribute_object_id:
      return
    related_model = getattr(all_models, self.attribute_value)
    related_object = related_model.query.filter_by(
        id=self.attribute_object_id)
    object_existence = db.session.query(related_object.exists()).scalar()
    if not object_existence:
      raise ValueError('Invalid attribute value: %s' %
                       self.custom_attribute.title)

  def _clone(self, obj):
    """Clone a custom value to a new object."""
    data = {
        "custom_attribute_id": self.custom_attribute_id,
        "attributable_id": obj.id,
        "attributable_type": self.attributable_type,
        "attribute_value": self.attribute_value,
        "attribute_object_id": self.attribute_object_id
    }
    ca_value = self.__class__(**data)
    db.session.add(ca_value)
    db.session.flush()
    return ca_value

  @builder.simple_property
  def is_empty(self):
    """Return True if the CAV is empty or holds a logically empty value."""
    # The CAV is considered empty when:
    # - the value is empty
    if not self.attribute_value:
      return True
    # - the type is Checkbox and the value is 0
    if (self.custom_attribute.attribute_type ==
            self.custom_attribute.ValidTypes.CHECKBOX and
            str(self.attribute_value) == "0"):
      return True
    # - the type is a mapping and the object value id is empty
    if (self.attribute_object_type is not None and
            not self.attribute_object_id):
      return True
    # Otherwise it the CAV is not empty
    return False

  @builder.simple_property
  def preconditions_failed(self):
    """A list of requirements self introduces that are unsatisfied.

    Returns:
      [str] - a list of unsatisfied requirements; possible items are: "value"
              - missing mandatory value, "comment" - missing mandatory
              comment, "evidence" - missing mandatory evidence.
    """
    failed_preconditions = []
    if self.custom_attribute.mandatory and self.is_empty:
      failed_preconditions += ["value"]
    if (self.custom_attribute.attribute_type ==
            self.custom_attribute.ValidTypes.DROPDOWN):
      failed_preconditions += self._check_dropdown_requirements()
    return failed_preconditions or None

  def _check_dropdown_requirements(self):
    """Check mandatory comment and mandatory evidence for dropdown CAV."""
    failed_preconditions = []
    options_to_flags = self.multi_choice_options_to_flags(
        self.custom_attribute,
    )
    flags = options_to_flags.get(self.attribute_value)
    if flags:
      for requirement in flags.keys():
        if not flags[requirement]:
          continue
        if requirement == "comment":
          failed_preconditions += self._check_mandatory_comment()
        else:
          failed_preconditions += self.attributable \
                                      .check_mandatory_requirement(
                                          requirement)
    return failed_preconditions

  def _check_mandatory_comment(self):
    """Check presence of mandatory comment."""
    if hasattr(self.attributable, "comments"):
      # A comment only satisfies the requirement if it references this CAV's
      # definition and its latest revision.
      comment_found = any(
          self.custom_attribute_id == (comment
                                       .custom_attribute_definition_id) and
          self.latest_revision.id == comment.revision_id
          for comment in self.attributable.comments
      )
    else:
      comment_found = False
    if not comment_found:
      return ["comment"]
    return []

  @staticmethod
  def multi_choice_options_to_flags(cad):
    """Parse mandatory comment and evidence flags from dropdown CA
    definition.

    Args:
      cad - a CA definition object

    Returns:
      {option_value: Flags} - a dict from dropdown options values to dict
                              where keys "comment", "evidence" and "url"
                              corresponds to the values from
                              multi_choice_mandatory bitmasks
    """
    def make_flags(multi_choice_mandatory):
      """Decode one bitmask into the comment/evidence/url flag dict."""
      flags_mask = int(multi_choice_mandatory)
      return {
          "comment": flags_mask & (cad
                                   .MultiChoiceMandatoryFlags
                                   .COMMENT_REQUIRED),
          "evidence": flags_mask & (cad
                                    .MultiChoiceMandatoryFlags
                                    .EVIDENCE_REQUIRED),
          "url": flags_mask & (cad
                               .MultiChoiceMandatoryFlags
                               .URL_REQUIRED),
      }

    if not cad.multi_choice_options or not cad.multi_choice_mandatory:
      return {}
    return dict(zip(
        cad.multi_choice_options.split(","),
        (make_flags(mask)
         for mask in cad.multi_choice_mandatory.split(",")),
    ))

  def log_json_base(self):
    """Include the resolved attribute_object in the revision log JSON."""
    res = super(CustomAttributeValue, self).log_json_base()
    if self.attribute_object_id is not None and \
       self._attribute_object_attr is not None:
      res["attribute_object"] = self.attribute_object
    return res
def secondary_contact_id(cls): # pylint: disable=no-self-argument return deferred( db.Column(db.Integer, db.ForeignKey('people.id')), cls.__name__)
class CustomAttributeValueBase(base.ContextRBAC, base.Base, ft_mixin.Indexed,
                               db.Model):
  """Custom attribute value base class"""

  __abstract__ = True

  REQUIRED_GLOBAL_REINDEX = False

  # Polymorphic reference to the object holding this value.
  attributable_type = db.Column(db.String)
  attributable_id = db.Column(db.Integer)
  attribute_value = db.Column(db.String, nullable=False, default=u"")

  _fulltext_attrs = ["attribute_value"]
  _sanitize_html = ["attribute_value"]

  # pylint: disable=protected-access
  # This is just a mapping for accessing local functions so protected access
  # warning is a false positive
  _validator_map = {
      "Text": lambda self: self._validate_text(),
      "Rich Text": lambda self: self._validate_rich_text(),
      "Date": lambda self: self._validate_date(),
      "Dropdown": lambda self: self._validate_dropdown(),
      "Multiselect": lambda self: self._validate_multiselect(),
  }

  # Attributable types whose rich-text values are stored without link parsing.
  TYPES_NO_RICHTEXT_VALIDATE = ["Control"]

  @property
  def attributable_attr(self):
    """Name of the polymorphic relationship attr for the attributable."""
    return '{0}_custom_attributable'.format(self.attributable_type)

  @property
  def attributable(self):
    """The object this value is attached to."""
    return getattr(self, self.attributable_attr)

  @attributable.setter
  def attributable(self, value):
    """Set the attributable object, keeping the id/type columns in sync."""
    self.attributable_id = value.id if value is not None else None
    self.attributable_type = value.__class__.__name__ if value is not None \
        else None
    return setattr(self, self.attributable_attr, value)

  @staticmethod
  def _extra_table_args(_):
    """One value per (attributable, definition) pair."""
    return (
        db.UniqueConstraint('attributable_id', 'custom_attribute_id'),
    )

  @property
  def latest_revision(self):
    """Latest revision of CAV (used for comment precondition check)."""
    # TODO: make eager_query fetch only the first Revision
    return self._related_revisions[0]

  def get_reindex_pair(self):
    """Type/id pair of the attributable object that needs reindexing."""
    return self.attributable_type, self.attributable_id

  @classmethod
  def eager_query(cls, **kwargs):
    """Eager-load revisions and the attribute definition."""
    query = super(CustomAttributeValueBase, cls).eager_query(**kwargs)
    query = query.options(
        orm.subqueryload('_related_revisions'),
        orm.joinedload('custom_attribute'),
    )
    return query

  @declared_attr
  def _related_revisions(cls):  # pylint: disable=no-self-argument
    """Returns related revisions."""
    def join_function():
      """Function to join CAV to its latest revision."""
      resource_id = foreign(Revision.resource_id)
      resource_type = foreign(Revision.resource_type)
      return and_(resource_id == cls.id,
                  resource_type == cls.__name__)

    # Newest revision first, so latest_revision can take element [0].
    return db.relationship(
        Revision,
        primaryjoin=join_function,
        viewonly=True,
        order_by=Revision.created_at.desc(),
    )

  def _validate_dropdown(self):
    """Validate dropdown option."""
    valid_options = set(
        self.custom_attribute.multi_choice_options.split(","))
    if self.attribute_value:
      self.attribute_value = str(self.attribute_value).strip()
      if self.attribute_value not in valid_options:
        raise ValueError("Invalid custom attribute dropdown option: {v}, "
                         "expected one of {l}"
                         .format(v=self.attribute_value, l=valid_options))

  def _validate_date(self):
    """Convert date format."""
    if self.attribute_value:
      # Validate the date format by trying to parse it
      self.attribute_value = utils.convert_date_format(
          self.attribute_value,
          utils.DATE_FORMAT_ISO,
          utils.DATE_FORMAT_ISO,
      )

  def _validate_text(self):
    """Trim whitespaces."""
    if self.attribute_value:
      self.attribute_value = self.attribute_value.strip()

  def _validate_rich_text(self):
    """Add tags for links."""
    if self.attributable_type not in self.TYPES_NO_RICHTEXT_VALIDATE:
      self.attribute_value = url_parser.parse(self.attribute_value)

  def _validate_multiselect(self):
    """Validate multiselect checkbox values."""
    if self.attribute_value:
      valid_options = set(
          self.custom_attribute.multi_choice_options.split(","))
      attr_values = set(self.attribute_value.split(","))
      if not attr_values.issubset(valid_options):
        raise ValueError("Invalid custom attribute multiselect options "
                         "{act}. "
                         "Expected some of {exp}".format(act=attr_values,
                                                         exp=valid_options))

  def validate(self):
    """Validate custom attribute value."""
    # pylint: disable=protected-access
    attributable_type = self.attributable._inflector.table_singular
    if not self.custom_attribute:
      raise ValueError("Custom attribute definition not found: Can not "
                       "validate custom attribute value")
    if self.custom_attribute.definition_type != attributable_type:
      raise ValueError("Invalid custom attribute definition used.")
    validator = self._validator_map.get(
        self.custom_attribute.attribute_type)
    if validator:
      validator(self)
def test_plan(cls): # pylint: disable=no-self-argument return deferred(db.Column(db.Text, nullable=False, default=u""), cls.__name__)
class Evidence(Roleable,
               Relatable,
               mixins.Titled,
               bfh.BeforeFlushHandleable,
               Statusable,
               mixins.WithLastDeprecatedDate,
               comment.Commentable,
               WithAutoDeprecation,
               mixin.Indexed,
               base.ContextRBAC,
               mixins.Slugged,
               db.Model):
  """Evidence (Audit-scope URLs, FILE's) model."""
  __tablename__ = "evidence"

  _title_uniqueness = False

  URL = "URL"
  FILE = "FILE"
  VALID_EVIDENCE_KINDS = [URL, FILE]

  START_STATE = 'Active'
  DEPRECATED = 'Deprecated'

  VALID_STATES = (START_STATE, DEPRECATED, )

  kind = deferred(db.Column(db.Enum(*VALID_EVIDENCE_KINDS),
                            default=URL,
                            nullable=False),
                  "Evidence")
  source_gdrive_id = deferred(db.Column(db.String, nullable=False,
                                        default=u""),
                              "Evidence")
  gdrive_id = deferred(db.Column(db.String, nullable=False, default=u""),
                       "Evidence")
  link = deferred(db.Column(db.String), "Evidence")
  description = deferred(db.Column(db.Text, nullable=False, default=u""),
                         "Evidence")
  # Fix: the deferred group was "Notes" while every other column here uses
  # "Evidence"; indexed_query undefers only "Evidence_complete", so `notes`
  # (listed in _fulltext_attrs) was lazy-loaded per row during reindexing.
  notes = deferred(db.Column(db.Text, nullable=True), "Evidence")

  # Override from Commentable mixin (can be removed after GGRC-5192)
  send_by_default = db.Column(db.Boolean, nullable=False, default=True)

  _api_attrs = reflection.ApiAttributes(
      "title",
      reflection.Attribute("link", update=False),
      reflection.Attribute("source_gdrive_id", update=False),
      "description",
      "status",
      reflection.Attribute("kind", update=False),
      reflection.Attribute("parent_obj", read=False, update=False),
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('is_uploaded', read=False, update=False),
      "notes",
  )

  _fulltext_attrs = [
      "link",
      "description",
      "kind",
      "status",
      "archived",
      "notes",
  ]

  AUTO_REINDEX_RULES = [
      mixin.ReindexRule("Audit", lambda x: x.all_related_evidences,
                        ["archived"]),
  ]

  _sanitize_html = [
      "title",
      "description",
  ]

  _aliases = {
      "title": "Title",
      "link": "Link",
      "description": "Description",
      "kind": "Type",
      "archived": {
          "display_name": "Archived",
          "mandatory": False
      },
  }

  _allowed_parents = {'Assessment', 'Audit'}
  FILE_NAME_SEPARATOR = '_ggrc'

  @orm.validates("kind")
  def validate_kind(self, key, kind):
    """Returns correct option, otherwise rises an error"""
    if kind is None:
      kind = self.URL
    if kind not in self.VALID_EVIDENCE_KINDS:
      raise exceptions.ValidationError(
          "Invalid value for attribute {attr}. "
          "Expected options are `{url}`, `{file}`".format(
              attr=key,
              url=self.URL,
              file=self.FILE)
      )
    return kind

  @classmethod
  def indexed_query(cls):
    """Query with full-text columns undeferred and relations preloaded."""
    return super(Evidence, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Evidence_complete", ),
        orm.Load(cls).subqueryload('related_sources'),
        orm.Load(cls).subqueryload('related_destinations'),
    )

  @simple_property
  def archived(self):
    """Returns a boolean whether parent is archived or not."""
    parent_candidates = self.related_objects(
        _types=Evidence._allowed_parents)
    if parent_candidates:
      parent = parent_candidates.pop()
      return parent.archived
    return False

  def log_json(self):
    """Revision log JSON with the explicit 'Evidence' type marker."""
    tmp = super(Evidence, self).log_json()
    tmp['type'] = 'Evidence'
    return tmp

  @simple_property
  def is_uploaded(self):
    """This flag is used to know if file uploaded from a local user folder.

    In that case we need just rename file, not copy.
    """
    return self._is_uploaded if hasattr(self, '_is_uploaded') else False

  @is_uploaded.setter
  def is_uploaded(self, value):
    # pylint: disable=attribute-defined-outside-init
    self._is_uploaded = value

  @simple_property
  def parent_obj(self):
    """Getter for local parent object property."""
    # pylint: disable=attribute-defined-outside-init
    return self._parent_obj

  @parent_obj.setter
  def parent_obj(self, value):
    # pylint: disable=attribute-defined-outside-init
    self._parent_obj = value

  def _get_parent_obj(self):
    """Get parent object specified

    Raises:
      ValidationError: if the parent_obj payload is malformed.
      ValueError: if the referenced parent object does not exist.
    """
    if 'id' not in self._parent_obj:
      raise exceptions.ValidationError('"id" is mandatory for parent_obj')
    if 'type' not in self._parent_obj:
      raise exceptions.ValidationError(
          '"type" is mandatory for parent_obj')
    if self._parent_obj['type'] not in self._allowed_parents:
      raise exceptions.ValidationError('Allowed types are: {}.'.format(
          ', '.join(self._allowed_parents)))

    parent_type = self._parent_obj['type']
    parent_id = self._parent_obj['id']
    obj = referenced_objects.get(parent_type, parent_id)

    if not obj:
      raise ValueError('Parent object not found: {type} {id}'.format(
          type=parent_type, id=parent_id))

    return obj

  @staticmethod
  def _build_mapped_to_string(parent_obj):
    """Build description string with information to what objects this
    evidence is mapped to for given parent object"""
    mapped_to = [
        parent_obj.slug,
    ]

    related_snapshots = parent_obj.related_objects(_types=['Snapshot'])
    related_snapshots = sorted(related_snapshots, key=lambda it: it.id)

    slugs = (sn.revision.content['slug'] for sn in related_snapshots
             if sn.child_type == parent_obj.assessment_type)
    mapped_to.extend(slugs)
    # Fixed local variable typo: mapped_to_sting -> mapped_to_string.
    mapped_to_string = 'Mapped to: {}'.format(', '.join(mapped_to).lower())
    return mapped_to_string

  def _build_relationship(self, parent_obj):
    """Build relationship between evidence and parent object"""
    from ggrc.models import all_models
    rel = all_models.Relationship(source=parent_obj, destination=self)
    db.session.add(rel)
    signals.Restful.model_put.send(rel.__class__, obj=rel, service=self)

  def _update_fields(self, response, parent):
    """Update fields of evidence with values of the copied file"""
    self.description = self._build_mapped_to_string(parent)
    self.gdrive_id = response['id']
    self.link = response['webViewLink']
    self.title = response['name']
    self.kind = Evidence.FILE

  @staticmethod
  def _get_folder(parent):
    """Return the parent's gdrive folder id, or '' if it has none."""
    return parent.folder if hasattr(parent, 'folder') else ''

  def exec_gdrive_file_copy_flow(self):
    """Execute google gdrive file copy flow

    Build file name, destination folder and copy file to that folder.
    After coping fills evidence object fields with new gdrive URL
    """
    if self.is_with_parent_obj() and \
       self.kind == Evidence.FILE and \
       self.source_gdrive_id:
      parent = self._get_parent_obj()
      folder_id = self._get_folder(parent)
      file_id = self.source_gdrive_id
      from ggrc.gdrive.file_actions import process_gdrive_file
      response = process_gdrive_file(file_id, folder_id,
                                     is_uploaded=self.is_uploaded)
      self._update_fields(response, parent)
      # pylint: disable=attribute-defined-outside-init
      self._parent_obj = None

  def is_with_parent_obj(self):
    """True if a parent_obj payload was supplied for this instance."""
    return bool(hasattr(self, '_parent_obj') and self._parent_obj)

  def add_admin_role(self):
    """Add current user as Evidence admin"""
    self.add_person_with_role_name(login.get_current_user(), "Admin")

  def handle_before_flush(self):
    """Handler that called before SQLAlchemy flush event"""
    self.exec_gdrive_file_copy_flow()
def field_declaration(cls): # pylint: disable=no-self-argument return deferred(db.Column(db.Integer, db.ForeignKey('people.id'), nullable=True), cls.__name__)
class TaskGroup(WithContact, Timeboxed, Described, Titled, Slugged, db.Model):
  """Workflow TaskGroup model."""

  __tablename__ = 'task_groups'
  _title_uniqueness = False

  # Owning workflow; rows are removed when the workflow is deleted.
  workflow_id = db.Column(
      db.Integer,
      db.ForeignKey('workflows.id', ondelete="CASCADE"),
      nullable=False,
  )
  # When set, tasks keep a fixed order within the group.
  lock_task_order = db.Column(db.Boolean(), nullable=True)

  task_group_objects = db.relationship(
      'TaskGroupObject', backref='task_group', cascade='all, delete-orphan')

  # Proxy straight to the mapped objects through TaskGroupObject rows.
  objects = association_proxy(
      'task_group_objects', 'object', 'TaskGroupObject')

  task_group_tasks = db.relationship(
      'TaskGroupTask', backref='task_group', cascade='all, delete-orphan')

  cycle_task_groups = db.relationship('CycleTaskGroup', backref='task_group')

  sort_index = db.Column(db.String(length=250), default="", nullable=False)

  _publish_attrs = [
      'workflow',
      'task_group_objects',
      PublishOnly('objects'),
      'task_group_tasks',
      'lock_task_order',
      'sort_index',
      # Intentionally do not include `cycle_task_groups`
      # 'cycle_task_groups',
  ]

  # Import/export column aliases; None disables the inherited column.
  _aliases = {
      "title": "Summary",
      "description": "Details",
      "contact": {
          "display_name": "Assignee",
          "mandatory": True,
          "filter_by": "_filter_by_contact",
      },
      "secondary_contact": None,
      "start_date": None,
      "end_date": None,
      "workflow": {
          "display_name": "Workflow",
          "mandatory": True,
          "filter_by": "_filter_by_workflow",
      },
      "task_group_objects": {
          "display_name": "Objects",
          "type": AttributeInfo.Type.SPECIAL_MAPPING,
          "filter_by": "_filter_by_objects",
      },
  }

  def copy(self, _other=None, **kwargs):
    """Create a copy of this task group.

    Copies the listed columns into ``_other`` (or a new instance); the
    contact is copied only when ``clone_people`` is set, otherwise the
    current user becomes the contact.  Mapped objects and tasks are copied
    when ``clone_objects`` / ``clone_tasks`` are set in kwargs.
    """
    columns = [
        'title', 'description', 'workflow', 'sort_index', 'modified_by',
        'context'
    ]

    if kwargs.get('clone_people', False) and getattr(self, "contact"):
      columns.append("contact")
    else:
      kwargs["contact"] = get_current_user()

    target = self.copy_into(_other, columns, **kwargs)

    if kwargs.get('clone_objects', False):
      self.copy_objects(target, **kwargs)

    if kwargs.get('clone_tasks', False):
      self.copy_tasks(target, **kwargs)

    return target

  def copy_objects(self, target, **kwargs):
    """Copy this group's object mappings onto ``target``; returns target."""
    # pylint: disable=unused-argument
    for task_group_object in self.task_group_objects:
      target.task_group_objects.append(task_group_object.copy(
          task_group=target,
          context=target.context,
      ))

    return target

  def copy_tasks(self, target, **kwargs):
    """Copy this group's tasks onto ``target``; returns target.

    People assignments on tasks are cloned only when ``clone_people`` is set.
    """
    for task_group_task in self.task_group_tasks:
      target.task_group_tasks.append(task_group_task.copy(
          None,
          task_group=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
      ))

    return target

  @classmethod
  def _filter_by_workflow(cls, predicate):
    """EXISTS clause matching the owning workflow's slug or title."""
    from ggrc_workflows.models import Workflow
    return Workflow.query.filter(
        (Workflow.id == cls.workflow_id) &
        (predicate(Workflow.slug) | predicate(Workflow.title))
    ).exists()

  @classmethod
  def _filter_by_objects(cls, predicate):
    """EXISTS clause matching any mapped object's slug (or email)."""
    parts = []
    for model_name in all_models.__all__:
      model = getattr(all_models, model_name)
      query = getattr(model, "query", None)
      # Prefer slug; fall back to email (e.g. for Person-like models).
      field = getattr(model, "slug", getattr(model, "email", None))
      if query is None or field is None or not hasattr(model, "id"):
        continue
      parts.append(query.filter(
          (TaskGroupObject.object_type == model_name) &
          (model.id == TaskGroupObject.object_id) &
          predicate(field)
      ).exists())
    return TaskGroupObject.query.filter(
        (TaskGroupObject.task_group_id == cls.id) &
        or_(*parts)
    ).exists()
class Assessment(Assignable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, HasObjectState, TestPlanned,
                 CustomAttributable, PublicDocumentable, Commentable,
                 Personable, reminderable.Reminderable, Relatable,
                 LastDeprecatedTimeboxed, WithSimilarityScore, FinishedDate,
                 VerifiedDate, Notifiable, WithAction, labeled.Labeled,
                 with_last_comment.WithLastComment,
                 issuetracker_issue.IssueTracked, BusinessObject, Indexed,
                 db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not
  certain conditions were met for that object.
  """

  __tablename__ = 'assessments'
  _title_uniqueness = False

  # Extra not-done state on top of the ones provided by Statusable.
  REWORK_NEEDED = u"Rework Needed"
  NOT_DONE_STATES = statusable.Statusable.NOT_DONE_STATES | {
      REWORK_NEEDED,
  }
  VALID_STATES = tuple(NOT_DONE_STATES | statusable.Statusable.DONE_STATES |
                       statusable.Statusable.INACTIVE_STATES)

  # Reminder configuration: notify Assignees while in the listed states.
  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assignees",
              "In Progress": "Assignees"
          },
          "reminders": {
              "assessment_assignees_reminder",
          }
      }
  }

  # Conclusion fields; empty string means "no conclusion yet".
  design = deferred(db.Column(db.String, nullable=False, default=""),
                    "Assessment")
  operationally = deferred(db.Column(db.String, nullable=False, default=""),
                           "Assessment")
  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')
  assessment_type = deferred(
      db.Column(db.String, nullable=False, server_default="Control"),
      "Assessment")
  # whether to use the object test plan on snapshot mapping
  test_plan_procedure = db.Column(db.Boolean, nullable=False, default=True)

  @declared_attr
  def object_level_definitions(cls):  # pylint: disable=no-self-argument
    """Set up a backref so that we can create an object level custom
       attribute definition without the need to do a flush to get the
       assessment id.

    This is used in the relate_ca method in hooks/assessment.py.
    """
    return db.relationship(
        'CustomAttributeDefinition',
        primaryjoin=lambda: and_(
            remote(CustomAttributeDefinition.definition_id) == cls.id,
            remote(CustomAttributeDefinition.definition_type) ==
            "assessment"),
        foreign_keys=[
            CustomAttributeDefinition.definition_id,
            CustomAttributeDefinition.definition_type
        ],
        backref='assessment_definition',
        cascade='all, delete-orphan')

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = frozenset(
      ["Effective", "Ineffective", "Needs improvement", "Not Applicable"])

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'design',
      'operationally',
      'audit',
      'assessment_type',
      'test_plan_procedure',
      reflection.Attribute('issue_tracker', create=False, update=False),
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('folder', create=False, update=False),
      reflection.Attribute('object', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'design',
      'operationally',
      'folder',
  ]

  # Reindex assessments when their audit's `archived` flag changes.
  AUTO_REINDEX_RULES = [
      mixin.ReindexRule("Audit", lambda x: x.assessments, ["archived"]),
  ]

  _custom_publish = {
      'audit': audit.build_audit_stub,
  }

  @classmethod
  def _populate_query(cls, query):
    """Add eager-loading options shared by eager_query and indexed_query."""
    return query.options(
        orm.Load(cls).undefer_group("Assessment_complete", ),
        orm.Load(cls).joinedload("audit").undefer_group("Audit_complete", ),
    )

  @classmethod
  def eager_query(cls):
    return cls._populate_query(super(Assessment, cls).eager_query())

  @classmethod
  def indexed_query(cls):
    return cls._populate_query(super(Assessment, cls).indexed_query())

  def log_json(self):
    """Revision JSON; includes the audit folder for the event log."""
    out_json = super(Assessment, self).log_json()
    out_json["folder"] = self.folder
    return out_json

  # Import/export column aliases; None disables the inherited column.
  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "assessment_type": {
          "display_name": "Assessment Type",
          "mandatory": False,
      },
      "design": "Conclusion: Design",
      "operationally": "Conclusion: Operation",
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
      },
      "test_plan": "Assessment Procedure",
      # Currently we decided to have 'Due Date' alias for start_date,
      # but it can be changed in future
      "start_date": "Due Date",
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      },
      "issue_tracker": {
          "display_name": "Ticket Tracker",
          "mandatory": False,
          "view_only": True,
      }
  }

  def __init__(self, *args, **kwargs):
    super(Assessment, self).__init__(*args, **kwargs)
    # Per-instance warnings keyed by domain, e.g. 'issue_tracker'.
    self._warnings = collections.defaultdict(list)

  @orm.reconstructor
  def init_on_load(self):
    # __init__ is not called when SQLAlchemy loads from the DB; recreate
    # the transient warnings store here.
    self._warnings = collections.defaultdict(list)

  def add_warning(self, domain, msg):
    """Record a warning message under the given domain."""
    self._warnings[domain].append(msg)

  @simple_property
  def issue_tracker(self):
    """Returns representation of issue tracker related info as a dict."""
    issue_obj = issuetracker_issue.IssuetrackerIssue.get_issue(
        'Assessment', self.id)
    res = issue_obj.to_dict(
        include_issue=True) if issue_obj is not None else {}
    res['_warnings'] = self._warnings['issue_tracker']
    return res

  @simple_property
  def archived(self):
    """Returns a boolean whether assessment is archived or not."""
    return self.audit.archived if self.audit else False

  @simple_property
  def folder(self):
    # Folder is delegated to the audit; empty string when no audit is set.
    return self.audit.folder if self.audit else ""

  def validate_conclusion(self, value):
    """Return value if it is a valid conclusion, else empty string."""
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("status")
  def validate_status(self, key, value):
    value = super(Assessment, self).validate_status(key, value)
    # pylint: disable=unused-argument
    if self.status == value:
      return value
    # From `Rework Needed` only a restricted set of transitions is allowed.
    if self.status == self.REWORK_NEEDED:
      valid_states = [self.DONE_STATE, self.FINAL_STATE, self.DEPRECATED]
      if value not in valid_states:
        raise ValueError("Assessment in `Rework Needed` "
                         "state can be only moved to: [{}]".format(
                             ",".join(valid_states)))
    return value

  @validates("operationally")
  def validate_opperationally(self, key, value):
    # NOTE(review): method name has a typo ("opperationally"); kept as-is
    # since the validator is registered by decorator, not by name.
    """Validate the 'operationally' conclusion value."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate the 'design' conclusion value."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("assessment_type")
  def validate_assessment_type(self, key, value):
    """Validate assessment type to be the same as existing model name"""
    # pylint: disable=unused-argument
    # pylint: disable=no-self-use
    from ggrc.snapshotter.rules import Types
    if value and value not in Types.all:
      raise ValueError(
          "Assessment type '{}' is not snapshotable".format(value))
    return value

  @classmethod
  def _ignore_filter(cls, _):
    # Used as a no-op "filter_by" for aliases that must not be filtered on.
    return None
class Control(WithLastAssessmentDate, HasObjectState, Roleable, Relatable,
              CustomAttributable, Personable, ControlCategorized,
              PublicDocumentable, AssertionCategorized, Hierarchical,
              LastDeprecatedTimeboxed, Auditable, TestPlanned,
              BusinessObject, Indexed, db.Model):
  """Control model."""

  __tablename__ = 'controls'

  company_control = deferred(db.Column(db.Boolean), 'Control')
  directive_id = deferred(
      db.Column(db.Integer, db.ForeignKey('directives.id')), 'Control')
  # kind_id / means_id / verify_frequency_id point at Option rows; joins are
  # expressed via the primaryjoin strings on the relationships below.
  kind_id = deferred(db.Column(db.Integer), 'Control')
  means_id = deferred(db.Column(db.Integer), 'Control')
  version = deferred(db.Column(db.String), 'Control')
  documentation_description = deferred(db.Column(db.Text), 'Control')
  verify_frequency_id = deferred(db.Column(db.Integer), 'Control')
  fraud_related = deferred(db.Column(db.Boolean), 'Control')
  key_control = deferred(db.Column(db.Boolean), 'Control')
  active = deferred(db.Column(db.Boolean), 'Control')
  principal_assessor_id = deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), 'Control')
  secondary_assessor_id = deferred(
      db.Column(db.Integer, db.ForeignKey('people.id')), 'Control')

  principal_assessor = db.relationship(
      'Person', uselist=False,
      foreign_keys='Control.principal_assessor_id')
  secondary_assessor = db.relationship(
      'Person', uselist=False,
      foreign_keys='Control.secondary_assessor_id')

  # Option relationships are role-scoped: the same Option table backs
  # several enumerations, distinguished by Option.role.
  kind = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.kind_id) == Option.id, '
                  'Option.role == "control_kind")',
      uselist=False)
  means = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.means_id) == Option.id, '
                  'Option.role == "control_means")',
      uselist=False)
  verify_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.verify_frequency_id) == Option.id, '
                  'Option.role == "verify_frequency")',
      uselist=False)

  @staticmethod
  def _extra_table_args(_):
    """Indexes on the assessor foreign keys."""
    return (
        db.Index('ix_controls_principal_assessor', 'principal_assessor_id'),
        db.Index('ix_controls_secondary_assessor', 'secondary_assessor_id'),
    )

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'active',
      'company_control',
      'directive',
      'documentation_description',
      'fraud_related',
      'key_control',
      'kind',
      'means',
      'verify_frequency',
      'version',
      'principal_assessor',
      'secondary_assessor',
  )

  _fulltext_attrs = [
      'active',
      'company_control',
      'directive',
      'documentation_description',
      # Booleans are indexed as human-readable words, not True/False.
      attributes.BooleanFullTextAttr('fraud_related',
                                     'fraud_related',
                                     true_value="yes", false_value="no"),
      attributes.BooleanFullTextAttr('key_control',
                                     'key_control',
                                     true_value="key",
                                     false_value="non-key"),
      'kind',
      'means',
      'verify_frequency',
      'version',
      attributes.FullTextAttr("principal_assessor", "principal_assessor",
                              ["name", "email"]),
      attributes.FullTextAttr('secondary_assessor', 'secondary_assessor',
                              ["name", "email"]),
  ]

  _sanitize_html = [
      'documentation_description',
      'version',
  ]

  @classmethod
  def indexed_query(cls):
    """Eager-load everything the fulltext indexer needs in one query."""
    return super(Control, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Control_complete"),
        orm.Load(cls).joinedload("directive").undefer_group(
            "Directive_complete"),
        orm.Load(cls).joinedload("principal_assessor").undefer_group(
            "Person_complete"),
        orm.Load(cls).joinedload("secondary_assessor").undefer_group(
            "Person_complete"),
        orm.Load(cls).joinedload(
            'kind',
        ).undefer_group("Option_complete"),
        orm.Load(cls).joinedload(
            'means',
        ).undefer_group("Option_complete"),
        orm.Load(cls).joinedload(
            'verify_frequency',
        ).undefer_group("Option_complete"),
    )

  _include_links = []

  # Import/export column aliases; None disables the inherited column.
  _aliases = {
      "kind": "Kind/Nature",
      "means": "Type/Means",
      "verify_frequency": "Frequency",
      "fraud_related": "Fraud Related",
      "key_control": {
          "display_name": "Significance",
          "description": "Allowed values are:\nkey\nnon-key\n---",
      },
      # overrides values from PublicDocumentable mixin
      "document_url": None,
  }

  @validates('kind', 'means', 'verify_frequency')
  def validate_control_options(self, key, option):
    """Check that the assigned Option matches the role for this field."""
    # 'verify_frequency' uses its own role; 'kind'/'means' are prefixed.
    desired_role = key if key == 'verify_frequency' else 'control_' + key
    return validate_option(self.__class__.__name__, key, option,
                           desired_role)

  @classmethod
  def eager_query(cls):
    query = super(Control, cls).eager_query()
    return cls.eager_inclusions(query, Control._include_links).options(
        orm.joinedload('directive'),
        orm.joinedload('principal_assessor'),
        orm.joinedload('secondary_assessor'),
        orm.joinedload('kind'),
        orm.joinedload('means'),
        orm.joinedload('verify_frequency'),
    )

  def log_json(self):
    out_json = super(Control, self).log_json()
    # so that event log can refer to deleted directive
    if self.directive:
      out_json["mapped_directive"] = self.directive.display_name
    return out_json