class Revision(base.ContextRBAC, Base, db.Model):
  """Revision object holds a JSON snapshot of the object at a time.

  A row is written for every create/modify/delete of a resource; the raw
  snapshot is stored in `_content` and exposed through the `content`
  property, which patches older snapshots up to the current schema
  (ACLs, statuses, labels, CAVs, Requirement renames, ...).
  """

  __tablename__ = 'revisions'

  # Identity of the snapshotted resource (polymorphic type/id pair).
  resource_id = db.Column(db.Integer, nullable=False)
  resource_type = db.Column(db.String, nullable=False)
  event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False)
  action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                     nullable=False)
  # Raw JSON snapshot; read through the `content` property, not directly.
  _content = db.Column('content', LongJsonType, nullable=False)

  resource_slug = db.Column(db.String, nullable=True)
  # For Relationship-like resources the revision also records both endpoints.
  source_type = db.Column(db.String, nullable=True)
  source_id = db.Column(db.Integer, nullable=True)
  destination_type = db.Column(db.String, nullable=True)
  destination_id = db.Column(db.Integer, nullable=True)

  @staticmethod
  def _extra_table_args(_):
    """Return extra table indexes for the revisions table."""
    return (
        db.Index("revisions_modified_by", "modified_by_id"),
        db.Index("fk_revisions_resource", "resource_type", "resource_id"),
        db.Index("fk_revisions_source", "source_type", "source_id"),
        db.Index("fk_revisions_destination",
                 "destination_type", "destination_id"),
        db.Index('ix_revisions_resource_slug', 'resource_slug'),
    )

  _api_attrs = reflection.ApiAttributes(
      'resource_id',
      'resource_type',
      'source_type',
      'source_id',
      'destination_type',
      'destination_id',
      'action',
      'content',
      'description',
      reflection.Attribute('diff_with_current', create=False, update=False),
      reflection.Attribute('meta', create=False, update=False),
  )

  @classmethod
  def eager_query(cls):
    """Eagerly load relations needed when serializing revisions."""
    from sqlalchemy import orm

    query = super(Revision, cls).eager_query()
    return query.options(
        orm.subqueryload('modified_by'),
        orm.subqueryload('event'),  # used in description
    )

  def __init__(self, obj, modified_by_id, action, content):
    """Create a revision snapshot for `obj`.

    Args:
      obj: the model instance being snapshotted; id/type/slug and, when
          present, source/destination endpoints are copied from it.
      modified_by_id: id of the acting user.
      action: one of u'created', u'modified', u'deleted'.
      content: JSON-serializable dict snapshot of the object.
    """
    self.resource_id = obj.id
    self.resource_type = obj.__class__.__name__
    self.resource_slug = getattr(obj, "slug", None)
    self.modified_by_id = modified_by_id
    self.action = action
    # Inject person stubs into ACL entries so the stored snapshot already
    # carries the format the frontend expects.
    if "access_control_list" in content and content["access_control_list"]:
      for acl in content["access_control_list"]:
        acl["person"] = {
            "id": acl["person_id"],
            "type": "Person",
            "href": "/api/people/{}".format(acl["person_id"]),
        }
    self._content = content

    # Copy relationship endpoints when the object has them (None otherwise).
    for attr in ["source_type",
                 "source_id",
                 "destination_type",
                 "destination_id"]:
      setattr(self, attr, getattr(obj, attr, None))

  @builder.callable_property
  def diff_with_current(self):
    """Callable lazy property for revision.

    Marks the referenced object for batch cache warm-up and returns a
    loader that computes the diff between this revision's content and the
    object's latest content.
    """
    referenced_objects.mark_to_cache(self.resource_type, self.resource_id)
    revisions_diff.mark_for_latest_content(self.resource_type,
                                           self.resource_id)

    def lazy_loader():
      """Lazy load diff for revisions."""
      referenced_objects.rewarm_cache()
      revisions_diff.rewarm_latest_content()
      instance = referenced_objects.get(self.resource_type, self.resource_id)
      if instance:
        return revisions_diff.prepare(instance, self.content)
      # return empty diff object has already been removed
      return {}

    return lazy_loader

  @builder.callable_property
  def meta(self):
    """Callable lazy property for revision.

    Returns a loader producing meta info (currently the `mandatory` flag)
    for the referenced object, or an empty dict if it no longer exists.
    """
    referenced_objects.mark_to_cache(self.resource_type, self.resource_id)

    def lazy_loader():
      """Lazy load diff for revisions."""
      referenced_objects.rewarm_cache()
      instance = referenced_objects.get(self.resource_type, self.resource_id)
      meta_dict = {}
      if instance:
        instance_meta_info = meta_info.MetaInfo(instance)
        meta_dict["mandatory"] = instance_meta_info.mandatory
      return meta_dict

    return lazy_loader

  @builder.simple_property
  def description(self):
    """Compute a human readable description from action and content."""
    if 'display_name' not in self._content:
      return ''
    display_name = self._content['display_name']
    if not display_name:
      result = u"{0} {1}".format(self.resource_type, self.action)
    elif u'<->' in display_name:
      # Mapping revisions encode both endpoints as "src <-> dst".
      if self.action == 'created':
        msg = u"{destination} linked to {source}"
      elif self.action == 'deleted':
        msg = u"{destination} unlinked from {source}"
      else:
        msg = u"{display_name} {action}"
      source, destination = self._content['display_name'].split('<->')[:2]
      result = msg.format(source=source,
                          destination=destination,
                          display_name=self._content['display_name'],
                          action=self.action)
    elif 'mapped_directive' in self._content:
      # then this is a special case of combined map/creation
      # should happen only for Requirement and Control
      mapped_directive = self._content['mapped_directive']
      if self.action == 'created':
        result = u"New {0}, {1}, created and mapped to {2}".format(
            self.resource_type, display_name, mapped_directive)
      elif self.action == 'deleted':
        result = u"{0} unmapped from {1} and deleted".format(
            display_name, mapped_directive)
      else:
        result = u"{0} {1}".format(display_name, self.action)
    else:
      # otherwise, it's a normal creation event
      result = u"{0} {1}".format(display_name, self.action)
    if self.event.action == "BULK":
      result += ", via bulk action"
    return result

  def populate_reference_url(self):
    """Add reference_url info for older revisions.

    Returns:
      dict with a "reference_url" list of document stubs built from the
      legacy 'url'/'reference_url' string fields, or {} for new-style
      snapshots that have no 'url' key.
    """
    if 'url' not in self._content:
      return {}
    reference_url_list = []
    for key in ('url', 'reference_url'):
      link = self._content[key]
      # link might exist, but can be an empty string - we treat those values
      # as non-existing (empty) reference URLs
      if not link:
        continue
      # if creation/modification date is not available, we estimate it by
      # using the corresponding information from the Revision itself
      created_at = (self._content.get("created_at") or
                    self.created_at.isoformat())
      updated_at = (self._content.get("updated_at") or
                    self.updated_at.isoformat())
      reference_url_list.append({
          "display_name": link,
          "kind": "REFERENCE_URL",
          "link": link,
          "title": link,
          "id": None,
          "created_at": created_at,
          "updated_at": updated_at,
      })
    return {'reference_url': reference_url_list}

  @classmethod
  def _filter_internal_acls(cls, access_control_list):
    """Remove internal access control list entries.

    This is needed due to bugs in older code that in some cases the
    revisions stored internal ACL entries. Due to possible role removal,
    the parent_id is the only true flag that we can use for filtering.

    Args:
      access_control_list: list of dicts containing ACL entries.

    Returns:
      access_control_list but without any ACL entry that was generated
      from some other ACL entry.
    """
    return [
        acl
        for acl in access_control_list
        if acl.get("parent_id") is None
    ]

  @classmethod
  def _populate_acl_with_people(cls, access_control_list):
    """Add person property with person stub on access control list."""
    for acl in access_control_list:
      if "person" not in acl:
        acl["person"] = {"id": acl.get("person_id"), "type": "Person"}
    return access_control_list

  def populate_acl(self):
    """Add access_control_list info for older revisions.

    Converts legacy single-person fields (assessors, contacts, owners)
    into ACL entries, then filters out internal entries and entries whose
    role no longer exists.
    """
    roles_dict = role.get_custom_roles_for(self.resource_type)
    # name -> id lookup for the current custom roles of this resource type
    reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}

    access_control_list = self._content.get("access_control_list") or []
    map_field_to_role = {
        "principal_assessor": reverted_roles_dict.get("Principal Assignees"),
        "secondary_assessor": reverted_roles_dict.get("Secondary Assignees"),
        "contact": reverted_roles_dict.get("Primary Contacts"),
        "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
        "owners": reverted_roles_dict.get("Admin"),
    }
    exists_roles = {i["ac_role_id"] for i in access_control_list}

    for field, role_id in map_field_to_role.items():
      # Skip roles already present in the ACL or no longer defined.
      if role_id in exists_roles or role_id is None:
        continue
      if field not in self._content:
        continue
      field_content = self._content.get(field) or {}
      if not field_content:
        continue
      if not isinstance(field_content, list):
        field_content = [field_content]
      person_ids = {fc.get("id") for fc in field_content if fc.get("id")}
      for person_id in person_ids:
        access_control_list.append({
            "display_name": roles_dict[role_id],
            "ac_role_id": role_id,
            "context_id": None,
            "created_at": None,
            "object_type": self.resource_type,
            "updated_at": None,
            "object_id": self.resource_id,
            "modified_by_id": None,
            "person_id": person_id,
            # Frontend require data in such format
            "person": {
                "id": person_id,
                "type": "Person",
                "href": "/api/people/{}".format(person_id)
            },
            "modified_by": None,
            "id": None,
        })
    acl_with_people = self._populate_acl_with_people(access_control_list)
    filtered_acl = self._filter_internal_acls(acl_with_people)
    result_acl = [
        acl for acl in filtered_acl
        if acl["ac_role_id"] in roles_dict
    ]
    return {
        "access_control_list": result_acl,
    }

  def populate_folder(self):
    """Add folder info for older revisions."""
    if "folder" in self._content:
      return {}
    # Legacy snapshots stored a list of folders; only the first id is kept.
    folders = self._content.get("folders") or [{"id": ""}]
    return {"folder": folders[0]["id"]}

  def populate_labels(self):
    """Add labels info for older revisions."""
    if "label" not in self._content:
      return {}
    label = self._content["label"]
    return {"labels": [{"id": None,
                        "name": label}]} if label else {"labels": []}

  def populate_status(self):
    """Update status for older revisions or add it if status does not exist."""
    # Workflow objects only get the "InProgress" -> "In Progress" rename.
    workflow_models = {
        "Cycle",
        "CycleTaskGroup",
        "CycleTaskGroupObjectTask",
    }
    statuses_mapping = {
        "InProgress": "In Progress"
    }
    status = statuses_mapping.get(self._content.get("status"))
    if self.resource_type in workflow_models and status:
      return {"status": status}

    pop_models = {
        # ggrc
        "AccessGroup",
        "Clause",
        "Control",
        "DataAsset",
        "Directive",
        "Facility",
        "Issue",
        "Market",
        "Objective",
        "OrgGroup",
        "Product",
        "Program",
        "Project",
        "Requirement",
        "System",
        "Vendor",

        # ggrc_risks
        "Risk",
        "Threat",
    }
    if self.resource_type not in pop_models:
      return {}
    # Map legacy statuses onto the current Active/Deprecated/Draft set;
    # anything unknown falls back to "Draft".
    statuses_mapping = {
        "Active": "Active",
        "Deprecated": "Deprecated",
        "Effective": "Active",
        "Final": "Active",
        "In Scope": "Active",
        "Ineffective": "Active",
        "Launched": "Active",
    }
    return {"status": statuses_mapping.get(self._content.get("status"),
                                           "Draft")}

  def _document_evidence_hack(self):
    """Update display_name on evidences.

    Evidences have display names from links and titles, and until now they
    used slug property to calculate the display name. This hack is here
    since we must support older revisions with bad data, and to avoid
    using slug differently than everywhere else in the app.

    This function only modifies existing evidence entries on any given
    object. If an object does not have any document evidences then an
    empty dict is returned.

    Returns:
      dict with updated display name for each of the evidence entries if
      there are any.
    """
    if "document_evidence" not in self._content:
      return {}
    document_evidence = self._content.get("document_evidence")
    for evidence in document_evidence:
      evidence[u"display_name"] = u"{link} {title}".format(
          link=evidence.get("link"),
          title=evidence.get("title"),
      ).strip()
    return {u"documents_file": document_evidence}

  def populate_categoies(self, key_name):
    """Fix revision logger.

    On controls, the category field used to log categorization instances;
    convert those into plain category stubs. (NOTE: the method name typo
    "categoies" is kept — callers depend on it.)
    """
    if self.resource_type != "Control":
      return {}
    result = []
    for categorization in self._content.get(key_name) or []:
      if "category_id" in categorization:
        result.append({
            "id": categorization["category_id"],
            "type": categorization["category_type"],
            "name": categorization["display_name"],
            "display_name": categorization["display_name"],
        })
      else:
        result.append(categorization)
    return {key_name: result}

  def _get_cavs(self):
    """Return cavs values from content.

    Prefers the new "custom_attribute_values" key and falls back to the
    legacy "custom_attributes" key.
    """
    if "custom_attribute_values" in self._content:
      return self._content["custom_attribute_values"]
    if "custom_attributes" in self._content:
      return self._content["custom_attributes"]
    return []

  def populate_cavs(self):
    """Setup cads in cav list if they are not presented in content

    but now they are associated to instance.
    """
    from ggrc.models import custom_attribute_definition
    cads = custom_attribute_definition.get_custom_attributes_for(
        self.resource_type, self.resource_id)
    cavs = {int(i["custom_attribute_id"]): i for i in self._get_cavs()}
    for cad in cads:
      custom_attribute_id = int(cad["id"])
      if custom_attribute_id in cavs:
        # Old revisions can contain falsy values for a Checkbox
        if cad["attribute_type"] == "Checkbox" \
                and not cavs[custom_attribute_id]["attribute_value"]:
          cavs[custom_attribute_id]["attribute_value"] = cad["default_value"]
        continue
      # Synthesize a stub value for definitions the snapshot predates.
      if cad["attribute_type"] == "Map:Person":
        value = "Person"
      else:
        value = cad["default_value"]
      cavs[custom_attribute_id] = {
          "attribute_value": value,
          "attribute_object_id": None,
          "custom_attribute_id": custom_attribute_id,
          "attributable_id": self.resource_id,
          "attributable_type": self.resource_type,
          "display_name": "",
          "attribute_object": None,
          "type": "CustomAttributeValue",
          "context_id": None,
      }
    return {
        "custom_attribute_values": cavs.values(),
        "custom_attribute_definitions": cads
    }

  def populate_cad_default_values(self):
    """Setup default_value to CADs if it's needed."""
    from ggrc.models import all_models
    if "custom_attribute_definitions" not in self._content:
      return {}
    cads = []
    for cad in self._content["custom_attribute_definitions"]:
      if "default_value" not in cad:
        cad["default_value"] = (
            all_models.CustomAttributeDefinition.get_default_value_for(
                cad["attribute_type"]))
      cads.append(cad)
    return {"custom_attribute_definitions": cads}

  def populate_requirements(self, populated_content):  # noqa pylint: disable=too-many-branches
    """Populates revision content for Requirement models and models with fields

    that can contain Requirement old names. These fields would be checked
    and updated where necessary (mutates `populated_content` in place).
    """
    # change to add Requirement old names
    requirement_type = [
        "Section",
    ]
    # change to add models and fields that can contain Requirement old names
    affected_models = {
        "AccessControlList": [
            "object_type",
        ],
        "AccessControlRole": [
            "object_type",
        ],
        "Assessment": [
            "assessment_type",
        ],
        "AssessmentTemplate": [
            "template_object_type",
        ],
        "Automapping": [
            "source_type",
            "destination_type",
        ],
        "CustomAttributeValue": [
            "attributable_type",
        ],
        "Event": [
            "resource_type",
        ],
        "ObjectPerson": [
            "personable_type",
        ],
        "Relationship": [
            "source_type",
            "destination_type",
        ],
        "Revision": [
            "resource_type",
        ],
        "Label": [
            "object_type",
        ],
        "Context": [
            "related_object_type",
        ],
        "IssuetrackerIssue": [
            "object_type",
        ],
        "ObjectLabel": [
            "object_type",
        ],
        "ObjectTemplates": [
            "name",
        ],
        "Proposal": [
            "instance_type",
        ],
        "Snapshot": [
            "child_type",
            "parent_type",
        ],
        "TaskGroupObject": [
            "object_type",
        ],
    }
    # change to add special values cases
    special_cases = {
        "CustomAttributeDefinition": {
            "fields": [
                "definition_type",
            ],
            "old_values": [
                "section",
            ],
            "new_value": "requirement",
        }
    }

    obj_type = self.resource_type

    # populate fields if they contain old names
    if obj_type in affected_models.keys():
      for field in affected_models[obj_type]:
        if populated_content.get(field) in requirement_type:
          populated_content[field] = "Requirement"

    # populate fields for models that contain old names in special spelling
    if obj_type in special_cases.keys():
      for field in special_cases[obj_type]["fields"]:
        if populated_content[field] in special_cases[obj_type]["old_values"]:
          populated_content[field] = special_cases[obj_type]["new_value"]

    # populate Requirements revisions
    if obj_type == "Requirement":
      populated_content["type"] = "Requirement"

      acls = populated_content.get("access_control_list", {})
      if acls:
        for acl in acls:
          if acl.get("object_type") in requirement_type:
            acl["object_type"] = "Requirement"
        populated_content["access_control_list"] = acls

      cavs = populated_content.get("custom_attribute_values", {})
      if cavs:
        for cav in cavs:
          if cav.get("attributable_type") in requirement_type:
            cav["attributable_type"] = "Requirement"
        populated_content["custom_attribute_values"] = cavs

  @builder.simple_property
  def content(self):
    """Property. Contains the revision content dict.

    Updated by required values, generated from saved content dict.
    The raw snapshot is copied and patched by all populate_* helpers so
    older revisions serialize in the current schema.
    """
    # pylint: disable=too-many-locals
    populated_content = self._content.copy()
    populated_content.update(self.populate_acl())
    populated_content.update(self.populate_reference_url())
    populated_content.update(self.populate_folder())
    populated_content.update(self.populate_labels())
    populated_content.update(self.populate_status())
    populated_content.update(self._document_evidence_hack())
    populated_content.update(self.populate_categoies("categories"))
    populated_content.update(self.populate_categoies("assertions"))
    populated_content.update(self.populate_cad_default_values())
    populated_content.update(self.populate_cavs())

    # populate_requirements mutates the dict in place (returns None).
    self.populate_requirements(populated_content)
    # remove custom_attributes,
    # it's old style interface and now it's not needed
    populated_content.pop("custom_attributes", None)

    return populated_content

  @content.setter
  def content(self, value):
    """Setter for content property."""
    self._content = value
class AssertionCategorized(Categorizable):
  """Mixin for control only. Declare assertions for controls."""

  @declared_attr
  def categorized_assertions(cls):  # pylint: disable=no-self-argument
    # Association-proxy backing collection provided by Categorizable.
    return cls.declare_categorizable(
        "ControlAssertion", "assertion", "_assertions",
        "categorized_assertions")

  @hybrid_property
  def assertions(self):
    # Proxied view over categorized_assertions (see declared_attr above).
    return self._assertions

  @assertions.setter
  def assertions(self, values):
    """Setter function for control's assertions.

    This setter function accepts two kind of values:
      - List of assertion objects. This is used to set assertions in
        back-end.
      - List of dicts containing json representation of assertion values.
        This is used when setting assertions through the API and json
        builder.

    Args:
      values: List of assertions or dicts containing json representation
        of assertion values.

    Raises:
      ValueError: if values is empty or contains invalid assertion ids.
    """
    if not values:
      raise ValueError("Missing mandatory attribute: assertions")
    if isinstance(values[0], dict):
      values = self._get_assertions(values)
    self._set_assertions(values)

  @staticmethod
  def _get_assertions(values):
    """Get assertion objects from serialized values.

    Args:
      values: List of dicts representing `ControlAssertion` objects.

    Raises:
      ValueError: if any dict lacks an `id` or an id does not match an
        existing ControlAssertion.
    """
    new_assertions_ids = [v.get(u'id', False) for v in values]
    if not (new_assertions_ids and all(new_assertions_ids)):
      # Not all items in `values` contain `id` field.
      raise ValueError("Invalid values for attribute: assertions")
    new_assertions = ControlAssertion.eager_query().filter(
        ControlAssertion.id.in_(new_assertions_ids)).all()
    if len(new_assertions) != len(new_assertions_ids):
      # Not all passed assertion ids are valid.
      raise ValueError("Invalid values for attribute: assertions")
    return new_assertions

  def _set_assertions(self, values):
    """Set control assertions.

    Computes the set difference against the current assertions and only
    adds/removes the changed categorizations.

    Args:
      values: List of `ControlAssertion` objects.
    """
    # pylint: disable=not-an-iterable
    proxied_set_map = dict([
        (a.category, a) for a in self.categorized_assertions
    ])
    # pylint: enable=not-an-iterable
    old_set, new_set = set(self.assertions), set(values)
    current_user_id = login.get_current_user_id()

    # Append new categorizations, stamped with the acting user.
    for assertion in new_set - old_set:
      new_assertion = self.assertions.creator(assertion)
      new_assertion.modified_by_id = current_user_id
      self.categorized_assertions.append(new_assertion)

    # Remove categorizations no longer present in the new value list.
    for assertion in old_set - new_set:
      self.categorized_assertions.remove(proxied_set_map[assertion])

  _update_raw = ["assertions", ]

  _fulltext_attrs = [
      attributes.MultipleSubpropertyFullTextAttr(
          "assertions", "categorized_assertions", ["category"]
      ),
  ]

  _api_attrs = reflection.ApiAttributes(
      reflection.HybridAttribute('assertions'),
      reflection.Attribute('categorized_assertions',
                           create=False,
                           update=False),
  )
  _include_links = []

  _aliases = {
      "assertions": {
          "display_name": "Assertions",
          "mandatory": True,
      }
  }

  @classmethod
  def eager_query(cls):
    """Eager Query"""
    query = super(AssertionCategorized, cls).eager_query()
    return query.options(
        orm.subqueryload('categorized_assertions').joinedload('category'),
    )

  def log_json(self):
    """Log assertions too."""
    out_json = super(AssertionCategorized, self).log_json()
    # pylint: disable=not-an-iterable
    out_json["assertions"] = [a.category.log_json()
                              for a in self.categorized_assertions]
    return out_json

  @classmethod
  def indexed_query(cls):
    """Restrict indexing query to the category fields actually indexed."""
    return super(AssertionCategorized, cls).indexed_query().options(
        orm.Load(cls).subqueryload(
            "categorized_assertions"
        ).joinedload(
            "category"
        ).load_only(
            "name",
            "type",
        ),
    )
class VerifiedDate(object):
  """Adds 'Verified Date' which is set when status is set to 'Verified'.

  When object is verified the status is overridden to 'Final' and the
  information about verification exposed as the 'verified' boolean.
  Requires Stateful to be mixed in as well.
  """

  VERIFIED_STATES = {u"Verified"}
  # NOTE(review): this is an empty dict, not a set — membership tests still
  # behave like an empty set; subclasses are expected to override it.
  DONE_STATES = {}

  # pylint: disable=method-hidden
  # because validator only sets date per model instance

  @declared_attr
  def verified_date(cls):  # pylint: disable=no-self-argument
    # Deferred column: only loaded from the DB when accessed.
    return deferred(
        db.Column(db.DateTime, nullable=True),
        cls.__name__
    )

  @hybrid_property
  def verified(self):
    # `!=` (not `is not`) so the expression also works as a SQL clause.
    return self.verified_date != None  # noqa

  _api_attrs = reflection.ApiAttributes(
      reflection.Attribute('verified', create=False, update=False),
      reflection.Attribute('verified_date', create=False, update=False),
  )

  _aliases = {
      "verified_date": "Verified Date"
  }

  _fulltext_attrs = [
      attributes.DatetimeFullTextAttr("verified_date", "verified_date"),
      "verified",
  ]

  @classmethod
  def indexed_query(cls):
    """Only load verified_date when building the fulltext index."""
    return super(VerifiedDate, cls).indexed_query().options(
        orm.Load(cls).load_only("verified_date"),
    )

  @validates('status')
  def validate_status(self, key, value):
    """Update verified_date on status change, make verified status final."""
    # Sqlalchemy only uses one validator per status (not necessarily the
    # first) and ignores others. This enables cooperation between validators
    # since 'status' is not defined here.
    if hasattr(super(VerifiedDate, self), "validate_status"):
      value = super(VerifiedDate, self).validate_status(key, value)
    # Transition into a verified state: stamp the date and force FINAL_STATE
    # (FINAL_STATE/END_STATES come from the Stateful mixin — see class doc).
    if (value in self.VERIFIED_STATES and
            self.status not in self.VERIFIED_STATES):
      self.verified_date = datetime.datetime.utcnow()
      value = self.FINAL_STATE
    # Leaving a verified/done state for a non-end state clears the date.
    elif (value not in self.END_STATES and
          (self.status in self.VERIFIED_STATES or
           self.status in self.DONE_STATES)):
      self.verified_date = None
    return value
class CustomAttributable(object): """Custom Attributable mixin.""" _api_attrs = reflection.ApiAttributes( 'custom_attribute_values', reflection.Attribute('custom_attribute_definitions', create=False, update=False), reflection.Attribute('preconditions_failed', create=False, update=False), reflection.Attribute('custom_attributes', read=False), ) _include_links = [ 'custom_attribute_values', 'custom_attribute_definitions' ] _update_raw = ['custom_attribute_values'] _evidence_found = None @declared_attr def custom_attribute_definitions(cls): # pylint: disable=no-self-argument """Load custom attribute definitions""" from ggrc.models.custom_attribute_definition\ import CustomAttributeDefinition def join_function(): """Object and CAD join function.""" definition_id = foreign(CustomAttributeDefinition.definition_id) definition_type = foreign( CustomAttributeDefinition.definition_type) return and_(or_(definition_id == cls.id, definition_id.is_(None)), definition_type == cls._inflector.table_singular) return relationship( "CustomAttributeDefinition", primaryjoin=join_function, backref='{0}_custom_attributable_definition'.format(cls.__name__), order_by=(CustomAttributeDefinition.definition_id.desc(), CustomAttributeDefinition.id.asc()), viewonly=True, ) @declared_attr def _custom_attributes_deletion(cls): # pylint: disable=no-self-argument """This declared attribute is used only for handling cascade deletions for CustomAttributes. This is done in order not to try to delete "global" custom attributes that don't have any definition_id related. Attempt to delete custom attributes with definition_id=None causes the IntegrityError as we shouldn't be able to delete global attributes along side with any other object (e.g. Assessments). 
""" from ggrc.models.custom_attribute_definition import ( CustomAttributeDefinition) def join_function(): """Join condition used for deletion""" definition_id = foreign(CustomAttributeDefinition.definition_id) definition_type = foreign( CustomAttributeDefinition.definition_type) return and_(definition_id == cls.id, definition_type == cls._inflector.table_singular) return relationship("CustomAttributeDefinition", primaryjoin=join_function, cascade='all, delete-orphan', order_by="CustomAttributeDefinition.id") @declared_attr def _custom_attribute_values(cls): # pylint: disable=no-self-argument """Load custom attribute values""" from ggrc.models.custom_attribute_value import CustomAttributeValue def join_function(): return and_( foreign(CustomAttributeValue.attributable_id) == cls.id, foreign( CustomAttributeValue.attributable_type) == cls.__name__) return relationship( "CustomAttributeValue", primaryjoin=join_function, backref='{0}_custom_attributable'.format(cls.__name__), cascade='all, delete-orphan', ) @hybrid_property def custom_attribute_values(self): return self._custom_attribute_values @classmethod def indexed_query(cls): return super(CustomAttributable, cls).indexed_query().options( orm.Load(cls).subqueryload("custom_attribute_values").joinedload( "custom_attribute").load_only( "id", "title", "attribute_type", ), orm.Load(cls).subqueryload("custom_attribute_definitions"). undefer_group("CustomAttributeDefinition_complete"), orm.Load(cls).subqueryload("custom_attribute_values").load_only( "id", "attribute_value", "attribute_object_id", ), ) @custom_attribute_values.setter def custom_attribute_values(self, values): """Setter function for custom attribute values. This setter function accepts 2 kinds of values: - list of custom attributes. This is used on the back-end by developers. - list of dictionaries containing custom attribute values. 
This is to have a clean API where the front-end can put the custom attribute values into the custom_attribute_values property and the json builder can then handle the attributes just by setting them. Args: value: List of custom attribute values or dicts containing json representation of custom attribute values. """ if not values: return self._values_map = { value.custom_attribute_id or value.custom_attribute.id: value for value in self.custom_attribute_values } # pylint: disable=not-an-iterable self._definitions_map = { definition.id: definition for definition in self.custom_attribute_definitions } # pylint: enable=not-an-iterable if isinstance(values[0], dict): self._add_ca_value_dicts(values) else: self._add_ca_values(values) def _add_ca_values(self, values): """Add CA value objects to _custom_attributes_values property. Args: values: list of CustomAttributeValue models """ for new_value in values: existing_value = self._values_map.get( new_value.custom_attribute.id) if existing_value: existing_value.attribute_value = new_value.attribute_value existing_value.attribute_object_id = new_value.attribute_object_id else: new_value.attributable = self # new_value is automatically appended to self._custom_attribute_values # on new_value.attributable = self def _add_ca_value_dicts(self, values): """Add CA dict representations to _custom_attributes_values property. This adds or updates the _custom_attribute_values with the values in the custom attribute values serialized dictionary. Args: values: List of dictionaries that represent custom attribute values. 
""" from ggrc.models.custom_attribute_value import CustomAttributeValue for value in values: if not value.get("attribute_object_id"): # value.get("attribute_object", {}).get("id") won't help because # value["attribute_object"] can be None value["attribute_object_id"] = ( value["attribute_object"].get("id") if value.get("attribute_object") else None) attr = self._values_map.get(value.get("custom_attribute_id")) if attr: attr.attributable = self attr.attribute_value = value.get("attribute_value") attr.attribute_object_id = value.get("attribute_object_id") elif "custom_attribute_id" in value: # this is automatically appended to self._custom_attribute_values # on attributable=self CustomAttributeValue( attributable=self, custom_attribute_id=value.get("custom_attribute_id"), attribute_value=value.get("attribute_value"), attribute_object_id=value.get("attribute_object_id"), ) elif "href" in value: # Ignore setting of custom attribute stubs. Getting here means that the # front-end is not using the API correctly and needs to be updated. logger.info("Ignoring post/put of custom attribute stubs.") else: raise BadRequest("Bad custom attribute value inserted") def insert_definition(self, definition): """Insert a new custom attribute definition into database Args: definition: dictionary with field_name: value """ from ggrc.models.custom_attribute_definition \ import CustomAttributeDefinition field_names = reflection.AttributeInfo.gather_create_attrs( CustomAttributeDefinition) data = {fname: definition.get(fname) for fname in field_names} data["definition_type"] = self._inflector.table_singular cad = CustomAttributeDefinition(**data) db.session.add(cad) def process_definitions(self, definitions): """ Process custom attribute definitions If present, delete all related custom attribute definition and insert new custom attribute definitions in the order provided. 
Args: definitions: Ordered list of (dict) custom attribute definitions """ from ggrc.models.custom_attribute_definition \ import CustomAttributeDefinition as CADef if not hasattr(self, "PER_OBJECT_CUSTOM_ATTRIBUTABLE"): return if self.id is not None: db.session.query(CADef).filter( CADef.definition_id == self.id, CADef.definition_type == self._inflector.table_singular).delete() db.session.flush() db.session.expire_all() for definition in definitions: if "_pending_delete" in definition and definition[ "_pending_delete"]: continue definition['context'] = getattr(self, "context", None) self.insert_definition(definition) def _remove_existing_items(self, attr_values): """Remove existing CAV and corresponding full text records.""" from ggrc.fulltext.mysql import MysqlRecordProperty from ggrc.models.custom_attribute_value import CustomAttributeValue if not attr_values: return # 2) Delete all fulltext_record_properties for the list of values ftrp_properties = [] for val in attr_values: ftrp_properties.append(val.custom_attribute.title) if val.custom_attribute.attribute_type == "Map:Person": ftrp_properties.append(val.custom_attribute.title + ".name") ftrp_properties.append(val.custom_attribute.title + ".email") db.session.query(MysqlRecordProperty)\ .filter( and_( MysqlRecordProperty.key == self.id, MysqlRecordProperty.type == self.__class__.__name__, MysqlRecordProperty.property.in_(ftrp_properties)))\ .delete(synchronize_session='fetch') # 3) Delete the list of custom attribute values attr_value_ids = [value.id for value in attr_values] db.session.query(CustomAttributeValue)\ .filter(CustomAttributeValue.id.in_(attr_value_ids))\ .delete(synchronize_session='fetch') db.session.commit() def custom_attributes(self, src): """Legacy setter for custom attribute values and definitions. This code should only be used for custom attribute definitions until setter for that is updated. 
""" # pylint: disable=too-many-locals from ggrc.models.custom_attribute_value import CustomAttributeValue from ggrc.services import signals ca_values = src.get("custom_attribute_values") if ca_values and "attribute_value" in ca_values[0]: # This indicates that the new CA API is being used and the legacy API # should be ignored. If we need to use the legacy API the # custom_attribute_values property should contain stubs instead of entire # objects. return definitions = src.get("custom_attribute_definitions") if definitions is not None: self.process_definitions(definitions) attributes = src.get("custom_attributes") if not attributes: return old_values = collections.defaultdict(list) last_values = dict() # attributes looks like this: # [ {<id of attribute definition> : attribute value, ... }, ... ] # 1) Get all custom attribute values for the CustomAttributable instance attr_values = db.session.query(CustomAttributeValue).filter( and_( CustomAttributeValue.attributable_type == self.__class__.__name__, CustomAttributeValue.attributable_id == self.id)).all() # Save previous value of custom attribute. This is a bit complicated by # the fact that imports can save multiple values at the time of writing. # old_values holds all previous values of attribute, last_values holds # chronologically last value. 
for value in attr_values: old_values[value.custom_attribute_id].append( (value.created_at, value.attribute_value)) last_values = { str(key): max(old_vals, key=lambda (created_at, _): created_at) for key, old_vals in old_values.iteritems() } self._remove_existing_items(attr_values) # 4) Instantiate custom attribute values for each of the definitions # passed in (keys) # pylint: disable=not-an-iterable # filter out attributes like Person:None attributes = { k: v for k, v in attributes.items() if v != "Person:None" } definitions = { d.id: d for d in self.get_custom_attribute_definitions() } for ad_id in attributes.keys(): obj_type = self.__class__.__name__ obj_id = self.id new_value = CustomAttributeValue( custom_attribute_id=int(ad_id), attributable=self, attribute_value=attributes[ad_id], ) if definitions[int(ad_id)].attribute_type.startswith("Map:"): obj_type, obj_id = new_value.attribute_value.split(":") new_value.attribute_value = obj_type new_value.attribute_object_id = long(obj_id) elif definitions[int(ad_id)].attribute_type == "Checkbox": new_value.attribute_value = "1" if new_value.attribute_value else "0" # 5) Set the context_id for each custom attribute value to the context id # of the custom attributable. 
# TODO: We are ignoring contexts for now # new_value.context_id = cls.context_id # new value is appended to self.custom_attribute_values by the ORM # self.custom_attribute_values.append(new_value) if ad_id in last_values: _, previous_value = last_values[ad_id] if previous_value != attributes[ad_id]: signals.Signals.custom_attribute_changed.send( self.__class__, obj=self, src={ "type": obj_type, "id": obj_id, "operation": "UPDATE", "value": new_value, "old": previous_value }, service=self.__class__.__name__) else: signals.Signals.custom_attribute_changed.send( self.__class__, obj=self, src={ "type": obj_type, "id": obj_id, "operation": "INSERT", "value": new_value, }, service=self.__class__.__name__) @classmethod def get_custom_attribute_definitions(cls): """Get all applicable CA definitions (even ones without a value yet).""" from ggrc.models.custom_attribute_definition import \ CustomAttributeDefinition as cad if cls.__name__ == "Assessment": query = cad.query.filter( or_( cad.definition_type == utils.underscore_from_camelcase( cls.__name__), cad.definition_type == "assessment_template", )) else: query = cad.query.filter(cad.definition_type == utils. 
underscore_from_camelcase(cls.__name__)) return query.options( orm.undefer_group('CustomAttributeDefinition_complete')) @classmethod def eager_query(cls): """Define fields to be loaded eagerly to lower the count of DB queries.""" query = super(CustomAttributable, cls).eager_query() query = query.options( orm.subqueryload('custom_attribute_definitions').undefer_group( 'CustomAttributeDefinition_complete'), orm.subqueryload('_custom_attribute_values').undefer_group( 'CustomAttributeValue_complete').subqueryload( '{0}_custom_attributable'.format(cls.__name__)), orm.subqueryload('_custom_attribute_values').subqueryload( '_related_revisions'), ) if hasattr(cls, 'comments'): # only for Commentable classess query = query.options( orm.subqueryload('comments').undefer_group('Comment_complete'), ) return query def log_json(self): """Log custom attribute values.""" # pylint: disable=not-an-iterable from ggrc.models.custom_attribute_definition import \ CustomAttributeDefinition res = super(CustomAttributable, self).log_json() if self.custom_attribute_values: res["custom_attribute_values"] = [ value.log_json() for value in self.custom_attribute_values ] # fetch definitions form database because `self.custom_attribute` # may not be populated defs = CustomAttributeDefinition.query.filter( CustomAttributeDefinition.definition_type == self._inflector.table_singular, # noqa # pylint: disable=protected-access CustomAttributeDefinition.id.in_([ value.custom_attribute_id for value in self.custom_attribute_values ])) # also log definitions to freeze field names in time res["custom_attribute_definitions"] = [ definition.log_json() for definition in defs ] else: res["custom_attribute_definitions"] = [] res["custom_attribute_values"] = [] return res def validate_custom_attributes(self): """Set CADs and validate CAVs one by one.""" # pylint: disable=not-an-iterable; we can iterate over relationships map_ = {d.id: d for d in self.custom_attribute_definitions} for value in 
self._custom_attribute_values: if not value.custom_attribute and value.custom_attribute_id: value.custom_attribute = map_.get( int(value.custom_attribute_id)) value.validate() @builder.simple_property def preconditions_failed(self): """Returns True if any mandatory CAV, comment or evidence is missing. Note: return value may be incorrect if evidence count is changed after the first property calculation (see check_mandatory_evidence function). """ values_map = { cav.custom_attribute_id or cav.custom_attribute.id: cav for cav in self.custom_attribute_values } # pylint: disable=not-an-iterable; we can iterate over relationships for cad in self.custom_attribute_definitions: if cad.mandatory: cav = values_map.get(cad.id) if not cav or not cav.attribute_value: return True return any(c.preconditions_failed for c in self.custom_attribute_values) def check_mandatory_evidence(self): """Check presence of mandatory evidence. Note: mandatory evidence precondition is checked only once. Any additional changes to evidences after the first checking of the precondition will cause incorrect result of the function. """ from ggrc.models.object_document import Documentable if isinstance(self, Documentable): # Note: this is a suboptimal implementation of mandatory evidence check; # it should be refactored once Evicence-CA mapping is introduced def evidence_required(cav): """Return True if an evidence is required for this `cav`.""" # pylint: disable=protected-access flags = (cav._multi_choice_options_to_flags( cav.custom_attribute).get(cav.attribute_value)) return flags and flags.evidence_required if self._evidence_found is None: self._evidence_found = (len(self.document_evidence) >= len([ cav for cav in self.custom_attribute_values if evidence_required(cav) ])) if not self._evidence_found: return ["evidence"] return [] def invalidate_evidence_found(self): """Invalidate the cached value""" self._evidence_found = None
class Workflow(roleable.Roleable,
               mixins.CustomAttributable,
               HasOwnContext,
               mixins.Timeboxed,
               mixins.Described,
               mixins.Titled,
               mixins.Notifiable,
               mixins.Stateful,
               base.ContextRBAC,
               mixins.Slugged,
               mixins.Folderable,
               Indexed,
               db.Model):
  """Basic Workflow first class object.

  A workflow owns task groups and cycles and describes an (optionally
  recurring) schedule via the ``unit``/``repeat_every`` pair.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  # Workflow lifecycle states.
  DRAFT = u"Draft"
  ACTIVE = u"Active"
  INACTIVE = u"Inactive"
  VALID_STATES = [DRAFT, ACTIVE, INACTIVE]

  @classmethod
  def default_status(cls):
    """Return the status a newly created workflow starts in."""
    return cls.DRAFT

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=False, default=u""), 'Workflow')
  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')
  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)
  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # this is an indicator if the workflow exists from before the change where
  # we deleted cycle objects, which changed how the cycle is created and
  # how objects are mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')

  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean,
      default=IS_VERIFICATION_NEEDED_DEFAULT,
      nullable=False)

  repeat_every = deferred(
      db.Column(db.Integer, nullable=True, default=None), 'Workflow')

  # Recurrence units; unit=None means a one-time workflow.
  DAY_UNIT = 'day'
  WEEK_UNIT = 'week'
  MONTH_UNIT = 'month'
  VALID_UNITS = (DAY_UNIT, WEEK_UNIT, MONTH_UNIT)
  unit = deferred(
      db.Column(db.Enum(*VALID_UNITS), nullable=True, default=None),
      'Workflow')
  repeat_multiplier = deferred(
      db.Column(db.Integer, nullable=False, default=0), 'Workflow')

  # Maps 'unit' to the legacy 'frequency' values exposed via SearchAPI.
  UNIT_FREQ_MAPPING = {
      None: "one_time",
      DAY_UNIT: "daily",
      WEEK_UNIT: "weekly",
      MONTH_UNIT: "monthly"
  }

  # Builders for the human-readable 'repeat' value; px/sx are the
  # "<count> " prefix and plural "s" suffix computed in _get_repeat.
  # pylint: disable=unnecessary-lambda
  REPEAT_MAPPING = {
      None: lambda px, sx: "off",
      DAY_UNIT: lambda px, sx: "every {}weekday{}".format(px, sx),
      WEEK_UNIT: lambda px, sx: "every {}week{}".format(px, sx),
      MONTH_UNIT: lambda px, sx: "every {}month{}".format(px, sx)
  }
  REPEAT_ORDER_MAPPING = {
      None: 0,
      DAY_UNIT: 1,
      WEEK_UNIT: 2,
      MONTH_UNIT: 3
  }

  @hybrid.hybrid_property
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return self.UNIT_FREQ_MAPPING[self.unit]

  @frequency.expression
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return case([
        (self.unit.is_(None), self.UNIT_FREQ_MAPPING[None]),
        (self.unit == self.DAY_UNIT, self.UNIT_FREQ_MAPPING[self.DAY_UNIT]),
        (self.unit == self.WEEK_UNIT,
         self.UNIT_FREQ_MAPPING[self.WEEK_UNIT]),
        (self.unit == self.MONTH_UNIT,
         self.UNIT_FREQ_MAPPING[self.MONTH_UNIT]),
    ])

  @classmethod
  def _get_repeat(cls, unit, repeat_every):
    """Return repeat field representation for QueryAPI"""
    if repeat_every is None or repeat_every == 1:
      prefix, suffix = "", ""
    else:
      prefix, suffix = "{} ".format(repeat_every), "s"
    func = cls.REPEAT_MAPPING[unit]
    return func(prefix, suffix)

  @hybrid.hybrid_property
  def repeat(self):
    """Hybrid property for filtering in QueryAPI"""
    return self._get_repeat(self.unit, self.repeat_every)

  @repeat.expression
  def repeat(self):
    """Hybrid property for filtering in QueryAPI"""
    case_ = [(self.unit.is_(None), self.REPEAT_MAPPING[None](None, None))]
    # Enumerate every (unit, repeat_every) pair in the supported 1..30 range
    # so the SQL CASE mirrors the Python-side _get_repeat result.
    case_.extend(
        ((self.unit == unit) & (self.repeat_every == repeat_every),
         self._get_repeat(unit, repeat_every))
        for unit in self.VALID_UNITS
        for repeat_every in xrange(1, 31))
    return case(case_)

  @property
  def repeat_order(self):
    """Property for ordering in QueryAPI"""
    unit_map = self.REPEAT_ORDER_MAPPING[self.unit]
    repeat_every_map = self.repeat_every or 0
    # Zero-padded so lexicographic ordering matches numeric ordering.
    return u"{:0>4}_{:0>4}".format(unit_map, repeat_every_map)

  @builder.simple_property
  def can_start_cycle(self):
    """Can start cycle.

    Boolean property, returns True if all task groups have at least one
    task group task, False otherwise.
    """
    return not any(tg for tg in self.task_groups if not tg.task_group_tasks)

  @property
  def tasks(self):
    """All task group tasks of this workflow, flattened across task groups."""
    return list(itertools.chain(
        *[t.task_group_tasks for t in self.task_groups]))

  @property
  def min_task_start_date(self):
    """Fetches non adjusted setup cycle start date based on TGT user's setup.

    Args:
      self: Workflow instance.

    Returns:
      Date when first cycle should be started based on user's setup.
    """
    tasks = self.tasks
    min_date = None
    for task in tasks:
      min_date = min(task.start_date, min_date or task.start_date)
    return min_date

  WORK_WEEK_LEN = 5

  @classmethod
  def first_work_day(cls, day):
    """Step `day` backwards to the nearest non-weekend, non-holiday date."""
    holidays = google_holidays.GoogleHolidays()
    while day.isoweekday() > cls.WORK_WEEK_LEN or day in holidays:
      day -= relativedelta.relativedelta(days=1)
    return day

  def calc_next_adjusted_date(self, setup_date):
    """Calculates adjusted date which are expected in next cycle.

    Args:
      setup_date: Date which was setup by user.

    Returns:
      Adjusted date which are expected to be in next Workflow cycle.

    Raises:
      ValueError: if the workflow has an unsupported `unit`.
    """
    if self.repeat_every is None or self.unit is None:
      return self.first_work_day(setup_date)
    try:
      key = {
          self.WEEK_UNIT: "weeks",
          self.MONTH_UNIT: "months",
          self.DAY_UNIT: "days",
      }[self.unit]
    except KeyError:
      raise ValueError("Invalid Workflow unit")
    repeater = self.repeat_every * self.repeat_multiplier
    if self.unit == self.DAY_UNIT:
      # Day-based repetition counts only weekdays: split into whole work
      # weeks plus leftover days (py2 integer division is intentional).
      weeks = repeater / self.WORK_WEEK_LEN
      days = repeater % self.WORK_WEEK_LEN
      # append weekends if it's needed
      days += ((setup_date.isoweekday() + days) > self.WORK_WEEK_LEN) * 2
      # NOTE(review): the positional `setup_date` passed to relativedelta is
      # its dt1 argument; with dt2 absent dateutil ignores it and uses only
      # the keyword deltas — confirm and consider dropping it.
      return setup_date + relativedelta.relativedelta(
          setup_date, weeks=weeks, days=days)
    calc_date = setup_date + relativedelta.relativedelta(
        setup_date, **{key: repeater})
    if self.unit == self.MONTH_UNIT:
      # check if setup date is the last day of the month
      # and if it is then calc_date should be the last day of the month too
      setup_day = calendar.monthrange(setup_date.year, setup_date.month)[1]
      if setup_day == setup_date.day:
        calc_date = datetime.date(
            calc_date.year,
            calc_date.month,
            calendar.monthrange(calc_date.year, calc_date.month)[1])
    return self.first_work_day(calc_date)

  @orm.validates('repeat_every')
  def validate_repeat_every(self, _, value):
    """Validate repeat_every field for Workflow.

    repeat_every shouldn't have 0 value.
    """
    if value is not None and not isinstance(value, (int, long)):
      raise ValueError("'repeat_every' should be integer or 'null'")
    if value is not None and value <= 0:
      raise ValueError("'repeat_every' should be strictly greater than 0")
    return value

  @orm.validates('unit')
  def validate_unit(self, _, value):
    """Validate unit field for Workflow.

    Unit should have one of the value from VALID_UNITS list or None.
    """
    if value is not None and value not in self.VALID_UNITS:
      raise ValueError("'unit' field should be one of the "
                       "value: null, {}".format(", ".join(self.VALID_UNITS)))
    return value

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, _, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    It's not allowed to change is_verification_needed flag after creation.
    If is_verification_needed doesn't send,
    then is_verification_needed flag is True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if self.status != self.DRAFT and value != self.is_verification_needed:
      raise ValueError("is_verification_needed value isn't changeble "
                       "on workflow with '{}' status".format(self.status))
    return value

  @builder.simple_property
  def workflow_state(self):
    """Aggregate state computed from this workflow's cycles."""
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  _fulltext_attrs = [
      attributes.CustomOrderingFullTextAttr(
          'repeat', 'repeat',
          order_prop_getter='repeat_order')
  ]

  _api_attrs = reflection.ApiAttributes(
      'task_groups',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      'is_verification_needed',
      'repeat_every',
      'unit',
      reflection.Attribute('next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('can_start_cycle', create=False, update=False),
      reflection.Attribute('non_adjusted_next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('workflow_state', create=False, update=False),
      reflection.Attribute('kind', create=False, update=False),
      reflection.Attribute('repeat', create=False, update=False))

  _aliases = {
      "repeat_every": {
          "display_name": "Repeat Every",
          "description": "'Repeat Every' value\nmust fall into\nthe range 1~30"
                         "\nor '-' for None",
      },
      "unit": {
          "display_name": "Unit",
          "description": "Allowed values for\n'Unit' are:\n{}"
                         "\nor '-' for None".format("\n".join(VALID_UNITS)),
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
          "description": "This field is not changeable\nafter creation.",
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": {
          "display_name": "Force real-time email updates",
          "mandatory": False,
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.

    Copies the scalar columns listed below and either clones the existing
    access control list (clone_people=True) or grants Admin to the current
    user.
    """
    columns = ['title', 'description', 'notify_on_change',
               'notify_custom_message', 'end_date', 'start_date',
               'repeat_every', 'unit', 'is_verification_needed']
    if kwargs.get('clone_people', False):
      access_control_list = [{"ac_role": acl.ac_role, "person": acl.person}
                             for acl in self.access_control_list]
    else:
      # Invert the {id: name} custom-role mapping to find the Admin role id.
      role_id = {
          name: ind
          for (ind, name) in role.get_custom_roles_for(self.type).iteritems()
      }['Admin']
      access_control_list = [{"ac_role_id": role_id,
                              "person": {"id": get_current_user().id}}]
    target = self.copy_into(_other, columns,
                            access_control_list=access_control_list,
                            **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True))

    return target

  @classmethod
  def eager_query(cls):
    """Eagerly load cycles, task groups and their tasks."""
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups').undefer_group('TaskGroup_complete'),
        orm.subqueryload('task_groups')
           .subqueryload("task_group_tasks")
           .undefer_group('TaskGroupTask_complete'),
    )

  @classmethod
  def indexed_query(cls):
    """Query used when (re)building the fulltext index."""
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Workflow_complete", ),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.
    If such workflow does not exist it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists
    # BUGFIX: use the SQL-level IS NULL test.  The previous
    # `Workflow.unit is None` was a Python identity check on the column
    # attribute, which always evaluated to False and made this filter
    # return no rows, so a new backlog workflow was created on every call.
    backlog_workflows = Workflow.query.filter(
        and_(Workflow.kind == "Backlog",
             # the following means one_time wf
             Workflow.unit.is_(None))).all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    # NOTE(review): Session.flush(objects) expects an iterable of objects;
    # passing a single model instance looks wrong — verify against the
    # SQLAlchemy version in use (db.session.flush() may be intended).
    db.session.flush(backlog_workflow)
    # create a cycle
    backlog_cycle = cycle.Cycle(
        description="Backlog workflow",
        title="Backlog (one time)",
        is_current=1,
        status="Assigned",
        start_date=None,
        end_date=None,
        context=backlog_workflow.get_or_create_object_context(),
        workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group.CycleTaskGroup(
        description="Backlog workflow taskgroup",
        title="Backlog TaskGroup",
        cycle=backlog_cycle,
        status=cycle_task_group.CycleTaskGroup.IN_PROGRESS,
        start_date=None,
        end_date=None,
        context=backlog_workflow.get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    get_indexer().create_record(backlog_workflow)
    return "Backlog workflow created"
class Directive(mixins.LastDeprecatedTimeboxed,
                Commentable,
                mixins.TestPlanned,
                mixins.base.ContextRBAC,
                mixins.BusinessObject,
                mixins.Folderable,
                db.Model):
  """Directive model.

  Polymorphic base (discriminated on `meta_kind`) for directive-like
  objects; concrete subclasses are expected to define VALID_KINDS,
  which validate_kind checks against.
  """
  __tablename__ = 'directives'

  version = deferred(db.Column(db.String), 'Directive')
  organization = deferred(db.Column(db.String), 'Directive')
  scope = deferred(db.Column(db.Text, nullable=False, default=u""),
                   'Directive')
  kind_id = deferred(db.Column(db.Integer), 'Directive')
  audit_start_date = deferred(db.Column(db.DateTime), 'Directive')
  audit_frequency_id = deferred(db.Column(db.Integer), 'Directive')
  audit_duration_id = deferred(db.Column(db.Integer), 'Directive')
  # Polymorphic discriminator column (see __mapper_args__ below).
  meta_kind = db.Column(db.String)
  kind = deferred(db.Column(db.String), 'Directive')

  # TODO: FIX jost!
  # requirements = db.relationship(
  #     'Requirement', backref='directive',
  #     order_by='Requirement.slug', cascade='all, delete-orphan')
  controls = db.relationship(
      'Control', backref='directive', order_by='Control.slug')

  # audit_frequency/audit_duration join the shared Option table filtered by
  # the Option.role discriminator.
  audit_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_frequency_id) == Option.id, '
      'Option.role == "audit_frequency")',
      uselist=False,
  )
  audit_duration = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Directive.audit_duration_id) == Option.id, '
      'Option.role == "audit_duration")',
      uselist=False,
  )

  __mapper_args__ = {'polymorphic_on': meta_kind}

  # REST-exposed attributes.
  _api_attrs = reflection.ApiAttributes(
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  )

  _fulltext_attrs = [
      'audit_start_date',
      'audit_frequency',
      'audit_duration',
      'controls',
      'kind',
      'organization',
      'scope',
      'version',
  ]

  @classmethod
  def indexed_query(cls):
    """Query used when (re)building the fulltext index."""
    return super(Directive, cls).indexed_query().options(
        orm.Load(cls).joinedload('audit_frequency'),
        orm.Load(cls).joinedload('audit_duration'),
        orm.Load(cls).subqueryload('controls'),
        orm.Load(cls).load_only(
            'audit_start_date',
            'kind',
            'organization',
            'scope',
            'version',
        ),
    )

  _sanitize_html = [
      'organization',
      'scope',
      'version',
  ]

  _include_links = []

  _aliases = {
      'kind': "Kind/Type",
      "documents_file": None,
  }

  @validates('kind')
  def validate_kind(self, key, value):
    """Reject kind values not listed in the subclass's VALID_KINDS."""
    if not value:
      return None
    if value not in self.VALID_KINDS:
      message = "Invalid value '{}' for attribute {}.{}.".format(
          value, self.__class__.__name__, key)
      raise ValueError(message)
    return value

  @validates('audit_duration', 'audit_frequency')
  def validate_directive_options(self, key, option):
    """Delegate Option-typed attribute validation to the shared helper."""
    return validate_option(self.__class__.__name__, key, option, key)

  @classmethod
  def eager_query(cls):
    """Eagerly load option relationships and mapped controls."""
    query = super(Directive, cls).eager_query()
    return cls.eager_inclusions(query, Directive._include_links).options(
        orm.joinedload('audit_frequency'),
        orm.joinedload('audit_duration'),
        orm.subqueryload('controls'))

  @staticmethod
  def _extra_table_args(cls):
    # Per-subclass index on the polymorphic discriminator.
    return (
        db.Index('ix_{}_meta_kind'.format(cls.__tablename__), 'meta_kind'),
    )
class Control(synchronizable.Synchronizable,
              categorizable.Categorizable,
              WithLastAssessmentDate,
              synchronizable.RoleableSynchronizable,
              Relatable,
              mixins.CustomAttributable,
              Personable,
              PublicDocumentable,
              mixins.LastDeprecatedTimeboxed,
              mixins.TestPlanned,
              comment.ExternalCommentable,
              WithSimilarityScore,
              base.ContextRBAC,
              mixins.BusinessObject,
              Indexed,
              mixins.Folderable,
              db.Model):
  """Control model definition."""
  __tablename__ = 'controls'

  company_control = deferred(db.Column(db.Boolean), 'Control')
  directive_id = deferred(
      db.Column(db.Integer, db.ForeignKey('directives.id')), 'Control')
  version = deferred(db.Column(db.String), 'Control')
  fraud_related = deferred(db.Column(db.Boolean), 'Control')
  key_control = deferred(db.Column(db.Boolean), 'Control')
  active = deferred(db.Column(db.Boolean), 'Control')
  kind = deferred(db.Column(db.String), "Control")
  means = deferred(db.Column(db.String), "Control")
  verify_frequency = deferred(db.Column(db.String), "Control")
  review_status = deferred(db.Column(db.String, nullable=True), "Control")
  review_status_display_name = deferred(db.Column(db.String, nullable=True),
                                        "Control")

  # GGRCQ attributes
  due_date = db.Column(db.Date, nullable=True)
  created_by_id = db.Column(db.Integer, nullable=False)

  # pylint: disable=no-self-argument
  @declared_attr
  def created_by(cls):
    """Relationship to user referenced by created_by_id."""
    return utils.person_relationship(cls.__name__, "created_by_id")

  last_submitted_at = db.Column(db.DateTime, nullable=True)
  last_submitted_by_id = db.Column(db.Integer, nullable=True)

  # pylint: disable=no-self-argument
  @declared_attr
  def last_submitted_by(cls):
    """Relationship to user referenced by last_submitted_by_id."""
    return utils.person_relationship(cls.__name__, "last_submitted_by_id")

  last_verified_at = db.Column(db.DateTime, nullable=True)
  last_verified_by_id = db.Column(db.Integer, nullable=True)

  # pylint: disable=no-self-argument
  @declared_attr
  def last_verified_by(cls):
    """Relationship to user referenced by last_verified_by_id."""
    return utils.person_relationship(cls.__name__, "last_verified_by_id")

  _title_uniqueness = False

  # Custom JSON publishers for the user-stub relationships above.
  _custom_publish = {
      'created_by': ggrc_utils.created_by_stub,
      'last_submitted_by': ggrc_utils.last_submitted_by_stub,
      'last_verified_by': ggrc_utils.last_verified_by_stub,
  }

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'active',
      'company_control',
      'directive',
      'fraud_related',
      'key_control',
      'kind',
      'means',
      'verify_frequency',
      'version',
      'review_status',
      'review_status_display_name',
      'due_date',
      reflection.ExternalUserAttribute('created_by', force_create=True),
      'last_submitted_at',
      reflection.ExternalUserAttribute('last_submitted_by',
                                       force_create=True),
      'last_verified_at',
      reflection.ExternalUserAttribute('last_verified_by',
                                       force_create=True),
  )

  _fulltext_attrs = [
      'active',
      'company_control',
      'directive',
      # Booleans are indexed with human-readable labels.
      attributes.BooleanFullTextAttr('fraud_related', 'fraud_related',
                                     true_value="yes", false_value="no"),
      attributes.BooleanFullTextAttr('key_control', 'key_control',
                                     true_value="key",
                                     false_value="non-key"),
      'kind',
      'means',
      'verify_frequency',
      'version',
      'review_status_display_name',
  ]

  _sanitize_html = [
      'version',
  ]

  @classmethod
  def indexed_query(cls):
    """Query used when (re)building the fulltext index."""
    return super(Control, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Control_complete"),
        orm.Load(cls).joinedload("directive").undefer_group(
            "Directive_complete"),
    )

  _include_links = []

  _aliases = {
      "kind": "Kind/Nature",
      "means": "Type/Means",
      "verify_frequency": "Frequency",
      "fraud_related": "Fraud Related",
      "key_control": {
          "display_name": "Significance",
          "description": "Allowed values are:\nkey\nnon-key\n---",
      },
      "test_plan": "Assessment Procedure",
      "review_status": {
          "display_name": "Review State",
          "mandatory": False,
          "filter_only": True
      },
      "review_status_display_name": {
          "display_name": "Review Status",
          "mandatory": False
      },
  }

  @classmethod
  def eager_query(cls, **kwargs):
    """Eagerly load the mapped directive."""
    query = super(Control, cls).eager_query(**kwargs)
    return cls.eager_inclusions(query, Control._include_links).options(
        orm.joinedload('directive'),
    )

  def log_json(self):
    """Serialize the control for the event log, inlining user stubs."""
    out_json = super(Control, self).log_json()
    out_json["created_by"] = ggrc_utils.created_by_stub(self)
    out_json["last_submitted_by"] = ggrc_utils.last_submitted_by_stub(self)
    out_json["last_verified_by"] = ggrc_utils.last_verified_by_stub(self)
    # so that event log can refer to deleted directive
    if self.directive:
      out_json["mapped_directive"] = self.directive.display_name
    return out_json

  @validates('review_status')
  def validate_review_status(self, _, value):  # pylint: disable=no-self-use
    """Add explicit non-nullable validation."""
    if value is None:
      raise ValidationError("review_status for the object is not specified")
    return value

  # pylint: disable=invalid-name
  @validates('review_status_display_name')
  def validate_review_status_display_name(self, _, value):
    """Add explicit non-nullable validation."""
    # pylint: disable=no-self-use,invalid-name
    if value is None:
      raise ValidationError(
          "review_status_display_name for the object is not specified")
    return value
class Comment(Roleable, Relatable, Described, Notifiable,
              base.ContextRBAC, Base, Indexed, db.Model):
  """Basic comment model."""
  __tablename__ = "comments"

  assignee_type = db.Column(db.String, nullable=False, default=u"")
  # Revision of the CA value this comment refers to; NULLed if the revision
  # is deleted.
  revision_id = deferred(db.Column(
      db.Integer,
      db.ForeignKey('revisions.id', ondelete='SET NULL'),
      nullable=True,
  ), 'Comment')
  revision = db.relationship(
      'Revision',
      uselist=False,
  )
  custom_attribute_definition_id = deferred(db.Column(
      db.Integer,
      db.ForeignKey('custom_attribute_definitions.id', ondelete='SET NULL'),
      nullable=True,
  ), 'Comment')
  custom_attribute_definition = db.relationship(
      'CustomAttributeDefinition',
      uselist=False,
  )

  # Polymorphic pointer to the object that initiated this comment
  # (e.g. a Proposal).
  initiator_instance_id = db.Column(db.Integer, nullable=True)
  initiator_instance_type = db.Column(db.String, nullable=True)
  INITIATOR_INSTANCE_TMPL = "{}_comment_initiated_by"

  initiator_instance = utils.PolymorphicRelationship(
      "initiator_instance_id",
      "initiator_instance_type",
      INITIATOR_INSTANCE_TMPL)

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      "assignee_type",
      reflection.Attribute("custom_attribute_revision",
                           create=False,
                           update=False),
      reflection.Attribute("custom_attribute_revision_upd",
                           read=False),
      reflection.Attribute("header_url_link",
                           create=False,
                           update=False),
  )

  _sanitize_html = [
      "description",
  ]

  AUTO_REINDEX_RULES = [
      ReindexRule("Comment", get_objects_to_reindex),
      ReindexRule("Relationship", reindex_by_relationship),
  ]

  @builder.simple_property
  def header_url_link(self):
    """Return header url link to comment if that comment related to proposal
    and that proposal is only proposed."""
    if self.initiator_instance_type != "Proposal":
      return ""
    proposed_status = self.initiator_instance.STATES.PROPOSED
    if self.initiator_instance.status == proposed_status:
      return "proposal_link"
    return ""

  @classmethod
  def eager_query(cls):
    """Eagerly load the revision and CA definition relationships."""
    query = super(Comment, cls).eager_query()
    return query.options(
        orm.joinedload('revision'),
        orm.joinedload('custom_attribute_definition').undefer_group(
            'CustomAttributeDefinition_complete'),
    )

  def log_json(self):
    """Log custom attribute revisions."""
    res = super(Comment, self).log_json()
    res["custom_attribute_revision"] = self.custom_attribute_revision
    return res

  @builder.simple_property
  def custom_attribute_revision(self):
    """Get the historical value of the relevant CA value."""
    if not self.revision:
      return None
    revision = self.revision.content
    cav_stored_value = revision['attribute_value']
    cad = self.custom_attribute_definition
    return {
        'custom_attribute': {
            'id': cad.id if cad else None,
            'title': cad.title if cad else 'DELETED DEFINITION',
        },
        'custom_attribute_stored_value': cav_stored_value,
    }

  def custom_attribute_revision_upd(self, value):
    """Create a Comment-CA mapping with current CA value stored.

    `value` is the incoming JSON payload; when it carries a
    'custom_attribute_revision_upd' entry, the latest Revision of the
    referenced CustomAttributeValue is attached to this comment.

    Raises:
      BadRequest: if no Revision exists for the referenced CA value.
    """
    ca_revision_dict = value.get('custom_attribute_revision_upd')
    if not ca_revision_dict:
      return
    ca_val_dict = self._get_ca_value(ca_revision_dict)
    ca_val_id = ca_val_dict['id']
    # Latest revision of the CA value, by creation time.
    ca_val_revision = Revision.query.filter_by(
        resource_type='CustomAttributeValue',
        resource_id=ca_val_id,
    ).order_by(
        Revision.created_at.desc(),
    ).limit(1).first()
    if not ca_val_revision:
      raise BadRequest(
          "No Revision found for CA value with id provided under "
          "'custom_attribute_value': {}".format(ca_val_dict))

    self.revision_id = ca_val_revision.id
    self.revision = ca_val_revision
    # Here *attribute*_id is assigned to *definition*_id, strange but,
    # as you can see in src/ggrc/models/custom_attribute_value.py
    # custom_attribute_id is link to custom_attribute_definitions.id
    # possible best way is use definition id from request:
    # ca_revision_dict["custom_attribute_definition"]["id"]
    # but needs to be checked that is always exist in request
    self.custom_attribute_definition_id = ca_val_revision.content.get(
        'custom_attribute_id',
    )
    self.custom_attribute_definition = CustomAttributeDefinition.query.get(
        self.custom_attribute_definition_id,
    )

  @staticmethod
  def _get_ca_value(ca_revision_dict):
    """Get CA value dict from json and do a basic validation."""
    ca_val_dict = ca_revision_dict.get('custom_attribute_value')
    if not ca_val_dict:
      raise ValueError(
          "CA value expected under "
          "'custom_attribute_value': {}".format(ca_revision_dict))
    if not ca_val_dict.get('id'):
      raise ValueError(
          "CA value id expected under 'id': {}".format(ca_val_dict))
    return ca_val_dict
class Commentable(object):
  """Mixin for commentable objects.

  This is a mixin for adding default options to objects on which people can
  comment.

  recipients is used for setting who gets notified (Verifer, Requester, ...).
  send_by_default should be used for setting the "send notification" flag in
  the comment modal.
  """
  # pylint: disable=too-few-public-methods

  # Closed set of role names that may appear in the comma-separated
  # ``recipients`` column; anything else is rejected by the validator below.
  VALID_RECIPIENTS = frozenset([
      "Assignees",
      "Creators",
      "Verifiers",
      "Admin",
      "Primary Contacts",
      "Secondary Contacts",
  ])

  @validates("recipients")
  def validate_recipients(self, key, value):
    """
      Validate recipients list

      Args:
        value (string): Can be either empty, or
                        list of comma separated `VALID_RECIPIENTS`
    """
    # pylint: disable=unused-argument
    if value:
      # Drop empty fragments produced by stray commas ("a,,b" -> {a, b}).
      value = set(name for name in value.split(",") if name)

    if value and value.issubset(self.VALID_RECIPIENTS):
      # The validator is a bit more smart and also makes some filtering of the
      # given data - this is intended.
      # NOTE: joining a set means the stored order of recipients is
      # unspecified, not the order the client sent.
      return ",".join(value)
    elif not value:
      return ""
    else:
      raise ValueError(value,
                       'Value should be either empty ' +
                       'or comma separated list of ' +
                       ', '.join(sorted(self.VALID_RECIPIENTS)))

  recipients = db.Column(
      db.String,
      nullable=True,
      default=u"Assignees,Creators,Verifiers")

  send_by_default = db.Column(db.Boolean, nullable=True, default=True)

  _api_attrs = reflection.ApiAttributes("recipients", "send_by_default")
  _aliases = {
      "recipients": "Recipients",
      "send_by_default": "Send by default",
      "comments": {
          "display_name": "Comments",
          "description": 'DELIMITER=";;" double semi-colon separated values',
      },
  }
  _fulltext_attrs = [
      MultipleSubpropertyFullTextAttr("comment", "comments", ["description"]),
  ]

  @classmethod
  def indexed_query(cls):
    """Query used by the full-text indexer; loads only indexed columns."""
    return super(Commentable, cls).indexed_query().options(
        orm.Load(cls).subqueryload("comments").load_only(
            "id",
            "description"))

  @classmethod
  def eager_query(cls):
    """Eager Query"""
    query = super(Commentable, cls).eager_query()
    return query.options(orm.subqueryload('comments'))

  @declared_attr
  def comments(cls):  # pylint: disable=no-self-argument
    """Comments related to self via Relationship table."""
    # The relationship is symmetric: a Comment may sit on either end of the
    # Relationship row, so both join directions are OR-ed together.
    return db.relationship(
        Comment,
        primaryjoin=lambda: sa.or_(
            sa.and_(
                cls.id == Relationship.source_id,
                Relationship.source_type == cls.__name__,
                Relationship.destination_type == "Comment",
            ),
            sa.and_(
                cls.id == Relationship.destination_id,
                Relationship.destination_type == cls.__name__,
                Relationship.source_type == "Comment",
            )),
        secondary=Relationship.__table__,
        secondaryjoin=lambda: sa.or_(
            sa.and_(
                Comment.id == Relationship.source_id,
                Relationship.source_type == "Comment",
            ),
            sa.and_(
                Comment.id == Relationship.destination_id,
                Relationship.destination_type == "Comment",
            )),
        # viewonly: mapping/unmapping comments is done through explicit
        # Relationship objects, never by mutating this collection.
        viewonly=True,
    )
class SystemOrProcess(ScopedCommentable,
                      mixins.TestPlanned,
                      mixins.LastDeprecatedTimeboxed,
                      mixins.base.ContextRBAC,
                      mixins.ScopeObject,
                      mixins.Folderable,
                      db.Model):
  """Shared model for System and Process scope objects.

  Both types share the single ``systems`` table; ``is_biz_process`` is the
  polymorphic discriminator distinguishing them.
  """
  # Override model_inflector
  _table_plural = 'systems_or_processes'
  __tablename__ = 'systems'

  infrastructure = deferred(db.Column(db.Boolean), 'SystemOrProcess')
  is_biz_process = db.Column(db.Boolean, default=False)
  version = deferred(db.Column(db.String), 'SystemOrProcess')
  network_zone_id = deferred(db.Column(db.Integer), 'SystemOrProcess')
  # Option row with role "network_zone"; the join is restricted by role so
  # unrelated options never match.
  network_zone = db.relationship(
      'Option',
      primaryjoin='and_(foreign(SystemOrProcess.network_zone_id) == Option.id,'
      ' Option.role == "network_zone")',
      uselist=False,
  )

  __mapper_args__ = {
      'polymorphic_on': is_biz_process
  }

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'infrastructure',
      'version',
      'network_zone',
      reflection.Attribute('is_biz_process', create=False, update=False),
  )
  _fulltext_attrs = [
      'infrastructure',
      'version',
      'network_zone',
  ]
  _sanitize_html = ['version']
  _aliases = {
      "documents_file": None,
      "network_zone": {
          "display_name": "Network Zone",
      },
  }

  @validates('network_zone')
  def validate_system_options(self, key, option):
    """Ensure the assigned Option carries the "network_zone" role."""
    return validate_option(
        self.__class__.__name__, key, option, 'network_zone')

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm
    query = super(SystemOrProcess, cls).eager_query()
    return query.options(
        orm.joinedload('network_zone'))

  @classmethod
  def indexed_query(cls):
    """Query used by the full-text indexer.

    Fixed: delegate to the parent's ``indexed_query`` (the original called
    ``eager_query``), so indexing-specific loader options contributed by the
    mixin chain are applied, consistent with Commentable.indexed_query and
    Document.indexed_query in this file.
    """
    from sqlalchemy import orm
    query = super(SystemOrProcess, cls).indexed_query()
    return query.options(
        orm.joinedload(
            'network_zone',
        ).undefer_group(
            "Option_complete",
        )
    )

  @staticmethod
  def _extra_table_args(cls):
    # Index the polymorphic discriminator: most queries filter on it.
    return (
        db.Index('ix_{}_is_biz_process'.format(cls.__tablename__),
                 'is_biz_process'),
    )
class Comment(Roleable, Relatable, Described, Notifiable,
              base.ContextRBAC, Base, Indexed, db.Model):
  """Basic comment model."""
  __tablename__ = "comments"

  # Roles the commenting person held on the parent object at comment time.
  assignee_type = db.Column(db.String, nullable=False, default=u"")

  # Optional link to the CustomAttributeValue revision this comment was made
  # against; SET NULL keeps the comment if the revision is deleted.
  revision_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('revisions.id', ondelete='SET NULL'),
          nullable=True,
      ),
      'Comment')
  revision = db.relationship(
      'Revision',
      uselist=False,
  )
  custom_attribute_definition_id = deferred(
      db.Column(
          db.Integer,
          db.ForeignKey('custom_attribute_definitions.id',
                        ondelete='SET NULL'),
          nullable=True,
      ),
      'Comment')
  custom_attribute_definition = db.relationship(
      'CustomAttributeDefinition',
      uselist=False,
  )

  # Polymorphic pointer to the object (e.g. a Proposal) that triggered
  # creation of this comment.
  initiator_instance_id = db.Column(db.Integer, nullable=True)
  initiator_instance_type = db.Column(db.String, nullable=True)
  INITIATOR_INSTANCE_TMPL = "{}_comment_initiated_by"

  initiator_instance = utils.PolymorphicRelationship(
      "initiator_instance_id",
      "initiator_instance_type",
      INITIATOR_INSTANCE_TMPL)

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      "assignee_type",
      reflection.Attribute("custom_attribute_revision",
                           create=False,
                           update=False),
      reflection.Attribute("custom_attribute_revision_upd",
                           read=False),
      reflection.Attribute("header_url_link",
                           create=False,
                           update=False),
  )

  _sanitize_html = [
      "description",
  ]

  def get_objects_to_reindex(self):
    """Return list required objects for reindex if comment C.U.D."""
    # A comment can be on either side of a Relationship row, so query both
    # directions and union the results.
    source_qs = db.session.query(
        Relationship.destination_type,
        Relationship.destination_id).filter(
        Relationship.source_type == self.__class__.__name__,
        Relationship.source_id == self.id)
    destination_qs = db.session.query(
        Relationship.source_type,
        Relationship.source_id).filter(
        Relationship.destination_type == self.__class__.__name__,
        Relationship.destination_id == self.id)
    result_qs = source_qs.union(destination_qs)
    # Group related object ids by model class name.
    klass_dict = defaultdict(set)
    for klass, object_id in result_qs:
      klass_dict[klass].add(object_id)

    queries = []
    for klass, object_ids in klass_dict.iteritems():
      model = inflector.get_model(klass)
      if not model:
        continue
      # Only indexable / commentable models need reindexing.
      if issubclass(model, (Indexed, Commentable)):
        queries.append(
            model.query.filter(model.id.in_(list(object_ids))))
    return list(itertools.chain(*queries))

  AUTO_REINDEX_RULES = [
      ReindexRule("Comment", lambda x: x.get_objects_to_reindex()),
      ReindexRule("Relationship", reindex_by_relationship),
  ]

  @builder.simple_property
  def header_url_link(self):
    """Return header url link to comment if that comment related to proposal
    and that proposal is only proposed."""
    if self.initiator_instance_type != "Proposal":
      return ""
    proposed_status = self.initiator_instance.STATES.PROPOSED
    if self.initiator_instance.status == proposed_status:
      return "proposal_link"
    return ""

  @classmethod
  def eager_query(cls):
    """Eager-load the CA revision and definition used by the API builders."""
    query = super(Comment, cls).eager_query()
    return query.options(
        orm.joinedload('revision'),
        orm.joinedload('custom_attribute_definition').undefer_group(
            'CustomAttributeDefinition_complete'),
    )

  @builder.simple_property
  def custom_attribute_revision(self):
    """Get the historical value of the relevant CA value."""
    if not self.revision:
      return None
    revision = self.revision.content
    cav_stored_value = revision['attribute_value']
    cad = self.custom_attribute_definition
    return {
        'custom_attribute': {
            'id': cad.id if cad else None,
            'title': cad.title if cad else 'DELETED DEFINITION',
        },
        'custom_attribute_stored_value': cav_stored_value,
    }

  def custom_attribute_revision_upd(self, value):
    """Create a Comment-CA mapping with current CA value stored."""
    ca_revision_dict = value.get('custom_attribute_revision_upd')
    if not ca_revision_dict:
      return
    ca_val_dict = self._get_ca_value(ca_revision_dict)
    ca_val_id = ca_val_dict['id']
    # Pin this comment to the LATEST revision of the CA value.
    ca_val_revision = Revision.query.filter_by(
        resource_type='CustomAttributeValue',
        resource_id=ca_val_id,
    ).order_by(
        Revision.created_at.desc(),
    ).limit(1).first()
    if not ca_val_revision:
      raise BadRequest(
          "No Revision found for CA value with id provided under "
          "'custom_attribute_value': {}".format(ca_val_dict))

    # NOTE(review): only the FK columns are set here; the `revision` /
    # `custom_attribute_definition` relationship attributes are not assigned.
    # An older variant of this method in this file assigns both — confirm the
    # ORM refreshes the relationships before they are read in this request.
    self.revision_id = ca_val_revision.id
    # The stored revision key is `custom_attribute_id`, which (see
    # custom_attribute_value.py) is actually the definition id.
    self.custom_attribute_definition_id = ca_val_revision.content.get(
        'custom_attribute_id',
    )

  @staticmethod
  def _get_ca_value(ca_revision_dict):
    """Get CA value dict from json and do a basic validation."""
    ca_val_dict = ca_revision_dict.get('custom_attribute_value')
    if not ca_val_dict:
      raise ValueError(
          "CA value expected under "
          "'custom_attribute_value': {}".format(ca_revision_dict))
    if not ca_val_dict.get('id'):
      raise ValueError(
          "CA value id expected under 'id': {}".format(ca_val_dict))
    return ca_val_dict
class CustomAttributeValue(base.ContextRBAC, Base, Indexed, db.Model):
  """Custom attribute value model"""

  __tablename__ = 'custom_attribute_values'

  _api_attrs = reflection.ApiAttributes(
      'custom_attribute_id',
      'attributable_id',
      'attributable_type',
      'attribute_value',
      'attribute_object',
      reflection.Attribute('preconditions_failed',
                           create=False,
                           update=False),
  )
  _fulltext_attrs = ["attribute_value"]

  REQUIRED_GLOBAL_REINDEX = False

  _sanitize_html = [
      "attribute_value",
  ]

  custom_attribute_id = db.Column(
      db.Integer,
      db.ForeignKey('custom_attribute_definitions.id', ondelete="CASCADE"))
  attributable_id = db.Column(db.Integer)
  attributable_type = db.Column(db.String)
  attribute_value = db.Column(db.String, nullable=False, default=u"")

  # When the attibute is of a mapping type this will hold the id of the mapped
  # object while attribute_value will hold the type name.
  # For example an instance of attribute type Map:Person will have a person id
  # in attribute_object_id and string 'Person' in attribute_value.
  attribute_object_id = db.Column(db.Integer)

  # pylint: disable=protected-access
  # This is just a mapping for accessing local functions so protected access
  # warning is a false positive
  # Dispatch table: CAD attribute_type -> validator method on this instance.
  _validator_map = {
      "Text": lambda self: self._validate_text(),
      "Rich Text": lambda self: self._validate_rich_text(),
      "Date": lambda self: self._validate_date(),
      "Dropdown": lambda self: self._validate_dropdown(),
      "Map:Person": lambda self: self._validate_map_object(),
      "Checkbox": lambda self: self._validate_checkbox(),
  }
  # Attributable types whose Rich Text values skip the URL-parsing step.
  TYPES_NO_RICHTEXT_VALIDATE = ["Control"]

  @property
  def latest_revision(self):
    """Latest revision of CAV (used for comment precondition check)."""
    # TODO: make eager_query fetch only the first Revision
    return self._related_revisions[0]

  def delere_record(self):
    """Remove this CAV's attributable from the full-text index.

    NOTE(review): the name is a long-standing typo of "delete_record"; it is
    kept as-is because external callers may reference it.
    """
    get_indexer().delete_record(self.attributable_id,
                                self.attributable_type,
                                False)

  def get_reindex_pair(self):
    """Return (type, id) of the attributable object for reindexing."""
    return (self.attributable_type, self.attributable_id)

  @declared_attr
  def _related_revisions(cls):  # pylint: disable=no-self-argument
    # Revisions of this CAV, newest first (latest_revision relies on order).
    def join_function():
      """Function to join CAV to its latest revision."""
      resource_id = foreign(Revision.resource_id)
      resource_type = foreign(Revision.resource_type)
      return and_(resource_id == cls.id,
                  resource_type == "CustomAttributeValue")

    return db.relationship(
        Revision,
        primaryjoin=join_function,
        viewonly=True,
        order_by=Revision.created_at.desc(),
    )

  @classmethod
  def eager_query(cls, **kwargs):
    query = super(CustomAttributeValue, cls).eager_query(**kwargs)
    query = query.options(
        orm.subqueryload('_related_revisions'),
        orm.joinedload('custom_attribute'),
    )
    return query

  @property
  def attributable_attr(self):
    # Name of the backref relationship on this model, e.g.
    # "assessment_custom_attributable".
    return '{0}_custom_attributable'.format(self.attributable_type)

  @property
  def attributable(self):
    return getattr(self, self.attributable_attr)

  @attributable.setter
  def attributable(self, value):
    # Denormalize id/type alongside setting the polymorphic relationship.
    self.attributable_id = value.id if value is not None else None
    self.attributable_type = value.__class__.__name__ if value is not None \
        else None
    return setattr(self, self.attributable_attr, value)

  @property
  def attribute_object(self):
    """Fetch the object referred to by attribute_object_id.

    Use backrefs defined in CustomAttributeMapable.

    Returns:
        A model instance of type specified in attribute_value
    """
    # NOTE(review): bare except deliberately swallows any resolution failure
    # (e.g. missing backref) and reports "no mapped object" instead.
    try:
      return getattr(self, self._attribute_object_attr)
    except:  # pylint: disable=bare-except
      return None

  @attribute_object.setter
  def attribute_object(self, value):
    """Set attribute_object_id via whole object.

    Args:
        value: model instance
    """
    if value is None:
      # We get here if "attribute_object" does not get resolved.
      # TODO: make sure None value can be set for removing CA attribute object
      # value
      return
    self.attribute_object_id = value.id
    return setattr(self, self._attribute_object_attr, value)

  @property
  def attribute_object_type(self):
    """Fetch the mapped object pointed to by attribute_object_id.

    Returns:
       A model of type referenced in attribute_value
    """
    attr_type = self.custom_attribute.attribute_type
    if not attr_type.startswith("Map:"):
      return None
    return self.attribute_object.__class__.__name__

  @property
  def _attribute_object_attr(self):
    """Compute the relationship property based on object type.

    Returns:
        Property name
    """
    attr_type = self.custom_attribute.attribute_type
    if not attr_type.startswith("Map:"):
      return None
    return 'attribute_{0}'.format(self.attribute_value)

  @classmethod
  def mk_filter_by_custom(cls, obj_class, custom_attribute_id):
    """Get filter for custom attributable object.

    This returns an exists filter for the given predicate, matching it to
    either a custom attribute value, or a value of the matched object.

    Args:
      obj_class: Class of the attributable object.
      custom_attribute_id: Id of the attribute definition.
    Returns:
      A function that will generate a filter for a given predicate.
    """
    from ggrc.models import all_models
    attr_def = all_models.CustomAttributeDefinition.query.filter_by(
        id=custom_attribute_id).first()
    if attr_def and attr_def.attribute_type.startswith("Map:"):
      # Mapping-type CA: match the predicate against searchable fields of
      # the mapped object rather than the stored attribute_value.
      map_type = attr_def.attribute_type[4:]
      map_class = getattr(all_models, map_type, None)
      if map_class:
        fields = [getattr(map_class, name, None)
                  for name in ["email", "title", "slug"]]
        fields = [field for field in fields if field is not None]

        def filter_by_mapping(predicate):
          return cls.query.filter(
              (cls.custom_attribute_id == custom_attribute_id) &
              (cls.attributable_type == obj_class.__name__) &
              (cls.attributable_id == obj_class.id) &
              (map_class.query.filter(
                  (map_class.id == cls.attribute_object_id) &
                  or_(*[predicate(f) for f in fields])).exists())
          ).exists()
        return filter_by_mapping

    def filter_by_custom(predicate):
      return cls.query.filter(
          (cls.custom_attribute_id == custom_attribute_id) &
          (cls.attributable_type == obj_class.__name__) &
          (cls.attributable_id == obj_class.id) &
          predicate(cls.attribute_value)
      ).exists()
    return filter_by_custom

  def _clone(self, obj):
    """Clone a custom value to a new object."""
    data = {
        "custom_attribute_id": self.custom_attribute_id,
        "attributable_id": obj.id,
        "attributable_type": self.attributable_type,
        "attribute_value": self.attribute_value,
        "attribute_object_id": self.attribute_object_id
    }
    ca_value = CustomAttributeValue(**data)
    db.session.add(ca_value)
    db.session.flush()
    return ca_value

  @staticmethod
  def _extra_table_args(_):
    # One value per (object, definition) pair.
    return (db.UniqueConstraint('attributable_id', 'custom_attribute_id'),)

  def _validate_map_object(self):
    """Validate and correct mapped object values

    Mapped object custom attribute is only valid if both attribute_value and
    attribute_object_id are set. To keep the custom attribute api consistent
    with other types, we allow setting the value to a string containing both
    in this way "attribute_value:attribute_object_id". This validator checks
    Both scenarios and changes the string value to proper values needed by
    this custom attribute.
    """
    self._extract_object_id_from_value()
    self._validate_mandatory_mapping()
    self._validate_map_type()
    self._validate_object_existence()

  def _extract_object_id_from_value(self):
    """Extract attribute_object_id from attribute_value"""
    # NOTE(review): split(":") raises ValueError if the value contains more
    # than one colon — presumably upstream guarantees "Type:id" shape; verify.
    if self.attribute_value and ":" in self.attribute_value:
      value, id_ = self.attribute_value.split(":")
      self.attribute_value = value
      self.attribute_object_id = id_

  def _validate_mandatory_mapping(self):
    """Validate mandatory mapping attribute"""
    if (self.custom_attribute.is_gca and
            self.custom_attribute.mandatory and
            not self.attribute_object_id):
      raise ValueError('Missing mandatory attribute: %s' %
                       self.custom_attribute.title)

  def _validate_map_type(self):
    """Validate related CAD attribute_type and provided attribute_value

    Related custom attribute definition's attribute_type column must starts
    with "Map:".

    Example:
      "Map:Person" - for mapping with Person model

    Provided attribute_value should match to custom attribute definition's
    attribute_type. If definition have "Map:Person" attribute_type,
    attribute_value must be "Person".
    """
    from ggrc.models import all_models
    mapping_prefix = 'Map:'

    defined_type = self.custom_attribute.attribute_type
    if not defined_type.startswith(mapping_prefix):
      raise ValueError('Invalid definition type: %s expected mapping'
                       % defined_type)

    if not self.attribute_value:
      return

    try:
      expected_type = defined_type.split(mapping_prefix)[1]
    except IndexError:
      raise ValueError(
          "Invalid definition type: mapping type didn't provided")

    if self.attribute_value != expected_type:
      raise ValueError('Invalid attribute type: %s expected %s'
                       % (self.attribute_value, expected_type))

    related_model = getattr(all_models, self.attribute_value)
    if not related_model or not issubclass(related_model, db.Model):
      raise ValueError('Invalid attribute type: %s' % self.attribute_value)

  def _validate_object_existence(self):
    """Validate existence of provided attribute_object_id

    To verify that attribute type is correct,
    must be called after '_validate_map_type()' method.
    """
    from ggrc.models import all_models
    if not self.attribute_object_id:
      return

    related_model = getattr(all_models, self.attribute_value)
    related_object = related_model.query.filter_by(
        id=self.attribute_object_id)

    # EXISTS subquery avoids fetching the whole row just to check presence.
    object_existence = db.session.query(related_object.exists()).scalar()

    if not object_existence:
      raise ValueError('Invalid attribute value: %s' %
                       self.custom_attribute.title)

  def _validate_dropdown(self):
    """Validate dropdown option."""
    valid_options = set(self.custom_attribute.multi_choice_options.split(","))
    if self.attribute_value:
      self.attribute_value = self.attribute_value.strip()
      if self.attribute_value not in valid_options:
        raise ValueError("Invalid custom attribute dropdown option: {v}, "
                         "expected one of {l}"
                         .format(v=self.attribute_value, l=valid_options))

  def _validate_date(self):
    """Convert date format."""
    if self.attribute_value:
      # Validate the date format by trying to parse it
      self.attribute_value = utils.convert_date_format(
          self.attribute_value,
          utils.DATE_FORMAT_ISO,
          utils.DATE_FORMAT_ISO,
      )

  def _validate_text(self):
    """Trim whitespaces."""
    if self.attribute_value:
      self.attribute_value = self.attribute_value.strip()

  def _validate_rich_text(self):
    """Add tags for links."""
    if self.attributable_type not in self.TYPES_NO_RICHTEXT_VALIDATE:
      self.attribute_value = url_parser.parse(self.attribute_value)

  def _validate_checkbox(self):
    """Set falsy value to zero."""
    if not self.attribute_value:
      self.attribute_value = "0"

  def validate(self):
    """Validate custom attribute value."""
    # pylint: disable=protected-access
    attributable_type = self.attributable._inflector.table_singular
    if not self.custom_attribute:
      raise ValueError("Custom attribute definition not found: Can not "
                       "validate custom attribute value")
    if self.custom_attribute.definition_type != attributable_type:
      raise ValueError("Invalid custom attribute definition used.")
    validator = self._validator_map.get(self.custom_attribute.attribute_type)
    if validator:
      validator(self)

  @builder.simple_property
  def is_empty(self):
    """Return True if the CAV is empty or holds a logically empty value."""
    # The CAV is considered empty when:
    # - the value is empty
    if not self.attribute_value:
      return True
    # - the type is Checkbox and the value is 0
    if (self.custom_attribute.attribute_type ==
            self.custom_attribute.ValidTypes.CHECKBOX and
            str(self.attribute_value) == "0"):
      return True
    # - the type is a mapping and the object value id is empty
    if (self.attribute_object_type is not None and
            not self.attribute_object_id):
      return True
    # Otherwise it the CAV is not empty
    return False

  @builder.simple_property
  def preconditions_failed(self):
    """A list of requirements self introduces that are unsatisfied.

    Returns:
      [str] - a list of unsatisfied requirements; possible items are: "value"
              - missing mandatory value, "comment" - missing mandatory
              comment, "evidence" - missing mandatory evidence.
    """
    failed_preconditions = []
    if self.custom_attribute.mandatory and self.is_empty:
      failed_preconditions += ["value"]
    if (self.custom_attribute.attribute_type ==
            self.custom_attribute.ValidTypes.DROPDOWN):
      failed_preconditions += self._check_dropdown_requirements()
    return failed_preconditions or None

  def _check_dropdown_requirements(self):
    """Check mandatory comment and mandatory evidence for dropdown CAV."""
    failed_preconditions = []
    options_to_flags = self.multi_choice_options_to_flags(
        self.custom_attribute,
    )
    flags = options_to_flags.get(self.attribute_value)
    if flags:
      for requirement in flags.keys():
        if not flags[requirement]:
          continue
        if requirement == "comment":
          failed_preconditions += self._check_mandatory_comment()
        else:
          # evidence / url requirements are checked by the attributable.
          failed_preconditions += self.attributable \
                                      .check_mandatory_requirement(
                                          requirement)
    return failed_preconditions

  def _check_mandatory_comment(self):
    """Check presence of mandatory comment."""
    if hasattr(self.attributable, "comments"):
      # A satisfying comment must reference both this CA definition and the
      # CURRENT (latest) revision of this value.
      comment_found = any(
          self.custom_attribute_id == (comment
                                       .custom_attribute_definition_id) and
          self.latest_revision.id == comment.revision_id
          for comment in self.attributable.comments
      )
    else:
      comment_found = False
    if not comment_found:
      return ["comment"]
    return []

  @staticmethod
  def multi_choice_options_to_flags(cad):
    """Parse mandatory comment and evidence flags from dropdown CA definition.

    Args:
      cad - a CA definition object

    Returns:
      {option_value: Flags} - a dict from dropdown options values to dict
                              where keys "comment", "evidence" and "url"
                              corresponds to the values from
                              multi_choice_mandatory bitmasks
    """
    def make_flags(multi_choice_mandatory):
      flags_mask = int(multi_choice_mandatory)
      return {
          "comment": flags_mask & (cad
                                   .MultiChoiceMandatoryFlags
                                   .COMMENT_REQUIRED),
          "evidence": flags_mask & (cad
                                    .MultiChoiceMandatoryFlags
                                    .EVIDENCE_REQUIRED),
          "url": flags_mask & (cad
                               .MultiChoiceMandatoryFlags
                               .URL_REQUIRED),
      }

    if not cad.multi_choice_options or not cad.multi_choice_mandatory:
      return {}
    # Options and their bitmasks are stored as parallel comma-separated lists.
    return dict(zip(
        cad.multi_choice_options.split(","),
        (make_flags(mask)
         for mask in cad.multi_choice_mandatory.split(",")),
    ))

  def log_json_base(self):
    res = super(CustomAttributeValue, self).log_json_base()

    if self.attribute_object_id is not None and \
       self._attribute_object_attr is not None:
      res["attribute_object"] = self.attribute_object

    return res
class Document(Roleable, Relatable, Base, Indexed, db.Model):
  """Audit model."""
  __tablename__ = 'documents'

  # TODO: inherit from Titled mixin (note: title is nullable here)
  title = deferred(db.Column(db.String), 'Document')
  link = deferred(db.Column(db.String), 'Document')
  description = deferred(db.Column(db.Text), 'Document')
  # All three FKs point at the shared `options` table; the relationships
  # below restrict each one by Option.role.
  kind_id = db.Column(db.Integer, db.ForeignKey('options.id'), nullable=True)
  year_id = db.Column(db.Integer, db.ForeignKey('options.id'), nullable=True)
  language_id = db.Column(db.Integer, db.ForeignKey('options.id'),
                          nullable=True)

  URL = "URL"
  # Note the enum value mismatch: the ATTACHMENT constant stores "EVIDENCE".
  ATTACHMENT = "EVIDENCE"
  REFERENCE_URL = "REFERENCE_URL"
  document_type = deferred(db.Column(db.Enum(URL, ATTACHMENT, REFERENCE_URL),
                                     default=URL, nullable=False), 'Document')

  kind = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Document.kind_id) == Option.id, '
      'Option.role == "reference_type")',
      uselist=False,
      lazy="joined",
  )

  year = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Document.year_id) == Option.id, '
      'Option.role == "document_year")',
      uselist=False,
      lazy="joined",
  )

  language = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Document.language_id) == Option.id, '
      'Option.role == "language")',
      uselist=False,
      lazy="joined",
  )

  _fulltext_attrs = [
      'title',
      'link',
      'description',
      "document_type",
  ]

  _api_attrs = reflection.ApiAttributes(
      'title',
      'link',
      'description',
      'kind',
      'year',
      'language',
      "document_type",
  )

  _sanitize_html = [
      'title',
      'description',
  ]

  _aliases = {
      'title': "Title",
      'link': "Link",
      'description': "description",
  }

  @orm.validates('kind', 'year', 'language')
  def validate_document_options(self, key, option):
    """Returns correct option, otherwise rises an error"""
    # kind/year map to differently named Option roles; language matches 1:1.
    if key == 'year':
      desired_role = 'document_year'
    elif key == 'kind':
      desired_role = 'reference_type'
    else:
      desired_role = key
    return validate_option(self.__class__.__name__, key, option,
                           desired_role)

  @orm.validates('document_type')
  def validate_document_type(self, key, document_type):
    """Returns correct option, otherwise rises an error"""
    if document_type is None:
      document_type = self.URL
    if document_type not in [self.URL,
                             self.ATTACHMENT,
                             self.REFERENCE_URL]:
      raise exceptions.ValidationError(
          "Invalid value for attribute {attr}. "
          "Expected options are `{url}`, `{attachment}`, `{reference_url}`"
          .format(
              attr=key,
              url=self.URL,
              attachment=self.ATTACHMENT,
              reference_url=self.REFERENCE_URL
          )
      )
    return document_type

  @classmethod
  def indexed_query(cls):
    return super(Document, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Document_complete",
        ),
    )

  @classmethod
  def eager_query(cls):
    return super(Document, cls).eager_query().options(
        orm.joinedload('kind'),
        orm.joinedload('year'),
        orm.joinedload('language'),
    )

  @hybrid_property
  def slug(self):
    """Pseudo-slug: just the link for URL types, "link title" for evidence."""
    if self.document_type in (self.URL, self.REFERENCE_URL):
      return self.link
    return u"{} {}".format(self.link, self.title)

  # pylint: disable=no-self-argument
  @slug.expression
  def slug(cls):
    # SQL mirror of the Python property above.
    return case([(cls.document_type == cls.ATTACHMENT,
                  func.concat(cls.link, ' ', cls.title))],
                else_=cls.link)

  def log_json(self):
    tmp = super(Document, self).log_json()
    tmp['type'] = "Document"
    return tmp
class Revision(Base, db.Model):
  """Revision object holds a JSON snapshot of the object at a time."""

  __tablename__ = 'revisions'

  resource_id = db.Column(db.Integer, nullable=False)
  resource_type = db.Column(db.String, nullable=False)
  event_id = db.Column(db.Integer, db.ForeignKey('events.id'),
                       nullable=False)
  action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                     nullable=False)
  # Raw JSON snapshot; read through the `content` property below, which
  # back-fills fields missing from older revisions.
  _content = db.Column('content', LongJsonType, nullable=False)

  resource_slug = db.Column(db.String, nullable=True)
  source_type = db.Column(db.String, nullable=True)
  source_id = db.Column(db.Integer, nullable=True)
  destination_type = db.Column(db.String, nullable=True)
  destination_id = db.Column(db.Integer, nullable=True)

  @staticmethod
  def _extra_table_args(_):
    return (
        db.Index("revisions_modified_by", "modified_by_id"),
        db.Index("fk_revisions_resource", "resource_type", "resource_id"),
        db.Index("fk_revisions_source", "source_type", "source_id"),
        db.Index("fk_revisions_destination",
                 "destination_type", "destination_id"),
        db.Index('ix_revisions_resource_slug', 'resource_slug'),
    )

  _api_attrs = reflection.ApiAttributes(
      'resource_id',
      'resource_type',
      'source_type',
      'source_id',
      'destination_type',
      'destination_id',
      'action',
      'content',
      'description',
  )

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    query = super(Revision, cls).eager_query()
    return query.options(
        orm.subqueryload('modified_by'),
        orm.subqueryload('event'),  # used in description
    )

  def __init__(self, obj, modified_by_id, action, content):
    self.resource_id = obj.id
    self.resource_type = obj.__class__.__name__
    self.resource_slug = getattr(obj, "slug", None)
    self.modified_by_id = modified_by_id
    self.action = action
    # Inject a frontend-shaped "person" stub into every ACL entry.
    # NOTE(review): this mutates the caller's `content` dict in place —
    # presumably intentional, but verify callers do not reuse the dict.
    if "access_control_list" in content and content["access_control_list"]:
      for acl in content["access_control_list"]:
        acl["person"] = {
            "id": acl["person_id"],
            "type": "Person",
            "href": "/api/people/{}".format(acl["person_id"]),
        }

    self._content = content

    for attr in ["source_type",
                 "source_id",
                 "destination_type",
                 "destination_id"]:
      setattr(self, attr, getattr(obj, attr, None))

  @builder.simple_property
  def description(self):
    """Compute a human readable description from action and content."""
    if 'display_name' not in self._content:
      return ''
    display_name = self._content['display_name']
    if not display_name:
      result = u"{0} {1}".format(self.resource_type, self.action)
    elif u'<->' in display_name:
      # "A <-> B" display names denote mapping/unmapping events.
      if self.action == 'created':
        msg = u"{destination} linked to {source}"
      elif self.action == 'deleted':
        msg = u"{destination} unlinked from {source}"
      else:
        msg = u"{display_name} {action}"
      source, destination = self._content['display_name'].split('<->')[:2]
      result = msg.format(source=source,
                          destination=destination,
                          display_name=self._content['display_name'],
                          action=self.action)
    elif 'mapped_directive' in self._content:
      # then this is a special case of combined map/creation
      # should happen only for Section and Control
      mapped_directive = self._content['mapped_directive']
      if self.action == 'created':
        result = u"New {0}, {1}, created and mapped to {2}".format(
            self.resource_type,
            display_name,
            mapped_directive
        )
      elif self.action == 'deleted':
        result = u"{0} unmapped from {1} and deleted".format(
            display_name, mapped_directive)
      else:
        result = u"{0} {1}".format(display_name, self.action)
    else:
      # otherwise, it's a normal creation event
      result = u"{0} {1}".format(display_name, self.action)
    if self.event.action == "BULK":
      result += ", via bulk action"
    return result

  def populate_reference_url(self):
    """Add reference_url info for older revisions."""
    if 'url' not in self._content:
      return {}
    reference_url_list = []
    # NOTE(review): self._content[key] for 'reference_url' assumes that any
    # revision carrying 'url' also carries 'reference_url' — confirm, else
    # this raises KeyError.
    for key in ('url', 'reference_url'):
      link = self._content[key]
      # link might exist, but can be an empty string - we treat those values
      # as non-existing (empty) reference URLs
      if not link:
        continue
      # if creation/modification date is not available, we estimate it by
      # using the corresponding information from the Revision itself
      created_at = (self._content.get("created_at") or
                    self.created_at.isoformat())
      updated_at = (self._content.get("updated_at") or
                    self.updated_at.isoformat())
      reference_url_list.append({
          "display_name": link,
          "document_type": "REFERENCE_URL",
          "link": link,
          "title": link,
          "id": None,
          "created_at": created_at,
          "updated_at": updated_at,
      })
    return {'reference_url': reference_url_list}

  def populate_acl(self):
    """Add access_control_list info for older revisions."""
    roles_dict = role.get_custom_roles_for(self.resource_type)
    reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}
    access_control_list = self._content.get("access_control_list") or []
    # Legacy person-reference fields mapped to their modern ACR role ids.
    map_field_to_role = {
        "principal_assessor": reverted_roles_dict.get("Principal Assignees"),
        "secondary_assessor": reverted_roles_dict.get("Secondary Assignees"),
        "contact": reverted_roles_dict.get("Primary Contacts"),
        "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
        "owners": reverted_roles_dict.get("Admin"),
    }
    exists_roles = {i["ac_role_id"] for i in access_control_list}
    for field, role_id in map_field_to_role.items():
      if field not in self._content:
        continue
      # Skip roles already present in the stored ACL or unknown to this type.
      if role_id in exists_roles or role_id is None:
        continue
      field_content = self._content.get(field) or {}
      if not field_content:
        continue
      if not isinstance(field_content, list):
        field_content = [field_content]
      person_ids = {fc.get("id") for fc in field_content if fc.get("id")}
      for person_id in person_ids:
        access_control_list.append({
            "display_name": roles_dict[role_id],
            "ac_role_id": role_id,
            "context_id": None,
            "created_at": None,
            "object_type": self.resource_type,
            "updated_at": None,
            "object_id": self.resource_id,
            "modified_by_id": None,
            "person_id": person_id,
            # Frontend require data in such format
            "person": {
                "id": person_id,
                "type": "Person",
                "href": "/api/people/{}".format(person_id)
            },
            "modified_by": None,
            "id": None,
        })
    # Ensure every entry has a "person" stub even if stored without one.
    for acl in access_control_list:
      if "person" not in acl:
        acl["person"] = {"id": acl.get("person_id"), "type": "Person"}
    return {"access_control_list": access_control_list}

  def populate_folder(self):
    """Add folder info for older revisions."""
    if "folder" in self._content:
      return {}
    folders = self._content.get("folders") or [{"id": ""}]
    # Only the first folder is kept; the modern model stores a single folder.
    return {"folder": folders[0]["id"]}

  @builder.simple_property
  def content(self):
    """Property. Contains the revision content dict.

    Updated by required values, generated from saved content dict."""
    # pylint: disable=too-many-locals
    populated_content = self._content.copy()
    populated_content.update(self.populate_acl())
    populated_content.update(self.populate_reference_url())
    populated_content.update(self.populate_folder())
    return populated_content

  @content.setter
  def content(self, value):
    """ Setter for content property."""
    self._content = value
class Snapshot(relationship.Relatable, WithLastAssessmentDate, mixins.Base,
               db.Model):
  """Snapshot object that holds a join of parent object, revision, child
  object and parent object's context.

  Conceptual model is that we have a parent snapshotable object (e.g. Audit)
  which will not create relationships to objects with automapper at the time
  of creation but will instead create snapshots of those objects based on the
  latest revision of the object at the time of create / update of the object.
  Objects that were supposed to be mapped are called child objects.
  """
  __tablename__ = "snapshots"

  _api_attrs = reflection.ApiAttributes(
      "parent",
      "child_id",
      "child_type",
      reflection.Attribute("revision", create=False, update=False),
      reflection.Attribute("revision_id", create=False, update=False),
      reflection.Attribute("archived", create=False, update=False),
      reflection.Attribute("revisions", create=False, update=False),
      reflection.Attribute("is_latest_revision", create=False, update=False),
      reflection.Attribute("original_object_deleted",
                           create=False, update=False),
      reflection.Attribute("update_revision", read=False),
  )

  _include_links = [
      "revision"
  ]
  _aliases = {
      "attributes": "Attributes",
      "mappings": {
          "display_name": "Mappings",
          "type": "mapping",
      }
  }

  parent_id = deferred(db.Column(db.Integer, nullable=False), "Snapshot")
  parent_type = deferred(db.Column(db.String, nullable=False), "Snapshot")

  # Child ID and child type are data denormalisations - we could easily get
  # them from revision.content, but since that is a JSON field it will be
  # easier for development to just denormalise on write and not worry
  # about it.
  child_id = deferred(db.Column(db.Integer, nullable=False), "Snapshot")
  child_type = deferred(db.Column(db.String, nullable=False), "Snapshot")

  # The revision this snapshot currently points at.
  revision_id = deferred(db.Column(
      db.Integer,
      db.ForeignKey("revisions.id"),
      nullable=False
  ), "Snapshot")
  revision = db.relationship(
      "Revision",
  )
  # Transient request value set through the update_revision setter; not a
  # database column.
  _update_revision = None

  # All revisions of the child object, not just the one pointed at.
  revisions = db.relationship(
      "Revision",
      primaryjoin="and_(Revision.resource_id == foreign(Snapshot.child_id),"
                  "Revision.resource_type == foreign(Snapshot.child_type))",
      uselist=True,
  )

  @builder.simple_property
  def archived(self):
    """Flag if the parent (e.g. Audit) of this snapshot is archived."""
    return self.parent.archived if self.parent else False

  @builder.simple_property
  def is_latest_revision(self):
    """Flag if the snapshot has the latest revision."""
    return self.revisions and self.revision == self.revisions[-1]

  @builder.simple_property
  def original_object_deleted(self):
    """Flag if the original (child) object has been deleted."""
    return self.revisions and self.revisions[-1].action == "deleted"

  @classmethod
  def eager_query(cls):
    """Eager-load revision relationships to reduce DB query count."""
    query = super(Snapshot, cls).eager_query()
    return cls.eager_inclusions(query, Snapshot._include_links).options(
        orm.subqueryload('revision'),
        orm.subqueryload('revisions'),
    )

  @hybrid_property
  def update_revision(self):
    return self.revision_id

  @update_revision.setter
  def update_revision(self, value):
    """Stash the requested revision update; "latest" triggers a re-point of
    this snapshot to the newest revision of its child object."""
    self._update_revision = value
    if value == "latest":
      _set_latest_revisions([self])

  @property
  def parent_attr(self):
    # Name of the polymorphic backref attribute, e.g. "Audit_parent".
    return '{0}_parent'.format(self.parent_type)

  @property
  def parent(self):
    return getattr(self, self.parent_attr)

  @parent.setter
  def parent(self, value):
    # Denormalise id/type so queries do not need to resolve the attribute.
    self.parent_id = getattr(value, 'id', None)
    self.parent_type = getattr(value, 'type', None)
    return setattr(self, self.parent_attr, value)

  @staticmethod
  def _extra_table_args(_):
    return (
        db.UniqueConstraint(
            "parent_type", "parent_id",
            "child_type", "child_id"),
        db.Index("ix_snapshots_parent", "parent_type", "parent_id"),
        db.Index("ix_snapshots_child", "child_type", "child_id"),
    )
class Relationship(base.ContextRBAC, Base, db.Model):
  """Relationship model.

  Stores a typed, directed mapping between two objects identified by
  (source_type, source_id) and (destination_type, destination_id).
  """
  __tablename__ = 'relationships'
  source_id = db.Column(db.Integer, nullable=False)
  source_type = db.Column(db.String, nullable=False)
  destination_id = db.Column(db.Integer, nullable=False)
  destination_type = db.Column(db.String, nullable=False)
  parent_id = db.Column(
      db.Integer,
      db.ForeignKey('relationships.id', ondelete='SET NULL'),
      nullable=True,
  )
  parent = db.relationship(
      lambda: Relationship,
      remote_side=lambda: Relationship.id
  )
  automapping_id = db.Column(
      db.Integer,
      db.ForeignKey('automappings.id', ondelete='CASCADE'),
      nullable=True,
  )
  # Marks relationships owned by the external (sync) application.
  is_external = db.Column(db.Boolean, nullable=False, default=False)

  def get_related_for(self, object_type):
    """Return related object for sent type.

    Returns None if object_type matches neither endpoint.
    """
    if object_type == self.source_type:
      return self.destination
    if object_type == self.destination_type:
      return self.source

  @property
  def source_attr(self):
    # Name of the polymorphic backref attribute, e.g. "Control_source".
    return '{0}_source'.format(self.source_type)

  @property
  def source(self):
    """Source getter."""
    if not hasattr(self, self.source_attr):
      logger.warning(
          "Relationship source attr '%s' does not exist. "
          "This indicates invalid data in our database!",
          self.source_attr
      )
      return None
    return getattr(self, self.source_attr)

  @source.setter
  def source(self, value):
    """Source setter: denormalises id/type and validates the mapping."""
    self.source_id = getattr(value, 'id', None)
    self.source_type = getattr(value, 'type', None)
    self.validate_relatable_type("source", value)
    return setattr(self, self.source_attr, value)

  @property
  def destination_attr(self):
    # Name of the polymorphic backref attribute, e.g. "Control_destination".
    return '{0}_destination'.format(self.destination_type)

  @property
  def destination(self):
    """Destination getter."""
    if not hasattr(self, self.destination_attr):
      logger.warning(
          "Relationship destination attr '%s' does not exist. "
          "This indicates invalid data in our database!",
          self.destination_attr
      )
      return None
    return getattr(self, self.destination_attr)

  @destination.setter
  def destination(self, value):
    """Destination setter: denormalises id/type and validates the mapping."""
    self.destination_id = getattr(value, 'id', None)
    self.destination_type = getattr(value, 'type', None)
    self.validate_relatable_type("destination", value)
    return setattr(self, self.destination_attr, value)

  @classmethod
  def find_related(cls, object1, object2):
    """Return the first relationship between the two objects, or None."""
    return cls.get_related_query(object1, object2).first()

  @classmethod
  def get_related_query_by_type_id(cls, type1, id1, type2, id2,
                                   strict_id=True):
    """Return query to find relationship(s)

    This function prepares query for the following cases:
    1) Find relationships between 2 objects. In this case strict_id=True
    2) Find relationships between on object and other objects of specified
       type. In this case strict_id=False

    :param type1: type of first object
    :param id1: id of first object
    :param type2: type of second object
    :param id2: id of second object
    :param strict_id: True if id must be specified, else False
    :return: prepared query
    """
    def predicate(src_type, src_id, dst_type, dst_id):
      """Build a filter for one direction of the relationship."""
      filters = [
          Relationship.source_type == src_type,
          Relationship.destination_type == dst_type
      ]
      if src_id is not None:
        filters.append(Relationship.source_id == src_id)
      if dst_id is not None:
        filters.append(Relationship.destination_id == dst_id)
      return and_(*filters)

    if (strict_id and None in (id1, id2)) or None in (type1, type2):
      # One of the following occurred:
      # 1) One of ids is None, but it's requested to have ids specified
      # 2) One of types is None
      # Make filter to return empty list
      return Relationship.query.filter(false())

    # Relationships are undirected for lookup purposes, so match both
    # orientations.
    return Relationship.query.filter(
        or_(predicate(type1, id1, type2, id2),
            predicate(type2, id2, type1, id1))
    )

  @classmethod
  def get_related_query(cls, object1, object2):
    """Return query finding relationships between two concrete objects."""
    return cls.get_related_query_by_type_id(
        type1=object1.type,
        id1=object1.id,
        type2=object2.type,
        id2=object2.id,
        strict_id=False)

  @staticmethod
  def _extra_table_args(cls):
    return (
        db.UniqueConstraint(
            'source_id', 'source_type', 'destination_id', 'destination_type'),
        db.Index(
            'ix_relationships_source', 'source_type', 'source_id'),
        db.Index(
            'ix_relationships_destination',
            'destination_type', 'destination_id'),
    )

  _api_attrs = reflection.ApiAttributes(
      'source',
      'destination',
      reflection.Attribute(
          'is_external', create=True, update=False, read=True),
  )

  def _display_name(self):
    """Human-readable identifier, e.g. "Control:1 <-> Audit:2"."""
    return "{}:{} <-> {}:{}".format(self.source_type, self.source_id,
                                    self.destination_type,
                                    self.destination_id)

  def validate_relatable_type(self, field, value):
    """Validate one endpoint of the relationship.

    Rejects None / non-Relatable endpoints, disallowed type pairs, and
    mapping a Snapshot to the very object it is a snapshot of.

    :param field: "source" or "destination" - which endpoint value is
    :param value: the object being assigned to that endpoint
    :raises ValidationError: if the endpoint or the type pair is invalid
    """
    if value is None:
      raise ValidationError(u"{}.{} can't be None."
                            .format(self.__class__.__name__, field))
    if not isinstance(value, Relatable):
      raise ValidationError(u"You are trying to create relationship with not "
                            u"Relatable type: {}".format(value.type))
    # The "target" is the opposite endpoint of the one being assigned.
    tgt_type = self.source_type
    tgt_id = self.source_id
    self.validate_relation_by_type(self.source_type, self.destination_type)
    if field == "source":
      tgt_type = self.destination_type
      tgt_id = self.destination_id
    if value and getattr(value, "type") == "Snapshot":
      if not tgt_type:
        return
      # A snapshot must not be mapped to its own original object.
      if value.child_type == tgt_type and value.child_id == tgt_id:
        raise ValidationError(
            u"Invalid source-destination types pair for {}: "
            u"source_type={!r}, destination_type={!r}"
            .format(self.type, self.source_type, self.destination_type)
        )
    # else check if the opposite is a Snapshot
    elif tgt_type == "Snapshot":
      from ggrc.models import Snapshot
      snapshot = db.session.query(Snapshot).get(tgt_id)
      if snapshot.child_type == value.type and snapshot.child_id == value.id:
        raise ValidationError(
            u"Invalid source-destination types pair for {}: "
            u"source_type={!r}, destination_type={!r}"
            .format(self.type, self.source_type, self.destination_type)
        )

  @staticmethod
  def _check_relation_types_group(type1, type2, group1, group2):
    """Checks if 2 types belong to 2 groups

    Args:
      type1: name of model 1
      type2: name of model 2
      group1: Collection of model names which belong to group 1
      group2: Collection of model names which belong to group 2
    Return:
      True if types belong to different groups, else False
    """
    if (type1 in group1 and type2 in group2) or (type2 in group1 and
                                                 type1 in group2):
      return True
    return False

  @property
  def is_orphan(self):
    """Check if relationship's source or destination is deleted."""
    deleted_objs = db.session.deleted
    return self.source in deleted_objs or self.destination in deleted_objs

  # pylint:disable=unused-argument
  @classmethod
  def validate_delete(cls, mapper, connection, target):
    """Validates if delete of Relationship is allowed."""
    from ggrc.utils.user_generator import is_ext_app_request
    if target.is_orphan:
      # If relationship's source or destination is being deleted, relationship
      # should be deleted as well in spite of source and destination types.
      return
    cls.validate_relation_by_type(target.source_type,
                                  target.destination_type)
    if is_ext_app_request() and not target.is_external:
      raise ValidationError(
          'External application can delete only external relationships.')

  @classmethod
  def validate_relation_by_type(cls, source_type, destination_type):
    """Checks if a mapping is allowed between given types.

    :raises ValidationError: when the pair of types may only be mapped or
        unmapped by the external application.
    """
    if is_external_app_user():
      # external users can map and unmap scoping objects
      return
    from ggrc.models import all_models
    scoping_models_names = all_models.get_scope_model_names()

    # Check Regulation and Standard
    if cls._check_relation_types_group(source_type, destination_type,
                                       scoping_models_names,
                                       ("Regulation", "Standard")):
      raise ValidationError(
          u"You do not have the necessary permissions to map and unmap "
          u"scoping objects to directives in this application. Please "
          u"contact your administrator if you have any questions.")

    # Check Control
    control_external_only_mappings = set(scoping_models_names)
    control_external_only_mappings.update(("Regulation", "Standard", "Risk"))
    if cls._check_relation_types_group(source_type, destination_type,
                                       control_external_only_mappings,
                                       ("Control", )):
      raise ValidationError(
          u"You do not have the necessary permissions to map and unmap "
          u"controls to scoping objects, standards and regulations in this "
          u"application. Please contact your administrator "
          u"if you have any questions.")

    # Check Risk
    risk_external_only_mappings = set(scoping_models_names)
    risk_external_only_mappings.update(("Regulation", "Standard", "Control"))
    if cls._check_relation_types_group(source_type, destination_type,
                                       risk_external_only_mappings,
                                       ("Risk", )):
      # BUGFIX: added the missing space between "application." and "Please" -
      # implicit string concatenation previously produced "application.Please".
      raise ValidationError(
          u"You do not have the necessary permissions to map and unmap "
          u"risks to scoping objects, controls, standards "
          u"and regulations in this application. "
          u"Please contact your administrator if you have any questions.")
class Assessment(Roleable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, Assignable, HasObjectState,
                 TestPlanned, CustomAttributable, PublicDocumentable,
                 Commentable, Personable, reminderable.Reminderable,
                 Relatable, LastDeprecatedTimeboxed, WithSimilarityScore,
                 FinishedDate, VerifiedDate, ValidateOnComplete, Notifiable,
                 WithAction, labeled.Labeled, BusinessObject, Indexed,
                 db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not
  certain conditions were met for that object.
  """

  __tablename__ = 'assessments'
  # Multiple assessments may share a title.
  _title_uniqueness = False

  REWORK_NEEDED = u"Rework Needed"
  NOT_DONE_STATES = statusable.Statusable.NOT_DONE_STATES | {
      REWORK_NEEDED,
  }
  VALID_STATES = tuple(NOT_DONE_STATES | statusable.Statusable.DONE_STATES |
                       statusable.Statusable.INACTIVE_STATES)

  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assignees",
              "In Progress": "Assignees"
          },
          "reminders": {
              "assessment_assignees_reminder",
          }
      }
  }

  # Conclusion fields; valid values are listed in VALID_CONCLUSIONS.
  design = deferred(db.Column(db.String, nullable=False, default=""),
                    "Assessment")
  operationally = deferred(db.Column(db.String, nullable=False, default=""),
                           "Assessment")
  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')
  assessment_type = deferred(
      db.Column(db.String, nullable=False, server_default="Control"),
      "Assessment")

  @declared_attr
  def object_level_definitions(cls):  # pylint: disable=no-self-argument
    """Set up a backref so that we can create an object level custom
       attribute definition without the need to do a flush to get the
       assessment id.

      This is used in the relate_ca method in hooks/assessment.py.
    """
    return db.relationship(
        'CustomAttributeDefinition',
        primaryjoin=lambda: and_(
            remote(CustomAttributeDefinition.definition_id) == cls.id,
            remote(CustomAttributeDefinition.definition_type) ==
            "assessment"),
        foreign_keys=[
            CustomAttributeDefinition.definition_id,
            CustomAttributeDefinition.definition_type
        ],
        backref='assessment_definition',
        cascade='all, delete-orphan')

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = frozenset(
      ["Effective", "Ineffective", "Needs improvement", "Not Applicable"])

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'design',
      'operationally',
      'audit',
      'assessment_type',
      reflection.Attribute('issue_tracker', create=False, update=False),
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('folder', create=False, update=False),
      reflection.Attribute('object', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'design',
      'operationally',
      'folder',
  ]

  _custom_publish = {
      'audit': _build_audit_stub,
  }

  @classmethod
  def _populate_query(cls, query):
    """Attach eager-loading options shared by eager and indexed queries."""
    return query.options(
        orm.Load(cls).undefer_group("Assessment_complete", ),
        orm.Load(cls).joinedload("audit").undefer_group(
            "Audit_complete",
        ),
    )

  @classmethod
  def eager_query(cls):
    return cls._populate_query(super(Assessment, cls).eager_query())

  @classmethod
  def indexed_query(cls):
    return cls._populate_query(super(Assessment, cls).indexed_query())

  def log_json(self):
    """Extend the revision log JSON with the audit's folder."""
    out_json = super(Assessment, self).log_json()
    out_json["folder"] = self.folder
    return out_json

  # Import/export column aliases.
  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "assessment_type": {
          "display_name": "Assessment Type",
          "mandatory": False,
      },
      "design": "Conclusion: Design",
      "operationally": "Conclusion: Operation",
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
      },
      "test_plan": "Assessment Procedure",
      # Currently we decided to have 'Due Date' alias for start_date,
      # but it can be changed in future
      "start_date": "Due Date",
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      },
  }

  similarity_options = {
      "relevant_types": {
          "Objective": {
              "weight": 2
          },
          "Control": {
              "weight": 2
          },
      },
      "threshold": 1,
  }

  def __init__(self, *args, **kwargs):
    super(Assessment, self).__init__(*args, **kwargs)
    # Per-domain warning messages, surfaced e.g. in issue_tracker payloads.
    self._warnings = collections.defaultdict(list)

  @orm.reconstructor
  def init_on_load(self):
    # Re-create transient state when the instance is loaded from the DB
    # (__init__ is not called in that case).
    self._warnings = collections.defaultdict(list)

  def add_warning(self, domain, msg):
    """Record a warning message under the given domain key."""
    self._warnings[domain].append(msg)

  @simple_property
  def issue_tracker(self):
    """Returns representation of issue tracker related info as a dict."""
    issue_obj = issuetracker_issue.IssuetrackerIssue.get_issue(
        'Assessment', self.id)
    res = issue_obj.to_dict(
        include_issue=True) if issue_obj is not None else {}
    res['_warnings'] = self._warnings['issue_tracker']
    return res

  @simple_property
  def archived(self):
    """Returns a boolean whether assessment is archived or not."""
    return self.audit.archived if self.audit else False

  @simple_property
  def folder(self):
    """The audit's folder, or an empty string without an audit."""
    return self.audit.folder if self.audit else ""

  def validate_conclusion(self, value):
    """Return value if it is a valid conclusion, else empty string."""
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("status")
  def validate_status(self, key, value):
    """Restrict transitions out of the Rework Needed state."""
    value = super(Assessment, self).validate_status(key, value)
    # pylint: disable=unused-argument
    if self.status == value:
      return value
    if self.status == self.REWORK_NEEDED:
      valid_states = [self.DONE_STATE, self.FINAL_STATE, self.DEPRECATED]
      if value not in valid_states:
        raise ValueError("Assessment in `Rework Needed` "
                         "state can be only moved to: [{}]".format(
                             ",".join(valid_states)))
    return value

  @validates("operationally")
  def validate_opperationally(self, key, value):
    """Validate the "operationally" conclusion field."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate the "design" conclusion field."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("assessment_type")
  def validate_assessment_type(self, key, value):
    """Validate assessment type to be the same as existing model name"""
    # pylint: disable=unused-argument
    # pylint: disable=no-self-use
    from ggrc.snapshotter.rules import Types
    if value and value not in Types.all:
      raise ValueError(
          "Assessment type '{}' is not snapshotable".format(value))
    return value

  @classmethod
  def _ignore_filter(cls, _):
    """Filter stub for aliases that must not be filterable on import."""
    return None
class Assessment(Roleable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, Assignable, HasObjectState,
                 TestPlanned, CustomAttributable, PublicDocumentable,
                 Commentable, Personable, reminderable.Reminderable,
                 Timeboxed, Relatable, WithSimilarityScore, FinishedDate,
                 VerifiedDate, ValidateOnComplete, Notifiable, WithAction,
                 BusinessObject, Indexed, db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not
  certain conditions were met for that object.
  """

  __tablename__ = 'assessments'
  # Multiple assessments may share a title.
  _title_uniqueness = False

  REWORK_NEEDED = u"Rework Needed"
  NOT_DONE_STATES = statusable.Statusable.NOT_DONE_STATES | {
      REWORK_NEEDED,
  }
  VALID_STATES = tuple(NOT_DONE_STATES | statusable.Statusable.DONE_STATES)
  ASSIGNEE_TYPES = (u"Creator", u"Assessor", u"Verifier")

  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assessor",
              "In Progress": "Assessor"
          },
          "reminders": {
              "assessment_assessor_reminder",
          }
      }
  }

  # Conclusion fields; valid values are listed in VALID_CONCLUSIONS.
  design = deferred(db.Column(db.String, nullable=False, default=""),
                    "Assessment")
  operationally = deferred(db.Column(db.String, nullable=False, default=""),
                           "Assessment")
  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')
  assessment_type = deferred(
      db.Column(db.String, nullable=False, server_default="Control"),
      "Assessment")

  @declared_attr
  def object_level_definitions(cls):  # pylint: disable=no-self-argument
    """Set up a backref so that we can create an object level custom
       attribute definition without the need to do a flush to get the
       assessment id.

      This is used in the relate_ca method in hooks/assessment.py.
    """
    return db.relationship(
        'CustomAttributeDefinition',
        primaryjoin=lambda: and_(
            remote(CustomAttributeDefinition.definition_id) == cls.id,
            remote(CustomAttributeDefinition.definition_type) ==
            "assessment"),
        foreign_keys=[
            CustomAttributeDefinition.definition_id,
            CustomAttributeDefinition.definition_type
        ],
        backref='assessment_definition',
        cascade='all, delete-orphan')

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = frozenset(
      ["Effective", "Ineffective", "Needs improvement", "Not Applicable"])

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'design',
      'operationally',
      'audit',
      'assessment_type',
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('object', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'design',
      'operationally',
      # Index assignee emails/names so assessments are searchable by people.
      MultipleSubpropertyFullTextAttr('related_assessors', 'assessors',
                                      ['email', 'name']),
      MultipleSubpropertyFullTextAttr('related_creators', 'creators',
                                      ['email', 'name']),
      MultipleSubpropertyFullTextAttr('related_verifiers', 'verifiers',
                                      ['email', 'name']),
  ]

  @classmethod
  def indexed_query(cls):
    """Eager-load full column groups for fulltext reindexing."""
    query = super(Assessment, cls).indexed_query()
    return query.options(
        orm.Load(cls).undefer_group("Assessment_complete", ),
        orm.Load(cls).joinedload("audit").undefer_group(
            "Audit_complete",
        ),
    )

  # Attributes whose changes are tracked (e.g. for auto status change).
  _tracked_attrs = {
      'description',
      'design',
      'notes',
      'operationally',
      'test_plan',
      'title',
      'start_date',
      'end_date'
  }

  # Import/export column aliases.
  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "assessment_type": {
          "display_name": "Assessment Type",
          "mandatory": False,
      },
      "design": "Conclusion: Design",
      "operationally": "Conclusion: Operation",
      "related_creators": {
          "display_name": "Creators",
          "mandatory": True,
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "related_assessors": {
          "display_name": "Assignees",
          "mandatory": True,
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "related_verifiers": {
          "display_name": "Verifiers",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
      },
      "test_plan": "Assessment Procedure",
      # Currently we decided to have 'Due Date' alias for start_date,
      # but it can be changed in future
      "start_date": "Due Date",
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      },
  }

  AUTO_REINDEX_RULES = [
      ReindexRule("RelationshipAttr", reindex_by_relationship_attr)
  ]

  similarity_options = {
      "relevant_types": {
          "Objective": {
              "weight": 2
          },
          "Control": {
              "weight": 2
          },
      },
      "threshold": 1,
  }

  @simple_property
  def archived(self):
    """Returns a boolean whether assessment is archived or not."""
    return self.audit.archived if self.audit else False

  @property
  def assessors(self):
    """Get the list of assessor assignees"""
    return self.assignees_by_type.get("Assessor", [])

  @property
  def creators(self):
    """Get the list of creator assignees"""
    return self.assignees_by_type.get("Creator", [])

  @property
  def verifiers(self):
    """Get the list of verifier assignees"""
    return self.assignees_by_type.get("Verifier", [])

  def validate_conclusion(self, value):
    """Return value if it is a valid conclusion, else empty string."""
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("status")
  def validate_status(self, key, value):
    """Restrict transitions out of the Rework Needed state."""
    value = super(Assessment, self).validate_status(key, value)
    # pylint: disable=unused-argument
    if self.status == value:
      return value
    if self.status == self.REWORK_NEEDED:
      valid_states = [self.DONE_STATE, self.FINAL_STATE]
      if value not in valid_states:
        raise ValueError("Assessment in `Rework Needed` "
                         "state can be only moved to: [{}]".format(
                             ",".join(valid_states)))
    return value

  @validates("operationally")
  def validate_opperationally(self, key, value):
    """Validate the "operationally" conclusion field."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate the "design" conclusion field."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("assessment_type")
  def validate_assessment_type(self, key, value):
    """Validate assessment type to be the same as existing model name"""
    # pylint: disable=unused-argument
    # pylint: disable=no-self-use
    from ggrc.snapshotter.rules import Types
    if value and value not in Types.all:
      raise ValueError(
          "Assessment type '{}' is not snapshotable".format(value))
    return value

  @classmethod
  def _ignore_filter(cls, _):
    """Filter stub for aliases that must not be filterable on import."""
    return None
class AccessControlRole(attributevalidator.AttributeValidator,
                        base.ContextRBAC, mixins.Base, db.Model):
  """Access Control Role Model holds all roles in the application.

  These roles can be added by the users.
  """
  __tablename__ = 'access_control_roles'

  name = db.Column(db.String, nullable=False)
  object_type = db.Column(db.String)
  tooltip = db.Column(db.String)

  # Permission flags granted to people holding this role.
  read = db.Column(db.Boolean, nullable=False, default=True)
  update = db.Column(db.Boolean, nullable=False, default=True)
  delete = db.Column(db.Boolean, nullable=False, default=True)
  # Whether objects with this role appear on the holder's "My Work" page.
  my_work = db.Column(db.Boolean, nullable=False, default=True)
  mandatory = db.Column(db.Boolean, nullable=False, default=False)
  non_editable = db.Column(db.Boolean, nullable=False, default=False)
  internal = db.Column(db.Boolean, nullable=False, default=False)
  default_to_current_user = db.Column(
      db.Boolean, nullable=False, default=False)
  notify_about_proposal = db.Column(db.Boolean, nullable=False, default=False)
  notify_about_review_status = db.Column(db.Boolean, nullable=False,
                                         default=False)

  access_control_list = db.relationship(
      'AccessControlList', backref='ac_role', cascade='all, delete-orphan')

  parent_id = db.Column(
      db.Integer,
      db.ForeignKey('access_control_roles.id', ondelete='CASCADE'),
      nullable=True,
  )
  parent = db.relationship(
      # pylint: disable=undefined-variable
      lambda: AccessControlRole,
      remote_side=lambda: AccessControlRole.id
  )

  # Class-level cache of reserved attribute names per object type;
  # presumably populated by _get_reserved_names - confirm in mixin/helpers.
  _reserved_names = {}

  @staticmethod
  def _extra_table_args(_):
    return (
        db.UniqueConstraint('name', 'object_type'),
    )

  @classmethod
  def eager_query(cls, **kwargs):
    """Define fields to be loaded eagerly to lower the count of DB
    queries."""
    return super(AccessControlRole, cls).eager_query(**kwargs)

  _api_attrs = reflection.ApiAttributes(
      "name",
      "object_type",
      "tooltip",
      "read",
      "update",
      "delete",
      "my_work",
      "mandatory",
      "default_to_current_user",
      reflection.Attribute("non_editable", create=False, update=False),
  )

  @sa.orm.validates("name", "object_type")
  def validates_name(self, key, value):  # pylint: disable=no-self-use
    """Validate Custom Role name uniqueness.

    Custom Role names need to follow 3 uniqueness rules:
      1) Names must not match any attribute name on any existing object.
      2) Object level CAD names must not match any global CAD name.
      3) Names should not contain special values (.validate_name_correct)

    This validator should check for name collisions for 1st and 2nd rule.

    This validator works, because object_type is never changed. It only
    gets set when the role is created and after that only the name field can
    change. This makes validation using both fields possible.

    Args:
      value: access control role name

    Returns:
      value if the name passes all uniqueness checks.
    """
    value = value.strip()
    if key == "name":
      validators.validate_name_correctness(value)
    # Run the collision checks only once both name and object_type are known.
    if key == "name" and self.object_type:
      name = value.lower()
      object_type = self.object_type
    elif key == "object_type" and self.name:
      name = self.name.strip().lower()
      object_type = value
    else:
      return value

    if name in self._get_reserved_names(object_type):
      raise ValueError(u"Attribute name '{}' is reserved for this object "
                       u"type.".format(name))
    if self._get_global_cad_names(object_type).get(name) is not None:
      raise ValueError(u"Global custom attribute '{}' "
                       u"already exists for this object type"
                       .format(name))
    return value
class Audit(Snapshotable, clonable.Clonable, PublicDocumentable,
            mixins.CustomAttributable, Personable, HasOwnContext, Relatable,
            WithLastDeprecatedDate, mixins.Timeboxed, mixins.WithContact,
            mixins.BusinessObject, mixins.Folderable, Indexed, db.Model):
  """Audit model."""

  __tablename__ = 'audits'
  # Audit slugs do not have to be unique.
  _slug_uniqueness = False

  VALID_STATES = (
      u'Planned', u'In Progress', u'Manager Review',
      u'Ready for External Review', u'Completed', u'Deprecated'
  )

  # Child object types that are copied along when an audit is cloned.
  CLONEABLE_CHILDREN = {"AssessmentTemplate"}

  report_start_date = deferred(db.Column(db.Date), 'Audit')
  report_end_date = deferred(db.Column(db.Date), 'Audit')
  audit_firm_id = deferred(
      db.Column(db.Integer, db.ForeignKey('org_groups.id')), 'Audit')
  audit_firm = db.relationship('OrgGroup', uselist=False)
  gdrive_evidence_folder = deferred(db.Column(db.String), 'Audit')
  program_id = deferred(
      db.Column(db.Integer, db.ForeignKey('programs.id'), nullable=False),
      'Audit')
  audit_objects = db.relationship(
      'AuditObject', backref='audit', cascade='all, delete-orphan')
  object_type = db.Column(
      db.String(length=250), nullable=False, default='Control')

  assessments = db.relationship('Assessment', backref='audit')
  issues = db.relationship('Issue', backref='audit')
  archived = deferred(db.Column(db.Boolean, nullable=False, default=False),
                      'Audit')

  _api_attrs = reflection.ApiAttributes(
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'gdrive_evidence_folder',
      'program',
      'object_type',
      'archived',
      reflection.Attribute('issue_tracker', create=False, update=False),
      reflection.Attribute('audit_objects', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'report_start_date',
      'report_end_date',
      'audit_firm',
      'gdrive_evidence_folder',
  ]

  @classmethod
  def indexed_query(cls):
    """Eager-load the full column group for fulltext reindexing."""
    return super(Audit, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Audit_complete",
        ),
    )

  _sanitize_html = [
      'gdrive_evidence_folder',
      'description',
  ]

  _include_links = []

  # Import/export column aliases.
  _aliases = {
      "program": {
          "display_name": "Program",
          "filter_by": "_filter_by_program",
          "mandatory": True,
      },
      "user_role:Auditor": {
          "display_name": "Auditors",
          "type": AttributeInfo.Type.USER_ROLE,
          "filter_by": "_filter_by_auditor",
      },
      "start_date": "Planned Start Date",
      "end_date": "Planned End Date",
      "report_start_date": "Planned Report Period from",
      "report_end_date": "Planned Report Period to",
      "contact": {
          "display_name": "Audit Captain",
          "mandatory": True,
      },
      "secondary_contact": None,
      "notes": None,
      "reference_url": None,
      "archived": {
          "display_name": "Archived",
          "mandatory": False
      },
      "status": {
          "display_name": "Status",
          "mandatory": True,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      }
  }

  @simple_property
  def issue_tracker(self):
    """Returns representation of issue tracker related info as a dict."""
    issue_obj = issuetracker_issue.IssuetrackerIssue.get_issue(
        'Audit', self.id)
    return issue_obj.to_dict() if issue_obj is not None else {}

  def _clone(self, source_object):
    """Clone audit and all relevant attributes.

    Keeps the internals of actual audit cloning and everything that is
    related to audit itself (auditors, audit firm, context setting,
    custom attribute values, etc.)
    """
    from ggrc_basic_permissions import create_audit_context

    data = {
        "title": source_object.generate_attribute("title"),
        "description": source_object.description,
        "audit_firm": source_object.audit_firm,
        "start_date": source_object.start_date,
        "end_date": source_object.end_date,
        "last_deprecated_date": source_object.last_deprecated_date,
        "program": source_object.program,
        # A cloned audit always starts in the first (Planned) state.
        "status": source_object.VALID_STATES[0],
        "report_start_date": source_object.report_start_date,
        "report_end_date": source_object.report_end_date,
        "contact": source_object.contact
    }

    self.update_attrs(data)
    # Flush so the clone has an id before building its context and auditors.
    db.session.flush()

    create_audit_context(self)
    self._clone_auditors(source_object)
    self.clone_custom_attribute_values(source_object)

  def _clone_auditors(self, audit):
    """Clone auditors of specified audit.

    Args:
      audit: Audit instance
    """
    from ggrc_basic_permissions.models import Role, UserRole

    role = Role.query.filter_by(name="Auditor").first()
    auditors = [ur.person for ur in UserRole.query.filter_by(
        role=role, context=audit.context).all()]

    for auditor in auditors:
      user_role = UserRole(
          context=self.context,
          person=auditor,
          role=role
      )
      db.session.add(user_role)
    db.session.flush()

  def clone(self, source_id, mapped_objects=None):
    """Clone audit with specified whitelisted children.

    Children that can be cloned should be specified in CLONEABLE_CHILDREN.

    Args:
      mapped_objects: A list of related objects that should also be copied
      and linked to a new audit.
    """
    if not mapped_objects:
      mapped_objects = []

    source_object = Audit.query.get(source_id)
    self._clone(source_object)

    if any(mapped_objects):
      related_children = source_object.related_objects(mapped_objects)

      for obj in related_children:
        obj.clone(self)

  @classmethod
  def _filter_by_program(cls, predicate):
    """Exists-filter matching the audit's program by slug or title."""
    return Program.query.filter(
        (Program.id == Audit.program_id) &
        (predicate(Program.slug) | predicate(Program.title))
    ).exists()

  @classmethod
  def _filter_by_auditor(cls, predicate):
    """Exists-filter matching Auditor user-role holders by name or email."""
    from ggrc_basic_permissions.models import Role, UserRole
    return UserRole.query.join(Role, Person).filter(
        (Role.name == "Auditor") &
        (UserRole.context_id == cls.context_id) &
        (predicate(Person.name) | predicate(Person.email))
    ).exists()

  @classmethod
  def eager_query(cls):
    """Eager-load related objects to lower the count of DB queries."""
    query = super(Audit, cls).eager_query()
    return query.options(
        orm.joinedload('program'),
        orm.subqueryload('object_people').joinedload('person'),
        orm.subqueryload('audit_objects'),
    )
class Assessment(Assignable, statusable.Statusable, AuditRelationship,
                 AutoStatusChangeable, TestPlanned, CustomAttributable,
                 WithEvidence, Commentable, Personable,
                 reminderable.Reminderable, Relatable,
                 LastDeprecatedTimeboxed, WithSimilarityScore, FinishedDate,
                 VerifiedDate, Notifiable, WithAction, labeled.Labeled,
                 with_last_comment.WithLastComment,
                 issue_tracker_mixins.IssueTrackedWithUrl,
                 base.ContextRBAC, BusinessObject, Indexed, db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not
  certain conditions were met for that object.
  """

  __tablename__ = 'assessments'
  _title_uniqueness = False

  REWORK_NEEDED = u"Rework Needed"
  NOT_DONE_STATES = statusable.Statusable.NOT_DONE_STATES | {
      REWORK_NEEDED,
  }
  VALID_STATES = tuple(NOT_DONE_STATES | statusable.Statusable.DONE_STATES |
                       statusable.Statusable.INACTIVE_STATES)

  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              statusable.Statusable.START_STATE: "Assignees",
              "In Progress": "Assignees"
          },
          "reminders": {"assessment_assignees_reminder", }
      }
  }

  design = deferred(db.Column(db.String, nullable=False, default=""),
                    "Assessment")
  operationally = deferred(db.Column(db.String, nullable=False, default=""),
                           "Assessment")
  audit_id = deferred(
      db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False),
      'Assessment')
  assessment_type = deferred(
      db.Column(db.String, nullable=False, server_default="Control"),
      "Assessment")
  # whether to use the object test plan on snapshot mapping
  test_plan_procedure = db.Column(db.Boolean, nullable=False, default=True)

  @declared_attr
  def object_level_definitions(cls):  # pylint: disable=no-self-argument
    """Set up a backref so that we can create an object level custom
       attribute definition without the need to do a flush to get the
       assessment id.

    This is used in the relate_ca method in hooks/assessment.py.
    """
    cad = custom_attribute_definition.CustomAttributeDefinition
    current_type = cls.__name__

    def join_expr():
      return sa.and_(
          orm.foreign(orm.remote(cad.definition_id)) == cls.id,
          cad.definition_type == utils.underscore_from_camelcase(
              current_type),
      )

    # Since there is some kind of generic relationship on CAD side, correct
    # join expression for backref should be provided. If default, every call
    # of "{}_definition".format(definition_type) on CAD will produce a lot of
    # unnecessary DB queries returning nothing.
    def backref_join_expr():
      return orm.remote(cls.id) == orm.foreign(cad.definition_id)

    return db.relationship(
        "CustomAttributeDefinition",
        primaryjoin=join_expr,
        backref=db.backref(
            "{}_definition".format(
                utils.underscore_from_camelcase(current_type)),
            lazy="joined",
            primaryjoin=backref_join_expr,
        ),
        cascade="all, delete-orphan",
    )

  object = {}  # we add this for the sake of client side error checking

  VALID_CONCLUSIONS = frozenset(
      ["Effective", "Ineffective", "Needs improvement", "Not Applicable"])

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'design',
      'operationally',
      'audit',
      'assessment_type',
      'test_plan_procedure',
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('folder', create=False, update=False),
      reflection.Attribute('object', create=False, update=False),
  )

  _fulltext_attrs = [
      'archived',
      'design',
      'operationally',
      'folder',
  ]

  AUTO_REINDEX_RULES = [
      mixin.ReindexRule("Audit", lambda x: x.assessments, ["archived"]),
  ]

  _custom_publish = {
      'audit': audit.build_audit_stub,
  }

  @classmethod
  def _populate_query(cls, query):
    """Attach eager-load options shared by eager queries."""
    return query.options(
        orm.Load(cls).undefer_group("Assessment_complete"),
        orm.Load(cls).joinedload("audit").undefer_group("Audit_complete"),
        orm.Load(cls).joinedload("audit").joinedload(
            audit.Audit.issuetracker_issue),
    )

  @classmethod
  def eager_query(cls, **kwargs):
    """Eager query with audit and its issue tracker info preloaded."""
    return cls._populate_query(
        super(Assessment, cls).eager_query(**kwargs))

  @classmethod
  def indexed_query(cls):
    """Query loading only the columns needed for full-text indexing."""
    return super(Assessment, cls).indexed_query().options(
        orm.Load(cls).load_only(
            "id",
            "design",
            "operationally",
            "audit_id",
        ),
        orm.Load(cls).joinedload("audit").load_only("archived", "folder"),
    )

  def log_json(self):
    """Add computed ``folder`` to the standard revision log json."""
    out_json = super(Assessment, self).log_json()
    out_json["folder"] = self.folder
    return out_json

  ASSESSMENT_TYPE_OPTIONS = ("AccessGroup", "AccountBalance", "Contract",
                             "Control", "DataAsset", "Facility", "Market",
                             "Objective", "OrgGroup", "Policy", "Process",
                             "Product", "Regulation", "Requirement",
                             "Standard", "System", "Vendor", "Risk",
                             "TechnologyEnvironment", "Threat", "Metric",
                             "ProductGroup", "KeyReport")
  _aliases = {
      "owners": None,
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "assessment_type": {
          "display_name": "Assessment Type",
          "mandatory": False,
          "description": "Options are:\n{}".format(
              '\n'.join(ASSESSMENT_TYPE_OPTIONS)),
      },
      "design": {
          "display_name": "Conclusion: Design",
          "description": "Allowed values are:\n{}".format(
              '\n'.join(VALID_CONCLUSIONS)),
      },
      "operationally": {
          "display_name": "Conclusion: Operation",
          "description": "Allowed values are:\n{}".format(
              '\n'.join(VALID_CONCLUSIONS)),
      },
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
          "description": "Allowed values are:\nyes\nno"
      },
      "test_plan": "Assessment Procedure",
      # Currently we decided to have 'Due Date' alias for start_date,
      # but it can be changed in future
      "start_date": "Due Date",
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      },
      "issue_tracker": {
          "display_name": "Ticket Tracker",
          "mandatory": False,
          "view_only": True,
      },
  }

  @simple_property
  def archived(self):
    """Returns a boolean whether assessment is archived or not."""
    return self.audit.archived if self.audit else False

  @simple_property
  def folder(self):
    """Folder inherited from the parent audit ("" when no audit is set)."""
    return self.audit.folder if self.audit else ""

  def validate_conclusion(self, value):
    """Return value if it is a valid conclusion, otherwise empty string."""
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("status")
  def validate_status(self, key, value):
    """Restrict transitions out of the `Rework Needed` state."""
    value = super(Assessment, self).validate_status(key, value)
    # pylint: disable=unused-argument
    if self.status == value:
      return value
    if self.status == self.REWORK_NEEDED:
      valid_states = [self.DONE_STATE, self.FINAL_STATE, self.DEPRECATED]
      if value not in valid_states:
        # skip_rework_validation is an escape hatch set by callers that must
        # bypass this check (e.g. bulk operations) -- TODO confirm callers
        if not getattr(self, "skip_rework_validation", False):
          raise ValueError("Assessment in `Rework Needed` "
                           "state can be only moved to: [{}]".format(
                               ",".join(valid_states)))
    return value

  @validates("operationally")
  def validate_opperationally(self, key, value):
    """Validate assessment operationally by validating conclusion"""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate assessment design by validating conclusion"""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("assessment_type")
  def validate_assessment_type(self, key, value):
    """Validate assessment type to be the same as existing model name"""
    # pylint: disable=unused-argument
    # pylint: disable=no-self-use
    from ggrc.snapshotter.rules import Types
    if value and value not in Types.all:
      raise ValueError(
          "Assessment type '{}' is not snapshotable".format(value))
    return value

  @classmethod
  def _ignore_filter(cls, _):
    """Filter stub for aliases that cannot be filtered on (returns None)."""
    return None
class Person(CustomAttributable, CustomAttributeMapable, HasOwnContext,
             Relatable, Base, Indexed, db.Model):
  """Model of a user/person known to the system."""

  __tablename__ = 'people'

  email = deferred(db.Column(db.String, nullable=False), 'Person')
  name = deferred(db.Column(db.String), 'Person')
  language_id = deferred(db.Column(db.Integer), 'Person')
  company = deferred(db.Column(db.String), 'Person')

  object_people = db.relationship(
      'ObjectPerson', backref='person', cascade='all, delete-orphan')
  access_control_list = db.relationship(
      'AccessControlList', backref='person', cascade='all, delete-orphan')
  language = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Person.language_id) == Option.id, '
                  'Option.role == "person_language")',
      uselist=False,
  )

  @staticmethod
  def _extra_table_args(cls):
    return (
        db.Index('ix_people_name_email', 'name', 'email'),
        db.Index('uq_people_email', 'email', unique=True),
    )

  _fulltext_attrs = [
      'company',
      'email',
      'name',
  ]
  _api_attrs = reflection.ApiAttributes(
      'company',
      'email',
      'language',
      'name',
      reflection.Attribute('object_people', create=False, update=False),
      reflection.Attribute('system_wide_role', create=False, update=False),
  )
  _sanitize_html = [
      'company',
      'name',
  ]
  _include_links = []
  _aliases = {
      "name": "Name",
      "email": {
          "display_name": "Email",
          "unique": True,
      },
      "company": "Company",
      "user_role": {
          "display_name": "Role",
          "type": "user_role",
          "filter_by": "_filter_by_user_role",
      },
  }

  @classmethod
  def _filter_by_user_role(cls, predicate):
    """EXISTS filter matching people by their system-wide role name."""
    from ggrc_basic_permissions.models import Role, UserRole
    return UserRole.query.join(Role).filter(
        (UserRole.person_id == cls.id) &
        (UserRole.context_id == None) &  # noqa
        predicate(Role.name)
    ).exists()

  # Methods required by Flask-Login
  # pylint: disable=no-self-use
  def is_authenticated(self):
    return self.system_wide_role != 'No Access'

  @property
  def user_name(self):
    """Local part of the email address (before the '@')."""
    return self.email.split("@")[0]

  def is_active(self):
    # pylint: disable=no-self-use
    return True  # self.active

  def is_anonymous(self):
    # pylint: disable=no-self-use
    return False

  def get_id(self):
    return unicode(self.id)  # noqa

  @validates('language')
  def validate_person_options(self, key, option):
    return validate_option(self.__class__.__name__, key, option,
                           'person_language')

  @validates('email')
  def validate_email(self, key, email):
    """Reject emails that do not pass is_valid_email."""
    if not Person.is_valid_email(email):
      message = "Must provide a valid email address"
      raise ValidationError(message)
    return email

  @staticmethod
  def is_valid_email(val):
    """Return a truthy match object when val looks like an email address."""
    # Borrowed from Django
    # literal form, ipv4 address (SMTP 4.1.3)
    email_re = re.compile(
        '^[-!#$%&\'*+\\.\/0-9=?A-Z^_`{|}~]+@([-0-9A-Z]+\.)+([0-9A-Z]){2,4}$',
        re.IGNORECASE)
    return email_re.match(val) if val else False

  @classmethod
  def eager_query(cls):
    """Eager-load language and object_people on top of the base query.

    NOTE(review): unlike what an older comment here claimed, this does call
    the parent eager_query and only adds options to it.
    """
    from sqlalchemy import orm
    return super(Person, cls).eager_query().options(
        orm.joinedload('language'),
        orm.subqueryload('object_people'),
    )

  @classmethod
  def indexed_query(cls):
    from sqlalchemy import orm
    return super(Person, cls).indexed_query().options(
        orm.Load(cls).undefer_group("Person_complete", ),
    )

  def _display_name(self):
    return self.email

  @builder.simple_property
  def system_wide_role(self):
    """For choosing the role string to show to the user; of all the roles in
    the system-wide context, it shows the highest ranked one (if there are
    multiple) or "No Access" if there are none.
    """
    # FIXME: This method should be in `ggrc_basic_permissions`, since it
    # depends on `Role` and `UserRole` objects
    if self.email in getattr(settings, "BOOTSTRAP_ADMIN_USERS", []):
      return u"Superuser"
    role_hierarchy = {
        u'Administrator': 0,
        u'Editor': 1,
        u'Reader': 2,
        u'Creator': 3,
    }
    unique_roles = set([
        user_role.role.name
        for user_role in self.user_roles
        if user_role.role.name in role_hierarchy
    ])
    if len(unique_roles) == 0:
      return u"No Access"
    else:
      # -1 as default to make items not in this list appear on top
      # and thus shown to the user
      sorted_roles = sorted(unique_roles,
                            key=lambda x: role_hierarchy.get(x, -1))
      return sorted_roles[0]
class AssessmentTemplate(assessment.AuditRelationship, relationship.Relatable,
                         mixins.Titled, mixins.CustomAttributable, Roleable,
                         mixins.Slugged, mixins.Stateful,
                         clonable.MultiClonable, Indexed, db.Model):
  """A class representing the assessment template entity.

  An Assessment Template is a template that allows users for easier creation
  of multiple Assessments that are somewhat similar to each other, avoiding
  the need to repeatedly define the same set of properties for every new
  Assessment object.
  """
  __tablename__ = "assessment_templates"
  _mandatory_default_people = ("assignees", )

  PER_OBJECT_CUSTOM_ATTRIBUTABLE = True

  RELATED_TYPE = 'assessment'

  # the type of the object under assessment
  template_object_type = db.Column(db.String, nullable=True)

  # whether to use the control test plan as a procedure
  test_plan_procedure = db.Column(db.Boolean, nullable=False, default=False)

  # procedure description
  procedure_description = db.Column(db.Text, nullable=False, default=u"")

  # the people that should be assigned by default to each assessment created
  # within the related audit
  default_people = db.Column(JsonType, nullable=False)

  # parent audit
  audit_id = db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=True)

  # labels to show to the user in the UI for various default people values
  DEFAULT_PEOPLE_LABELS = {
      "Admin": "Object Admins",
      "Audit Lead": "Audit Captain",
      "Auditors": "Auditors",
      "Principal Assignees": "Principal Assignees",
      "Secondary Assignees": "Secondary Assignees",
      "Primary Contacts": "Primary Contacts",
      "Secondary Contacts": "Secondary Contacts",
  }

  _title_uniqueness = False

  DRAFT = 'Draft'
  ACTIVE = 'Active'
  DEPRECATED = 'Deprecated'

  VALID_STATES = (
      DRAFT,
      ACTIVE,
      DEPRECATED,
  )

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'template_object_type',
      'test_plan_procedure',
      'procedure_description',
      'default_people',
      'audit',
      reflection.Attribute('issue_tracker', create=False, update=False),
      reflection.Attribute('archived', create=False, update=False),
      reflection.Attribute('DEFAULT_PEOPLE_LABELS', create=False,
                           update=False),
  )

  _fulltext_attrs = ["archived"]

  _custom_publish = {
      'audit': audit.build_audit_stub,
  }

  _aliases = {
      "status": {
          "display_name": "State",
          "mandatory": False,
          "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
      },
      "default_assignees": {
          "display_name": "Default Assignees",
          "mandatory": True,
          "filter_by": "_nop_filter",
      },
      "default_verifier": {
          "display_name": "Default Verifiers",
          "mandatory": False,
          "filter_by": "_nop_filter",
      },
      "default_test_plan": {
          "display_name": "Default Test Plan",
          "filter_by": "_nop_filter",
      },
      "test_plan_procedure": {
          "display_name": "Use Control Assessment Procedure",
          "mandatory": False,
      },
      "template_object_type": {
          "display_name": "Object Under Assessment",
          "mandatory": True,
      },
      "archived": {
          "display_name": "Archived",
          "mandatory": False,
          "ignore_on_update": True,
          "view_only": True,
      },
      "template_custom_attributes": {
          "display_name": "Custom Attributes",
          "type": AttributeInfo.Type.SPECIAL_MAPPING,
          "filter_by": "_nop_filter",
          "description": (
              "List of custom attributes for the assessment template\n"
              "One attribute per line. fields are separated by commas ','\n\n"
              "<attribute type>, <attribute name>, [<attribute value1>, "
              "<attribute value2>, ...]\n\n"
              "Valid attribute types: Text, Rich Text, Date, Checkbox, Person,"
              "Dropdown.\n"
              "attribute name: Any single line string without commas. Leading "
              "and trailing spaces are ignored.\n"
              "list of attribute values: Comma separated list, only used if "
              "attribute type is 'Dropdown'. Prepend '(a)' if the value has a "
              "mandatory attachment and/or (c) if the value requires a "
              "mandatory comment.\n\n"
              "Limitations: Dropdown values can not start with either '(a)' or"
              "'(c)' and attribute names can not contain commas ','."),
      },
  }

  @classmethod
  def eager_query(cls):
    """Eager-load the parent audit in full."""
    query = super(AssessmentTemplate, cls).eager_query()
    return query.options(
        orm.Load(cls).joinedload("audit").undefer_group("Audit_complete"))

  @classmethod
  def indexed_query(cls):
    """Same audit preload for the full-text indexing query."""
    query = super(AssessmentTemplate, cls).indexed_query()
    return query.options(
        orm.Load(cls).joinedload("audit").undefer_group("Audit_complete"))

  @classmethod
  def _nop_filter(cls, _):
    """No operation filter.

    This is used for objects for which we can not implement a normal sql
    query filter. Example is default_verifier field that is a json string in
    the db and we can not create direct queries on json fields.
    """
    return None

  @classmethod
  def generate_slug_prefix(cls):
    return "TEMPLATE"

  def _clone(self, target=None):
    """Clone Assessment Template.

    Args:
      target: Destination Audit object.

    Returns:
      Instance of assessment template copy.
    """
    data = {
        "title": self.title,
        "audit": target,
        "template_object_type": self.template_object_type,
        "test_plan_procedure": self.test_plan_procedure,
        "procedure_description": self.procedure_description,
        "default_people": self.default_people,
        "modified_by": login.get_current_user(),
    }
    assessment_template_copy = AssessmentTemplate(**data)
    db.session.add(assessment_template_copy)
    return assessment_template_copy

  def clone(self, target):
    """Clone Assessment Template and related custom attributes."""
    assessment_template_copy = self._clone(target)
    rel = relationship.Relationship(
        source=target,
        destination=assessment_template_copy
    )
    db.session.add(rel)
    db.session.flush()

    for cad in self.custom_attribute_definitions:
      # pylint: disable=protected-access
      cad._clone(assessment_template_copy)

    return (assessment_template_copy, rel)

  @validates('default_people')
  def validate_default_people(self, key, value):
    """Check that default people lists are not empty.

    Check if the default_people contains both assignees and verifiers. The
    values of those fields must be truthy, and if the value is a string it
    must be a valid default people label. If the value is not a string, it
    should be a list of valid user ids, but that is too expensive to test in
    this validator.
    """
    # pylint: disable=unused-argument
    for mandatory in self._mandatory_default_people:
      mandatory_value = value.get(mandatory)
      if (not mandatory_value or
              isinstance(mandatory_value, list) and
              any(not isinstance(p_id, (int, long))
                  for p_id in mandatory_value) or
              isinstance(mandatory_value, basestring) and
              mandatory_value not in self.DEFAULT_PEOPLE_LABELS):
        raise ValidationError(
            'Invalid value for default_people.{field}. Expected a people '
            'label in string or a list of int people ids, received {value}.'
            .format(field=mandatory, value=mandatory_value),
        )
    return value

  @simple_property
  def archived(self):
    """Fetch the archived boolean from Audit"""
    if hasattr(self, 'context') and hasattr(self.context, 'related_object'):
      return getattr(self.context.related_object, 'archived', False)
    return False

  @simple_property
  def issue_tracker(self):
    """Returns representation of issue tracker related info as a dict."""
    issue_obj = issuetracker_issue.IssuetrackerIssue.get_issue(
        'AssessmentTemplate', self.id)
    return issue_obj.to_dict() if issue_obj is not None else {}
class CycleTaskGroup(mixins.WithContact,
                     wf_mixins.CycleTaskGroupRelatedStatusValidatedMixin,
                     mixins.Slugged, mixins.Timeboxed, mixins.Described,
                     mixins.Titled, mixins.Base, index_mixin.Indexed,
                     db.Model):
  """Cycle Task Group model.
  """
  __tablename__ = 'cycle_task_groups'
  _title_uniqueness = False

  @classmethod
  def generate_slug_prefix_for(cls, obj):  # pylint: disable=unused-argument
    return "CYCLEGROUP"

  cycle_id = db.Column(
      db.Integer,
      db.ForeignKey('cycles.id', ondelete="CASCADE"),
      nullable=False,
  )
  task_group_id = db.Column(
      db.Integer, db.ForeignKey('task_groups.id'), nullable=True)
  cycle_task_group_tasks = db.relationship(
      'CycleTaskGroupObjectTask',
      backref='cycle_task_group',
      cascade='all, delete-orphan'
  )
  sort_index = db.Column(db.String(length=250), default="", nullable=False)
  next_due_date = db.Column(db.Date)

  _api_attrs = reflection.ApiAttributes(
      'cycle',
      'task_group',
      'cycle_task_group_tasks',
      'sort_index',
      'next_due_date'
  )

  _aliases = {
      "cycle": {
          "display_name": "Cycle",
          "filter_by": "_filter_by_cycle",
      },
  }

  PROPERTY_TEMPLATE = u"group {}"

  _fulltext_attrs = [
      attributes.MultipleSubpropertyFullTextAttr(
          "task title", 'cycle_task_group_tasks', ["title"], False),
      attributes.MultipleSubpropertyFullTextAttr(
          "task assignee",
          lambda instance: [t.contact
                            for t in instance.cycle_task_group_tasks],
          ["name", "email"],
          False),
      attributes.DateMultipleSubpropertyFullTextAttr(
          "task due date", "cycle_task_group_tasks", ["end_date"], False),
      attributes.DateFullTextAttr("due date", 'next_due_date',),
      attributes.FullTextAttr("assignee", "contact", ['name', 'email']),
      attributes.FullTextAttr("cycle title", 'cycle', ['title'], False),
      attributes.FullTextAttr("cycle assignee",
                              lambda x: x.cycle.contact,
                              ['email', 'name'], False),
      attributes.DateFullTextAttr("cycle due date",
                                  lambda x: x.cycle.next_due_date,
                                  with_template=False),
      attributes.MultipleSubpropertyFullTextAttr(
          "task comments",
          lambda instance: itertools.chain(*[
              t.cycle_task_entries
              for t in instance.cycle_task_group_tasks
          ]),
          ["description"],
          False),
  ]

  AUTO_REINDEX_RULES = [
      index_mixin.ReindexRule("CycleTaskGroupObjectTask",
                              lambda x: x.cycle_task_group),
      index_mixin.ReindexRule("Person", _query_filtered_by_contact),
      index_mixin.ReindexRule(
          "Person",
          lambda x: [i.cycle for i in _query_filtered_by_contact(x)]),
  ]

  @classmethod
  def _filter_by_cycle(cls, predicate):
    """Get query that filters cycle task groups.

    Args:
      predicate: lambda function that accepts a single parameter and returns
        true or false.

    Returns:
      An sqlalchemy query that evaluates to true or false and can be used in
      filtering cycle task groups by related cycle.
    """
    return Cycle.query.filter(
        (Cycle.id == cls.cycle_id) &
        (predicate(Cycle.slug) | predicate(Cycle.title))
    ).exists()

  @classmethod
  def indexed_query(cls):
    """Load only the fields the full-text index actually reads."""
    return super(CycleTaskGroup, cls).indexed_query().options(
        orm.Load(cls).load_only("next_due_date", ),
        orm.Load(cls).subqueryload("cycle_task_group_tasks").load_only(
            "id", "title", "end_date"),
        orm.Load(cls).joinedload("cycle").load_only(
            "id", "title", "next_due_date"),
        orm.Load(cls).subqueryload("cycle_task_group_tasks").joinedload(
            "contact").load_only("email", "name", "id"),
        orm.Load(cls).subqueryload("cycle_task_group_tasks").joinedload(
            "cycle_task_entries").load_only("description", "id"),
        orm.Load(cls).joinedload("cycle").joinedload("contact").load_only(
            "email", "name", "id"),
        orm.Load(cls).joinedload("contact").load_only(
            "email", "name", "id"),
    )

  @classmethod
  def eager_query(cls):
    """Add cycle tasks and objects to cycle task group eager query.

    Make sure we load all cycle task group relevant data in a single query.

    Returns:
      a query object with cycle_task_group_tasks added to joined load
      options.
    """
    query = super(CycleTaskGroup, cls).eager_query()
    return query.options(orm.joinedload('cycle_task_group_tasks'))
class WorkflowState(object):
  """Object state mixin.

  This is a mixin for adding workflow_state to all objects that can be
  mapped to workflow tasks.
  """

  _api_attrs = reflection.ApiAttributes(
      reflection.Attribute('workflow_state', create=False, update=False))

  OVERDUE = "Overdue"
  VERIFIED = "Verified"
  FINISHED = "Finished"
  ASSIGNED = "Assigned"
  IN_PROGRESS = "In Progress"
  UNKNOWN_STATE = None

  @classmethod
  def _get_state(cls, statusable_childs):
    """Aggregate the states of a group of statusable children.

    Selection rules, first match wins:
      - all children share one of Verified / Finished / Assigned -> that state
      - children are a mix of only Finished and Verified -> Finished
      - any other non-empty combination -> In Progress
      - no children at all -> None

    Overdue is deliberately not handled here; callers must check for overdue
    children before delegating to this helper.

    Args:
      statusable_childs: iterable of objects with a ``status`` attribute and
        an ``ASSIGNED`` fallback constant (used when status is falsy).

    Returns:
      The aggregated state string, or None when there are no children.
    """
    observed = set()
    for child in statusable_childs:
      observed.add(child.status or child.ASSIGNED)

    if not observed:
      return cls.UNKNOWN_STATE

    # A single uniform state is reported as-is.
    for uniform in (cls.VERIFIED, cls.FINISHED, cls.ASSIGNED):
      if observed == {uniform}:
        return uniform

    # Every child is at least finished.
    if observed == {cls.FINISHED, cls.VERIFIED}:
      return cls.FINISHED

    return cls.IN_PROGRESS

  @classmethod
  def get_object_state(cls, objs):
    """Get the lowest state among tasks mapped to one object.

    States are considered in order: Overdue, In Progress, Finished,
    Assigned, Verified. Tasks from non-current cycles are ignored.

    Args:
      objs: cycle group object tasks, all mapped to the same object.

    Returns:
      Name of the lowest state of all active cycle tasks that relate to the
      given object.
    """
    active_tasks = []
    for task in objs:
      if task.cycle.is_current:
        if task.is_overdue:
          return cls.OVERDUE
        active_tasks.append(task)
    return cls._get_state(active_tasks)

  @classmethod
  def get_workflow_state(cls, cycles):
    """Get the lowest state among the cycles of a single workflow.

    States are considered in order: Overdue, In Progress, Finished,
    Assigned, Verified. Non-current cycles are ignored.

    Args:
      cycles: list of cycles belonging to a single workflow.

    Returns:
      Name of the lowest workflow state, if there are any active cycles.
      Otherwise it returns None.
    """
    active_cycles = []
    for one_cycle in cycles:
      if not one_cycle.is_current:
        continue
      has_overdue = any(
          task.is_overdue
          for task in one_cycle.cycle_task_group_object_tasks
      )
      if has_overdue:
        return cls.OVERDUE
      active_cycles.append(one_cycle)
    return cls._get_state(active_cycles)

  @builder.simple_property
  def workflow_state(self):
    """Aggregated state of the tasks mapped to this object."""
    return WorkflowState.get_object_state(
        self.cycle_task_group_object_tasks)
class AccessControlList(base.ContextRBAC, mixins.Base, db.Model):
  """Access Control List

  Model is a mapping between a role and an object. It creates a base for
  permissions of the role for mapping a person to this permission.
  """
  __tablename__ = 'access_control_list'
  _api_attrs = reflection.ApiAttributes("ac_role_id")

  ac_role_id = db.Column(db.Integer,
                         db.ForeignKey('access_control_roles.id'),
                         nullable=False)
  object_id = db.Column(db.Integer, nullable=False)
  object_type = db.Column(db.String, nullable=False)

  # Base id always points to the top most parent of the acl propagation chain
  # or to itself if there are no parents. This field is used to optimize
  # permission queries by making sure a single extra join is needed to get to
  # the base ACL entry (one without parents) to which access control people
  # are mapped.
  base_id = db.Column(
      db.Integer,
      db.ForeignKey('access_control_list.id', ondelete='CASCADE'),
      nullable=True,
  )

  # This field is a copy of parent_id but set to not nullable, so it can be
  # used in a unique constraint. Uniqueness check will always pass if there
  # is a NULL in the set.
  parent_id_nn = db.Column(
      db.Integer,
      nullable=False,
      default=0,
  )

  # Parent id field is just to keep the information about the entire chain of
  # acl propagation. This field is only needed for acl deletion. So unmapping
  # will remove the entire subtree of propagated acl entries.
  parent_id = db.Column(
      db.Integer,
      db.ForeignKey('access_control_list.id', ondelete='CASCADE'),
      nullable=True,
  )

  parent = db.relationship(
      lambda: AccessControlList,  # pylint: disable=undefined-variable
      foreign_keys=lambda: AccessControlList.parent_id,
      remote_side=lambda: AccessControlList.id,
  )

  access_control_people = db.relationship(
      'AccessControlPerson',
      foreign_keys='AccessControlPerson.ac_list_id',
      backref='ac_list',
      lazy='subquery',
      cascade='all, delete-orphan',
  )

  @property
  def object_attr(self):
    """Name of the polymorphic relationship attr, e.g. "Audit_object"."""
    return '{0}_object'.format(self.object_type)

  @property
  def object(self):
    """The object this ACL entry is attached to."""
    return getattr(self, self.object_attr)

  @object.setter
  def object(self, value):
    """Set the related object and keep object_id/object_type in sync."""
    self.object_id = getattr(value, 'id', None)
    self.object_type = getattr(value, 'type', None)
    return setattr(self, self.object_attr, value)

  @staticmethod
  def _extra_table_args(_):
    return (
        db.UniqueConstraint(
            'ac_role_id',
            'object_id',
            'object_type',
            'parent_id_nn',
        ),
        db.Index('idx_object_type_object_idx', 'object_type', 'object_id'),
        db.Index('ix_role_object', 'ac_role_id', 'object_type', 'object_id'),
        db.Index(
            'idx_object_type_object_id_parent_id_nn',
            'object_type',
            'object_id',
            'parent_id_nn',
        ),
    )

  def _remove_people(self, obsolete_people):
    """Remove people from the current acl.

    NOTE: raises KeyError if any person in obsolete_people is not already
    on this acl; public wrappers pre-filter with an intersection.
    """
    if not obsolete_people:
      return
    people_acp_map = {
        acp.person: acp for acp in self.access_control_people
    }
    for person in obsolete_people:
      self.access_control_people.remove(people_acp_map[person])

  def _add_people(self, additional_people):
    """Add people to the current acl."""
    # AccessControlPerson links itself to this acl via the ac_list backref,
    # so the constructed object does not need to be stored explicitly.
    for person in additional_people:
      people.AccessControlPerson(ac_list=self, person=person)

  def add_person(self, additional_person):
    """Add a single person to current ACL entry.

    Args:
      additional_person: new person model that will be added.
    """
    self.add_people({additional_person})

  def add_people(self, additional_people):
    """Ensure that people are linked to the current ACL entry.

    Args:
      additional_people: set of people objects that will be added.
    """
    existing_people = {acp.person for acp in self.access_control_people}
    self._add_people(additional_people - existing_people)

  def remove_person(self, obsolete_person):
    """Remove a single person from the current ACL entry."""
    self.remove_people({obsolete_person})

  def remove_people(self, obsolete_people):
    """Remove the given people from the current ACL entry.

    Args:
      obsolete_people: set of people models that will be removed.
    """
    existing_people = {acp.person for acp in self.access_control_people}
    self._remove_people(obsolete_people & existing_people)

  def update_people(self, new_people):
    """Update the list of current acl people to match new_people.

    Args:
      new_people: set of people objects. Any existing person missing from
        that set will be removed. Any new people will be added.
    """
    existing_people = {acp.person for acp in self.access_control_people}
    self._remove_people(existing_people - new_people)
    self._add_people(new_people - existing_people)
class Control(WithLastAssessmentDate, review.Reviewable, Roleable, Relatable,
              mixins.CustomAttributable, Personable, ControlCategorized,
              PublicDocumentable, AssertionCategorized,
              mixins.LastDeprecatedTimeboxed, mixins.TestPlanned,
              Commentable, WithSimilarityScore, base.ContextRBAC,
              mixins.BusinessObject, Indexed, mixins.Folderable,
              proposal.Proposalable, db.Model):
  """Control model definition."""
  __tablename__ = 'controls'

  company_control = deferred(db.Column(db.Boolean), 'Control')
  directive_id = deferred(
      db.Column(db.Integer, db.ForeignKey('directives.id')), 'Control')
  kind_id = deferred(db.Column(db.Integer), 'Control')
  means_id = deferred(db.Column(db.Integer), 'Control')
  version = deferred(db.Column(db.String), 'Control')
  verify_frequency_id = deferred(db.Column(db.Integer), 'Control')
  fraud_related = deferred(db.Column(db.Boolean), 'Control')
  key_control = deferred(db.Column(db.Boolean), 'Control')
  active = deferred(db.Column(db.Boolean), 'Control')

  # kind/means/verify_frequency are Option rows discriminated by Option.role
  kind = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.kind_id) == Option.id, '
                  'Option.role == "control_kind")',
      uselist=False)
  means = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.means_id) == Option.id, '
                  'Option.role == "control_means")',
      uselist=False)
  verify_frequency = db.relationship(
      'Option',
      primaryjoin='and_(foreign(Control.verify_frequency_id) == Option.id, '
                  'Option.role == "verify_frequency")',
      uselist=False)

  # REST properties
  _api_attrs = reflection.ApiAttributes(
      'active',
      'company_control',
      'directive',
      'fraud_related',
      'key_control',
      'kind',
      'means',
      'verify_frequency',
      'version',
  )

  _fulltext_attrs = [
      'active',
      'company_control',
      'directive',
      attributes.BooleanFullTextAttr(
          'fraud_related',
          'fraud_related',
          true_value="yes", false_value="no"),
      attributes.BooleanFullTextAttr(
          'key_control',
          'key_control',
          true_value="key", false_value="non-key"),
      'kind',
      'means',
      'verify_frequency',
      'version',
  ]

  _sanitize_html = [
      'version',
  ]

  VALID_RECIPIENTS = frozenset([
      "Assignees",
      "Creators",
      "Verifiers",
      "Admin",
      "Control Operators",
      "Control Owners",
      "Other Contacts",
  ])

  @classmethod
  def indexed_query(cls):
    """Preload everything the full-text indexer reads for a control."""
    return super(Control, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Control_complete"
        ),
        orm.Load(cls).joinedload(
            "directive"
        ).undefer_group(
            "Directive_complete"
        ),
        orm.Load(cls).joinedload(
            'kind',
        ).load_only(
            "title"
        ),
        orm.Load(cls).joinedload(
            'means',
        ).load_only(
            "title"
        ),
        orm.Load(cls).joinedload(
            'verify_frequency',
        ).load_only(
            "title"
        ),
    )

  _include_links = []

  _aliases = {
      "kind": "Kind/Nature",
      "means": "Type/Means",
      "verify_frequency": "Frequency",
      "fraud_related": "Fraud Related",
      "key_control": {
          "display_name": "Significance",
          "description": "Allowed values are:\nkey\nnon-key\n---",
      },
      "test_plan": "Assessment Procedure",
  }

  @validates('kind', 'means', 'verify_frequency')
  def validate_control_options(self, key, option):
    """Validate control 'kind', 'means', 'verify_frequency'"""
    # verify_frequency role matches the key itself; the other two options
    # are stored under a "control_"-prefixed role
    desired_role = key if key == 'verify_frequency' else 'control_' + key
    return validate_option(self.__class__.__name__, key, option,
                           desired_role)

  @classmethod
  def eager_query(cls):
    """Eager-load directive and option relations in a single query."""
    query = super(Control, cls).eager_query()
    return cls.eager_inclusions(query, Control._include_links).options(
        orm.joinedload('directive'),
        orm.joinedload('kind'),
        orm.joinedload('means'),
        orm.joinedload('verify_frequency'),
    )

  def log_json(self):
    out_json = super(Control, self).log_json()
    # so that event log can refer to deleted directive
    if self.directive:
      out_json["mapped_directive"] = self.directive.display_name
    return out_json
class Proposal(mixins.person_relation_factory("applied_by"),
               mixins.person_relation_factory("declined_by"),
               mixins.person_relation_factory("proposed_by"),
               comment.CommentInitiator,
               mixins.Stateful,
               roleable.Roleable,
               relationship.Relatable,
               base.ContextRBAC,
               mixins.Base,
               ft_mixin.Indexed,
               db.Model):
  """Proposal model.

  Collect all information about propose change to Proposable instances.
  """

  __tablename__ = 'proposals'

  # pylint: disable=too-few-public-methods
  class STATES(object):
    """All states for proposals."""
    PROPOSED = "proposed"
    APPLIED = "applied"
    DECLINED = "declined"

  class CommentTemplatesTextBuilder(object):
    """Templates for comments for proposals."""
    PROPOSED_WITH_AGENDA = ("<p>Proposal has been created with comment: "
                            "{text}</p>")
    APPLIED_WITH_COMMENT = ("<p>Proposal created by {user} has been applied "
                            "with a comment: {text}</p>")
    DECLINED_WITH_COMMENT = ("<p>Proposal created by {user} has been declined "
                             "with a comment: {text}</p>")

    PROPOSED_WITHOUT_AGENDA = "<p>Proposal has been created.</p>"
    APPLIED_WITHOUT_COMMENT = ("<p>Proposal created by {user} "
                               "has been applied.</p>")
    DECLINED_WITHOUT_COMMENT = ("<p>Proposal created by {user} "
                                "has been declined.</p>")
  # pylint: enable=too-few-public-methods

  def build_comment_text(self, reason, text, proposed_by):
    """Build proposal comment dependable from proposal state.

    Args:
      reason: one of the STATES values the comment is generated for.
      text: optional user-supplied comment/agenda text.
      proposed_by: Person that created the proposal; its email is
          interpolated into the comment.

    Returns:
      The formatted HTML comment string.

    Raises:
      ValueError: if ``reason`` is not a known proposal state.
    """
    if reason == self.STATES.PROPOSED:
      with_tmpl = self.CommentTemplatesTextBuilder.PROPOSED_WITH_AGENDA
      without_tmpl = self.CommentTemplatesTextBuilder.PROPOSED_WITHOUT_AGENDA
    elif reason == self.STATES.APPLIED:
      with_tmpl = self.CommentTemplatesTextBuilder.APPLIED_WITH_COMMENT
      without_tmpl = self.CommentTemplatesTextBuilder.APPLIED_WITHOUT_COMMENT
    elif reason == self.STATES.DECLINED:
      with_tmpl = self.CommentTemplatesTextBuilder.DECLINED_WITH_COMMENT
      without_tmpl = self.CommentTemplatesTextBuilder.DECLINED_WITHOUT_COMMENT
    else:
      # An unknown reason previously fell through and raised a confusing
      # UnboundLocalError on `with_tmpl`; fail fast and explicitly instead.
      raise ValueError("Unknown proposal state: {!r}".format(reason))
    tmpl = with_tmpl if text else without_tmpl
    return tmpl.format(user=proposed_by.email, text=text)

  VALID_STATES = [STATES.PROPOSED, STATES.APPLIED, STATES.DECLINED]

  instance_id = db.Column(db.Integer, nullable=False)
  instance_type = db.Column(db.String, nullable=False)
  content = db.Column('content', types.LongJsonType, nullable=False)
  agenda = db.Column(db.Text, nullable=False, default=u"")
  decline_reason = db.Column(db.Text, nullable=False, default=u"")
  decline_datetime = db.Column(db.DateTime, nullable=True)
  apply_reason = db.Column(db.Text, nullable=False, default=u"")
  apply_datetime = db.Column(db.DateTime, nullable=True)
  proposed_notified_datetime = db.Column(db.DateTime, nullable=True)

  # Template used to resolve the polymorphic relationship attribute name.
  INSTANCE_TMPL = "{}_proposalable"

  instance = ProposalablePolymorphicRelationship("instance_id",
                                                 "instance_type",
                                                 INSTANCE_TMPL)

  _fulltext_attrs = [
      "instance_id",
      "instance_type",
      "agenda",
      "decline_reason",
      "decline_datetime",
      "apply_reason",
      "apply_datetime",
  ]

  _api_attrs = reflection.ApiAttributes(
      reflection.Attribute("instance", update=False),
      reflection.Attribute("content", create=False, update=False),
      reflection.Attribute("agenda", update=False),
      # ignore create proposal in specific state to be sure
      # new proposal will be only in proposed state
      reflection.Attribute('status', create=False),
      reflection.Attribute('decline_reason', create=False),
      reflection.Attribute('decline_datetime', create=False, update=False),
      reflection.Attribute('declined_by', create=False, update=False),
      reflection.Attribute('apply_reason', create=False),
      reflection.Attribute('apply_datetime', create=False, update=False),
      reflection.Attribute('applied_by', create=False, update=False),
      reflection.Attribute('full_instance_content',
                           create=True, update=False, read=False),
      reflection.Attribute('proposed_by', create=False, update=False),
  )

  full_instance_content = FullInstanceContentFased()

  @staticmethod
  def _extra_table_args(_):
    """Indexes for instance lookup and notification-date range scans."""
    return (db.Index("fk_instance", "instance_id", "instance_type"),
            db.Index("ix_decline_datetime", "decline_datetime"),
            db.Index("ix_apply_datetime", "apply_datetime"),
            db.Index("ix_proposed_notified_datetime",
                     "proposed_notified_datetime"))

  # pylint: disable=no-self-use
  @validates("instance_type")
  def validate_instance_type(self, _, instance_type):
    """Validate instance_type attribute.

    We prevent creation of proposals for external models.
    """
    instance_class = inflector.get_model(instance_type)
    if issubclass(instance_class, synchronizable.Synchronizable):
      raise ValueError("Trying to create proposal for external model.")
    return instance_type
class Slugged(Base):
  """Mixin for objects that carry a unique, auto-generatable "slug" code.

  Several classes make use of the common mixins and additionally are
  "slugged": they expose a ``slug`` field related to their publishing in
  the system, which is auto-generated when left empty.
  """

  @declared_attr
  def slug(cls):  # pylint: disable=no-self-argument
    """Deferred, non-nullable slug column, one per concrete class."""
    return deferred(db.Column(db.String, nullable=False), cls.__name__)

  @staticmethod
  def _extra_table_args(model):
    """Add a slug uniqueness constraint unless the model opts out."""
    if not getattr(model, '_slug_uniqueness', True):
      return ()
    return (
        db.UniqueConstraint('slug', name='uq_{}'.format(model.__tablename__)),
    )

  # REST properties
  _api_attrs = reflection.ApiAttributes('slug')
  _fulltext_attrs = ['slug']
  _sanitize_html = ['slug']

  _aliases = {
      "slug": {
          "display_name": "Code",
          "description": ("Must be unique. Can be left empty for "
                          "auto generation. If updating or deleting, "
                          "code is required"),
      }
  }

  @classmethod
  def indexed_query(cls):
    """Restrict indexing queries to loading only the slug column."""
    return super(Slugged, cls).indexed_query().options(
        orm.Load(cls).load_only("slug"),
    )

  @classmethod
  def generate_slug_for(cls, obj):
    """Generate unique Slug among the objects of the current class"""
    step = 1000
    candidate_id = getattr(obj, 'id', uuid1())
    obj.slug = "{0}-{1}".format(cls.generate_slug_prefix(), candidate_id)
    # The generated slug may collide with an existing one, so keep bumping
    # the numeric part until the database reports no match. Querying
    # up-front on every call (rather than only after a conflict) is
    # suboptimal, but a session rollback cannot easily be caught here.
    while db.session.query(
        cls.query.filter(cls.slug == obj.slug).exists()).scalar():
      candidate_id += step
      obj.slug = "{0}-{1}".format(cls.generate_slug_prefix(), candidate_id)

  @classmethod
  def generate_slug_prefix(cls):
    """Prefix for generated slugs: the uppercased class name."""
    return cls.__name__.upper()

  @validates("slug")
  def validate_slug(self, _, value):
    """Validates slug for presence of forbidden symbols"""
    # pylint: disable=no-self-use
    if not value:
      return value
    if "*" in value:
      raise exceptions.ValidationError(
          "Field 'Code' contains unsupported symbol '*'"
      )
    return value

  @classmethod
  def ensure_slug_before_flush(cls, session, flush_context, instances):
    """Set the slug to a default string so we don't run afoul of the NOT
    NULL constraint.
    """
    # pylint: disable=unused-argument
    for pending in session.new:
      if not isinstance(pending, Slugged):
        continue
      if pending.slug is None or pending.slug == '':
        # Placeholder uuid; swapped for a real slug after the flush.
        pending.slug = str(uuid1())
        # pylint: disable=protected-access
        pending._replace_slug = True

  @classmethod
  def ensure_slug_after_flush_postexec(cls, session, flush_context):
    """Replace the placeholder slug with a real slug that will be set on
    the next flush/commit.
    """
    # pylint: disable=unused-argument
    for tracked in session.identity_map.values():
      if not isinstance(tracked, Slugged):
        continue
      if hasattr(tracked, '_replace_slug'):
        tracked.generate_slug_for(tracked)
        delattr(tracked, '_replace_slug')
class Cycle(mixins.WithContact,
            wf_mixins.CycleStatusValidatedMixin,
            mixins.Timeboxed,
            mixins.Described,
            mixins.Titled,
            mixins.Slugged,
            mixins.Notifiable,
            ft_mixin.Indexed,
            db.Model):
  """Workflow Cycle model

  A cycle is one concrete run of a workflow, owning its task groups,
  tasks, and task entries (all cascade-deleted with the cycle).
  """
  __tablename__ = 'cycles'
  # Multiple cycles of the same workflow share a title.
  _title_uniqueness = False

  workflow_id = db.Column(
      db.Integer,
      db.ForeignKey('workflows.id', ondelete="CASCADE"),
      nullable=False,
  )
  cycle_task_groups = db.relationship(
      'CycleTaskGroup', backref='cycle', cascade='all, delete-orphan')
  cycle_task_group_object_tasks = db.relationship(
      'CycleTaskGroupObjectTask', backref='cycle',
      cascade='all, delete-orphan')
  cycle_task_entries = db.relationship(
      'CycleTaskEntry', backref='cycle', cascade='all, delete-orphan')
  is_current = db.Column(db.Boolean, default=True, nullable=False)
  next_due_date = db.Column(db.Date)

  _api_attrs = reflection.ApiAttributes(
      'workflow',
      'cycle_task_groups',
      'is_current',
      'next_due_date',
  )

  # Import/export aliases; cycle_workflow is filtered via the custom
  # predicate defined in _filter_by_cycle_workflow below.
  _aliases = {
      "cycle_workflow": {
          "display_name": "Workflow",
          "filter_by": "_filter_by_cycle_workflow",
      },
      "contact": "Assignee",
      "secondary_contact": None,
  }

  PROPERTY_TEMPLATE = u"cycle {}"

  # Full-text attributes drawn from child task groups, tasks, and task
  # comments so a cycle is searchable by its contents.
  _fulltext_attrs = [
      ft_attributes.MultipleSubpropertyFullTextAttr(
          "group title", "cycle_task_groups", ["title"], False,
      ),
      ft_attributes.MultipleSubpropertyFullTextAttr(
          "group assignee",
          lambda instance: [g.contact for g in instance.cycle_task_groups],
          ["name", "email"],
          False,
      ),
      ft_attributes.DateMultipleSubpropertyFullTextAttr(
          "group due date", 'cycle_task_groups', ["next_due_date"], False,
      ),
      ft_attributes.MultipleSubpropertyFullTextAttr(
          "task title", 'cycle_task_group_object_tasks', ["title"], False,
      ),
      ft_attributes.MultipleSubpropertyFullTextAttr(
          "task assignee",
          lambda instance: [t.contact for t in
                            instance.cycle_task_group_object_tasks],
          ["name", "email"],
          False),
      ft_attributes.DateMultipleSubpropertyFullTextAttr(
          "task due date", "cycle_task_group_object_tasks", ["end_date"],
          False),
      ft_attributes.DateFullTextAttr("due date", "next_due_date"),
      ft_attributes.MultipleSubpropertyFullTextAttr(
          "task comments",
          lambda instance: list(itertools.chain(*[
              t.cycle_task_entries
              for t in instance.cycle_task_group_object_tasks
          ])),
          ["description"],
          False),
  ]

  # Re-index a cycle whenever its children or a related Person change.
  AUTO_REINDEX_RULES = [
      ft_mixin.ReindexRule("CycleTaskGroup", lambda x: x.cycle),
      ft_mixin.ReindexRule("CycleTaskGroupObjectTask",
                           lambda x: x.cycle_task_group.cycle),
      ft_mixin.ReindexRule("Person", _query_filtered_by_contact)
  ]

  @classmethod
  def _filter_by_cycle_workflow(cls, predicate):
    """Build an EXISTS filter matching the parent workflow slug or title."""
    from ggrc_workflows.models.workflow import Workflow
    return Workflow.query.filter(
        (Workflow.id == cls.workflow_id) &
        (predicate(Workflow.slug) | predicate(Workflow.title))).exists()

  @classmethod
  def eager_query(cls):
    """Add cycle task groups to cycle eager query

    This function adds cycle_task_groups as a join option when fetching
    cycles, and makes sure we fetch all cycle related data needed for
    generating cycle json, in one query.

    Returns:
      a query object with cycle_task_groups added to joined load options.
    """
    query = super(Cycle, cls).eager_query()
    return query.options(
        orm.joinedload('cycle_task_groups'),
    )

  @classmethod
  def indexed_query(cls):
    """Pre-load the child objects and contacts read by _fulltext_attrs."""
    return super(Cycle, cls).indexed_query().options(
        orm.Load(cls).load_only("next_due_date"),
        orm.Load(cls).subqueryload(
            "cycle_task_group_object_tasks").load_only(
            "id", "title", "end_date"),
        orm.Load(cls).subqueryload("cycle_task_groups").load_only(
            "id", "title", "end_date", "next_due_date",
        ),
        orm.Load(cls).subqueryload("cycle_task_group_object_tasks").
        joinedload("contact").load_only("email", "name", "id"),
        orm.Load(cls).subqueryload("cycle_task_group_object_tasks").
        joinedload("cycle_task_entries").load_only("description", "id"),
        orm.Load(cls).subqueryload("cycle_task_groups").joinedload(
            "contact").load_only("email", "name", "id"),
        orm.Load(cls).joinedload("contact").load_only(
            "email", "name", "id"),
    )