Example #1
class ImportExport(Identifiable, db.Model):
    """ImportExport Model."""

    __tablename__ = 'import_exports'

    IMPORT_EXPORT_STATUSES = [
        'Not Started',
        'Analysis',
        'In Progress',
        'Blocked',
        'Analysis Failed',
        'Stopped',
        'Failed',
        'Finished',
    ]

    job_type = db.Column(db.Enum('Import', 'Export'), nullable=False)
    status = db.Column(db.Enum(*IMPORT_EXPORT_STATUSES),
                       nullable=False,
                       default='Not Started')
    description = db.Column(db.Text)
    created_at = db.Column(db.DateTime, nullable=False)
    start_at = db.Column(db.DateTime)
    end_at = db.Column(db.DateTime)
    created_by_id = db.Column(db.Integer,
                              db.ForeignKey('people.id'),
                              nullable=False)
    created_by = db.relationship('Person',
                                 foreign_keys='ImportExport.created_by_id',
                                 uselist=False)
    results = db.Column(mysql.LONGTEXT)
    title = db.Column(db.Text)
    content = db.Column(mysql.LONGTEXT)
    gdrive_metadata = db.Column('gdrive_metadata', db.Text)
Example #2
class Statusable(object):
    """Mixin with default labels for status field"""

    # pylint: disable=too-few-public-methods

    START_STATE = u"Not Started"
    PROGRESS_STATE = u"In Progress"
    DONE_STATE = u"In Review"
    VERIFIED_STATE = u"Verified"
    FINAL_STATE = u"Completed"
    END_STATES = {VERIFIED_STATE, FINAL_STATE}

    NOT_DONE_STATES = {START_STATE, PROGRESS_STATE}
    DONE_STATES = {DONE_STATE} | END_STATES
    VALID_STATES = tuple(NOT_DONE_STATES | DONE_STATES)

    status = db.Column(db.Enum(*VALID_STATES),
                       nullable=False,
                       default=START_STATE)

    _aliases = {
        "status": {
            "display_name": "State",
            "mandatory": False,
            "description": "Options are:\n{}".format('\n'.join(VALID_STATES))
        }
    }

    @classmethod
    def default_status(cls):
        return "Not Started"
Example #3
class BaseNotification(base.ContextRBAC, Base, db.Model):
    """Base notifications and notifications history model."""
    __abstract__ = True

    RUNNER_DAILY = "daily"
    RUNNER_FAST = "fast"

    object_id = db.Column(db.Integer, nullable=False)
    object_type = db.Column(db.String, nullable=False)
    send_on = db.Column(db.DateTime, nullable=False)
    sent_at = db.Column(db.DateTime, nullable=True)
    custom_message = db.Column(db.Text, nullable=False, default=u"")
    force_notifications = db.Column(db.Boolean, default=False, nullable=False)
    repeating = db.Column(db.Boolean, nullable=False, default=False)
    object = utils.PolymorphicRelationship("object_id", "object_type",
                                           "{}_notifiable")
    runner = db.Column(db.Enum(RUNNER_DAILY, RUNNER_FAST),
                       nullable=False,
                       default=RUNNER_DAILY)

    @declared_attr
    def notification_type_id(cls):  # pylint: disable=no-self-argument
        return db.Column(db.Integer,
                         db.ForeignKey('notification_types.id'),
                         nullable=False)

    @declared_attr
    def notification_type(cls):  # pylint: disable=no-self-argument
        return db.relationship('NotificationType',
                               foreign_keys='{}.notification_type_id'.format(
                                   cls.__name__))
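
Because the base class is declared with __abstract__ = True it is not mapped to a table itself. A hedged sketch of how concrete tables could be derived from it (class and table names here are illustrative, not taken from the excerpt):

class Notification(BaseNotification):
    """Concrete notifications table; inherits the shared columns and the
    notification_type relationship declared on BaseNotification."""

    __tablename__ = 'notifications'


class NotificationHistory(BaseNotification):
    """Archive table with the same schema, kept separate from live rows."""

    __tablename__ = 'notifications_history'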
Example #4
class Statusable(object):

  """Mixin with default labels for status field"""

  # pylint: disable=too-few-public-methods

  START_STATE = u"Not Started"
  PROGRESS_STATE = u"In Progress"
  DONE_STATE = u"Ready for Review"
  VERIFIED_STATE = u"Verified"
  FINAL_STATE = u"Completed"
  END_STATES = {VERIFIED_STATE, FINAL_STATE}

  NOT_DONE_STATES = {START_STATE, PROGRESS_STATE}
  DONE_STATES = {DONE_STATE} | END_STATES
  VALID_STATES = tuple(NOT_DONE_STATES | DONE_STATES)

  status = db.Column(
      db.Enum(*VALID_STATES),
      nullable=False,
      default=START_STATE)

  @classmethod
  def default_status(cls):
    return "Not Started"
Example #5
 def status(cls):  # pylint: disable=no-self-argument
   return deferred(
       db.Column(
           db.Enum(*cls.VALID_STATES),
           nullable=False,
           default=cls.default_status()
       ),
       cls.__name__
   )
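
The fragment above is a per-class column factory; the decorator that would normally sit on it is cut off in the excerpt. A sketch of the kind of mixin it could live in, assuming SQLAlchemy's declared_attr and the project's deferred helper (which takes the column plus the owning class name) are importable; the state values are placeholders:

class HasStatus(object):
  """Hypothetical mixin wrapping the status column factory shown above."""

  VALID_STATES = (u"Not Started", u"In Progress", u"Completed")

  @classmethod
  def default_status(cls):
    return cls.VALID_STATES[0]

  @declared_attr
  def status(cls):  # pylint: disable=no-self-argument
    return deferred(
        db.Column(
            db.Enum(*cls.VALID_STATES),
            nullable=False,
            default=cls.default_status()
        ),
        cls.__name__
    )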
Example #6
class ImportExport(Identifiable, db.Model):
  """ImportExport Model."""

  __tablename__ = 'import_exports'

  IMPORT_EXPORT_STATUSES = [
      'Not Started',
      'Analysis',
      'In Progress',
      'Blocked',
      'Analysis Failed',
      'Stopped',
      'Failed',
      'Finished',
  ]

  job_type = db.Column(db.Enum('Import', 'Export'), nullable=False)
  status = db.Column(db.Enum(*IMPORT_EXPORT_STATUSES), nullable=False,
                     default='Not Started')
  description = db.Column(db.Text)
  created_at = db.Column(db.DateTime, nullable=False)
  start_at = db.Column(db.DateTime)
  end_at = db.Column(db.DateTime)
  created_by_id = db.Column(db.Integer,
                            db.ForeignKey('people.id'), nullable=False)
  created_by = db.relationship('Person',
                               foreign_keys='ImportExport.created_by_id',
                               uselist=False)
  results = db.Column(mysql.LONGTEXT)
  title = db.Column(db.Text)
  content = db.Column(mysql.LONGTEXT)
  gdrive_metadata = db.Column('gdrive_metadata', db.Text)

  def log_json(self):
    """JSON representation"""
    res = {column.name: getattr(self, column.name)
           for column in self.__table__.columns
           if column.name not in ('content', 'gdrive_metadata')}
    if self.results:
      res['results'] = json.loads(self.results)
    res['created_at'] = self.created_at.isoformat()
    return res
Example #7
class Revision(Identifiable, db.Model):
    __tablename__ = 'revisions'

    resource_id = db.Column(db.Integer, nullable=False)
    resource_type = db.Column(db.String, nullable=False)
    event_id = db.Column(db.Integer,
                         db.ForeignKey('events.id'),
                         nullable=False)
    action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                       nullable=False)
    content = db.Column(db.String, nullable=False)
Example #8
class Event(Base, db.Model):
    __tablename__ = 'events'

    action = db.Column(
        db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
        nullable=False,
    )
    resource_id = db.Column(db.Integer)
    resource_type = db.Column(db.String)

    revisions = db.relationship(
        'Revision',
        backref='event',
        cascade='all, delete-orphan',
    )

    _publish_attrs = [
        'action',
        'resource_id',
        'resource_type',
        'revisions',
    ]

    _include_links = [
        'revisions',
    ]

    @staticmethod
    def _extra_table_args(class_):
        return (
            db.Index('events_modified_by', 'modified_by_id'),
            db.Index(
                'ix_{}_updated_at'.format(class_.__tablename__),
                'updated_at',
            ),
        )

    @classmethod
    def eager_query(cls):
        from sqlalchemy import orm

        query = super(Event, cls).eager_query()
        return query.options(
            orm.subqueryload('revisions').undefer_group('Revision_complete'), )
Example #9
File: event.py  Project: xuechaos/ggrc-core
class Event(Base, db.Model):
    __tablename__ = 'events'

    action = db.Column(
        db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
        nullable=False,
    )
    resource_id = db.Column(db.Integer)
    resource_type = db.Column(db.String)

    revisions = db.relationship(
        'Revision',
        backref='event',
        cascade='all, delete-orphan',
    )

    _api_attrs = reflection.ApiAttributes(
        'action',
        'resource_id',
        'resource_type',
        'revisions',
    )

    _include_links = [
        'revisions',
    ]

    @staticmethod
    def _extra_table_args(class_):
        return (db.Index('events_modified_by', 'modified_by_id'), )

    @classmethod
    def eager_query(cls, **kwargs):
        query = super(Event, cls).eager_query(**kwargs)
        return query.options(
            orm.subqueryload('revisions').undefer_group('Revision_complete'), )
Example #10
class Document(Roleable, Relatable, Base, mixins.Titled, Indexed,
               bfh.BeforeFlushHandleable, db.Model):
    """Document model."""
    __tablename__ = 'documents'

    _title_uniqueness = False

    link = deferred(db.Column(db.String, nullable=False), 'Document')
    description = deferred(db.Column(db.Text, nullable=False, default=u""),
                           'Document')
    source_gdrive_id = deferred(
        db.Column(db.String, nullable=False, default=u""), 'Document')

    gdrive_id = deferred(db.Column(db.String, nullable=False, default=u""),
                         'Document')

    URL = "URL"
    ATTACHMENT = "EVIDENCE"
    REFERENCE_URL = "REFERENCE_URL"
    VALID_DOCUMENT_TYPES = [URL, ATTACHMENT, REFERENCE_URL]
    document_type = deferred(
        db.Column(db.Enum(*VALID_DOCUMENT_TYPES), default=URL, nullable=False),
        'Document')

    _fulltext_attrs = [
        'title',
        'link',
        'description',
        'document_type',
    ]

    _api_attrs = reflection.ApiAttributes(
        'title',
        'link',
        'description',
        'document_type',
        reflection.Attribute('source_gdrive_id', update=False),
        reflection.Attribute('gdrive_id', create=False, update=False),
        reflection.Attribute('documentable_obj', read=False, update=False),
        reflection.Attribute('is_uploaded', read=False, update=False),
    )

    _sanitize_html = [
        'title',
        'description',
    ]

    _aliases = {
        'title': 'Title',
        'link': 'Link',
        'description': 'description',
    }

    _allowed_documentables = {
        'Assessment', 'Control', 'Audit', 'Issue', 'RiskAssessment'
    }

    FILE_NAME_SEPARATOR = '_ggrc'

    @orm.validates('document_type')
    def validate_document_type(self, key, document_type):
        """Returns correct option, otherwise rises an error"""
        if document_type is None:
            document_type = self.URL
        if document_type not in self.VALID_DOCUMENT_TYPES:
            raise exceptions.ValidationError(
                "Invalid value for attribute {attr}. "
                "Expected options are `{url}`, `{attachment}`, `{reference_url}`"
                .format(attr=key,
                        url=self.URL,
                        attachment=self.ATTACHMENT,
                        reference_url=self.REFERENCE_URL))
        return document_type

    @classmethod
    def indexed_query(cls):
        return super(Document, cls).indexed_query().options(
            orm.Load(cls).undefer_group("Document_complete", ), )

    @hybrid_property
    def slug(self):
        """Slug property"""
        if self.document_type in (self.URL, self.REFERENCE_URL):
            return self.link
        return u"{} {}".format(self.link, self.title)

    # pylint: disable=no-self-argument
    @slug.expression
    def slug(cls):
        return case([(cls.document_type == cls.ATTACHMENT,
                      func.concat(cls.link, ' ', cls.title))],
                    else_=cls.link)

    def log_json(self):
        tmp = super(Document, self).log_json()
        tmp['type'] = "Document"
        return tmp

    @simple_property
    def is_uploaded(self):
        """This flag is used to know if file uploaded from a local user folder.

    In that case we need just rename file, not copy.
    """
        return self._is_uploaded if hasattr(self, '_is_uploaded') else False

    @is_uploaded.setter
    def is_uploaded(self, value):
        self._is_uploaded = value

    @simple_property
    def documentable_obj(self):
        return self._documentable_obj

    @documentable_obj.setter
    def documentable_obj(self, value):
        self._documentable_obj = value

    def _get_documentable_obj(self):
        """Get documentable object specified"""
        if 'id' not in self._documentable_obj:
            raise exceptions.ValidationError('"id" is mandatory'
                                             ' for documentable_obj')
        if 'type' not in self._documentable_obj:
            raise exceptions.ValidationError(
                '"type" is mandatory for documentable_obj')
        if self._documentable_obj['type'] not in self._allowed_documentables:
            raise exceptions.ValidationError('Allowed types are: {}.'.format(
                ', '.join(self._allowed_documentables)))

        doc_type = self._documentable_obj['type']
        doc_id = self._documentable_obj['id']
        obj = referenced_objects.get(doc_type, doc_id)

        if not obj:
            raise ValueError(
                'Documentable object not found: {type} {id}'.format(
                    type=doc_type, id=doc_id))
        return obj

    @staticmethod
    def _build_file_name_postfix(documentable_obj):
        """Build postfix for given documentable object"""
        postfix_parts = [Document.FILE_NAME_SEPARATOR, documentable_obj.slug]

        related_snapshots = documentable_obj.related_objects(
            _types=['Snapshot'])
        related_snapshots = sorted(related_snapshots, key=lambda it: it.id)

        slugs = (sn.revision.content['slug'] for sn in related_snapshots
                 if sn.child_type == documentable_obj.assessment_type)

        postfix_parts.extend(slugs)
        postfix_sting = '_'.join(postfix_parts).lower()

        return postfix_sting

    def _build_relationship(self, documentable_obj):
        """Build relationship between document and documentable object"""
        from ggrc.models import relationship
        rel = relationship.Relationship(source=documentable_obj,
                                        destination=self)
        db.session.add(rel)

    def _update_fields(self, response):
        """Update fields of document with values of the copied file"""
        self.gdrive_id = response['id']
        self.link = response['webViewLink']
        self.title = response['name']
        self.document_type = Document.ATTACHMENT

    @staticmethod
    def _get_folder(parent_obj):
        return parent_obj.folder if hasattr(parent_obj, "folder") else ""

    def _map_documentable(self):
        """Maps document to documentable object

    If Document.ATTACHMENT and source_gdrive_id => copy file
    """
        if self.is_with_documentable_obj():
            documentable_obj = self._get_documentable_obj()
            if self.document_type == Document.ATTACHMENT and self.source_gdrive_id:
                self.exec_gdrive_file_copy_flow(documentable_obj)
            self._build_relationship(documentable_obj)
            self._documentable_obj = None

    def exec_gdrive_file_copy_flow(self, documentable_obj):
        """Execute google gdrive file copy flow

    Build file name, destination folder and copy file to that folder.
    After coping fills document object fields with new gdrive URL
    """
        postfix = self._build_file_name_postfix(documentable_obj)
        folder_id = self._get_folder(documentable_obj)
        file_id = self.source_gdrive_id
        from ggrc.gdrive.file_actions import process_gdrive_file
        response = process_gdrive_file(folder_id,
                                       file_id,
                                       postfix,
                                       separator=Document.FILE_NAME_SEPARATOR,
                                       is_uploaded=self.is_uploaded)
        self._update_fields(response)

    def is_with_documentable_obj(self):
        return bool(
            hasattr(self, "_documentable_obj") and self._documentable_obj)

    def handle_before_flush(self):
        """Handler that called  before SQLAlchemy flush event"""
        self._map_documentable()
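
A hedged usage sketch of the flow above (object type and ids are invented for illustration, and it is assumed that the BeforeFlushHandleable mixin wires handle_before_flush into SQLAlchemy's before_flush event, as its name suggests). The API sets documentable_obj as a type/id payload; on flush the document is mapped to that object and, for gdrive attachments, the file is copied first:

doc = Document(title=u'evidence.pdf',
               link=u'https://drive.google.com/open?id=abc123',
               document_type=Document.ATTACHMENT,
               source_gdrive_id=u'abc123')
doc.documentable_obj = {'type': 'Assessment', 'id': 42}

db.session.add(doc)
db.session.flush()
# handle_before_flush() -> _map_documentable(): the gdrive file is copied
# into the Assessment's folder, the document fields are updated from the
# copy response, and a Relationship(Assessment -> Document) row is added.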
Example #11
class Document(Ownable, Relatable, Base, Indexed, db.Model):
    """Audit model."""
    __tablename__ = 'documents'

    # TODO: inherit from Titled mixin (note: title is nullable here)
    title = deferred(db.Column(db.String), 'Document')
    link = deferred(db.Column(db.String), 'Document')
    description = deferred(db.Column(db.Text), 'Document')
    kind_id = deferred(db.Column(db.Integer), 'Document')
    year_id = deferred(db.Column(db.Integer), 'Document')
    language_id = deferred(db.Column(db.Integer), 'Document')

    URL = "URL"
    ATTACHMENT = "EVIDENCE"
    document_type = deferred(
        db.Column(db.Enum(URL, ATTACHMENT), default=URL, nullable=False),
        'Document')

    kind = db.relationship(
        'Option',
        primaryjoin='and_(foreign(Document.kind_id) == Option.id, '
        'Option.role == "reference_type")',
        uselist=False,
    )
    year = db.relationship(
        'Option',
        primaryjoin='and_(foreign(Document.year_id) == Option.id, '
        'Option.role == "document_year")',
        uselist=False,
    )
    language = db.relationship(
        'Option',
        primaryjoin='and_(foreign(Document.language_id) == Option.id, '
        'Option.role == "language")',
        uselist=False,
    )

    _fulltext_attrs = [
        'title',
        'link',
        'description',
        "document_type",
    ]

    _publish_attrs = [
        'title',
        'link',
        'description',
        'kind',
        'year',
        'language',
        "document_type",
    ]

    _sanitize_html = [
        'title',
        'description',
    ]

    _aliases = {
        'title': "Title",
        'link': "Link",
        'description': "description",
    }

    @orm.validates('kind', 'year', 'language')
    def validate_document_options(self, key, option):
        """Returns correct option, otherwise rises an error"""
        if key == 'year':
            desired_role = 'document_year'
        elif key == 'kind':
            desired_role = 'reference_type'
        else:
            desired_role = key
        return validate_option(self.__class__.__name__, key, option,
                               desired_role)

    @orm.validates('document_type')
    def validate_document_type(self, key, document_type):
        """Returns correct option, otherwise rises an error"""
        if document_type is None:
            document_type = self.URL
        if document_type not in [self.URL, self.ATTACHMENT]:
            raise exceptions.ValidationError(
                "Invalid value for attribute {attr}. "
                "Expected options are `{url}`, `{attachment}`.".format(
                    attr=key,
                    url=self.URL,
                    attachment=self.ATTACHMENT,
                ))
        return document_type

    @classmethod
    def indexed_query(cls):
        return super(Document, cls).indexed_query().options(
            orm.Load(cls).load_only("title"),
            orm.Load(cls).load_only("link"),
            orm.Load(cls).load_only("description"),
        )

    @classmethod
    def eager_query(cls):
        return super(Document, cls).eager_query().options(
            orm.joinedload('kind'),
            orm.joinedload('year'),
            orm.joinedload('language'),
        )
Example #12
class Revision(Base, db.Model):
  """Revision object holds a JSON snapshot of the object at a time."""

  __tablename__ = 'revisions'

  resource_id = db.Column(db.Integer, nullable=False)
  resource_type = db.Column(db.String, nullable=False)
  event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False)
  action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                     nullable=False)
  _content = db.Column('content', LongJsonType, nullable=False)

  resource_slug = db.Column(db.String, nullable=True)
  source_type = db.Column(db.String, nullable=True)
  source_id = db.Column(db.Integer, nullable=True)
  destination_type = db.Column(db.String, nullable=True)
  destination_id = db.Column(db.Integer, nullable=True)

  @staticmethod
  def _extra_table_args(_):
    return (
        db.Index("revisions_modified_by", "modified_by_id"),
        db.Index("fk_revisions_resource", "resource_type", "resource_id"),
        db.Index("fk_revisions_source", "source_type", "source_id"),
        db.Index("fk_revisions_destination",
                 "destination_type", "destination_id"),
        db.Index('ix_revisions_resource_slug', 'resource_slug'),
    )

  _publish_attrs = [
      'resource_id',
      'resource_type',
      'source_type',
      'source_id',
      'destination_type',
      'destination_id',
      'action',
      'content',
      'description',
  ]

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    query = super(Revision, cls).eager_query()
    return query.options(
        orm.subqueryload('modified_by'),
        orm.subqueryload('event'),  # used in description
    )

  def __init__(self, obj, modified_by_id, action, content):
    self.resource_id = obj.id
    self.resource_type = obj.__class__.__name__
    self.resource_slug = getattr(obj, "slug", None)
    self.modified_by_id = modified_by_id
    self.action = action
    self._content = content

    for attr in ["source_type",
                 "source_id",
                 "destination_type",
                 "destination_id"]:
      setattr(self, attr, getattr(obj, attr, None))

  @builder.simple_property
  def description(self):
    """Compute a human readable description from action and content."""
    if 'display_name' not in self._content:
      return ''
    display_name = self._content['display_name']
    if not display_name:
      result = u"{0} {1}".format(self.resource_type, self.action)
    elif u'<->' in display_name:
      if self.action == 'created':
        msg = u"{destination} linked to {source}"
      elif self.action == 'deleted':
        msg = u"{destination} unlinked from {source}"
      else:
        msg = u"{display_name} {action}"
      source, destination = self._content['display_name'].split('<->')[:2]
      result = msg.format(source=source,
                          destination=destination,
                          display_name=self._content['display_name'],
                          action=self.action)
    elif 'mapped_directive' in self._content:
      # then this is a special case of combined map/creation
      # should happen only for Section and Control
      mapped_directive = self._content['mapped_directive']
      if self.action == 'created':
        result = u"New {0}, {1}, created and mapped to {2}".format(
            self.resource_type,
            display_name,
            mapped_directive
        )
      elif self.action == 'deleted':
        result = u"{0} unmapped from {1} and deleted".format(
            display_name, mapped_directive)
      else:
        result = u"{0} {1}".format(display_name, self.action)
    else:
      # otherwise, it's a normal creation event
      result = u"{0} {1}".format(display_name, self.action)
    if self.event.action == "BULK":
      result += ", via bulk action"
    return result

  @builder.simple_property
  def content(self):
    """Property. Contains the revision content dict.

    Updated by required values, generated from saved content dict."""
    roles_dict = role.get_custom_roles_for(self.resource_type)
    reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}
    access_control_list = self._content.get("access_control_list") or []
    map_field_to_role = {
        "principal_assessor": reverted_roles_dict.get("Principal Assignees"),
        "secondary_assessor": reverted_roles_dict.get("Secondary Assignees"),
        "contact": reverted_roles_dict.get("Primary Contacts"),
        "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
    }
    exists_roles = {i["ac_role_id"] for i in access_control_list}
    for field, role_id in map_field_to_role.items():
      if field not in self._content:
        continue
      if role_id in exists_roles or role_id is None:
        continue
      person_id = (self._content.get(field) or {}).get("id")
      if not person_id:
        continue
      access_control_list.append({
          "display_name": roles_dict[role_id],
          "ac_role_id": role_id,
          "context_id": None,
          "created_at": None,
          "object_type": self.resource_type,
          "updated_at": None,
          "object_id": self.resource_id,
          "modified_by_id": None,
          "person_id": person_id,
          "modified_by": None,
          "id": None,
      })
    populated_content = self._content.copy()
    populated_content["access_control_list"] = access_control_list
    return populated_content

  @content.setter
  def content(self, value):
    """ Setter for content property."""
    self._content = value
Example #13
class ImportExport(Identifiable, db.Model):
  """ImportExport Model."""

  __tablename__ = 'import_exports'

  IMPORT_JOB_TYPE = 'Import'
  EXPORT_JOB_TYPE = 'Export'

  ANALYSIS_STATUS = 'Analysis'
  BLOCKED_STATUS = 'Blocked'
  FAILED_STATUS = 'Failed'
  IN_PROGRESS_STATUS = 'In Progress'
  NOT_STARTED_STATUS = 'Not Started'
  STOPPED_STATUS = 'Stopped'

  IMPORT_EXPORT_STATUSES = [
      NOT_STARTED_STATUS,
      ANALYSIS_STATUS,
      IN_PROGRESS_STATUS,
      BLOCKED_STATUS,
      'Analysis Failed',
      STOPPED_STATUS,
      FAILED_STATUS,
      'Finished',
  ]

  DEFAULT_COLUMNS = ['id', 'title', 'created_at', 'status']

  job_type = db.Column(db.Enum(IMPORT_JOB_TYPE, EXPORT_JOB_TYPE),
                       nullable=False)
  status = db.Column(db.Enum(*IMPORT_EXPORT_STATUSES), nullable=False,
                     default=NOT_STARTED_STATUS)
  description = db.Column(db.Text)
  created_at = db.Column(db.DateTime, nullable=False)
  start_at = db.Column(db.DateTime)
  end_at = db.Column(db.DateTime)
  created_by_id = db.Column(db.Integer,
                            db.ForeignKey('people.id'), nullable=False)
  created_by = db.relationship('Person',
                               foreign_keys='ImportExport.created_by_id',
                               uselist=False)
  results = db.Column(mysql.LONGTEXT)
  title = db.Column(db.Text)
  content = db.Column(mysql.LONGTEXT)
  gdrive_metadata = db.Column('gdrive_metadata', db.Text)

  def log_json(self, is_default=False):
    """JSON representation"""
    if is_default:
      columns = self.DEFAULT_COLUMNS
    else:
      columns = (column.name for column in self.__table__.columns
                 if column.name not in ('content', 'gdrive_metadata'))

    res = {}
    for column in columns:
      if column == "results":
        res[column] = json.loads(self.results) if self.results \
            else self.results
      elif column == "created_at":
        res[column] = self.created_at.isoformat()
      else:
        res[column] = getattr(self, column)

    return res
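
A hedged sketch of calling log_json (field values are invented; the datetime import is assumed). With is_default=True only the DEFAULT_COLUMNS subset is serialized; otherwise every column except content and gdrive_metadata is included, with results JSON-decoded and created_at rendered via isoformat():

import datetime

job = ImportExport(job_type=ImportExport.EXPORT_JOB_TYPE,
                   status='Finished',
                   title=u'objects.csv',
                   created_at=datetime.datetime(2018, 1, 1))

compact = job.log_json(is_default=True)
# keys limited to DEFAULT_COLUMNS: id, title, created_at, status
full = job.log_json()
# all mapped columns except 'content' and 'gdrive_metadata';
# 'results', when set, is json.loads()-decoded before being returned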
Example #14
class Revision(Base, db.Model):
  """Revision object holds a JSON snapshot of the object at a time."""

  __tablename__ = 'revisions'

  resource_id = db.Column(db.Integer, nullable=False)
  resource_type = db.Column(db.String, nullable=False)
  event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False)
  action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                     nullable=False)
  content = db.Column(JsonType, nullable=False)

  source_type = db.Column(db.String, nullable=True)
  source_id = db.Column(db.Integer, nullable=True)
  destination_type = db.Column(db.String, nullable=True)
  destination_id = db.Column(db.Integer, nullable=True)

  @staticmethod
  def _extra_table_args(_):
    return (
        db.Index("revisions_modified_by", "modified_by_id"),
        db.Index("fk_revisions_resource", "resource_type", "resource_id"),
        db.Index("fk_revisions_source", "source_type", "source_id"),
        db.Index("fk_revisions_destination",
                 "destination_type", "destination_id"),
    )

  _publish_attrs = [
      'resource_id',
      'resource_type',
      'source_type',
      'source_id',
      'destination_type',
      'destination_id',
      'action',
      'content',
      'description',
  ]

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    query = super(Revision, cls).eager_query()
    return query.options(
        orm.subqueryload('modified_by'),
        orm.subqueryload('event'),  # used in description
    )

  def __init__(self, obj, modified_by_id, action, content):
    self.resource_id = obj.id
    self.modified_by_id = modified_by_id
    self.resource_type = str(obj.__class__.__name__)
    self.action = action
    self.content = content

    for attr in ["source_type",
                 "source_id",
                 "destination_type",
                 "destination_id"]:
      setattr(self, attr, getattr(obj, attr, None))

  def _description_mapping(self, link_objects):
    """Compute description for revisions with <-> in display name."""
    display_name = self.content['display_name']
    source, destination = display_name.split('<->')[:2]
    mapping_verb = "linked" if self.resource_type in link_objects else "mapped"
    if self.action == 'created':
      result = u"{1} {2} to {0}".format(source, destination, mapping_verb)
    elif self.action == 'deleted':
      result = u"{1} un{2} from {0}".format(source, destination, mapping_verb)
    else:
      result = u"{0} {1}".format(display_name, self.action)
    return result

  @computed_property
  def description(self):
    """Compute a human readable description from action and content."""
    link_objects = ['ObjectDocument']
    if 'display_name' not in self.content:
      return ''
    display_name = self.content['display_name']
    if not display_name:
      result = u"{0} {1}".format(self.resource_type, self.action)
    elif u'<->' in display_name:
      result = self._description_mapping(link_objects)
    else:
      if 'mapped_directive' in self.content:
        # then this is a special case of combined map/creation
        # should happen only for Section and Control
        mapped_directive = self.content['mapped_directive']
        if self.action == 'created':
          result = u"New {0}, {1}, created and mapped to {2}".format(
              self.resource_type,
              display_name,
              mapped_directive
          )
        elif self.action == 'deleted':
          result = u"{0} unmapped from {1} and deleted".format(
              display_name, mapped_directive)
        else:
          result = u"{0} {1}".format(display_name, self.action)
      else:
        # otherwise, it's a normal creation event
        result = u"{0} {1}".format(display_name, self.action)
    if self.event.action == "BULK":
      result += ", via bulk action"
    return result
Example #15
File: document.py  Project: inesp/ggrc-core
class Document(Roleable, Relatable, mixins.Titled,
               bfh.BeforeFlushHandleable, Statusable,
               mixins.WithLastDeprecatedDate, comment.Commentable,
               wrch.WithRelationshipCreatedHandler,
               Indexed, base.ContextRBAC, mixins.Slugged, db.Model):
  """Document model."""
  __tablename__ = 'documents'

  _title_uniqueness = False

  # Override from Commentable mixin (can be removed after GGRC-5192)
  send_by_default = db.Column(db.Boolean, nullable=False, default=True)

  link = deferred(db.Column(db.String, nullable=False), 'Document')
  description = deferred(db.Column(db.Text, nullable=False, default=u""),
                         'Document')
  FILE = "FILE"
  REFERENCE_URL = "REFERENCE_URL"
  VALID_DOCUMENT_KINDS = [FILE, REFERENCE_URL]

  START_STATE = 'Active'
  DEPRECATED = 'Deprecated'

  VALID_STATES = (START_STATE, DEPRECATED, )

  kind = deferred(db.Column(db.Enum(*VALID_DOCUMENT_KINDS),
                            default=REFERENCE_URL,
                            nullable=False),
                  "Document")
  source_gdrive_id = deferred(db.Column(db.String, nullable=False,
                                        default=u""),
                              'Document')

  gdrive_id = deferred(db.Column(db.String, nullable=False,
                                 default=u""),
                       'Document')

  _api_attrs = reflection.ApiAttributes(
      'title',
      'description',
      'status',
      reflection.Attribute('link', update=False),
      reflection.Attribute('kind', update=False),
      reflection.Attribute('source_gdrive_id', update=False),
      reflection.Attribute('gdrive_id', create=False, update=False),
      reflection.Attribute('parent_obj', read=False, update=False),
      reflection.Attribute('is_uploaded', read=False, update=False),
      reflection.Attribute('send_by_default', create=False, update=False),
  )

  _fulltext_attrs = [
      'title',
      'link',
      'description',
      'kind',
      'status'
  ]

  _sanitize_html = [
      'title',
      'description',
  ]

  _aliases = {
      'title': 'Title',
      'link': 'Link',
      'description': 'Description',
      'kind': 'Type',
  }

  ALLOWED_PARENTS = {
      'Control',
      'Issue',
      'RiskAssessment',
      'AccessGroup',
      'Contract',
      'DataAsset',
      'Facility',
      'Market',
      'Metric',
      'Objective',
      'OrgGroup',
      'Policy',
      'Process',
      'Product',
      'ProductGroup',
      'Program',
      'Project',
      'Regulation',
      'Requirement',
      'Risk',
      'Standard',
      'System',
      'TechnologyEnvironment',
      'Threat',
      'Vendor',
  }

  @orm.validates('kind')
  def validate_kind(self, key, kind):
    """Returns correct option, otherwise rises an error"""
    if kind is None:
      kind = self.REFERENCE_URL
    if kind not in self.VALID_DOCUMENT_KINDS:
      raise exceptions.ValidationError(
          "Invalid value for attribute {attr}. "
          "Expected options are `{file}`, `{reference_url}`".
          format(
              attr=key,
              file=self.FILE,
              reference_url=self.REFERENCE_URL
          )
      )
    return kind

  @classmethod
  def indexed_query(cls):
    return super(Document, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Document_complete",
        ),
    )

  def _display_name(self):
    result = self.title
    if self.kind == Document.FILE:
      result = self.link + ' ' + self.title
    return result

  def log_json(self):
    tmp = super(Document, self).log_json()
    tmp['type'] = "Document"
    return tmp

  @simple_property
  def is_uploaded(self):
    """This flag is used to know if file uploaded from a local user folder.

    In that case we need just rename file, not copy.
    """
    return self._is_uploaded if hasattr(self, '_is_uploaded') else False

  @is_uploaded.setter
  def is_uploaded(self, value):
    # pylint: disable=attribute-defined-outside-init
    self._is_uploaded = value

  @simple_property
  def parent_obj(self):
    return self._parent_obj

  @parent_obj.setter
  def parent_obj(self, value):
    # pylint: disable=attribute-defined-outside-init
    self._parent_obj = value

  def _get_parent_obj(self):
    """Get parent object specified"""
    from ggrc.models.object_document import Documentable
    from ggrc.models import all_models
    if 'id' not in self._parent_obj:
      raise exceptions.ValidationError('"id" is mandatory for parent_obj')
    if 'type' not in self._parent_obj:
      raise exceptions.ValidationError(
          '"type" is mandatory for parent_obj')
    parent_type = self._parent_obj['type']
    parent_model = getattr(all_models, parent_type, None)
    if parent_model is None:
      raise exceptions.ValidationError(
          'Type "{}" not found.'.format(parent_type)
      )
    if not issubclass(parent_model, Documentable):
      raise exceptions.ValidationError(
          'Type "{}" is not Documentable.'.format(parent_type)
      )

    parent_id = self._parent_obj['id']
    obj = referenced_objects.get(parent_type, parent_id)

    if not obj:
      raise ValueError(
          'Parent object not found: {type} {id}'.format(type=parent_type,
                                                        id=parent_id))
    return obj

  def _build_relationship(self, parent_obj):
    """Build relationship between document and documentable object"""
    from ggrc.models import relationship
    rel = relationship.Relationship(
        source=parent_obj,
        destination=self
    )
    db.session.add(rel)

  def _update_fields(self, link):
    """Update fields of document with values of the copied file"""
    self.gdrive_id = self.source_gdrive_id
    self.link = link
    self.kind = Document.FILE

  @staticmethod
  def _get_folder(parent):
    return parent.folder if hasattr(parent, 'folder') else ''

  def _process_gdrive_business_logic(self):
    """Handles gdrive business logic

    If parent_obj specified => add file to parent folder
    If parent_obj not specified => get file link
    """
    if self.is_with_parent_obj():
      parent = self._get_parent_obj()
      if self.kind == Document.FILE and self.source_gdrive_id:
        parent_folder_id = self._get_folder(parent)
        self.add_gdrive_file_folder(parent_folder_id)
      self._build_relationship(parent)
      self._parent_obj = None
    elif (self.kind == Document.FILE and
          self.source_gdrive_id and not self.link):
      self.gdrive_id = self.source_gdrive_id
      from ggrc.gdrive.file_actions import get_gdrive_file_link
      self.link = get_gdrive_file_link(self.source_gdrive_id)

  def add_gdrive_file_folder(self, folder_id):
    """Add file to parent folder if exists"""

    file_id = self.source_gdrive_id
    from ggrc.gdrive import file_actions
    if folder_id:
      file_link = file_actions.add_gdrive_file_folder(file_id, folder_id)
    else:
      file_link = file_actions.get_gdrive_file_link(file_id)
    self._update_fields(file_link)

  def is_with_parent_obj(self):
    return bool(hasattr(self, '_parent_obj') and self._parent_obj)

  def add_admin_role(self):
    """Add current user to Document Admins"""
    from ggrc.models import all_models
    admin_role = db.session.query(all_models.AccessControlRole).filter_by(
        name="Admin", object_type=self.type).one()
    self.extend_access_control_list([{
        "ac_role": admin_role,
        "person": login.get_current_user()
    }])

  def handle_relationship_created(self, target):
    """Add document to parent folder if specified"""
    from ggrc.models.object_document import Documentable
    if (isinstance(target, Documentable) and isinstance(target, Folderable) and
            self.kind == Document.FILE and self.source_gdrive_id):
      parent_folder_id = self._get_folder(target)
      self.add_gdrive_file_folder(parent_folder_id)

  def handle_before_flush(self):
    """Handler that called  before SQLAlchemy flush event"""
    self._process_gdrive_business_logic()
Example #16
class Evidence(Roleable, Relatable, mixins.Titled, bfh.BeforeFlushHandleable,
               Statusable, mixins.WithLastDeprecatedDate, comment.Commentable,
               WithAutoDeprecation, mixin.Indexed, base.ContextRBAC,
               mixins.Slugged, db.Model):
    """Evidence (Audit-scope URLs, FILE's) model."""
    __tablename__ = "evidence"

    _title_uniqueness = False

    URL = "URL"
    FILE = "FILE"
    VALID_EVIDENCE_KINDS = [URL, FILE]

    START_STATE = 'Active'
    DEPRECATED = 'Deprecated'

    VALID_STATES = (
        START_STATE,
        DEPRECATED,
    )

    kind = deferred(
        db.Column(db.Enum(*VALID_EVIDENCE_KINDS), default=URL, nullable=False),
        "Evidence")
    source_gdrive_id = deferred(
        db.Column(db.String, nullable=False, default=u""), "Evidence")
    gdrive_id = deferred(db.Column(db.String, nullable=False, default=u""),
                         "Evidence")

    link = deferred(db.Column(db.String), "Evidence")

    description = deferred(db.Column(db.Text, nullable=False, default=u""),
                           "Evidence")

    # Override from Commentable mixin (can be removed after GGRC-5192)
    send_by_default = db.Column(db.Boolean, nullable=False, default=True)

    _api_attrs = reflection.ApiAttributes(
        "title",
        reflection.Attribute("link", update=False),
        reflection.Attribute("source_gdrive_id", update=False),
        "description",
        "status",
        reflection.Attribute("kind", update=False),
        reflection.Attribute("parent_obj", read=False, update=False),
        reflection.Attribute('archived', create=False, update=False),
        reflection.Attribute('is_uploaded', read=False, update=False),
    )

    _fulltext_attrs = ["link", "description", "kind", "status", "archived"]

    AUTO_REINDEX_RULES = [
        mixin.ReindexRule("Audit", lambda x: x.all_related_evidences,
                          ["archived"]),
    ]

    _sanitize_html = [
        "title",
        "description",
    ]

    _aliases = {
        "title": "Title",
        "link": "Link",
        "description": "Description",
        "kind": "Type",
        "archived": {
            "display_name": "Archived",
            "mandatory": False
        },
    }

    _allowed_parents = {'Assessment', 'Audit'}
    FILE_NAME_SEPARATOR = '_ggrc'

    @orm.validates("kind")
    def validate_kind(self, key, kind):
        """Returns correct option, otherwise rises an error"""
        if kind is None:
            kind = self.URL
        if kind not in self.VALID_EVIDENCE_KINDS:
            raise exceptions.ValidationError(
                "Invalid value for attribute {attr}. "
                "Expected options are `{url}`, `{file}`".format(
                    attr=key, url=self.URL, file=self.FILE))
        return kind

    @classmethod
    def indexed_query(cls):
        return super(Evidence, cls).indexed_query().options(
            orm.Load(cls).undefer_group("Evidence_complete", ),
            orm.Load(cls).subqueryload('related_sources'),
            orm.Load(cls).subqueryload('related_destinations'),
        )

    @simple_property
    def archived(self):
        """Returns a boolean whether parent is archived or not."""
        parent_candidates = self.related_objects(
            _types=Evidence._allowed_parents)
        if parent_candidates:
            parent = parent_candidates.pop()
            return parent.archived
        return False

    def log_json(self):
        tmp = super(Evidence, self).log_json()
        tmp['type'] = 'Evidence'
        return tmp

    @simple_property
    def is_uploaded(self):
        """This flag is used to know if file uploaded from a local user folder.

    In that case we need just rename file, not copy.
    """
        return self._is_uploaded if hasattr(self, '_is_uploaded') else False

    @is_uploaded.setter
    def is_uploaded(self, value):
        # pylint: disable=attribute-defined-outside-init
        self._is_uploaded = value

    @simple_property
    def parent_obj(self):
        """Getter for local parent object property."""
        # pylint: disable=attribute-defined-outside-init
        return self._parent_obj

    @parent_obj.setter
    def parent_obj(self, value):
        # pylint: disable=attribute-defined-outside-init
        self._parent_obj = value

    def _get_parent_obj(self):
        """Get parent object specified"""
        if 'id' not in self._parent_obj:
            raise exceptions.ValidationError(
                '"id" is mandatory for parent_obj')
        if 'type' not in self._parent_obj:
            raise exceptions.ValidationError(
                '"type" is mandatory for parent_obj')
        if self._parent_obj['type'] not in self._allowed_parents:
            raise exceptions.ValidationError('Allowed types are: {}.'.format(
                ', '.join(self._allowed_parents)))

        parent_type = self._parent_obj['type']
        parent_id = self._parent_obj['id']
        obj = referenced_objects.get(parent_type, parent_id)

        if not obj:
            raise ValueError('Parent object not found: {type} {id}'.format(
                type=parent_type, id=parent_id))
        return obj

    @staticmethod
    def _build_file_name_postfix(parent_obj):
        """Build postfix for given parent object"""
        postfix_parts = [Evidence.FILE_NAME_SEPARATOR, parent_obj.slug]

        related_snapshots = parent_obj.related_objects(_types=['Snapshot'])
        related_snapshots = sorted(related_snapshots, key=lambda it: it.id)

        slugs = (sn.revision.content['slug'] for sn in related_snapshots
                 if sn.child_type == parent_obj.assessment_type)

        postfix_parts.extend(slugs)
        postfix_sting = '_'.join(postfix_parts).lower()

        return postfix_sting

    def _build_relationship(self, parent_obj):
        """Build relationship between evidence and parent object"""
        from ggrc.models import all_models
        rel = all_models.Relationship(source=parent_obj, destination=self)
        db.session.add(rel)
        signals.Restful.model_put.send(rel.__class__, obj=rel, service=self)

    def _update_fields(self, response):
        """Update fields of evidence with values of the copied file"""
        self.gdrive_id = response['id']
        self.link = response['webViewLink']
        self.title = response['name']
        self.kind = Evidence.FILE

    @staticmethod
    def _get_folder(parent):
        return parent.folder if hasattr(parent, 'folder') else ''

    def exec_gdrive_file_copy_flow(self):
        """Execute google gdrive file copy flow

    Build file name, destination folder and copy file to that folder.
    After coping fills evidence object fields with new gdrive URL
    """
        if self.is_with_parent_obj() and \
           self.kind == Evidence.FILE and \
           self.source_gdrive_id:

            parent = self._get_parent_obj()
            postfix = self._build_file_name_postfix(parent)
            folder_id = self._get_folder(parent)
            file_id = self.source_gdrive_id
            from ggrc.gdrive.file_actions import process_gdrive_file
            response = process_gdrive_file(
                file_id,
                folder_id,
                postfix,
                separator=Evidence.FILE_NAME_SEPARATOR,
                is_uploaded=self.is_uploaded)
            self._update_fields(response)

    def is_with_parent_obj(self):
        return bool(hasattr(self, '_parent_obj') and self._parent_obj)

    def add_admin_role(self):
        """Add current user as Evidence admin"""
        self.add_person_with_role_name(login.get_current_user(), "Admin")

    def handle_before_flush(self):
        """Handler that called  before SQLAlchemy flush event"""
        self.exec_gdrive_file_copy_flow()
Example #17
 def label(cls):  # pylint: disable=no-self-argument
     return deferred(
         db.Column(db.Enum(*cls.POSSIBLE_LABELS), nullable=True),
         cls.__name__)
Example #18
class Revision(Base, db.Model):
    """Revision object holds a JSON snapshot of the object at a time."""

    __tablename__ = 'revisions'

    resource_id = db.Column(db.Integer, nullable=False)
    resource_type = db.Column(db.String, nullable=False)
    event_id = db.Column(db.Integer,
                         db.ForeignKey('events.id'),
                         nullable=False)
    action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                       nullable=False)
    _content = db.Column('content', LongJsonType, nullable=False)

    resource_slug = db.Column(db.String, nullable=True)
    source_type = db.Column(db.String, nullable=True)
    source_id = db.Column(db.Integer, nullable=True)
    destination_type = db.Column(db.String, nullable=True)
    destination_id = db.Column(db.Integer, nullable=True)

    @staticmethod
    def _extra_table_args(_):
        return (
            db.Index("revisions_modified_by", "modified_by_id"),
            db.Index("fk_revisions_resource", "resource_type", "resource_id"),
            db.Index("fk_revisions_source", "source_type", "source_id"),
            db.Index("fk_revisions_destination", "destination_type",
                     "destination_id"),
            db.Index('ix_revisions_resource_slug', 'resource_slug'),
        )

    _api_attrs = reflection.ApiAttributes(
        'resource_id',
        'resource_type',
        'source_type',
        'source_id',
        'destination_type',
        'destination_id',
        'action',
        'content',
        'description',
    )

    @classmethod
    def eager_query(cls):
        from sqlalchemy import orm

        query = super(Revision, cls).eager_query()
        return query.options(
            orm.subqueryload('modified_by'),
            orm.subqueryload('event'),  # used in description
        )

    def __init__(self, obj, modified_by_id, action, content):
        self.resource_id = obj.id
        self.resource_type = obj.__class__.__name__
        self.resource_slug = getattr(obj, "slug", None)
        self.modified_by_id = modified_by_id
        self.action = action
        if "access_control_list" in content and content["access_control_list"]:
            for acl in content["access_control_list"]:
                acl["person"] = {
                    "id": acl["person_id"],
                    "type": "Person",
                    "href": "/api/people/{}".format(acl["person_id"]),
                }

        self._content = content

        for attr in [
                "source_type", "source_id", "destination_type",
                "destination_id"
        ]:
            setattr(self, attr, getattr(obj, attr, None))

    @builder.simple_property
    def description(self):
        """Compute a human readable description from action and content."""
        if 'display_name' not in self._content:
            return ''
        display_name = self._content['display_name']
        if not display_name:
            result = u"{0} {1}".format(self.resource_type, self.action)
        elif u'<->' in display_name:
            if self.action == 'created':
                msg = u"{destination} linked to {source}"
            elif self.action == 'deleted':
                msg = u"{destination} unlinked from {source}"
            else:
                msg = u"{display_name} {action}"
            source, destination = self._content['display_name'].split(
                '<->')[:2]
            result = msg.format(source=source,
                                destination=destination,
                                display_name=self._content['display_name'],
                                action=self.action)
        elif 'mapped_directive' in self._content:
            # then this is a special case of combined map/creation
            # should happen only for Section and Control
            mapped_directive = self._content['mapped_directive']
            if self.action == 'created':
                result = u"New {0}, {1}, created and mapped to {2}".format(
                    self.resource_type, display_name, mapped_directive)
            elif self.action == 'deleted':
                result = u"{0} unmapped from {1} and deleted".format(
                    display_name, mapped_directive)
            else:
                result = u"{0} {1}".format(display_name, self.action)
        else:
            # otherwise, it's a normal creation event
            result = u"{0} {1}".format(display_name, self.action)
        if self.event.action == "BULK":
            result += ", via bulk action"
        return result

    @builder.simple_property
    def content(self):
        """Property. Contains the revision content dict.

        Updated by required values, generated from saved content dict."""
        # pylint: disable=too-many-locals
        roles_dict = role.get_custom_roles_for(self.resource_type)
        reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}
        access_control_list = self._content.get("access_control_list") or []
        map_field_to_role = {
            "principal_assessor":
            reverted_roles_dict.get("Principal Assignees"),
            "secondary_assessor":
            reverted_roles_dict.get("Secondary Assignees"),
            "contact": reverted_roles_dict.get("Primary Contacts"),
            "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
            "owners": reverted_roles_dict.get("Admin"),
        }
        exists_roles = {i["ac_role_id"] for i in access_control_list}
        for field, role_id in map_field_to_role.items():
            if field not in self._content:
                continue
            if role_id in exists_roles or role_id is None:
                continue
            field_content = self._content.get(field) or {}
            if not field_content:
                continue
            if not isinstance(field_content, list):
                field_content = [field_content]
            person_ids = {fc.get("id") for fc in field_content if fc.get("id")}
            for person_id in person_ids:
                access_control_list.append({
                    "display_name": roles_dict[role_id],
                    "ac_role_id": role_id,
                    "context_id": None,
                    "created_at": None,
                    "object_type": self.resource_type,
                    "updated_at": None,
                    "object_id": self.resource_id,
                    "modified_by_id": None,
                    "person_id": person_id,
                    # Frontend require data in such format
                    "person": {
                        "id": person_id,
                        "type": "Person",
                        "href": "/api/people/{}".format(person_id)
                    },
                    "modified_by": None,
                    "id": None,
                })
        populated_content = self._content.copy()
        populated_content["access_control_list"] = access_control_list

        if 'url' in self._content:
            reference_url_list = []
            for key in ('url', 'reference_url'):
                link = self._content[key]
                # link might exist, but can be an empty string - we treat those values
                # as non-existing (empty) reference URLs
                if not link:
                    continue

                # if creation/modification date is not available, we estimate it by
                # using the corresponding information from the Revision itself
                created_at = (self._content.get("created_at")
                              or self.created_at.isoformat())
                updated_at = (self._content.get("updated_at")
                              or self.updated_at.isoformat())

                reference_url_list.append({
                    "display_name": link,
                    "document_type": "REFERENCE_URL",
                    "link": link,
                    "title": link,
                    "id": None,
                    "created_at": created_at,
                    "updated_at": updated_at,
                })
            populated_content['reference_url'] = reference_url_list

        return populated_content

    @content.setter
    def content(self, value):
        """ Setter for content property."""
        self._content = value
Example #19
class Evidence(Roleable, Relatable, mixins.Titled, bfh.BeforeFlushHandleable,
               Statusable, mixins.WithLastDeprecatedDate, comment.Commentable,
               WithAutoDeprecation, base.ContextRBAC, mixins.Slugged,
               mixin.Indexed, db.Model):
    """Evidence (Audit-scope URLs, FILE's) model."""
    __tablename__ = "evidence"

    _title_uniqueness = False

    URL = "URL"
    FILE = "FILE"
    VALID_EVIDENCE_KINDS = [URL, FILE]

    START_STATE = 'Active'
    DEPRECATED = 'Deprecated'

    VALID_STATES = (
        START_STATE,
        DEPRECATED,
    )

    kind = deferred(
        db.Column(db.Enum(*VALID_EVIDENCE_KINDS), default=URL, nullable=False),
        "Evidence")
    source_gdrive_id = deferred(
        db.Column(db.String, nullable=False, default=u""), "Evidence")
    gdrive_id = deferred(db.Column(db.String, nullable=False, default=u""),
                         "Evidence")

    link = deferred(db.Column(db.String), "Evidence")

    description = deferred(db.Column(db.Text, nullable=False, default=u""),
                           "Evidence")

    # Override from Commentable mixin (can be removed after GGRC-5192)
    send_by_default = db.Column(db.Boolean, nullable=False, default=True)

    _api_attrs = reflection.ApiAttributes(
        "title",
        reflection.Attribute("link", update=False),
        reflection.Attribute("source_gdrive_id", update=False),
        "description",
        "status",
        reflection.Attribute("kind", update=False),
        reflection.Attribute("parent_obj", read=False, update=False),
        reflection.Attribute("archived", create=False, update=False),
        reflection.Attribute("is_uploaded", read=False, update=False),
    )

    _fulltext_attrs = [
        "title", "link", "description", "kind", "status", "archived"
    ]

    AUTO_REINDEX_RULES = [
        mixin.ReindexRule("Audit", lambda x: x.assessments, ["archived"]),
    ]

    _sanitize_html = [
        "title",
        "description",
    ]

    _aliases = {
        "title": "Title",
        "link": "Link",
        "description": "Description",
        "kind": "Type",
        "archived": {
            "display_name": "Archived",
            "mandatory": False
        },
    }

    _allowed_parents = {"Assessment", "Audit"}
    FILE_NAME_SEPARATOR = "_ggrc"

    @orm.validates("kind")
    def validate_kind(self, key, kind):
        """Returns correct option, otherwise rises an error"""
        if kind is None:
            kind = self.URL
        if kind not in self.VALID_EVIDENCE_KINDS:
            raise exceptions.ValidationError(
                "Invalid value for attribute {attr}. "
                "Expected options are `{url}`, `{file}`".format(
                    attr=key, url=self.URL, file=self.FILE))
        return kind

    @classmethod
    def _populate_query(cls, query):
        return query.options(
            orm.subqueryload(cls._related_assessment),
            orm.subqueryload(cls._related_audit).load_only("archived"),
            orm.Load(cls).undefer_group("Evidence_complete", ),
        )

    @classmethod
    def indexed_query(cls):
        return cls._populate_query(super(Evidence, cls).indexed_query())

    @classmethod
    def eager_query(cls):
        return cls._populate_query(super(Evidence, cls).eager_query())

    @simple_property
    def archived(self):
        """Evidence archived if related Assessment/Audit is archived"""
        # pylint: disable=unsubscriptable-object
        if self._related_assessment:
            return self._related_assessment.audit.archived
        elif self._related_audit:
            return self._related_audit.archived
        return False

    def log_json(self):
        tmp = super(Evidence, self).log_json()
        tmp["type"] = "Evidence"
        return tmp

    @simple_property
    def is_uploaded(self):
        """This flag is used to know if file uploaded from a local user folder.

    In that case we need just rename file, not copy.
    """
        return self._is_uploaded if hasattr(self, "_is_uploaded") else False

    @is_uploaded.setter
    def is_uploaded(self, value):
        # pylint: disable=attribute-defined-outside-init
        self._is_uploaded = value

    @simple_property
    def parent_obj(self):
        return self._parent_obj

    @parent_obj.setter
    def parent_obj(self, value):
        # pylint: disable=attribute-defined-outside-init
        self._parent_obj = value

    def _get_parent_obj(self):
        """Get parent object specified"""
        if "id" not in self._parent_obj:
            raise exceptions.ValidationError(
                "'id' is mandatory for parent_obj")
        if "type" not in self._parent_obj:
            raise exceptions.ValidationError(
                "'type' is mandatory for parent_obj")
        if self._parent_obj["type"] not in self._allowed_parents:
            raise exceptions.ValidationError("Allowed types are: {}.".format(
                ", ".join(self._allowed_parents)))

        parent_type = self._parent_obj["type"]
        parent_id = self._parent_obj["id"]
        obj = referenced_objects.get(parent_type, parent_id)

        if not obj:
            raise ValueError("Parent object not found: {type} {id}".format(
                type=parent_type, id=parent_id))
        return obj

    @staticmethod
    def _build_file_name_postfix(parent_obj):
        """Build postfix for given parent object"""
        postfix_parts = [Evidence.FILE_NAME_SEPARATOR, parent_obj.slug]

        related_snapshots = parent_obj.related_objects(_types=["Snapshot"])
        related_snapshots = sorted(related_snapshots, key=lambda it: it.id)

        slugs = (sn.revision.content["slug"] for sn in related_snapshots
                 if sn.child_type == parent_obj.assessment_type)

        postfix_parts.extend(slugs)
        postfix_sting = "_".join(postfix_parts).lower()

        return postfix_sting

    def _build_relationship(self, parent_obj):
        """Build relationship between evidence and parent object"""
        from ggrc.models import all_models
        rel = all_models.Relationship(source=parent_obj, destination=self)
        db.session.add(rel)
        signals.Restful.model_put.send(rel.__class__, obj=rel, service=self)

    def _update_fields(self, response):
        """Update fields of evidence with values of the copied file"""
        self.gdrive_id = response["id"]
        self.link = response["webViewLink"]
        self.title = response["name"]
        self.kind = Evidence.FILE

    @staticmethod
    def _get_folder(parent):
        return parent.folder if hasattr(parent, "folder") else ""

    def _map_parent(self):
        """Maps evidence to parent object

    If Document.FILE and source_gdrive_id => copy file
    """
        if self.is_with_parent_obj():
            parent = self._get_parent_obj()
            if self.kind == Evidence.FILE and self.source_gdrive_id:
                self.exec_gdrive_file_copy_flow(parent)
            self._build_relationship(parent)
            self._parent_obj = None

    def exec_gdrive_file_copy_flow(self, parent):
        """Execute google gdrive file copy flow

    Build file name, destination folder and copy file to that folder.
    After coping fills evidence object fields with new gdrive URL
    """
        postfix = self._build_file_name_postfix(parent)
        folder_id = self._get_folder(parent)
        file_id = self.source_gdrive_id
        from ggrc.gdrive.file_actions import process_gdrive_file
        response = process_gdrive_file(file_id,
                                       folder_id,
                                       postfix,
                                       separator=Evidence.FILE_NAME_SEPARATOR,
                                       is_uploaded=self.is_uploaded)
        self._update_fields(response)

    def is_with_parent_obj(self):
        return bool(hasattr(self, "_parent_obj") and self._parent_obj)

    def add_admin_role(self):
        """Add current user as Evidence admin"""
        from ggrc.models import all_models
        admin_role = db.session.query(all_models.AccessControlRole).filter_by(
            name="Admin", object_type=self.type).one()
        self.extend_access_control_list([{
            "ac_role": admin_role,
            "person": login.get_current_user()
        }])

    def handle_before_flush(self):
        """Handler that called  before SQLAlchemy flush event"""
        self._map_parent()

    @declared_attr
    def _related_audit(cls):  # pylint: disable=no-self-argument
        """Audits mapped to Evidence"""
        def primary_join_function():
            return or_(
                and_(Relationship.source_id == cls.id,
                     Relationship.source_type == cls.__name__,
                     Relationship.destination_type == "Audit"),
                and_(Relationship.destination_id == cls.id,
                     Relationship.destination_type == cls.__name__,
                     Relationship.source_type == "Audit"))

        def secondary_join_function():
            from ggrc.models import all_models
            return or_(
                and_(
                    all_models.Audit.id == Relationship.destination_id,
                    Relationship.destination_type == "Audit",
                ),
                and_(
                    all_models.Audit.id == Relationship.source_id,
                    Relationship.source_type == "Audit",
                ))

        return db.relationship("Audit",
                               primaryjoin=primary_join_function,
                               secondary=Relationship.__table__,
                               secondaryjoin=secondary_join_function,
                               viewonly=True,
                               uselist=False)

    @declared_attr
    def _related_assessment(cls):  # pylint: disable=no-self-argument
        """Assessments mapped to Evidence"""
        def primary_join_function():
            return or_(
                and_(Relationship.source_id == cls.id,
                     Relationship.source_type == cls.__name__,
                     Relationship.destination_type == "Assessment"),
                and_(Relationship.destination_id == cls.id,
                     Relationship.destination_type == cls.__name__,
                     Relationship.source_type == "Assessment"))

        def secondary_join_function():
            from ggrc.models import all_models
            return or_(
                and_(
                    all_models.Assessment.id == Relationship.destination_id,
                    Relationship.destination_type == "Assessment",
                ),
                and_(
                    all_models.Assessment.id == Relationship.source_id,
                    Relationship.source_type == "Assessment",
                ))

        return db.relationship("Assessment",
                               primaryjoin=primary_join_function,
                               secondary=Relationship.__table__,
                               secondaryjoin=secondary_join_function,
                               viewonly=True,
                               uselist=False)
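
# Illustration (assumption, not part of the Evidence model above): a
# standalone sketch of how _build_file_name_postfix assembles the postfix
# appended to copied evidence files from the parent slug and the slugs of
# related snapshots, lowercased and joined with underscores. Plain strings
# stand in for real Snapshot objects here.
FILE_NAME_SEPARATOR = "_ggrc"


def build_file_name_postfix(parent_slug, snapshot_slugs):
    """Return the "_ggrc..." postfix for a copied evidence file."""
    postfix_parts = [FILE_NAME_SEPARATOR, parent_slug]
    postfix_parts.extend(snapshot_slugs)
    return "_".join(postfix_parts).lower()


# e.g. an Assessment "ASSESSMENT-12" with one mapped Control snapshot:
print(build_file_name_postfix("ASSESSMENT-12", ["CONTROL-7"]))
# -> "_ggrc_assessment-12_control-7"
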
Example #20
class Revision(Filterable, base.ContextRBAC, Base, db.Model):
    """Revision object holds a JSON snapshot of the object at a time."""

    __tablename__ = 'revisions'

    resource_id = db.Column(db.Integer, nullable=False)
    resource_type = db.Column(db.String, nullable=False)
    event_id = db.Column(db.Integer,
                         db.ForeignKey('events.id'),
                         nullable=False)
    action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                       nullable=False)
    _content = db.Column('content', LongJsonType, nullable=False)

    resource_slug = db.Column(db.String, nullable=True)
    source_type = db.Column(db.String, nullable=True)
    source_id = db.Column(db.Integer, nullable=True)
    destination_type = db.Column(db.String, nullable=True)
    destination_id = db.Column(db.Integer, nullable=True)

    @staticmethod
    def _extra_table_args(_):
        return (
            db.Index("revisions_modified_by", "modified_by_id"),
            db.Index("ix_revisions_resource_action", "resource_type",
                     "resource_id", "action"),
            db.Index("fk_revisions_source", "source_type", "source_id"),
            db.Index("fk_revisions_destination", "destination_type",
                     "destination_id"),
            db.Index('ix_revisions_resource_slug', 'resource_slug'),
        )

    _api_attrs = reflection.ApiAttributes(
        'resource_id',
        'resource_type',
        'source_type',
        'source_id',
        'destination_type',
        'destination_id',
        'action',
        'content',
        'description',
        reflection.Attribute('diff_with_current', create=False, update=False),
        reflection.Attribute('meta', create=False, update=False),
    )

    _filterable_attrs = [
        'action',
        'resource_id',
        'resource_type',
        'source_type',
        'source_id',
        'destination_type',
        'destination_id',
    ]

    @classmethod
    def eager_query(cls):
        from sqlalchemy import orm

        query = super(Revision, cls).eager_query()
        return query.options(
            orm.subqueryload('modified_by'),
            orm.subqueryload('event'),  # used in description
        )

    def __init__(self, obj, modified_by_id, action, content):
        self.resource_id = obj.id
        self.resource_type = obj.__class__.__name__
        self.resource_slug = getattr(obj, "slug", None)
        self.modified_by_id = modified_by_id
        self.action = action
        if "access_control_list" in content and content["access_control_list"]:
            for acl in content["access_control_list"]:
                acl["person"] = {
                    "id": acl["person_id"],
                    "type": "Person",
                    "href": "/api/people/{}".format(acl["person_id"]),
                }

        self._content = content

        for attr in [
                "source_type", "source_id", "destination_type",
                "destination_id"
        ]:
            setattr(self, attr, getattr(obj, attr, None))

    @builder.callable_property
    def diff_with_current(self):
        """Callable lazy property for revision."""
        referenced_objects.mark_to_cache(self.resource_type, self.resource_id)
        revisions_diff.mark_for_latest_content(self.resource_type,
                                               self.resource_id)

        def lazy_loader():
            """Lazy load diff for revisions."""
            referenced_objects.rewarm_cache()
            revisions_diff.rewarm_latest_content()
            instance = referenced_objects.get(self.resource_type,
                                              self.resource_id)
            if instance:
                return revisions_diff.prepare(instance, self.content)
            # return an empty diff: the object has already been removed
            return {}

        return lazy_loader

    @builder.callable_property
    def meta(self):
        """Callable lazy property for revision."""
        referenced_objects.mark_to_cache(self.resource_type, self.resource_id)

        def lazy_loader():
            """Lazy load diff for revisions."""
            referenced_objects.rewarm_cache()
            instance = referenced_objects.get(self.resource_type,
                                              self.resource_id)
            meta_dict = {}
            if instance:
                instance_meta_info = meta_info.MetaInfo(instance)
                meta_dict["mandatory"] = instance_meta_info.mandatory
            return meta_dict

        return lazy_loader

    @builder.simple_property
    def description(self):
        """Compute a human readable description from action and content."""
        if 'display_name' not in self._content:
            return ''
        display_name = self._content['display_name']
        if not display_name:
            result = u"{0} {1}".format(self.resource_type, self.action)
        elif u'<->' in display_name:
            if self.action == 'created':
                msg = u"{destination} linked to {source}"
            elif self.action == 'deleted':
                msg = u"{destination} unlinked from {source}"
            else:
                msg = u"{display_name} {action}"
            source, destination = self._content['display_name'].split(
                '<->')[:2]
            result = msg.format(source=source,
                                destination=destination,
                                display_name=self._content['display_name'],
                                action=self.action)
        elif 'mapped_directive' in self._content:
            # then this is a special case of combined map/creation
            # should happen only for Requirement and Control
            mapped_directive = self._content['mapped_directive']
            if self.action == 'created':
                result = u"New {0}, {1}, created and mapped to {2}".format(
                    self.resource_type, display_name, mapped_directive)
            elif self.action == 'deleted':
                result = u"{0} unmapped from {1} and deleted".format(
                    display_name, mapped_directive)
            else:
                result = u"{0} {1}".format(display_name, self.action)
        else:
            # otherwise, it's a normal creation event
            result = u"{0} {1}".format(display_name, self.action)
        if self.event.action == "BULK":
            result += ", via bulk action"
        return result

    def populate_reference_url(self):
        """Add reference_url info for older revisions."""
        if 'url' not in self._content:
            return {}
        reference_url_list = []
        for key in ('url', 'reference_url'):
            link = self._content[key]
            # link might exist, but can be an empty string - we treat those values
            # as non-existing (empty) reference URLs
            if not link:
                continue

            # if creation/modification date is not available, we estimate it by
            # using the corresponding information from the Revision itself
            created_at = (self._content.get("created_at")
                          or self.created_at.isoformat())
            updated_at = (self._content.get("updated_at")
                          or self.updated_at.isoformat())

            reference_url_list.append({
                "display_name": link,
                "kind": "REFERENCE_URL",
                "link": link,
                "title": link,
                "id": None,
                "created_at": created_at,
                "updated_at": updated_at,
            })
        return {'reference_url': reference_url_list}

    @classmethod
    def _filter_internal_acls(cls, access_control_list):
        """Remove internal access control list entries.

    This is needed due to bugs in older code that in some cases the revisions
    stored internal ACL entries.
    Due to possible role removal, the parent_id is the only true flag that we
    can use for filtering

    Args:
      access_control_list: list of dicts containing ACL entries.

    Returns:
      access_control_list but without any ACL entry that was generated from
        some other ACL entry.
    """
        return [
            acl for acl in access_control_list if acl.get("parent_id") is None
        ]

    @classmethod
    def _populate_acl_with_people(cls, access_control_list):
        """Add person property with person stub on access control list."""
        for acl in access_control_list:
            if "person" not in acl:
                acl["person"] = {"id": acl.get("person_id"), "type": "Person"}
        return access_control_list

    def populate_acl(self):
        """Add access_control_list info for older revisions."""
        roles_dict = role.get_custom_roles_for(self.resource_type)
        reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}
        access_control_list = self._content.get("access_control_list") or []
        map_field_to_role = {
            "principal_assessor":
            reverted_roles_dict.get("Principal Assignees"),
            "secondary_assessor":
            reverted_roles_dict.get("Secondary Assignees"),
            "contact": reverted_roles_dict.get("Primary Contacts"),
            "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
            "owners": reverted_roles_dict.get("Admin"),
        }
        exists_roles = {i["ac_role_id"] for i in access_control_list}

        for field, role_id in map_field_to_role.items():
            if role_id in exists_roles or role_id is None:
                continue
            if field not in self._content:
                continue
            field_content = self._content.get(field) or {}
            if not field_content:
                continue
            if not isinstance(field_content, list):
                field_content = [field_content]
            person_ids = {fc.get("id") for fc in field_content if fc.get("id")}
            for person_id in person_ids:
                access_control_list.append({
                    "display_name": roles_dict[role_id],
                    "ac_role_id": role_id,
                    "context_id": None,
                    "created_at": None,
                    "object_type": self.resource_type,
                    "updated_at": None,
                    "object_id": self.resource_id,
                    "modified_by_id": None,
                    "person_id": person_id,
                    # Frontend requires data in this format
                    "person": {
                        "id": person_id,
                        "type": "Person",
                        "href": "/api/people/{}".format(person_id)
                    },
                    "modified_by": None,
                    "id": None,
                })

        acl_with_people = self._populate_acl_with_people(access_control_list)
        filtered_acl = self._filter_internal_acls(acl_with_people)
        result_acl = [
            acl for acl in filtered_acl if acl["ac_role_id"] in roles_dict
        ]
        return {
            "access_control_list": result_acl,
        }

    def populate_folder(self):
        """Add folder info for older revisions."""
        if "folder" in self._content:
            return {}
        folders = self._content.get("folders") or [{"id": ""}]
        return {"folder": folders[0]["id"]}

    def populate_labels(self):
        """Add labels info for older revisions."""
        if "label" not in self._content:
            return {}
        label = self._content["label"]
        return {
            "labels": [{
                "id": None,
                "name": label
            }]
        } if label else {
            "labels": []
        }

    def populate_status(self):
        """Update status for older revisions or add it if status does not exist."""
        workflow_models = {
            "Cycle",
            "CycleTaskGroup",
            "CycleTaskGroupObjectTask",
        }
        statuses_mapping = {"InProgress": "In Progress"}
        status = statuses_mapping.get(self._content.get("status"))
        if self.resource_type in workflow_models and status:
            return {"status": status}

        pop_models = {
            # ggrc
            "AccessGroup",
            "Control",
            "DataAsset",
            "Directive",
            "Facility",
            "Issue",
            "Market",
            "Objective",
            "OrgGroup",
            "Product",
            "Program",
            "Project",
            "Requirement",
            "System",
            "Vendor",
            "Risk",
            "Threat",
        }
        if self.resource_type not in pop_models:
            return {}
        statuses_mapping = {
            "Active": "Active",
            "Deprecated": "Deprecated",
            "Effective": "Active",
            "Final": "Active",
            "In Scope": "Active",
            "Ineffective": "Active",
            "Launched": "Active",
        }
        return {
            "status": statuses_mapping.get(self._content.get("status"),
                                           "Draft")
        }

    def populate_review_status(self):
        """Replace os_state with review state for old revisions"""
        result = {}
        if "os_state" in self._content:
            result = {"review_status": self._content["os_state"]}
        return result

    def _document_evidence_hack(self):
        """Update display_name on evideces

    Evidences have display names from links and titles, and until now they used
    slug property to calculate the display name. This hack is here since we
    must support older revisions with bad data, and to avoid using slug
    differently than everywhere else in the app.

    This function only modifies existing evidence entries on any given object.
    If an object does not have and document evidences then an empty dict is
    returned.

    Returns:
      dict with updated display name for each of the evidence entries if there
      are any.
    """
        if "document_evidence" not in self._content:
            return {}
        document_evidence = self._content.get("document_evidence")
        for evidence in document_evidence:
            evidence[u"display_name"] = u"{link} {title}".format(
                link=evidence.get("link"),
                title=evidence.get("title"),
            ).strip()
        return {u"documents_file": document_evidence}

    def populate_categoies(self, key_name):
        """Fix revision logger.

    On controls in category field was loged categorization instances."""
        if self.resource_type != "Control":
            return {}
        result = []
        for categorization in self._content.get(key_name) or []:
            if "category_id" in categorization:
                result.append({
                    "id": categorization["category_id"],
                    "type": categorization["category_type"],
                    "name": categorization["display_name"],
                    "display_name": categorization["display_name"],
                })
            else:
                result.append(categorization)
        return {key_name: result}

    def _get_cavs(self):
        """Return cavs values from content."""
        if "custom_attribute_values" in self._content:
            return self._content["custom_attribute_values"]
        if "custom_attributes" in self._content:
            return self._content["custom_attributes"]
        return []

    def populate_cavs(self):
        """Setup cads in cav list if they are not presented in content

    but now they are associated to instance."""
        from ggrc.models import custom_attribute_definition
        cads = custom_attribute_definition.get_custom_attributes_for(
            self.resource_type, self.resource_id)
        cavs = {int(i["custom_attribute_id"]): i for i in self._get_cavs()}
        for cad in cads:
            custom_attribute_id = int(cad["id"])
            if custom_attribute_id in cavs:
                # Old revisions can contain falsy values for a Checkbox
                if cad["attribute_type"] == "Checkbox" \
                        and not cavs[custom_attribute_id]["attribute_value"]:
                    cavs[custom_attribute_id]["attribute_value"] = cad[
                        "default_value"]
                continue
            if cad["attribute_type"] == "Map:Person":
                value = "Person"
            else:
                value = cad["default_value"]
            cavs[custom_attribute_id] = {
                "attribute_value": value,
                "attribute_object_id": None,
                "custom_attribute_id": custom_attribute_id,
                "attributable_id": self.resource_id,
                "attributable_type": self.resource_type,
                "display_name": "",
                "attribute_object": None,
                "type": "CustomAttributeValue",
                "context_id": None,
            }
        return {
            "custom_attribute_values": cavs.values(),
            "custom_attribute_definitions": cads
        }

    def populate_cad_default_values(self):
        """Setup default_value to CADs if it's needed."""
        from ggrc.models import all_models
        if "custom_attribute_definitions" not in self._content:
            return {}
        cads = []
        for cad in self._content["custom_attribute_definitions"]:
            if "default_value" not in cad:
                cad["default_value"] = (
                    all_models.CustomAttributeDefinition.get_default_value_for(
                        cad["attribute_type"]))
            cads.append(cad)
        return {"custom_attribute_definitions": cads}

    def populate_requirements(self, populated_content):  # noqa pylint: disable=too-many-branches
        """Populates revision content for Requirement models and models with fields

    that can contain Requirement old names. This fields would be checked and
    updated where necessary
    """
        # change to add Requirement old names
        requirement_type = ["Section", "Clause"]
        # change to add models and fields that can contain Requirement old names
        affected_models = {
            "AccessControlList": [
                "object_type",
            ],
            "AccessControlRole": [
                "object_type",
            ],
            "Assessment": [
                "assessment_type",
            ],
            "AssessmentTemplate": [
                "template_object_type",
            ],
            "Automapping": [
                "source_type",
                "destination_type",
            ],
            "CustomAttributeValue": [
                "attributable_type",
            ],
            "Event": [
                "resource_type",
            ],
            "ObjectPerson": [
                "personable_type",
            ],
            "Relationship": [
                "source_type",
                "destination_type",
            ],
            "Revision": [
                "resource_type",
            ],
            "Label": [
                "object_type",
            ],
            "Context": [
                "related_object_type",
            ],
            "IssuetrackerIssue": [
                "object_type",
            ],
            "ObjectLabel": [
                "object_type",
            ],
            "ObjectTemplates": [
                "name",
            ],
            "Proposal": [
                "instance_type",
            ],
            "Snapshot": [
                "child_type",
                "parent_type",
            ],
            "TaskGroupObject": [
                "object_type",
            ],
        }
        # change to add special values cases
        special_cases = {
            "CustomAttributeDefinition": {
                "fields": [
                    "definition_type",
                ],
                "old_values": ["section", "clause"],
                "new_value": "requirement",
            }
        }

        obj_type = self.resource_type

        # populate fields if they contain old names
        if obj_type in affected_models.keys():
            for field in affected_models[obj_type]:
                if populated_content.get(field) in requirement_type:
                    populated_content[field] = "Requirement"

        # populate fields for models that contain old names in special spelling
        if obj_type in special_cases.keys():
            for field in special_cases[obj_type]["fields"]:
                if populated_content[field] in special_cases[obj_type][
                        "old_values"]:
                    populated_content[field] = special_cases[obj_type][
                        "new_value"]

        # populate Requirements revisions
        if obj_type == "Requirement":
            populated_content["type"] = "Requirement"

            acls = populated_content.get("access_control_list", {})
            if acls:
                for acl in acls:
                    if acl.get("object_type") in requirement_type:
                        acl["object_type"] = "Requirement"
                populated_content["access_control_list"] = acls

            cavs = populated_content.get("custom_attribute_values", {})
            if cavs:
                for cav in cavs:
                    if cav.get("attributable_type") in requirement_type:
                        cav["attributable_type"] = "Requirement"
                populated_content["custom_attribute_values"] = cavs

    @builder.simple_property
    def content(self):
        """Property. Contains the revision content dict.

    Updated by required values, generated from saved content dict."""
        # pylint: disable=too-many-locals
        populated_content = self._content.copy()
        populated_content.update(self.populate_acl())
        populated_content.update(self.populate_reference_url())
        populated_content.update(self.populate_folder())
        populated_content.update(self.populate_labels())
        populated_content.update(self.populate_status())
        populated_content.update(self.populate_review_status())
        populated_content.update(self._document_evidence_hack())
        populated_content.update(self.populate_categoies("categories"))
        populated_content.update(self.populate_categoies("assertions"))
        populated_content.update(self.populate_cad_default_values())
        populated_content.update(self.populate_cavs())

        self.populate_requirements(populated_content)
        # remove custom_attributes; it is the old-style interface and is no
        # longer needed
        populated_content.pop("custom_attributes", None)

        return populated_content

    @content.setter
    def content(self, value):
        """ Setter for content property."""
        self._content = value
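
# Illustration (assumption, not the model's code): a standalone sketch of the
# populate_status idea above. Legacy status values of the listed object types
# are folded into the current "Active" / "Deprecated" / "Draft" set; anything
# unknown falls back to "Draft".
LEGACY_STATUS_MAPPING = {
    "Active": "Active",
    "Deprecated": "Deprecated",
    "Effective": "Active",
    "Final": "Active",
    "In Scope": "Active",
    "Ineffective": "Active",
    "Launched": "Active",
}


def normalize_legacy_status(status):
    """Map a pre-migration status value onto the current status set."""
    return LEGACY_STATUS_MAPPING.get(status, "Draft")


print(normalize_legacy_status("Effective"))  # -> "Active"
print(normalize_legacy_status("Unknown"))    # -> "Draft"
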
Example #21
class Revision(Base, db.Model):
  """Revision object holds a JSON snapshot of the object at a time."""

  __tablename__ = 'revisions'

  resource_id = db.Column(db.Integer, nullable=False)
  resource_type = db.Column(db.String, nullable=False)
  event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False)
  action = db.Column(db.Enum(u'created', u'modified', u'deleted'),
                     nullable=False)
  _content = db.Column('content', LongJsonType, nullable=False)

  resource_slug = db.Column(db.String, nullable=True)
  source_type = db.Column(db.String, nullable=True)
  source_id = db.Column(db.Integer, nullable=True)
  destination_type = db.Column(db.String, nullable=True)
  destination_id = db.Column(db.Integer, nullable=True)

  @staticmethod
  def _extra_table_args(_):
    return (
        db.Index("revisions_modified_by", "modified_by_id"),
        db.Index("fk_revisions_resource", "resource_type", "resource_id"),
        db.Index("fk_revisions_source", "source_type", "source_id"),
        db.Index("fk_revisions_destination",
                 "destination_type", "destination_id"),
        db.Index('ix_revisions_resource_slug', 'resource_slug'),
    )

  _api_attrs = reflection.ApiAttributes(
      'resource_id',
      'resource_type',
      'source_type',
      'source_id',
      'destination_type',
      'destination_id',
      'action',
      'content',
      'description',
      reflection.Attribute('diff_with_current', create=False, update=False),
  )

  @classmethod
  def eager_query(cls):
    from sqlalchemy import orm

    query = super(Revision, cls).eager_query()
    return query.options(
        orm.subqueryload('modified_by'),
        orm.subqueryload('event'),  # used in description
    )

  def __init__(self, obj, modified_by_id, action, content):
    self.resource_id = obj.id
    self.resource_type = obj.__class__.__name__
    self.resource_slug = getattr(obj, "slug", None)
    self.modified_by_id = modified_by_id
    self.action = action
    if "access_control_list" in content and content["access_control_list"]:
      for acl in content["access_control_list"]:
        acl["person"] = {
            "id": acl["person_id"],
            "type": "Person",
            "href": "/api/people/{}".format(acl["person_id"]),
        }

    self._content = content

    for attr in ["source_type",
                 "source_id",
                 "destination_type",
                 "destination_id"]:
      setattr(self, attr, getattr(obj, attr, None))

  @builder.callable_property
  def diff_with_current(self):
    referenced_objects.mark_to_cache(self.resource_type, self.resource_id)
    revisions_diff.mark_for_latest_content(self.resource_type,
                                           self.resource_id)

    def lazy_loader():
      """Lazy load diff for revisions."""
      referenced_objects.rewarm_cache()
      revisions_diff.rewarm_latest_content()
      instance = referenced_objects.get(self.resource_type, self.resource_id)
      if instance:
        return revisions_diff.prepare(instance, self.content)

    return lazy_loader

  @builder.simple_property
  def description(self):
    """Compute a human readable description from action and content."""
    if 'display_name' not in self._content:
      return ''
    display_name = self._content['display_name']
    if not display_name:
      result = u"{0} {1}".format(self.resource_type, self.action)
    elif u'<->' in display_name:
      if self.action == 'created':
        msg = u"{destination} linked to {source}"
      elif self.action == 'deleted':
        msg = u"{destination} unlinked from {source}"
      else:
        msg = u"{display_name} {action}"
      source, destination = self._content['display_name'].split('<->')[:2]
      result = msg.format(source=source,
                          destination=destination,
                          display_name=self._content['display_name'],
                          action=self.action)
    elif 'mapped_directive' in self._content:
      # then this is a special case of combined map/creation
      # should happen only for Section and Control
      mapped_directive = self._content['mapped_directive']
      if self.action == 'created':
        result = u"New {0}, {1}, created and mapped to {2}".format(
            self.resource_type,
            display_name,
            mapped_directive
        )
      elif self.action == 'deleted':
        result = u"{0} unmapped from {1} and deleted".format(
            display_name, mapped_directive)
      else:
        result = u"{0} {1}".format(display_name, self.action)
    else:
      # otherwise, it's a normal creation event
      result = u"{0} {1}".format(display_name, self.action)
    if self.event.action == "BULK":
      result += ", via bulk action"
    return result

  def populate_reference_url(self):
    """Add reference_url info for older revisions."""
    if 'url' not in self._content:
      return {}
    reference_url_list = []
    for key in ('url', 'reference_url'):
      link = self._content[key]
      # link might exist, but can be an empty string - we treat those values
      # as non-existing (empty) reference URLs
      if not link:
        continue

      # if creation/modification date is not available, we estimate it by
      # using the corresponding information from the Revision itself
      created_at = (self._content.get("created_at") or
                    self.created_at.isoformat())
      updated_at = (self._content.get("updated_at") or
                    self.updated_at.isoformat())

      reference_url_list.append({
          "display_name": link,
          "document_type": "REFERENCE_URL",
          "link": link,
          "title": link,
          "id": None,
          "created_at": created_at,
          "updated_at": updated_at,
      })
    return {'reference_url': reference_url_list}

  def populate_acl(self):
    """Add access_control_list info for older revisions."""
    roles_dict = role.get_custom_roles_for(self.resource_type)
    reverted_roles_dict = {n: i for i, n in roles_dict.iteritems()}
    access_control_list = self._content.get("access_control_list") or []
    map_field_to_role = {
        "principal_assessor": reverted_roles_dict.get("Principal Assignees"),
        "secondary_assessor": reverted_roles_dict.get("Secondary Assignees"),
        "contact": reverted_roles_dict.get("Primary Contacts"),
        "secondary_contact": reverted_roles_dict.get("Secondary Contacts"),
        "owners": reverted_roles_dict.get("Admin"),
    }
    exists_roles = {i["ac_role_id"] for i in access_control_list}
    for field, role_id in map_field_to_role.items():
      if field not in self._content:
        continue
      if role_id in exists_roles or role_id is None:
        continue
      field_content = self._content.get(field) or {}
      if not field_content:
        continue
      if not isinstance(field_content, list):
        field_content = [field_content]
      person_ids = {fc.get("id") for fc in field_content if fc.get("id")}
      for person_id in person_ids:
        access_control_list.append({
            "display_name": roles_dict[role_id],
            "ac_role_id": role_id,
            "context_id": None,
            "created_at": None,
            "object_type": self.resource_type,
            "updated_at": None,
            "object_id": self.resource_id,
            "modified_by_id": None,
            "person_id": person_id,
            # Frontend requires data in this format
            "person": {
                "id": person_id,
                "type": "Person",
                "href": "/api/people/{}".format(person_id)
            },
            "modified_by": None,
            "id": None,
        })
    for acl in access_control_list:
      if "person" not in acl:
        acl["person"] = {"id": acl.get("person_id"), "type": "Person"}
    return {"access_control_list": access_control_list}

  def populate_folder(self):
    """Add folder info for older revisions."""
    if "folder" in self._content:
      return {}
    folders = self._content.get("folders") or [{"id": ""}]
    return {"folder": folders[0]["id"]}

  def populate_labels(self):
    """Add labels info for older revisions."""
    if "label" not in self._content:
      return {}
    label = self._content["label"]
    return {"labels": [{"id": None,
                        "name": label}]} if label else {"labels": []}

  def _document_evidence_hack(self):
    """Update display_name on evideces

    Evidences have display names from links and titles, and until now they used
    slug property to calculate the display name. This hack is here since we
    must support older revisions with bad data, and to avoid using slug
    differently than everywhere else in the app.

    This function only modifies existing evidence entries on any given object.
    If an object has no document evidences, an empty dict is
    returned.

    Returns:
      dict with updated display name for each of the evidence entries if there
      are any.
    """
    if "document_evidence" not in self._content:
      return {}
    document_evidence = self._content.get("document_evidence")
    for evidence in document_evidence:
      evidence[u"display_name"] = u"{link} {title}".format(
          link=evidence.get("link"),
          title=evidence.get("title"),
      ).strip()
    return {u"document_evidence": document_evidence}

  def populate_categoies(self, key_name):
    """Fix revision logger.

    On controls in category field was loged categorization instances."""
    if self.resource_type != "Control":
      return {}
    result = []
    for categorization in self._content.get(key_name) or []:
      if "category_id" in categorization:
        result.append({
            "id": categorization["category_id"],
            "type": categorization["category_type"],
            "name": categorization["display_name"],
            "display_name": categorization["display_name"],
        })
      else:
        result.append(categorization)
    return {key_name: result}

  @builder.simple_property
  def content(self):
    """Property. Contains the revision content dict.

    Updated by required values, generated from saved content dict."""
    # pylint: disable=too-many-locals
    populated_content = self._content.copy()
    populated_content.update(self.populate_acl())
    populated_content.update(self.populate_reference_url())
    populated_content.update(self.populate_folder())
    populated_content.update(self.populate_labels())
    populated_content.update(self._document_evidence_hack())
    populated_content.update(self.populate_categoies("categories"))
    populated_content.update(self.populate_categoies("assertions"))
    return populated_content

  @content.setter
  def content(self, value):
    """ Setter for content property."""
    self._content = value
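
# Illustration (assumption, not the model's code): a standalone sketch of the
# relationship branch of the description property above. For relationship
# revisions the display_name looks like "Source <-> Destination", and the
# action decides between a "linked" and an "unlinked" message.
def describe_relationship_revision(display_name, action):
    """Build a human-readable description for a relationship revision."""
    source, destination = display_name.split('<->')[:2]
    if action == 'created':
        template = u"{destination} linked to {source}"
    elif action == 'deleted':
        template = u"{destination} unlinked from {source}"
    else:
        template = u"{display_name} {action}"
    return template.format(source=source.strip(),
                           destination=destination.strip(),
                           display_name=display_name,
                           action=action)


print(describe_relationship_revision(u"Control-1 <-> Audit-2", "created"))
# -> "Audit-2 linked to Control-1"
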
Example #22
class Document(Roleable, Relatable, Base, Indexed, db.Model):
    """Audit model."""
    __tablename__ = 'documents'

    # TODO: inherit from Titled mixin (note: title is nullable here)
    title = deferred(db.Column(db.String), 'Document')
    link = deferred(db.Column(db.String), 'Document')
    description = deferred(db.Column(db.Text, nullable=False, default=u""),
                           'Document')
    kind_id = db.Column(db.Integer, db.ForeignKey('options.id'), nullable=True)
    year_id = db.Column(db.Integer, db.ForeignKey('options.id'), nullable=True)
    language_id = db.Column(db.Integer,
                            db.ForeignKey('options.id'),
                            nullable=True)

    URL = "URL"
    ATTACHMENT = "EVIDENCE"
    REFERENCE_URL = "REFERENCE_URL"
    VALID_DOCUMENT_TYPES = [URL, ATTACHMENT, REFERENCE_URL]
    document_type = deferred(
        db.Column(db.Enum(*VALID_DOCUMENT_TYPES), default=URL, nullable=False),
        'Document')

    kind = db.relationship(
        'Option',
        primaryjoin='and_(foreign(Document.kind_id) == Option.id, '
        'Option.role == "reference_type")',
        uselist=False,
        lazy="joined",
    )
    year = db.relationship(
        'Option',
        primaryjoin='and_(foreign(Document.year_id) == Option.id, '
        'Option.role == "document_year")',
        uselist=False,
        lazy="joined",
    )
    language = db.relationship(
        'Option',
        primaryjoin='and_(foreign(Document.language_id) == Option.id, '
        'Option.role == "language")',
        uselist=False,
        lazy="joined",
    )

    _fulltext_attrs = [
        'title',
        'link',
        'description',
        "document_type",
    ]

    _api_attrs = reflection.ApiAttributes(
        'title',
        'link',
        'description',
        'kind',
        'year',
        'language',
        "document_type",
    )

    _sanitize_html = [
        'title',
        'description',
    ]

    _aliases = {
        'title': "Title",
        'link': "Link",
        'description': "description",
    }

    @orm.validates('kind', 'year', 'language')
    def validate_document_options(self, key, option):
        """Returns correct option, otherwise rises an error"""
        if key == 'year':
            desired_role = 'document_year'
        elif key == 'kind':
            desired_role = 'reference_type'
        else:
            desired_role = key
        return validate_option(self.__class__.__name__, key, option,
                               desired_role)

    @orm.validates('document_type')
    def validate_document_type(self, key, document_type):
        """Returns correct option, otherwise rises an error"""
        if document_type is None:
            document_type = self.URL
        if document_type not in self.VALID_DOCUMENT_TYPES:
            raise exceptions.ValidationError(
                "Invalid value for attribute {attr}. "
                "Expected options are `{url}`, `{attachment}`, `{reference_url}`"
                .format(attr=key,
                        url=self.URL,
                        attachment=self.ATTACHMENT,
                        reference_url=self.REFERENCE_URL))
        return document_type

    @classmethod
    def indexed_query(cls):
        return super(Document, cls).indexed_query().options(
            orm.Load(cls).undefer_group("Document_complete", ), )

    @classmethod
    def eager_query(cls):
        return super(Document, cls).eager_query().options(
            orm.joinedload('kind'),
            orm.joinedload('year'),
            orm.joinedload('language'),
        )

    @hybrid_property
    def slug(self):
        if self.document_type in (self.URL, self.REFERENCE_URL):
            return self.link
        return u"{} {}".format(self.link, self.title)

    # pylint: disable=no-self-argument
    @slug.expression
    def slug(cls):
        return case([(cls.document_type == cls.ATTACHMENT,
                      func.concat(cls.link, ' ', cls.title))],
                    else_=cls.link)

    def log_json(self):
        tmp = super(Document, self).log_json()
        tmp['type'] = "Document"
        return tmp
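
# Illustration (assumption, not the model's code): a standalone sketch of the
# slug rule above. URL and REFERENCE_URL documents are identified by their
# link alone, while EVIDENCE attachments combine link and title.
def document_slug(document_type, link, title):
    """Return the display slug used for a document."""
    if document_type in ("URL", "REFERENCE_URL"):
        return link
    return u"{} {}".format(link, title)


print(document_slug("URL", "https://example.com/spec", "Spec"))
# -> "https://example.com/spec"
print(document_slug("EVIDENCE", "gdrive://123", "evidence.pdf"))
# -> "gdrive://123 evidence.pdf"
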
Example #23
class Document(Roleable, Relatable, Base, mixins.Titled, Indexed, db.Model):
  """Audit model."""
  __tablename__ = 'documents'

  _title_uniqueness = False

  link = deferred(db.Column(db.String, nullable=False), 'Document')
  description = deferred(db.Column(db.Text, nullable=False, default=u""),
                         'Document')

  URL = "URL"
  ATTACHMENT = "EVIDENCE"
  REFERENCE_URL = "REFERENCE_URL"
  VALID_DOCUMENT_TYPES = [URL, ATTACHMENT, REFERENCE_URL]
  document_type = deferred(db.Column(db.Enum(*VALID_DOCUMENT_TYPES),
                                     default=URL,
                                     nullable=False),
                           'Document')

  _fulltext_attrs = [
      'title',
      'link',
      'description',
      "document_type",
  ]

  _api_attrs = reflection.ApiAttributes(
      'title',
      'link',
      'description',
      "document_type",
  )

  _sanitize_html = [
      'title',
      'description',
  ]

  _aliases = {
      'title': "Title",
      'link': "Link",
      'description': "description",
  }

  @orm.validates('document_type')
  def validate_document_type(self, key, document_type):
    """Returns correct option, otherwise rises an error"""
    if document_type is None:
      document_type = self.URL
    if document_type not in self.VALID_DOCUMENT_TYPES:
      raise exceptions.ValidationError(
          "Invalid value for attribute {attr}. "
          "Expected options are `{url}`, `{attachment}`, `{reference_url}`".
          format(
              attr=key,
              url=self.URL,
              attachment=self.ATTACHMENT,
              reference_url=self.REFERENCE_URL
          )
      )
    return document_type

  @classmethod
  def indexed_query(cls):
    return super(Document, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Document_complete",
        ),
    )

  @hybrid_property
  def slug(self):
    if self.document_type in (self.URL, self.REFERENCE_URL):
      return self.link
    return u"{} {}".format(self.link, self.title)

  # pylint: disable=no-self-argument
  @slug.expression
  def slug(cls):
    return case([(cls.document_type == cls.ATTACHMENT,
                 func.concat(cls.link, ' ', cls.title))],
                else_=cls.link)

  def log_json(self):
    tmp = super(Document, self).log_json()
    tmp['type'] = "Document"
    return tmp
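
# Illustration (assumption, not the model's code): a standalone sketch of the
# document_type validation above. A missing value falls back to URL, and
# anything outside the allowed set raises an error.
VALID_DOCUMENT_TYPES = ["URL", "EVIDENCE", "REFERENCE_URL"]


def validate_document_type(document_type):
    """Return a valid document type, defaulting to URL."""
    if document_type is None:
        return "URL"
    if document_type not in VALID_DOCUMENT_TYPES:
        raise ValueError(
            "Invalid document_type {!r}; expected one of {}".format(
                document_type, ", ".join(VALID_DOCUMENT_TYPES)))
    return document_type


print(validate_document_type(None))        # -> "URL"
print(validate_document_type("EVIDENCE"))  # -> "EVIDENCE"
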
Example #24
class Workflow(roleable.Roleable, relationship.Relatable,
               mixins.CustomAttributable, HasOwnContext, mixins.Timeboxed,
               mixins.Described, mixins.Titled, mixins.Notifiable,
               mixins.Stateful, base.ContextRBAC, mixins.Slugged,
               mixins.Folderable, Indexed, db.Model):
    """Basic Workflow first class object.
  """
    __tablename__ = 'workflows'
    _title_uniqueness = False

    DRAFT = u"Draft"
    ACTIVE = u"Active"
    INACTIVE = u"Inactive"
    VALID_STATES = [DRAFT, ACTIVE, INACTIVE]

    @classmethod
    def default_status(cls):
        return cls.DRAFT

    notify_on_change = deferred(
        db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
    notify_custom_message = deferred(
        db.Column(db.Text, nullable=False, default=u""), 'Workflow')

    object_approval = deferred(
        db.Column(db.Boolean, default=False, nullable=False), 'Workflow')

    recurrences = db.Column(db.Boolean, default=False, nullable=False)

    task_groups = db.relationship('TaskGroup',
                                  backref='_workflow',
                                  cascade='all, delete-orphan')

    cycles = db.relationship('Cycle',
                             backref='_workflow',
                             cascade='all, delete-orphan')

    next_cycle_start_date = db.Column(db.Date, nullable=True)

    non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

    # indicates whether the workflow existed before the change that deleted
    # cycle objects, which changed how the cycle is created and how objects
    # are mapped to the cycle tasks
    is_old_workflow = deferred(
        db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

    IS_VERIFICATION_NEEDED_DEFAULT = True
    is_verification_needed = db.Column(db.Boolean,
                                       default=IS_VERIFICATION_NEEDED_DEFAULT,
                                       nullable=False)

    repeat_every = deferred(db.Column(db.Integer, nullable=True, default=None),
                            'Workflow')
    DAY_UNIT = 'day'
    WEEK_UNIT = 'week'
    MONTH_UNIT = 'month'
    VALID_UNITS = (DAY_UNIT, WEEK_UNIT, MONTH_UNIT)
    unit = deferred(
        db.Column(db.Enum(*VALID_UNITS), nullable=True, default=None),
        'Workflow')
    repeat_multiplier = deferred(
        db.Column(db.Integer, nullable=False, default=0), 'Workflow')

    UNIT_FREQ_MAPPING = {
        None: "one_time",
        DAY_UNIT: "daily",
        WEEK_UNIT: "weekly",
        MONTH_UNIT: "monthly"
    }

    # pylint: disable=unnecessary-lambda
    REPEAT_MAPPING = {
        None: lambda px, sx: "off",
        DAY_UNIT: lambda px, sx: "every {}weekday{}".format(px, sx),
        WEEK_UNIT: lambda px, sx: "every {}week{}".format(px, sx),
        MONTH_UNIT: lambda px, sx: "every {}month{}".format(px, sx)
    }
    REPEAT_ORDER_MAPPING = {None: 0, DAY_UNIT: 1, WEEK_UNIT: 2, MONTH_UNIT: 3}

    @hybrid.hybrid_property
    def frequency(self):
        """Hybrid property for SearchAPI filtering backward compatibility"""
        return self.UNIT_FREQ_MAPPING[self.unit]

    @frequency.expression
    def frequency(self):
        """Hybrid property for SearchAPI filtering backward compatibility"""
        return case([
            (self.unit.is_(None), self.UNIT_FREQ_MAPPING[None]),
            (self.unit == self.DAY_UNIT,
             self.UNIT_FREQ_MAPPING[self.DAY_UNIT]),
            (self.unit == self.WEEK_UNIT,
             self.UNIT_FREQ_MAPPING[self.WEEK_UNIT]),
            (self.unit == self.MONTH_UNIT,
             self.UNIT_FREQ_MAPPING[self.MONTH_UNIT]),
        ])

    @classmethod
    def _get_repeat(cls, unit, repeat_every):
        """Return repeat field representation for QueryAPI"""
        if repeat_every is None or repeat_every == 1:
            prefix, suffix = "", ""
        else:
            prefix, suffix = "{} ".format(repeat_every), "s"

        func = cls.REPEAT_MAPPING[unit]
        return func(prefix, suffix)

    @hybrid.hybrid_property
    def repeat(self):
        """Hybrid property for filtering in QueryAPI"""
        return self._get_repeat(self.unit, self.repeat_every)

    @repeat.expression
    def repeat(self):
        """Hybrid property for filtering in QueryAPI"""
        case_ = [(self.unit.is_(None), self.REPEAT_MAPPING[None](None, None))]
        case_.extend(
            ((self.unit == unit) & (self.repeat_every == repeat_every),
             self._get_repeat(unit, repeat_every)) for unit in self.VALID_UNITS
            for repeat_every in xrange(1, 31))

        return case(case_)

    @property
    def repeat_order(self):
        """Property for ordering in QueryAPI"""
        unit_map = self.REPEAT_ORDER_MAPPING[self.unit]
        repeat_every_map = self.repeat_every or 0

        return u"{:0>4}_{:0>4}".format(unit_map, repeat_every_map)

    @builder.simple_property
    def can_start_cycle(self):
        """Can start cycle.

    Boolean property, returns True if all task groups have at least one
    task group task, False otherwise.
    """
        return not any(tg for tg in self.task_groups if not tg.task_group_tasks) \
            if self.task_groups else False

    @property
    def tasks(self):
        return list(
            itertools.chain(*[t.task_group_tasks for t in self.task_groups]))

    @property
    def min_task_start_date(self):
        """Fetches non adjusted setup cycle start date based on TGT user's setup.

    Args:
        self: Workflow instance.

    Returns:
        Date when first cycle should be started based on user's setup.
    """
        tasks = self.tasks
        min_date = None
        for task in tasks:
            min_date = min(task.start_date, min_date or task.start_date)
        return min_date

    WORK_WEEK_LEN = 5

    @classmethod
    def first_work_day(cls, day):
        """Get first work day."""
        while day.isoweekday() > cls.WORK_WEEK_LEN:
            day -= relativedelta.relativedelta(days=1)
        return day

    def calc_next_adjusted_date(self, setup_date):
        """Calculates adjusted date which are expected in next cycle.

    Args:
        setup_date: Date which was setup by user.

    Returns:
        Adjusted date which are expected to be in next Workflow cycle.
    """
        if self.repeat_every is None or self.unit is None:
            return self.first_work_day(setup_date)
        try:
            key = {
                self.WEEK_UNIT: "weeks",
                self.MONTH_UNIT: "months",
                self.DAY_UNIT: "days",
            }[self.unit]
        except KeyError:
            raise ValueError("Invalid Workflow unit")
        repeater = self.repeat_every * self.repeat_multiplier
        if self.unit == self.DAY_UNIT:
            weeks = repeater / self.WORK_WEEK_LEN
            days = repeater % self.WORK_WEEK_LEN
            # add weekend days if the span crosses into a weekend
            days += ((setup_date.isoweekday() + days) > self.WORK_WEEK_LEN) * 2
            # relativedelta only needs the relative keywords; a positional
            # setup_date argument would be ignored by dateutil
            return setup_date + relativedelta.relativedelta(
                weeks=weeks, days=days)
        calc_date = setup_date + relativedelta.relativedelta(**{key: repeater})
        if self.unit == self.MONTH_UNIT:
            # check if setup date is the last day of the month
            # and if it is then calc_date should be the last day of the month too
            setup_day = calendar.monthrange(setup_date.year,
                                            setup_date.month)[1]
            if setup_day == setup_date.day:
                calc_date = datetime.date(
                    calc_date.year, calc_date.month,
                    calendar.monthrange(calc_date.year, calc_date.month)[1])
        return self.first_work_day(calc_date)
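        # Illustrative walk-through (hypothetical values, not from the source):
        # with unit="week", repeat_every=2 and repeat_multiplier=3, repeater is
        # 6, so a setup_date of Fri 2018-06-01 maps to Fri 2018-07-13; a result
        # falling on a weekend would be rolled back to Friday by first_work_day().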

    @orm.validates('repeat_every')
    def validate_repeat_every(self, _, value):
        """Validate repeat_every field for Workflow.

        repeat_every must be a positive integer or None; 0 is not allowed.
        """
        if value is not None and not isinstance(value, (int, long)):
            raise ValueError("'repeat_every' should be integer or 'null'")
        if value is not None and value <= 0:
            raise ValueError(
                "'repeat_every' should be strictly greater than 0")
        return value

    @orm.validates('unit')
    def validate_unit(self, _, value):
        """Validate unit field for Workflow.

        Unit must be one of the values in VALID_UNITS, or None.
        """
        if value is not None and value not in self.VALID_UNITS:
            raise ValueError("'unit' field should be one of the "
                             "value: null, {}".format(", ".join(
                                 self.VALID_UNITS)))
        return value

    @orm.validates('is_verification_needed')
    def validate_is_verification_needed(self, _, value):
        # pylint: disable=unused-argument
        """Validate is_verification_needed field for Workflow.

        The is_verification_needed flag cannot be changed after creation.
        If is_verification_needed is not sent, the flag defaults to True.
        """
        if self.is_verification_needed is None:
            return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
        if value is None:
            return self.is_verification_needed
        if self.status != self.DRAFT and value != self.is_verification_needed:
            raise ValueError("is_verification_needed value isn't changeble "
                             "on workflow with '{}' status".format(
                                 self.status))
        return value

    @builder.simple_property
    def workflow_state(self):
        return WorkflowState.get_workflow_state(self.cycles)

    @property
    def workflow_archived(self):
        """Determines whether workflow is archived."""
        return bool(self.unit and not self.recurrences
                    and self.next_cycle_start_date)

    _sanitize_html = [
        'notify_custom_message',
    ]

    _fulltext_attrs = [
        attributes.CustomOrderingFullTextAttr('repeat',
                                              'repeat',
                                              order_prop_getter='repeat_order')
    ]

    _api_attrs = reflection.ApiAttributes(
        'task_groups', 'notify_on_change', 'notify_custom_message', 'cycles',
        'recurrences', 'is_verification_needed', 'repeat_every', 'unit',
        reflection.Attribute('object_approval', update=False),
        reflection.Attribute('next_cycle_start_date',
                             create=False,
                             update=False),
        reflection.Attribute('can_start_cycle', create=False, update=False),
        reflection.Attribute('non_adjusted_next_cycle_start_date',
                             create=False,
                             update=False),
        reflection.Attribute('workflow_state', create=False, update=False),
        reflection.Attribute('repeat', create=False, update=False))

    _aliases = {
        "repeat_every": {
            "display_name":
            "Repeat Every",
            "description":
            "'Repeat Every' value\nmust fall into\nthe range 1~30"
            "\nor '-' for None",
        },
        "unit": {
            "display_name":
            "Unit",
            "description":
            "Allowed values for\n'Unit' are:\n{}"
            "\nor '-' for None".format("\n".join(VALID_UNITS)),
        },
        "is_verification_needed": {
            "display_name":
            "Need Verification",
            "mandatory":
            True,
            "description":
            "This field is not changeable\nafter "
            "workflow activation.",
        },
        "notify_custom_message": "Custom email message",
        "notify_on_change": {
            "display_name": "Force real-time email updates",
            "mandatory": False,
        },
        "status": None,
        "start_date": None,
        "end_date": None,
    }

    def copy(self, _other=None, **kwargs):
        """Create a partial copy of the current workflow.
    """
        columns = [
            'title', 'description', 'notify_on_change',
            'notify_custom_message', 'end_date', 'start_date', 'repeat_every',
            'unit', 'is_verification_needed'
        ]
        if kwargs.get('clone_people', False):
            access_control_list = [{
                "ac_role_id": acl.ac_role.id,
                "person": {
                    "id": person.id
                }
            } for person, acl in self.access_control_list]
        else:
            role_id = {
                name: ind
                for (ind,
                     name) in role.get_custom_roles_for(self.type).iteritems()
            }['Admin']
            access_control_list = [{
                "ac_role_id": role_id,
                "person": {
                    "id": get_current_user().id
                }
            }]
        target = self.copy_into(_other,
                                columns,
                                access_control_list=access_control_list,
                                **kwargs)
        return target

    def copy_task_groups(self, target, **kwargs):
        """Copy all task groups and tasks mapped to this workflow.
    """
        for task_group in self.task_groups:
            obj = task_group.copy(
                workflow=target,
                context=target.context,
                clone_people=kwargs.get("clone_people", False),
                clone_objects=kwargs.get("clone_objects", False),
                modified_by=get_current_user(),
            )
            target.task_groups.append(obj)

            if kwargs.get("clone_tasks"):
                task_group.copy_tasks(
                    obj,
                    clone_people=kwargs.get("clone_people", False),
                    clone_objects=kwargs.get("clone_objects", True))

        return target

    @classmethod
    def eager_query(cls, **kwargs):
        return super(Workflow, cls).eager_query(**kwargs).options(
            orm.subqueryload('cycles').undefer_group('Cycle_complete').
            subqueryload("cycle_task_group_object_tasks").undefer_group(
                "CycleTaskGroupObjectTask_complete"),
            orm.subqueryload('task_groups').undefer_group(
                'TaskGroup_complete'),
            orm.subqueryload('task_groups').subqueryload(
                "task_group_tasks").undefer_group('TaskGroupTask_complete'),
        )

    @classmethod
    def indexed_query(cls):
        return super(Workflow, cls).indexed_query().options(
            orm.Load(cls).undefer_group("Workflow_complete", ), )
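The frequency and repeat attributes above use SQLAlchemy hybrid properties so that the same attribute works both on loaded instances and inside SQL filters. A minimal, self-contained sketch of that pattern, assuming Python 3, SQLAlchemy 1.4+, and made-up model names (not GGRC code):

import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.ext.hybrid import hybrid_property

Base = orm.declarative_base()

UNIT_FREQ_MAPPING = {None: "one_time", "day": "daily",
                     "week": "weekly", "month": "monthly"}


class Job(Base):
    __tablename__ = "jobs"
    id = sa.Column(sa.Integer, primary_key=True)
    unit = sa.Column(sa.Enum("day", "week", "month"), nullable=True)

    @hybrid_property
    def frequency(self):
        # Python side: plain dict lookup on a loaded instance.
        return UNIT_FREQ_MAPPING[self.unit]

    @frequency.expression
    def frequency(cls):
        # SQL side: the same attribute renders as a CASE expression.
        return sa.case(
            (cls.unit == "day", "daily"),
            (cls.unit == "week", "weekly"),
            (cls.unit == "month", "monthly"),
            else_="one_time",
        )


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
with orm.Session(engine) as session:
    session.add_all([Job(unit=None), Job(unit="week")])
    session.commit()
    print(session.query(Job).filter(Job.frequency == "weekly").count())  # 1

The Workflow code above uses the older case([(condition, value), ...]) list form, which is the same construct in pre-1.4 SQLAlchemy.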
Example #25
class Request(statusable.Statusable, AutoStatusChangeable, Assignable,
              EvidenceURL, Personable, CustomAttributable, Notifiable,
              relationship.Relatable, WithSimilarityScore, Titled, Slugged,
              Described, Commentable, FinishedDate, VerifiedDate, Base,
              db.Model):
    """Class representing Requests.

    Request is an object representing a request from a Requester to an
    Assignee to provide feedback, evidence, or attachments in the form of
    comments, documents, or URLs that the Verifier (if specified) has to
    approve before the Request is considered finished.
    """
    __tablename__ = 'requests'
    _title_uniqueness = False

    VALID_TYPES = (u'documentation', u'interview')

    ASSIGNEE_TYPES = (u'Assignee', u'Requester', u'Verifier')

    similarity_options = similarity_options_module.REQUEST

    # TODO Remove requestor and requestor_id on database cleanup
    requestor_id = db.Column(db.Integer, db.ForeignKey('people.id'))
    requestor = db.relationship('Person', foreign_keys=[requestor_id])

    # TODO Remove request_type on database cleanup
    request_type = deferred(db.Column(db.Enum(*VALID_TYPES), nullable=False),
                            'Request')

    start_date = deferred(
        db.Column(db.Date, nullable=False, default=date.today), 'Request')

    end_date = deferred(
        db.Column(db.Date,
                  nullable=False,
                  default=lambda: date.today() + timedelta(7)), 'Request')

    # TODO Remove audit_id audit_object_id on database cleanup
    audit_id = db.Column(db.Integer,
                         db.ForeignKey('audits.id'),
                         nullable=False)
    audit_object_id = db.Column(db.Integer,
                                db.ForeignKey('audit_objects.id'),
                                nullable=True)
    gdrive_upload_path = deferred(db.Column(db.String, nullable=True),
                                  'Request')
    # TODO Remove test and notes columns on database cleanup
    test = deferred(db.Column(db.Text, nullable=True), 'Request')
    notes = deferred(db.Column(db.Text, nullable=True), 'Request')

    _publish_attrs = [
        'requestor', 'request_type', 'gdrive_upload_path', 'start_date',
        'end_date', 'status', 'audit', 'test', 'notes', 'title', 'description'
    ]

    _tracked_attrs = ((set(_publish_attrs) | {'slug'}) - {'status'})

    _sanitize_html = [
        'gdrive_upload_path', 'test', 'notes', 'description', 'title'
    ]

    _aliases = {
        "request_audit": {
            "display_name": "Audit",
            "filter_by": "_filter_by_request_audit",
            "mandatory": True,
        },
        "end_date": "Due On",
        "notes": "Notes",
        "request_type": "Request Type",
        "start_date": "Starts On",
        "status": {
            "display_name": "Status",
            "handler_key": "request_status",
        },
        "test": "Test",
        "related_assignees": {
            "display_name": "Assignee",
            "mandatory": True,
            "filter_by": "_filter_by_related_assignees",
            "type": reflection.AttributeInfo.Type.MAPPING,
        },
        "related_requesters": {
            "display_name": "Requester",
            "mandatory": True,
            "filter_by": "_filter_by_related_requesters",
            "type": reflection.AttributeInfo.Type.MAPPING,
        },
        "related_verifiers": {
            "display_name": "Verifier",
            "filter_by": "_filter_by_related_verifiers",
            "type": reflection.AttributeInfo.Type.MAPPING,
        },
    }

    def _display_name(self):
        # pylint: disable=unsubscriptable-object
        if len(self.title) > 32:
            display_string = self.title[:32] + u'...'
        elif self.title:
            display_string = self.title
        elif len(self.description) > 32:
            display_string = self.description[:32] + u'...'
        else:
            display_string = self.description
        return u'Request with id {0} "{1}" for Audit "{2}"'.format(
            self.id, display_string, self.audit.display_name)

    @classmethod
    def eager_query(cls):
        query = super(Request, cls).eager_query()
        return query.options(orm.joinedload('audit'))

    @classmethod
    def _filter_by_related_assignees(cls, predicate):
        return cls._get_relate_filter(predicate, "Assignee")

    @classmethod
    def _filter_by_related_requesters(cls, predicate):
        return cls._get_relate_filter(predicate, "Requester")

    @classmethod
    def _filter_by_related_verifiers(cls, predicate):
        return cls._get_relate_filter(predicate, "Verifier")

    @classmethod
    def _filter_by_request_audit(cls, predicate):
        return cls.query.filter((audit.Audit.id == cls.audit_id)
                                & (predicate(audit.Audit.slug)
                                   | predicate(audit.Audit.title))).exists()

    @classmethod
    def default_request_type(cls):
        return cls.VALID_TYPES[0]
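The "filter_by" hooks referenced from _aliases above (for example _filter_by_request_audit) return a correlated EXISTS clause that the query builder ANDs into the outer query. A minimal, self-contained sketch of that pattern, using plain SQLAlchemy sessions instead of the Flask-SQLAlchemy cls.query seen above, with assumed model and column names (Python 3, SQLAlchemy 1.4+, not GGRC code):

import sqlalchemy as sa
from sqlalchemy import orm

Base = orm.declarative_base()


class Audit(Base):
    __tablename__ = "audits"
    id = sa.Column(sa.Integer, primary_key=True)
    slug = sa.Column(sa.String, nullable=False)
    title = sa.Column(sa.String, nullable=False)


class Request(Base):
    __tablename__ = "requests"
    id = sa.Column(sa.Integer, primary_key=True)
    audit_id = sa.Column(sa.Integer, sa.ForeignKey("audits.id"))

    @classmethod
    def _filter_by_audit(cls, predicate):
        # Correlated EXISTS: true for a Request whose audit matches the
        # predicate on either slug or title.
        return sa.exists().where(
            (Audit.id == cls.audit_id)
            & (predicate(Audit.slug) | predicate(Audit.title))
        )


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
with orm.Session(engine) as session:
    session.add(Audit(id=1, slug="AUDIT-1", title="Annual audit"))
    session.add(Request(id=1, audit_id=1))
    session.commit()

    def predicate(column):
        return column.ilike("%annual%")

    print(session.query(Request)
          .filter(Request._filter_by_audit(predicate)).count())  # 1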
Example #26
class Workflow(mixins.CustomAttributable,
               HasOwnContext,
               mixins.Timeboxed,
               mixins.Described,
               mixins.Titled,
               mixins.Notifiable,
               mixins.Stateful,
               mixins.Slugged,
               mixins.Folderable,
               Indexed,
               db.Model):
  """Basic Workflow first class object.
  """
  __tablename__ = 'workflows'
  _title_uniqueness = False

  DRAFT = u"Draft"
  ACTIVE = u"Active"
  INACTIVE = u"Inactive"
  VALID_STATES = [DRAFT, ACTIVE, INACTIVE]

  @classmethod
  def default_status(cls):
    return cls.DRAFT

  notify_on_change = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')
  notify_custom_message = deferred(
      db.Column(db.Text, nullable=False, default=u""), 'Workflow')

  object_approval = deferred(
      db.Column(db.Boolean, default=False, nullable=False), 'Workflow')

  recurrences = db.Column(db.Boolean, default=False, nullable=False)

  workflow_people = db.relationship(
      'WorkflowPerson', backref='workflow', cascade='all, delete-orphan')
  people = association_proxy(
      'workflow_people', 'person', 'WorkflowPerson')

  task_groups = db.relationship(
      'TaskGroup', backref='workflow', cascade='all, delete-orphan')

  cycles = db.relationship(
      'Cycle', backref='workflow', cascade='all, delete-orphan')

  next_cycle_start_date = db.Column(db.Date, nullable=True)

  non_adjusted_next_cycle_start_date = db.Column(db.Date, nullable=True)

  # Indicates whether the workflow predates the change that removed cycle
  # objects, which altered how the cycle is created and how objects are
  # mapped to the cycle tasks
  is_old_workflow = deferred(
      db.Column(db.Boolean, default=False, nullable=True), 'Workflow')

  # This column needs to be deferred because one of the migrations
  # uses Workflow as a model and breaks since at that point in time
  # there is no 'kind' column yet
  kind = deferred(
      db.Column(db.String, default=None, nullable=True), 'Workflow')
  IS_VERIFICATION_NEEDED_DEFAULT = True
  is_verification_needed = db.Column(
      db.Boolean,
      default=IS_VERIFICATION_NEEDED_DEFAULT,
      nullable=False)

  repeat_every = deferred(db.Column(db.Integer, nullable=True, default=None),
                          'Workflow')
  DAY_UNIT = 'day'
  WEEK_UNIT = 'week'
  MONTH_UNIT = 'month'
  VALID_UNITS = (DAY_UNIT, WEEK_UNIT, MONTH_UNIT)
  unit = deferred(db.Column(db.Enum(*VALID_UNITS), nullable=True,
                            default=None), 'Workflow')
  repeat_multiplier = deferred(db.Column(db.Integer, nullable=False,
                                         default=0), 'Workflow')

  UNIT_FREQ_MAPPING = {
      None: "one_time",
      DAY_UNIT: "daily",
      WEEK_UNIT: "weekly",
      MONTH_UNIT: "monthly"
  }

  @hybrid.hybrid_property
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return self.UNIT_FREQ_MAPPING[self.unit]

  @frequency.expression
  def frequency(self):
    """Hybrid property for SearchAPI filtering backward compatibility"""
    return case([
        (self.unit.is_(None), self.UNIT_FREQ_MAPPING[None]),
        (self.unit == self.DAY_UNIT, self.UNIT_FREQ_MAPPING[self.DAY_UNIT]),
        (self.unit == self.WEEK_UNIT, self.UNIT_FREQ_MAPPING[self.WEEK_UNIT]),
        (self.unit == self.MONTH_UNIT,
         self.UNIT_FREQ_MAPPING[self.MONTH_UNIT]),
    ])

  @property
  def tasks(self):
    return list(itertools.chain(*[t.task_group_tasks
                                  for t in self.task_groups]))

  @property
  def min_task_start_date(self):
    """Fetches non adjusted setup cycle start date based on TGT user's setup.

    Args:
        self: Workflow instance.

    Returns:
        Date when first cycle should be started based on user's setup.
    """
    tasks = self.tasks
    min_date = None
    for task in tasks:
      min_date = min(task.start_date, min_date or task.start_date)
    return min_date

  WORK_WEEK_LEN = 5

  @classmethod
  def first_work_day(cls, day):
    """Get the latest work day on or before `day`, skipping Google holidays."""
    holidays = google_holidays.GoogleHolidays()
    while day.isoweekday() > cls.WORK_WEEK_LEN or day in holidays:
      day -= relativedelta.relativedelta(days=1)
    return day

  def calc_next_adjusted_date(self, setup_date):
    """Calculates adjusted date which are expected in next cycle.

    Args:
        setup_date: Date which was setup by user.

    Returns:
        Adjusted date which are expected to be in next Workflow cycle.
    """
    if self.repeat_every is None or self.unit is None:
      return self.first_work_day(setup_date)
    try:
      key = {
          self.WEEK_UNIT: "weeks",
          self.MONTH_UNIT: "months",
          self.DAY_UNIT: "days",
      }[self.unit]
    except KeyError:
      raise ValueError("Invalid Workflow unit")
    repeater = self.repeat_every * self.repeat_multiplier
    if self.unit == self.DAY_UNIT:
      weeks = repeater / self.WORK_WEEK_LEN
      days = repeater % self.WORK_WEEK_LEN
      # add weekend days if the span crosses into a weekend
      days += ((setup_date.isoweekday() + days) > self.WORK_WEEK_LEN) * 2
      # relativedelta only needs the relative keywords; a positional
      # setup_date argument would be ignored by dateutil
      return setup_date + relativedelta.relativedelta(weeks=weeks, days=days)
    calc_date = setup_date + relativedelta.relativedelta(**{key: repeater})
    if self.unit == self.MONTH_UNIT:
      # check if setup date is the last day of the month
      # and if it is then calc_date should be the last day of the month too
      setup_day = calendar.monthrange(setup_date.year, setup_date.month)[1]
      if setup_day == setup_date.day:
        calc_date = datetime.date(
            calc_date.year,
            calc_date.month,
            calendar.monthrange(calc_date.year, calc_date.month)[1])
    return self.first_work_day(calc_date)

  @orm.validates('repeat_every')
  def validate_repeat_every(self, _, value):
    """Validate repeat_every field for Workflow.

    repeat_every must be a positive integer or None; 0 is not allowed.
    """
    if value is not None and not isinstance(value, (int, long)):
      raise ValueError("'repeat_every' should be integer or 'null'")
    if value is not None and value <= 0:
      raise ValueError("'repeat_every' should be strictly greater than 0")
    return value

  @orm.validates('unit')
  def validate_unit(self, _, value):
    """Validate unit field for Workflow.

    Unit must be one of the values in VALID_UNITS, or None.
    """
    if value is not None and value not in self.VALID_UNITS:
      raise ValueError("'unit' field should be one of the "
                       "value: null, {}".format(", ".join(self.VALID_UNITS)))
    return value

  @orm.validates('is_verification_needed')
  def validate_is_verification_needed(self, _, value):
    # pylint: disable=unused-argument
    """Validate is_verification_needed field for Workflow.

    The is_verification_needed flag cannot be changed after creation.
    If is_verification_needed is not sent, the flag defaults to True.
    """
    if self.is_verification_needed is None:
      return self.IS_VERIFICATION_NEEDED_DEFAULT if value is None else value
    if value is None:
      return self.is_verification_needed
    if self.status != self.DRAFT and value != self.is_verification_needed:
      raise ValueError("is_verification_needed value isn't changeble "
                       "on workflow with '{}' status".format(self.status))
    return value

  @builder.simple_property
  def workflow_state(self):
    return WorkflowState.get_workflow_state(self.cycles)

  _sanitize_html = [
      'notify_custom_message',
  ]

  _api_attrs = reflection.ApiAttributes(
      'workflow_people',
      reflection.Attribute('people', create=False, update=False),
      'task_groups',
      'notify_on_change',
      'notify_custom_message',
      'cycles',
      'object_approval',
      'recurrences',
      'is_verification_needed',
      'repeat_every',
      'unit',
      reflection.Attribute('next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('non_adjusted_next_cycle_start_date',
                           create=False, update=False),
      reflection.Attribute('workflow_state',
                           create=False, update=False),
      reflection.Attribute('kind',
                           create=False, update=False),
  )

  _aliases = {
      "repeat_every": {
          "display_name": "Repeat Every",
          "description": "'Repeat Every' value\nmust fall into\nthe range 1~30"
                         "\nor '-' for None",
      },
      "unit": {
          "display_name": "Unit",
          "description": "Allowed values for\n'Unit' are:\n{}"
                         "\nor '-' for None".format("\n".join(VALID_UNITS)),
      },
      "is_verification_needed": {
          "display_name": "Need Verification",
          "mandatory": True,
          "description": "This field is not changeable\nafter creation.",
      },
      "notify_custom_message": "Custom email message",
      "notify_on_change": {
          "display_name": "Force real-time email updates",
          "mandatory": False,
      },
      "workflow_owner": {
          "display_name": "Manager",
          "mandatory": True,
          "filter_by": "_filter_by_workflow_owner",
      },
      "workflow_member": {
          "display_name": "Member",
          "filter_by": "_filter_by_workflow_member",
      },
      "status": None,
      "start_date": None,
      "end_date": None,
  }

  @classmethod
  def _filter_by_workflow_owner(cls, predicate):
    return cls._filter_by_role("WorkflowOwner", predicate)

  @classmethod
  def _filter_by_workflow_member(cls, predicate):
    return cls._filter_by_role("WorkflowMember", predicate)

  def copy(self, _other=None, **kwargs):
    """Create a partial copy of the current workflow.
    """
    columns = ['title',
               'description',
               'notify_on_change',
               'notify_custom_message',
               'end_date',
               'start_date',
               'repeat_every',
               'unit',
               'is_verification_needed']
    target = self.copy_into(_other, columns, **kwargs)
    return target

  def copy_task_groups(self, target, **kwargs):
    """Copy all task groups and tasks mapped to this workflow.
    """
    for task_group in self.task_groups:
      obj = task_group.copy(
          workflow=target,
          context=target.context,
          clone_people=kwargs.get("clone_people", False),
          clone_objects=kwargs.get("clone_objects", False),
          modified_by=get_current_user(),
      )
      target.task_groups.append(obj)

      if kwargs.get("clone_tasks"):
        task_group.copy_tasks(
            obj,
            clone_people=kwargs.get("clone_people", False),
            clone_objects=kwargs.get("clone_objects", True)
        )

    return target

  @classmethod
  def eager_query(cls):
    return super(Workflow, cls).eager_query().options(
        orm.subqueryload('cycles').undefer_group('Cycle_complete')
           .subqueryload("cycle_task_group_object_tasks")
           .undefer_group("CycleTaskGroupObjectTask_complete"),
        orm.subqueryload('task_groups').undefer_group('TaskGroup_complete'),
        orm.subqueryload(
            'task_groups'
        ).subqueryload(
            "task_group_tasks"
        ).undefer_group(
            'TaskGroupTask_complete'
        ),
        orm.subqueryload('workflow_people'),
    )

  @classmethod
  def indexed_query(cls):
    return super(Workflow, cls).indexed_query().options(
        orm.Load(cls).undefer_group(
            "Workflow_complete",
        ),
    )

  @classmethod
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.
    If such a workflow does not exist, it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists
    backlog_workflows = Workflow.query.filter(
        and_(Workflow.kind == "Backlog",
             # a NULL unit means a one-time workflow; .is_(None) keeps the
             # comparison in SQL instead of a Python identity check
             Workflow.unit.is_(None))
    ).all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    # flush so backlog_workflow gets an id before dependent objects are built
    db.session.flush()
    # create a cycle
    backlog_cycle = cycle.Cycle(description="Backlog workflow",
                                title="Backlog (one time)",
                                is_current=1,
                                status="Assigned",
                                start_date=None,
                                end_date=None,
                                context=backlog_workflow
                                .get_or_create_object_context(),
                                workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group\
        .CycleTaskGroup(description="Backlog workflow taskgroup",
                        title="Backlog TaskGroup",
                        cycle=backlog_cycle,
                        status="InProgress",
                        start_date=None,
                        end_date=None,
                        context=backlog_workflow
                        .get_or_create_object_context())

    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    indexer = get_indexer()
    indexer.create_record(indexer.fts_record_for(backlog_workflow))
    return "Backlog workflow created"
Example #27
File: audit.py  Project: gaurav46/ggrc-core
class Audit(clonable.Clonable, CustomAttributable, Personable, HasOwnContext,
            Relatable, Timeboxed, Noted, Described, Hyperlinked, WithContact,
            Titled, Slugged, db.Model):
    """Audit model."""

    __tablename__ = 'audits'
    _slug_uniqueness = False

    VALID_STATES = (u'Planned', u'In Progress', u'Manager Review',
                    u'Ready for External Review', u'Completed')

    CLONEABLE_CHILDREN = {"AssessmentTemplate"}

    report_start_date = deferred(db.Column(db.Date), 'Audit')
    report_end_date = deferred(db.Column(db.Date), 'Audit')
    audit_firm_id = deferred(
        db.Column(db.Integer, db.ForeignKey('org_groups.id')), 'Audit')
    audit_firm = db.relationship('OrgGroup', uselist=False)
    # TODO: this should be stateful mixin
    status = deferred(db.Column(db.Enum(*VALID_STATES), nullable=False),
                      'Audit')
    gdrive_evidence_folder = deferred(db.Column(db.String), 'Audit')
    program_id = deferred(
        db.Column(db.Integer, db.ForeignKey('programs.id'), nullable=False),
        'Audit')
    requests = db.relationship('Request',
                               backref='audit',
                               cascade='all, delete-orphan')
    audit_objects = db.relationship('AuditObject',
                                    backref='audit',
                                    cascade='all, delete-orphan')
    object_type = db.Column(db.String(length=250),
                            nullable=False,
                            default='Control')

    _publish_attrs = [
        'report_start_date', 'report_end_date', 'audit_firm', 'status',
        'gdrive_evidence_folder', 'program', 'requests', 'object_type',
        PublishOnly('audit_objects')
    ]

    _sanitize_html = [
        'gdrive_evidence_folder',
        'description',
    ]

    _include_links = []

    _aliases = {
        "program": {
            "display_name": "Program",
            "filter_by": "_filter_by_program",
            "mandatory": True,
        },
        "user_role:Auditor": {
            "display_name": "Auditors",
            "type": AttributeInfo.Type.USER_ROLE,
            "filter_by": "_filter_by_auditor",
        },
        "status": "Status",
        "start_date": "Planned Start Date",
        "end_date": "Planned End Date",
        "report_start_date": "Planned Report Period from",
        "report_end_date": "Planned Report Period to",
        "contact": {
            "display_name": "Internal Audit Lead",
            "mandatory": True,
            "filter_by": "_filter_by_contact",
        },
        "secondary_contact": None,
        "notes": None,
        "url": None,
        "reference_url": None,
    }

    def _clone(self, source_object):
        """Clone audit and all relevant attributes.

        Handles the internals of the actual audit cloning and everything
        related to the audit itself (auditors, audit firm, context setting,
        custom attribute values, etc.).
        """
        from ggrc_basic_permissions import create_audit_context

        data = {
            "title": source_object.generate_attribute("title"),
            "description": source_object.description,
            "audit_firm": source_object.audit_firm,
            "start_date": source_object.start_date,
            "end_date": source_object.end_date,
            "program": source_object.program,
            "status": source_object.VALID_STATES[0],
            "report_start_date": source_object.report_start_date,
            "report_end_date": source_object.report_end_date,
            "contact": source_object.contact
        }

        self.update_attrs(data)
        db.session.flush()

        create_audit_context(self)
        self._clone_auditors(source_object)
        self.clone_custom_attribute_values(source_object)

    def _clone_auditors(self, audit):
        """Clone auditors of specified audit.

        Args:
            audit: Audit instance
        """
        from ggrc_basic_permissions.models import Role, UserRole

        role = Role.query.filter_by(name="Auditor").first()
        auditors = [
            ur.person
            for ur in UserRole.query.filter_by(role=role,
                                               context=audit.context).all()
        ]

        for auditor in auditors:
            user_role = UserRole(context=self.context,
                                 person=auditor,
                                 role=role)
            db.session.add(user_role)
        db.session.flush()

    def clone(self, source_id, mapped_objects=None):
        """Clone audit with specified whitelisted children.

        Children that can be cloned should be specified in CLONEABLE_CHILDREN.

        Args:
            mapped_objects: A list of related objects that should also be
                copied and linked to a new audit.
        """
        if not mapped_objects:
            mapped_objects = []

        source_object = Audit.query.get(source_id)
        self._clone(source_object)

        if any(mapped_objects):
            related_children = source_object.related_objects(mapped_objects)

            for obj in related_children:
                obj.clone(self)

    @classmethod
    def _filter_by_program(cls, predicate):
        return Program.query.filter((Program.id == Audit.program_id)
                                    & (predicate(Program.slug)
                                       | predicate(Program.title))).exists()

    @classmethod
    def _filter_by_auditor(cls, predicate):
        from ggrc_basic_permissions.models import Role, UserRole
        return UserRole.query.join(
            Role, Person).filter((Role.name == "Auditor")
                                 & (UserRole.context_id == cls.context_id)
                                 & (predicate(Person.name)
                                    | predicate(Person.email))).exists()

    @classmethod
    def eager_query(cls):
        from sqlalchemy import orm

        query = super(Audit, cls).eager_query()
        return query.options(
            orm.joinedload('program'),
            orm.subqueryload('requests'),
            orm.subqueryload('object_people').joinedload('person'),
            orm.subqueryload('audit_objects'),
        )
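Audit.eager_query combines joinedload for the many-to-one program relationship with subqueryload for collections, so listing audits does not trigger per-row lazy loads. A minimal, self-contained sketch of the same idea, with assumed models (Python 3, SQLAlchemy 1.4+, not GGRC code):

import sqlalchemy as sa
from sqlalchemy import orm

Base = orm.declarative_base()


class Program(Base):
    __tablename__ = "programs"
    id = sa.Column(sa.Integer, primary_key=True)
    title = sa.Column(sa.String)


class Audit(Base):
    __tablename__ = "audits"
    id = sa.Column(sa.Integer, primary_key=True)
    program_id = sa.Column(sa.Integer, sa.ForeignKey("programs.id"))
    program = orm.relationship("Program")


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
with orm.Session(engine) as session:
    session.add(Audit(program=Program(title="Program 1")))
    session.commit()
    session.expire_all()

    audits = (session.query(Audit)
              .options(orm.joinedload(Audit.program))  # one JOINed query
              .all())
    print(audits[0].program.title)  # already loaded, no extra SELECT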