 def test_commentable_import(self):
   """Test import of recipients and send_by_default fields"""
   for model_name in COMMENTABLE_MODELS:
     model_cls = inflector.get_model(model_name)
     obj = model_cls.query.first()
     self.assertEqual(sorted(obj.recipients.split(",")),
                      sorted(RECIPIENTS))
     self.assertEqual(obj.send_by_default, True)
   for model_name in SCOPING_MODELS:
     model_cls = inflector.get_model(model_name)
     obj = model_cls.query.first()
     self.assertEqual(sorted(obj.recipients.split(",")),
                      sorted(SCOPING_RECIPIENTS))
     self.assertEqual(obj.send_by_default, True)
Example #3
def update_indexer(session):  # pylint:disable=unused-argument
  """General function to update index

  for all updated related instance before commit"""
  models_ids_to_reindex = defaultdict(set)
  db.session.flush()
  for for_index in getattr(db.session, 'reindex_set', set()):
    if for_index not in db.session:
      continue
    type_name, id_value = for_index.get_reindex_pair()
    if type_name:
      models_ids_to_reindex[type_name].add(id_value)
  db.session.reindex_set = set()
  for model_name, ids in models_ids_to_reindex.iteritems():
    get_model(model_name).bulk_record_update_for(ids)
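
A minimal sketch (not necessarily this project's actual wiring) of how a hook like update_indexer is commonly attached to SQLAlchemy's before_commit session event:

from sqlalchemy import event
from sqlalchemy.orm import Session


@event.listens_for(Session, "before_commit")
def _reindex_before_commit(session):
  # Delegate to an update_indexer hook such as the one above.
  update_indexer(session)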
Example #4
def update_indexer(session):  # pylint:disable=unused-argument
    """General function to update index

  for all updated related instance before commit"""
    models_ids_to_reindex = defaultdict(set)
    db.session.flush()
    for for_index in getattr(db.session, 'reindex_set', set()):
        if for_index not in db.session:
            continue
        type_name, id_value = for_index.get_reindex_pair()
        if type_name:
            models_ids_to_reindex[type_name].add(id_value)
    db.session.reindex_set = set()
    for model_name, ids in models_ids_to_reindex.iteritems():
        get_model(model_name).bulk_record_update_for(ids)
Example #5
    def _parse_query(cls, query):
        """Parse cloning parameters from input query.

    Args:
        query: Dict with cloning parameters.

    Returns:
        Tuple that includes the list of objects to clone, the destination
        object and the set of possible mapped types
        (source_objs, destination, mapped_types).
    """
        if not query:
            raise exceptions.BadRequest()

        source_ids = query.get("sourceObjectIds", [])
        if not source_ids:
            raise exceptions.BadRequest(
                "sourceObjectIds parameter wasn't provided")
        source_objs = cls.query.options(
            sa.orm.subqueryload('custom_attribute_definitions'),
            sa.orm.subqueryload('custom_attribute_values'),
        ).filter(cls.id.in_(source_ids)).all()

        dest_query = query.get("destination", {})
        destination = None
        if dest_query and dest_query.get("type") and dest_query.get("id"):
            destination_cls = inflector.get_model(dest_query.get("type"))
            destination = destination_cls.query.filter_by(
                id=dest_query.get("id")).first()

        mapped_types = {
            type_
            for type_ in query.get("mappedObjects", [])
            if type_ in cls.CLONEABLE_CHILDREN
        }
        return source_objs, destination, mapped_types
Example #6
def get_objects_by_query():
  """Return objects corresponding to a POST'ed query list."""
  query = request.json

  results = get_handler_results(query)

  last_modified_list = [result["last_modified"] for result in results
                        if result["last_modified"]]
  last_modified = max(last_modified_list) if last_modified_list else None
  collections = []
  collection_fields = ["ids", "values", "count", "total", "object_name"]

  for result in results:
    model = get_model(result["object_name"])

    if model is not None:
      collection = build_collection_representation(
          model,
          {
              field: result[field] for field in collection_fields
              if field in result
          }
      )
      collections.append(collection)

  return json_success_response(collections, last_modified)
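
For reference, a representative query list in the shape POSTed to this endpoint, mirroring the search_request used in the export test further down; all values are illustrative only:

example_query = [{
    "object_name": "Assessment",  # resolved to a model class via get_model
    "filters": {"expression": {}, "order_by": {"name": "id"}},
    "fields": ["slug", "test_plan"],
}]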
Example #7
def get_objects_to_reindex(obj):
  """Return list of Commentable objects related to provided comment."""
  source_qs = db.session.query(
      Relationship.destination_type, Relationship.destination_id
  ).filter(
      Relationship.source_type == obj.type,
      Relationship.source_id == obj.id
  )
  destination_qs = db.session.query(
      Relationship.source_type, Relationship.source_id
  ).filter(
      Relationship.destination_type == obj.type,
      Relationship.destination_id == obj.id
  )
  result_qs = source_qs.union(destination_qs)
  klass_dict = defaultdict(set)
  for klass, object_id in result_qs:
    klass_dict[klass].add(object_id)

  queries = []
  for klass, object_ids in klass_dict.iteritems():
    model = inflector.get_model(klass)
    if not model:
      continue
    if issubclass(model, (Indexed, Commentable, ExternalCommentable)):
      queries.append(model.query.filter(model.id.in_(list(object_ids))))
  return list(itertools.chain(*queries))
Example #9
def mark_to_cache(type_, id_):
    """Mark object for warmup"""
    if not hasattr(flask.g, "referenced_objects_markers"):
        flask.g.referenced_objects_markers = collections.defaultdict(set)
    if not (isinstance(type_, type) and issubclass(type_, db.Model)):
        type_ = inflector.get_model(type_)
    flask.g.referenced_objects_markers[type_].add(id_)
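
Hypothetical calls, assuming a Flask application context and that the helper above is importable; a type name and a model class are both accepted because string types are resolved through inflector.get_model:

mark_to_cache("Control", 42)           # type given as a name
mark_to_cache(all_models.Control, 43)  # type given as a model class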
Example #10
    def text_local_delete_relationship_scoping_directive(
            self, scoping_factory, directive_factory):
        """Test that deleteion of relationship disabled for local users."""
        # Set up relationships
        self.api.set_user(self.person_ext)
        with factories.single_commit():
            scoping_object = scoping_factory()
            directive = directive_factory()
        mappings = [(scoping_object, directive), (directive, scoping_object)]
        relationship_ids = []
        for source, destination in mappings:
            rel = self.create_relationship(source, destination, True,
                                           self.person_ext)
            relationship_ids.append(rel.id)

        self.api.set_user(self.person)
        for rel_id in relationship_ids:
            relationship = all_models.Relationship.query.get(rel_id)
            response = self.api.delete(relationship)
            self.assert400(response)

            # relationship allowed to be deleted when source or
            # destination objects are deleted
            directive_model = get_model(relationship.destination_type)
            directive = directive_model.query.get(relationship.destination_id)
            self.assertIsNone(directive)
            response = self.api.delete(directive)
            self.assert200(response)
            relationship = all_models.Relationship.query.get(rel.id)
            self.assertIsNone(relationship)
Example #11
def get_objects_by_query():
  """Return objects corresponding to a POST'ed query list."""
  query = request.json

  query_helper = QueryAPIQueryHelper(query)
  results = query_helper.get_results()

  last_modified_list = [result["last_modified"] for result in results
                        if result["last_modified"]]
  last_modified = max(last_modified_list) if last_modified_list else None
  collections = []
  collection_fields = ["ids", "values", "count", "total"]

  for result in results:
    if last_modified is None:
      last_modified = result["last_modified"]
    elif result["last_modified"] is not None:
      last_modified = max(last_modified, result["last_modified"])

    model = get_model(result["object_name"])

    collection = build_collection_representation(
        model,
        {
            field: result[field] for field in collection_fields
            if field in result
        }
    )
    collections.append(collection)

  return json_success_response(collections, last_modified)
Example #12
  def get_objects_to_reindex(self):
    """Return list required objects for reindex if comment C.U.D."""
    source_qs = db.session.query(
        Relationship.destination_type, Relationship.destination_id
    ).filter(
        Relationship.source_type == self.__class__.__name__,
        Relationship.source_id == self.id
    )
    destination_qs = db.session.query(
        Relationship.source_type, Relationship.source_id
    ).filter(
        Relationship.destination_type == self.__class__.__name__,
        Relationship.destination_id == self.id
    )
    result_qs = source_qs.union(destination_qs)
    klass_dict = defaultdict(set)
    for klass, object_id in result_qs:
      klass_dict[klass].add(object_id)

    queries = []
    for klass, object_ids in klass_dict.iteritems():
      model = inflector.get_model(klass)
      if not model:
        continue
      if issubclass(model, (Indexed, Commentable)):
        queries.append(model.query.filter(model.id.in_(list(object_ids))))
    return list(itertools.chain(*queries))
Example #14
def similar(exp, object_class, target_class, query):
    """Filter by relationships similarity.

  Note: only the first id from the list of ids is used.

  Args:
    object_name: the name of the class of the objects to which similarity
                 will be computed.
    ids: the ids of similar objects of type `object_name`.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if an object of `object_class`
    is similar to one of the given objects.
  """
    similar_class = inflector.get_model(exp['object_name'])
    if not hasattr(similar_class, "get_similar_objects_query"):
        raise BadQueryException(u"{} does not define weights to count "
                                u"relationships similarity".format(
                                    similar_class.__name__))
    similar_objects_query = similar_class.get_similar_objects_query(
        id_=exp['ids'][0],
        types=[object_class.__name__],
    )
    flask.g.similar_objects_query = similar_objects_query
    similar_objects_ids = [obj.id for obj in similar_objects_query]
    if similar_objects_ids:
        return object_class.id.in_(similar_objects_ids)
    return sqlalchemy.sql.false()
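
The exp dict handled above only needs the two keys the operator reads; a minimal illustration (the enclosing filter-expression wrapper is omitted, values are made up):

exp = {
    "object_name": "Assessment",  # class defining get_similar_objects_query
    "ids": [123],                 # only ids[0] is used
}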
Example #15
def get_objects_by_query():
  """Return objects corresponding to a POST'ed query list."""
  query = request.json

  results = get_handler_results(query)

  last_modified_list = [result["last_modified"] for result in results
                        if result["last_modified"]]
  last_modified = max(last_modified_list) if last_modified_list else None
  collections = []
  collection_fields = ["ids", "values", "count", "total", "object_name"]

  for result in results:
    model = get_model(result["object_name"])

    collection = build_collection_representation(
        model,
        {
            field: result[field] for field in collection_fields
            if field in result
        }
    )
    collections.append(collection)

  return json_success_response(collections, last_modified)
Example #16
def similar(exp, object_class, target_class, query):
  """Filter by relationships similarity.

  Note: only the first id from the list of ids is used.

  Args:
    object_name: the name of the class of the objects to which similarity
                 will be computed.
    ids: the ids of similar objects of type `object_name`.

  Returns:
    sqlalchemy.sql.elements.BinaryExpression if an object of `object_class`
    is similar to one of the given objects.
  """
  similar_class = inflector.get_model(exp['object_name'])
  if not hasattr(similar_class, "get_similar_objects_query"):
    raise BadQueryException(u"{} does not define weights to count "
                            u"relationships similarity"
                            .format(similar_class.__name__))
  similar_objects_query = similar_class.get_similar_objects_query(
      id_=exp['ids'][0],
      type_=object_class.__name__,
  )
  similar_objects_ids = {obj[0] for obj in similar_objects_query}
  if similar_objects_ids:
    return object_class.id.in_(similar_objects_ids)
  return sqlalchemy.sql.false()
Example #17
def do_reindex():
    """Update the full text search index."""

    indexer = get_indexer()
    with benchmark('Delete all records'):
        indexer.delete_all_records(False)

    indexed_models = get_indexed_model_names()

    people = db.session.query(all_models.Person.id, all_models.Person.name,
                              all_models.Person.email)
    g.people_map = {p.id: (p.name, p.email) for p in people}

    for model in sorted(indexed_models):
        # pylint: disable=protected-access
        logger.info("Updating index for: %s", model)
        with benchmark("Create records for %s" % model):
            model = get_model(model)
            mapper_class = model._sa_class_manager.mapper.base_mapper.class_
            query = model.query.options(
                db.undefer_group(mapper_class.__name__ + '_complete'), )
            for query_chunk in generate_query_chunks(query):
                for instance in query_chunk:
                    indexer.create_record(fts_record_for(instance), False)
                db.session.commit()

    reindex_snapshots()

    delattr(g, "people_map")
Example #18
def _get_missing_models_query(role):
    """Get query for objects with a missing ACL entry for the given role.

  Note that the filter flag here is just for optimization. When creating a new
  access control role we can be sure that no object will have the ACL entry for
  that role so we can skip the filter.

  Args:
    role: role object for which we want to check acl entries.
    filter_: A flag to filter the possible objects that might already have the
      given acl entry

  Returns:
    sqlalchemy query for the object type specified in the access control role.
  """
    model = inflector.get_model(role.object_type)
    if not model:
        # We only log info instead of warning here because we still leave access
        # control roles of obsolete objects in our database, so that we can use
        # them with old revisions in our history log.
        logger.info("Trying to handle role '%s' for non existent object '%s'",
                    role.name, role.object_type)
        return None

    query = model.query.outerjoin(
        all_models.AccessControlList,
        sa.and_(all_models.AccessControlList.object_type == model.__name__,
                all_models.AccessControlList.object_id == model.id,
                all_models.AccessControlList.ac_role_id == role.id)).filter(
                    all_models.AccessControlList.id.is_(None)).order_by(
                        model.id)

    return query
Example #19
 def import_model(self, model_name, audit, recipients, send_by_default):
     """Import model data with commentable fields"""
     # pylint: disable=protected-access
     import_data = [
         ("object_type", model_name),
         ("Code", "{}-1".format(model_name)),
         ("Title", "{}-Title".format(model_name)),
         ("Admin", "*****@*****.**"),
         ("Recipients", recipients),
         ("Send by default", send_by_default),
     ]
     model_cls = inflector.get_model(model_name)
     if model_name in SCOPING_MODELS:
         import_data.append(("Assignee", "*****@*****.**"))
         import_data.append(("Verifier", "*****@*****.**"))
     if model_name == "Control":
         import_data.append(("Assertions*", "Privacy"))
     if issubclass(model_cls, AuditRelationship):
         import_data.append(("Map:Audit", audit))
     if (issubclass(model_cls, Described) and "description"
             not in model_cls._aliases) or model_name == "Risk":
         import_data.append(
             ("description", "{}-Description".format(model_name)))
     response = self.import_data(OrderedDict(import_data))
     self._check_csv_response(response, {})
Example #20
def do_reindex():
    """Update the full text search index."""

    indexer = get_indexer()
    indexed_models = get_indexed_model_names()

    people = db.session.query(all_models.Person.id, all_models.Person.name,
                              all_models.Person.email)
    indexer.cache["people_map"] = {p.id: (p.name, p.email) for p in people}
    for model in sorted(indexed_models):
        # pylint: disable=protected-access
        logger.info("Updating index for: %s", model)
        with benchmark("Create records for %s" % model):
            model = get_model(model)
            mapper_class = model._sa_class_manager.mapper.base_mapper.class_
            if issubclass(model, mixin.Indexed):
                for query_chunk in generate_query_chunks(
                        db.session.query(model.id)):
                    model.bulk_record_update_for([i.id for i in query_chunk])
                    db.session.commit()
            else:
                logger.warning(
                    "Try to index model that not inherited from Indexed mixin: %s",
                    model.__name__)
                indexer.delete_records_by_type(model.__name__)
                query = model.query.options(
                    db.undefer_group(mapper_class.__name__ + '_complete'), )
                for query_chunk in generate_query_chunks(query):
                    for instance in query_chunk:
                        indexer.create_record(indexer.fts_record_for(instance),
                                              False)
                    db.session.commit()

    reindex_snapshots()
    indexer.invalidate_cache()
Example #21
    def load_objs(cls, data):
        """Load objects by their ids and types.

    Args:
        data: List of stubs [(_, type, id),] for objects to load.

    Returns:
        Dict with object type and id as keys and instance as value.
    """
        # Combine ids of one type together to load in one query
        type_ids = defaultdict(set)
        for _, type_, id_ in data:
            type_ids[type_].add(id_)

        type_id_objs = defaultdict(dict)
        # We can't load all objects with different types in one step, so we
        # load them for each type separately
        for type_, ids in type_ids.items():
            related_model = inflector.get_model(type_)
            related_query = related_model.query.options(
                sa.orm.subqueryload('custom_attribute_definitions'), ).filter(
                    related_model.id.in_(ids))
            for related in related_query:
                type_id_objs[type_][related.id] = related
        return type_id_objs
Example #22
def get_objects_by_query():
    """Return objects corresponding to a POST'ed query list."""
    query = request.json

    query_helper = QueryAPIQueryHelper(query)
    results = query_helper.get_results()

    last_modified_list = [
        result["last_modified"] for result in results
        if result["last_modified"]
    ]
    last_modified = max(last_modified_list) if last_modified_list else None
    collections = []
    collection_fields = ["ids", "values", "count", "total"]

    for result in results:
        if last_modified is None:
            last_modified = result["last_modified"]
        elif result["last_modified"] is not None:
            last_modified = max(last_modified, result["last_modified"])

        model = get_model(result["object_name"])

        collection = build_collection_representation(model, {
            field: result[field]
            for field in collection_fields if field in result
        })
        collections.append(collection)

    return json_success_response(collections, last_modified)
Example #23
  def load_objs(cls, data):
    """Load objects by their ids and types.

    Args:
        data: List of stubs [(_, type, id),] for objects to load.

    Returns:
        Dict with object type and id as keys and instance as value.
    """
    # Combine ids of one type together to load in one query
    type_ids = defaultdict(set)
    for _, type_, id_ in data:
      type_ids[type_].add(id_)

    type_id_objs = defaultdict(dict)
    # We can't load all objects with different types in one step, so we
    # load them for each type separately
    for type_, ids in type_ids.items():
      related_model = inflector.get_model(type_)
      related_query = related_model.query.options(
          sa.orm.subqueryload('custom_attribute_definitions'),
      ).filter(related_model.id.in_(ids))
      for related in related_query:
        type_id_objs[type_][related.id] = related
    return type_id_objs
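
An illustrative stubs list in the [(_, type, id), ...] format described in the docstring; load_objs groups the ids per type so that each type is loaded with a single query:

data = [
    (None, "Control", 1),
    (None, "Control", 2),
    (None, "Objective", 5),
]
# The grouping step yields {"Control": {1, 2}, "Objective": {5}} before querying.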
Example #24
  def _parse_query(cls, query):
    """Parse cloning parameters from input query.

    Args:
        query: Dict with cloning parameters.

    Returns:
        Tuple that includes the list of objects to clone, the destination
        object and the set of possible mapped types
        (source_objs, destination, mapped_types).
    """
    if not query:
      raise exceptions.BadRequest()

    source_ids = query.get("sourceObjectIds", [])
    if not source_ids:
      raise exceptions.BadRequest("sourceObjectIds parameter wasn't provided")
    source_objs = cls.query.options(
        sa.orm.subqueryload('custom_attribute_definitions'),
        sa.orm.subqueryload('custom_attribute_values'),
    ).filter(cls.id.in_(source_ids)).all()

    dest_query = query.get("destination", {})
    destination = None
    if dest_query and dest_query.get("type") and dest_query.get("id"):
      destination_cls = inflector.get_model(dest_query.get("type"))
      destination = destination_cls.query.filter_by(
          id=dest_query.get("id")
      ).first()

    mapped_types = {
        type_ for type_ in query.get("mappedObjects", [])
        if type_ in cls.CLONEABLE_CHILDREN
    }
    return source_objs, destination, mapped_types
Example #25
 def test_commentable_import(self):
     """Test import of recipients and send_by_default fields"""
     for model_name, recipients in RECIPIENTS_MAPPING.iteritems():
         model_cls = inflector.get_model(model_name)
         obj = model_cls.query.first()
         self.assertEqual(sorted(obj.recipients.split(",")),
                          sorted(recipients))
         self.assertEqual(obj.send_by_default, True)
Example #26
def mark_to_cache(type_, id_):
  """Mark object for warmup"""
  if not hasattr(flask.g, "referenced_objects_markers"):
    flask.g.referenced_objects_markers = collections.defaultdict(set)
  if not (isinstance(type_, type) and issubclass(type_, db.Model)):
    type_ = inflector.get_model(type_)
  if type_ is not None:
    flask.g.referenced_objects_markers[type_].add(id_)
Example #27
def get_by_id(obj):
  """Get object instance by id"""
  if not obj:
    return
  model = inflector.get_model(obj['type'])
  if not model:
    return
  return model.query.get(obj["id"])
Example #28
 def _slugs_to_ids(object_name, slugs):
   """Convert SLUG to proper ids for the given objec."""
   object_class = inflector.get_model(object_name)
   if not object_class:
     return []
   ids = [c.id for c in object_class.query.filter(
       object_class.slug.in_(slugs)).all()]
   return ids
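
An illustrative call resolving import-style slugs to database ids (the slug values are made up):

ids = _slugs_to_ids("Control", ["CONTROL-1", "CONTROL-2"])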
Example #30
def _ensure_program_relationships(snapshots):
  """Ensure that snapshotted object is related to audit program.

  This function is made to handle multiple snapshots for a single audit.
  Args:
    snapshots: list of snapshot objects with child_id, child_type and parent.
  """
  # assert that every parent is an Audit as the code relies on program_id field
  assert {s.parent_type for s in snapshots} == {"Audit"}

  rel = relationship.Relationship

  program_children = {}
  for obj in snapshots:
    program_children.setdefault(obj.parent.program, set()).add(
        (obj.child_type, obj.child_id)
    )

  for program, children_set in program_children.items():
    query = db.session.query(
        rel.destination_type, rel.destination_id
    ).filter(
        and_(
            rel.source_type == "Program",
            rel.source_id == program.id,
            tuple_(rel.destination_type, rel.destination_id).in_(children_set)
        )
    ).union_all(
        db.session.query(
            rel.source_type, rel.source_id
        ).filter(
            and_(
                rel.destination_type == "Program",
                rel.destination_id == program.id,
                tuple_(rel.source_type, rel.source_id).in_(children_set)
            )
        )
    )
    children_set.difference_update(query.all())

    child_objects = {}
    type_ids = {}
    for child in children_set:
      type_ids.setdefault(child[0], set()).add(child[1])
    for child_type, ids in type_ids.items():
      child_model = inflector.get_model(child_type)
      query = child_model.query.filter(child_model.id.in_(ids))
      for child in query:
        child_objects[(child.type, child.id)] = child

    for child in children_set:
      if child in child_objects:
        db.session.add(
            relationship.Relationship(
                source=program,
                destination=child_objects[child],
            )
        )
Example #31
 def original_object_deleted(self):
     """Flag if the snapshot has the last revision and action is deleted."""
     if not self.revisions:
         return False
     deleted = self.revisions[-1].action == "deleted"
     external_deleted = (
         self.revisions[-1].content["status"] == "Deprecated" and
         issubclass(inflector.get_model(self.child_type), Synchronizable))
     return bool(deleted or external_deleted)
Example #32
    def validate_instance_type(self, _, instance_type):
        """Validate instance_type attribute.

    We prevent the creation of proposals for external models.
    """
        instance_class = inflector.get_model(instance_type)

        if issubclass(instance_class, synchronizable.Synchronizable):
            raise ValueError("Trying to create proposal for external model.")

        return instance_type
Example #33
  def validate_reviewable_type(self, _, reviewable_type):
    """Validate reviewable_type attribute.

    We prevent the creation of reviews for external models.
    """
    reviewable_class = inflector.get_model(reviewable_type)

    if issubclass(reviewable_class, synchronizable.Synchronizable):
      raise ValueError("Trying to create review for external model.")

    return reviewable_type
Example #35
    def test_asmnt_procedure_export(self, model):
        """Test export of Assessment Procedure. {}"""
        with factories.single_commit():
            program = factories.ProgramFactory()
            audit = factories.AuditFactory(program=program)
        import_queries = []
        for i in range(3):
            import_queries.append(
                collections.OrderedDict([
                    ("object_type", model),
                    ("Assessment Procedure", "Procedure-{}".format(i)),
                    ("Title", "Title {}".format(i)),
                    ("Code*", "{}-{}".format(model, i)),
                    ("Admin", "*****@*****.**"),
                    ("Assignees", "*****@*****.**"),
                    ("Creators", "*****@*****.**"),
                    ("Description", "{} description".format(model)),
                    ("Program", program.slug),
                    ("Audit", audit.slug),
                    ("Start Date", ""),
                    ("End Date", ""),
                ]))
            if model == "Control":
                import_queries[-1]["Assertions"] = "Privacy"
            if model == "Risk":
                import_queries[-1]["Risk Type"] = "Risk type"
            if model.replace(" ", "") in self.SCOPING_MODELS_NAMES:
                import_queries[-1]["Assignee"] = "*****@*****.**"
                import_queries[-1]["Verifier"] = "*****@*****.**"

        self.check_import_errors(self.import_data(*import_queries))

        model_cls = inflector.get_model(model)
        objects = model_cls.query.order_by(model_cls.test_plan).all()
        self.assertEqual(len(objects), 3)
        for num, obj in enumerate(objects):
            self.assertEqual(obj.test_plan, "Procedure-{}".format(num))

        obj_dicts = [{
            "Code*": obj.slug,
            "Assessment Procedure": "Procedure-{}".format(i)
        } for i, obj in enumerate(objects)]
        search_request = [{
            "object_name": model_cls.__name__,
            "filters": {
                "expression": {},
                "order_by": {
                    "name": "id"
                }
            },
            "fields": ["slug", "test_plan"],
        }]
        exported_data = self.export_parsed_csv(search_request)[model]
        self.assertEqual(exported_data, obj_dicts)
Example #36
  def validate_instance_type(self, _, instance_type):
    """Validate instance_type attribute.

    We prevent the creation of proposals for external models.
    """
    instance_class = inflector.get_model(instance_type)

    if issubclass(instance_class, synchronizable.Synchronizable):
      raise ValueError("Trying to create proposal for external model.")

    return instance_type
Example #37
def update_indexer(session):  # pylint:disable=unused-argument
    """General function to update index

  for all updated related instance before commit"""
    with benchmark("Update indexer before commit"):
        if not hasattr(db.session, "reindex_set"):
            return

        models_ids_to_reindex = defaultdict(set)
        db.session.flush()
        for for_index in db.session.reindex_set:
            if for_index not in db.session:
                continue
            type_name, id_value = for_index.get_reindex_pair()
            if type_name:
                models_ids_to_reindex[type_name].add(id_value)
        # expire required to fix declared_attr cached value
        db.session.expire_all()
        db.session.reindex_set.invalidate()
        for model_name, ids in models_ids_to_reindex.iteritems():
            get_model(model_name).bulk_record_update_for(ids)
Example #38
 def expected_single_equal(self, setup_objs):
     """Calculate expected values for single test case, equal operator"""
     values = []
     for data in setup_objs:
         search_obj = inflector.get_model(data.searchable_type).query.get(
             data.searchable_id)
         for subprop in self.Meta.subprops:
             values.append(([
                 create_tuple_data(data.obj_id, search_obj,
                                   self.Meta.subprops)
             ], getattr(search_obj, subprop)))
     return values
Example #39
 def test_unified_hint_state(self, model):
   """Tests if {} type attribute state has hint expected"""
   data = {
       "export_to": "csv",
       "objects": [
           {"object_name": model, "fields": ["title", "status"]},
       ],
   }
   response = self.client.post("/_service/export_csv",
                               data=dumps(data), headers=self.headers)
   self.assertIn("Allowed values are:\n{}".format(
       '\n'.join(inflector.get_model(model).VALID_STATES)), response.data)
Example #40
  def _get_ids(self, object_query):
    """Get a set of ids of objects described in the filters."""

    object_name = object_query["object_name"]
    expression = object_query.get("filters", {}).get("expression")

    if expression is None:
      return set()
    object_class = inflector.get_model(object_name)
    query = db.session.query(object_class.id)

    tgt_class = object_class
    if object_name == "Snapshot":
      child_type = self._get_snapshot_child_type(object_query)
      tgt_class = getattr(models.all_models, child_type, object_class)

    requested_permissions = object_query.get("permissions", "read")
    with benchmark("Get permissions: _get_ids > _get_type_query"):
      type_query = self._get_type_query(object_class, requested_permissions)
      if type_query is not None:
        query = query.filter(type_query)
    with benchmark("Parse filter query: _get_ids > _build_expression"):
      filter_expression = custom_operators.build_expression(
          expression,
          object_class,
          tgt_class,
          self.query
      )
      if filter_expression is not None:
        query = query.filter(filter_expression)
    if object_query.get("order_by"):
      with benchmark("Sorting: _get_ids > order_by"):
        query = self._apply_order_by(
            object_class,
            query,
            object_query["order_by"],
            tgt_class,
        )
    with benchmark("Apply limit"):
      limit = object_query.get("limit")
      if limit:
        ids, total = self._apply_limit(query, limit)
      else:
        ids = [obj.id for obj in query]
        total = len(ids)
      object_query["total"] = total

    if hasattr(flask.g, "similar_objects_query"):
      # delete similar_objects_query for the case when several queries are
      # POSTed in one request, the first one filters by similarity and the
      # second one doesn't but tries to sort by __similarity__
      delattr(flask.g, "similar_objects_query")
    return ids
Example #41
def update_indexer(session):  # pylint:disable=unused-argument
  """General function to update index

  for all updated related instance before commit"""
  with benchmark("Update indexer before commit"):
    if not hasattr(db.session, "reindex_set"):
      return

    models_ids_to_reindex = defaultdict(set)
    db.session.flush()
    for for_index in db.session.reindex_set:
      if for_index not in db.session:
        continue
      type_name, id_value = for_index.get_reindex_pair()
      if type_name:
        models_ids_to_reindex[type_name].add(id_value)
    # expire required to fix declared_attr cached value
    db.session.expire_all()
    db.session.reindex_set.invalidate()
    for model_name, ids in models_ids_to_reindex.iteritems():
      get_model(model_name).bulk_record_update_for(ids)
Example #42
  def _get_ids(self, object_query):
    """Get a set of ids of objects described in the filters."""

    object_name = object_query["object_name"]
    expression = object_query.get("filters", {}).get("expression")

    if expression is None:
      return set()
    object_class = inflector.get_model(object_name)
    if object_class is None:
      return set()
    query = db.session.query(object_class.id)

    tgt_class = object_class
    if object_name == "Snapshot":
      child_type = self._get_snapshot_child_type(object_query)
      tgt_class = getattr(models.all_models, child_type, object_class)

    requested_permissions = object_query.get("permissions", "read")
    with benchmark("Get permissions: _get_ids > _get_type_query"):
      type_query = self._get_type_query(object_class, requested_permissions)
      if type_query is not None:
        query = query.filter(type_query)
    with benchmark("Parse filter query: _get_ids > _build_expression"):
      filter_expression = custom_operators.build_expression(
          expression,
          object_class,
          tgt_class,
          self.query
      )
      if filter_expression is not None:
        query = query.filter(filter_expression)
    if object_query.get("order_by"):
      with benchmark("Sorting: _get_ids > order_by"):
        query = pagination.apply_order_by(
            object_class,
            query,
            object_query["order_by"],
            tgt_class,
        )
    with benchmark("Apply limit"):
      limit = object_query.get("limit")
      if limit:
        limit_query = pagination.apply_limit(query, limit)
        total = pagination.get_total_count(query)
        ids = [obj.id for obj in limit_query]
      else:
        ids = [obj.id for obj in query]
        total = len(ids)
      object_query["total"] = total

    return ids
Example #44
 def expected_single_equal(self, setup_objs):
   """Calculate expected values for single test case, equal operator"""
   values = []
   for data in setup_objs:
     search_obj = inflector.get_model(data.searchable_type).query.get(
         data.searchable_id
     )
     for subprop in self.Meta.subprops:
       values.append((
           [create_tuple_data(data.obj_id, search_obj, self.Meta.subprops)],
           getattr(search_obj, subprop)
       ))
   return values
Example #45
 def expected_multiple_not_equal(self, setup_objs):
     """Calculate expected values for multiple test case, not equal operator"""
     values, expected_data = [], []
     search_obj = inflector.get_model(
         setup_objs[0].searchable_type).query.get(
             setup_objs[0].searchable_id)
     for data in setup_objs:
         if data.searchable_id != setup_objs[0].searchable_id:
             expected_data.append(
                 create_tuple_data(data.obj_id, search_obj,
                                   self.Meta.subprops))
     for subprop in self.Meta.subprops:
         values.append((expected_data, getattr(search_obj, subprop)))
     return values
Example #46
 def get_issuetracked_objects(obj_type, obj_ids):
     """Fetch issuetracked objects from db."""
     issuetracked_model = inflector.get_model(obj_type)
     return issuetracked_model.query.join(
         all_models.IssuetrackerIssue,
         sa.and_(
             all_models.IssuetrackerIssue.object_type == obj_type,
             all_models.IssuetrackerIssue.object_id ==
             issuetracked_model.id)).filter(
                 all_models.IssuetrackerIssue.object_id.in_(obj_ids),
                 all_models.IssuetrackerIssue.issue_id.is_(None),
             ).options(
                 sa.orm.Load(issuetracked_model).undefer_group(
                     "{}_complete".format(obj_type), ))
Example #47
    def _get_ids(self, object_query):
        """Get a set of ids of objects described in the filters."""

        object_name = object_query["object_name"]
        expression = object_query.get("filters", {}).get("expression")

        if expression is None:
            return set()
        object_class = inflector.get_model(object_name)
        query = db.session.query(object_class.id)

        tgt_class = object_class
        if object_name == "Snapshot":
            child_type = self._get_snapshot_child_type(object_query)
            tgt_class = getattr(models.all_models, child_type, object_class)

        requested_permissions = object_query.get("permissions", "read")
        with benchmark("Get permissions: _get_ids > _get_type_query"):
            type_query = self._get_type_query(object_class,
                                              requested_permissions)
            if type_query is not None:
                query = query.filter(type_query)
        with benchmark("Parse filter query: _get_ids > _build_expression"):
            filter_expression = custom_operators.build_expression(
                expression, object_class, tgt_class, self.query)
            if filter_expression is not None:
                query = query.filter(filter_expression)
        if object_query.get("order_by"):
            with benchmark("Sorting: _get_ids > order_by"):
                query = self._apply_order_by(
                    object_class,
                    query,
                    object_query["order_by"],
                    tgt_class,
                )
        with benchmark("Apply limit"):
            limit = object_query.get("limit")
            if limit:
                ids, total = self._apply_limit(query, limit)
            else:
                ids = [obj.id for obj in query]
                total = len(ids)
            object_query["total"] = total

        if hasattr(flask.g, "similar_objects_query"):
            # delete similar_objects_query for the case when several queries are
            # POSTed in one request, the first one filters by similarity and the
            # second one doesn't but tries to sort by __similarity__
            delattr(flask.g, "similar_objects_query")
        return ids
Example #48
  def test_asmnt_procedure_export(self, model):
    """Test export of Assessment Procedure. {}"""
    with factories.single_commit():
      program = factories.ProgramFactory()
      audit = factories.AuditFactory(program=program)
    import_queries = []
    for i in range(3):
      import_queries.append(collections.OrderedDict([
          ("object_type", model),
          ("Assessment Procedure", "Procedure-{}".format(i)),
          ("Title", "Title {}".format(i)),
          ("Code*", "{}-{}".format(model, i)),
          ("Admin", "*****@*****.**"),
          ("Assignees", "*****@*****.**"),
          ("Creators", "*****@*****.**"),
          ("Description", "{} description".format(model)),
          ("Program", program.slug),
          ("Audit", audit.slug),
          ("Start Date", "01/02/2019"),
          ("End Date", "03/03/2019"),
      ]))
      if model.replace(" ", "") in all_models.get_scope_model_names():
        import_queries[-1]["Assignee"] = "*****@*****.**"
        import_queries[-1]["Verifier"] = "*****@*****.**"

    self.check_import_errors(self.import_data(*import_queries))

    model_cls = inflector.get_model(model)
    objects = model_cls.query.order_by(model_cls.test_plan).all()
    self.assertEqual(len(objects), 3)
    for num, obj in enumerate(objects):
      self.assertEqual(obj.test_plan, "Procedure-{}".format(num))

    obj_dicts = [
        {
            "Code*": obj.slug,
            "Assessment Procedure": "Procedure-{}".format(i)
        } for i, obj in enumerate(objects)
    ]
    search_request = [{
        "object_name": model_cls.__name__,
        "filters": {
            "expression": {},
            "order_by": {"name": "id"}
        },
        "fields": ["slug", "test_plan"],
    }]
    exported_data = self.export_parsed_csv(search_request)[model]
    self.assertEqual(exported_data, obj_dicts)
Example #49
  def _is_parent_for(parent, obj):
    """Check that 'parent' is parent for 'obj'
    Cache parents ids for mega"""
    if hasattr(flask.g, "mega_parents_cache"):
      mega_parents_cache = flask.g.mega_parents_cache
    else:
      mega_parents_cache = flask.g.mega_parents_cache = defaultdict(dict)

    if obj.id in mega_parents_cache[obj.type]:
      parents_ids = mega_parents_cache[obj.type][obj.id]
    else:
      obj = get_model(obj.type).query.get(obj.id)
      parents_ids = obj.get_all_relatives_ids("parents")
      mega_parents_cache[obj.type][obj.id] = parents_ids
    return parent.id in parents_ids
Example #50
def get(type_, id_):
  """Check flask.g.referenced_objects for the object or get it from the DB."""
  if not id_:
    return None

  ref_objects = getattr(flask.g, "referenced_objects", {})

  if not (isinstance(type_, type) and issubclass(type_, db.Model)):
    type_ = inflector.get_model(type_)

  result = ref_objects.get(type_, {}).get(id_, None)

  if not result:
    result = type_.query.get(id_)

  return result
Example #51
 def expected_multiple_not_equal(self, setup_objs):
   """Calculate expected values for multiple test case, not equal operator"""
   values, expected_data = [], []
   search_obj = inflector.get_model(setup_objs[0].searchable_type).query.get(
       setup_objs[0].searchable_id
   )
   for data in setup_objs:
     if data.searchable_id != setup_objs[0].searchable_id:
       expected_data.append(
           create_tuple_data(data.obj_id, search_obj, self.Meta.subprops)
       )
   for subprop in self.Meta.subprops:
     values.append(
         (expected_data, getattr(search_obj, subprop))
     )
   return values
Example #52
 def get_issuetracked_objects(obj_type, obj_ids):
   """Fetch issuetracked objects from db."""
   issuetracked_model = inflector.get_model(obj_type)
   return issuetracked_model.query.join(
       all_models.IssuetrackerIssue,
       sa.and_(
           all_models.IssuetrackerIssue.object_type == obj_type,
           all_models.IssuetrackerIssue.object_id == issuetracked_model.id
       )
   ).filter(
       all_models.IssuetrackerIssue.object_id.in_(obj_ids),
       all_models.IssuetrackerIssue.issue_id.isnot(None),
       all_models.IssuetrackerIssue.enabled != 0,
   ).options(
       sa.orm.Load(issuetracked_model).undefer_group(
           "{}_complete".format(obj_type),
       )
   )
Example #53
 def import_model(self, model_name, audit, recipients, send_by_default):
   """Import model data with commentable fields"""
   # pylint: disable=protected-access
   import_data = [
       ("object_type", model_name),
       ("Code", "{}-1".format(model_name)),
       ("Title", "{}-Title".format(model_name)),
       ("Admin", "*****@*****.**"),
       ("Recipients", recipients),
       ("Send by default", send_by_default),
   ]
   model_cls = inflector.get_model(model_name)
   if issubclass(model_cls, AuditRelationship):
     import_data.append(("Map:Audit", audit))
   if (issubclass(model_cls, Described) and
      "description" not in model_cls._aliases) or model_name == "Risk":
     import_data.append(("description", "{}-Description".format(model_name)))
   response = self.import_data(OrderedDict(import_data))
   self._check_csv_response(response, {})
Example #54
def get(type_, id_):
  """Check flask.g.referenced_objects for the object or get it from the DB."""
  # id == 0 is a valid case if id is an int; therefore "not id" doesn't fit
  if id_ is None:
    return None

  ref_objects = getattr(flask.g, "referenced_objects", {})

  if not (isinstance(type_, type) and issubclass(type_, db.Model)):
    type_ = inflector.get_model(type_)
  # model for type_ has been removed
  if type_ is None:
    return None
  result = ref_objects.get(type_, {}).get(id_, None)

  if not result:
    result = type_.query.get(id_)

  return result
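
Hypothetical lookups, assuming a Flask request context; a type name and a model class are both accepted, and the flask.g cache is consulted before falling back to the database:

person = get("Person", 1)
person = get(all_models.Person, 1)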
Example #55
  def _get_objects(self, object_query):
    """Get a set of objects described in the filters."""

    with benchmark("Get ids: _get_objects -> _get_ids"):
      ids = self._get_ids(object_query)
    if not ids:
      return set()

    object_name = object_query["object_name"]
    object_class = inflector.get_model(object_name)
    query = object_class.eager_query()
    query = query.filter(object_class.id.in_(ids))

    with benchmark("Get objects by ids: _get_objects -> obj in query"):
      id_object_map = {obj.id: obj for obj in query}

    with benchmark("Order objects by ids: _get_objects"):
      objects = [id_object_map[id_] for id_ in ids]

    return objects
Example #56
 def expected_single_not_equal(self, setup_objs):
   """Calculate expected values for single test case, not equal operator"""
   values = []
   # Assume the type of all searchable objects is the same.
   model = inflector.get_model(setup_objs[0].searchable_type)
   id_searchables = dict(
       db.session.query(model.id, model).filter(getattr(model, "id").in_(
           s.searchable_id for s in setup_objs
       ))
   )
   for data in setup_objs:
     exp_data = [so for so in setup_objs if so.obj_id != data.obj_id]
     search_obj = id_searchables[data.searchable_id]
     for subprop in self.Meta.subprops:
       values.append((
           [create_tuple_data(
               ed.obj_id, id_searchables[ed.searchable_id], self.Meta.subprops
           ) for ed in exp_data],
           getattr(search_obj, subprop))
       )
   return values
Example #57
  def __init__(self):
    """Set up basic test fixture with the following data:
    - Persons and Roles
    - Searchable model instance
    - basic test cases
    """
    if not db.engine.dialect.has_table(db.engine, SetupData.__tablename__):
      SetupData.__table__.create(db.engine)

    objects = []
    init_globals(Types.all)
    with factories.single_commit():
      audit_id = factories.AuditFactory().id
      for model in Types.all:
        for field in FIELD_SETTERS:
          if field_exists(inflector.get_model(model), field):
            for operation in OPERATIONS:
              objects += self.base_single_setup(model, field, operation)
              objects += self.base_multiple_setup(model, field, operation)

    with app.app.app_context():
      create_reindexed_snapshots(audit_id, objects)
Example #58
def generate_classes(models, field):
  """Generate test class containing all BasePersonQueryApiTest tests"""
  def generate(model):
    """Create test class with meta information"""
    meta_fields = {"field": field, "model": model}
    meta = type("Meta", (object,), meta_fields)

    class_name = "Test{}FilterBy{}".format(
        model.__name__.replace(" ", ""),
        field.replace(" ", "_")
    )
    instance = type(
        class_name,
        (BasePersonQueryApiTest,),
        {"Meta": meta}
    )
    return instance

  module = sys.modules[__name__]
  for model in models:
    test_model = generate(inflector.get_model(model))
    setattr(module, test_model.__name__, test_model)
Example #59
def _get_missing_models_query(role):
  """Get query for objects with a missing ACL entry for the given role.

  Note that the filter flag here is just for optimization. When creating a new
  access control role we can be sure that no object will have the ACL entry for
  that role so we can skip the filter.

  Args:
    role: role object for which we want to check acl entries.
    filter_: A flag to filter the possible objects that might already have the
      given acl entry

  Returns:
    sqlalchemy query for the object type specified in the access control role.
  """
  model = inflector.get_model(role.object_type)
  if not model:
    # We only log info instead of warning here because we still leave access
    # control roles of obsolete objects in our database, so that we can use
    # them with old revisions in our history log.
    logger.info("Trying to handle role '%s' for non existent object '%s'",
                role.name, role.object_type)
    return None

  query = model.query.outerjoin(
      all_models.AccessControlList,
      sa.and_(
          all_models.AccessControlList.object_type == model.__name__,
          all_models.AccessControlList.object_id == model.id,
          all_models.AccessControlList.ac_role_id == role.id
      )
  ).filter(
      all_models.AccessControlList.id.is_(None)
  ).order_by(
      model.id
  )

  return query
Example #60
  def _get_object(self, request_json):
    """Return existing object or raise HTTP error"""

    object_type = request_json.get('object_type')
    object_id = request_json.get('object_id')

    if None in (object_type, object_id):
      raise exceptions.BadRequest(
          "'object_id' and 'object_type' must be specified")

    model = inflector.get_model(object_type)
    if model is None or not issubclass(model, mixins.Folderable):
      raise exceptions.BadRequest("Model {} not found".format(object_type))

    obj = model.query.get(object_id)
    if obj is None:
      raise exceptions.NotFound(
          "{} with id {} not found".format(model.__name__, object_id))

    self._ensure_has_permissions(obj)
    self._validate_readonly_access(obj)

    return obj
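
A hypothetical request payload handled by this helper, assuming the referenced type is a Folderable model:

request_json = {"object_type": "Program", "object_id": 7}
obj = self._get_object(request_json)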