Example No. 1
def update_index_for_objects(session, cache):
  indexer = get_indexer()
  for obj in cache.new:
    indexer.create_record(fts_record_for(obj), commit=False)
  for obj in cache.dirty:
    indexer.update_record(fts_record_for(obj), commit=False)
  for obj in cache.deleted:
    indexer.delete_record(obj.id, obj.__class__.__name__, commit=False)
  session.commit()
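
A note on the `cache` argument: the function only reads its `new`, `dirty`, and `deleted` collections. Below is a minimal, hypothetical sketch of a caller; the `SessionCache` container is an illustration and is not part of the original project.

import collections

# Hypothetical stand-in for the session cache consumed above: just the three
# attributes that update_index_for_objects() reads.
SessionCache = collections.namedtuple(
    "SessionCache", ["new", "dirty", "deleted"])

cache = SessionCache(new=set(), dirty=set(), deleted=set())
# ... populate the three sets from the session's pending objects, then:
# update_index_for_objects(db.session, cache)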
Example No. 2
def do_reindex():
    """
  update the full text search index
  """

    indexer = get_indexer()
    indexer.delete_all_records(False)

    # Remove model base classes and non searchable objects
    excluded_models = {
        all_models.Directive, all_models.Option, all_models.SystemOrProcess
    }
    indexed_models = {
        model
        for model in all_models.all_models if model_is_indexed(model)
    }

    indexed_models -= excluded_models

    for model in indexed_models:
        mapper_class = model._sa_class_manager.mapper.base_mapper.class_
        query = model.query.options(
            db.undefer_group(mapper_class.__name__ + '_complete'), )
        for query_chunk in generate_query_chunks(query):
            for instance in query_chunk:
                indexer.create_record(fts_record_for(instance), False)
            db.session.commit()
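
The reindex variants above and below all iterate `generate_query_chunks(query)` and commit after every chunk, so the index is rebuilt in bounded batches instead of one huge transaction. A rough sketch of what such a chunking helper could look like, offered as an assumption for illustration rather than the project's actual implementation:

def generate_query_chunks(query, chunk_size=1000):
  """Yield the query in limit/offset chunks of at most chunk_size rows."""
  # In practice a stable ordering (e.g. by primary key) is needed so that
  # offset pagination does not skip or repeat rows.
  count = query.count()
  for offset in range(0, count, chunk_size):
    yield query.limit(chunk_size).offset(offset)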
Example No. 3
def do_reindex():
  """Update the full text search index."""

  indexer = get_indexer()
  indexer.delete_all_records(False)

  # Remove model base classes and non searchable objects
  excluded_models = {
      all_models.Directive,
      all_models.Option,
      all_models.SystemOrProcess,
      all_models.Role,
  }
  indexed_models = {model for model in all_models.all_models
                    if model_is_indexed(model)}

  indexed_models -= excluded_models

  for model in indexed_models:
    # pylint: disable=protected-access
    mapper_class = model._sa_class_manager.mapper.base_mapper.class_
    query = model.query.options(
        db.undefer_group(mapper_class.__name__ + '_complete'),
    )
    for query_chunk in generate_query_chunks(query):
      for instance in query_chunk:
        indexer.create_record(fts_record_for(instance), False)
      db.session.commit()

  reindex_snapshots()
Example No. 4
    def ensure_backlog_workflow_exists(cls):
        """Ensures there is at least one backlog workflow with an active cycle.
        If no such workflow exists, it creates one."""
        def any_active_cycle(workflows):
            """Checks if any active cycle exists from given workflows"""
            for workflow in workflows:
                for cur_cycle in workflow.cycles:
                    if cur_cycle.is_current:
                        return True
            return False

        # Check if backlog workflow already exists
        backlog_workflows = Workflow.query.filter(
            and_(Workflow.kind == "Backlog",
                 Workflow.frequency == "one_time")).all()

        if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
            return "At least one backlog workflow already exists"
        # Create a backlog workflow
        backlog_workflow = Workflow(description="Backlog workflow",
                                    title="Backlog (one time)",
                                    frequency="one_time",
                                    status="Active",
                                    recurrences=0,
                                    kind="Backlog")

        # create wf context
        wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
        backlog_workflow.context = wf_ctx
        db.session.flush(backlog_workflow)
        # create a cycle
        backlog_cycle = cycle.Cycle(
            description="Backlog workflow",
            title="Backlog (one time)",
            is_current=1,
            status="Assigned",
            start_date=None,
            end_date=None,
            context=backlog_workflow.get_or_create_object_context(),
            workflow=backlog_workflow)

        # create a cycletaskgroup
        backlog_ctg = cycle_task_group\
            .CycleTaskGroup(description="Backlog workflow taskgroup",
                            title="Backlog TaskGroup",
                            cycle=backlog_cycle,
                            status="InProgress",
                            start_date=None,
                            end_date=None,
                            context=backlog_workflow
                            .get_or_create_object_context())

        db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
        db.session.flush()

        # add fulltext entries
        get_indexer().create_record(fts_record_for(backlog_workflow))
        return "Backlog workflow created"
Example No. 5
def do_reindex():
    """Update the full text search index."""

    indexer = get_indexer()
    with benchmark('Delete all records'):
        indexer.delete_all_records(False)

    indexed_models = get_indexed_model_names()

    people = db.session.query(all_models.Person.id, all_models.Person.name,
                              all_models.Person.email)
    g.people_map = {p.id: (p.name, p.email) for p in people}

    for model in sorted(indexed_models):
        # pylint: disable=protected-access
        logger.info("Updating index for: %s", model)
        with benchmark("Create records for %s" % model):
            model = get_model(model)
            mapper_class = model._sa_class_manager.mapper.base_mapper.class_
            query = model.query.options(
                db.undefer_group(mapper_class.__name__ + '_complete'), )
            for query_chunk in generate_query_chunks(query):
                for instance in query_chunk:
                    indexer.create_record(fts_record_for(instance), False)
                db.session.commit()

    reindex_snapshots()

    delattr(g, "people_map")
Example No. 6
def admin_reindex():
  """Simple re-index of all indexable objects
  """
  if not permissions.is_allowed_read("/admin", 1):
    raise Forbidden()

  from ggrc.fulltext import get_indexer
  from ggrc.fulltext.recordbuilder import fts_record_for

  indexer = get_indexer()
  indexer.delete_all_records(False)

  from ggrc.models import all_models
  from ggrc.app import db

  # Find all models, then remove base classes
  # (keeping base classes would produce duplicate entries in the index)
  models = set(all_models.all_models) - \
      {all_models.Directive, all_models.SystemOrProcess}
  for model in models:
    mapper_class = model._sa_class_manager.mapper.base_mapper.class_
    query = model.query.options(
        db.undefer_group(mapper_class.__name__+'_complete'),
        )
    for instance in query.all():
      indexer.create_record(fts_record_for(instance), False)
  db.session.commit()

  return app.make_response((
    'success', 200, [('Content-Type', 'text/html')]))
Example No. 7
def create_user(email, **kwargs):
  user = Person(email=email, **kwargs)
  db.session.add(user)
  db.session.flush()
  log_event(db.session, user, user.id)
  user_context = Context(
      name='Personal Context for {0}'.format(email),
      description='',
      related_object=user,
      context_id=1,
      )
  db.session.add(user_context)
  db.session.commit()
  get_indexer().create_record(fts_record_for(user))
  return user
Example No. 8
def create_user(email, **kwargs):
    user = Person(email=email, **kwargs)
    db.session.add(user)
    db.session.flush()
    log_event(db.session, user, user.id)
    user_context = Context(
        name='Personal Context for {0}'.format(email),
        description='',
        related_object=user,
        context_id=1,
    )
    db.session.add(user_context)
    db.session.commit()
    get_indexer().create_record(fts_record_for(user))
    return user
Example No. 9
 def _log_event(cls, instance):
     indexer = get_indexer()
     db.session.flush()
     user = cls._get_user()
     revision = models.Revision(instance, user.id, 'created',
                                instance.log_json())
     event = models.Event(
         modified_by=user,
         action="POST",
         resource_id=instance.id,
         resource_type=instance.type,
         context=instance.context,
         revisions=[revision],
     )
     db.session.add(revision)
     db.session.add(event)
     indexer.update_record(fts_record_for(instance), commit=False)
Example No. 10
 def _log_event(cls, instance):
   indexer = get_indexer()
   db.session.flush()
   user = cls._get_user()
   revision = models.Revision(
       instance, user.id, 'created', instance.log_json())
   event = models.Event(
       modified_by=user,
       action="POST",
       resource_id=instance.id,
       resource_type=instance.type,
       context=instance.context,
       revisions=[revision],
   )
   db.session.add(revision)
   db.session.add(event)
   indexer.update_record(fts_record_for(instance), commit=False)
Example No. 11
def do_reindex():
    """
  update the full text search index
  """

    indexer = get_indexer()
    indexer.delete_all_records(False)

    # Find all models then remove base classes
    #   (If we don't remove base classes, we get duplicates in the index.)
    inheritance_base_models = [
        all_models.Directive, all_models.SystemOrProcess
    ]
    models_ = set(all_models.all_models) - set(inheritance_base_models)
    models_ = [model for model in models_ if model_is_indexed(model)]

    for model in models_:
        mapper_class = model._sa_class_manager.mapper.base_mapper.class_
        query = model.query.options(
            db.undefer_group(mapper_class.__name__ + '_complete'), )
        for query_chunk in generate_query_chunks(query):
            for instance in query_chunk:
                indexer.create_record(fts_record_for(instance), False)
            db.session.commit()
Example No. 12
  def ensure_backlog_workflow_exists(cls):
    """Ensures there is at least one backlog workflow with an active cycle.
    If no such workflow exists, it creates one."""

    def any_active_cycle(workflows):
      """Checks if any active cycle exists from given workflows"""
      for workflow in workflows:
        for cur_cycle in workflow.cycles:
          if cur_cycle.is_current:
            return True
      return False

    # Check if backlog workflow already exists
    backlog_workflows = Workflow.query.filter(
        and_(Workflow.kind == "Backlog",
             Workflow.frequency == "one_time")).all()

    if len(backlog_workflows) > 0 and any_active_cycle(backlog_workflows):
      return "At least one backlog workflow already exists"
    # Create a backlog workflow
    backlog_workflow = Workflow(description="Backlog workflow",
                                title="Backlog (one time)",
                                frequency="one_time",
                                status="Active",
                                recurrences=0,
                                kind="Backlog")

    # create wf context
    wf_ctx = backlog_workflow.get_or_create_object_context(context=1)
    backlog_workflow.context = wf_ctx
    db.session.flush(backlog_workflow)
    # create a cycle
    backlog_cycle = cycle.Cycle(description="Backlog workflow",
                                title="Backlog (one time)",
                                is_current=1,
                                status="Assigned",
                                start_date=None,
                                end_date=None,
                                context=backlog_workflow
                                .get_or_create_object_context(),
                                workflow=backlog_workflow)

    # create a cycletaskgroup
    backlog_ctg = cycle_task_group\
        .CycleTaskGroup(description="Backlog workflow taskgroup",
                        title="Backlog TaskGroup",
                        cycle=backlog_cycle,
                        status="InProgress",
                        start_date=None,
                        end_date=None,
                        context=backlog_workflow
                        .get_or_create_object_context())
    db.session.add_all([backlog_workflow, backlog_cycle, backlog_ctg])
    db.session.flush()

    # add fulltext entries
    get_indexer().create_record(fts_record_for(backlog_workflow))
    get_indexer().create_record(fts_record_for(backlog_cycle))
    get_indexer().create_record(fts_record_for(backlog_ctg))
    return "Backlog workflow created"
Example No. 13
        if not permissions.is_allowed_create(
                self.model.__name__, self.get_context_id_from_json(src)):
            raise Forbidden()
        if (src.get("private") == True and
                src.get("context") is not None and
                src["context"].get("id") is not None):
            raise BadRequest(
                'context MUST be "null" when creating a private resource.')
        elif "context" not in src:
            raise BadRequest("context MUST be specified.")
        else:
            if not permissions.is_allowed_create(
                    self.model.__name__, self.get_context_id_from_json(src)):
                raise Forbidden()
        self.json_create(obj, src)
        self.model_posted.send(obj.__class__, obj=obj, src=src, service=self)
        obj.modified_by_id = get_current_user_id()
        db.session.add(obj)
        log_event(db.session, obj)
        db.session.commit()
        get_indexer().create_record(fts_record_for(obj))
        return self.json_success_response(
            self.object_for_json(obj), self.modified_at(obj),
            id=obj.id, status=201)

    @classmethod
    def add_to(cls, app, url, model_class=None, decorators=()):
        if model_class:
            service_class = type(
                model_class.__name__, (cls,), {"_model": model_class})
            import ggrc.services

            setattr(ggrc.services, model_class.__name__, service_class)
        else:
            service_class = cls
        view_func = service_class.as_view(service_class.endpoint_name())
        view_func = cls.decorate_view_func(view_func, decorators)
        app.add_url_rule(
            url, defaults={cls.pk: None}, view_func=view_func,
            methods=["GET", "POST"])
        app.add_url_rule(
Example No. 14
class Resource(ModelView):
    """View base class for Views handling.  Will typically be registered with an
  application following a collection style for routes. Collection `GET` and
  `POST` will have a route like `/resources` while collection member
  resource routes will have routes likej `/resources/<pk:pk_type>`.

  To register a Resource subclass FooCollection with a Flask application:

  ..

     FooCollection.add_to(app, '/foos')

  By default will only support the `application/json` content-type.
  """
    def dispatch_request(self, *args, **kwargs):
        method = request.method.lower()

        if method == 'get':
            if self.pk in kwargs and kwargs[self.pk] is not None:
                return self.get(*args, **kwargs)
            else:
                return self.collection_get()
        elif method == 'post':
            if self.pk in kwargs and kwargs[self.pk] is not None:
                return self.post(*args, **kwargs)
            else:
                return self.collection_post()
        elif method == 'put':
            return self.put(*args, **kwargs)
        elif method == 'delete':
            return self.delete(*args, **kwargs)
        else:
            raise NotImplementedError()

    def post(self, *args, **kwargs):
        raise NotImplementedError()

    # Default JSON request handlers
    def get(self, id):
        obj = self.get_object(id)
        if obj is None:
            return self.not_found_response()
        if 'Accept' in self.request.headers and \
           'application/json' not in self.request.headers['Accept']:
            return current_app.make_response(
                ('application/json', 406, [('Content-Type', 'text/plain')]))
        object_for_json = self.object_for_json(obj)
        if 'If-None-Match' in self.request.headers and \
            self.request.headers['If-None-Match'] == self.etag(object_for_json):
            return current_app.make_response(
                ('', 304, [('Etag', self.etag(object_for_json))]))
        return self.json_success_response(self.object_for_json(obj),
                                          self.modified_at(obj))

    def validate_headers_for_put_or_delete(self, obj):
        missing_headers = []
        if 'If-Match' not in self.request.headers:
            missing_headers.append('If-Match')
        if 'If-Unmodified-Since' not in self.request.headers:
            missing_headers.append('If-Unmodified-Since')
        if missing_headers:
            # rfc 6585 defines a new status code for missing required headers
            return current_app.make_response(('If-Match is required.', 428, [
                ('Content-Type', 'text/plain')
            ]))
        if request.headers['If-Match'] != self.etag(self.object_for_json(obj)) or \
           request.headers['If-Unmodified-Since'] != \
              self.http_timestamp(self.modified_at(obj)):
            return current_app.make_response((
                'The resource has been changed. The conflict must be resolved and '
                'the request resubmitted with an up to date Etag for If-Match '
                'header.', 409, [('Content-Type', 'text/plain')]))
        return None

    def put(self, id):
        obj = self.get_object(id)
        if obj is None:
            return self.not_found_response()
        if self.request.headers['Content-Type'] != 'application/json':
            return current_app.make_response(
                ('Content-Type must be application/json', 415, []))
        header_error = self.validate_headers_for_put_or_delete(obj)
        if header_error:
            return header_error
        src = UnicodeSafeJsonWrapper(self.request.json)
        root_attribute = self.model._inflector.table_singular
        try:
            src = src[root_attribute]
        except KeyError:
            return current_app.make_response(
                ('Required attribute "{0}" not found'.format(root_attribute),
                 400, []))
        ggrc.builder.json.update(obj, src)
        #FIXME Fake the modified_by_id until we have that information in session.
        obj.modified_by_id = get_current_user_id()
        db.session.add(obj)
        db.session.commit()
        obj = self.get_object(id)
        get_indexer().update_record(fts_record_for(obj))
        return self.json_success_response(self.object_for_json(obj),
                                          self.modified_at(obj))
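
The `put` handler above answers 428 when `If-Match` or `If-Unmodified-Since` is missing and 409 when they no longer match the stored object, so a client has to echo back the `Etag` and last-modified timestamp it received. An illustrative client-side sketch follows; the endpoint URL, resource name, and payload shape are assumptions, and it presumes the GET response exposes `Etag` and `Last-Modified` headers, as the 304 branch above suggests:

import requests

URL = "https://example.com/api/programs/1"  # hypothetical endpoint

resp = requests.get(URL, headers={"Accept": "application/json"})
payload = resp.json()
payload["program"]["title"] = "Updated title"  # hypothetical payload key

requests.put(
    URL,
    json=payload,
    headers={
        "Content-Type": "application/json",
        "If-Match": resp.headers["Etag"],
        "If-Unmodified-Since": resp.headers["Last-Modified"],
    },
)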
Example No. 15
                ('Content-Type must be application/json', 415, []))
        obj = self.model()
        src = UnicodeSafeJsonWrapper(self.request.json)
        root_attribute = self.model._inflector.table_singular
        try:
            src = src[root_attribute]
        except KeyError:
            return current_app.make_response(
                ('Required attribute "{0}" not found'.format(root_attribute),
                 400, []))
        ggrc.builder.json.create(obj, src)
        #FIXME Fake the modified_by_id until we have that information in session.
        obj.modified_by_id = get_current_user_id()
        db.session.add(obj)
        db.session.commit()
        get_indexer().create_record(fts_record_for(obj))
        return self.json_success_response(self.object_for_json(obj),
                                          self.modified_at(obj),
                                          id=obj.id,
                                          status=201)

    @classmethod
    def add_to(cls, app, url, model_class=None, decorators=()):
        if model_class:
            service_class = type(model_class.__name__, (Resource, ), {
                '_model': model_class,
            })
            import ggrc.services
            setattr(ggrc.services, model_class.__name__, service_class)
        else:
            service_class = cls