Example #1
  def _flush(self, parent_relationship):
    """Manually INSERT generated automappings."""
    if not self.auto_mappings:
      return
    with benchmark("Automapping flush"):
      current_user_id = login.get_current_user_id()
      automapping_result = db.session.execute(
          Automapping.__table__.insert().values(
              relationship_id=parent_relationship.id,
              source_id=parent_relationship.source_id,
              source_type=parent_relationship.source_type,
              destination_id=parent_relationship.destination_id,
              destination_type=parent_relationship.destination_type,
              modified_by_id=current_user_id,
          )
      )
      automapping_id = automapping_result.inserted_primary_key[0]
      self.automapping_ids.add(automapping_id)
      now = datetime.utcnow()
      # We are doing an INSERT IGNORE INTO here to mitigate a race condition
      # that happens when multiple simultaneous requests create the same
      # automapping. If a relationship object fails our unique constraint
      # it means that the mapping was already created by another request
      # and we can safely ignore it.
      inserter = Relationship.__table__.insert().prefix_with("IGNORE")
      original = self.order(Stub.from_source(parent_relationship),
                            Stub.from_destination(parent_relationship))
      db.session.execute(inserter.values([{
          "id": None,
          "modified_by_id": current_user_id,
          "created_at": now,
          "updated_at": now,
          "source_id": src.id,
          "source_type": src.type,
          "destination_id": dst.id,
          "destination_type": dst.type,
          "context_id": None,
          "status": None,
          "parent_id": parent_relationship.id,
          "automapping_id": automapping_id,
          "is_external": False}
          for src, dst in self.auto_mappings
           if (src, dst) != original]))  # each (src, dst) is pre-ordered like original

      self._set_audit_id_for_issues(automapping_id)

      cache = Cache.get_cache(create=True)
      if cache:
        # Add inserted relationships into new objects collection of the cache,
        # so that they will be logged within event and appropriate revisions
        # will be created.
        cache.new.update(
            (relationship, relationship.log_json())
            for relationship in Relationship.query.filter_by(
                automapping_id=automapping_id,
            )
        )
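
The INSERT IGNORE prefix used above is easiest to verify in isolation. Below is a minimal, runnable sketch of the same pattern with SQLAlchemy Core against an in-memory SQLite database; SQLite spells the prefix "OR IGNORE" where MySQL uses "IGNORE", and the relationships table here is an illustrative stand-in for the real Relationship schema, not the actual model.

import sqlalchemy as sa

engine = sa.create_engine("sqlite://")
metadata = sa.MetaData()
relationships = sa.Table(
    "relationships", metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("source_id", sa.Integer),
    sa.Column("destination_id", sa.Integer),
    # The unique constraint is what makes the IGNORE prefix meaningful.
    sa.UniqueConstraint("source_id", "destination_id"),
)
metadata.create_all(engine)

with engine.connect() as conn:
    inserter = relationships.insert().prefix_with("OR IGNORE")
    conn.execute(inserter.values(source_id=1, destination_id=2))
    # A second insert of the same pair (as when two simultaneous requests
    # generate the same automapping) hits the unique constraint and is
    # silently skipped instead of raising an IntegrityError.
    conn.execute(inserter.values(source_id=1, destination_id=2))
    count = conn.execute(
        sa.select(sa.func.count()).select_from(relationships)
    ).scalar()
    print(count)  # prints 1: the duplicate row was ignored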
Example #2
  def _flush(self, parent_relationship):
    """Manually INSERT generated automappings."""
    if not self.auto_mappings:
      return
    with benchmark("Automapping flush"):
      current_user_id = login.get_current_user_id()
      automapping_result = db.session.execute(
          Automapping.__table__.insert().values(
              relationship_id=parent_relationship.id,
              source_id=parent_relationship.source_id,
              source_type=parent_relationship.source_type,
              destination_id=parent_relationship.destination_id,
              destination_type=parent_relationship.destination_type,
          )
      )
      automapping_id = automapping_result.inserted_primary_key[0]
      self.automapping_ids.add(automapping_id)
      now = datetime.utcnow()
      # We are doing an INSERT IGNORE INTO here to mitigate a race condition
      # that happens when multiple simultaneous requests create the same
      # automapping. If a relationship object fails our unique constraint
      # it means that the mapping was already created by another request
      # and we can safely ignore it.
      inserter = Relationship.__table__.insert().prefix_with("IGNORE")
      original = self.order(Stub.from_source(parent_relationship),
                            Stub.from_destination(parent_relationship))
      db.session.execute(inserter.values([{
          "id": None,
          "modified_by_id": current_user_id,
          "created_at": now,
          "updated_at": now,
          "source_id": src.id,
          "source_type": src.type,
          "destination_id": dst.id,
          "destination_type": dst.type,
          "context_id": None,
          "status": None,
          "parent_id": parent_relationship.id,
          "automapping_id": automapping_id,
          "is_external": False}
          for src, dst in self.auto_mappings
           if (src, dst) != original]))  # each (src, dst) is pre-ordered like original

      self._set_audit_id_for_issues(automapping_id)

      cache = Cache.get_cache(create=True)
      if cache:
        # Add inserted relationships into new objects collection of the cache,
        # so that they will be logged within event and appropriate revisions
        # will be created.
        cache.new.update(
            (relationship, relationship.log_json())
            for relationship in Relationship.query.filter_by(
                automapping_id=automapping_id,
            )
        )
Example #3
def _get_log_revisions(current_user_id, obj=None, force_obj=False):
  """Generate and return revisions bodies for all cached objects."""
  revisions = []
  cache = Cache.get_cache()
  if not cache:
    return revisions
  modified_objects = set(cache.dirty)
  new_objects = set(cache.new)
  delete_objects = set(cache.deleted)
  all_edited_objects = itertools.chain(cache.new, cache.dirty, cache.deleted)
  relationships_changes = (o for o in all_edited_objects
                           if o.type == "Relationship")
  for rel in relationships_changes:
    documentable = rel.get_related_for("Document")
    if documentable and \
       documentable.type not in ("Comment", "ExternalComment"):
      document = rel.get_related_for(documentable.type)
      if rel in new_objects and document not in documentable.documents:
        documentable.documents.append(document)
      if rel in delete_objects and document in documentable.documents:
        documentable.documents.remove(document)
      if (documentable not in new_objects and
              documentable not in delete_objects):
        modified_objects.add(documentable)

  revisions.extend(_revision_generator(
      current_user_id, "created", cache.new
  ))
  revisions = sort_relationship_revisions(revisions)
  revisions.extend(_revision_generator(
      current_user_id, "modified", modified_objects
  ))
  if force_obj and obj is not None and obj not in cache.dirty:
    # If ``obj`` has been updated but only its custom attributes changed,
    # it will not appear in the ``cache.dirty`` set, so no revision would
    # be created for it. The ``force_obj`` flag works around this, albeit
    # in a somewhat hacky way.
    rev = _revision_generator(current_user_id, "modified", (obj,))
    revisions.extend(rev)
  revisions.extend(_revision_generator(
      current_user_id, "deleted", cache.deleted
  ))
  return revisions
Example #4
def _get_log_revisions(current_user_id, obj=None, force_obj=False):
  """Generate and return revisions for all cached objects."""
  revisions = []
  cache = Cache.get_cache()
  if not cache:
    return revisions
  modified_objects = set(cache.dirty)
  new_objects = set(cache.new)
  delete_objects = set(cache.deleted)
  all_edited_objects = itertools.chain(cache.new, cache.dirty, cache.deleted)
  relationships_changes = (o for o in all_edited_objects
                           if o.type == "Relationship")
  for rel in relationships_changes:
    documentable = rel.get_related_for("Document")
    if documentable and \
       documentable.type not in ("Comment", "ExternalComment"):
      document = rel.get_related_for(documentable.type)
      if rel in new_objects and document not in documentable.documents:
        documentable.documents.append(document)
      if rel in delete_objects and document in documentable.documents:
        documentable.documents.remove(document)
      if (documentable not in new_objects and
              documentable not in delete_objects):
        modified_objects.add(documentable)

  revisions.extend(_revision_generator(
      current_user_id, "created", cache.new
  ))
  revisions = sort_relationship_revisions(revisions)
  revisions.extend(_revision_generator(
      current_user_id, "modified", modified_objects
  ))
  if force_obj and obj is not None and obj not in cache.dirty:
    # If ``obj`` has been updated but only its custom attributes changed,
    # it will not appear in the ``cache.dirty`` set, so no revision would
    # be created for it. The ``force_obj`` flag works around this, albeit
    # in a somewhat hacky way.
    revision = Revision(obj, current_user_id, 'modified', obj.log_json())
    revisions.append(revision)
  revisions.extend(_revision_generator(
      current_user_id, "deleted", cache.deleted
  ))
  return revisions
Example #5
def _get_log_revisions(current_user_id, obj=None, force_obj=False):
  """Generate and return revisions for all cached objects."""
  revisions = []
  cache = Cache.get_cache()
  if not cache:
    return revisions
  modified_objects = set(cache.dirty)
  new_objects = set(cache.new)
  delete_objects = set(cache.deleted)
  all_edited_objects = itertools.chain(cache.new, cache.dirty, cache.deleted)
  for o in all_edited_objects:
    if o.type == "ObjectFolder" and o.folderable:
      modified_objects.add(o.folderable)
    if o.type == "Relationship" and o.get_related_for("Document"):
      documentable = o.get_related_for("Document")
      document = o.get_related_for(documentable.type)
      if o in new_objects and document not in documentable.documents:
        documentable.documents.append(document)
      if o in delete_objects and document in documentable.documents:
        documentable.documents.remove(document)
      if (documentable not in new_objects and
              documentable not in delete_objects):
        modified_objects.add(documentable)

  revisions.extend(_revision_generator(
      current_user_id, "created", cache.new
  ))
  revisions.extend(_revision_generator(
      current_user_id, "modified", modified_objects
  ))
  if force_obj and obj is not None and obj not in cache.dirty:
    # If ``obj`` has been updated but only its custom attributes changed,
    # it will not appear in the ``cache.dirty`` set, so no revision would
    # be created for it. The ``force_obj`` flag works around this, albeit
    # in a somewhat hacky way.
    revision = Revision(obj, current_user_id, 'modified', obj.log_json())
    revisions.append(revision)
  revisions.extend(_revision_generator(
      current_user_id, "deleted", cache.deleted
  ))
  return revisions
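
Examples #3 through #5 iterate cache.new, cache.dirty and cache.deleted, and Example #1 fills cache.new with (object, log_json) pairs. The sketch below reproduces only that surface of the Cache class so the following examples are easier to read; the module-level singleton and the dict-based collections are assumptions made for illustration, and the real class also implements update_before_flush / update_after_flush, which the next examples call.

class Cache(object):
    """Per-request collection of new, dirty and deleted objects.

    A minimal stand-in exposing only the surface the examples use;
    the storage strategy here is an assumption, not the real code.
    """

    _instance = None  # assumed singleton; the real cache is per request

    def __init__(self):
        # Mappings of object -> logged JSON. Iterating a dict (or wrapping
        # it in set()) yields its keys, i.e. the objects themselves, which
        # is how the examples above consume these collections.
        self.new = {}
        self.dirty = {}
        self.deleted = {}

    @classmethod
    def get_cache(cls, create=False):
        # create=True lazily builds the cache; create=False only returns
        # an existing one, so read-only callers never allocate a cache.
        if cls._instance is None and create:
            cls._instance = cls()
        return cls._instance

    def clear(self):
        self.new.clear()
        self.dirty.clear()
        self.deleted.clear()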
Example #6
 def clear_cache(session):
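   """Clear cache."""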
   cache = Cache.get_cache()
   if cache:
     cache.clear()
Example #7
 def update_cache_after_flush(session, flush_context):
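   """Update cache after the session flush."""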
   with benchmark("update cache after flush"):
     cache = Cache.get_cache(create=False)
     if cache:
       cache.update_after_flush(session, flush_context)
Example #8
 def update_cache_before_flush(session, flush_context, objects):
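   """Update cache before the session flush."""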
   with benchmark("update cache before flush"):
     cache = Cache.get_cache(create=True)
     if cache:
       cache.update_before_flush(session, flush_context)
Example #9
 def update_cache_after_flush(session, flush_context):
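   """Update cache after the session flush."""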
   cache = Cache.get_cache(create=False)
   if cache:
     cache.update_after_flush(session, flush_context)
Example #10
 def update_cache_before_flush(session, flush_context, objects):
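   """Update cache before the session flush."""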
   cache = Cache.get_cache(create=True)
   if cache:
     cache.update_before_flush(session, flush_context)
Example #11
 def clear_cache(session):
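     """Clear cache."""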
     cache = Cache.get_cache()
     if cache:
         cache.clear()
Example #12
 def update_cache_after_flush(session, flush_context):
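     """Update cache after the session flush."""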
     cache = Cache.get_cache(create=False)
     if cache:
         cache.update_after_flush(session, flush_context)
Example #13
 def update_cache_before_flush(session, flush_context, objects):
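     """Update cache before the session flush."""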
     cache = Cache.get_cache(create=True)
     if cache:
         cache.update_before_flush(session, flush_context)
Example #14
 def update_cache_after_flush(session, flush_context):
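     """Update cache after the session flush."""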
     with benchmark("update cache after flush"):
         cache = Cache.get_cache(create=False)
         if cache:
             cache.update_after_flush(session, flush_context)
Example #15
 def update_cache_before_flush(session, flush_context, objects):
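     """Update cache before the session flush."""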
     with benchmark("update cache before flush"):
         cache = Cache.get_cache(create=True)
         if cache:
             cache.update_before_flush(session, flush_context)
Example #16
 def clear_cache(session):
     """Clear cache."""
     cache = Cache.get_cache()
     if cache:
         cache.clear()
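
The hook signatures above match SQLAlchemy's session events: before_flush receives (session, flush_context, instances) and after_flush receives (session, flush_context). The sketch below shows one plausible way to wire them up; the event names are real SQLAlchemy identifiers, but this registration function is hypothetical, not the application's actual setup.

import sqlalchemy.event

def register_cache_hooks(session):
    # Attach the hooks defined above to a session (or sessionmaker).
    sqlalchemy.event.listen(session, "before_flush", update_cache_before_flush)
    sqlalchemy.event.listen(session, "after_flush", update_cache_after_flush)
    # Clearing on commit and rollback keeps stale entries out of the next
    # unit of work.
    sqlalchemy.event.listen(session, "after_commit", clear_cache)
    sqlalchemy.event.listen(session, "after_rollback", clear_cache)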