def do_missing_revisions():
  """Create 'created/modified' revisions for objects that lack them.

  Iterate through objects listed in the objects_without_revisions table
  and create the missing revisions in chunks.  For objects that no longer
  exist but whose pending action is "deleted", the content of the last
  known revision is reused; objects that cannot be resolved are skipped
  with a log message.
  """
  from ggrc.utils.user_generator import get_migrator_id

  # One BULK event groups every revision produced by this run.
  event = all_models.Event(modified_by_id=get_migrator_id(), action="BULK")
  db.session.add(event)
  db.session.commit()

  revisions_table = all_models.Revision.__table__
  count = _get_new_objects_count()
  chunk_size = 100
  logger.info("Creating revision content...")
  for index, chunk in enumerate(_get_chunks(_get_new_objects(),
                                            chunk_size), 1):
    # Floor division keeps the chunk total an int on both Python 2 and 3.
    logger.info("Processing chunk %s of %s", index, count // chunk_size + 1)
    revisions = []
    for obj_id, obj_type, action, modified_by_id in chunk:
      model = getattr(all_models, obj_type, None)
      if not model:
        logger.warning(
            "Failed to update revisions"
            " for invalid model: %s", obj_type)
        continue
      if not hasattr(model, "log_json"):
        logger.warning(
            "Model '%s' has no log_json method,"
            " revision generation skipped", obj_type)
        continue

      obj = model.query.get(obj_id)
      if obj:
        obj_content = obj.log_json()
      elif action == u"deleted":
        # Object is already gone: fall back to the content of its last
        # stored revision so the "deleted" revision still has a snapshot.
        obj_content = get_last_revision_content(obj_type, obj_id)
        if not obj_content:
          logger.info(
              "Revision for Object '%s' with id '%s' doesn't exist,"
              " 'deleted' revision generation skipped", obj_type, obj_id)
          continue
      else:
        logger.info(
            "Object '%s' with id '%s' doesn't exist,"
            " revision generation skipped", obj_type, obj_id)
        continue

      revisions.append(
          build_revision_body(obj_id, obj_type, obj_content, event.id,
                              action, modified_by_id))

    # Skip the insert when every row in the chunk was filtered out;
    # executemany with an empty parameter list is an error in SQLAlchemy.
    if revisions:
      db.session.execute(revisions_table.insert(), revisions)
    db.session.commit()
  db.session.execute("truncate objects_without_revisions")
def do_refresh_revisions():
  """Update last revisions of models with fixed data."""
  bulk_event = all_models.Event(action="BULK")
  db.session.add(bulk_event)
  db.session.flush([bulk_event])
  # TODO: Improve performance/memory consumption so that we can run
  # _fix_type_revisions for all objects and not just the objects that are
  # snapshottable
  affected_types = sorted(Types.all | {"Assessment"})
  for model_type in affected_types:
    logger.info("Updating revisions for: %s", model_type)
    _fix_type_revisions(bulk_event, model_type,
                        _get_revisions_by_type(model_type))
def do_missing_revisions():
  """Create 'created/modified' revisions for objects that lack them.

  Iterate through objects listed in the objects_without_revisions table
  and create the missing revisions in chunks.  Objects that no longer
  exist, or whose model cannot be resolved, are skipped with a log
  message.
  """
  # One BULK event groups every revision produced by this run.
  event = all_models.Event(action="BULK")
  db.session.add(event)
  db.session.commit()

  revisions_table = all_models.Revision.__table__
  count = _get_new_objects_count()
  chunk_size = 100
  logger.info("Creating revision content...")
  for index, chunk in enumerate(_get_chunks(_get_new_objects(),
                                            chunk_size), 1):
    # Floor division keeps the chunk total an int on both Python 2 and 3.
    logger.info("Processing chunk %s of %s", index, count // chunk_size + 1)
    revisions = []
    for obj_id, obj_type, action in chunk:
      model = getattr(all_models, obj_type, None)
      if not model:
        logger.warning(
            "Failed to update revisions"
            " for invalid model: %s", obj_type)
        continue
      if not hasattr(model, "log_json"):
        logger.warning(
            "Model '%s' has no log_json method,"
            " revision generation skipped", obj_type)
        continue
      obj = model.query.get(obj_id)
      if not obj:
        logger.info(
            "Object '%s' with id '%s' doesn't exist,"
            " revision generation skipped", obj_type, obj_id)
        continue
      obj_content = obj.log_json()
      revisions.append({
          "resource_id": obj_id,
          "resource_type": obj_type,
          "resource_slug": obj_content.get("slug"),
          "event_id": event.id,
          "action": action,
          "content": obj_content,
          "context_id": obj_content.get("context_id"),
          # Fall back to the current user when the object carries no
          # modified_by_id of its own.
          "modified_by_id": (obj_content.get("modified_by_id") or
                             get_current_user_id()),
          "source_type": obj_content.get("source_type"),
          "source_id": obj_content.get("source_id"),
          "destination_type": obj_content.get("destination_type"),
          "destination_id": obj_content.get("destination_id")
      })
    # Skip the insert when every row in the chunk was filtered out;
    # executemany with an empty parameter list is an error in SQLAlchemy.
    if revisions:
      db.session.execute(revisions_table.insert(), revisions)
    db.session.commit()
  db.session.execute("truncate objects_without_revisions")