def update_cad_related_objects(task):
  """Update CAD related objects"""
  event = models.all_models.Event.query.filter_by(
      id=task.parameters.get("event_id")
  ).first()
  model = models.get_model(task.parameters.get("model_name"))
  if issubclass(model, models.mixins.ExternalCustomAttributable):
    cad_model = models.all_models.ExternalCustomAttributeDefinition
  else:
    cad_model = models.all_models.CustomAttributeDefinition
  cad = cad_model.query.filter_by(id=event.resource_id).first()
  query = db.session.query(
      model if task.parameters.get("need_revisions") else model.id
  )
  if event.action == "PUT":
    refresh_program_cads_title(cad)
  objects_count = len(query.all())
  handled_objects = 0
  for chunk in ggrc_utils.generate_query_chunks(query):
    chunk_objects = chunk.all()
    handled_objects += len(chunk_objects)
    logger.info(
        "Updating CAD related objects: %s/%s", handled_objects, objects_count
    )
    if task.parameters.get("need_revisions"):
      for obj in chunk_objects:
        obj.updated_at = datetime.datetime.utcnow()
        obj.modified_by_id = task.parameters.get("modified_by_id")
    else:
      model.bulk_record_update_for([obj_id for obj_id, in chunk_objects])
    log_event.log_event(db.session, cad, event=event)
    db.session.commit()
  return app.make_response(("success", 200, [("Content-Type", "text/html")]))

def update_cad_related_objects(task):
  """Update CAD related objects"""
  event_id = task.parameters.get("event_id")
  model_name = task.parameters.get("model_name")
  need_revisions = task.parameters.get("need_revisions")
  modified_by_id = task.parameters.get("modified_by_id")
  event = models.all_models.Event.query.filter_by(id=event_id).first()
  cad = models.all_models.CustomAttributeDefinition.query.filter_by(
      id=event.resource_id
  ).first()
  model = models.get_model(model_name)
  query = db.session.query(model if need_revisions else model.id)
  objects_count = query.count()
  handled_objects = 0
  for chunk in ggrc_utils.generate_query_chunks(query):
    handled_objects += chunk.count()
    logger.info(
        "Updating CAD related objects: %s/%s", handled_objects, objects_count
    )
    if need_revisions:
      for obj in chunk:
        obj.updated_at = datetime.datetime.utcnow()
        obj.modified_by_id = modified_by_id
    else:
      model.bulk_record_update_for([obj_id for obj_id, in chunk])
    log_event.log_event(db.session, cad, event=event)
    db.session.commit()
  return app.make_response(("success", 200, [("Content-Type", "text/html")]))

def find_users(emails):
  """Find or generate users.

  If an Integration Server is specified, users not found in the DB are
  generated with the Creator role.
  """
  # pylint: disable=too-many-locals
  if not settings.INTEGRATION_SERVICE_URL:
    return Person.query.filter(Person.email.in_(emails)).options(
        orm.undefer_group('Person_complete')).all()

  # Verify emails
  usernames = [email.split('@')[0] for email in emails
               if is_authorized_domain(email) and
               not is_external_app_user_email(email)]

  service = client.PersonClient()
  ldaps = service.search_persons(usernames)

  authorized_domain = getattr(settings, "AUTHORIZED_DOMAIN", "")
  verified_emails = {'%s@%s' % (ldap['username'], authorized_domain)
                     for ldap in ldaps}

  # Find users in db
  users = Person.query.filter(Person.email.in_(emails)).all()
  found_emails = {user.email for user in users}

  # Create new users
  new_emails = verified_emails - found_emails
  new_usernames = [email.split('@')[0] for email in new_emails]
  new_users = [('%s@%s' % (ldap['username'], authorized_domain),
                '%s %s' % (ldap['firstName'], ldap['lastName']))
               for ldap in ldaps if ldap['username'] in new_usernames]

  for email, name in new_users:
    user = create_user(email,
                       name=name,
                       modified_by_id=get_current_user_id())
    users.append(user)

  # bulk create people
  if new_users:
    log_event(db.session)
    db.session.commit()

  creator_role_granted = False
  # Grant Creator role to all users
  for user in users:
    if user.system_wide_role == SystemWideRoles.NO_ACCESS:
      add_creator_role(user)
      creator_role_granted = True

  # bulk create people roles
  if creator_role_granted:
    log_event(db.session)
    db.session.commit()
  return users

def add_creator_role(user):
  """Add Creator role for the given user."""
  user_creator_role = UserRole(
      person=user,
      role=basic_roles.creator(),
  )
  db.session.add(user_creator_role)
  db.session.commit()
  log_event(db.session, user_creator_role, user_creator_role.id)

def create_external_user(app_user, external_user_email):
  """Create external user."""
  external_user = find_user(external_user_email, modifier=app_user.id)
  if external_user and external_user.id is None:
    db.session.flush()
    log_event(db.session, external_user, app_user.id)
  return external_user

def create_external_user(app_user, external_user_email):
  """Create external user."""
  external_user = find_user(external_user_email, modifier=app_user.id)
  if external_user and external_user.id is None:
    db.session.flush()
    log_event(db.session, external_user, app_user.id)
    db.session.commit()
  return external_user

def _get_profile(self, **kwargs):
  """Get person profile"""
  get_profile = self._get_or_create_profile(kwargs["id"])
  if get_profile[0]:
    log_event(db.session, get_profile[1])
    db.session.commit()
  response_json = {
      "last_seen_whats_new": get_profile[1].last_seen_whats_new
  }
  return self.json_success_response(response_json, )

def create_user(email, **kwargs):
  """Create User

  attr:
      email (string) required
  """
  user = Person(email=email, **kwargs)
  db.session.add(user)
  db.session.flush()
  log_event(db.session, user, user.id)
  db.session.commit()
  return user

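# Illustrative usage only, not part of the original module: this mirrors the
# call made in find_users above. The email and name are made-up example
# values.
#
#   user = create_user("jdoe@example.com",
#                      name="Jane Doe",
#                      modified_by_id=get_current_user_id())
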
def post(self):
  """Handler of POST method."""
  if login.is_external_app_user():
    raise exceptions.Forbidden()
  is_modified = self._perform_request()
  if is_modified:
    # create revision and commit changes
    log_event(db.session)
    db.session.commit()
  return "Success", 200

def commit_object(self):
  """Commit the row.

  This method also calls pre- and post-commit signals and handles failures.
  """
  if self.block_converter.converter.dry_run or self.ignore:
    return
  try:
    modified_objects = get_modified_objects(db.session)
    import_event = log_event(db.session, None)
    cache_utils.update_memcache_before_commit(
        self.block_converter,
        modified_objects,
        self.block_converter.CACHE_EXPIRY_IMPORT,
    )
    try:
      self.send_before_commit_signals(import_event)
    except StatusValidationError as exp:
      status_alias = self.headers.get("status", {}).get("display_name")
      self.add_error(errors.VALIDATION_ERROR,
                     column_name=status_alias,
                     message=exp.message)
    db.session.commit()
    self.block_converter._store_revision_ids(import_event)
    cache_utils.update_memcache_after_commit(self.block_converter)
    update_snapshot_index(db.session, modified_objects)
  except exc.SQLAlchemyError as err:
    db.session.rollback()
    logger.exception("Import failed with: %s", err.message)
    self.block_converter.add_errors(errors.UNKNOWN_ERROR,
                                    line=self.offset + 2)
  else:
    self.send_post_commit_signals(event=import_event)

def save_import(self):
  """Commit all changes in the session and update memcache."""
  try:
    modified_objects = get_modified_objects(db.session)
    import_event = log_event(db.session, None)
    cache_utils.update_memcache_before_commit(self, modified_objects,
                                              CACHE_EXPIRY_IMPORT)
    for row_converter in self.row_converters:
      try:
        row_converter.send_before_commit_signals(import_event)
      except StatusValidationError as exp:
        status_alias = row_converter.headers.get(
            "status", {}).get("display_name")
        row_converter.add_error(errors.VALIDATION_ERROR,
                                column_name=status_alias,
                                message=exp.message)
    db.session.commit()
    self._store_revision_ids(import_event)
    cache_utils.update_memcache_after_commit(self)
    update_snapshot_index(db.session, modified_objects)
    return import_event
  except exc.SQLAlchemyError as err:
    db.session.rollback()
    logger.exception("Import failed with: %s", err.message)
    self.add_errors(errors.UNKNOWN_ERROR, line=self.offset + 2)

def commit_user_and_role(user):
  """Commits and flushes user and its role after the login."""
  db_user, db_role = None, None
  if hasattr(flask.g, "user_cache"):
    db_user = flask.g.user_cache.get(user.email, None)
  if hasattr(flask.g, "user_creator_roles_cache"):
    db_role = flask.g.user_creator_roles_cache.get(user.email, None)
  if db_user or db_role:
    db.session.flush()
    if db_user:
      log_event(db.session, db_user, db_user.id, flush=False)
    elif db_role:
      # log_event of user includes event of role creation.
      # if no user in cache, then it was created before but has no role.
      log_event(db.session, db_role, user.id, flush=False)
    db.session.commit()

def flush_object(self):
  """Flush dirty data related to the current row."""
  if self.dry_run or self.ignore:
    return
  self.send_pre_commit_signals()
  try:
    if self.object_class == all_models.Audit and self.is_new:
      # This hack is needed only for snapshot creation
      # for audit during import, this is really bad and
      # needs to be refactored
      import_event = log_event(db.session, None)
    self.insert_object()
    db.session.flush()
    if self.object_class == all_models.Audit and self.is_new:
      # This hack is needed only for snapshot creation
      # for audit during import, this is really bad and
      # needs to be refactored
      create_snapshots(self.obj, import_event)
  except exc.SQLAlchemyError as err:
    db.session.rollback()
    logger.exception("Import failed with: %s", err.message)
    self.add_error(errors.UNKNOWN_ERROR)
    return
  if self.is_new and not self.ignore:
    self.block_converter.send_collection_post_signals([self.obj])

def _set_profile(self, **kwargs):
  """Update person profile"""
  json = self.request.json
  get_profile = self._get_or_create_profile(kwargs["id"])
  try:
    requested_date_time = date_parser.parse(
        json["last_seen_whats_new"])
    get_profile[1].last_seen_whats_new = requested_date_time
  except Exception as err:
    logger.exception(err)
    raise BadRequest()
  log_event(db.session, get_profile[1])
  db.session.commit()
  response_json = {"Person": {"id": kwargs["id"], "profile": json}}
  return self.json_success_response(response_json, )

def _set_profile(self, **kwargs):
  """Update person profile"""
  json = self.request.json
  get_profile = self._get_or_create_profile(kwargs["id"])
  try:
    requested_date_time = date_parser.parse(json["last_seen_whats_new"])
    offset_naive = requested_date_time.replace(tzinfo=None)
    get_profile[1].last_seen_whats_new = offset_naive
  except Exception as err:
    logger.exception(err)
    raise BadRequest()
  log_event(db.session, get_profile[1])
  db.session.commit()
  response_json = {
      "Person": {"id": kwargs["id"], "profile": json}
  }
  return self.json_success_response(response_json, )

def handle_model_clone(cls, query):
  """Process cloning of objects.

  Args:
    query: Dict with parameters for the cloning procedure. It should have
      the following structure:
      {
          "sourceObjectIds": [1, 2],
          "destination": {"type": "Audit", "id": 2},  # optional
          "mappedObjects": []  # optional
      }.

  Returns:
    Response with status code 200 in case of success and 400 if provided
    parameters are invalid.
  """
  source_objs, destination, mapped_types = cls._parse_query(query)
  clonned_objs = {}
  for source_obj in source_objs:
    if (not permissions.is_allowed_read_for(source_obj) or
        not permissions.is_allowed_create(source_obj.type,
                                          source_obj.id,
                                          destination.context_id)):
      raise exceptions.Forbidden()
    clonned_objs[source_obj] = cls._copy_obj(source_obj, destination)

  for target, mapped_obj in cls._collect_mapped(source_objs, mapped_types):
    clonned_objs[mapped_obj] = cls._copy_obj(mapped_obj, target)

  cls._set_parent_context(clonned_objs.values(), destination)
  db.session.flush()
  for source, clonned in clonned_objs.items():
    cls._clone_cads(source, clonned)

  if clonned_objs:
    db.session.add(log_event(db.session, flush=False))
  db.session.commit()

  from ggrc.query import views
  collections = []
  if cls.RETURN_OBJ_JSON:
    for obj in clonned_objs:
      collections.append(
          views.build_collection_representation(cls, obj.log_json())
      )
  return views.json_success_response(collections, datetime.datetime.utcnow())

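# Illustrative usage only: a hedged sketch of the query dict shape documented
# in handle_model_clone's docstring. "CloneHandler" is a hypothetical
# placeholder name, not a class from the codebase.
#
#   CloneHandler.handle_model_clone({
#       "sourceObjectIds": [1, 2],
#       "destination": {"type": "Audit", "id": 2},
#       "mappedObjects": [],
#   })
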
def start_recurring_cycles():
  """Start recurring cycles by cron job."""
  with benchmark("contributed cron job start_recurring_cycles"):
    today = date.today()
    workflows = models.Workflow.query.filter(
        models.Workflow.next_cycle_start_date <= today,
        models.Workflow.recurrences == True  # noqa
    )
    for workflow in workflows:
      # Follow same steps as in model_posted.connect_via(models.Cycle)
      while workflow.next_cycle_start_date <= date.today():
        cycle = build_cycle(workflow)
        if not cycle:
          break
        db.session.add(cycle)
        notification.handle_cycle_created(cycle, False)
        notification.handle_workflow_modify(None, workflow)
      # db.session.commit was moved into cycle intentionally.
      # 'Cycles' for each 'Workflow' should be committed separately
      # to free memory on each iteration. Single commit exceeded
      # maximum memory limit on AppEngine instance.
      log_event(db.session)
      db.session.commit()

def delete_relationship(self, relationship):
  """Send post deletion signals."""
  db.session.delete(relationship)
  signals.Restful.model_deleted.send(
      models.Relationship, obj=relationship, service=self)
  modified_objects = services_common.get_modified_objects(db.session)
  event = log_event.log_event(db.session, relationship)
  cache_utils.update_memcache_before_commit(
      self.request,
      modified_objects,
      services_common.CACHE_EXPIRY_COLLECTION)
  db.session.flush()
  services_common.update_snapshot_index(modified_objects)
  cache_utils.update_memcache_after_commit(flask.request)
  signals.Restful.model_deleted_after_commit.send(
      models.Relationship, obj=relationship, service=self, event=event)
  services_common.send_event_job(event)

def start_recurring_cycles():
  """Start recurring cycles by cron job."""
  with benchmark("contributed cron job start_recurring_cycles"):
    today = date.today()
    workflows = models.Workflow.query.filter(
        models.Workflow.next_cycle_start_date <= today,
        models.Workflow.recurrences == True  # noqa
    )
    event = None
    for workflow in workflows:
      # Follow same steps as in model_posted.connect_via(models.Cycle)
      while workflow.next_cycle_start_date <= date.today():
        cycle = build_cycle(workflow)
        if not cycle:
          break
        db.session.add(cycle)
        notification.handle_cycle_created(cycle, False)
        notification.handle_workflow_modify(None, workflow)
      # db.session.commit was moved into cycle intentionally.
      # 'Cycles' for each 'Workflow' should be committed separately
      # to free memory on each iteration. Single commit exceeded
      # maximum memory limit on AppEngine instance.
      event = log_event(db.session, event=event)
      db.session.commit()