def add_creator_role(user):
  user_creator_role = UserRole(
      person=user,
      role=basic_roles.creator(),
  )
  db.session.add(user_creator_role)
  db.session.commit()
  log_event(db.session, user_creator_role, user_creator_role.id)
def add_creator_role(user): """Add createor role for sent user.""" user_creator_role = UserRole( person=user, role=basic_roles.creator(), ) db.session.add(user_creator_role) db.session.commit() log_event(db.session, user_creator_role, user_creator_role.id)
def save_import(self):
  for row_converter in self.objects:
    row_converter.save(db.session, **self.options)
  db.session.flush()
  for row_converter in self.objects:
    row_converter.run_after_save_hooks(db.session, **self.options)
  modified_objects = get_modified_objects(db.session)
  log_event(db.session)
  db.session.commit()
  update_index_for_objects(db.session, modified_objects)
def create_user(email, **kwargs): """Create User attr: email (string) required """ user = Person(email=email, **kwargs) db.session.add(user) db.session.flush() log_event(db.session, user, user.id) db.session.commit() return user
def save_import(self): """Commit all changes in the session and update memcache.""" try: modified_objects = get_modified_objects(db.session) log_event(db.session, None) update_memcache_before_commit( self, modified_objects, CACHE_EXPIRY_IMPORT) db.session.commit() update_memcache_after_commit(self) update_index(db.session, modified_objects) except exc.SQLAlchemyError as err: db.session.rollback() logger.exception("Import failed with: %s", err.message) self.add_errors(errors.UNKNOWN_ERROR, line=self.offset + 2)
def create_user(email, **kwargs):
  user = Person(email=email, **kwargs)
  db.session.add(user)
  db.session.flush()
  log_event(db.session, user, user.id)
  user_context = Context(
      name='Personal Context for {0}'.format(email),
      description='',
      related_object=user,
      context_id=1,
  )
  db.session.add(user_context)
  db.session.commit()
  get_indexer().create_record(fts_record_for(user))
  return user
def start_recurring_cycles():
  today = date.today()
  workflows = models.Workflow.query.filter(
      models.Workflow.next_cycle_start_date <= today,
      models.Workflow.recurrences == True  # noqa
  )
  for workflow in workflows:
    # Follow same steps as in model_posted.connect_via(models.Cycle)
    while workflow.next_cycle_start_date <= date.today():
      cycle = build_cycle(workflow)
      if not cycle:
        break
      db.session.add(cycle)
      notification.handle_cycle_created(cycle, False)
      notification.handle_workflow_modify(None, workflow)
  log_event(db.session)
  db.session.commit()
def start_recurring_cycles():
  # Get all workflows that should start a new cycle today
  # The next_cycle_start_date is precomputed and stored when a cycle is created
  today = date.today()
  workflows = db.session.query(models.Workflow)\
      .filter(
          models.Workflow.next_cycle_start_date == today,
          models.Workflow.recurrences == True  # noqa
      ).all()

  # For each workflow, start and save a new cycle.
  for workflow in workflows:
    cycle = models.Cycle()
    cycle.workflow = workflow
    cycle.calculator = workflow_cycle_calculator.get_cycle_calculator(
        workflow)
    cycle.context = workflow.context
    # We can do this because we selected only workflows with
    # next_cycle_start_date = today
    cycle.start_date = date.today()

    # Flag the cycle to be saved
    db.session.add(cycle)

    if workflow.non_adjusted_next_cycle_start_date:
      base_date = workflow.non_adjusted_next_cycle_start_date
    else:
      base_date = date.today()

    # Create the cycle (including all child objects)
    build_cycle(cycle, base_date=base_date)

    # Update the workflow next_cycle_start_date to push it ahead based on the
    # frequency.
    adjust_next_cycle_start_date(cycle.calculator, workflow, move_forward=True)
    db.session.add(workflow)

    notification.handle_workflow_modify(None, workflow)
    notification.handle_cycle_created(None, obj=cycle)

  log_event(db.session)
  db.session.commit()
def create_user(email, **kwargs): """Create User attr: email (string) required """ user = Person(email=email, **kwargs) db.session.add(user) db.session.flush() log_event(db.session, user, user.id) user_context = Context( name='Personal Context for {0}'.format(email), description='', related_object=user, context_id=1, ) db.session.add(user_context) db.session.commit() return user
def start_recurring_cycles():
  # Get all workflows that should start a new cycle today
  # The next_cycle_start_date is precomputed and stored when a cycle is created
  today = date.today()
  workflows = db.session.query(models.Workflow)\
      .filter(
          models.Workflow.next_cycle_start_date == today,
          models.Workflow.recurrences == True  # noqa
      ).all()

  # For each workflow, start and save a new cycle.
  for workflow in workflows:
    cycle = models.Cycle()
    cycle.workflow = workflow
    cycle.calculator = workflow_cycle_calculator.get_cycle_calculator(workflow)
    cycle.context = workflow.context
    # We can do this because we selected only workflows with
    # next_cycle_start_date = today
    cycle.start_date = date.today()

    # Flag the cycle to be saved
    db.session.add(cycle)

    if workflow.non_adjusted_next_cycle_start_date:
      base_date = workflow.non_adjusted_next_cycle_start_date
    else:
      base_date = date.today()

    # Create the cycle (including all child objects)
    build_cycle(cycle, base_date=base_date)

    # Update the workflow next_cycle_start_date to push it ahead based on the
    # frequency.
    adjust_next_cycle_start_date(cycle.calculator, workflow, move_forward=True)
    db.session.add(workflow)

    notification.handle_workflow_modify(None, workflow)
    notification.handle_cycle_created(None, obj=cycle)

  log_event(db.session)
  db.session.commit()
def save_import(self): """Commit all changes in the session and update memcache.""" try: modified_objects = get_modified_objects(db.session) import_event = log_event(db.session, None) update_memcache_before_commit( self, modified_objects, CACHE_EXPIRY_IMPORT) db.session.commit() update_memcache_after_commit(self) update_snapshot_index(db.session, modified_objects) return import_event except exc.SQLAlchemyError as err: db.session.rollback() logger.exception("Import failed with: %s", err.message) self.add_errors(errors.UNKNOWN_ERROR, line=self.offset + 2)
def log_event(): """Log event action.""" common.get_modified_objects(db.session) common.log_event(db.session, flush=False)