def test_sql_task_is_updated_on_plone_object_update(self):
    """Firing ObjectModifiedEvent for a changed Plone task must sync all
    indexed attributes onto the existing SQL ``Task`` row (update in
    place, not insert a second row).
    """
    self.task.responsible_client = 'asd'
    self.task.title = u'Gopf, iz mach mau'
    notify(ObjectModifiedEvent(self.task))

    # Exactly one row: the event handler must update, never duplicate.
    self.assertEqual(1, Session.query(Task).count())
    task = Session.query(Task).one()

    # Attributes taken from the modified object itself.
    self.assertEqual('asd', task.assigned_org_unit)
    self.assertEqual(u'dossier > subdossier > Gopf, iz mach mau',
                     task.breadcrumb_title)
    self.assertEqual(u'Gopf, iz mach mau', task.title)

    # Attributes derived from the current org/admin unit and user.
    self.assertEqual(get_current_org_unit().id(), task.issuing_org_unit)
    self.assertEqual(get_current_admin_unit().id(), task.admin_unit_id)
    self.assertEqual(self.user.userid, task.issuer)
    self.assertEqual(self.user.userid, task.responsible)

    # Workflow / location metadata.
    self.assertEqual(u'task-state-open', task.review_state)
    self.assertEqual(u'dossier-1/dossier-2/task-1', task.physical_path)
    self.assertIsNotNone(task.icon)
    self.assertEqual(task.deadline, date(2010, 1, 1))
    self.assertIsNotNone(task.modified)
    self.assertEqual('direct-execution', task.task_type)
    self.assertFalse(task.is_subtask)
    self.assertEqual(1, task.sequence_number)
    self.assertEqual('Client1 / 1.1', task.reference_number)
    self.assertEqual('dossier', task.containing_dossier)
    self.assertEqual('subdossier', task.containing_subdossier)
    self.assertEqual(2, task.dossier_sequence_number)
    self.assertEqual('Lorem ipsum dolor sit amet, consectetur', task.text)
    self.assertSequenceEqual([TEST_USER_ID], task.principals)
    self.assertIsNone(task.predecessor)
def get_sql_task(self):
    """Return the SQL task row matching ``self.task_query``.

    When no row exists yet, a fresh ``Task`` is created from the
    attributes collected at construction time and registered with the
    session before being returned.
    """
    existing = self.task_query.first()
    if existing is not None:
        return existing

    created = Task(
        self.obj_id, self.admin_unit_id,
        issuing_org_unit=self.current_org_unit_id,
        assigned_org_unit=self.assigned_org_unit,
        sequence_number=self.sequence_number)
    Session.add(created)
    return created
def _get_tasks_for_issuer_query(self, issuer, sort_on='modified',
                                sort_order='reverse'):
    """Returns a sqlalchemy query of all tasks issued by the given
    issuer.

    :param issuer: userid to match against ``Task.issuer``.
    :param sort_on: name of a ``Task`` column to order by.
    :param sort_order: ``'reverse'`` for descending, anything else
        for ascending.
    """
    sort_on = getattr(Task, sort_on)
    # Both branches built the identical query before; only the order
    # direction differs, so select it once and build the query once.
    order = desc if sort_order == 'reverse' else asc
    return Session().query(Task).filter(
        Task.issuer == issuer).order_by(order(sort_on))
def _get_tasks_for_assigned_client_query(self, client, sort_on='modified',
                                         sort_order='reverse'):
    """Return a sqlalchemy query of all tasks assigned to the given
    client.

    :param client: client id to match against ``Task.assigned_client``.
    :param sort_on: name of a ``Task`` column to order by.
    :param sort_order: ``'reverse'`` for descending, anything else
        for ascending.
    """
    sort_on = getattr(Task, sort_on)
    # Both branches built the identical query before; only the order
    # direction differs, so select it once and build the query once.
    order = desc if sort_order == 'reverse' else asc
    return Session().query(Task).filter(
        Task.assigned_client == client).order_by(order(sort_on))
def get_task_rows(self):
    """Build the row listing for all open tasks of the current client.

    Returns a dict with two keys:

    incoming -- rows for open tasks assigned to the current org unit
    outgoing -- rows for open tasks this org unit issued to others
    """
    unit_id = get_current_org_unit().id()

    incoming_tasks = self._extend_task_query(
        Session().query(Task).filter(
            Task.assigned_org_unit == unit_id)).all()
    incoming = [
        self.get_row_for_item(item, display_issuing_org_unit=True)
        for item in incoming_tasks]

    outgoing_tasks = self._extend_task_query(
        Session().query(Task).filter(
            Task.issuing_org_unit == unit_id)).all()
    outgoing = [
        self.get_row_for_item(item, display_assigned_org_unit=True)
        for item in outgoing_tasks]

    return {'incoming': incoming, 'outgoing': outgoing}
def _get_tasks_for_responsible_query(self, responsible, sort_on='modified',
                                     sort_order='reverse'):
    """Returns a sqlalchemy query of all tasks assigned to the given
    responsible.

    :param responsible: userid to match against ``Task.responsible``.
    :param sort_on: name of a ``Task`` column to order by.
    :param sort_order: ``'reverse'`` for descending, anything else
        for ascending.
    """
    sort_on = getattr(Task, sort_on)
    # Both branches built the identical query before; only the order
    # direction differs, so select it once and build the query once.
    order = desc if sort_order == 'reverse' else asc
    return Session().query(Task).filter(
        Task.responsible == responsible).order_by(order(sort_on))
def get_sql_task(self):
    """Look up the SQL task row for ``self.obj`` on the current admin
    unit; create, register and return a new row (including the creation
    timestamp) when none exists yet.
    """
    admin_unit_id = get_current_admin_unit().id()

    existing = Session.query(Task).filter_by(
        admin_unit_id=admin_unit_id, int_id=self.obj_id).first()
    if existing is not None:
        return existing

    created = Task(
        self.obj_id, admin_unit_id,
        issuing_org_unit=get_current_org_unit().id(),
        assigned_org_unit=self.obj.responsible_client,
        sequence_number=self.obj.get_sequence_number(),
        created=self.obj.created().asdatetime())
    Session.add(created)
    return created
def get_tasks_for_client(self, client, sort_on='modified'):
    """Return the list of all tasks stored for *client*, ordered
    ascending by the ``Task`` column named in *sort_on*.
    """
    column = getattr(Task, sort_on)
    query = Session().query(Task).filter(Task.client_id == client)
    return query.order_by(asc(column)).all()
def check_predecessor_sync(self):
    """Check the review-state synchronisation between predecessor and
    successor tasks and log every mismatch found.
    """
    log = self.mklog()
    # Every task with a predecessor set is a successor of something.
    successors = Session().query(Task).filter(Task.predecessor != None)
    info = getUtility(IContactInformation)
    sync_problems_counter = 0
    for successor in successors:
        predecessor = successor.predecessor
        # A state mismatch is only a problem when the predecessor is not
        # a closed forwarding (those legitimately diverge).
        if (predecessor.review_state != successor.review_state
                and predecessor.review_state != u'forwarding-state-closed'):
            client = info.get_client_by_id(predecessor.client_id)
            predecessor_url = ' - Predecessor:%s\n State: %s\n Url: %s/%s' % (
                predecessor.title,
                predecessor.review_state,
                client.public_url,
                predecessor.physical_path)
            # `client` is deliberately re-resolved for the successor side.
            client = info.get_client_by_id(successor.client_id)
            successor_url = ' - Successor:%s\n State: %s\n Url: %s/%s' % (
                successor.title,
                successor.review_state,
                client.public_url,
                successor.physical_path)
            log("State synchronisation invalid:\n%s\n%s\n\n" % (
                predecessor_url, successor_url))
            sync_problems_counter += 1
    log('Predecessor synchronisation check finished:'
        '\n %i Problems detected' % (sync_problems_counter))
def get_task(self, int_id, client_id):
    """Return the task identified by *int_id* and *client_id*, or
    ``None`` when no such task is indexed.
    """
    query = Session().query(Task).filter(
        Task.client_id == client_id).filter(Task.int_id == int_id)
    try:
        return query.one()
    except NoResultFound:
        return None
def test_sql_task_is_updated_when_container_is_moved(self):
    """Moving the containing subdossier into another dossier must update
    the location-derived attributes of the SQL task row (path, sequence
    and reference numbers), while the subdossier title stays the same.
    """
    dossier2 = create(Builder('dossier')
                      .titled(u'Dossier 2'))
    api.content.move(source=self.subdossier, target=dossier2)

    task = Session.query(Task).one()
    self.assertEqual('subdossier', task.containing_subdossier)
    self.assertEqual(u'dossier-3/dossier-2/task-1', task.physical_path)
    self.assertEqual(2, task.dossier_sequence_number)
    self.assertEqual(u'Client1 / 2.1', task.reference_number)
def __init__(self, obj, event):
    """Store the event context and precompute attributes needed for
    syncing *obj* to its SQL task row.

    All OGDS lookups are skipped while Plone is being uninstalled —
    presumably because the global utilities are no longer available
    then (TODO confirm against is_uninstalling_plone).
    """
    self.obj = obj
    self.event = event
    if not self.is_uninstalling_plone():
        self.admin_unit_id = get_current_admin_unit().id()
        self.current_org_unit_id = get_current_org_unit().id()
        self.sequence_number = self.obj.get_sequence_number()
        self.assigned_org_unit = obj.responsible_client
        self.obj_id = self.get_object_id()
        # Prebuilt query matching this object's row in the global index.
        self.task_query = Session.query(Task).filter_by(
            admin_unit_id=self.admin_unit_id, int_id=self.obj_id)
def get_task_by_path(self, path, client_id):
    """Return the task on *client_id* whose physical path (relative to
    the site root!) equals *path*, or ``None`` when not indexed.
    """
    matches = Session().query(Task).filter(
        Task.client_id == client_id).filter(Task.physical_path == path)
    try:
        return matches.one()
    except NoResultFound:
        return None
def __call__(self):
    """Clear and rebuild the global task index for the current client,
    then redirect back to the OGDS control panel with a status message.
    """
    ptool = getToolByName(self, 'plone_utils')
    catalog = getToolByName(self, 'portal_catalog')
    session = Session()

    # Get the current client's ID
    registry = getUtility(IRegistry)
    client_config = registry.forInterface(IClientConfiguration)
    client_id = client_config.client_id

    # Get all tasks and forwardings currently in the global index
    task_query = TaskQuery()
    indexed_tasks = task_query.get_tasks_for_client(client=client_id)

    # Clear existing tasks in the global index that were created on this
    # client; commit so the deletions are persisted before reindexing.
    for task in indexed_tasks:
        session.delete(task)
    transaction.commit()

    # Get tasks and forwardings that need to be reindexed from catalog
    cataloged_tasks = catalog(portal_type="opengever.task.task")
    forwardings = catalog(portal_type="opengever.inbox.forwarding")
    objs_to_reindex = cataloged_tasks + forwardings

    # Re-index each task; `None` is passed as the (unused here) event.
    for obj in objs_to_reindex:
        index_task(obj.getObject(), None)

    ptool.addPortalMessage(_(
        "Global task index has been cleared (${cleared}) and rebuilt (${rebuilt})",
        mapping={
            'cleared': len(indexed_tasks),
            'rebuilt': len(objs_to_reindex)
        }), type="info")

    return self.context.REQUEST.RESPONSE.redirect(
        self.context.absolute_url() + '/@@ogds-controlpanel#ogds-cp-alltasks')
def get_task_by_oguid(self, oguid):
    """Return the task matching *oguid* (``[client_id]:[int_id]``), or
    ``None`` when no such task is indexed.
    """
    client_id, int_id = oguid.split(':')
    query = Session().query(Task).filter(
        Task.client_id == client_id).filter(Task.int_id == int_id)
    try:
        return query.one()
    except NoResultFound:
        return None
def __call__(self):
    """Clear and rebuild the global task index for the current client,
    then redirect back to the OGDS control panel with a status message.
    """
    ptool = getToolByName(self, 'plone_utils')
    catalog = getToolByName(self, 'portal_catalog')
    session = Session()

    # Get the current client's ID
    registry = getUtility(IRegistry)
    client_config = registry.forInterface(IClientConfiguration)
    client_id = client_config.client_id

    # Get all tasks and forwardings currently in the global index
    task_query = TaskQuery()
    indexed_tasks = task_query.get_tasks_for_client(client=client_id)

    # Clear existing tasks in the global index that were created on this
    # client; commit so the deletions are persisted before reindexing.
    for task in indexed_tasks:
        session.delete(task)
    transaction.commit()

    # Get tasks and forwardings that need to be reindexed from catalog
    cataloged_tasks = catalog(portal_type="opengever.task.task")
    forwardings = catalog(portal_type="opengever.inbox.forwarding")
    objs_to_reindex = cataloged_tasks + forwardings

    # Re-index each task; `None` is passed as the (unused here) event.
    for obj in objs_to_reindex:
        index_task(obj.getObject(), None)

    ptool.addPortalMessage(
        _("Global task index has been cleared (${cleared}) and rebuilt (${rebuilt})",
          mapping={'cleared': len(indexed_tasks),
                   'rebuilt': len(objs_to_reindex)}),
        type="info")
    return self.context.REQUEST.RESPONSE.redirect(
        self.context.absolute_url() + '/@@ogds-controlpanel#ogds-cp-alltasks')
def _global_responsible_synchronisation(self, debug=False):
    """Detect (and attempt to fix) tasks whose `responsible` diverges
    from their predecessor's, logging every case found.

    :param debug: passed through to ``_fix_responsible_synchronisation``.
    """
    log = self.mklog()
    successors = Session().query(Task).filter(Task.predecessor != None)
    for successor in successors:
        predecessor = successor.predecessor

        # skip discarded forwardings
        if predecessor.review_state == u'forwarding-state-closed':
            continue

        # check responsible
        if predecessor.responsible != successor.responsible:
            log('Defective synchronistation dedected ...\n')
            log('Successor: (%s/%s) responsible: %s ' % (
                successor.client_id,
                successor.physical_path,
                successor.responsible))
            log('Predecessor: (%s/%s) responsible: %s ' % (
                predecessor.client_id,
                predecessor.physical_path,
                predecessor.responsible))

            # BUG FIX: `result` was read unconditionally after the
            # try/except, so an HTTPError left it unbound (NameError on
            # the first failure) or stale from a previous iteration.
            # The success/failure logging now lives in the `else` branch
            # and only runs when the fix call actually returned.
            try:
                result = self._fix_responsible_synchronisation(
                    successor, predecessor, log, debug=debug)
            except HTTPError:
                log('FAILED %s Could not be synchronised, because of HTTPError' % (
                    successor.title))
            else:
                if not result:
                    log('%s Could not be synchronised' % (successor.title))
                else:
                    log('%s successfully synchronised' % (successor.title))
            log(100 * '-')
def index_task(obj, event):
    """Index the given task in opengever.globalindex.

    Creates (or updates in place) the SQL ``Task`` row matching *obj*,
    copying over title, breadcrumb, workflow state, dates, numbering,
    predecessor link and the principals allowed to view the task.
    """
    # Skip this handler when trying to remove a Plone site. Otherwise the
    # component registry is already gone and we'll run into trouble.
    if IObjectRemovedEvent.providedBy(event) \
            and IPloneSiteRoot.providedBy(event.object):
        return None

    parent = aq_parent(aq_inner(obj))
    client_id = get_client_id()
    intids = getUtility(IIntIds)
    try:
        int_id = intids.getId(obj)
    except KeyError:
        try:
            # In some cases (remote task updating etc.) only the
            # base_object provides an intid.
            int_id = intids.getId(aq_base(obj))
        except KeyError:
            # The intid event handler didn't create an intid for this
            # object yet. The event will be fired again after creating
            # the id.
            return

    # Fetch the existing row for this (client_id, int_id) pair, or add a
    # fresh one to the session.
    session = Session()
    try:
        task = session.query(Task).filter(Task.client_id == client_id).filter(
            Task.int_id == int_id).one()
    except NoResultFound:
        task = Task(int_id, client_id)
        session.add(task)

    # Title: truncated to the column length to avoid database errors.
    maximum_title_length = Task.title.property.columns[0].type.length
    title = obj.title
    if not isinstance(title, unicode):
        title = title.decode('utf-8')
    title = title[:maximum_title_length]
    task.title = title

    # Generate and store the breadcrumb tooltip.
    breadcrumb_titles = []
    breadcrumbs_view = getMultiAdapter((obj, obj.REQUEST),
                                       name='breadcrumbs_view')
    for elem in breadcrumbs_view.breadcrumbs():
        if isinstance(elem.get('Title'), unicode):
            breadcrumb_titles.append(elem.get('Title'))
        else:
            breadcrumb_titles.append(elem.get('Title').decode('utf-8'))

    # Prevent a database error for over-long strings: shorten the
    # breadcrumb_title to e.g. "mandant1 > repo1 > ..."
    join_value = ' > '
    end_value = '...'
    maximum_length = Task.breadcrumb_title.property.columns[0].type.length
    # Reserve room for the ellipsis marker.
    maximum_length -= len(end_value)

    breadcrumb_title = breadcrumb_titles
    actual_length = 0
    for i, breadcrumb in enumerate(breadcrumb_titles):
        add_length = len(breadcrumb) + len(join_value) + len(end_value)
        if (actual_length + add_length) > maximum_length:
            # Cut off before this element and append the ellipsis.
            breadcrumb_title = breadcrumb_titles[:i]
            breadcrumb_title.append(end_value)
            break
        actual_length += len(breadcrumb) + len(join_value)
    task.breadcrumb_title = join_value.join(breadcrumb_title)

    # Path relative to the site root.
    url_tool = obj.unrestrictedTraverse('@@plone_tools').url()
    task.physical_path = '/'.join(url_tool.getRelativeContentPath(obj))

    wftool = getToolByName(obj, 'portal_workflow')
    task.review_state = wftool.getInfoFor(obj, 'review_state')
    task.icon = obj.getIcon()
    task.responsible = obj.responsible
    task.issuer = obj.issuer
    # We need python datetime objects to make it work with sqlite etc.
    task.deadline = obj.deadline
    task.completed = obj.date_of_completion
    task.modified = obj.modified().asdatetime().replace(tzinfo=None)
    task.task_type = obj.task_type
    task.is_subtask = parent.portal_type == 'opengever.task.task'
    task.sequence_number = getUtility(ISequenceNumber).get_number(obj)
    task.reference_number = IReferenceNumber(obj).get_number()

    # Get the containing_dossier value directly with the indexer.
    catalog = getToolByName(obj, 'portal_catalog')
    task.containing_dossier = getMultiAdapter(
        (obj, catalog), IIndexer, name='containing_dossier')()

    # The dossier_sequence_number index is required for generating lists
    # of tasks as PDFs (LaTeX) as defined by the customer.
    task.dossier_sequence_number = get_dossier_sequence_number(obj)

    task.assigned_client = obj.responsible_client

    # Index the predecessor (obj.predecessor is "client_id:int_id").
    if obj.predecessor:
        pred_client_id, pred_init_id = obj.predecessor.split(':', 1)
        try:
            predecessor = session.query(Task).filter_by(
                client_id=pred_client_id, int_id=pred_init_id).one()
        except NoResultFound:
            # For some reason the referenced predecessor doesn't exist.
            predecessor = None
    else:
        predecessor = None
    task.predecessor = predecessor

    # Index the principals which have View permission. This follows the
    # allowedRolesAndUsers index but does not take care of global roles.
    allowed_roles = rolesForPermissionOn(View, obj)
    principals = []
    for principal, roles in _mergedLocalRoles(obj).items():
        for role in roles:
            if role in allowed_roles:
                principals.append(principal.decode('utf-8'))
                break
    task.principals = principals
def get_base_query(self):
    """Build and return the base search query (sqlalchemy) over all
    tasks."""
    session = Session()
    return session.query(Task)
def test_accept_forwarding_with_successor_with_dossier(self):
    """Accepting a forwarding from a remote client with a target dossier
    must store the forwarding in the yearfolder (closed), create a
    successor task in the dossier and link the two via oguid.
    """
    create_client('plone')
    set_current_client_id(self.portal, 'plone')

    # Create a fake predecessor row pretending to live on 'client2'.
    predecessor = Task(FAKE_INTID, 'client2')
    predecessor.physical_path = 'eingangskorb/forwarding-1'
    predecessor.issuer = 'testuser2'
    predecessor.responsible_client = 'plone'
    predecessor.responsible = TEST_USER_ID
    predecessor.deadline = datetime.now()

    # Mock every remote_request the accept procedure performs against
    # the remote client, in call order.
    remote_request = self.mocker.replace(
        'opengever.ogds.base.utils.remote_request')
    self.expect(remote_request(
        'client2', '@@transporter-extract-object-json',
        path='eingangskorb/forwarding-1',
        data={}, headers={})).result(FakeResponse(FORWARDING_EXTRACTION))
    self.expect(remote_request(
        'client2', '@@task-documents-extract',
        path='eingangskorb/forwarding-1',
        data={'documents': 'null'},
        headers={})).result(FakeResponse(DOCUMENT_EXTRACTION))

    # TODO replace any with the realy expected data
    self.expect(remote_request(
        'client2', '@@task-responses-extract',
        path='eingangskorb/forwarding-1',
        data=ANY)).result(FakeResponse('[]'))

    self.expect(remote_request(
        'client2', '@@store_forwarding_in_yearfolder',
        path='eingangskorb/forwarding-1',
        # data={'response_text': 'This is a message',
        #       'successor_oguid': u'plone:1231066935',
        #       'transition': 'forwarding-transition-accept'}
        data=ANY,
    )).result(FakeResponse('OK'))

    self.replay()

    wft = getToolByName(self.portal, 'portal_workflow')
    intids = getUtility(IIntIds)

    session = Session()
    session.add(predecessor)

    accept_forwarding_with_successor(
        self.portal, 'client2:%s' % FAKE_INTID,
        u'This is a message', dossier=self.dossier)

    # CHECKS
    # ---------------------
    # yearfolder:
    current_year = datetime.now().year
    yearfolder = self.inbox.get(str(current_year), None)
    self.assertTrue(yearfolder)
    self.assertEquals(yearfolder.title, u'Closed %s' % current_year)

    # forwarding is stored in the yearfolder ...
    forwarding = yearfolder.get('forwarding-1', None)
    self.assertTrue(forwarding)

    # ... and closed
    self.assertEquals(wft.getInfoFor(forwarding, 'review_state'),
                      'forwarding-state-closed')

    # attributes are correctly moved
    self.assertEquals(forwarding.responsible, u'inbox:plone')

    # the issuer should be changed to the local inbox group
    self.assertEquals(forwarding.issuer, u'inbox:plone')

    # also the response is correctly added
    response = IResponseContainer(forwarding)[0]
    self.assertEquals(response.transition, 'forwarding-transition-accept')

    # task (succesor of the forwarding)
    task = self.dossier.get('task-1')
    self.assertTrue(task)
    self.assertEquals(
        ISuccessorTaskController(forwarding).get_successors()[0].int_id,
        intids.getId(task))

    # the succesor link is also in the response correctly
    self.assertEquals(
        response.successor_oguid,
        ISuccessorTaskController(task).get_oguid())
def get_tasks_by_paths(self, task_paths):
    """Return every indexed task whose physical path appears in
    *task_paths*.
    """
    query = Session().query(Task)
    return query.filter(Task.physical_path.in_(task_paths)).all()
def get_tasks(self, task_ids):
    """Return every indexed task whose task_id appears in *task_ids*."""
    query = Session().query(Task)
    return query.filter(Task.task_id.in_(task_ids)).all()