class AuthorizationManager(grok.Adapter):
    """Adapter computing role authorizations for users/groups on a
    Silva content object, including roles acquired from parents.
    """
    grok.context(interfaces.ISilvaObject)
    grok.provides(interfaces.IAuthorizationManager)
    grok.implements(interfaces.IAuthorizationManager)

    def __init__(self, context):
        self.context = context
        # Cache the currently authenticated user's id for defaulting.
        self._current_user_id = getSecurityManager().getUser().getId()
        self._member = getUtility(IMemberService)
        # Group service is optional; may be None if not installed.
        self._group = queryUtility(IGroupService)

    def get_user_role(self, identifier=None):
        # Convenience accessor: role of the authorization, or None when
        # no authorization can be built for the identifier.
        authorization = self.get_authorization(identifier=identifier)
        if authorization is None:
            return None
        return authorization.role

    def get_user_id(self, identifier=None):
        # Default to the authenticated user when no identifier is given.
        if identifier is None:
            return self._current_user_id
        return identifier

    def _get_identity(self, identifier):
        # Resolve an identifier to a member first, then fall back to a
        # group (when a group service is available).
        identity = self._member.get_member(identifier, location=self.context)
        if identity is None and self._group is not None:
            identity = self._group.get_group(identifier, location=self.context)
        return identity

    def get_authorization(self, identifier=None, dont_acquire=False):
        """Return authorization object for the given user. If no user
        is specified, return authorization object for the current
        authenticated user.

        With ``dont_acquire`` set, roles acquired from parent containers
        are not collected (``acquired_roles`` stays None).
        """
        identifier = self.get_user_id(identifier)
        identity = self._get_identity(identifier)
        if identity is None:
            return None
        local_roles = self.context.get_local_roles_for_userid(identifier)
        acquired_roles = None
        if not dont_acquire:
            acquired_roles = []
            # Walk up the acquisition chain until the Silva root,
            # gathering roles granted on each ancestor.
            content = self.context
            while not interfaces.IRoot.providedBy(content):
                content = aq_parent(content)
                acquired_roles.extend(
                    content.get_local_roles_for_userid(identifier))
        return IAuthorizationFactory(identity)(self.context, self,
                                               local_roles, acquired_roles)

    def get_authorizations(self, identifiers, dont_acquire=False):
        """Return all current authorizations at this level, and
        authorization objects for given users/groups or other.
        """
        authorizations = {}
        for identifier in identifiers:
            # Skip duplicates in the input list.
            if identifier not in authorizations:
                authorization = self.get_authorization(
                    identifier, dont_acquire=dont_acquire)
                if authorization is not None:
                    authorizations[identifier] = authorization
        return authorizations

    def get_defined_authorizations(self, dont_acquire=False):
        """Return current all authorizations at this level.
        """
        identifiers = set()
        local_roles = defaultdict(list)
        acquired_roles = defaultdict(list)
        # Collect user with roles here, tag as local_roles
        for identifier, roles in self.context.get_local_roles():
            identifiers.add(identifier)
            local_roles[identifier].extend(roles)
        if not dont_acquire:
            # Collect user with parent roles
            content = self.context
            while not interfaces.IRoot.providedBy(content):
                content = aq_parent(content)
                for identifier, roles in content.get_local_roles():
                    identifiers.add(identifier)
                    acquired_roles[identifier].extend(roles)
        auth = {}
        for identifier in identifiers:
            identity = self._get_identity(identifier)
            if identity is None:
                # Identifier no longer resolves to a member/group.
                continue
            identity_local_roles = silva_roles(local_roles[identifier])
            identity_acquired_roles = silva_roles(acquired_roles[identifier])
            if not (identity_local_roles or identity_acquired_roles):
                # If the user doesn't have any Silva roles, we ignore it
                continue
            if dont_acquire:
                identity_acquired_roles = None
            auth[identifier] = IAuthorizationFactory(identity)(
                self.context, self,
                identity_local_roles, identity_acquired_roles)
        return auth
class DirectoryCatalog(grok.Adapter):
    """Catalog-backed access to the items of a directory, providing
    querying, filtering, full-text search and category aggregation.
    """
    grok.context(IDirectoryBase)
    grok.provides(IDirectoryCatalog)

    def __init__(self, context):
        self.directory = context
        self.catalog = getToolByName(context, 'portal_catalog')
        self.path = '/'.join(context.getPhysicalPath())

    def query(self, **kwargs):
        """Runs a query on the catalog with default parameters set.

        Not part of the official IDirectoryCatalog interface as this is
        highly storage_backend dependent.
        """
        # depth=1 restricts results to direct children of the directory.
        results = self.catalog(
            path={
                'query': self.path,
                'depth': 1
            },
            object_provides=IDirectoryItemLike.__identifier__,
            **kwargs)
        return results

    def sortkey(self):
        """Returns the default sortkey (unicode collation on the title)."""
        uca_sortkey = utils.unicode_collate_sortkey()
        return lambda b: uca_sortkey(b.Title.decode('utf-8'))

    def items(self):
        """Return all directory items, sorted by title."""
        return sorted(self.query(), key=self.sortkey())

    def filter(self, term):
        """Return items matching every category value in `term`, sorted."""
        results = self.query(categories={
            'query': term.values(),
            'operator': 'and'
        })
        # The catalog's 'and' query is a superset; narrow to exact matches.
        filter_key = partial(is_exact_match, term=term)
        return sorted(filter(filter_key, results), key=self.sortkey())

    def search(self, text):
        """Full-text search over the directory items, sorted by title."""
        # remove the fuzzy-search first
        text = text.replace('*', '')

        # wrap the text in quotes so the query parser ignores the content
        # (the user should not be able to get his text inspected by the
        # query parser, because it's a hidden feature at best and a security
        # problem at worst)
        #
        # add a wildcard at the end for fuzzyness
        text = '"{}*"'.format(text)
        return sorted(self.query(SearchableText=text), key=self.sortkey())

    def possible_values(self, items=None, categories=None):
        """Returns a dictionary with the keys being the categories of the
        directory, filled with a list of all possible values for each
        category. If an item contains a list of values (as opposed to a
        single string) those values flattened. In other words, there is
        no hierarchy in the resulting list.
        """
        items = items or self.query()
        categories = categories or self.directory.all_categories()
        values = dict((c, []) for c in categories)

        for item in items:
            for key, label, value in item.categories:
                # Fixed idiom: was `not key in categories`.
                if key not in categories:
                    continue
                if not value:
                    continue
                if isinstance(value, basestring):
                    values[key].append(value)
                else:
                    # Was a side-effect `map(values[key].append, ...)`;
                    # extend() does the same thing explicitly.
                    values[key].extend(utils.flatten(value))

        return values

    def grouped_possible_values(self, items=None, categories=None):
        """Same as possible_values, but with the categories of the
        dictionary being unique and each value being wrapped in a tuple
        with the first element as the actual value and the second element
        as the count non-unique values.

        It's really the grouped result of possible_values.
        """
        possible = self.possible_values(items, categories)
        grouped = dict((k, {}) for k in possible)

        for category, items in possible.items():
            # groupby requires sorted input to group equal values together.
            groups = groupby(sorted(items))
            for group, values in groups:
                grouped[category][group] = len(list(values))

        return grouped

    def grouped_possible_values_counted(self, items=None, categories=None):
        """Returns a dictionary of categories with a list of possible
        values including counts in brackets.
        """
        possible = self.grouped_possible_values(items, categories)
        result = dict((k, []) for k in possible)

        for category, values in possible.items():
            counted = []
            for text, count in values.items():
                counted.append(utils.add_count(text, count))
            result[category] = sorted(
                counted, key=utils.unicode_collate_sortkey())

        return result
class Polls(grok.GlobalUtility):
    ''' Utility methods for dealing with polls '''

    grok.implements(IPolls)
    grok.provides(IPolls)
    grok.name('collective.polls')

    @property
    def ct(self):
        # portal_catalog tool
        return getToolByName(getSite(), 'portal_catalog')

    @property
    def mt(self):
        # portal_membership tool
        return getToolByName(getSite(), 'portal_membership')

    @property
    def wt(self):
        # portal_workflow tool
        return getToolByName(getSite(), 'portal_workflow')

    @property
    def member(self):
        return self.mt.getAuthenticatedMember()

    def _query_for_polls(self, **kw):
        ''' Use Portal Catalog to return a list of polls '''
        kw['portal_type'] = 'collective.polls.poll'
        results = self.ct.searchResults(**kw)
        return results

    def uid_for_poll(self, poll):
        ''' Return a uid for a poll '''
        return IUUID(poll)

    def recent_polls(self, show_all=False, limit=5, **kw):
        ''' Return recent polls in a given context '''
        kw['sort_on'] = 'created'
        kw['sort_order'] = 'reverse'
        kw['sort_limit'] = limit
        if not show_all:
            kw['review_state'] = 'open'
        results = self._query_for_polls(**kw)
        return results[:limit]

    def recent_polls_in_context(self, context, show_all=False, limit=5, **kw):
        ''' Return recent polls in a given context '''
        context_path = '/'.join(context.getPhysicalPath())
        kw['path'] = context_path
        results = self.recent_polls(show_all, limit, **kw)
        return results

    def poll_by_uid(self, uid):
        ''' Return the poll for the given uid, or None if not found '''
        if uid == 'latest':
            results = self.recent_polls(show_all=False, limit=1)
        else:
            kw = {'UID': uid}
            results = self._query_for_polls(**kw)
        if results:
            poll = results[0].getObject()
            return poll

    def voted_in_a_poll(self, poll, request=None):
        ''' check if current user already voted '''
        anonymous_allowed = poll.allow_anonymous
        poll_uid = self.uid_for_poll(poll)
        member = self.member
        member_id = member.getId()
        voters = poll.voters()
        if member_id:
            # Authenticated: voters list records member ids.
            return member_id in voters
        elif anonymous_allowed and request:
            # Anonymous voters are tracked with a per-poll cookie.
            cookie = COOKIE_KEY % poll_uid
            value = request.cookies.get(cookie, '')
            if value:
                # Parentheses added for clarity only: % binds tighter
                # than `in`, so this was already the formatting check.
                value = ('Anonymous-%s' % value) in voters
            return value
        else:
            # If we cannot be sure, we will block this user from
            # voting again
            return True

    def allowed_to_edit(self, poll):
        ''' Is user allowed to edit this poll '''
        # bool() replaces the `(expr and True) or False` anti-idiom.
        return bool(self.mt.checkPermission('Modify portal content', poll))

    def allowed_to_view(self, poll):
        ''' Is user allowed to view this poll '''
        return bool(self.mt.checkPermission('View', poll))

    def allowed_to_vote(self, poll, request=None):
        ''' is current user allowed to vote in this poll ?'''
        canVote = bool(
            self.mt.checkPermission('collective.polls: Vote', poll))
        if canVote:
            # User must view the poll
            # and poll must be open to allow votes
            if not self.voted_in_a_poll(poll, request):
                # If user did not vote here, we allow him to vote
                return True
        # All other cases shall not pass
        raise Unauthorized

    def anonymous_vote_id(self):
        ''' return a identifier for vote_id '''
        # Timestamp-based id with a small random component to avoid
        # collisions between near-simultaneous anonymous votes.
        vote_id = int(time.time() * 10000000) + random.randint(0, 99)
        return vote_id
class ContactInformation(grok.GlobalUtility):
    """The principal information utility provides useful functions for
    building vocabularies with users, contacts, in-boxes groups and
    committees.

    Terminology:

    :client: A client is a installation of opengever, which may be
    connected to other similar installations (clients).

    :user: A user is a person who can log into the system and is
    available for any client. Users are stored in the SQL database
    using the sql-alchemy model provided by this package.

    :contact: A contact is a person who does not have acces to the
    system and do not participate directly, but he may be involved in
    the topic of a dossier or parts of it.

    :committee: A committee is a heap of users and contacts. They are
    not relevant for the security.

    :group: Groups is a sql reprensantation of a LDAP Group.
    """

    grok.provides(IContactInformation)

    # USERS

    def is_user(self, principal):
        """Returns true, if `principal` is a userid.
        """
        # Contact and inbox principals are prefixed ('contact:',
        # 'inbox:'); plain userids never contain a colon.
        return principal and ':' not in principal

    @ram.cache(ogds_class_cachekey)
    def list_users(self):
        """A list of dicts.
        """
        session = create_session()
        userdata_keys = User.__table__.columns.keys()
        result = session.execute(User.__table__.select())
        return [UserDict(**dict(zip(userdata_keys, row)))
                for row in result]

    def list_inactive_users(self):
        """Return a query of all users flagged inactive."""
        session = create_session()
        users = session.query(User).filter_by(active=False)
        return users

    def list_assigned_users(self, client_id=None):
        """Lists all users assigned to a client.
        """
        if not client_id:
            client_id = get_client_id()
        if not client_id:
            logger.warn("can't list assigned users, without a client_id")
            return []
        session = create_session()
        users = session.query(Group).join(Client.users_group).filter(
            Client.client_id == client_id).first().users
        return users

    def list_group_users(self, groupid):
        """Return all users of a group"""
        if groupid:
            session = create_session()
            group = session.query(Group).get(groupid)
            if group:
                return group.users
        return []

    def list_user_groups(self, userid):
        """Return all groups the user belongs to, or [] without userid."""
        if userid:
            session = create_session()
            groups = session.query(User).get(userid).groups
            return groups
        return []

    def get_user(self, userid):
        """Returns the user with the userid `principal`.

        Raises ValueError when `userid` is not a plain userid.
        """
        if not self.is_user(userid):
            raise ValueError('principal %s is not a userid' % str(userid))
        return self._users_query().get(userid)

    def is_user_in_inbox_group(self, userid=None, client_id=None):
        """Check membership of `userid` (default: current user) in the
        inbox group of `client_id` (default: current client)."""
        if not client_id:
            client_id = get_client_id()
        if not userid:
            member = getToolByName(
                getSite(), 'portal_membership').getAuthenticatedMember()
            userid = member.getId()
        client = self.get_client_by_id(client_id)
        if client:
            return self.is_group_member(client.inbox_group_id, userid)
        else:
            return False

    def is_group_member(self, groupid, userid):
        """Return True when `userid` is member of group `groupid`."""
        in_group = create_session().query(
            Group.groupid).join(groups_users).filter(
            Group.groupid == groupid,
            groups_users.columns.userid == userid).count()
        return in_group > 0

    # CONTACTS

    def is_contact(self, principal):
        """Return true, if `principal` is a contact.
        """
        return principal and principal.startswith('contact:')

    def list_contacts(self):
        """Returns a catalog result set of contact brains.
        """
        catalog = getToolByName(getSite(), 'portal_catalog')
        query = {'portal_type': 'opengever.contact.contact'}

        # make catalog query without checking security
        # (allowedRolesAndUsers) since the contacts are not visible for
        # foreign users but should be in the vocabulary anyway...
        brains = ZCatalog.searchResults(catalog, **query)
        return brains

    def get_contact(self, principal, check_permissions=False):
        """Returns the contact object of this principal.

        Raises ValueError when `principal` is not a contact principal
        or when multiple contacts match.
        """
        if not self.is_contact(principal):
            raise ValueError('Principal %s is not a contact' %
                             str(principal))
        catalog = getToolByName(getSite(), 'portal_catalog')
        query = {
            'portal_type': 'opengever.contact.contact',
            'contactid': principal
        }
        if not check_permissions:
            # usually foreign users may not have access to the contacts,
            # but we want to be able to print the name etc. in this case
            # too. So we need to use ZCatalog for ignoring the
            # allowedRolesAndUsers index.
            contacts = ZCatalog.searchResults(catalog, **query)
        else:
            contacts = catalog.searchResults(**query)
        if len(contacts) == 0:
            return None
        elif len(contacts) > 1:
            raise ValueError('Found %i contacts with principal %s' % (
                len(contacts), principal))
        else:
            return contacts[0]

    # INBOXES

    def is_inbox(self, principal):
        """Returns true, if `principal` is a inbox.
        """
        return principal and principal.startswith('inbox:')

    def list_inboxes(self):
        """Returns a set of inboxes of all enabled clients.

        Yields (principal, label) tuples.
        """
        clients = self._clients_query()
        active_clients = clients.filter_by(enabled=True)
        for client in active_clients:
            principal = u'inbox:%s' % client.client_id
            yield (principal, self.describe(principal))

    def get_client_of_inbox(self, principal):
        """Returns the client object of the `principal`.
        """
        if not self.is_inbox(principal):
            raise ValueError('Principal %s is not a inbox' %
                             (str(principal)))
        client_id = principal.split(':', 1)[1]
        return self._clients_query().get(client_id)

    @ram.cache(ogds_principal_cachekey)
    def get_group_of_inbox(self, principal):
        """Returns the group principal of the inbox `principal`.
        """
        client = self.get_client_of_inbox(principal)
        if client is None:
            raise ValueError('Client not found for: %s' % principal)
        return client.inbox_group

    # CLIENTS

    def get_clients(self):
        """Returns a list of all enabled clients.
        """
        # If the current client is not enabled, we should not be able to
        # assign something to another client or interact in any way with
        # another client. This client is completely isolated.
        if not get_current_client().enabled:
            return []
        return self._clients_query().filter_by(enabled=True).order_by(
            Client.title).all()

    def get_client_by_id(self, client_id):
        """Returns a client identified by `client_id`.
        """
        client = self._clients_query().filter_by(
            enabled=True, client_id=client_id).first()
        return client

    def get_assigned_clients(self, userid=None):
        """Returns all assigned clients (home clients).
        """
        if not userid:
            member = getToolByName(
                getSite(), 'portal_membership').getAuthenticatedMember()
            userid = member.getId()
        session = create_session()
        # select all clients with the user in the user group
        clients = session.query(Client).join(Client.users_group).join(
            Group.users).filter(User.userid == userid).all()
        return clients

    @ram.cache(ogds_user_client_cachekey)
    def _is_client_assigned(self, userid, client_id):
        session = create_session()
        # check if the specified user is in the user_group of the
        # specified client
        if session.query(Client).join(Client.users_group).join(
                Group.users).filter(User.userid == userid).filter(
                Client.client_id == client_id).count() > 0:
            return True
        return False

    def is_client_assigned(self, userid=None, client_id=None):
        """Return True if the specified user is in the user_group of the
        specified client"""
        if not client_id:
            client_id = get_client_id()
        if not userid:
            member = getToolByName(
                getSite(), 'portal_membership').getAuthenticatedMember()
            userid = member.getId()
        return self._is_client_assigned(userid, client_id)

    def is_one_client_setup(self):
        """Return True if only one client is available"""
        clients = self.get_clients()
        return len(clients) == 1

    # general principal methods

    def describe(self, principal, with_email=False, with_email2=False,
                 with_principal=True):
        """Represent a user / contact / inbox / ... as string.

        This usually returns the fullname or another label / title.
        `principal` could also be a user object or a contact brain.
        """
        if not principal:
            return ''
        is_string = isinstance(principal, types.StringTypes)
        brain = None
        contact = None
        user = None

        # is principal a brain?
        if not is_string and ICatalogBrain.providedBy(principal):
            brain = principal

        # ok, lets check what we have...
        # string inbox
        if is_string and self.is_inbox(principal):
            # just do it
            client = self.get_client_of_inbox(principal)
            # we need to instantly translate, because otherwise
            # stuff like the autocomplete widget will not work
            # properly.
            label = _(u'inbox_label',
                      default=u'Inbox: ${client}',
                      mapping=dict(client=client.title))
            return translate(label, context=getRequest())

        # string contact
        elif is_string and self.is_contact(principal):
            contact = self.get_contact(principal)

        # string user
        elif is_string and self.is_user(principal):
            user = self.get_user(principal)

        # contact brain
        elif brain and brain_is_contact(brain):
            contact = brain
            principal = contact.contactid

        # user object
        elif IUser.providedBy(principal) or isinstance(principal, UserDict):
            user = principal
            principal = user.userid

        # ok, now lookup the stuff
        if contact:
            # NOTE: the original code had an unreachable
            # `if not contact: return principal` branch here; removed.
            if contact.lastname and contact.firstname:
                name = ' '.join((contact.lastname, contact.firstname))
            elif contact.lastname:
                name = contact.lastname
            elif contact.firstname:
                name = contact.firstname
            elif 'userid' in contact:
                name = contact.userid
            else:
                name = contact.id

            if with_email2 and contact.email2:
                return '%s (%s)' % (name, contact.email2)
            # NOTE(review): this tests with_principal (not with_email)
            # before appending the email, unlike the user branch below —
            # looks inconsistent but is kept as-is; confirm intent.
            elif with_principal and contact.email:
                return '%s (%s)' % (name, contact.email)
            else:
                return name

        elif user:
            if user.lastname and user.firstname:
                name = ' '.join((user.lastname, user.firstname))
            elif user.lastname:
                name = user.lastname
            elif user.firstname:
                name = user.firstname
            else:
                name = user.userid

            infos = []
            if with_principal:
                infos.append(user.userid)
            if with_email and user.email:
                infos.append(user.email)
            elif with_email2 and user.email2:
                infos.append(user.email2)
            if infos:
                return '%s (%s)' % (name, ', '.join(infos))
            else:
                return name

        elif is_string:
            # fallback for acl_users
            portal = getSite()
            portal_membership = getToolByName(portal, 'portal_membership')
            member = portal_membership.getMemberById(principal)
            if not member:
                if isinstance(principal, str):
                    return principal.decode('utf-8')
                else:
                    return principal
            name = member.getProperty('fullname', principal)
            email = member.getProperty('email', None)
            infos = []
            if with_principal:
                infos.append(principal)
            if with_email and email:
                infos.append(email)
            if infos:
                return '%s (%s)' % (name, ', '.join(infos))
            else:
                return name

        else:
            raise ValueError('Unknown principal type: %s' % str(principal))

    @ram.cache(ogds_principal_cachekey)
    def get_profile_url(self, principal):
        """Returns the profile url of this `principal`.
        """
        if isinstance(principal, User):
            portal = getSite()
            return '/'.join(
                (portal.portal_url(), '@@user-details', principal.userid))
        elif self.is_inbox(principal):
            return None
        elif self.is_contact(principal):
            contact = self.get_contact(principal, check_permissions=True)
            if contact:
                return contact.getURL()
            else:
                return None
        elif self.is_user(principal):
            portal = getSite()
            user = self.get_user(principal)
            if user:
                return '/'.join(
                    (portal.portal_url(), '@@user-details', user.userid))
            else:
                # fallback with acl_users folder
                portal_membership = getToolByName(portal,
                                                  'portal_membership')
                member = portal_membership.getMemberById(principal)
                if member:
                    # Fixed: reuse the member looked up above instead of
                    # calling getMemberById a second time.
                    return member.getHomeUrl()
        return None

    @ram.cache(ogds_principal_cachekey)
    def render_link(self, principal):
        """Render a link to the `principal`
        """
        # '' is already falsy, so a single truthiness check suffices.
        if not principal:
            return None
        url = self.get_profile_url(principal)
        if not url:
            return self.describe(principal)
        return '<a href="%s">%s</a>' % (url, self.describe(principal))

    @ram.cache(ogds_class_language_cachekey)
    def get_user_sort_dict(self):
        """Returns a dict presenting userid and the fullname, that allows
        correct sorting on the fullname. Including also every client
        inbox.
        """
        session = create_session()
        query = session.query(User.userid, User.lastname, User.firstname)
        query = query.order_by(User.lastname, User.firstname)
        ids = query.all()
        sort_dict = {}
        for userid, lastname, firstname in ids:
            sort_dict[userid] = u'%s %s' % (lastname, firstname)
        #includes every client inbox
        active_clients = self._clients_query().filter_by(enabled=True)
        for client in active_clients:
            principal = u'inbox:%s' % client.client_id
            sort_dict[principal] = translate(self.describe(principal))
        return sort_dict

    def get_user_contact_sort_dict(self):
        """Like get_user_sort_dict, extended with all contacts."""
        sort_dict = self.get_user_sort_dict()
        for contact in self.list_contacts():
            sort_dict['contact:%s' % (contact.id)] = u'%s %s' % (
                contact.lastname, contact.firstname)
        return sort_dict

    # internal methods

    def _users_query(self):
        session = create_session()
        return session.query(User)

    def _clients_query(self):
        session = create_session()
        return session.query(Client)
class RemovalProtocolLaTeXView(grok.MultiAdapter, MakoLaTeXView):
    """LaTeX view rendering the removal protocol of a disposition."""

    grok.provides(ILaTeXView)
    grok.adapts(Interface, IRemovalProtocolLayer, ILaTeXLayout)

    template_directories = ['latex_templates']
    template_name = 'removal_protocol.tex'

    def translate(self, msg):
        """Translate `msg` for the current request's language."""
        return translate(msg, context=self.request)

    def get_render_arguments(self):
        """Return the mapping passed to the Mako template.

        Consistency fix: all labels now go through self.translate()
        instead of mixing it with direct translate(..., context=...)
        calls (behavior is identical).
        """
        self.layout.show_contact = False
        dossier_listener = getMultiAdapter(
            (self.context, self.request, self),
            ILaTexListing, name='destroyed_dossiers')
        history_listener = getMultiAdapter(
            (self.context, self.request, self),
            ILaTexListing, name='disposition_history')

        return {
            'label_protocol': self.translate(
                _('label_removal_protocol', default="Removal protocol")),
            'title': self.context.title,
            'disposition_metadata': self.get_disposition_metadata(),
            'label_dossiers': self.translate(
                _('label_dossiers', default="Dossiers")),
            'dossier_listing': dossier_listener.get_listing(
                self.context.get_dossier_representations()),
            'label_history': self.translate(
                _('label_history', default="History")),
            'history': history_listener.get_listing(
                self.context.get_history())
        }

    def get_disposition_metadata(self):
        """Return LaTeX table rows (label & value) for the disposition."""
        rows = []
        config = [{
            'label': _('label_title', default=u'Title'),
            'value': self.context.title
        }, {
            'label': _('label_transfer_number', default=u'Transfer number'),
            'value': self.context.transfer_number
            if self.context.transfer_number else u''
        }]
        for row in config:
            label = self.translate(row.get('label'))
            rows.append(u'\\bf {} & {} \\\\%%'.format(
                self.convert_plain(label), row.get('value')))
        return '\n'.join(rows)
class OpenTaskReportLaTeXView(grok.MultiAdapter, MakoLaTeXView):
    """LaTeX view listing all open tasks of the current client, split
    into incoming (assigned to this client) and outgoing (issued by
    this client) tasks.
    """

    grok.provides(ILaTeXView)
    grok.adapts(Interface, IOpenTaskReportLayer, ILaTeXLayout)

    template_directories = ['templates']
    template_name = 'opentaskreport.tex'

    def get_render_arguments(self):
        """Prepare layout options and collect the template arguments."""
        self.info = getUtility(IContactInformation)
        self.layout.show_organisation = True
        self.layout.use_package('longtable')
        arguments = self.get_task_rows()
        arguments['client'] = get_current_client().title
        return arguments

    def _extend_task_query(self, query):
        """Extends a globalindex task query.

        Sorts by deadline, keeps only open tasks, and collapses
        predecessor/successor pairs to the one on this client.
        """
        return query.order_by(asc(Task.deadline)).filter(
            Task.review_state.in_(OPEN_TASK_STATES)).filter(
            or_(and_(Task.predecessor == None, Task.successors == None),
                Task.client_id == get_client_id()))

    def get_task_rows(self):
        """Returns a dict of task-rows (tuples of cells) of all open
        tasks on the current client:

        incoming -- open tasks assigned to the current client
        outgoing -- open tasks assigned to another client
        """
        current_client = get_client_id()

        incoming = [
            self.get_row_for_item(task, display_issuer_client=True)
            for task in self._extend_task_query(
                Session().query(Task).filter(
                    Task.assigned_client == current_client)).all()]

        outgoing = [
            self.get_row_for_item(task, display_responsible_client=True)
            for task in self._extend_task_query(
                Session().query(Task).filter(
                    Task.client_id == current_client)).all()]

        return {'incoming': incoming, 'outgoing': outgoing}

    def get_row_for_item(self, item, display_issuer_client=False,
                         display_responsible_client=False):
        """Build one LaTeX table row for a task."""
        kind = task_type_helper(item, item.task_type)
        number = unicode(item.sequence_number).encode('utf-8')
        due = helper.readable_date(item, item.deadline)
        label = unicode(
            getattr(item, 'Title', getattr(item, 'title', ''))
            ).encode('utf-8')
        issuer = get_issuer_of_task(
            item, with_client=display_issuer_client, with_principal=False)
        responsible = self.info.describe(
            item.responsible, with_principal=False)
        if display_responsible_client:
            # Prefix the responsible with the title of its client.
            client_title = self.info.get_client_by_id(
                item.assigned_client).title
            responsible = '%s / %s' % (client_title, responsible)
        dossier = item.containing_dossier or ''
        reference = unicode(
            getattr(item, 'reference', getattr(item, 'reference_number', ''))
            ).encode('utf-8')
        state = workflow_state(item, item.review_state)

        return self.convert_list_to_row([
            number,
            label,
            kind,
            dossier,
            reference,
            issuer,
            responsible,
            due,
            state,
        ])

    def convert_list_to_row(self, row):
        """Join escaped cell values into one LaTeX table row."""
        return ' & '.join(self.convert_plain(cell) for cell in row)
class ContainerManager(grok.Adapter):
    """Adapter implementing copy / move / rename / ghost / delete
    operations on a Silva container, driven through cofunctions that
    accept one content object per yield round-trip.
    """
    grok.context(IContainer)
    grok.implements(IContainerManager)
    grok.provides(IContainerManager)

    def __make_id(self, type, identifier):
        # Create a new identifier that is unused in the container.
        count = 0
        # Recognize an already-mangled id like 'copy2_of_name' so we
        # continue counting from it instead of nesting prefixes.
        match = re.match('^' + type + '([0-9]*)_of_(.*)', identifier)
        if match:
            count = int(match.group(1) or '1')
            identifier = match.group(2)
        candidate = identifier
        while self.context._getOb(candidate, None) is not None:
            # First collision gets no number ('copy_of_x'), later ones
            # get 'copy2_of_x', 'copy3_of_x', ...
            candidate = type + '%s_of_%s' % (count and count + 1 or '',
                                             identifier)
            count += 1
        return candidate

    def __copy(self, content, to_identifier):
        # Copy a content to to_identifier in the container
        copy = content._getCopy(self.context)
        copy._setId(to_identifier)
        notify(ObjectCopiedEvent(copy, content))

        self.context._setObject(to_identifier, copy)
        # Re-fetch to get the copy wrapped in the container's context.
        copy = self.context._getOb(to_identifier)

        compatibilityCall('manage_afterClone', copy, copy)

        notify(ObjectClonedEvent(copy))
        return copy

    def __move(self, content, from_container, from_identifier,
               to_identifier):
        # Move a content into the container
        notify(
            ObjectWillBeMovedEvent(content, from_container, from_identifier,
                                   self.context, to_identifier))
        # Events are suppressed on delete/set; the single Moved event
        # below stands in for the remove/add pair.
        from_container._delObject(from_identifier, suppress_events=True)

        content = aq_base(content)
        content._setId(to_identifier)

        self.context._setObject(to_identifier, content, set_owner=0,
                                suppress_events=True)
        content = self.context._getOb(to_identifier)

        notify(
            ObjectMovedEvent(content, from_container, from_identifier,
                             self.context, to_identifier))
        return content

    @Lazy
    def __addables(self):
        # Meta types that may be added to this container (computed once
        # per adapter instance thanks to @Lazy).
        return set(IAddableContents(self.context).get_authorized_addables())

    def __verify_copyable(self, content):
        # Return a ContainerError when content cannot be copied here,
        # None when the copy is allowed.
        if not content.cb_isCopyable():
            return ContainerError(
                _("You are unauthorized to copy this content."),
                content)
        if content.meta_type not in self.__addables:
            return ContainerError(
                _("You cannot add this content type in this container."),
                content)
        return None

    def __verify_moveable(self, content):
        # Return a ContentError/ContainerError when content cannot be
        # moved here, None when the move is allowed.
        try:
            content.is_deletable()
        except ContentError as error:
            return error
        if not content.cb_isMoveable():
            return ContainerError(
                _("You are unauthorized to move this content."),
                content)
        if not move_check(self.context, content):
            return ContainerError(
                _("You cannot move this content to this container."),
                content)
        if content.meta_type not in self.__addables:
            return ContainerError(
                _("You cannot add this content type in this container."),
                content)
        return None

    @silvaconf.protect('silva.ChangeSilvaContent')
    @cofunction
    def copier(self):
        # Cofunction: send content objects in, receive the copy (or an
        # error object) back for each one.

        def make_copy(content):
            identifier = self.__make_id('copy', content.getId())
            copy = self.__copy(content, identifier)
            # Close, maybe should be in a event
            helpers.unapprove_close_helper(copy)
            return copy

        content = yield
        while content is not None:
            result = self.__verify_copyable(content)
            if result is None:
                result = make_copy(content)
            content = yield result

    @silvaconf.protect('silva.ChangeSilvaContent')
    @cofunction
    def mover(self):
        # Cofunction: send content objects in, receive the moved content
        # (or an error object) back for each one.
        any_moves = False

        def do_move(from_container, content):
            from_identifier = content.getId()
            to_identifier = self.__make_id('move', from_identifier)
            content = self.__move(content, from_container, from_identifier,
                                  to_identifier)
            content.manage_changeOwnershipType(explicit=0)
            notifyContainerModified(from_container)
            return content

        content = yield
        while content is not None:
            result = self.__verify_moveable(content)
            if result is None:
                from_container = aq_parent(aq_inner(content))
                if (aq_base(from_container) is not aq_base(self.context)):
                    result = do_move(from_container, content)
                    any_moves = True
                else:
                    result = ContainerError(
                        _("Content already in the target container."),
                        content)
            content = yield result
        if any_moves:
            notifyContainerModified(self.context)

    @silvaconf.protect('silva.ChangeSilvaContent')
    @cofunction
    def renamer(self):
        # Cofunction: send (content, new_identifier, new_title) tuples
        # in; receive the renamed content or an error object back.
        # Either element may be None to skip that part of the rename.
        any_renames = False

        data = yield
        while data is not None:
            content, to_identifier, to_title = data
            result = None

            # Rename identifier
            from_identifier = content.getId()
            if to_identifier is not None and from_identifier != to_identifier:
                result = self.__verify_moveable(content)
                if result is None:
                    try:
                        ISilvaNameChooser(self.context).checkName(
                            to_identifier, content)
                    except ContentError as e:
                        result = ContainerError(reason=e.reason,
                                                content=content)
                    if result is None:
                        content = self.__move(content, self.context,
                                              from_identifier, to_identifier)
                        any_renames = True

            # Update title
            if to_title is not None:
                if not isinstance(to_title, str):
                    # NOTE(review): decoding a non-str here looks odd
                    # (py2 unicode.decode round-trips via ascii) —
                    # kept as-is; confirm intended input types.
                    to_title = to_title.decode('utf-8')
                editable = content.get_editable()
                if editable is None:
                    if result is None:
                        result = ContentError(
                            _("There is no editable version to set the title on."),
                            content)
                elif editable.get_title() != to_title:
                    try:
                        editable.set_title(to_title)
                    except ContentError as error:
                        result = error

            if result is None:
                result = content
            data = yield result

        if any_renames:
            notifyContainerModified(self.context)

    @silvaconf.protect('silva.ChangeSilvaContent')
    @cofunction
    def ghoster(self):
        # Cofunction: send content objects in; receive a ghost of the
        # content (or a plain copy when no ghost factory applies, or an
        # error object) back.
        content = yield
        while content is not None:
            result = self.__verify_copyable(content)
            if result is None:
                factory = get_ghost_factory(self.context, content)
                if factory is None:
                    # No ghost factory for this type: fall back to copy.
                    identifier = self.__make_id('copy', content.getId())
                    result = self.__copy(content, identifier)
                else:
                    identifier = self.__make_id('ghost', content.getId())
                    try:
                        result = factory(identifier)
                    except ContentError as result:
                        pass
            content = yield result

    @silvaconf.protect('silva.ChangeSilvaContent')
    @cofunction
    def deleter(self):
        # Cofunction: send content objects in; receive the accepted
        # content (or an error object) back. Deletion itself is deferred
        # until the cofunction is closed (None sent), so events fire in
        # batched before/delete/after phases.
        to_delete = []
        container_ids = set(self.context.objectIds())
        try:
            protected = self.context._reserved_names
        except:
            # NOTE(review): bare except — presumably guards a missing
            # attribute; an AttributeError catch would be narrower.
            protected = ()

        content = yield
        while content is not None:
            try:
                content.is_deletable()
            except ContentError as error:
                result = error
            else:
                content_id = content.getId()
                if (content_id in container_ids and
                        content_id not in protected and
                        aq_base(self.context) is aq_base(aq_parent(content))):
                    to_delete.append((content_id, content))
                    result = content
                else:
                    result = ContentError(
                        _("Cannot delete content."), content)
            content = yield result

        # Event
        for identifier, content in to_delete:
            compatibilityCall('manage_beforeDelete', content, content,
                              self.context)
            notify(ObjectWillBeRemovedEvent(content, self.context,
                                            identifier))

        # Delete
        for identifier, content in to_delete:
            self.context._objects = tuple(
                [i for i in self.context._objects if i['id'] != identifier])
            self.context._delOb(identifier)
            try:
                content._v__object_deleted__ = 1
            except:
                # NOTE(review): best-effort volatile flag; bare except
                # kept deliberately so deletion never fails here.
                pass

        # Event
        for identifier, content in to_delete:
            notify(ObjectRemovedEvent(content, self.context, identifier))

        if to_delete:
            notifyContainerModified(self.context)
class Transporter(grok.GlobalUtility):
    """ The transporter utility is able to copy objects to other clients. """

    grok.provides(ITransporter)

    def transport_to(self, obj, target_cid, container_path):
        """ Copies a *object* to another client (*target_cid*). """
        # Serialise the object (base data + all collector data) to JSON and
        # POST it to the receiving view on the target client.
        jsondata = json.dumps(self._extract_data(obj))
        request_data = {
            REQUEST_KEY: jsondata,
        }
        return remote_json_request(target_cid,
                                   '@@transporter-receive-object',
                                   path=container_path,
                                   data=request_data)

    def transport_from(self, container, source_cid, path):
        """ Copies the object under *path* from client with *source_cid*
        into the local folder *container*

        *path* is the relative path of the object to its plone site root.
        """
        data = remote_json_request(source_cid,
                                   '@@transporter-extract-object-json',
                                   path=path)
        # JSON only carries basic types; decode them back into rich values.
        data = encode_after_json(data)
        obj = self._create_object(container, data)
        return obj

    def receive(self, container, request):
        # Counterpart of transport_to: deserialise the payload carried in
        # the request and create the object in *container*.
        jsondata = request.get(REQUEST_KEY)
        data = json.loads(jsondata)
        data = encode_after_json(data)
        obj = self._create_object(container, data)
        return obj

    def extract(self, obj):
        """ Returns a JSON dump of *obj* """
        return json.dumps(self._extract_data(obj))

    def _extract_data(self, obj):
        """ Serializes a object """
        data = {}
        # base data
        creator = self._get_object_creator(obj.portal_type)
        data[BASEDATA_KEY] = creator.extract(obj)
        # collect data
        collectors = getAdapters((obj, ), IDataCollector)
        for name, collector in collectors:
            data[name] = collector.extract()
        # Convert rich values into JSON-safe basic types.
        data = decode_for_json(data)
        return data

    def _create_object(self, container, data):
        """ Creates the object with the data """
        portal_type = data[BASEDATA_KEY]['portal_type']
        # base data
        creator = self._get_object_creator(portal_type)
        obj = creator.create(container, data[BASEDATA_KEY])
        # insert data from collectors
        collectors = getAdapters((obj, ), IDataCollector)
        for name, collector in collectors:
            collector.insert(data[name])
        # let the object reindex by creating a modified event, which also
        # runs stuff like globalindex, if needed.
        notify(ObjectModifiedEvent(obj))
        return obj

    def _get_object_creator(self, portal_type):
        # get the FTI
        fti = getUtility(IDexterityFTI, name=portal_type)
        # do we have a specific one?
        creator = queryAdapter(fti, IObjectCreator, name=portal_type)
        if not creator:
            # Fall back to the unnamed default creator.
            creator = getAdapter(fti, IObjectCreator, name='')
        return creator
class FiveInchClub(grok.GlobalUtility):
    """Global utility registered under the name 'five_inch'.

    Declares both IFiveClub and ITinyClub, but is registered (and thus
    looked up) as a named IFiveClub utility.
    """
    grok.implements(IFiveClub, ITinyClub)
    grok.provides(IFiveClub)
    grok.name('five_inch')
class ExternalSourceSaveFilter(TransformationFilter):
    """Process External Source information on save.
    """
    grok.implements(ISaveEditorFilter)
    grok.provides(ISaveEditorFilter)
    grok.order(20)
    grok.adapts(ISourceEditableVersion, IBrowserRequest)

    def prepare(self, name, text):
        # Source manager bound to the edited content; `seen` collects the
        # identifiers of source instances still referenced after this save.
        self.manager = getComponent(
            self.context, IExternalSourceManager)(self.context)
        self.seen = set()

    def __call__(self, tree):
        # For every external-source placeholder in the editor markup,
        # create or update the corresponding source instance.
        for node in tree.xpath(SOURCE_XPATH):
            name = node.attrib.get('data-silva-name')
            instance = node.attrib.get('data-silva-instance')
            changed = 'data-silva-settings' in node.attrib
            # Settings arrive URL-encoded in the editor attribute.
            parameters = parse_qs(node.attrib.get('data-silva-settings', ''))
            try:
                source = self.manager(
                    TestRequest(form=parameters),
                    instance=instance, name=name)
            except SourceError:
                logger.error(
                    u'Broken source %s(%s) on content %s',
                    name, instance,
                    '/'.join(self.context.getPhysicalPath()))
            else:
                if instance is None:
                    # First save of this placeholder: create the instance.
                    status = source.create()
                    instance = source.getId()
                elif changed:
                    status = source.save()
                if changed and status is silvaforms.FAILURE:
                    # Collect per-field validation errors for the log.
                    errors = {}
                    for error in source.errors:
                        if error.identifier != 'form':
                            errors[error.identifier] = error.title
                    logger.error(
                        u"Errors %s while saving source parameters %s "
                        u"for %s(%s) on content %s",
                        errors, parameters, name, instance,
                        '/'.join(self.context.getPhysicalPath()))
                # NOTE(review): reads 'data-silva-instance' but writes
                # 'data-source-instance' — confirm the asymmetry is intended.
                node.attrib['data-source-instance'] = instance
                self.seen.add(instance)
                clean_editor_attributes(node)

    def finalize(self):
        # Remove all sources that we didn't see.
        for identifier in set(self.manager.all()).difference(self.seen):
            try:
                source = self.manager(self.request, instance=identifier)
                source.remove()
            except SourceError:
                logger.error(
                    'Error while removing source %s from text %s',
                    identifier,
                    '/'.join(self.context.getPhysicalPath()))

    def truncate(self, name, text):
        # Drop every source instance attached to the content (used when the
        # whole text is discarded).
        manager = getComponent(
            self.context, IExternalSourceManager)(self.context)
        for identifier in manager.all():
            try:
                source = manager(self.request, instance=identifier)
                source.remove()
            except SourceError:
                logger.error(
                    'Error while removing source %s from text %s',
                    identifier,
                    '/'.join(self.context.getPhysicalPath()))
class TreeContent(grok.Adapter):
    """Adapter exposing flattened ``(indent, content)`` tree listings of a
    container's publishable contents.
    """
    grok.context(IContainer)
    grok.implements(ITreeContents)
    grok.provides(ITreeContents)

    def get_tree(self, depth=-1):
        """Get flattened tree of contents.

        The 'depth' argument limits the number of levels, defaults to
        unlimited.
        """
        rows = []
        self._get_tree_helper(rows, 0, depth)
        return rows

    def get_container_tree(self, depth=-1):
        """Get flattened tree restricted to containers."""
        rows = []
        self._get_container_tree_helper(rows, 0, depth)
        return rows

    def get_public_tree(self, depth=-1):
        """Get flattened tree with public content, excluding
        subpublications.

        The 'depth' argument limits the number of levels, defaults to
        unlimited.
        """
        accept = getUtility(IContentFilteringService).filter(None).filter
        rows = []
        self._get_public_tree_helper(rows, 0, depth, 0, accept)
        return rows

    def get_public_tree_all(self, depth=-1):
        """Get flattened tree with public content, including
        subpublications.

        The 'depth' argument limits the number of levels, defaults to
        unlimited.
        """
        accept = getUtility(IContentFilteringService).filter(None).filter
        rows = []
        self._get_public_tree_helper(rows, 0, depth, 1, accept)
        return rows

    def get_status_tree(self, depth=-1):
        """Get the Silva status tree (default document included)."""
        rows = []
        self._get_status_tree_helper(rows, 0, depth)
        return rows

    def _get_tree_helper(self, l, indent, depth):
        # Append every publishable except the default 'index' document;
        # recurse into transparent containers while depth allows.
        for child in self.context.get_ordered_publishables():
            if child.getId() == 'index':
                # The default document is never listed.
                continue
            l.append((indent, child))
            if not (IContainer.providedBy(child) and child.is_transparent()):
                continue
            if depth == -1 or indent < depth:
                ITreeContents(child)._get_tree_helper(l, indent + 1, depth)

    def _get_container_tree_helper(self, l, indent, depth):
        # Same traversal, restricted to containers.
        for child in self.context.get_ordered_publishables():
            if not IContainer.providedBy(child):
                continue
            l.append((indent, child))
            if child.is_transparent() and (depth == -1 or indent < depth):
                ITreeContents(child)._get_container_tree_helper(
                    l, indent + 1, depth)

    def _get_public_tree_helper(self, l, indent, depth,
                                include_publications, filters):
        # Append viewable content; descend into containers that are either
        # transparent or, when requested, publications as well.
        for child in self.context.get_ordered_publishables():
            if not filters(child):
                continue
            l.append((indent, child))
            descend = IContainer.providedBy(child) and (
                child.is_transparent() or include_publications)
            if descend and (depth == -1 or indent < depth):
                ITreeContents(child)._get_public_tree_helper(
                    l, indent + 1, depth, include_publications, filters)

    def _get_status_tree_helper(self, l, indent, depth):
        # The default document comes first, then the ordered publishables.
        default = self.context.get_default()
        if default is not None:
            l.append((indent, default))
        for child in self.context.get_ordered_publishables():
            l.append((indent, child))
            if not IContainer.providedBy(child):
                continue
            if (depth == -1 or indent < depth) and child.is_transparent():
                ITreeContents(child)._get_status_tree_helper(
                    l, indent + 1, depth)
class AddableContents(grok.Adapter):
    """Compute which content types may be added inside this folder."""

    grok.context(IFolder)
    grok.implements(IAddableContents)
    grok.provides(IAddableContents)

    # Addables must provide at least these interfaces by default.
    REQUIRES = [ISilvaObject]

    def __init__(self, context):
        self.context = context
        self.root = context.get_root()
        self._is_forbidden = self.root.is_silva_addable_forbidden

    def get_authorized_addables(self, require=None):
        """Container addables the current user has permission to add."""
        check_permission = getSecurityManager().checkPermission
        return [name for name in self.get_container_addables(require)
                if check_permission('Add %ss' % name, self.context)]

    def get_container_addables(self, require=None):
        """Addables allowed by any local restriction on the container."""
        candidates = self.get_all_addables(require)
        restriction = self._get_locally_addables()
        if restriction is None:
            # No local restriction anywhere up the chain.
            return candidates
        return [name for name in candidates if name in restriction]

    def get_all_addables(self, require=None):
        """Names of all installed addables matching the requirements."""
        if require is None:
            requires = list(self.REQUIRES)
        elif IInterface.providedBy(require):
            requires = [require]
        else:
            requires = require
        return [addable['name']
                for addable in extensionRegistry.get_addables(
                    requires=requires)
                if self._is_installed(addable)]

    def _get_locally_addables(self):
        # Walk up the folder chain until one defines an explicit
        # restriction; None means "no restriction".
        container = self.context
        while IFolder.providedBy(container):
            addables = container.get_silva_addables_allowed_in_container()
            if addables is not None:
                return addables
            container = aq_parent(container)
        return None

    def _is_installed(self, content):
        # The Silva root itself is never addable; everything else must not
        # be forbidden and must come from an installed extension.
        if 'instance' not in content:
            return False
        if IRoot.implementedBy(content['instance']):
            return False
        if self._is_forbidden(content['name']):
            return False
        return extensionRegistry.is_installed(content['product'], self.root)
class CutAction(grok.Adapter):
    """Cut Action implementation that performs "cut" on the items in cart."""

    grok.context(ISiteRoot)
    grok.provides(ICartAction)
    grok.name(NAME)

    name = NAME
    title = TITLE
    weight = WEIGHT

    def run(self):
        """Cut every item currently in the cart onto the clipboard.

        Plone's own manage_cutObjects only handles children of a single
        parent, while the cart may reference objects with different
        parents, so the clipboard bookkeeping (monikers + '__cp' cookie)
        is re-implemented here.
        """
        request = self.context.REQUEST
        cart = self.context.restrictedTraverse('cart').cart

        # Build the list of "Monik-ed" paths that will go on the clipboard.
        monikers = []
        for uuid in cart:
            item = api.content.get(UID=uuid)
            if item is None:
                # Deleted behind our back — nothing to cut for this entry.
                continue
            if item.wl_isLocked() or not item.cb_isMoveable():
                continue
            monikers.append(Moniker(item).dump())

        # Store the cut data in the clipboard cookie and request.
        # TODO: what if there's nothing in the list?
        clipboard = _cb_encode((1, monikers))  # "clipboard encode"
        response = request.response
        response.setCookie(
            '__cp', clipboard, path='{0}'.format(cookie_path(request)))
        request['__cp'] = clipboard

        api.portal.show_message(
            message="{0} item(s) cut.".format(len(monikers)),
            request=request,
            type="info")
        response.redirect(api.portal.get().absolute_url() + '/@@cart')
class OGDSUpdater(grok.Adapter):
    """Adapter to synchronize users and groups from LDAP into OGDS.
    """
    grok.provides(IOGDSUpdater)
    grok.context(IPloneSiteRoot)

    def __init__(self, context):
        self.context = context

    def get_sql_user(self, userid):
        """Returns the OGDS user object identified by `userid`.
        """
        session = create_session()
        return session.query(User).filter_by(userid=userid).first()

    def user_exists(self, userid):
        """Checks whether the OGDS user identified by `userid` exists or not.
        """
        session = create_session()
        return session.query(User).filter_by(userid=userid).count() != 0

    def group_exists(self, groupid):
        """Checks whether the OGDS group identified by `groupid` exists or not.
        """
        session = create_session()
        return session.query(Group).filter_by(groupid=groupid).count() != 0

    def _ldap_plugins(self):
        # All LDAP PAS plugins configured on this site's acl_users.
        ldap_plugins = []
        for item in self.context['acl_users'].objectValues():
            if ILDAPMultiPlugin.providedBy(item):
                ldap_plugins.append(item)
        return ldap_plugins

    def _get_uid_attr(self, ldap_userfolder):
        """Returns the UID attribute from the given LDAPUserFolder. If that
        attribute is mapped, the mapped public name will be returned.
        """
        uid_attr = ldap_userfolder._uid_attr
        schema_dicts = ldap_userfolder.getSchemaDict()
        for schema_map in schema_dicts:
            if uid_attr == schema_map['ldap_name']:
                return schema_map['public_name']
        return uid_attr

    def import_users(self):
        """Imports users from all the configured LDAP plugins into OGDS.
        """
        session = create_session()

        # Set all SQL users inactive first - the ones still contained in the
        # LDAP will be set active again below (in the same transaction).
        for user in session.query(User):
            user.active = 0

        for plugin in self._ldap_plugins():
            ldap_userfolder = plugin._getLDAPUserFolder()
            uid_attr = self._get_uid_attr(ldap_userfolder)
            ldap_util = ILDAPSearch(ldap_userfolder)
            ldap_users = ldap_util.get_users()
            for ldap_user in ldap_users:
                dn, info = ldap_user
                # Ignore users without an UID in LDAP
                if not uid_attr in info:
                    continue
                userid = info[uid_attr]
                # Skip users with uid longer than SQL 'userid' column
                # FIXME: Increase size of SQL column to 64
                if len(userid) > 30:
                    continue
                if not self.user_exists(userid):
                    # Create the new user
                    user = User(userid)
                    session.add(user)
                else:
                    # Get the existing user
                    user = session.query(User).filter_by(
                        userid=userid).first()
                # Iterate over all SQL columns and update their values
                columns = User.__table__.columns
                for col in columns:
                    if col.name == 'userid':
                        # We already set the userid when creating the user
                        # object, and it may not be called the same in LDAP
                        # as in our SQL model
                        continue
                    value = info.get(col.name)
                    # We can't store sequences in SQL columns. So if we do
                    # get a multi-valued field to be stored directly in
                    # OGDS, we treat it as a multi-line string and join it.
                    if isinstance(value, list) or isinstance(value, tuple):
                        value = ' '.join([str(v) for v in value])
                    setattr(user, col.name, value)
                # Set the user active
                user.active = 1
                logger.info("Imported user '%s'..." % userid)
        session.flush()

    def import_groups(self):
        """Imports groups from all the configured LDAP plugins into OGDS.
        """
        session = create_session()
        for plugin in self._ldap_plugins():
            ldap_userfolder = plugin._getLDAPUserFolder()
            ldap_util = ILDAPSearch(ldap_userfolder)
            ldap_groups = ldap_util.get_groups()
            for ldap_group in ldap_groups:
                dn, info = ldap_group
                # Group name is in the 'cn' attribute, which may be mapped
                # to 'fullname'
                if 'cn' in info:
                    groupid = info['cn']
                    if isinstance(groupid, list):
                        groupid = groupid[0]
                else:
                    groupid = info['fullname']
                groupid = groupid.decode('utf-8')
                info['groupid'] = groupid
                if not self.group_exists(groupid):
                    # Create the new group
                    group = Group(groupid)
                    session.add(group)
                else:
                    # Get the existing group
                    group = session.query(Group).filter_by(
                        groupid=groupid).first()
                # Iterate over all SQL columns and update their values
                columns = Group.__table__.columns
                for col in columns:
                    value = info.get(col.name)
                    # We can't store sequences in SQL columns. So if we do
                    # get a multi-valued field to be stored directly in
                    # OGDS, we treat it as a multi-line string and join it.
                    if isinstance(value, list) or isinstance(value, tuple):
                        value = ' '.join([str(v) for v in value])
                    setattr(group, col.name, value)
                # Resolve every member DN to an already-imported OGDS user;
                # members that cannot be resolved are logged and skipped.
                contained_users = []
                group_members = ldap_util.get_group_members(info)
                for user_dn in group_members:
                    try:
                        ldap_user = ldap_util.entry_by_dn(user_dn)
                        user_dn, user_info = ldap_user
                        if not ldap_util.is_ad:
                            if not 'userid' in user_info:
                                logger.warn(NO_UID_MSG % user_dn)
                                continue
                            userid = user_info['userid']
                        else:
                            # Active Directory
                            uid_found = False
                            for uid_key in AD_UID_KEYS:
                                if uid_key in user_info:
                                    userid = user_info[uid_key]
                                    uid_found = True
                                    break
                            if not uid_found:
                                # No suitable UID found, skip this user
                                logger.warn(
                                    NO_UID_AD_MSG % (AD_UID_KEYS, user_dn))
                                continue
                        if isinstance(userid, list):
                            userid = userid[0]
                        user = self.get_sql_user(userid)
                        if user is None:
                            logger.warn(USER_NOT_FOUND_SQL % userid)
                            continue
                        contained_users.append(user)
                        logger.info("Importing user '%s'..." % userid)
                    except NO_SUCH_OBJECT:
                        logger.warn(USER_NOT_FOUND_LDAP % user_dn)
                group.users = contained_users
        session.flush()
class ContactWidgetSettings(grok.GlobalUtility):
    """Default settings used by the contact selection widget.

    Registered as the IContactWidgetSettings utility; also declares
    ICustomSettings so it can serve as the queryAdapter fallback below.
    """
    grok.provides(IContactWidgetSettings)
    grok.implements(ICustomSettings)

    def add_url_for_portal_type(self, directory_url, portal_type):
        # Default "add content" URL inside the directory.
        url = '%s/++add++%s' % (directory_url, portal_type)
        return url

    def add_contact_infos(self, widget):
        """Build the "create new contact" actions for the given widget.

        Returns a dict with the action list, whether the popup closes on
        click, and the JS item formatter used by the autocomplete.
        """
        source = widget.bound_source
        criteria = source.selectable_filter.criteria
        addlink_enabled = widget.field.addlink
        portal_types = criteria.get('portal_type', [])
        catalog = getToolByName(widget.context, 'portal_catalog')
        # The add link needs a directory the user may add content to.
        results = catalog.unrestrictedSearchResults(portal_type='directory')
        actions = []
        if len(results) == 0:
            addlink_enabled = False
        else:
            directory = results[0].getObject()
            sm = getSecurityManager()
            if not sm.checkPermission("Add portal content", directory):
                addlink_enabled = False
        close_on_click = True
        if addlink_enabled:
            directory_url = directory.absolute_url()
            if len(portal_types) == 1:
                portal_type = portal_types[0]
                if portal_type == 'held_position' and not IPerson.providedBy(
                        widget.context):
                    # Held positions go through the generic add-contact
                    # wizard, opened in an overlay form.
                    url = "%s/@@add-contact" % directory_url
                    type_name = _(u"Contact")
                    label = _(u"Create ${name}", mapping={'name': type_name})
                    if getattr(source, 'relations', None):
                        # if we have a relation, with an organization or a
                        # position we will pre-complete contact creation form
                        if 'position' in source.relations:
                            related_path = source.relations['position']
                            related_to = api.content.get(related_path)
                            if related_to is not None:
                                label = _(u"Create ${name} (${position})",
                                          mapping={
                                              'name': type_name,
                                              'position': related_to.Title()
                                          })
                                url += '?oform.widgets.%s=%s' % (
                                    related_to.portal_type, '/'.join(
                                        related_to.getPhysicalPath()))
                    action = {
                        'url': url,
                        'label': label,
                        'klass': 'addnew',
                        'formselector': '#oform',
                        'closeselector': '[name="oform.buttons.cancel"]'
                    }
                    actions.append(action)
                    close_on_click = False
                else:
                    # Direct add form; a directory-specific ICustomSettings
                    # adapter may override the URL (self is the default).
                    custom_settings = queryAdapter(directory, ICustomSettings,
                                                   default=self)
                    url = custom_settings.add_url_for_portal_type(
                        directory_url, portal_type)
                    fti = getUtility(IDexterityFTI,
                                     name=portal_type)
                    type_name = fti.Title()
                    label = _(u"Create ${name}", mapping={'name': type_name})
                    action = {'url': url, 'label': label}
                    actions.append(action)
            else:
                # Several portal types: route to one of the wizards.
                if len(portal_types) == 2 and \
                        'organization' in portal_types and \
                        'position' in portal_types:
                    url = "%s/@@add-organization" % directory_url
                    type_name = _(u"organization/position")
                else:
                    url = "%s/@@add-contact" % directory_url
                    type_name = _(u"Contact")
                close_on_click = False
                label = _(u"Create ${name}", mapping={'name': type_name})
                action = {
                    'url': url,
                    'label': label,
                    'klass': 'addnew',
                    'formselector': '#oform',
                    'closeselector': '[name="oform.buttons.cancel"]'
                }
                actions.append(action)
        return {
            'actions': actions,
            'close_on_click': close_on_click,
            'formatItem': """function(row, idx, count, value) { return '<img src="' + portal_url + '/' + row[2] + '_icon.png' +'" /> ' + row[1] }"""
        }
class DexterityFieldDataCollector(grok.Adapter):
    """The `DexterityFieldDataCollector` is used for extracting field data
    from a dexterity object and for setting it later on the target.
    This adapter is used by the transporter utility.
    """
    grok.context(IDexterityContent)
    grok.provides(IDataCollector)
    grok.name('field-data')

    def extract(self):
        """Extracts the field data and returns a dict of all data.

        Returns a dict mapping each schema/behavior name to a dict of
        JSON-serializable field values.
        Raises TransportationError when two schemata share a name.
        """
        data = {}
        for schemata in iterSchemata(self.context):
            subdata = {}
            adapted = schemata(self.context)
            for name, field in schema.getFieldsInOrder(schemata):
                value = getattr(adapted, name, _marker)
                # Compare against the sentinel by identity, not equality:
                # arbitrary field values may implement __eq__ oddly (or
                # raise) when compared with a foreign object.
                if value is _marker:
                    value = getattr(self.context, name, None)
                subdata[name] = self.pack(name, field, value)
            if schemata.getName() in data:
                raise TransportationError(
                    ('Duplicate behavior names are not supported',
                     schemata.getName()))
            data[schemata.getName()] = subdata
        return data

    def insert(self, data):
        """Inserts the field data on self.context.

        `data` is a dict as produced by `extract`.
        """
        for schemata in iterSchemata(self.context):
            adapted = schemata(self.context)
            subdata = data[schemata.getName()]
            for name, field in schema.getFieldsInOrder(schemata):
                value = self.unpack(name, field, subdata[name])
                # Identity check: only the sentinel itself is skipped.
                if value is not _marker:
                    setattr(adapted, name, value)

    def pack(self, name, field, value):
        """Packs the field data and makes it ready for transportation with
        json, which does only support basic data types.
        """
        if self._provided_by_one_of(field, [
                schema.interfaces.IDate,
                schema.interfaces.ITime,
                schema.interfaces.IDatetime,
        ]):
            # Date/time values travel as their string representation.
            if value:
                return str(value)
        elif self._provided_by_one_of(field, [
                INamedFileField,
        ]):
            # File payloads travel base64-encoded with their filename.
            if value:
                return {
                    'filename': value.filename,
                    'data': base64.encodestring(value.data),
                }
        elif self._provided_by_one_of(field, (
                IRelation,
                IRelationChoice,
                IRelationList,
        )):
            # Remove all relations since we cannot guarantee anyway that
            # they are on the target. Relations have to be rebuilt by the
            # tool which uses the transporter - if required.
            if self._provided_by_one_of(field, (IRelation, IRelationChoice)):
                return None
            elif self._provided_by_one_of(field, (IRelationList, )):
                return []
        return value

    def unpack(self, name, field, value):
        """Unpacks the value from the basic json types to the objects which
        are stored on the field later.
        """
        if self._provided_by_one_of(field, [
                schema.interfaces.IDate,
                schema.interfaces.ITime,
                schema.interfaces.IDatetime,
        ]):
            if value:
                return DateTime.DateTime(value).asdatetime()
        if self._provided_by_one_of(field, [INamedFileField]):
            if value and isinstance(value, dict):
                filename = value['filename']
                data = base64.decodestring(value['data'])
                return field._type(data=data, filename=filename)
        return value

    def _provided_by_one_of(self, obj, ifaces):
        """Checks if at least one interface of the list `ifaces` is
        provided by the `obj`.
        """
        return any(ifc.providedBy(obj) for ifc in ifaces)
class ZipFileImporter(grok.Adapter):
    """ Adapter for container-like objects to facilitate the import
    of archive files (e.g. zipfiles) and create Assets out of its contents
    and, optionally, to recreate the 'directory' structure contained in the
    archive file.
    """
    grok.implements(interfaces.IArchiveFileImporter)
    grok.provides(interfaces.IArchiveFileImporter)
    grok.context(interfaces.IContainer)

    def importArchive(self, archive, assettitle='', recreatedirs=1, replace=0):
        """Import every file of `archive` into the adapted container.

        Returns (succeeded_list, failed_list) of archive member names;
        a failed entry may also be the path of a folder that could not
        be created.
        """
        zip_file = zipfile.ZipFile(archive)

        # Lists the names of the files in the archive which were
        # succesfully added (or, if something went wrong, list it in
        # failed_list).
        succeeded_list = []
        failed_list = []

        # Extract filenames, not directories.
        for name in zip_file.namelist():
            path, filename = os.path.split(name)
            if not filename:
                # Its a directory entry
                continue
            # Skip Mac archive metadata. Raw strings so the regex escapes
            # are not swallowed by the Python string literal.
            if (re.match(r"^__MACOSX", path) or
                    re.match(r".*\.DS_Store$", filename)):
                continue
            if recreatedirs and path:
                dirs = path.split('/')
                container = self._getSilvaContainer(
                    self.context, dirs, replace)
                if container is None:
                    failed_list.append('/'.join(dirs))
                    # Creating the folder failed - bailout for this
                    # zipped file...
                    continue
            else:
                filename = name
                container = self.context
            # Actually add object, choosing the factory by guessed mimetype.
            mimetype, enc = contenttype.guess_content_type(name)
            factory = mimetypeRegistry.get(mimetype)
            extracted_file = StringIO(zip_file.read(name))
            identifier = self._makeId(
                filename, container, extracted_file, replace)
            added_object = factory(
                container, identifier, assettitle, extracted_file)
            if added_object is None:
                # Factories return None upon failure.
                # FIXME: can I extract some info for the reason of failure?
                failed_list.append(name)
            else:
                succeeded_list.append(name)
        return succeeded_list, failed_list

    def _makeId(self, filename, container, extracted_file, replace):
        # With replace on, an existing object with the same id is deleted;
        # otherwise a fresh unique id is generated for the asset.
        if replace:
            identifier = filename
            if identifier in container.objectIds():
                container.manage_delObjects([identifier])
        else:
            identifier = self._getUniqueId(
                container, filename,
                file=extracted_file, interface=interfaces.IAsset)
        return identifier

    def _getSilvaContainer(self, context, path, replace=0):
        # Walk (and create as needed) the folder chain for `path`.
        # Returns None when a folder could not be created.
        container = context
        for segment in path:
            if replace and segment in container.objectIds():
                container = container._getOb(segment)
            else:
                container = self._addSilvaContainer(container, segment)
                if container is None:
                    # Bug fix: previously a failed creation in the middle
                    # of the path made the next iteration call objectIds()
                    # on None (AttributeError). Fail fast instead.
                    return None
        return container

    def _addSilvaContainer(self, context, id):
        # Return an existing folder with this id, create a new one, or
        # return None when the id is not usable for a container.
        idObj = mangle.Id(
            context, id, interface=interfaces.IContainer, allow_dup=1)
        if not idObj.isValid():
            return None
        while id in context.objectIds():
            obj = context[id]
            if interfaces.IContainer.providedBy(obj):
                return obj
            # Id is taken by a non-container: try a new variation.
            id = str(idObj.new())
        context.manage_addProduct['Silva'].manage_addFolder(id, id)
        return context[id]

    def _getUniqueId(self, context, suggestion, **kw):
        # Make filename valid and unique.
        unique_id = mangle.Id(context, suggestion, **kw)
        unique_id.cook().unique()
        return str(unique_id)
class SecurityCheckerUtility(grok.GlobalUtility): """Verificador de Segurança. """ grok.provides(ISecurityChecker) def _run_verificacoes(self, acao, verificacoes, **kwargs): """Esse método executa as verificações, de acordo com as definições da estrutura de dados CONFIG, considerando que as componentes da tupla representam um OR das diversas possibilidades de verificação. Quando a componente for uma tupla, a validação deve ser calculada como um AND das componentes. """ # o widget de autocomplete faz chamadas ajax no contexto da # view e esses requests não devem ser bloqueados if kwargs.get('ajax', None) is not None and acao.startswith('acessar_'): r = getRequest() url = r.URL view = acao.replace('acessar_', '/@@').replace('_', '-') if not url.endswith(view): logger( _(u'Permitindo acesso direto a URL: ') + unicode(url, 'utf-8')) return True context = self for v in verificacoes: if type(v) == type(()): result = True for vi in v: func = getattr(context, "_valida_%s" % vi, None) if func is None: result = False break ret = func(**kwargs) if not ret: result = False break if result: return True else: func = getattr(context, "_valida_%s" % v, None) if func is not None: ret = func(**kwargs) if ret: return True return False def check(self, acao, **kwargs): verificacoes = CONFIG.get(acao, None) if verificacoes is None: logger( _(u'Ação informada não existe na configuração do sistema. Ação: ' ) + unicode(acao, 'utf-8')) raise Unauthorized return self._run_verificacoes(acao, verificacoes, **kwargs) def enforce(self, acao, **kwargs): if not self.check(acao, **kwargs): logger( kwargs.get( 'msg', _(u'Privilégios Insuficientes. 
Ação: ') + unicode(acao, 'utf-8'))) raise Unauthorized # Métodos Auxiliares # def _valida_role(self, role): pm = getToolByName(getSite(), 'portal_membership') return role in pm.getAuthenticatedMember().getRoles() def _valida_permissao(self, permissao): pu = getToolByName(getSite(), 'portal_url') pm = getToolByName(getSite(), 'portal_membership') portal = pu.getPortalObject() return pm.checkPermission(permissao, portal) def _get_area_usuario(self): pm = getToolByName(getSite(), 'portal_membership') user_id = str(pm.getAuthenticatedMember()) api = getUtility(ISPDOAPI) pessoa = api.getPessoaByEmail(user_id) if pessoa is not None: return pessoa.area_id def _valida_area_responsavel(self, area_id): session = Session() return bool(session.query(db.Responsavel).\ filter_by(area_id=area_id).first()) # Verificações # def _valida_privilegio_usuario(self, **kwargs): """Estar autenticado com role ROLE_USUARIO e possuir um registro pessoa que case com o e-mail de login. """ pm = getToolByName(getSite(), 'portal_membership') user_id = str(pm.getAuthenticatedMember()) api = getUtility(ISPDOAPI) return self._valida_role(ROLE_USUARIO) and api.getPessoaByEmail( user_id) is not None def _valida_privilegio_operador(self, **kwargs): """Estar autenticado com role ROLE_OPERADOR e possuir um registro pessoa lotado em uma área do organograma. """ if not self._valida_role(ROLE_OPERADOR): return False area_id = self._get_area_usuario() if area_id is None: return False return self._valida_area_responsavel(area_id) def _valida_privilegio_gestor(self, **kwargs): """Estar autenticado com role ROLE_GESTOR e possuir um registro pessoa lotado em uma área do organograma. """ if not self._valida_role(ROLE_GESTOR): return False area_id = self._get_area_usuario() if area_id is None: return False return self._valida_area_responsavel(area_id) def _valida_privilegio_admin(self, **kwargs): """Estar autenticado com ROLE_ADMIN e ter a permissão “Manage portal” no contexto da raiz do site. 
""" return self._valida_role(ROLE_ADMIN) and self._valida_permissao( 'Manage portal') def _valida_privilegio_criador_anexo(self, **kwargs): """O anexo deve ter sido criado pelo usuário que quer apagá-lo. Não deve existir um tramite com data de disponibilização posterior a data do anexo. """ anexo_id = kwargs.get('anexo_id', None) protocolo_id = kwargs.get('protocolo_id', None) if anexo_id is None or protocolo_id is None: logger( _(u'O método _valida_privilegio_criador_anexo não recebeu os parâmetros anexo_id ou protocolo_id.' )) return False session = Session() api = getUtility(ISPDOAPI) usuario = api.getAuthId() anexo = session.query(db.Anexo).\ filter_by(id=anexo_id).\ filter_by(protocolo_id=protocolo_id).\ filter_by(usuario=usuario).first() if anexo is None: return False tramite = session.query(db.Tramite).\ filter_by(protocolo_id=anexo.protocolo_id).\ filter(db.Tramite.data_disponibilizacao > anexo.data_anexo).first() return not bool(tramite) def _valida_privilegio_criador_observacao(self, **kwargs): """A observação deve ter sido criada pelo usuário que quer modificá-la ou apagá-la. Não deve existir um tramite com data de disponibilização posterior a data da observação. """ observacao_id = kwargs.get('observacao_id', None) protocolo_id = kwargs.get('protocolo_id', None) if observacao_id is None or protocolo_id is None: logger( _(u'O método _valida_privilegio_criador_observacao não recebeu os parâmetros observacao_id ou protocolo_id.' 
    # NOTE(review): the two lines below are the tail of a validation method
    # whose "def" lies before this chunk; only the visible part is
    # reproduced and documented here.
                  ))
            return False
        session = Session()
        api = getUtility(ISPDOAPI)
        usuario = api.getAuthId()
        # The observation must exist, belong to this protocol and have been
        # written by the authenticated user.
        observacao = session.query(db.Observacao).\
            filter_by(id=observacao_id).\
            filter_by(protocolo_id=protocolo_id).\
            filter_by(usuario=usuario).first()
        if observacao is None:
            return False
        # Fails when the protocol was forwarded (disponibilizado) again
        # after the observation was written.
        tramite = session.query(db.Tramite).\
            filter_by(protocolo_id=observacao.protocolo_id).\
            filter(db.Tramite.data_disponibilizacao >
                   observacao.data_observacao).first()
        return not bool(tramite)

    def _valida_protocolo_apensado(self, **kwargs):
        """Validate that the protocol is not appended to another protocol.

        Reads ``protocolo_id`` from kwargs, falling back to the current
        request.  True when the protocol exists and ``apenso_id`` is None.
        """
        api = getUtility(ISPDOAPI)
        protocolo_id = kwargs.get('protocolo_id', api.getProtocoloId())
        if protocolo_id is None:
            logger(
                _(u'O método _valida_protocolo_apensado não recebeu o parâmetro protocolo_id.'
                  ))
            return False
        protocolo = api.getProtocolo(protocolo_id)
        if protocolo is None:
            return False
        return protocolo.apenso_id is None

    def _procura_ciclo(self, protocolo, apenso_id, ids_visitados):
        # Depth-first search over the protocol's "apenso" children; True
        # when apenso_id is reachable, i.e. appending would close a cycle.
        ret = False
        for p in protocolo.apenso:
            # ids_visitados avoids an infinite recursion
            if p.id in ids_visitados:
                continue
            ids_visitados.append(p.id)
            if p.id == apenso_id or self._procura_ciclo(
                    p, apenso_id, ids_visitados):
                ret = True
                break
        return ret

    def _valida_protocolo_apenso_ciclo(self, **kwargs):
        """Validate that no cycle exists in the append (apenso) relations.

        Requires both ``protocolo_id`` and ``apenso_id`` in kwargs.
        """
        protocolo_id = kwargs.get('protocolo_id', None)
        apenso_id = kwargs.get('apenso_id', None)
        if protocolo_id is None or apenso_id is None:
            logger(
                _(u'O método _valida_protocolo_apenso_ciclo não recebeu os parâmetros protocolo_id ou apenso_id.'
                  ))
            return False
        api = getUtility(ISPDOAPI)
        protocolo = api.getProtocolo(protocolo_id)
        if protocolo is None:
            return False
        ids_visitados = []
        return not self._procura_ciclo(protocolo, apenso_id, ids_visitados)

    def _compara_protocolos(self, p1, p2):
        # Two protocols share the same "tramitation moment" when they have
        # the same protocol/document type and identical inbox/outbox area
        # sets (compared as sorted lists of area ids).
        if p1.tipoprotocolo != p2.tipoprotocolo or p1.tipodocumento_id != p2.tipodocumento_id:
            return False
        inbox1 = [i.area_id for i in p1.tramite_inbox]
        inbox1.sort()
        inbox2 = [i.area_id for i in p2.tramite_inbox]
        inbox2.sort()
        if inbox1 != inbox2:
            return False
        outbox1 = [i.area_id for i in p1.tramite_outbox]
        outbox1.sort()
        outbox2 = [i.area_id for i in p2.tramite_outbox]
        outbox2.sort()
        if outbox1 != outbox2:
            return False
        return True

    def _valida_protocolo_apenso_momento(self, **kwargs):
        """Validate that two protocols can be appended to each other.

        For a protocol to be appended to another, both must share the same
        tramitation moment: tramiting in the same areas and having the
        same document and protocol types.
        """
        protocolo_id = kwargs.get('protocolo_id', None)
        apenso_id = kwargs.get('apenso_id', None)
        if protocolo_id is None or apenso_id is None:
            logger(
                _(u'O método _valida_protocolo_apenso_momento não recebeu os parâmetros protocolo_id ou apenso_id.'
                  ))
            return False
        api = getUtility(ISPDOAPI)
        protocolo = api.getProtocolo(protocolo_id)
        if protocolo is None:
            return False
        apenso = api.getProtocolo(apenso_id)
        if apenso is None:
            return False
        return self._compara_protocolos(protocolo, apenso)

    def _valida_protocolo_enviado(self, **kwargs):
        """Validate that the protocol was sent to the user's area but has
        not been received yet.
        """
        api = getUtility(ISPDOAPI)
        protocolo_id = kwargs.get('protocolo_id', api.getProtocoloId())
        if protocolo_id is None:
            logger(
                _(u'O método _valida_protocolo_enviado não recebeu o parâmetro protocolo_id.'
                  ))
            return False
        area_id_auth = self._get_area_usuario()
        if area_id_auth is None:
            return False
        session = Session()
        # A pending Tramite (data_recebimento is NULL) addressed to the
        # user's area means the protocol is waiting to be received.
        return bool(session.query(db.Tramite).\
            filter_by(area_id=area_id_auth).\
            filter_by(protocolo_id=protocolo_id).\
            filter_by(data_recebimento=None).first())

    def _valida_protocolo_fluxo_area_inicial(self, **kwargs):
        """Validate the user's area against a rigid workflow definition.

        When the protocol type and document type have a rigid
        (non-flexible) workflow, the area where the user works must be the
        origin area of one of that workflow's initial transitions.  With
        no rigid workflow, any area is accepted.
        """
        tipoprotocolo = kwargs.get('tipoprotocolo', None)
        tipodocumento_id = kwargs.get('tipodocumento_id', None)
        if tipoprotocolo is None or tipodocumento_id is None:
            logger(
                _(u'O método _valida_protocolo_fluxo_area_inicial não recebeu os parâmetros tipoprotocolo e tipodocumento_id.'
                  ))
            return False
        area_id_auth = self._get_area_usuario()
        if area_id_auth is None:
            return False
        session = Session()
        fluxo = session.query(db.Fluxo).\
            filter_by(tipoprotocolo=tipoprotocolo).\
            filter_by(tipodocumento_id=tipodocumento_id).\
            filter_by(flexivel=False).first()
        if fluxo is None:
            # No rigid workflow defined: nothing to enforce.
            return True
        return bool(session.query(db.Transicao).\
            filter_by(fluxo_id=fluxo.id).\
            filter_by(area_origem_id=area_id_auth).\
            filter_by(inicial=True).first())

    def _valida_protocolo_fluxo(self, **kwargs):
        """Validate a send tramitation against a rigid workflow definition.

        When the protocol's type/document type have a rigid workflow,
        every destination area must be reachable from the user's area
        through one of the workflow's transitions.
        """
        protocolo_id = kwargs.get('protocolo_id', None)
        areas = kwargs.get('areas', [])
        if protocolo_id is None or not areas:
            logger(
                _(u'O método _valida_protocolo_fluxo não recebeu os parâmetros protocolo_id e areas.'
                  ))
            return False
        api = getUtility(ISPDOAPI)
        protocolo = api.getProtocolo(protocolo_id)
        if protocolo is None:
            return False
        tipoprotocolo = protocolo.tipoprotocolo
        tipodocumento_id = protocolo.tipodocumento_id
        session = Session()
        fluxo = session.query(db.Fluxo).\
            filter_by(tipoprotocolo=tipoprotocolo).\
            filter_by(tipodocumento_id=tipodocumento_id).\
            filter_by(flexivel=False).first()
        if fluxo is None:
            # No rigid workflow defined: nothing to enforce.
            return True
        area_id_auth = self._get_area_usuario()
        if area_id_auth is None:
            return False
        for area in areas:
            # Every destination must match a transition from the user's area.
            if not bool(session.query(db.Transicao).\
                filter_by(fluxo_id=fluxo.id).\
                filter_by(area_origem_id=area_id_auth).\
                filter_by(area_destino_id=area).first()):
                return False
        return True

    def _valida_protocolo_nao_recebido(self, **kwargs):
        """Validate that the protocol was sent by the user's area but not
        yet received by the destination area.
        """
        api = getUtility(ISPDOAPI)
        protocolo_id = kwargs.get('protocolo_id', api.getProtocoloId())
        if protocolo_id is None:
            logger(
                _(u'O método _valida_protocolo_nao_recebido não recebeu o parâmetro protocolo_id.'
                  ))
            return False
        area_id_auth = self._get_area_usuario()
        if area_id_auth is None:
            return False
        session = Session()
        # An outbox entry for the user's area means the send is still
        # pending on the destination side.
        return bool(session.query(db.TramiteOutbox).\
            filter_by(area_id=area_id_auth).\
            filter_by(protocolo_id=protocolo_id).first())

    def _valida_protocolo_situacao_final(self, **kwargs):
        """Validate that the protocol is not in a final situation."""
        api = getUtility(ISPDOAPI)
        protocolo_id = kwargs.get('protocolo_id', api.getProtocoloId())
        if protocolo_id is None:
            logger(
                _(u'O método _valida_protocolo_situacao_final não recebeu o parâmetro protocolo_id.'
                  ))
            return False
        protocolo = api.getProtocolo(protocolo_id)
        if protocolo is None:
            return False
        return not protocolo.situacao.final

    def _valida_protocolo_tramita_area(self, **kwargs):
        """Validate that the protocol is tramiting in the area where the
        authenticated user works.
        """
        api = getUtility(ISPDOAPI)
        protocolo_id = kwargs.get('protocolo_id', api.getProtocoloId())
        if protocolo_id is None:
            logger(
                _(u'O método _valida_protocolo_tramita_area não recebeu o parâmetro protocolo_id.'
                  ))
            return False
        area_id_auth = self._get_area_usuario()
        if area_id_auth is None:
            return False
        session = Session()
        return bool(session.query(db.TramiteInbox).\
            filter_by(protocolo_id=protocolo_id).\
            filter_by(area_id=area_id_auth).first())
class OrderManager(grok.Annotation):
    """Keep the explicit ordering of the orderable contents of a container.

    The order is stored as a list of integer ids (issued by the IIntIds
    utility), one per publishable, non-default content.
    """
    grok.context(IOrderableContainer)
    grok.provides(IOrderManager)
    grok.implements(IOrderManager)

    # Only contents providing this interface participate in the ordering.
    order_only = IPublishable

    def __init__(self):
        super(OrderManager, self).__init__()
        self.order = []

    def _get_id(self, content):
        """Return the intid of ``content``, caching the utility lookup."""
        # Poor man cache: _v_ attributes are volatile, never persisted.
        utility = getattr(self, '_v_utility', None)
        if utility is None:
            utility = self._v_utility = getUtility(IIntIds)
        return utility.register(content)

    def _is_valid(self, content):
        # Orderable contents are publishable and not the container default.
        return (self.order_only.providedBy(content) and
                not (IPublishable.providedBy(content) and
                     content.is_default()))

    def add(self, content):
        """Append ``content`` at the end of the order.

        Return True if added, False when the content is not orderable or
        already present.
        """
        if self._is_valid(content):
            identifier = self._get_id(content)
            if identifier not in self.order:
                self.order.append(identifier)
                self._p_changed = True
                return True
        return False

    def remove(self, content):
        """Remove ``content`` from the order; return True on success."""
        position = self.get_position(content)
        if position >= 0:
            del self.order[position]
            self._p_changed = True
            return True
        return False

    def move(self, content, position):
        """Move ``content`` to ``position``; return True on success.

        Return False when the target position is out of range or the
        content is not part of the order.
        """
        if 0 <= position < len(self.order):
            old_position = self.get_position(content)
            # Fix: verify the content is actually ordered BEFORE indexing.
            # The previous code read self.order[old_position] first, which
            # silently touched the last item when old_position was -1.
            if old_position >= 0:
                identifier = self.order[old_position]
                del self.order[old_position]
                self.order.insert(position, identifier)
                self._p_changed = True
                notify(
                    ContentOrderChangedEvent(content, position, old_position))
                return True
        return False

    def get_position(self, content):
        """Return the position of ``content``, or -1 if it is not ordered."""
        if not self._is_valid(content):
            return -1
        try:
            return self.order.index(self._get_id(content))
        except ValueError:
            return -1

    def __len__(self):
        return len(self.order)

    def repair(self, contents):
        """Drop stale or duplicate identifiers from the stored order.

        Must be called like this:
        IObjectManager(folder).repair(folder.objectValues())
        Return True when the stored order was modified.
        """
        valid_ids = set()
        for content in contents:
            if self._is_valid(content):
                valid_ids.add(self._get_id(content))
        order = []
        seen_ids = set()
        # Keep only known, first-seen identifiers (drops duplicates).
        changed = False
        for identifier in self.order:
            if identifier in valid_ids and identifier not in seen_ids:
                order.append(identifier)
                seen_ids.add(identifier)
            else:
                changed = True
        if changed:
            self.order = order
        return changed
class DossierDetailsLaTeXView(grok.MultiAdapter, MakoLaTeXView):
    """PDF (LaTeX) view rendering the details of a single dossier."""
    grok.provides(ILaTeXView)
    grok.adapts(Interface, IDossierDetailsLayer, ILaTeXLayout)

    template_directories = ['templates']
    template_name = 'dossierdetails.tex'

    def get_render_arguments(self):
        """Collect every template variable for dossierdetails.tex."""
        self.layout.show_contact = False
        args = {'dossier_metadata': self.get_dossier_metadata()}
        parent = aq_parent(aq_inner(self.context))
        args['is_subdossier'] = IDossierMarker.providedBy(parent)
        args['participants'] = self.get_participants()

        # subdossiers
        args['subdossierstitle'] = translate(
            _('label_subdossiers', default="Subdossiers"),
            context=self.request)
        listing = getMultiAdapter((self.context, self.request, self),
                                  ILaTexListing, name='subdossiers')
        args['subdossiers'] = listing.get_listing(self.get_subdossiers())

        # documents
        args['documentstitle'] = translate(
            _('label_documents', default="Documents"),
            context=self.request)
        listing = getMultiAdapter((self.context, self.request, self),
                                  ILaTexListing, name='documents')
        args['documents'] = listing.get_listing(self.get_documents())

        # tasks
        args['taskstitle'] = translate(
            _('label_tasks', default="Tasks"),
            context=self.request)
        listing = getMultiAdapter((self.context, self.request, self),
                                  ILaTexListing, name='tasks')
        args['tasks'] = listing.get_listing(self.get_tasks())

        # Landscape/long-table support needed by the listing tables.
        self.layout.use_package('pdflscape')
        self.layout.use_package('longtable')
        return args

    def get_metadata_order(self):
        """Return the metadata keys in the order they are rendered."""
        return [
            'reference', 'sequence', 'repository', 'title',
            'subdossier_title', 'state', 'responsible', 'participants',
            'start', 'end'
        ]

    def get_metadata_config(self):
        """Map each metadata key to its label and value getter.

        ``is_latex`` marks values that are already LaTeX and must not be
        converted again.
        """
        return {
            'reference': {
                'label': _('label_reference_number',
                           default='Reference number'),
                'getter': self.get_reference_number},
            'sequence': {
                'label': _('label_sequence_number',
                           default='Sequence number'),
                'getter': self.get_sequence_number},
            'repository': {
                'label': _('label_repository', default='Repository'),
                'getter': self.get_repository_path},
            'title': {
                'label': _('label_title', default='Title'),
                'getter': self.get_title},
            'subdossier_title': {
                'label': _('label_subdossier_title',
                           default='Subdossier Title'),
                'getter': self.get_subdossier_title},
            'responsible': {
                'label': _('label_responsible', default='Responsible'),
                'getter': self.get_responsible},
            'start': {
                'label': _('label_start', default='Start'),
                'getter': self.get_start_date},
            'end': {
                'label': _('label_end', default='End'),
                'getter': self.get_end_date},
            'participants': {
                'label': _('label_participants', default='Participants'),
                'getter': self.get_participants,
                'is_latex': True},
            'state': {
                'label': _('label_review_state', default='State'),
                'getter': self.get_review_state}
        }

    def get_dossier_metadata(self):
        """Render the metadata table body; empty values are skipped."""
        rows = []
        config = self.get_metadata_config()
        for key in self.get_metadata_order():
            row = config.get(key)
            value = row.get('getter')()
            if not value:
                continue
            if not row.get('is_latex'):
                value = self.convert_plain(value)
            label = translate(row.get('label'), context=self.request)
            rows.append('\\bf {} & {} \\\\%%'.format(
                self.convert_plain(label), value))
        return '\n'.join(rows)

    def get_reference_number(self):
        return IReferenceNumber(self.context).get_number()

    def get_sequence_number(self):
        return str(getUtility(ISequenceNumber).get_number(self.context))

    def get_title(self):
        # For a subdossier the "title" row shows the parent's title; the
        # subdossier's own title goes into the subdossier_title row.
        if self.context.is_subdossier():
            return aq_parent(aq_inner(self.context)).Title()
        return self.context.Title()

    def get_subdossier_title(self):
        if self.context.is_subdossier():
            return self.context.Title()
        return None

    def get_review_state(self):
        state = self.context.restrictedTraverse('@@plone_context_state')
        return translate(state.workflow_state(), domain='plone',
                         context=self.request)

    def get_start_date(self):
        return helper.readable_date(self.context,
                                    IDossier(self.context).start)

    def get_end_date(self):
        return helper.readable_date(self.context, IDossier(self.context).end)

    def get_responsible(self):
        return self.context.get_responsible_actor().get_label_with_admin_unit()

    def get_repository_path(self):
        """Returns a reverted, path-like list of parental repository
        folder titles, not including the dossier itself nor the
        repository root, separated by slashes.
        """
        titles = []
        obj = self.context
        while not IPloneSiteRoot.providedBy(obj):
            if IRepositoryFolder.providedBy(obj):
                titles.append(obj.Title())
            obj = aq_parent(aq_inner(obj))
        return ' / '.join(titles)

    def get_participants(self):
        """Render responsible + participants as a LaTeX itemize block."""
        dossier = IDossier(self.context)
        rows = []
        # add the responsible
        rows.append('%s, %s' % (
            readable_ogds_author(None, dossier.responsible),
            translate(_dossier(u'label_responsible', 'Responsible'),
                      context=self.request)))
        # add the participants
        participants = list(
            IParticipationAware(self.context).get_participations())
        for participant in participants:
            rows.append(
                '%s, %s' % (
                    readable_ogds_author(participant, participant.contact),
                    role_list_helper(participant, participant.roles)))
        values = ['{', '\\vspace{-\\baselineskip}\\begin{itemize}']
        for row in self.convert_list(rows):
            values.append('\\item {}'.format(row))
        values.append('\\vspace{-\\baselineskip}\\end{itemize}')
        values.append('}')
        return ' \n'.join(values)

    def get_subdossiers(self):
        # Sort the same way the subdossiers tab is currently sorted.
        sort_on, sort_order = self.get_sorting('subdossiers')
        return self.context.get_subdossiers(sort_on=sort_on,
                                            sort_order=sort_order)

    def get_tasks(self):
        return Task.query.by_container(self.context,
                                       get_current_admin_unit())\
                         .order_by(Task.sequence_number)\
                         .all()

    def get_documents(self):
        """Return catalog brains of the documents and mails in the dossier."""
        sort_on, sort_order = self.get_sorting('documents')
        catalog = getToolByName(self.context, 'portal_catalog')
        query = {
            'path': '/'.join(self.context.getPhysicalPath()),
            'object_provides': [IDocumentSchema.__identifier__,
                                IMail.__identifier__]
        }
        return catalog(query)

    def get_sorting(self, tab_name):
        """Read the sort_on and sort_order attributes from the gridstate,
        for the given tab and returns them"""
        tab = self.context.restrictedTraverse('tabbedview_view-%s' % tab_name)
        tab.table_options = {}
        tab.load_grid_state()
        sort_on = tab.sort_on
        sort_order = 'descending'
        if tab.sort_order == 'asc':
            sort_order = 'ascending'
        return sort_on, sort_order

    def convert_list(self, items):
        """Returns a new list, containing all values in `items`
        converted into LaTeX.
        """
        data = []
        for item in items:
            if item is None:
                item = ''
            # Normalize every value to an utf-8 byte string before
            # converting it to LaTeX.
            if isinstance(item, unicode):
                item = item.encode('utf-8')
            if not isinstance(item, str):
                item = str(item)
            data.append(self.convert_plain(item))
        return data
class SPDOAPI(grok.GlobalUtility):
    """SPDO API.

    Global utility grouping the database operations of the SPDO
    application: protocols, observations, attachments, tramitations and
    notifications.
    """
    grok.provides(ISPDOAPI)

    def getAuthId(self):
        """Return the identifier (username) of the authenticated user."""
        context = getSite()
        request = getRequest()
        portal_state = getMultiAdapter((context, request),
                                       name=u'plone_portal_state')
        return portal_state.member().getId()

    def getAuthPessoa(self):
        """Return the Pessoa object representing the authenticated user."""
        session = Session()
        # NOTE(review): assumes the Plone member id is the person's email
        # address -- confirm against the membership setup.
        return session.query(
            db.Pessoa).filter_by(email=self.getAuthId()).first()

    def getPessoaByEmail(self, email):
        """Return a Pessoa object from its email address."""
        session = Session()
        return session.query(db.Pessoa).filter_by(email=email).first()

    def getProtocoloId(self):
        """Return the value of the protocolo_id request parameter."""
        r = getRequest()
        # The id may come as a raw parameter or as a z3c.form widget value.
        id = r.get('protocolo_id', r.get('form.widgets.protocolo_id', None))
        if id is not None:
            id = int(id)
        return id

    def getProtocolo(self, id=None):
        """Return the Protocolo object with the given id.

        Falls back to the protocolo_id request parameter when no id is
        given.
        """
        session = Session()
        if id is None:
            id = self.getProtocoloId()
        return session.query(db.Protocolo).get(id)

    def getFluxoId(self):
        """Return the value of the fluxo_id request parameter."""
        r = getRequest()
        id = r.get('fluxo_id', r.get('form.widgets.fluxo_id', None))
        if id is not None:
            id = int(id)
        return id

    def getFluxo(self, id=None):
        """Return the Fluxo object with the given id.

        Falls back to the fluxo_id request parameter when no id is given.
        """
        session = Session()
        if id is None:
            id = self.getFluxoId()
        return session.query(db.Fluxo).get(id)

    @log
    def addProtocolo(self, tipoprotocolo, tipodocumento_id, numero_documento,
                     data_emissao, assunto, situacao_id, origem, destino,
                     **kwargs):
        """Add a protocol.

        ``origem`` and ``destino`` are sequences of Pessoa ids recorded as
        the protocol's origin and destination people.  Returns the new
        protocol id.
        """
        session = Session()
        protocolo = db.Protocolo(
            tipoprotocolo=tipoprotocolo,
            tipodocumento_id=tipodocumento_id,
            numero_documento=numero_documento,
            data_emissao=data_emissao,
            assunto=assunto,
            situacao_id=situacao_id,
            usuario=self.getAuthId(),
        )
        session.add(protocolo)
        # Flush so the protocol gets its primary key.
        session.flush()
        protocolo_id = protocolo.id
        for pessoa_id in origem:
            pessoa_origem = db.PessoaOrigem(
                protocolo_id=protocolo_id,
                pessoa_id=pessoa_id,
            )
            session.add(pessoa_origem)
        for pessoa_id in destino:
            pessoa_destino = db.PessoaDestino(
                protocolo_id=protocolo_id,
                pessoa_id=pessoa_id,
            )
            session.add(pessoa_destino)
        session.flush()
        return protocolo_id

    @log
    def addObservacao(self, protocolo_id, texto):
        """Add an observation; no-op when the text is empty.

        Returns the new observation id (None for the no-op case).
        """
        if not texto:
            return
        session = Session()
        observacao = db.Observacao(
            protocolo_id=protocolo_id,
            texto=texto,
            usuario=self.getAuthId(),
        )
        session.add(observacao)
        session.flush()
        return observacao.id

    @log
    def addAnexos(self, protocolo_id, anexos):
        """Add attachments, storing the uploaded files on disk.

        Attachments are recursively propagated to appended protocols.
        Returns the list of created attachment ids.
        """
        p = self.getProtocolo(protocolo_id)
        if p is None or anexos is None:
            return []
        session = Session()
        # Files live under <PATH_ANEXOS>/<scope>/<year>/<protocolo>/<anexo>.
        path_protocolo = os.path.join(PATH_ANEXOS, ScopeID(), str(p.ano),
                                      str(p.id))
        if not os.path.exists(path_protocolo):
            os.makedirs(path_protocolo, 0700)
        ret = []
        for arquivo in anexos:
            anexo = db.Anexo(
                protocolo_id=protocolo_id,
                arquivo=arquivo.filename,
                tamanho=arquivo.getSize(),
                usuario=self.getAuthId(),
            )
            session.add(anexo)
            session.flush()
            ret.append(anexo.id)
            path_anexo = os.path.join(path_protocolo, str(anexo.id))
            with open(path_anexo, 'w') as file_anexo:
                file_anexo.write(arquivo.data)
        # Propagate the attachments to the appended protocols.
        for apenso in p.apenso:
            ret.extend(self.addAnexos(apenso.id, anexos))
        return ret

    def _add_box(self, box, protocolo_id, area_id):
        """Ensure an inbox/outbox row exists for (protocolo_id, area_id)."""
        assert (box in (db.TramiteInbox, db.TramiteOutbox))
        session = Session()
        t = session.query(box).get((protocolo_id, area_id))
        if t is None:
            t = box()
            t.protocolo_id = protocolo_id
            t.area_id = area_id
            tbl = box is db.TramiteInbox and 'tramite_inbox' or 'tramite_outbox'
            t.version = db.nextVersion(tbl, protocolo_id=protocolo_id,
                                       area_id=area_id)
            session.add(t)

    def _del_box(self, box, protocolo_id, area_id):
        """Delete the inbox/outbox row for (protocolo_id, area_id), if any."""
        assert (box in (db.TramiteInbox, db.TramiteOutbox))
        session = Session()
        t = session.query(box).get((protocolo_id, area_id))
        if t is not None:
            session.delete(t)

    @log
    def TramiteInicial(self, protocolo_id):
        """Initial tramitation: put the protocol in its creator's inbox."""
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        tramite = db.Tramite(
            protocolo_id=protocolo_id,
            area_id=area_id_auth,
            data_disponibilizacao=None,
            data_recebimento=datetime.datetime.now(),
            despacho=_(u'Protocolo Criado'),
            usuario=self.getAuthId(),
        )
        session.add(tramite)
        self._add_box(db.TramiteInbox, protocolo_id, area_id_auth)
        session.flush()

    def TramiteEnvio(self, protocolos, areas, despacho):
        """Wrapper around the send tramitation that also sends the
        notification emails.
        """
        protocolos_tramitados = self._TramiteEnvio(protocolos, areas,
                                                   despacho)
        self._EnviaNotificacoes(protocolos_tramitados)

    @log
    def _TramiteEnvio(self, protocolos, areas, despacho, apenso=False):
        """Send tramitation.

        Sends every protocol to every area (both deduplicated), enforcing
        the 'tramitar_envio' security check except on recursive calls for
        appended protocols.  Returns the ids of all tramited protocols,
        appended ones included.
        """
        protocolos = list(set(protocolos))
        areas = list(set(areas))
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        # Avoid sending to the sender's own area.
        if areas.count(area_id_auth):
            areas.pop(areas.index(area_id_auth))
        # Tramitations are flagged as copies when sent to multiple areas.
        copia = len(areas) > 1
        ret = []
        for protocolo_id in protocolos:
            ret.append(protocolo_id)
            protocolo = self.getProtocolo(protocolo_id)
            if not apenso:
                sc = getUtility(ISecurityChecker)
                msg = _(u'Protocolo não disponível para envio. Protocolo: '
                        ) + protocolo.numero
                sc.enforce('tramitar_envio', protocolo_id=protocolo_id,
                           msg=msg)
            for area_id in areas:
                tramite = db.Tramite(
                    protocolo_id=protocolo_id,
                    area_id=area_id,
                    data_disponibilizacao=datetime.datetime.now(),
                    data_recebimento=None,
                    despacho=despacho,
                    usuario=self.getAuthId(),
                    copia=copia,
                    area_id_anterior=area_id_auth,
                )
                session.add(tramite)
            self._add_box(db.TramiteOutbox, protocolo_id, area_id_auth)
            self._del_box(db.TramiteInbox, protocolo_id, area_id_auth)
            session.flush()
            # Propagate the tramitation to the appended protocols.
            for apenso in protocolo.apenso:
                ret.extend(
                    self._TramiteEnvio([apenso.id], areas, despacho,
                                       apenso=True))
        return ret

    def TramiteRecebimento(self, protocolos):
        """Wrapper around the receive tramitation."""
        self._TramiteRecebimento(protocolos)

    @log
    def _TramiteRecebimento(self, protocolos, apenso=False):
        """Receive tramitation.

        Marks the pending tramite addressed to the user's area as received
        and updates the inbox/outbox tables; recurses into appended
        protocols.
        """
        protocolos = list(set(protocolos))
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        ret = []
        for protocolo_id in protocolos:
            ret.append(protocolo_id)
            protocolo = self.getProtocolo(protocolo_id)
            if not apenso:
                sc = getUtility(ISecurityChecker)
                msg = _(
                    u'Protocolo não disponível para recebimento. Protocolo: '
                ) + protocolo.numero
                sc.enforce('tramitar_recebimento', protocolo_id=protocolo_id,
                           msg=msg)
            tramite = session.query(db.Tramite).\
                filter_by(protocolo_id=protocolo_id).\
                filter_by(area_id=area_id_auth).\
                filter_by(data_recebimento=None).first()
            tramite.data_recebimento = datetime.datetime.now()
            self._add_box(db.TramiteInbox, protocolo_id, area_id_auth)
            self._del_box(db.TramiteOutbox, protocolo_id,
                          tramite.area_id_anterior)
            # Propagate the tramitation to the appended protocols.
            for apenso in protocolo.apenso:
                ret.extend(self._TramiteRecebimento([apenso.id], apenso=True))
        return ret

    def TramiteRecuperacao(self, protocolos):
        """Wrapper around the recovery tramitation."""
        self._TramiteRecuperacao(protocolos)

    @log
    def _TramiteRecuperacao(self, protocolos, apenso=False):
        """Recovery tramitation (recover a sent protocol that has not been
        received yet).

        Deletes the pending tramites originated from the user's area,
        restores its inbox entry and recurses into appended protocols.
        """
        protocolos = list(set(protocolos))
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        ret = []
        for protocolo_id in protocolos:
            ret.append(protocolo_id)
            protocolo = self.getProtocolo(protocolo_id)
            if not apenso:
                sc = getUtility(ISecurityChecker)
                msg = _(
                    u'Protocolo não disponível para recuperação. Protocolo: '
                ) + protocolo.numero
                sc.enforce('tramitar_recuperacao', protocolo_id=protocolo_id,
                           msg=msg)
            tramites = session.query(db.Tramite).\
                filter_by(protocolo_id=protocolo_id).\
                filter_by(area_id_anterior=area_id_auth).\
                filter_by(data_recebimento=None).all()
            for tramite in tramites:
                session.delete(tramite)
            self._add_box(db.TramiteInbox, protocolo_id, area_id_auth)
            self._del_box(db.TramiteOutbox, protocolo_id, area_id_auth)
            # Propagate the tramitation to the appended protocols.
            for apenso in protocolo.apenso:
                ret.extend(self._TramiteRecuperacao([apenso.id], apenso=True))
        return ret

    def _getProtocolosData(self, protocolos):
        """Serialize Protocolo objects into plain dicts for the listings."""
        ret = []
        for i in protocolos:
            ret.append({
                'id': i.id,
                'numero': i.numero,
                'data_protocolo': i.data_protocolo,
                'assunto': i.assunto,
                'tipodocumento': i.tipodocumento.nome,
                'situacao': i.situacao.nome,
                'url': url('show-protocolo', id=i.id),
            })
        return ret

    def getProtocolosCriadosRecebidos(self):
        """List the protocols created by or received in the user's area.

        Appended protocols and protocols in a final situation are skipped.
        """
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        items = session.query(db.TramiteInbox).\
            filter_by(area_id=area_id_auth).\
            join(db.Protocolo).\
            filter(db.Protocolo.apenso_id == None).\
            join(db.Situacao).\
            filter(db.Situacao.final == False).all()
        return self._getProtocolosData([i.protocolo for i in items])

    def getProtocolosNaoRecebidos(self):
        """List the protocols not yet received by the user's area."""
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        items = session.query(db.Tramite).\
            filter_by(area_id=area_id_auth).\
            filter_by(data_recebimento=None).\
            join(db.Protocolo).\
            filter(db.Protocolo.apenso_id == None).\
            join(db.Situacao).\
            filter(db.Situacao.final == False).all()
        return self._getProtocolosData([i.protocolo for i in items])

    def getProtocolosEnviados(self):
        """List the protocols sent by the user's area."""
        session = Session()
        area_id_auth = self.getAuthPessoa().area_id
        items = session.query(db.TramiteOutbox).\
            filter_by(area_id=area_id_auth).\
            join(db.Protocolo).\
            filter(db.Protocolo.apenso_id == None).\
            join(db.Situacao).\
            filter(db.Situacao.final == False).all()
        return self._getProtocolosData([i.protocolo for i in items])

    def pesquisaProtocolos(self):
        """Search protocols using the filters present in the request.

        Each filter (type, document type, subject, situation, origin,
        destination, area, inactivity time) is applied only when its
        request parameter is present; results are capped at SEARCH_LIMIT.
        """
        session = Session()
        items = session.query(db.Protocolo)
        tipoprotocolo = getTipoProtocolo()
        if tipoprotocolo is not None:
            items = items.filter_by(tipoprotocolo=tipoprotocolo)
        tipodocumento_id = getTipoDocumento()
        if tipodocumento_id is not None:
            items = items.filter_by(tipodocumento_id=tipodocumento_id)
        assunto = getAssunto()
        if assunto is not None:
            # Match the subject by substring or case-insensitive pattern.
            clause = rdb.or_(db.Protocolo.assunto.contains(assunto),
                             db.Protocolo.assunto.ilike(assunto))
            items = items.filter(clause)
        situacao_id = getSituacao()
        if situacao_id is not None:
            items = items.filter_by(situacao_id=situacao_id)
        origem = getOrigem()
        if origem is not None:
            pessoa1 = aliased(db.Pessoa)
            items = items.join(db.PessoaOrigem).join(pessoa1)
            clause = rdb.or_(pessoa1.nome.contains(origem),
                             pessoa1.nome.ilike(origem))
            items = items.filter(clause)
        destino = getDestino()
        if destino is not None:
            pessoa2 = aliased(db.Pessoa)
            items = items.join(db.PessoaDestino).join(pessoa2)
            clause = rdb.or_(pessoa2.nome.contains(destino),
                             pessoa2.nome.ilike(destino))
            items = items.filter(clause)
        area_id = getArea()
        if area_id is not None:
            inbox1 = aliased(db.TramiteInbox)
            items = items.join(inbox1).filter_by(area_id=area_id)
        tempo_inativo = getTempoInativo()
        if tempo_inativo is not None:
            # Protocols whose inbox entry was last changed before the
            # given number of days ago.
            d = datetime.datetime.now() - datetime.timedelta(
                days=tempo_inativo)
            inbox2 = aliased(db.TramiteInbox)
            items = items.join(inbox2).filter(inbox2.version_date < d)
        return self._getProtocolosData(items.limit(SEARCH_LIMIT).all())

    def _EnviaNotificacoes(self, protocolos):
        """Send notification emails telling the interested people that the
        protocols were tramited.
        """
        protocolos = list(set(protocolos))
        pu = getToolByName(getSite(), 'portal_url')
        portal = pu.getPortalObject()
        mh = portal.MailHost
        session = Session()
        for protocolo_id in protocolos:
            notificacoes = session.query(db.Notificacao).\
                filter_by(protocolo_id=protocolo_id).all()
            for notificacao in notificacoes:
                logger(_(u'Notificando ') + notificacao.pessoa.email)
                # TODO: refactor.  This tramite list could come ready-made
                # from the TramiteEnvio method, avoiding unnecessary
                # notifications on copy tramitations.
                tramites = session.query(db.Tramite).\
                    filter_by(protocolo_id=protocolo_id).\
                    filter_by(data_recebimento=None).all()
                for tramite in tramites:
                    d = {
                        'numero': notificacao.protocolo.numero,
                        'data_tramitacao': tramite.data_disponibilizacao,
                        'assunto': notificacao.protocolo.assunto,
                        'area_origem': tramite.area_anterior.nome,
                        'responsavel_origem':
                            tramite.area_anterior.responsavel[-1].pessoa.nome,
                        'area_destino': tramite.area.nome,
                        'responsavel_destino': tramite.responsavel.pessoa.nome,
                        'situacao': notificacao.protocolo.situacao.nome,
                        'url_protocolo':
                            url('show-protocolo', id=notificacao.protocolo.id),
                        'url_notificacoes': url('list-notificacao')
                    }
                    mfrom = unicode(portal.getProperty('email_from_address'),
                                    'utf-8')
                    mto = notificacao.pessoa.email
                    subject = NOTIFICACAO_ASSUNTO % d
                    body = NOTIFICACAO_MSG % d
                    text = u"From: %s\nTo: %s\nSubject: %s\n\n%s" % (
                        mfrom, mto, subject, body)
                    try:
                        mh.send(text, immediate=True, charset='utf8')
                    except:
                        # NOTE(review): bare except silently swallows every
                        # error (including KeyboardInterrupt); narrowing to
                        # Exception would be safer.
                        logger(_(u'Erro ao enviar a mensagem de notificação.'))
class TaskListingLaTeXView(grok.MultiAdapter, MakoLaTeXView):
    """PDF (LaTeX) view rendering a table with the selected tasks."""
    grok.provides(ILaTeXView)
    grok.adapts(Interface, ITaskListingLayer, ILaTeXLayout)

    template_directories = ['templates']
    template_name = 'tasklisting.tex'

    def __init__(self, *args, **kwargs):
        MakoLaTeXView.__init__(self, *args, **kwargs)
        self.info = None

    def get_render_arguments(self):
        """Provide the template with one pre-rendered LaTeX row per task."""
        self.layout.show_organisation = True
        return {'rows': self.get_rows()}

    def get_rows(self):
        """Return a LaTeX table row for every selected item."""
        selected = get_selected_items(self.context, self.request)
        return [self.get_row_for_item(item) for item in selected]

    def get_data_for_item(self, item):
        """Return the raw cell values of one task, in column order."""
        title = unicode(
            getattr(item, 'Title', getattr(item, 'title', ''))).encode('utf-8')
        reference = unicode(
            getattr(item, 'reference',
                    getattr(item, 'reference_number', ''))).encode('utf-8')
        org_unit = item.get_assigned_org_unit()
        actor = Actor.lookup(item.responsible)
        # The responsible label is prefixed with its org unit.
        responsible_label = org_unit.prefix_label(
            actor.get_label(with_principal=False))
        return [
            item.get_admin_unit().abbreviation,
            unicode(item.sequence_number).encode('utf-8'),
            title,
            task_type_helper(item, item.task_type),
            item.containing_dossier or '',
            reference,
            get_issuer_of_task(item, with_client=True, with_principal=False),
            responsible_label,
            helper.readable_date(item, item.deadline),
            workflow_state(item, item.review_state),
        ]

    def get_row_for_item(self, item):
        """Convert a single task item into one LaTeX table row."""
        return self.convert_list_to_row(self.get_data_for_item(item))

    def convert_list_to_row(self, row):
        """Join the converted cells with the LaTeX column separator."""
        return ' & '.join([self.convert_plain(cell) for cell in row])
class DottedReferenceFormatter(grok.Adapter):
    """Reference number formatter joining number parts with dots."""
    grok.provides(IReferenceNumberFormatter)
    grok.context(Interface)
    grok.name('dotted')

    repository_dossier_seperator = u' / '
    dossier_document_seperator = u' / '
    repository_title_seperator = u'.'

    def complete_number(self, numbers):
        """Assemble the full reference number for the given numbers dict.

        Builds "<site> <repository> / <dossier> / <document>", omitting
        every part that is empty, and returns it utf-8 encoded.
        """
        result = u' '.join(numbers.get('site', []))
        repository = self.repository_number(numbers)
        if repository:
            result = result + u' ' + repository
        dossier = self.dossier_number(numbers)
        if dossier:
            result = result + self.repository_dossier_seperator + dossier
        document = self.document_number(numbers)
        if document:
            result = result + self.dossier_document_seperator + document
        return result.encode('utf-8')

    def repository_number(self, numbers):
        """Return the repository part, dot-separated (e.g. 3.5.7.1.4)."""
        return u'.'.join(numbers.get('repository', []))

    def dossier_number(self, numbers):
        """Return the dossier part, dot-separated (e.g. 3.2.1)."""
        return u'.'.join(numbers.get('dossier', []))

    def document_number(self, numbers):
        """Return the document part, dot-separated."""
        return u'.'.join(numbers.get('document', []))

    def sorter(self, brain_or_value):
        """Sort key turning a reference into a mixed int/str list.

        Numeric chunks are compared as integers so that "10" sorts after
        "2"; non-string values are returned unchanged.
        """
        if isinstance(brain_or_value, basestring):
            value = brain_or_value
        else:
            value = brain_or_value.reference
        if not isinstance(value, str) and not isinstance(value, unicode):
            return value
        key = []
        for token in re.split('[/\., ]', value):
            token = token.strip()
            try:
                key.append(int(token))
            except ValueError:
                key.append(token)
        return key
class ReferenceNumberPrefixAdpater(grok.Adapter):
    """This Adapter handles the whole Reference number prefix assignment.

    Prefix/intid mappings are stored in annotations on the adapted
    container, in separate buckets for dossiers and repository folders.
    """
    grok.provides(IReferenceNumberPrefix)
    grok.context(IFolderish)

    def __init__(self, context):
        self.context = context

    def get_reference_mapping(self, obj=None):
        """Return (creating if missing) the annotation bucket for obj's type."""
        type_key = self.get_type_key(obj)
        annotations = IAnnotations(self.context)
        if not annotations.get(type_key):
            annotations[type_key] = PersistentDict({})
        return annotations[type_key]

    def get_child_mapping(self, obj=None):
        """Return the prefix -> intid mapping, creating it if missing."""
        reference_mapping = self.get_reference_mapping(obj)
        if not reference_mapping.get(CHILD_REF_KEY, None):
            reference_mapping[CHILD_REF_KEY] = PersistentDict()
        return reference_mapping.get(CHILD_REF_KEY, None)

    def get_prefix_mapping(self, obj=None):
        """Return the intid -> prefix mapping, creating it if missing."""
        reference_mapping = self.get_reference_mapping(obj)
        if not reference_mapping.get(PREFIX_REF_KEY, None):
            reference_mapping[PREFIX_REF_KEY] = PersistentDict()
        return reference_mapping.get(PREFIX_REF_KEY, None)

    def get_type_key(self, obj=None):
        """Return the annotation key for obj's type (dossier vs repository)."""
        if obj and IDossierMarker.providedBy(obj):
            return DOSSIER_KEY
        return REPOSITORY_FOLDER_KEY

    def get_first_number(self, obj=None):
        """Return the very first prefix to issue for obj's type."""
        if self.get_type_key(obj) == DOSSIER_KEY:
            return u'1'
        # Repository folders start at a registry-configurable point.
        registry = getUtility(IRegistry)
        proxy = registry.forInterface(IReferenceNumberSettings)
        return proxy.reference_prefix_starting_point

    def get_next_number(self, obj=None):
        """ return the next possible reference number for object at the
        actual context """
        child_mapping = self.get_child_mapping(obj)
        if not child_mapping.keys():
            # It's the first number ever issued
            return self.get_first_number(obj)
        else:
            prefixes_in_use = child_mapping.keys()
            # Sort the list of unicode strings *numerically*
            prefixes_in_use.sort(key=split_string_by_numbers)
            lastnumber = prefixes_in_use[-1]
            # if its a number, we increase the whole number
            try:
                lastnumber = int(lastnumber)
                return unicode(lastnumber + 1)
            except ValueError:
                pass
            # .. otherwise try to increment the last numeric part
            xpr = re.compile('\d+')
            matches = tuple(xpr.finditer(lastnumber))
            if len(matches) > 0:
                span = matches[-1].span()
                subvalue = lastnumber[span[0]:span[1]]
                try:
                    subvalue = int(subvalue)
                except (ValueError, TypeError):
                    pass
                else:
                    subvalue += 1
                    subvalue = unicode(subvalue)
                    lastnumber = lastnumber[:span[0]] + \
                        subvalue + lastnumber[span[1]:]
                return lastnumber
            else:
                # No numeric part at all: nothing sensible to increment.
                return u''

    def get_number(self, obj):
        """Return the reference number registered for ``obj``.

        Returns None when the object has no intid yet or no number was
        ever registered for it.
        """
        intids = getUtility(IIntIds)
        try:
            intid = intids.getId(aq_base(obj))
        # In some cases we might not have an intid yet.
        except KeyError:
            return None
        prefix_mapping = self.get_prefix_mapping(obj)
        if intid in prefix_mapping:
            return prefix_mapping.get(intid)
        return None

    def set_number(self, obj, number=None):
        """Store the number in the Annotations, If number is None, we get
        the standard value
        """
        intids = getUtility(IIntIds)
        intid = intids.getId(aq_base(obj))
        if not number:
            number = self.get_next_number(obj)
        if not isinstance(number, unicode):
            number = unicode(number)
        # Record both directions: intid -> prefix and prefix -> intid.
        self.get_prefix_mapping(obj)[intid] = number
        self.get_child_mapping(obj)[number] = intid
        return number

    def is_valid_number(self, number, obj=None):
        """ check the given reference number for the given context """
        child_mapping = self.get_child_mapping(obj)
        if number not in child_mapping.keys():
            return True
        elif obj is not None:
            # check if the given object has the given number ever
            intids = getUtility(IIntIds)
            intid = intids.getId(aq_base(obj))
            if child_mapping[number] == intid:
                return True
        return False

    def is_prefix_used(self, prefix):
        """ Checks if prefix is in use"""
        if not isinstance(prefix, unicode):
            prefix = unicode(prefix)
        # NOTE(review): uses the literal key 'reference_prefix' while the
        # sibling methods go through PREFIX_REF_KEY -- confirm both name
        # the same bucket.
        return prefix in self.get_reference_mapping(
            )['reference_prefix'].values()

    def get_number_mapping(self):
        """Return prefix/object/active dicts for every issued prefix,
        sorted numerically by prefix.
        """
        merge = []
        intid_util = getUtility(IIntIds)
        for prefix, intid in self.get_child_mapping().iteritems():
            merge.append({
                'prefix': prefix,
                'obj': intid_util.getObject(intid),
                # A prefix is "active" when it is the object's current one.
                'active': (self.get_prefix_mapping()[intid] == prefix)
            })

        def key_sorter(obj):
            key = obj['prefix']
            if (key.isdigit()):
                return int(key)
            return key
        return sorted(merge, key=key_sorter)

    def free_number(self, prefix):
        """Release ``prefix`` so it can be reassigned.

        Raises when the prefix is still the active one for some object.
        """
        if not isinstance(prefix, unicode):
            prefix = unicode(prefix)
        if self.is_prefix_used(prefix):
            raise Exception("Prefix is in use.")
        if prefix in self.get_child_mapping().keys():
            self.get_child_mapping().pop(prefix)
class VersionedContentPublicationWorkflow(grok.Adapter):
    """Publication workflow operations for versioned content.

    Each method delegates to the adapted context and returns True on
    success; approval-related methods raise VersioningError when the
    requested transition is not possible.
    """
    grok.context(IVersionedObject)
    grok.implements(IPublicationWorkflow)
    grok.provides(IPublicationWorkflow)

    # TODO: Silva 3.1 add a method purge_old_versions.

    @silvaconf.protect('silva.ChangeSilvaContent')
    def new_version(self):
        # Create a fresh editable copy of the content.
        self.context.create_copy()
        return True

    @silvaconf.protect('silva.ChangeSilvaContent')
    def request_approval(self, message=None):
        default = ("Request immediate publication of this content. "
                   "(automatically generated message).")
        self.context.request_version_approval(
            default if message is None else message)
        return True

    @silvaconf.protect('silva.ChangeSilvaContent')
    def withdraw_request(self, message=None):
        default = ("Approval was withdrawn "
                   "(automatically generated message).")
        self.context.withdraw_version_approval(
            default if message is None else message)
        return True

    @silvaconf.protect('silva.ApproveSilvaContent')
    def reject_request(self, message=None):
        default = ("Approval was rejected "
                   "(automatically generated message).")
        self.context.reject_version_approval(
            default if message is None else message)
        return True

    @silvaconf.protect('silva.ApproveSilvaContent')
    def revoke_approval(self):
        self.context.unapprove_version()
        return True

    @silvaconf.protect('silva.ApproveSilvaContent')
    def approve(self, time=None):
        """Approve the unapproved version, optionally at ``time``."""
        if self.context.get_unapproved_version() is None:
            raise VersioningError(
                _("There is no unapproved version to approve."),
                self.context)
        if time is not None:
            # Accept a stdlib datetime and convert to Zope DateTime.
            if isinstance(time, datetime):
                time = DateTime(time)
            self.context.set_unapproved_version_publication_datetime(time)
        elif self.context.get_unapproved_version_publication_datetime() is None:
            # No publication date at all: publish now.
            self.context.set_unapproved_version_publication_datetime(DateTime())
        self.context.approve_version()
        return True

    @silvaconf.protect('silva.ApproveSilvaContent')
    def publish(self):
        # Do the same job than approve, but works on closed content as
        # well.
        if not self.context.get_unapproved_version():
            if self.context.is_published():
                raise VersioningError(
                    _("There is no unapproved version to approve."),
                    self.context)
            # Closed content: make a new working copy to publish.
            self.context.create_copy()
        current = self.context.get_unapproved_version_publication_datetime()
        if current is None or current.isFuture():
            # If the publication date is in the future, set it correct to now.
            self.context.set_unapproved_version_publication_datetime(DateTime())
        self.context.approve_version()
        return True

    @silvaconf.protect('silva.ApproveSilvaContent')
    def close(self):
        self.context.close_version()
        return True

    def get_versions(self, sort_attribute='id'):
        """Return contained IVersion objects, sorted by ``sort_attribute``.

        'id' sorts numerically; any other truthy attribute name sorts by
        that attribute; a falsy value skips sorting.
        """
        versions = [candidate for candidate in self.context.objectValues()
                    if IVersion.providedBy(candidate)]
        if sort_attribute == 'id':
            versions.sort(key=lambda version: int(version.id))
        elif sort_attribute:
            versions.sort(key=operator.attrgetter(sort_attribute))
        return versions
class Ingestor(grok.Adapter):
    u'''Ingest content objects into an ingestable folder from a LabCAS
    Solr endpoint, creating, updating, and deleting contained objects so
    they mirror the remote records.'''
    grok.provides(IIngestor)
    grok.context(IIngestableFolder)

    def getContainedObjectInterface(self):
        u'''Return the interface for objects that should be contained in the
        folder that this class adapts.'''
        raise NotImplementedError(
            u'Subclasses must implement getContainedObjectInterface')

    def getTitle(self, predicates):
        u'''Get the DC title from the given ``predicates``. Subclasses may
        override this.'''
        return predicates.get(DC_ID)

    def _checkPredicates(self, predicates):
        u'''Check the given ``predicates`` to see if they make sense for the
        kinds of objects we'll be creating. If so, return the type's
        interface, the factory type info, the predicate map, and the
        object's title; otherwise return (None, None, None, None).'''
        iface = self.getContainedObjectInterface()  # Content type's interface
        fti = iface.getTaggedValue('fti')  # Factory Type Information
        predicateMap = iface.getTaggedValue(
            'predicateMap')  # Mapping RDF predicate to content's field name
        desiredType = iface.getTaggedValue(
            'typeValue')  # RDF type URI that we want
        types = predicates.get(
            iface.getTaggedValue('typeKey'))  # ingest type that we're given
        title = self.getTitle(predicates)  # Get the object's title
        if types:
            if desiredType in types:  # Do we have the right json type?
                return iface, fti, predicateMap, unicode(title)  # Done!
        return None, None, None, None

    def _setValue(self, obj, fti, iface, predicate, predicateMap, values):
        u'''On the object ``obj`` set the field indicated by ``predicate``
        (which we can find via the ``predicateMap``) to the given ``values``.
        We can indicate a problem with the named ``fti`` and can access
        fields by the given ``iface``.
        '''
        catalog = plone.api.portal.get_tool('portal_catalog')
        fieldName, isRef, urlprefix = predicateMap[unicode(predicate)]
        if not values:
            _logger.info(
                u'For type %s we want predicate %s but not given; leaving %s un-set',
                fti, predicate, fieldName)
            return
        field = iface.get(
            fieldName)  # Get the field out of the content interface
        fieldBinding = field.bind(obj)  # Bind that field to the content object
        if isRef:  # Is this a reference field?
            items = [
                i.getObject()
                for i in catalog(subjectURI=[urlprefix + s for s in values])
            ]  # Find matching objects
            if len(items) != len(values):  # Find them all?
                _logger.info(
                    u'For type %s predicate %s linked to %d URIs, but only %d found',
                    fti, predicate, len(values), len(items))
            if schema.interfaces.ICollection.providedBy(
                    field):  # Multi reference?
                fieldBinding.set(obj, items)  # Yes, set them all
            elif len(items) > 0:  # Single reference and we have an item?
                # NOTE(review): this sets the whole list rather than
                # items[0]; preserved as-is — verify against the field type.
                fieldBinding.set(obj, items)
        else:  # It's a non-reference field
            if schema.interfaces.ICollection.providedBy(
                    field):  # Is it multi valued?
                fieldBinding.validate(values)  # Yes, validate all the values
                fieldBinding.set(obj, values)  # And set all the values
            else:  # No, it's not multi valued
                fieldBinding.validate(values[0])  # Validate just one value
                fieldBinding.set(obj, values[0])  # And set just one value

    def createObjects(self, context, uris, statements):
        u'''Create new objects in the ``context`` identified by ``uris`` and
        described in the ``statements``. Return a sequence of those newly
        created objects.

        Subclasses may override this for special ingest needs.'''
        createdObjects = []
        # For each subject URI in the RDF
        for uri in uris:
            # Get the predicates for just that subject
            predicates = statements[uri]
            try:
                # Get the content type's interface, factory type info,
                # mapping of predicates to fields, and the title
                iface, fti, predicateMap, title = self._checkPredicates(
                    predicates)
                if not iface:
                    continue
            except IngestError as ex:
                _logger.exception(u'Ingest error on %s: %r; skipping %s',
                                  u'/'.join(context.getPhysicalPath()),
                                  ex, uri)
                continue
            # Create a brand new content object
            obj = createContentInContainer(context, fti, title=title,
                                           subjectURI=unicode(uri))
            # Now set its fields
            for predicate in predicateMap:
                values = predicates.get(predicate)  # Get the values
                if not values:
                    continue  # Skip if empty
                if isinstance(values, basestring):
                    values = [values]
                values = [unicode(i) for i in values
                          ]  # Convert Literal+URIRefs to unicode
                try:
                    self._setValue(obj, fti, iface, predicate, predicateMap,
                                   values)
                except schema.ValidationError:
                    _logger.exception(
                        u'Data "%r" for field %s invalid; skipping',
                        values, predicate)
                    continue
            publish(obj)
            obj.reindexObject()
            createdObjects.append(obj)
        return createdObjects

    def updateObjects(self, context, uris, brains, statements):
        u'''Update those objects in ``context`` that have matching ``uris``
        by using the ``statements`` to determine what needs updating. To
        quickly find those objects, there's a lookup table ``brains`` that
        maps from subject URI to a portal catalog brain.

        Subclasses may override this for special ingest needs.'''
        updatedObjects = []                         # Start w/no updated objs
        for uri in uris:                            # For each subject URI
            brain = brains[uri]                     # Get matching brain
            obj = brain.getObject()                 # Get matching object
            predicates = statements[uri]            # Subject-specific preds
            objectUpdated = False                   # Assume no update
            iface, fti, predicateMap, title = self._checkPredicates(
                predicates)                         # Get usual suspects
            if not iface:
                # FIX: _checkPredicates returns all Nones on a type
                # mismatch; createObjects already skipped that case, but
                # here it crashed with AttributeError on
                # predicateMap.iteritems(). Skip the object instead.
                continue
            for predicate, (fieldName, isRef,
                            urlprefix) in predicateMap.iteritems():
                field = iface.get(fieldName)        # Get the field
                fieldBinding = field.bind(obj)      # Bind it to the obj
                newValues = predicates.get(rdflib.URIRef(predicate),
                                           [])      # Get new values
                if isinstance(newValues, basestring):
                    newValues = [newValues]
                newValues = [unicode(i) for i in newValues]
                if isRef:                           # Is this a reference?
                    currentRefs = [
                        i.subjectURI for i in fieldBinding.get(obj)
                    ]                               # Get cur ref'd sub URIs
                    currentRefs.sort()
                    newValues.sort()
                    if currentRefs != newValues:    # Any change?
                        self._setValue(obj, fti, iface, predicate,
                                       predicateMap, newValues)
                        objectUpdated = True
                else:                               # Literal field
                    currentValues = fieldBinding.get(obj)
                    if schema.interfaces.ICollection.providedBy(
                            field):                 # Multi-valued field?
                        if currentValues != newValues:
                            self._setValue(obj, fti, iface, predicate,
                                           predicateMap, newValues)
                            objectUpdated = True
                    else:                           # Single-valued field
                        # FIX: newValues may be empty (predicate absent);
                        # the original indexed newValues[0] unconditionally
                        # and raised IndexError. _setValue handles the
                        # empty list by logging and leaving the field
                        # un-set.
                        newValue = newValues[0] if newValues else None
                        if currentValues != newValue:
                            self._setValue(obj, fti, iface, predicate,
                                           predicateMap, newValues)
                            objectUpdated = True
            if objectUpdated:                       # Did we update the obj?
                obj.reindexObject()                 # Yep, reindex it
                updatedObjects.append(obj)
        return updatedObjects

    def ingest(self):
        u'''Synchronize the adapted folder with the remote LabCAS data:
        create objects for new records, update changed ones, delete those
        no longer present. Returns an IngestResults.'''
        context = aq_inner(self.context)            # Get our container
        if not context.ingestEnabled:
            raise IngestDisabled(context)           # Do we ingest?
        catalog = plone.api.portal.get_tool(
            'portal_catalog')                       # Get the catalog
        statements = self._readLabcasSolr(
            context.labcasurl,
            context.labcas_sourceurl_prefix)        # Read the remote records
        # Find out what we currently contain
        results = catalog(
            object_provides=IScienceDataObject.__identifier__,
            path=dict(query='/'.join(context.getPhysicalPath()), depth=1))
        # Make a lookup table from those current brains' subjectURIs to the
        # brains
        existingBrains = {}
        for i in results:
            uri = i['subjectURI'].decode('utf-8')
            existingBrains[rdflib.URIRef(uri)] = i
        existingURIs = set(existingBrains.keys()
                           )  # Set of currently existing URIs in the context
        statementURIs = set(
            statements.keys())  # Set of URIs in the newly read records
        newURIs = statementURIs - existingURIs  # URIs for brand new objects
        deadURIs = existingURIs - statementURIs  # URIs for objects to delete
        updateURIs = statementURIs & existingURIs  # URIs that may need update
        newObjects = self.createObjects(context, newURIs, statements)
        updatedObjects = self.updateObjects(context, updateURIs,
                                            existingBrains, statements)
        context.manage_delObjects(
            [existingBrains[i]['id'].decode('utf-8') for i in deadURIs])
        return IngestResults(newObjects, updatedObjects, deadURIs)

    def _readLabcasSolr(self, labcasurl, labcas_sourceurl_prefix):
        u'''Query all documents from the LabCAS Solr endpoint at
        ``labcasurl`` and return a dictionary mapping each document's id to
        its Solr document, with an added ``sourceurl`` field built from
        ``labcas_sourceurl_prefix`` + the id.

        (Docstring fixed: the original described RDF statement parsing,
        but this method talks to Solr.)'''
        solr_conn = Solr(base_url=labcasurl, version=4)
        solr_query = {'q': '*:*'}
        solr_response = solr_conn.search(**solr_query)
        results = {}
        for obj in solr_response.documents:
            obj['sourceurl'] = labcas_sourceurl_prefix + obj.get("id")
            results[obj.get("id")] = obj
        return results
class DefaultLayout(CustomizableLayout, grok.MultiAdapter):
    """Opengever default layout.
    """
    grok.adapts(Interface, Interface, IBuilder)
    grok.provides(ILaTeXLayout)

    template_directories = ['resources']
    template_name = 'default_layout.tex'

    def __init__(self, context, request, builder):
        super(DefaultLayout, self).__init__(context, request, builder)
        # Optional letterhead elements, all disabled by default.
        self.show_contact = False
        self.show_logo = False
        self.show_organisation = False

    def before_render_hook(self):
        """Register the LaTeX packages this layout requires."""
        self.use_package('inputenc', options='utf8', append_options=False)
        self.use_package('ae,aecompl')
        self.use_package('babel', 'ngerman', append_options=False)
        self.use_package('fancyhdr')
        # Page margins, one geometry option per call.
        for margin in ('left=35mm', 'right=10mm', 'top=55mm',
                       'bottom=10.5mm'):
            self.use_package('geometry', margin)
        self.use_package('graphicx')
        self.use_package('lastpage')
        self.use_package('paralist', 'neveradjust', append_options=False)
        self.use_package('textcomp')
        for textpos_option in ('absolute', 'overlay'):
            self.use_package('textpos', textpos_option)
        self.use_package('titlesec', 'compact')
        self.use_package('wrapfig')
        self.use_package('array,supertabular')
        self.use_package('setspace')

    def get_render_arguments(self):
        """Collect the template variables for rendering."""
        owner = self.get_owner()
        # Fall back to a single space when no owner / no phone number.
        phone = owner.getProperty('phone_number', ' ') if owner else ' '
        convert = self.get_converter().convert
        return {
            'client_title': convert(self.get_client_title()),
            'member_phone': convert(phone),
            'show_contact': self.show_contact,
            'show_logo': self.show_logo,
            'show_organisation': self.show_organisation,
            'location': convert(self.get_location()),
        }

    def get_client_title(self):
        """Client title from the plone.registry configuration."""
        return getUtility(IRegistry).forInterface(IBaseClientID).client_title

    def get_location(self):
        """Location string from the LaTeX settings registry record."""
        return getUtility(IRegistry).forInterface(ILaTeXSettings).location

    def get_owner(self):
        """The member object of the content's creator, or None."""
        membership = getToolByName(self.context, 'portal_membership')
        return membership.getMemberById(self.context.Creator())
class Contactable(grok.Adapter):
    """Base adapter class for objects that have the IContactDetails
    behavior fields"""
    grok.provides(IContactable)
    grok.context(IContactContent)
    grok.baseclass()

    @property
    def person(self):
        return None

    @property
    def position(self):
        return None

    @property
    def organizations(self):
        return []

    def _get_contactables(self):
        """Build the ordered fallback chain of IContactDetails providers.

        For each contact information (email, phone, ...) we use the one
        of the first object in this list which has this information.
        """
        candidates = [self.context, self.person, self.position]
        candidates.extend(reversed(self.organizations))
        chain = []
        for candidate in candidates:
            if candidate is None:
                continue
            if not IContactDetails.providedBy(candidate):
                continue
            if candidate in chain:
                continue
            chain.append(candidate)
        return chain

    def _get_address(self, contactables):
        """First non-empty address in the chain, skipping objects that
        delegate to their parent's address. Empty dict when none found."""
        for candidate in contactables:
            unwrapped = aq_base(candidate)
            if unwrapped.use_parent_address is True:
                continue
            address = get_address(unwrapped)
            if address:
                return address
        return {}

    def get_contact_details(self):
        """Return a dict of contact fields resolved through the fallback
        chain; missing fields map to the empty string."""
        fields = ['email', 'phone', 'cell_phone', 'fax', 'website',
                  'im_handle']
        chain = self._get_contactables()
        details = {}
        for field in fields:
            # Default, overwritten by the first carrier of the field.
            details[field] = ''
            for candidate in chain:
                value = getattr(aq_base(candidate), field, '') or ''
                if value:
                    details[field] = value
                    break
        details['address'] = self._get_address(chain)
        return details

    def get_parent_address(self):
        """Render the fallback address as HTML (unicode)."""
        chain = self._get_contactables()
        url = self.context.REQUEST.URL
        # we don't want self.context address if the object is already created
        if '/++add++' not in url and '/@@add' not in url:
            chain.remove(self.context)
        address = self._get_address(chain)
        if not address:
            # Very important to return unicode here, RichTextWidget needs it.
            return u''
        template = ViewPageTemplateFile(
            os.path.join(TEMPLATES_DIR, 'address.pt'))
        self.request = getRequest()
        return template(self, address)
class TimeLineContent(grok.Adapter):
    """Adapt dexterity content to the data shape the timeline expects."""
    grok.provides(ITimelineContent)
    grok.context(IDexterityContent)

    @staticmethod
    def _as_zope_datetime(value):
        # Convert a datetime-ish value (year/month/day/hour/minute
        # attributes) to a Zope DateTime; falsy values pass through
        # unchanged.
        if not value:
            return value
        return DateTime(value.year, value.month, value.day,
                        value.hour, value.minute)

    def date(self):
        """Start date for the timeline entry, as a Zope DateTime (or a
        falsy value when none applies)."""
        context = self.context
        behavior = ITimelineBehavior(context, None)
        # Eventish items use the event start
        if hasattr(context, 'start_date'):
            when = context.start_date
        elif behavior and behavior.use_pub_date:
            # The DCFieldProperty is already a DateTime
            return self.context.effective_date
        elif behavior:
            when = behavior.timeline_date
        else:
            when = None
        return self._as_zope_datetime(when)

    def end(self):
        """End date for the timeline entry, symmetric with date()."""
        context = self.context
        behavior = ITimelineBehavior(context, None)
        # Eventish items use the event end
        if hasattr(context, 'end_date'):
            when = context.end_date
        elif behavior and behavior.use_pub_date:
            # The DCFieldProperty is already a DateTime
            return self.context.expiration_date
        elif behavior:
            when = behavior.timeline_end
        else:
            when = None
        return self._as_zope_datetime(when)

    def data(self, ignore_date=False):
        """Build the timeline JSON payload for this content item.

        Returns None when a date is required but missing.
        """
        context = self.context
        behavior = ITimelineBehavior(context)
        bce = behavior.bce_year
        year_only = behavior.year_only
        data = {
            "headline": context.Title(),
            "text": "<p>%s</p>" % context.Description(),
        }
        if not ignore_date:
            start = self.date()
            if not start:
                # No usable date: caller gets None.
                return
            data['startDate'] = format_datetime(start, year_only)
            if bce:
                data['startDate'] = '-' + data['startDate']
            finish = self.end()
            if finish:
                data['endDate'] = format_datetime(finish, year_only)
                if bce:
                    data['endDate'] = '-' + data['endDate']
        subject = context.Subject()
        if subject and behavior.show_tag:
            # Take the first keyword, somewhat arbitrarily
            data['tag'] = subject[0]
        data['asset'] = {}
        # Links
        if hasattr(context, 'remoteUrl'):
            data['asset']['media'] = context.remoteUrl.encode('utf-8')
        elif not ignore_date:
            # Include a url to the content
            url = context.absolute_url()
            site_properties = getToolByName(
                context, 'portal_properties').site_properties
            if (context.portal_type in
                    site_properties.typesUseViewActionInListings):
                url = url + '/view'
            data['text'] = (data['text'] + ' <a href="%s">more …</a>' % url)
        image_url = get_image_url(self.context)
        # Items with Images
        if image_url:
            data['asset']['thumbnail'] = get_image_url(self.context, 'icon')
            if 'media' not in data['asset']:
                data['asset']['media'] = image_url
        # News-like items
        if 'asset' in data and hasattr(context, 'image_caption'):
            data['asset']['caption'] = (context.image_caption.encode('utf-8'))
        # TODO: Asset 'credit'?
        return data
    # NOTE(review): this chunk starts mid-method — the enclosing ``def``
    # (and ``try:``) are outside the visible source, so indentation below
    # is a best-guess reconstruction of the fragment as found.
                for collection, storageTypes in collectionTypes.iteritems():
                    for storage, organs in storageTypes.iteritems():
                        for organ, pptIDs in organs.iteritems():
                            # Specimen counts are summed per participant.
                            totalSpecimens = sum(pptIDs.values())
                            totalPpts = len(pptIDs)
                            cases, controls = totalPpts, 0  # FIXME: but how? No idea how to compute # cases or # controls from ERNE data
                            records.append(ERNESpecimenSummary(
                                storage, totalSpecimens, cases, controls,
                                organ, withCancer, available, email,
                                protocolID, collection
                            ))
                return records
        except urllib2.HTTPError, ex:
            # Best-effort: a failed ERNE fetch is logged and whatever was
            # accumulated so far is returned.
            _logger.info(
                'Ignoring failed attempt to get specimens from %s via %s: %r',
                erneID, erneWS, ex)
            try:
                con.close()
            except (IOError, AttributeError):
                pass
            return records

    grok.provides(IJsonGenerator)
    grok.context(ISpecimenSummarizerGenerator)

    def generateJson(self):
        """Serialize specimen summaries for the configured sites as JSON."""
        context = aq_inner(self.context)
        erneWS = context.queryDataSource
        specimenCount = {}
        if erneWS:
            for siteID, erneID in SITES.items():
                # NOTE(review): specimenCount is overwritten on every
                # iteration, so only the LAST site's result is returned —
                # verify whether results should be merged per site instead.
                specimenCount = self.getSpecimens(erneID, erneWS)
        # C'est tout.
        return jsonlib.write(specimenCount)
class Contactable(grok.Adapter):
    """Base adapter class for contact content types with fallback system"""
    grok.provides(IContactable)
    grok.context(IContactContent)
    grok.baseclass()

    @property
    def person(self):
        return None

    @property
    def held_position(self):
        return None

    @property
    def position(self):
        return None

    @property
    def organizations(self):
        return []

    def _get_contactables(self):
        """Build the ordered fallback chain of IContactDetails providers.

        For each contact information (email, phone, ...) we use the one
        of the first object in this list which has this information. The
        person is inserted only when the registry does not mark personal
        contact details as private.
        """
        candidates = [self.context, self.held_position, self.position]
        candidates.extend(reversed(self.organizations))
        details_private = api.portal.get_registry_record(
            name='person_contact_details_private',
            interface=IContactCoreParameters)
        if not details_private:
            candidates.insert(2, self.person)
        chain = []
        for candidate in candidates:
            if candidate is None:
                continue
            if not IContactDetails.providedBy(candidate):
                continue
            if candidate in chain:
                continue
            chain.append(candidate)
        return chain

    def _get_address(self, contactables):
        """First non-empty address found along the chain, else {}."""
        for candidate in contactables:
            address = get_address(candidate)
            if address:
                return address
        return {}

    def get_contact_details(self, keys=(), fallback=True):
        """Resolve contact fields through the fallback chain.

        ``keys`` restricts which fields are returned (empty = all);
        ``fallback=False`` looks at the adapted context only.
        """
        if keys:
            fields = [key for key in keys if key != 'address']
        else:
            fields = CONTACT_DETAILS_FIELDS
        chain = self._get_contactables() if fallback else [self.context]
        details = {}
        for field in fields:
            # Default, overwritten by the first carrier of the field.
            details[field] = ''
            for candidate in chain:
                value = getattr(aq_base(candidate), field, '') or ''
                if value:
                    details[field] = value
                    break
        if (not keys) or ('address' in keys):
            details['address'] = self._get_address(chain)
        if 'website' in details:
            details['website'] = get_valid_url(details['website'])
        return details

    def get_parent_address(self):
        """Render the fallback address as HTML (unicode)."""
        chain = self._get_contactables()
        url = self.context.REQUEST.URL
        # we don't want self.context address if the object is already created
        if '/++add++' not in url and '/@@add' not in url:
            chain.remove(self.context)
        address = self._get_address(chain)
        if not address:
            # Very important to return unicode here, RichTextWidget needs it.
            return u''
        template = ViewPageTemplateFile(
            os.path.join(TEMPLATES_DIR, 'address.pt'))
        self.request = getRequest()
        return template(self, address)