def test_relative_object_path(self):
    """Exercise relative_object_path on same-object, descendant and
    out-of-tree arguments, using the subsite fixture."""
    from naaya.core.zope2util import relative_object_path
    subsite = self.portal.info.subsite
    expectations = [
        ((subsite, subsite), ''),
        ((subsite, self.portal), 'info/subsite'),
        ((subsite.info, subsite), 'info'),
        ((subsite.info, self.portal), 'info/subsite/info'),
    ]
    for args, expected in expectations:
        self.assertEqual(relative_object_path(*args), expected)
    # an object that is not inside the base must raise ValueError
    self.assertRaises(ValueError, relative_object_path, self.portal, subsite)
def _update(self, portal):
    """Make sure each portal Administrator has an 'administrative'
    notification subscription at every location where the role is held.

    Returns True when the pass over *portal* completes.
    """
    notif_tool = portal.getNotificationTool()
    auth_tool = portal.getAuthenticationTool()
    admins = auth_tool.search_users('', role='Administrator', rkey=0, skey='name', all_users=True, location='_all_')
    self.log.debug('Started update in %s' % portal.getId())
    for admin in admins:
        for role in admin.roles:
            # role[0] holds the role names; role[1] appears to be the
            # location object the roles apply to -- confirm against
            # search_users' return shape
            if 'Administrator' in role[0]:
                user_id = admin.user_id
                own_site_location = path_in_site(role[1])
                this_site_location = relative_object_path(role[1], portal)
                # when the two paths disagree, the location lives under a
                # different (sub)portal than the one being updated
                if own_site_location != this_site_location:
                    self.log.debug('Location %s is probably in a subportal' % own_site_location)
                    continue
                obj = portal.restrictedTraverse(this_site_location)
                # skip if an equivalent subscription already exists
                if match_account_subscription(
                        ISubscriptionContainer(obj),
                        user_id, 'administrative', 'en'):
                    self.log.debug('Subscription for user %s already present '
                                   'in location %s' %(user_id, this_site_location or '/'))
                else:
                    notif_tool.add_account_subscription(user_id,
                                                        this_site_location,
                                                        'administrative', 'en', [])
                    self.log.debug('Subscription added for user %s in location %s'
                                   %(user_id, this_site_location or '/'))
    return True
def set_acl_for_user(ob, user):
    """Grant *user* the 'Viewer' role on *ob* (path relative to the
    enclosing ``site``) via the LDAP source the user belongs to."""
    wanted_title = auth_tool.getUserSource(user)
    location = relative_object_path(ob, site)
    for candidate in auth_tool.getSources():
        if candidate.title != wanted_title:
            continue
        candidate.addUserRoles(str(user), ['Viewer'], location)
        break
def admin_add_account_subscription(self, REQUEST, user_id, location, notif_type, lang): """ """ if location == '/': location = '' ob = self.getSite().unrestrictedTraverse(location) location = relative_object_path(ob, self.getSite()) try: self.add_account_subscription(user_id.strip(), location, notif_type, lang) except ValueError, msg: self.setSessionErrorsTrans(msg)
def _set_items_view_permissions(self):
    """Restrict viewing on contained items, except the one pointed at by
    ``agenda_pointer``; when ``restrict_items`` is off, unrestrict all."""
    if not getattr(self, 'restrict_items', True):
        for child in self.objectValues():
            _unrestrict_meeting_item_view(child)
        return
    agenda_pointer = str(getattr(self, 'agenda_pointer', ''))
    site = self.getSite()
    for child in self.objectValues():
        # the agenda item stays publicly visible; everything else is locked
        if relative_object_path(child, site) == agenda_pointer:
            _unrestrict_meeting_item_view(child)
        else:
            _restrict_meeting_item_view(child)
def admin_get_subscriptions(self, user_query=''):
    """Yield one info dict per subscription in the site, optionally
    filtered to users whose string form contains *user_query*
    (case-insensitive).

    Fix: the query used to be interpolated unescaped into a regex
    ('.*%s.*'), so input like '[' or '(' raised re.error; it is now
    treated as a literal substring via re.escape.
    """
    user_query = user_query.strip()
    # hoisted: the site is loop-invariant (was re-fetched per item)
    site = self.getSite()
    if user_query:
        pattern = re.compile(re.escape(user_query), re.IGNORECASE)
    else:
        pattern = None
    for obj, sub_id, subscription in utils.walk_subscriptions(site):
        user = subscription.to_string(obj)
        if pattern is None or pattern.search(user):
            yield {
                'user': user,
                'location': relative_object_path(obj, site),
                'sub_id': sub_id,
                'lang': subscription.lang,
                'notif_type': subscription.notif_type,
                'content_types': getattr(subscription, 'content_types', []),
            }
def admin_add_account_subscription(self, REQUEST, user_id, location, notif_type, lang, content_types=[]): """ """ #Even if some content types were selected (by turning off javascript) #they should be ignored, no filtering in administrative notifications if notif_type == 'administrative': content_types = [] if location == '/': location = '' ob = self.getSite().unrestrictedTraverse(location) location = relative_object_path(ob, self.getSite()) try: self.add_account_subscription(user_id.strip(), location, notif_type, lang, content_types) except ValueError, msg: self.setSessionErrorsTrans(msg)
def add_roles_from_ob(ob, is_brain=False):
    """Collect LDAP-group local roles from *ob* into the enclosing
    ``groups_roles_map`` (closure variable), keyed by group id.

    *ob* is either a catalog brain (``is_brain=True``) carrying the
    ``ny_ldap_group_roles`` metadata, or a full object; brains without
    usable metadata fall back to the real object.
    """
    if is_brain:
        # MV -- presumably the catalog's missing-value marker; confirm
        ob_roles = getattr(ob, "ny_ldap_group_roles", MV)
        if not ob.has_key("ny_ldap_group_roles") or ob_roles is MV:
            # catalog field (meta) not created or missing value in brain
            is_brain = False
            ob = ob.getObject()
    if not is_brain:
        try:
            ob_roles = ob.acl_satellite.getAllLocalRoles()
        except AttributeError:
            return # looks like we found a broken object
    elif ob_roles:
        # brain with roles, get the object
        ob = ob.getObject()
    for group_id, group_roles in ob_roles.iteritems():
        all_group_roles = groups_roles_map.setdefault(group_id, [])
        for role in group_roles:
            # record where each role applies, as a site-relative path
            location = {"ob": ob, "path": relative_object_path(ob, site), "is_site": ob == site}
            all_group_roles.append((role, location))
def add_notifications_from_circa_export(site, filepath, notif_type):
    """Import account subscriptions of type *notif_type* into *site*
    from a CIRCA notifications export file at *filepath*.

    Users that cannot be resolved, paths that cannot be traversed, and
    subscriptions rejected by the notification tool are logged and
    skipped; the import continues with the next entry.
    """
    logger.debug('start importing notifications')
    from notifications_extract import get_notifications_mapping
    dbfile = open(filepath, 'rb')
    notifications, not_matched = get_notifications_mapping(dbfile)
    dbfile.close()
    notif_tool = site.getNotificationTool()
    auth_tool = site.getAuthenticationTool()
    for user_id, values in notifications.items():
        user = auth_tool.get_user_with_userid(user_id)
        if user is None:
            logger.error('User not found: %s', user_id)
            continue
        for val in values:
            # notif_type 3 marks a disabled subscription in the export
            # (per the log message) -- do not import it
            if val['notif_type'] == 3:
                logger.info('Ignoring (turned off) subscription for user %s at location %s', user_id, val['path'])
                continue
            # exported paths are re-rooted under 'library/' in the site
            val['path'] = val['path'].strip('/')
            if val['path'] not in ('', '/'):
                val['path'] = "library/%s" % val['path']
            try:
                ob = site.unrestrictedTraverse(val['path'])
            except KeyError:
                logger.error("Couldn't find object at path: %s", val['path'])
                continue
            location = relative_object_path(ob, site)
            try:
                notif_tool.add_account_subscription(user_id, location, notif_type, 'en')
            except ValueError, msg:
                logger.error("Couldn't add subscription for user %s at location %s: %s", user_id, val['path'], msg)
                continue
            logger.info('Added subscription for user %s at location %s', user_id, val['path'])
def add_roles_from_ob(ob, is_brain=False):
    """Collect LDAP-group local roles from *ob* into the enclosing
    ``groups_roles_map`` (closure variable), keyed by group id.

    *ob* is either a catalog brain (``is_brain=True``) carrying the
    ``ny_ldap_group_roles`` metadata, or a full object; brains without
    usable metadata fall back to the real object.
    """
    if is_brain:
        # MV -- presumably the catalog's missing-value marker; confirm
        ob_roles = getattr(ob, 'ny_ldap_group_roles', MV)
        if not ob.has_key('ny_ldap_group_roles') or ob_roles is MV:
            # catalog field (meta) not created or missing value in brain
            is_brain = False
            ob = ob.getObject()
    if not is_brain:
        try:
            ob_roles = ob.acl_satellite.getAllLocalRoles()
        except AttributeError:
            return # looks like we found a broken object
    elif ob_roles:
        # brain with roles, get the object
        ob = ob.getObject()
    for group_id, group_roles in ob_roles.iteritems():
        all_group_roles = groups_roles_map.setdefault(group_id, [])
        for role in group_roles:
            # record where each role applies, as a site-relative path
            location = {
                'ob': ob,
                'path': relative_object_path(ob, site),
                'is_site': ob == site,
            }
            all_group_roles.append((role, location))
def _update(self, portal):
    """Make sure each portal Administrator has an 'administrative'
    notification subscription at every location where the role is held.

    Returns True when the pass over *portal* completes.
    """
    notif_tool = portal.getNotificationTool()
    auth_tool = portal.getAuthenticationTool()
    admins = auth_tool.search_users('', role='Administrator', rkey=0,
                                    skey='name', all_users=True,
                                    location='_all_')
    self.log.debug('Started update in %s' % portal.getId())
    for admin in admins:
        for role in admin.roles:
            # role[0] holds the role names; role[1] appears to be the
            # location object the roles apply to -- confirm against
            # search_users' return shape
            if 'Administrator' in role[0]:
                user_id = admin.user_id
                own_site_location = path_in_site(role[1])
                this_site_location = relative_object_path(role[1], portal)
                # when the two paths disagree, the location lives under a
                # different (sub)portal than the one being updated
                if own_site_location != this_site_location:
                    self.log.debug(
                        'Location %s is probably in a subportal'
                        % own_site_location)
                    continue
                obj = portal.restrictedTraverse(this_site_location)
                # skip if an equivalent subscription already exists
                if match_account_subscription(ISubscriptionContainer(obj),
                                              user_id, 'administrative',
                                              'en'):
                    self.log.debug(
                        'Subscription for user %s already present '
                        'in location %s'
                        % (user_id, this_site_location or '/'))
                else:
                    notif_tool.add_account_subscription(
                        user_id, this_site_location, 'administrative',
                        'en', [])
                    self.log.debug(
                        'Subscription added for user %s in location %s'
                        % (user_id, this_site_location or '/'))
    return True
def add_roles_from_ob(ob, is_brain=False):
    """Collect LDAP-group local roles from *ob* into the enclosing
    ``groups_roles_map`` (closure variable), keyed by group id.

    *ob* is either a catalog brain (``is_brain=True``) carrying the
    ``ny_ldap_group_roles`` metadata, or a full object; brains without
    usable metadata fall back to the real object.
    """
    if is_brain:
        # private sentinel distinguishes "attribute absent" from any
        # real metadata value; MV is additionally treated as missing
        _marker = object()
        ob_roles = getattr(ob, 'ny_ldap_group_roles', _marker)
        if (ob_roles is _marker) or (ob_roles is MV):
            # catalog field (meta) not created or missing brain value
            is_brain = False
            ob = ob.getObject()
    if not is_brain:
        try:
            ob_roles = ob.acl_satellite.getAllLocalRoles()
        except AttributeError:
            return # looks like we found a broken object
    elif ob_roles:
        # brain with roles, get the object
        ob = ob.getObject()
    for group_id, group_roles in ob_roles.iteritems():
        all_group_roles = groups_roles_map.setdefault(group_id, [])
        for role in group_roles:
            # record where each role applies, as a site-relative path
            location = {
                'ob': ob,
                'path': relative_object_path(ob, site),
                'is_site': ob == site,
            }
            all_group_roles.append((role, location))
def add_acls_from_circa_export(site, filepath):
    """Import view-permission ACLs into *site* from a CIRCA export file.

    Entries ending in '@circa' are user ids; entries starting with '__'
    are numeric CIRCA profiles mapped to Zope roles via ROLES_MAPPING.
    NOTE(review): the actual permission grants (set_acl_for_user /
    set_acl_for_roles) are commented out below -- the current behavior
    is log-only.
    """
    logger.debug('start importing acls')
    from AccessControl.Permissions import view
    from AccessControl.Permission import Permission
    from acl_extract import get_acl_mapping
    auth_tool = site.getAuthenticationTool()

    def set_acl_for_user(ob, user):
        # grant 'Viewer' on ob through the user's own LDAP source
        ldap_source_title = auth_tool.getUserSource(user)
        location = relative_object_path(ob, site)
        for source in auth_tool.getSources():
            if source.title == ldap_source_title:
                ldap_source = source
                ldap_source.addUserRoles(str(user), ['Viewer'], location)
                break

    def set_acl_for_roles(ob, roles):
        # merge *roles* into the existing 'view' permission roles,
        # preserving the tuple/list type (tuple = not acquired in Zope)
        permission_object = Permission(view, (), ob)
        current_roles = permission_object.getRoles()
        is_tuple = isinstance(current_roles, tuple)
        current_roles = list(current_roles)
        new_roles = set(roles + current_roles)
        if is_tuple:
            new_roles = tuple(new_roles)
        else:
            new_roles = list(new_roles)
        permission_object.setRoles(new_roles)

    # CIRCA profile number -> Zope role; extended by compute_roles_mapping
    ROLES_MAPPING = {'0': 'Administrator', '1': 'Viewer', '2': 'Contributor', '3': 'Viewer', '4': 'Viewer'}

    def compute_roles_mapping(acls):
        """ Computes the ROLES_MAPPING variable based on:
        https://svn.eionet.europa.eu/projects/Zope/ticket/4095#comment:10
        The last 2 roles are 'Anonymous' and 'Authenticated': any other
        roles are added before those """
        non_userids = []
        for values in acls.values():
            non_userids.extend([val for val in values if not val.endswith('@circa')])
        roles = [val[2:] for val in non_userids if val.startswith('__')]
        roles = list(set(roles)) # remove duplicates
        roles = map(int, roles) # convert to integers
        max_role = max(roles)
        max_role = max(max_role, 6) # max role should be at least 6
        ROLES_MAPPING[str(max_role)] = 'Authenticated'
        ROLES_MAPPING[str(max_role - 1)] = 'Anonymous'
        # everything between the fixed profiles and the last two is Viewer
        for i in xrange(5, max_role - 1):
            ROLES_MAPPING[str(i)] = 'Viewer'

    def get_role(circa_profile):
        # unknown profiles fall back to the least-privileged role
        DEFAULT_ROLE = 'Viewer'
        if circa_profile in ROLES_MAPPING:
            return ROLES_MAPPING[circa_profile]
        return DEFAULT_ROLE

    fd = open(filepath, 'rb')
    acls, not_matched = get_acl_mapping(fd)
    fd.close()
    if not acls:
        logger.info('No matched acls')
        if not_matched:
            logger.warn('Not matched rows (from the exported file): %s' % not_matched)
        return
    compute_roles_mapping(acls)
    for path, values in acls.items():
        # exported paths are re-rooted under 'library/' in the site
        path = path.strip('/')
        if path not in ('', '/'):
            path = "library/%s" % path
        try:
            ob = site.unrestrictedTraverse(path)
        except KeyError:
            logger.error("Couldn't find object at path: %s", path)
            continue
        if ob.meta_type != 'Naaya Folder':
            logger.error("Object %r is not a folder (Naaya can only restrict permissions on folders)", relative_object_path(ob, site))
            continue
        # add users acls
        userids = [val[:-6] for val in values if val.endswith('@circa')]
        non_userids = [val for val in values if not val.endswith('@circa')]
        for user_id in userids:
            user = auth_tool.get_user_with_userid(user_id)
            if user is None:
                logger.error('User not found: %s', user_id)
                continue
            #set_acl_for_user(ob, user)
            logger.warn('(Deactivated) Granted view on path %s for user %s', path, user_id)
        # add roles acls
        roles = [val[2:] for val in non_userids if val.startswith('__')]
        roles = list(set(map(get_role, roles)))
        if roles:
            #set_acl_for_roles(ob, roles)
            logger.warn('(Deactivated) Granted view on path %s for roles %s', path, roles)
        # not matched values
        nonvals = [val for val in non_userids if not val.startswith('__')]
        if nonvals:
            logger.warn('Not matched user or profile for %s' % nonvals)
    if not_matched:
        logger.warn('Not matched rows (from the exported file): %s' % not_matched)
    logger.debug('done importing acls')
if overwrite: filename = slugify(file_name).rsplit('.', 1)[0] if file_container._getOb(filename, None): file_container.manage_delObjects([filename]) file_ob_id = add_file(file_container, file_name, file_data) file_ob = file_container[file_ob_id] except Exception, e: errors.append( ((u"Error while creating file ${file_path}: " "${error}"), { 'file_path': file_path, 'error': force_to_unicode(str(e)) })) else: p = relative_object_path(file_ob, container) created_file_paths.add(p) if errors: if REQUEST is not None: transaction.abort() self.setSessionErrorsTrans(errors) return self.index_html(REQUEST) else: return errors else: notify(ZipImportEvent(container, sorted(created_file_paths))) if REQUEST is not None:
try: file_name.decode('utf-8') except UnicodeDecodeError: #Try to decode the filename using the WinZip encoding ('DOS Latin') file_name = file_name.decode('CP437').encode('utf-8') assert file_container_path in folder_map try: file_container = folder_map[file_container_path] file_ob_id = add_file(file_container, file_name, file_data) file_ob = file_container[file_ob_id] except Exception, e: errors.append((u"Error while creating file ${file_path}: ${error}", {'file_path': file_path, 'error': force_to_unicode(str(e))})) else: p = relative_object_path(file_ob, container) created_file_paths.add(p) if errors: if REQUEST is not None: transaction.abort() self.setSessionErrorsTrans(errors) return self.index_html(REQUEST) else: return errors else: notify(ZipImportEvent(container, sorted(created_file_paths))) if REQUEST is not None:
def walk_backup(index_file, open_backup_file, get_date, actor):
    """Walk a CIRCA backup index and replay folder/file entries onto
    *actor* (``folder_entry`` / ``url_entry`` / ``document_entry``).

    Index keys are looked up tolerantly (upper-case first, then
    title-case). Existing site folders are pre-registered so duplicate
    folder lines are detected rather than re-created.

    Fix: the URL validation was ``assert (cond, msg)`` -- an assert on a
    non-empty tuple, which is always true, so bad URLs were never
    caught. It now asserts the condition itself.
    """
    # root_path is discovered from the first folder line; known_folders
    # maps site-relative folder paths to their index line (or None for
    # pre-existing folders)
    folders_info = {'root_path': '', 'known_folders': {'': None}}
    gen_existing = ofs_walk(actor.context, [INyFolder], [INyFolder])
    for folder in gen_existing:
        rel_path = relative_object_path(folder, actor.context)
        folders_info['known_folders'][rel_path] = None

    def remove_root_path(path):
        # strip the discovered archive root so paths become site-relative
        assert path.startswith(folders_info['root_path'])
        result = path[len(folders_info['root_path']):]
        if result.startswith('/'):
            result = result[1:]
        return result

    def handle_folder(line):
        title = line.get('TITLE', line['Title'])
        description = line.get('ABSTRACT', line['Abstract'])
        userid = parse_userid(line.get('OWNER', line['Owner']))
        filename = line.get('FILENAME', line['Filename'])
        folder_zip_path = filename[:-1].encode('utf-8')
        # for zope replace starting underscores
        folder_zope_path = sanitize_folder_path(folder_zip_path)
        # use get_date as a backup
        try:
            date = parse_date(line.get('CREATED', line['Created']))
        except ValueError:
            date = get_date(folder_zip_path)
        if '/' in folder_zope_path:
            parent_path, folder_id = folder_zope_path.rsplit('/', 1)
        else:
            parent_path = ''
            folder_id = folder_zope_path
        # first folder line fixes the archive root path
        if len(folders_info['known_folders']) == 1:
            assert folders_info['root_path'] == ''
            if parent_path:
                folders_info['root_path'] = parent_path
            else:
                folders_info['root_path'] = ''
        parent_path = remove_root_path(parent_path)
        folder_zope_path = remove_root_path(folder_zope_path)
        assert parent_path in folders_info['known_folders']
        if folder_zope_path in folders_info['known_folders']:
            # duplicate folder line: must agree with the first occurrence
            created = line.get('CREATED', line['Created'])
            owner = line.get('OWNER', line['Owner'])
            folder_info = folders_info['known_folders'][folder_zope_path]
            assert created == folder_info.get('CREATED', folder_info['Created'])
            assert owner == folder_info.get('OWNER', folder_info['Owner'])
            return
        folders_info['known_folders'][folder_zope_path] = line
        actor.folder_entry(parent_path, folder_id, title, description,
                           date, userid)

    def handle_file(line):
        title = line.get('TITLE', line['Title'])
        description = line.get('ABSTRACT', line['Abstract'])
        userid = parse_userid(line.get('OWNER', line['Owner']))
        keywords = line.get('KEYWORDS', line['Keywords'])
        reference = line.get('REFERENCE', line.get('Reference', ''))
        status = line.get('STATUS', line['Status'])
        doc_zip_path = line.get('FILENAME', line['Filename'])
        # for zope replace starting underscores
        doc_zope_path = sanitize_folder_path(doc_zip_path)
        # use get_date as a backup
        try:
            date = parse_date(line.get('UPLOADDATE', line['Uploaddate']))
        except ValueError:
            date = get_date(doc_zip_path)
        # archive layout: .../parent/<name>.dpl/<LANG_version>/<filename>
        doc_split_path = doc_zope_path.split('/')
        doc_filename = doc_split_path[-1].encode('utf-8')
        doc_langver = doc_split_path[-2]
        doc_dpl_name = str(doc_split_path[-3])
        parent_path = '/'.join(doc_split_path[:-3]).encode('utf-8')
        parent_path = remove_root_path(parent_path)
        assert parent_path in folders_info['known_folders']
        doc_id = doc_dpl_name[:-len('.dpl')]
        doc_id = sanitize_id(doc_id)
        full_path = parent_path+'/'+doc_id
        if not doc_langver.startswith('EN_'):
            actor.warn('non-english content: %r at %r'
                       % (doc_langver, full_path))
        ranking = line.get('RANKING', line['Ranking'])
        if ranking != 'Public':
            actor.warn('ranking is %r for %r' % (str(ranking), full_path))
        # 'n/a' placeholders are treated as empty
        if description.lower() == 'n/a':
            description = ''
        if status.lower() == 'n/a':
            status = ''
        if reference.lower() == 'n/a':
            reference = ''
        if status not in ('Draft', ''):
            description = (("<p>Status: %s</p>\n" % status) + description)
        if reference:
            description = (("<p>Reference: %s</p>\n" % reference)
                           + description)
        doc_data_file = open_backup_file(doc_zip_path.encode('latin-1'))
        if doc_filename.endswith('.url'):
            url = doc_data_file.read().strip()
            if '\n' in url:
                matched = re.search(r'URL=(.*)', url)
                if matched:
                    url = matched.groups()[0].strip()
            # fixed: was `assert (cond, msg)`, a tuple that is always true
            assert url.startswith(('http://', 'https://', 'ftp://')), \
                "bad url: %r" % url
            actor.url_entry(parent_path, doc_id, doc_filename, url,
                            title, description, keywords, date, userid)
        else:
            actor.document_entry(parent_path, doc_id, doc_filename,
                                 doc_data_file, title, description,
                                 keywords, date, userid)

    for line in read_index(index_file, actor.warn):
        filename = line.get('FILENAME', line['Filename'])
        if filename.endswith('/'):
            handle_folder(line)
        else:
            handle_file(line)
def walk_backup(index_file, open_backup_file, get_date, actor):
    """Walk a CIRCA backup index and replay folder/file entries onto
    *actor* (``folder_entry`` / ``url_entry`` / ``document_entry``).

    Existing site folders are pre-registered so duplicate folder lines
    are detected rather than re-created.
    """
    # root_path is discovered from the first folder line; known_folders
    # maps site-relative folder paths to their index line (or None for
    # pre-existing folders)
    folders_info = {'root_path': '', 'known_folders': {'': None}}
    gen_existing = ofs_walk(actor.context, [INyFolder], [INyFolder])
    for folder in gen_existing:
        rel_path = relative_object_path(folder, actor.context)
        folders_info['known_folders'][rel_path] = None

    def remove_root_path(path):
        # strip the discovered archive root so paths become site-relative
        assert path.startswith(folders_info['root_path'])
        result = path[len(folders_info['root_path']):]
        if result.startswith('/'):
            result = result[1:]
        return result

    def handle_folder(line):
        title = line['TITLE']
        description = line['ABSTRACT']
        userid = parse_userid(line['OWNER'])
        folder_zip_path = line['FILENAME'][:-1].encode('utf-8')
        # for zope replace starting underscores
        folder_zope_path = sanitize_folder_path(folder_zip_path)
        # use get_date as a backup
        try:
            date = parse_date(line['CREATED'])
        except ValueError:
            date = get_date(folder_zip_path)
        if '/' in folder_zope_path:
            parent_path, folder_id = folder_zope_path.rsplit('/', 1)
        else:
            parent_path = ''
            folder_id = folder_zope_path
        # first folder line fixes the archive root path
        if len(folders_info['known_folders']) == 1:
            assert folders_info['root_path'] == ''
            if parent_path:
                folders_info['root_path'] = parent_path
            else:
                folders_info['root_path'] = ''
        parent_path = remove_root_path(parent_path)
        folder_zope_path = remove_root_path(folder_zope_path)
        assert parent_path in folders_info['known_folders']
        if folder_zope_path in folders_info['known_folders']:
            # duplicate folder line: must agree with the first occurrence
            assert line['CREATED'] == folders_info['known_folders'][folder_zope_path]['CREATED']
            assert line['OWNER'] == folders_info['known_folders'][folder_zope_path]['OWNER']
            return
        folders_info['known_folders'][folder_zope_path] = line
        actor.folder_entry(parent_path, folder_id, title, description, date, userid)

    def handle_file(line):
        title = line['TITLE']
        description = line['ABSTRACT']
        userid = parse_userid(line['OWNER'])
        keywords = line['KEYWORDS']
        reference = line.get('REFERENCE', '')
        status = line['STATUS']
        doc_zip_path = line['FILENAME']
        # for zope replace starting underscores
        doc_zope_path = sanitize_folder_path(doc_zip_path)
        # use get_date as a backup
        try:
            date = parse_date(line['UPLOADDATE'])
        except ValueError:
            date = get_date(doc_zip_path)
        # archive layout: .../parent/<name>.dpl/<LANG_version>/<filename>
        doc_split_path = doc_zope_path.split('/')
        doc_filename = doc_split_path[-1].encode('utf-8')
        doc_langver = doc_split_path[-2]
        doc_dpl_name = str(doc_split_path[-3])
        parent_path = '/'.join(doc_split_path[:-3]).encode('utf-8')
        parent_path = remove_root_path(parent_path)
        assert parent_path in folders_info['known_folders']
        doc_id = doc_dpl_name[:-len('.dpl')]
        doc_id = sanitize_id(doc_id)
        full_path = parent_path+'/'+doc_id
        if not doc_langver.startswith('EN_'):
            actor.warn('non-english content: %r at %r' % (doc_langver, full_path))
        if line['RANKING'] != 'Public':
            actor.warn('ranking is %r for %r' % (str(line['RANKING']), full_path))
        # 'n/a' placeholders are treated as empty
        if description.lower() == 'n/a':
            description = ''
        if status.lower() == 'n/a':
            status = ''
        if reference.lower() == 'n/a':
            reference = ''
        if status not in ('Draft', ''):
            description = (
                ("<p>Status: %s</p>\n" % status) + description)
        if reference:
            description = (
                ("<p>Reference: %s</p>\n" % reference) + description)
        doc_data_file = open_backup_file(doc_zip_path.encode('latin-1'))
        if doc_filename.endswith('.url'):
            url = doc_data_file.read().strip()
            if '\n' in url:
                matched = re.search(r'URL=(.*)', url)
                if matched:
                    url = matched.groups()[0].strip()
            assert url.startswith('http://') or url.startswith('https://') or url.startswith('ftp://'), "bad url: %r" % url
            actor.url_entry(parent_path, doc_id, doc_filename, url, title, description, keywords, date, userid)
        else:
            actor.document_entry(parent_path, doc_id, doc_filename, doc_data_file, title, description, keywords, date, userid)

    for line in read_index(index_file, actor.warn):
        filename = line['FILENAME']
        if filename.endswith('/'):
            handle_folder(line)
        else:
            handle_file(line)