def __init__(self, context):
    """Persist an HMAC key for *context* in its annotations.

    The key defaults to the string form of the context; an already
    stored 'hmac_key' entry is left untouched.
    """
    self.context = context
    annotations = IAnnotations(context)
    mapping = annotations.setdefault(ANNOT_KEY, PersistentMapping())
    mapping.setdefault('hmac_key', str(context))
    self.storage = mapping
def __init__(self, context):
    """Persist a bcrypt salt for *context* in its annotations.

    When the bcrypt library is not importable the adapter is left
    uninitialised (no ``storage`` attribute is set) — presumably the
    availability check elsewhere reports this hasher as unusable.
    """
    self.context = context
    if self.bcrypt is None:
        return
    annotations = IAnnotations(context)
    mapping = annotations.setdefault(ANNOT_KEY, PersistentMapping())
    # gensalt() is evaluated unconditionally; an existing salt is kept
    mapping.setdefault('bcrypt_salt', self.bcrypt.gensalt())
    self.storage = mapping
def _annotateHash(self, node):
    """ Use the hash type specified in the xml file, and annotate mbtool
        If not specified, use a default hash, iff not already annotated
    """
    hash_nodes = [child for child in node.childNodes
                  if child.nodeName == 'hash-type']
    if not hash_nodes:
        # nothing declared in the profile; leave annotations alone
        return
    # only the first <hash-type/> child is honoured
    htype = str(hash_nodes[0].getAttribute('name'))
    if htype not in HASHERS:
        raise ValueError('Unknown hash type: %s - Specify one of %s'
                         % (htype, HASHERS))
    mbtool = getToolByName(self.context, 'membrane_tool')
    annot = IAnnotations(mbtool)
    storage = annot.setdefault(ANNOT_KEY, PersistentMapping())
    storage['hash_type'] = htype
    self._logger.info("Remember hash-type imported: %s" % htype)
def testExportNodeWithAnnotation(self):
    """ when membrane tool is annotated, hash-type node should get exported
        attribute 'name' on the node should contain the hash-type
    """
    # initially add the annotation on to the membrane_tool
    annot = IAnnotations(self.portal.membrane_tool)
    annot.setdefault(ANNOT_KEY, {})['hash_type'] = 'bcrypt'
    try:
        node = self.adapter._exportNode()
        self.failUnless('<hash-type name="bcrypt"/>' in node.toxml())
    finally:
        # clear the bogus annotation even when the assertion fails, so a
        # failing run cannot leak state into other tests sharing the portal
        del annot[ANNOT_KEY]
def _update_last_modified_author(page, user_id=None):
    """Record *user_id* as 'lastModifiedAuthor' on *page* (and its project).

    When no user id is given, the member resolved by ``get_member(page)``
    is used.  The page is annotated and reindexed; if the page sits inside
    an IProject, the project receives the same annotation and reindex.
    """
    if user_id is None:
        # find last logged in user
        user_id = get_member(page)

    def _stamp(obj):
        # write the annotation and refresh the catalog index for it
        annot = IAnnotations(obj).setdefault(ANNOT_KEY, OOBTree())
        annot['lastModifiedAuthor'] = user_id
        obj.reindexObject(idxs=['lastModifiedAuthor'])

    _stamp(page)
    proj = interface_in_aq_chain(page.aq_inner, IProject)
    if proj is None:
        return
    _stamp(proj)
def test_hashers(self):
    """Every available hasher round-trips a member password.

    For each registered hash type: annotate membrane_tool with it,
    re-process the member form, then check the stored password is
    prefixed with that hash type and that credentials still verify.
    """
    for htype in HASHERS:
        # NOTE(review): login_id is not referenced below — confirm whether
        # the verifyCredentials login was originally meant to use it.
        login_id = 'hashtest_%s' % htype
        member = self.portal_member
        hasher = getAdapter(member, IHashPW, htype)
        if not hasher.isAvailable():
            continue
        tool = getToolByName(member, 'membrane_tool')
        IAnnotations(tool).setdefault(ANNOT_KEY, {})['hash_type'] = htype
        member.setRoles('Member')
        member.processForm(values=MEM_DATA)
        stored = member.getPassword()
        # stored passwords look like '<hash_type>:<hashed>'
        prefix, hashed = stored.split(':', 1)
        self.assertEqual(htype, prefix)
        creds = dict(login='******', **MEM_DATA)
        self.failUnless(member.verifyCredentials(creds))
def __init__(self, context):
    """Bind to the persistent digest list kept in *context*'s annotations."""
    self.context = context
    listen_annot = IAnnotations(context).setdefault(PROJECTNAME, OOBTree())
    self.digest = listen_annot.setdefault('digest', PersistentList())
def __init__(self, context):
    """Set up annotation storage plus mail/membership helpers for *context*."""
    self.context = context
    annotations = IAnnotations(context)
    self.listen_annot = annotations.setdefault(PROJECTNAME, OOBTree())
    # adapters for outbound mail and membership management
    self.mail_sender = ISendMail(context)
    self.mem_list = IWriteMembershipList(self.context)
class CatalogSourceSection(object):
    """Transmogrifier source section feeding items from a portal_catalog query.

    Section options (minus the bookkeeping keys) are turned into a catalog
    query; each matched brain is yielded as an item keyed by its relative
    path, with folderish objects also carrying an entries list of their
    exported children.
    """
    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context

        # next is for communication with 'logger' section
        self.anno = IAnnotations(transmogrifier)
        self.storage = self.anno.setdefault(VALIDATIONKEY, [])

        self.pathkey = options.pop('path-key', '_path')
        self.entrieskey = options.pop('entries-key', '_entries')

        # handle exclude-contained parameter
        if "exclude-contained" in options.keys():
            self.exclude_contained = options.pop('exclude-contained')
            self.exclude_contained = self.exclude_contained == "true"
        else:
            self.exclude_contained = False

        # remove 'blueprint' option - it cannot be a query
        options.pop('blueprint')
        # remaining options become catalog query terms; a 'k=v' pair inside a
        # ';'-separated value builds a sub-dict (e.g. range queries)
        self.query = {}
        for k, v in options.items():
            for p in v.split(';'):
                params = p.split('=', 1)
                if len(params) == 1:
                    self.query[k] = p.strip()
                else:
                    q = self.query.setdefault(k, {})
                    q[params[0].strip()] = params[1].strip()

        self.catalog = utils.getToolByName(self.context, 'portal_catalog')

    def __iter__(self):
        # pass through everything produced upstream first
        for item in self.previous:
            yield item

        exported = []          # paths already yielded (content and parents)
        exported_parents = []  # parent folder paths forced into the export
        results = list(self.catalog(**self.query))
        # sort by path so parents tend to precede their children
        results.sort(key=lambda x: x.getPath())
        for brain in results:
            # discussion items are indexed and they must be replaced to
            # content objects to which they correspond
            # we need to skip them
            if brain.portal_type == 'Discussion Item':
                path = '/'.join(brain.getPath().split('/')[:-2])
                cp, id_ = path.rsplit('/', 1)
                brain = self.catalog(path=cp, id=id_)[0]
            else:
                path = brain.getPath()

            # folderish objects are tried to export twice:
            # when their contained items are exported and when they are
            # returned in catalog search results
            if path in exported:
                continue
            exported.append(path)

            # export also all parents of current object
            containers = []
            container_path = path.rsplit('/', 1)[0]
            while container_path:
                if container_path in exported:
                    container_path = container_path.rsplit('/', 1)[0]
                    continue
                exported_parents.append(container_path)
                contained = self.getContained(container_path, results,
                                              exported_parents)
                if contained:
                    exported.append(container_path)
                    # strip the first two path segments (site root) so the
                    # emitted path is relative to the portal
                    containers.append({
                        self.pathkey: '/'.join(container_path.split('/')[2:]),
                        self.entrieskey: contained,
                    })
                container_path = container_path.rsplit('/', 1)[0]

            containers.reverse()  # order metter for us
            for i in containers:
                self.storage.append(i[self.pathkey])
                yield i

            item = {
                self.pathkey: '/'.join(path.split('/')[2:]),
            }
            if brain.is_folderish:
                contained = self.getContained(path, results, exported_parents)
                if contained:
                    item[self.entrieskey] = contained
            self.storage.append(item[self.pathkey])
            yield item

        # cleanup
        if VALIDATIONKEY in self.anno:
            del self.anno[VALIDATIONKEY]

    def getContained(self, path, orignal_results, parents):
        """ Return list of (object_id, portal_type) for objects that are
            returned by catalog and contained in folder with given 'path'.
        """
        results = []
        seen = []
        # Remove the orignal path element from the query if there was one
        query = copy.deepcopy(self.query)
        if "path" in query:
            del query["path"]
        raw_results = self.catalog(path=path, **query)
        for brain in raw_results:
            current = brain.getPath()
            relative = current[len(path):]
            relative = relative.strip('/')
            if not relative:
                # it's object with path that was given in catalog query
                continue
            elif '/' in relative:
                # object stored in subfolders, we need append to results
                # their parent folder
                parent_path = '/'.join([path, relative.split('/', 1)[0]])
                if parent_path not in seen:
                    res = self.catalog(path=path)  # , meta_type='Folder')
                    for i in res:
                        if i.getPath() == parent_path:
                            results.append(i)
                            seen.append(parent_path)
                            break
            elif current not in seen:
                # object is directly stored in folder, that has path given
                # in query
                seen.append(current)
                results.append(brain)

        # NOTE(review): this nested function shadows the builtin `filter`,
        # closes over `allowed_uids` which is only assigned below (safe at
        # runtime because it is only called after that assignment), and
        # falls through returning None (falsy) for excluded items instead
        # of an explicit False.
        def filter(r):
            # Parent objects must be allowed always
            for parent in parents:
                if r.getPath() == parent:
                    return True
            if r["UID"] in allowed_uids:
                return True
            else:
                logger.info("Excluded contained item as it did not match the orignal catalog query:" + str(r.getPath()))

        if self.exclude_contained and orignal_results is not None:
            # Filter contained results against our query, so that
            # we do not export results from parent objects which did not match
            # Build list of allowed object UIDs
            allowed_uids = [r["UID"] for r in orignal_results]
            # All parents must be allowed always
            filtered_results = [r for r in results if filter(r) == True]
        else:
            # Don't filter child items
            filtered_results = results

        contained = [(i.getId, str(i.portal_type)) for i in filtered_results]
        return tuple(contained)
def __init__(self, context):
    """Expose the persistent 'migration' list from *context*'s annotations."""
    self.context = context
    listen_annot = IAnnotations(context).setdefault(PROJECTNAME, OOBTree())
    self.listen_annot = listen_annot
    self.migration_annot = listen_annot.setdefault('migration',
                                                   PersistentList())
class ReaderSection(object):
    """Transmogrifier source section reading a GenericSetup import context.

    Walks a directory / tarball / snapshot import context and yields one
    item per folder, carrying the folder's relative path and the raw data
    of each file it contains.
    """
    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context
        self.options = options

        # shared list used to communicate progress with the logger section
        self.anno = IAnnotations(transmogrifier)
        self.storage = self.anno.setdefault(VALIDATIONKEY, [])

        self.pathkey = options.get('path-key', '_path').strip()
        self.fileskey = options.get('files-key', '_files').strip()
        self.contextkey = options.get('context-key', '_import_context').strip()

        # optional path prefix inside the import context, stored without
        # surrounding slashes
        if 'prefix' in options:
            self.prefix = options['prefix'].strip()
            self.prefix = self.prefix.strip('/')
        else:
            self.prefix = ''

        # unknown context types silently fall back to 'tarball'
        context_type = options.get('context', 'tarball').strip()
        if context_type not in ['directory', 'tarball', 'snapshot']:
            context_type = 'tarball'
        path = options.get('path', '').strip()
        setup_tool = utils.getToolByName(self.context, 'portal_setup')
        if context_type == 'directory':
            self.import_context = context.DirectoryImportContext(setup_tool,
                                                                 path)
        elif context_type == 'tarball':
            # a missing tarball yields an empty archive rather than an error
            if os.path.exists(path):
                archive = file(path, 'rb')
                archive_bits = archive.read()
                archive.close()
            else:
                archive_bits = ''
            self.import_context = context.TarballImportContext(setup_tool,
                                                               archive_bits)
        elif context_type == 'snapshot':
            self.import_context = context.SnapshotImportContext(setup_tool,
                                                                path)

    def walk(self, top):
        """Yield one readFiles() item per directory under *top*, depth-first,
        parents before children."""
        names = self.import_context.listDirectory(top)
        if names is None:
            names = []
        yield self.readFiles(top, names)
        for name in names:
            name = os.path.join(top, name)
            if self.import_context.isDirectory(name):
                for i in self.walk(name):
                    yield i

    def readFiles(self, top, names):
        """Build the item for directory *top*: its prefix-relative path plus
        a files mapping of name -> {'name', 'data'} for each plain file."""
        path = top[len(self.prefix):]
        path = path.lstrip('/')
        item = {self.pathkey: path}
        for name in names:
            full_name = os.path.join(top, name)
            if self.import_context.isDirectory(full_name):
                continue
            # an option named after the file can remap its section key
            section = self.options.get(name, name).strip()
            files = item.setdefault(self.fileskey, {})
            files[section] = {
                'name': name,
                'data': self.import_context.readDataFile(name, top)
            }
        return item

    def __iter__(self):
        for item in self.previous:
            yield item
        for item in self.walk(self.prefix):
            # add import context to item (some next section may use it)
            item[self.contextkey] = self.import_context
            self.storage.append(item[self.pathkey])
            yield item
        # cleanup the shared progress list once the walk is finished
        if VALIDATIONKEY in self.anno:
            del self.anno[VALIDATIONKEY]
def __init__(self, context):
    """Bind migration bookkeeping storage kept under *context*'s annotations."""
    self.context = context
    project_annot = IAnnotations(context).setdefault(PROJECTNAME, OOBTree())
    self.listen_annot = project_annot
    migration = project_annot.setdefault('migration', PersistentList())
    self.migration_annot = migration
class ManifestImporterSection(object):
    """Transmogrifier section assigning portal types from .objects manifests.

    Each folderish item may carry a 'manifest' file; its records map child
    ids to portal types.  Children not listed in their parent's manifest are
    dropped; children listed but never produced upstream are synthesised at
    the end.
    """
    classProvides(ISectionBlueprint)
    implements(ISection)

    def __init__(self, transmogrifier, name, options, previous):
        self.previous = previous
        self.context = transmogrifier.context
        self.pathkey = defaultMatcher(options, 'path-key', name, 'path')
        self.fileskey = defaultMatcher(options, 'files-key', name, 'files')
        self.typekey = options.get('type-key', '_type').strip()
        # communication with logger
        self.anno = IAnnotations(transmogrifier)
        self.storage = self.anno.setdefault(VALIDATIONKEY, [])
        # we need this dictionary to store manifest data, because reader section
        # uses recursion when walking through content folders
        self.manifests = {}

    def __iter__(self):
        for item in self.previous:
            pathkey = self.pathkey(*item.keys())[0]
            fileskey = self.fileskey(*item.keys())[0]
            # skip items without path
            if not pathkey:
                continue
            path = item[pathkey]
            if path != '':
                parent, item_id = os.path.split(path)
                manifest = self.manifests.get(parent, {})
                # skip that are not listed in their parent's manifest
                if item_id not in manifest:
                    continue
                item[self.typekey] = manifest.pop(item_id)
                # remove empty manifest dict
                if not manifest:
                    del self.manifests[parent]
            # this item is folderish - parse manifest
            if fileskey and 'manifest' in item[fileskey]:
                self.extractManifest(path, item[fileskey]['manifest']['data'])
            yield item

        # now we yield items that were defined in manifests but not generated by
        # previous sections - it is posible
        # NOTE(review): `pathkey` here is whatever the last loop iteration
        # left behind; with an empty upstream pipeline this would raise
        # NameError — confirm upstream always yields at least one item.
        if self.manifests:
            containers = self.manifests.keys()
            containers.sort()
            for i in containers:
                manifest = self.manifests[i]
                ids = manifest.keys()
                ids.sort()
                for id_ in ids:
                    if i == '':
                        path = id_
                    else:
                        path = '/'.join([i, id_])
                    self.storage.append(path)
                    yield {pathkey: path, self.typekey: manifest[id_]}

        # cleanup
        if VALIDATIONKEY in self.anno:
            del self.anno[VALIDATIONKEY]

    def extractManifest(self, path, data):
        """Parse manifest XML *data* and record {object_id: type} under *path*."""
        doc = minidom.parseString(data)
        objects = {}
        for record in doc.getElementsByTagName('record'):
            type_ = str(record.getAttribute('type'))
            object_id = str(record.firstChild.nodeValue.strip())
            objects[object_id] = type_
        self.manifests[path] = objects