def getSpamPatterns(self):
    """Fetch spam patterns from the global zwiki spam blacklist, or a
    local property.

    Returns a list of stripped non-empty regular expression strings
    (via parseSpamPatterns).  If the wiki folder has a 'spampatterns'
    property, that is used and no network request is made; otherwise
    the zwiki.org blacklist URL is fetched, with a bounded socket
    timeout, and a fetch failure degrades to an empty pattern list.
    """
    if safe_hasattr(self.folder(), 'spampatterns'):
        # local override: a lines property on the wiki folder
        return list(getattr(self.folder(), 'spampatterns', []))
    else:
        BLATHER('checking zwiki.org spam blacklist')
        req = urllib2.Request(
            ZWIKI_SPAMPATTERNS_URL,
            None,
            {'User-Agent': 'Zwiki %s' % self.zwiki_version()})
        # have to set timeout this way for python 2.4. XXX safe ?
        # (urllib2.urlopen grew a timeout argument only in python 2.6,
        # so the process-wide default is swapped and restored instead)
        saved = socket.getdefaulttimeout()
        socket.setdefaulttimeout(ZWIKI_SPAMPATTERNS_TIMEOUT)
        try:
            try:
                response = urllib2.urlopen(req)
                t = response.read()
            except urllib2.URLError, e:
                # network/DNS/HTTP failure: log and act as if the
                # blacklist were empty rather than blocking the caller
                BLATHER('failed to read blacklist, skipping (%s)' % e)
                t = ''
        finally:
            # always restore the previous process-wide default timeout
            socket.setdefaulttimeout(saved)
        return self.parseSpamPatterns(t)
def index_object(self, idxs=[], log=1):
    """Index this page in the wiki's catalog, if any, and log problems.

    Updates only certain indexes, if specified via idxs.
    """
    if not (self.hasCatalog() and self.isCatalogable()):
        return
    if log:
        BLATHER('indexing', self.url())
    try:
        # XXX zwiki catalogs prior to 0.60 indexed the text method; it
        # now returns unicode which standard catalogs can't handle, so
        # SearchableText is indexed instead. The old text index may
        # still be present; we could specify idxs so as to no longer
        # update it.
        self.catalog().catalog_object(self, self.url(), idxs)
    except:
        BLATHER('failed to index', self.id(), '\n', formattedTraceback())
def upgradeId(self, REQUEST=None):
    """Make sure a page's id conforms with its title (may also change title!)

    See also canonicalIdFrom, http://zwiki.org/HowZwikiTitleAndIdWorks .
    Does not leave a placeholder, so may break incoming links. Does
    update backlinks, because it's less work than fixing up links by
    hand afterward. This makes it too slow to use in auto-upgrade,
    though, so people must call this manually or more usually via
    upgradeAll.

    With legacy pages (or manually renamed pages), it may happen that
    there's a clash between two similarly-named pages mapping to the
    same canonical id. In this case we just log the error and move on.

    The tracker plugin modifies this to also rename old IssueNoNNNN
    pages to the new #NNNN style.
    """
    # plugins get first say on the new title & id
    name = callHooks(upgradeId_hooks, self) or self.pageName()
    # rename does all the real work
    try:
        self.rename(name, updatebacklinks=1, sendmail=0, REQUEST=REQUEST)
    except CopyError:
        # probably an id clash with an existing page - log and move on
        msg = 'upgradeId for "%s" (%s) failed - does %s already exist ?' % (
            self.pageName(), self.getId(), self.canonicalIdFrom(name))
        BLATHER(msg)
def subscribe(self, email, REQUEST=None, parent=0, edits=0):
    # -> none; redirects; depends on self, folder; modifies self, folder, catalog
    """Add an email subscriber to this page.

    subscriber may be an email address or a CMF member id. With parent
    flag, add to the parent folder's subscriber list instead. With
    edits flag, mark this subscriber as one who wants notification of
    all edits.
    """
    subscriber = email
    if subscriber and not self.isSubscriber(subscriber, parent):
        BLATHER('subscribed', subscriber, 'to', self.id(),
                edits and '(all edits)' or '')
        # an ':edits' suffix on the stored entry marks an all-edits subscriber
        entry = subscriber + (edits and ':edits' or '')
        subs = self._getSubscribers(parent)
        subs.append(entry)
        self._setSubscribers(subs, parent)
        if not parent:
            self.index_object()
    if REQUEST:
        REQUEST.RESPONSE.redirect(
            REQUEST.get('redirectURL',
                        REQUEST['URL1'] + '/subscribeform?email=' + subscriber))
def unsubscribe(self, email, REQUEST=None, parent=0):
    # -> none; redirects; depends on self, folder; modifies self, folder, catalog
    """Remove email from this page's subscriber list.

    email may be an email address or CMF username, we try to convert
    usernames to email addresses as needed.

    If parent flag is true, remove it from the parent folder's
    subscriber list instead.
    """
    subscriber = email.lower()
    if self.isSubscriber(subscriber, parent):
        sl = self._getSubscribers(parent)
        # fix: iterate over a snapshot - removing from the list being
        # iterated skips the entry following each removal, which could
        # leave duplicate subscriptions behind
        for s in list(sl):
            if (self.emailAddressFrom(s) ==
                self.emailAddressFrom(subscriber)):
                BLATHER('unsubscribed', subscriber, 'from', self.id())
                sl.remove(s)
        self._setSubscribers(sl, parent)
        if not parent:
            self.index_object()
    if REQUEST:
        REQUEST.RESPONSE.redirect(
            REQUEST.get('redirectURL',
                        REQUEST['URL1'] + '/subscribeform?email=' + subscriber))
def send(self, fields):
    # -> none; depends on: self, mailhost; other effects: sends msg
    """Send a mail message assembled from the given fields dict.

    Secure Mail Host variants are given the pieces as keyword
    arguments; for any other mail host an RFC 822 message string is
    built from the template below and sent whole.  A non-empty result
    from the underlying send call is logged.
    """
    if self.context.meta_type in ('Secure Mail Host', 'Secure Maildrop Host'):
        # secureSend builds the headers itself; extra fields are passed
        # through as keyword arguments
        r = self.context.secureSend(fields['body'],
                                    mto=fields['To'],
                                    mfrom=fields['From'],
                                    subject=fields['Subject'],
                                    mbcc=fields['Bcc'],
                                    charset=fields['charset'],
                                    **fields)
    else:
        # plain mail host: interpolate a complete message, headers and all
        msg = """\
From: %(From)s
Reply-To: %(Reply-To)s
To: %(To)s
Bcc: %(Bcc)s
Subject: %(Subject)s%(In-Reply-To)s
Message-ID: %(Message-ID)s
X-Zwiki-Version: %(X-Zwiki-Version)s
X-BeenThere: %(X-BeenThere)s
List-Id: %(List-Id)s
List-Post: %(List-Post)s
List-Subscribe: %(List-Subscribe)s
List-Unsubscribe: %(List-Unsubscribe)s
List-Archive: %(List-Archive)s
List-Help: %(List-Help)s
Content-Type: text/plain; charset="%(charset)s"

%(body)s
""" % fields
        r = self.context.send(msg)
    if r:
        BLATHER(r)
def fixEncoding(self, enc=None, REQUEST=None):
    """Try to fix character encoding problems in this page's name, text,
    or parents property.

    We now store text as unicode, so this just tries to convert any
    non-unicode text to unicode using the standard encoding - currently
    utf-8, which will work for most zwikis. Some (ancient) zwikis might
    have other encodings, so we also check for the common iso8859-1
    encoding, or another specified with the enc argument. (Ideally this
    would fix anything without such a hint.)

    Returns True if changes were made.
    """
    name, text, parents = self.pageName(), self.text(), self.getParents()
    if isunicode(name) and isunicode(text) and self.hasAllUnicodeParents():
        # nothing to convert
        return False
    else:
        BLATHER('converting %s to unicode' % (self.pageName()))
        enc = enc or self.encoding()
        self.clearCache()
        #self.setText(self.tounicode(text,enc))
        # bypass slow prerendering for now: write the raw text directly
        self.raw = self.cleanupText(self.tounicode(text, enc))
        # rename also fixes the id; parents are converted separately
        self.rename(self.tounicode(name, enc))
        self.convertParentsToUnicode(enc)
        REQUEST = REQUEST or getattr(self, 'REQUEST', None)
        if REQUEST:
            REQUEST.RESPONSE.redirect(self.pageUrl())
        return True
def _replaceLinksEverywhere(self, oldlink, newlink, REQUEST=None):
    """Replace one link with another throughout the wiki.

    Freeform links should not be enclosed in brackets.
    Comes with an appropriately big scary-sounding name.
    See _replaceLinks for more.
    """
    BLATHER('replacing all %s links with %s' % (oldlink, newlink))
    oldid = self.canonicalIdFrom(oldlink)
    for backlink in self.backlinksFor(oldid):
        # this is an extensive, risky operation which can fail for a
        # number of reasons - carry on regardless so we don't block
        # renames
        try:
            # XXX poor caching
            backlink.getObject()._replaceLinks(oldlink, newlink, REQUEST)
        except:
            BLATHER('_replaceLinks failed to update %s links in %s'
                    % (oldlink, backlink.id))
def upgradePageType(self):
    """Correct any problems with this page's page type."""
    current = self.page_type
    if current not in PAGE_TYPES.keys():
        modern = modernPageTypeFor(current)
        BLATHER("upgrading %s's page type from %s to %s"
                % (self.getId(), current, modern))
        self.setPageType(modern)
def rebuildWikiOutline(self):
    """Regenerate the wiki folder's cached outline object, throwing away
    the old data. Will reset subtopic order to alphabetic.
    """
    folder = self.folder()
    BLATHER('purging outline data for wiki', folder.getId())
    if 'outline' in folder.objectIds():
        folder._delObject('outline')
    self.updateWikiOutline()
def removeParent(self, parent):
    """Remove parent from this page's parents property, logging failure."""
    self.ensureParentsPropertyIsList()
    try:
        self.parents.remove(parent)
    except ValueError:
        BLATHER("failed to remove %s from %s's parents (%s)"
                % (parent, self.getId(), self.parents))
    else:
        # in-place list mutation: tell the ZODB this object changed
        self._p_changed = 1
def archive(self, REQUEST=None, pagename=None):
    """Move this page, and all offspring solely parented under this page,
    to the archive subfolder.

    This has no effect if called on a page already in the archive
    folder, or a non-ZODB object (such as a temporary page object
    created by plone's portal_factory). As with delete, if a pagename
    argument is provided, redirect all incoming wiki links there.
    NB this requires 'Delete objects' permission on the wiki folder.
    """
    if self.inArchiveFolder() or inPortalFactory(self):
        return
    self.ensureArchiveFolder()
    f, af, rf = self.folder(), self.archiveFolder(), self.revisionsFolder()
    # which pages to move
    oids = self.offspringIdsAsList()
    id = self.getId()
    ids = [id] + oids
    def notParentedElsewhere(id):
        # true if every parent of page id is also being archived
        pids = [self.pageWithName(p).getId()
                for p in self.pageWithId(id).getParents()]
        for p in pids:
            if not p in ids:
                return False
        return True
    ids2 = [id] + filter(notParentedElsewhere, oids)
    # and their revisions
    rids = []
    for i in ids2:
        rids.extend(self.pageWithId(i).oldRevisionIds())
    if pagename and strip(pagename):
        # fix: this referenced an undefined 'oldname' variable, raising
        # NameError whenever a redirect pagename was supplied; the old
        # link to replace is this page's current name
        self._replaceLinksEverywhere(self.pageName(), pagename, REQUEST)
    # where to go afterward - up, or to default page (which may change)
    redirecturl = self.primaryParent() and self.primaryParentUrl() or None
    # XXX disable outline cache creation with similar kludge to saveRevision's
    saved_manage_afterAdd = self.__class__.manage_afterAdd
    self.__class__.manage_afterAdd = lambda self, item, container: None
    # move pages and revisions
    af.manage_pasteObjects(f.manage_cutObjects(ids2), REQUEST)
    if rids:
        af[id].ensureRevisionsFolder()
        af[id].revisionsFolder().manage_pasteObjects(
            rf.manage_cutObjects(rids), REQUEST)
    self.__class__.manage_afterAdd = saved_manage_afterAdd
    # log, notify, redirect
    msg = 'archived %s' % self.pageName() \
          + (len(oids) and ' and %d subtopics' % len(oids) or '') \
          + (len(rids) and ' and %d revisions' % len(rids) or '')
    BLATHER(msg)
    self.sendMailToEditSubscribers(
        msg + '\n', REQUEST=REQUEST, subject='(archived)')
    redirecturl = redirecturl or self.defaultPageUrl()
    if REQUEST:
        REQUEST.RESPONSE.redirect(redirecturl)
def reparentChildren(self, newparent):
    """Move all of this page's children under newparent instead."""
    kids = self.childrenIdsAsList()
    if not kids:
        return
    BLATHER('reparenting children of', self.getId())
    folder = self.folder()
    for kid in kids:
        child = getattr(folder, kid)  # XXX poor caching
        child.removeParent(self.pageName())
        child.addParent(newparent)
        child.index_object()  # XXX need only reindex parents
def ensureCatalog(self):
    """Ensure this wiki has a zcatalog, for fast standardized searching.

    We'll create one if needed, and index all current pages. This
    could take a while (minutes) in a large wiki.
    """
    if self.hasCatalog() or self.inRevisionsFolder():
        return
    BLATHER('creating catalog for wiki', self.folder().getId())
    self.setupCatalog()
def rename(self, pagename, leaveplaceholder=LEAVE_PLACEHOLDER,
           updatebacklinks=1, sendmail=1, REQUEST=None):
    """Rename this page, if permissions allow.

    Also ensure name/id conformance, keep our children intact, and
    optionally
    - leave a placeholder page
    - update links to this page throughout the wiki. Warning, this is
      not 100% reliable.
    - notify subscribers.
    Note the sendmail arg can stop the primary creation mailout but not
    the ones that may result from updatebacklinks.
    """
    oldname, oldid = self.pageName(), self.getId()
    # NOTE(review): oldnameisfreeform is computed but not used below -
    # possibly kept for a subclass or hook; confirm before removing
    oldnameisfreeform = oldname != oldid
    # strip CR/LF so a pasted multi-line name can't corrupt the title
    def clean(s): return re.sub(r'[\r\n]', '', s)
    newname = clean(self.tounicode(pagename))
    newid = self.canonicalIdFrom(newname)
    namechanged, idchanged = newname != oldname, newid != oldid
    # nothing to do for an empty or unchanged name
    if not newname or not (namechanged or idchanged): return
    # sequence is important here
    BLATHER('renaming %s (%s) to %s (%s)...' % (
        self.toencoded(oldname), oldid, self.toencoded(newname), newid))
    self.ensureTitle()
    if idchanged: self.changeIdCarefully(newid)
    if namechanged: self.changeNameCarefully(newname)
    if (idchanged or namechanged) and updatebacklinks:
        self._replaceLinksEverywhere(oldname, newname, REQUEST)
    self.index_object() # update catalog XXX manage_renameObject may also, if idchanged
    if idchanged and leaveplaceholder:
        try: self._makePlaceholder(oldid, newname)
        except BadRequestException:
            # special case for CMF/Plone: we'll end up here when first
            # saving a page that was created via the CMS ui - we can't
            # save a placeholder page since the canonical ID hasn't
            # really changed
            pass
    if namechanged and sendmail:
        self._sendRenameNotification(oldname, newname, REQUEST)
    BLATHER('rename complete')
    if REQUEST: REQUEST.RESPONSE.redirect(self.pageUrl())
def expungeEditsEverywhereBy(self, username, REQUEST=None, batch=0):
    # -> none
    # depends on: all pages, revisions ; modifies: all pages, revisions
    """Expunge all the most recent edits by username throughout the wiki.

    This is a powerful spam repair tool for managers. It removes all
    recent consecutive edits by username from each page in the wiki.
    The corresponding revisions will disappear from the page history.
    See #1157. Should this use the catalog ? Currently uses a more
    expensive and failsafe brute force ZODB search.
    """
    batch = int(batch)
    # snapshot the affected pages up front, then process them
    targets = [p for p in self.pageObjects() if p.last_editor == username]
    n = 0
    for p in targets:
        n += 1
        try:
            p.expungeEditsBy(username, REQUEST=REQUEST)
        except IndexError:
            BLATHER('failed to expunge edits by %s at %s: %s'
                    % (username, p.id(), formattedTraceback()))
        if batch and (n % batch) == 0:
            BLATHER('committing after %d expunges' % n)
            get_transaction().commit()
def ensureParentsPropertyIsList(self):
    """Ensure our parents property is a list, returning true if changed.

    Zope lines properties switched from tuple to list a while back,
    and lingering tuples cause ongoing breakage. Called by upgrade,
    ensureValidParents and accessors for maximum robustness.
    """
    if type(self.parents) == ListType:
        return False
    BLATHER("converting %s's parents property to a list" % self.pageName())
    # setParents normalizes the stored value
    self.setParents(self.parents)
    return True
def ensureValidParents(self):
    """Ensure that this page's parents are all valid, and reindex if
    needed.
    """
    parents = self.getParents()
    # convert to exact page names, filtering out any which don't exist
    pages = [self.pageWithName(name) for name in parents]
    cleanedupparents = [absattr(p.Title) for p in pages if p]
    # make sure we're not parented under ourself
    if self.pageName() in cleanedupparents:
        cleanedupparents.remove(self.pageName())
    # sort
    cleanedupparents.sort()
    # if changed, save and reindex
    if cleanedupparents != parents:
        BLATHER("adjusting %s's parents from %s to %s"
                % (self.pageName(), parents, cleanedupparents))
        self.setParents(cleanedupparents)
        self.index_object()  # XXX only need to update parents index & metadata
def expunge(self, rev, REQUEST=None):
    """Revert myself to the specified revision, discarding later history.

    rev is a revision number present in oldRevisionNumbers(); a falsy
    or unknown rev is a no-op. Replaces this page object in the folder
    with the saved revision and deletes the revision objects from rev
    onward.
    """
    if not rev:
        return
    rev = int(rev)
    oldrevs = self.oldRevisionNumbers()
    if not rev in oldrevs:
        return
    id = self.getId()
    def replaceMyselfWith(o):
        # in zodb (self is not changed)
        self.folder()._delObject(id)
        self.folder()._setObject(id, o)
    def replaceMyselfWithRev(r):
        # fix: previously ignored its argument and read the enclosing
        # rev directly - harmless today but a latent bug
        newself = self.revision(r)
        newself._setId(id)
        replaceMyselfWith(newself)
    def deleteRevsSince(r):
        # fix: the loop variable previously shadowed the argument r
        for n in oldrevs[oldrevs.index(r):]:
            self.revisionsFolder()._delObject('%s.%d' % (id, n))
    replaceMyselfWithRev(rev)
    deleteRevsSince(rev)
    BLATHER('expunged %s history after revision %d' % (id, rev))
    if REQUEST is not None:
        REQUEST.RESPONSE.redirect(self.pageUrl())
def initializeForCMF(context):
    """Do global CMF/Plone-specific initialization, if they are installed."""
    # probe for the CMF/Plone stack; if any import fails, this Zope has
    # no CMF/Plone and there is nothing to do
    try:
        import Products.CMFCore.DirectoryView
        from Products.CMFCore.utils import ContentInit
        #from Products.CMFCore.permissions import AddPortalContent
        #from Products.Archetypes.public import listTypes, process_types
        from Products.CMFPlone.interfaces import IPloneSiteRoot
        from Products.GenericSetup import EXTENSION, profile_registry
    except ImportError:
        return
    BLATHER(
        'registering "zwiki" skin layer and "Wiki Page" content type with Plone'
    )
    # register our GenericSetup profiles
    profile_registry.registerProfile(
        'default',
        'ZWiki',
        'Extension profile for default Zwiki-in-Plone/CMF setup',
        'profiles/default',
        'ZWiki',
        EXTENSION,
        for_=IPloneSiteRoot)
    profile_registry.registerProfile(
        'uninstall',
        'ZWiki',
        'Extension profile for removing default Zwiki-in-Plone/CMF setup',
        'profiles/uninstall',
        'ZWiki',
        EXTENSION)
    # register our skin layer(s)
    Products.CMFCore.DirectoryView.registerDirectory('skins', globals())
    # initialize portal content
    PROJECT = 'Zwiki'
    #types, cons, ftis = process_types(listTypes(PROJECT),PROJECT)
    ContentInit(
        PROJECT + ' Content',
        content_types=(ZWikiPage.ZWikiPage, ),       # types,
        #permission = AddPortalContent,              # Add portal content
        permission=Permissions.Add,                  # Zwiki: Add pages
        extra_constructors=(addWikiPageInCMF, ),     # cons
        #fti = ftis,                                 # ignored
    ).initialize(context)
def updateWikiOutline(self):
    """Regenerate the wiki folder's cached outline object.

    The wiki's outline object (a PersistentOutline) is a representation
    of the page hierarchy, containing the same information as in the
    pages' parents properties but in a form easier to query. This
    method either generates a new one from the parents properties, or
    updates an old one trying to preserve the order of subtopics.
    Complications.

    This checks and corrects any invalid parents information. It also
    loads all page objects from the ZODB, which is probably ok as this
    is not done frequently.
    """
    BLATHER('updating outline data for wiki', self.folder().getId())
    oldchildmap = {}
    # backwards compatibility
    # there have been three kinds of outline cache: folder attribute,
    # non-SimpleItem-based PersistentOutline object, and
    # SimpleItem-based PersistentOutline object
    # a pre-0.39 outline is just an attribute, delete (but save childmap)
    # (an attribute-style outline is visible on aq_base but absent from
    # objectIds; a real object appears in both)
    if (safe_hasattr(self.folder().aq_base, 'outline')
        and not 'outline' in self.folder().objectIds()):
        oldchildmap = self.folder().outline.childmap()
        del self.folder().outline
    # if there's no outline object, make one
    if not safe_hasattr(self.folder().aq_base, 'outline'):
        self.folder()._setObject('outline', PersistentOutline())
        # carry over subtopic ordering saved from a legacy outline, if any
        self.folder().outline.setChildmap(oldchildmap)
    # regenerate the parentmap
    parentmap = {}
    for p in self.pageObjects():  # poor caching
        p.ensureValidParents()
        parentmap[p.pageName()] = p.getParents()
    self.folder().outline.setParentmap(parentmap)
    # update the childmap (without losing subtopics order) and nesting
    self.folder().outline.update()
[c.encode('utf8') for c in unicode(string.lowercase, encoding)]) relocaleflag = r'(?L)' wordboundary = r'\b' except (TypeError, LookupError): # no locale is set, or there was a problem detecting it or a # problem decoding its letters. # XXX must be a less ugly way to do this: # if it's just that there's no locale, don't log a warning try: lang, encoding = locale.getlocale() except locale.Error: lang, encoding = -1, -1 if (lang, encoding) == (None, None): pass else: BLATHER( 'the system locale gave a problem in Regexps.py, so WikiNames will not be locale-aware' ) DEBUG(formattedTraceback()) # define a useful default set of non-ascii letters, mainly european letters # from http://zwiki.org/InternationalCharacterExamples # XXX more have been added to that page (latvian, polish).. how far # should we go with this ? Could we make it always recognise all # letters and forget locale awareness ? Are regexps getting slow ? # XXX needs more work, see failing links at # http://zwiki.org/InternationalCharactersInPageNames uppercase = string.uppercase + '\xc3\x80\xc3\x81\xc3\x82\xc3\x83\xc3\x84\xc3\x85\xc3\x86\xc3\x88\xc3\x89\xc3\x8a\xc3\x8b\xc3\x8c\xc3\x8d\xc3\x8e\xc3\x8f\xc3\x92\xc3\x93\xc3\x94\xc3\x95\xc3\x96\xc3\x98\xc3\x99\xc3\x9a\xc3\x9b\xc3\x9c\xc3\x9d\xc3\x87\xc3\x90\xc3\x91\xc3\x9e' lowercase = string.lowercase + '\xc3\xa0\xc3\xa1\xc3\xa2\xc3\xa3\xc3\xa4\xc3\xa5\xc3\xa6\xc3\xa8\xc3\xa9\xc3\xaa\xc3\xab\xc3\xac\xc3\xad\xc3\xae\xc3\xaf\xc3\xb2\xc3\xb3\xc3\xb4\xc3\xb5\xc3\xb6\xc3\xb8\xc3\xb9\xc3\xba\xc3\xbb\xc3\xbc\xc3\xbd\xc3\xbf\xc2\xb5\xc3\x9f\xc3\xa7\xc3\xb0\xc3\xb1\xc3\xbe' U = '|'.join([c.encode('utf8') for c in unicode(uppercase, 'utf-8')]) L = '|'.join([c.encode('utf8') for c in unicode(lowercase, 'utf-8')]) Ubr = '[%s]' % ''.join( [c.encode('utf8') for c in unicode(uppercase, 'utf-8')])
def upgradeAll(self, render=1, batch=0, REQUEST=None):
    # -> none
    # depends on: wiki
    # modifies: wiki (folder, pages, dtml methods, catalog, outline, revisions..)
    """Update, upgrade, pre-render and re-index all pages and data
    structures.

    Normally individual pages are upgraded and pre-rendered on demand,
    when viewed. An administrator may want to do this for all pages
    ahead of time, particularly after a zwiki upgrade, to ensure all
    pages have the latest properties and have been rendered by the
    latest code, minimizing delay and possible problems later on.

    Also installs a wiki catalog if not present, re-indexes each page,
    validates page parents, and rebuilds the wiki outline cache. Also
    installs the index_html and standard_error_message dtml methods.
    XXX split ?

    You can set render=0 to skip the page pre-rendering part,
    completing much faster on large wikis.

    The optional batch argument forces a commit every N pages. This
    may be useful to get a complete run in large/busy wikis, which can
    be difficult due to conflict errors or memory usage.

    Requires 'Manage properties' permission on the folder.
    """
    if not self.checkPermission(Permissions.manage_properties,
                                self.folder()):
        raise Unauthorized, (
            _('You are not authorized to upgrade all pages.') + \
            _('(folder -> Manage properties)'))
    batch = int(batch)
    if render:
        BLATHER('upgrading/reindexing/pre-rendering all pages:')
    else:
        BLATHER('upgrading/reindexing all pages:')
    starttime = clock()
    n, total = 0, self.pageCount()
    # set up shared structures first, without indexing pages yet
    self.setupCatalog(reindex=0)
    self.rebuildWikiOutline()
    for p in self.pageObjects():  # poor caching (not a problem here)
        n += 1
        try:
            p.upgrade(REQUEST)
            p.upgradeId(REQUEST)
            p.fixEncoding()
            if render:
                p.preRender(clear_cache=1)
                msg = 'upgraded and pre-rendered page'
            else:
                msg = 'upgraded page'
            # make sure every page is cataloged - slow but thorough
            p.index_object(log=0)
            BLATHER('%s %d/%d %s' % (msg, n, total, p.id()))
        except:
            # keep going; a single broken page shouldn't stop the run
            BLATHER('failed to upgrade page %d/%d %s: %s' \
                    % (n, total, p.id(), formattedTraceback()))
        if batch and n % batch == 0:
            BLATHER('committing')
            get_transaction().commit()
    self.setupDtmlMethods()
    endtime = clock()
    BLATHER('upgrade complete, %d pages processed in %fs, %.1f pages/s' \
            % (n, endtime-starttime, n/(endtime-starttime)))
def sendMailTo(self, recipients, text, REQUEST,
               subjectSuffix='',
               subject='',
               message_id=None,
               in_reply_to=None,
               to=None,
               exclude_address=None,
               ):
    # -> none; depends on self, wiki, mailhost, time; other effects: sends encoded msg
    """Send a mail-out containing text to a list of email addresses.

    If mail-out is not configured in this wiki or there are no valid
    recipients, do nothing. Log any errors but don't stop. text can be
    body text or rfc-822 message text.
    """
    if not self.isMailoutEnabled():
        return
    if exclude_address in recipients:
        recipients.remove(exclude_address)  # help mailin.py avoid loops
    if not recipients:
        return
    try:
        msgid = message_id or self.messageIdFromTime(self.ZopeTime())
        # encode subject with RFC 2047
        subj = str(
            Header(self.subjectHeader(subject, subjectSuffix),
                   self.encoding()))
        # all values are encoded here; GenericMailHost interpolates
        # them into the message template verbatim
        fields = {
            'body':
            '%s\n\n%s' % (self.toencoded(text),
                          self.toencoded(self.signature(msgid))),
            'From': self.toencoded(self.fromHeader(REQUEST)),
            'Reply-To': self.toencoded(self.replyToHeader()),
            'To': self.toencoded(to or self.toHeader()),
            'Bcc': self.toencoded(self.bccHeader(recipients)),
            'Subject': subj,
            'Message-ID': self.toencoded(msgid),
            # rendered directly after the Subject line, so includes its
            # own leading newline when present
            'In-Reply-To':
            self.toencoded(
                (in_reply_to
                 and '\nIn-reply-to: %s' % in_reply_to.splitlines()[0])
                or ''),
            'Content-Type': 'text/plain; charset="%s"' % self.encoding(),
            'charset': self.encoding(),
            'X-Zwiki-Version': self.zwiki_version(),
            'X-BeenThere': self.toencoded(self.xBeenThereHeader()),
            'List-Id': self.toencoded(self.listIdHeader()),
            'List-Post': self.toencoded(self.listPostHeader()),
            'List-Subscribe': '<' + self.pageUrl() + '/subscribeform>',
            'List-Unsubscribe': '<' + self.pageUrl() + '/subscribeform>',
            'List-Archive': '<' + self.pageUrl() + '>',
            'List-Help': '<' + self.wikiUrl() + '>',
        }
        GenericMailHost(self.mailhost()).send(fields)
        BLATHER('sent mail to subscribers:\nTo: %s\nBcc: %s' %
                (fields['To'], fields['Bcc']))
    except:
        # never let a mail failure break the calling operation
        BLATHER('**** failed to send mail to %s: %s' %
                (recipients, formattedTraceback()))
def forbid(reason):
    """Log a blocked edit and abort the request with a Forbidden error."""
    # NOTE(review): path, ip, username and t are closure variables from
    # an enclosing scope not visible here - confirm against the caller
    BLATHER('%s blocked edit from %s (%s), %s:\n%s' %
            (path, ip, username, reason, t))
    raise Forbidden, "There was a problem, please contact the site admin."
def setupCatalog(self, REQUEST=None, reindex=1):
    """Create and/or configure a catalog for this wiki.

    Safe to call more than once; will ignore any already existing
    items. For simplicity we install all metadata for plugins (like
    Tracker) here as well.
    """
    if self.inRevisionsFolder():
        return
    if not self.hasCatalog():
        self.folder().manage_addProduct['ZCatalog'].manage_addZCatalog(
            'Catalog', '')
    catalog = self.catalog()
    catalogindexes, catalogmetadata = catalog.indexes(), catalog.schema()
    PluginIndexes = catalog.manage_addProduct['PluginIndexes']

    class Extra:
        """ Just a dummy to build records for the Lexicon."""
        pass

    # a ZCTextIndex needs a lexicon; create ours once, with
    # unicode-aware splitting and case normalization
    if 'ZwikiLexicon' not in catalog.objectIds('ZCTextIndex Lexicon'):
        wordSplitter = Extra()
        wordSplitter.group = 'Word Splitter'
        wordSplitter.name = 'Unicode HTML aware splitter'
        caseNormalizer = Extra()
        caseNormalizer.group = 'Case Normalizer'
        caseNormalizer.name = 'Unicode Case normalizer'
        catalog.manage_addProduct['ZCTextIndex'].manage_addLexicon(
            'ZwikiLexicon', 'Lexicon', (wordSplitter, caseNormalizer))
    for i in TEXTINDEXES:
        # we use a ZCTI here and set up appropriately
        # this will always work with what stock Zope and Zwiki provides
        # admins can replace with a more potent or differently setup index
        # a TextIndex is case sensitive, exact word matches only
        # a ZCTextIndex can be case insensitive and do right-side wildcards
        # a TextIndexNG2 can be case insensitive and do both wildcards
        if not i in catalogindexes or \
           catalog._catalog.getIndex(i).meta_type == 'TextIndex':
            if i in catalogindexes:
                # it's there, so it's the wrong kind
                catalog.delIndex(i)
            extra = Extra()
            extra.index_type = 'Okapi BM25 Rank'
            extra.lexicon_id = 'ZwikiLexicon'
            extra.doc_attr = i
            catalog.addIndex(i, 'ZCTextIndex', extra)
    for i in FIELDINDEXES:
        if not i in catalogindexes:
            PluginIndexes.manage_addFieldIndex(i)
    for i in KEYWORDINDEXES:
        if not i in catalogindexes:
            PluginIndexes.manage_addKeywordIndex(i)
    for i in DATEINDEXES:
        if not i in catalogindexes:
            PluginIndexes.manage_addDateIndex(i)
    for i in PATHINDEXES:
        if not i in catalogindexes:
            PluginIndexes.manage_addPathIndex(i)
    for m in PAGE_METADATA:
        if not m in catalogmetadata:
            catalog.manage_addColumn(m)
    if reindex:
        # now index each page, to make all indexes and metadata current
        n = 0
        cid = self.catalog().getId()
        for p in self.pageObjects():
            n = n + 1
            BLATHER('indexing page %d %s in %s' % (n, p.id(), cid))
            p.index_object(log=0)
        BLATHER('indexing complete, %d pages processed' % n)
    if REQUEST:
        REQUEST.RESPONSE.redirect(self.pageUrl())
def _upgradeSubscribers(self):
    # -> none; depends on self, folder; modifies self, folder
    """Upgrade old subscriber lists, both this page's and the folder's.

    Called as needed, ie on each access and also from
    ZWikiPage.upgrade() (set AUTO_UPGRADE=0 in Default.py to disable).

    XXX Lord have mercy! couldn't this be simpler
    """
    # upgrade the folder first; we'll check attributes then properties
    changed = 0
    f = self.folder().aq_base
    # migrate an old zwiki subscribers or wikifornow _subscribers attribute
    oldsubs = None
    if (safe_hasattr(f, 'subscribers')
        and type(f.subscribers) is StringType):
        # legacy zwiki format: a comma-separated string
        if f.subscribers:
            oldsubs = re.sub(r'[ \t]+', r'', f.subscribers).split(',')
        try:
            del f.subscribers
        except KeyError:
            BLATHER('failed to delete self.folder().subscribers')
        changed = 1
    elif safe_hasattr(f, '_subscribers'):
        # legacy wikifornow format: a mapping keyed by address
        oldsubs = f._subscribers.keys()
        try:
            del f._subscribers
        except KeyError:
            BLATHER('failed to delete self.folder()._subscribers')
        changed = 1
    # ensure a subscriber_list attribute
    if not safe_hasattr(f, 'subscriber_list'):
        f.subscriber_list = []
    # transfer old subscribers to subscriber_list, unless it's already
    # populated in which case discard them
    if oldsubs and not f.subscriber_list:
        f.subscriber_list = oldsubs
    # update _properties
    props = map(lambda x: x['id'], f._properties)
    if 'subscribers' in props:
        f._properties = filter(lambda x: x['id'] != 'subscribers',
                               f._properties)
        changed = 1
    if not 'subscriber_list' in props:
        f._properties = f._properties + \
            ({'id':'subscriber_list','type':'lines','mode':'w'},)
    if changed:
        BLATHER('upgraded %s folder subscriber list' % (f.id))

    # now do the page..
    changed = 0
    self = self.aq_base
    # migrate an old zwiki subscribers attribute
    oldsubs = None
    if (safe_hasattr(self, 'subscribers')
        and type(self.subscribers) is StringType):
        if self.subscribers:
            oldsubs = re.sub(r'[ \t]+', r'', self.subscribers).split(',')
        try:
            del self.subscribers
        except KeyError:
            BLATHER('failed to delete %s.subscribers' % (self.id()))
        changed = 1
    # copy old subscribers to subscriber_list, unless it's already
    # got some
    # XXX merge instead
    if oldsubs and not self.subscriber_list:
        self.subscriber_list = oldsubs
    # migrate a wikifornow _subscribers attribute
    oldsubs = None
    if safe_hasattr(self, '_subscribers'):
        oldsubs = self._subscribers.keys()
        try:
            del self._subscribers
        except KeyError:
            BLATHER('failed to delete %s._subscribers' % (self.id()))
        changed = 1
    if oldsubs and not self.subscriber_list:
        self.subscriber_list = oldsubs
    # update _properties
    props = map(lambda x: x['id'], self._properties)
    if 'subscribers' in props:
        self._properties = filter(lambda x: x['id'] != 'subscribers',
                                  self._properties)
        changed = 1
    if not 'subscriber_list' in props:
        self._properties = self._properties + \
            ({'id':'subscriber_list','type':'lines','mode':'w'},)
    if changed:
        BLATHER('upgraded %s subscriber list' % (self.id()))
def upgrade(self, REQUEST=None):
    """Upgrade an old page instance (and possibly the folder as well).

    Called on every page view (set AUTO_UPGRADE=0 in Default.py to
    prevent this). You could also call this on every page in your wiki
    to do a batch upgrade. Affects bobobase_modification_time. If you
    later downgrade zwiki, the upgraded pages may not work so well.
    """
    # Note that the objects don't get very far unpickling, some
    # by-hand adjustment via command-line interaction is necessary
    # to get them over the transition, sigh. --ken
    # not sure what this means --SM
    # What happens in the zodb when class definitions change ? I think
    # all instances in the zodb conform to the new class shape
    # immediately on refresh/restart, but what happens to
    # (a) old _properties lists ? not sure, assume they remain in
    # unaffected and we need to add the new properties
    # and (b) old properties & attributes no longer in the class
    # definition ? I think these lurk around, and we need to delete
    # them.
    changed = 0
    # As of 0.17, page ids are always canonicalIdFrom(title); we'll
    # rename to conform with this where necessary
    # too slow!
    # changed = self.upgradeId()
    # fix up attributes first, then properties
    # NB be a bit careful about acquisition while doing this
    # migrate a WikiForNow _st_data attribute
    if safe_hasattr(self.aq_base, '_st_data'):
        self.raw = self._st_data
        del self._st_data
        changed = 1
    # upgrade old page types
    pagetype = self.pageTypeId()
    if pagetype in PAGE_TYPE_UPGRADES.keys():
        self.setPageType(self.modernPageTypeFor(pagetype))
        # clear render cache; don't bother prerendering just now
        self.clearCache()
        changed = 1
    # Pre-0.9.10, creation_time has been a string in custom format and
    # last_edit_time has been a DateTime. Now both are kept as
    # ISO 8601-format strings. Might not be strictly necessary to upgrade
    # in all cases.. will cause a lot of bobobase_mod_time
    # updates.. do it anyway.
    if not self.last_edit_time:
        self.last_edit_time = self.bobobase_modification_time().ISO8601()
        changed = 1
    elif type(self.last_edit_time) is not StringType:
        self.last_edit_time = self.last_edit_time.ISO8601()
        changed = 1
    elif len(self.last_edit_time) != 25:
        try:
            if len(self.last_edit_time) == 19:
                # older "ISO()" format
                # we're using the behaviour that was standard in
                # Zope <= 2.9, where a timestamp without timezone
                # information was assumed to be in UTC (aka GMT)
                self.last_edit_time = \
                    DateTime(self.last_edit_time+' GMT+0').ISO8601()
            else:
                # some other timestamp format, leave tz information
                # untouched, or let DateTime guess at it
                self.last_edit_time = \
                    DateTime(self.last_edit_time).ISO8601()
            changed = 1
        except DateTimeSyntaxError:
            # can't convert to ISO 8601, just leave it be
            pass
    # If no creation_time, just leave it blank for now.
    # we shouldn't find DateTimes here, but check anyway
    if not self.creation_time:
        pass
    elif type(self.creation_time) is not StringType:
        self.creation_time = self.creation_time.ISO8601()
        changed = 1
    elif len(self.creation_time) != 25:
        try:
            if len(self.creation_time) == 19:
                # older "ISO()" format
                self.creation_time = \
                    DateTime(self.creation_time+' GMT+0').ISO8601()
            else:
                self.creation_time = \
                    DateTime(self.creation_time).ISO8601()
            changed = 1
        except DateTimeSyntaxError:
            # can't convert to ISO 8601, just leave it be
            pass
    # _wikilinks, _links and _prelinked are no longer used
    for a in (
        '_wikilinks',
        '_links',
        '_prelinked',
    ):
        if safe_hasattr(self.aq_base, a):
            delattr(self, a)
            self.clearCache()
            changed = 1
    # update _properties
    # keep in sync with _properties above. Better if we used that as
    # the template (efficiently)
    oldprops = {  # not implemented
        'page_type': {'id': 'page_type', 'type': 'string'},
    }
    newprops = {
        #'page_type' :{'id':'page_type','type':'selection','mode': 'w',
        #              'select_variable': 'ZWIKI_PAGE_TYPES'},
        'creator': {
            'id': 'creator',
            'type': 'string',
            'mode': 'r'
        },
        'creator_ip': {
            'id': 'creator_ip',
            'type': 'string',
            'mode': 'r'
        },
        'creation_time': {
            'id': 'creation_time',
            'type': 'string',
            'mode': 'r'
        },
        'last_editor': {
            'id': 'last_editor',
            'type': 'string',
            'mode': 'r'
        },
        'last_editor_ip': {
            'id': 'last_editor_ip',
            'type': 'string',
            'mode': 'r'
        },
        # fix: the id here was 'creation_time' (copy-paste error),
        # which appended a duplicate creation_time property, never
        # registered last_edit_time, and re-appended the bad entry on
        # every upgrade since 'last_edit_time' never showed up in props
        'last_edit_time': {
            'id': 'last_edit_time',
            'type': 'string',
            'mode': 'r'
        },
        'last_log': {
            'id': 'last_log',
            'type': 'string',
            'mode': 'r'
        },
        'NOT_CATALOGED': {
            'id': 'NOT_CATALOGED',
            'type': 'boolean',
            'mode': 'w'
        },
    }
    props = map(lambda x: x['id'], self._properties)
    for p in oldprops.keys():
        if p in props:  # and oldprops[p]['type'] != blah blah blah :
            pass  #ack!
            #self._properties = filter(lambda x:x['id'] != p,
            #                          self._properties)
            #changed = 1
            # XXX this does work in python 1.5 surely.. what's the
            # problem ?
    for p in newprops.keys():
        if not p in props:
            self._properties = self._properties + (newprops[p], )
            changed = 1
    # ensure parents property is a list
    if self.ensureParentsPropertyIsList():
        changed = 1
    # call any extra upgrade actions eg from plugins
    if callHooks(upgrade_hooks, self):
        changed = 1
    if changed:
        # do a commit now so the current render will have the correct
        # bobobase_modification_time for display (many headers/footers
        # still show it)
        # XXX I don't think we need to dick around with commits any more
        #get_transaction().commit()
        BLATHER('upgraded ' + self.id())
    self.upgradeComments(REQUEST)
    # PageMailSupport does a bit more (merge here ?)
    self._upgradeSubscribers()
    # make sure there is a catalog for this wiki
    self.ensureCatalog()
    # make sure there is an up-to-date outline cache
    self.ensureWikiOutline()