def processMeaning(self, compId, namespace, activeLangs, meaning): """Updates all the pages in a given cluster.""" if self.opts.verbose: niceText = '[' + ', '.join(map(lambda p: p[0] + ':' + p[1], meaning)) + ']' print 'DEBUG: Processing meaning: %s' % (niceText,) # Load pages pages = {} for page in meaning: lang, title = page if not lang in activeLangs: continue site = wikipedia.getSite(lang) page = wikipedia.Page(site, title, site, namespace) if self.opts.verbose: print 'DEBUG: Fetching page: %s' % (page,) if not page.exists(): print 'WARNING: Skipping this meaning because of nonexistent page: %s' % (page,) return if page.isRedirectPage(): print 'WARNING: Skipping this meaning because of unexpected redirect page: %s' % (page,) return pages[site] = page # Process each page for page in pages.values(): interwiki = {} for p in page.interwiki(): interwiki[p.site()] = p # Find interwikis to add/change/remove add, change, remove = [], [], [] for site in interwiki: if not site in pages: remove += [str(site.language())] continue if interwiki[site] != pages[site]: change += [str(site.language())] continue otherPages = {} for site in pages: if page.site() == site: continue otherPages[site] = page for site in otherPages: if not site in interwiki: add += [str(site.language())] # Check if update needed if not add and not change and not remove: continue # Update the page comment = self.reportMods(add, change, remove) print 'INFO: page: %s %s' % (page, comment) if not self.opts.dry: text = wikipedia.replaceLanguageLinks(page.get(), otherPages) page.put(text, comment)
def updateInterwiki (self, wikipediaPage = None, commonsPage = None):
    '''
    Update the interwiki's at commons from a wikipedia page. The bot just
    replaces the interwiki links at the commons page with the interwiki's
    from the wikipedia page. This should probably be more intelligent. We
    could use add all the interwiki's and remove duplicates. Or only
    remove language links if multiple language links to the same language
    exist.

    This function is disabled for the moment untill i figure out what the
    best way is to update the interwiki's.
    '''
    interwikis = {}
    # NOTE(review): dead store — `comment` is rebuilt unconditionally below.
    comment= u''
    # Collect the wikipedia page's language links plus a link back to the
    # wikipedia page itself, keyed by site (later links win on duplicates).
    interwikilist = wikipediaPage.interwiki()
    interwikilist.append(wikipediaPage)
    for interwikiPage in interwikilist:
        interwikis[interwikiPage.site()]=interwikiPage
    oldtext = commonsPage.get()
    # The commonssite object doesnt work with interwiki's, so render the
    # links against the nl site instead.
    newtext = pywikibot.replaceLanguageLinks(oldtext, interwikis,
                                             pywikibot.getSite(u'nl'))
    comment = u'Updating interwiki\'s from [[' + \
              wikipediaPage.site().language() + \
              u':' + wikipediaPage.title() + u']]'
    # Only save when the rendered text actually differs.
    if newtext != oldtext:
        #This doesnt seem to work. Newtext has some trailing whitespace
        pywikibot.showDiff(oldtext, newtext)
        commonsPage.put(newtext=newtext, comment=comment)
def updateInterwiki(self, wikipediaPage=None, commonsPage=None):
    """Replace the interwiki links on *commonsPage* with those found on
    *wikipediaPage* (plus a link to the wikipedia page itself).

    The replacement is wholesale, not a merge; see the original author's
    note that this should eventually become more intelligent.  The page is
    only saved when the rendered text differs from what is already there.
    """
    links = wikipediaPage.interwiki()
    links.append(wikipediaPage)
    # Key every collected link by its site; later entries win.
    interwikis = dict((p.site(), p) for p in links)
    oldtext = commonsPage.get()
    # The commons site object doesn't handle interwikis itself, so the
    # links are rendered against the nl site instead.
    newtext = pywikibot.replaceLanguageLinks(oldtext, interwikis,
                                             pywikibot.getSite(u'nl'))
    comment = (u'Updating interwiki\'s from [['
               + wikipediaPage.site().language()
               + u':' + wikipediaPage.title() + u']]')
    if newtext == oldtext:
        return
    # Known quirk: newtext may carry some trailing whitespace.
    pywikibot.showDiff(oldtext, newtext)
    commonsPage.put(newtext=newtext, comment=comment)
def standardizeInterwiki(self, text):
    """Normalize the placement and ordering of interwiki links in *text*.

    Extracts the existing language links and re-renders them at the
    canonical position for this site, returning the adjusted wikitext.
    """
    links = wikipedia.getLanguageLinks(text, insite=self.site)
    return wikipedia.replaceLanguageLinks(text, links, site=self.site)
def putAfterTemplate(page, template, toadd, loose=True):
    '''Insert *toadd* on a new line right after the first occurrence of
    {{template}} in *page*'s text, and return the resulting text.

    If the template is not found:
      * with loose=False an empty unicode string is returned (note: the
        original docstring promised False, but u'' is what callers get);
      * with loose=True the interwikis and categories are stripped,
        *toadd* is appended, and the categories and interwikis restored,
        so the addition lands before the page footer.

    Based on cc-by-sa-3.0 code by Dschwen.
    '''
    oldtext = page.get()
    newtext = u''
    templatePosition = oldtext.find(u'{{%s' % (template, ))
    if templatePosition >= 0:
        # Scan for the end of the template by counting matched pairs of
        # curly braces (and square brackets for embedded links).
        previousChar = u''
        currentChar = u''
        templatePosition += 2  # skip past the opening '{{'
        curly = 1
        square = 0
        while templatePosition < len(oldtext):
            currentChar = oldtext[templatePosition]
            # NOTE(review): the previousChar = u'' resets below are
            # immediately overwritten by previousChar = currentChar, so
            # runs of three or more identical braces (e.g. {{{param}}})
            # are miscounted — kept as-is to preserve behaviour.
            if currentChar == u'[' and previousChar == u'[':
                square += 1
                previousChar = u''
            if currentChar == u']' and previousChar == u']':
                square -= 1
                previousChar = u''
            if currentChar == u'{' and previousChar == u'{':
                curly += 1
                previousChar = u''
            if currentChar == u'}' and previousChar == u'}':
                curly -= 1
                previousChar = u''
            previousChar = currentChar
            templatePosition += 1
            if curly == 0 and square <= 0:
                # Found end of template
                break
        newtext = oldtext[:templatePosition] + u'\n' + toadd + \
                  oldtext[templatePosition:]
    else:
        if loose:
            newtext = oldtext
            cats = wikipedia.getCategoryLinks(newtext)
            ll = wikipedia.getLanguageLinks(newtext)
            # BUGFIX: this result was previously assigned to a misspelled
            # variable ('nextext') and thrown away, so the old language
            # links were never removed and ended up duplicated after
            # replaceLanguageLinks below.
            newtext = wikipedia.removeLanguageLinks(newtext)
            newtext = wikipedia.removeCategoryLinks(newtext)
            newtext = newtext + u'\n' + toadd
            newtext = wikipedia.replaceCategoryLinks(newtext, cats)
            newtext = wikipedia.replaceLanguageLinks(newtext, ll)
    return newtext
def run(self):
    """Apply the hints collected from a warnfile to the affected pages.

    For every page listed in the warnfile, read its current interwiki
    links, merge in the add-hints and remove-hints, and save the page
    when the resulting link set differs from the original.
    """
    hints, removeHints = self.warnfileReader.getHints()
    k=hints.keys()
    k.sort()
    print "Fixing... %i pages" % len(k)
    for page in k:
        # Existing language links on the page, keyed by site.
        old={}
        try:
            for page2 in page.interwiki():
                old[page2.site()] = page2
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect page; not changing"
                             % page.title(asLink=True))
            continue
        except pywikibot.NoPage:
            pywikibot.output(u"Page %s not found; skipping"
                             % page.title(asLink=True))
            continue
        # Start from the current links, then apply add/remove hints.
        new={}
        new.update(old)
        if page in hints:
            for page2 in hints[page]:
                site = page2.site()
                new[site] = page2
        if page in removeHints:
            for page2 in removeHints[page]:
                site = page2.site()
                try:
                    del new[site]
                except KeyError:
                    pass
        # mods is a human-readable summary of the differences; empty when
        # nothing changed.
        mods, adding, removing, modifying = interwiki.compareLanguages(
            old, new, insite=page.site())
        if mods:
            pywikibot.output(page.title(asLink=True) + mods)
            oldtext = page.get()
            newtext = pywikibot.replaceLanguageLinks(oldtext, new)
            if 1:  # NOTE(review): leftover always-true debug toggle
                pywikibot.showDiff(oldtext, newtext)
                try:
                    status, reason, data = page.put(newtext,
                                                    comment='warnfile '+mods)
                except pywikibot.LockedPage:
                    pywikibot.output(u"Page is locked. Skipping.")
                    continue
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    continue
                except pywikibot.Error:
                    pywikibot.output(u"Error while saving page.")
                    continue
                # Anything other than a 302 redirect response is reported.
                if str(status) != '302':
                    print status, reason
def run(self):
    """Apply the hints collected from a warnfile to the affected pages.

    Variant using the old `wikipedia` module API (page.aslink()).  For
    every page listed in the warnfile, read its current interwiki links,
    merge in the add-hints and remove-hints, and save the page when the
    resulting link set differs from the original.
    """
    hints, removeHints = self.warnfileReader.getHints()
    k = hints.keys()
    k.sort()
    print "Fixing... %i pages" % len(k)
    for page in k:
        # Existing language links on the page, keyed by site.
        old = {}
        try:
            for page2 in page.interwiki():
                old[page2.site()] = page2
        except wikipedia.IsRedirectPage:
            wikipedia.output(u"%s is a redirect page; not changing"
                             % page.aslink())
            continue
        except wikipedia.NoPage:
            wikipedia.output(u"Page %s not found; skipping"
                             % page.aslink())
            continue
        # Start from the current links, then apply add/remove hints.
        new = {}
        new.update(old)
        if page in hints:
            for page2 in hints[page]:
                site = page2.site()
                new[site] = page2
        if page in removeHints:
            for page2 in removeHints[page]:
                site = page2.site()
                try:
                    del new[site]
                except KeyError:
                    pass
        # mods is a human-readable summary of the differences; empty when
        # nothing changed.
        mods, adding, removing, modifying = interwiki.compareLanguages(
            old, new, insite=page.site())
        if mods:
            wikipedia.output(page.aslink() + mods)
            oldtext = page.get()
            newtext = wikipedia.replaceLanguageLinks(oldtext, new)
            if 1:  # NOTE(review): leftover always-true debug toggle
                wikipedia.showDiff(oldtext, newtext)
                try:
                    status, reason, data = page.put(newtext,
                                                    comment='warnfile ' + mods)
                except wikipedia.LockedPage:
                    wikipedia.output(u"Page is locked. Skipping.")
                    continue
                except wikipedia.SpamfilterError, e:
                    wikipedia.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    continue
                except wikipedia.Error:
                    wikipedia.output(u"Error while saving page.")
                    continue
                # Anything other than a 302 redirect response is reported.
                if str(status) != '302':
                    print status, reason
def putAfterTemplate(page, template, toadd, loose=True):
    '''Insert *toadd* on a new line right after the first occurrence of
    {{template}} in *page*'s text, and return the resulting text.

    If the template is not found:
      * with loose=False an empty unicode string is returned (note: the
        original docstring promised False, but u'' is what callers get);
      * with loose=True the interwikis and categories are stripped,
        *toadd* is appended, and the categories and interwikis restored,
        so the addition lands before the page footer.

    Based on cc-by-sa-3.0 code by Dschwen.
    '''
    oldtext = page.get()
    newtext = u''
    templatePosition = oldtext.find(u'{{%s' % (template,))
    if templatePosition >= 0:
        # Scan for the end of the template by counting matched pairs of
        # curly braces (and square brackets for embedded links).
        previousChar = u''
        currentChar = u''
        templatePosition += 2  # skip past the opening '{{'
        curly = 1
        square = 0
        while templatePosition < len(oldtext):
            currentChar = oldtext[templatePosition]
            # NOTE(review): the previousChar = u'' resets below are
            # immediately overwritten by previousChar = currentChar, so
            # runs of three or more identical braces (e.g. {{{param}}})
            # are miscounted — kept as-is to preserve behaviour.
            if currentChar == u'[' and previousChar == u'[':
                square += 1
                previousChar = u''
            if currentChar == u']' and previousChar == u']':
                square -= 1
                previousChar = u''
            if currentChar == u'{' and previousChar == u'{':
                curly += 1
                previousChar = u''
            if currentChar == u'}' and previousChar == u'}':
                curly -= 1
                previousChar = u''
            previousChar = currentChar
            templatePosition += 1
            if curly == 0 and square <= 0:
                # Found end of template
                break
        newtext = oldtext[:templatePosition] + u'\n' + toadd + \
                  oldtext[templatePosition:]
    else:
        if loose:
            newtext = oldtext
            cats = wikipedia.getCategoryLinks(newtext)
            ll = wikipedia.getLanguageLinks(newtext)
            # BUGFIX: this result was previously assigned to a misspelled
            # variable ('nextext') and thrown away, so the old language
            # links were never removed and ended up duplicated after
            # replaceLanguageLinks below.
            newtext = wikipedia.removeLanguageLinks(newtext)
            newtext = wikipedia.removeCategoryLinks(newtext)
            newtext = newtext + u'\n' + toadd
            newtext = wikipedia.replaceCategoryLinks(newtext, cats)
            newtext = wikipedia.replaceLanguageLinks(newtext, ll)
    return newtext
def run(self):
    """Apply the hints collected from a warnfile to the affected pages.

    Variant whose compareLanguages returns a ready-made edit comment
    (mcomment) used directly as the save summary.
    """
    hints, removeHints = self.warnfileReader.getHints()
    k = hints.keys()
    k.sort()
    print "Fixing... %i pages" % len(k)
    for page in k:
        # Existing language links on the page, keyed by site.
        old = {}
        try:
            for page2 in page.interwiki():
                old[page2.site()] = page2
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect page; not changing"
                             % page.title(asLink=True))
            continue
        except pywikibot.NoPage:
            pywikibot.output(u"Page %s not found; skipping"
                             % page.title(asLink=True))
            continue
        # Start from the current links, then apply add/remove hints.
        new = {}
        new.update(old)
        if page in hints:
            for page2 in hints[page]:
                site = page2.site()
                new[site] = page2
        if page in removeHints:
            for page2 in removeHints[page]:
                site = page2.site()
                try:
                    del new[site]
                except KeyError:
                    pass
        # mods: human-readable summary; mcomment: ready-made edit comment.
        mods, mcomment, adding, removing, modifying = interwiki.compareLanguages(
            old, new, insite=page.site())
        if mods:
            pywikibot.output(page.title(asLink=True) + mods)
            oldtext = page.get()
            newtext = pywikibot.replaceLanguageLinks(oldtext, new)
            pywikibot.showDiff(oldtext, newtext)
            try:
                #TODO: special warnfile comment needed like in previous releases?
                status, reason, data = page.put(newtext, comment=mcomment)
            except pywikibot.LockedPage:
                pywikibot.output(u"Page is locked. Skipping.")
                continue
            except pywikibot.SpamfilterError, e:
                pywikibot.output(
                    u'Cannot change %s because of blacklist entry %s'
                    % (page.title(), e.url))
                continue
            # Anything other than a 302 redirect response is reported.
            if str(status) != '302':
                print status, reason
# What follows is the main part of the code. try: for pl in site.allpages(start): plname = pl.title() pywikibot.output(u'\nLoading %s...' % plname) try: oldtext = pl.get() except pywikibot.IsRedirectPage: pywikibot.output(u"%s is a redirect!" % plname) continue old = pl.interwiki() new = {} for pl2 in old: new[pl2.site()] = pl2 newtext = pywikibot.replaceLanguageLinks(oldtext, new) if new: if oldtext != newtext: pywikibot.showDiff(oldtext, newtext) # Submit changes try: status, reason, data = pl.put(newtext, comment=comm) if str(status) != '302': pywikibot.output(status, reason) except pywikibot.LockedPage: pywikibot.output(u"%s is locked" % plname) continue else: pywikibot.output(u'No changes needed.') continue else:
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.

    This combines the old instances standardizeInterwiki and
    standardizeCategories.  The page footer has the following sections in
    that sequence:
      1. categories
      2. additional information depending on local site policy
      3. stars templates for featured and good articles
      4. interwiki links
    """
    # Regex fragments (one per language) matching the "star" templates
    # that mark featured/good articles next to an interwiki link.
    starsList = [
        u"bueno",
        u"cyswllt[ _]erthygl[ _]ddethol", u"dolen[ _]ed",
        u"destacado", u"destaca[tu]",
        u"enllaç[ _]ad",
        u"enllaz[ _]ad",
        u"leam[ _]vdc",
        u"legătură[ _]a[bcf]",
        u"liamm[ _]pub",
        u"lien[ _]adq",
        u"lien[ _]ba",
        u"liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt",
        u"liên[ _]kết[ _]chọn[ _]lọc",
        u"ligam[ _]adq",
        u"ligoelstara",
        u"ligoleginda",
        u"link[ _][afgu]a", u"link[ _]adq", u"link[ _]f[lm]",
        u"link[ _]km", u"link[ _]sm", u"linkfa",
        u"na[ _]lotura",
        u"nasc[ _]ar",
        u"tengill[ _][úg]g",
        u"ua",
        u"yüm yg",
        u"רא",
        u"وصلة مقالة جيدة",
        u"وصلة مقالة مختارة",
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    hasCommentLine = False

    # The PyWikipediaBot is no longer allowed to touch categories on the German Wikipedia.
    # See http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/bis_2006#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and not "{{Personendaten" in text:
        categories = pywikibot.getCategoryLinks(text, site=self.site)

    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        # For template pages, interwikis may live on a /doc subpage; check
        # the per-language moved_links table to detect that.
        subpage = False
        if self.template:
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc != None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(text, insite=self.site,
                                                    template_subpage=subpage)

        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site=self.site)
        # Removing the stars' issue; match against the text with disabled
        # parts (comments/nowiki) stripped so stars inside them are kept.
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile("(\{\{(?:template:|)%s\|.*?\}\}[\s]*)" % star,
                               re.I)
            found = regex.findall(starstext)
            if found != []:
                if pywikibot.verbose:
                    print found
                text = regex.sub("", text)
                allstars += found

        # nn got a message between the categories and the iw's
        # and they want to keep it there, first remove it
        if self.site.language() == "nn":
            regex = re.compile(
                "(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)")
            found = regex.findall(text)
            if found:
                if pywikibot.verbose:
                    print found
                hasCommentLine = True
                text = regex.sub("", text)

    # Adding categories
    if categories:
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Put the nn iw message back
    if self.site.language() == "nn" and not self.talkpage and \
       (interwikiLinks or hasCommentLine):
        text = text + "\r\n\r\n" + nn_iw_msg
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += "%s\r\n" % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u"%s" % element.strip())
    # Adding the interwiki
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.

    This combines the old instances standardizeInterwiki and
    standardizeCategories.  The page footer has the following sections in
    that sequence:
      1. categories
      2. ## TODO: template beyond categories ##
      3. additional information depending on local site policy
      4. stars templates for featured and good articles
      5. interwiki links
    """
    # Regex fragments (one per language) matching the "star" templates
    # that mark featured/good articles next to an interwiki link.
    starsList = [
        u'bueno',
        u'bom interwiki',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligazón[ _]a[bd]',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    categories = None
    interwikiLinks = None
    allstars = []

    # The PyWikipediaBot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and '{{Personendaten' not in text and \
       '{{SORTIERUNG' not in text and '{{DEFAULTSORT' not in text and \
       self.site.lang not in ('et', 'it', 'bg', 'ru'):
        try:
            categories = pywikibot.getCategoryLinks(text, site=self.site)
        # there are categories like [[categoy:Foo {{#time:Y...}}]]
        except pywikibot.InvalidTitle:
            pass

    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        # For template pages, interwikis may live on a /doc subpage; check
        # the per-language moved_links table to detect that.
        subpage = False
        if self.template:
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc is not None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)

        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site=self.site)
        # Removing the stars' issue; match against the text with disabled
        # parts (comments/nowiki) stripped so stars inside them are kept.
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star,
                               re.I)
            found = regex.findall(starstext)
            if found != []:
                text = regex.sub('', text)
                allstars += found

    # Adding categories
    if categories:
        ##Sorting categories in alphabetic order. beta test only on Persian Wikipedia, TODO fix bug for sorting
        #if self.site.language() == 'fa':
        #    categories.sort()
        ##Taking main cats to top
        #    for name in categories:
        #        if re.search(u"(.+?)\|(.{,1}?)",name.title()) or name.title()==name.title().split(":")[0]+title:
        #            categories.remove(name)
        #            categories.insert(0, name)
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += '%s\r\n' % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u'%s' % element.strip())
    # Adding the interwiki
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
def featuredWithInterwiki(fromsite, tosite, template_on_top, pType, quiet,
                          dry=False):
    """Tag translations on *tosite* of featured/good/list articles from
    *fromsite* with the local {{Link FA|xx}}-style template.

    pType selects the article class ('good', 'list', 'former' or the
    default featured); for 'former' the template is removed instead of
    added.  Results are memoised in the module-level `cache`.

    NOTE(review): relies on module globals defined elsewhere in the file:
    cache, nocache, afterpage, interactive, msg, msg_good, msg_lists —
    confirm against the full module.
    """
    # Per (source-lang, target-lang) cache of already-processed titles.
    if not fromsite.lang in cache:
        cache[fromsite.lang] = {}
    if not tosite.lang in cache[fromsite.lang]:
        cache[fromsite.lang][tosite.lang] = {}
    cc = cache[fromsite.lang][tosite.lang]
    if nocache:
        cc = {}
    templatelist = getTemplateList(tosite.lang, pType)
    findtemplate = '(' + '|'.join(templatelist) + ')'
    # Matches any of the local template names pointing at fromsite.
    re_Link_FA = re.compile(
        ur"\{\{%s\|%s\}\}"
        % (findtemplate.replace(u' ', u'[ _]'), fromsite.lang),
        re.IGNORECASE)
    # Matches the interwiki link back to fromsite.
    re_this_iw = re.compile(ur"\[\[%s:[^]]+\]\]" % fromsite.lang)
    arts = featuredArticles(fromsite, pType)
    pairs = []  # NOTE(review): never used below
    for a in arts:
        if a.title() < afterpage:
            continue
        if u"/" in a.title() and a.namespace() != 0:
            wikipedia.output(u"%s is a subpage" % a.title())
            continue
        if a.title() in cc:
            wikipedia.output(u"(cached) %s -> %s" % (a.title(),
                                                     cc[a.title()]))
            continue
        if a.isRedirectPage():
            a = a.getRedirectTarget()
        try:
            if not a.exists():
                wikipedia.output(u"source page doesn't exist: %s"
                                 % a.title())
                continue
            atrans = findTranslated(a, tosite, quiet)
            if pType != 'former':
                # Add the template when it is not present yet.
                if atrans:
                    text = atrans.get()
                    m = re_Link_FA.search(text)
                    if m:
                        wikipedia.output(u"(already done)")
                    else:
                        # insert just before interwiki
                        if (not interactive or
                            wikipedia.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y', 'y']):
                            m = re_this_iw.search(text)
                            if not m:
                                wikipedia.output(
                                    u"no interwiki record, very strange")
                                continue
                            site = wikipedia.getSite()
                            # Pick the edit summary for the article class.
                            if pType == 'good':
                                comment = wikipedia.setAction(
                                    wikipedia.translate(site, msg_good)
                                    % (fromsite.lang, a.title()))
                            elif pType == 'list':
                                comment = wikipedia.setAction(
                                    wikipedia.translate(site, msg_lists)
                                    % (fromsite.lang, a.title()))
                            else:
                                comment = wikipedia.setAction(
                                    wikipedia.translate(site, msg)
                                    % (fromsite.lang, a.title()))
                            ### Moving {{Link FA|xx}} to top of interwikis ###
                            if template_on_top == True:
                                # Getting the interwiki
                                iw = wikipedia.getLanguageLinks(text, site)
                                # Removing the interwiki
                                text = wikipedia.removeLanguageLinks(text,
                                                                     site)
                                text += u"\r\n{{%s|%s}}\r\n" % (
                                    templatelist[0], fromsite.lang)
                                # Adding the interwiki
                                text = wikipedia.replaceLanguageLinks(text,
                                                                      iw,
                                                                      site)
                            ### Placing {{Link FA|xx}} right next to corresponding interwiki ###
                            else:
                                text = (text[:m.end()]
                                        + (u" {{%s|%s}}" % (templatelist[0],
                                                            fromsite.lang))
                                        + text[m.end():])
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except wikipedia.LockedPage:
                                    wikipedia.output(u'Page %s is locked!'
                                                     % atrans.title())
                    cc[a.title()] = atrans.title()
            else:
                # 'former' articles: remove the template if present.
                if atrans:
                    text = atrans.get()
                    m = re_Link_FA.search(text)
                    if m:
                        # insert just before interwiki
                        if (not interactive or
                            wikipedia.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y', 'y']):
                            m = re_this_iw.search(text)
                            if not m:
                                wikipedia.output(
                                    u"no interwiki record, very strange")
                                continue
                            site = wikipedia.getSite()
                            comment = wikipedia.setAction(
                                wikipedia.translate(site, msg_former)
                                % (fromsite.lang, a.title()))
                            # Remove both capitalisation variants of the
                            # first template name.
                            name = templatelist[0]
                            name2 = name[0].lower() + name[1:]
                            text = text.replace(
                                u"{{%s|%s}}" % (name, fromsite.lang), '', 1)
                            text = text.replace(
                                u"{{%s|%s}}" % (name2, fromsite.lang), '', 1)
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except wikipedia.LockedPage:
                                    wikipedia.output(u'Page %s is locked!'
                                                     % atrans.title())
                    else:
                        wikipedia.output(u"(already done)")
                    cc[a.title()] = atrans.title()
        except wikipedia.PageNotSaved, e:
            wikipedia.output(u"Page not saved")
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.

    This combines the old instances standardizeInterwiki and
    standardizeCategories.  The page footer has the following sections in
    that sequence:
      1. categories
      2. additional information depending on local site policy
      3. stars templates for featured and good articles
      4. interwiki links
    """
    # Regex fragments (one per language) matching the "star" templates
    # that mark featured/good articles next to an interwiki link.
    starsList = [
        u'bueno',
        u'bom interwiki',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    hasCommentLine = False

    # The PyWikipediaBot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and not '{{Personendaten' in text:
        categories = pywikibot.getCategoryLinks(text, site = self.site)

    if not self.talkpage:# and pywikibot.calledModuleName() <> 'interwiki':
        # For template pages, interwikis may live on a /doc subpage; check
        # the per-language moved_links table to detect that.
        subpage = False
        if self.template:
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc != None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)

        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site = self.site)
        # Removing the stars' issue; match against the text with disabled
        # parts (comments/nowiki) stripped so stars inside them are kept.
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star,
                               re.I)
            found = regex.findall(starstext)
            if found != []:
                if pywikibot.verbose:
                    print found
                text = regex.sub('', text)
                allstars += found

        # nn got a message between the categories and the iw's
        # and they want to keep it there, first remove it
        if self.site.lang in msg_interwiki:
            # msg_interwiki entries are either a plain message string or a
            # (message, removal-regex) tuple.
            iw_msg = msg_interwiki[self.site.lang]
            if isinstance(iw_msg, tuple):
                iw_reg = iw_msg[1]
                iw_msg = iw_msg[0]
            else:
                iw_reg = u'(%s)' % iw_msg
            regex = re.compile(iw_reg)
            found = regex.findall(text)
            if found:
                if pywikibot.verbose:
                    print found
                hasCommentLine = True
                text = regex.sub('', text)

    # Adding categories
    if categories:
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Put the iw message back
    # NOTE(review): iw_msg is only bound when self.site.lang was found in
    # msg_interwiki above — presumably nn and fr always are; verify.
    if not self.talkpage and \
       ((interwikiLinks or hasCommentLine) and
        self.site.language() == 'nn' or
        (interwikiLinks and hasCommentLine) and
        self.site.language() == 'fr'):
        text = text + '\r\n\r\n' + iw_msg
    # Adding stars templates
    if allstars:
        text = text.strip()+self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += '%s\r\n' % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u'%s' %element.strip())
    # Adding the interwiki
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.

    This combines the old instances standardizeInterwiki and
    standardizeCategories.  The page footer has the following sections in
    that sequence:
      1. categories
      2. additional information depending on local site policy
      3. stars templates for featured and good articles
      4. interwiki links
    """
    # Regex fragments (one per language) matching the "star" templates
    # that mark featured/good articles next to an interwiki link.
    starsList = [
        u'bueno',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    hasCommentLine = False

    # The PyWikipediaBot is no longer allowed to touch categories on the German Wikipedia.
    # See http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/bis_2006#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and not '{{Personendaten' in text:
        categories = pywikibot.getCategoryLinks(text, site=self.site)

    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        # For template pages, interwikis may live on a /doc subpage; check
        # the per-language moved_links table to detect that.
        subpage = False
        if self.template:
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc != None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)

        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site=self.site)
        # Removing the stars' issue; match against the text with disabled
        # parts (comments/nowiki) stripped so stars inside them are kept.
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile(
                '(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star, re.I)
            found = regex.findall(starstext)
            if found != []:
                if pywikibot.verbose:
                    print found
                text = regex.sub('', text)
                allstars += found

        # nn got a message between the categories and the iw's
        # and they want to keep it there, first remove it
        if self.site.language() == 'nn':
            regex = re.compile(
                '(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)')
            found = regex.findall(text)
            if found:
                if pywikibot.verbose:
                    print found
                hasCommentLine = True
                text = regex.sub('', text)

    # Adding categories
    if categories:
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Put the nn iw message back
    if self.site.language() == 'nn' and not self.talkpage and (
            interwikiLinks or hasCommentLine):
        text = text + '\r\n\r\n' + nn_iw_msg
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += '%s\r\n' % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u'%s' % element.strip())
    # Adding the interwiki
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
def featuredbot(arts, cc, tosite, template_on_top, pType, quiet, dry):
    """Tag translations on *tosite* of the articles in *arts* with the
    local {{Link FA|xx}}-style template; for pType 'former' remove it.

    cc is the (mutable) per-language-pair cache of already-processed
    titles; it is updated in place.

    NOTE(review): despite taking parameters, this still reads the module
    globals fromsite, afterpage and interactive — confirm against the
    full module; `pairs` is assigned but never used.
    """
    templatelist = getTemplateList(tosite.lang, pType)
    findtemplate = '(' + '|'.join(templatelist) + ')'
    # Matches any of the local template names pointing at fromsite.
    re_Link_FA=re.compile(ur"\{\{%s\|%s\}\}"
                          % (findtemplate.replace(u' ', u'[ _]'),
                             fromsite.lang), re.IGNORECASE)
    # Matches the interwiki link back to fromsite.
    re_this_iw=re.compile(ur"\[\[%s:[^]]+\]\]" % fromsite.lang)
    pairs=[]
    for a in arts:
        if a.title() < afterpage:
            continue
        if u"/" in a.title() and a.namespace() != 0:
            pywikibot.output(u"%s is a subpage" % a.title())
            continue
        if a.title() in cc:
            pywikibot.output(u"(cached) %s -> %s"%(a.title(), cc[a.title()]))
            continue
        if a.isRedirectPage():
            a=a.getRedirectTarget()
        try:
            if not a.exists():
                pywikibot.output(u"source page doesn't exist: %s"
                                 % a.title())
                continue
            atrans = findTranslated(a, tosite, quiet)
            if pType!='former':
                # Add the template when it is not present yet.
                if atrans:
                    text=atrans.get()
                    m=re_Link_FA.search(text)
                    if m:
                        pywikibot.output(u"(already done)")
                    else:
                        # insert just before interwiki
                        if (not interactive or
                            pywikibot.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y','y'] ):
                            m=re_this_iw.search(text)
                            if not m:
                                pywikibot.output(
                                    u"no interwiki record, very strange")
                                continue
                            site = pywikibot.getSite()
                            comment = pywikibot.setAction(
                                i18n.twtranslate(
                                    site, 'featured-' + pType,
                                    {'page': a.title(
                                        asLink=True,
                                        forceInterwiki=True)}))
                            ### Moving {{Link FA|xx}} to top of interwikis ###
                            if template_on_top == True:
                                # Getting the interwiki
                                iw = pywikibot.getLanguageLinks(text, site)
                                # Removing the interwiki
                                text = pywikibot.removeLanguageLinks(text,
                                                                     site)
                                text += u"\r\n{{%s|%s}}\r\n" % (
                                    templatelist[0], fromsite.lang)
                                # Adding the interwiki
                                text = pywikibot.replaceLanguageLinks(text,
                                                                      iw,
                                                                      site)
                            ### Placing {{Link FA|xx}} right next to corresponding interwiki ###
                            else:
                                text=(text[:m.end()]
                                      + (u" {{%s|%s}}" % (templatelist[0],
                                                          fromsite.lang))
                                      + text[m.end():])
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except pywikibot.LockedPage:
                                    pywikibot.output(u'Page %s is locked!'
                                                     % atrans.title())
                    cc[a.title()]=atrans.title()
            else:
                # 'former' articles: remove the template if present.
                if atrans:
                    text=atrans.get()
                    m=re_Link_FA.search(text)
                    if m:
                        # insert just before interwiki
                        if (not interactive or
                            pywikibot.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y','y'] ):
                            m=re_this_iw.search(text)
                            if not m:
                                pywikibot.output(
                                    u"no interwiki record, very strange")
                                continue
                            site = pywikibot.getSite()
                            comment = pywikibot.setAction(
                                i18n.twtranslate(
                                    site, 'featured-former',
                                    {'page': a.title(
                                        asLink=True,
                                        forceInterwiki=True)}))
                            text = re.sub(re_Link_FA,'',text)
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except pywikibot.LockedPage:
                                    pywikibot.output(u'Page %s is locked!'
                                                     % atrans.title())
                    else:
                        pywikibot.output(u"(already done)")
                    cc[a.title()]=atrans.title()
        except pywikibot.PageNotSaved, e:
            pywikibot.output(u"Page not saved")
def add_text(page = None, addText = None, summary = None, regexSkip = None,
             regexSkipUrl = None, always = False, up = False, putText = True,
             oldTextGiven = None):
    # Add `addText` to `page`, by default at the bottom (below body text but
    # above categories/interwiki), or at the top when `up` is True.
    #
    # Returns a (saved, always) tuple: `saved` is False when the page was
    # skipped or the save failed, `always` echoes/propagates the "apply to
    # all" choice. Raises NoEnoughData when `addText` is missing.
    #
    # NOTE(review): uses the compat `wikipedia` module and module globals
    # `msg` and `nn_iw_msg` — assumed defined elsewhere in the file.
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = wikipedia.setAction(wikipedia.translate(wikipedia.getSite(), msg) % addText)
    # When a page is tagged as "really well written" it has a star in the interwiki links.
    # This is a list of all the templates used (in regex format) to make the stars appear.
    starsList = ['link[ _]fa', 'link[ _]adq', 'enllaç[ _]ad', 'link[ _]ua', 'legătură[ _]af', 'destacado', 'ua', 'liên k[ _]t[ _]chọn[ _]lọc']
    errorCount = 0
    site = wikipedia.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        wikipedia.output(u'Loading %s...' % page.title())
    if oldTextGiven == None:
        try:
            text = page.get()
        except wikipedia.NoPage:
            wikipedia.output(u"%s doesn't exist, skip!" % page.title())
            return (False, always) # continue
        except wikipedia.IsRedirectPage:
            wikipedia.output(u"%s is a redirect, skip!" % page.title())
            return (False, always) # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        # Skip when the regex matches the rendered page (fetched by URL).
        url = '%s%s' % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            wikipedia.output(u'Exception! regex (or word) used with -exceptUrl is in the page. Skip!')
            return (False, always) # continue
    if regexSkip != None:
        # Skip when the regex matches the wikitext.
        result = re.findall(regexSkip, text)
        if result != []:
            wikipedia.output(u'Exception! regex (or word) used with -except is in the page. Skip!')
            return (False, always) # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Getting the categories
        categoriesInside = wikipedia.getCategoryLinks(newtext, site)
        # Deleting the categories
        newtext = wikipedia.removeCategoryLinks(newtext, site)
        # Getting the interwiki
        interwikiInside = wikipedia.getLanguageLinks(newtext, site)
        # Removing the interwiki
        newtext = wikipedia.removeLanguageLinks(newtext, site)
        #nn got a message between the categories and the iw's and they want to keep it there, first remove it
        if (site.language()==u'nn'):
            newtext = newtext.replace(nn_iw_msg, '')
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', '\n')
        # Adding the text
        newtext += u"\n%s" % addText
        # Reputting the categories
        newtext = wikipedia.replaceCategoryLinks(newtext, categoriesInside, site, True)
        #Put the nn iw message back
        if (site.language()==u'nn'):
            newtext = newtext + u'\n' + nn_iw_msg
        # Dealing the stars' issue
        # Star templates are pulled out and re-appended so they end up next
        # to the interwiki block added below.
        starsListInPage = list()
        for star in starsList:
            regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}\n)' % star, re.I)
            risultato = regex.findall(newtext)
            if risultato != []:
                newtext = regex.sub('', newtext)
                for element in risultato:
                    newtext += '\n%s' % element
        # Adding the interwiki
        newtext = wikipedia.replaceLanguageLinks(newtext, interwikiInside, site)
    # If instead the text must be added above...
    else:
        newtext = addText + '\n' + text
    if putText and text != newtext:
        wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
        wikipedia.showDiff(text, newtext)
    choice = ''
    # Let's put the changes.
    while 1:
        # If someone load it as module, maybe it's not so useful to put the text in the page
        if putText:
            if not always:
                choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
            if choice == 'a':
                always = True
            if choice == 'n':
                return (False, always)
            if choice == 'y' or always:
                try:
                    if always:
                        # Synchronous put so ServerError can be retried below.
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except wikipedia.EditConflict:
                    wikipedia.output(u'Edit conflict! skip!')
                    return (False, always)
                except wikipedia.ServerError:
                    errorCount += 1
                    if errorCount < 5:
                        wikipedia.output(u'Server Error! Wait..')
                        time.sleep(3)
                        continue
                    else:
                        raise wikipedia.ServerError(u'Fifth Server Error!')
                except wikipedia.SpamfilterError, e:
                    wikipedia.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url))
                    return (False, always)
                except wikipedia.PageNotSaved, error:
                    wikipedia.output(u'Error putting page: %s' % error.args)
                    return (False, always)
                except wikipedia.LockedPage:
                    wikipedia.output(u'Skipping %s (locked page)' % page.title())
                    return (False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
        return (True, always)
def add_text(page = None, addText = None, summary = None, regexSkip = None,
             regexSkipUrl = None, always = False, up = False, putText = True,
             oldTextGiven = None, reorderEnabled = True, create=False):
    # Add `addText` to `page`. Compared to the older variant above, this
    # version can create missing pages (`create`), can skip the
    # category/interwiki reordering (`reorderEnabled=False`), and returns a
    # 3-tuple (saved, saved, always).
    #
    # NOTE(review): uses module globals `nn_iw_msg` and the `i18n` module —
    # assumed defined elsewhere in the file.
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding',
                                   {'adding': addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    starsList = [
        u'bueno',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq',
        u'link[ _]f[lm]', u'link[ _]km',
        u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    errorCount = 0
    site = pywikibot.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        pywikibot.output(u'Loading %s...' % page.title())
    if oldTextGiven == None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                # Start from an empty page instead of skipping.
                pywikibot.output(u"%s doesn't exist, creating it!"
                                 % page.title())
                text = u''
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)  # continue
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!"
                             % page.title())
            return (False, False, always)  # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        url = '%s%s' % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -exceptUrl is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)  # continue
    if regexSkip != None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -except is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)  # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', '\n')
        if (reorderEnabled):
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            #newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # nn got a message between the categories and the iw's
            # and they want to keep it there, first remove it
            hasCommentLine = False
            if (site.language()==u'nn'):
                regex = re.compile('(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)')
                found = regex.findall(newtext)
                if found:
                    hasCommentLine = True
                    newtext = regex.sub('', newtext)
            # Adding the text
            newtext += u"\n%s" % addText
            # Reputting the categories
            #newtext = pywikibot.replaceCategoryLinks(newtext,
            #categoriesInside, site, True)
            #Put the nn iw message back
            if site.language()==u'nn' and (interwikiInside or hasCommentLine):
                newtext = newtext + u'\r\n\r\n' + nn_iw_msg
            # Dealing the stars' issue
            # Search disabled parts in the *original* text so commented-out
            # star templates are not duplicated.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                   % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub('', newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip()+'\r\n\r\n'
                allstars.sort()
                for element in allstars:
                    newtext += '%s\r\n' % element.strip()
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside,
                                                     site)
        else:
            # Adding the text
            newtext += u"\n%s" % addText
    # If instead the text must be added above...
    else:
        newtext = addText + '\n' + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
                if choice == 'a':
                    always = True
                elif choice == 'n':
                    return (False, False, always)
            # `choice` is only read when `always` is False (short-circuit).
            if always or choice == 'y':
                try:
                    if always:
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except pywikibot.EditConflict:
                    pywikibot.output(u'Edit conflict! skip!')
                    return (False, False, always)
                except pywikibot.ServerError:
                    errorCount += 1
                    if errorCount < 5:
                        pywikibot.output(u'Server Error! Wait..')
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u'Fifth Server Error!')
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved, error:
                    pywikibot.output(u'Error putting page: %s' % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
        return (True, True, always)
def finish(self, sa = None): """Round up the subject, making any necessary changes. This method should be called exactly once after the todo list has gone empty. This contains a shortcut: if a subject array is given in the argument sa, just before submitting a page change to the live wikipedia it is checked whether we will have to wait. If that is the case, the sa will be told to make another get request first.""" if not self.isDone(): raise "Bugcheck: finish called before done" if self.inpl.isRedirectPage(): return if not self.untranslated and globalvar.untranslatedonly: return if len(self.done) == 1: # No interwiki at all return print "======Post-processing %s======"%(self.inpl.asasciilink()) # Assemble list of accepted interwiki links if globalvar.autonomous: new = self.assemble() if new == None: # There are questions return else: new = self.assemble(returnonquestion = True) if new == None: # There are questions new = self.assemble(askall = True) if new == None: return # User said give up print "==status==" old={} try: for pl in self.inpl.interwiki(): old[pl.code()] = pl except wikipedia.NoPage: print "BUG:", self.inpl.asasciilink(), "No longer exists?" 
#### mods, removing = compareLanguages(old, new) if not mods and not globalvar.always: print "No changes needed" if globalvar.backlink: self.reportBacklinks(new) else: if mods: print "Changes to be made:",mods oldtext = self.inpl.get() newtext = wikipedia.replaceLanguageLinks(oldtext, new) if globalvar.debug: showDiff(oldtext, newtext) if newtext == oldtext: if globalvar.backlink: self.reportBacklinks(new) else: print "NOTE: Replace %s" % self.inpl.asasciilink() if globalvar.forreal: # Determine whether we need permission to submit ask = False if removing: self.problem('removing: %s'%(",".join(removing))) ask = True if globalvar.force: ask = False if globalvar.confirm: ask = True # If we need to ask, do so if ask: if globalvar.autonomous: # If we cannot ask, deny permission answer = 'n' else: if globalvar.bell: sys.stdout.write('\07') answer = raw_input('submit y/n ?') else: # If we do not need to ask, allow answer = 'y' if answer == 'y': # Check whether we will have to wait for wikipedia. If so, make # another get-query first. if sa: while wikipedia.get_throttle.waittime() + 2.0 < wikipedia.put_throttle.waittime(): print "NOTE: Performing a recursive query first to save time...." qdone = sa.oneQuery() if not qdone: # Nothing more to do break print "NOTE: Updating live wikipedia..." status, reason, data = self.inpl.put(newtext, comment='robot '+mods) if str(status) != '302': print status, reason else: if globalvar.backlink: self.reportBacklinks(new)
# What follows is the main part of the code. try: for pl in site.allpages(start): plname = pl.title() wikipedia.output(u'\nLoading %s...' % plname) try: oldtext = pl.get() except wikipedia.IsRedirectPage: wikipedia.output(u"%s is a redirect!" % plname) continue old = pl.interwiki() new = {} for pl2 in old: new[pl2.site()] = pl2 newtext = wikipedia.replaceLanguageLinks(oldtext, new) if new: if oldtext != newtext: wikipedia.showDiff(oldtext, newtext) # Submit changes try: status, reason, data = pl.put(newtext, comment=comm) if str(status) != '302': wikipedia.output(status, reason) except wikipedia.LockedPage: wikipedia.output(u"%s is locked" % plname) continue else: wikipedia.output(u'No changes needed.') continue else:
def featuredWithInterwiki(fromsite, tosite, template_on_top, pType, quiet,
                          dry = False):
    # Older `wikipedia`-module variant of the featured-article tagger: walk
    # featured articles on `fromsite`, find their translation on `tosite`,
    # and add (or, for pType 'former', remove) the {{Link FA|xx}}-style
    # template. Results are memoised in the module-level `cache`.
    #
    # NOTE(review): relies on module globals `cache`, `nocache`, `afterpage`,
    # `interactive`, `msg`, `msg_good`, `msg_lists`, `msg_former` — assumed
    # defined elsewhere in the file.
    if not fromsite.lang in cache:
        cache[fromsite.lang]={}
    if not tosite.lang in cache[fromsite.lang]:
        cache[fromsite.lang][tosite.lang]={}
    cc=cache[fromsite.lang][tosite.lang]
    if nocache:
        cc={}
    templatelist = getTemplateList(tosite.lang, pType)
    findtemplate = '(' + '|'.join(templatelist) + ')'
    # Any accepted template name, spaces/underscores interchangeable.
    re_Link_FA=re.compile(ur"\{\{%s\|%s\}\}" % (findtemplate.replace(u' ', u'[ _]'), fromsite.lang), re.IGNORECASE)
    # The interwiki link back to the source language.
    re_this_iw=re.compile(ur"\[\[%s:[^]]+\]\]" % fromsite.lang)
    arts=featuredArticles(fromsite, pType)
    pairs=[]  # NOTE(review): never used in this version; kept for parity.
    for a in arts:
        # Resume support: skip titles alphabetically before `afterpage`.
        if a.title()<afterpage:
            continue
        if u"/" in a.title() and a.namespace() != 0:
            wikipedia.output(u"%s is a subpage" % a.title())
            continue
        if a.title() in cc:
            wikipedia.output(u"(cached) %s -> %s"%(a.title(), cc[a.title()]))
            continue
        if a.isRedirectPage():
            a=a.getRedirectTarget()
        try:
            if not a.exists():
                wikipedia.output(u"source page doesn't exist: %s" % a.title())
                continue
            atrans = findTranslated(a, tosite, quiet)
            if pType!='former':
                if atrans:
                    text=atrans.get()
                    m=re_Link_FA.search(text)
                    if m:
                        wikipedia.output(u"(already done)")
                    else:
                        # insert just before interwiki
                        if (not interactive or
                            wikipedia.input(u'Connecting %s -> %s. Proceed? [Y/N]'%(a.title(), atrans.title())) in ['Y','y'] ):
                            m=re_this_iw.search(text)
                            if not m:
                                wikipedia.output(u"no interwiki record, very strange")
                                continue
                            site = wikipedia.getSite()
                            # Pick the edit summary matching the process type.
                            if pType == 'good':
                                comment = wikipedia.setAction(wikipedia.translate(site, msg_good) % (fromsite.lang, a.title()))
                            elif pType == 'list':
                                comment = wikipedia.setAction(wikipedia.translate(site, msg_lists) % (fromsite.lang, a.title()))
                            else:
                                comment = wikipedia.setAction(wikipedia.translate(site, msg) % (fromsite.lang, a.title()))
                            ### Moving {{Link FA|xx}} to top of interwikis ###
                            if template_on_top == True:
                                # Getting the interwiki
                                iw = wikipedia.getLanguageLinks(text, site)
                                # Removing the interwiki
                                text = wikipedia.removeLanguageLinks(text, site)
                                text += u"\r\n{{%s|%s}}\r\n"%(templatelist[0], fromsite.lang)
                                # Adding the interwiki
                                text = wikipedia.replaceLanguageLinks(text, iw, site)
                            ### Placing {{Link FA|xx}} right next to corresponding interwiki ###
                            else:
                                text=(text[:m.end()]
                                      + (u" {{%s|%s}}" % (templatelist[0], fromsite.lang))
                                      + text[m.end():])
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except wikipedia.LockedPage:
                                    wikipedia.output(u'Page %s is locked!' % atrans.title())
                    # Cache even when nothing changed so reruns skip it.
                    cc[a.title()]=atrans.title()
            else:
                # 'former' mode: strip the template from pages that have it.
                if atrans:
                    text=atrans.get()
                    m=re_Link_FA.search(text)
                    if m:
                        # insert just before interwiki
                        if (not interactive or
                            wikipedia.input(u'Connecting %s -> %s. Proceed? [Y/N]'%(a.title(), atrans.title())) in ['Y','y'] ):
                            m=re_this_iw.search(text)
                            if not m:
                                wikipedia.output(u"no interwiki record, very strange")
                                continue
                            site = wikipedia.getSite()
                            comment = wikipedia.setAction(wikipedia.translate(site, msg_former) % (fromsite.lang, a.title()))
                            # Remove both capitalisations of the template name,
                            # one occurrence each.
                            name=templatelist[0]
                            name2=name[0].lower()+name[1:]
                            text=text.replace(u"{{%s|%s}}" %(name, fromsite.lang),'',1)
                            text=text.replace(u"{{%s|%s}}" %(name2, fromsite.lang),'',1)
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except wikipedia.LockedPage:
                                    wikipedia.output(u'Page %s is locked!'
                                                     % atrans.title())
                    else:
                        wikipedia.output(u"(already done)")
                    cc[a.title()]=atrans.title()
        except wikipedia.PageNotSaved, e:
            wikipedia.output(u"Page not saved")
def featuredbot(arts, cc, tosite, template_on_top, pType, quiet, dry):
    # Reformatted duplicate of the `featuredbot` defined earlier in this
    # file: tag translated featured/good articles on `tosite` with the local
    # {{Link FA|xx}}-style template (or remove it when pType == 'former').
    # See the earlier definition for parameter semantics.
    #
    # NOTE(review): being a later definition with the same name, this one
    # shadows the earlier `featuredbot` at import time.
    # NOTE(review): relies on module globals `fromsite`, `afterpage`,
    # `interactive`, and the `i18n` module.
    templatelist = getTemplateList(tosite.lang, pType)
    findtemplate = '(' + '|'.join(templatelist) + ')'
    # Any accepted template name, spaces/underscores interchangeable.
    re_Link_FA = re.compile(
        ur"\{\{%s\|%s\}\}" % (findtemplate.replace(u' ', u'[ _]'),
                              fromsite.lang), re.IGNORECASE)
    # The interwiki link back to the source language.
    re_this_iw = re.compile(ur"\[\[%s:[^]]+\]\]" % fromsite.lang)
    pairs = []  # NOTE(review): never used in this version; kept for parity.
    for a in arts:
        # Resume support: skip titles alphabetically before `afterpage`.
        if a.title() < afterpage:
            continue
        if u"/" in a.title() and a.namespace() != 0:
            pywikibot.output(u"%s is a subpage" % a.title())
            continue
        if a.title() in cc:
            pywikibot.output(u"(cached) %s -> %s"
                             % (a.title(), cc[a.title()]))
            continue
        if a.isRedirectPage():
            a = a.getRedirectTarget()
        try:
            if not a.exists():
                pywikibot.output(u"source page doesn't exist: %s"
                                 % a.title())
                continue
            atrans = findTranslated(a, tosite, quiet)
            if pType != 'former':
                if atrans:
                    text = atrans.get()
                    m = re_Link_FA.search(text)
                    if m:
                        pywikibot.output(u"(already done)")
                    else:
                        # insert just before interwiki
                        if (not interactive or
                            pywikibot.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y', 'y']):
                            m = re_this_iw.search(text)
                            if not m:
                                pywikibot.output(
                                    u"no interwiki record, very strange")
                                continue
                            site = pywikibot.getSite()
                            comment = pywikibot.setAction(
                                i18n.twtranslate(
                                    site, 'featured-' + pType,
                                    {
                                        'page': a.title(asLink=True,
                                                        forceInterwiki=True)
                                    }))
                            ### Moving {{Link FA|xx}} to top of interwikis ###
                            if template_on_top == True:
                                # Getting the interwiki
                                iw = pywikibot.getLanguageLinks(text, site)
                                # Removing the interwiki
                                text = pywikibot.removeLanguageLinks(
                                    text, site)
                                text += u"\r\n{{%s|%s}}\r\n" % (
                                    templatelist[0], fromsite.lang)
                                # Adding the interwiki
                                text = pywikibot.replaceLanguageLinks(
                                    text, iw, site)
                            ### Placing {{Link FA|xx}} right next to corresponding interwiki ###
                            else:
                                text = (text[:m.end()]
                                        + (u" {{%s|%s}}" % (templatelist[0],
                                                            fromsite.lang))
                                        + text[m.end():])
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except pywikibot.LockedPage:
                                    pywikibot.output(u'Page %s is locked!'
                                                     % atrans.title())
                    # Cache even when nothing changed so reruns skip it.
                    cc[a.title()] = atrans.title()
            else:
                # 'former' mode: strip the template from pages that have it.
                if atrans:
                    text = atrans.get()
                    m = re_Link_FA.search(text)
                    if m:
                        # insert just before interwiki
                        if (not interactive or
                            pywikibot.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y', 'y']):
                            m = re_this_iw.search(text)
                            if not m:
                                pywikibot.output(
                                    u"no interwiki record, very strange")
                                continue
                            site = pywikibot.getSite()
                            comment = pywikibot.setAction(
                                i18n.twtranslate(
                                    site, 'featured-former',
                                    {
                                        'page': a.title(asLink=True,
                                                        forceInterwiki=True)
                                    }))
                            text = re.sub(re_Link_FA, '', text)
                            if not dry:
                                try:
                                    atrans.put(text, comment)
                                except pywikibot.LockedPage:
                                    pywikibot.output(u'Page %s is locked!'
                                                     % atrans.title())
                    else:
                        pywikibot.output(u"(already done)")
                    cc[a.title()] = atrans.title()
        except pywikibot.PageNotSaved, e:
            pywikibot.output(u"Page not saved")
def add_text(
    page=None,
    addText=None,
    summary=None,
    regexSkip=None,
    regexSkipUrl=None,
    always=False,
    up=False,
    putText=True,
    oldTextGiven=None,
    create=False,
):
    # `wikipedia`-module variant of add_text with page creation support and
    # the extended star-template list; returns the 3-tuple
    # (saved, saved, always).
    #
    # NOTE(review): uses module globals `msg` and `nn_iw_msg` — assumed
    # defined elsewhere in the file.
    if not addText:
        raise NoEnoughData("You have to specify what text you want to add!")
    if not summary:
        summary = wikipedia.translate(wikipedia.getSite(), msg) % addText[:200]
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    starsList = [
        u"bueno",
        u"cyswllt[ _]erthygl[ _]ddethol",
        u"dolen[ _]ed",
        u"destacado",
        u"destaca[tu]",
        u"enllaç[ _]ad",
        u"enllaz[ _]ad",
        u"leam[ _]vdc",
        u"legătură[ _]a[bcf]",
        u"liamm[ _]pub",
        u"lien[ _]adq",
        u"lien[ _]ba",
        u"liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt",
        u"liên[ _]kết[ _]chọn[ _]lọc",
        u"ligam[ _]adq",
        u"ligoelstara",
        u"ligoleginda",
        u"link[ _][afgu]a",
        u"link[ _]adq",
        u"link[ _]f[lm]",
        u"link[ _]km",
        u"link[ _]sm",
        u"linkfa",
        u"na[ _]lotura",
        u"nasc[ _]ar",
        u"tengill[ _][úg]g",
        u"ua",
        u"yüm yg",
        u"רא",
        u"وصلة مقالة جيدة",
        u"وصلة مقالة مختارة",
    ]
    errorCount = 0
    site = wikipedia.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        wikipedia.output(u"Loading %s..." % page.title())
    if oldTextGiven == None:
        try:
            text = page.get()
        except wikipedia.NoPage:
            if create:
                # Start from an empty page instead of skipping.
                wikipedia.output(u"%s doesn't exist, creating it!" % page.title())
                text = u""
            else:
                wikipedia.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)  # continue
        except wikipedia.IsRedirectPage:
            wikipedia.output(u"%s is a redirect, skip!" % page.title())
            return (False, False, always)  # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        url = "%s%s" % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            wikipedia.output(u"Exception! regex (or word) used with -exceptUrl is in the page. Skip!")
            return (False, False, always)  # continue
    if regexSkip != None:
        result = re.findall(regexSkip, text)
        if result != []:
            wikipedia.output(u"Exception! regex (or word) used with -except is in the page. Skip!")
            return (False, False, always)  # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Getting the categories
        categoriesInside = wikipedia.getCategoryLinks(newtext, site)
        # Deleting the categories
        newtext = wikipedia.removeCategoryLinks(newtext, site)
        # Getting the interwiki
        interwikiInside = wikipedia.getLanguageLinks(newtext, site)
        # Removing the interwiki
        newtext = wikipedia.removeLanguageLinks(newtext, site)
        # nn got a message between the categories and the iw's and they want to keep it there, first remove it
        if site.language() == u"nn":
            newtext = newtext.replace(nn_iw_msg, "")
        # Translating the \\n into binary \n
        addText = addText.replace("\\n", "\n")
        # Adding the text
        newtext += u"\n%s" % addText
        # Reputting the categories
        newtext = wikipedia.replaceCategoryLinks(newtext, categoriesInside, site, True)
        # Put the nn iw message back
        if site.language() == u"nn":
            newtext = newtext + u"\n" + nn_iw_msg
        # Dealing the stars' issue
        # Search disabled parts in the *original* text so commented-out star
        # templates are not duplicated.
        allstars = []
        starstext = wikipedia.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile("(\{\{(?:template:|)%s\|.*?\}\}[\s]*)" % star, re.I)
            found = regex.findall(starstext)
            if found != []:
                newtext = regex.sub("", newtext)
                allstars += found
        if allstars != []:
            newtext = newtext.strip() + "\r\n\r\n"
            allstars.sort()
            for element in allstars:
                newtext += "%s\r\n" % element.strip()
        # Adding the interwiki
        newtext = wikipedia.replaceLanguageLinks(newtext, interwikiInside, site)
    # If instead the text must be added above...
    else:
        newtext = addText + "\n" + text
    if putText and text != newtext:
        wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
        wikipedia.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = wikipedia.inputChoice(
                    u"Do you want to accept these changes?", ["Yes", "No", "All"], ["y", "N", "a"], "N"
                )
                if choice == "a":
                    always = True
                elif choice == "n":
                    return (False, False, always)
            # `choice` is only read when `always` is False (short-circuit).
            if always or choice == "y":
                try:
                    if always:
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except wikipedia.EditConflict:
                    wikipedia.output(u"Edit conflict! skip!")
                    return (False, False, always)
                except wikipedia.ServerError:
                    errorCount += 1
                    if errorCount < 5:
                        wikipedia.output(u"Server Error! Wait..")
                        time.sleep(5)
                        continue
                    else:
                        raise wikipedia.ServerError(u"Fifth Server Error!")
                except wikipedia.SpamfilterError, e:
                    wikipedia.output(u"Cannot change %s because of blacklist entry %s" % (page.title(), e.url))
                    return (False, False, always)
                except wikipedia.PageNotSaved, error:
                    wikipedia.output(u"Error putting page: %s" % error.args)
                    return (False, False, always)
                except wikipedia.LockedPage:
                    wikipedia.output(u"Skipping %s (locked page)" % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
        return (True, True, always)
def add_text(page=None, addText=None, summary=None, regexSkip=None, regexSkipUrl=None, always=False, up=False, putText=True, oldTextGiven=None, reorderEnabled=True, create=False): # When a page is tagged as "really well written" it has a star in the # interwiki links. This is a list of all the templates used (in regex # format) to make the stars appear. starsList = [ u'bueno', u'bom interwiki', u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed', u'destacado', u'destaca[tu]', u'enllaç[ _]ad', u'enllaz[ _]ad', u'leam[ _]vdc', u'legătură[ _]a[bcf]', u'liamm[ _]pub', u'lien[ _]adq', u'lien[ _]ba', u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt', u'liên[ _]kết[ _]chọn[ _]lọc', u'ligam[ _]adq', u'ligoelstara', u'ligoleginda', u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]', u'link[ _]km', u'link[ _]sm', u'linkfa', u'na[ _]lotura', u'nasc[ _]ar', u'tengill[ _][úg]g', u'ua', u'yüm yg', u'רא', u'وصلة مقالة جيدة', u'وصلة مقالة مختارة', ] errorCount = 0 site = pywikibot.getSite() pathWiki = site.family.nicepath(site.lang) site = pywikibot.getSite() if oldTextGiven is None: try: text = page.get() except pywikibot.NoPage: if create: pywikibot.output(u"%s doesn't exist, creating it!" % page.title()) text = u'' else: pywikibot.output(u"%s doesn't exist, skip!" % page.title()) return (False, False, always) except pywikibot.IsRedirectPage: pywikibot.output(u"%s is a redirect, skip!" 
% page.title()) return (False, False, always) else: text = oldTextGiven # If not up, text put below if not up: newtext = text # Translating the \\n into binary \n addText = addText.replace('\\n', '\n') if (reorderEnabled): # Getting the categories categoriesInside = pywikibot.getCategoryLinks(newtext, site) # Deleting the categories newtext = pywikibot.removeCategoryLinks(newtext, site) # Getting the interwiki interwikiInside = pywikibot.getLanguageLinks(newtext, site) # Removing the interwiki newtext = pywikibot.removeLanguageLinks(newtext, site) # Adding the text newtext += u"\n%s" % addText # Reputting the categories newtext = pywikibot.replaceCategoryLinks(newtext, categoriesInside, site, True) # Dealing the stars' issue allstars = [] starstext = pywikibot.removeDisabledParts(text) for star in starsList: regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star, re.I) found = regex.findall(starstext) if found != []: newtext = regex.sub('', newtext) allstars += found if allstars != []: newtext = newtext.strip() + '\r\n\r\n' allstars.sort() for element in allstars: newtext += '%s\r\n' % element.strip() # Adding the interwiki newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside, site) else: newtext += u"\n%s" % addText else: newtext = addText + '\n' + text if putText and text != newtext: pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) #pywikibot.showDiff(text, newtext) # Let's put the changes. while True: # If someone load it as module, maybe it's not so useful to put the # text in the page if putText: if always or choice == 'y': try: pass if always: page.put(newtext, summary, minorEdit=False) else: page.put_async(newtext, summary, minorEdit=False) except pywikibot.EditConflict: pywikibot.output(u'Edit conflict! skip!') return (False, False, always) except pywikibot.ServerError: errorCount += 1 if errorCount < 5: pywikibot.output(u'Server Error! 
Wait..') time.sleep(5) continue else: raise pywikibot.ServerError(u'Fifth Server Error!') except pywikibot.SpamfilterError, e: pywikibot.output( u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) return (False, False, always) except pywikibot.PageNotSaved, error: pywikibot.output(u'Error putting page: %s' % error.args) return (False, False, always) except pywikibot.LockedPage: pywikibot.output(u'Skipping %s (locked page)' % page.title()) return (False, False, always) else: # Break only if the errors are one after the other... errorCount = 0 return (True, True, always)
def add_text(page=None, addText=None, summary=None, regexSkip=None,
             regexSkipUrl=None, always=False, up=False, putText=True,
             oldTextGiven=None, reorderEnabled=True, create=False):
    # Newest variant of add_text: uses config.line_separator for newlines,
    # offers an "open in Browser" choice, and when putText is False returns
    # (text, newtext, always) instead of saving.
    #
    # NOTE(review): `starsList` is used below but not defined in this
    # function — assumed to be a module-level list in this version; verify.
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding',
                                   {'adding': addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    errorCount = 0
    site = pywikibot.getSite()
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        pywikibot.output(u'Loading %s...' % page.title())
    if oldTextGiven is None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                # Start from an empty page instead of skipping.
                pywikibot.output(u"%s doesn't exist, creating it!"
                                 % page.title())
                text = u''
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!"
                             % page.title())
            return (False, False, always)
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl is not None:
        url = '%s%s' % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -exceptUrl is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)
    if regexSkip is not None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -except is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', config.line_separator)
        if (reorderEnabled):
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # Adding the text
            newtext += u"%s%s" % (config.line_separator, addText)
            # Reputting the categories
            newtext = pywikibot.replaceCategoryLinks(newtext,
                                                     categoriesInside, site,
                                                     True)
            # Dealing the stars' issue
            # Search disabled parts in the *original* text so commented-out
            # star templates are not duplicated.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                   % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub('', newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip() + config.line_separator * 2
                allstars.sort()
                for element in allstars:
                    # NOTE(review): config.LS — assumed to be an alias of
                    # config.line_separator; verify.
                    newtext += '%s%s' % (element.strip(), config.LS)
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside,
                                                     site)
        else:
            newtext += u"%s%s" % (config.line_separator, addText)
    else:
        newtext = addText + config.line_separator + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'All', 'open in Browser'],
                    ['y', 'n', 'a', 'b'], 'n')
                if choice == 'a':
                    always = True
                elif choice == 'n':
                    return (False, False, always)
                elif choice == 'b':
                    # Let the user inspect the live page, then re-ask.
                    webbrowser.open("http://%s%s" % (
                        page.site.hostname(),
                        page.site.nice_get_address(page.title())
                    ))
                    pywikibot.input("Press Enter when finished in browser.")
            # `choice` is only read when `always` is False (short-circuit).
            if always or choice == 'y':
                try:
                    if always:
                        # User-talk pages (namespace 3) are saved as minor.
                        page.put(newtext, summary,
                                 minorEdit=page.namespace() != 3)
                    else:
                        page.put_async(newtext, summary,
                                       minorEdit=page.namespace() != 3)
                except pywikibot.EditConflict:
                    pywikibot.output(u'Edit conflict! skip!')
                    return (False, False, always)
                except pywikibot.ServerError:
                    errorCount += 1
                    if errorCount < config.maxretries:
                        pywikibot.output(u'Server Error! Wait..')
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u'Fifth Server Error!')
                except pywikibot.SpamfilterError as e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved as error:
                    pywikibot.output(u'Error putting page: %s' % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
            return (True, True, always)
        else:
            # Module use without saving: hand back both texts.
            return (text, newtext, always)
def featuredWithInterwiki(fromsite, tosite, template_on_top, pType, quiet):
    """Tag (or, for 'former', untag) translated featured articles on tosite.

    Walks the featured articles of ``fromsite`` and, for each one that has a
    translation on ``tosite``, adds a ``{{Link FA|xx}}``-style template (taken
    from getTemplateList) to the translated page, or removes it when
    ``pType == 'former'``.

    Relies on module globals: ``cache``/``nocache`` (per language-pair result
    cache), ``afterpage`` (resume point), ``interactive`` (ask before edits).

    :param fromsite: source wiki Site whose featured articles are enumerated.
    :param tosite: target wiki Site whose pages get the template.
    :param template_on_top: if True, place the template above the interwiki
        block; otherwise append it right after the existing template match.
    :param pType: featured-content type key ('former' triggers removal mode).
    :param quiet: passed through to findTranslated.
    """
    # Per (fromlang, tolang) cache of already-handled titles.
    if fromsite.lang not in cache:
        cache[fromsite.lang] = {}
    if tosite.lang not in cache[fromsite.lang]:
        cache[fromsite.lang][tosite.lang] = {}
    cc = cache[fromsite.lang][tosite.lang]
    if nocache:
        cc = {}
    templatelist = getTemplateList(tosite.lang, pType)
    findtemplate = '(' + '|'.join(templatelist) + ')'
    # Matches {{<any known template>|<fromsite lang>}}, tolerating '_' for ' '.
    re_Link_FA = re.compile(ur"\{\{%s\|%s\}\}"
                            % (findtemplate.replace(u' ', u'[ _]'),
                               fromsite.lang), re.IGNORECASE)
    gen = featuredArticles(fromsite, pType)
    gen = PreloadingGenerator(gen)
    for a in gen:
        # Resume support: skip titles sorting before the -after: page.
        if a.title() < afterpage:
            continue
        if u"/" in a.title() and a.namespace() != 0:
            pywikibot.output(u"%s is a subpage" % a.title())
            continue
        if a.title() in cc:
            pywikibot.output(u"(cached) %s -> %s" % (a.title(), cc[a.title()]))
            continue
        if a.isRedirectPage():
            a = a.getRedirectTarget()
        try:
            if not a.exists():
                pywikibot.output(u"source page doesn't exist: %s" % a.title())
                continue
            atrans = findTranslated(a, tosite, quiet)
            if pType != 'former':
                if atrans:
                    text = atrans.get()
                    m = re_Link_FA.search(text)
                    if m:
                        pywikibot.output(u"(already done)")
                    else:
                        # insert just before interwiki
                        if (not interactive or
                            pywikibot.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y', 'y']):
                            site = pywikibot.getSite()
                            comment = pywikibot.setAction(
                                i18n.twtranslate(site, 'featured-' + pType,
                                                 {'page': unicode(a)}))
                            ### Moving {{Link FA|xx}} to top of interwikis ###
                            if template_on_top:
                                # Getting the interwiki
                                iw = pywikibot.getLanguageLinks(text, site)
                                # Removing the interwiki
                                text = pywikibot.removeLanguageLinks(text, site)
                                text += u"\r\n{{%s|%s}}\r\n" % (templatelist[0],
                                                                fromsite.lang)
                                # Adding the interwiki
                                text = pywikibot.replaceLanguageLinks(text,
                                                                      iw, site)
                            # Placing {{Link FA|xx}} right next to
                            # corresponding interwiki
                            else:
                                # NOTE(review): on this path m is None (we are
                                # in the no-match branch above), so m.end()
                                # raises AttributeError — verify against the
                                # upstream featured.py, which re-searches for
                                # the interwiki position here.
                                text = (text[:m.end()] +
                                        (u" {{%s|%s}}" % (templatelist[0],
                                                          fromsite.lang)) +
                                        text[m.end():])
                            try:
                                atrans.put(text, comment)
                            except pywikibot.LockedPage:
                                pywikibot.output(u'Page %s is locked!'
                                                 % atrans.title())
                    cc[a.title()] = atrans.title()
            else:
                # 'former' mode: strip the template from pages that have it.
                if atrans:
                    text = atrans.get()
                    m = re_Link_FA.search(text)
                    if m:
                        # insert just before interwiki
                        if (not interactive or
                            pywikibot.input(
                                u'Connecting %s -> %s. Proceed? [Y/N]'
                                % (a.title(), atrans.title())) in ['Y', 'y']):
                            site = pywikibot.getSite()
                            comment = pywikibot.setAction(
                                i18n.twtranslate(site, 'featured-former',
                                                 {'page': unicode(a)}))
                            text = re.sub(re_Link_FA, '', text)
                            try:
                                atrans.put(text, comment)
                            except pywikibot.LockedPage:
                                pywikibot.output(u'Page %s is locked!'
                                                 % atrans.title())
                    else:
                        pywikibot.output(u"(already done)")
                    cc[a.title()] = atrans.title()
        except pywikibot.PageNotSaved:
            pywikibot.output(u"Page not saved")
def processMeaning(self, compId, namespace, activeLangs, meaning): """Updates all the pages in a given cluster.""" if self.opts.verbose: niceText = '[' + ', '.join( map(lambda p: p[0] + ':' + p[1], meaning)) + ']' print 'DEBUG: Processing meaning: %s' % (niceText, ) # Load pages pages = {} for page in meaning: lang, title = page if not lang in activeLangs: continue site = wikipedia.getSite(lang) page = wikipedia.Page(site, title, site, namespace) if self.opts.verbose: print 'DEBUG: Fetching page: %s' % (page, ) if not page.exists(): print 'WARNING: Skipping this meaning because of nonexistent page: %s' % ( page, ) return if page.isRedirectPage(): print 'WARNING: Skipping this meaning because of unexpected redirect page: %s' % ( page, ) return pages[site] = page # Process each page for page in pages.values(): interwiki = {} for p in page.interwiki(): interwiki[p.site()] = p # Find interwikis to add/change/remove add, change, remove = [], [], [] for site in interwiki: if not site in pages: remove += [str(site.language())] continue if interwiki[site] != pages[site]: change += [str(site.language())] continue otherPages = {} for site in pages: if page.site() == site: continue otherPages[site] = page for site in otherPages: if not site in interwiki: add += [str(site.language())] # Check if update needed if not add and not change and not remove: continue # Update the page comment = self.reportMods(add, change, remove) print 'INFO: page: %s %s' % (page, comment) if not self.opts.dry: text = wikipedia.replaceLanguageLinks(page.get(), otherPages) page.put(text, comment)
def add_text(page = None, addText = None, summary = None, regexSkip = None,
             regexSkipUrl = None, always = False, up = False, putText = True,
             oldTextGiven = None, reorderEnabled = True, create=False):
    """Add ``addText`` to ``page`` and (optionally) save the result.

    Legacy Python 2 revision (note the ``except X, e`` syntax below).

    :param page: wiki Page object to modify.
    :param addText: wikitext to append at the bottom (or prepend if ``up``).
        In the bottom branch only, the literal two-character sequence ``\\n``
        inside it is translated to a real newline.
    :param summary: edit summary; defaults to the i18n 'add_text-adding' text.
    :param regexSkip: skip the page when this regex matches the page text.
    :param regexSkipUrl: skip the page when this regex matches the raw HTML
        fetched from the page's URL.
    :param always: save without asking for confirmation.
    :param up: insert the text at the top instead of the bottom.
    :param putText: when False, only compute the new text (no save prompt is
        reached — see NOTE at the bottom about the missing return path).
    :param oldTextGiven: use this text instead of fetching ``page.get()``.
    :param reorderEnabled: when True, keep categories above the added text
        and interwiki links (plus "star" templates) at the very bottom.
    :param create: create the page when it does not exist yet.
    :return: tuple ``(saved, changed, always)``.
    """
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding',
                                   {'adding': addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    starsList = [
        u'bueno',
        u'bom interwiki',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    errorCount = 0
    site = pywikibot.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)

    if putText:
        pywikibot.output(u'Loading %s...' % page.title())
    # NOTE(review): `== None` — PEP 8 prefers `is None`; kept as-is here.
    if oldTextGiven == None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                pywikibot.output(u"%s doesn't exist, creating it!"
                                 % page.title())
                text = u''
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)  # continue
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!"
                             % page.title())
            return (False, False, always)  # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        url = '%s%s' % (pathWiki, page.urlname())
        # Skip test against the rendered HTML, not the wikitext.
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -exceptUrl is in the page. Skipping!
Match was: %s''' % result)
            return (False, False, always)  # continue
    if regexSkip != None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -except is in the page. Skipping!
Match was: %s''' % result)
            return (False, False, always)  # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', '\n')
        if (reorderEnabled):
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # nn got a message between the categories and the iw's
            # and they want to keep it there, first remove it
            hasCommentLine = False
            if (site.language()==u'nn'):
                regex = re.compile('(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)')
                found = regex.findall(newtext)
                if found:
                    hasCommentLine = True
                    newtext = regex.sub('', newtext)

            # Adding the text
            newtext += u"\n%s" % addText
            # Reputting the categories
            newtext = pywikibot.replaceCategoryLinks(newtext,
                                                     categoriesInside, site,
                                                     True)
            #Put the nn iw message back
            # NOTE(review): nn_iw_msg is a module-level constant defined
            # elsewhere in the file — confirm it exists in this revision.
            if site.language()==u'nn' and (interwikiInside or hasCommentLine):
                newtext = newtext + u'\r\n\r\n' + nn_iw_msg
            # Dealing the stars' issue
            # Star templates are matched against text with disabled parts
            # (comments, nowiki) stripped, then moved below everything else.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                   % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub('', newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip()+'\r\n\r\n'
                allstars.sort()
                for element in allstars:
                    newtext += '%s\r\n' % element.strip()
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside,
                                                     site)
        else:
            # Adding the text
            newtext += u"\n%s" % addText
    # If instead the text must be added above...
    else:
        newtext = addText + '\n' + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    # NOTE(review): when putText is False this loop has no exit — the
    # `return (text, newtext, always)` branch present in other revisions
    # of this function appears to be missing here.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'All', 'open in Browser'],
                    ['y', 'N', 'a', 'b'], 'N')
                if choice == 'a':
                    always = True
                elif choice == 'n':
                    return (False, False, always)
                elif choice == 'b':
                    webbrowser.open("http://%s%s" % (
                        page.site().hostname(),
                        page.site().nice_get_address(page.title())
                    ))
                    pywikibot.input("Press Enter when finished in browser.")
            if always or choice == 'y':
                try:
                    if always:
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except pywikibot.EditConflict:
                    pywikibot.output(u'Edit conflict! skip!')
                    return (False, False, always)
                except pywikibot.ServerError:
                    # Retry up to 5 consecutive server errors, then give up.
                    errorCount += 1
                    if errorCount < 5:
                        pywikibot.output(u'Server Error! Wait..')
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u'Fifth Server Error!')
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved, error:
                    pywikibot.output(u'Error putting page: %s' % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
                    return (True, True, always)
def add_text(
    page=None,
    addText=None,
    summary=None,
    regexSkip=None,
    regexSkipUrl=None,
    always=False,
    up=False,
    putText=True,
    oldTextGiven=None,
    reorderEnabled=True,
    create=False,
):
    """Add ``addText`` to ``page`` and (optionally) save the result.

    Modernized revision: ``except ... as e`` syntax, line separators and the
    retry limit taken from ``config``, and ``page.site`` used as a property.

    :param page: pywikibot Page object to modify.
    :param addText: wikitext to append at the bottom (or prepend if ``up``).
        In the bottom branch only, the literal two-character sequence ``\\n``
        inside it is translated to ``config.line_separator``.
    :param summary: edit summary; defaults to the i18n 'add_text-adding' text.
    :param regexSkip: skip the page when this regex matches the page text.
    :param regexSkipUrl: skip the page when this regex matches the raw HTML
        fetched from the page's URL.
    :param always: save without asking for confirmation.
    :param up: insert the text at the top instead of the bottom.
    :param putText: when False, do not save; return the old and new text.
    :param oldTextGiven: use this text instead of fetching ``page.get()``.
    :param reorderEnabled: when True, keep categories above the added text
        and interwiki links (plus "star" templates) at the very bottom.
    :param create: create the page when it does not exist yet.
    :return: ``(saved, changed, always)`` when ``putText`` is True,
        otherwise ``(text, newtext, always)``.
    """
    if not addText:
        raise NoEnoughData("You have to specify what text you want to add!")
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), "add_text-adding", {"adding": addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    # NOTE(review): the starsList definition itself is gone in this revision
    # but `for star in starsList:` below still uses the name — confirm that
    # starsList exists at module level, otherwise this raises NameError.
    errorCount = 0
    site = pywikibot.getSite()
    # /wiki/ is not always the right path in non-wiki projects.
    pathWiki = site.family.nicepath(site.lang)

    if putText:
        pywikibot.output(u"Loading %s..." % page.title())
    if oldTextGiven is None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                pywikibot.output(u"%s doesn't exist, creating it!" % page.title())
                text = u""
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!" % page.title())
            return (False, False, always)
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl is not None:
        url = "%s%s" % (pathWiki, page.urlname())
        # Skip test runs against the rendered HTML, not the wikitext.
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
                u"""Exception! regex (or word) used with -exceptUrl is in the page. Skip!
Match was: %s"""
                % result
            )
            return (False, False, always)
    if regexSkip is not None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
                u"""Exception! regex (or word) used with -except is in the page. Skip!
Match was: %s"""
                % result
            )
            return (False, False, always)
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace("\\n", config.line_separator)
        if reorderEnabled:
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # Adding the text
            newtext += u"%s%s" % (config.line_separator, addText)
            # Reputting the categories
            newtext = pywikibot.replaceCategoryLinks(newtext, categoriesInside, site, True)
            # Dealing the stars' issue
            # Star templates are matched against text with disabled parts
            # (comments, nowiki) stripped, then re-appended sorted at the end.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile("(\{\{(?:template:|)%s\|.*?\}\}[\s]*)" % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub("", newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip() + config.line_separator * 2
                allstars.sort()
                for element in allstars:
                    newtext += "%s%s" % (element.strip(), config.LS)
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside, site)
        else:
            newtext += u"%s%s" % (config.line_separator, addText)
    else:
        # Text must be added above the existing content.
        newtext = addText + config.line_separator + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u"Do you want to accept these changes?",
                    ["Yes", "No", "All", "open in Browser"],
                    ["y", "n", "a", "b"],
                    "n",
                )
                if choice == "a":
                    always = True
                elif choice == "n":
                    return (False, False, always)
                elif choice == "b":
                    webbrowser.open("http://%s%s" % (page.site.hostname(), page.site.nice_get_address(page.title())))
                    pywikibot.input("Press Enter when finished in browser.")
            if always or choice == "y":
                try:
                    # Talk-namespace edits (ns 3) are not marked minor.
                    if always:
                        page.put(newtext, summary, minorEdit=page.namespace() != 3)
                    else:
                        page.put_async(newtext, summary, minorEdit=page.namespace() != 3)
                except pywikibot.EditConflict:
                    pywikibot.output(u"Edit conflict! skip!")
                    return (False, False, always)
                except pywikibot.ServerError:
                    # Retry consecutive server errors up to config.maxretries.
                    errorCount += 1
                    if errorCount < config.maxretries:
                        pywikibot.output(u"Server Error! Wait..")
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u"Fifth Server Error!")
                except pywikibot.SpamfilterError as e:
                    pywikibot.output(u"Cannot change %s because of blacklist entry %s" % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved as error:
                    pywikibot.output(u"Error putting page: %s" % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u"Skipping %s (locked page)" % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
                    return (True, True, always)
        else:
            return (text, newtext, always)