def treat(self, page):
    """Add self.newcatTitle to page unless it already carries it."""
    text = self.load(page)
    if text is None:
        return
    cats = page.categories()
    # Announce the page being processed; title highlighted in purple.
    pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                     % page.title())
    pywikibot.output(u"Current categories:")
    for existing in cats:
        pywikibot.output(u"* %s" % existing.title())
    catpl = pywikibot.Page(self.site, self.newcatTitle, defaultNamespace=14)
    if catpl in cats:
        pywikibot.output(u"%s is already in %s."
                         % (page.title(), catpl.title()))
        return
    if self.sort:
        catpl = self.sorted_by_last_name(catpl, page)
    pywikibot.output(u'Adding %s' % catpl.title(asLink=True))
    cats.append(catpl)
    text = pywikibot.replaceCategoryLinks(text, cats)
    if not self.save(text, page, self.editSummary):
        pywikibot.output(u'Page %s not saved.' % page.title(asLink=True))
def treat(self, page):
    """Add the configured category to page if it is not already present."""
    text = self.load(page)
    if text is None:
        return
    cats = page.categories()
    # Show the title of the page we're working on, highlighted in purple.
    pywikibot.output(
        u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
    pywikibot.output(u"Current categories:")
    for current in cats:
        pywikibot.output(u"* %s" % current.title())
    catpl = pywikibot.Page(self.site, self.newcatTitle, defaultNamespace=14)
    if catpl not in cats:
        if self.sort:
            catpl = self.sorted_by_last_name(catpl, page)
        pywikibot.output(u'Adding %s' % catpl.title(asLink=True))
        cats.append(catpl)
        text = pywikibot.replaceCategoryLinks(text, cats)
        if not self.save(text, page, self.editSummary):
            pywikibot.output(u'Page %s not saved.'
                             % page.title(asLink=True))
    else:
        pywikibot.output(u"%s is already in %s."
                         % (page.title(), catpl.title()))
def make_categories(page, list, site=None):
    """Replace the category links on page with the named categories."""
    if site is None:
        site = wikipedia.getSite()
    # Build a Page in the category namespace for every requested name.
    pllist = [wikipedia.Page(site, "%s:%s" % (site.category_namespace(), p))
              for p in list]
    page.put_async(wikipedia.replaceCategoryLinks(page.get(), pllist),
                   comment=wikipedia.translate(site.lang, msg))
def addRemoveRepoCats(article, repos, allRepoCats, comment=None):
    """Replace every repo category on article with categories for repos.

    All categories found in allRepoCats are stripped, duplicates (by
    title) are dropped, and one category per entry of repos is
    appended.  Returns False when the article cannot be edited or has
    to be skipped.
    """
    if not article.canBeEdited():
        pywikibot.output("Can't edit %s, skipping it..." % article.aslink())
        return False
    cats = article.categories(get_redirect=True)
    site = article.site()
    changesMade = False
    newCatList = []
    newCatSet = set()
    # Strip all repo categories; keep everything else, deduplicated.
    for cat in cats:
        if cat in allRepoCats:
            changesMade = True
            continue
        if cat.title() not in newCatSet:
            newCatSet.add(cat.title())
            newCatList.append(cat)
    # Append one category per requested repo.
    # NOTE(review): repoCats is a module-level mapping not visible in
    # this block -- confirm it maps repo name -> category title.
    for repo in repos:
        newCatList.append(catlib.Category(site, 'Category:' + repoCats[repo]))
        changesMade = True
    if not changesMade:
        pywikibot.output(u'No changes necessary to %s!' % article.title())
        return
    text = article.get(get_redirect=True)
    try:
        text = pywikibot.replaceCategoryLinks(text, newCatList)
    except ValueError:
        # replaceCategoryLinks() raises ValueError on interwiki links to
        # self.  BUG FIX: previously the code printed this message and
        # then fell through to article.put() anyway; now it skips.
        pywikibot.output(u'Skipping %s because of interwiki link to self'
                         % article)
        return False
    try:
        article.put(text, comment='Addon-Bot repo category update',
                    watchArticle=None, minorEdit=True)
    except pywikibot.EditConflict:
        pywikibot.output(u'Skipping %s because of edit conflict'
                         % article.title())
    except pywikibot.SpamfilterError as e:
        pywikibot.output(u'Skipping %s because of blacklist entry %s'
                         % (article.title(), e.url))
    except pywikibot.LockedPage:
        pywikibot.output(u'Skipping %s because page is locked'
                         % article.title())
def make_categories(page, list, site=None):
    """Overwrite the category links of page with the given category names."""
    if site is None:
        site = pywikibot.getSite()
    pllist = []
    for name in list:
        # Prefix each name with the local category namespace.
        title = "%s:%s" % (site.category_namespace(), name)
        pllist.append(pywikibot.Page(site, title))
    page.put_async(pywikibot.replaceCategoryLinks(page.get(), pllist),
                   comment=i18n.twtranslate(site.lang, 'catall-changing'))
def putAfterTemplate(page, template, toadd, loose=True):
    '''
    Try to put text after template.

    The end of the template is located by counting matched {{ }} and
    [[ ]] pairs.  If the template is not found and loose is True, the
    text is inserted between the categories and the interwiki links
    instead; if loose is False an empty string is returned.

    Based on cc-by-sa-3.0 code by Dschwen
    '''
    oldtext = page.get()
    newtext = u''
    templatePosition = oldtext.find(u'{{%s' % (template, ))
    if templatePosition >= 0:
        previousChar = u''
        currentChar = u''
        templatePosition += 2
        curly = 1
        square = 0
        # Scan forward until the opening {{ has been balanced.
        while templatePosition < len(oldtext):
            currentChar = oldtext[templatePosition]
            if currentChar == u'[' and previousChar == u'[':
                square += 1
                previousChar = u''
            if currentChar == u']' and previousChar == u']':
                square -= 1
                previousChar = u''
            if currentChar == u'{' and previousChar == u'{':
                curly += 1
                previousChar = u''
            if currentChar == u'}' and previousChar == u'}':
                curly -= 1
                previousChar = u''
            previousChar = currentChar
            templatePosition += 1
            if curly == 0 and square <= 0:
                # Found end of template
                break
        newtext = (oldtext[:templatePosition] + u'\n' + toadd
                   + oldtext[templatePosition:])
    elif loose:
        newtext = oldtext
        cats = wikipedia.getCategoryLinks(newtext)
        ll = wikipedia.getLanguageLinks(newtext)
        # BUG FIX: the result of removeLanguageLinks() was previously
        # assigned to a misspelled name ("nextext") and discarded, so
        # interwiki links were duplicated when re-added below.
        newtext = wikipedia.removeLanguageLinks(newtext)
        newtext = wikipedia.removeCategoryLinks(newtext)
        newtext = newtext + u'\n' + toadd
        newtext = wikipedia.replaceCategoryLinks(newtext, cats)
        newtext = wikipedia.replaceLanguageLinks(newtext, ll)
    return newtext
def make_categories(page, list, site=None):
    """Set the categories of page to exactly the names given in list."""
    if site is None:
        site = pywikibot.getSite()
    # One category Page per requested name, in the category namespace.
    pllist = [pywikibot.Page(site, "%s:%s" % (site.category_namespace(), p))
              for p in list]
    page.put_async(pywikibot.replaceCategoryLinks(page.get(), pllist),
                   comment=i18n.twtranslate(site.lang, 'catall-changing'))
def putAfterTemplate(page, template, toadd, loose=True):
    '''
    Try to put text after template.

    The end of the template is found by counting matched {{ }} and
    [[ ]] pairs.  If the template is not found and loose is True the
    text is added between the categories and the interwiki links;
    otherwise an empty string is returned.

    Based on cc-by-sa-3.0 code by Dschwen
    '''
    oldtext = page.get()
    newtext = u''
    templatePosition = oldtext.find(u'{{%s' % (template,))
    if templatePosition >= 0:
        previousChar = u''
        currentChar = u''
        templatePosition += 2
        curly = 1
        square = 0
        # Walk the text until the opening {{ has been balanced.
        while templatePosition < len(oldtext):
            currentChar = oldtext[templatePosition]
            if currentChar == u'[' and previousChar == u'[':
                square += 1
                previousChar = u''
            if currentChar == u']' and previousChar == u']':
                square -= 1
                previousChar = u''
            if currentChar == u'{' and previousChar == u'{':
                curly += 1
                previousChar = u''
            if currentChar == u'}' and previousChar == u'}':
                curly -= 1
                previousChar = u''
            previousChar = currentChar
            templatePosition += 1
            if curly == 0 and square <= 0:
                # Found end of template
                break
        newtext = (oldtext[:templatePosition] + u'\n' + toadd
                   + oldtext[templatePosition:])
    elif loose:
        newtext = oldtext
        cats = wikipedia.getCategoryLinks(newtext)
        ll = wikipedia.getLanguageLinks(newtext)
        # BUG FIX: removeLanguageLinks() was assigned to a misspelled
        # "nextext" and its result discarded, duplicating interwikis.
        newtext = wikipedia.removeLanguageLinks(newtext)
        newtext = wikipedia.removeCategoryLinks(newtext)
        newtext = newtext + u'\n' + toadd
        newtext = wikipedia.replaceCategoryLinks(newtext, cats)
        newtext = wikipedia.replaceLanguageLinks(newtext, ll)
    return newtext
def categories(self):
    """Tag local category pages that have a same-named Commons category.

    For every page from self.generator, look up 'Category:<title>' on
    the Commons image repository; if it exists (following redirects)
    and no commons/sisterlinks template is present yet, append
    {{commonscat|<name>}} and interactively offer to save the change.
    """
    for page in self.generator:
        try:
            pywikibot.output(u'\n>>>> %s <<<<' % page.title())
            commons = pywikibot.getSite().image_repository()
            commonsCategory = catlib.Category(commons,
                                              'Category:%s' % page.title())
            try:
                # Fetch to verify existence; raises NoPage otherwise.
                getcommonscat = commonsCategory.get(get_redirect=True)
                commonsCategoryTitle = commonsCategory.title()
                categoryname = commonsCategoryTitle.split('Category:', 1)[1]
                if page.title() == categoryname:
                    oldText = page.get()
                    text = oldText
                    # Skip pages that already carry a commons* or
                    # sisterlinks template.
                    findTemplate = re.compile(ur'\{\{[Cc]ommons')
                    s = findTemplate.search(text)
                    findTemplate2 = re.compile(ur'\{\{[Ss]isterlinks')
                    s2 = findTemplate2.search(text)
                    if s or s2:
                        pywikibot.output(u'** Already done.')
                    else:
                        text = pywikibot.replaceCategoryLinks(
                            text + u'{{commonscat|%s}}' % categoryname,
                            page.categories())
                        if oldText != text:
                            pywikibot.showDiff(oldText, text)
                            if not self.acceptall:
                                choice = pywikibot.inputChoice(
                                    u'Do you want to accept these changes?',
                                    ['Yes', 'No', 'All'],
                                    ['y', 'N', 'a'], 'N')
                                if choice == 'a':
                                    self.acceptall = True
                            # choice is only read when acceptall is
                            # False, so the short-circuit below is safe.
                            if self.acceptall or choice == 'y':
                                try:
                                    msg = pywikibot.translate(
                                        pywikibot.getSite(), comment2)
                                    page.put(text, msg)
                                except pywikibot.EditConflict:
                                    pywikibot.output(
                                        u'Skipping %s because of edit '
                                        u'conflict' % (page.title()))
            except pywikibot.NoPage:
                pywikibot.output(u'Category does not exist in Commons!')
        except pywikibot.NoPage:
            pywikibot.output(u'Page %s does not exist' % page.title())
        except pywikibot.IsRedirectPage:
            pywikibot.output(u'Page %s is a redirect; skipping.'
                             % page.title())
        except pywikibot.LockedPage:
            pywikibot.output(u'Page %s is locked' % page.title())
def categories(self):
    """Add {{commonscat}} to category pages with a same-named Commons category.

    For every page from self.generator, look up 'Category:<title>' on
    Commons; if it exists (following redirects) and no commons or
    sisterlinks template is present, append {{commonscat|<name>}} and
    interactively offer to save.
    """
    for page in self.generator:
        try:
            wikipedia.output(u'\n>>>> %s <<<<' % page.title())
            getCommons = wikipedia.getSite('commons', 'commons')
            commonsCategory = catlib.Category(getCommons,
                                              'Category:%s' % page.title())
            try:
                # Fetch to verify existence; raises NoPage otherwise.
                getcommonscat = commonsCategory.get(get_redirect=True)
                commonsCategoryTitle = commonsCategory.title()
                categoryname = commonsCategoryTitle.split('Category:', 1)[1]
                if page.title() == categoryname:
                    oldText = page.get()
                    text = oldText
                    # for commonscat template
                    findTemplate = re.compile(ur'\{\{[Cc]ommons')
                    s = findTemplate.search(text)
                    findTemplate2 = re.compile(ur'\{\{[Ss]isterlinks')
                    s2 = findTemplate2.search(text)
                    if s or s2:
                        wikipedia.output(u'** Already done.')
                    else:
                        text = wikipedia.replaceCategoryLinks(
                            text + u'{{commonscat|%s}}' % categoryname,
                            page.categories())
                        if oldText != text:
                            wikipedia.showDiff(oldText, text)
                            if not self.acceptall:
                                choice = wikipedia.inputChoice(
                                    u'Do you want to accept these changes?',
                                    ['Yes', 'No', 'All'],
                                    ['y', 'N', 'a'], 'N')
                                if choice == 'a':
                                    self.acceptall = True
                            # choice is only read when acceptall is False.
                            if self.acceptall or choice == 'y':
                                try:
                                    msg = wikipedia.translate(
                                        wikipedia.getSite(), comment2)
                                    page.put(text, msg)
                                except wikipedia.EditConflict:
                                    wikipedia.output(
                                        u'Skipping %s because of edit conflict'
                                        % (page.title()))
            except wikipedia.NoPage:
                wikipedia.output(u'Category does not exist in Commons!')
        except wikipedia.NoPage:
            wikipedia.output(u'Page %s does not exist?!' % page.title())
        except wikipedia.IsRedirectPage:
            wikipedia.output(u'Page %s is a redirect; skipping.'
                             % page.title())
        except wikipedia.LockedPage:
            wikipedia.output(u'Page %s is locked?!' % page.title())
def standardizeCategories(self, text):
    """
    Makes sure that categories are put to the correct position, but
    does not sort them.
    """
    # The PyWikipediaBot is no longer allowed to touch categories on
    # the German Wikipedia.  See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/bis_2006#Position_der_Personendaten_am_.22Artikelende.22
    if self.site == wikipedia.getSite('de', 'wikipedia'):
        return text
    categories = wikipedia.getCategoryLinks(text, site=self.site)
    return wikipedia.replaceCategoryLinks(text, categories, site=self.site)
def pages(self): for page in self.generator: try: pywikibot.output(u'\n>>>> %s <<<<' % page.title()) commons = pywikibot.getSite().image_repository() commonspage = pywikibot.Page(commons, page.title()) try: getcommons = commonspage.get(get_redirect=True) if page.title() == commonspage.title(): oldText = page.get() text = oldText # for commons template findTemplate = re.compile(ur'\{\{[Cc]ommonscat') s = findTemplate.search(text) findTemplate2 = re.compile(ur'\{\{[Ss]isterlinks') s2 = findTemplate2.search(text) if s or s2: pywikibot.output(u'** Already done.') else: text = pywikibot.replaceCategoryLinks( text + u'{{commons|%s}}' % commonspage.title(), page.categories()) if oldText != text: pywikibot.showDiff(oldText, text) if not self.acceptall: choice = pywikibot.inputChoice( u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N') if choice == 'a': self.acceptall = True if self.acceptall or choice == 'y': try: msg = pywikibot.translate( pywikibot.getSite(), comment1) page.put(text, msg) except pywikibot.EditConflict: pywikibot.output( u'Skipping %s because of edit ' u'conflict' % (page.title())) except pywikibot.NoPage: pywikibot.output(u'Page does not exist in Commons!') except pywikibot.NoPage: pywikibot.output(u'Page %s does not exist?!' % page.title()) except pywikibot.IsRedirectPage: pywikibot.output(u'Page %s is a redirect; skipping.' % page.title()) except pywikibot.LockedPage: pywikibot.output(u'Page %s is locked?!' % page.title())
def categories(self):
    """Append {{commonscat}} to categories that also exist on Commons.

    Looks up 'Category:<title>' on Commons for every generated page
    and, when present without an existing commons/sisterlinks
    template, offers the change interactively.
    """
    for page in self.generator:
        try:
            wikipedia.output(u"\n>>>> %s <<<<" % page.title())
            getCommons = wikipedia.getSite("commons", "commons")
            commonsCategory = catlib.Category(getCommons,
                                              "Category:%s" % page.title())
            try:
                # Raises NoPage when the Commons category is missing.
                getcommonscat = commonsCategory.get(get_redirect=True)
                commonsCategoryTitle = commonsCategory.title()
                categoryname = commonsCategoryTitle.split("Category:", 1)[1]
                if page.title() == categoryname:
                    oldText = page.get()
                    text = oldText
                    # for commonscat template
                    findTemplate = re.compile(ur"\{\{[Cc]ommons")
                    s = findTemplate.search(text)
                    findTemplate2 = re.compile(ur"\{\{[Ss]isterlinks")
                    s2 = findTemplate2.search(text)
                    if s or s2:
                        wikipedia.output(u"** Already done.")
                    else:
                        text = wikipedia.replaceCategoryLinks(
                            text + u"{{commonscat|%s}}" % categoryname,
                            page.categories()
                        )
                        if oldText != text:
                            wikipedia.showDiff(oldText, text)
                            if not self.acceptall:
                                choice = wikipedia.inputChoice(
                                    u"Do you want to accept these changes?",
                                    ["Yes", "No", "All"],
                                    ["y", "N", "a"],
                                    "N",
                                )
                                if choice == "a":
                                    self.acceptall = True
                            # choice is only read when acceptall is False.
                            if self.acceptall or choice == "y":
                                try:
                                    msg = wikipedia.translate(
                                        wikipedia.getSite(), comment2)
                                    page.put(text, msg)
                                except wikipedia.EditConflict:
                                    wikipedia.output(
                                        u"Skipping %s because of edit conflict"
                                        % (page.title()))
            except wikipedia.NoPage:
                wikipedia.output(u"Category does not exist in Commons!")
        except wikipedia.NoPage:
            wikipedia.output(u"Page %s does not exist?!" % page.title())
        except wikipedia.IsRedirectPage:
            wikipedia.output(u"Page %s is a redirect; skipping."
                             % page.title())
        except wikipedia.LockedPage:
            wikipedia.output(u"Page %s is locked?!" % page.title())
def pages(self):
    """Append {{commons}} to pages that have a same-titled Commons page."""
    for page in self.generator:
        try:
            pywikibot.output(u"\n>>>> %s <<<<" % page.title())
            commons = pywikibot.getSite().image_repository()
            commonspage = pywikibot.Page(commons, page.title())
            try:
                # Raises NoPage when the Commons page is missing.
                getcommons = commonspage.get(get_redirect=True)
                if page.title() == commonspage.title():
                    oldText = page.get()
                    text = oldText
                    # for commons template
                    findTemplate = re.compile(ur"\{\{[Cc]ommonscat")
                    s = findTemplate.search(text)
                    findTemplate2 = re.compile(ur"\{\{[Ss]isterlinks")
                    s2 = findTemplate2.search(text)
                    if s or s2:
                        pywikibot.output(u"** Already done.")
                    else:
                        text = pywikibot.replaceCategoryLinks(
                            text + u"{{commons|%s}}" % commonspage.title(),
                            page.categories()
                        )
                        if oldText != text:
                            pywikibot.showDiff(oldText, text)
                            if not self.acceptall:
                                choice = pywikibot.inputChoice(
                                    u"Do you want to accept these changes?",
                                    ["Yes", "No", "All"],
                                    ["y", "N", "a"],
                                    "N",
                                )
                                if choice == "a":
                                    self.acceptall = True
                            # choice is only read when acceptall is False.
                            if self.acceptall or choice == "y":
                                try:
                                    msg = pywikibot.translate(
                                        pywikibot.getSite(), comment1)
                                    page.put(text, msg)
                                except pywikibot.EditConflict:
                                    pywikibot.output(
                                        u"Skipping %s because of edit "
                                        u"conflict" % (page.title()))
            except pywikibot.NoPage:
                pywikibot.output(u"Page does not exist in Commons!")
        except pywikibot.NoPage:
            pywikibot.output(u"Page %s does not exist?!" % page.title())
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"Page %s is a redirect; skipping."
                             % page.title())
        except pywikibot.LockedPage:
            pywikibot.output(u"Page %s is locked?!" % page.title())
def replaceCategories(page, oldcats, newcat):
    """Swap the two intersecting oldcats on page for newcat."""
    oldtext = page.get()
    # Titles (without namespace) of the categories being replaced.
    skip = (oldcats[0].titleWithoutNamespace(),
            oldcats[1].titleWithoutNamespace())
    newcats = [newcat]
    for cat in page.categories():
        if cat.titleWithoutNamespace() not in skip:
            newcats.append(cat)
    newtext = wikipedia.replaceCategoryLinks(oldtext, newcats)
    comment = (u'[[' + oldcats[0].title() + u']] \u2229 [['
               + oldcats[1].title()
               + u']] (and 3 levels of subcategories) -> [['
               + newcat.title() + u']]')
    wikipedia.showDiff(oldtext, newtext)
    page.put(newtext, comment)
def replaceCategories(page, oldcats, newcat):
    """Replace the two intersecting oldcats on page with newcat."""
    oldtext = page.get()
    excluded = set(c.titleWithoutNamespace() for c in oldcats[:2])
    # newcat first, then every current category that is not excluded.
    newcats = [newcat] + [cat for cat in page.categories()
                          if cat.titleWithoutNamespace() not in excluded]
    newtext = wikipedia.replaceCategoryLinks(oldtext, newcats)
    comment = (u'[[%s]] \u2229 [[%s]] (and 4 levels of subcategories) -> '
               u'[[%s]]' % (oldcats[0].title(), oldcats[1].title(),
                            newcat.title()))
    wikipedia.showDiff(oldtext, newtext)
    page.put(newtext, comment)
def add_category(article, category, comment=None, createEmptyPages=False):
    """Given an article and a category, adds the article to the category.

    If the article does not exist and createEmptyPages is True, it is
    created with the category link as its only content; otherwise the
    NoPage exception propagates.  Edit conflicts are reported and
    skipped.
    """
    cats = article.categories(get_redirect=True)
    # Idiom fix: was "if not category in cats".
    if category not in cats:
        cats.append(category)
        try:
            text = article.get()
        except pywikibot.NoPage:
            if createEmptyPages:
                text = ""
            else:
                raise
        text = pywikibot.replaceCategoryLinks(text, cats)
        try:
            article.put(text, comment=comment)
        except pywikibot.EditConflict:
            pywikibot.output(u"Skipping %s because of edit conflict"
                             % article.title())
def pages(self):
    """Append {{commons}} to pages that exist with the same title on Commons."""
    for page in self.generator:
        try:
            wikipedia.output(u'\n>>>> %s <<<<' % page.title())
            commons = wikipedia.getSite('commons', 'commons')
            commonspage = wikipedia.Page(commons, page.title())
            try:
                # Raises NoPage when the Commons page is missing.
                getcommons = commonspage.get(get_redirect=True)
                if page.title() == commonspage.title():
                    oldText = page.get()
                    text = oldText
                    # for commons template
                    findTemplate = re.compile(ur'\{\{[Cc]ommonscat')
                    s = findTemplate.search(text)
                    findTemplate2 = re.compile(ur'\{\{[Ss]isterlinks')
                    s2 = findTemplate2.search(text)
                    if s or s2:
                        wikipedia.output(u'** Already done.')
                    else:
                        text = wikipedia.replaceCategoryLinks(
                            text + u'{{commons|%s}}' % commonspage.title(),
                            page.categories())
                        if oldText != text:
                            wikipedia.showDiff(oldText, text)
                            if not self.acceptall:
                                choice = wikipedia.inputChoice(
                                    u'Do you want to accept these changes?',
                                    ['Yes', 'No', 'All'],
                                    ['y', 'N', 'a'], 'N')
                                if choice == 'a':
                                    self.acceptall = True
                            # choice is only read when acceptall is False.
                            if self.acceptall or choice == 'y':
                                try:
                                    msg = wikipedia.translate(
                                        wikipedia.getSite(), comment1)
                                    page.put(text, msg)
                                except wikipedia.EditConflict:
                                    wikipedia.output(
                                        u'Skipping %s because of edit conflict'
                                        % (page.title()))
            except wikipedia.NoPage:
                wikipedia.output(u'Page does not exist in Commons!')
        except wikipedia.NoPage:
            wikipedia.output(u'Page %s does not exist?!' % page.title())
        except wikipedia.IsRedirectPage:
            wikipedia.output(u'Page %s is a redirect; skipping.'
                             % page.title())
        except wikipedia.LockedPage:
            wikipedia.output(u'Page %s is locked?!' % page.title())
def add_category(article, category, comment=None, createEmptyPages=False):
    """Given an article and a category, adds the article to the category.

    When the article is missing and createEmptyPages is True it is
    created containing only the category link; otherwise NoPage
    propagates to the caller.
    """
    cats = article.categories(get_redirect=True)
    # Idiom fix: was "if not category in cats".
    if category not in cats:
        cats.append(category)
        try:
            text = article.get()
        except pywikibot.NoPage:
            if createEmptyPages:
                text = ''
            else:
                raise
        text = pywikibot.replaceCategoryLinks(text, cats)
        try:
            article.put(text, comment=comment)
        except pywikibot.EditConflict:
            pywikibot.output(u'Skipping %s because of edit conflict'
                             % article.title())
def categorizeImage(page, conn, cursor):
    """Categorise a Rijksmonument image based on its monument id.

    Reads the id from the {{Rijksmonument}} template, looks the
    monument up in the database and replaces the image's categories
    (keeping current ones except Category:Rijksmonumenten) with the
    looked-up categories.  Returns False when no valid id or list is
    found.
    """
    wikipedia.output(u'Working on: %s' % page.title())
    templates = page.templates()
    # PERF FIX: reuse the templates already fetched above instead of
    # calling page.templates() a second time.
    if u'Rijksmonument' not in templates:
        wikipedia.output(u'Rijksmonument template not found at: %s'
                         % page.title())
        return False
    rijksmonumentid = -1
    for (template, params) in page.templatesWithParams():
        if template == u'Rijksmonument':
            if len(params) == 1:
                try:
                    rijksmonumentid = int(params[0])
                except ValueError:
                    wikipedia.output(u'Unable to extract a valid id')
            # Only the first Rijksmonument template is considered.
            break
    if rijksmonumentid < 0 or 600000 < rijksmonumentid:
        wikipedia.output(u'Invalid id')
        return False
    rijksmonumentenLijst = getList(rijksmonumentid, conn, cursor)
    if not rijksmonumentenLijst:
        return False
    oldtext = page.get()
    currentcats = page.categories()
    newcats = getCategories(rijksmonumentenLijst)
    if newcats:
        # Keep current categories except the generic container.
        for currentcat in currentcats:
            if currentcat.title() != u'Category:Rijksmonumenten':
                newcats.append(currentcat)
        # Remove dupes
        newcats = list(set(newcats))
        newtext = wikipedia.replaceCategoryLinks(oldtext, newcats)
        comment = u'Adding categories based on Rijksmonument identifier'
        wikipedia.showDiff(oldtext, newtext)
        page.put(newtext, comment)
def categorizeImage(page, conn, cursor):
    """Categorise a Rijksmonument image via its monument identifier.

    Extracts the id from {{Rijksmonument}}, queries the database and
    rewrites the category links, keeping existing categories other
    than Category:Rijksmonumenten.  Returns False on any lookup
    failure.
    """
    wikipedia.output(u'Working on: %s' % page.title())
    templates = page.templates()
    # PERF FIX: use the result fetched above; the original called
    # page.templates() twice.
    if u'Rijksmonument' not in templates:
        wikipedia.output(u'Rijksmonument template not found at: %s'
                         % page.title())
        return False
    rijksmonumentid = -1
    for (template, params) in page.templatesWithParams():
        if template == u'Rijksmonument':
            if len(params) == 1:
                try:
                    rijksmonumentid = int(params[0])
                except ValueError:
                    wikipedia.output(u'Unable to extract a valid id')
            # Stop at the first Rijksmonument template.
            break
    if rijksmonumentid < 0 or 600000 < rijksmonumentid:
        wikipedia.output(u'Invalid id')
        return False
    rijksmonumentenLijst = getList(rijksmonumentid, conn, cursor)
    if not rijksmonumentenLijst:
        return False
    oldtext = page.get()
    currentcats = page.categories()
    newcats = getCategories(rijksmonumentenLijst)
    if newcats:
        for currentcat in currentcats:
            if currentcat.title() != u'Category:Rijksmonumenten':
                newcats.append(currentcat)
        # Remove dupes
        newcats = list(set(newcats))
        newtext = wikipedia.replaceCategoryLinks(oldtext, newcats)
        comment = u'Adding categories based on Rijksmonument identifier'
        wikipedia.showDiff(oldtext, newtext)
        page.put(newtext, comment)
def include(pl, checklinks=True, realinclude=True, linkterm=None):
    """Add pl to the working category unless a parent category covers it.

    When linkterm is given the category link gets that sort key.  When
    checklinks is true (or pl turns out to be a redirect) the pages
    linked from / linking to pl are queued for checking as well.
    Relies on module globals: mysite, workingcat, parentcats,
    removeparent, checkforward, checkbackward, tocheck, checked,
    needcheck.
    """
    cl = checklinks
    if linkterm:
        actualworkingcat = catlib.Category(mysite, workingcat.title(),
                                           sortKey=linkterm)
    else:
        actualworkingcat = workingcat
    if realinclude:
        try:
            text = pl.get()
        except pywikibot.NoPage:
            pass
        except pywikibot.IsRedirectPage:
            # Redirects are not edited, but their links still get checked.
            cl = True
            pass
        else:
            cats = pl.categories()
            if workingcat not in cats:
                cats = pl.categories()
                # If a parent category is already present, optionally
                # move the page from it; otherwise (for-else) append
                # the working category.
                for c in cats:
                    if c in parentcats:
                        if removeparent:
                            catlib.change_category(pl, c, actualworkingcat)
                        break
                else:
                    pl.put(
                        pywikibot.replaceCategoryLinks(
                            text, cats + [actualworkingcat]))
    if cl:
        if checkforward:
            # Queue outgoing links for checking.
            for page2 in pl.linkedPages():
                if needcheck(page2):
                    tocheck.append(page2)
                    checked[page2] = page2
        if checkbackward:
            # Queue incoming links for checking.
            for refPage in pl.getReferences():
                if needcheck(refPage):
                    tocheck.append(refPage)
                    checked[refPage] = refPage
def main(args):
    '''
    Main loop.
    '''
    site = wikipedia.getSite(u'commons', u'commons')
    wikipedia.setSite(site)
    (conn, cursor) = connectDatabase()
    (conn2, cursor2) = connectDatabase2('commonswiki-p.db.toolserver.org',
                                        u'commonswiki_p')
    imageSet = getImagesToCorrect(cursor2)
    for (pageName, fileId) in imageSet:
        wikipedia.output(pageName)
        # Skip rows with an empty page name or file id.
        if pageName == u'' or fileId == u'':
            continue
        # Get page contents
        page = wikipedia.Page(site, pageName)
        if not page.exists():
            continue
        categories = page.categories()
        # Get metadata
        metadata = getMetadata(fileId, cursor)
        # Check if we got metadata
        if not metadata:
            continue
        # Get description
        description = getDescription(metadata)
        description = wikipedia.replaceCategoryLinks(
            description, categories, site)
        comment = u'Fixing description of Geograph image with broken template'
        wikipedia.output(description)
        page.put(description, comment)
def include(pl, checklinks=True, realinclude=True, linkterm=None):
    """Add pl to the working category unless a parent category handles it.

    linkterm, when given, becomes the sort key of the category link.
    When checklinks is true (or pl is a redirect) the pages linked
    from / linking to pl are queued for checking.  Relies on module
    globals: mysite, workingcat, parentcats, removeparent,
    checkforward, checkbackward, tocheck, checked, needcheck.
    """
    cl = checklinks
    if linkterm:
        actualworkingcat = catlib.Category(mysite, workingcat.title(),
                                           sortKey=linkterm)
    else:
        actualworkingcat = workingcat
    if realinclude:
        try:
            text = pl.get()
        except pywikibot.NoPage:
            pass
        except pywikibot.IsRedirectPage:
            # Redirects are not edited; their links still get checked.
            cl = True
            pass
        else:
            cats = pl.categories()
            if not workingcat in cats:
                cats = pl.categories()
                # If a parent category is already present, optionally
                # move the page from it; otherwise (for-else) append
                # the working category.
                for c in cats:
                    if c in parentcats:
                        if removeparent:
                            catlib.change_category(pl, c, actualworkingcat)
                        break
                else:
                    pl.put(pywikibot.replaceCategoryLinks(
                        text, cats + [actualworkingcat]))
    if cl:
        if checkforward:
            # Queue outgoing links for checking.
            for page2 in pl.linkedPages():
                if needcheck(page2):
                    tocheck.append(page2)
                    checked[page2] = page2
        if checkbackward:
            # Queue incoming links for checking.
            for refPage in pl.getReferences():
                if needcheck(refPage):
                    tocheck.append(refPage)
                    checked[refPage] = refPage
def main(args):
    '''
    Main loop.
    '''
    site = wikipedia.getSite(u'commons', u'commons')
    wikipedia.setSite(site)
    (conn, cursor) = connectDatabase()
    (conn2, cursor2) = connectDatabase2('commonswiki-p.db.toolserver.org',
                                        u'commonswiki_p')
    imageSet = getImagesToCorrect(cursor2)
    for (pageName, fileId) in imageSet:
        wikipedia.output(pageName)
        # Guard clauses replace the original nested ifs.
        if pageName == u'' or fileId == u'':
            continue
        page = wikipedia.Page(site, pageName)
        if not page.exists():
            continue
        categories = page.categories()
        # Fetch the Geograph metadata for this file.
        metadata = getMetadata(fileId, cursor)
        if not metadata:
            continue
        description = getDescription(metadata)
        description = wikipedia.replaceCategoryLinks(description,
                                                     categories, site)
        comment = u'Fixing description of Geograph image with broken template'
        wikipedia.output(description)
        page.put(description, comment)
def treat(self, page):
    """
    Adds the page to the appropriate birth and death year categories.

    Years are taken, in order of preference, from known biography
    templates, then from a first set of regexes against the full
    article text, then from a second set against the header line.
    Missing categories are created afterwards.
    """
    pywikibot.output(u"Processing page %s..." % page.title(asLink=True))
    text = self.load(page)
    if not text:
        return
    # Just print beginning of the article; strip full ref-tags
    reMatcher = re.compile(u"(?:\A|\n)([^\n|{]*?'''.*?)(\n|\Z)")
    match = reMatcher.search(text)
    headertext = None
    if match:
        headertext = re.sub('<ref[^/]*?>.*?</ref>', '', match.group(1))
        pywikibot.output("\n---- CLIP ---\n%s\n--- CLIP ---\n" % headertext)
    else:
        pywikibot.output("\n---- CLIP ---\n%s\n--- CLIP ---\n" % text)
    cats = page.categories()
    addCats = []
    newCatContent = {}
    foundTemplates = False
    for tmpl, params in page.templatesWithParams():
        # Filter out birthplace fields
        numParams = filter(lambda s: s.isdigit(), params)
        for bdCat, bdCatInfo in self.bdCats.iteritems():
            if tmpl in bdCatInfo['templates']:
                pywikibot.output(u'Found template {{%s|%s}}'
                                 % (tmpl, u'|'.join(params)))
                foundTemplates = True
                try:
                    # The template map stores the 1-based index of the
                    # year parameter.
                    year = numParams[bdCatInfo['templates'][tmpl] - 1].strip()
                except IndexError:
                    continue
                sortKey = self.getSortKey(cats, text)
                newCat = catlib.Category(self.site, bdCat % year,
                                         sortKey=sortKey)
                if newCat in cats or newCat in addCats:
                    pywikibot.output(u"%s is already in %s."
                                     % (page.title(), newCat.title()))
                else:
                    addCats.append(newCat)
                    newCatContent[newCat.title()] = bdCatInfo['newcat'](
                        year)
    # First set of regex against full articletext
    if not addCats and not foundTemplates:
        for bdCat, bdCatInfo in self.bdCats.iteritems():
            for reMatcher in bdCatInfo['re1']:
                match = reMatcher.search(text)
                if match:
                    pywikibot.output(u"%s" % match.groups())
                    year = match.group(1)
                    pywikibot.output(u'Found %s in %s'
                                     % (year, match.group(0)))
                    sortKey = self.getSortKey(cats, text)
                    newCat = catlib.Category(self.site, bdCat % year,
                                             sortKey=sortKey)
                    if newCat in cats or newCat in addCats:
                        pywikibot.output(u"%s is already in %s."
                                         % (page.title(), newCat.title()))
                    else:
                        addCats.append(newCat)
                        newCatContent[
                            newCat.title()] = bdCatInfo['newcat'](year)
                    break
    # second set of regex against the header text if available. If not
    # then failback to full text
    if not addCats and not foundTemplates:
        for bdCat, bdCatInfo in self.bdCats.iteritems():
            for reMatcher in bdCatInfo['re2']:
                if headertext:
                    match = reMatcher.search(headertext)
                else:
                    match = reMatcher.search(text)
                if match:
                    pywikibot.output(u"%s" % match.groups())
                    year = match.group(1)
                    pywikibot.output(u'Found %s in %s'
                                     % (year, match.group(0)))
                    sortKey = self.getSortKey(cats, text)
                    newCat = catlib.Category(self.site, bdCat % year,
                                             sortKey=sortKey)
                    if newCat in cats or newCat in addCats:
                        pywikibot.output(u"%s is already in %s."
                                         % (page.title(), newCat.title()))
                    else:
                        addCats.append(newCat)
                        newCatContent[
                            newCat.title()] = bdCatInfo['newcat'](year)
                    break
    if addCats:
        cats.extend(addCats)
        # This will Add category living people if needed
        birth, death = self.getBirthDeathFromCats(cats, 'fi')
        if birth > 1885 and death == None:
            # sortKey is still bound from the match that filled addCats.
            newCat = catlib.Category(self.site, u'Elävät henkilöt',
                                     sortKey=sortKey)
            if newCat not in cats:
                addCats.append(newCat)
                cats.append(newCat)
        text = pywikibot.replaceCategoryLinks(text, cats)
        summary = self.summary % ', '.join(
            map(lambda c: c.title(asLink=True), addCats))
        if not self.save(text, page, summary):
            pywikibot.output(u'Page %s not saved.'
                             % page.title(asLink=True))
            return
        # Create any of the added categories that do not exist yet.
        for cat in addCats:
            if not cat.exists():
                pywikibot.output(u'%s should be created.' % (cat.title()))
                if not self.save(newCatContent[cat.title()], cat,
                                 self.summaryNewCat, minorEdit=False):
                    pywikibot.output(u'%s not created.'
                                     % cat.title(asLink=True))
                time.sleep(2)
def add_category(sort_by_last_name=False, create_pages=False):
    """A robot to mass-add a category to a list of pages.

    Iterates over the module-level generator ``gen``, asks the
    operator for the category title to add and, per page (or for all
    pages at once via 'a'), whether to tag it.  Missing pages are
    created when create_pages is True.
    """
    site = pywikibot.getSite()
    if not gen:
        return
    newcatTitle = pywikibot.input(u"Category to add (do not give namespace):")
    if not site.nocapitalize:
        newcatTitle = newcatTitle[:1].capitalize() + newcatTitle[1:]
    # set edit summary message
    editSummary = pywikibot.translate(site, msg_add) % newcatTitle
    cat_namespace = site.category_namespaces()[0]
    answer = ""
    for page in gen:
        if answer != "a":
            answer = ""
            while answer not in ("y", "n", "a"):
                answer = pywikibot.inputChoice(u"%s" % (page.aslink()),
                                               ["Yes", "No", "All"],
                                               ["y", "n", "a"], "n")
                if answer == "a":
                    confirm = pywikibot.inputChoice(
                        u"""\
This should be used if and only if you are sure that your links are correct! Are you sure?""",
                        ["Yes", "No"], ["y", "n"], "n",
                    )
                    if confirm == "n":
                        answer = ""
        if answer in ("y", "a"):
            try:
                text = page.get()
            except pywikibot.NoPage:
                if create_pages:
                    pywikibot.output(u"%s doesn't exist yet. Creating."
                                     % (page.title()))
                    text = ""
                else:
                    pywikibot.output(u"%s doesn't exist yet. Ignoring."
                                     % (page.title()))
                    continue
            except pywikibot.IsRedirectPage as arg:
                # SYNTAX FIX: was Py2-only "except ..., arg".
                redirTarget = pywikibot.Page(site, arg.args[0])
                pywikibot.output(u"WARNING: %s is redirect to %s. Ignoring."
                                 % (page.title(), redirTarget.title()))
                continue
            cats = page.categories()
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                             % page.title())
            pywikibot.output(u"Current categories:")
            for cat in cats:
                pywikibot.output(u"* %s" % cat.title())
            catpl = pywikibot.Page(site, cat_namespace + ":" + newcatTitle)
            if sort_by_last_name:
                catpl = sorted_by_last_name(catpl, page)
            if catpl in cats:
                pywikibot.output(u"%s is already in %s."
                                 % (page.title(), catpl.title()))
            else:
                pywikibot.output(u"Adding %s" % catpl.aslink())
                cats.append(catpl)
                text = pywikibot.replaceCategoryLinks(text, cats)
                try:
                    page.put(text, comment=editSummary)
                except pywikibot.EditConflict:
                    pywikibot.output(u"Skipping %s because of edit conflict"
                                     % (page.title()))
def add_text(
    page=None,
    addText=None,
    summary=None,
    regexSkip=None,
    regexSkipUrl=None,
    always=False,
    up=False,
    putText=True,
    oldTextGiven=None,
    create=False,
):
    """Add ``addText`` to a page, below (default) or above the existing text.

    When adding below, categories and interwiki links are temporarily removed
    so the new text lands above them, then they are re-appended in standard
    footer order (text, categories, star templates, interwikis).

    @param page: the Page to edit (required when putText is True).
    @param addText: wikitext to add; literal "\\n" sequences become newlines.
    @param summary: edit summary; derived from addText when omitted.
    @param regexSkip / regexSkipUrl: skip the page if the regex matches the
        page text / the raw page URL content.
    @param always: save without asking; "All" answers set this for the run.
    @param up: if True, prepend instead of append.
    @param putText: if False, compute only (page is not saved here).
    @param oldTextGiven: use this text instead of fetching the page.
    @param create: create the page if it does not exist.
    @return: tuple (saved, changed, always).
    @raise NoEnoughData: if addText is empty.
    """
    if not addText:
        raise NoEnoughData("You have to specify what text you want to add!")
    if not summary:
        summary = wikipedia.translate(wikipedia.getSite(), msg) % addText[:200]
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    starsList = [
        u"bueno",
        u"cyswllt[ _]erthygl[ _]ddethol",
        u"dolen[ _]ed",
        u"destacado",
        u"destaca[tu]",
        u"enllaç[ _]ad",
        u"enllaz[ _]ad",
        u"leam[ _]vdc",
        u"legătură[ _]a[bcf]",
        u"liamm[ _]pub",
        u"lien[ _]adq",
        u"lien[ _]ba",
        u"liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt",
        u"liên[ _]kết[ _]chọn[ _]lọc",
        u"ligam[ _]adq",
        u"ligoelstara",
        u"ligoleginda",
        u"link[ _][afgu]a",
        u"link[ _]adq",
        u"link[ _]f[lm]",
        u"link[ _]km",
        u"link[ _]sm",
        u"linkfa",
        u"na[ _]lotura",
        u"nasc[ _]ar",
        u"tengill[ _][úg]g",
        u"ua",
        u"yüm yg",
        u"רא",
        u"وصلة مقالة جيدة",
        u"وصلة مقالة مختارة",
    ]
    errorCount = 0
    site = wikipedia.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        wikipedia.output(u"Loading %s..." % page.title())
    if oldTextGiven == None:
        try:
            text = page.get()
        except wikipedia.NoPage:
            if create:
                wikipedia.output(u"%s doesn't exist, creating it!" % page.title())
                text = u""
            else:
                wikipedia.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)  # continue
        except wikipedia.IsRedirectPage:
            wikipedia.output(u"%s is a redirect, skip!" % page.title())
            return (False, False, always)  # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        url = "%s%s" % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            wikipedia.output(u"Exception! regex (or word) used with -exceptUrl is in the page. Skip!")
            return (False, False, always)  # continue
    if regexSkip != None:
        result = re.findall(regexSkip, text)
        if result != []:
            wikipedia.output(u"Exception! regex (or word) used with -except is in the page. Skip!")
            return (False, False, always)  # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Getting the categories
        categoriesInside = wikipedia.getCategoryLinks(newtext, site)
        # Deleting the categories
        newtext = wikipedia.removeCategoryLinks(newtext, site)
        # Getting the interwiki
        interwikiInside = wikipedia.getLanguageLinks(newtext, site)
        # Removing the interwiki
        newtext = wikipedia.removeLanguageLinks(newtext, site)
        # nn got a message between the categories and the iw's and they want to keep it there, first remove it
        # NOTE(review): nn_iw_msg is assumed to be a module-level constant -- confirm.
        if site.language() == u"nn":
            newtext = newtext.replace(nn_iw_msg, "")
        # Translating the \\n into binary \n
        addText = addText.replace("\\n", "\n")
        # Adding the text
        newtext += u"\n%s" % addText
        # Reputting the categories
        newtext = wikipedia.replaceCategoryLinks(newtext, categoriesInside, site, True)
        # Put the nn iw message back
        if site.language() == u"nn":
            newtext = newtext + u"\n" + nn_iw_msg
        # Dealing the stars' issue
        # Star templates are matched against the text with disabled parts
        # (comments, nowiki) removed, but substituted out of the live text.
        allstars = []
        starstext = wikipedia.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile("(\{\{(?:template:|)%s\|.*?\}\}[\s]*)" % star, re.I)
            found = regex.findall(starstext)
            if found != []:
                newtext = regex.sub("", newtext)
                allstars += found
        if allstars != []:
            # Re-append the stars sorted, just above the interwikis.
            newtext = newtext.strip() + "\r\n\r\n"
            allstars.sort()
            for element in allstars:
                newtext += "%s\r\n" % element.strip()
        # Adding the interwiki
        newtext = wikipedia.replaceLanguageLinks(newtext, interwikiInside, site)
    # If instead the text must be added above...
    else:
        newtext = addText + "\n" + text
    if putText and text != newtext:
        wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
        wikipedia.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        # NOTE(review): when putText is False this loop never returns --
        # looks like an infinite loop; other variants of this function
        # return (text, newtext, always) in that case. Verify.
        if putText:
            if not always:
                choice = wikipedia.inputChoice(
                    u"Do you want to accept these changes?", ["Yes", "No", "All"], ["y", "N", "a"], "N"
                )
                if choice == "a":
                    always = True
                elif choice == "n":
                    return (False, False, always)
            if always or choice == "y":
                try:
                    if always:
                        # Synchronous save in unattended mode so server
                        # errors can be caught and retried below.
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except wikipedia.EditConflict:
                    wikipedia.output(u"Edit conflict! skip!")
                    return (False, False, always)
                except wikipedia.ServerError:
                    # Retry up to 5 times with a short pause.
                    errorCount += 1
                    if errorCount < 5:
                        wikipedia.output(u"Server Error! Wait..")
                        time.sleep(5)
                        continue
                    else:
                        raise wikipedia.ServerError(u"Fifth Server Error!")
                except wikipedia.SpamfilterError, e:
                    wikipedia.output(u"Cannot change %s because of blacklist entry %s"
                                     % (page.title(), e.url))
                    return (False, False, always)
                except wikipedia.PageNotSaved, error:
                    wikipedia.output(u"Error putting page: %s" % error.args)
                    return (False, False, always)
                except wikipedia.LockedPage:
                    wikipedia.output(u"Skipping %s (locked page)" % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
                    return (True, True, always)
def add_text(page=None, addText=None, summary=None, regexSkip=None,
             regexSkipUrl=None, always=False, up=False, putText=True,
             oldTextGiven=None, reorderEnabled=True, create=False):
    """Add ``addText`` to a page, below (default) or above the existing text.

    With reorderEnabled the standard footer order is restored afterwards
    (text, categories, star templates, interwikis); otherwise the text is
    simply appended.

    @param page: the Page to edit (required when putText is True).
    @param addText: wikitext to add; literal "\\n" becomes a line separator.
    @param summary: edit summary; derived from addText when omitted.
    @param regexSkip / regexSkipUrl: skip the page when the regex matches
        the page text / the raw page URL content.
    @param always: save without asking.
    @param up: if True, prepend instead of append.
    @param putText: if False, only compute; returns (text, newtext, always).
    @param oldTextGiven: use this text instead of fetching the page.
    @param reorderEnabled: re-sort categories/stars/interwikis after adding.
    @param create: create the page if it does not exist.
    @return: (saved, changed, always) or (text, newtext, always).
    @raise NoEnoughData: if addText is empty.
    """
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding',
                                   {'adding': addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    # NOTE(review): starsList itself is not defined in this function; it is
    # presumably a module-level list -- confirm it exists at module scope.
    errorCount = 0
    site = pywikibot.getSite()
    # /wiki/ is not always the right path in non-wiki projects.
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        pywikibot.output(u'Loading %s...' % page.title())
    if oldTextGiven is None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                pywikibot.output(u"%s doesn't exist, creating it!"
                                 % page.title())
                text = u''
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!" % page.title())
            return (False, False, always)
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl is not None:
        url = '%s%s' % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
                u'''Exception! regex (or word) used with -exceptUrl is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)
    if regexSkip is not None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
                u'''Exception! regex (or word) used with -except is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', config.line_separator)
        if (reorderEnabled):
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # Adding the text
            newtext += u"%s%s" % (config.line_separator, addText)
            # Reputting the categories
            newtext = pywikibot.replaceCategoryLinks(newtext,
                                                     categoriesInside,
                                                     site, True)
            # Dealing the stars' issue
            # Stars are matched in the comment/nowiki-stripped text but
            # removed from the live text, then re-appended sorted.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                   % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub('', newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip() + config.line_separator * 2
                allstars.sort()
                for element in allstars:
                    # config.LS is the configured line separator alias.
                    newtext += '%s%s' % (element.strip(), config.LS)
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext,
                                                     interwikiInside, site)
        else:
            newtext += u"%s%s" % (config.line_separator, addText)
    else:
        newtext = addText + config.line_separator + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'All', 'open in Browser'],
                    ['y', 'n', 'a', 'b'], 'n')
                if choice == 'a':
                    always = True
                elif choice == 'n':
                    return (False, False, always)
                elif choice == 'b':
                    # Let the operator inspect the page in a browser before
                    # re-asking on the next loop iteration.
                    webbrowser.open("http://%s%s" % (
                        page.site.hostname(),
                        page.site.nice_get_address(page.title())
                    ))
                    pywikibot.input("Press Enter when finished in browser.")
            if always or choice == 'y':
                try:
                    # Talk pages (namespace 3) are not marked as minor edits.
                    if always:
                        page.put(newtext, summary,
                                 minorEdit=page.namespace() != 3)
                    else:
                        page.put_async(newtext, summary,
                                       minorEdit=page.namespace() != 3)
                except pywikibot.EditConflict:
                    pywikibot.output(u'Edit conflict! skip!')
                    return (False, False, always)
                except pywikibot.ServerError:
                    # Retry up to config.maxretries times.
                    errorCount += 1
                    if errorCount < config.maxretries:
                        pywikibot.output(u'Server Error! Wait..')
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u'Fifth Server Error!')
                except pywikibot.SpamfilterError as e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved as error:
                    pywikibot.output(u'Error putting page: %s' % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
                    return (True, True, always)
        else:
            # Compute-only mode: hand back both versions for the caller.
            return (text, newtext, always)
def add_category(sort_by_last_name=False, create_pages=False):
    '''A robot to mass-add a category to a list of pages.

    Iterates over the module-level generator ``gen`` (assumed to be set up
    by the caller -- TODO confirm), asks once for the category title, then
    confirms per page (yes/no/all) before editing.

    @param sort_by_last_name: compute a last-name sort key for the link.
    @param create_pages: create missing pages instead of skipping them.
    '''
    site = pywikibot.getSite()
    if gen:
        newcatTitle = pywikibot.input(
            u'Category to add (do not give namespace):')
        # Capitalize the first letter unless the wiki is case-sensitive.
        if not site.nocapitalize:
            newcatTitle = newcatTitle[:1].capitalize() + newcatTitle[1:]
        # set edit summary message
        editSummary = pywikibot.translate(site, msg_add) % newcatTitle
        cat_namespace = site.category_namespaces()[0]
        answer = ''
        for page in gen:
            # "a" (all) persists across pages; anything else is re-asked.
            if answer != 'a':
                answer = ''
            while answer not in ('y', 'n', 'a'):
                answer = pywikibot.inputChoice(u'%s' % (page.aslink()),
                                               ['Yes', 'No', 'All'],
                                               ['y', 'n', 'a'], 'n')
                if answer == 'a':
                    # Extra confirmation before unattended mode.
                    confirm = pywikibot.inputChoice(
                        u"""\
This should be used if and only if you are sure that your links are correct! Are you sure?""",
                        ['Yes', 'No'], ['y', 'n'], 'n')
                    if confirm == 'n':
                        answer = ''
            if answer == 'y' or answer == 'a':
                try:
                    text = page.get()
                except pywikibot.NoPage:
                    if create_pages:
                        pywikibot.output(u"%s doesn't exist yet. Creating."
                                         % (page.title()))
                        text = ''
                    else:
                        pywikibot.output(u"%s doesn't exist yet. Ignoring."
                                         % (page.title()))
                        continue
                except pywikibot.IsRedirectPage, arg:
                    # Redirects are never categorized; report and skip.
                    redirTarget = pywikibot.Page(site, arg.args[0])
                    pywikibot.output(
                        u"WARNING: %s is redirect to %s. Ignoring."
                        % (page.title(), redirTarget.title()))
                    continue
                cats = page.categories()
                # Show the title of the page we're working on.
                # Highlight the title in purple.
                pywikibot.output(
                    u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                    % page.title())
                pywikibot.output(u"Current categories:")
                for cat in cats:
                    pywikibot.output(u"* %s" % cat.title())
                catpl = pywikibot.Page(site, cat_namespace + ':' + newcatTitle)
                if sort_by_last_name:
                    catpl = sorted_by_last_name(catpl, page)
                if catpl in cats:
                    # Category link already present; nothing to do.
                    pywikibot.output(u"%s is already in %s."
                                     % (page.title(), catpl.title()))
                else:
                    pywikibot.output(u'Adding %s' % catpl.aslink())
                    cats.append(catpl)
                    text = pywikibot.replaceCategoryLinks(text, cats)
                    try:
                        page.put(text, comment=editSummary)
                    except pywikibot.EditConflict:
                        # NOTE(review): conflicts are skipped, not retried.
                        pywikibot.output(
                            u'Skipping %s because of edit conflict'
                            % (page.title()))
def treat(self, page):
    """
    Adds the page to the appropriate birth and death year categories.

    Detection strategy, in priority order:
      1. birth/death templates and their numeric year parameter
         (self.bdCats[...]["templates"]);
      2. first regex set ("re1") against the full article text;
      3. second regex set ("re2") against the header paragraph when one
         was extracted, else the full text.
    Also appends the Finnish "living people" category when a birth year
    after 1885 is found without a death year, saves the page, and offers
    to create any category pages that do not exist yet.
    """
    pywikibot.output(u"Processing page %s..." % page.title(asLink=True))
    text = self.load(page)
    if not text:
        return
    # Just print beginning of the article; strip full ref-tags
    reMatcher = re.compile(u"(?:\A|\n)([^\n|{]*?'''.*?)(\n|\Z)")
    match = reMatcher.search(text)
    headertext = None
    if match:
        headertext = re.sub("<ref[^/]*?>.*?</ref>", "", match.group(1))
        pywikibot.output("\n---- CLIP ---\n%s\n--- CLIP ---\n" % headertext)
    else:
        pywikibot.output("\n---- CLIP ---\n%s\n--- CLIP ---\n" % text)
    cats = page.categories()
    addCats = []            # categories to add in this run
    newCatContent = {}      # title -> initial wikitext for missing cat pages
    foundTemplates = False
    for tmpl, params in page.templatesWithParams():
        # Keep only purely numeric parameters (candidate years).
        numParams = filter(lambda s: s.isdigit(), params)
        # Filter out birthplace fields
        for bdCat, bdCatInfo in self.bdCats.iteritems():
            if tmpl in bdCatInfo["templates"]:
                pywikibot.output(u"Found template {{%s|%s}}"
                                 % (tmpl, u"|".join(params)))
                foundTemplates = True
                try:
                    # The template map stores the 1-based index of the
                    # year parameter.
                    year = numParams[bdCatInfo["templates"][tmpl] - 1].strip()
                except IndexError:
                    continue
                sortKey = self.getSortKey(cats, text)
                newCat = catlib.Category(self.site, bdCat % year,
                                         sortKey=sortKey)
                if newCat in cats or newCat in addCats:
                    pywikibot.output(u"%s is already in %s."
                                     % (page.title(), newCat.title()))
                else:
                    addCats.append(newCat)
                    newCatContent[newCat.title()] = bdCatInfo["newcat"](year)
    # First set of regex against full articletext
    if not addCats and not foundTemplates:
        for bdCat, bdCatInfo in self.bdCats.iteritems():
            for reMatcher in bdCatInfo["re1"]:
                match = reMatcher.search(text)
                if match:
                    pywikibot.output(u"%s" % match.groups())
                    year = match.group(1)
                    pywikibot.output(u"Found %s in %s"
                                     % (year, match.group(0)))
                    sortKey = self.getSortKey(cats, text)
                    newCat = catlib.Category(self.site, bdCat % year,
                                             sortKey=sortKey)
                    if newCat in cats or newCat in addCats:
                        pywikibot.output(u"%s is already in %s."
                                         % (page.title(), newCat.title()))
                    else:
                        addCats.append(newCat)
                        newCatContent[newCat.title()] = bdCatInfo["newcat"](year)
                    # First matching regex per category wins.
                    break
    # second set of regex against the header text if available. If not then failback to full text
    if not addCats and not foundTemplates:
        for bdCat, bdCatInfo in self.bdCats.iteritems():
            for reMatcher in bdCatInfo["re2"]:
                if headertext:
                    match = reMatcher.search(headertext)
                else:
                    match = reMatcher.search(text)
                if match:
                    pywikibot.output(u"%s" % match.groups())
                    year = match.group(1)
                    pywikibot.output(u"Found %s in %s"
                                     % (year, match.group(0)))
                    sortKey = self.getSortKey(cats, text)
                    newCat = catlib.Category(self.site, bdCat % year,
                                             sortKey=sortKey)
                    if newCat in cats or newCat in addCats:
                        pywikibot.output(u"%s is already in %s."
                                         % (page.title(), newCat.title()))
                    else:
                        addCats.append(newCat)
                        newCatContent[newCat.title()] = bdCatInfo["newcat"](year)
                    break
    if addCats:
        cats.extend(addCats)
        # This will Add category living people if needed
        birth, death = self.getBirthDeathFromCats(cats, "fi")
        if birth > 1885 and death == None:
            # "Elävät henkilöt" is Finnish for "Living people".
            # NOTE(review): sortKey here reuses the last value assigned in
            # the loops above -- it is only reachable when addCats is
            # non-empty, but verify the reuse is intended.
            newCat = catlib.Category(self.site, u"Elävät henkilöt",
                                     sortKey=sortKey)
            if newCat not in cats:
                addCats.append(newCat)
                cats.append(newCat)
        text = pywikibot.replaceCategoryLinks(text, cats)
        summary = self.summary % ", ".join(map(lambda c: c.title(asLink=True),
                                               addCats))
        if not self.save(text, page, summary):
            pywikibot.output(u"Page %s not saved." % page.title(asLink=True))
            return
        # Offer to create category pages that do not exist yet.
        for cat in addCats:
            if not cat.exists():
                pywikibot.output(u"%s should be created." % (cat.title()))
                if not self.save(newCatContent[cat.title()], cat,
                                 self.summaryNewCat, minorEdit=False):
                    pywikibot.output(u"%s not created."
                                     % cat.title(asLink=True))
                # Throttle consecutive category creations.
                time.sleep(2)
newCategory = Category(site, newCat.title(), sortKey=sortKey) newCatSet.add(newCat.title()) newCatList.append(newCategory) elif cat.title() not in newCatSet: newCatSet.add(cat.title()) newCatList.append(cat) if not changesMade: wikipedia.output(u'ERROR: %s is not in category %s!' % (article.aslink(), oldCat.title())) else: text = article.get(get_redirect=True) try: text = wikipedia.replaceCategoryLinks(text, newCatList) except ValueError: # Make sure that the only way replaceCategoryLinks() can return # a ValueError is in the case of interwiki links to self. wikipedia.output(u'Skipping %s because of interwiki link to self' % article) try: article.put(text, comment) except wikipedia.EditConflict: wikipedia.output(u'Skipping %s because of edit conflict' % article.title()) except wikipedia.SpamfilterError, e: wikipedia.output(u'Skipping %s because of blacklist entry %s' % (article.title(), e.url)) except wikipedia.LockedPage: wikipedia.output(u'Skipping %s because page is locked' %
def featuredWithInterwiki(fromsite, tosite, template_on_top): if not fromsite.lang in cache: cache[fromsite.lang]={} if not tosite.lang in cache[fromsite.lang]: cache[fromsite.lang][tosite.lang]={} cc=cache[fromsite.lang][tosite.lang] if nocache: cc={} try: templatelist = template[tosite.lang] templatelist += template['_default'] except KeyError: templatelist = template['_default'] findtemplate = '(' + '|'.join(templatelist) + ')' re_Link_FA=re.compile(ur"\{\{%s\|%s\}\}" % (findtemplate.replace(u' ', u'[ _]'), fromsite.lang), re.IGNORECASE) re_this_iw=re.compile(ur"\[\[%s:[^]]+\]\]" % fromsite.lang) arts=featuredArticles(fromsite) pairs=[] for a in arts: if a.title()<afterpage: continue if u"/" in a.title() and a.namespace() != 0: wikipedia.output(u"%s is a subpage" % a.title()) continue if a.title() in cc: wikipedia.output(u"(cached) %s -> %s"%(a.title(), cc[a.title()])) continue if a.isRedirectPage(): a=a.getRedirectTarget() try: if not a.exists(): wikipedia.output(u"source page doesn't exist: %s" % a.title()) continue atrans=findTranslated(a,tosite) if atrans: text=atrans.get() m=re_Link_FA.search(text) if m: wikipedia.output(u"(already done)") else: # insert just before interwiki if (not interactive or wikipedia.input(u'Connecting %s -> %s. Proceed? [Y/N]'%(a.title(), atrans.title())) in ['Y','y'] ): m=re_this_iw.search(text) if not m: wikipedia.output(u"no interwiki record, very strange") continue comment = wikipedia.setAction(wikipedia.translate(wikipedia.getSite(), msg) % (fromsite.lang, a.title())) ### Moving {{Link FA|xx}} to top of interwikis ### if template_on_top == True: text=wikipedia.replaceCategoryLinks(text+(u"{{%s|%s}}"%(templatelist[0], fromsite.lang)), atrans.categories()) ### Placing {{Link FA|xx}} right next to corresponding interwiki ### else: text=(text[:m.end()] + (u" {{%s|%s}}" % (templatelist[0], fromsite.lang)) + text[m.end():]) try: atrans.put(text, comment) except wikipedia.LockedPage: wikipedia.output(u'Page %s is locked!' 
% atrans.title()) cc[a.title()]=atrans.title() except wikipedia.PageNotSaved, e: wikipedia.output(u"Page not saved")
def add_text(page=None, addText=None, summary=None, regexSkip=None,
             regexSkipUrl=None, always=False, up=False, putText=True,
             oldTextGiven=None, reorderEnabled=True, create=False):
    """Add ``addText`` to a page, below (default) or above the existing text.

    With reorderEnabled the standard footer order is restored afterwards
    (text, categories, star templates, interwikis), including the special
    nn-wiki comment line between categories and interwikis.

    @param page: the Page to edit (required when putText is True).
    @param addText: wikitext to add; literal "\\n" becomes a newline.
    @param summary: edit summary; derived from addText when omitted.
    @param regexSkip / regexSkipUrl: skip the page when the regex matches
        the page text / the raw page URL content.
    @param always: save without asking.
    @param up: if True, prepend instead of append.
    @param putText: if False, only compute (no save here).
    @param oldTextGiven: use this text instead of fetching the page.
    @param reorderEnabled: re-sort categories/stars/interwikis after adding.
    @param create: create the page if it does not exist.
    @return: tuple (saved, changed, always).
    @raise NoEnoughData: if addText is empty.
    """
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding',
                                   {'adding': addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    starsList = [
        u'bueno',
        u'bom interwiki',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    errorCount = 0
    site = pywikibot.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        pywikibot.output(u'Loading %s...' % page.title())
    if oldTextGiven == None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                pywikibot.output(u"%s doesn't exist, creating it!"
                                 % page.title())
                text = u''
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)  # continue
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!" % page.title())
            return (False, False, always)  # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        url = '%s%s' % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
                u'''Exception! regex (or word) used with -exceptUrl is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)  # continue
    if regexSkip != None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
                u'''Exception! regex (or word) used with -except is in the page. Skip!
Match was: %s''' % result)
            return (False, False, always)  # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', '\n')
        if (reorderEnabled):
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # nn got a message between the categories and the iw's
            # and they want to keep it there, first remove it
            hasCommentLine = False
            if (site.language() == u'nn'):
                regex = re.compile(
                    '(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)'
                )
                found = regex.findall(newtext)
                if found:
                    hasCommentLine = True
                    newtext = regex.sub('', newtext)
            # Adding the text
            newtext += u"\n%s" % addText
            # Reputting the categories
            newtext = pywikibot.replaceCategoryLinks(newtext,
                                                     categoriesInside,
                                                     site, True)
            #Put the nn iw message back
            # NOTE(review): nn_iw_msg is assumed to be a module-level
            # constant -- confirm.
            if site.language() == u'nn' and (interwikiInside or hasCommentLine):
                newtext = newtext + u'\r\n\r\n' + nn_iw_msg
            # Dealing the stars' issue
            # Stars are matched in the comment/nowiki-stripped text but
            # removed from the live text, then re-appended sorted.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile(
                    '(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub('', newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip() + '\r\n\r\n'
                allstars.sort()
                for element in allstars:
                    newtext += '%s\r\n' % element.strip()
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext,
                                                     interwikiInside, site)
        else:
            # Adding the text
            newtext += u"\n%s" % addText
    # If instead the text must be added above...
    else:
        newtext = addText + '\n' + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'All', 'open in Browser'],
                    ['y', 'N', 'a', 'b'], 'N')
                if choice == 'a':
                    always = True
                elif choice == 'n':
                    return (False, False, always)
                elif choice == 'b':
                    # Let the operator inspect the page in a browser and
                    # re-ask on the next loop iteration.
                    webbrowser.open(
                        "http://%s%s" % (page.site().hostname(),
                                         page.site().nice_get_address(
                                             page.title())))
                    pywikibot.input("Press Enter when finished in browser.")
            if always or choice == 'y':
                try:
                    if always:
                        # Synchronous save in unattended mode so server
                        # errors can be caught and retried below.
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except pywikibot.EditConflict:
                    pywikibot.output(u'Edit conflict! skip!')
                    return (False, False, always)
                except pywikibot.ServerError:
                    # Retry up to 5 times with a short pause.
                    errorCount += 1
                    if errorCount < 5:
                        pywikibot.output(u'Server Error! Wait..')
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u'Fifth Server Error!')
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved, error:
                    pywikibot.output(u'Error putting page: %s' % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
                    return (True, True, always)
if newCat: if newCat.title() not in newCatSet: newCategory = Category(site, newCat.title(), sortKey=sortKey) newCatSet.add(newCat.title()) newCatList.append(newCategory) elif cat.title() not in newCatSet: newCatSet.add(cat.title()) newCatList.append(cat) if not changesMade: wikipedia.output(u'ERROR: %s is not in category %s!' % (article.aslink(), oldCat.title())) else: text = article.get(get_redirect=True) try: text = wikipedia.replaceCategoryLinks(text, newCatList) except ValueError: # Make sure that the only way replaceCategoryLinks() can return # a ValueError is in the case of interwiki links to self. wikipedia.output( u'Skipping %s because of interwiki link to self' % article) try: article.put(text, comment) except wikipedia.EditConflict: wikipedia.output( u'Skipping %s because of edit conflict' % article.title()) except wikipedia.SpamfilterError, e: wikipedia.output( u'Skipping %s because of blacklist entry %s' % (article.title(), e.url)) except wikipedia.LockedPage:
def add_category(sort_by_last_name = False, create_pages = False):
    '''A robot to mass-add a category to a list of pages.

    Oldest variant: confirmation is collected with plain wikipedia.input()
    prompts ([y/n/a(ll)]) rather than inputChoice. Iterates over the
    module-level generator ``gen`` (assumed set by the caller -- TODO
    confirm).

    @param sort_by_last_name: compute a last-name sort key for the link.
    @param create_pages: create missing pages instead of skipping them.
    '''
    site = wikipedia.getSite()
    if gen:
        newcatTitle = wikipedia.input(
            u'Category to add (do not give namespace):')
        # Capitalize the first letter unless the wiki is case-sensitive.
        if not site.nocapitalize:
            newcatTitle = newcatTitle[:1].capitalize() + newcatTitle[1:]
        # set edit summary message
        editSummary = wikipedia.translate(site, msg_add) % newcatTitle
        cat_namespace = site.category_namespaces()[0]
        answer = ''
        for page in gen:
            # "a" (all) persists across pages; anything else is re-asked.
            if answer != 'a':
                answer = ''
            while answer not in ('y', 'n', 'a'):
                answer = wikipedia.input(u'%s [y/n/a(ll)]:' % (page.aslink()))
                if answer == 'a':
                    # Extra confirmation before unattended mode.
                    confirm = ''
                    while confirm not in ('y', 'n'):
                        confirm = wikipedia.input(u"""\
This should be used if and only if you are sure that your links are correct! Are you sure? [y/n]:""")
                    if confirm == 'n':
                        answer = ''
            if answer == 'y' or answer == 'a':
                try:
                    text = page.get()
                except wikipedia.NoPage:
                    if create_pages:
                        wikipedia.output(u"%s doesn't exist yet. Creating."
                                         % (page.title()))
                        text = ''
                    else:
                        wikipedia.output(u"%s doesn't exist yet. Ignoring."
                                         % (page.title()))
                        continue
                except wikipedia.IsRedirectPage, arg:
                    # Redirects are never categorized; report and skip.
                    redirTarget = wikipedia.Page(site, arg.args[0])
                    wikipedia.output(
                        u"WARNING: %s is redirect to %s. Ignoring."
                        % (page.title(), redirTarget.title()))
                    continue
                cats = page.categories()
                # Show the title of the page we're working on.
                # Highlight the title in purple.
                wikipedia.output(
                    u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                    % page.title())
                wikipedia.output(u"Current categories:")
                for cat in cats:
                    wikipedia.output(u"* %s" % cat.title())
                catpl = wikipedia.Page(site, cat_namespace + ':' + newcatTitle)
                if sort_by_last_name:
                    catpl = sorted_by_last_name(catpl, page)
                if catpl in cats:
                    # Category link already present; nothing to do.
                    wikipedia.output(u"%s is already in %s."
                                     % (page.title(), catpl.title()))
                else:
                    wikipedia.output(u'Adding %s' % catpl.aslink())
                    cats.append(catpl)
                    text = wikipedia.replaceCategoryLinks(text, cats)
                    try:
                        page.put(text, comment = editSummary)
                    except wikipedia.EditConflict:
                        # NOTE(review): conflicts are skipped, not retried.
                        wikipedia.output(
                            u'Skipping %s because of edit conflict'
                            % (page.title()))
def change_category(article, oldCat, newCat, comment=None, sortKey=None, inPlace=False): """ Remove page from oldCat and add it to newCat. @param oldCat and newCat: should be Category objects. If newCat is None, the category will be removed. @param comment: string to use as an edit summary @param sortKey: sortKey to use for the added category. Unused if newCat is None, or if inPlace=True @param inPlace: if True, change categories in place rather than rearranging them. """ cats = [] # get list of Category objects the article is in and remove duplicates for cat in article.categories(get_redirect=True): if cat not in cats: cats.append(cat) site = article.site() if not sortKey: sortKey = oldCat.sortKey if not article.canBeEdited(): pywikibot.output("Can't edit %s, skipping it..." % article.title(asLink=True)) return if oldCat not in cats: pywikibot.error(u'%s is not in category %s!' % (article.title(asLink=True), oldCat.title())) return if inPlace or article.namespace() == 10: oldtext = article.get(get_redirect=True) newtext = pywikibot.replaceCategoryInPlace(oldtext, oldCat, newCat) else: if newCat: cats[cats.index(oldCat)] = Category(site, newCat.title(), sortKey=sortKey) else: cats.pop(cats.index(oldCat)) oldtext = article.get(get_redirect=True) try: newtext = pywikibot.replaceCategoryLinks(oldtext, cats) except ValueError: # Make sure that the only way replaceCategoryLinks() can return # a ValueError is in the case of interwiki links to self. pywikibot.output(u'Skipping %s because of interwiki link to self' % article) if oldtext != newtext: try: article.put(newtext, comment) except pywikibot.EditConflict: pywikibot.output(u'Skipping %s because of edit conflict' % article.title()) except pywikibot.SpamfilterError, e: pywikibot.output(u'Skipping %s because of blacklist entry %s' % (article.title(), e.url)) except pywikibot.LockedPage: pywikibot.output(u'Skipping %s because page is locked' % article.title())
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.
    This combines the old instances standardizeInterwiki and
    standardizeCategories
    The page footer has the following section in that sequence:
    1. categories
    2. additional information depending on local site policy
    3. stars templates for featured and good articles
    4. interwiki links

    @param text: full page wikitext.
    @return: the text with the footer sections re-ordered as above.
    """
    starsList = [
        u'bueno',
        u'bom interwiki',
        u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]',
        u'enllaç[ _]ad',
        u'enllaz[ _]ad',
        u'leam[ _]vdc',
        u'legătură[ _]a[bcf]',
        u'liamm[ _]pub',
        u'lien[ _]adq',
        u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc',
        u'ligam[ _]adq',
        u'ligoelstara',
        u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura',
        u'nasc[ _]ar',
        u'tengill[ _][úg]g',
        u'ua',
        u'yüm yg',
        u'רא',
        u'وصلة مقالة جيدة',
        u'وصلة مقالة مختارة',
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    hasCommentLine = False
    # The PyWikipediaBot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and not '{{Personendaten' in text:
        categories = pywikibot.getCategoryLinks(text, site = self.site)
    if not self.talkpage:# and pywikibot.calledModuleName() <> 'interwiki':
        subpage = False
        if self.template:
            # Template doc subpages listed in moved_links keep their
            # interwikis on the subpage, not the template itself.
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc != None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)
        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site = self.site)
        # Removing the stars' issue
        # Stars are matched in the comment/nowiki-stripped text but
        # removed from the live text; collected for re-insertion below.
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star,
                               re.I)
            found = regex.findall(starstext)
            if found != []:
                if pywikibot.verbose:
                    print found
                text = regex.sub('', text)
                allstars += found
        # nn got a message between the categories and the iw's
        # and they want to keep it there, first remove it
        if self.site.lang in msg_interwiki:
            iw_msg = msg_interwiki[self.site.lang]
            # A tuple entry carries (message, removal regex); a plain
            # string is matched literally.
            if isinstance(iw_msg, tuple):
                iw_reg = iw_msg[1]
                iw_msg = iw_msg[0]
            else:
                iw_reg = u'(%s)' % iw_msg
            regex = re.compile(iw_reg)
            found = regex.findall(text)
            if found:
                if pywikibot.verbose:
                    print found
                hasCommentLine = True
                text = regex.sub('', text)
    # Adding categories
    if categories:
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Put the iw message back
    # NOTE(review): iw_msg is only bound when self.site.lang is in
    # msg_interwiki; this condition presumably only holds then ('nn'/'fr'
    # expected in msg_interwiki) -- verify to rule out a NameError.
    if not self.talkpage and \
       ((interwikiLinks or hasCommentLine) and
        self.site.language() == 'nn' or
        (interwikiLinks and hasCommentLine) and
        self.site.language() == 'fr'):
        text = text + '\r\n\r\n' + iw_msg
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += '%s\r\n' % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u'%s' % element.strip())
    # Adding the interwiki
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
def add_text(page=None, addText=None, summary=None, regexSkip=None, regexSkipUrl=None, always=False, up=False, putText=True, oldTextGiven=None, reorderEnabled=True, create=False): # When a page is tagged as "really well written" it has a star in the # interwiki links. This is a list of all the templates used (in regex # format) to make the stars appear. starsList = [ u'bueno', u'bom interwiki', u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed', u'destacado', u'destaca[tu]', u'enllaç[ _]ad', u'enllaz[ _]ad', u'leam[ _]vdc', u'legătură[ _]a[bcf]', u'liamm[ _]pub', u'lien[ _]adq', u'lien[ _]ba', u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt', u'liên[ _]kết[ _]chọn[ _]lọc', u'ligam[ _]adq', u'ligoelstara', u'ligoleginda', u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]', u'link[ _]km', u'link[ _]sm', u'linkfa', u'na[ _]lotura', u'nasc[ _]ar', u'tengill[ _][úg]g', u'ua', u'yüm yg', u'רא', u'وصلة مقالة جيدة', u'وصلة مقالة مختارة', ] errorCount = 0 site = pywikibot.getSite() pathWiki = site.family.nicepath(site.lang) site = pywikibot.getSite() if oldTextGiven is None: try: text = page.get() except pywikibot.NoPage: if create: pywikibot.output(u"%s doesn't exist, creating it!" % page.title()) text = u'' else: pywikibot.output(u"%s doesn't exist, skip!" % page.title()) return (False, False, always) except pywikibot.IsRedirectPage: pywikibot.output(u"%s is a redirect, skip!" 
% page.title()) return (False, False, always) else: text = oldTextGiven # If not up, text put below if not up: newtext = text # Translating the \\n into binary \n addText = addText.replace('\\n', '\n') if (reorderEnabled): # Getting the categories categoriesInside = pywikibot.getCategoryLinks(newtext, site) # Deleting the categories newtext = pywikibot.removeCategoryLinks(newtext, site) # Getting the interwiki interwikiInside = pywikibot.getLanguageLinks(newtext, site) # Removing the interwiki newtext = pywikibot.removeLanguageLinks(newtext, site) # Adding the text newtext += u"\n%s" % addText # Reputting the categories newtext = pywikibot.replaceCategoryLinks(newtext, categoriesInside, site, True) # Dealing the stars' issue allstars = [] starstext = pywikibot.removeDisabledParts(text) for star in starsList: regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star, re.I) found = regex.findall(starstext) if found != []: newtext = regex.sub('', newtext) allstars += found if allstars != []: newtext = newtext.strip() + '\r\n\r\n' allstars.sort() for element in allstars: newtext += '%s\r\n' % element.strip() # Adding the interwiki newtext = pywikibot.replaceLanguageLinks(newtext, interwikiInside, site) else: newtext += u"\n%s" % addText else: newtext = addText + '\n' + text if putText and text != newtext: pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) #pywikibot.showDiff(text, newtext) # Let's put the changes. while True: # If someone load it as module, maybe it's not so useful to put the # text in the page if putText: if always or choice == 'y': try: pass if always: page.put(newtext, summary, minorEdit=False) else: page.put_async(newtext, summary, minorEdit=False) except pywikibot.EditConflict: pywikibot.output(u'Edit conflict! skip!') return (False, False, always) except pywikibot.ServerError: errorCount += 1 if errorCount < 5: pywikibot.output(u'Server Error! 
Wait..') time.sleep(5) continue else: raise pywikibot.ServerError(u'Fifth Server Error!') except pywikibot.SpamfilterError, e: pywikibot.output( u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) return (False, False, always) except pywikibot.PageNotSaved, error: pywikibot.output(u'Error putting page: %s' % error.args) return (False, False, always) except pywikibot.LockedPage: pywikibot.output(u'Skipping %s (locked page)' % page.title()) return (False, False, always) else: # Break only if the errors are one after the other... errorCount = 0 return (True, True, always)
def run(self):
    """
    Starts the robot.

    Iterates self.generator, applies doReplacements() to each page,
    shows a diff and (unless self.acceptall) asks the user what to do.
    Edits go to the wiki via put_async/put, or -- when self.articles is
    set -- page titles are appended to that file instead of editing.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            pywikibot.output(
                u'Skipping %s because the title is on the exceptions list.'
                % page.title(asLink=True))
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not (self.articles or page.canBeEdited()):
                pywikibot.output(u"You can't edit page %s"
                                 % page.title(asLink=True))
                continue
        except pywikibot.NoPage:
            pywikibot.output(u'Page %s not found' % page.title(asLink=True))
            continue
        new_text = original_text
        # Inner loop re-runs after the user edits the text or reloads it
        # from the browser; every other outcome breaks out of it.
        while True:
            if self.isTextExcepted(new_text):
                pywikibot.output(
                    u'Skipping %s because it contains text that is on the exceptions list.'
                    % page.title(asLink=True))
                break
            new_text = self.doReplacements(new_text)
            if new_text == original_text:
                pywikibot.output(u'No changes were necessary in %s'
                                 % page.title(asLink=True))
                break
            if self.recursive:
                # Keep replacing until the text reaches a fixed point.
                newest_text = self.doReplacements(new_text)
                while (newest_text != new_text):
                    new_text = newest_text
                    newest_text = self.doReplacements(new_text)
            if hasattr(self, "addedCat"):
                # Append the configured category if it is not there yet.
                cats = page.categories()
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = pywikibot.replaceCategoryLinks(new_text,
                                                              cats)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                             % page.title())
            pywikibot.showDiff(original_text, new_text)
            if self.acceptall:
                break
            # The no+eXcept option is only offered when an exceptions
            # file (self.exctitles) is available to record the title in.
            if self.exctitles:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'no+eXcept', 'Edit', 'open in Browser',
                     'All', 'Quit'],
                    ['y', 'N', 'x', 'e', 'b', 'a', 'q'], 'N')
            else:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'Edit', 'open in Browser', 'All',
                     'Quit'],
                    ['y', 'N', 'e', 'b', 'a', 'q'], 'N')
            if choice == 'e':
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == 'b':
                webbrowser.open("http://%s%s" % (
                    page.site.hostname(),
                    page.site.nice_get_address(page.title())
                ))
                i18n.input('pywikibot-enter-finished-browser')
                try:
                    # Reload; the page may have changed (or vanished)
                    # while it was open in the browser.
                    original_text = page.get(get_redirect=True, force=True)
                except pywikibot.NoPage:
                    pywikibot.output(u'Page %s has been deleted.'
                                     % page.title())
                    break
                new_text = original_text
                continue
            if choice == 'q':
                self.writeEditCounter()
                self.writeExceptCounter()
                return
            if choice == 'a':
                self.acceptall = True
            if choice == 'x':
                # May happen only if self.exctitles isn't None
                self.exctitles.write(
                    u"ur'^%s$',\n" % re.escape(page.title()))
                self.exctitles.flush()
                self.exceptcounter += 1
            if choice == 'y':
                if not self.articles:
                    # Primary behaviour: working on wiki
                    page.put_async(new_text, self.editSummary)
                    self.editcounter += 1
                    # Bug: this increments even if put_async fails
                    # This is separately in two clauses of if for
                    # future purposes to get feedback form put_async
                else:
                    # Save the title for later processing instead of
                    # editing
                    self.editcounter += 1
                    self.articles.write(u'#%s\n%s'
                                        % (page.title(asLink=True,
                                                      textlink=True),
                                           self.splitLine()))
                    self.articles.flush()
            # For the peace of our soul :-)
            # choice must be 'N'
            break
        if self.acceptall and new_text != original_text:
            if not self.articles:
                # Primary behaviour: working on wiki
                try:
                    page.put(new_text, self.editSummary)
                    self.editcounter += 1  # increment only on success
                except pywikibot.EditConflict:
                    pywikibot.output(u'Skipping %s because of edit conflict'
                                     % (page.title(),))
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                except pywikibot.PageNotSaved, error:
                    pywikibot.output(u'Error putting page: %s'
                                     % (error.args,))
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % (page.title(),))
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.
    This combines the old instances standardizeInterwiki and
    standardizeCategories
    The page footer has the following section in that sequence:
    1. categories
    2. additional information depending on local site policy
    3. stars templates for featured and good articles
    4. interwiki links
    """
    # Featured/good-article "star" templates (regex fragments) that must
    # stay glued to the interwiki section.
    starsList = [
        u'bueno', u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
        u'destacado', u'destaca[tu]', u'enllaç[ _]ad', u'enllaz[ _]ad',
        u'leam[ _]vdc', u'legătură[ _]a[bcf]', u'liamm[ _]pub',
        u'lien[ _]adq', u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc', u'ligam[ _]adq', u'ligoelstara',
        u'ligoleginda', u'link[ _][afgu]a', u'link[ _]adq',
        u'link[ _]f[lm]', u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura', u'nasc[ _]ar', u'tengill[ _][úg]g', u'ua',
        u'yüm yg', u'רא', u'وصلة مقالة جيدة', u'وصلة مقالة مختارة',
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    hasCommentLine = False
    # The PyWikipediaBot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/bis_2006#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and not '{{Personendaten' in text:
        categories = pywikibot.getCategoryLinks(text, site=self.site)
    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        subpage = False
        if self.template:
            # moved_links maps a language code to (template, location);
            # only the location is needed here.
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc != None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)
        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site=self.site)
        # Removing the stars' issue
        # (matched against a copy with disabled parts removed, then
        # stripped from the real text)
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile(
                '(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star, re.I)
            found = regex.findall(starstext)
            if found != []:
                if pywikibot.verbose:
                    print found
                text = regex.sub('', text)
                allstars += found
        # nn got a message between the categories and the iw's
        # and they want to keep it there, first remove it
        if self.site.language() == 'nn':
            regex = re.compile(
                '(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)')
            found = regex.findall(text)
            if found:
                if pywikibot.verbose:
                    print found
                hasCommentLine = True
                text = regex.sub('', text)
    # Adding categories
    if categories:
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Put the nn iw message back
    if self.site.language() == 'nn' and not self.talkpage and (
            interwikiLinks or hasCommentLine):
        text = text + '\r\n\r\n' + nn_iw_msg
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += '%s\r\n' % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u'%s' % element.strip())
    # Adding the interwiki
    # (interwikiLinks -- and therefore subpage -- are only set inside the
    # `not self.talkpage` branch above; the truthiness guard keeps this
    # safe on talk pages where interwikiLinks stays None)
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
# NOTE(review): fragment of a change_category-style helper.  The enclosing
# `def`, the `for cat in cats:` loop, and the opening `if` branch lie
# outside this chunk, so the fragment does not parse on its own; the
# fuller copy on the next chunk shows the missing `if` line.
            # Replace the old category with the new one, preserving the
            # sort key; track titles in newCatSet to deduplicate.
            newCategory = Category(site, newCat.title(), sortKey=sortKey)
            newCatSet.add(newCat.title())
            newCatList.append(newCategory)
        elif cat.title() not in newCatSet:
            # Keep every other category, deduplicated by title.
            newCatSet.add(cat.title())
            newCatList.append(cat)
    if not changesMade:
        pywikibot.error(u'%s is not in category %s!'
                        % (article.title(asLink=True), oldCat.title()))
    else:
        text = article.get(get_redirect=True)
        try:
            text = pywikibot.replaceCategoryLinks(text, newCatList)
        except ValueError:
            # Make sure that the only way replaceCategoryLinks() can return
            # a ValueError is in the case of interwiki links to self.
            pywikibot.output(u'Skipping %s because of interwiki link to self'
                             % article)
        try:
            article.put(text, comment)
        except pywikibot.EditConflict:
            pywikibot.output(u'Skipping %s because of edit conflict'
                             % article.title())
        except pywikibot.SpamfilterError, e:
            pywikibot.output(u'Skipping %s because of blacklist entry %s'
                             % (article.title(), e.url))
        except pywikibot.LockedPage:
            # NOTE(review): the fragment is cut off here, mid-statement.
            pywikibot.output(u'Skipping %s because page is locked' %
# NOTE(review): fragment of a change_category-style helper.  The enclosing
# `def` and the `for cat in cats:` loop lie outside this chunk, so the
# fragment does not parse on its own.
        if newCat.title() not in newCatSet:
            # Replace the old category with the new one, preserving the
            # sort key; track titles in newCatSet to deduplicate.
            newCategory = Category(site, newCat.title(), sortKey=sortKey)
            newCatSet.add(newCat.title())
            newCatList.append(newCategory)
        elif cat.title() not in newCatSet:
            # Keep every other category, deduplicated by title.
            newCatSet.add(cat.title())
            newCatList.append(cat)
    if not changesMade:
        pywikibot.error(u'%s is not in category %s!'
                        % (article.title(asLink=True), oldCat.title()))
    else:
        text = article.get(get_redirect=True)
        try:
            text = pywikibot.replaceCategoryLinks(text, newCatList)
        except ValueError:
            # Make sure that the only way replaceCategoryLinks() can return
            # a ValueError is in the case of interwiki links to self.
            pywikibot.output(u'Skipping %s because of interwiki link to self'
                             % article)
        try:
            article.put(text, comment)
        except pywikibot.EditConflict:
            pywikibot.output(u'Skipping %s because of edit conflict'
                             % article.title())
        except pywikibot.SpamfilterError, e:
            pywikibot.output(u'Skipping %s because of blacklist entry %s'
                             % (article.title(), e.url))
        except pywikibot.LockedPage:
            # NOTE(review): the fragment is cut off here, mid-statement.
            pywikibot.output(u'Skipping %s because page is locked'
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.
    This combines the old instances standardizeInterwiki and
    standardizeCategories
    The page footer has the following section in that sequence:
    1. categories
    2. additional information depending on local site policy
    3. stars templates for featured and good articles
    4. interwiki links
    """
    # Featured/good-article "star" templates (regex fragments) that must
    # stay glued to the interwiki section.
    starsList = [
        u"bueno", u"cyswllt[ _]erthygl[ _]ddethol", u"dolen[ _]ed",
        u"destacado", u"destaca[tu]", u"enllaç[ _]ad", u"enllaz[ _]ad",
        u"leam[ _]vdc", u"legătură[ _]a[bcf]", u"liamm[ _]pub",
        u"lien[ _]adq", u"lien[ _]ba",
        u"liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt",
        u"liên[ _]kết[ _]chọn[ _]lọc", u"ligam[ _]adq", u"ligoelstara",
        u"ligoleginda", u"link[ _][afgu]a", u"link[ _]adq",
        u"link[ _]f[lm]", u"link[ _]km", u"link[ _]sm", u"linkfa",
        u"na[ _]lotura", u"nasc[ _]ar", u"tengill[ _][úg]g", u"ua",
        u"yüm yg", u"רא", u"وصلة مقالة جيدة", u"وصلة مقالة مختارة",
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    hasCommentLine = False
    # The PyWikipediaBot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/bis_2006#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and not "{{Personendaten" in text:
        categories = pywikibot.getCategoryLinks(text, site=self.site)
    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        subpage = False
        if self.template:
            # moved_links maps a language code to (template, location);
            # only the location is needed here.
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc != None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(text, insite=self.site,
                                                    template_subpage=subpage)
        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site=self.site)
        # Removing the stars' issue
        # (matched against a copy with disabled parts removed, then
        # stripped from the real text)
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile("(\{\{(?:template:|)%s\|.*?\}\}[\s]*)" % star,
                               re.I)
            found = regex.findall(starstext)
            if found != []:
                if pywikibot.verbose:
                    print found
                text = regex.sub("", text)
                allstars += found
        # nn got a message between the categories and the iw's
        # and they want to keep it there, first remove it
        if self.site.language() == "nn":
            regex = re.compile(
                "(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)"
            )
            found = regex.findall(text)
            if found:
                if pywikibot.verbose:
                    print found
                hasCommentLine = True
                text = regex.sub("", text)
    # Adding categories
    if categories:
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Put the nn iw message back
    if self.site.language() == "nn" and not self.talkpage and (
            interwikiLinks or hasCommentLine):
        text = text + "\r\n\r\n" + nn_iw_msg
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += "%s\r\n" % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u"%s" % element.strip())
    # Adding the interwiki
    # (interwikiLinks -- and therefore subpage -- are only set inside the
    # `not self.talkpage` branch above; the truthiness guard keeps this
    # safe on talk pages where interwikiLinks stays None)
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(
            text, interwikiLinks, site=self.site, template=self.template,
            template_subpage=subpage
        )
    return text
def add_text(page = None, addText = None, summary = None, regexSkip = None,
             regexSkipUrl = None, always = False, up = False, putText = True,
             oldTextGiven = None, reorderEnabled = True, create=False):
    """Add addText to page, keeping the footer in canonical order.

    Unless up is True the text is appended below the article body but
    (when reorderEnabled) above the categories/stars/interwiki footer.

    @param page: the Page object to modify
    @param addText: wikitext to add; literal '\\n' sequences are turned
        into real newlines
    @param summary: edit summary; derived from addText when omitted
    @param regexSkip: skip the page if this regex matches its text
    @param regexSkipUrl: skip the page if this regex matches its raw HTML
    @param always: if True, save without asking
    @param up: if True, prepend instead of append
    @param putText: if False, do not save (module usage)
    @param oldTextGiven: text to work on instead of loading the page
    @param reorderEnabled: keep categories/stars/interwiki at the footer
    @param create: if True, create the page when it does not exist
    @return: (saved, changed, always) tuple
    @raise NoEnoughData: when addText is empty
    """
    if not addText:
        raise NoEnoughData('You have to specify what text you want to add!')
    if not summary:
        summary = i18n.twtranslate(pywikibot.getSite(), 'add_text-adding',
                                   {'adding': addText[:200]})
    # When a page is tagged as "really well written" it has a star in the
    # interwiki links. This is a list of all the templates used (in regex
    # format) to make the stars appear.
    starsList = [
        u'bueno', u'bom interwiki', u'cyswllt[ _]erthygl[ _]ddethol',
        u'dolen[ _]ed', u'destacado', u'destaca[tu]', u'enllaç[ _]ad',
        u'enllaz[ _]ad', u'leam[ _]vdc', u'legătură[ _]a[bcf]',
        u'liamm[ _]pub', u'lien[ _]adq', u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc', u'ligam[ _]adq', u'ligoelstara',
        u'ligoleginda', u'link[ _][afgu]a', u'link[ _]adq',
        u'link[ _]f[lm]', u'link[ _]km', u'link[ _]sm', u'linkfa',
        u'na[ _]lotura', u'nasc[ _]ar', u'tengill[ _][úg]g', u'ua',
        u'yüm yg', u'רא', u'وصلة مقالة جيدة', u'وصلة مقالة مختارة',
    ]
    errorCount = 0
    site = pywikibot.getSite()
    # /wiki/ is not always the right path in non-wiki projects
    pathWiki = site.family.nicepath(site.lang)
    if putText:
        pywikibot.output(u'Loading %s...' % page.title())
    if oldTextGiven == None:
        try:
            text = page.get()
        except pywikibot.NoPage:
            if create:
                pywikibot.output(u"%s doesn't exist, creating it!"
                                 % page.title())
                text = u''
            else:
                pywikibot.output(u"%s doesn't exist, skip!" % page.title())
                return (False, False, always)  # continue
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"%s is a redirect, skip!" % page.title())
            return (False, False, always)  # continue
    else:
        text = oldTextGiven
    # Understand if the bot has to skip the page or not
    # In this way you can use both -except and -excepturl
    if regexSkipUrl != None:
        url = '%s%s' % (pathWiki, page.urlname())
        result = re.findall(regexSkipUrl, site.getUrl(url))
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -exceptUrl is in the page. Skipping!
Match was: %s''' % result)
            return (False, False, always)  # continue
    if regexSkip != None:
        result = re.findall(regexSkip, text)
        if result != []:
            pywikibot.output(
u'''Exception! regex (or word) used with -except is in the page. Skipping!
Match was: %s''' % result)
            return (False, False, always)  # continue
    # If not up, text put below
    if not up:
        newtext = text
        # Translating the \\n into binary \n
        addText = addText.replace('\\n', '\n')
        if (reorderEnabled):
            # Getting the categories
            categoriesInside = pywikibot.getCategoryLinks(newtext, site)
            # Deleting the categories
            newtext = pywikibot.removeCategoryLinks(newtext, site)
            # Getting the interwiki
            interwikiInside = pywikibot.getLanguageLinks(newtext, site)
            # Removing the interwiki
            newtext = pywikibot.removeLanguageLinks(newtext, site)
            # nn got a message between the categories and the iw's
            # and they want to keep it there, first remove it
            hasCommentLine = False
            if (site.language() == u'nn'):
                regex = re.compile('(<!-- ?interwiki \(no(?:/nb)?, ?sv, ?da first; then other languages alphabetically by name\) ?-->)')
                found = regex.findall(newtext)
                if found:
                    hasCommentLine = True
                    newtext = regex.sub('', newtext)
            # Adding the text
            newtext += u"\n%s" % addText
            # Reputting the categories
            newtext = pywikibot.replaceCategoryLinks(newtext,
                                                     categoriesInside, site,
                                                     True)
            # Put the nn iw message back
            if site.language() == u'nn' and (interwikiInside
                                             or hasCommentLine):
                newtext = newtext + u'\r\n\r\n' + nn_iw_msg
            # Dealing the stars' issue: strip the star templates and
            # re-append them, sorted, after the categories.
            allstars = []
            starstext = pywikibot.removeDisabledParts(text)
            for star in starsList:
                regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
                                   % star, re.I)
                found = regex.findall(starstext)
                if found != []:
                    newtext = regex.sub('', newtext)
                    allstars += found
            if allstars != []:
                newtext = newtext.strip() + '\r\n\r\n'
                allstars.sort()
                for element in allstars:
                    newtext += '%s\r\n' % element.strip()
            # Adding the interwiki
            newtext = pywikibot.replaceLanguageLinks(newtext,
                                                     interwikiInside, site)
        else:
            # Adding the text
            newtext += u"\n%s" % addText
    # If instead the text must be added above...
    else:
        newtext = addText + '\n' + text
    if putText and text != newtext:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        pywikibot.showDiff(text, newtext)
    # Let's put the changes.
    while True:
        # If someone load it as module, maybe it's not so useful to put the
        # text in the page
        if putText:
            if not always:
                choice = pywikibot.inputChoice(
                    u'Do you want to accept these changes?',
                    ['Yes', 'No', 'All', 'open in Browser'],
                    ['y', 'N', 'a', 'b'], 'N')
                if choice == 'a':
                    always = True
                elif choice == 'n':
                    return (False, False, always)
                elif choice == 'b':
                    webbrowser.open("http://%s%s" % (
                        page.site().hostname(),
                        page.site().nice_get_address(page.title())
                    ))
                    pywikibot.input("Press Enter when finished in browser.")
            if always or choice == 'y':
                try:
                    if always:
                        page.put(newtext, summary)
                    else:
                        page.put_async(newtext, summary)
                except pywikibot.EditConflict:
                    pywikibot.output(u'Edit conflict! skip!')
                    return (False, False, always)
                except pywikibot.ServerError:
                    # Retry up to four times, then give up.
                    errorCount += 1
                    if errorCount < 5:
                        pywikibot.output(u'Server Error! Wait..')
                        time.sleep(5)
                        continue
                    else:
                        raise pywikibot.ServerError(u'Fifth Server Error!')
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                    return (False, False, always)
                except pywikibot.PageNotSaved, error:
                    pywikibot.output(u'Error putting page: %s' % error.args)
                    return (False, False, always)
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % page.title())
                    return (False, False, always)
                else:
                    # Break only if the errors are one after the other...
                    errorCount = 0
                    return (True, True, always)
def run(self):
    """
    Starts the robot.

    Iterates self.generator, applies doReplacements() to each page,
    shows a diff and (unless self.acceptall) asks the user what to do.
    Edits go to the wiki via put_async/put, or -- when self.articles is
    set -- page titles are appended to that file instead of editing.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            pywikibot.output(
                u'Skipping %s because the title is on the exceptions list.'
                % page.title(asLink=True))
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not (self.articles or page.canBeEdited()):
                pywikibot.output(u"You can't edit page %s"
                                 % page.title(asLink=True))
                continue
        except pywikibot.NoPage:
            pywikibot.output(u'Page %s not found' % page.title(asLink=True))
            continue
        new_text = original_text
        # Inner loop re-runs after the user edits the text or reloads it
        # from the browser; every other outcome breaks out of it.
        while True:
            if self.isTextExcepted(new_text):
                pywikibot.output(
                    u'Skipping %s because it contains text that is on the exceptions list.'
                    % page.title(asLink=True))
                break
            new_text = self.doReplacements(new_text)
            if new_text == original_text:
                pywikibot.output(u'No changes were necessary in %s'
                                 % page.title(asLink=True))
                break
            if self.recursive:
                # Keep replacing until the text reaches a fixed point.
                newest_text = self.doReplacements(new_text)
                while (newest_text != new_text):
                    new_text = newest_text
                    newest_text = self.doReplacements(new_text)
            if hasattr(self, "addedCat"):
                # Append the configured category if it is not there yet.
                cats = page.categories()
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = pywikibot.replaceCategoryLinks(
                        new_text, cats)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(
                u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                % page.title())
            pywikibot.showDiff(original_text, new_text)
            if self.acceptall:
                break
            choice = pywikibot.inputChoice(
                u'Do you want to accept these changes?',
                ['Yes', 'No', 'Edit', 'open in Browser', 'All', 'Quit'],
                ['y', 'N', 'e', 'b', 'a', 'q'], 'N')
            if choice == 'e':
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == 'b':
                webbrowser.open(
                    "http://%s%s" % (page.site().hostname(),
                                     page.site().nice_get_address(
                                         page.title())))
                pywikibot.input("Press Enter when finished in browser.")
                # Reload; the page may have changed while it was open in
                # the browser.
                original_text = page.get(get_redirect=True, force=True)
                new_text = original_text
                continue
            if choice == 'q':
                self.writeEditCounter()
                return
            if choice == 'a':
                self.acceptall = True
            if choice == 'y':
                if not self.articles:
                    #Primary behaviour: working on wiki
                    page.put_async(new_text, self.editSummary)
                    self.editcounter += 1
                    #Bug: this increments even if put_async fails
                    #This is separately in two clauses of if for
                    #future purposes to get feedback form put_async
                else:
                    #Save the title for later processing instead of editing
                    self.editcounter += 1
                    self.articles.write(
                        u'#%s\n%s'
                        % (page.title(asLink=True), self.splitLine()))
                    self.articles.flush()
            # For the peace of our soul :-)
            # choice must be 'N'
            break
        if self.acceptall and new_text != original_text:
            if not self.articles:
                #Primary behaviour: working on wiki
                try:
                    page.put(new_text, self.editSummary)
                    self.editcounter += 1  #increment only on success
                except pywikibot.EditConflict:
                    pywikibot.output(
                        u'Skipping %s because of edit conflict'
                        % (page.title(), ))
                except pywikibot.SpamfilterError, e:
                    pywikibot.output(
                        u'Cannot change %s because of blacklist entry %s'
                        % (page.title(), e.url))
                except pywikibot.PageNotSaved, error:
                    pywikibot.output(u'Error putting page: %s'
                                     % (error.args, ))
                except pywikibot.LockedPage:
                    pywikibot.output(u'Skipping %s (locked page)'
                                     % (page.title(), ))
def change_category(article, oldCat, newCat, comment=None, sortKey=None, inPlace=False): """ Remove page from oldCat and add it to newCat. @param oldCat and newCat: should be Category objects. If newCat is None, the category will be removed. @param comment: string to use as an edit summary @param sortKey: sortKey to use for the added category. Unused if newCat is None, or if inPlace=True @param inPlace: if True, change categories in place rather than rearranging them. """ cats = [] # get list of Category objects the article is in and remove duplicates for cat in article.categories(get_redirect=True): if cat not in cats: cats.append(cat) site = article.site() if not sortKey: sortKey = oldCat.sortKey if not article.canBeEdited(): pywikibot.output("Can't edit %s, skipping it..." % article.title(asLink=True)) return if oldCat not in cats: pywikibot.error(u"%s is not in category %s!" % (article.title(asLink=True), oldCat.title())) return if inPlace or article.namespace() == 10: oldtext = article.get(get_redirect=True) newtext = pywikibot.replaceCategoryInPlace(oldtext, oldCat, newCat) else: if newCat: cats[cats.index(oldCat)] = Category(site, newCat.title(), sortKey=sortKey) else: cats.pop(cats.index(oldCat)) oldtext = article.get(get_redirect=True) try: newtext = pywikibot.replaceCategoryLinks(oldtext, cats) except ValueError: # Make sure that the only way replaceCategoryLinks() can return # a ValueError is in the case of interwiki links to self. pywikibot.output(u"Skipping %s because of interwiki link to self" % article) if oldtext != newtext: try: article.put(newtext, comment) except pywikibot.EditConflict: pywikibot.output(u"Skipping %s because of edit conflict" % article.title()) except pywikibot.SpamfilterError, e: pywikibot.output(u"Skipping %s because of blacklist entry %s" % (article.title(), e.url)) except pywikibot.LockedPage: pywikibot.output(u"Skipping %s because page is locked" % article.title())
def standardizePageFooter(self, text):
    """
    Makes sure that interwiki links, categories and star templates are
    put to the correct position and into the right order.
    This combines the old instances standardizeInterwiki and
    standardizeCategories
    The page footer has the following section in that sequence:
    1. categories
    2. ## TODO: template beyond categories ##
    3. additional information depending on local site policy
    4. stars templates for featured and good articles
    5. interwiki links
    """
    # Featured/good-article "star" templates (regex fragments) that must
    # stay glued to the interwiki section.
    starsList = [
        u'bueno', u'bom interwiki', u'cyswllt[ _]erthygl[ _]ddethol',
        u'dolen[ _]ed', u'destacado', u'destaca[tu]', u'enllaç[ _]ad',
        u'enllaz[ _]ad', u'leam[ _]vdc', u'legătură[ _]a[bcf]',
        u'liamm[ _]pub', u'lien[ _]adq', u'lien[ _]ba',
        u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
        u'liên[ _]kết[ _]chọn[ _]lọc', u'ligam[ _]adq',
        u'ligazón[ _]a[bd]', u'ligoelstara', u'ligoleginda',
        u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
        u'link[ _]km', u'link[ _]sm', u'linkfa', u'na[ _]lotura',
        u'nasc[ _]ar', u'tengill[ _][úg]g', u'ua', u'yüm yg', u'רא',
        u'وصلة مقالة جيدة', u'وصلة مقالة مختارة',
    ]
    categories = None
    interwikiLinks = None
    allstars = []
    # The PyWikipediaBot is no longer allowed to touch categories on the
    # German Wikipedia. See
    # http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Position_der_Personendaten_am_.22Artikelende.22
    # ignoring nn-wiki of cause of the comment line above iw section
    if not self.template and '{{Personendaten' not in text and \
       '{{SORTIERUNG' not in text and '{{DEFAULTSORT' not in text and \
       self.site.lang not in ('et', 'it', 'bg', 'ru'):
        try:
            categories = pywikibot.getCategoryLinks(text, site=self.site)
        # there are categories like [[categoy:Foo {{#time:Y...}}]]
        except pywikibot.InvalidTitle:
            pass
    if not self.talkpage:  # and pywikibot.calledModuleName() <> 'interwiki':
        subpage = False
        if self.template:
            # moved_links maps a language code to (template, location);
            # only the location is needed here.
            loc = None
            try:
                tmpl, loc = moved_links[self.site.lang]
                del tmpl
            except KeyError:
                pass
            if loc is not None and loc in self.title:
                subpage = True
        interwikiLinks = pywikibot.getLanguageLinks(
            text, insite=self.site, template_subpage=subpage)
        # Removing the interwiki
        text = pywikibot.removeLanguageLinks(text, site=self.site)
        # Removing the stars' issue
        # (matched against a copy with disabled parts removed, then
        # stripped from the real text)
        starstext = pywikibot.removeDisabledParts(text)
        for star in starsList:
            regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)' % star,
                               re.I)
            found = regex.findall(starstext)
            if found != []:
                text = regex.sub('', text)
                allstars += found
    # Adding categories
    if categories:
        ##Sorting categories in alphabetic order. beta test only on Persian Wikipedia, TODO fix bug for sorting
        #if self.site.language() == 'fa':
        #    categories.sort()
        ##Taking main cats to top
        #    for name in categories:
        #        if re.search(u"(.+?)\|(.{,1}?)",name.title()) or name.title()==name.title().split(":")[0]+title:
        #            categories.remove(name)
        #            categories.insert(0, name)
        text = pywikibot.replaceCategoryLinks(text, categories,
                                              site=self.site)
    # Adding stars templates
    if allstars:
        text = text.strip() + self.site.family.interwiki_text_separator
        allstars.sort()
        for element in allstars:
            text += '%s\r\n' % element.strip()
            if pywikibot.verbose:
                pywikibot.output(u'%s' % element.strip())
    # Adding the interwiki
    # (interwikiLinks -- and therefore subpage -- are only set inside the
    # `not self.talkpage` branch above; the truthiness guard keeps this
    # safe on talk pages where interwikiLinks stays None)
    if interwikiLinks:
        text = pywikibot.replaceLanguageLinks(text, interwikiLinks,
                                              site=self.site,
                                              template=self.template,
                                              template_subpage=subpage)
    return text
def add_text(page = None, addText = None, summary = None, regexSkip = None, regexSkipUrl = None, always = False, up = False, putText = True, oldTextGiven = None): if not addText: raise NoEnoughData('You have to specify what text you want to add!') if not summary: summary = wikipedia.setAction(wikipedia.translate(wikipedia.getSite(), msg) % addText) # When a page is tagged as "really well written" it has a star in the interwiki links. # This is a list of all the templates used (in regex format) to make the stars appear. starsList = ['link[ _]fa', 'link[ _]adq', 'enllaç[ _]ad', 'link[ _]ua', 'legătură[ _]af', 'destacado', 'ua', 'liên k[ _]t[ _]chọn[ _]lọc'] errorCount = 0 site = wikipedia.getSite() # /wiki/ is not always the right path in non-wiki projects pathWiki = site.family.nicepath(site.lang) if putText: wikipedia.output(u'Loading %s...' % page.title()) if oldTextGiven == None: try: text = page.get() except wikipedia.NoPage: wikipedia.output(u"%s doesn't exist, skip!" % page.title()) return (False, always) # continue except wikipedia.IsRedirectPage: wikipedia.output(u"%s is a redirect, skip!" % page.title()) return (False, always) # continue else: text = oldTextGiven # Understand if the bot has to skip the page or not # In this way you can use both -except and -excepturl if regexSkipUrl != None: url = '%s%s' % (pathWiki, page.urlname()) result = re.findall(regexSkipUrl, site.getUrl(url)) if result != []: wikipedia.output(u'Exception! regex (or word) used with -exceptUrl is in the page. Skip!') return (False, always) # continue if regexSkip != None: result = re.findall(regexSkip, text) if result != []: wikipedia.output(u'Exception! regex (or word) used with -except is in the page. 
Skip!') return (False, always) # continue # If not up, text put below if not up: newtext = text # Getting the categories categoriesInside = wikipedia.getCategoryLinks(newtext, site) # Deleting the categories newtext = wikipedia.removeCategoryLinks(newtext, site) # Getting the interwiki interwikiInside = wikipedia.getLanguageLinks(newtext, site) # Removing the interwiki newtext = wikipedia.removeLanguageLinks(newtext, site) #nn got a message between the categories and the iw's and they want to keep it there, first remove it if (site.language()==u'nn'): newtext = newtext.replace(nn_iw_msg, '') # Translating the \\n into binary \n addText = addText.replace('\\n', '\n') # Adding the text newtext += u"\n%s" % addText # Reputting the categories newtext = wikipedia.replaceCategoryLinks(newtext, categoriesInside, site, True) #Put the nn iw message back if (site.language()==u'nn'): newtext = newtext + u'\n' + nn_iw_msg # Dealing the stars' issue starsListInPage = list() for star in starsList: regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}\n)' % star, re.I) risultato = regex.findall(newtext) if risultato != []: newtext = regex.sub('', newtext) for element in risultato: newtext += '\n%s' % element # Adding the interwiki newtext = wikipedia.replaceLanguageLinks(newtext, interwikiInside, site) # If instead the text must be added above... else: newtext = addText + '\n' + text if putText and text != newtext: wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) wikipedia.showDiff(text, newtext) choice = '' # Let's put the changes. 
while 1: # If someone load it as module, maybe it's not so useful to put the text in the page if putText: if not always: choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N') if choice == 'a': always = True if choice == 'n': return (False, always) if choice == 'y' or always: try: if always: page.put(newtext, summary) else: page.put_async(newtext, summary) except wikipedia.EditConflict: wikipedia.output(u'Edit conflict! skip!') return (False, always) except wikipedia.ServerError: errorCount += 1 if errorCount < 5: wikipedia.output(u'Server Error! Wait..') time.sleep(3) continue else: raise wikipedia.ServerError(u'Fifth Server Error!') except wikipedia.SpamfilterError, e: wikipedia.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) return (False, always) except wikipedia.PageNotSaved, error: wikipedia.output(u'Error putting page: %s' % error.args) return (False, always) except wikipedia.LockedPage: wikipedia.output(u'Skipping %s (locked page)' % page.title()) return (False, always) else: # Break only if the errors are one after the other... errorCount = 0 return (True, always)
def run(self):
    """
    Starts the robot.

    Iterates over self.generator, applies self.doReplacements to each
    page's text, and either asks the operator interactively what to do
    with the result or (once 'All' has been chosen) saves automatically.
    Pages whose title or text matches the exception lists are skipped.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            wikipedia.output(
                u'Skipping %s because the title is on the exceptions list.'
                % page.aslink())
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not page.canBeEdited():
                wikipedia.output(u"You can't edit page %s" % page.aslink())
                continue
        except wikipedia.NoPage:
            wikipedia.output(u'Page %s not found' % page.aslink())
            continue
        new_text = original_text
        # Interactive loop: repeats after 'Edit' or 'open in Browser' so
        # the freshly edited/reloaded text goes through the replacements
        # and the diff again; every other choice leaves the loop.
        while True:
            if self.isTextExcepted(new_text):
                wikipedia.output(
                    u'Skipping %s because it contains text that is on the exceptions list.'
                    % page.aslink())
                break
            new_text = self.doReplacements(new_text)
            if new_text == original_text:
                wikipedia.output('No changes were necessary in %s'
                                 % page.aslink())
                break
            if self.recursive:
                # Re-apply the replacements until the text stops changing.
                newest_text = self.doReplacements(new_text)
                while (newest_text!=new_text):
                    new_text = newest_text
                    newest_text = self.doReplacements(new_text)
            if hasattr(self, "addedCat"):
                # Optionally add a category, but only if it is not
                # already on the page.
                cats = page.categories(nofollow_redirects=True)
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = wikipedia.replaceCategoryLinks(new_text,
                                                              cats)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                             % page.title())
            wikipedia.showDiff(original_text, new_text)
            if self.acceptall:
                # 'All' was chosen earlier; saving happens below the loop.
                break
            choice = wikipedia.inputChoice(
                u'Do you want to accept these changes?',
                ['Yes', 'No', 'Edit', 'open in Browser', 'All', "Quit"],
                ['y', 'N', 'e', 'b', 'a', 'q'], 'N')
            if choice == 'e':
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == 'b':
                # Let the operator inspect the live page, then reload it
                # (force=True bypasses the cache) and start over.
                webbrowser.open("http://%s%s" % (
                    page.site().hostname(),
                    page.site().nice_get_address(page.title())
                ))
                wikipedia.input("Press Enter when finished in browser.")
                original_text = page.get(get_redirect=True, force=True)
                new_text = original_text
                continue
            if choice == 'q':
                return
            if choice == 'a':
                self.acceptall = True
            if choice == 'y':
                # Asynchronous save; errors surface in the put queue.
                page.put_async(new_text, self.editSummary)
            # choice must be 'N'
            break
        if self.acceptall and new_text != original_text:
            # Automatic mode: save synchronously and report, but do not
            # abort the run on per-page failures.
            try:
                page.put(new_text, self.editSummary)
            except wikipedia.EditConflict:
                wikipedia.output(u'Skipping %s because of edit conflict'
                                 % (page.title(),))
            except wikipedia.SpamfilterError, e:
                wikipedia.output(
                    u'Cannot change %s because of blacklist entry %s'
                    % (page.title(), e.url))
            except wikipedia.PageNotSaved, error:
                wikipedia.output(u'Error putting page: %s'
                                 % (error.args,))
            except wikipedia.LockedPage:
                wikipedia.output(u'Skipping %s (locked page)'
                                 % (page.title(),))