def treat(self): page = wikipedia.Page(self.site, self.your_page) if page.exists(): wikipedia.output( u"\nWikitable on \03{lightpurple}%s\03{default} will be completed with:\n" % self.your_page ) text = page.get() newtext = self.newraw() wikipedia.output(newtext) choice = wikipedia.inputChoice(u"Do you want to add these on wikitable?", ["Yes", "No"], ["y", "N"], "N") text = text[:-3] + newtext summ = wikipedia.translate(self.site, summary_update) if choice == "y": try: page.put(u"".join(text), summ) except: wikipedia.output(u"Impossible to edit. It may be an edit conflict... Skipping...") else: wikipedia.output(u"\nWikitable on \03{lightpurple}%s\03{default} will be created with:\n" % self.your_page) newtext = self.newtable() + self.newraw() wikipedia.output(newtext) summ = wikipedia.translate(self.site, summary_creation) choice = wikipedia.inputChoice(u"Do you want to accept this page creation?", ["Yes", "No"], ["y", "N"], "N") if choice == "y": try: page.put(newtext, summ) except wikipedia.LockedPage: wikipedia.output(u"Page %s is locked; skipping." % title) except wikipedia.EditConflict: wikipedia.output(u"Skipping %s because of edit conflict" % title) except wikipedia.SpamfilterError, error: wikipedia.output(u"Cannot change %s because of spam blacklist entry %s" % (title, error.url))
def treat(self): page = pywikibot.Page(self.site, self.your_page) if page.exists(): pywikibot.output(u'\nWikitable on \03{lightpurple}%s\03{default} will be completed with:\n' % self.your_page ) text = page.get() newtext = self.newraw() pywikibot.output(newtext) choice = pywikibot.inputChoice(u'Do you want to add these on wikitable?', ['Yes', 'No'], ['y', 'N'], 'N') text = text[:-3] + newtext summ = pywikibot.translate(self.site, summary_update) if choice == 'y': try: page.put(u''.join(text), summ) except: pywikibot.output(u'Impossible to edit. It may be an edit conflict... Skipping...') else: pywikibot.output(u'\nWikitable on \03{lightpurple}%s\03{default} will be created with:\n' % self.your_page ) newtext = self.newtable()+self.newraw() pywikibot.output(newtext) summ = pywikibot.translate(self.site, summary_creation) choice = pywikibot.inputChoice(u'Do you want to accept this page creation?', ['Yes', 'No'], ['y', 'N'], 'N') if choice == 'y': try: page.put(newtext, summ) except pywikibot.LockedPage: pywikibot.output(u"Page %s is locked; skipping." % title) except pywikibot.EditConflict: pywikibot.output(u'Skipping %s because of edit conflict' % title) except pywikibot.SpamfilterError, error: pywikibot.output(u'Cannot change %s because of spam blacklist entry %s' % (title, error.url))
def guessReasonForDeletion(self, page): reason = None # TODO: The following check loads the page 2 times. Find a better way to # do it. if page.isTalkPage() and (page.toggleTalkPage().isRedirectPage() or not page.toggleTalkPage().exists()): # This is probably a talk page that is orphaned because we # just deleted the associated article. reason = pywikibot.translate(self.mySite, self.talk_deletion_msg) else: # Try to guess reason by the template used templateNames = page.templates() reasons = pywikibot.translate(self.mySite, self.deletion_messages) for templateName in templateNames: if templateName in reasons: if type(reasons[templateName]) is not unicode: # Make alias to delete_reasons reason = pywikibot.translate(self.mySite, self.delete_reasons)[reasons[templateName]] else: reason = reasons[templateName] break if not reason: # Unsuccessful in guessing the reason. Use a default message. reason = reasons[u"_default"] return reason
def run(self):
    """Move the category: copy the old category page to the new title
    (history is preserved via copyAndKeep) and move its talk page along.
    """
    site = wikipedia.getSite()
    newCat = catlib.Category(site, 'Category:' + self.newCatTitle)
    # Copy the category contents to the new category page
    copied = False
    oldMovedTalk = None
    if self.oldCat.exists() and self.moveCatPage:
        copied = self.oldCat.copyAndKeep(
            self.newCatTitle, wikipedia.translate(site, cfd_templates))
    # Also move the talk page
    if copied:
        # NOTE(review): both placeholders are filled with the NEW title;
        # one of them is probably meant to be the old title — confirm
        # against deletion_reason_move's format string.
        reason = wikipedia.translate(site, deletion_reason_move) \
            % (self.newCatTitle, self.newCatTitle)
        oldTalk = self.oldCat.toggleTalkPage()
        if oldTalk.exists():
            newTalkTitle = newCat.toggleTalkPage().title()
            try:
                talkMoved = oldTalk.move(newTalkTitle, reason)
            except (wikipedia.NoPage, wikipedia.PageNotSaved), e:
                #in order :
                #Source talk does not exist, or
                #Target talk already exists
                wikipedia.output(e.message)
            else:
                # remember the moved talk page so it can be deleted later
                if talkMoved:
                    oldMovedTalk = oldTalk
def __init__(self, generator, auto, dry):
    """
    Constructor. Parameters:
        * generator - The page generator that determines on which pages
                      to work on.
        * auto      - If True, run in autonomous mode.
        * dry       - If True, doesn't do any real changes, but only shows
                      what would have been changed.
    """
    self.generator = generator
    self.auto = auto
    self.dry = dry
    self.lang = pywikibot.getSite().lang
    # Get the correct localized parameters
    self.site = pywikibot.getSite()
    self.summary = pywikibot.translate(self.site, self.msg)
    self.summaryNewCat = pywikibot.translate(self.site, self.msgNewCat)
    self.bdCats = pywikibot.translate(self.site, self.birthDeathCats)
    # Matcher for a localized {{DEFAULTSORT ... opening, if the wiki
    # declares any 'defaultsort' magic words.
    self.defaultSortMatcher = None
    defaultSortLabels = self.site.getmagicwords('defaultsort')
    if defaultSortLabels:
        # FIX: raw string — '\{' in a plain literal is an invalid escape
        # (DeprecationWarning on newer interpreters).
        self.defaultSortMatcher = re.compile(
            r'\{\{(' + '|'.join(defaultSortLabels) + ')')
def __init__(self, generator, oldImage, newImage=None, summary='',
             always=False, loose=False):
    """
    Arguments:
        * generator - A page generator.
        * oldImage  - The title of the old image (without namespace)
        * newImage  - The title of the new image (without namespace), or
                      None if you want to remove the image.
    """
    self.generator = generator
    self.oldImage = oldImage
    self.newImage = newImage
    self.summary = summary
    self.always = always
    self.loose = loose
    # get edit summary message: the caller's summary verbatim when given,
    # otherwise a localized replace/remove message.
    # (FIX: removed the dead ``self.editSummary = summary`` assignment —
    # it was always overwritten by the branches below.)
    mysite = wikipedia.getSite()
    if summary:
        self.editSummary = summary
    elif self.newImage:
        self.editSummary = wikipedia.translate(
            mysite, self.msg_replace) % (self.oldImage, self.newImage)
    else:
        self.editSummary = wikipedia.translate(
            mysite, self.msg_remove) % self.oldImage
def __init__(self, generator, oldImage, newImage=None, summary='',
             always=False, loose=False):
    """
    Arguments:
        * generator - A page generator.
        * oldImage  - The title of the old image (without namespace)
        * newImage  - The title of the new image (without namespace), or
                      None if you want to remove the image.
    """
    self.generator = generator
    self.oldImage = oldImage
    self.newImage = newImage
    self.summary = summary
    self.always = always
    self.loose = loose
    # get edit summary message: the caller's summary verbatim when given,
    # otherwise a localized replace/remove message.
    # (FIX: removed the dead ``self.editSummary = summary`` assignment —
    # it was always overwritten by the branches below.)
    mysite = pywikibot.getSite()
    if summary:
        self.editSummary = summary
    elif self.newImage:
        self.editSummary = (pywikibot.translate(mysite, self.msg_replace)
                            % (self.oldImage, self.newImage))
    else:
        self.editSummary = (pywikibot.translate(mysite, self.msg_remove)
                            % self.oldImage)
def guessReasonForDeletion(self, page): reason = None # TODO: The following check loads the page 2 times. Find a better way to do it. if page.isTalkPage() and (page.toggleTalkPage().isRedirectPage() or not page.toggleTalkPage().exists()): # This is probably a talk page that is orphaned because we # just deleted the associated article. reason = wikipedia.translate(self.mySite, self.talk_deletion_msg) else: # Try to guess reason by the template used templateNames = page.templates() reasons = wikipedia.translate(self.mySite, self.deletion_messages) for templateName in templateNames: if templateName in reasons: if type(reasons[templateName]) is not unicode: #Make alias to delete_reasons reason = wikipedia.translate( self.mySite, self.delete_reasons)[reasons[templateName]] else: reason = reasons[templateName] break if not reason: # Unsuccessful in guessing the reason. Use a default message. reason = reasons[u'_default'] return reason
def main():
    """Tag each unused file with a notification template and leave a note
    on the uploader's talk page.
    """
    global always
    always = False
    for arg in pywikibot.handleArgs():
        if arg == '-always':
            always = True
    mysite = pywikibot.getSite()
    # If anything needs to be prepared, you can do it here.
    # Hoisted: translate against the one site object instead of calling
    # pywikibot.getSite() again for every message.
    template_image = pywikibot.translate(mysite, template_to_the_image)
    template_user = pywikibot.translate(mysite, template_to_the_user)
    except_text_translated = pywikibot.translate(mysite, except_text)
    basicgenerator = pagegenerators.UnusedFilesGenerator()
    generator = pagegenerators.PreloadingGenerator(basicgenerator)
    for page in generator:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        # Skip files that are excepted or referenced by an external link.
        if except_text_translated not in page.getImagePageHtml() and \
           'http://' not in page.get():
            pywikibot.output(u'\n' + page.title())
            if template_image in page.get():
                pywikibot.output(u"%s done already" % page.aslink())
                continue
            appendtext(page, u"\n\n" + template_image)
            # Notify the most recent uploader on their talk page.
            uploader = page.getFileVersionHistory().pop()[1]
            usertalkname = u'User Talk:%s' % uploader
            usertalkpage = pywikibot.Page(mysite, usertalkname)
            msg2uploader = template_user % {'title': page.title()}
            appendtext(usertalkpage, msg2uploader)
def run(self):
    """Move the category: build the edit summary if the caller did not
    supply one, copy the old category page to the new title (history is
    preserved via copyAndKeep) and move its talk page along.
    """
    site = pywikibot.getSite()
    newCat = catlib.Category(site, self.newCatTitle)
    # set edit summary message
    if not self.editSummary:
        try:
            self.editSummary = pywikibot.translate(
                site, msg_change) % (self.oldCat.title(), newCat.title())
        except TypeError:
            # Localized message only takes a single argument.
            self.editSummary = pywikibot.translate(
                site, msg_change) % self.oldCat.title()
    # Copy the category contents to the new category page
    copied = False
    oldMovedTalk = None
    if self.oldCat.exists() and self.moveCatPage:
        copied = self.oldCat.copyAndKeep(
            self.newCatTitle, pywikibot.translate(site, cfd_templates))
    # Also move the talk page
    if copied:
        # NOTE(review): both placeholders are filled with the NEW title;
        # one is probably meant to be the old title — confirm against
        # deletion_reason_move's format string.
        reason = pywikibot.translate(site, deletion_reason_move) \
            % (self.newCatTitle, self.newCatTitle)
        oldTalk = self.oldCat.toggleTalkPage()
        if oldTalk.exists():
            newTalkTitle = newCat.toggleTalkPage().title()
            try:
                talkMoved = oldTalk.move(newTalkTitle, reason)
            except (pywikibot.NoPage, pywikibot.PageNotSaved), e:
                #in order :
                #Source talk does not exist, or
                #Target talk already exists
                pywikibot.output(e.message)
            else:
                # remember the moved talk page so it can be deleted later
                if talkMoved:
                    oldMovedTalk = oldTalk
def reportBadAccount(self, name=None, final=False):
    """Queue *name* as a suspected bad username and, once the queue
    reaches globalvar.dumpToLog entries (or *final* is set), append all
    queued reports to the localized report page.
    """
    # Queue process
    if name:
        if globalvar.confirm:
            answer = pywikibot.inputChoice(
                u'%s may have an unwanted username, do you want to report this user?'
                % name, ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
            if answer in ['a', 'all']:
                answer = 'y'
                globalvar.confirm = False
        else:
            answer = 'y'
        if answer.lower() in ['yes', 'y'] or not globalvar.confirm:
            showStatus()
            pywikibot.output(
                u'%s is possibly an unwanted username. It will be reported.'
                % name)
            if hasattr(self, '_BAQueue'):
                self._BAQueue.append(name)
            else:
                self._BAQueue = [name]
    if len(self._BAQueue) >= globalvar.dumpToLog or final:
        rep_text = ''
        # name in queue is max, put detail to report page
        pywikibot.output("Updating badname accounts to report page...")
        rep_page = pywikibot.Page(
            self.site, pywikibot.translate(self.site, report_page))
        if rep_page.exists():
            text_get = rep_page.get()
        else:
            text_get = u'This is a report page for the Bad-username, please translate me. --~~~'
        pos = 0
        # The talk page includes "_" between the two names, in this way i
        # replace them to " ".
        for usrna in self._BAQueue:
            username = pywikibot.url2link(usrna, self.site, self.site)
            n = re.compile(re.escape(username), re.UNICODE)
            y = n.search(text_get, pos)
            if y:
                pywikibot.output(u'%s is already in the report page.'
                                 % username)
            else:
                # Adding the log.
                rep_text += pywikibot.translate(self.site, report_text) % username
                if self.site.lang == 'it':
                    rep_text = "%s%s}}" % (rep_text, self.bname[username])
        com = i18n.twtranslate(self.site, 'welcome-bad_username')
        if rep_text != '':
            rep_page.put(text_get + rep_text, comment=com, force=True,
                         minorEdit=True)
            showStatus(5)
            pywikibot.output(u'Reported')
        # BUG FIX: was ``self.BAQueue = list()`` (no underscore) — the
        # real queue in self._BAQueue was never emptied, so every flush
        # re-reported all previous entries.
        self._BAQueue = list()
    else:
        return True
def get_wiki_save_page(stat_page=False):
    """Return the Page object where reports (or, with stat_page=True,
    statistics) are stored for the current site."""
    site = pywikibot.getSite()
    bot_name = config.usernames[site.family.name][site.lang]
    localized_paths = {
        '_default': u'User:%s/Report' % bot_name,
        'es': u'Usuario:%s/Reporte' % bot_name,
        'it': u'Utente:RevertBot/Report',
    }
    save_path = pywikibot.translate(site, localized_paths)
    if stat_page:
        stat_suffix = pywikibot.translate(site, stat_msg)[0]
        return pywikibot.Page(site, '%s/%s' % (save_path, stat_suffix))
    if append_date_to_wiki_save_path:
        now = time.localtime()
        if append_day_to_wiki_save_path:
            day = '_' + str(now[2])
        else:
            day = ''
        # e.g. ".../Report_3_March_2012"
        save_path += day + '_' + date.monthName(site.language(), now[1]) \
            + '_' + str(now[0])
    return pywikibot.Page(site, save_path)
def run(self):
    """Clean the localized sandbox page(s) in an endless loop, or return
    after a single pass when self.no_repeat is set.
    """
    mySite = wikipedia.getSite()
    while True:
        now = time.strftime("%d %b %Y %H:%M:%S (UTC)", time.gmtime())
        localSandboxTitle = wikipedia.translate(mySite, sandboxTitle)
        if type(localSandboxTitle) is list:
            titles = localSandboxTitle
        else:
            titles = [localSandboxTitle, ]
        for title in titles:
            # BUG FIX: the page was built from ``localSandboxTitle``, so
            # with a list of sandbox titles every iteration passed the
            # whole list to wikipedia.Page instead of the current title.
            sandboxPage = wikipedia.Page(mySite, title)
            try:
                text = sandboxPage.get()
                translatedContent = wikipedia.translate(mySite, content)
                if text.strip() == translatedContent.strip():
                    wikipedia.output(u'The sandbox is still clean, no change necessary.')
                else:
                    translatedMsg = wikipedia.translate(mySite, msg)
                    sandboxPage.put(translatedContent, translatedMsg)
            except wikipedia.EditConflict:
                wikipedia.output(u'*** Loading again because of edit conflict.\n')
        if self.no_repeat:
            wikipedia.output(u'\nDone.')
            return
        else:
            wikipedia.output('\nSleeping %s hours, now %s' % (self.hours, now))
            time.sleep(self.hours * 60 * 60)
def main():
    """Tag each unused file with a notification template and leave a note
    on the uploader's talk page (passes the ``always`` flag explicitly to
    appendtext).
    """
    always = False
    for arg in wikipedia.handleArgs():
        if arg == '-always':
            always = True
    mysite = wikipedia.getSite()
    # If anything needs to be prepared, you can do it here.
    # Hoisted: translate against the one site object instead of calling
    # wikipedia.getSite() again for every message.
    template_image = wikipedia.translate(mysite, template_to_the_image)
    template_user = wikipedia.translate(mysite, template_to_the_user)
    except_text_translated = wikipedia.translate(mysite, except_text)
    basicgenerator = pagegenerators.UnusedFilesGenerator()
    generator = pagegenerators.PreloadingGenerator(basicgenerator)
    for page in generator:
        wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        # Skip files that are excepted or referenced by an external link.
        if except_text_translated not in page.getImagePageHtml() and \
           'http://' not in page.get():
            wikipedia.output(u'\n' + page.title())
            appendtext(page, template_image, always)
            # Notify the most recent uploader on their talk page.
            uploader = page.getFileVersionHistory().pop()[1]
            usertalkname = u'User Talk:%s' % uploader
            usertalkpage = wikipedia.Page(mysite, usertalkname)
            msg2uploader = template_user % page.title()
            appendtext(usertalkpage, msg2uploader, always)
def get_wiki_save_page(stat_page=False):
    """Return the wiki page used to store reports; with stat_page=True,
    the statistics subpage instead."""
    site = pywikibot.getSite()
    reporter = config.usernames[site.family.name][site.lang]
    base = pywikibot.translate(site, {
        '_default': u'User:%s/Report' % reporter,
        'es': u'Usuario:%s/Reporte' % reporter,
        'it': u'Utente:RevertBot/Report',
    })
    if stat_page:
        suffix = pywikibot.translate(site, stat_msg)[0]
        return pywikibot.Page(site, '%s/%s' % (base, suffix))
    if append_date_to_wiki_save_path:
        now = time.localtime()
        day = ''
        if append_day_to_wiki_save_path:
            day = '_' + str(now[2])
        # e.g. ".../Report_3_March_2012"
        base += '%s_%s_%s' % (day,
                              date.monthName(site.language(), now[1]),
                              str(now[0]))
    return pywikibot.Page(site, base)
def main():
    """Tag each unused file with a notification template and leave a note
    on the uploader's talk page.
    """
    global always
    always = False
    for arg in pywikibot.handleArgs():
        if arg == '-always':
            always = True
    mysite = pywikibot.getSite()
    # If anything needs to be prepared, you can do it here.
    # Hoisted: translate against the one site object instead of calling
    # pywikibot.getSite() again for every message.
    template_image = pywikibot.translate(mysite, template_to_the_image)
    template_user = pywikibot.translate(mysite, template_to_the_user)
    except_text_translated = pywikibot.translate(mysite, except_text)
    basicgenerator = pagegenerators.UnusedFilesGenerator()
    generator = pagegenerators.PreloadingGenerator(basicgenerator)
    for page in generator:
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        # Skip files that are excepted or referenced by an external link.
        if except_text_translated not in page.getImagePageHtml() and \
           'http://' not in page.get():
            pywikibot.output(u'\n' + page.title())
            if template_image in page.get():
                pywikibot.output(u"%s done already" % page.aslink())
                continue
            appendtext(page, u"\n\n" + template_image)
            # Notify the most recent uploader on their talk page.
            uploader = page.getFileVersionHistory().pop()[1]
            usertalkname = u'User Talk:%s' % uploader
            usertalkpage = pywikibot.Page(mysite, usertalkname)
            msg2uploader = template_user % {'title': page.title()}
            appendtext(usertalkpage, msg2uploader)
def run(self):
    """Move the category: build the edit summary if the caller did not
    supply one, copy the old category page to the new title (history is
    preserved via copyAndKeep) and move its talk page along.
    """
    site = pywikibot.getSite()
    newCat = catlib.Category(site, self.newCatTitle)
    # set edit summary message
    if not self.editSummary:
        try:
            self.editSummary = pywikibot.translate(site, msg_change) \
                % (self.oldCat.title(), newCat.title())
        except TypeError:
            # Localized message only takes a single argument.
            self.editSummary = pywikibot.translate(site, msg_change) \
                % self.oldCat.title()
    # Copy the category contents to the new category page
    copied = False
    oldMovedTalk = None
    if self.oldCat.exists() and self.moveCatPage:
        copied = self.oldCat.copyAndKeep(
            self.newCatTitle, pywikibot.translate(site, cfd_templates))
    # Also move the talk page
    if copied:
        # NOTE(review): both placeholders are filled with the NEW title;
        # one is probably meant to be the old title — confirm against
        # deletion_reason_move's format string.
        reason = pywikibot.translate(site, deletion_reason_move) \
            % (self.newCatTitle, self.newCatTitle)
        oldTalk = self.oldCat.toggleTalkPage()
        if oldTalk.exists():
            newTalkTitle = newCat.toggleTalkPage().title()
            try:
                talkMoved = oldTalk.move(newTalkTitle, reason)
            except (pywikibot.NoPage, pywikibot.PageNotSaved), e:
                #in order :
                #Source talk does not exist, or
                #Target talk already exists
                pywikibot.output(e.message)
            else:
                # remember the moved talk page so it can be deleted later
                if talkMoved:
                    oldMovedTalk = oldTalk
def __init__(self, generator, acceptall=False, limit=None, ignorepdf=False):
    """
    Set up the reference-fixing bot.

    - generator : Page generator
    - acceptall : boolean, is -always on ?
    - limit : int, stop after n modified pages
    - ignorepdf : boolean
    """
    self.generator = generator
    self.acceptall = acceptall
    self.limit = limit
    self.ignorepdf = ignorepdf
    self.site = pywikibot.getSite()
    # Check
    # Point the edit summary at a localized manual subpage when one
    # exists for this language (or a fallback language).
    manual = 'mw:Manual:Pywikibot/refLinks'
    code = None
    for alt in [self.site.code] + i18n._altlang(self.site.code):
        if alt in localized_msg:
            code = alt
            break
    if code:
        manual += '/%s' % code
    self.msg = i18n.twtranslate(self.site, 'reflinks-msg', locals())
    # Editing this page while the bot runs tells it to stop.
    self.stopPage = pywikibot.Page(
        self.site, pywikibot.translate(self.site, stopPage))
    local = pywikibot.translate(self.site, badtitles)
    if local:
        bad = '(' + globalbadtitles + '|' + local + ')'
    else:
        bad = globalbadtitles
    self.titleBlackList = re.compile(bad, re.I | re.S | re.X)
    self.norefbot = noreferences.NoReferencesBot(None)
    self.deduplicator = DuplicateReferences()
    try:
        # Remember the stop page's current revision; a later edit to it
        # is the stop signal.
        self.stopPageRevId = self.stopPage.latestRevision()
    except pywikibot.NoPage:
        pywikibot.output(u'The stop page %s does not exist'
                         % self.stopPage.title(asLink=True))
        raise
    # Regex to grasp content-type meta HTML tag in HTML source
    self.META_CONTENT = re.compile(ur'(?i)<meta[^>]*content\-type[^>]*>')
    # Extract the encoding from a charset property (from content-type !)
    self.CHARSET = re.compile(ur'(?i)charset\s*=\s*(?P<enc>[^\'";>/]*)')
    # Extract html title from page
    self.TITLE = re.compile(ur'(?is)(?<=<title>).*?(?=</title>)')
    # Matches content inside <script>/<style>/HTML comments
    self.NON_HTML = re.compile(
        ur'(?is)<script[^>]*>.*?</script>|<style[^>]*>.*?</style>|<!--.*?-->|<!\[CDATA\[.*?\]\]>')
    # Authorized mime types for HTML pages
    self.MIME = re.compile(
        ur'application/(?:xhtml\+xml|xml)|text/(?:ht|x)ml')
def main(): #Setup Familys for Wikia Involved anime = wikipedia.getSite(code=u'en', fam=u'anime') wikipedia.setAction(wikipedia.translate(anime, msg)) siteList = [] pageList = [] #Get Project Wiki Listing wikiaIds = [] page = wikipedia.Page(anime, u'Bots/Wiki', None, 4) #4=Project Namespace try: text = page.get() r = re.compile(u'^.*<!-- \|\|START\|\| -->\n?', re.UNICODE | re.DOTALL) text = re.sub(r, u'', text) r = re.compile(u'\n?<!-- \|\|END\|\| -->.*$', re.UNICODE | re.DOTALL) text = re.sub(r, u'', text) r = re.compile(u'\n', re.UNICODE | re.MULTILINE | re.DOTALL) wikilist = re.split(r, text) r = re.compile(u'^#|^\s*$|^\[', re.UNICODE | re.MULTILINE | re.DOTALL) for wiki in wikilist: if not re.match(r, wiki): wikiaIds.append(wiki) except wikipedia.NoPage: moreYears = False for wiki in wikiaIds: siteList.append(wikipedia.getSite(code=u'en', fam=wiki)) #Get Page List page = wikipedia.Page(anime, u'Bots/CleanDelete/Pages', None, 4) #4=Project Namespace try: text = page.get() r = re.compile(u'^.*<!-- \|\|START\|\| -->\n?', re.UNICODE | re.DOTALL) text = re.sub(r, u'', text) r = re.compile(u'\n?<!-- \|\|END\|\| -->.*$', re.UNICODE | re.DOTALL) text = re.sub(r, u'', text) r = re.compile(u'\n', re.UNICODE | re.MULTILINE | re.DOTALL) pages = re.split(r, text) r = re.compile(u'^#|^\s*$', re.UNICODE | re.MULTILINE | re.DOTALL) for p in pages: if not re.match(r, p): pageList.append(p) except wikipedia.NoPage: moreYears = False for page in pageList: wikipedia.output(u'Doing Page %s' % page) for site in siteList: p = wikipedia.Page(site, page) if p.exists(): wikipedia.output(u'Page %s exists on %s.' % (p.title(), site.family.name)) wikipedia.output(u'Deleting %s' % p.title()) p.delete(wikipedia.translate(anime, msg), True) else: wikipedia.output( u'Page %s does not exist on %s, skipping page on site.' % (p.title(), site.family.name))
def transferImage(self, sourceImagePage, debug=False): """Gets a wikilink to an image, downloads it and its description, and uploads it to another wikipedia. Returns the filename which was used to upload the image This function is used by imagetransfer.py and by copy_table.py """ sourceSite = sourceImagePage.site() if debug: print "-" * 50 if debug: print "Found image: %s"% imageTitle url = sourceImagePage.fileUrl() newname = sourceImagePage.titleWithoutNamespace() pywikibot.output(u"URL should be: %s" % url) # localize the text that should be printed on the image description page try: description = sourceImagePage.get() #unlink categories #description = pywikibot.removeCategoryLinks(description,pywikibot.getSite('commons', 'commons')) description = re.sub(u'\[\[Category', u'[[:Category', description, flags=re.IGNORECASE) # try to translate license templates if (sourceSite.sitename(), self.targetSite.sitename()) in licenseTemplates: for old, new in licenseTemplates[(sourceSite.sitename(), self.targetSite.sitename())].iteritems(): new = '{{%s}}' % new old = re.compile('{{%s}}' % old) description = pywikibot.replaceExcept(description, old, new, ['comment', 'math', 'nowiki', 'pre']) description = pywikibot.translate(self.targetSite, copy_message) \ % (sourceSite, description) description += '\n\n' + sourceImagePage.getFileVersionHistoryTable() # add interwiki link if sourceSite.family == self.targetSite.family: description += "\r\n\r\n" + sourceImagePage.aslink(forceInterwiki = True) #add cat description += "\n[[Kategooria:Commonsist kopeeritud pildid]]\n" except pywikibot.NoPage: description='' print "Image does not exist or description page is empty." except pywikibot.IsRedirectPage: description='' print "Image description page is redirect." 
else: #bot = UploadRobot(url=self.imagePage.fileUrl(), description=CH, useFilename=self.newname, keepFilename=True, verifyDescription=False, ignoreWarning = True, targetSite = pywikibot.getSite('commons', 'commons')) bot = upload.UploadRobot(url = url, description = description, useFilename = newname, keepFilename=True, verifyDescription=False, ignoreWarning = False, targetSite = self.targetSite) # try to upload targetFilename = bot.run() if targetFilename and self.targetSite.family.name == 'commons' and self.targetSite.lang == 'commons': # upload to Commons was successful reason = pywikibot.translate(sourceSite, nowCommonsMessage) # try to delete the original image if we have a sysop account if sourceSite.family.name in config.sysopnames and sourceSite.lang in config.sysopnames[sourceSite.family.name]: if sourceImagePage.delete(reason): return if sourceSite.lang in nowCommonsTemplate and sourceSite.family.name in config.usernames and sourceSite.lang in config.usernames[sourceSite.family.name]: # add the nowCommons template. pywikibot.output(u'Adding nowCommons template to %s' % sourceImagePage.title()) sourceImagePage.put(sourceImagePage.get() + '\n\n' + nowCommonsTemplate[sourceSite.lang] % targetFilename, comment = nowCommonsMessage[sourceSite.lang])
def setSummaryMessage(self, disambPage, new_targets=[], unlink=False,
                      dn=False):
    """Set self.comment to the edit summary matching the work done on
    links to *disambPage*: a per-wiki customized comment when configured,
    otherwise the redirect or plain disambiguation messages."""
    # Comma-separated wikilink list of the chosen targets, or a localized
    # "unknown" phrase when none were picked.
    targets = u', '.join(u'[[%s]]' % page_title
                         for page_title in new_targets)
    if not targets:
        targets = i18n.twtranslate(self.mysite, unknown_msg)
    # first check whether user has customized the edit comment
    family_comments = pywikibot.config.disambiguation_comment
    if (self.mysite.family.name in family_comments
            and self.mylang in family_comments[self.mysite.family.name]):
        localized = pywikibot.translate(
            self.mysite, family_comments[self.mysite.family.name])
        try:
            self.comment = localized % (disambPage.title(), targets)
        except TypeError:
            # Backwards compatibility, type error probably caused by too
            # many arguments for format string
            self.comment = localized % disambPage.title()
    elif disambPage.isRedirectPage():
        # when working on redirects, there's another summary message
        if unlink and not new_targets:
            key, params = msg_redir_unlink, {'from': disambPage.title()}
        elif dn and not new_targets:
            key, params = msg_redir_dn, {'from': disambPage.title()}
        else:
            key, params = msg_redir, {'from': disambPage.title(),
                                      'to': targets}
        self.comment = i18n.twtranslate(self.mysite, key, params)
    else:
        if unlink and not new_targets:
            key, params = msg_unlink, {'from': disambPage.title()}
        elif dn and not new_targets:
            key, params = msg_dn, {'from': disambPage.title()}
        else:
            key, params = msg, {'from': disambPage.title(), 'to': targets}
        self.comment = i18n.twtranslate(self.mysite, key, params)
def run(self):
    """Worker loop: pop queued dead-link reports and post each one on the
    talk page of the article that contained the link.

    Runs until killed; when self.finishing is set, drains the queue and
    exits. self.semaphore guards each dequeue-and-report cycle.
    """
    while not self.killed:
        if len(self.queue) == 0:
            if self.finishing:
                break
            else:
                time.sleep(0.1)
        else:
            self.semaphore.acquire()
            (url, errorReport, containingPage, archiveURL) = self.queue[0]
            self.queue = self.queue[1:]
            talkPage = containingPage.toggleTalkPage()
            pywikibot.output(
                u'\03{lightaqua}** Reporting dead link on %s...\03{default}'
                % talkPage.title(asLink=True))
            try:
                content = talkPage.get() + "\n\n"
                # Skip if the dead URL is already mentioned on the talk page.
                if url in content:
                    pywikibot.output(
                        u'\03{lightaqua}** Dead link seems to have already been reported on %s\03{default}'
                        % talkPage.title(asLink=True))
                    self.semaphore.release()
                    continue
            except (pywikibot.NoPage, pywikibot.IsRedirectPage):
                content = u''
            if archiveURL:
                archiveMsg = pywikibot.translate(pywikibot.getSite(),
                                                 talk_report_archive) % archiveURL
            else:
                archiveMsg = u''
            # The caption will default to "Dead link". But if there is
            # already such a caption, we'll use "Dead link 2",
            # "Dead link 3", etc.
            caption = pywikibot.translate(pywikibot.getSite(), talk_report_caption)
            i = 1
            # Check if there is already such a caption on the talk page.
            while re.search('= *' + caption + ' *=', content) is not None:
                i += 1
                caption = pywikibot.translate(pywikibot.getSite(),
                                              talk_report_caption) + " " + str(i)
            content += pywikibot.translate(pywikibot.getSite(), talk_report) \
                % (caption, errorReport, archiveMsg)
            comment = u'[[%s#%s|→]]%s' \
                % (talkPage.title(), caption,
                   pywikibot.translate(pywikibot.getSite(), talk_report_msg))
            try:
                talkPage.put(content, comment)
            except pywikibot.SpamfilterError, error:
                pywikibot.output(
                    u'\03{lightaqua}** SpamfilterError while trying to change %s: %s\03{default}'
                    % (talkPage.title(asLink=True), error.url))
            self.semaphore.release()
def run(self):
    """Clean the localized sandbox page(s) forever, sleeping self.hours
    between passes, or return after one pass when self.no_repeat is set.
    """
    def minutesDiff(time1, time2):
        """Approximate difference in minutes between two MediaWiki
        yyyymmddhhmmss timestamps (every month is counted as 30 days)."""
        if type(time1) is long:
            time1 = str(time1)
        if type(time2) is long:
            time2 = str(time2)
        t1 = (((int(time1[0:4]) * 12 + int(time1[4:6])) * 30 +
               int(time1[6:8])) * 24 + int(time1[8:10]) * 60) \
            + int(time1[10:12])
        t2 = (((int(time2[0:4]) * 12 + int(time2[4:6])) * 30 +
               int(time2[6:8])) * 24 + int(time2[8:10]) * 60) \
            + int(time2[10:12])
        return abs(t2 - t1)

    mySite = pywikibot.getSite()
    while True:
        wait = False
        now = time.strftime("%d %b %Y %H:%M:%S (UTC)", time.gmtime())
        localSandboxTitle = pywikibot.translate(mySite, sandboxTitle)
        if type(localSandboxTitle) is list:
            titles = localSandboxTitle
        else:
            titles = [localSandboxTitle]
        for title in titles:
            sandboxPage = pywikibot.Page(mySite, title)
            try:
                text = sandboxPage.get()
                translatedContent = pywikibot.translate(mySite, content)
                translatedMsg = pywikibot.translate(mySite, msg)
                subst = "subst:" in translatedContent
                if text.strip() == translatedContent.strip():
                    pywikibot.output(
                        u"The sandbox is still clean, no change necessary.")
                elif subst and sandboxPage.userName() == mySite.loggedInAs():
                    pywikibot.output(
                        u"The sandbox might be clean, no change necessary.")
                # FIX: replaced the archaic ``<>`` operator with ``!=``
                # (identical semantics, valid in both Python 2 and 3).
                elif text.find(translatedContent.strip()) != 0 and not subst:
                    sandboxPage.put(translatedContent, translatedMsg)
                    pywikibot.output(
                        u"Standard content was changed, sandbox cleaned.")
                else:
                    diff = minutesDiff(sandboxPage.editTime(),
                                       time.strftime("%Y%m%d%H%M%S",
                                                     time.gmtime()))
                    # Is the last edit more than self.delay minutes ago?
                    if diff >= self.delay:
                        sandboxPage.put(translatedContent, translatedMsg)
                    else:
                        # wait for the rest of the delay window
                        pywikibot.output(u"Sleeping for %d minutes."
                                         % (self.delay - diff))
                        time.sleep((self.delay - diff) * 60)
                        wait = True
            except pywikibot.EditConflict:
                pywikibot.output(
                    u"*** Loading again because of edit conflict.\n")
        if self.no_repeat:
            pywikibot.output(u"\nDone.")
            return
        elif not wait:
            if self.hours < 1.0:
                pywikibot.output("\nSleeping %s minutes, now %s"
                                 % ((self.hours * 60), now))
            else:
                pywikibot.output("\nSleeping %s hours, now %s"
                                 % (self.hours, now))
            time.sleep(self.hours * 60 * 60)
def main():
    """Build the "Plotwiki Current Events" page on the gaia wiki from the
    yearly Pnav navigation templates found on the plotwiki.
    """
    gaia = wikipedia.getSite(code=u'en', fam=u'gaia')
    plot = wikipedia.getSite(code=u'en', fam=u'plotwiki')
    wikipedia.setAction(wikipedia.translate(gaia, msg))
    wikipedia.setAction(wikipedia.translate(plot, msg))
    final = u'<noinclude><!-- Do not edit this page, this page is automatically created by a Bot. -->\n'
    final += u'==Most Recent Events==</noinclude>\n'
    nonrecent = u'<noinclude>==Older Events==\n'
    end = u'\n\'\'View everything here on the [[Plotwiki:|plotwiki...]]\'\'</noinclude>'
    moreYears = True
    # NOTE(review): ``04`` is a Python 2 octal literal equal to 4 (the
    # first Pnav template year); it is a SyntaxError under Python 3.
    year = 04
    events = []
    temp = []
    # Walk Template:Pnav04, Pnav05, ... until a year's template is missing.
    while moreYears:
        y = str(year)
        page = wikipedia.Page(plot, u'Template:Pnav%s' % y.zfill(2))
        try:
            text = page.get()
            # ``sre`` is the long-deprecated alias of the ``re`` module.
            r = sre.compile(u'^.*<span style=".*normal.*">(.*)</span>.*$', sre.UNICODE | sre.MULTILINE | sre.DOTALL)
            text = sre.sub(r, u'\\1', text)
            r = sre.compile(u'\s+\|\s+', sre.UNICODE | sre.MULTILINE | sre.DOTALL)
            pages = sre.split(r, text)
            r = sre.compile(u'\[\[([^|]*)(\|.*)?\]\]', sre.UNICODE)
            for p in pages:
                temp.append(sre.sub(r, u'\\1', p))
            year += 1
        except wikipedia.NoPage:
            moreYears = False
    # De-duplicate while keeping first-seen order, then reverse so the
    # newest events come first.
    for e in temp:
        if not e in events:
            events.append(e)
    events = reversed(list(events))
    x = 1
    for e in events:
        final += u'* [[Plotwiki:%s|]]\n' % e
        x += 1
        # After the first five entries, the rest go under "Older Events".
        if x == 6:
            final += nonrecent
    # NOTE(review): with five or fewer events ``end`` is appended twice
    # (once here and once unconditionally below) — looks unintended;
    # confirm against the rendered page before changing.
    if x <= 6:
        final += end
    final += end
    page = wikipedia.Page(gaia, u'Plotwiki Current Events')
    page.put(final)
def setSummaryMessage(self, disambPage, new_targets=[], unlink=False,
                      dn=False):
    """Choose and store (in self.comment) the edit summary describing the
    changes made to links pointing at *disambPage*."""
    title = disambPage.title()
    links = [u'[[%s]]' % t for t in new_targets]
    if links:
        targets = u', '.join(links)
    else:
        # No target chosen: use the localized "unknown" phrase.
        targets = i18n.twtranslate(self.mysite, unknown_msg)
    # A per-family customized comment, when configured, wins over the
    # generic messages.
    custom = pywikibot.config.disambiguation_comment
    if (self.mysite.family.name in custom
            and self.mylang in custom[self.mysite.family.name]):
        template = pywikibot.translate(
            self.mysite, custom[self.mysite.family.name])
        try:
            self.comment = template % (title, targets)
        except TypeError:
            # Backwards compatibility: older comment strings accept only
            # the page title.
            self.comment = template % title
        return
    # Redirect pages have their own set of summary messages.
    redirect = disambPage.isRedirectPage()
    if unlink and not new_targets:
        key = msg_redir_unlink if redirect else msg_unlink
        self.comment = i18n.twtranslate(self.mysite, key, {'from': title})
    elif dn and not new_targets:
        key = msg_redir_dn if redirect else msg_dn
        self.comment = i18n.twtranslate(self.mysite, key, {'from': title})
    else:
        key = msg_redir if redirect else msg
        self.comment = i18n.twtranslate(self.mysite, key,
                                        {'from': title, 'to': targets})
def reportBadAccount(self, name=None, final=False):
    """Queue a suspicious username and flush the queue to the report page.

    name  -- username to queue (None to only flush)
    final -- when True, flush the queue even if it is not full yet
    """
    if name:
        if globalvar.confirm:
            answer = wikipedia.inputChoice(
                u"%s may have an unwanted username, do you want to report this user?" % name,
                ["Yes", "No", "All"], ["y", "N", "a"], "N")
            if answer in ["a", "all"]:
                answer = "y"
                globalvar.confirm = False
        else:
            answer = "y"
        if answer.lower() in ["yes", "y"] or not globalvar.confirm:
            showStatus()
            wikipedia.output(u"%s is possibly an unwanted username. It will be reported." % name)
            if hasattr(self, "_BAQueue"):
                self._BAQueue.append(name)
            else:
                self._BAQueue = [name]
            if len(self._BAQueue) >= globalvar.dumpToLog or final:
                rep_text = ""
                # name in queue is max, put detail to report page
                wikipedia.output("Updating badname accounts to report page...")
                rep_page = wikipedia.Page(self.site,
                                          wikipedia.translate(self.site, report_page))
                if rep_page.exists():
                    text_get = rep_page.get()
                else:
                    text_get = u"This is a report page for the Bad-username, please translate me. --~~~"
                pos = 0
                # The talk page includes "_" between the two names, in this way i replace them to " ".
                for usrna in self._BAQueue:
                    username = wikipedia.url2link(usrna, self.site, self.site)
                    n = re.compile(re.escape(username), re.UNICODE)
                    y = n.search(text_get, pos)
                    if y:
                        wikipedia.output(u"%s is already in the report page." % username)
                    else:
                        # Adding the log.
                        rep_text += wikipedia.translate(self.site, report_text) % username
                        if self.site.lang == "it":
                            rep_text = "%s%s}}" % (rep_text, self.bname[username])
                com = wikipedia.translate(self.site, comment)
                if rep_text != "":
                    rep_page.put(text_get + rep_text, comment=com, minorEdit=True)
                    showStatus(5)
                    wikipedia.output(u"Reported")
                # Bug fix: the original assigned to 'self.BAQueue' (no
                # underscore), so the real queue was never emptied and the
                # same accounts were re-reported on every flush.
                self._BAQueue = []
    else:
        return True
def put(self, title, contents):
    """Create or update *title* with *contents*, honouring the bot's
    append / force / autosummary / dry-run settings."""
    mysite = wikipedia.getSite()
    page = wikipedia.Page(mysite, title)
    # Announce the page we are working on, title highlighted in purple.
    wikipedia.output(u">>> \03{lightpurple}%s\03{default} <<<" % page.title())
    comment = self.summary if self.summary else wikipedia.translate(mysite, self.msg)
    comment_top = comment + " - " + wikipedia.translate(mysite, self.msg_top)
    comment_bottom = comment + " - " + wikipedia.translate(mysite, self.msg_bottom)
    comment_force = comment + " *** " + wikipedia.translate(mysite, self.msg_force) + " ***"
    # Remove trailing newlines (cause troubles when creating redirects)
    contents = re.sub('^[\r\n]*', '', contents)
    if page.exists():
        if self.append == "Top":
            wikipedia.output(u"Page %s already exists, appending on top!" % title)
            contents = contents + page.get()
            comment = comment_top
        elif self.append == "Bottom":
            wikipedia.output(u"Page %s already exists, appending on bottom!" % title)
            contents = page.get() + contents
            comment = comment_bottom
        elif self.force:
            wikipedia.output(u"Page %s already exists, ***overwriting!" % title)
            comment = comment_force
        else:
            wikipedia.output(u"Page %s already exists, not adding!" % title)
            return
    elif self.autosummary:
        comment = ''
        wikipedia.setAction('')
    if self.dry:
        wikipedia.output("*** Dry mode ***\n"
                         + "\03{lightpurple}title\03{default}: " + title + "\n"
                         + "\03{lightpurple}contents\03{default}:\n" + contents + "\n"
                         "\03{lightpurple}comment\03{default}: " + comment + "\n")
        return
    try:
        page.put(contents, comment=comment, minorEdit=self.minor)
    except wikipedia.LockedPage:
        wikipedia.output(u"Page %s is locked; skipping." % title)
    except wikipedia.EditConflict:
        wikipedia.output(u'Skipping %s because of edit conflict' % title)
    except wikipedia.SpamfilterError as error:
        wikipedia.output(u'Cannot change %s because of spam blacklist entry %s'
                         % (title, error.url))
def __init__(self, generator, acceptall=False, limit=None, ignorepdf=False): """ - generator : Page generator - acceptall : boolean, is -always on ? - limit : int, stop after n modified pages - ignorepdf : boolean """ self.generator = generator self.acceptall = acceptall self.limit = limit self.ignorepdf = ignorepdf self.site = pywikibot.getSite() # Check manual = 'mw:Manual:Pywikibot/refLinks' code = None for alt in [self.site.code] + i18n._altlang(self.site.code): if alt in localized_msg: code = alt break if code: manual += '/%s' % code self.msg = i18n.twtranslate(self.site, 'reflinks-msg', locals()) self.stopPage = pywikibot.Page(self.site, pywikibot.translate(self.site, stopPage)) local = pywikibot.translate(self.site, badtitles) if local: bad = '(' + globalbadtitles + '|' + local + ')' else: bad = globalbadtitles self.titleBlackList = re.compile(bad, re.I | re.S | re.X) self.norefbot = noreferences.NoReferencesBot(None) self.deduplicator = DuplicateReferences() try: self.stopPageRevId = self.stopPage.latestRevision() except pywikibot.NoPage: pywikibot.output(u'The stop page %s does not exist' % self.stopPage.title(asLink=True)) raise # Regex to grasp content-type meta HTML tag in HTML source self.META_CONTENT = re.compile(ur'(?i)<meta[^>]*content\-type[^>]*>') # Extract the encoding from a charset property (from content-type !) self.CHARSET = re.compile(ur'(?i)charset\s*=\s*(?P<enc>[^\'";>/]*)') # Extract html title from page self.TITLE = re.compile(ur'(?is)(?<=<title>).*?(?=</title>)') # Matches content inside <script>/<style>/HTML comments self.NON_HTML = re.compile( ur'(?is)<script[^>]*>.*?</script>|<style[^>]*>.*?</style>|<!--.*?-->|<!\[CDATA\[.*?\]\]>') # Authorized mime types for HTML pages self.MIME = re.compile( ur'application/(?:xhtml\+xml|xml)|text/(?:ht|x)ml')
def main():
    """Rebuild the gaia wiki's 'Plotwiki Current Events' page from the
    plotwiki's per-year Pnav navigation templates (stops at the first
    missing year)."""
    gaia = wikipedia.getSite(code=u'en', fam=u'gaia')
    plot = wikipedia.getSite(code=u'en', fam=u'plotwiki')
    wikipedia.setAction(wikipedia.translate(gaia, msg))
    wikipedia.setAction(wikipedia.translate(plot, msg))
    final = u'<noinclude><!-- Do not edit this page, this page is automatically created by a Bot. -->\n'
    final += u'==Most Recent Events==</noinclude>\n'
    nonrecent = u'<noinclude>==Older Events==\n'
    end = u'\n\'\'View everything here on the [[Plotwiki:|plotwiki...]]\'\'</noinclude>'
    moreYears = True
    year = 4  # was the octal literal '04'; zero-padded via zfill below
    events = []
    temp = []
    while moreYears:
        y = str(year)
        page = wikipedia.Page(plot, u'Template:Pnav%s' % y.zfill(2))
        try:
            text = page.get()
            # Reduce the template to the span content, split on pipes, then
            # strip wikilink markup to bare titles.
            r = sre.compile(u'^.*<span style=".*normal.*">(.*)</span>.*$',
                            sre.UNICODE | sre.MULTILINE | sre.DOTALL)
            text = sre.sub(r, u'\\1', text)
            r = sre.compile(u'\s+\|\s+', sre.UNICODE | sre.MULTILINE | sre.DOTALL)
            pages = sre.split(r, text)
            r = sre.compile(u'\[\[([^|]*)(\|.*)?\]\]', sre.UNICODE)
            for p in pages:
                temp.append(sre.sub(r, u'\\1', p))
            year += 1
        except wikipedia.NoPage:
            moreYears = False
    # Order-preserving de-duplication, newest events first.
    for e in temp:
        if not e in events:
            events.append(e)
    events = reversed(list(events))
    x = 1
    for e in events:
        final += u'* [[Plotwiki:%s|]]\n' % e
        x += 1
        if x == 6:
            final += nonrecent
    # Bug fix: 'end' was previously appended twice whenever fewer than six
    # events were listed ('if x <= 6: final += end' followed by an
    # unconditional append). Append the footer exactly once.
    final += end
    page = wikipedia.Page(gaia, u'Plotwiki Current Events')
    page.put(final)
def treat(self, page): ''' Loads a page, converts all HTML tables in its text to wiki syntax, and saves the converted text. Returns True if the converted table was successfully saved, otherwise returns False. ''' wikipedia.output(u'\n>>> %s <<<' % page.title()) site = page.site() try: text = page.get() except wikipedia.NoPage: wikipedia.output(u"ERROR: couldn't find %s" % page.title()) return False except wikipedia.IsRedirectPage: wikipedia.output(u'Skipping redirect %s' % page.title()) return False newText, convertedTables, warningSum = self.convertAllHTMLTables(text) # Check if there are any marked tags left markedTableTagR = re.compile("<##table##|</##table##>", re.IGNORECASE) if markedTableTagR.search(newText): wikipedia.output( u'ERROR: not all marked table start or end tags processed!') return if convertedTables == 0: wikipedia.output(u"No changes were necessary.") else: if config.table2wikiAskOnlyWarnings and warningSum == 0: doUpload = True else: if config.table2wikiSkipWarnings: doUpload = True else: print "There were %i replacement(s) that might lead to bad output." % warningSum doUpload = (wikipedia.input( u'Do you want to change the page anyway? [y|N]') == "y" ) if doUpload: # get edit summary message if warningSum == 0: wikipedia.setAction( wikipedia.translate(site.lang, msg_no_warnings)) elif warningSum == 1: wikipedia.setAction( wikipedia.translate(site.lang, msg_one_warning) % warningSum) else: wikipedia.setAction( wikipedia.translate(site.lang, msg_multiple_warnings) % warningSum) page.put_async(newText)
def reportBadAccount(self, name=None, final=False):
    """Queue process: collect suspicious usernames and write them to the
    localized report page once the queue is full (or *final* is True)."""
    if name:
        if globalvar.confirm:
            answer = pywikibot.inputChoice(
                u'%s may have an unwanted username, do you want to report this user?' % name,
                ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
            if answer in ['a', 'all']:
                answer = 'y'
                globalvar.confirm = False
        else:
            answer = 'y'
        if answer.lower() in ['yes', 'y'] or not globalvar.confirm:
            showStatus()
            pywikibot.output(u'%s is possibly an unwanted username. It will be reported.' % name)
            if hasattr(self, '_BAQueue'):
                self._BAQueue.append(name)
            else:
                self._BAQueue = [name]
            if len(self._BAQueue) >= globalvar.dumpToLog or final:
                rep_text = ''
                # name in queue is max, put detail to report page
                pywikibot.output("Updating badname accounts to report page...")
                rep_page = pywikibot.Page(self.site,
                                          pywikibot.translate(self.site, report_page))
                if rep_page.exists():
                    text_get = rep_page.get()
                else:
                    text_get = u'This is a report page for the Bad-username, please translate me. --~~~'
                pos = 0
                # The talk page includes "_" between the two names, in this way i replace them to " ".
                for usrna in self._BAQueue:
                    username = pywikibot.url2link(usrna, self.site, self.site)
                    n = re.compile(re.escape(username), re.UNICODE)
                    y = n.search(text_get, pos)
                    if y:
                        pywikibot.output(u'%s is already in the report page.' % username)
                    else:
                        # Adding the log.
                        rep_text += pywikibot.translate(self.site, report_text) % username
                        if self.site.lang == 'it':
                            rep_text = "%s%s}}" % (rep_text, self.bname[username])
                com = i18n.twtranslate(self.site, 'welcome-bad_username')
                if rep_text != '':
                    rep_page.put(text_get + rep_text, comment=com, minorEdit=True)
                    showStatus(5)
                    pywikibot.output(u'Reported')
                # Bug fix: the original reset 'self.BAQueue' (a different
                # attribute) so '_BAQueue' kept growing and entries were
                # re-processed on every flush.
                self._BAQueue = []
    else:
        return True
def main(args):
    """Tag local files that already exist on Commons with the NowCommons
    template, based on image-hash duplicate detection."""
    generator = None
    always = False
    # Load a lot of default generators
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        genFactory.handleArg(arg)
    generator = genFactory.getCombinedGenerator()
    if not generator:
        raise add_text.NoEnoughData(
            'You have to specify the generator you want to use for the script!')
    for page in pagegenerators.PreloadingGenerator(generator):
        # Only work on existing, non-redirect pages in the File namespace (6).
        if not page.exists() or page.namespace() != 6 or page.isRedirectPage():
            continue
        imagepage = pywikibot.ImagePage(page.site(), page.title())
        # FIXME: Move the templates list to a lib.
        foundNowCommons = False
        for template in imagepage.templates():
            if template in pywikibot.translate(imagepage.site(), nowCommons):
                foundNowCommons = True
        if foundNowCommons:
            pywikibot.output(u'The file %s is already tagged with NowCommons'
                             % imagepage.title())
            continue
        imagehash = imagepage.getHash()
        commons = pywikibot.getSite(u'commons', u'commons')
        duplicates = commons.getFilesFromAnHash(imagehash)
        if not duplicates:
            continue
        duplicate = duplicates.pop()
        pywikibot.output(u'Found duplicate image at %s' % duplicate)
        comment = i18n.twtranslate(
            imagepage.site(), 'commons-file-now-available',
            {'localfile': imagepage.titleWithoutNamespace(),
             'commonsfile': duplicate})
        template = pywikibot.translate(imagepage.site(), nowCommonsTemplate)
        newtext = imagepage.get() + template % (duplicate,)
        pywikibot.showDiff(imagepage.get(), newtext)
        imagepage.put(newtext, comment)
def transferImage(self, sourceImagePage, debug=False): """Gets a wikilink to an image, downloads it and its description, and uploads it to another wikipedia. Returns the filename which was used to upload the image This function is used by imagetransfer.py and by copy_table.py """ sourceSite = sourceImagePage.site() if debug: print "-" * 50 if debug: print "Found image: %s"% imageTitle url = sourceImagePage.fileUrl().encode('utf-8') pywikibot.output(u"URL should be: %s" % url) # localize the text that should be printed on the image description page try: description = sourceImagePage.get() # try to translate license templates if (sourceSite.sitename(), self.targetSite.sitename()) in licenseTemplates: for old, new in licenseTemplates[(sourceSite.sitename(), self.targetSite.sitename())].iteritems(): new = '{{%s}}' % new old = re.compile('{{%s}}' % old) description = pywikibot.replaceExcept(description, old, new, ['comment', 'math', 'nowiki', 'pre']) description = pywikibot.translate(self.targetSite, copy_message) \ % (sourceSite, description) description += '\n\n' + sourceImagePage.getFileVersionHistoryTable() # add interwiki link if sourceSite.family == self.targetSite.family: description += "\r\n\r\n" + sourceImagePage.aslink(forceInterwiki = True) except pywikibot.NoPage: description='' print "Image does not exist or description page is empty." except pywikibot.IsRedirectPage: description='' print "Image description page is redirect." 
else: bot = upload.UploadRobot(url = url, description = description, targetSite = self.targetSite, urlEncoding = sourceSite.encoding()) # try to upload targetFilename = bot.run() if targetFilename and self.targetSite.family.name == 'commons' and self.targetSite.lang == 'commons': # upload to Commons was successful reason = pywikibot.translate(sourceSite, nowCommonsMessage) # try to delete the original image if we have a sysop account if sourceSite.family.name in config.sysopnames and sourceSite.lang in config.sysopnames[sourceSite.family.name]: if sourceImagePage.delete(reason): return if sourceSite.lang in nowCommonsTemplate and sourceSite.family.name in config.usernames and sourceSite.lang in config.usernames[sourceSite.family.name]: # add the nowCommons template. pywikibot.output(u'Adding nowCommons template to %s' % sourceImagePage.title()) sourceImagePage.put(sourceImagePage.get() + '\n\n' + nowCommonsTemplate[sourceSite.lang] % targetFilename, comment = nowCommonsMessage[sourceSite.lang])
def get_stats():
    """Build and return a wikitable summarizing per-report search-engine hit
    counts across all pages in the reports category."""
    import catlib, pagegenerators
    msg = pywikibot.translate(pywikibot.getSite(), stat_msg)
    cat = catlib.Category(
        pywikibot.getSite(),
        'Category:%s' % pywikibot.translate(pywikibot.getSite(), reports_cat))
    gen = pagegenerators.CategorizedPageGenerator(cat, recurse=True)
    output = u"""{| {{prettytable|width=|align=|text-align=left}}
! %s
! %s
! %s
! %s
! %s
! %s
|-
""" % (msg[1], msg[2], msg[3], 'Google', 'Yahoo', 'Live Search')
    # Running totals: google, yahoo, msn/live, entries, size (bytes).
    gnt = 0
    ynt = 0
    mnt = 0
    ent = 0
    sn = 0
    snt = 0
    for page in gen:
        data = page.get()
        gn = stat_sum('google', data)
        yn = stat_sum('yahoo', data)
        mn = stat_sum('(msn|live)', data)
        en = len(re.findall('=== \[\[', data))
        sn = len(data)
        gnt += gn
        ynt += yn
        mnt += mn
        ent += en
        snt += sn
        if en > 0:
            output += u"|%s||%s||%s KB||%s||%s||%s\n|-\n" \
                      % (page.title(asLink=True), en, sn / 1024, gn, yn, mn)
    output += u"""| ||||||||
|-
|'''%s'''||%s||%s KB||%s||%s||%s
|-
|colspan="6" align=right style="background-color:#eeeeee;"|<small>''%s: %s''</small>
|}
""" % (msg[4], ent, snt / 1024, gnt, ynt, mnt, msg[5],
       time.strftime("%d " + "%s" % (date.monthName(
           pywikibot.getSite().language(), time.localtime()[1])) + " %Y"))
    return output
def set_template(name=None):
    """Create the 'botdate' or 'botbox' helper template if it is missing."""
    site = pywikibot.getSite()
    tcat = pywikibot.translate(site, template_cat)
    url = "%s://%s%s" % (site.protocol(), site.hostname(), site.path())
    # Template bodies; each ends with a noinclude'd category link.
    botdate = u"""
<div style="text-align:right">{{{1}}}</div><noinclude>%s\n[[%s:%s]]</noinclude>
""" % (tcat[0], site.namespace(14), tcat[1])
    botbox = """
<div class=plainlinks style="text-align:right">[%s?title={{{1}}}&diff={{{2}}}&oldid={{{3}}} diff] - [%s?title={{{1}}}&action=history cron] - [%s?title=Special:Log&page={{{1}}} log]</div><noinclude>%s\n[[%s:%s]]</noinclude>
""" % (url, url, url, tcat[0], site.namespace(14), tcat[1])
    if name == 'botdate':
        p = pywikibot.Page(site, 'Template:botdate')
        if not p.exists():
            p.put(botdate, comment='Init.')
    if name == 'botbox':
        p = pywikibot.Page(site, 'Template:botbox')
        if not p.exists():
            p.put(botbox, comment='Init.')
def copyAndKeep(self, catname, cfdTemplates):
    """Returns true if copying was successful, false if target page already
    existed."""
    catname = self.site().category_namespace() + ':' + catname
    targetCat = wikipedia.Page(self.site(), catname)
    if targetCat.exists():
        wikipedia.output('Target page %s already exists!' % targetCat.title())
        return False
    wikipedia.output('Moving text from %s to %s.'
                     % (self.title(), targetCat.title()))
    authors = ', '.join(self.contributingUsers())
    creationSummary = wikipedia.translate(
        wikipedia.getSite(), msg_created_for_renaming) % (self.title(), authors)
    newtext = self.get()
    # Strip any CFD (categories-for-discussion) templates from the copy.
    for regexName in cfdTemplates:
        matchcfd = re.compile(r"{{%s.*?}}" % regexName, re.IGNORECASE)
        newtext = matchcfd.sub('', newtext)
    matchcomment = re.compile(
        r"<!--BEGIN CFD TEMPLATE-->.*<!--END CFD TEMPLATE-->",
        re.IGNORECASE | re.MULTILINE | re.DOTALL)
    newtext = matchcomment.sub('', newtext)
    # Drop leading blank lines left behind by the removals.
    newtext = newtext.lstrip('\n')
    targetCat.put(newtext, creationSummary)
    return True
def __init__(self, generator, to13=False, format=False, always=False):
    """Store the run options and pre-compile the ISBN matcher.

    - generator : page generator
    - to13      : convert ISBN-10 to ISBN-13
    - format    : re-hyphenate ISBNs
    - always    : do not ask for confirmation
    """
    self.generator = generator
    self.to13 = to13
    self.format = format
    self.always = always
    # Captures the digits/hyphens (optionally ending in X) after 'ISBN '.
    self.isbnR = re.compile(r'(?<=ISBN )(?P<code>[\d\-]+[Xx]?)')
    self.comment = pywikibot.translate(pywikibot.getSite(), msg)
def delete_broken_redirects(self):
    """Delete every broken redirect supplied by the generator, stopping
    early when self.exiting is set."""
    # get reason for deletion text
    reason = pywikibot.translate(self.site, reason_broken)
    for redir_name in self.generator.retrieve_broken_redirects():
        self.delete_1_broken_redirect(redir_name, reason)
        if self.exiting:
            break
def appendtext(page, apptext):
    """Append *apptext* to *page*, showing a diff and asking for
    confirmation unless the module-level 'always' flag is set."""
    global always
    if page.isRedirectPage():
        page = page.getRedirectTarget()
    if page.exists():
        text = page.get()
    elif page.isTalkPage():
        # Missing talk pages may be created from scratch.
        text = u''
    else:
        raise pywikibot.NoPage(u"Page '%s' does not exist" % page.title())
    # Here you can go editing. If you find you do not
    # want to edit this page, just return
    oldtext = text
    text += apptext
    if text == oldtext:
        return
    pywikibot.showDiff(oldtext, text)
    if not always:
        choice = pywikibot.inputChoice(
            u'Do you want to accept these changes?',
            ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
        if choice == 'a':
            always = True
    # 'choice' is only unbound when 'always' was already True, in which
    # case the 'or' short-circuits before reading it.
    if always or choice == 'y':
        page.put(text, pywikibot.translate(pywikibot.getSite(), comment))
def run(self):
    """Remove the category tag from all members, then delete the category
    page (and its talk page) once it is empty."""
    articles = self.cat.articlesList(recurse=0)
    if not articles:
        pywikibot.output(u'There are no articles in category %s'
                         % self.cat.title())
    else:
        for article in articles:
            if not self.titleRegex or re.search(self.titleRegex,
                                                article.title()):
                catlib.change_category(article, self.cat, None,
                                       comment=self.editSummary,
                                       inPlace=self.inPlace)
    # Also removes the category tag from subcategories' pages
    subcategories = self.cat.subcategoriesList(recurse=0)
    if not subcategories:
        pywikibot.output(u'There are no subcategories in category %s'
                         % self.cat.title())
    else:
        for subcategory in subcategories:
            catlib.change_category(subcategory, self.cat, None,
                                   comment=self.editSummary,
                                   inPlace=self.inPlace)
    # Deletes the category page
    if self.cat.exists() and self.cat.isEmptyCategory():
        if self.useSummaryForDeletion and self.editSummary:
            reason = self.editSummary
        else:
            reason = pywikibot.translate(self.site, self.deletion_reason_remove)
        talkPage = self.cat.toggleTalkPage()
        try:
            self.cat.delete(reason, not self.batchMode)
        except pywikibot.NoUsername:
            # Bug fix: the original message had no %s placeholder, so the
            # '%' formatting raised TypeError instead of printing.
            pywikibot.output(
                u'You\'re not setup sysop info on %s, category will not delete.'
                % self.cat.site())
            return
        if talkPage.exists():
            talkPage.delete(reason=reason, prompt=not self.batchMode)
def run(self):
    """Walk the generator, adding a references section to every page that
    lacks one (skipping unreachable pages and likely-vandalized IP edits)."""
    comment = pywikibot.translate(self.site, msg)
    pywikibot.setAction(comment)
    for page in self.generator:
        # Show the title of the page we're working on.
        # Highlight the title in purple.
        pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
                         % page.title())
        try:
            text = page.get()
        except pywikibot.NoPage:
            pywikibot.output(u"Page %s does not exist?!"
                             % page.title(asLink=True))
            continue
        except pywikibot.IsRedirectPage:
            pywikibot.output(u"Page %s is a redirect; skipping."
                             % page.title(asLink=True))
            continue
        except pywikibot.LockedPage:
            pywikibot.output(u"Page %s is locked?!" % page.title(asLink=True))
            continue
        if pywikibot.getSite().sitename() == 'wikipedia:en' and page.isIpEdit():
            pywikibot.output(u"Page %s is edited by IP. Possible vandalized"
                             % page.title(asLink=True))
            continue
        if self.lacksReferences(text):
            newText = self.addReferences(text)
            self.save(page, newText)
def run(self): for page in self.generator: try: # get the page, and save it using the unmodified text. # whether or not getting a redirect throws an exception # depends on the variable self.touch_redirects. text = page.get() originalText = text for url in weblinkchecker.weblinksIn(text, withoutBracketed=True): filename = url.split('/')[-1] description = pywikibot.translate(pywikibot.getSite(), msg) % url bot = upload.UploadRobot(url, description=description) # TODO: check duplicates #filename = bot.uploadImage() #if filename: # text = text.replace(url, u'[[Image:%s]]' % filename) # # only save if there were changes #if text != originalText: # page.put(text) except pywikibot.NoPage: print "Page %s does not exist?!" % page.title(asLink=True) except pywikibot.IsRedirectPage: print "Page %s is a redirect; skipping." \ % page.title(asLink=True) except pywikibot.LockedPage: print "Page %s is locked?!" % page.title(asLink=True)
def set_template(name=None):
    """Initialize the requested helper template ('botdate' or 'botbox')
    on the wiki when it does not exist yet."""
    site = pywikibot.getSite()
    tcat = pywikibot.translate(site, template_cat)
    url = "%s://%s%s" % (site.protocol(), site.hostname(), site.path())
    botdate = u"""
<div style="text-align:right">{{{1}}}</div><noinclude>%s\n[[%s:%s]]</noinclude>
""" % (tcat[0], site.namespace(14), tcat[1])
    botbox = """
<div class=plainlinks style="text-align:right">[%s?title={{{1}}}&diff={{{2}}}&oldid={{{3}}} diff] - [%s?title={{{1}}}&action=history cron] - [%s?title=Special:Log&page={{{1}}} log]</div><noinclude>%s\n[[%s:%s]]</noinclude>
""" % (url, url, url, tcat[0], site.namespace(14), tcat[1])
    # Only create the template that was asked for, and only if missing.
    if name == 'botdate':
        p = pywikibot.Page(site, 'Template:botdate')
        if not p.exists():
            p.put(botdate, comment='Init.')
    if name == 'botbox':
        p = pywikibot.Page(site, 'Template:botbox')
        if not p.exists():
            p.put(botbox, comment='Init.')
def __init__(self, link, name):
    """Remember the reference's link and name and derive helper fields."""
    self.refname = name
    self.link = link
    self.site = pywikibot.getSite()
    self.linkComment = pywikibot.translate(self.site, comment)
    # The link with any '#fragment' suffix stripped off.
    self.url = re.sub(u'#.*', '', self.link)
    self.title = None
def getReasonForDeletion(self, page):
    """Ask the operator for a deletion reason, offering localized defaults
    when available; falls back to the guessed reason on empty input."""
    suggestedReason = self.guessReasonForDeletion(page)
    pywikibot.output(u"The suggested reason is: \03{lightred}%s\03{default}"
                     % suggestedReason)
    # We don't use pywikibot.translate() here because for some languages the
    # entry is intentionally left out.
    hasLocalReasons = (
        self.mySite.family.name in self.delete_reasons
        and page.site().lang in self.delete_reasons[self.mySite.family.name])
    if hasLocalReasons:
        localReasons = pywikibot.translate(page.site().lang,
                                           self.delete_reasons)
        pywikibot.output(u"")
        for key in sorted(localReasons.keys()):
            pywikibot.output((key + ":").ljust(8) + localReasons[key])
        pywikibot.output(u"")
        reason = pywikibot.input(
            u"Please enter the reason for deletion, choose a default reason, or press enter for the suggested message:")
        if reason.strip() in localReasons:
            reason = localReasons[reason]
    else:
        # Both non-localized branches used the identical prompt.
        reason = pywikibot.input(
            u"Please enter the reason for deletion, or press enter for the suggested message:")
    return reason if reason else suggestedReason
def run(self):
    """Set the edit summary, then treat every generated page until the
    'done' flag is raised."""
    comment = pywikibot.translate(pywikibot.getSite(), msg)
    pywikibot.setAction(comment)
    for page in self.generator:
        if self.done:
            break
        self.treat(page)
def run(self): for page in self.generator: try: # get the page, and save it using the unmodified text. # whether or not getting a redirect throws an exception # depends on the variable self.touch_redirects. text = page.get() originalText = text for url in weblinkchecker.weblinksIn(text, withoutBracketed=True): filename = url.split('/')[-1] description = pywikibot.translate(pywikibot.getSite(), msg) % url bot = upload.UploadRobot(url, description = description) # TODO: check duplicates #filename = bot.uploadImage() #if filename: # text = text.replace(url, u'[[Image:%s]]' % filename) # # only save if there were changes #if text != originalText: # page.put(text) except pywikibot.NoPage: print "Page %s does not exist?!" % page.title(asLink=True) except pywikibot.IsRedirectPage: print "Page %s is a redirect; skipping." \ % page.title(asLink=True) except pywikibot.LockedPage: print "Page %s is locked?!" % page.title(asLink=True)
def treat(self, page):
    """Create a capitalized (or titlecased) redirect to *page* when one
    does not exist yet, optionally asking for confirmation."""
    if page.isRedirectPage():
        page = page.getRedirectTarget()
    page_t = page.title()
    # Show the title of the page we're working on.
    # Highlight the title in purple.
    pywikibot.output(u"\n>>> \03{lightpurple}%s\03{default} <<<" % page_t)
    if self.titlecase:
        page_cap = pywikibot.Page(self.site, page_t.title())
    else:
        page_cap = pywikibot.Page(self.site, page_t.capitalize())
    if page_cap.exists():
        pywikibot.output(u'%s already exists, skipping...\n'
                         % page_cap.title(asLink=True))
        return
    pywikibot.output(u'[[%s]] doesn\'t exist' % page_cap.title())
    if not self.acceptall:
        choice = pywikibot.inputChoice(
            u'Do you want to create a redirect?',
            ['Yes', 'No', 'All', 'Quit'], ['y', 'N', 'a', 'q'], 'N')
        if choice == 'a':
            self.acceptall = True
        elif choice == 'q':
            self.done = True
    # When acceptall was already True, 'choice' is unbound but the 'or'
    # short-circuits before reading it.
    if self.acceptall or choice == 'y':
        comment = pywikibot.translate(self.site, msg) % page_t
        try:
            page_cap.put(u"#%s [[%s]]" % (self.site.redirect(True), page_t),
                         comment)
        except Exception:
            # Bug fix: narrowed from a bare 'except:' which also swallowed
            # KeyboardInterrupt and SystemExit; edit failures are still a
            # deliberate best-effort skip.
            pywikibot.output(u"An error occurred, skipping...")
def changeCommonscat(self, page=None, oldtemplate=u'', oldcat=u'',
                     newtemplate=u'', newcat=u'', linktitle=u'',
                     description=u''):
    ''' Change the current commonscat template and target.
    '''
    # Reuse the old category as link title when either name contains the other.
    if not linktitle and (page.title().lower() in oldcat.lower()
                          or oldcat.lower() in page.title().lower()):
        linktitle = oldcat
    pattern = u'(?i)\{\{%s\|?[^{}]*(?:\{\{.*\}\})?\}\}' % oldtemplate
    if linktitle and newcat != page.titleWithoutNamespace():
        replacement = u'{{%s|%s|%s}}' % (newtemplate, newcat, linktitle)
    elif newcat == page.titleWithoutNamespace():
        # The category matches the page title, so no argument is needed.
        replacement = u'{{%s}}' % newtemplate
    else:
        replacement = u'{{%s|%s}}' % (newtemplate, newcat)
    newtext = re.sub(pattern, replacement, page.get())
    if self.summary:
        comment = self.summary
    else:
        comment = pywikibot.translate(page.site(), msg_change) \
                  % {'oldcat': oldcat, 'newcat': newcat}
    self.save(newtext, page, comment)
def copyTo(self, catname):
    """Returns true if copying was successful, false if target page already
    existed.
    """
    catname = self.site().category_namespace() + ':' + catname
    targetCat = pywikibot.Page(self.site(), catname)
    if targetCat.exists():
        pywikibot.output('Target page %s already exists!' % targetCat.title())
        return
    pywikibot.output('Moving text from %s to %s.'
                     % (self.title(), targetCat.title()))
    authors = ', '.join(self.contributingUsers())
    creationSummary = pywikibot.translate(
        pywikibot.getSite(), msg_created_for_renaming) % (self.title(), authors)
    # Maybe sometimes length of summary is more than 200 characters and
    # thus will not be shown. For avoidning copyright violation bot must
    # listify authors in another place
    if len(creationSummary) > 200:
        talkpage = targetCat.toggleTalkPage()
        try:
            talktext = talkpage.get()
        except pywikibot.NoPage:
            talkpage.put(u"==Authors==\n%s-~~~~" % authors,
                         u"Bot:Listifying authors")
        else:
            talkpage.put(talktext + u"\n==Authors==\n%s-~~~~" % authors,
                         u"Bot:Listifying authors")
    targetCat.put(self.get(), creationSummary)
    return True
def copyTo(self, catname):
    """Returns true if copying was successful, false if target page already
    existed.
    """
    catname = self.site().category_namespace() + ":" + catname
    targetCat = pywikibot.Page(self.site(), catname)
    if targetCat.exists():
        pywikibot.output("Target page %s already exists!" % targetCat.title())
        return
    pywikibot.output("Moving text from %s to %s."
                     % (self.title(), targetCat.title()))
    authors = ", ".join(self.contributingUsers())
    creationSummary = pywikibot.translate(
        pywikibot.getSite(), msg_created_for_renaming) % (self.title(), authors)
    # Maybe sometimes length of summary is more than 200 characters and
    # thus will not be shown. For avoidning copyright violation bot must
    # listify authors in another place
    if len(creationSummary) > 200:
        talkpage = targetCat.toggleTalkPage()
        try:
            talktext = talkpage.get()
        except pywikibot.NoPage:
            # Talk page does not exist yet: create it with the author list.
            talkpage.put(u"==Authors==\n%s-~~~~" % authors,
                         u"Bot:Listifying authors")
        else:
            talkpage.put(talktext + u"\n==Authors==\n%s-~~~~" % authors,
                         u"Bot:Listifying authors")
    targetCat.put(self.get(), creationSummary)
    return True
def __init__(self):
    """Set up the naruto Wikia site and the list pages to process."""
    # Setup Familys for Wikia Involved
    self.naruto = wikipedia.getSite(code=u'en', fam=u'naruto')
    wikipedia.setAction(wikipedia.translate(self.naruto, msg))
    # Source list pages, one per jutsu type.
    self.jutsuList = [
        u'List of Ninjutsu',
        u'List of Taijutsu',
        u'List of Genjutsu',
    ]
def changeCommonscat(self, page=None, oldtemplate=u'', oldcat=u'',
                     newtemplate=u'', newcat=u'', linktitle=u'',
                     description=u''):
    ''' Change the current commonscat template and target.
    '''
    if not linktitle and (page.title().lower() in oldcat.lower()
                          or oldcat.lower() in page.title().lower()):
        # One name contains the other: keep the old category as link title.
        linktitle = oldcat
    oldTemplateR = u'(?i)\{\{%s\|?[^{}]*(?:\{\{.*\}\})?\}\}' % oldtemplate
    if linktitle and newcat != page.titleWithoutNamespace():
        newtext = re.sub(oldTemplateR,
                         u'{{%s|%s|%s}}' % (newtemplate, newcat, linktitle),
                         page.get())
    elif newcat == page.titleWithoutNamespace():
        newtext = re.sub(oldTemplateR, u'{{%s}}' % newtemplate, page.get())
    else:
        newtext = re.sub(oldTemplateR,
                         u'{{%s|%s}}' % (newtemplate, newcat), page.get())
    comment = self.summary or pywikibot.translate(page.site(), msg_change) \
        % {'oldcat': oldcat, 'newcat': newcat}
    self.save(newtext, page, comment)
def __init__(self, feed, user=None, ask=True, whitelist=None):
    """
    Constructor. Parameters:
        * feed      - The changes feed to work on (Newpages or Recentchanges)
        * user      - Limit whitelist parsing and patrolling to a specific user
        * ask       - If True, confirm each patrol action
        * whitelist - page title for whitelist (optional)
    """
    self.feed = feed
    self.user = user
    self.ask = ask
    self.site = pywikibot.getSite()
    if whitelist:
        self.whitelist_pagename = whitelist
    else:
        # Default whitelist lives at User:<username>/<localized subpage>.
        subpage = pywikibot.translate(self.site, self.whitelist_subpage_name)
        self.whitelist_pagename = u'%s:%s/%s' % (self.site.namespace(2),
                                                 self.site.username(),
                                                 subpage)
    self.whitelist = None
    self.whitelist_ts = 0
    self.whitelist_load_ts = 0
    self.autopatroluserns = False
    self.highest_rcid = 0  # used to track loops
    self.last_rcid = 0
    self.repeat_start_ts = 0
    self.rc_item_counter = 0  # counts how many items have been reviewed
    self.patrol_counter = 0  # and how many times an action was taken
def main():
    """Parse command-line options and fix redirect links on the selected
    pages (optionally restricted to featured articles)."""
    featured = False
    gen = None
    # This factory is responsible for processing command line arguments
    # that are also used by other scripts and that determine on which pages
    # to work on.
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        if arg == '-featured':
            featured = True
        else:
            genFactory.handleArg(arg)
    mysite = pywikibot.getSite()
    if mysite.sitename() == 'wikipedia:nl':
        pywikibot.output(
            u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}')
        sys.exit()
    if featured:
        featuredList = pywikibot.translate(mysite, featured_articles)
        ref = pywikibot.Page(pywikibot.getSite(), featuredList)
        gen = pagegenerators.ReferringPageGenerator(ref)
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, [0])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        pywikibot.showHelp('fixing_redirects')
        return
    for page in pagegenerators.PreloadingGenerator(gen):
        workon(page)