def main():
    """Parse command line arguments and run the image transfer bot."""
    # If -file is not used, this temporary list collects the page title parts.
    pageTitle = []
    page = None
    gen = None
    interwiki = False
    keep_name = False
    targetLang = None
    targetFamily = None

    for arg in pywikibot.handleArgs():
        if arg == '-interwiki':
            interwiki = True
        elif arg.startswith('-keepname'):
            keep_name = True
        elif arg.startswith('-tolang:'):
            targetLang = arg[8:]
        elif arg.startswith('-tofamily:'):
            targetFamily = arg[10:]
        elif arg.startswith('-file'):
            if len(arg) == 5:
                filename = pywikibot.input(
                    u'Please enter the list\'s filename: ')
            else:
                filename = arg[6:]
            gen = pagegenerators.TextfilePageGenerator(filename)
        else:
            pageTitle.append(arg)

    if not gen:
        # If the page title is given as a command line argument,
        # connect the title's parts with spaces.
        if pageTitle != []:
            pageTitle = ' '.join(pageTitle)
            page = pywikibot.Page(pywikibot.getSite(), pageTitle)
        # If no page title was given as an argument, and none was
        # read from a file, query the user.
        if not page:
            pageTitle = pywikibot.input(u'Which page to check:')
            page = pywikibot.Page(pywikibot.getSite(), pageTitle)
        # Generator which will yield only a single Page.
        gen = iter([page])

    if not targetLang and not targetFamily:
        targetSite = pywikibot.getSite('commons', 'commons')
    else:
        if not targetLang:
            # BUG FIX: Site.language() is a method; the original stored the
            # bound method object instead of calling it, so getSite() below
            # received a callable rather than a language code string.
            targetLang = pywikibot.getSite().language()
        if not targetFamily:
            # family is a plain attribute on Site (not a method) in compat.
            targetFamily = pywikibot.getSite().family
        targetSite = pywikibot.getSite(targetLang, targetFamily)
    bot = ImageTransferBot(gen, interwiki=interwiki, targetSite=targetSite,
                           keep_name=keep_name)
    bot.run()
def main():
    """Walk every page list file and interactively copy the images found."""
    for filename in getfn():
        print("Handling images from %s" % filename)
        # Iterate over the titles listed in the file, skipping anything
        # that is not an image page.
        for image in pagegenerators.TextfilePageGenerator(filename):
            if not image.isImage():
                continue
            print("-" * 50)
            print("Image: %s" % image.title())
            try:
                # Show the image description page's contents.
                print(image.get())
            except wikipedia.NoPage:
                print("Description empty.")
            except wikipedia.IsRedirectPage:
                print("Description page is redirect?!")
            # Only transfer after explicit confirmation; default is "no".
            answer = wikipedia.input(u"Copy this image (y/N)?")
            if answer.lower().startswith('y'):
                lib_images.transfer_image(image)
def main():
    """Parse command line options and run the Windows-1252 cleanup bot."""
    # Collects the parts of a page title given directly on the command line.
    pageTitle = []
    gen = None
    for arg in sys.argv[1:]:
        arg = wikipedia.argHandler(arg, 'windows_chars')
        if not arg:
            continue
        if arg.startswith('-file'):
            if len(arg) == 5:
                filename = wikipedia.input(
                    u'please enter the list\'s filename: ')
            else:
                filename = arg[6:]
            gen = pagegenerators.TextfilePageGenerator(filename)
        elif arg.startswith('-sql'):
            if len(arg) == 4:
                sqlfilename = wikipedia.input(
                    u'please enter the SQL dump\'s filename: ')
            else:
                sqlfilename = arg[5:]
            gen = SqlWindows1252PageGenerator(sqlfilename)
        else:
            pageTitle.append(arg)

    # A page title given on the command line wins over any generator:
    # rejoin its parts with spaces and wrap it as a one-page generator.
    if pageTitle != []:
        page = wikipedia.Page(wikipedia.getSite(), ' '.join(pageTitle))
        gen = iter([page])

    # Set the edit summary message.
    wikipedia.setAction(wikipedia.translate(wikipedia.getSite(), msg))

    if not gen:
        wikipedia.showHelp('windows_chars')
    elif wikipedia.getSite().encoding() == "utf-8":
        print("There is no need to run this robot on UTF-8 wikis.")
    else:
        preloadingGen = pagegenerators.PreloadingGenerator(gen)
        bot = WindowsCharsBot(preloadingGen)
        bot.run()
def generator(self):
    """Return the page generator selected by the configured options."""
    if self.__workonnew:
        # Fall back to the configured special-page limit when no
        # explicit count was requested.
        if not self.__number:
            self.__number = config.special_page_limit
        return pagegenerators.NewpagesPageGenerator(number=self.__number)
    if self.__refpagetitle:
        refpage = wikipedia.Page(wikipedia.getSite(), self.__refpagetitle)
        return pagegenerators.ReferringPageGenerator(refpage)
    if self.__linkpagetitle:
        linkpage = wikipedia.Page(wikipedia.getSite(), self.__linkpagetitle)
        return pagegenerators.LinkedPageGenerator(linkpage)
    if self.__catname:
        cat = catlib.Category(wikipedia.getSite(),
                              'Category:%s' % self.__catname)
        if self.__start:
            return pagegenerators.CategorizedPageGenerator(
                cat, recurse=self.__catrecurse, start=self.__start)
        return pagegenerators.CategorizedPageGenerator(
            cat, recurse=self.__catrecurse)
    if self.__textfile:
        return pagegenerators.TextfilePageGenerator(self.__textfile)
    # Default: walk all pages, starting at '!' unless a start was given.
    if not self.__start:
        self.__start = '!'
    namespace = wikipedia.Page(wikipedia.getSite(), self.__start).namespace()
    start = wikipedia.Page(wikipedia.getSite(),
                           self.__start).titleWithoutNamespace()
    return pagegenerators.AllpagesPageGenerator(start, namespace)
def main():
    """Parse command line arguments and run the page move bot."""
    gen = None
    prefix = None
    oldName = None
    newName = None  # kept for parity with the original; currently unused
    noredirect = True
    always = False
    skipredirects = False
    summary = None
    fromToPairs = []
    # This factory is responsible for processing command line arguments
    # that are also used by other scripts and that determine on which
    # pages to work on.
    genFactory = pagegenerators.GeneratorFactory()

    for arg in pywikibot.handleArgs():
        if arg == '-noredirect':
            noredirect = False
        elif arg == '-always':
            always = True
        elif arg == '-skipredirects':
            skipredirects = True
        elif arg.startswith('-pairs'):
            if len(arg) == len('-pairs'):
                filename = pywikibot.input(
                    u'Enter the name of the file containing pairs:')
            else:
                filename = arg[len('-pairs:'):]
            # Titles in the file alternate: source, target, source, ...
            oldName1 = None
            for page in pagegenerators.TextfilePageGenerator(filename):
                if oldName1:
                    fromToPairs.append([oldName1, page.title()])
                    oldName1 = None
                else:
                    oldName1 = page.title()
            if oldName1:
                pywikibot.output(
                    u'WARNING: file %s contains odd number of links'
                    % filename)
        elif arg.startswith('-from:'):
            if oldName:
                pywikibot.output(u'WARNING: -from:%s without -to:' % oldName)
            oldName = arg[len('-from:'):]
        elif arg.startswith('-to:'):
            if oldName:
                fromToPairs.append([oldName, arg[len('-to:'):]])
                oldName = None
            else:
                pywikibot.output(u'WARNING: %s without -from' % arg)
        elif arg.startswith('-prefix'):
            if len(arg) == len('-prefix'):
                prefix = pywikibot.input(u'Enter the prefix:')
            else:
                prefix = arg[8:]
        elif arg.startswith('-summary'):
            if len(arg) == len('-summary'):
                summary = pywikibot.input(u'Enter the summary:')
            else:
                summary = arg[9:]
        else:
            # Hand anything else to the shared generator factory.
            genFactory.handleArg(arg)

    if oldName:
        pywikibot.output(u'WARNING: -from:%s without -to:' % oldName)

    # Explicit from/to pairs are moved one by one, without a generator.
    for source, target in fromToPairs:
        page = pywikibot.Page(pywikibot.getSite(), source)
        bot = MovePagesBot(None, prefix, noredirect, always, skipredirects,
                           summary)
        bot.moveOne(page, target)

    if not gen:
        gen = genFactory.getCombinedGenerator()
    if gen:
        preloadingGen = pagegenerators.PreloadingGenerator(gen)
        bot = MovePagesBot(preloadingGen, prefix, noredirect, always,
                           skipredirects, summary)
        bot.run()
    elif not fromToPairs:
        pywikibot.showHelp('movepages')
def main():
    """Parse command line arguments and run the deletion bot."""
    pageName = ''
    singlePage = ''
    summary = ''
    always = False
    doSinglePage = False
    doCategory = False
    deleteSubcategories = True
    doRef = False
    doLinks = False
    doImages = False
    undelete = False
    fileName = ''
    gen = None

    # Read command line parameters.
    for arg in pywikibot.handleArgs():
        if arg == '-always':
            always = True
        elif arg.startswith('-file'):
            if len(arg) == len('-file'):
                fileName = pywikibot.input(
                    u'Enter name of file to delete pages from:')
            else:
                fileName = arg[len('-file:'):]
        elif arg.startswith('-summary'):
            if len(arg) == len('-summary'):
                summary = pywikibot.input(u'Enter a reason for the deletion:')
            else:
                summary = arg[len('-summary:'):]
        elif arg.startswith('-cat'):
            doCategory = True
            if len(arg) == len('-cat'):
                pageName = pywikibot.input(
                    u'Enter the category to delete from:')
            else:
                pageName = arg[len('-cat:'):]
        elif arg.startswith('-nosubcats'):
            deleteSubcategories = False
        elif arg.startswith('-links'):
            doLinks = True
            if len(arg) == len('-links'):
                pageName = pywikibot.input(u'Enter the page to delete from:')
            else:
                pageName = arg[len('-links:'):]
        elif arg.startswith('-ref'):
            doRef = True
            if len(arg) == len('-ref'):
                pageName = pywikibot.input(u'Enter the page to delete from:')
            else:
                pageName = arg[len('-ref:'):]
        elif arg.startswith('-page'):
            doSinglePage = True
            if len(arg) == len('-page'):
                pageName = pywikibot.input(u'Enter the page to delete:')
            else:
                pageName = arg[len('-page:'):]
        elif arg.startswith('-images'):
            doImages = True
            if len(arg) == len('-images'):
                pageName = pywikibot.input(
                    u'Enter the page with the images to delete:')
            else:
                # BUG FIX: the original sliced with len('-images'), which
                # left the ':' separator at the front of the page title;
                # every sibling option uses the '-opt:' length.
                pageName = arg[len('-images:'):]
        elif arg.startswith('-undelete'):
            undelete = True

    mysite = pywikibot.getSite()
    if doSinglePage:
        if not summary:
            summary = pywikibot.input(u'Enter a reason for the deletion:')
        page = pywikibot.Page(mysite, pageName)
        gen = iter([page])
    elif doCategory:
        if not summary:
            summary = pywikibot.translate(mysite, msg_delete_category) \
                      % pageName
        ns = mysite.category_namespace()
        categoryPage = catlib.Category(mysite, ns + ':' + pageName)
        gen = pagegenerators.CategorizedPageGenerator(
            categoryPage, recurse=deleteSubcategories)
    elif doLinks:
        if not summary:
            summary = pywikibot.translate(mysite, msg_delete_links) % pageName
        pywikibot.setAction(summary)
        linksPage = pywikibot.Page(mysite, pageName)
        gen = pagegenerators.LinkedPageGenerator(linksPage)
    elif doRef:
        if not summary:
            summary = pywikibot.translate(mysite, msg_delete_ref) % pageName
        refPage = pywikibot.Page(mysite, pageName)
        gen = pagegenerators.ReferringPageGenerator(refPage)
    elif fileName:
        if not summary:
            summary = pywikibot.translate(mysite, msg_simple_delete)
        gen = pagegenerators.TextfilePageGenerator(fileName)
    elif doImages:
        if not summary:
            summary = pywikibot.translate(mysite, msg_delete_images)
        gen = pagegenerators.ImagesPageGenerator(
            pywikibot.Page(mysite, pageName))

    if gen:
        pywikibot.setAction(summary)
        # We are just deleting pages, so we have no need of using a
        # preloading page generator to actually get the text of those pages.
        bot = DeletionRobot(gen, summary, always, undelete)
        bot.run()
    else:
        pywikibot.showHelp(u'delete')
def main():
    """Parse command line arguments and run the page protection bot."""
    global protectionLevels
    # Valid values accepted by the -edit/-move/-create options.
    protectionLevels = ['sysop', 'autoconfirmed', 'none']
    pageName = ''
    summary = ''
    always = False
    doSinglePage = False
    doCategory = False
    protectSubcategories = True
    doRef = False
    doLinks = False
    doImages = False
    fileName = ''
    gen = None
    edit = ''
    move = ''
    defaultProtection = 'sysop'

    # read command line parameters
    for arg in wikipedia.handleArgs():
        if arg == '-always':
            always = True
        elif arg.startswith('-file'):
            if len(arg) == len('-file'):
                fileName = wikipedia.input(
                    u'Enter name of file to protect pages from:')
            else:
                fileName = arg[len('-file:'):]
        elif arg.startswith('-summary'):
            if len(arg) == len('-summary'):
                summary = wikipedia.input(
                    u'Enter a reason for the protection:')
            else:
                summary = arg[len('-summary:'):]
        elif arg.startswith('-cat'):
            doCategory = True
            if len(arg) == len('-cat'):
                pageName = wikipedia.input(
                    u'Enter the category to protect from:')
            else:
                pageName = arg[len('-cat:'):]
        elif arg.startswith('-nosubcats'):
            protectSubcategories = False
        elif arg.startswith('-links'):
            doLinks = True
            if len(arg) == len('-links'):
                pageName = wikipedia.input(u'Enter the page to protect from:')
            else:
                pageName = arg[len('-links:'):]
        elif arg.startswith('-ref'):
            doRef = True
            if len(arg) == len('-ref'):
                pageName = wikipedia.input(u'Enter the page to protect from:')
            else:
                pageName = arg[len('-ref:'):]
        elif arg.startswith('-page'):
            doSinglePage = True
            if len(arg) == len('-page'):
                pageName = wikipedia.input(u'Enter the page to protect:')
            else:
                pageName = arg[len('-page:'):]
        elif arg.startswith('-images'):
            doImages = True
            if len(arg) == len('-images'):
                pageName = wikipedia.input(
                    u'Enter the page with the images to protect:')
            else:
                pageName = arg[len('-images:'):]
        elif arg.startswith('-unprotect'):
            # NOTE(review): only effective for -edit/-move/-create options
            # that appear AFTER it on the command line, since those read
            # defaultProtection immediately when parsed.
            defaultProtection = 'none'
        elif arg.startswith('-edit'):
            edit = arg[len('-edit:'):]
            # An invalid (or missing) level falls back to interactive choice.
            if edit not in protectionLevels:
                edit = choiceProtectionLevel('edit', defaultProtection)
        elif arg.startswith('-move'):
            move = arg[len('-move:'):]
            if move not in protectionLevels:
                move = choiceProtectionLevel('move', defaultProtection)
        elif arg.startswith('-create'):
            # NOTE(review): 'create' is parsed here but never passed to
            # ProtectionRobot below — confirm whether this is intentional.
            create = arg[len('-create:'):]
            if create not in protectionLevels:
                create = choiceProtectionLevel('create', defaultProtection)

    mysite = wikipedia.getSite()
    # Choose the page generator according to which mode flag was set;
    # earlier branches take priority when several were given.
    if doSinglePage:
        if not summary:
            summary = wikipedia.input(u'Enter a reason for the protection:')
        page = wikipedia.Page(mysite, pageName)
        gen = iter([page])
    elif doCategory:
        if not summary:
            summary = wikipedia.translate(mysite, msg_protect_category) \
                      % pageName
        ns = mysite.category_namespace()
        categoryPage = catlib.Category(mysite, ns + ':' + pageName)
        gen = pagegenerators.CategorizedPageGenerator(
            categoryPage, recurse=protectSubcategories)
    elif doLinks:
        if not summary:
            summary = wikipedia.translate(mysite, msg_protect_links) % pageName
        linksPage = wikipedia.Page(mysite, pageName)
        gen = pagegenerators.LinkedPageGenerator(linksPage)
    elif doRef:
        if not summary:
            summary = wikipedia.translate(mysite, msg_protect_ref) % pageName
        refPage = wikipedia.Page(mysite, pageName)
        gen = pagegenerators.ReferringPageGenerator(refPage)
    elif fileName:
        if not summary:
            summary = wikipedia.translate(mysite, msg_simple_protect)
        gen = pagegenerators.TextfilePageGenerator(fileName)
    elif doImages:
        if not summary:
            summary = wikipedia.translate(mysite, msg_protect_images) \
                      % pageName
        gen = pagegenerators.ImagesPageGenerator(
            wikipedia.Page(mysite, pageName))

    if gen:
        wikipedia.setAction(summary)
        # We are just protecting pages, so we have no need of using a
        # preloading page generator to actually get the text of those pages.
        if not edit:
            edit = defaultProtection
        if not move:
            move = defaultProtection
        bot = ProtectionRobot(gen, summary, always, edit=edit, move=move)
        bot.run()
    else:
        wikipedia.showHelp(u'protect')
def main(*args):
    """Command line entry point for the disambiguation-solving bot."""
    # The option that is always chosen when the bot asks what to do with a
    # link; None means the user is prompted (default behaviour).
    always = None
    alternatives = []
    getAlternatives = True
    dnSkip = False
    # Generator yielding the disambiguation pages to work on; stays None
    # until -file/-start or a page title fills it in.
    generator = None
    # Collects the parts of a single page title given on the command line.
    pageTitle = []
    primary = False
    main_only = False
    # Shall we use only the first link from each asterisked line?
    first_only = False
    minimum = 0

    for arg in pywikibot.handleArgs(*args):
        if arg == '-primary':
            primary = True
        elif arg == '-just':
            getAlternatives = False
        elif arg == '-dnskip':
            dnSkip = True
        elif arg == '-main':
            main_only = True
        elif arg == '-first':
            first_only = True
        elif arg.startswith('-primary:'):
            primary = True
            getAlternatives = False
            alternatives.append(arg[9:])
        elif arg.startswith('-always:'):
            always = arg[8:]
        elif arg.startswith('-min:'):
            minimum = int(arg[5:])
        elif arg.startswith('-file'):
            filename = None if len(arg) == 5 else arg[6:]
            generator = pagegenerators.TextfilePageGenerator(filename=filename)
        elif arg.startswith('-pos:'):
            # NOTE(review): a bare '-pos:' raises IndexError here, exactly
            # as in the original code — behavior deliberately preserved.
            if arg[5] == ':':
                # A doubled colon means: take the title literally.
                alternatives.append(arg[5:])
            else:
                page = pywikibot.Page(pywikibot.getSite(), arg[5:])
                if page.exists():
                    alternatives.append(page.title())
                else:
                    answer = pywikibot.inputChoice(
                        u'Possibility %s does not actually exist. Use it anyway?'
                        % page.title(),
                        ['yes', 'no'], ['y', 'N'], 'N')
                    if answer == 'y':
                        alternatives.append(page.title())
        elif arg.startswith('-start'):
            try:
                disambcat = pywikibot.getSite().disambcategory()
                if len(arg) <= len('-start:'):
                    generator = pagegenerators.CategorizedPageGenerator(
                        disambcat)
                else:
                    generator = pagegenerators.CategorizedPageGenerator(
                        disambcat, start=arg[7:])
                # Restrict the walk to the article namespace.
                generator = pagegenerators.NamespaceFilterPageGenerator(
                    generator, [0])
            except pywikibot.NoPage:
                pywikibot.output(
                    "Disambiguation category for your wiki is not known.")
                raise
        elif arg.startswith("-"):
            pywikibot.output("Unrecognized command line argument: %s" % arg)
            # Print the help text.
            pywikibot.showHelp()
        else:
            pageTitle.append(arg)

    # A disambiguation page given on the command line wins:
    # rejoin its title parts with spaces.
    if pageTitle != []:
        pageTitle = ' '.join(pageTitle)
        page = pywikibot.Page(pywikibot.getSite(), pageTitle)
        generator = iter([page])
    # Nothing from arguments or a file: ask the user for a single page.
    if not generator:
        pageTitle = pywikibot.input(
            u'On which disambiguation page do you want to work?')
        page = pywikibot.Page(pywikibot.getSite(), pageTitle)
        generator = iter([page])

    bot = DisambiguationRobot(always, alternatives, getAlternatives, dnSkip,
                              generator, primary, main_only, first_only,
                              minimum=minimum)
    bot.run()