def main():
    """Run PuppetUnmanagedListBot over every page in Category:Instance."""
    # The generator gives the pages that should be worked upon.
    gen = None
    # If debug is True, doesn't do any real changes, but only shows
    # what would have been changed.
    debug = False
    wantHelp = False

    cat = catlib.Category(wikipedia.getSite(), 'Category:%s' % 'Instance')
    gen = pagegenerators.CategorizedPageGenerator(cat, start=None,
                                                  recurse=False)

    # Parse command line arguments
    for arg in wikipedia.handleArgs():
        if arg.startswith("-debug"):
            debug = True
        else:
            # Unknown argument: report it and show the help afterwards.
            # (wikipedia.output replaces the bare Python 2 print statement.)
            wikipedia.output(u"%s yielding wanthelp" % arg)
            wantHelp = True

    if not wantHelp:
        # The preloading generator is responsible for downloading multiple
        # pages from the wiki simultaneously.
        gen = pagegenerators.PreloadingGenerator(gen)
        bot = PuppetUnmanagedListBot(gen, debug)
        bot.run()
    else:
        wikipedia.showHelp()
def main():
    """Run FilmBannerBot over the pages selected on the command line."""
    # Handles the page-selection options shared by all scripts.
    genFactory = pagegenerators.GeneratorFactory()
    gen = None
    title_words = []  # words of a single page title given as bare arguments

    for arg in pywikibot.handleArgs():
        # "-reg" is shorthand for every transclusion of {{Infobox film}}.
        if arg.startswith("-reg"):
            arg = '-transcludes:Infobox film'
        if not genFactory.handleArg(arg):
            title_words.append(arg)

    if title_words:
        # Only a single, explicitly named page will be worked on.
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(title_words))
        gen = iter([page])

    if not gen:
        gen = genFactory.getCombinedGenerator()

    if gen:
        # Preload several pages per request to cut down on round trips.
        bot = FilmBannerBot(pagegenerators.PreloadingGenerator(gen))
        bot.run()
    else:
        pywikibot.showHelp()
def main():
    """Run FilmAssessBot on article pages derived from selected talk pages."""
    # Handles the page-selection options shared by all scripts.
    genFactory = pagegenerators.GeneratorFactory()
    gen = None
    title_words = []  # collects a single page title passed as bare arguments

    for arg in pywikibot.handleArgs():
        # "-reg" expands to the category of unassessed film articles.
        if arg.startswith("-reg"):
            arg = '-cat:Unassessed film articles'
        if not genFactory.handleArg(arg):
            title_words.append(arg)

    if title_words:
        # Work on exactly one page.
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(title_words))
        gen = iter([page])

    if not gen:
        gen = genFactory.getCombinedGenerator()

    if gen:
        # Map the selected talk pages to their subject pages, then preload
        # several pages per request.
        gen = pagegenerators.PreloadingGenerator(
            filmfunctions.PagesFromTalkPagesGenerator(gen))
        FilmAssessBot(gen).run()
    else:
        pywikibot.showHelp()
def main(*args):
    """Remove a stale maintenance-category link from AfC submissions.

    Works through the pages produced by the standard generator options; on
    each page that begins with an {{AFC submission}} transclusion, strips the
    [[:Category:AfC submissions with missing AfC template]] link (both the
    underscore and the space spelling).  A few project pages are excluded.
    """
    try:
        genFactory = pagegenerators.GeneratorFactory()
        for arg in pywikibot.handleArgs(*args):
            if not genFactory.handleArg(arg):
                # Unrecognized argument: show help and stop parsing.
                pywikibot.showHelp()
                break
        else:
            gen = genFactory.getCombinedGenerator()
            if gen:
                # Pages must *start* with the AfC banner to qualify.
                page_match = re.compile(r'\{\{AFC submission\|')
                summary = u"[[User:HasteurBot|HasteurBot Task 3]]: Removing " + \
                          u"maint category that does not apply"
                # Project pages that must never be edited by this task.
                disclude_list = [
                    u'Wikipedia talk:WikiProject Articles for creation',
                    u'Wikipedia talk:WikiProject Articles for creation/2013 5',
                    u'Wikipedia talk:WikiProject Articles for creation/2011',
                ]
                for article in gen:
                    if article.title() in disclude_list:
                        continue
                    art_text = article.get()
                    if page_match.match(art_text) is not None:
                        # pywikibot.output replaces the Python 2 print
                        # statement.
                        pywikibot.output(u'%s' % article)
                        # Strip the category link (preceded by a newline) in
                        # both the underscore and the space spelling; raw
                        # strings replace the original escaped-newline forms.
                        art_1 = re.sub(
                            r'\n\[\[\:Category\:AfC_submissions_with_missing_AfC_template\]\]',
                            '', art_text)
                        art_2 = re.sub(
                            r'\n\[\[\:Category\:AfC submissions with missing AfC template\]\]',
                            '', art_1)
                        article.put(art_2, comment=summary)
            else:
                pywikibot.showHelp()
    finally:
        pywikibot.stopme()
def main():
    """Run FilmAssessBot on article pages derived from selected talk pages."""
    # This factory is responsible for processing command line arguments
    # that are also used by other scripts and that determine on which pages
    # to work on.
    genFactory = pagegenerators.GeneratorFactory()
    # The generator gives the pages that should be worked upon.
    gen = None
    # This temporary array is used to read the page title if one single
    # page to work on is specified by the arguments.
    pageTitleParts = []
    # Parse command line arguments
    for arg in pywikibot.handleArgs():
        # "-reg" is shorthand for the category of unassessed film articles.
        if arg.startswith("-reg"):
            arg = '-cat:Unassessed film articles'
        if not genFactory.handleArg(arg):
            pageTitleParts.append(arg)
    if pageTitleParts != []:
        # We will only work on a single page.
        pageTitle = ' '.join(pageTitleParts)
        page = pywikibot.Page(pywikibot.getSite(), pageTitle)
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if gen:
        # The preloading generator is responsible for downloading multiple
        # pages from the wiki simultaneously; the selected talk pages are
        # first mapped to their subject pages.
        gen = pagegenerators.PreloadingGenerator(
            filmfunctions.PagesFromTalkPagesGenerator(gen))
        bot = FilmAssessBot(gen)
        bot.run()
    else:
        pywikibot.showHelp()
def main():
    """Upload the text layer of a DjVu file to a Wikisource Index page."""
    import os
    index = None       # title of the Index page to upload to
    djvu = None        # path of the local .djvu file
    pages = None       # page range specification
    # If dry is True, doesn't do any real changes, but only shows
    # what would have been changed.
    dry = False
    ask = False        # ask before uploading each page
    overwrite = 'ask'  # 'y', 'n', or 'ask': what to do with existing pages
    # Parse command line arguments
    for arg in pywikibot.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif arg.startswith("-ask"):
            ask = True
        elif arg.startswith("-overwrite:"):
            # Only the first character after the colon is significant.
            overwrite = arg[11:12]
            if overwrite != 'y' and overwrite != 'n':
                pywikibot.output(
                    u"Unknown argument %s; will ask before overwriting" % arg)
                overwrite = 'ask'
        elif arg.startswith("-djvu:"):
            djvu = arg[6:]
        elif arg.startswith("-index:"):
            index = arg[7:]
        elif arg.startswith("-pages:"):
            pages = arg[7:]
        else:
            pywikibot.output(u"Unknown argument %s" % arg)
    # Check the djvu file exists; os.stat raises OSError if it does not.
    if djvu:
        os.stat(djvu)
        if not index:
            # Default the index title to the file's base name.
            import os.path
            index = os.path.basename(djvu)
    if djvu and index:
        site = pywikibot.getSite()
        index_page = pywikibot.Page(site, index)
        # This bot only makes sense on a Wikisource wiki.
        if site.family.name != 'wikisource':
            raise pywikibot.PageNotFound(
                u"Found family '%s'; Wikisource required." % site.family.name)
        if not index_page.exists() and index_page.namespace() == 0:
            # Retry in the wiki's localized Index: namespace.
            index_namespace = site.mediawiki_message(
                'Proofreadpage index namespace')
            index_page = pywikibot.Page(pywikibot.getSite(),
                                        u"%s:%s" % (index_namespace, index))
        if not index_page.exists():
            raise pywikibot.NoPage(u"Page '%s' does not exist" % index)
        pywikibot.output(u"uploading text from %s to %s"
                         % (djvu, index_page.title(asLink=True)))
        bot = DjVuTextBot(djvu, index, pages, ask, overwrite, dry)
        if not bot.has_text():
            raise ValueError("No text layer in djvu file")
        bot.run()
    else:
        pywikibot.showHelp()
def main():
    """Run InlineImagesRobot on pages chosen via the command line."""
    gen = None
    # Bare arguments are collected and later joined into one page title.
    title_words = []
    # Handles the page-selection options shared by all scripts.
    genFactory = pagegenerators.GeneratorFactory()

    for arg in pywikibot.handleArgs():
        if not genFactory.handleArg(arg):
            title_words.append(arg)

    if title_words:
        # A single page was named explicitly.
        gen = iter([pywikibot.Page(pywikibot.getSite(),
                                   ' '.join(title_words))])

    if not gen:
        gen = genFactory.getCombinedGenerator()

    if not gen:
        pywikibot.showHelp('inline_images')
    else:
        # Preload several pages per request.
        InlineImagesRobot(pagegenerators.PreloadingGenerator(gen)).run()
def main(*args):
    """Run TouchBot (null edits) on the selected pages."""
    # Cosmetic changes must stay off: a touch should never alter content,
    # so the histories are not flooded with minor changes.
    config.cosmetic_changes = False

    gen = None
    genFactory = pagegenerators.GeneratorFactory()
    touch_redirects = False
    # Bare arguments are collected and later joined into one page title.
    title_words = []

    for arg in pywikibot.handleArgs(*args):
        if genFactory.handleArg(arg):
            continue
        if arg == '-redir':
            touch_redirects = True
        else:
            title_words.append(arg)

    gen = genFactory.getCombinedGenerator()
    if not gen:
        if title_words:
            # Work on the single page named on the command line.
            gen = iter([pywikibot.Page(pywikibot.getSite(),
                                       ' '.join(title_words))])
        else:
            pywikibot.showHelp()
            return

    TouchBot(pagegenerators.PreloadingGenerator(gen), touch_redirects).run()
def main():
    """Run VEIDBot over every page in Category:VE."""
    # The generator gives the pages that should be worked upon.
    gen = None
    # If debug is True, doesn't do any real changes, but only shows
    # what would have been changed.
    debug = False
    wantHelp = False

    cat = catlib.Category(wikipedia.getSite(), 'Category:%s' % 'VE')
    gen = pagegenerators.CategorizedPageGenerator(cat, start=None,
                                                  recurse=False)

    # Parse command line arguments
    for arg in wikipedia.handleArgs():
        if arg.startswith("-debug"):
            debug = True
        else:
            # Unknown argument: report it and show the help afterwards.
            # (wikipedia.output replaces the bare Python 2 print statement.)
            wikipedia.output(u"%s yielding wanthelp" % arg)
            wantHelp = True

    if not wantHelp:
        # The preloading generator is responsible for downloading multiple
        # pages from the wiki simultaneously.
        gen = pagegenerators.PreloadingGenerator(gen)
        bot = VEIDBot(gen, debug)
        bot.run()
    else:
        wikipedia.showHelp()
def main():
    """Fix redirects, optionally restricted to links from the featured list."""
    use_featured = False
    # Handles the standard page-selection command line options.
    factory = pagegenerators.GeneratorFactory()

    for arg in pywikibot.handleArgs():
        if arg == '-featured':
            use_featured = True
        else:
            factory.handleArg(arg)

    site = pywikibot.getSite()
    # Honour the Dutch Wikipedia's ban on bot-fixed redirects.
    if site.sitename() == 'wikipedia:nl':
        pywikibot.output(
            u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}'
        )
        sys.exit()

    if use_featured:
        # Main-namespace pages linked from the featured-articles list.
        list_title = pywikibot.translate(site, featured_articles)
        list_page = pywikibot.Page(pywikibot.getSite(), list_title)
        gen = pagegenerators.NamespaceFilterPageGenerator(
            pagegenerators.ReferringPageGenerator(list_page), [0])
    else:
        gen = factory.getCombinedGenerator()

    if not gen:
        pywikibot.showHelp('fixing_redirects')
        return
    for page in pagegenerators.PreloadingGenerator(gen):
        workon(page)
def main():
    """Count reference templates, or show the help if -count was not given."""
    doCount = False
    argsList = []    # template names given as bare arguments
    namespaces = []  # namespaces to restrict to; [] means all

    for arg in wikipedia.handleArgs():
        if arg == '-count':
            doCount = True
        elif arg.startswith('-namespace:'):
            # Numeric namespaces are stored as ints, names as strings.
            try:
                namespaces.append(int(arg[len('-namespace:'):]))
            except ValueError:
                namespaces.append(arg[len('-namespace:'):])
        else:
            argsList.append(arg)

    if doCount:
        robot = ReferencesRobot()
        if not argsList:
            argsList = templates
        choice = ''
        if 'reflist' in argsList:
            wikipedia.output(
                u'NOTE: it will take a long time to count "reflist".')
            choice = wikipedia.inputChoice(u'Proceed anyway?',
                                           ['yes', 'no', 'skip'],
                                           ['y', 'n', 's'], 'y')
            if choice == 's':
                argsList.remove('reflist')
        # "!=" replaces the deprecated Python 2 "<>" operator.
        if choice != 'n':
            robot.countRefs(argsList, namespaces)
    else:
        wikipedia.showHelp('refcheck')
def main():
    """Add missing <references /> sections to the selected pages."""
    # page generator
    gen = None
    # This temporary array is used to read the page title if one single
    # page to work on is specified by the arguments.
    pageTitle = []
    # Which namespaces should be processed?
    # Default to [] which means all namespaces will be processed.
    namespaces = []
    # Never ask before changing a page.
    always = False
    # This factory is responsible for processing command line arguments
    # that are also used by other scripts and that determine on which pages
    # to work on.
    genFactory = pagegenerators.GeneratorFactory()

    for arg in pywikibot.handleArgs():
        if arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = i18n.input('pywikibot-enter-xml-filename')
            else:
                xmlFilename = arg[5:]
            gen = XmlDumpNoReferencesPageGenerator(xmlFilename)
        elif arg.startswith('-namespace:'):
            try:
                namespaces.append(int(arg[11:]))
            except ValueError:
                namespaces.append(arg[11:])
        elif arg == '-always':
            always = True
        else:
            if not genFactory.handleArg(arg):
                pageTitle.append(arg)

    if pageTitle:
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitle))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        # Fall back to the per-wiki maintenance category, if one is known.
        site = pywikibot.getSite()
        try:
            cat = maintenance_category[site.family.name][site.lang]
        except KeyError:
            # No maintenance category configured for this family/language.
            # (KeyError replaces the original bare "except:".)
            pass
        else:
            import catlib
            if not namespaces:
                namespaces = [0]
            cat = catlib.Category(site,
                                  "%s:%s" % (site.category_namespace(), cat))
            gen = pagegenerators.CategorizedPageGenerator(cat)
    if not gen:
        pywikibot.showHelp('noreferences')
    else:
        if namespaces:
            gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
        preloadingGen = pagegenerators.PreloadingGenerator(gen)
        bot = NoReferencesBot(preloadingGen, always)
        bot.run()
def main():
    """Replace (or unlink) usages of an image, per the command line options."""
    old_image = None
    new_image = None
    summary = ''
    always = False
    loose = False

    # Read command line parameters: options plus up to two bare image names.
    for arg in pywikibot.handleArgs():
        if arg == '-always':
            always = True
        elif arg == '-loose':
            loose = True
        elif arg.startswith('-summary'):
            if len(arg) == len('-summary'):
                summary = pywikibot.input(u'Choose an edit summary: ')
            else:
                summary = arg[len('-summary:'):]
        elif not old_image:
            # First bare argument: the image to replace.
            old_image = arg
        else:
            # Second bare argument: the replacement image.
            new_image = arg

    if not old_image:
        pywikibot.showHelp('image')
        return

    site = pywikibot.getSite()
    image_page = pywikibot.ImagePage(site,
                                     site.image_namespace() + ':' + old_image)
    preloading = pagegenerators.PreloadingGenerator(
        pagegenerators.FileLinksGenerator(image_page))
    ImageRobot(preloading, old_image, new_image, summary, always, loose).run()
def main():
    """Count or list template transclusions, per the command line options."""
    operation = None
    argsList = []    # template names given as bare arguments
    namespaces = []  # namespaces to restrict to; [] means all

    for arg in wikipedia.handleArgs():
        if arg == '-count':
            operation = "Count"
        elif arg == '-list':
            operation = "List"
        elif arg.startswith('-namespace:'):
            # Numeric namespaces become ints; anything else stays a string.
            try:
                namespaces.append(int(arg[len('-namespace:'):]))
            except ValueError:
                namespaces.append(arg[len('-namespace:'):])
        else:
            argsList.append(arg)

    # "is None" is the idiomatic singleton test (was "== None").
    if operation is None:
        wikipedia.showHelp('templatecount')
    else:
        robot = TemplateCountRobot()
        if not argsList:
            argsList = ['ref', 'note', 'ref label', 'note label']
        if operation == "Count":
            robot.countTemplates(argsList, namespaces)
        elif operation == "List":
            robot.listTemplates(argsList, namespaces)
def main():
    """Count reference templates, or show the help if -count was not given."""
    doCount = False
    argsList = []    # template names given as bare arguments
    namespaces = []  # namespaces to restrict to; [] means all

    for arg in wikipedia.handleArgs():
        if arg == '-count':
            doCount = True
        elif arg.startswith('-namespace:'):
            # Numeric namespaces become ints; anything else stays a string.
            try:
                namespaces.append(int(arg[len('-namespace:'):]))
            except ValueError:
                namespaces.append(arg[len('-namespace:'):])
        else:
            argsList.append(arg)

    if doCount:
        robot = ReferencesRobot()
        if not argsList:
            argsList = templates
        choice = ''
        if 'reflist' in argsList:
            wikipedia.output(
                u'NOTE: it will take a long time to count "reflist".')
            choice = wikipedia.inputChoice(u'Proceed anyway?',
                                           ['yes', 'no', 'skip'],
                                           ['y', 'n', 's'], 'y')
            if choice == 's':
                argsList.remove('reflist')
        # "!=" replaces the deprecated Python 2 "<>" operator.
        if choice != 'n':
            robot.countRefs(argsList, namespaces)
    else:
        wikipedia.showHelp('refcheck')
def main():
    """Unlink every reference to a single given page."""
    # Words of the title of the one page whose incoming links are removed.
    title_words = []
    # Namespaces to restrict processing to; [] means all namespaces.
    namespaces = []
    always = False

    for arg in pywikibot.handleArgs():
        if arg.startswith('-namespace:'):
            value = arg[11:]
            # Numeric namespaces become ints; anything else stays a string.
            try:
                namespaces.append(int(value))
            except ValueError:
                namespaces.append(value)
        elif arg == '-always':
            always = True
        else:
            title_words.append(arg)

    if not title_words:
        pywikibot.showHelp('unlink')
        return

    target = pywikibot.Page(pywikibot.getSite(), ' '.join(title_words))
    UnlinkBot(target, namespaces, always).run()
def main():
    """Apply cosmetic changes to the selected pages, after a confirmation."""
    gen = None
    title_words = []  # a single page title given as bare arguments
    # Handles the standard page-selection command line options.
    genFactory = pagegenerators.GeneratorFactory()

    for arg in wikipedia.handleArgs():
        if not genFactory.handleArg(arg):
            title_words.append(arg)

    # A hard block for the Dutch Wikipedia used to live here; it was
    # superseded by the warning-and-prompt below, which applies everywhere.

    if title_words:
        page = wikipedia.Page(wikipedia.getSite(), ' '.join(title_words))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()

    if not gen:
        wikipedia.showHelp()
        return
    answer = wikipedia.inputChoice(
        warning + '\nDo you really want to continue?',
        ['yes', 'no'], ['y', 'N'], 'N')
    if answer == 'y':
        CosmeticChangesBot(pagegenerators.PreloadingGenerator(gen)).run()
def main():
    """Fix redirects, optionally restricted to links from the featured list."""
    featured = False
    gen = None
    # This factory is responsible for processing command line arguments
    # that are also used by other scripts and that determine on which pages
    # to work on.
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        if arg == '-featured':
            featured = True
        else:
            genFactory.handleArg(arg)
    mysite = pywikibot.getSite()
    # Honour the Dutch Wikipedia's ban on bot-fixed redirects.
    if mysite.sitename() == 'wikipedia:nl':
        pywikibot.output(
            u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}')
        sys.exit()
    if featured:
        # Work on the main-namespace pages linked from the featured list.
        featuredList = pywikibot.translate(mysite, featured_articles)
        ref = pywikibot.Page(pywikibot.getSite(), featuredList)
        gen = pagegenerators.ReferringPageGenerator(ref)
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, [0])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if gen:
        # Preload several pages per request, then fix each in turn.
        for page in pagegenerators.PreloadingGenerator(gen):
            workon(page)
    else:
        pywikibot.showHelp('fixing_redirects')
def main():
    """Upload the text layer of a DjVu file to a Wikisource Index page."""
    import os
    index = None  # title of the Index page to upload to
    djvu = None   # path of the local .djvu file
    pages = None  # page range specification
    # If dry is True, doesn't do any real changes, but only shows
    # what would have been changed.
    dry = False
    ask = False   # ask before uploading each page
    # Parse command line arguments
    for arg in wikipedia.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif arg.startswith("-ask"):
            ask = True
        elif arg.startswith("-djvu:"):
            djvu = arg[6:]
        elif arg.startswith("-index:"):
            index = arg[7:]
        elif arg.startswith("-pages:"):
            pages = arg[7:]
        else:
            wikipedia.output(u"Unknown argument %s" % arg)
    # Check the djvu file exists; os.stat raises OSError if it does not.
    if djvu:
        os.stat(djvu)
        if not index:
            # Default the index title to the file's base name.
            import os.path
            index = os.path.basename(djvu)
    if djvu and index:
        site = wikipedia.getSite()
        index_page = wikipedia.Page(site, index)
        # This bot only makes sense on a Wikisource wiki.
        if site.family.name != 'wikisource':
            raise wikipedia.PageNotFound(
                u"Found family '%s'; Wikisource required." % site.family.name)
        if not index_page.exists() and index_page.namespace() == 0:
            # Retry in the wiki's Index: namespace, whose localized name is
            # read from a MediaWiki message page.
            index_namespace = wikipedia.Page(
                site, 'MediaWiki:Proofreadpage index namespace').get()
            index_page = wikipedia.Page(wikipedia.getSite(),
                                        u"%s:%s" % (index_namespace, index))
        if not index_page.exists():
            raise wikipedia.NoPage(u"Page '%s' does not exist" % index)
        wikipedia.output(u"uploading text from %s to %s"
                         % (djvu, index_page.aslink()))
        bot = DjVuTextBot(djvu, index, pages, ask, dry)
        if not bot.has_text():
            raise ValueError("No text layer in djvu file")
        bot.run()
    else:
        wikipedia.showHelp()
def main():
    """Run IpNetworkBot on IP-Network pages, with IP-Host pages preloaded."""
    # If debug is True, make no real changes; only show what would change.
    debug = False
    wantHelp = False

    # Any argument other than -debug triggers the help text.
    for arg in wikipedia.handleArgs():
        if arg.startswith("-debug"):
            debug = True
        else:
            wantHelp = True

    if wantHelp:
        wikipedia.showHelp()
        return

    # Both category generators are wrapped in preloading generators so that
    # several pages are fetched per request.
    hosts_cat = catlib.Category(wikipedia.getSite(),
                                'Category:%s' % 'IP-Host')
    hosts_gen = pagegenerators.PreloadingGenerator(
        pagegenerators.CategorizedPageGenerator(hosts_cat, start=None,
                                                recurse=False))
    nets_cat = catlib.Category(wikipedia.getSite(),
                               'Category:%s' % 'IP-Network')
    nets_gen = pagegenerators.PreloadingGenerator(
        pagegenerators.CategorizedPageGenerator(nets_cat, start=None,
                                                recurse=False))
    IpNetworkBot(nets_gen, hosts_gen, debug).run()
def main():
    """Remove self-links on the selected pages."""
    # page generator
    gen = None
    pageTitle = []   # a single page title given as bare arguments
    namespaces = []  # [] means: process all namespaces
    # Handles the standard page-selection command line options.
    genFactory = pagegenerators.GeneratorFactory()
    always = False

    for arg in pywikibot.handleArgs():
        if arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = pywikibot.input(
                    u'Please enter the XML dump\'s filename:')
            else:
                xmlFilename = arg[5:]
            gen = XmlDumpSelflinkPageGenerator(xmlFilename)
        elif arg == '-sql':
            # NOT WORKING YET
            query = """
SELECT page_namespace, page_title
FROM page JOIN pagelinks JOIN text
ON (page_id = pl_from AND page_id = old_id)
WHERE pl_title = page_title
AND pl_namespace = page_namespace
AND page_namespace = 0
AND (old_text LIKE concat('%[[', page_title, ']]%')
     OR old_text LIKE concat('%[[', page_title, '|%'))
LIMIT 100"""
            gen = pagegenerators.MySQLPageGenerator(query)
        elif arg.startswith('-namespace:'):
            try:
                namespaces.append(int(arg[11:]))
            except ValueError:
                namespaces.append(arg[11:])
        elif arg == '-always':
            always = True
        else:
            if not genFactory.handleArg(arg):
                pageTitle.append(arg)

    if pageTitle:
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitle))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        pywikibot.showHelp('selflink')
    else:
        # "if namespaces" replaces the non-idiomatic "namespaces != []".
        if namespaces:
            gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
        preloadingGen = pagegenerators.PreloadingGenerator(gen)
        bot = SelflinkBot(preloadingGen, always)
        bot.run()
def main():
    """Upload the text layer of a DjVu file to a Wikisource Index page."""
    import os
    index = None  # title of the Index page to upload to
    djvu = None   # path of the local .djvu file
    pages = None  # page range specification
    # If dry is True, doesn't do any real changes, but only shows
    # what would have been changed.
    dry = False
    ask = False   # ask before uploading each page
    # Parse command line arguments
    for arg in wikipedia.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif arg.startswith("-ask"):
            ask = True
        elif arg.startswith("-djvu:"):
            djvu = arg[6:]
        elif arg.startswith("-index:"):
            index = arg[7:]
        elif arg.startswith("-pages:"):
            pages = arg[7:]
        else:
            wikipedia.output(u"Unknown argument %s" % arg)
    # Check the djvu file exists; os.stat raises OSError if it does not.
    if djvu:
        os.stat(djvu)
        if not index:
            # Default the index title to the file's base name.
            import os.path
            index = os.path.basename(djvu)
    if djvu and index:
        site = wikipedia.getSite()
        index_page = wikipedia.Page(site, index)
        # This bot only makes sense on a Wikisource wiki.
        if site.family.name != 'wikisource':
            raise wikipedia.PageNotFound(
                u"Found family '%s'; Wikisource required." % site.family.name)
        if not index_page.exists() and index_page.namespace() == 0:
            # Retry in the wiki's Index: namespace, whose localized name is
            # read from a MediaWiki message page.
            index_namespace = wikipedia.Page(
                site, 'MediaWiki:Proofreadpage index namespace').get()
            index_page = wikipedia.Page(wikipedia.getSite(),
                                        u"%s:%s" % (index_namespace, index))
        if not index_page.exists():
            raise wikipedia.NoPage(u"Page '%s' does not exist" % index)
        wikipedia.output(u"uploading text from %s to %s"
                         % (djvu, index_page.aslink()))
        bot = DjVuTextBot(djvu, index, pages, ask, dry)
        if not bot.has_text():
            raise ValueError("No text layer in djvu file")
        bot.run()
    else:
        wikipedia.showHelp()
def main():
    """Feed Romanian locality articles to the OSM writer.

    Parses the command line for "-always" (no confirmation prompts) and
    "-startPage[:<title>]" (resume point, prompted for when no value is
    given), then runs w2oWikiLinks.writeToOsm.  Any other argument shows
    the script help and aborts.
    """
    acceptall = False
    startPage = ""
    for arg in pywikibot.handleArgs():
        if arg == "-always":
            acceptall = True
        elif arg.startswith("-startPage"):
            if len(arg) == 10:
                # Bare "-startPage": prompt interactively.
                startPage = pywikibot.input(
                    u'Please input the article to start with:')
            else:
                startPage = arg[11:].encode('utf-8')
        else:
            pywikibot.showHelp(u'diacritics_redirects')
            return

    # Only consumed by the (disabled) category-fetch step below.  The long
    # run of per-county "Localități"/"Comune" categories that used to sit
    # here, entirely commented out, was removed as dead data; the two live
    # entries are kept.
    categs = ['Categorie:Municipii în România', 'Categorie:Orașe în România']

    bot = w2oWikiLinks(acceptall)
    # Disabled: pre-fetching every article from the categories above.
    # writeToOsm works from startPage directly instead.
    # for categ in categs:
    #     pywikibot.output(categ.decode("utf8"))
    #     gen = pagegenerators.CategorizedPageGenerator(
    #         catlib.Category(pywikibot.getSite(), categ.decode("utf8")))
    #     preloadingGen = pagegenerators.PreloadingGenerator(gen, 125)
    #     bot.fetchWikiArticles(preloadingGen)
    bot.writeToOsm(startPage)
def main():
    """Run BirthCatBot after verifying the textlib patch marker exists."""
    # HACK: remove once pywikipedia bug 3315395 is fixed.  The bot refuses
    # to run until the user confirms the local textlib.py patch; a marker
    # file records the answer so the question is only asked once.
    safetyLock = 'birthcat-unlock.dat'
    if not os.path.exists(safetyLock):
        choice = pywikibot.inputChoice(
            u'Have you patched textlib.py in pywikipedia?',
            ['Yes', 'No'], ['y', 'N'], 'N')
        if choice != 'y':
            return False
        open(safetyLock, 'w').close()
    # END OF HACK

    # Handles the standard page-selection command line options.
    genFactory = pagegenerators.GeneratorFactory()
    gen = None
    title_words = []  # a single page title given as bare arguments
    dry = False       # report changes without saving them
    auto = False      # run in autonomous mode

    for arg in pywikibot.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif arg.startswith("-auto"):
            auto = True
        elif not genFactory.handleArg(arg):
            # Not a standard option such as -start:XYZ or -ref:Asdf.
            title_words.append(arg)

    if title_words:
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(title_words))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()

    if not gen:
        pywikibot.showHelp()
        return
    # Preload several pages per request.
    BirthCatBot(pagegenerators.PreloadingGenerator(gen), auto, dry).run()
def main(*args):
    """Listify [[Category:test]] (with subcategories) into User:HasteurBot/Log.

    The generic category-tool argument handling this was derived from is
    disabled; the action is hard-wired to 'listify' with a fixed source and
    target, recursing into subcategories.
    """
    global catDB
    # Options actually consumed by CategoryListifyRobot below.
    editSummary = ''
    overwrite = False
    showImages = False
    talkPages = False
    catDB = CategoryDatabase()
    # Dead code removed: the duplicated `action = None` assignment, the
    # commented-out argument loop, the unreachable showHelp branch, and the
    # from/to/batch/restore/... flags that no code path read.
    oldCatTitle = 'test'
    newCatTitle = "User:HasteurBot/Log"
    recurse = True
    bot = CategoryListifyRobot(oldCatTitle, newCatTitle, editSummary,
                               overwrite, showImages, subCats=True,
                               talkPages=talkPages, recurse=recurse)
    bot.run()
def main():
    """Run SubsterBot; any command line argument just shows the help."""
    args = pywikibot.handleArgs()
    bot = SubsterBot()

    # The bot runs fully automatically and accepts no arguments; if any
    # were given, show the help text and stop.
    if args:
        pywikibot.showHelp()
        return

    try:
        bot.run()
    except KeyboardInterrupt:
        pywikibot.output('\nQuitting program...')
def main():
    """Run ReferencesRobot (reflinks) over the selected pages."""
    # Handles the standard page-selection command line options.
    genFactory = pagegenerators.GeneratorFactory()
    # (The unused `PageTitles` accumulator was removed.)
    xmlFilename = None  # XML dump to read pages from, if given
    always = False      # save without asking
    ignorepdf = False   # skip PDF targets
    limit = None        # maximum number of edits
    namespaces = []     # namespaces to restrict to; [] means all
    generator = None

    for arg in pywikibot.handleArgs():
        if arg.startswith('-namespace:'):
            try:
                namespaces.append(int(arg[11:]))
            except ValueError:
                namespaces.append(arg[11:])
        elif arg.startswith('-summary:'):
            pywikibot.setAction(arg[9:])
        elif arg == '-always':
            always = True
        elif arg == '-ignorepdf':
            ignorepdf = True
        elif arg.startswith('-limit:'):
            limit = int(arg[7:])
        elif arg.startswith('-xmlstart'):
            # Where to resume inside the dump; prompt if no value given.
            if len(arg) == 9:
                xmlStart = pywikibot.input(
                    u'Please enter the dumped article to start with:')
            else:
                xmlStart = arg[10:]
        elif arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = pywikibot.input(
                    u'Please enter the XML dump\'s filename:')
            else:
                xmlFilename = arg[5:]
        else:
            genFactory.handleArg(arg)

    if xmlFilename:
        # -xmlstart is optional; default the resume point to None.
        try:
            xmlStart
        except NameError:
            xmlStart = None
        generator = XmlDumpPageGenerator(xmlFilename, xmlStart, namespaces)
    if not generator:
        generator = genFactory.getCombinedGenerator()
    if not generator:
        # syntax error, show help text from the top of this file
        pywikibot.showHelp('reflinks')
        return
    generator = pagegenerators.PreloadingGenerator(generator, pageNumber=50)
    generator = pagegenerators.RedirectFilterPageGenerator(generator)
    bot = ReferencesRobot(generator, always, limit, ignorepdf)
    bot.run()
def main():
    """Run ReferencesRobot (reflinks) over the selected pages."""
    # Handles the standard page-selection command line options.
    genFactory = pagegenerators.GeneratorFactory()
    PageTitles = []     # NOTE(review): never appended to — appears unused
    xmlFilename = None  # XML dump to read pages from, if given
    always = False      # save without asking
    ignorepdf = False   # skip PDF targets
    limit = None        # maximum number of edits
    namespaces = []     # namespaces to restrict to; [] means all
    generator = None
    for arg in pywikibot.handleArgs():
        if arg.startswith('-namespace:'):
            # Numeric namespaces become ints; anything else stays a string.
            try:
                namespaces.append(int(arg[11:]))
            except ValueError:
                namespaces.append(arg[11:])
        elif arg.startswith('-summary:'):
            pywikibot.setAction(arg[9:])
        elif arg == '-always':
            always = True
        elif arg == '-ignorepdf':
            ignorepdf = True
        elif arg.startswith('-limit:'):
            limit = int(arg[7:])
        elif arg.startswith('-xmlstart'):
            # Where to resume inside the dump; prompt if no value given.
            if len(arg) == 9:
                xmlStart = pywikibot.input(
                    u'Please enter the dumped article to start with:')
            else:
                xmlStart = arg[10:]
        elif arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = pywikibot.input(
                    u'Please enter the XML dump\'s filename:')
            else:
                xmlFilename = arg[5:]
        else:
            genFactory.handleArg(arg)
    if xmlFilename:
        # -xmlstart is optional; default the resume point to None.
        try:
            xmlStart
        except NameError:
            xmlStart = None
        generator = XmlDumpPageGenerator(xmlFilename, xmlStart, namespaces)
    if not generator:
        generator = genFactory.getCombinedGenerator()
    if not generator:
        # syntax error, show help text from the top of this file
        pywikibot.showHelp('reflinks')
        return
    generator = pagegenerators.PreloadingGenerator(generator, pageNumber=50)
    generator = pagegenerators.RedirectFilterPageGenerator(generator)
    bot = ReferencesRobot(generator, always, limit, ignorepdf)
    bot.run()
def main():
    """Handle the textlib safety lock, parse arguments, run BirthCatBot."""
    # HACK: This can be removed when pywikipedia bug 3315395 has been fixed
    safetyLock = "birthcat-unlock.dat"
    if not os.path.exists(safetyLock):
        choice = pywikibot.inputChoice(
            u"Have you patched textlib.py in pywikipedia?",
            ["Yes", "No"], ["y", "N"], "N")
        if choice != "y":
            return False
        open(safetyLock, "w").close()
    # END OF HACK
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    gen = None              # generator yielding the pages to work on
    pageTitleParts = []     # words of a single page title, if one is given
    dry = False             # -dry: only report, never save
    auto = False            # -auto: run in autonomous mode
    # Parse command line arguments.
    for arg in pywikibot.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif arg.startswith("-auto"):
            auto = True
        elif not genFactory.handleArg(arg):
            # Not a standard generator argument, so it is part of a title.
            pageTitleParts.append(arg)
    if pageTitleParts:
        # We will only work on a single page.
        page = pywikibot.Page(pywikibot.getSite(), " ".join(pageTitleParts))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        pywikibot.showHelp()
        return
    # Preloading fetches several pages per request to save round trips.
    bot = BirthCatBot(pagegenerators.PreloadingGenerator(gen), auto, dry)
    bot.run()
def main():
    """Parse command line options and dispatch each selected page to workon()."""
    start = '!'
    featured = False
    title = None
    namespace = None
    gen = None
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    for arg in wikipedia.handleArgs():
        if arg == '-featured':
            featured = True
        elif arg.startswith('-page'):
            title = (wikipedia.input(u'Which page should be processed?')
                     if len(arg) == 5 else arg[6:])
        elif arg.startswith('-namespace'):
            if len(arg) == 10:
                namespace = int(
                    wikipedia.input(u'Which namespace should be processed?'))
            else:
                namespace = int(arg[11:])
        else:
            genFactory.handleArg(arg)
    gen = genFactory.getCombinedGenerator()
    mysite = wikipedia.getSite()
    if mysite.sitename() == 'wikipedia:nl':
        wikipedia.output(u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}')
        sys.exit()
    # NOTE(review): 'linktrail' is unused below; the call is kept in case
    # mysite.linktrail() has side effects relied on elsewhere -- confirm.
    linktrail = mysite.linktrail()
    if featured:
        # Work on every mainspace page linked from the featured list.
        featuredList = wikipedia.translate(mysite, featured_articles)
        ref = wikipedia.Page(wikipedia.getSite(), featuredList)
        for page in pagegenerators.NamespaceFilterPageGenerator(
                pagegenerators.ReferringPageGenerator(ref), [0]):
            workon(page)
    elif title is not None:
        workon(wikipedia.Page(wikipedia.getSite(), title))
    elif namespace is not None:
        for page in pagegenerators.AllpagesPageGenerator(
                start=start, namespace=namespace, includeredirects=False):
            workon(page)
    elif gen:
        for page in pagegenerators.PreloadingGenerator(gen):
            workon(page)
    else:
        wikipedia.showHelp('fixing_redirects')
def main():
    """Run BlockreviewBot; any command line argument shows the help instead."""
    show = False
    # This bot takes no script-specific options, so the presence of any
    # argument means the user wants the help text.
    for arg in pywikibot.handleArgs():
        show = True
    if show:
        pywikibot.showHelp()
    else:
        # Passes pywikibot's global simulate flag to the bot (presumably
        # its dry-run switch -- confirm against BlockreviewBot.__init__).
        bot = BlockreviewBot(pywikibot.simulate)
        bot.run()
def main():
    """Parse command line arguments and run the MuseumCategoryBot.

    Options: -dry (no real changes), -always (never ask), -input (read
    Yomi data), -outputwiki (output Yomi data to the wiki); any other
    argument either selects pages via the factory or forms a page title.
    """
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    # The generator gives the pages that should be worked upon.
    gen = None
    # Collects the words of a single page title given on the command line.
    pageTitleParts = []
    # If dry is True, doesn't do any real changes, only shows them.
    dry = False
    # Becomes True when the user uses the -always flag.
    always = False
    # Renamed from 'input' so the builtin input() is no longer shadowed.
    readInput = False
    outputwiki = False
    # Parse command line arguments.
    for arg in pywikibot.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif arg.startswith("-always"):
            always = True
        elif arg.startswith("-input"):
            readInput = True
        elif arg.startswith("-outputwiki"):
            outputwiki = True
        else:
            # Check if a standard argument like -start:XYZ or -ref:Asdf
            # was given; otherwise treat it as part of a page title.
            if not genFactory.handleArg(arg):
                pageTitleParts.append(arg)
    if pageTitleParts != []:
        # We will only work on a single page.
        pageTitle = ' '.join(pageTitleParts)
        page = pywikibot.Page(pywikibot.getSite(), pageTitle)
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if gen:
        # The preloading generator downloads multiple pages from the wiki
        # simultaneously.
        gen = pagegenerators.PreloadingGenerator(gen)
        bot = MuseumCategoryBot(gen, dry, always, readInput, outputwiki)
        bot.run()
    else:
        pywikibot.showHelp()
def main():
    """Log in, build the IRC-driven Subster bot and run it until interrupted."""
    args = pywikibot.handleArgs()
    site = pywikibot.getSite()
    site.forceLogin()
    # IRC channel name derived from the wiki, e.g. '#en.wikipedia'.
    chan = '#' + site.language() + '.' + site.family.name
    bot = SubsterTagModifiedBot(site, chan, site.loggedInAs(),
                                "irc.wikimedia.org")
    # No script-specific arguments are supported: any leftover argument
    # prints the help and aborts.
    for unexpected in args:
        pywikibot.showHelp()
        return
    try:
        bot.start()
    except KeyboardInterrupt:
        pywikibot.output('\nQuitting program...')
def main(*args):
    """Print a numbered list of the pages selected by the generator arguments.

    Shows the help when an argument is not understood or when no
    generator could be built from the arguments.
    """
    genFactory = GeneratorFactory()
    for arg in pywikibot.handleArgs(*args):
        if not genFactory.handleArg(arg):
            pywikibot.showHelp()
            break
    else:
        # Only reached when every argument was accepted by the factory.
        gen = genFactory.getCombinedGenerator()
        if not gen:
            pywikibot.showHelp()
            return
        for num, page in enumerate(gen, start=1):
            pywikibot.output("%4d: %s" % (num, page.title()), toStdout=True)
def main():
    """Parse the optional -hours option and start the sandbox cleaner."""
    hours = 1
    no_repeat = True
    for arg in wikipedia.handleArgs():
        if not arg.startswith('-hours:'):
            # Anything other than -hours: is unsupported; show the help.
            wikipedia.showHelp('clean_sandbox')
            return
        # -hours:N makes the bot repeat its run every N hours.
        hours = float(arg[7:])
        no_repeat = False
    bot = SandboxBot(hours, no_repeat)
    bot.run()
def main():
    """
    Process command line arguments and invoke bot.

    Options: -namespace:N (repeatable filter), -always (never ask),
    -to13, -format; other arguments select pages via the factory or form
    a single page title.  The local formerly named ``format`` was renamed
    to ``fmt`` so the builtin is not shadowed.
    """
    # Page generator.
    gen = None
    # Words of a single page title, if one is given on the command line.
    pageTitle = []
    # Namespaces to process; [] means all namespaces.
    namespaces = []
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    # Never ask before changing a page.
    always = False
    to13 = False
    fmt = False
    for arg in pywikibot.handleArgs():
        if arg.startswith('-namespace:'):
            try:
                namespaces.append(int(arg[11:]))
            except ValueError:
                # Non-numeric namespace identifiers are kept as strings.
                namespaces.append(arg[11:])
        elif arg == '-always':
            always = True
        elif arg == '-to13':
            to13 = True
        elif arg == '-format':
            fmt = True
        elif not genFactory.handleArg(arg):
            pageTitle.append(arg)
    site = pywikibot.getSite()
    if pageTitle:
        gen = iter([pywikibot.Page(site, t) for t in pageTitle])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        pywikibot.showHelp('isbn')
        return
    if namespaces != []:
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
    # Preload pages in batches to cut down on network round trips.
    preloadingGen = pagegenerators.PreloadingGenerator(gen)
    bot = IsbnBot(preloadingGen, to13=to13, format=fmt, always=always)
    bot.run()
def main():
    """Parse command line arguments and run the CosmeticChangesBot.

    Options: -summary:TEXT (custom edit summary), -always (skip the
    confirmation prompt); other arguments select pages via the factory
    or form a single page title.

    Fix: previously, when no generator could be built, the help text was
    shown but ``answer`` still defaulted to 'y' and the bot path ran with
    ``gen=None``; an early return now prevents that.  A block of
    commented-out dead code (a disabled nl.wikipedia check) was removed.
    """
    # Page generator.
    gen = None
    # Words of a single page title, if one is given on the command line.
    pageTitle = []
    editSummary = ''
    answer = 'y'
    always = False
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        if arg.startswith('-summary:'):
            editSummary = arg[len('-summary:'):]
        elif arg == '-always':
            always = True
        elif not genFactory.handleArg(arg):
            pageTitle.append(arg)
    if editSummary == '':
        # Load default summary message.
        editSummary = pywikibot.translate(pywikibot.getSite(), msg_standalone)
    if pageTitle:
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitle))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        pywikibot.showHelp()
        return
    if not always:
        # Warn before running cosmetic changes as a stand-alone bot.
        answer = pywikibot.inputChoice(
            warning + '\nDo you really want to continue?',
            ['yes', 'no'], ['y', 'N'], 'N')
    if answer == 'y':
        preloadingGen = pagegenerators.PreloadingGenerator(gen)
        bot = CosmeticChangesBot(preloadingGen, acceptall=always,
                                 comment=editSummary)
        bot.run()
def main(*args):
    """Listify the hard-coded 'test' category onto User:HasteurBot/Log.

    Command line parsing is currently disabled; the action is fixed to
    'listify'.  More than a dozen unused option variables and the
    commented-out argument-parsing loop were removed; behavior is
    unchanged.
    """
    global catDB
    editSummary = ''
    overwrite = False
    showImages = False
    talkPages = False
    # Shared category database, published through the module-level global.
    catDB = CategoryDatabase()
    action = 'listify'
    if action == 'listify':
        # Source category title is hard-coded for this test run.
        oldCatTitle = 'test'
        newCatTitle = "User:HasteurBot/Log"
        recurse = True
        bot = CategoryListifyRobot(oldCatTitle, newCatTitle, editSummary,
                                   overwrite, showImages, subCats=True,
                                   talkPages=talkPages, recurse=recurse)
        bot.run()
    else:
        pywikibot.showHelp('category')
def main():
    """Build a SubsterBot, honouring an optional -page:Title restriction."""
    args = pywikibot.handleArgs()
    # Intended for several users; fully continuous automation is an open
    # question (original author's note).
    bot = SubsterBot()
    for arg in args:
        if arg[:5] == '-page':
            # -page:Title limits the run to that single page.
            bot.pagegen = [pywikibot.Page(bot.site, arg[6:])]
        else:
            pywikibot.showHelp()
            return
    try:
        bot.run()
    except KeyboardInterrupt:
        pywikibot.output('\nQuitting program...')
def main(*args):
    """Listify 'AfC submissions with missing AfC template' onto the log page.

    The action is hard-coded to 'listify'.  A stray bare ``fromGiven``
    expression statement (a no-op left over from the disabled argument
    parsing) and the block of unused option variables were removed;
    behavior is unchanged.
    """
    global catDB
    global logger
    editSummary = ''
    overwrite = False
    showImages = False
    talkPages = False
    # Shared category database, published through the module-level global.
    catDB = CategoryDatabase()
    action = 'listify'
    if action == 'listify':
        oldCatTitle = 'AfC submissions with missing AfC template'
        newCatTitle = "User:HasteurBot/Log"
        recurse = True
        logger.info('Starting Nudge run over %s' % oldCatTitle)
        bot = CategoryListifyRobot(oldCatTitle, newCatTitle, editSummary,
                                   overwrite, showImages, subCats=True,
                                   talkPages=talkPages, recurse=recurse)
        bot.run()
    else:
        pywikibot.showHelp('category')
def main():
    """Process the selected pages while honouring a remote shutoff switch.

    After each page pageHandle() reports as handled, the on-wiki shutoff
    page is fetched; the run aborts unless it contains 'RUN: YES'.  Two
    consecutive MaxTriesExceededError failures also abort the run.
    """
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    gen = None              # generator yielding the pages to work on
    pageTitleParts = []     # words of a single page title, if one is given
    dry = False             # -dry: only show what would have been changed
    # Parse command line arguments.
    for arg in wikipedia.handleArgs():
        if arg.startswith("-dry"):
            dry = True
        elif not genFactory.handleArg(arg):
            # Not a standard generator argument, so part of a page title.
            pageTitleParts.append(arg)
    if pageTitleParts != []:
        # We will only work on a single page.
        page = wikipedia.Page(wikipedia.getSite(), ' '.join(pageTitleParts))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        wikipedia.showHelp()
        return
    # Preloading fetches several pages per request to save round trips.
    gen = pagegenerators.PreloadingGenerator(gen)
    failures = 0    # consecutive MaxTriesExceededError count
    for page in gen:
        try:
            if pageHandle(page):
                # An edit happened: re-check the remote shutoff page.
                if 'RUN: YES' not in urlopen(RAW_URL % SHUTOFF).read():
                    quit('Shutoff')
            failures = 0
        except wikipedia.exceptions.MaxTriesExceededError:
            failures += 1
            if failures >= 2:
                quit('DAMNIT!')
def main():
    """Parse command line arguments and run the InfoboxBot.

    Options: -img, -info, -imdb toggle bot features; -reg is shorthand
    for '-cat:Film articles needing an infobox'; other arguments select
    pages via the factory or form a single page title.
    """
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    gen = None              # generator yielding the pages to work on
    pageTitleParts = []     # words of a single page title, if one is given
    img = False
    info = False
    imdb = False
    # Parse command line arguments.
    for arg in pywikibot.handleArgs():
        if arg.startswith("-img"):
            img = True
        elif arg.startswith("-info"):
            info = True
        elif arg.startswith("-imdb"):
            imdb = True
        else:
            if arg.startswith("-reg"):
                # -reg expands to a fixed category selection.
                arg = '-cat:Film articles needing an infobox'
            # Standard arguments like -start:XYZ go to the factory; any
            # leftover word becomes part of a page title.
            if not genFactory.handleArg(arg):
                pageTitleParts.append(arg)
    if pageTitleParts != []:
        page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitleParts))
        gen = iter([page])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if gen:
        # Map the selected talk pages to their subject pages and preload
        # them in batches.
        gen = pagegenerators.PreloadingGenerator(
            filmfunctions.PagesFromTalkPagesGenerator(gen))
        bot = InfoboxBot(gen, img, info, imdb)
        bot.run()
    else:
        pywikibot.showHelp()
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    summary = None
    generator = None
    always = False
    # Restrict the -checkcurrent walk to namespace 14 (categories).
    ns = [14]
    # Process global args and prepare generator args parser.
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs(*args):
        if arg.startswith('-summary'):
            summary = (pywikibot.input(u'What summary do you want to use?')
                       if len(arg) == 8 else arg[9:])
        elif arg.startswith('-checkcurrent'):
            # Walk every category transcluding the local Commonscat
            # template.
            primaryCommonscat, commonscatAlternatives = \
                CommonscatBot.getCommonscatTemplate(
                    pywikibot.getSite().language())
            generator = pagegenerators.NamespaceFilterPageGenerator(
                pagegenerators.ReferringPageGenerator(
                    pywikibot.Page(pywikibot.getSite(),
                                   u'Template:' + primaryCommonscat),
                    onlyTemplateInclusion=True),
                ns)
        elif arg == '-always':
            always = True
        else:
            genFactory.handleArg(arg)
    if not generator:
        generator = genFactory.getCombinedGenerator()
    if not generator:
        pywikibot.showHelp()
        return
    # Preload pages in batches to cut down on network round trips.
    pregenerator = pagegenerators.PreloadingGenerator(generator)
    bot = CommonscatBot(pregenerator, always, summary)
    bot.run()
def main():
    """Parse command line arguments and run the NoReferencesBot.

    Options: -xml[:filename] reads pages from an XML dump, -namespace:N
    filters (repeatable), -always never asks; other arguments select
    pages via the factory or form a single page title.
    """
    gen = None              # page generator
    pageTitle = []          # words of a single page title, if one is given
    namespaces = []         # [] means: process all namespaces
    always = False          # never ask before changing a page
    # Factory for the page-selection arguments shared with other scripts.
    genFactory = pagegenerators.GeneratorFactory()
    for arg in wikipedia.handleArgs():
        if arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = wikipedia.input(
                    u'Please enter the XML dump\'s filename:')
            else:
                xmlFilename = arg[5:]
            gen = XmlDumpNoReferencesPageGenerator(xmlFilename)
        elif arg.startswith('-namespace:'):
            try:
                namespaces.append(int(arg[11:]))
            except ValueError:
                # Keep non-numeric namespace identifiers as strings.
                namespaces.append(arg[11:])
        elif arg == '-always':
            always = True
        elif not genFactory.handleArg(arg):
            pageTitle.append(arg)
    if pageTitle:
        gen = iter([wikipedia.Page(wikipedia.getSite(),
                                   ' '.join(pageTitle))])
    if not gen:
        gen = genFactory.getCombinedGenerator()
    if not gen:
        wikipedia.showHelp('noreferences')
        return
    if namespaces != []:
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
    # Preload pages in batches to cut down on network round trips.
    preloadingGen = pagegenerators.PreloadingGenerator(gen)
    bot = NoReferencesBot(preloadingGen, always)
    bot.run()
def main():
    """Run BlockreviewBot; -dry avoids real edits, anything else shows help."""
    # If dry is True, no real changes are made -- only reported.
    dry = False
    # Becomes True when an unrecognised argument is seen.
    show = False
    # Parse command line arguments.
    for arg in pywikibot.handleArgs():
        if arg == "-dry":
            dry = True
        else:
            show = True
    if show:
        pywikibot.showHelp()
    else:
        BlockreviewBot(dry).run()