def cherry_pick(oldtext, newtext, n=0, by_letter=False):
    """Propose a list of changes for approval.

    Text with approved changes will be returned.

    n: int, line of context as defined in difflib.get_grouped_opcodes().
    by_letter: if text_a and text_b are single lines, comparison can be done
    """
    # Centered 50-char banner; positional arg 2 supplies the surrounding
    # newlines (or '' for the "None." case below).
    FORMAT = "{2}{lightpurple}{0:{1}^50}{default}{2}"
    patch = PatchManager(oldtext, newtext, n=n, by_letter=by_letter)

    # 1) Show every hunk of the diff up front.
    pywikibot.output(color_format(FORMAT, " ALL CHANGES ", "*", "\n"))
    for hunk in patch.hunks:
        pywikibot.output(hunk.diff_text)

    # 2) Interactive review: patch.apply() prompts per hunk and returns the
    #    resulting text as a list of lines/fragments.
    pywikibot.output(color_format(FORMAT, " REVIEW CHANGES ", "*", "\n"))
    text_list = patch.apply()

    # 3) Echo only the hunks the operator approved.
    pywikibot.output(color_format(FORMAT, " APPROVED CHANGES ", "*", "\n"))
    if any(hunk.reviewed == hunk.APPR for hunk in patch.hunks):
        for hunk in patch.hunks:
            if hunk.reviewed == hunk.APPR:
                pywikibot.output(hunk.diff_text)
    else:
        pywikibot.output(color_format(FORMAT, "None.", "", ""))

    text = "".join(text_list)
    return text
def test_marker(self):
    r"""Test that the \03 marker is only allowed in front of colors."""
    # A marker placed directly before a color field is passed through.
    result = formatter.color_format("{0}\03{black}", 42)
    self.assertEqual(result, "42\03{black}")
    # A marker preceding a plain replacement field must raise.
    self.assertRaisesRegex(ValueError, r".*\\03", formatter.color_format,
                           "\03{0}{black}", 42)
    # A marker followed by literal text before the color field must raise.
    self.assertRaisesRegex(ValueError, r".*\\03", formatter.color_format,
                           "{0}\03before{black}", 42)
def main():
    """Print a table of all foreground/background color combinations.

    Builds the foreground color list with a 'default' entry inserted at
    each quarter boundary, then prints one row per background color.
    """
    fg_colors = [col for col in colors if col != 'default']
    bg_colors = fg_colors[:]
    n_fg_colors = len(fg_colors)
    # Insert 'default' at each quarter boundary (from the back so the
    # earlier insertions do not shift the later ones).
    # Use floor division: true division would yield a float index/size
    # under Python 3.
    fg_colors.insert(3 * (n_fg_colors // 4), 'default')
    fg_colors.insert(2 * (n_fg_colors // 4), 'default')
    fg_colors.insert(n_fg_colors // 4, 'default')
    fg_colors.insert(0, 'default')

    # Max len of color names for padding.
    max_len_fg_colors = len(max(fg_colors, key=len))
    max_len_bc_color = len(max(bg_colors, key=len))

    for bg_col in bg_colors:
        # Four lines per background color (one per quarter group).
        # itergroup needs an int group size; '/' would pass a float.
        for fg_col_group in itergroup(fg_colors, n_fg_colors // 4 + 1):
            line = ''
            for fg_col in fg_col_group:
                line += ' '
                line += color_format('{color}{0}{default}',
                                     fg_col.ljust(max_len_fg_colors),
                                     color='%s;%s' % (fg_col, bg_col))
            line = '{0} {1}'.format(bg_col.ljust(max_len_bc_color), line)
            pywikibot.output(line)
        pywikibot.output('')
def get_reason_for_deletion(self, page):
    """Get a reason for speedy deletion from operator.

    Shows the guessed reason, and - if the wiki has a table of canned
    reasons - lists them so the operator can pick one by key. Returns
    the operator's text, the expanded canned reason, or the suggestion
    when the operator just presses enter.
    """
    suggested_reason = self.guess_reason_for_deletion(page)
    pywikibot.output(
        color_format('The suggested reason is: {lightred}{}{default}',
                     suggested_reason))

    # We don't use i18n.translate() here because for some languages the
    # entry is intentionally left out.
    if self.site.family.name in self.delete_reasons \
            and page.site.lang in self.delete_reasons[self.site.family.name]:
        local_reasons = i18n.translate(page.site.lang, self.delete_reasons)
        pywikibot.output('')
        for key in sorted(local_reasons.keys()):
            pywikibot.output((key + ':').ljust(8) + local_reasons[key])
        pywikibot.output('')
        reason = pywikibot.input(
            fill('Please enter the reason for deletion, choose a default '
                 'reason, or press enter for the suggested message:'))
        # Normalize before both the membership test AND the lookup;
        # the original indexed with the unstripped input, raising
        # KeyError for inputs with surrounding whitespace.
        key = reason.strip()
        if key in local_reasons:
            reason = local_reasons[key]
    else:
        reason = pywikibot.input(
            'Please enter the reason for deletion,\n'
            'or press enter for the suggested message:')

    return reason or suggested_reason
def __init__(self, *arg):
    """Set up the IRC bot, redirect Lua's print and preload config pages."""
    pywikibot.output(color_format("{lightgreen}* Initialization of bot{default}"))
    pywikibot.botirc.IRCBot.__init__(self, *arg)

    # init environment with minimal changes (try to do as less as possible)
    # - Lua -
    pywikibot.output("** Redirecting Lua print in order to catch it")
    lua.execute("__print = print")
    lua.execute("print = python.globals().pywikibot.output")
    # It may be useful in debugging to install the 'print' builtin
    # as the 'print' function in lua. To do this:
    # lua.execute('print = python.builtins().print')

    # init constants: the shell-config and crontab-config wiki pages
    templ = pywikibot.Page(self.site, bot_config["ConfCSSshell"])
    cron = pywikibot.Page(self.site, bot_config["ConfCSScrontab"])
    self.templ = templ.title()
    self.cron = cron.title()
    self.refs = {self.templ: templ, self.cron: cron}

    pywikibot.output("** Pre-loading all relevant page contents")
    for item in self.refs:
        # security; first check if page is protected, reject any data if not
        # (.css/.js userpages are automatically semi-protected by MediaWiki)
        if os.path.splitext(self.refs[item].title().lower())[1] not in [".css", ".js"]:
            raise ValueError(
                "%s config %s = %s is not a secure page; "
                "it should be a css or js userpage which are "
                "automatically semi-protected."
                % (self.__class__.__name__, item, self.refs[item])
            )
        self.refs[item].get(force=True)  # load all page contents

    # init background timer
    pywikibot.output("** Starting crontab background timer thread")
    self.on_timer()
def show_status(message=Msg.DEFAULT):
    """Output colorized status."""
    label, colour = message.value
    status = color_format('{color}[{msg:5}]{default} ',
                          msg=label, color=colour)
    # No trailing newline so the caller's text continues on the same line.
    pywikibot.output(status, newline=False)
def treat(self, page):
    """Treat a single page.

    Collects candidate images (from interwiki pages, the file page
    itself, or the page's image links) and interactively transfers
    the ones the operator selects.
    """
    if self.opt.interwiki:
        # Gather images from every interwiki-linked page.
        imagelist = []
        for linkedPage in page.interwiki():
            linkedPage = pywikibot.Page(linkedPage)
            imagelist.extend(linkedPage.imagelinks())
    elif page.is_filepage():
        imagePage = pywikibot.FilePage(page.site, page.title())
        imagelist = [imagePage]
    else:
        imagelist = list(page.imagelinks())

    while imagelist:
        self.show_image_list(imagelist)
        if len(imagelist) == 1:
            # no need to query the user, only one possibility
            todo = 0
        else:
            pywikibot.output('Give the number of the image to transfer.')
            todo = pywikibot.input('To end uploading, press enter:')
            if not todo:
                break
            # NOTE(review): non-numeric input raises ValueError here -
            # confirm the caller tolerates that.
            todo = int(todo)
        if 0 <= todo < len(imagelist):
            if self.transfer_allowed(imagelist[todo]):
                self.transfer_image(imagelist[todo])
            # remove the selected image from the list
            imagelist.pop(todo)
        else:
            pywikibot.output(
                color_format('{yellow}No such image number.{default}'))
def save(self, text, page, comment, minorEdit=True, botflag=True):
    """Save the text.

    Shows a diff and asks for confirmation (unless self.dry).
    Returns True on a successful save; returns None (implicitly) when
    nothing changed, the operator declined, or the save failed.
    """
    if text != page.text:
        # Show the title of the page we're working on.
        # Highlight the title in purple.
        pywikibot.output(color_format(
            '\n\n>>> {lightpurple}{0}{default} <<<', page.title()))
        # show what was changed
        pywikibot.showDiff(page.get(), text)
        pywikibot.output(u'Comment: %s' % comment)
        if not self.dry:
            if pywikibot.input_yn(
                    u'Do you want to accept these changes?',
                    default=False, automatic_quit=False):
                page.text = text
                try:
                    # Save the page
                    page.save(summary=comment, minorEdit=minorEdit,
                              botflag=botflag)
                except pywikibot.LockedPage:
                    pywikibot.output(u"Page %s is locked; skipping."
                                     % page.title(asLink=True))
                except pywikibot.EditConflict:
                    pywikibot.output(
                        u'Skipping %s because of edit conflict'
                        % (page.title()))
                except pywikibot.SpamfilterError as error:
                    pywikibot.output(
                        u'Cannot change %s because of spam blacklist entry '
                        u'%s' % (page.title(), error.url))
                else:
                    # Only reached when page.save() did not raise.
                    return True
def treat_page(self):
    """Treat current page.

    Walks every {{Information}} template, cleans its description value,
    asks the operator for the text's language, and wraps the description
    in the corresponding language template.
    """
    page = self.current_page
    code = mwparserfromhell.parse(page.text)
    edited = False  # to prevent unwanted changes
    for template in code.ifilter_templates():
        if not page.site.sametitle(template.name.strip(), 'Information'):
            continue
        desc = self.get_description(template)
        if desc is None:
            continue
        # First pass: let subclass hook handle known inner templates.
        for tmp in desc.value.filter_templates(recursive=False):
            if self.process_desc_template(tmp):
                edited = True
        # Work on a deep copy so removals don't touch the live tree.
        desc_clean = copy.deepcopy(desc.value)
        for tmp in desc_clean.filter_templates(recursive=False):
            # TODO: emit a debug item?
            desc_clean.remove(tmp)
        value = desc_clean.strip()
        if value == '':
            pywikibot.output('Empty description')
            continue
        pywikibot.output(value)
        langs = self.detect_langs(value)
        if langs:
            pywikibot.output(color_format(
                '{lightblue}Hints from langdetect:{default}'))
            for language in langs:
                # Braces are doubled so str.format leaves the color
                # markers literal for color_format's color pass.
                pywikibot.output(color_format(
                    '{{lightblue}}{obj.lang}: {obj.prob}{{default}}',
                    obj=language))
        lang = pywikibot.input(
            'Enter the language of the displayed text:').strip()
        if lang != '':
            tmp_page = pywikibot.Page(page.site, lang, ns=10)
            if tmp_page not in self.lang_tmps:
                # Warn but proceed anyway with the operator's choice.
                pywikibot.warning(
                    '"{lang}" is not a valid language template on {site}'
                    .format(lang=lang, site=page.site))
            new = mwparserfromhell.nodes.template.Template(lang, [value])
            self.replace_value(desc, new)
            edited = True
    if edited:
        text = str(code)
        summary = i18n.translate(page.site.lang, self.comment,
                                 fallback=True)
        self.put_current(text, summary=summary)
def useHashGenerator(self):
    """Yield [local_image, commons_image] pairs from the toolserver tool.

    Scrapes multichill's nowcommons report page by page, opens both
    images in a browser and asks the operator to confirm each match.
    """
    # https://toolserver.org/~multichill/nowcommons.php?language=it&page=2&filter=
    lang = self.site.lang
    num_page = 0
    word_to_skip_translated = i18n.translate(self.site, word_to_skip)
    images_processed = list()
    while 1:
        url = ('https://toolserver.org/~multichill/nowcommons.php?'
               'language=%s&page=%s&filter=') % (lang, num_page)
        HTML_text = self.site.getUrl(url, no_hostname=True)
        # Scrape the local-image / commons-image link pairs out of the
        # report's HTML table.
        # NOTE(review): the '\ >' sequence in the second part looks like a
        # mangled escape (matches a literal space before '>') - confirm
        # against the tool's actual markup.
        reg = r'<[Aa] href="(?P<urllocal>.*?)">(?P<imagelocal>.*?)</[Aa]> +?</td><td>\n\s*?'
        reg += r'<[Aa] href="(?P<urlcommons>http[s]?://commons.wikimedia.org/.*?)" \ >Image:(?P<imagecommons>.*?)</[Aa]> +?</td><td>'
        regex = re.compile(reg, re.UNICODE)
        found_something = False
        change_page = True
        for x in regex.finditer(HTML_text):
            found_something = True
            image_local = x.group('imagelocal')
            image_commons = x.group('imagecommons')
            if image_local in images_processed:
                continue
            # At least one new image on this page: stay on it next round.
            change_page = False
            images_processed.append(image_local)
            # Skip images that have something in the title (useful for it.wiki)
            image_to_skip = False
            for word in word_to_skip_translated:
                if word.lower() in image_local.lower():
                    image_to_skip = True
            if image_to_skip:
                continue
            url_local = x.group('urllocal')
            url_commons = x.group('urlcommons')
            pywikibot.output(color_format(
                '\n\n>>> {lightpurple}{0}{default} <<<', image_local))
            pywikibot.output(u'Local: %s\nCommons: %s\n'
                             % (url_local, url_commons))
            # Open both images for visual comparison by the operator.
            webbrowser.open(url_local, 0, 1)
            webbrowser.open(url_commons, 0, 1)
            if image_local.split('Image:')[1] == image_commons:
                choice = pywikibot.input_yn(
                    u'The local and the commons images have the same name, '
                    'continue?', default=False, automatic_quit=False)
            else:
                choice = pywikibot.input_yn(
                    u'Are the two images equal?',
                    default=False, automatic_quit=False)
            if choice:
                yield [image_local, image_commons]
            else:
                continue
        # The page is dynamically updated, so we may not need to change it
        if change_page:
            num_page += 1
        # If no image was found it means there aren't any more: stop.
        if not found_something:
            break
def color_line(self, line, line_ref=None):
    """Color line characters.

    If line_ref is None, the whole line is colored.
    If line_ref[i] is not blank, line[i] is colored.
    Color depends if line starts with +/-.

    line: string
    line_ref: string.
    """
    # The diff marker ('+'/'-') selects the color.
    color = line[0]

    if line_ref is None:
        # Whole-line mode: wrap the entire line in one color span.
        if color in self.colors:
            colored_line = color_format('{color}{0}{default}', line,
                                        color=self.colors[color])
            return colored_line
        else:
            return line

    # Character mode: open/close color spans as the reference mask
    # switches between blank and non-blank. color_closed tracks whether
    # we are currently outside a color span.
    colored_line = ''
    color_closed = True
    for char, char_ref in zip_longest(
            line, line_ref.strip(), fillvalue=' '):
        char_tagged = char
        if color_closed:
            if char_ref != ' ':
                # Entering a highlighted run: foreground color for
                # visible chars, background color for spaces.
                if char != ' ':
                    apply_color = self.colors[color]
                else:
                    apply_color = 'default;' + self.bg_colors[color]
                char_tagged = color_format('{color}{0}', char,
                                           color=apply_color)
                color_closed = False
        else:
            if char_ref == ' ':
                # Leaving the highlighted run: reset to default.
                char_tagged = color_format('{default}{0}', char)
                color_closed = True
        colored_line += char_tagged

    # Close a span left open at end of line.
    if not color_closed:
        colored_line += color_format('{default}')
    return colored_line
def treat_page(self):
    """Process one page.

    Prints the (wrapped, possibly truncated) page text between rulers,
    then asks the operator whether to delete, skip, update the worklist
    or quit.
    """
    page = self.current_page
    # Wrap every source line to 79 columns; keep empty lines as ''.
    page_text = []
    for text in page.text.split('\n'):
        page_text += wrap(text, width=79) or ['']
    pywikibot.output(color_format('{blue}{}{default}', '_' * 80))
    if len(page_text) > self.LINES:
        pywikibot.output(
            color_format(
                '{blue}The page detail is too many lines, '
                'only output first {} lines:{default}', self.LINES))
    # Slicing already clamps to the list length; the original wrapped
    # self.LINES in a redundant min(..., len(page_text)).
    pywikibot.output('\n'.join(page_text[:self.LINES]))
    pywikibot.output(color_format('{blue}{}{default}', '_' * 80))

    choice = pywikibot.input_choice('Input action?',
                                    [('delete', 'd'), ('skip', 's'),
                                     ('update', 'u'), ('quit', 'q')],
                                    default='s', automatic_quit=False)
    # quit the bot
    if choice == 'q':
        self.quitting = True
        self.quit()
    # stop the generator and restart from current title
    elif choice == 'u':
        pywikibot.output('Updating from CSD category.')
        self.saved_progress = page.title()
        self.stop()
    # delete the current page
    elif choice == 'd':
        reason = self.get_reason_for_deletion(page)
        pywikibot.output(
            color_format('The chosen reason is: {lightred}{}{default}',
                         reason))
        page.delete(reason, prompt=False)
    # skip this page
    else:
        pywikibot.output('Skipping page {}'.format(page))
def _flush(stop: bool = True) -> None:
    """
    Drop this process from the throttle log, after pending threads finish.

    Wait for the page-putter to flush its queue. Also drop this process from
    the throttle log. Called automatically at Python exit.
    """
    _logger = 'wiki'
    debug('_flush() called', _logger)

    def remaining() -> Tuple[int, datetime.timedelta]:
        # Pages still queued and an ETA based on the configured throttle.
        remainingPages = page_put_queue.qsize()
        if stop:
            # -1 because we added a None element to stop the queue
            remainingPages -= 1
        remainingSeconds = datetime.timedelta(
            seconds=round(remainingPages * _config.put_throttle))
        return (remainingPages, remainingSeconds)

    if stop:
        # None task element leaves async_manager
        page_put_queue.put((None, [], {}))

    num, sec = remaining()
    if num > 0 and sec.total_seconds() > _config.noisysleep:
        output(
            color_format(
                '{lightblue}Waiting for {num} pages to be put. '
                'Estimated time remaining: {sec}{default}', num=num, sec=sec))

    # Joining ourselves would deadlock if called from the putter thread.
    if _putthread is not threading.current_thread():
        while (_putthread.is_alive()
                and (page_put_queue.qsize() > 0
                     or page_put_queue_busy.qsize() > 0)):
            try:
                _putthread.join(1)
            except KeyboardInterrupt:
                if input_yn(
                        'There are {} pages remaining in the queue. '
                        'Estimated time remaining: {}\nReally exit?'.format(
                            *remaining()),
                        default=False, automatic_quit=False):
                    # delete the put queue
                    with page_put_queue.mutex:
                        page_put_queue.all_tasks_done.notify_all()
                        page_put_queue.queue.clear()
                        page_put_queue.not_full.notify_all()
                    break

    # only need one drop() call because all throttles use the same global pid
    with suppress(IndexError):
        list(_sites.values())[0].throttle.drop()
        log('Dropped throttle(s).')
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    featured = False
    gen = None

    # Process global args and prepare generator args parser
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()

    for arg in local_args:
        if arg == '-featured':
            featured = True
        elif genFactory.handleArg(arg):
            # Handled by the generator factory; nothing else to do.
            pass

    mysite = pywikibot.Site()
    # Policy guard: nl.wikipedia forbids bot-fixing of redirects.
    if mysite.sitename == 'wikipedia:nl':
        pywikibot.output(
            color_format(
                '{lightred}There is consensus on the Dutch Wikipedia that '
                'bots should not be used to fix redirects.{default}'))
        return

    if featured:
        # Resolve the featured-articles category via the data repository.
        repo = mysite.data_repository()
        if repo:
            dp = pywikibot.ItemPage(repo, featured_articles)
            try:
                ref = pywikibot.Category(mysite, dp.getSitelink(mysite))
            except pywikibot.NoPage:
                pass
            else:
                gen = ref.articles(namespaces=0, content=True)
        if not gen:
            suggest_help(
                unknown_parameters=['-featured'],
                additional_text='Option is not available for this site.')
            return False
    else:
        gen = genFactory.getCombinedGenerator()
        if gen:
            gen = mysite.preloadpages(gen)
    if gen:
        bot = FixingRedirectBot(generator=gen)
        bot.run()
        return True
    else:
        suggest_help(missing_generator=True)
        return False
def transfer_allowed(self, image):
    """Check whether transfer is allowed."""
    repo = self.opt.target.image_repository()
    already_shared = (image.file_is_shared()
                      and image.site.image_repository() == repo)
    if not already_shared:
        return True
    # Nothing to do: the file already lives on the target repository.
    pywikibot.output(color_format(
        '{yellow}The image is already shared on {}.{default}', repo))
    return False
def find_alternates(filename, script_paths):
    """Search for similar filenames in the given script paths."""
    # Imports are local on purpose: this runs in the pwb bootstrap before
    # the framework is fully importable at module level.
    from pywikibot import config, input_choice, output
    from pywikibot.bot import ShowingListOption, QuitKeyboardInterrupt
    from pywikibot.tools.formatter import color_format

    assert config.pwb_close_matches > 0, \
        'config.pwb_close_matches must be greater than 0'
    # NOTE(review): the message says range [0, 1] but the check is
    # exclusive on both ends - confirm intended wording.
    assert 0.0 < config.pwb_cut_off < 1.0, \
        'config.pwb_cut_off must be a float in range [0, 1]'

    print('ERROR: {} not found! Misspelling?'.format(filename),
          file=sys.stderr)

    # Map script name (without .py) -> full path, over all search paths.
    scripts = {}
    script_paths = [['.']] + script_paths  # add current directory
    for path in script_paths:
        for script_name in os.listdir(os.path.join(*path)):
            # remove .py for better matching
            name, _, suffix = script_name.rpartition('.')
            if suffix == 'py' and not name.startswith('__'):
                scripts[name] = os.path.join(*(path + [script_name]))

    filename = filename[:-3]  # strip the '.py' of the requested name
    similar_scripts = get_close_matches(filename, scripts,
                                        config.pwb_close_matches,
                                        config.pwb_cut_off)
    if not similar_scripts:
        return None

    if len(similar_scripts) == 1:
        # Single candidate: auto-start after a cancellable delay.
        script = similar_scripts[0]
        wait_time = config.pwb_autostart_waittime
        output(
            color_format(
                'NOTE: Starting the most similar script '
                '{lightyellow}{0}.py{default}\n'
                '      in {1} seconds; type CTRL-C to stop.',
                script, wait_time))
        try:
            sleep(wait_time)  # Wait a bit to let it be cancelled
        except KeyboardInterrupt:
            return None
    else:
        # Several candidates: let the operator choose.
        msg = '\nThe most similar scripts are:'
        alternatives = ShowingListOption(similar_scripts, pre=msg, post='')
        try:
            prefix, script = input_choice('Which script to be run:',
                                          alternatives, default='1')
        except QuitKeyboardInterrupt:
            return None
        print()
    return scripts[script]
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    featured = False
    gen = None

    # Process global args and prepare generator args parser
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()

    for arg in local_args:
        if arg == '-featured':
            featured = True
        elif genFactory.handleArg(arg):
            # Consumed by the generator factory.
            pass

    mysite = pywikibot.Site()
    # Policy guard: nl.wikipedia forbids bot-fixing of redirects.
    if mysite.sitename == 'wikipedia:nl':
        pywikibot.output(color_format(
            '{lightred}There is consensus on the Dutch Wikipedia that '
            'bots should not be used to fix redirects.{default}'))
        return

    if featured:
        # Resolve the featured-articles category via the data repository.
        repo = mysite.data_repository()
        if repo:
            dp = pywikibot.ItemPage(repo, featured_articles)
            try:
                ref = pywikibot.Category(mysite, dp.getSitelink(mysite))
            except pywikibot.NoPage:
                pass
            else:
                gen = ref.articles(namespaces=0, content=True)
        if not gen:
            suggest_help(
                unknown_parameters=['-featured'],
                additional_text='Option is not available for this site.')
            return False
    else:
        gen = genFactory.getCombinedGenerator()
        if gen:
            gen = mysite.preloadpages(gen)
    if gen:
        bot = FixingRedirectBot(generator=gen)
        bot.run()
        return True
    else:
        suggest_help(missing_generator=True)
        return False
def test_marker(self):
    r"""Test that the \03 marker is only allowed in front of colors."""
    # Accepted: the marker immediately precedes a color field.
    self.assertEqual(
        formatter.color_format('{0}\03{black}', 42), '42\03{black}')
    # Rejected: marker in front of a normal replacement field.
    for template in ('\03{0}{black}', '{0}\03before{black}'):
        self.assertRaisesRegex(ValueError, r'.*\\03',
                               formatter.color_format, template, 42)
def find_alternates(filename, script_paths):
    """Search for similar filenames in the given script paths."""
    # Imports are local on purpose: this runs in the pwb bootstrap before
    # the framework is fully importable at module level.
    from pywikibot import config, input_choice, output
    from pywikibot.bot import ShowingListOption, QuitKeyboardInterrupt
    from pywikibot.tools.formatter import color_format

    assert config.pwb_close_matches > 0, \
        'config.pwb_close_matches must be greater than 0'
    # NOTE(review): the message says range [0, 1] but the check is
    # exclusive on both ends - confirm intended wording.
    assert 0.0 < config.pwb_cut_off < 1.0, \
        'config.pwb_cut_off must be a float in range [0, 1]'

    print('ERROR: {} not found! Misspelling?'.format(filename),
          file=sys.stderr)

    # Map script name (without .py) -> full path; script_paths entries
    # are dotted package names, e.g. 'scripts.archive'.
    scripts = {}
    for file_package in script_paths:
        path = file_package.split('.')
        for script_name in os.listdir(os.path.join(*path)):
            # remove .py for better matching
            name, _, suffix = script_name.rpartition('.')
            if suffix == 'py' and not name.startswith('__'):
                scripts[name] = os.path.join(*(path + [script_name]))

    filename = filename[:-3]  # strip the '.py' of the requested name
    similar_scripts = get_close_matches(filename, scripts,
                                        config.pwb_close_matches,
                                        config.pwb_cut_off)
    if not similar_scripts:
        return None

    if len(similar_scripts) == 1:
        # Single candidate: auto-start after a cancellable delay.
        script = similar_scripts[0]
        wait_time = config.pwb_autostart_waittime
        output(color_format(
            'NOTE: Starting the most similar script '
            '{lightyellow}{0}.py{default}\n'
            '      in {1} seconds; type CTRL-C to stop.',
            script, wait_time))
        try:
            sleep(wait_time)  # Wait a bit to let it be cancelled
        except KeyboardInterrupt:
            return None
        return scripts[script]

    # Several candidates: let the operator choose.
    msg = '\nThe most similar scripts are:'
    alternatives = ShowingListOption(similar_scripts, pre=msg, post='')
    try:
        prefix, script = input_choice('Which script to be run:',
                                      alternatives, default='1')
    except QuitKeyboardInterrupt:
        return None
    print()
    # ShowingListOption yields the selection as a sequence; take its
    # first element as the script name.
    return scripts[script[0]]
def main(*args: str) -> None:
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    :param args: command line arguments
    """
    featured = False
    options = {}
    gen = None

    # Process global args and prepare generator args parser
    gen_factory = pagegenerators.GeneratorFactory()
    local_args = pywikibot.handle_args(args)
    local_args = gen_factory.handle_args(local_args)

    unknown = []
    for arg in local_args:
        if arg == '-featured':
            featured = True
        elif arg in ('-ignoremoves', '-overwrite'):
            options[arg[1:]] = True
        else:
            unknown.append(arg)
    # NOTE(review): the return value is ignored, so the bot keeps running
    # even when unknown parameters were given - confirm this is intended.
    suggest_help(unknown_parameters=unknown)

    mysite = pywikibot.Site()
    # Policy guard: nl.wikipedia forbids bot-fixing of redirects.
    if mysite.sitename == 'wikipedia:nl':
        pywikibot.output(
            color_format(
                '{lightred}There is consensus on the Dutch Wikipedia that '
                'bots should not be used to fix redirects.{default}'))
        return

    if featured:
        ref = mysite.page_from_repository(FEATURED_ARTICLES)
        if ref is not None:
            gen = ref.articles(namespaces=0, content=True)
        if not gen:
            suggest_help(
                unknown_parameters=['-featured'],
                additional_text='Option is not available for this site.')
            return
    else:
        gen = gen_factory.getCombinedGenerator(preload=True)
    if gen:
        bot = FixingRedirectBot(generator=gen, **options)
        bot.run()
    else:
        suggest_help(missing_generator=True)
def generator(self) -> Generator[pywikibot.Page, None, None]:
    """Generator to retrieve misspelling pages or misspelling redirects."""
    # Template names configured for this wiki (str or sequence), if any.
    templates = self.misspelling_templates.get(self.site.sitename)
    # Misspelling categories resolved via the data repository.
    categories = [cat
                  for cat in (self.site.page_from_repository(item)
                              for item in self.misspelling_categories)
                  if cat is not None]

    if templates:
        pywikibot.output(color_format(
            '{yellow}Working on templates...{default}'))
        if isinstance(templates, str):
            templates = (templates, )

        # One reference-generator per misspelling template.
        generators = (
            pywikibot.Page(self.site, template_name, ns=10).getReferences(
                follow_redirects=False, only_template_inclusion=True)
            for template_name in templates
        )

        if self.opt.start:
            # -start cannot be honored for template-driven wikis; the
            # generators above are still used.
            pywikibot.output(
                '-start parameter is not supported on this wiki\n'
                'because templates are used for misspellings.')
    elif categories:
        pywikibot.output(color_format(
            '{yellow}Working on categories...{default}'))
        generators = (
            pagegenerators.CategorizedPageGenerator(
                cat, recurse=True, start=self.opt.start
            ) for cat in categories
        )
    else:
        # Neither templates nor categories configured for this site.
        pywikibot.output(HELP_MSG.format(site=self.site))
        return

    yield from pagegenerators.PreloadingGenerator(chain(*generators))
def _flush(stop=True):
    """
    Drop this process from the throttle log, after pending threads finish.

    Wait for the page-putter to flush its queue. Also drop this process from
    the throttle log. Called automatically at Python exit.
    """
    _logger = "wiki"
    debug('_flush() called', _logger)

    def remaining():
        # Pages still queued and an ETA based on the configured throttle.
        remainingPages = page_put_queue.qsize()
        if stop:
            # -1 because we added a None element to stop the queue
            remainingPages -= 1
        remainingSeconds = datetime.timedelta(
            seconds=(remainingPages * config.put_throttle))
        return (remainingPages, remainingSeconds)

    if stop:
        # None task element leaves async_manager
        page_put_queue.put((None, [], {}))

    num, sec = remaining()
    if num > 0 and sec.total_seconds() > config.noisysleep:
        output(
            color_format(
                '{lightblue}Waiting for {num} pages to be put. '
                'Estimated time remaining: {sec}{default}', num=num, sec=sec))

    # Thread.isAlive() was removed in Python 3.9; use is_alive().
    while _putthread.is_alive() and page_put_queue.qsize() > 0:
        try:
            _putthread.join(1)
        except KeyboardInterrupt:
            if input_yn('There are {0} pages remaining in the queue. '
                        'Estimated time remaining: {1}\nReally exit?'
                        ''.format(*remaining()),
                        default=False, automatic_quit=False):
                return

    # only need one drop() call because all throttles use the same global pid
    try:
        list(_sites.values())[0].throttle.drop()
        log(u"Dropped throttle(s).")
    except IndexError:
        pass
def stopme():
    """Drop this process from the throttle log, after pending threads finish.

    Can be called manually if desired, but if not, will be called
    automatically at Python exit.
    """
    global stopped
    _logger = "wiki"
    if not stopped:
        debug(u"stopme() called", _logger)

        def remaining():
            # -1 because we added a None element to stop the queue
            remainingPages = page_put_queue.qsize() - 1
            remainingSeconds = datetime.timedelta(
                seconds=(remainingPages * config.put_throttle))
            return (remainingPages, remainingSeconds)

        # None task element leaves async_manager
        page_put_queue.put((None, [], {}))
        stopped = True

        if page_put_queue.qsize() > 1:
            num, sec = remaining()
            output(
                color_format(
                    '{lightblue}Waiting for {num} pages to be put. '
                    'Estimated time remaining: {sec}{default}',
                    num=num, sec=sec))

        # Thread.isAlive() was removed in Python 3.9; use is_alive().
        while _putthread.is_alive():
            try:
                _putthread.join(1)
            except KeyboardInterrupt:
                if input_yn('There are %i pages remaining in the queue. '
                            'Estimated time remaining: %s\nReally exit?'
                            % remaining(),
                            default=False, automatic_quit=False):
                    return

    # only need one drop() call because all throttles use the same global pid
    try:
        list(_sites.values())[0].throttle.drop()
        log(u"Dropped throttle(s).")
    except IndexError:
        pass
def revert(self, item):
    """Revert a single item.

    Either restores the previous revision text (default) or performs a
    server-side rollback. Returns the edit comment / status string on
    success, False when there is nothing to revert or rollback failed.
    """
    page = pywikibot.Page(self.site, item['title'])
    # Need at least two revisions: current one and the one to restore.
    history = list(page.revisions(total=2))
    if len(history) > 1:
        rev = history[1]
    else:
        return False

    comment = i18n.twtranslate(self.site, 'revertbot-revert', {
        'revid': rev.revid,
        'author': rev.user,
        'timestamp': rev.timestamp
    })
    additional_comment = self.getOption('comment')
    if additional_comment:
        comment += ': ' + additional_comment

    pywikibot.output(
        color_format(
            '\n\n>>> {lightpurple}{0}{default} <<<',
            page.title(as_link=True, force_interwiki=True, textlink=True)))

    if not self.getOption('rollback'):
        # Plain revert: save the previous revision's text again.
        old = page.text
        page.text = page.getOldVersion(rev.revid)
        pywikibot.showDiff(old, page.text)
        page.save(comment)
        return comment

    # Rollback via the API (reverts all consecutive edits by self.user).
    try:
        pywikibot.data.api.Request(self.site, parameters={
            'action': 'rollback',
            'title': page,
            'user': self.user,
            'token': rev.rollbacktoken,
            'markbot': True
        }).submit()
    except pywikibot.data.api.APIError as e:
        if e.code == 'badtoken':
            pywikibot.error(
                'There was an API token error rollbacking the edit')
        else:
            pywikibot.exception()
        return False
    return 'The edit(s) made in {0} by {1} was rollbacked'.format(
        page.title(), self.user)
def changelang(self, page):
    """Set page language."""
    # Fetch a csrf token and fire the setpagelanguage API action.
    csrf_token = self.site.get_tokens(['csrf']).get('csrf')
    request = self.site._simple_request(
        action='setpagelanguage',
        title=page.title(),
        lang=self.opt.setlang,
        token=csrf_token)
    request.submit()
    pywikibot.output(
        color_format(
            '{lightpurple}{0}{default}: Setting '
            'page language to {green}{1}{default}',
            page.title(as_link=True), self.opt.setlang))
def __init__(self, *arg):
    """Initializer.

    Sets up the IRC bot, redirects Lua's print into pywikibot.output,
    validates and preloads the config wiki pages, and starts the
    crontab timer thread.
    """
    pywikibot.output(
        color_format('{lightgreen}* Initialization of bot{default}'))
    pywikibot.botirc.IRCBot.__init__(self, *arg)

    # init environment with minimal changes (try to do as less as possible)
    # - Lua -
    if lua:
        pywikibot.output('** Redirecting Lua print in order to catch it')
        lua.execute('__print = print')
        lua.execute('print = python.eval("pywikibot.output")')
    # It may be useful in debugging to install the 'print' builtin
    # as the 'print' function in lua. To do this:
    # lua.execute('print = python.builtins.print')

    # init constants: the shell-config and crontab-config wiki pages
    templ = pywikibot.Page(self.site, bot_config['ConfCSSshell'])
    cron = pywikibot.Page(self.site, bot_config['ConfCSScrontab'])

    self.templ = templ.title()
    self.cron = cron.title()
    self.refs = {
        self.templ: templ,
        self.cron: cron,
    }

    pywikibot.output('** Pre-loading all relevant page contents')
    for item in self.refs:
        # First check if page is protected, reject any data if not
        parts = self.refs[item].title().lower().rsplit('.')
        # Check the LAST dot-separated part: with more than one dot
        # (e.g. 'a.b.css'), parts[1] would test the wrong element and
        # wrongly reject a valid .css/.js page.
        if len(parts) == 1 or parts[-1] not in ['css', 'js']:
            raise ValueError('{0} config {1} = {2} is not a secure page; '
                             'it should be a css or js userpage which are '
                             'automatically semi-protected.'.format(
                                 self.__class__.__name__, item,
                                 self.refs[item]))
        try:
            self.refs[item].get(force=True)  # load all page contents
        except pywikibot.NoPage:
            pywikibot.error(
                "The configuration page {0} doesn't exists".format(
                    self.refs[item].title(as_link=True)))
            raise

    # init background timer
    pywikibot.output('** Starting crontab background timer thread')
    self.on_timer()
def _flush(stop=True):
    """
    Drop this process from the throttle log, after pending threads finish.

    Wait for the page-putter to flush its queue. Also drop this process from
    the throttle log. Called automatically at Python exit.
    """
    _logger = "wiki"
    debug('_flush() called', _logger)

    def remaining():
        # Pages still queued and an ETA based on the configured throttle.
        remainingPages = page_put_queue.qsize()
        if stop:
            # -1 because we added a None element to stop the queue
            remainingPages -= 1
        remainingSeconds = datetime.timedelta(
            seconds=(remainingPages * config.put_throttle))
        return (remainingPages, remainingSeconds)

    if stop:
        # None task element leaves async_manager
        page_put_queue.put((None, [], {}))

    num, sec = remaining()
    if num > 0 and sec.total_seconds() > config.noisysleep:
        output(color_format(
            '{lightblue}Waiting for {num} pages to be put. '
            'Estimated time remaining: {sec}{default}', num=num, sec=sec))

    # Thread.isAlive() was removed in Python 3.9; use is_alive().
    while _putthread.is_alive() and page_put_queue.qsize() > 0:
        try:
            _putthread.join(1)
        except KeyboardInterrupt:
            if input_yn('There are {0} pages remaining in the queue. '
                        'Estimated time remaining: {1}\nReally exit?'
                        ''.format(*remaining()),
                        default=False, automatic_quit=False):
                return

    # only need one drop() call because all throttles use the same global pid
    try:
        list(_sites.values())[0].throttle.drop()
        log(u"Dropped throttle(s).")
    except IndexError:
        pass
def stopme():
    """
    Drop this process from the throttle log, after pending threads finish.

    Can be called manually if desired, but if not, will be called
    automatically at Python exit.
    """
    global stopped
    _logger = "wiki"
    if not stopped:
        debug(u"stopme() called", _logger)

        def remaining():
            # -1 because we added a None element to stop the queue
            remainingPages = page_put_queue.qsize() - 1
            remainingSeconds = datetime.timedelta(
                seconds=(remainingPages * config.put_throttle))
            return (remainingPages, remainingSeconds)

        # None task element leaves async_manager
        page_put_queue.put((None, [], {}))
        stopped = True

        if page_put_queue.qsize() > 1:
            num, sec = remaining()
            output(color_format(
                '{lightblue}Waiting for {num} pages to be put. '
                'Estimated time remaining: {sec}{default}',
                num=num, sec=sec))

        # Thread.isAlive() was removed in Python 3.9; use is_alive().
        while _putthread.is_alive():
            try:
                _putthread.join(1)
            except KeyboardInterrupt:
                if input_yn('There are %i pages remaining in the queue. '
                            'Estimated time remaining: %s\nReally exit?'
                            % remaining(),
                            default=False, automatic_quit=False):
                    return

    # only need one drop() call because all throttles use the same global pid
    try:
        list(_sites.values())[0].throttle.drop()
        log(u"Dropped throttle(s).")
    except IndexError:
        pass
def edit_existing(player, site, always, error_count, page_count):
    """Update the player infobox on an existing wiki page.

    Shows a diff and asks for confirmation (unless *always*).
    Returns an updated (page_count, error_count, always) tuple.
    """
    # Find the infobox??
    page = pwb.Page(site, player.name.replace(' ', '_'))
    # Fetch the wikitext once and reuse it: the original called
    # page.get() twice, performing two network fetches.
    text = page.get()
    wtppage = wtp.parse(text)
    # enumerate/idx was unused in the original comprehension.
    infobox = [element for element in wtppage.templates
               if 'Player' in element.name]
    infobox = process_infobox(player, infobox)
    newtext = wtppage.string

    if text == newtext:
        print(f'skipping {player.name}')
        return (page_count, error_count, always)

    pwb.output(color_format(
        '\n\n>>> {lightpurple}{0}{default} <<<', page.title()))
    pwb.showDiff(text, newtext)

    while True:
        # Let's put the changes.
        if not always:
            try:
                choice = pwb.input_choice(
                    'Do you want to accept these changes?',
                    [('Yes', 'y'), ('No', 'n'), ('All', 'a'),
                     ('open in Browser', 'b')], 'n')
            except QuitKeyboardInterrupt:
                sys.exit('User quit bot run.')
            if choice == 'a':
                always = True
            elif choice == 'n':
                return (page_count, error_count, always)
            elif choice == 'b':
                pwb.bot.open_webbrowser(page)
        # When 'always' is set, short-circuiting avoids reading an
        # unassigned 'choice' (e.g. on the first pass with always=True).
        if always or choice == 'y':
            result = page.put(newtext, summary='Update player infobox',
                              asynchronous=True)
            if result is None:
                return (page_count + 1, error_count, always)
            print(f'Error occurred! Try {player.name} again.')
            return (page_count, error_count + 1, always)
def _ColorCodeWordScreen(self, word):
    """Return *word* wrapped in terminal color codes marking script runs.

    Runs of local-alphabet letters get the local color, runs of Latin
    letters the Latin color; characters in neither set keep the current
    color.  The first color emitted matches the first character.
    """
    in_local = word[0] in self.localLtr
    # Open with the color of the first character; characters outside both
    # alphabets start out colored as Latin, exactly like the original.
    pieces = [self.colorFormatLocalColor if in_local
              else self.colorFormatLatinColor]
    for ch in word:
        if ch in self.localLtr:
            if not in_local:
                pieces.append(self.colorFormatLocalColor)
                in_local = True
        elif ch in self.latLtr:
            if in_local:
                pieces.append(self.colorFormatLatinColor)
                in_local = False
        pieces.append(ch)
    pieces.append(self.colorFormatSuffix)
    return formatter.color_format(''.join(pieces))
def featuredArticles(self, site, task, cache):
    """Yield articles listed as featured for *task* on *site*.

    Cached titles, subpages and titles sorting before the 'afterpage'
    option are skipped.
    """
    articles = []
    # The source configuration lives in a module-level dict named
    # '<task>_name' (looked up via globals()).
    info = globals()[task + '_name']
    if task == 'lists':
        code = site.code
    else:
        code = 'wikidata'
    try:
        method = info[code][0]
    except KeyError:
        pywikibot.error(
            "language %s doesn't has %s category source." % (code, task))
        return
    name = info[code][1]
    # hide #-sorted items on en-wiki
    try:
        hide = info[code][2]
    except IndexError:
        hide = None
    for p in method(site, name, hide):
        if p.namespace() == 0:  # Article
            articles.append(p)
        # Article talk (like in English)
        elif p.namespace() == 1 and site.code != 'el':
            # Use the talk page's subject page instead.
            articles.append(pywikibot.Page(p.site,
                                           p.title(with_ns=False)))
    pywikibot.output(color_format(
        '{lightred}** {0} has {1} {2} articles{default}',
        site, len(articles), task))
    while articles:
        p = articles.pop(0)
        if p.title() < self.getOption('afterpage'):
            continue
        if '/' in p.title() and p.namespace() != 0:
            pywikibot.output('%s is a subpage' % p.title())
            continue
        if p.title() in cache:
            pywikibot.output('(cached) %s -> %s' % (
                p.title(), cache[p.title()]))
            continue
        yield p
def featuredArticles(self, site, task, cache):
    """Yield articles listed as featured for *task* on *site*.

    Cached titles, subpages and titles sorting before the 'afterpage'
    option are skipped.
    """
    articles = []
    # The source configuration lives in a module-level dict named
    # '<task>_name' (looked up via globals()).
    info = globals()[task + '_name']
    if task == 'lists':
        code = site.code
    else:
        code = 'wikidata'
    try:
        method = info[code][0]
    except KeyError:
        pywikibot.error(
            u'language %s doesn\'t has %s category source.' % (code, task))
        return
    name = info[code][1]
    # hide #-sorted items on en-wiki
    try:
        hide = info[code][2]
    except IndexError:
        hide = None
    for p in method(site, name, hide):
        if p.namespace() == 0:  # Article
            articles.append(p)
        # Article talk (like in English)
        elif p.namespace() == 1 and site.code != 'el':
            # Use the talk page's subject page instead.
            articles.append(pywikibot.Page(p.site,
                                           p.title(withNamespace=False)))
    pywikibot.output(color_format(
        '{lightred}** {0} has {1} {2} articles{default}',
        site, len(articles), task))
    while articles:
        p = articles.pop(0)
        if p.title() < self.getOption('afterpage'):
            continue
        if u"/" in p.title() and p.namespace() != 0:
            pywikibot.output(u"%s is a subpage" % p.title())
            continue
        if p.title() in cache:
            pywikibot.output(u"(cached) %s -> %s" % (p.title(),
                                                     cache[p.title()]))
            continue
        yield p
def revert(self, item) -> Union[str, bool]:
    """Revert a single item.

    Returns the edit summary used on success, False when there is
    nothing to revert or the rollback failed.
    """
    page = pywikibot.Page(self.site, item['title'])
    history = list(page.revisions(total=2))
    # A page with only one revision has nothing to revert to.
    if len(history) <= 1:
        return False
    rev = history[1]  # the revision to restore (the one before the latest)
    pywikibot.output(
        color_format(
            '\n\n>>> {lightpurple}{0}{default} <<<',
            page.title(as_link=True, force_interwiki=True, textlink=True)))
    if not self.opt.rollback:
        # Manual revert: save the previous revision's text with an
        # i18n edit summary.
        comment = i18n.twtranslate(self.site, 'revertbot-revert', {
            'revid': rev.revid,
            'author': rev.user,
            'timestamp': rev.timestamp
        })
        if self.opt.comment:
            comment += ': ' + self.opt.comment
        old = page.text
        page.text = page.getOldVersion(rev.revid)
        pywikibot.showDiff(old, page.text)
        page.save(comment)
        return comment
    try:
        # Proper API rollback of the latest user's edits.
        self.site.rollbackpage(page, user=self.user, markbot=True)
    except APIError as e:
        if e.code == 'badtoken':
            pywikibot.error(
                'There was an API token error rollbacking the edit')
            return False
    except Error:
        # Fall through to the generic exception report below.
        pass
    else:
        return 'The edit(s) made in {} by {} was rollbacked'.format(
            page.title(), self.user)
    pywikibot.exception()
    return False
def __init__(self, *arg):
    """Constructor.

    Redirect Lua's print into pywikibot's output, pre-load the shell
    and crontab configuration pages, and start the crontab timer
    thread.  Raises ValueError if a configuration page is not a css
    or js (semi-protected) userpage.
    """
    pywikibot.output(color_format(
        '{lightgreen}* Initialization of bot{default}'))
    pywikibot.botirc.IRCBot.__init__(self, *arg)

    # init environment with minimal changes (try to do as less as possible)
    # - Lua -
    pywikibot.output(u'** Redirecting Lua print in order to catch it')
    lua.execute('__print = print')
    lua.execute('print = python.globals().pywikibot.output')
    # It may be useful in debugging to install the 'print' builtin
    # as the 'print' function in lua. To do this:
    # lua.execute('print = python.builtins().print')

    # init constants
    templ = pywikibot.Page(self.site, bot_config['ConfCSSshell'])
    cron = pywikibot.Page(self.site, bot_config['ConfCSScrontab'])
    self.templ = templ.title()
    self.cron = cron.title()
    self.refs = {self.templ: templ,
                 self.cron: cron,
                 }
    pywikibot.output(u'** Pre-loading all relevant page contents')
    for item in self.refs:
        # First check if page is protected, reject any data if not.
        # BUGFIX: rsplit('.') produces extensions WITHOUT the dot, so the
        # previous comparison against ['.css', '.js'] rejected every page;
        # split only once from the right and compare the bare extension.
        parts = self.refs[item].title().lower().rsplit('.', 1)
        if len(parts) == 1 or parts[1] not in ('css', 'js'):
            raise ValueError('%s config %s = %s is not a secure page; '
                             'it should be a css or js userpage which are '
                             'automatically semi-protected.'
                             % (self.__class__.__name__, item,
                                self.refs[item]))
        try:
            self.refs[item].get(force=True)   # load all page contents
        except pywikibot.NoPage:
            # BUGFIX: corrected typos in the error message
            # ("configuation ... doesn't exists").
            pywikibot.error("The configuration page %s doesn't exist"
                            % self.refs[item].title(asLink=True))
            raise
    # init background timer
    pywikibot.output(u'** Starting crontab background timer thread')
    self.on_timer()
def parse_archive(f: str, output: str = "data.lua"):
    """Parse the archive file *f* into comic-title data.

    NOTE(review): the parsing loops below are stubs (``pass``), so
    ``res`` and ``m`` always stay empty — confirm whether this is
    work in progress.

    :param f: path of the archive file to read (UTF-8, errors ignored)
    :param output: intended output file name (currently unused)
    """
    ls = []
    with open(f, encoding='utf-8', errors='ignore') as tmp:
        ls = tmp.readlines()
    pywikibot.output("Parsing '{}'...".format(f))
    pywikibot.output("Number of lines: {}.".format(len(ls)))
    # parse every line to a list of tuples (<number>, <title>)
    res = []
    for line in ls:
        pass
    pywikibot.output("Got {} raw results.".format(len(res)))
    if DEBUG:
        pass
        # pywikibot.output(color_format("Missing comic {red}#{0}{default}.", n))
    m = {}
    pywikibot.output(color_format(
        "Got {aqua}{0}{default} comic titles after cleanup.", len(m)))
def treat_page(self): """Do the magic.""" # set origin origin = self.current_page.title() site = self.current_page.site # create redirect title if not self.getOption('reversed'): redir = pywikibot.Page(site, origin.replace('–', '-') .replace('—', '-')) else: redir = pywikibot.Page(site, origin.replace('-', '–')) # skip unchanged if redir.title() == origin: pywikibot.output('No need to process %s, skipping…' % redir.title()) # suggest -reversed parameter if '-' in origin and not self.getOption('reversed'): pywikibot.output('Consider using -reversed parameter ' 'for this particular page') else: # skip existing if redir.exists(): pywikibot.output('%s already exists, skipping…' % redir.title()) else: # confirm and save redirect if self.user_confirm( color_format( 'Redirect from {lightblue}{0}{default} doesn\'t exist ' 'yet.\nDo you want to create it?', redir.title())): # If summary option is None, it takes the default # i18n summary from i18n subdirectory with summary key. if self.getOption('summary'): summary = self.getOption('summary') else: summary = i18n.twtranslate(site, 'ndashredir-create', {'title': origin}) redir.set_redirect_target(self.current_page, create=True, summary=summary)
def revert(self, item):
    """Revert a single item.

    Returns the edit summary on success, False when there is nothing
    to revert or the rollback fails.
    """
    page = pywikibot.Page(self.site, item["title"])
    history = list(page.revisions(total=2))
    # A page with only one revision has nothing to revert to.
    if len(history) > 1:
        rev = history[1]
    else:
        return False
    comment = i18n.twtranslate(
        self.site,
        "revertbot-revert",
        {"revid": rev.revid, "author": rev.user, "timestamp": rev.timestamp}
    )
    if self.comment:
        comment += ": " + self.comment
    pywikibot.output(
        color_format(
            "\n\n>>> {lightpurple}{0}{default} <<<",
            page.title(asLink=True, forceInterwiki=True, textlink=True)
        )
    )
    if not self.rollback:
        # Manual revert: restore the previous revision's text.
        old = page.text
        page.text = rev.text
        pywikibot.showDiff(old, page.text)
        page.save(comment)
        return comment
    try:
        # Proper rollback via the MediaWiki API.
        pywikibot.data.api.Request(
            self.site,
            parameters={
                "action": "rollback",
                "title": page,
                "user": self.user,
                "token": rev.rollbacktoken,
                "markbot": True,
            },
        ).submit()
    except pywikibot.data.api.APIError as e:
        if e.code == "badtoken":
            pywikibot.error("There was an API token error rollbacking the edit")
        else:
            pywikibot.exception()
        return False
    return "The edit(s) made in %s by %s was rollbacked" % (page.title(), self.user)
def revert(self, item):
    """Revert the latest edit of the page named in *item*.

    Returns the edit summary on success, False when there is nothing
    to revert or the rollback fails.
    """
    page = pywikibot.Page(self.site, item['title'])
    history = list(page.revisions(total=2))
    # A page with only one revision has nothing to revert to.
    if len(history) > 1:
        rev = history[1]
    else:
        return False
    comment = i18n.twtranslate(self.site, 'revertbot-revert', {
        'revid': rev.revid,
        'author': rev.user,
        'timestamp': rev.timestamp
    })
    if self.comment:
        comment += ': ' + self.comment
    pywikibot.output(
        color_format(
            '\n\n>>> {lightpurple}{0}{default} <<<',
            page.title(asLink=True, forceInterwiki=True, textlink=True)))
    if not self.rollback:
        # Manual revert: restore the previous revision's text.
        old = page.text
        page.text = rev.text
        pywikibot.showDiff(old, page.text)
        page.save(comment)
        return comment
    try:
        # Proper rollback via the MediaWiki API.
        pywikibot.data.api.Request(self.site, parameters={
            'action': 'rollback',
            'title': page,
            'user': self.user,
            'token': rev.rollbacktoken,
            'markbot': True
        }).submit()
    except pywikibot.data.api.APIError as e:
        if e.code == 'badtoken':
            pywikibot.error(
                "There was an API token error rollbacking the edit")
        else:
            pywikibot.exception()
        return False
    return u"The edit(s) made in %s by %s was rollbacked" % (page.title(),
                                                             self.user)
def replace2(self, match):
    """Interactively decide how to rewrite the link captured in *match*.

    Returns the replacement wikitext, or the original text when the
    target is unknown or the user declines.
    """
    link = match.group(1)
    trail = match.group(2) or ''
    target = self.from_cache(link)
    if not target:
        # Unknown target: leave the link untouched.
        return match.group()
    left_spaces = len(link) - len(link.lstrip())
    right_spaces = len(link) - len(link.rstrip())
    if (link.lstrip() + trail).startswith(target):
        # Visible text already starts with the target title; keep the
        # remainder outside the brackets as link trail.
        rest = (link.lstrip() + trail)[len(target):]
        return '[[%s%s]]%s' % (' ' * left_spaces, target, rest)
    if self.onlypiped is True:
        # todo: user_interactor
        return match.group()
    options_list = [match.group()]
    if not trail:
        options_list.append(
            '[[%s%s%s]]' % (left_spaces * ' ', target, right_spaces * ' '))
    options_list.append('[[%s|%s%s]]' % (target, link, trail))
    options = [('%d %s' % (i, opt), str(i))
               for i, opt in enumerate(options_list, start=1)
               ] + [('Do not replace unpiped links', 'n')]
    # Show up to 30 characters of context on either side of the match,
    # clipped at line boundaries.
    pre = match.string[max(0, match.start() - 30):match.start()
                       ].rpartition('\n')[2]
    post = match.string[match.end():match.end() + 30].partition('\n')[0]
    pywikibot.output(
        color_format('{0}{lightred}{1}{default}{2}',
                     pre, match.group(), post))
    choice = pywikibot.input_choice('Replace this link?', options,
                                    default='1', automatic_quit=False)
    if choice == 'n':
        # Remember the decision for later matches and keep the original.
        self.onlypiped = True
        choice = 1
    return options_list[int(choice) - 1]
def showStatus(n=0):
    """Output colorized status.

    The status code *n* selects both the color and the short label
    printed between brackets (no trailing newline).
    """
    # Status code -> (color name, label); unknown codes raise KeyError,
    # matching the original twin-dict lookup.
    status_info = {
        0: ('lightpurple', 'MSG'),
        1: ('lightaqua', 'NoAct'),
        2: ('lightgreen', 'Match'),
        3: ('lightyellow', 'Skip'),
        4: ('lightred', 'Warning'),
        5: ('lightblue', 'Done'),
    }
    col, label = status_info[n]
    pywikibot.output(color_format('{color}[{0:5}]{default} ',
                                  label, color=col),
                     newline=False)
def __init__(self, *arg):
    """Constructor.

    Redirect Lua's print into pywikibot's output, pre-load the shell
    and crontab configuration pages, and start the crontab timer
    thread.  Raises ValueError if a configuration page is not a css
    or js (semi-protected) userpage.
    """
    pywikibot.output(
        color_format('{lightgreen}* Initialization of bot{default}'))
    pywikibot.botirc.IRCBot.__init__(self, *arg)

    # init environment with minimal changes (try to do as less as possible)
    # - Lua -
    pywikibot.output(u'** Redirecting Lua print in order to catch it')
    lua.execute('__print = print')
    lua.execute('print = python.globals().pywikibot.output')
    # It may be useful in debugging to install the 'print' builtin
    # as the 'print' function in lua. To do this:
    # lua.execute('print = python.builtins().print')

    # init constants
    templ = pywikibot.Page(self.site, bot_config['ConfCSSshell'])
    cron = pywikibot.Page(self.site, bot_config['ConfCSScrontab'])
    self.templ = templ.title()
    self.cron = cron.title()
    self.refs = {
        self.templ: templ,
        self.cron: cron,
    }
    pywikibot.output(u'** Pre-loading all relevant page contents')
    for item in self.refs:
        # security; first check if page is protected, reject any data if not
        if os.path.splitext(
                self.refs[item].title().lower())[1] not in ['.css', '.js']:
            raise ValueError(
                u'%s config %s = %s is not a secure page; '
                u'it should be a css or js userpage which are '
                u'automatically semi-protected.'
                % (self.__class__.__name__, item, self.refs[item]))
        self.refs[item].get(force=True)   # load all page contents
    # init background timer
    pywikibot.output(u'** Starting crontab background timer thread')
    self.on_timer()
def summary_hook(self, match, replaced):
    """Choose a replacement for *match* and record it in *replaced*.

    Returns the new text for the matched span; appends an
    'old → new' fragment to *replaced* for the edit summary.
    """
    def underscores(string):
        # Make leading/trailing blanks visible in the summary fragment.
        if string.startswith(' '):
            string = '_' + string[1:]
        if string.endswith(' '):
            string = string[:-1] + '_'
        return string

    new = old = match.group()
    if self.needs_decision():
        options = [('keep', 'k')]
        replacements = []
        for i, repl in enumerate(self.replacements, start=1):
            replacement = match.expand(repl)
            replacements.append(replacement)
            options.append(
                ('%s %s' % (i, underscores(replacement)), str(i)))
        # Show up to 30 characters of context around the match,
        # clipped at line boundaries.
        text = match.string
        pre = text[max(0, match.start() - 30):match.start()].rpartition('\n')[2]
        post = text[match.end():match.end() + 30].partition('\n')[0]
        pywikibot.output(
            color_format('{0}{lightred}{1}{default}{2}', pre, old, post))
        choice = pywikibot.input_choice('Choose the best replacement',
                                        options, automatic_quit=False,
                                        default='k')
        if choice != 'k':
            new = replacements[int(choice) - 1]
    else:
        # No decision needed: apply the first replacement directly.
        new = match.expand(self.replacements[0])
        if old == new:
            pywikibot.warning('No replacement done in string "%s"' % old)
    if old != new:
        # Record the change for the summary, escaping newlines.
        fragment = ' → '.join(
            underscores(re.sub('\n', r'\\n', i)) for i in (old, new))
        if fragment.lower() not in map(methodcaller('lower'), replaced):
            replaced.append(fragment)
    return new
def revert(self, item):
    """Revert a single item.

    Returns the edit summary on success, False when there is nothing
    to revert or the rollback fails.
    """
    page = pywikibot.Page(self.site, item['title'])
    history = list(page.revisions(total=2))
    # A page with only one revision has nothing to revert to.
    if len(history) > 1:
        rev = history[1]
    else:
        return False
    comment = i18n.twtranslate(
        self.site, 'revertbot-revert',
        {'revid': rev.revid, 'author': rev.user,
         'timestamp': rev.timestamp})
    if self.comment:
        comment += ': ' + self.comment
    pywikibot.output(color_format(
        '\n\n>>> {lightpurple}{0}{default} <<<',
        page.title(asLink=True, forceInterwiki=True, textlink=True)))
    if not self.rollback:
        # Manual revert: fetch and save the previous revision's text.
        old = page.text
        page.text = page.getOldVersion(rev.revid)
        pywikibot.showDiff(old, page.text)
        page.save(comment)
        return comment
    try:
        # Proper rollback via the MediaWiki API.
        pywikibot.data.api.Request(
            self.site,
            parameters={'action': 'rollback',
                        'title': page,
                        'user': self.user,
                        'token': rev.rollbacktoken,
                        'markbot': True}).submit()
    except pywikibot.data.api.APIError as e:
        if e.code == 'badtoken':
            pywikibot.error(
                'There was an API token error rollbacking the edit')
        else:
            pywikibot.exception()
        return False
    return 'The edit(s) made in %s by %s was rollbacked' % (page.title(),
                                                            self.user)
def run(self) -> None:
    """Review and save category-maintenance edits for parent and children.

    Shows a diff per page and asks (in Japanese) whether to post,
    skip, or edit the text; saves run asynchronously and completion
    is reported through the pending-title queues.
    """
    pages = [self.parent] + self.children
    for page in pages:
        original_text = page.text
        new_text = page.get_newtext()
        if original_text == new_text:
            # NOTE(review): 'break' stops processing ALL remaining pages
            # at the first unchanged one; 'continue' looks more plausible
            # here — confirm the intent.
            break
        while True:
            # Show the highlighted title and the proposed diff.
            pywikibot.output(color_format(
                '\n\n>>> {lightpurple}{0}{default} <<<', page.title()))
            pywikibot.showDiff(original_text, new_text)
            choice = pywikibot.input_choice('この変更を投稿しますか',
                                            [('はい', 'y'), ('いいえ', 'n'),
                                             ('エディタで編集する', 'e')])
            if choice == 'n':
                break
            if choice == 'e':
                editor = editarticle.TextEditor()
                as_edited = editor.edit(new_text)
                # Empty result means the user cancelled the editor.
                if as_edited:
                    new_text = as_edited
                continue
            if choice == 'y':
                page.text = new_text
                # Asynchronous save; the callback reports through the
                # pending-title queues drained below.
                page.save(
                    'Botによる: [[User:YuukinBot#作業内容2|カテゴリの整備]]',
                    asynchronous=True,
                    callback=self._async_callback,
                    quiet=True)
                self._pending_processing_titles.put(page.title(as_link=True))
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output('{0}{1}'.format(
                        proc_title,
                        'が投稿されました' if res else 'は投稿されませんでした'))
            break
    # Drain any remaining asynchronous save results before reporting.
    while not all((self._pending_processing_titles.empty(),
                   self._pending_processed_titles.empty())):
        proc_title, res = self._pending_processed_titles.get()
        pywikibot.output('{0}{1}'.format(
            proc_title,
            'が投稿されました' if res else 'は投稿されませんでした'))
    pywikibot.output(f'{self.changed_pages} ページ編集しました')
    pywikibot.output(f'{self.parent.title(as_link=True)} 関連の整備が完了しました')
def summary_hook(self, match, replaced):
    """Pick a replacement for *match* and log it in *replaced*.

    Returns the replacement text; appends an 'old → new' fragment to
    *replaced* for use in the edit summary.
    """
    def underscores(string):
        # Make leading/trailing blanks visible in the summary fragment.
        if string.startswith(' '):
            string = '_' + string[1:]
        if string.endswith(' '):
            string = string[:-1] + '_'
        return string

    new = old = match.group()
    if self.needsDecision():
        options = [('keep', 'k')]
        replacements = []
        for i, repl in enumerate(self.replacements, start=1):
            replacement = match.expand(repl)
            replacements.append(replacement)
            options.append(
                ('%s %s' % (i, underscores(replacement)), str(i))
            )
        # Show up to 30 characters of context around the match, clipped
        # at line boundaries.
        text = match.string
        pre = text[max(0, match.start() - 30):match.start()].rpartition('\n')[2]
        post = text[match.end():match.end() + 30].partition('\n')[0]
        pywikibot.output(color_format('{0}{lightred}{1}{default}{2}',
                                      pre, old, post))
        choice = pywikibot.input_choice('Choose the best replacement',
                                        options, automatic_quit=False,
                                        default='k')
        if choice != 'k':
            new = replacements[int(choice) - 1]
    else:
        # No decision needed: apply the first replacement directly.
        new = match.expand(self.replacements[0])
        if old == new:
            pywikibot.warning('No replacement done in string "%s"' % old)
    if old != new:
        # Escape newlines so the summary stays a single line.
        fragment = ' → '.join(underscores(re.sub('\n', r'\\n', i))
                              for i in (old, new))
        if fragment.lower() not in map(methodcaller('lower'), replaced):
            replaced.append(fragment)
    return new
def _generate_diff(self, hunks):
    """Generate a diff text for the given hunks.

    Emits a colored header line, the hunks' own diff text, and the
    unchanged context lines from ``self.a`` that surround and separate
    the hunks.
    """
    def context_lines(begin, stop):
        # Render unchanged source lines as ' <line>\n' context.
        return ''.join(' {0}\n'.format(src.rstrip())
                       for src in self.a[begin:stop])

    ctx_range = self._get_context_range(hunks)
    parts = [color_format('{aqua}{0}{default}\n{1}',
                          Hunk.get_header_text(*ctx_range),
                          context_lines(ctx_range[0][0],
                                        hunks[0].a_rng[0]))]
    prev = None
    for current in hunks:
        if prev:
            # Context between two consecutive hunks.
            parts.append(context_lines(prev.a_rng[1], current.a_rng[0]))
        parts.append(current.diff_text)
        prev = current
    parts.append(context_lines(hunks[-1].a_rng[1], ctx_range[0][1]))
    text = ''.join(parts)
    if self._replace_invisible:
        text = chars.replace_invisible(text)
    return text
def treat_page(self):
    """Process a single page.

    Highlights lines containing the spam URL in red with one line of
    context, then asks whether to delete them, edit the page, or skip.
    """
    text = self.current_page.text
    if self.spam_external_url not in text:
        return
    lines = text.split('\n')
    newpage = []
    lastok = ""
    for line in lines:
        if self.spam_external_url in line:
            # Print one line of context before a run of bad lines, then
            # the offending line highlighted in red.
            if lastok:
                pywikibot.output(lastok)
            pywikibot.output(color_format('{lightred}{0}{default}',
                                          line))
            # None marks "just saw a bad line" so the next good line is
            # printed as trailing context.
            lastok = None
        else:
            newpage.append(line)
            if line.strip():
                if lastok is None:
                    pywikibot.output(line)
                lastok = line
    if self.getOption('always'):
        answer = "y"
    else:
        answer = pywikibot.input_choice(
            u'\nDelete the red lines?',
            [('yes', 'y'), ('no', 'n'), ('edit', 'e')],
            'n', automatic_quit=False)
    if answer == "n":
        return
    elif answer == "e":
        editor = TextEditor()
        newtext = editor.edit(text, highlight=self.spam_external_url,
                              jumpIndex=text.find(self.spam_external_url))
    else:
        newtext = "\n".join(newpage)
    if newtext != text:
        self.put_current(newtext, summary=self.getOption('summary'))
def run(self):
    """Start the bot.

    Iterates the page generator, applies the configured replacements,
    shows a diff and asks the user whether to save (unless 'always'
    is set).  Saves are asynchronous; results are drained from the
    pending-titles queue.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            pywikibot.output(
                u'Skipping {0!s} because the title is on the exceptions list.'.format(page.title(asLink=True)))
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not page.canBeEdited():
                pywikibot.output(u"You can't edit page {0!s}".format(page.title(asLink=True)))
                continue
        except pywikibot.NoPage:
            pywikibot.output(u'Page {0!s} not found'.format(page.title(asLink=True)))
            continue
        applied = set()
        new_text = original_text
        while True:
            if self.isTextExcepted(new_text):
                pywikibot.output(u'Skipping %s because it contains text '
                                 u'that is on the exceptions list.'
                                 % page.title(asLink=True))
                break
            # In recursive mode, re-apply replacements until a fixed point.
            last_text = None
            while new_text != last_text:
                last_text = new_text
                new_text = self.apply_replacements(last_text, applied,
                                                   page)
                if not self.recursive:
                    break
            if new_text == original_text:
                pywikibot.output(u'No changes were necessary in {0!s}'.format(page.title(asLink=True)))
                break
            if hasattr(self, 'addedCat'):
                # Fetch only categories in wikitext, otherwise the others will
                # be explicitly added.
                cats = textlib.getCategoryLinks(new_text, site=page.site)
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = textlib.replaceCategoryLinks(new_text,
                                                            cats,
                                                            site=page.site)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(color_format(
                '\n\n>>> {lightpurple}{0}{default} <<<', page.title()))
            pywikibot.showDiff(original_text, new_text)
            if self.getOption('always'):
                break
            choice = pywikibot.input_choice(
                u'Do you want to accept these changes?',
                [('Yes', 'y'), ('No', 'n'), ('Edit', 'e'),
                 ('open in Browser', 'b'), ('all', 'a')],
                default='N')
            if choice == 'e':
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == 'b':
                pywikibot.bot.open_webbrowser(page)
                try:
                    # Re-fetch in case the user changed the page online.
                    original_text = page.get(get_redirect=True, force=True)
                except pywikibot.NoPage:
                    pywikibot.output(u'Page {0!s} has been deleted.'.format(page.title()))
                    break
                new_text = original_text
                continue
            if choice == 'a':
                self.options['always'] = True
            if choice == 'y':
                page.text = new_text
                # BUGFIX: 'async' is a reserved keyword since Python 3.7
                # and made this call a SyntaxError; pywikibot's Page.save
                # takes the parameter as 'asynchronous'.
                page.save(summary=self.generate_summary(applied),
                          asynchronous=True, callback=self._count_changes,
                          quiet=True)
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output('Page {0!s}{1!s} saved'.format(proc_title, '' if res else ' not'))
            # choice must be 'N'
            break
        if self.getOption('always') and new_text != original_text:
            try:
                page.text = new_text
                page.save(summary=self.generate_summary(applied),
                          callback=self._count_changes, quiet=True)
            except pywikibot.EditConflict:
                pywikibot.output(u'Skipping {0!s} because of edit conflict'.format(page.title()))
            except pywikibot.SpamfilterError as e:
                pywikibot.output(
                    u'Cannot change {0!s} because of blacklist entry {1!s}'.format(page.title(), e.url))
            except pywikibot.LockedPage:
                pywikibot.output(u'Skipping {0!s} (locked page)'.format(page.title()))
            except pywikibot.PageNotSaved as error:
                pywikibot.output(u'Error putting page: {0!s}'.format(error.args))
            # Drain asynchronous results periodically to bound the queue.
            if self._pending_processed_titles.qsize() > 50:
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output('Page {0!s}{1!s} saved'.format(proc_title, '' if res else ' not'))
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    options = {}
    message_properties = {}
    generator = None
    protections = {}
    default_level = 'sysop'
    # Maps generator option names to i18n summary message types.
    default_summaries = {
        'cat': 'category',
        'links': 'links',
        'ref': 'ref',
        'imageused': 'images',
        'file': 'simple',
    }
    # read command line parameters
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()
    site = pywikibot.Site()
    generator_type = None
    protection_levels = set(site.protection_levels())
    protection_types = site.protection_types()
    if '' in protection_levels:
        # '' means unrestricted; expose it to the user as 'all'.
        protection_levels.add('all')
    for arg in local_args:
        if arg == '-always':
            options['always'] = True
        elif arg.startswith('-summary'):
            if len(arg) == len('-summary'):
                # fill dummy value to prevent automatic generation
                options['summary'] = None
            else:
                options['summary'] = arg[len('-summary:'):]
        elif arg.startswith('-images'):
            pywikibot.output(color_format(
                '\n{lightred}-image option is deprecated. '
                'Please use -imagelinks instead.{default}\n'))
            local_args.append('-imagelinks' + arg[7:])
        elif arg.startswith('-unprotect'):
            default_level = 'all'
        elif arg.startswith('-default'):
            if len(arg) == len('-default'):
                default_level = 'sysop'
            else:
                default_level = arg[len('-default:'):]
        else:
            is_p_type = False
            if arg.startswith('-'):
                # '-<type>:<level>' sets the level for one protection type.
                delimiter = arg.find(':')
                if delimiter > 0:
                    p_type_arg = arg[1:delimiter]
                    level = arg[delimiter + 1:]
                    if p_type_arg in protection_types:
                        protections[p_type_arg] = level
                        is_p_type = True
            if not is_p_type:
                if not genFactory.handleArg(arg):
                    raise ValueError('Unknown parameter "{0}"'.format(arg))
                found = arg.find(':')
                # NOTE(review): str.find returns -1 when ':' is absent,
                # which is truthy, so this branch also fires for plain
                # arguments; confirm whether 'found > 0' was intended.
                if found:
                    message_properties.update({'cat': arg[found + 1:],
                                               'page': arg[found + 1:]})
                if 'summary' not in options:
                    generator_type = arg[1:found] if found > 0 else arg[1:]
    # Pick an automatic i18n summary based on the generator used.
    if generator_type in default_summaries:
        message_type = default_summaries[generator_type]
        if message_type == 'simple' or message_properties:
            if default_level == 'all':
                options['summary'] = i18n.twtranslate(
                    site, 'unprotect-{0}'.format(message_type),
                    message_properties)
            else:
                options['summary'] = i18n.twtranslate(
                    site, 'protect-{0}'.format(message_type),
                    message_properties)
    generator = genFactory.getCombinedGenerator()
    # We are just protecting pages, so we have no need of using a preloading
    # page generator to actually get the text of those pages.
    if generator:
        if default_level:
            default_level = check_protection_level('Default level',
                                                   default_level,
                                                   protection_levels)
        # set the default value for all
        # None (not the string 'none') will be ignored by Site.protect()
        combined_protections = dict(
            (p_type, default_level) for p_type in protection_types)
        for p_type, level in protections.items():
            level = check_protection_level(p_type, level,
                                           protection_levels,
                                           default_level)
            # '' is equivalent to 'all'
            if level == 'none' or level == '':
                level = 'all'
            combined_protections[p_type] = level
        if not options.get('summary'):
            options['summary'] = pywikibot.input(
                u'Enter a reason for the protection change:')
        bot = ProtectionRobot(generator, combined_protections, site,
                              **options)
        bot.run()
        return True
    else:
        pywikibot.bot.suggest_help(missing_generator=True)
        return False
def main(*args):
    """
    Process command line arguments and perform task.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    always = False
    namespaces = []
    spamSite = ''
    for arg in pywikibot.handle_args(args):
        if arg == "-always":
            always = True
        elif arg.startswith('-namespace:'):
            # Accept both numeric ids and namespace names.
            try:
                namespaces.append(int(arg[len('-namespace:'):]))
            except ValueError:
                namespaces.append(arg[len('-namespace:'):])
        else:
            spamSite = arg
    if not spamSite:
        pywikibot.bot.suggest_help(missing_parameters=['spam site'])
        return False

    mysite = pywikibot.Site()
    pages = mysite.exturlusage(spamSite, namespaces=namespaces, content=True)
    summary = i18n.twtranslate(mysite, 'spamremove-remove',
                               {'url': spamSite})
    # Count processed pages explicitly instead of probing locals() for
    # the loop variable after the loop (fragile hack in the old code).
    count = 0
    for count, p in enumerate(pages, 1):
        text = p.text
        if spamSite not in text:
            continue
        # Show the title of the page we're working on.
        # Highlight the title in purple.
        pywikibot.output(color_format(
            '\n\n>>> {lightpurple}{0}{default} <<<', p.title()))
        lines = text.split('\n')
        newpage = []
        lastok = ""
        for line in lines:
            if spamSite in line:
                # One line of context before a run of bad lines, then the
                # offending line highlighted in red.
                if lastok:
                    pywikibot.output(lastok)
                pywikibot.output(color_format('{lightred}{0}{default}',
                                              line))
                lastok = None
            else:
                newpage.append(line)
                if line.strip():
                    if lastok is None:
                        pywikibot.output(line)
                    lastok = line
        if always:
            answer = "y"
        else:
            answer = pywikibot.input_choice(
                u'\nDelete the red lines?',
                [('yes', 'y'), ('no', 'n'), ('edit', 'e')],
                'n', automatic_quit=False)
        if answer == "n":
            continue
        elif answer == "e":
            editor = TextEditor()
            newtext = editor.edit(text, highlight=spamSite,
                                  jumpIndex=text.find(spamSite))
        else:
            newtext = "\n".join(newpage)
        if newtext != text:
            p.text = newtext
            p.save(summary)
    if count == 0:
        pywikibot.output('No page found.')
    elif count == 1:
        # BUGFIX: singular form ("1 pages done." -> "1 page done.")
        pywikibot.output('1 page done.')
    else:
        pywikibot.output('%d pages done.' % count)
def assert_format(self, format_string, expected, *args, **kwargs):
    """Check that color_format produces *expected*, both value and type."""
    actual = formatter.color_format(format_string, *args, **kwargs)
    # Compare the value first, then make sure the result type matches
    # the expected one (str vs. subclass matters here).
    self.assertEqual(actual, expected)
    self.assertIsInstance(actual, type(expected))
def process_filename(self, file_url=None):
    """Return base filename portion of file_url.

    Interactively validates the name (forbidden characters, allowed
    extensions, collisions with existing files) and makes sure a
    non-empty description exists.  Returns None when the file is
    skipped or aborted.
    """
    if not file_url:
        file_url = self.url
        pywikibot.warning("file_url is not given. "
                          "Set to self.url by default.")
    always = self.getOption('always')
    # Isolate the pure name
    filename = file_url
    # Filename may be either a URL or a local file path
    if "://" in filename:
        # extract the path portion of the URL
        filename = urlparse(filename).path
    filename = os.path.basename(filename)
    if self.useFilename:
        filename = self.useFilename
    if not self.keepFilename:
        pywikibot.output(
            u"The filename on the target wiki will default to: %s"
            % filename)
        # Interactive renaming is incompatible with the 'always' option.
        assert not always
        newfn = pywikibot.input(
            u'Enter a better name, or press enter to accept:')
        if newfn != "":
            filename = newfn
    # FIXME: these 2 belong somewhere else, presumably in family
    # forbidden characters are handled by pywikibot/page.py
    forbidden = ':*?/\\'  # to be extended
    try:
        allowed_formats = self.targetSite.siteinfo.get(
            'fileextensions', get_default=False)
    except KeyError:
        allowed_formats = []
    else:
        allowed_formats = [item['ext'] for item in allowed_formats]
    # ask until it's valid
    first_check = True
    while True:
        if not first_check:
            # A previous candidate was rejected; ask for another name
            # (or give up silently when running with 'always').
            if always:
                filename = None
            else:
                filename = pywikibot.input('Enter a better name, or press '
                                           'enter to skip the file:')
            if not filename:
                return None
        first_check = False
        ext = os.path.splitext(filename)[1].lower().strip('.')
        # are any chars in forbidden also in filename?
        invalid = set(forbidden) & set(filename)
        if invalid:
            c = "".join(invalid)
            pywikibot.output(
                'Invalid character(s): %s. Please try again' % c)
            continue
        if allowed_formats and ext not in allowed_formats:
            if always:
                pywikibot.output('File format is not one of '
                                 '[{0}]'.format(' '.join(allowed_formats)))
                continue
            elif not pywikibot.input_yn(
                    u"File format is not one of [%s], but %s. Continue?"
                    % (u' '.join(allowed_formats), ext),
                    default=False, automatic_quit=False):
                continue
        potential_file_page = pywikibot.FilePage(self.targetSite, filename)
        if potential_file_page.exists():
            overwrite = self._handle_warning('exists')
            if overwrite is False:
                pywikibot.output(
                    'File exists and you asked to abort. Skipping.')
                return None
            if potential_file_page.canBeEdited():
                if overwrite is None:
                    overwrite = not pywikibot.input_yn(
                        "File with name %s already exists. "
                        "Would you like to change the name? "
                        "(Otherwise file will be overwritten.)"
                        % filename, default=True,
                        automatic_quit=False)
                if not overwrite:
                    continue
                else:
                    break
            else:
                pywikibot.output(u"File with name %s already exists and "
                                 "cannot be overwritten." % filename)
                continue
        else:
            try:
                # A file of the same name may still exist on the shared
                # repository (e.g. Commons).
                if potential_file_page.fileIsShared():
                    pywikibot.output(
                        'File with name %s already exists in shared '
                        'repository and cannot be overwritten.' % filename)
                    continue
                else:
                    break
            except pywikibot.NoPage:
                break

    # A proper description for the submission.
    # Empty descriptions are not accepted.
    if self.description:
        pywikibot.output('The suggested description is:\n%s'
                         % self.description)
    while not self.description or self.verifyDescription:
        if not self.description:
            pywikibot.output(color_format(
                '{lightred}It is not possible to upload a file '
                'without a description.{default}'))
            # Cannot prompt for a description with the 'always' option.
            assert not always
        # if no description, ask if user want to add one or quit,
        # and loop until one is filled.
        # if self.verifyDescription, ask if user want to change it
        # or continue.
        if self.description:
            question = 'Do you want to change this description?'
        else:
            question = 'No description was given. Add one?'
        if pywikibot.input_yn(question, default=not self.description,
                              automatic_quit=self.description):
            from pywikibot import editor as editarticle
            editor = editarticle.TextEditor()
            try:
                newDescription = editor.edit(self.description)
            except ImportError:
                raise
            except Exception as e:
                pywikibot.error(e)
                continue
            # if user saved / didn't press Cancel
            if newDescription:
                self.description = newDescription
        elif not self.description:
            raise QuitKeyboardInterrupt
        self.verifyDescription = False
    return filename
def review_hunks(self):
    """Review hunks interactively and record each hunk's review status."""
    def find_pending(start, end):
        # Index of the first super hunk still PENDING between start
        # (inclusive) and end (exclusive); scans backwards when
        # start > end.  Falls through to an implicit None when no
        # pending hunk exists in that range.
        step = -1 if start > end else +1
        for pending in range(start, end, step):
            if super_hunks[pending].reviewed == Hunk.PENDING:
                return pending

    # TODO: Missing commands (compared to git --patch): edit and search
    help_msg = {'y': 'accept this hunk',
                'n': 'do not accept this hunk',
                'q': 'do not accept this hunk and quit reviewing',
                'a': 'accept this hunk and all other pending',
                'd': 'do not apply this hunk or any of the later hunks in the file',
                'g': 'select a hunk to go to',
                'j': 'leave this hunk undecided, see next undecided hunk',
                'J': 'leave this hunk undecided, see next hunk',
                'k': 'leave this hunk undecided, see previous undecided hunk',
                'K': 'leave this hunk undecided, see previous hunk',
                's': 'split this hunk into smaller ones',
                '?': 'help',
                }
    # Only hunks still pending review take part in this session.
    super_hunks = self._generate_super_hunks(
        h for h in self.hunks if h.reviewed == Hunk.PENDING)
    position = 0  # index of the super hunk currently displayed

    # Loop until every hunk has been either accepted or rejected.
    while any(any(hunk.reviewed == Hunk.PENDING for hunk in super_hunk)
              for super_hunk in super_hunks):
        super_hunk = super_hunks[position]
        next_pending = find_pending(position + 1, len(super_hunks))
        prev_pending = find_pending(position - 1, -1)
        # Build the commands valid at this position; navigation commands
        # are offered only when a target hunk actually exists.
        answers = ['y', 'n', 'q', 'a', 'd', 'g']
        if next_pending is not None:
            answers += ['j']
        if position < len(super_hunks) - 1:
            answers += ['J']
        if prev_pending is not None:
            answers += ['k']
        if position > 0:
            answers += ['K']
        if len(super_hunk) > 1:
            answers += ['s']
        answers += ['?']
        pywikibot.output(self._generate_diff(super_hunk))
        choice = pywikibot.input('Accept this hunk [{0}]?'.format(
            ','.join(answers)))
        if choice not in answers:
            # Any unrecognized input falls through to the help text.
            choice = '?'
        if choice == 'y' or choice == 'n':
            # Decide this super hunk, then move to the next pending one
            # (wrapping to the front when nothing follows).
            super_hunk.reviewed = Hunk.APPR if choice == 'y' else Hunk.NOT_APPR
            if next_pending is not None:
                position = next_pending
            else:
                position = find_pending(0, position)
        elif choice == 'q':
            # Reject everything still pending, which ends the loop.
            for super_hunk in super_hunks:
                for hunk in super_hunk:
                    if hunk.reviewed == Hunk.PENDING:
                        hunk.reviewed = Hunk.NOT_APPR
        elif choice == 'a' or choice == 'd':
            # Accept ('a') or reject ('d') this hunk and all later ones.
            for super_hunk in super_hunks[position:]:
                for hunk in super_hunk:
                    if hunk.reviewed == Hunk.PENDING:
                        hunk.reviewed = Hunk.APPR if choice == 'a' else Hunk.NOT_APPR
            position = find_pending(0, position)
        elif choice == 'g':
            # Print one overview line per super hunk and jump to the
            # one the user selects by number.
            hunk_list = []
            rng_width = 18
            for index, super_hunk in enumerate(super_hunks, start=1):
                if super_hunk.reviewed == Hunk.PENDING:
                    status = ' '
                elif super_hunk.reviewed == Hunk.APPR:
                    status = '+'
                elif super_hunk.reviewed == Hunk.NOT_APPR:
                    status = '-'
                else:
                    assert False, "The super hunk's review status is " \
                                  "unknown."
                if super_hunk[0].a_rng[1] - super_hunk[0].a_rng[0] > 0:
                    mode = '-'
                    first = self.a[super_hunk[0].a_rng[0]]
                else:
                    mode = '+'
                    first = self.b[super_hunk[0].b_rng[0]]
                hunk_list += [(status, index,
                               Hunk.get_header_text(
                                   *self._get_context_range(super_hunk),
                                   affix=''),
                               mode, first)]
                rng_width = max(len(hunk_list[-1][2]), rng_width)
            line_template = ('{0}{1} {2: >' +
                             str(int(math.log10(len(super_hunks)) + 1)) +
                             '}: {3: <' + str(rng_width) + '} {4}{5}')
            # the last entry is the first changed line which usually ends
            # with a \n (only the last may not, which is covered by the
            # if-condition following this block)
            hunk_list = ''.join(
                line_template.format(
                    '*' if hunk_entry[1] == position + 1 else ' ',
                    *hunk_entry)
                for hunk_entry in hunk_list)
            if hunk_list.endswith('\n'):
                hunk_list = hunk_list[:-1]
            pywikibot.output(hunk_list)
            next_hunk = pywikibot.input('Go to which hunk?')
            try:
                next_hunk_position = int(next_hunk) - 1
            except ValueError:
                next_hunk_position = False
            if (next_hunk_position is not False and
                    0 <= next_hunk_position < len(super_hunks)):
                position = next_hunk_position
            elif next_hunk:  # nothing entered is silently ignored
                pywikibot.error('Invalid hunk number "{0}"'.format(next_hunk))
        elif choice == 'j':
            position = next_pending
        elif choice == 'J':
            position += 1
        elif choice == 'k':
            position = prev_pending
        elif choice == 'K':
            position -= 1
        elif choice == 's':
            # Replace this super hunk in place by its individual hunks;
            # super_hunk still references the pre-split group, so its
            # length is the number of hunks produced.
            super_hunks = (super_hunks[:position] +
                           super_hunks[position].split() +
                           super_hunks[position + 1:])
            pywikibot.output('Split into {0} hunks'.format(
                len(super_hunk._hunks)))
        elif choice == '?':
            pywikibot.output(color_format(
                '{purple}{0}{default}',
                '\n'.join(
                    '{0} -> {1}'.format(answer, help_msg[answer])
                    for answer in answers)))
        else:
            assert False, '%s is not a valid option' % choice
def run(self):
    """Start the bot.

    Iterate over the page generator, repeatedly apply the configured
    replacements to each page and — depending on the ``always`` option
    and interactive user input — save the result back to the wiki.
    """
    # Run the generator which will yield Pages which might need to be
    # changed.
    for page in self.generator:
        if self.isTitleExcepted(page.title()):
            pywikibot.output(
                "Skipping %s because the title is on the exceptions list."
                % page.title(asLink=True))
            continue
        try:
            # Load the page's text from the wiki
            original_text = page.get(get_redirect=True)
            if not page.canBeEdited():
                pywikibot.output("You can't edit page %s"
                                 % page.title(asLink=True))
                continue
        except pywikibot.NoPage:
            pywikibot.output("Page %s not found" % page.title(asLink=True))
            continue

        applied = set()  # replacements actually used, for the edit summary
        new_text = original_text
        while True:
            if self.isTextExcepted(new_text):
                pywikibot.output("Skipping %s because it contains text "
                                 "that is on the exceptions list."
                                 % page.title(asLink=True))
                break
            # Apply the replacements until the text is stable; only a
            # single pass when not running recursively.
            last_text = None
            while new_text != last_text:
                last_text = new_text
                new_text = self.apply_replacements(last_text, applied, page)
                if not self.recursive:
                    break
            if new_text == original_text:
                pywikibot.output("No changes were necessary in %s"
                                 % page.title(asLink=True))
                break
            if hasattr(self, "addedCat"):
                # Fetch only categories in wikitext, otherwise the others
                # will be explicitly added.
                cats = textlib.getCategoryLinks(new_text, site=page.site)
                if self.addedCat not in cats:
                    cats.append(self.addedCat)
                    new_text = textlib.replaceCategoryLinks(new_text, cats,
                                                            site=page.site)
            # Show the title of the page we're working on.
            # Highlight the title in purple.
            pywikibot.output(color_format(
                "\n\n>>> {lightpurple}{0}{default} <<<", page.title()))
            pywikibot.showDiff(original_text, new_text)
            if self.getOption("always"):
                break
            choice = pywikibot.input_choice(
                "Do you want to accept these changes?",
                [("Yes", "y"), ("No", "n"), ("Edit", "e"),
                 ("open in Browser", "b"), ("all", "a")],
                default="N")
            if choice == "e":
                editor = editarticle.TextEditor()
                as_edited = editor.edit(original_text)
                # if user didn't press Cancel
                if as_edited and as_edited != new_text:
                    new_text = as_edited
                continue
            if choice == "b":
                pywikibot.bot.open_webbrowser(page)
                try:
                    original_text = page.get(get_redirect=True, force=True)
                except pywikibot.NoPage:
                    pywikibot.output("Page %s has been deleted."
                                     % page.title())
                    break
                new_text = original_text
                continue
            if choice == "a":
                self.options["always"] = True
            if choice == "y":
                page.text = new_text
                # BUG FIX: 'async' became a reserved keyword in Python 3.7,
                # so the former `async=True` keyword argument was a
                # SyntaxError there.  Passing it through **kwargs keeps the
                # exact same keyword while remaining parseable.
                # NOTE(review): modern pywikibot renamed this parameter to
                # `asynchronous` — migrate once Page.save() supports it.
                page.save(summary=self.generate_summary(applied),
                          callback=self._count_changes, quiet=True,
                          **{"async": True})
                # Report results of asynchronous saves that finished.
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output("Page %s%s saved"
                                     % (proc_title, "" if res else " not"))
            # choice must be 'N'
            break
        if self.getOption("always") and new_text != original_text:
            try:
                page.text = new_text
                # Synchronous save here so the exceptions below are caught.
                page.save(summary=self.generate_summary(applied),
                          callback=self._count_changes, quiet=True)
            except pywikibot.EditConflict:
                pywikibot.output("Skipping %s because of edit conflict"
                                 % (page.title(),))
            except pywikibot.SpamfilterError as e:
                pywikibot.output("Cannot change %s because of blacklist "
                                 "entry %s" % (page.title(), e.url))
            except pywikibot.LockedPage:
                pywikibot.output("Skipping %s (locked page)"
                                 % (page.title(),))
            except pywikibot.PageNotSaved as error:
                pywikibot.output("Error putting page: %s" % (error.args,))
            # Periodically drain the pending-results queue so it does
            # not grow without bound.
            if self._pending_processed_titles.qsize() > 50:
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output("Page %s%s saved"
                                     % (proc_title, "" if res else " not"))
def review_hunks(self):
    """Review hunks interactively and record each hunk's review status."""
    def find_pending(start, end):
        # Return the index of the first PENDING super hunk between start
        # (inclusive) and end (exclusive), scanning backwards when
        # start > end; implicitly returns None when none is pending.
        step = -1 if start > end else +1
        for pending in range(start, end, step):
            if super_hunks[pending].reviewed == Hunk.PENDING:
                return pending

    # TODO: Missing commands (compared to git --patch): edit and search
    help_msg = {
        "y": "accept this hunk",
        "n": "do not accept this hunk",
        "q": "do not accept this hunk and quit reviewing",
        "a": "accept this hunk and all other pending",
        "d": "do not apply this hunk or any of the later hunks in the file",
        "g": "select a hunk to go to",
        "j": "leave this hunk undecided, see next undecided hunk",
        "J": "leave this hunk undecided, see next hunk",
        "k": "leave this hunk undecided, see previous undecided hunk",
        "K": "leave this hunk undecided, see previous hunk",
        "s": "split this hunk into smaller ones",
        "?": "help",
    }
    # Group only the hunks that are still pending review.
    super_hunks = self._generate_super_hunks(h for h in self.hunks
                                             if h.reviewed == Hunk.PENDING)
    position = 0  # index of the super hunk currently displayed

    # Keep prompting until no hunk is left pending.
    while any(any(hunk.reviewed == Hunk.PENDING for hunk in super_hunk)
              for super_hunk in super_hunks):
        super_hunk = super_hunks[position]
        next_pending = find_pending(position + 1, len(super_hunks))
        prev_pending = find_pending(position - 1, -1)
        # Offer navigation commands only when their target exists.
        answers = ["y", "n", "q", "a", "d", "g"]
        if next_pending is not None:
            answers += ["j"]
        if position < len(super_hunks) - 1:
            answers += ["J"]
        if prev_pending is not None:
            answers += ["k"]
        if position > 0:
            answers += ["K"]
        if len(super_hunk) > 1:
            answers += ["s"]
        answers += ["?"]
        pywikibot.output(self._generate_diff(super_hunk))
        choice = pywikibot.input("Accept this hunk [{0}]?".format(
            ",".join(answers)))
        if choice not in answers:
            # Unknown input falls through to the help text below.
            choice = "?"
        if choice == "y" or choice == "n":
            # Decide this super hunk, then advance to the next pending
            # one (wrapping around to the front when nothing follows).
            super_hunk.reviewed = Hunk.APPR if choice == "y" else Hunk.NOT_APPR
            if next_pending is not None:
                position = next_pending
            else:
                position = find_pending(0, position)
        elif choice == "q":
            # Reject all remaining pending hunks, ending the loop.
            for super_hunk in super_hunks:
                for hunk in super_hunk:
                    if hunk.reviewed == Hunk.PENDING:
                        hunk.reviewed = Hunk.NOT_APPR
        elif choice == "a" or choice == "d":
            # Accept ('a') or reject ('d') this hunk and all later ones.
            for super_hunk in super_hunks[position:]:
                for hunk in super_hunk:
                    if hunk.reviewed == Hunk.PENDING:
                        hunk.reviewed = Hunk.APPR if choice == "a" else Hunk.NOT_APPR
            position = find_pending(0, position)
        elif choice == "g":
            # Show an overview line per super hunk and jump to the one
            # the user picks by number.
            hunk_list = []
            rng_width = 18
            for index, super_hunk in enumerate(super_hunks, start=1):
                if super_hunk.reviewed == Hunk.PENDING:
                    status = " "
                elif super_hunk.reviewed == Hunk.APPR:
                    status = "+"
                elif super_hunk.reviewed == Hunk.NOT_APPR:
                    status = "-"
                else:
                    assert False, "The super hunk's review status is " "unknown."
                if super_hunk[0].a_rng[1] - super_hunk[0].a_rng[0] > 0:
                    mode = "-"
                    first = self.a[super_hunk[0].a_rng[0]]
                else:
                    mode = "+"
                    first = self.b[super_hunk[0].b_rng[0]]
                hunk_list += [
                    (
                        status,
                        index,
                        Hunk.get_header_text(
                            *self._get_context_range(super_hunk), affix=""),
                        mode,
                        first,
                    )
                ]
                rng_width = max(len(hunk_list[-1][2]), rng_width)
            line_template = (
                "{0}{1} {2: >"
                + str(int(math.log10(len(super_hunks)) + 1))
                + "}: {3: <"
                + str(rng_width)
                + "} {4}{5}")
            # the last entry is the first changed line which usually ends
            # with a \n (only the last may not, which is covered by the
            # if-condition following this block)
            hunk_list = "".join(
                line_template.format(
                    "*" if hunk_entry[1] == position + 1 else " ",
                    *hunk_entry)
                for hunk_entry in hunk_list)
            if hunk_list.endswith("\n"):
                hunk_list = hunk_list[:-1]
            pywikibot.output(hunk_list)
            next_hunk = pywikibot.input("Go to which hunk?")
            try:
                next_hunk_position = int(next_hunk) - 1
            except ValueError:
                next_hunk_position = False
            if next_hunk_position is not False and 0 <= next_hunk_position < len(super_hunks):
                position = next_hunk_position
            elif next_hunk:  # nothing entered is silently ignored
                pywikibot.error('Invalid hunk number "{0}"'.format(next_hunk))
        elif choice == "j":
            position = next_pending
        elif choice == "J":
            position += 1
        elif choice == "k":
            position = prev_pending
        elif choice == "K":
            position -= 1
        elif choice == "s":
            # Replace this super hunk in place by its individual hunks;
            # super_hunk still references the pre-split group, so its
            # length is the number of hunks produced.
            super_hunks = super_hunks[:position] + super_hunks[position].split() + super_hunks[position + 1:]
            pywikibot.output("Split into {0} hunks".format(
                len(super_hunk._hunks)))
        elif choice == "?":
            pywikibot.output(
                color_format(
                    "{purple}{0}{default}",
                    "\n".join("{0} -> {1}".format(answer, help_msg[answer])
                              for answer in answers),
                )
            )
        else:
            assert False, "%s is not a valid option" % choice
def run(self):
    """Run the bot.

    For every candidate file page, compare the local file with its
    Commons counterpart, optionally replace the remaining local usages
    and delete the local copy when the Commons description is deemed
    sufficient.
    """
    commons = pywikibot.Site('commons', 'commons')
    comment = i18n.twtranslate(self.site, 'imagetransfer-nowcommons_notice')

    for page in self.getPageGenerator():
        if self.getOption('use_hash'):
            # Page -> Has the namespace | commons image -> Not
            images_list = page
            # 0 -> local image, 1 -> commons image
            page = pywikibot.Page(self.site, images_list[0])
        else:
            # If use_hash is true, we have already print this before, no need
            self.current_page = page
        try:
            localImagePage = pywikibot.FilePage(self.site, page.title())
            if localImagePage.fileIsShared():
                pywikibot.output(u'File is already on Commons.')
                continue
            sha1 = localImagePage.latest_file_info.sha1
            if self.getOption('use_hash'):
                filenameOnCommons = images_list[1]
            else:
                filenameOnCommons = self.findFilenameOnCommons(
                    localImagePage)
            if not filenameOnCommons and not self.getOption('use_hash'):
                pywikibot.output(u'NowCommons template not found.')
                continue
            commonsImagePage = pywikibot.FilePage(commons, 'Image:%s'
                                                  % filenameOnCommons)
            if (localImagePage.title(withNamespace=False) ==
                    commonsImagePage.title(withNamespace=False) and
                    self.getOption('use_hash')):
                pywikibot.output(
                    u'The local and the commons images have the same name')
            if (localImagePage.title(withNamespace=False) !=
                    commonsImagePage.title(withNamespace=False)):
                usingPages = list(localImagePage.usingPages())
                if usingPages and usingPages != [localImagePage]:
                    pywikibot.output(color_format(
                        '"{lightred}{0}{default}" is still used in {1} pages.',
                        localImagePage.title(withNamespace=False),
                        len(usingPages)))
                    if self.getOption('replace') is True:
                        pywikibot.output(color_format(
                            'Replacing "{lightred}{0}{default}" by '
                            '"{lightgreen}{1}{default}\".',
                            localImagePage.title(withNamespace=False),
                            commonsImagePage.title(withNamespace=False)))
                        bot = ImageBot(
                            pg.FileLinksGenerator(localImagePage),
                            localImagePage.title(withNamespace=False),
                            commonsImagePage.title(withNamespace=False),
                            '', self.getOption('replacealways'),
                            self.getOption('replaceloose'))
                        bot.run()
                        # If the image is used with the urlname the
                        # previous function won't work
                        is_used = bool(list(pywikibot.FilePage(
                            self.site, page.title()).usingPages(total=1)))
                        if is_used and self.getOption('replaceloose'):
                            bot = ImageBot(
                                pg.FileLinksGenerator(localImagePage),
                                localImagePage.title(withNamespace=False,
                                                     asUrl=True),
                                commonsImagePage.title(withNamespace=False),
                                '', self.getOption('replacealways'),
                                self.getOption('replaceloose'))
                            bot.run()
                        # refresh because we want the updated list
                        usingPages = len(list(pywikibot.FilePage(
                            self.site, page.title()).usingPages()))
                        if usingPages > 0 and self.getOption('use_hash'):
                            # just an enter
                            pywikibot.input(
                                u'There are still %s pages with this \
image, confirm the manual removal from them please.' % usingPages)
                    else:
                        pywikibot.output(u'Please change them manually.')
                    continue
                else:
                    pywikibot.output(color_format(
                        'No page is using "{lightgreen}{0}{default}" '
                        'anymore.',
                        localImagePage.title(withNamespace=False)))
            commonsText = commonsImagePage.get()
            if self.getOption('replaceonly') is False:
                if sha1 == commonsImagePage.latest_file_info.sha1:
                    pywikibot.output(
                        u'The image is identical to the one on Commons.')
                    if (len(localImagePage.getFileVersionHistory()) > 1 and
                            not self.getOption('use_hash')):
                        pywikibot.output(u"This image has a version history. Please \
delete it manually after making sure that the \
old versions are not worth keeping.""")
                        continue
                    if self.getOption('always') is False:
                        # Show both descriptions so the user can verify
                        # the Commons page before the local delete.
                        format_str = color_format(
                            '\n\n>>>> Description on {lightpurple}%s'
                            '{default} <<<<\n')
                        pywikibot.output(format_str % page.title())
                        pywikibot.output(localImagePage.get())
                        pywikibot.output(format_str % commonsImagePage.title())
                        pywikibot.output(commonsText)
                        if pywikibot.input_yn(
                                u'Does the description on Commons contain '
                                'all required source and license\n'
                                'information?',
                                default=False, automatic_quit=False):
                            localImagePage.delete(
                                '%s [[:commons:Image:%s]]'
                                % (comment, filenameOnCommons), prompt=False)
                    else:
                        localImagePage.delete(
                            comment + ' [[:commons:Image:%s]]'
                            % filenameOnCommons, prompt=False)
                else:
                    pywikibot.output(
                        u'The image is not identical to the one on Commons.')
        except (pywikibot.NoPage, pywikibot.IsRedirectPage) as e:
            # BUG FIX: exceptions are not subscriptable on Python 3, so the
            # former `e[0]` raised TypeError there; `e.args[0]` is the
            # exact equivalent of Python 2's `e[0]` and works on both.
            pywikibot.output(u'%s' % e.args[0])
            continue