def process(day):
    """Build and publish one day's "Journal des recréations" section.

    Arguments:
        day -- python date of the day to process

    Side effects: writes the section to the wiki page
    ``params.prefix/<month>`` with an edit summary listing pages
    recreated despite a deletion discussion (PàS).
    """
    if params.verbose:
        print("processing Journal des recréations ({day})".format(day=format_date(day)))
    start = to_date(day)
    end = to_date(day + ONE_DAY)
    result = "\n\n== {} ==\n".format(format_date(day))
    comment = []
    for i, page in enumerate(creation_log(start, end), 1):
        gras = ''
        date = ''
        if params.verbose:
            print(i, page["timestamp"])
        dl = deletelog(page["title"])
        if dl:
            page_pas = Page(Site(), "Discussion:" + page["title"] + "/Suppression")
            if page_pas.isRedirectPage():
                page_pas = page_pas.getRedirectTarget()
            # Fetch the PàS page text once instead of calling .get() up to
            # three times as the original did.
            if page_pas.exists():
                pas_text = page_pas.get()
                if re.search(r'article supprimé', pas_text, re.I):
                    # Hoisted: the original ran this exact search twice
                    # (once to test, once to extract the group).
                    m = re.search(r'\{\{ ?article supprimé[^\}]*\d{1,2} (\S* \d{4}) à',
                                  pas_text, re.I)
                    if m:
                        date = u' de %s' % m.group(1)
                    comment.append(u'[[%s]] (malgré [[%s|PàS]]%s)'
                                   % (page["title"], page_pas.title(), date))
                    gras = "'''"
            r = (u"* {g}{{{{a-court|{title}}}}} <small>([[{pas}|PàS]])</small> supprimé le {date} puis recréé par {{{{u|{user}}}}}{g} \n"
                 .format(title=wiki_param(page["title"]),
                         pas=page_pas.title(),
                         user=wiki_param(page["user"]),
                         date=format_date(from_date(dl["timestamp"])),
                         g=gras))
            if params.verbose:
                print(r)
            result += r
    page = Page(Site(), params.prefix + u'/' + format_date(day, skip_day=True))
    try:
        result = page.get() + result
    except NoPage:
        # First edit of the month: prepend the bot-maintenance banner.
        result = u'{{mise à jour bot|Zérobot}}' + result
    if comment:
        # Leading empty element so ' - '.join() starts with a separator.
        comment.insert(0, '')
    page.put(result, comment="Journal des recréations ({day}) ".format(day=format_date(day)) + ' - '.join(comment))
def process(day):
    """One day bot processing.

    Arguments:
        day -- python date format
    """
    if params.verbose:
        print("processing Journal des recréations ({day})".format(day=format_date(day)))
    start = to_date(day)
    end = to_date(day + ONE_DAY)
    result = "\n== {} ==\n".format(format_date(day))
    for index, entry in enumerate(creation_log(start, end), 1):
        if params.verbose:
            print(index, entry["timestamp"])
        deletion = deletelog(entry["title"])
        if not deletion:
            # Never deleted before: not a recreation, skip it.
            continue
        row = ("* {{{{a-court|{title}}}}} <small>([[{pas}|PàS]])</small> supprimé le {date} recréé par {{{{u|{user}}}}} \n"
               .format(title=wiki_param(entry["title"]),
                       pas=wiki_param("Discussion:" + entry["title"] + "/Suppression"),
                       user=wiki_param(entry["user"]),
                       date=format_date(from_date(deletion["timestamp"]))))
        if params.verbose:
            print(row)
        result += row
    target = Page(Site(), params.prefix + "/" + format_date(day, skip_day=True))
    try:
        # Append today's section to the existing monthly page.
        result = target.get() + result
    except NoPage:
        pass
    target.put(result, comment="Journal des recréations ({day})".format(day=format_date(day)))
def logGroup(self, page: pywikibot.Page, users: List[pywikibot.User]) -> None:
    """Append each user to the log page, skipping users already listed.

    @param page: log page to update (created if it does not exist)
    @param users: users to record on the page
    """
    text = page.get(force=True) if page.exists() else ""
    for user in users:
        newLine = f"\n* [[Benutzer:{user.username}|{user.username}]]"
        # Idiomatic membership test (was "not newLine in text").
        if newLine not in text:
            text += newLine
    page.text = text
    # Plain string literal: the original used an f-string with no placeholders.
    page.save(summary="Bot: Benutzerliste nach Botlauf aktualisiert.")
def process(day):
    """One day bot processing.

    Arguments:
        day -- python date format

    Bug fixed: the original closed the ``.format(...)`` call right after
    ``pas=page_pas.title())``, leaving ``user=``, ``date=`` and ``g=``
    stranded outside the call — a syntax error.  The closing parenthesis
    now encloses all five keyword arguments.  The regex is also a raw
    string (the original ``'\\{\\{\\ ?...'`` relied on invalid escapes).
    """
    if params.verbose:
        print("processing Journal des recréations ({day})".format(day=format_date(day)))
    start = to_date(day)
    end = to_date(day + ONE_DAY)
    result = "\n== {} ==\n".format(format_date(day))
    comment = ''
    for i, page in enumerate(creation_log(start, end), 1):
        gras = ''
        if params.verbose:
            print(i, page["timestamp"])
        dl = deletelog(page["title"])
        if dl:
            page_pas = Page(Site(), "Discussion:" + page["title"] + "/Suppression")
            if page_pas.exists() and re.search(r'\{\{ ?Article supprimé', page_pas.get(), re.I):
                comment += u' - %s (malgré [[%s|PàS]])' % (page["title"], page_pas.title())
                gras = "'''"
            r = ("* {g}{{{{a-court|{title}}}}} <small>([[{pas}|PàS]])</small> supprimé le {date} recréé par {{{{u|{user}}}}}{g} \n"
                 .format(title=wiki_param(page["title"]),
                         pas=page_pas.title(),
                         user=wiki_param(page["user"]),
                         date=format_date(from_date(dl["timestamp"])),
                         g=gras))
            if params.verbose:
                print(r)
            result += r
    page = Page(Site(), params.prefix + "/" + format_date(day, skip_day=True))
    try:
        result = page.get() + result
    except NoPage:
        pass
    page.put(result, comment="Journal des recréations ({day})".format(day=format_date(day)) + comment)
def load_config(page: pywikibot.Page, **kwargs: Any) -> ConfigJSONObject:
    """Load JSON config from the page."""
    if page.isRedirectPage():
        # Follow a single redirect hop to the real config page.
        pywikibot.log(f"{page!r} is a redirect.")
        page = page.getRedirectTarget()
    fallback = jsoncfg.loads_config("{}")
    if not page.exists():
        pywikibot.log(f"{page!r} does not exist.")
        return fallback
    try:
        return jsoncfg.loads_config(page.get(**kwargs).strip())
    except pywikibot.exceptions.PageRelatedError:
        # Unreadable page: behave as if the config were empty.
        return fallback
def extract_coach_tenures(name):
    """Extract a coach's tenures from Wikipedia.

    Arguments:
    - name (name of coach)

    Returns:
    - list(dict): one dict per tenure, each merged from the team/position
      dict and the years dict, plus a 'name' key; empty list when the page
      or the template data cannot be found.
    """
    # Lazy %-args instead of eager string formatting in logging calls.
    logging.info('Looking for coach %s', name)
    page_name = get_page_name_from_coach_name_wiki(name)
    # If we can't find a wikipedia page, return immediately
    if not page_name:
        return []
    logging.debug('Looking up %s as http://en.wikipedia.org/wiki/%s', name, page_name)
    # Extract page content from wikipedia and narrow it down to the templates
    page = Page(Site('en', 'wikipedia'), page_name)
    if page.isRedirectPage():
        page = page.getRedirectTarget()
    content = page.get()
    parsed = mwparserfromhell.parse(content)
    templates = parsed.filter_templates()
    # Extract teams and years from the template.  Loop variables renamed:
    # the original reused `p` for the template param, shadowing the page.
    teams, years = None, None
    for template in templates:
        for param in template.params:
            if "coach_teams" in param.name:
                teams = parse_coach_teams_and_positions_from_wiki(param)
            if "coach_years" in param.name:
                years = parse_coach_years_from_wiki(param)
    # If we were not able to extract information from the page, log & return empty
    if not teams or not years:
        logging.warning(
            'ISSUE DETECTED: %s is valid page but no information extracted', name)
        return []
    # Merge each (team, years) pair.  The original used
    # dict(t[0].items() + t[1].items()), which is Python-2-only: in
    # Python 3 dict views cannot be concatenated with `+`.
    tenures = [{**team, **year} for team, year in zip(teams, years)]
    # Plain loop instead of a side-effect list comprehension.
    for tenure in tenures:
        tenure['name'] = name
    return tenures
def process(day):
    """One day bot processing.

    Arguments:
        day -- python date format
    """
    if params.verbose:
        print("processing Journal des recréations ({day})".format(
            day=format_date(day)))
    window_start = to_date(day)
    window_end = to_date(day + ONE_DAY)
    result = "\n== {} ==\n".format(format_date(day))
    row_template = ("* {{{{a-court|{title}}}}} <small>([[{pas}|PàS]])</small>"
                    " supprimé le {date} recréé par {{{{u|{user}}}}} \n")
    for count, created in enumerate(creation_log(window_start, window_end), 1):
        if params.verbose:
            print(count, created["timestamp"])
        deletion = deletelog(created["title"])
        if deletion:
            # Only previously-deleted titles belong in the journal.
            row = row_template.format(
                title=wiki_param(created["title"]),
                pas=wiki_param("Discussion:" + created["title"] + "/Suppression"),
                user=wiki_param(created["user"]),
                date=format_date(from_date(deletion["timestamp"])))
            if params.verbose:
                print(row)
            result += row
    journal = Page(Site(), params.prefix + "/" + format_date(day, skip_day=True))
    try:
        result = journal.get() + result
    except NoPage:
        pass
    journal.put(
        result,
        comment="Journal des recréations ({day})".format(day=format_date(day)))
def remove_cfd_tpl(page: pywikibot.Page, summary: str) -> None:
    """
    Remove the CfD template from the page.

    @param page: Page to edit
    @param summary: Edit summary
    """
    # First strip the comment-delimited CFD section wholesale.
    cfd_section = (r'<!--\s*BEGIN CFD TEMPLATE\s*-->.*?'
                   r'<!--\s*END CFD TEMPLATE\s*-->\n*')
    text = re.sub(cfd_section, '', page.get(force=True),
                  flags=re.I | re.M | re.S)
    # Then drop any remaining bare CfD template transclusions.
    wikicode = mwparserfromhell.parse(text, skip_style_tags=True)
    for tpl in wikicode.ifilter_templates():
        try:
            if pywikibot.Page(page.site, str(tpl.name), ns=10) in TPL['cfd']:
                wikicode.remove(tpl)
        except pywikibot.InvalidTitle:
            # Malformed template name — leave it alone.
            continue
    page.text = str(wikicode).strip()
    page.save(summary=summary)
def ensureIncludedAsTemplate(mainLogPage: pywikibot.Page, subLogPageTitle: str) -> None:
    """Ensure the sub log page is transcluded on the main log page.

    @param mainLogPage: page that should transclude the sub page
    @param subLogPageTitle: title of the sub page to transclude

    Bug fixed: the original membership test built ``{Title}`` (single
    braces) instead of the ``{{Title}}`` transclusion it appends.  It only
    worked because ``{Title}`` happens to be a substring of ``{{Title}}``,
    and it would wrongly skip a page containing a literal ``{Title}``.
    """
    transclusion = f"{{{{{subLogPageTitle}}}}}"
    if transclusion not in mainLogPage.get(force=True):
        mainLogPage.text = mainLogPage.text + f"\n{transclusion}"
        mainLogPage.save(
            summary=f"Bot: Unterseite [[{subLogPageTitle}]] eingebunden.")
def process_wikipage(self, wikipage: pywikibot.Page, language: str):
    """Delegate a wiki page to the plain-text handler using its title and text."""
    # Fetch the content first (as the original did) so a fetch failure
    # surfaces before the title lookup.
    content = wikipage.get()
    return self.process_non_wikipage(wikipage.title(), content, language)
def process_page(self, page: Page):
    # Fill in the missing DYK blurb on a talk page: sets the 'entry' param of
    # {{DYK talk}} / the 'dykentry' param of {{ArticleHistory}} when absent.
    # Returns True when an edit was saved, False otherwise.
    page_text = page.get(force=True)
    parsed = mwparserfromhell.parse(page_text)
    year = None
    month = None
    day = None
    entry = None
    for template in parsed.filter_templates():
        if (template.name.matches('Dyktalk') or template.name.matches('DYK talk')) and (
                not template.has('entry') or len(template.get('entry').value) == 0):
            if year is None:
                # {{DYK talk}} stores the date positionally: param 1 = "day month",
                # param 2 = year.
                if (not template.has(1)) or (not template.has(2)):
                    print('Skipping {{DYK talk}} page', page, ', no date found')
                    continue
                print('*', page.title(), template.get(2), template.get(1))
                year = template.get(2).value.strip()
                day, month = template.get(1).value.strip().split(' ')
            if entry is None:
                # Look up the blurb from the DYK archive for that date.
                entry = self.get_entry_for_page(year, month, day, page)
            if entry:
                print('Adding entry', entry, 'to {{DYK talk}}')
                template.add('entry', entry)
        elif (template.name.matches('ArticleHistory') or template.name.matches('Article history')) and (
                not template.has('dykentry') or len(template.get('dykentry').value) == 0):
            if year is None:
                if not template.has('dykdate'):
                    print('Skipping {{ArticleHistory}} on page', page, ', no date found')
                    continue
                date = template.get('dykdate').value.strip()
                print('*', page.title(), date)
                if ' ' in date:
                    # monthName YYYY
                    if date.count(' ') == 1:
                        date = '1 ' + date
                    day, month, year = date.split(' ')[:3]
                elif '-' in date:
                    # ISO-style YYYY-MM-DD; convert the numeric month to its
                    # English name (year 1900 is a dummy, only %B is used).
                    year, month, day = date.split('-')[:3]
                    month = datetime.date(1900, int(month), 1).strftime('%B')
                else:
                    print('Skipping {{ArticleHistory}} on page', page, ", can't parse date", date)
                    continue
                print(page.title(), year, month, day)
            if entry is None:
                entry = self.get_entry_for_page(year, month, day, page)
            if entry:
                print('Adding entry', entry, 'to {{ArticleHistory}}')
                # Keep parameter order tidy: insert before the existing dykdate.
                template.add('dykentry', entry, before='dykdate')
    if entry:
        new_text = str(parsed)
        # Save only when the text actually changed, edit budget allows it, and
        # (for manual runs) the operator confirms.
        if (new_text != page.text and self.should_edit() and
                (not self.is_manual_run or confirm_edit())):
            self.get_mediawiki_api().get_site().login()
            page.text = str(parsed)
            page.save(
                self.get_task_configuration('missing_blurb_edit_summary'),
                botflag=self.should_use_bot_flag(),
            )
            self.record_trial_edit()
            return True
    return False