gcmlimit='max', **kwargs) def check_category(tplclass, *additional_classes): errors = collections.defaultdict(list) kwargs = {} if hasattr(tplclass, 'NAMESPACES'): kwargs['gcmnamespace'] = mwbot.join(tplclass.NAMESPACES) for page in catmembers_with_revs(tplclass.CATEGORY, **kwargs): code = mwbot.parse(page['revisions'][0]['*']) for error in check_template(code, tplclass): errors[page['title']].append(error) for ac in additional_classes: for error in check_template(code, ac): errors[page['title']].append(error) return generate_section(tplclass.CATEGORY, errors) if __name__ == '__main__': parser = mwbot.get_argparser() args = parser.parse_args() site = mwbot.getsite('report.py', args) report = '{{Sonstiges Leiste}}\nGenerated by ~~~~\n' report += check_category(LVADaten, Zuordnung) report += check_category(AbteilungDaten) report += check_category(KatalogDaten) report += check_category(Beispiel) mwbot.save(site, 'VoWi:Report', None, report, 'update')
#!/usr/bin/env python3
"""Collect legacy example links on an index page and move them to the
current 'Beispiel' naming scheme."""
import mwbot
import vowi

if __name__ == '__main__':
    parser = mwbot.get_argparser()
    parser.add_argument('page')
    args = parser.parse_args()
    site = mwbot.getsite('bsp_convert_legacy.py', args)
    index = next(site.results(titles=args.page,
                              prop='revisions', rvprop='content'))
    code = mwbot.parse(index['revisions'][0]['*'])

    moves = []
    for link in code.ifilter_wikilinks():
        target = str(link.title)
        # Only absolute links that point into an 'Übungen' subtree.
        if not target.startswith('/') and '/Übungen' in target:
            base = '/'.join(index['title'].split('/')[:2])
            moves.append((target, base + '/Beispiel ' + str(link.text)))

    # The semester tag is the second token of the second path component,
    # e.g. 'LVA/Übungen WS12' -> 'WS12'.
    semester = index['title'].split('/')[1].split()[1]
    mwbot.moves(site, moves, 'verschiebe zu semester {}'.format(semester))
title = match.group(1).strip().replace('_', ' ') if not title.startswith('Kategorie'): after += '{{#attach:%s}}\n' % title else: print('cannot handle', r['title']) mwbot.save(site, r['title'], text, after, 'verwende #attach Parserfunktion') def convert_files(threads): results = list( site.results(generator='categorymembers', gcmtitle='Category:Materialien', gcmtype='file', prop='revisions', rvprop='content', gcmlimit='max')) with PoolExecutor(max_workers=threads) as executor: for _ in executor.map(convert_file, results): pass if __name__ == '__main__': parser = mwbot.get_argparser() parser.add_argument('--threads', default=1, type=int) args = parser.parse_args() site = mwbot.getsite('materialien2attachments.py', args) convert_redirects(args.threads) convert_files(args.threads)
BSP_PREFIX = 'Beispiel ' FORMAT = '{}/Übungen {}/Beispiel {}' if __name__ == '__main__': today = datetime.datetime.today() month = today.month semester = ('WS' if month == 1 or month > 9 else 'SS') + str(today.year)[2:] parser = mwbot.get_argparser() parser.add_argument('index') parser.add_argument('from', nargs='?', default='-') parser.add_argument('to', nargs='?') args = parser.parse_args() site = mwbot.getsite('bsp_mover.py', args) index = next(site.results(titles=args.index)) index_parts = index['title'].split('/') assert index_parts[1].startswith('Übungen ') moves = [] from_ = getattr(args, 'from') if '-' in from_: min_, max_ = from_.split('-', 2) min_ = int(min_) if min_ else 1 max_ = int(max_) if max_ else None for page in site.results(generator='allpages', gapprefix=args.index.split(':')[1] + '/'+BSP_PREFIX, gaplimit='max', prop='revisions', rvprop='content', gapnamespace=index['ns']): if page['title'].count('/') != 2: continue
links = td.contents.filter_wikilinks() if len(links) == 1: if links[0].title.startswith('/'): td.attributes.clear() title = page + str(links[0].title) if title in classes_per_title: td.attributes.append('class="{}"'.format(' '.join(classes_per_title[title]))) else: logging.info('found non-relative link') else: logging.info('found cell with {} links'.format(len(links))) mwbot.save(site, page, idxpage['revisions'][0]['*'], str(code), site.msg('update')) if __name__ == '__main__': parser = mwbot.get_argparser() parser.add_argument('page', nargs='?') args = parser.parse_args() site = mwbot.getsite('color_bsptable.py', args) try: if args.page: handle(site, args.page) else: for title in site.get('askargs', conditions='Category:Beispielindexe|Ist veraltet::0', parameters='limit=9999')['query']['results']: print(title) bsp_fixer.handle(site, title) handle(site, title) except KeyboardInterrupt as e: sys.exit()
new += '\n{{Baustein footer}}' if not code.filter_headings(): new = '====== ' + page['title'].split(':')[1] + ' ======\n' + new mwbot.save(site, page['title'], orig, new, 'fixe Textbaustein (bausteien_fixer.py)') if mwbot.santitle(page['title'].split(':')[1]) not in templates: undocumented_templates.append(page['title'].split(':')[1]) if __name__ == '__main__': parser = argparse.ArgumentParser() overview = 'Hilfe:Vorlagen für Beispiele' mwbot.add_pagesel_args(parser, categorydefault='Textbausteine für Beispiele') args = parser.parse_args() site = mwbot.getsite() page = next( site.query('pages', prop='revisions', titles=overview, rvprop='content')) orig = page['revisions'][0]['*'] code = mwbot.parse(orig) templates = [mwbot.santitle(t.name) for t in code.ifilter_templates()] undocumented_templates = [] mwbot.handle_pagesel_args(site, args, (mwapi.NS_TEMPLATE, ), handle_page) undocumented_templates.sort() new = orig for t in undocumented_templates:
if match: tpl.add('accessibility', match.group(1).replace(' ', ' ').strip()) else: print("couldn't find accessibility info") return 'update room data (from TISS)' def handle_page(page): before = page['revisions'][0]['*'] code = mwbot.parse(before) templates = code.filter_templates(matches=lambda t: t.name.matches('Room')) rcode = page['title'].split(':')[-1] if templates and rcode in rooms: msg = handle_template(templates[0], rooms[rcode], page['ns']) mwbot.save(site, page['title'], before, str(code), site.msg(msg), strip_consec_nl=True) if __name__ == '__main__': parser = mwbot.get_argparser() mwbot.add_pagesel_args(parser, categorydefault='Rooms') args = parser.parse_args() site = mwbot.getsite('room_update.py', args) mwbot.handle_pagesel_args(site, args, (3000, ), handle_page)
"""Move a page and its subpages.

Not using move subpages functionality because it does not check if the
destination exists beforehand.
"""
import re
import sys  # BUGFIX: sys.exit() was called below without importing sys

import mwbot

if __name__ == '__main__':
    parser = mwbot.get_argparser()
    parser.add_argument('src')
    parser.add_argument('dest')
    parser.add_argument('reason', nargs='?', default='LVA-Umbenennung')
    args = parser.parse_args()
    site = mwbot.getsite('mat_mover.py', args)
    site.require_rights('edit', 'move', 'movefile', 'markbotedits')
    srcpage = next(site.results(prop='info', titles=args.src))
    if 'missing' in srcpage:
        sys.exit("fatal: src doesn't exist")
    moves = {srcpage['pageid']: args.dest}  # dict from_id -> to_title
    # Collect every subpage of src and map it to the same subpath under dest.
    for subpage in site.results(list='prefixsearch',
                                pssearch=args.src + '/',
                                pslimit='max'):
        name = subpage['title'][len(args.src) + 1:]
        moves[subpage['pageid']] = args.dest + '/' + name
        print('subpage', name)
if rel.has('2'): rel.get('2').value = str(rel.get('2').value).replace('–', '-') rels.sort(key=lambda x: x.get('1')) tpl.get('zuordnungen').value = '\n' + '\n'.join( [' ' * 4 + str(r) for r in rels]) + '\n' return 'fixe LVA-Daten' def handle_page(page): before = page['revisions'][0]['*'] code = mwbot.parse(before) templates = code.filter_templates( matches=lambda t: t.name.matches('LVA-Daten')) if templates: msg = handle_template(templates[0], code, page['ns']) mwbot.save(site, page['title'], before, str(code), site.msg(msg), strip_consec_nl=True) if __name__ == '__main__': parser = mwbot.get_argparser() mwbot.add_pagesel_args(parser, categorydefault='LVAs') args = parser.parse_args() site = mwbot.getsite('lva_fixer.py', args) mwbot.handle_pagesel_args(site, args, vowi.UNI_NAMESPACES, handle_page)
#!/usr/bin/env python3 import argparse import os.path import mwbot parser = mwbot.get_argparser() parser.add_argument('src') parser.add_argument('dest') parser.add_argument('reason') parser.add_argument('--noredir', action='store_true') args = parser.parse_args() site = mwbot.getsite('subpage_mover.py', args) site.require_rights('move') src_ns = next(site.results(list='pages', prop='info', titles=args.src))['ns'] if args.dest.startswith('.'): destname = os.path.abspath('/' + args.src + '/' + args.dest)[1:] print('resolved destination to {}'.format(destname)) else: destname = args.dest # if noredir, fix backlinks moves = [] srcname = args.src.split(':', 2)[1] for page in site.results(list='allpages',
if tpl.has('latitude'): tpl.get('latitude').name = 'lat' if tpl.has('longitude'): tpl.get('longitude').name = 'lng' if tpl.has('SearchValue'): tpl.get('SearchValue').name = 'alt_name' return 'convert to [[Template:Room]]' def handle_page(page): before = page['revisions'][0]['*'] code = mwbot.parse(before) templates = code.filter_templates( matches=lambda t: t.name.matches('RaumCode')) if templates: msg = handle_template(templates[0], code, page['ns']) mwbot.save(site, page['title'], before, str(code).replace('[[Kategorie:Raumcode]]', ''), site.msg(msg), strip_consec_nl=True) if __name__ == '__main__': parser = mwbot.get_argparser() mwbot.add_pagesel_args(parser, categorydefault='Raumcode') args = parser.parse_args() site = mwbot.getsite('raum2room.py', args) mwbot.handle_pagesel_args(site, args, (3000, ), handle_page)
if 'duplicatefiles' in page: for dup in page['duplicatefiles']: duplicates.append('Datei:' + dup['name'].replace('_', ' ')) break else: excluded += 1 for ns in vowi.UNI_NAMESPACES: for p in site.results(list='allpages', apfilterredir='nonredirects', apnamespace=ns, aplimit='max'): if '/' in p['title']: if not p['title'].split('/', 1)[0] in current_lvas: excluded += 1 return excluded if __name__ == '__main__': parser = mwbot.get_argparser() args = parser.parse_args() site = mwbot.getsite('excluded_mat_counter.py', args) count = count_excluded_resources() print(count) mwbot.save(site, 'Vorlage:!materialien anzahl/exkludiert', None, count, site.msg('update'))
])) if angabe_div: template.add('1', '\n' + str(angabe_div[0].contents).strip() + '\n', showkey=True) code.remove(angabe_div[0]) else: angabe_sec = code.get_sections(matches='Angabe|Aufgabe') if angabe_sec: code.remove(angabe_sec[0].nodes[0]) template.add('1', '\n' + str(angabe_sec[0]).strip() + '\n', showkey=True) code.replace(angabe_sec[0], '\n') mwbot.save(site, page['title'], orig, str(code), 'beispiel_fixer.py', strip_consec_nl=True) if __name__ == '__main__': parser = mwbot.get_argparser() parser.add_argument('index') args = parser.parse_args() site = mwbot.getsite('bsp_fixer.py', args) handle(site, args.index)