# --- tail of a truncated function (its `def` and enclosing loop sit above
# this chunk; indentation reconstructed from a collapsed one-line source) ---
# Appends a wiki link to `out`, optionally paired with an English-wiki "^"
# link, and stops once `lim` links have been collected.
if not pageexist[i][1]:
    if enlink:
        out.append(u"[[%s]][[%s|^]]" % (link, enlink))
    else:
        out.append(u"[[%s]]" % link)
    count += 1
    if count == lim:
        break
return out


def main():
    """Rebuild the generated link section of the page named by conf.pagewrite.

    Collects link lines via getlink() (presumably defined above — confirm),
    replaces the previously generated block with `dummytext`, then expands
    `dummytext` into HTML-comment-wrapped link lines.
    """
    pagewrite = pywikibot.Page(site, conf.pagewrite)
    content = pagewrite.get()
    s = []
    #s += getlink(conf.wpvital, 1, reqen=True)
    s += getlink(conf.wpreq, 4, reqen=True)
    #s += getlink(conf.wpbio, 2, reqen=True)
    # patoldlink matches the old generated block; dummytext is its placeholder.
    content = patoldlink.sub(dummytext + u"\n", content)
    content = content.replace(dummytext, u"\n".join(
        map(lambda x: u"--> " + x + u" <!--", s)))
    pagewrite.put(content, conf.summary)


# Standard bot entry boilerplate shared by these scripts.
args, site, conf = wp.pre(7, lock=True, main=__name__)
try:
    glob()
    wp.run(main)
except:
    wp.posterror()
else:
    wp.post()
# --- tail of a truncated monitoring loop: the opening of this call and the
# enclosing for-loop are above this chunk (indentation reconstructed) ---
    (i, ab["user"].ljust(16), ab["timestamp"]))
userobj = wp.User(ab["user"])
# Established users (5000+ edits) are never flagged.
if userobj.editCount() >= 5000:
    continue
# user[<name>][<filter id i>] holds a deque of recent hit timestamps.
if userobj.name() not in user:
    user[userobj.name()] = {}
if i not in user[userobj.name()]:
    user[userobj.name()][i] = deque()
deq = user[userobj.name()][i]
deq.append(pywikibot.Timestamp.fromISOformat(ab["timestamp"]))
now = site.getcurrenttime()
# Evict timestamps older than the sliding window of checkDuration seconds.
# NOTE(review): timedelta `.seconds` ignores the `.days` component — for
# windows of a day or longer this keeps stale entries; confirm
# checkDuration < 86400 or switch to `.total_seconds()`.
while deq and ((now - deq[0]).seconds >= data["checkDuration"]):
    deq.popleft()
pywikibot.output(list(deq))
# Threshold reached within the window: act, then reset the window.
if len(deq) >= data["threshold"]:
    process(userobj, data, ab)
    deq.clear()

# --- the lines below appear to belong to the enclosing polling loop;
# nesting reconstructed, confirm against the original file ---
if delta:
    break
ltime.sleep(60)
# Re-poll from at most 120 seconds in the past to avoid missing entries.
start = max(start, site.getcurrenttime() - ltime.td(seconds=120))


# Standard bot entry boilerplate (continuous mode: wp.run keeps polling).
args, site, conf = wp.pre(2, lock=False, continuous=True, main=__name__)
try:
    glob()
    wp.run(main)
except:
    wp.posterror()
else:
    wp.post()
# --- tail of a truncated driver function (its `def` and the `if` branch
# matching this `else` are above this chunk; indentation reconstructed) ---
    partial = True
else:
    # NOTE(review): Python 2 semantics — keys() returns a list that is
    # mutated by index below; under Python 3 this needs list(config.keys()).
    updateList = config.keys()
# Normalize every entry to a wp.Page object.
for ind, page in enumerate(updateList):
    if not isinstance(page, pywikibot.Page):
        updateList[ind] = wp.Page(page)
updateList = list(site.preloadpages(updateList))
sources = [wp.Page(value['source']) for value in config.values()]
if not partial:
    out = []
    # Preload source pages batched per wiki site.
    # NOTE(review): itertools.groupby only groups *adjacent* items and
    # `sources` is not sorted by site here, so one site may yield several
    # batches — harmless but confirm that is intended.
    for psite, subgroup in itertools.groupby(sources, lambda x: x.site):
        out += list(psite.preloadpages(subgroup))
    sources = out
# Map each source page to itself so process() can look up the preloaded copy.
sources = {m: m for m in sources}
for page in updateList:
    process(page, config[page], sources)  # normalize page's name


# Standard bot entry boilerplate shared by these scripts.
args, site, conf = wp.pre(-2, main=__name__, lock=True)
try:
    glob()
    wp.run(main)
except:
    wp.posterror()
else:
    wp.post()
# --- tail of a truncated function (its `def` sits above this chunk) ---
# Disabled page-protection call, kept as a no-op triple-quoted string.
""" page.protect(u"โรบอต: ย้อนเนื้อหาไปมาจำนวนมาก", locktype="edit", duration={"hours": 5}, level="autoconfirmed") """
stitle.clear()


def main():
    """Poll recent changes (last hour, article namespace, no bots/redirects)
    every 60 seconds and run check() on each edit or new page."""
    gen = lrepeat.repeat(site, site.recentchanges, lambda x: x["revid"], 60,
                         showRedirects=False, changetype=["edit", "new"],
                         showBot=False, namespaces=[0],
                         start=site.getcurrenttime() - ltime.td(hours=1))
    for rev in gen:
        try:
            check(rev)
        except:
            # Log and keep monitoring; one bad revision must not kill the loop.
            wp.error()
            pass


if __name__ == "__main__":
    args, site, conf = wp.pre("lock page")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()

# NOTE(review): stray expression below — `a`, `b`, `c` are undefined, so this
# raises NameError when the module runs; almost certainly leftover junk that
# should be deleted.
a/b < c
import os
import urllib


def main():
    """Review unused non-free files and interactively delete them.

    For every unused file page whose wikitext carries a fair-use template
    alias, the page is opened in Chrome for inspection; any reply other
    than 'n' at the prompt deletes the file.
    """
    aliases = [u"กล่องข้อมูล ภาพชอบธรรม", "nonfreeimage", "Nonfreeimage",
               "Non-free use rationale", "non-free use rationale",
               u"กล่องข้อมูล ไฟล์ชอบธรรม"]
    for filepage in site.unusedfiles():
        if not filepage.exists():
            continue
        wikitext = filepage.get()
        # First alias found in the wikitext, or None when none matches.
        hit = next((alias for alias in aliases if alias in wikitext), None)
        if hit is None:
            continue
        os.system(("open /Applications/Google\ Chrome.app/"
                   " http://th.wikipedia.org/wiki/{}").format(
                       urllib.quote(filepage.title().encode('utf-8'))))
        # Anything except an explicit 'n' confirms the deletion.
        if raw_input() != 'n':
            filepage.delete(reason=u"โรบอต: ไฟล์ชอบธรรมไม่มีการใช้งาน",
                            prompt=False)


if __name__ == "__main__":
    args, site, conf = wp.pre(12)
    try:
        main()
    except:
        wp.posterror()
    else:
        wp.post()
# Tail of the preceding definition (its `def` sits above this chunk).
pass


def main():
    """Experimental scratch code for detecting unused templates."""
    # NOTE(review): leftovers — `self`, `text`, `comment` and `token` are
    # undefined inside this module-level function, so building this Request
    # raises NameError; the request is also never submitted. The real logic
    # appears to be the commented-out loop below.
    r = api.Request(site=site, action="query", list="allfileusages",
                    title=self.title(), appendtext=text, summary=comment,
                    token=token)
    '''
    for page in site.allpages(namespace=10):
        has = False
        for t in page.embeddedin():
            has = True
            break
        if not has:
            if "/" in page.title() and "/doc" not in page.title():
                print page.title()
                """
                if "/doc" in page.title():
                    page.delete(reason=u"โรบอต: หน้าเปลี่ยนทางไม่จำเป็น",
                                prompt=False)
                """
    '''


if __name__ == "__main__":
    args, site, conf = wp.pre("detect unused templates")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()
# --- tail of a truncated dump loop (the enclosing def/for and the file
# handle `f` are above this chunk; indentation reconstructed) ---
# Write the value for column `i` (removing it from `dic`), or an empty cell.
if i in dic:
    f.write(dic.pop(i).encode('utf-8') + '\t')
else:
    f.write('\t')
# Any keys left over after the ordered columns are dumped as key=value pairs.
for i in dic:
    f.write(i.encode('utf-8') + '=' + dic[i].encode('utf-8') + ' ')
f.write('\n')

# Disabled scratch code kept as a no-op triple-quoted string.
"""
for page in site.recentchanges(reverse=True):
    #print page
    print page
"""
# Scratch edit-API test against English Wikipedia (Python 2 print statements).
site = pywikibot.Site('en')
page = pywikibot.Page(site, 'A')
print api.Request(
    site=site, action="edit", token=site.token(page, "edit"),
    title=u"On2 gauge", appendtext=""
).submit()
# NOTE(review): this triple-quote is unbalanced in the chunk as seen — it
# turns everything below into a string literal. Its partner was probably
# lost; confirm against the original file and remove or re-pair it.
"""
args, site, conf = wp.pre(12, main=__name__)
try:
    wp.run(main)
except:
    wp.posterror()
else:
    wp.post()
import wp
import pywikibot
from wp import lre


def glob():
    pass


def main():
    """Retarget wikilinks: rewrite links to args[0] so they point at args[1].

    Expects exactly two command-line arguments (old page title, new link
    target). Every page linking to the old page has its ``[[old title``
    prefix rewritten to ``[[new target``; underscores in the new target are
    shown as spaces, per wiki convention.
    """
    if len(args) != 2:
        return
    pagemain = wp.Page(wp.toutf(args[0]))
    newlink = wp.toutf(args[1])
    pywikibot.output("old: " + pagemain.title())
    pywikibot.output("new: " + newlink)
    # Pattern and replacement are loop-invariant — build them once.
    pattern = r"\[\[" + lre.escape(pagemain.title())
    replacement = "[[" + newlink.replace("_", " ")
    for page in pagemain.backlinks(content=True):
        pywikibot.output("processing " + page.title())
        txt = page.get()
        newtxt = lre.sub(pattern, replacement, txt)
        if newtxt == txt:
            # Nothing matched on this page — skip the pointless save.
            continue
        page.put(newtxt, u"ย้ายลิงก์ไปหน้าใหม่")


if __name__ == "__main__":
    args, site, conf = wp.pre("move links")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        # Consistent with the sibling scripts: wp.post() only on success
        # (previously it ran unconditionally, even after wp.posterror()).
        wp.post()
__version__ = "2.0.1"
__author__ = "Sorawee Porncharoenwase"

import init
import wp
import pywikibot
from wp import lcleaner


def glob():
    pass


def main():
    """Clean a wiki page and/or a raw text snippet with lcleaner.

    Argument `page` names a page to clean and save; argument `txt` supplies
    raw text whose cleaned form is printed instead of saved.
    """
    target = wp.handlearg("page", args)
    if target:
        target_page = wp.Page(target)
        cleaned = lcleaner.clean(target_page.get())
        target_page.put(cleaned, conf.summary)
    raw = wp.handlearg("txt", args)
    if raw:
        pywikibot.output(lcleaner.clean(raw))


if __name__ == "__main__":
    args, site, conf = wp.pre(u"clean articles")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()
# --- tail of a truncated main() (its `def` sits above this chunk;
# indentation reconstructed from a collapsed one-line source) ---
prevday = pywikibot.Timestamp.today().day
# Poll recent changes every 60 seconds and check each edit/new page.
for rev in lrepeat.repeat(site, site.recentchanges, lambda x: x["revid"], 60,
                          showRedirects=False, showBot=False,
                          changetype=["edit", "new"],
                          namespaces=conf.namespaces):
    try:
        check(rev)
    except:
        wp.error()
    # When the date rolls over onto a day-of-month ≡ 1 (mod 3) — i.e.
    # roughly every three days — flush the accumulated notifications.
    if ((prevday != pywikibot.Timestamp.today().day) and
            (pywikibot.Timestamp.today().day % 3 == 1)):
        try:
            flush()
        except:
            wp.error()
        prevday = pywikibot.Timestamp.today().day


if __name__ == "__main__":
    # NOTE(review): "disambigous" is a typo, but the string is a runtime
    # label passed to wp.pre — left unchanged here.
    args, site, conf = wp.pre("notify linking to disambigous page", lock=True)
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()
# -*- coding: utf-8 -*- """example""" __version__ = "1.0.0" __author__ = "Sorawee Porncharoenwase" import init import wp import pywikibot def glob(): pass def main(): pass if __name__ == "__main__": args, site, conf = wp.pre("example", lock=True) try: glob() main() except: wp.posterror() else: wp.post()
# --- tail of a truncated DYK-check function (its `def`, plus `page`, `now`,
# `dic` and resgen(), are above this chunk; indentation reconstructed) ---
# Oldest revision of the article; indexing suggests a tuple-like record
# where [1] is the ISO timestamp and [2] the creator's username — confirm.
creator = page.getVersionHistory(reverseOrder=True, total=1)[0]
tscreate = pywikibot.Timestamp.fromISOformat(creator[1])
dic["create"] = {}
dic["create"]["text"] = u"สร้างบทความ"
# Criterion passes when the article was created within the last 14 days.
dic["create"]["result"] = resgen((now - tscreate).days <= 14)
dic["create"]["value"] = (u"บทความนี้สร้างโดย %s "
                          u"เมื่อ %s (%d วันที่แล้ว)"
                          % (creator[2],
                             tscreate.strftime("%Y-%m-%d %H:%M:%S"),
                             (now - tscreate).days))
# If either the "create" or "oldlen" criterion passes, downgrade the other
# one's failure to "normal" (the criteria are alternatives, not both required).
if ((dic["create"]["result"] == resgen(True)) or
        (dic["oldlen"]["result"] == resgen(True))):
    if dic["create"]["result"] == resgen(False):
        dic["create"]["result"] = "normal"
    if dic["oldlen"]["result"] == resgen(False):
        dic["oldlen"]["result"] = "normal"
# Emit the whole report as JSON (Python 2 print statement).
print json.dumps(dic)


if __name__ == "__main__":
    args, site, conf = wp.pre("DYK Checker")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()
#!/usr/bin/python # -*- coding: utf-8 -*- """example""" __version__ = "1.0.0" __author__ = "Sorawee Porncharoenwase" import init import wp import pywikibot from wp import lthread def glob(): pass def main(): if args: for page in wp.Category(wp.toutf(args[0])).articles(content=True): page.save("null edit", async=True) if __name__ == "__main__": args, site, conf = wp.pre("null edit") try: glob() main() except: wp.posterror() else: wp.post()
# --- tail of a truncated fix() helper (its `def` sits above this chunk) ---
# Pages opting out via the "nofixbot" marker are returned untouched;
# otherwise the substitution pipeline is applied.
if "nofixbot" in s:
    return s
return subst.process(s)


def main():
    """Interactively apply the cleanup fixes to every page.

    Shows a diff for each changed page and saves only after the operator
    confirms with 'y'.
    """
    #tl = wp.Page(u"Template:บาเบล")
    for page in site.allpages(filterredir=False, content=True):
        #for page in tl.embeddedin(content=True):
        #for page in tl:
        #page = wp.Page(u"รายชื่อวัดในจังหวัดชัยนาท")
        pywikibot.output(">>>" + page.title())
        text = fix(page.get())
        if page.get() != text:
            pywikibot.showDiff(page.get(), text)
            if raw_input("...") == "y":
                try:
                    page.put(text, u"โรบอต: เก็บกวาด", async=True)
                except:
                    # Best-effort: a failed save must not stop the sweep.
                    wp.error()
                    pass


if __name__ == "__main__":
    args, site, conf = wp.pre(u"user-fixes")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()
# --- tail of a truncated process() worker (its `def` sits above this chunk).
# NOTE(review): `allo` is the module-level report accumulator used in main();
# confirm process() declares `global allo`, otherwise this += raises
# UnboundLocalError in the worker threads. ---
allo += output


def main():
    """Scan request pages and hand each matched entry to a thread pool,
    then publish the accumulated report."""
    pool = lthread.ThreadPool(30)
    gen = []
    page = wp.handlearg("page", args)
    if page is not None:
        gen = [wp.Page(page)]
    else:
        # Default: every user-namespace page under the configured prefix.
        gen = site.allpages(prefix=conf.title, content=True, namespace=2)
    for page in gen:
        for req in lre.pats["entry"].finditer(page.get()):
            pool.add_task(process, req.group(1), page.title())
    pool.wait_completion()
    #print allo
    # `global` mid-function is legal but unconventional — it applies to the
    # whole function; consider moving it to the top of main().
    global allo
    allo = '<source lang="python">\n' + allo + u'</source>{{คู่มือการใช้งาน}}'
    wp.Page(u'User:Nullzerobot/ปรับปรุงหน้าอัตโนมัติ/ปกติ').put(allo, u"ปรับปรุงหน้า")
    #for i in allpages:
    #    i.save(u"โรบอต: แปลงหน้า", async=True)


# Standard bot entry boilerplate shared by these scripts.
args, site, conf = wp.pre(-2, lock=True)
try:
    glob()
    wp.run(main)
except:
    wp.posterror()
else:
    wp.post()
# --- tail of a truncated main() producing a Wikidata P140 report (its `def`
# sits above this chunk; indentation reconstructed) ---
global site
# Switch from the wiki to its attached Wikidata repository.
site = site.data_repository()
prop = pywikibot.Page(site, "Property:P140")
#print prop
#print prop.get()
for page in prop.backlinks():
    try:
        # Only item pages (namespace 0, Q-prefixed titles).
        if page.namespace() == 0 and page.title().startswith("Q"):
            page = pywikibot.ItemPage(site, page.title())
            content = page.get()
            print ">>>", page.title()
            # For each P140 ("religion") claim, report the target's sitelink,
            # preferring Thai Wikipedia.
            for claim in content["claims"]["p140"]:
                target = claim.getTarget()
                links = target.get()["sitelinks"]
                if "thwiki" in links:
                    print "thwiki", links["thwiki"]
                else:
                    # NOTE(review): raises KeyError when the target has
                    # neither a thwiki nor an enwiki sitelink — swallowed by
                    # the bare except below, skipping the whole item.
                    print "enwiki", links["enwiki"]
    except:
        wp.error()


if __name__ == "__main__":
    args, site, conf = wp.pre(u"update top things in Wikipedia")
    try:
        glob()
        main()
    except:
        wp.posterror()
    else:
        wp.post()