def pre(taskid=-1, lock=None, sites=None, continuous=False, main=None):
    """
    Return argument list, site object, and configuration of the script.

    Also handles default arguments, generates a lockfile and halts the
    script if a lockfile already exists.

    @param taskid: identifier of the task, used only in log output
    @param lock: if true, refuse to run while a lockfile is present
    @param sites: unused here; kept for backward compatibility
    @param continuous: continuous tasks never use a lockfile
    @param main: __name__ of the calling module; "__main__" means the
        script was invoked directly (then we may sys.exit on lock)
    """
    import imp
    global info
    # None instead of a mutable [] default: a shared default list would
    # leak state between calls.
    if sites is None:
        sites = []
    info["main"] = main == "__main__"
    if continuous:
        lock = False
    pywikibot.handleArgs("-log")
    pywikibot.output("start task #%s at %s" % (taskid, getTime()))
    info["taskid"] = taskid
    info["lock"] = lock
    info["lockfile"] = simplifypath([os.environ["WPROBOT_DIR"], "tmp",
                                     info["basescript"] + ".lock"])
    info["continuous"] = continuous
    if os.path.exists(info["lockfile"]) and lock:
        error("lockfile found. unable to execute the script.")
        if info["main"]:
            pywikibot.stopme()
            sys.exit(ExitCode.LockFileError)
    # Touch the lockfile; the context manager guarantees the handle is
    # closed even if the write fails.
    with open(info["lockfile"], "w"):
        pass
    args = pywikibot.handleArgs()  # must be called before Site()
    site = pywikibot.Site()
    info["site"] = site
    confpath = simplifypath([os.environ["WPROBOT_DIR"], "conf",
                             info["basescript"]])
    module = imp.load_source("conf", confpath) \
        if os.path.exists(confpath) else None
    return args, site, module
def multiUpload():
    """Upload every selected GLAM object to Commons.

    Reads the selection from the POSTed form, uploads each file and
    collects per-file error messages; always tears down the pywikibot
    session state afterwards.  Renders results.html with the errors.
    """
    try:
        glam_name = flask.request.args.get('glam', None)
        categories = flask.session.get('categories')
        username = flask.session.get('username', None)
        f = flask.request.form
        ids = f.getlist('selected')
        glam_class = utils.get_glam_class(glam_list, glam_name)
        error_msg_list = []
        for identifier in ids:
            obj = glam_class(identifier)
            # identity check instead of "not obj == None"
            if obj is not None:
                wiki_filename, wikitext, image_url = \
                    obj.generate_image_information(categories)
                try:
                    upload_file(image_url, wikitext, wiki_filename,
                                username, glam_name)
                except Exception as e:
                    # best effort: remember the failure, keep uploading
                    error_msg_list.append(str(e))
    finally:
        # cleaning up pywikibot
        pywikibot.stopme()
        pywikibot.config.authenticate.clear()
        pywikibot.config.usernames['commons'].clear()
        pywikibot._sites.clear()
    return flask.render_template('results.html', username=username,
                                 duplicate_list=error_msg_list)
def main(): fapage = pywikibot.Page(faSite, u'ویکیپدیا:درخواست انتقال رده') try: text = fapage.get() page_history = fapage.getVersionHistory() Last_user = page_history[0][2] except pywikibot.IsRedirectPage: fapage = fapage.getRedirectTarget() try: text = fapage.get() page_history = fapage.getVersionHistory() Last_user = page_history[0][2] except: #pywikibot.output(u"requested page didn't find!") pywikibot.stopme() sys.exit() except: #pywikibot.output(u"requested page didn't find!") pywikibot.stopme() sys.exit() if Last_user != u'Dexbot' and check_user(Last_user): fapage.put(u'{{/بالا}}', u'ربات:انتقال رده انجام شد!') runn(text, Last_user) res = {'result': 'Finished'} print json.dumps(res) else: res = { 'result': 'Not done. User is not allowed, less than 3000 edits are made by last editing user' } print json.dumps(res)
def main(): fapage = pywikibot.Page(faSite, u'ویکیپدیا:درخواست انتقال رده') try: text = fapage.get() page_history = fapage.getVersionHistory() Last_user = page_history[0][2] except pywikibot.IsRedirectPage: fapage = fapage.getRedirectTarget() try: text = fapage.get() page_history = fapage.getVersionHistory() Last_user = page_history[0][2] except: #pywikibot.output(u"requested page didn't find!") pywikibot.stopme() sys.exit() except: #pywikibot.output(u"requested page didn't find!") pywikibot.stopme() sys.exit() if Last_user != u'Dexbot' and check_user(Last_user): fapage.put(u'{{/بالا}}', u'ربات:انتقال رده انجام شد!') runn(text, Last_user) res = {'result': 'Finished'} print json.dumps(res) else: res = { 'result': 'Not done. User is not allowed, less than 3000 edits are made by last editing user'} print json.dumps(res)
def main():
    """Instantiate the bot and run it until it finishes or is quit."""
    import pywikibot
    from include.bot import Bot
    try:
        Bot().run()
    except (pywikibot.bot.QuitKeyboardInterrupt, KeyboardInterrupt):
        # announce the shutdown, then stop pywikibot; a second CTRL+C
        # during shutdown is ignored
        pywikibot.output('\r\n\03{lightyellow}Quitting\03{default}')
        try:
            pywikibot.stopme()
        except KeyboardInterrupt:
            pass
def post(unlock=True):
    """
    Remove the throttle file, and the lockfile unless the task holds a
    lock and *unlock* is False.
    """
    keep_lockfile = info["lock"] and not unlock
    if not keep_lockfile:
        try:
            os.remove(info["lockfile"])
        except OSError:
            error("unable to remove lockfile.")
    pywikibot.output("stop task at " + getTime())
    pywikibot.stopme()
def main(*args):
    """Reject leftover CLI arguments, then run CategoryRedirectBot.

    pywikibot.stopme() is guaranteed to run even if the bot raises.
    """
    global bot
    try:
        leftover = pywikibot.handleArgs(*args)
        if leftover:
            if len(leftover) == 1:
                raise RuntimeError('Unrecognized argument "%s"' % leftover[0])
            raise RuntimeError('Unrecognized arguments: '
                               + " ".join(('"%s"' % arg)
                                          for arg in leftover))
        bot = CategoryRedirectBot()
        bot.run()
    finally:
        pywikibot.stopme()
def IRCcleanup(): while 1: try: bot = IRC_RC_Bot() bot.start() except KeyboardInterrupt: print "Najanona tamin'ny alalan'i CTRL+C" break except Exception: time.sleep(10) del bot continue wikipedia.stopme()
def write_results(winner1, winner2, winner3,
                  pagename='Utente:CristianCantoro', summary='Test'):
    """Write the three winners to *pagename* on it.wikisource."""
    target = pywikibot.Page(pywikibot.Site('it', 'wikisource'), pagename)
    BasicBot(summary).run(page=target, winner1=winner1,
                          winner2=winner2, winner3=winner3)
    pywikibot.stopme()
def run(self):
    """Run the bot.

    Endless loop: process the blocked-user lists, then block on the RC
    stream until a new block event or a relevant edit shows up, then
    repeat.  Never returns normally.
    """
    starttime = time()
    # recent-changes stream; 60 s timeout keeps the iterator live
    rc_listener = site_rc_listener(self.site, timeout=60)
    while True:
        pywikibot.output(Timestamp.now().strftime(">> %H:%M:%S: "))
        self.read_lists()
        try:
            self.markBlockedusers(self.loadBlockedUsers())
            self.contactDefendants(bootmode=self.start)
        except pywikibot.EditConflict:
            pywikibot.output("Edit conflict found, try again.")
            continue  # try again and skip waittime
        except pywikibot.PageNotSaved:
            pywikibot.output("Page not saved, try again.")
            continue  # try again and skip waittime
        # wait for new block entry
        print()
        now = time()
        pywikibot.stopme()
        for i, entry in enumerate(rc_listener):
            if i % 25 == 0:
                # redraw the waiting indicator every 25 events
                print('\r', ' ' * 50, '\rWaiting for events', end='')
            if entry['type'] == 'log' and \
               entry['log_type'] == 'block' and \
               entry['log_action'] in ('block', 'reblock'):
                pywikibot.output('\nFound a new blocking event '
                                 'by user "%s" for user "%s"'
                                 % (entry['user'], entry['title']))
                break
            if entry['type'] == 'edit' and \
               not entry['bot'] and \
               entry['title'] == self.vmPageName:
                pywikibot.output('\nFound a new edit by user "%s"'
                                 % entry['user'])
                break
            if not entry['bot']:
                print('.', end='')
        print('\n')
        # time spent waiting does not count against the opt-out list age
        self.optOutListAge += time() - now
        # read older entries again after ~4 minutes
        if time() - starttime > 250:
            starttime = time()
            self.reset_timestamp()
        self.start = False
        self.total = 10
def main(*args):
    """Print the title of every page yielded by the generator options."""
    try:
        genFactory = GeneratorFactory()
        for arg in pywikibot.handleArgs(*args):
            genFactory.handleArg(arg)
        gen = genFactory.getCombinedGenerator()
        if not gen:
            # no generator options given: explain usage instead
            pywikibot.showHelp()
        else:
            for page in gen:
                pywikibot.stdout(page.title())
    except Exception:
        pywikibot.error("Fatal error", exc_info=True)
    finally:
        pywikibot.stopme()
def main(): wikipedia.stopme() (wikipedia.config).put_throttle = int(1) timeshift=3 bot = Wikilister() while(1): t = list(time.gmtime()) cond = (not (t[3]+timeshift)%6) and (t[4]==0) if cond: bot.run('Wikibolana','wiktionary') bot.run('Wikipedia','wikipedia') time.sleep(120) else: print "Fanavaozana isaky ny adin'ny 6" print "Miandry ny fotoana tokony hamaozana ny pejy (ora %2d:%2d) (GMT+%d)"%((t[3]+timeshift),t[4],(timeshift)) time.sleep(30)
def main():
    """Refresh subscriber/view counts in the corresponding articles."""
    print(__doc__)
    # Fetch statistics for every channel listed in
    # SubscriberCountAndViewCountList, then push them into the articles.
    channel_data = getAllDataOfSubsAndViewsCount()
    for entry in channel_data:
        IEBLogger.info("Current article: " + entry["ruWikipediaArticleTitle"])
        updateViewsAndSubscribersCount(entry, site)
    IEBLogger.debug("Done! Press <ENTER> to exit...")
    input()  # keep the console open until the user confirms
    pywikibot.stopme()
    return 0
def main(*args):
    """Print a numbered list of every page the generator options yield.

    @param args: command line arguments
    """
    try:
        gen = None
        genFactory = GeneratorFactory()
        for arg in pywikibot.handleArgs(*args):
            genFactory.handleArg(arg)
        gen = genFactory.getCombinedGenerator()
        if gen:
            # enumerate() replaces the hand-maintained counter
            for i, page in enumerate(gen, start=1):
                pywikibot.stdout("%4d: %s" % (i, page.title()))
        else:
            pywikibot.showHelp()
    except Exception:
        pywikibot.error("Fatal error", exc_info=True)
    finally:
        pywikibot.stopme()
def write_results(winner1, winner2, winner3,
                  pagename='Utente:CristianCantoro', summary='Test'):
    """Publish the three winners on *pagename* at it.wikisource."""
    source_site = pywikibot.Site('it', 'wikisource')
    page = pywikibot.Page(source_site, pagename)
    runner = BasicBot(summary)
    runner.run(page=page,
               winner1=winner1,
               winner2=winner2,
               winner3=winner3)
    pywikibot.stopme()
def main(self, url, filename, date, license_, filedesc, creator, username, oauth):
    """Main worker code.

    Downloads *url*, builds the file description wikitext and uploads it
    to Commons under *filename* using the caller's OAuth credentials.
    Returns a JSON-serializable dict describing the outcome; pywikibot
    session state is always torn down in the finally block.  (Python 2.)
    """
    stats = Stats()

    def statuscallback(text, percent):
        # forward progress updates to the task-wide status reporter
        statuscallback_base(self, stats, text, percent)

    path = prepare_upload(self, url, statuscallback)
    try:
        statuscallback('Configuring Pywikibot...', -1)
        pywikibot.config.authenticate[pwb_site] = \
            (consumer_key, consumer_secret) + tuple(oauth)
        pywikibot.Site(user=username).login()
        wikitext = \
            build_wikitext(filedesc, date, creator, license_)
        statuscallback('Uploading...', -1)
        filename, wikifileurl = upload.upload(path, filename, url,
                                              http_host, wikitext,
                                              username, statuscallback,
                                              errorcallback)
        if not wikifileurl:
            errorcallback('Upload failed!')
    except NeedServerSideUpload as e:
        # json serializer cannot properly serialize an exception
        # without losing data, so we change the exception into a dict.
        return {'type': 'ssu', 'hashsum': e.hashsum, 'url': e.url}
    except pywikibot.Error:  # T124922 workaround
        # re-raise as TaskError, preserving the original traceback
        # (py2 three-argument raise)
        exc_info = sys.exc_info()
        raise TaskError(
            (u'pywikibot.Error: %s: %s' % (exc_info[0].__name__, exc_info[1])
             ).encode('utf-8')), None, exc_info[2]
    else:
        statuscallback('Done!', 100)
        return {'type': 'done', 'filename': filename, 'url': wikifileurl}
    finally:
        statuscallback('Cleaning up...', -1)
        pywikibot.stopme()
        pywikibot.config.authenticate.clear()
        pywikibot.config.usernames['commons'].clear()
        pywikibot._sites.clear()
def main():
    """Run some tests."""
    from suggestbot import SuggestBot
    handler = WikiProjectHandler(SuggestBot())
    logging.info(u"instantiated WikiProjectHandler and SuggestBot objects, testing request handling...")
    try:
        handler.process_requests()
    finally:
        # always release the pywikibot throttle
        pywikibot.stopme()
def main():
    """Start the CommonsDelinker, optionally replaying the deletion log.

    With '-since <timestamp>' the bot re-reads the deletion log from
    that moment.  Runs until the bot stops; always flushes stdio and
    calls pywikibot.stopme().  (Python 2.)
    """
    global CD
    output(u'Running ' + __version__)
    CD = CommonsDelinker()
    output(u'This bot runs from: ' + str(CD.site))
    # keep the regex cache tiny; the bot compiles many one-off patterns
    re._MAXCACHE = 4
    args = pywikibot.handle_args()
    if '-since' in args:
        # NOTE: Untested
        ts_format = '%Y-%m-%d %H:%M:%S'
        try:
            since = time.strptime(
                args[args.index('-since') + 1], ts_format)
        except ValueError:
            # fallback: timestamp wrapped in [brackets]
            # NOTE(review): "'%s %s' %" applied to a single string looks
            # like it needs a two-element tuple (the following argument
            # as well) — confirm against the original source.
            if args[args.index('-since') + 1][0] == '[' and \
                    len(args) != args.index('-since') + 2:
                since = time.strptime('%s %s' % \
                    args[args.index('-since') + 1], '[%s]' % ts_format)
            else:
                raise ValueError('Incorrect time format!')
        output(u'Reading deletion log since [%s]' %\
            time.strftime(ts_format, since))
        CD.last_check = time.mktime(since)
    try:
        try:
            CD.start()
        except Exception, e:
            # SystemExit/KeyboardInterrupt fall through silently
            if type(e) not in (SystemExit, KeyboardInterrupt):
                output('An exception occured in the main thread!', False)
                traceback.print_exc(file = sys.stderr)
                threadpool.terminate()
    finally:
        output(u'Stopping CommonsDelinker')
        pywikibot.stopme()
        # Flush the standard streams
        sys.stdout.flush()
        sys.stderr.flush()
def main():
    """Run the Replacer until it exits; always stop pywikibot.  (Python 2.)

    Critical errors are printed with a traceback; SystemExit and
    KeyboardInterrupt propagate to the caller.
    """
    global R
    import sys, traceback
    pywikibot.handleArgs()
    output(u'Running ' + __version__)
    try:
        try:
            # FIXME: Add support for single-process replacer.
            R = Replacer()
            output(u'This bot runs from: ' + str(R.site))
            R.start()
        except (SystemExit, KeyboardInterrupt):
            # deliberate shutdowns are not "critical errors"
            raise
        except Exception, e:
            output('A critical error has occured! Aborting!')
            traceback.print_exc(file = sys.stderr)
    finally:
        output('Exitting replacer')
        pywikibot.stopme()
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    # The docstring always documented an args parameter; accept and
    # forward it (calling main() with no arguments behaves as before).
    options = {}
    for arg in pywikibot.handle_args(args):
        # strip the leading '-'; every recognized option is a flag
        options[arg[1:]] = True
    bot = AFDNoticeBot(**options)
    while True:
        bot.run()
        pywikibot.output('Waiting 300 seconds...\n')
        pywikibot.stopme()
        try:
            time.sleep(300)
        except KeyboardInterrupt:
            bot.exit()
            break
def stopme(self):
    """Delegate to pywikibot.stopme()."""
    pywikibot.stopme()
def run(self):
    """Run the bot.

    Endless loop over the new-user log: skip blocked users, bots and
    bad names, welcome users with enough edits, periodically dump the
    welcome log, then either sleep and rerun (recursive mode) or stop.
    """
    while True:
        welcomed_count = 0
        for users in self.parseNewUserLog():
            if users.isBlocked():
                showStatus(3)
                pywikibot.output('%s has been blocked!' % users.username)
                continue
            if 'bot' in users.groups():
                showStatus(3)
                pywikibot.output('%s is a bot!' % users.username)
                continue
            if 'bot' in users.username.lower():
                showStatus(3)
                pywikibot.output(u'%s might be a global bot!'
                                 % users.username)
                continue
            if users.editCount() >= globalvar.attachEditCount:
                showStatus(2)
                pywikibot.output(u'%s has enough edits to be welcomed.'
                                 % users.username)
                ustp = users.getUserTalkPage()
                if ustp.exists():
                    showStatus(3)
                    pywikibot.output(u'%s has been already welcomed.'
                                     % users.username)
                    continue
                else:
                    if self.badNameFilter(users.username):
                        self.reportBadAccount(users.username)
                        continue
                    welcome_text = self.welcome_text
                    # per-family quirks in how the signature is chosen
                    if globalvar.randomSign:
                        if self.site.family.name != 'wikinews':
                            welcome_text = (welcome_text
                                            % choice(self.defineSign()))
                        if self.site.family.name != 'wiktionary' or \
                           self.site.code != 'it':
                            welcome_text += timeselected
                    elif (self.site.family.name != 'wikinews' and
                          self.site.code != 'it'):
                        welcome_text = (welcome_text
                                        % globalvar.defaultSign)
                    final_text = i18n.translate(self.site,
                                                final_new_text_additions)
                    if final_text:
                        welcome_text += final_text
                    welcome_comment = i18n.twtranslate(
                        self.site, 'welcome-welcome')
                    try:
                        # append welcomed, welcome_count++
                        ustp.put(welcome_text, welcome_comment,
                                 minor=False)
                        welcomed_count += 1
                        self._totallyCount += 1
                        self.welcomed_users.append(users)
                    except pywikibot.EditConflict:
                        showStatus(4)
                        pywikibot.output(u'An edit conflict has occurred, '
                                         u'skipping this user.')
                    if globalvar.makeWelcomeLog and \
                       i18n.translate(self.site, logbook):
                        showStatus(5)
                        if welcomed_count == 1:
                            pywikibot.output(u'One user has been welcomed.')
                        elif welcomed_count == 0:
                            pywikibot.output(u'No users have been welcomed.')
                        else:
                            pywikibot.output(u'%s users have been welcomed.'
                                             % welcomed_count)
                        if welcomed_count >= globalvar.dumpToLog:
                            if self.makelogpage(self.welcomed_users):
                                self.welcomed_users = []
                                welcomed_count = 0
                            else:
                                continue
                    # If we haven't to report, do nothing.
            else:
                if users.editCount() == 0:
                    if not globalvar.quiet:
                        showStatus(1)
                        pywikibot.output(u'%s has no contributions.'
                                         % users.username)
                else:
                    showStatus(1)
                    pywikibot.output(u'%s has only %d contributions.'
                                     % (users.username, users.editCount()))
                # That user mustn't be welcomed.
                continue
        # flush any still-unlogged welcomes for this pass
        if globalvar.makeWelcomeLog and i18n.translate(
                self.site, logbook) and welcomed_count > 0:
            showStatus()
            if welcomed_count == 1:
                pywikibot.output(u'Putting the log of the latest user...')
            else:
                pywikibot.output(
                    u'Putting the log of the latest %d users...'
                    % welcomed_count)
            if not self.makelogpage(self.welcomed_users):
                continue
            self.welcomed_users = []
        if hasattr(self, '_BAQueue'):
            showStatus()
            pywikibot.output("Putting bad name to report page....")
            self.reportBadAccount(None, final=True)
        try:
            if globalvar.recursive:
                showStatus()
                if locale.getlocale()[1]:
                    strfstr = time.strftime('%d %b %Y %H:%M:%S (UTC)',
                                            time.gmtime())
                    # py2-py3 compatibility
                    if not isinstance(strfstr, UnicodeType):
                        strfstr = strfstr.decode(locale.getlocale()[1])
                else:
                    strfstr = time.strftime(u"%d %b %Y %H:%M:%S (UTC)",
                                            time.gmtime())
                pywikibot.output(u'Sleeping %d seconds before rerun. %s'
                                 % (globalvar.timeRecur, strfstr))
                pywikibot.stopme()
                time.sleep(globalvar.timeRecur)
            else:
                raise KeyboardInterrupt
        except KeyboardInterrupt:
            break
def receiveData():
    """Handle the search/upload form.

    Logs the OAuth session into Commons, then: with a search string,
    renders a thumbnail gallery of matching identifiers; with only an
    identifier, uploads that object and renders the result; otherwise
    falls back to the index page.
    """
    glam_names = utils.get_glam_names(glam_list)
    print(glam_list)
    print("=======")
    username = flask.session.get('username', None)
    # configure pywikibot with the caller's OAuth tokens
    pywikibot.config.authenticate['commons.wikimedia.org'] = (
        app.config['CONSUMER_KEY'],
        app.config['CONSUMER_SECRET'],
        flask.session['access_token_key'],
        flask.session['access_token_secret'])
    pywikibot.config.usernames['commons']['commons'] = username
    pywikibot.Site('commons', 'commons', user=username).login()
    glam_name = flask.request.form['glam_name']
    searchstring = flask.request.form['searchstring']
    identifier = flask.request.form['unique_id']
    category1 = flask.request.form['categories']
    categories = []
    if category1:
        categories.append(category1)
    # get other categories if more than one category is given
    f = flask.request.form
    for key in f.keys():
        if 'category' in key and flask.request.form[key]:
            categories.append(flask.request.form[key])
    # if searchstring is non-empty
    if searchstring:
        # store the categories in the session to be accessed in /multiUpload
        if categories:
            flask.session['categories'] = categories
        try:
            # obtain the thumbs without instantiating any objects
            glam_class = utils.get_glam_class(glam_list, glam_name)
            ids = glam_class.search_to_identifiers(searchstring)
            image_list = []
            for id in ids:
                image_loc = glam_class.get_thumbnail(id)
                image_list.append(image_loc)
            prefix = glam_class.url_prefix
            return flask.render_template('image_gallery.html',
                                         glam_name=glam_class.name,
                                         uuid_list=ids,
                                         image_list=image_list,
                                         prefix=prefix,
                                         username=username)
        except Exception as e:
            return flask.render_template('error.html', error_msg=str(e))
    # if searchstring is empty but identifier is given
    elif identifier:
        # instantiate a proper GLAM class object which in turn instantiates
        # a GenericGLAM class object to form the wikitext
        glam_class = utils.get_glam_class(glam_list, glam_name)
        try:
            obj = glam_class(identifier)
            print(obj)
            wiki_filename, wikitext, image_url = obj.generate_image_information(
                categories)
            upload_file(image_url,
                        wikitext, wiki_filename, username, glam_name)
            return flask.render_template('results.html',
                                         glam_name=glam_name,
                                         unique_id=identifier,
                                         filename=wiki_filename)
        except Exception as e:
            return flask.render_template('error.html', error_msg=str(e))
        finally:
            # cleaning up pywikibot
            pywikibot.stopme()
            pywikibot.config.authenticate.clear()
            pywikibot.config.usernames['commons'].clear()
            pywikibot._sites.clear()
    else:
        return flask.render_template('index.html', username=username,
                                     glam_list=glam_names)
else: for n in nombre: yield [u'%s%s%s%s%s' %(t, verb[:-2].lower(), desinence, n, m), u"''%s %s ny bika %s %s ny matoanteny [[%s]].''" %(desinences[desinence], nombre[n], temps[t], modes[m], verb), u"Avy amin'ny tovona <i>%s</i>- mamaritra ny fitaona %s, ny fototeny [[%s]] ; ny tovana -<i>[[%s]]</i> izay mamaritra ny mpanao (%s) ; " %(t, temps[t], verb[:-2], desinence, desinences[desinence].lower()) + " ary ny tovana -<i>%s</i> mamaritra ny %s " %(n, nombre[n])] def genbatch(): pages = file('batch/voverbs.txt','r').readlines() out = open("data/vowords.txt","w") n = 0 for mot in pages: if len(mot) < 2: continue mot = mot.decode('utf8') for i in get_desinences(mot): outstr = i[0]+u"\n" out.write(outstr.encode('utf8')) print n out.close() try: main() #genbatch() #evaluate() except KeyboardInterrupt: wikipedia.stopme() finally: wikipedia.stopme()
def run(self):
    """Enter the Qt event loop; when it exits, stop pywikibot.  (Python 2.)"""
    self.Application.exec_()
    wikipedia.stopme()
    print "OK!"
filename = self.source if self.warnonly else self.dest with codecs.open(filename, 'r', 'utf-8') as g: lines = enumerate(g.readlines(), start=1) for i, line in lines: for w in warnings: if w[0] in line: pywikibot.warning('line {0}: {1}>>> {2}\n'.format( i, line, w[1])) def main(): """Process command line arguments and invoke bot.""" filename = None warnonly = False # Parse command line arguments for -help option for arg in pywikibot.handleArgs(): if arg.startswith('-warnonly'): warnonly = True elif not arg.startswith('-'): filename = arg else: pywikibot.warning(arg + ' is not supported') bot = ConvertBot(filename, warnonly) bot.run() if __name__ == "__main__": pywikibot.stopme() # we do not work on any site main()
def run(self):
    """Process the on-wiki request page into the database.  (Python 2.)

    Reads the work list page, adds new users and confirmations to the
    database, strips the processed requests from the wikitext, and saves
    the page with a summary built from what was done.
    """
    output(
        strftime("########## timestamp: %Y-%m-%d %H:%M:%S ############",
                 localtime()))
    db = wppb.Database(database=pb_db_config.db_name)
    generator = [pywikibot.Page(self._site, workList)]
    generator = pagegenerators.PreloadingGenerator(generator, step=1,
                                                   lookahead=1)
    page = u""
    # variables for concatenating a reasonable edit summary
    commentLongAdded = u""
    commentLongRefused = u""
    commentLongNewUsers = u""
    commentLongACKAlreadyIn = u""
    commentLongNewAlreadyIn = u""
    # for new users
    commentShortAdded = 0
    commentShortRefused = 0
    commentShortNewUsers = 0
    commentShortACKAlreadyIn = 0
    commentShortNewAlreadyIn = 0
    # for new users
    for page in generator:
        print u""
        # take last element of iterator
        rawText = page.get()
        newRawText = rawText
        if not page.canBeEdited():
            output(u"Seite gesperrt")
            pywikibot.stopme()
        newUsers, newACKs = divideIntoTasks(rawText)
        seenUsers = []
        for currentName, minutes, hours, day, month, year in newUsers:
            if currentName in seenUsers:
                output(u"überspringe %s weil schon enthalten" % currentName)
                continue
            timestamp = "%s-%02d-%02d %02d:%02d:00" % (year, month, day,
                                                       hours, minutes)
            output(u"füge %s hinzu.. " % currentName)
            entry = newUserReplRegex % re.escape(currentName)
            try:
                if not DONOTSAVEDB:
                    db.add_user(currentName, timestamp)
            except oursql.IntegrityError:
                # already in ... TODO change the edit comment
                output(u"user " + currentName + " already in")
            except oursql.CollatedWarningsError:
                # already in ... TODO change the edit comment
                output(u"user " + currentName + " already in")
            newRawText = textlib.replaceExcept(newRawText, entry, u"",
                                               [u"comment", u"nowiki"])
            # build comments for edit summary
            if commentLongNewUsers != u"":
                commentLongNewUsers = commentLongNewUsers + u", "
            commentLongNewUsers = commentLongNewUsers + (
                u"[[User:%s|%s]]" % (currentName, currentName))
            commentShortNewUsers = commentShortNewUsers + 1
        doNotRemoveWikiText = False
        for currentACK in newACKs:
            certifier, certified, comment, minutes, hours, day, month, \
                year = currentACK
            timestamp = "%s-%02d-%02d %02d:%02d:00" % (year, month, day,
                                                       hours, minutes)
            output(
                u"füge Bestätigung: %s >> %s (Kommentar: %s) am %s hinzu.."
                % (certifier, certified, comment, timestamp))
            entry = newACKReplRegex % (re.escape(certifier),
                                       re.escape(certified))
            try:
                addConfirmation(db, certifier, certified, comment, year,
                                month, day, hours, minutes)
            except oursql.PermissionsError:
                # WORKAROUND: we cannot be sure the confirmations really
                # arrived, so only remove them next time, when it is
                # certain!
                doNotRemoveWikiText = True
            newRawText = textlib.replaceExcept(newRawText, entry, u"",
                                               [u"comment", u"nowiki"])
            # build comments for edit summary
            if commentLongAdded != u"":
                commentLongAdded = commentLongAdded + u", "
            commentLongAdded = commentLongAdded + \
                u"[[User:%s|%s]] → [[User:%s|%s]]" % (
                    certifier, certifier, certified, certified)
            commentShortAdded = commentShortAdded + 1
        if doNotRemoveWikiText:
            output(
                "Bestaetigungen versucht hinzuzufuegen, aber wegen oursql.PermissionsError nicht von der WP-Seite entfernt!"
            )
            return
            # ende hier
        ##### edit comments
        editSummary = u""
        commentPreAdded = u" Bestätigungen: "
        commentPreNewUsers = u" neue(r) Benutzer: "
        commentPreACKAlreadyIn = u" Bestätigungen schon vorhanden: "
        commentPreNewAlreadyIn = u" Benutzer schon hinzugefügt: "
        commentLong = editSummary
        if commentLongAdded != u"":
            commentLong = commentLong + commentPreAdded + commentLongAdded
        if commentLongACKAlreadyIn != u"":
            commentLong = commentLong + commentPreACKAlreadyIn + \
                commentLongACKAlreadyIn
        if commentLongNewUsers != u"":
            commentLong = commentLong + commentPreNewUsers + \
                commentLongNewUsers
        if commentLongNewAlreadyIn != u"":
            commentLong = commentLong + commentPreNewAlreadyIn + \
                commentLongNewAlreadyIn
        commentShort = editSummary
        if commentShortAdded != 0:
            commentShort = commentShort + commentPreAdded + str(
                commentShortAdded)
        if commentShortACKAlreadyIn != 0:
            commentShort = commentShort + commentPreACKAlreadyIn + str(
                commentShortACKAlreadyIn)
        if commentShortNewUsers != 0:
            commentShort = commentShort + commentPreNewUsers + str(
                commentShortNewUsers)
        if commentShortNewAlreadyIn != 0:
            commentShort = commentShort + commentPreNewAlreadyIn + str(
                commentShortNewAlreadyIn)
        # prefer the long summary unless it exceeds the length limit
        if (len(commentLong) > 200):
            editSummary = commentShort
        else:
            editSummary = commentLong
        ## update wikipedia: delete requests
        if self.isForceMode or newRawText != rawText:
            output(u"Anfragen:")
            pywikibot.showDiff(rawText, newRawText)
            if not DONOTSAVE:
                page.put(newRawText,
                         u"In Datenbank übertragen: " + editSummary,
                         False, False, True)
                writeUserListToWikipedia(db, editSummary)
            output(u"Zusammenfassung: " + editSummary + u"\n")
        else:
            output(u"nichts zu tun")
        # write userlist 4 times per day
        if time.localtime()[3] in [3, 15] and time.localtime()[4] in [
                0, 1, 2, 3, 4, 5, 6, 7, 8, 9]:
            writeUserListToWikipedia(db, editSummary)
else: if page.namespace(): continue found = 1 if not found: if dluga: table += '\n|-\n| [[{0}]] || {1}'.format(tmp[0], int(tmp[1])) else: table += '\n|-\n| [[{0}]] || [{{{{fullurl:Specjalna:Linkujące|limit=500&from=0&target={1}}}}} {2}]'.format(tmp[0], tmp[0].replace(' ', '_'), int(tmp[1])) i += 1 table += '\n|-\n|}' with open('{0}output/frequencyProcessedTable.txt'.format(config.path['scripts']), encoding='utf-8', mode='w') as g: g.write(table) if dluga: outputPage = pwb.Page(site, 'Wikipedysta:AlkamidBot/listy/Najbardziej_potrzebne_-_długa_lista') else: outputPage = pwb.Page(site, 'Wikipedysta:AlkamidBot/listy/Najbardziej_potrzebne') outputPage.text = table outputPage.save(summary='aktualizacja') if __name__ == '__main__': try: frequencyListUpdate() finally: pwb.stopme()
def main():
    """Dispatch the category-translation bot from command line options.

    Supported modes: explicit pages (-page), English/Farsi category
    seeds (-encat/-facat), recent or new category streams (-recentcat,
    -newcat), a file of wikilinks (-newcatfile), or any standard
    pagegenerators option.  (Python 2.)
    """
    summary_commandline, gen, template = None, None, None
    namespaces, PageTitles, exceptions = [], [], []
    encat, newcatfile = '', ''
    autoText, autoTitle = False, False
    recentcat, newcat = False, False
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        if arg == '-autotitle':
            autoTitle = True
        elif arg == '-autotext':
            autoText = True
        elif arg.startswith('-page'):
            if len(arg) == 5:
                PageTitles.append(
                    pywikibot.input(u'Which page do you want to chage?'))
            else:
                PageTitles.append(arg[6:])
            break
        elif arg.startswith('-except:'):
            exceptions.append(arg[8:])
        elif arg.startswith('-template:'):
            template = arg[10:]
        elif arg.startswith('-facat:'):
            # strip any category prefix, then translate to the English name
            encat = arg[7:].replace(u'Category:', u'').replace(
                u'category:', u'').replace(u'رده:', u'')
            encat = englishdictionry(u'رده:' + encat, fa_site,
                                     en_site).replace(
                u'Category:', u'').replace(u'category:', u'')
            break
        elif arg.startswith('-encat:'):
            encat = arg[7:].replace(u'Category:', u'').replace(
                u'category:', u'').replace(u'رده:', u'')
            break
        elif arg.startswith('-newcatfile:'):
            newcatfile = arg[12:]
            break
        elif arg.startswith('-recentcat'):
            arg = arg.replace(':', '')
            if len(arg) == 10:
                genfa = pagegenerators.RecentchangesPageGenerator()
            else:
                genfa = pagegenerators.RecentchangesPageGenerator(
                    number=int(arg[10:]))
            genfa = pagegenerators.DuplicateFilterPageGenerator(genfa)
            # namespace 14 = categories
            genfa = pagegenerators.NamespaceFilterPageGenerator(genfa, [14])
            preloadingGen = pagegenerators.PreloadingGenerator(genfa, 60)
            recentcat = True
            break
        elif arg.startswith('-newcat'):
            arg = arg.replace(':', '')
            if len(arg) == 7:
                genfa = pagegenerators.NewpagesPageGenerator(
                    step=100, namespaces=14)
            else:
                genfa = pagegenerators.NewpagesPageGenerator(
                    step=int(arg[7:]), namespaces=14)
            preloadingGen = pagegenerators.PreloadingGenerator(genfa, 60)
            newcat = True
            break
        elif arg.startswith('-namespace:'):
            namespaces.append(int(arg[11:]))
        elif arg.startswith('-summary:'):
            pywikibot.setAction(arg[9:])
            summary_commandline = True
        else:
            generator = genFactory.handleArg(arg)
            if generator:
                gen = genFactory.getCombinedGenerator(gen)
    if encat != '':
        encatfalist, encatlists = encatlist(encat)
        if encatlists:
            for encat in encatlists:
                encat = englishdictionry(encat, en_site, fa_site)
                if encat:
                    run([encat])
        if encatfalist is not False:
            run(encatfalist)
    if PageTitles:
        pages = [pywikibot.Page(fa_site, PageTitle)
                 for PageTitle in PageTitles]
        gen = iter(pages)
    if recentcat:
        for workpage in preloadingGen:
            workpage = workpage.title()
            cat = pywikibot.Category(fa_site, workpage)
            gent = pagegenerators.CategorizedPageGenerator(cat)
            run(gent)
        pywikibot.stopme()
        sys.exit()
    if newcat:
        for workpage in preloadingGen:
            workpage = workpage.title()
            workpage = englishdictionry(workpage, fa_site, en_site)
            if workpage is not False:
                encatfalist, encatlists = encatlist(workpage)
                if encatlists:
                    for encat in encatlists:
                        encat = englishdictionry(encat, en_site, fa_site)
                        if encat:
                            run([encat])
                if encatfalist is not False:
                    run(encatfalist)
        pywikibot.stopme()
        sys.exit()
    if newcatfile:
        text2 = codecs.open(newcatfile, 'r', 'utf8')
        text = text2.read()
        linken = re.findall(ur'\[\[.*?\]\]', text, re.S)
        if linken:
            for workpage in linken:
                # bare title: strip brackets and any |label part
                workpage = workpage.split(u'|')[0].replace(
                    u'[[', u'').replace(u']]', u'').strip()
                workpage = englishdictionry(workpage, fa_site, en_site)
                if workpage is not False:
                    encatfalist, encatlists = encatlist(workpage)
                    workpage = englishdictionry(workpage, 'fa', 'en')
                    if encatfalist:
                        run(encatfalist)
        pywikibot.stopme()
        sys.exit()
    if not gen:
        pywikibot.stopme()
        sys.exit()
    if namespaces != []:
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
    preloadingGen = pagegenerators.PreloadingGenerator(gen, pageNumber=60)
    run(preloadingGen)
wikipedia.output( u'* Page ignorée (nom incorrect): [[%s:%s]]' % (namespace, title)) else: commandLogFile.write( u'* Page ignorée (namespace incorrect): [[%s:%s]]\n' % (namespace, title)) if verbose: wikipedia.output( u'* Page ignorée (namespace incorrect): [[%s:%s]]' % (namespace, title)) index = index + 1 if finishindex and (index >= finishindex): break commandLogFile.write(u'%d page(s) au total\n' % total) commandLogFile.write(u'%d page(s) en place\n' % allset) commandLogFile.write(u'%d page(s) traitées\n' % processed) commandLogFile.close() if __name__ == '__main__': try: main() except Exception, myexception: almalog.error(u'translation_move', u'%s %s' % (type(myexception), myexception.args)) raise finally: wikipedia.stopme()
def stopme(self):
    """Simply a call to pywikibot.stopme(), and usually used in the
    finally section of a try/finally clause.
    """
    pywikibot.stopme()
except: addon['noicon'] = False if addon['noicon']: addon['icon url'] = u"" else: try: addon['icon url'] = u""+addon['path']+'/'+data.assets.icon.string except: addon['icon url'] = u''+addon['path']+'/icon.png' addon['summary'] = re.sub("\[CR\]","\\n",addon['summary']) addon['description'] = re.sub("\[CR\]","\\n",addon['description']) return addon # Download addons.xml and return Soup xml class def importAddonXML(url): headers = {'User-Agent':'Kodi-AddonBot'} req = urllib2.Request(url, None, headers) page = urllib2.urlopen(req) if page.headers.get('Content-Type').find('gzip') >= 0 or page.headers.get('Content-Type').find('application/octet-stream') >= 0: d = zlib.decompressobj(16+zlib.MAX_WBITS) page = d.decompress(page.read()) return BeautifulStoneSoup(page) if __name__ == '__main__': try: UpdateAddons() finally: pywikibot.stopme()
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    add_cat = None
    gen = None
    # summary message
    edit_summary = u""

    # Array which will collect commandline parameters.
    # First element is original text, second element is replacement text.
    commandline_replacements = []
    # A list of 2-tuples of original text and replacement text.
    replacements = []
    # Don't edit pages which contain certain texts.
    exceptions = {
        'title': [],
        'text-contains': [],
        'inside': [],
        'inside-tags': [],
        'require-title': [],  # using a seperate requirements dict needs some
    }                         # major refactoring of code.

    # Should the elements of 'replacements' and 'exceptions' be interpreted
    # as regular expressions?
    regex = False

    # Predefined fixes from dictionary 'fixes' (see above).
    fixes_set = []

    # the dump's path, either absolute or relative, which will be used
    # if -xml flag is present
    xmlFilename = None
    useSql = False

    # will become True when the user presses a ('yes to all') or uses the
    # -always flag.
    acceptall = False

    # Will become True if the user inputs the commandline parameter -nocase
    caseInsensitive = False

    # Will become True if the user inputs the commandline parameter -dotall
    dotall = False

    # Will become True if the user inputs the commandline parameter -multiline
    multiline = False

    # Do all hits when they overlap
    allowoverlap = False

    # Do not recurse replacement
    recursive = False

    # Between a regex and another (using -fix) sleep some time (not to waste
    # too much CPU
    sleep = None

    # Request manual replacements even if replacements are already defined
    manual_input = False
    # Replacements loaded from a file
    replacement_file = None
    replacement_file_arg_misplaced = False

    # Read commandline parameters.
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()
    for arg in local_args:
        if genFactory.handleArg(arg):
            continue
        if arg == '-regex':
            regex = True
        elif arg.startswith('-xmlstart'):
            if len(arg) == 9:
                xmlStart = pywikibot.input(
                    u'Please enter the dumped article to start with:')
            else:
                xmlStart = arg[10:]
        elif arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = i18n.input('pywikibot-enter-xml-filename')
            else:
                xmlFilename = arg[5:]
        elif arg == '-sql':
            useSql = True
        elif arg.startswith('-excepttitle:'):
            exceptions['title'].append(arg[13:])
        elif arg.startswith('-requiretitle:'):
            exceptions['require-title'].append(arg[14:])
        elif arg.startswith('-excepttext:'):
            exceptions['text-contains'].append(arg[12:])
        elif arg.startswith('-exceptinside:'):
            exceptions['inside'].append(arg[14:])
        elif arg.startswith('-exceptinsidetag:'):
            exceptions['inside-tags'].append(arg[17:])
        elif arg.startswith('-fix:'):
            fixes_set += [arg[5:]]
        elif arg.startswith('-sleep:'):
            sleep = float(arg[7:])
        elif arg == '-always':
            acceptall = True
        elif arg == '-recursive':
            recursive = True
        elif arg == '-nocase':
            caseInsensitive = True
        elif arg == '-dotall':
            dotall = True
        elif arg == '-multiline':
            multiline = True
        elif arg.startswith('-addcat:'):
            add_cat = arg[8:]
        elif arg.startswith('-summary:'):
            edit_summary = arg[9:]
        elif arg.startswith('-allowoverlap'):
            allowoverlap = True
        elif arg.startswith('-manualinput'):
            manual_input = True
        elif arg.startswith('-replacementfile'):
            # A file argument in the middle of an old/new pair would silently
            # desynchronise the pair list; remember it and error out later.
            if len(commandline_replacements) % 2:
                replacement_file_arg_misplaced = True
            if arg == '-replacementfile':
                replacement_file = pywikibot.input(
                    u'Please enter the filename to read replacements from:')
            else:
                replacement_file = arg[len('-replacementfile:'):]
        else:
            commandline_replacements.append(arg)

    site = pywikibot.Site()

    if len(commandline_replacements) % 2:
        pywikibot.error('Incomplete command line pattern replacement pair.')
        return False

    if replacement_file_arg_misplaced:
        pywikibot.error(
            '-replacementfile used between a pattern replacement pair.')
        return False

    if replacement_file:
        try:
            with codecs.open(replacement_file, 'r', 'utf-8') as f:
                # strip newlines, but not other characters
                file_replacements = f.read().splitlines()
        except (IOError, OSError) as e:
            pywikibot.error(u'Error loading {0}: {1}'.format(
                replacement_file, e))
            return False

        if len(file_replacements) % 2:
            pywikibot.error(
                '{0} contains an incomplete pattern replacement pair.'.format(
                    replacement_file))
            return False

        # Strip BOM from first line.  Fixed: str.lstrip returns a NEW string
        # (the old call discarded its result, so the BOM was never removed);
        # also guard against an empty replacement file (IndexError).
        if file_replacements:
            file_replacements[0] = file_replacements[0].lstrip(u'\uFEFF')
        commandline_replacements.extend(file_replacements)

    if not (commandline_replacements or fixes_set) or manual_input:
        old = pywikibot.input(
            u'Please enter the text that should be replaced:')
        while old:
            new = pywikibot.input(u'Please enter the new text:')
            commandline_replacements += [old, new]
            old = pywikibot.input(
                'Please enter another text that should be replaced,'
                '\nor press Enter to start:')

    single_summary = None
    for i in range(0, len(commandline_replacements), 2):
        replacement = Replacement(commandline_replacements[i],
                                  commandline_replacements[i + 1])
        if not single_summary:
            single_summary = i18n.twtranslate(site, 'replace-replacing', {
                'description': ' (-%s +%s)' % (replacement.old,
                                               replacement.new)
            })
        replacements.append(replacement)

    if not edit_summary:
        if single_summary:
            pywikibot.output(u'The summary message for the command line '
                             'replacements will be something like: %s'
                             % single_summary)
        if fixes_set:
            pywikibot.output('If a summary is defined for the fix, this '
                             'default summary won\'t be applied.')
        edit_summary = pywikibot.input(
            'Press Enter to use this automatic message, or enter a '
            'description of the\nchanges your bot will make:')

    # Perform one of the predefined actions.
    for fix in fixes_set:
        try:
            # Rebinds the loop variable from fix *name* to fix *dict*.
            fix = fixes.fixes[fix]
        except KeyError:
            pywikibot.output(u'Available predefined fixes are: %s'
                             % ', '.join(fixes.fixes.keys()))
            if not fixes.user_fixes_loaded:
                pywikibot.output('The user fixes file could not be found: '
                                 '{0}'.format(fixes.filename))
            return
        if "msg" in fix:
            if isinstance(fix['msg'], basestring):
                set_summary = i18n.twtranslate(site, str(fix['msg']))
            else:
                set_summary = i18n.translate(site, fix['msg'], fallback=True)
        else:
            set_summary = None
        replacement_set = ReplacementList(fix.get('regex'),
                                          fix.get('exceptions'),
                                          fix.get('nocase'),
                                          set_summary)
        for replacement in fix['replacements']:
            summary = None if len(replacement) < 3 else replacement[2]
            # Invisible directionality marks in a pattern are almost always
            # an authoring mistake; warn about them.
            if chars.contains_invisible(replacement[0]):
                pywikibot.warning('The old string "{0}" contains formatting '
                                  'characters like U+200E'.format(
                                      chars.replace_invisible(replacement[0])))
            if chars.contains_invisible(replacement[1]):
                pywikibot.warning('The new string "{0}" contains formatting '
                                  'characters like U+200E'.format(
                                      chars.replace_invisible(replacement[1])))
            replacements.append(
                ReplacementListEntry(
                    old=replacement[0],
                    new=replacement[1],
                    fix_set=replacement_set,
                    edit_summary=summary,
                ))

    # Set the regular expression flags
    flags = re.UNICODE
    if caseInsensitive:
        flags = flags | re.IGNORECASE
    if dotall:
        flags = flags | re.DOTALL
    if multiline:
        flags = flags | re.MULTILINE

    # Pre-compile all regular expressions here to save time later
    for replacement in replacements:
        replacement.compile(regex, flags)

    precompile_exceptions(exceptions, regex, flags)

    if xmlFilename:
        # xmlStart is only bound when -xmlstart was given; probe for it.
        try:
            xmlStart
        except NameError:
            xmlStart = None
        gen = XmlDumpReplacePageGenerator(xmlFilename, xmlStart, replacements,
                                          exceptions, site)
    elif useSql:
        whereClause = 'WHERE (%s)' % ' OR '.join([
            "old_text RLIKE '%s'" % prepareRegexForMySQL(old_regexp.pattern)
            for (old_regexp, new_text) in replacements
        ])
        if exceptions:
            exceptClause = 'AND NOT (%s)' % ' OR '.join([
                "old_text RLIKE '%s'" % prepareRegexForMySQL(exc.pattern)
                for exc in exceptions
            ])
        else:
            exceptClause = ''
        query = u"""
SELECT page_namespace, page_title
FROM page
JOIN text ON (page_id = old_id)
%s
%s
LIMIT 200""" % (whereClause, exceptClause)
        gen = pagegenerators.MySQLPageGenerator(query)

    gen = genFactory.getCombinedGenerator(gen)

    if not gen:
        # syntax error, show help text from the top of this file
        pywikibot.showHelp('replace')
        return
    preloadingGen = pagegenerators.PreloadingGenerator(gen)
    bot = ReplaceRobot(preloadingGen, replacements, exceptions, acceptall,
                       allowoverlap, recursive, add_cat, sleep, edit_summary,
                       site)
    site.login()
    bot.run()

    # Explicitly call pywikibot.stopme().
    # It will make sure the callback is triggered before replace.py is unloaded.
    pywikibot.stopme()
    pywikibot.output(u'\n%s pages changed.' % bot.changed_pages)
def main():
    """Instantiate and run the RFBOT status bot.

    Always calls pywikibot.stopme() on the way out so the framework's
    teardown runs even if the bot raises.
    """
    try:
        status_bot = RFBOTStatusBot()
        status_bot.run()
    finally:
        pywikibot.stopme()
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    add_cat = None
    gen = None
    # summary message
    edit_summary = u""

    # Array which will collect commandline parameters.
    # First element is original text, second element is replacement text.
    commandline_replacements = []
    # A list of 2-tuples of original text and replacement text.
    replacements = []
    # Don't edit pages which contain certain texts.
    exceptions = {
        'title': [],
        'text-contains': [],
        'inside': [],
        'inside-tags': [],
        'require-title': [],  # using a seperate requirements dict needs some
    }                         # major refactoring of code.

    # Should the elements of 'replacements' and 'exceptions' be interpreted
    # as regular expressions?
    regex = False

    # Predefined fixes from dictionary 'fixes' (see above).
    fixes_set = []

    # the dump's path, either absolute or relative, which will be used
    # if -xml flag is present
    xmlFilename = None
    useSql = False

    # will become True when the user presses a ('yes to all') or uses the
    # -always flag.
    acceptall = False

    # Will become True if the user inputs the commandline parameter -nocase
    caseInsensitive = False

    # Will become True if the user inputs the commandline parameter -dotall
    dotall = False

    # Will become True if the user inputs the commandline parameter -multiline
    multiline = False

    # Do all hits when they overlap
    allowoverlap = False

    # Do not recurse replacement
    recursive = False

    # Between a regex and another (using -fix) sleep some time (not to waste
    # too much CPU
    sleep = None

    # Read commandline parameters.
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()
    for arg in local_args:
        # Page-generator options are consumed by the factory first.
        if genFactory.handleArg(arg):
            continue
        if arg == '-regex':
            regex = True
        elif arg.startswith('-xmlstart'):
            if len(arg) == 9:
                xmlStart = pywikibot.input(
                    u'Please enter the dumped article to start with:')
            else:
                xmlStart = arg[10:]
        elif arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = i18n.input('pywikibot-enter-xml-filename')
            else:
                xmlFilename = arg[5:]
        elif arg == '-sql':
            useSql = True
        elif arg.startswith('-excepttitle:'):
            exceptions['title'].append(arg[13:])
        elif arg.startswith('-requiretitle:'):
            exceptions['require-title'].append(arg[14:])
        elif arg.startswith('-excepttext:'):
            exceptions['text-contains'].append(arg[12:])
        elif arg.startswith('-exceptinside:'):
            exceptions['inside'].append(arg[14:])
        elif arg.startswith('-exceptinsidetag:'):
            exceptions['inside-tags'].append(arg[17:])
        elif arg.startswith('-fix:'):
            fixes_set += [arg[5:]]
        elif arg.startswith('-sleep:'):
            sleep = float(arg[7:])
        elif arg == '-always':
            acceptall = True
        elif arg == '-recursive':
            recursive = True
        elif arg == '-nocase':
            caseInsensitive = True
        elif arg == '-dotall':
            dotall = True
        elif arg == '-multiline':
            multiline = True
        elif arg.startswith('-addcat:'):
            add_cat = arg[8:]
        elif arg.startswith('-summary:'):
            edit_summary = arg[9:]
        elif arg.startswith('-allowoverlap'):
            allowoverlap = True
        else:
            # Anything unrecognised is an old/new replacement text.
            commandline_replacements.append(arg)

    site = pywikibot.Site()

    # Replacements always come in old/new pairs.
    if (len(commandline_replacements) % 2):
        raise pywikibot.Error('require even number of replacements.')

    if not commandline_replacements:
        if fixes_set:
            manual = pywikibot.input_yn('Replacements via -fix: set. Apply '
                                        'also manual replacements?',
                                        default=False)
        else:
            manual = True
        if manual:
            old = pywikibot.input(u'Please enter the text that should be replaced:')
            while old:
                new = pywikibot.input(u'Please enter the new text:')
                commandline_replacements += [old, new]
                old = pywikibot.input(
                    u'Please enter another text that should be replaced,' +
                    u'\nor press Enter to start:')

    # Only the first pair's summary is kept, as a sample for the prompt below.
    single_summary = None
    for i in range(0, len(commandline_replacements), 2):
        replacement = Replacement(commandline_replacements[i],
                                  commandline_replacements[i + 1])
        if not single_summary:
            single_summary = i18n.twtranslate(
                site, 'replace-replacing',
                {'description': ' (-%s +%s)' % (replacement.old,
                                                replacement.new)}
            )
        replacements.append(replacement)

    if not edit_summary:
        if single_summary:
            pywikibot.output(u'The summary message for the command line '
                             'replacements will be something like: %s'
                             % single_summary)
        if fixes_set:
            pywikibot.output('If a summary is defined for the fix, this '
                             'default summary won\'t be applied.')
        edit_summary = pywikibot.input(
            u'Press Enter to use this automatic message, or enter a ' +
            u'description of the\nchanges your bot will make:')

    # Perform one of the predefined actions.
    for fix in fixes_set:
        try:
            # Rebinds the loop variable from fix *name* to fix *dict*.
            fix = fixes.fixes[fix]
        except KeyError:
            pywikibot.output(u'Available predefined fixes are: %s'
                             % ', '.join(fixes.fixes.keys()))
            return
        if "msg" in fix:
            if isinstance(fix['msg'], basestring):
                set_summary = i18n.twtranslate(site, str(fix['msg']))
            else:
                set_summary = i18n.translate(site, fix['msg'], fallback=True)
        else:
            set_summary = None
        for replacement in fix['replacements']:
            # Per-replacement summary (third tuple element) overrides the
            # fix-wide one.
            summary = set_summary if len(replacement) < 3 else replacement[2]
            replacements.append(Replacement(
                old=replacement[0],
                new=replacement[1],
                use_regex=fix.get('regex'),
                edit_summary=summary,
                exceptions=fix.get('exceptions'),
                case_insensitive=fix.get('nocase')
            ))

    # Set the regular expression flags
    flags = re.UNICODE
    if caseInsensitive:
        flags = flags | re.IGNORECASE
    if dotall:
        flags = flags | re.DOTALL
    if multiline:
        flags = flags | re.MULTILINE

    # Pre-compile all regular expressions here to save time later
    for replacement in replacements:
        replacement.compile(regex, flags)

    precompile_exceptions(exceptions, regex, flags)

    if xmlFilename:
        # xmlStart is only bound when -xmlstart was given; probe for it.
        try:
            xmlStart
        except NameError:
            xmlStart = None
        gen = XmlDumpReplacePageGenerator(xmlFilename, xmlStart, replacements,
                                          exceptions, site)
    elif useSql:
        whereClause = 'WHERE (%s)' % ' OR '.join(
            ["old_text RLIKE '%s'" % prepareRegexForMySQL(old_regexp.pattern)
             for (old_regexp, new_text) in replacements])
        if exceptions:
            exceptClause = 'AND NOT (%s)' % ' OR '.join(
                ["old_text RLIKE '%s'" % prepareRegexForMySQL(exc.pattern)
                 for exc in exceptions])
        else:
            exceptClause = ''
        query = u"""
SELECT page_namespace, page_title
FROM page
JOIN text ON (page_id = old_id)
%s
%s
LIMIT 200""" % (whereClause, exceptClause)
        gen = pagegenerators.MySQLPageGenerator(query)

    gen = genFactory.getCombinedGenerator(gen)

    if not gen:
        # syntax error, show help text from the top of this file
        pywikibot.showHelp('replace')
        return
    preloadingGen = pagegenerators.PreloadingGenerator(gen)
    bot = ReplaceRobot(preloadingGen, replacements, exceptions, acceptall,
                       allowoverlap, recursive, add_cat, sleep, edit_summary,
                       site)
    site.login()
    bot.run()

    # Explicitly call pywikibot.stopme().
    # It will make sure the callback is triggered before replace.py is unloaded.
    pywikibot.stopme()
    pywikibot.output(u'\n%s pages changed.' % bot.changed_pages)
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: unicode
    """
    add_cat = None
    gen = None
    # summary message
    edit_summary = ''

    # Array which will collect commandline parameters.
    # First element is original text, second element is replacement text.
    commandline_replacements = []
    # A list of 2-tuples of original text and replacement text.
    replacements = []
    # Don't edit pages which contain certain texts.
    exceptions = {
        'title': [],
        'text-contains': [],
        'inside': [],
        'inside-tags': [],
        'require-title': [],  # using a separate requirements dict needs some
    }                         # major refactoring of code.

    # Should the elements of 'replacements' and 'exceptions' be interpreted
    # as regular expressions?
    regex = False

    # Predefined fixes from dictionary 'fixes' (see above).
    fixes_set = []

    # the dump's path, either absolute or relative, which will be used
    # if -xml flag is present
    xmlFilename = None
    useSql = False
    sql_query = None

    # will become True when the user presses a ('yes to all') or uses the
    # -always flag.
    acceptall = False

    # Will become True if the user inputs the commandline parameter -nocase
    caseInsensitive = False

    # Will become True if the user inputs the commandline parameter -dotall
    dotall = False

    # Will become True if the user inputs the commandline parameter -multiline
    multiline = False

    # Do all hits when they overlap
    allowoverlap = False

    # Do not recurse replacement
    recursive = False

    # Between a regex and another (using -fix) sleep some time (not to waste
    # too much CPU
    sleep = None

    # Request manual replacements even if replacements are already defined
    manual_input = False
    # Replacements loaded from a file
    replacement_file = None
    replacement_file_arg_misplaced = False

    # Read commandline parameters.
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()
    for arg in local_args:
        if genFactory.handleArg(arg):
            continue
        if arg == '-regex':
            regex = True
        elif arg.startswith('-xmlstart'):
            if len(arg) == 9:
                xmlStart = pywikibot.input(
                    'Please enter the dumped article to start with:')
            else:
                xmlStart = arg[10:]
        elif arg.startswith('-xml'):
            if len(arg) == 4:
                xmlFilename = i18n.input('pywikibot-enter-xml-filename')
            else:
                xmlFilename = arg[5:]
        elif arg.startswith(('-sql', '-mysqlquery')):
            if arg.startswith('-sql'):
                issue_deprecation_warning('The usage of "-sql"', '-mysqlquery',
                                          1, ArgumentDeprecationWarning,
                                          since='20180617')
            useSql = True
            sql_query = arg.partition(':')[2]
        elif arg.startswith('-excepttitle:'):
            exceptions['title'].append(arg[13:])
        elif arg.startswith('-requiretitle:'):
            exceptions['require-title'].append(arg[14:])
        elif arg.startswith('-excepttext:'):
            exceptions['text-contains'].append(arg[12:])
        elif arg.startswith('-exceptinside:'):
            exceptions['inside'].append(arg[14:])
        elif arg.startswith('-exceptinsidetag:'):
            exceptions['inside-tags'].append(arg[17:])
        elif arg.startswith('-fix:'):
            fixes_set += [arg[5:]]
        elif arg.startswith('-sleep:'):
            sleep = float(arg[7:])
        elif arg == '-always':
            acceptall = True
        elif arg == '-recursive':
            recursive = True
        elif arg == '-nocase':
            caseInsensitive = True
        elif arg == '-dotall':
            dotall = True
        elif arg == '-multiline':
            multiline = True
        elif arg.startswith('-addcat:'):
            add_cat = arg[8:]
        elif arg.startswith('-summary:'):
            edit_summary = arg[9:]
        elif arg.startswith('-automaticsummary'):
            edit_summary = True
        elif arg.startswith('-allowoverlap'):
            allowoverlap = True
        elif arg.startswith('-manualinput'):
            manual_input = True
        elif arg.startswith('-replacementfile'):
            # Deprecated alias of -pairsfile; only warns, the value is ignored.
            issue_deprecation_warning('-replacementfile', '-pairsfile', 2,
                                      ArgumentDeprecationWarning,
                                      since='20160304')
        elif arg.startswith('-pairsfile'):
            # A file argument in the middle of an old/new pair would silently
            # desynchronise the pair list; remember it and error out later.
            if len(commandline_replacements) % 2:
                replacement_file_arg_misplaced = True
            if arg == '-pairsfile':
                replacement_file = pywikibot.input(
                    'Please enter the filename to read replacements from:')
            else:
                replacement_file = arg[len('-pairsfile:'):]
        else:
            commandline_replacements.append(arg)

    site = pywikibot.Site()

    if len(commandline_replacements) % 2:
        pywikibot.error('Incomplete command line pattern replacement pair.')
        return False

    if replacement_file_arg_misplaced:
        pywikibot.error('-pairsfile used between a pattern replacement pair.')
        return False

    if replacement_file:
        try:
            with codecs.open(replacement_file, 'r', 'utf-8') as f:
                # strip newlines, but not other characters
                file_replacements = f.read().splitlines()
        except (IOError, OSError) as e:
            pywikibot.error('Error loading {0}: {1}'.format(
                replacement_file, e))
            return False

        if len(file_replacements) % 2:
            pywikibot.error(
                '{0} contains an incomplete pattern replacement pair.'.format(
                    replacement_file))
            return False

        # Strip BOM from first line.  Fixed: str.lstrip returns a NEW string
        # (the old call discarded its result, so the BOM was never removed);
        # also guard against an empty pairs file (IndexError).
        if file_replacements:
            file_replacements[0] = file_replacements[0].lstrip('\uFEFF')
        commandline_replacements.extend(file_replacements)

    if not (commandline_replacements or fixes_set) or manual_input:
        old = pywikibot.input('Please enter the text that should be replaced:')
        while old:
            new = pywikibot.input('Please enter the new text:')
            commandline_replacements += [old, new]
            old = pywikibot.input(
                'Please enter another text that should be replaced,'
                '\nor press Enter to start:')

    # The summary stored here won't be actually used but is only an example
    single_summary = None
    for i in range(0, len(commandline_replacements), 2):
        replacement = Replacement(commandline_replacements[i],
                                  commandline_replacements[i + 1])
        if not single_summary:
            single_summary = i18n.twtranslate(
                site, 'replace-replacing', {
                    'description': ' (-{0} +{1})'.format(replacement.old,
                                                         replacement.new)
                })
        replacements.append(replacement)

    # Perform one of the predefined actions.
    missing_fixes_summaries = []  # which a fixes/replacements miss a summary
    generators_given = bool(genFactory.gens)
    for fix_name in fixes_set:
        try:
            fix = fixes.fixes[fix_name]
        except KeyError:
            pywikibot.output('Available predefined fixes are: {0}'.format(
                ', '.join(fixes.fixes.keys())))
            if not fixes.user_fixes_loaded:
                pywikibot.output('The user fixes file could not be found: '
                                 '{0}'.format(fixes.filename))
            return
        if not fix['replacements']:
            pywikibot.warning('No replacements defined for fix '
                              '"{0}"'.format(fix_name))
            continue
        if 'msg' in fix:
            if isinstance(fix['msg'], basestring):
                set_summary = i18n.twtranslate(site, str(fix['msg']))
            else:
                set_summary = i18n.translate(site, fix['msg'], fallback=True)
        else:
            set_summary = None
        # A fix may ship its own page generator; only honour it when the user
        # supplied none on the command line.
        if not generators_given and 'generator' in fix:
            gen_args = fix['generator']
            if isinstance(gen_args, basestring):
                gen_args = [gen_args]
            for gen_arg in gen_args:
                genFactory.handleArg(gen_arg)
        replacement_set = ReplacementList(fix.get('regex'),
                                          fix.get('exceptions'),
                                          fix.get('nocase'),
                                          set_summary,
                                          name=fix_name)
        # Whether some replacements have a summary, if so only show which
        # have none, otherwise just mention the complete fix
        missing_fix_summaries = []
        for index, replacement in enumerate(fix['replacements'], start=1):
            summary = None if len(replacement) < 3 else replacement[2]
            if not set_summary and not summary:
                missing_fix_summaries.append('"{0}" (replacement #{1})'.format(
                    fix_name, index))
            if chars.contains_invisible(replacement[0]):
                pywikibot.warning('The old string "{0}" contains formatting '
                                  'characters like U+200E'.format(
                                      chars.replace_invisible(replacement[0])))
            if (not callable(replacement[1])
                    and chars.contains_invisible(replacement[1])):
                pywikibot.warning('The new string "{0}" contains formatting '
                                  'characters like U+200E'.format(
                                      chars.replace_invisible(replacement[1])))
            replacement_set.append(
                ReplacementListEntry(
                    old=replacement[0],
                    new=replacement[1],
                    fix_set=replacement_set,
                    edit_summary=summary,
                ))

        # Exceptions specified via 'fix' shall be merged to those via CLI.
        if replacement_set:
            replacements.extend(replacement_set)
            if replacement_set._exceptions is not None:
                for k, v in replacement_set._exceptions.items():
                    if k in exceptions:
                        exceptions[k] = list(set(exceptions[k]) | set(v))
                    else:
                        exceptions[k] = v

        if len(fix['replacements']) == len(missing_fix_summaries):
            missing_fixes_summaries.append(
                '"{0}" (all replacements)'.format(fix_name))
        else:
            missing_fixes_summaries += missing_fix_summaries

    # edit_summary is True when -automaticsummary was given.
    if ((not edit_summary or edit_summary is True)
            and (missing_fixes_summaries or single_summary)):
        if single_summary:
            pywikibot.output('The summary message for the command line '
                             'replacements will be something like: '
                             + single_summary)
        if missing_fixes_summaries:
            pywikibot.output('The summary will not be used when the fix has '
                             'one defined but the following fix(es) do(es) '
                             'not have a summary defined: '
                             '{0}'.format(', '.join(missing_fixes_summaries)))
        if edit_summary is not True:
            edit_summary = pywikibot.input(
                'Press Enter to use this automatic message, or enter a '
                'description of the\nchanges your bot will make:')
        else:
            edit_summary = ''

    # Set the regular expression flags
    flags = re.UNICODE
    if caseInsensitive:
        flags = flags | re.IGNORECASE
    if dotall:
        flags = flags | re.DOTALL
    if multiline:
        flags = flags | re.MULTILINE

    # Pre-compile all regular expressions here to save time later
    for replacement in replacements:
        replacement.compile(regex, flags)

    precompile_exceptions(exceptions, regex, flags)

    if xmlFilename:
        # xmlStart is only bound when -xmlstart was given; probe for it.
        try:
            xmlStart
        except NameError:
            xmlStart = None
        gen = XmlDumpReplacePageGenerator(xmlFilename, xmlStart, replacements,
                                          exceptions, site)
    elif useSql:
        if not sql_query:
            whereClause = 'WHERE (%s)' % ' OR '.join([
                "old_text RLIKE '%s'"
                % prepareRegexForMySQL(old_regexp.pattern)
                for (old_regexp, new_text) in replacements
            ])
            if exceptions:
                exceptClause = 'AND NOT (%s)' % ' OR '.join([
                    "old_text RLIKE '%s'" % prepareRegexForMySQL(exc.pattern)
                    for exc in exceptions
                ])
            else:
                exceptClause = ''
        # The %-formatting on the right is only evaluated when sql_query is
        # falsy, so whereClause/exceptClause are always bound when needed.
        query = sql_query or """
SELECT page_namespace, page_title
FROM page
JOIN text ON (page_id = old_id)
%s
%s
LIMIT 200""" % (whereClause, exceptClause)
        gen = pagegenerators.MySQLPageGenerator(query)

    gen = genFactory.getCombinedGenerator(gen, preload=True)

    if not gen:
        pywikibot.bot.suggest_help(missing_generator=True)
        return False

    bot = ReplaceRobot(gen, replacements, exceptions, allowoverlap, recursive,
                       add_cat, sleep, edit_summary, always=acceptall,
                       site=site)
    site.login()
    bot.run()

    # Explicitly call pywikibot.stopme(). It will make sure the callback is
    # triggered before replace.py is unloaded.
    pywikibot.stopme()
    pywikibot.output('\n{0} pages changed.'.format(bot.changed_pages))
        # NOTE(review): fragment — tail of a ConvertBot method whose ``def``
        # begins before this chunk of the file; indentation depth assumed.
        # In warn-only mode the (unmodified) source is scanned, otherwise the
        # freshly written destination file.
        filename = self.source if self.warnonly else self.dest
        with codecs.open(filename, 'r', 'utf-8') as g:
            lines = enumerate(g.readlines(), start=1)
            for i, line in lines:
                for w in warnings:
                    # w is a (pattern, message) pair; flag every line that
                    # still contains the pattern.
                    if w[0] in line:
                        pywikibot.warning(
                            'line {0}: {1}>>> {2}\n'.format(i, line, w[1]))


def main():
    """Process command line arguments and invoke bot."""
    filename = None
    warnonly = False

    # Parse command line arguments for -help option
    for arg in pywikibot.handleArgs():
        if arg.startswith('-warnonly'):
            warnonly = True
        elif not arg.startswith('-'):
            # The first non-option argument is taken as the input filename.
            filename = arg
        else:
            pywikibot.warning(arg + ' is not supported')
    bot = ConvertBot(filename, warnonly)
    bot.run()


if __name__ == "__main__":
    pywikibot.stopme()  # we do not work on any site
    main()
def main(*args):
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    @param args: command line arguments
    @type args: list of unicode
    """
    add_cat = None
    gen = None
    # summary message
    edit_summary = ""
    # Array which will collect commandline parameters.
    # First element is original text, second element is replacement text.
    commandline_replacements = []
    # A list of 2-tuples of original text and replacement text.
    replacements = []
    # Don't edit pages which contain certain texts.
    exceptions = {
        "title": [],
        "text-contains": [],
        "inside": [],
        "inside-tags": [],
        "require-title": [],  # using a seperate requirements dict needs some
    }  # major refactoring of code.

    # Should the elements of 'replacements' and 'exceptions' be interpreted
    # as regular expressions?
    regex = False
    # Predefined fixes from dictionary 'fixes' (see above).
    fixes_set = []
    # the dump's path, either absolute or relative, which will be used
    # if -xml flag is present
    xmlFilename = None
    useSql = False
    # will become True when the user presses a ('yes to all') or uses the
    # -always flag.
    acceptall = False
    # Will become True if the user inputs the commandline parameter -nocase
    caseInsensitive = False
    # Will become True if the user inputs the commandline parameter -dotall
    dotall = False
    # Will become True if the user inputs the commandline parameter -multiline
    multiline = False
    # Do all hits when they overlap
    allowoverlap = False
    # Do not recurse replacement
    recursive = False
    # Between a regex and another (using -fix) sleep some time (not to waste
    # too much CPU
    sleep = None
    # Request manual replacements even if replacements are already defined
    manual_input = False
    # Replacements loaded from a file
    replacement_file = None
    replacement_file_arg_misplaced = False

    # Read commandline parameters.
    local_args = pywikibot.handle_args(args)
    genFactory = pagegenerators.GeneratorFactory()
    for arg in local_args:
        if genFactory.handleArg(arg):
            continue
        if arg == "-regex":
            regex = True
        elif arg.startswith("-xmlstart"):
            if len(arg) == 9:
                xmlStart = pywikibot.input("Please enter the dumped article to start with:")
            else:
                xmlStart = arg[10:]
        elif arg.startswith("-xml"):
            if len(arg) == 4:
                xmlFilename = i18n.input("pywikibot-enter-xml-filename")
            else:
                xmlFilename = arg[5:]
        elif arg == "-sql":
            useSql = True
        elif arg.startswith("-excepttitle:"):
            exceptions["title"].append(arg[13:])
        elif arg.startswith("-requiretitle:"):
            exceptions["require-title"].append(arg[14:])
        elif arg.startswith("-excepttext:"):
            exceptions["text-contains"].append(arg[12:])
        elif arg.startswith("-exceptinside:"):
            exceptions["inside"].append(arg[14:])
        elif arg.startswith("-exceptinsidetag:"):
            exceptions["inside-tags"].append(arg[17:])
        elif arg.startswith("-fix:"):
            fixes_set += [arg[5:]]
        elif arg.startswith("-sleep:"):
            sleep = float(arg[7:])
        elif arg == "-always":
            acceptall = True
        elif arg == "-recursive":
            recursive = True
        elif arg == "-nocase":
            caseInsensitive = True
        elif arg == "-dotall":
            dotall = True
        elif arg == "-multiline":
            multiline = True
        elif arg.startswith("-addcat:"):
            add_cat = arg[8:]
        elif arg.startswith("-summary:"):
            edit_summary = arg[9:]
        elif arg.startswith("-automaticsummary"):
            edit_summary = True
        elif arg.startswith("-allowoverlap"):
            allowoverlap = True
        elif arg.startswith("-manualinput"):
            manual_input = True
        elif arg.startswith("-replacementfile"):
            # Deprecated alias of -pairsfile; only warns, the value is ignored.
            issue_deprecation_warning("-replacementfile", "-pairsfile", 2, ArgumentDeprecationWarning)
        elif arg.startswith("-pairsfile"):
            # A file argument in the middle of an old/new pair would silently
            # desynchronise the pair list; remember it and error out later.
            if len(commandline_replacements) % 2:
                replacement_file_arg_misplaced = True
            if arg == "-pairsfile":
                replacement_file = pywikibot.input("Please enter the filename to read replacements from:")
            else:
                replacement_file = arg[len("-pairsfile:") :]
        else:
            commandline_replacements.append(arg)

    site = pywikibot.Site()

    if len(commandline_replacements) % 2:
        pywikibot.error("Incomplete command line pattern replacement pair.")
        return False

    if replacement_file_arg_misplaced:
        pywikibot.error("-pairsfile used between a pattern replacement pair.")
        return False

    if replacement_file:
        try:
            with codecs.open(replacement_file, "r", "utf-8") as f:
                # strip newlines, but not other characters
                file_replacements = f.read().splitlines()
        except (IOError, OSError) as e:
            pywikibot.error("Error loading {0}: {1}".format(replacement_file, e))
            return False

        if len(file_replacements) % 2:
            pywikibot.error("{0} contains an incomplete pattern replacement pair.".format(replacement_file))
            return False

        # Strip BOM from first line.  Fixed: str.lstrip returns a NEW string
        # (the old call discarded its result, so the BOM was never removed);
        # also guard against an empty pairs file (IndexError).
        if file_replacements:
            file_replacements[0] = file_replacements[0].lstrip("\uFEFF")
        commandline_replacements.extend(file_replacements)

    if not (commandline_replacements or fixes_set) or manual_input:
        old = pywikibot.input("Please enter the text that should be replaced:")
        while old:
            new = pywikibot.input("Please enter the new text:")
            commandline_replacements += [old, new]
            old = pywikibot.input("Please enter another text that should be replaced," "\nor press Enter to start:")

    # The summary stored here won't be actually used but is only an example
    single_summary = None
    for i in range(0, len(commandline_replacements), 2):
        replacement = Replacement(commandline_replacements[i], commandline_replacements[i + 1])
        if not single_summary:
            single_summary = i18n.twtranslate(
                site, "replace-replacing", {"description": " (-%s +%s)" % (replacement.old, replacement.new)}
            )
        replacements.append(replacement)

    # Perform one of the predefined actions.
    missing_fixes_summaries = []  # which a fixes/replacements miss a summary
    for fix_name in fixes_set:
        try:
            fix = fixes.fixes[fix_name]
        except KeyError:
            pywikibot.output("Available predefined fixes are: %s" % ", ".join(fixes.fixes.keys()))
            if not fixes.user_fixes_loaded:
                pywikibot.output("The user fixes file could not be found: " "{0}".format(fixes.filename))
            return
        if not fix["replacements"]:
            pywikibot.warning("No replacements defined for fix " '"{0}"'.format(fix_name))
            continue
        if "msg" in fix:
            if isinstance(fix["msg"], basestring):
                set_summary = i18n.twtranslate(site, str(fix["msg"]))
            else:
                set_summary = i18n.translate(site, fix["msg"], fallback=True)
        else:
            set_summary = None
        replacement_set = ReplacementList(
            fix.get("regex"), fix.get("exceptions"), fix.get("nocase"), set_summary, name=fix_name
        )
        # Whether some replacements have a summary, if so only show which
        # have none, otherwise just mention the complete fix
        missing_fix_summaries = []
        for index, replacement in enumerate(fix["replacements"], start=1):
            summary = None if len(replacement) < 3 else replacement[2]
            if not set_summary and not summary:
                missing_fix_summaries.append('"{0}" (replacement #{1})'.format(fix_name, index))
            if chars.contains_invisible(replacement[0]):
                pywikibot.warning(
                    'The old string "{0}" contains formatting '
                    "characters like U+200E".format(chars.replace_invisible(replacement[0]))
                )
            if not callable(replacement[1]) and chars.contains_invisible(replacement[1]):
                pywikibot.warning(
                    'The new string "{0}" contains formatting '
                    "characters like U+200E".format(chars.replace_invisible(replacement[1]))
                )
            replacement_set.append(
                ReplacementListEntry(
                    old=replacement[0], new=replacement[1], fix_set=replacement_set, edit_summary=summary
                )
            )

        if replacement_set:
            replacements.extend(replacement_set)

        if len(fix["replacements"]) == len(missing_fix_summaries):
            missing_fixes_summaries.append('"{0}" (all replacements)'.format(fix_name))
        else:
            missing_fixes_summaries += missing_fix_summaries

    # edit_summary is True when -automaticsummary was given.
    if (not edit_summary or edit_summary is True) and (missing_fixes_summaries or single_summary):
        if single_summary:
            pywikibot.output(
                "The summary message for the command line " "replacements will be something like: %s" % single_summary
            )
        if missing_fixes_summaries:
            pywikibot.output(
                "The summary will not be used when the fix has "
                "one defined but the following fix(es) do(es) not "
                "have a summary defined: "
                "{0}".format(", ".join(missing_fixes_summaries))
            )
        if edit_summary is not True:
            edit_summary = pywikibot.input(
                "Press Enter to use this automatic message, or enter a "
                "description of the\nchanges your bot will make:"
            )
        else:
            edit_summary = ""

    # Set the regular expression flags
    flags = re.UNICODE
    if caseInsensitive:
        flags = flags | re.IGNORECASE
    if dotall:
        flags = flags | re.DOTALL
    if multiline:
        flags = flags | re.MULTILINE

    # Pre-compile all regular expressions here to save time later
    for replacement in replacements:
        replacement.compile(regex, flags)

    precompile_exceptions(exceptions, regex, flags)

    if xmlFilename:
        # xmlStart is only bound when -xmlstart was given; probe for it.
        try:
            xmlStart
        except NameError:
            xmlStart = None
        gen = XmlDumpReplacePageGenerator(xmlFilename, xmlStart, replacements, exceptions, site)
    elif useSql:
        whereClause = "WHERE (%s)" % " OR ".join(
            [
                "old_text RLIKE '%s'" % prepareRegexForMySQL(old_regexp.pattern)
                for (old_regexp, new_text) in replacements
            ]
        )
        if exceptions:
            exceptClause = "AND NOT (%s)" % " OR ".join(
                ["old_text RLIKE '%s'" % prepareRegexForMySQL(exc.pattern) for exc in exceptions]
            )
        else:
            exceptClause = ""
        query = """
SELECT page_namespace, page_title
FROM page
JOIN text ON (page_id = old_id)
%s
%s
LIMIT 200""" % (
            whereClause,
            exceptClause,
        )
        gen = pagegenerators.MySQLPageGenerator(query)

    gen = genFactory.getCombinedGenerator(gen)

    if not gen:
        pywikibot.bot.suggest_help(missing_generator=True)
        return False

    preloadingGen = pagegenerators.PreloadingGenerator(gen)
    bot = ReplaceRobot(
        preloadingGen,
        replacements,
        exceptions,
        allowoverlap,
        recursive,
        add_cat,
        sleep,
        edit_summary,
        always=acceptall,
        site=site,
    )
    site.login()
    bot.run()

    # Explicitly call pywikibot.stopme().
    # It will make sure the callback is triggered before replace.py is unloaded.
    pywikibot.stopme()
    pywikibot.output("\n%s pages changed." % bot.changed_pages)
# NOTE(review): fragment — the `def` line of the function containing this loop
# is outside the visible region. The loop extracts [[wikilink]] targets from
# an input file and returns them (with the first/last 3 characters trimmed).
    for item in in_file:
        pwbot.output(item)
        item = item.strip('\n')
        regex = re.search("\[\[(.*)\]\]",item)
        if regex is None:
            continue
        string = regex.groups()[0]
        # [3:-3] trims three characters from each end of the captured target;
        # presumably strips extra bracket/quote decoration — TODO confirm.
        ret.append(string[3:-3])
    return ret

# Raw command line; args[1] selects the action, args[2] is its argument.
args = sys.argv

if __name__ =='__main__':
    Missing_translations = MissingTranslations()
    # Dispatch table mapping the first CLI argument to a handler callable.
    argsdict={
        'irc':IRCretrieve,
        'debug':testTranslate,
        'analyse':analyse_translations,
        'edittimes':analyse_edit_hours,
        'addtranslations':add_translations,
        'dump':process_dump}
    try:
        #verbose=True
        #print args[1] + " --- " + args[2]
        argsdict[args[1]](args[2])
    finally:
        # Always release the pywikibot throttle, even on error/KeyError.
        pwbot.stopme()
def main(self, url, ie_key, subtitles, filename, filedesc, downloadkey, convertkey, username, oauth):
    """Main worker code.

    Celery task body (Python 2): downloads a video from *url*, converts it,
    and uploads it to Wikimedia Commons under *username* using OAuth
    credentials in *oauth*. Progress is reported through update_state;
    abort requests raise TaskAbort via statuscallback.
    Returns a JSON-serializable dict describing the outcome.
    """
    # Get a lock to prevent double-running with same task ID
    lockkey = 'tasklock:' + self.request.id
    if redisconnection.exists(lockkey):
        # Raising the Ignore class itself (valid in Python 2); celery drops
        # the task silently.
        raise Ignore

    # Check for 10G of disk space, refuse to run if it is unavailable
    st = os.statvfs('/srv')
    if st.f_frsize * st.f_bavail < 10 << 30:
        # Defer the task instead of failing; retry() raises, so the assert
        # below is unreachable by design.
        self.retry(max_retries=20, countdown=5 * 60)
        assert False  # should never reach here

    # Lock expires after a week in case cleanup never runs.
    redisconnection.setex(lockkey, 'T', 7 * 24 * 3600)

    # Generate temporary directory for task
    for i in range(10):  # 10 tries
        # Python 2 hex encoding of random bytes; note this shadows builtin id().
        id = os.urandom(8).encode('hex')
        outputdir = '/srv/v2c/output/' + id
        if not os.path.isdir(outputdir):
            os.mkdir(outputdir)
            break
    else:
        # for/else: only reached when all 10 candidate ids collided.
        raise TaskError("Too many retries to generate a task id")

    s = Stats()

    def statuscallback(text, percent):
        # Propagate user-requested aborts at every progress tick.
        if self.is_aborted():
            raise TaskAbort
        if text is not None:
            s.text = text
        if percent is not None:
            s.percent = percent
        print '%d: %s' % (s.percent, s.text)
        self.update_state(state='PROGRESS', meta={
            'text': s.text,
            'percent': s.percent
        })

    def errorcallback(text):
        raise TaskError(text)

    try:
        statuscallback('Downloading...', -1)
        d = download.download(url, ie_key, downloadkey, subtitles, outputdir, statuscallback, errorcallback)
        if not d:
            errorcallback('Download failed!')
        file = d['target']  # shadows builtin file() (Python 2)
        if not file:
            errorcallback('Download failed!')
        subtitles = subtitles and d['subtitles']

        statuscallback('Converting...', -1)
        file = encode.encode(file, convertkey, statuscallback, errorcallback)
        if not file:
            errorcallback('Convert failed!')
        ext = file.split('.')[-1]

        statuscallback('Configuring Pywikibot...', -1)
        # Per-task OAuth credentials; cleared again in the finally block so
        # they never leak into another task in the same worker process.
        pywikibot.config.authenticate['commons.wikimedia.org'] = \
            (consumer_key, consumer_secret) + tuple(oauth)
        pywikibot.config.usernames['commons']['commons'] = username
        pywikibot.Site('commons', 'commons', user=username).login()

        statuscallback('Uploading...', -1)
        filename += '.' + ext
        filename, wikifileurl = upload.upload(file, filename, url, http_host, filedesc, username, statuscallback, errorcallback)
        if not wikifileurl:
            errorcallback('Upload failed!')

        if subtitles:
            statuscallback('Uploading subtitles...', -1)
            try:
                subtitleuploader.subtitles(subtitles, filename, username, statuscallback, errorcallback)
            except TaskAbort:
                raise
            except Exception, e:
                # Subtitle failure is non-fatal: report and carry on.
                statuscallback(type(e).__name__ + ": " + str(e), None)
                print e
                pass
    except NeedServerSideUpload as e:
        # json serializer cannot properly serialize an exception
        # without losing data, so we change the exception into a dict.
        return {'type': 'ssu', 'hashsum': e.hashsum, 'url': e.url}
    except pywikibot.Error:  # T124922 workaround
        # Re-raise as TaskError while keeping the original traceback
        # (Python 2 three-argument raise form).
        exc_info = sys.exc_info()
        raise TaskError(
            (u'pywikibot.Error: %s: %s' % (exc_info[0].__name__, exc_info[1])
             ).encode('utf-8')), None, exc_info[2]
    else:
        statuscallback('Done!', 100)
        return {'type': 'done', 'filename': filename, 'url': wikifileurl}
    finally:
        # Cleanup runs on every exit path: reset pywikibot global state and
        # remove the task's scratch directory.
        statuscallback('Cleaning up...', -1)
        pywikibot.stopme()
        pywikibot.config.authenticate.clear()
        pywikibot.config.usernames['commons'].clear()
        pywikibot._sites.clear()
        shutil.rmtree(outputdir)
# NOTE(review): fragment — this `else:` belongs to a loop inside
# frequencyListUpdate() whose beginning is outside the visible region.
# It finishes building a wikitable of most-wanted articles and saves it.
    else:
        table += '\n|-\n| [[{0}]] || [{{{{fullurl:Specjalna:Linkujące|limit=500&from=0&target={1}}}}} {2}]'.format(
            tmp[0], tmp[0].replace(' ', '_'), int(tmp[1]))
        i += 1
    table += '\n|-\n|}'
    # Keep a local copy of the generated table for inspection/debugging.
    with open('{0}output/frequencyProcessedTable.txt'.format(
            config.path['scripts']), encoding='utf-8', mode='w') as g:
        g.write(table)
    # `dluga` selects the long-list target page instead of the short one.
    if dluga:
        outputPage = pwb.Page(
            site,
            'Wikipedysta:AlkamidBot/listy/Najbardziej_potrzebne_-_długa_lista')
    else:
        outputPage = pwb.Page(
            site, 'Wikipedysta:AlkamidBot/listy/Najbardziej_potrzebne')
    outputPage.text = table
    outputPage.save(summary='aktualizacja')

if __name__ == '__main__':
    try:
        frequencyListUpdate()
    finally:
        # Always release the pywikibot throttle, even if the update failed.
        pwb.stopme()
def tearDown(self):
    """Tear down the test fixture.

    Stops the pywikibot throttle/callback machinery via pywikibot.stopme()
    before delegating to the parent class tear-down.
    """
    pywikibot.stopme()
    super(TestSigning, self).tearDown()
def main():
    """Parse command line options and run the category bot (Python 2).

    Each mode option (-page, -facat, -encat, -newcatfile, -recentcat,
    -newcat) ends option parsing with `break`, so exactly one mode is
    honoured per invocation; remaining arguments are ignored.
    """
    # NOTE(review): summary_commandline, template, autoText, autoTitle and
    # exceptions are assigned here but their consumers are not visible in
    # this region — presumably used via globals elsewhere; confirm.
    summary_commandline, gen, template = None, None, None
    namespaces, PageTitles, exceptions = [], [], []
    encat, newcatfile = "", ""
    autoText, autoTitle = False, False
    recentcat, newcat = False, False
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        if arg == "-autotitle":
            autoTitle = True
        elif arg == "-autotext":
            autoText = True
        elif arg.startswith("-page"):
            if len(arg) == 5:
                # Bare "-page": ask interactively.
                PageTitles.append(pywikibot.input(u"Which page do you want to chage?"))
            else:
                PageTitles.append(arg[6:])
            break
        elif arg.startswith("-except:"):
            exceptions.append(arg[8:])
        elif arg.startswith("-template:"):
            template = arg[10:]
        elif arg.startswith("-facat:"):
            # Strip any category-namespace prefix (English or Persian) and
            # map the Persian category to its English interwiki equivalent.
            facat = arg.replace(u"Category:", u"").replace(u"category:", u"").replace(u"رده:", u"")
            encat = (
                englishdictionry(u"رده:" + facat[7:], fa_site, en_site)
                .replace(u"Category:", u"")
                .replace(u"category:", u"")
            )
            break
        elif arg.startswith("-encat:"):
            encat = arg[7:].replace(u"Category:", u"").replace(u"category:", u"").replace(u"رده:", u"")
            break
        elif arg.startswith("-newcatfile:"):
            newcatfile = arg[12:]
            break
        elif arg.startswith("-recentcat"):
            arg = arg.replace(":", "")
            if len(arg) == 10:
                genfa = pagegenerators.RecentchangesPageGenerator()
            else:
                genfa = pagegenerators.RecentchangesPageGenerator(number=int(arg[10:]))
            genfa = pagegenerators.DuplicateFilterPageGenerator(genfa)
            # Namespace 14 = Category.
            genfa = pagegenerators.NamespaceFilterPageGenerator(genfa, [14])
            preloadingGen = pagegenerators.PreloadingGenerator(genfa, 60)
            recentcat = True
            break
        elif arg.startswith("-newcat"):
            arg = arg.replace(":", "")
            if len(arg) == 7:
                genfa = pagegenerators.NewpagesPageGenerator(step=100, namespaces=14)
            else:
                genfa = pagegenerators.NewpagesPageGenerator(step=int(arg[7:]), namespaces=14)
            preloadingGen = pagegenerators.PreloadingGenerator(genfa, 60)
            newcat = True
            break
        elif arg.startswith("-namespace:"):
            namespaces.append(int(arg[11:]))
        elif arg.startswith("-summary:"):
            pywikibot.setAction(arg[9:])
            summary_commandline = True
        else:
            # Fall back to the standard pagegenerators option handling.
            generator = genFactory.handleArg(arg)
            if generator:
                gen = genFactory.getCombinedGenerator(gen)
    if encat != "":
        encatfalist, encatlists = encatlist(encat)
        if encatlists:
            for encat in encatlists:
                encat = englishdictionry(encat, en_site, fa_site)
                if encat:
                    run([encat])
        if encatfalist is not False:
            run(encatfalist)
    if PageTitles:
        pages = [pywikibot.Page(fa_site, PageTitle) for PageTitle in PageTitles]
        gen = iter(pages)
    if recentcat:
        for workpage in preloadingGen:
            workpage = workpage.title()
            cat = pywikibot.Category(fa_site, workpage)
            gent = pagegenerators.CategorizedPageGenerator(cat)
            run(gent)
        # Each mode terminates the process itself after finishing.
        pywikibot.stopme()
        sys.exit()
    if newcat:
        for workpage in preloadingGen:
            workpage = workpage.title()
            workpage = englishdictionry(workpage, fa_site, en_site)
            if workpage is not False:
                encatfalist, encatlists = encatlist(workpage)
                if encatlists:
                    for encat in encatlists:
                        encat = englishdictionry(encat, en_site, fa_site)
                        if encat:
                            run([encat])
                if encatfalist is not False:
                    run(encatfalist)
        pywikibot.stopme()
        sys.exit()
    if newcatfile:
        # NOTE(review): file handle from codecs.open is never closed.
        text2 = codecs.open(newcatfile, "r", "utf8")
        text = text2.read()
        linken = re.findall(ur"\[\[.*?\]\]", text, re.S)
        if linken:
            for workpage in linken:
                pywikibot.output(u"\03{lightblue}Working on --- Link " + workpage + u" at th newcatfile\03{default}")
                workpage = workpage.split(u"|")[0].replace(u"[[", u"").replace(u"]]", u"").strip()
                workpage = englishdictionry(workpage, fa_site, en_site)
                if workpage is not False:
                    encatfalist, encatlists = encatlist(workpage)
                    # NOTE(review): workpage is translated a second time here;
                    # the result is unused afterwards — possibly redundant.
                    workpage = englishdictionry(workpage, fa_site, en_site)
                    if encatlists:
                        run(encatlists)
                    if encatfalist is not False:
                        run(encatfalist)
        pywikibot.stopme()
        sys.exit()
    if not gen:
        pywikibot.stopme()
        sys.exit()
    if namespaces != []:
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
    preloadingGen = pagegenerators.PreloadingGenerator(gen, pageNumber=60)
    run(preloadingGen)
def logoff(self):
    """Log the bot off: stop pywikibot unless this is a dry run."""
    logger.debug("Logoff ...")
    if self.dry:
        # Dry runs never touched the wiki, so there is nothing to stop.
        logger.debug("Dry run - No logoff")
        return
    pywikibot.stopme()
def run(self):
    """Run the bot.

    Endless loop (Python 2): scan the new-user log, welcome eligible
    users on their talk pages, log welcomed users and report bad account
    names, then either sleep and rerun (globalvar.recursive) or stop.
    """
    while True:
        welcomed_count = 0
        for users in self.parseNewUserLog():
            # Skip blocked accounts, declared bots, and bot-like names.
            if users.isBlocked():
                showStatus(3)
                pywikibot.output(u'%s has been blocked!' % users.name())
                continue
            if 'bot' in users.groups():
                showStatus(3)
                pywikibot.output(u'%s is a bot!' % users.name())
                continue
            if 'bot' in users.name().lower():
                showStatus(3)
                pywikibot.output(u'%s might be a global bot!' % users.name())
                continue
            if users.editCount() >= globalvar.attachEditCount:
                showStatus(2)
                pywikibot.output(u'%s has enough edits to be welcomed.' % users.name())
                ustp = users.getUserTalkPage()
                if ustp.exists():
                    # An existing talk page means someone welcomed them already.
                    showStatus(3)
                    pywikibot.output(u'%s has been already welcomed.' % users.name())
                    continue
                else:
                    if self.badNameFilter(users.name()):
                        self.reportBadAccount(users.name())
                        continue
                    # Build the localized welcome text, optionally with a
                    # random signature and timestamp.
                    welcome_text = i18n.translate(self.site, netext)
                    if globalvar.randomSign:
                        if self.site.family.name != 'wikinews':
                            welcome_text = (welcome_text % choice(self.defineSign()))
                        if self.site.family.name == 'wiktionary' and \
                           self.site.code == 'it':
                            pass
                        else:
                            welcome_text += timeselected
                    elif (self.site.family.name != 'wikinews' and
                          self.site.code != 'it'):
                        welcome_text = (welcome_text % globalvar.defaultSign)
                    final_text = i18n.translate(
                        self.site, final_new_text_additions)
                    if final_text:
                        welcome_text += final_text
                    welcome_comment = i18n.twtranslate(self.site,
                                                       'welcome-welcome')
                    try:
                        # append welcomed, welcome_count++
                        ustp.put(welcome_text, welcome_comment,
                                 minorEdit=False)
                        welcomed_count += 1
                        self._totallyCount += 1
                        self.welcomed_users.append(users)
                    except pywikibot.EditConflict:
                        showStatus(4)
                        pywikibot.output(u'An edit conflict has occurred, '
                                         u'skipping this user.')
                    if globalvar.makeWelcomeLog and \
                            i18n.translate(self.site, logbook):
                        showStatus(5)
                        if welcomed_count == 1:
                            pywikibot.output(u'One user has been welcomed.')
                        elif welcomed_count == 0:
                            pywikibot.output(u'No users have been welcomed.')
                        else:
                            pywikibot.output(u'%s users have been welcomed.'
                                             % welcomed_count)
                        # Flush the welcome log once enough users accumulated.
                        if welcomed_count >= globalvar.dumpToLog:
                            if self.makelogpage(self.welcomed_users):
                                self.welcomed_users = list()
                                welcomed_count = 0
                            else:
                                continue
                # If we haven't to report, do nothing.
            else:
                if users.editCount() == 0:
                    if not globalvar.quiet:
                        showStatus(1)
                        pywikibot.output(u'%s has no contributions.'
                                         % users.name())
                else:
                    showStatus(1)
                    pywikibot.output(u'%s has only %d contributions.'
                                     % (users.name(), users.editCount()))
                # That user mustn't be welcomed.
                continue
        # Write out any leftover log entries from this pass.
        if globalvar.makeWelcomeLog and i18n.translate(
                self.site, logbook) and welcomed_count > 0:
            showStatus()
            if welcomed_count == 1:
                pywikibot.output(u'Putting the log of the latest user...')
            else:
                pywikibot.output(
                    u'Putting the log of the latest %d users...'
                    % welcomed_count)
            if self.makelogpage(self.welcomed_users):
                self.welcomed_users = list()
            else:
                continue
            self.welcomed_users = list()
        if hasattr(self, '_BAQueue'):
            showStatus()
            pywikibot.output("Putting bad name to report page....")
            self.reportBadAccount(None, final=True)
        try:
            if globalvar.recursive:
                showStatus()
                # Decode strftime output with the current locale's encoding
                # when one is set (Python 2 byte strings).
                if locale.getlocale()[1]:
                    strfstr = unicode(
                        time.strftime(u"%d %b %Y %H:%M:%S (UTC)",
                                      time.gmtime()),
                        locale.getlocale()[1])
                else:
                    strfstr = time.strftime(
                        u"%d %b %Y %H:%M:%S (UTC)", time.gmtime())
                pywikibot.output(u'Sleeping %d seconds before rerun. %s'
                                 % (globalvar.timeRecur, strfstr))
                pywikibot.stopme()
                time.sleep(globalvar.timeRecur)
            else:
                # Non-recursive mode: reuse the KeyboardInterrupt handler
                # below as the single exit path from the while loop.
                raise KeyboardInterrupt
        except KeyboardInterrupt:
            break
def main():
    """Parse command line options and run the category bot (Python 2).

    Near-duplicate of the other main() in this file; this variant uses the
    -facat argument text directly (via `encat`) and does not print progress
    while processing -newcatfile links. Each mode option ends option
    parsing with `break`, so only one mode is honoured per invocation.
    """
    # NOTE(review): summary_commandline, template, autoText, autoTitle and
    # exceptions are assigned but their consumers are not visible here —
    # presumably read as globals elsewhere; confirm.
    summary_commandline, gen, template = None, None, None
    namespaces, PageTitles, exceptions = [], [], []
    encat, newcatfile = '', ''
    autoText, autoTitle = False, False
    recentcat, newcat = False, False
    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        if arg == '-autotitle':
            autoTitle = True
        elif arg == '-autotext':
            autoText = True
        elif arg.startswith('-page'):
            if len(arg) == 5:
                # Bare "-page": ask interactively.
                PageTitles.append(
                    pywikibot.input(u'Which page do you want to chage?'))
            else:
                PageTitles.append(arg[6:])
            break
        elif arg.startswith('-except:'):
            exceptions.append(arg[8:])
        elif arg.startswith('-template:'):
            template = arg[10:]
        elif arg.startswith('-facat:'):
            # Strip category-namespace prefixes (English or Persian), then
            # translate the Persian category into its English interwiki.
            encat = arg[7:].replace(u'Category:', u'').replace(u'category:',
                                                               u'').replace(u'رده:', u'')
            encat = englishdictionry(u'رده:' + encat, fa_site,
                                     en_site).replace(u'Category:', u'').replace(
                                         u'category:', u'')
            break
        elif arg.startswith('-encat:'):
            encat = arg[7:].replace(u'Category:', u'').replace(u'category:',
                                                               u'').replace(u'رده:', u'')
            break
        elif arg.startswith('-newcatfile:'):
            newcatfile = arg[12:]
            break
        elif arg.startswith('-recentcat'):
            arg = arg.replace(':', '')
            if len(arg) == 10:
                genfa = pagegenerators.RecentchangesPageGenerator()
            else:
                genfa = pagegenerators.RecentchangesPageGenerator(
                    number=int(arg[10:]))
            genfa = pagegenerators.DuplicateFilterPageGenerator(genfa)
            # Namespace 14 = Category.
            genfa = pagegenerators.NamespaceFilterPageGenerator(genfa, [14])
            preloadingGen = pagegenerators.PreloadingGenerator(genfa, 60)
            recentcat = True
            break
        elif arg.startswith('-newcat'):
            arg = arg.replace(':', '')
            if len(arg) == 7:
                genfa = pagegenerators.NewpagesPageGenerator(step=100,
                                                             namespaces=14)
            else:
                genfa = pagegenerators.NewpagesPageGenerator(step=int(arg[7:]),
                                                             namespaces=14)
            preloadingGen = pagegenerators.PreloadingGenerator(genfa, 60)
            newcat = True
            break
        elif arg.startswith('-namespace:'):
            namespaces.append(int(arg[11:]))
        elif arg.startswith('-summary:'):
            pywikibot.setAction(arg[9:])
            summary_commandline = True
        else:
            # Fall back to standard pagegenerators option handling.
            generator = genFactory.handleArg(arg)
            if generator:
                gen = genFactory.getCombinedGenerator(gen)
    if encat != '':
        encatfalist, encatlists = encatlist(encat)
        if encatlists:
            for encat in encatlists:
                encat = englishdictionry(encat, en_site, fa_site)
                if encat:
                    run([encat])
        if encatfalist is not False:
            run(encatfalist)
    if PageTitles:
        pages = [
            pywikibot.Page(fa_site, PageTitle) for PageTitle in PageTitles
        ]
        gen = iter(pages)
    if recentcat:
        for workpage in preloadingGen:
            workpage = workpage.title()
            cat = pywikibot.Category(fa_site, workpage)
            gent = pagegenerators.CategorizedPageGenerator(cat)
            run(gent)
        # Each mode terminates the process itself after finishing.
        pywikibot.stopme()
        sys.exit()
    if newcat:
        for workpage in preloadingGen:
            workpage = workpage.title()
            workpage = englishdictionry(workpage, fa_site, en_site)
            if workpage is not False:
                encatfalist, encatlists = encatlist(workpage)
                if encatlists:
                    for encat in encatlists:
                        encat = englishdictionry(encat, en_site, fa_site)
                        if encat:
                            run([encat])
                if encatfalist is not False:
                    run(encatfalist)
        pywikibot.stopme()
        sys.exit()
    if newcatfile:
        # NOTE(review): file handle from codecs.open is never closed.
        text2 = codecs.open(newcatfile, 'r', 'utf8')
        text = text2.read()
        linken = re.findall(ur'\[\[.*?\]\]', text, re.S)
        if linken:
            for workpage in linken:
                workpage = workpage.split(u'|')[0].replace(u'[[', u'').replace(
                    u']]', u'').strip()
                workpage = englishdictionry(workpage, fa_site, en_site)
                if workpage is not False:
                    encatfalist, encatlists = encatlist(workpage)
                    if encatlists:
                        run(encatlists)
                    if encatfalist is not False:
                        run(encatfalist)
        pywikibot.stopme()
        sys.exit()
    if not gen:
        pywikibot.stopme()
        sys.exit()
    if namespaces != []:
        gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
    preloadingGen = pagegenerators.PreloadingGenerator(gen, pageNumber=60)
    run(preloadingGen)
# NOTE(review): fragment — this try body belongs to a prompt() helper whose
# `def`/`try` lines are outside the visible region. It falls back from the
# Python 2 raw_input() to the Python 3 input() builtin.
        value = raw_input(text)
    except NameError:
        value = input(text)
    finally:
        # NOTE(review): `return` inside `finally` swallows any pending
        # exception from the try body — intentional here? confirm.
        return value

db_name = prompt("Enter the database name (without '_p'): ")
if db_name in sandbot_header_sites:
    header = prompt("Only insert header [True/False]: ")
    if header.lower().startswith("t"):
        header = True
    else:
        header = False
if db_name not in sandbot_sites:
    # NOTE(review): the "%s" placeholder is never filled in — the message
    # should presumably be ... % db_name. Same on the raise below.
    raise Exception("%s is not configured as a sandbot site.")
# NOTE(review): `header` is only assigned when db_name is in
# sandbot_header_sites; unless it is defined earlier (outside this view),
# this line can raise NameError — confirm.
if header:
    if db_name not in sandbot_header_sites:
        raise Exception("%s is not configured as a sandbot header site.")
    else:
        bot = SandHeaderBot(db_name)
else:
    bot = SandBot(db_name)
bot.run()

if __name__ == "__main__":
    try:
        main()
    finally:
        # Always release the pywikibot throttle on exit.
        pywikibot.stopme()
# NOTE(review): fragment — this chunk begins mid-call inside an aiohttp
# request handler whose beginning (def line, loop headers, `definitions`,
# `translation_list`, `ret`, `entry`, `language`) is outside the visible
# region; the indentation below is a best-effort reconstruction.
                wiktionary_processor.advanced_extract_definition(
                    entry.part_of_speech, d))
        entry.definitions = definitions
        section = entry.serialise()
        # Attach translations matching this entry's part of speech.
        for translation in wiktionary_processor.retrieve_translations():
            if translation.part_of_speech == entry.part_of_speech:
                translation_list.append(translation.serialise())
        if entry.language == language:
            section['translations'] = translation_list
        ret.append(section)
    return Response(text=json.dumps(ret), status=200,
                    content_type='application/json')

if __name__ == '__main__':
    try:
        set_throttle(1)
        app = web.Application()
        app.router.add_routes(routes)
        web.run_app(app, host=args.HOST, port=args.PORT)
    except Exception as exc:
        log.exception(exc)
        log.critical("Error occurred while setting up the server")
    finally:
        # Always release the pywikibot throttle when the server stops.
        pwbot.stopme()