def main(rootpage, saveto):
    """Report WikiProjects that are not in any WikiProject meta-category.

    Reads the directory page at ``rootpage + '/All'``, and for every
    "WikiProject directory entry" template found there, checks whether the
    project's Wikipedia:/Category: page belongs to at least one whitelisted
    WikiProject category. Projects matching none are listed on the wiki page
    named by *saveto*.

    :param rootpage: Base title of the WikiProject directory (no trailing slash).
    :param saveto: Title of the page where the report is saved.
    """
    wptools = WikiProjectTools()
    bot = pywikibot.Site('en', 'wikipedia')
    output = 'These WikiProjects are not in any WikiProject meta-categories:\n\n'

    # Generating category whitelist
    wpcats = WikiProjectCategories()
    tree = wpcats.generate()
    # Run through a simple generator function to produce a flat list,
    # then de-duplicate and freeze into a tuple.
    whitelist = tuple(set(treegen(tree)))

    page = pywikibot.Page(bot, rootpage + '/All')
    contents = mwph.parse(page.text)
    contents = contents.filter_templates()
    for t in contents:
        if t.name.strip() == "WikiProject directory entry":
            project = str(t.get('project').value).strip().replace(' ', '_')
            # Give me a list of all the categories, as long as it's on the whitelist.
            # NOTE(review): the SQL is assembled by string interpolation; the
            # whitelist tuple's repr is only valid SQL when it has >= 2 entries
            # (a 1-tuple renders as ('x',) with a trailing comma). Confirm the
            # whitelist always has multiple entries, or build the IN-list
            # explicitly / use parameterized queries.
            query = wptools.query('wiki', "select distinct cl_to from categorylinks join page on categorylinks.cl_from=page.page_id where page_namespace in (4, 14) and page_title = {0} and cl_to in {1};".format('"' + project + '"', whitelist), None)
            if len(query) == 0:  # If page is in none of the whitelisted categories
                output += "# [[Wikipedia:{0}|{0}]]\n".format(project.replace('_', ' '))

    page = pywikibot.Page(bot, saveto)
    page.text = output
    page.save('Updating', minor=False)
def main(rootpage, saveto):
    """Report WikiProjects (small-entry variant) missing from meta-categories.

    Identical in structure to the full-entry report, but scans for the
    "WikiProject directory entry small" template on ``rootpage + '/All'``.
    Each matched project is checked against the whitelisted WikiProject
    categories; projects in none of them are listed on *saveto*.

    :param rootpage: Base title of the WikiProject directory (no trailing slash).
    :param saveto: Title of the page where the report is saved.
    """
    wptools = WikiProjectTools()
    bot = pywikibot.Site('en', 'wikipedia')
    output = 'These WikiProjects are not in any WikiProject meta-categories:\n\n'

    # Generating category whitelist
    wpcats = WikiProjectCategories()
    tree = wpcats.generate()
    # Flatten the category tree, de-duplicate, and freeze into a tuple.
    whitelist = tuple(set(treegen(tree)))

    page = pywikibot.Page(bot, rootpage + '/All')
    contents = mwph.parse(page.text)
    contents = contents.filter_templates()
    for t in contents:
        if t.name.strip() == "WikiProject directory entry small":
            project = str(t.get('project').value).strip().replace(' ', '_')
            # Give me a list of all the categories, as long as it's on the whitelist.
            # NOTE(review): SQL built by string interpolation; a one-element
            # whitelist tuple would render as ('x',) — invalid SQL. Verify the
            # whitelist always has >= 2 entries or build the IN-list explicitly.
            query = wptools.query('wiki', "select distinct cl_to from categorylinks join page on categorylinks.cl_from=page.page_id where page_namespace in (4, 14) and page_title = {0} and cl_to in {1};".format('"' + project + '"', whitelist), None)
            if len(query) == 0:  # If page is in none of the whitelisted categories
                output += "# [[Wikipedia:{0}|{0}]]\n".format(project.replace('_', ' '))

    page = pywikibot.Page(bot, saveto)
    page.text = output
    page.save('Updating', minor=False)
def main(rootpage):
    """Rebuild the WikiProject directory subpages from the /All listing.

    Parses ``rootpage + '/All'`` for "WikiProject directory entry small"
    templates, converts each to a full "entry" row, then regenerates the
    per-category directory pages and the directory index template.

    :param rootpage: Base title of the WikiProject directory (no trailing slash).
    """
    d = WikiProjectDirectory()
    wptools = WikiProjectTools()
    wpcats = WikiProjectCategories()
    tree = wpcats.generate()
    bot = pywikibot.Site('en', 'wikipedia')
    directories = {}
    directoryrow = {}
    projects = []

    # Generate directoryrows and projects lists based on the /All directory:
    page = pywikibot.Page(bot, rootpage + '/All')
    contents = mwph.parse(page.text)
    contents = contents.filter_templates()
    for t in contents:
        if t.name.strip() == "WikiProject directory entry small":
            name = str(t.get('project').value).strip().replace(' ', '_')
            projects.append(name)
            # Promote the compact template to the full "entry" form.
            directoryrow[name] = str(t).replace('entry small', 'entry') + "\n"

    # The rest of this stuff is copied from directory.py
    index_primary = sorted(tree.keys())
    index_secondary = {}
    indextext = "'''[[{0}/All|All WikiProjects]]'''\n\n".format(rootpage)
    for firstlevel in tree.keys():
        directories[firstlevel] = "={0}=\n".format(firstlevel.replace('_', ' '))
        # For immediate subcats of WikiProjects_by_area
        directories[firstlevel] += d.listpull(wptools, projects, directoryrow, firstlevel)
        # For descendants of those immediate subcats.
        directories[firstlevel] += d.treeiterator(wptools, tree[firstlevel], projects, directoryrow, firstlevel)
        index_secondary[firstlevel] = sorted(tree[firstlevel].keys())

    # Updating the directory index
    for firstlevel in index_primary:
        firstlevel_normalized = firstlevel.replace('_', ' ')
        indextext += ";[[{0}/{1}|{1}]]".format(rootpage, firstlevel_normalized)
        if len(tree[firstlevel]) > 0:
            indextext += " : "
            for secondlevel in index_secondary[firstlevel]:
                indextext += "[[{0}/{1}#{2}|{2}]] – ".format(rootpage, firstlevel_normalized, secondlevel.replace('_', ' '))
            indextext = indextext[:-3]  # Truncates trailing " – " separator
        indextext += "\n\n"

    saveindex = pywikibot.Page(bot, 'Template:WikiProject directory index')
    saveindex.text = indextext
    # FIX: `async` is a reserved word as of Python 3.7; pywikibot renamed
    # the keyword argument to `asynchronous`.
    saveindex.save('Updating', minor=False, asynchronous=True)

    # Generate directories and save!
    for directory in directories.keys():
        contents = directories[directory]
        page = pywikibot.Page(bot, rootpage + "/" + directory)
        # Checking to see if a change was made to cut down on API save queries
        if contents != page.text:
            page.text = contents
            page.save('Updating', minor=False, asynchronous=True)
def main(self, rootpage):
    """Generate WikiProject activity reports, description pages, and directories.

    For every WikiProject (drawn from the project index plus a formal SQL
    definition), computes lists of active project participants (90 days) and
    active subject-area editors (30 days), saves a per-project description
    page, builds directory rows, regenerates the directory subpages and
    index, and nominates obsolete description pages for deletion.

    :param rootpage: Base title of the WikiProject directory (no trailing slash).
    """
    # Initializing...
    bot = pywikibot.Site('en', 'wikipedia')
    wptools = WikiProjectTools()
    # NOTE(review): `config` is loaded but never referenced below — confirm
    # whether it is still needed before removing.
    config = json.loads(wptools.query('index', 'select json from config;', None)[0][0])

    # Get list of people who opted out
    optout = pywikibot.Page(bot, 'User:Reports bot/Opt-out')
    blacklist = []
    # FIX: raw strings for regex patterns — '\[' etc. are invalid escape
    # sequences in ordinary string literals (DeprecationWarning, future error).
    regexes = [re.findall(r'\[\[User:(.*?)\|', optout.text, re.I),
               re.findall(r'\{\{user\|(.*?)\}\}', optout.text, re.I),
               re.findall(r'\[\[:User:(.*?)\]', optout.text, re.I),
               re.findall(r'\[\[:User talk:(.*?)\]', optout.text, re.I)]
    for results in regexes:
        for user in results:
            blacklist.append(user)

    # Bots are to be excluded
    for result in wptools.query('wiki', "select user_name from user_groups left join user on user_id = ug_user where ug_group = 'bot';", None):
        blacklist.append(result[0].decode('utf-8'))

    # List of projects we are working on
    # Methodology: List from Project Index + List from Formal Definition, minus duplicates
    # This will cover all of our bases.
    articles = {}
    counter = 0
    while True:  # Page through the project index in 1M-row windows
        query = wptools.query('index', 'select pi_page, pi_project from projectindex where pi_id > {0} and pi_id <= {1};'.format(counter, counter + 1000000), None)
        if len(query) == 0:
            break
        for pair in query:
            # Normalizing by getting rid of namespace
            page = pair[0]
            page = page.replace('Draft_talk:', '')
            page = page.replace('Talk:', '')
            proj = pair[1][10:]  # Normalizing by getting rid of "Wikipedia:"
            try:
                articles[proj].append(page)
            except KeyError:
                articles[proj] = [page]
        counter += 1000000

    projects = [project for project in articles.keys()]
    # Raw strings: '\_' escapes the SQL LIKE wildcard, not a Python escape.
    q = ('select distinct page.page_title from page '
         'join categorylinks on page.page_id = categorylinks.cl_from '
         'left join redirect on page.page_id = redirect.rd_from '
         'where page_namespace = 4 '
         'and page_title not like "%/%" '
         'and rd_title is null '
         'and (cl_to in '
         '(select page.page_title from page '
         'where page_namespace = 14 and '
         r'page_title like "%\_WikiProjects" '
         r'and page_title not like "%\_for\_WikiProjects" '
         r'and page_title not like "%\_of\_WikiProjects") '
         r'or page_title like "WikiProject\_%");')
    formaldefinition = wptools.query('wiki', q, None)  # http://quarry.wmflabs.org/query/3509
    for row in formaldefinition:
        row = row[0].decode('utf-8')
        if row not in projects:
            projects.append(row)
    projects.sort()

    directories = {'All': ''}  # All projects, plus subdirectories to be defined below.
    directoryrow = {}

    # Alright! Let's run some reports!
    for project in projects:
        # Seeding directory row and profile page
        if project not in articles:
            articles[project] = []
        project_normalized = project.replace('_', ' ')

        # List of active project participants (less blacklist)
        wp_editors = []
        start_date = time.strftime('%Y%m%d000000', time.gmtime(time.time() - (60 * 60 * 24 * 90)))  # 90 days
        end_date = time.strftime('%Y%m%d000000', time.gmtime(time.time()))  # Today
        query = "select rev_user_text from page left join revision on page_id = rev_page where (page_namespace = 4 OR page_namespace = 5) and (page_title like \"{0}/%%\" OR page_title = \"{0}\") and rev_timestamp > {1} and rev_timestamp < {2} group by rev_user_text HAVING count(*) > 1;".format(project, start_date, end_date)
        for result in wptools.query('wiki', query, None):
            if result[0] is not None:
                user = result[0].decode('utf-8')
                if user not in blacklist:
                    wp_editors.append(user)
        wp_editors.sort()

        # List of active subject area editors (less blacklist)
        start_date = time.strftime('%Y%m%d000000', time.gmtime(time.time() - (60 * 60 * 24 * 30)))  # 30 days
        end_date = time.strftime('%Y%m%d000000', time.gmtime(time.time()))  # Today
        if len(articles[project]) > 0:
            subject_editors = []
            # Chunk the article list so the SQL IN-clause stays a sane size.
            packages = []
            for i in range(0, len(articles[project]), 10000):
                packages.append(articles[project][i:i + 10000])
            for package in packages:
                if len(package) > 1:
                    query_builder = 'select rev_user_text from page left join revision on page_id = rev_page where page_namespace in (0, 1, 118, 119) and page_title in {0} and rev_timestamp > {1} and rev_timestamp < {2} order by rev_user_text;'.format(tuple(package), start_date, end_date)
                else:
                    # A 1-tuple would render as ('x',) — invalid SQL — so use equality.
                    query_builder = 'select rev_user_text from page left join revision on page_id = rev_page where page_namespace in (0, 1, 118, 119) and page_title = "{0}" and rev_timestamp > {1} and rev_timestamp < {2} order by rev_user_text;'.format(package[0], start_date, end_date)
                for result in wptools.query('wiki', query_builder, None):
                    if result[0] is not None:
                        subject_editors.append(result[0].decode('utf-8'))
            # Convert the list to a dictionary with username as key and edit count as value
            subject_editors = dict(Counter(subject_editors))
            subject_editors_filtered = []
            for user in subject_editors.keys():
                if user not in blacklist:
                    if subject_editors[user] > 4:  # Keep editors with >= 5 counted edits
                        subject_editors_filtered.append(user)
            subject_editors = subject_editors_filtered  # And now assigned back.
            subject_editors.sort()
        else:
            subject_editors = []

        # Generate and Save Profile Page
        wp_editors_formatted = ""
        subject_editors_formatted = ""
        if len(wp_editors) > 0:
            for editor in wp_editors:
                wp_editors_formatted += "\n* [[User:{0}|{0}]] ([[User talk:{0}|talk]])".format(editor)
        else:
            wp_editors_formatted = ""
        # Omit the list entirely when it would be unmanageably long (>= 3200).
        if len(subject_editors) > 0 and len(subject_editors) < 3200:
            for editor in subject_editors:
                subject_editors_formatted += "\n* [[User:{0}|{0}]] ([[User talk:{0}|talk]])".format(editor)
        else:
            subject_editors_formatted = ""

        profilepage = "{{{{WikiProject description page | project = {0} | list_of_active_wikiproject_participants = {1} | list_of_active_subject_area_editors = {2}}}}}".format(project_normalized, wp_editors_formatted, subject_editors_formatted)
        page = pywikibot.Page(bot, rootpage + '/Description/' + project_normalized)
        # Checking to see if a change was made to cut down on API queries
        if profilepage != page.text:
            page.text = profilepage
            # FIX: `async` became a reserved word in Python 3.7; pywikibot
            # renamed the keyword argument to `asynchronous`.
            page.save('Updating', minor=False, asynchronous=True, quiet=True)

        # Construct directory entry
        directoryrow[project] = "{{{{WikiProject directory entry | project = {0} | number_of_articles = {1} | wp_editors = {2} | scope_editors = {3}}}}}\n".format(project_normalized, len(articles[project]), len(wp_editors), len(subject_editors))

    # Assign directory entry to relevant directory pages ("All entries" and relevant subdirectory pages)
    for entry in sorted(directoryrow.items(), key=operator.itemgetter(1)):  # Sorting into alphabetical order
        directories['All'] += entry[1]
    directories['All'] = "{{WikiProject directory top}}\n" + directories['All'] + "|}"

    wpcats = WikiProjectCategories()
    tree = wpcats.generate()
    index_primary = sorted(tree.keys())
    index_secondary = {}
    indextext = "'''[[{0}/All|All WikiProjects]]'''\n\n".format(rootpage)
    for firstlevel in tree.keys():
        directories[firstlevel] = "={0}=\n".format(firstlevel.replace('_', ' '))
        # For immediate subcats of WikiProjects_by_area
        directories[firstlevel] += self.listpull(wptools, projects, directoryrow, firstlevel)
        # For descendants of those immediate subcats.
        directories[firstlevel] += self.treeiterator(wptools, tree[firstlevel], projects, directoryrow, firstlevel)
        index_secondary[firstlevel] = sorted(tree[firstlevel].keys())

    # Updating the directory index
    for firstlevel in index_primary:
        firstlevel_normalized = firstlevel.replace('_', ' ')
        indextext += ";[[{0}/{1}|{1}]]".format(rootpage, firstlevel_normalized)
        if len(tree[firstlevel]) > 0:
            indextext += " : "
            for secondlevel in index_secondary[firstlevel]:
                indextext += "[[{0}/{1}#{2}|{2}]] – ".format(rootpage, firstlevel_normalized, secondlevel.replace('_', ' '))
            indextext = indextext[:-3]  # Truncates trailing " – " separator
        indextext += "\n\n"

    saveindex = pywikibot.Page(bot, 'Template:WikiProject directory index')
    saveindex.text = indextext
    saveindex.save('Updating', minor=False, asynchronous=True, quiet=True)

    # Generate directories and save!
    for directory in directories.keys():
        contents = directories[directory]
        page = pywikibot.Page(bot, rootpage + "/" + directory)
        # Checking to see if a change was made to cut down on API save queries
        if contents != page.text:
            oldcontents = page.text
            page.text = contents
            page.save('Updating', minor=False, asynchronous=True, quiet=True)

            # Cleanup of obsolete description pages and "Related WikiProjects" pages
            if directory == 'All':
                oldcontents = mwph.parse(oldcontents)
                oldcontents = oldcontents.filter_templates()
                oldprojectlist = []
                for t in oldcontents:
                    if t.name.strip() == "WikiProject directory entry":
                        oldprojectlist.append(str(t.get('project').value))
                for oldproject in oldprojectlist:
                    oldproject = oldproject.strip().replace(' ', '_')  # Normalizing
                    if oldproject not in projects:
                        deletethis = pywikibot.Page(bot, rootpage + '/Description/' + oldproject)
                        deletethis.text = "{{db-g6|rationale=A bot has automatically tagged this page as obsolete. This means that the WikiProject described on this page has been deleted or made into a redirect}}\n"
                        deletethis.save('Nominating page for deletion', minor=False, asynchronous=True, quiet=True)
                        deletethis = pywikibot.Page(bot, 'Wikipedia:Related WikiProjects/' + oldproject)
                        if deletethis.text != "":
                            deletethis.text = "{{db-g6|rationale=A bot has automatically tagged this page as obsolete. This means that the WikiProject described on this page has been deleted or made into a redirect}}\n"
                            deletethis.save('Nominating page for deletion', minor=False, asynchronous=True, quiet=True)
def main(rootpage):
    """Rebuild the WikiProject directory subpages from full /All entries.

    Parses ``rootpage + '/All'`` for "WikiProject directory entry" templates
    (the full form, unlike the small-entry variant), reuses each row as-is,
    then regenerates the per-category directory pages and the directory
    index template.

    :param rootpage: Base title of the WikiProject directory (no trailing slash).
    """
    d = WikiProjectDirectory()
    wptools = WikiProjectTools()
    wpcats = WikiProjectCategories()
    tree = wpcats.generate()
    bot = pywikibot.Site('en', 'wikipedia')
    directories = {}
    directoryrow = {}
    projects = []

    # Generate directoryrows and projects lists based on the /All directory:
    page = pywikibot.Page(bot, rootpage + '/All')
    contents = mwph.parse(page.text)
    contents = contents.filter_templates()
    for t in contents:
        if t.name.strip() == "WikiProject directory entry":
            name = str(t.get('project').value).strip().replace(' ', '_')
            projects.append(name)
            directoryrow[name] = str(t) + "\n"

    # The rest of this stuff is copied from directory.py
    index_primary = sorted(tree.keys())
    index_secondary = {}
    indextext = "'''[[{0}/All|All WikiProjects]]'''\n\n".format(rootpage)
    for firstlevel in tree.keys():
        directories[firstlevel] = "={0}=\n".format(firstlevel.replace('_', ' '))
        # For immediate subcats of WikiProjects_by_area
        directories[firstlevel] += d.listpull(wptools, projects, directoryrow, firstlevel)
        # For descendants of those immediate subcats.
        directories[firstlevel] += d.treeiterator(wptools, tree[firstlevel], projects, directoryrow, firstlevel)
        index_secondary[firstlevel] = sorted(tree[firstlevel].keys())

    # Updating the directory index
    for firstlevel in index_primary:
        firstlevel_normalized = firstlevel.replace('_', ' ')
        indextext += ";[[{0}/{1}|{1}]]".format(rootpage, firstlevel_normalized)
        if len(tree[firstlevel]) > 0:
            indextext += " : "
            for secondlevel in index_secondary[firstlevel]:
                indextext += "[[{0}/{1}#{2}|{2}]] – ".format(rootpage, firstlevel_normalized, secondlevel.replace('_', ' '))
            indextext = indextext[:-3]  # Truncates trailing " – " separator
        indextext += "\n\n"

    saveindex = pywikibot.Page(bot, 'Template:WikiProject directory index')
    saveindex.text = indextext
    # FIX: `async` is a reserved word as of Python 3.7; pywikibot renamed
    # the keyword argument to `asynchronous`.
    saveindex.save('Updating', minor=False, asynchronous=True)

    # Generate directories and save!
    for directory in directories.keys():
        contents = directories[directory]
        page = pywikibot.Page(bot, rootpage + "/" + directory)
        # Checking to see if a change was made to cut down on API save queries
        if contents != page.text:
            page.text = contents
            page.save('Updating', minor=False, asynchronous=True)
def main(self, rootpage):
    """Generate WikiProject activity reports, description pages, and directories.

    Variant of the report generator without quiet saves. For every
    WikiProject (project index plus formal SQL definition), computes active
    project participants (90 days) and active subject-area editors (30
    days), saves per-project description pages, rebuilds the directory
    subpages and index, and nominates obsolete description pages for
    deletion.

    :param rootpage: Base title of the WikiProject directory (no trailing slash).
    """
    # Initializing...
    bot = pywikibot.Site('en', 'wikipedia')
    wptools = WikiProjectTools()
    # NOTE(review): `config` is loaded but never referenced below — confirm
    # whether it is still needed before removing.
    config = json.loads(wptools.query('index', 'select json from config;', None)[0][0])

    # Get list of people who opted out
    optout = pywikibot.Page(bot, 'User:Reports bot/Opt-out')
    blacklist = []
    # FIX: raw strings for regex patterns — '\[' etc. are invalid escape
    # sequences in ordinary string literals (DeprecationWarning, future error).
    regexes = [
        re.findall(r'\[\[User:(.*?)\|', optout.text, re.I),
        re.findall(r'\{\{user\|(.*?)\}\}', optout.text, re.I),
        re.findall(r'\[\[:User:(.*?)\]', optout.text, re.I),
        re.findall(r'\[\[:User talk:(.*?)\]', optout.text, re.I)
    ]
    for results in regexes:
        for user in results:
            blacklist.append(user)

    # Bots are to be excluded
    for result in wptools.query(
            'wiki',
            "select user_name from user_groups left join user on user_id = ug_user where ug_group = 'bot';",
            None):
        blacklist.append(result[0].decode('utf-8'))

    # List of projects we are working on
    # Methodology: List from Project Index + List from Formal Definition, minus duplicates
    # This will cover all of our bases.
    articles = {}
    counter = 0
    while True:  # Page through the project index in 1M-row windows
        query = wptools.query(
            'index',
            'select pi_page, pi_project from projectindex where pi_id > {0} and pi_id <= {1};'
            .format(counter, counter + 1000000), None)
        if len(query) == 0:
            break
        for pair in query:
            # Normalizing by getting rid of namespace
            page = pair[0]
            page = page.replace('Draft_talk:', '')
            page = page.replace('Talk:', '')
            proj = pair[1][10:]  # Normalizing by getting rid of "Wikipedia:"
            try:
                articles[proj].append(page)
            except KeyError:
                articles[proj] = [page]
        counter += 1000000

    projects = [project for project in articles.keys()]
    # Raw strings: '\_' escapes the SQL LIKE wildcard, not a Python escape.
    q = ('select distinct page.page_title from page '
         'join categorylinks on page.page_id = categorylinks.cl_from '
         'left join redirect on page.page_id = redirect.rd_from '
         'where page_namespace = 4 '
         'and page_title not like "%/%" '
         'and rd_title is null '
         'and (cl_to in '
         '(select page.page_title from page '
         'where page_namespace = 14 and '
         r'page_title like "%\_WikiProjects" '
         r'and page_title not like "%\_for\_WikiProjects" '
         r'and page_title not like "%\_of\_WikiProjects") '
         r'or page_title like "WikiProject\_%");')
    formaldefinition = wptools.query('wiki', q, None)  # http://quarry.wmflabs.org/query/3509
    for row in formaldefinition:
        row = row[0].decode('utf-8')
        if row not in projects:
            projects.append(row)
    projects.sort()

    directories = {'All': ''}  # All projects, plus subdirectories to be defined below.
    directoryrow = {}

    # Alright! Let's run some reports!
    for project in projects:
        # Seeding directory row and profile page
        if project not in articles:
            articles[project] = []
        project_normalized = project.replace('_', ' ')

        # List of active project participants (less blacklist)
        wp_editors = []
        start_date = time.strftime('%Y%m%d000000', time.gmtime(time.time() - (60 * 60 * 24 * 90)))  # 90 days
        end_date = time.strftime('%Y%m%d000000', time.gmtime(time.time()))  # Today
        query = "select rev_user_text from page left join revision on page_id = rev_page where (page_namespace = 4 OR page_namespace = 5) and (page_title like \"{0}/%%\" OR page_title = \"{0}\") and rev_timestamp > {1} and rev_timestamp < {2} group by rev_user_text HAVING count(*) > 1;".format(project, start_date, end_date)
        for result in wptools.query('wiki', query, None):
            if result[0] is not None:
                user = result[0].decode('utf-8')
                if user not in blacklist:
                    wp_editors.append(user)
        wp_editors.sort()

        # List of active subject area editors (less blacklist)
        start_date = time.strftime('%Y%m%d000000', time.gmtime(time.time() - (60 * 60 * 24 * 30)))  # 30 days
        end_date = time.strftime('%Y%m%d000000', time.gmtime(time.time()))  # Today
        if len(articles[project]) > 0:
            subject_editors = []
            # Chunk the article list so the SQL IN-clause stays a sane size.
            packages = []
            for i in range(0, len(articles[project]), 10000):
                packages.append(articles[project][i:i + 10000])
            for package in packages:
                if len(package) > 1:
                    query_builder = 'select rev_user_text from page left join revision on page_id = rev_page where page_namespace in (0, 1, 118, 119) and page_title in {0} and rev_timestamp > {1} and rev_timestamp < {2} order by rev_user_text;'.format(tuple(package), start_date, end_date)
                else:
                    # A 1-tuple would render as ('x',) — invalid SQL — so use equality.
                    query_builder = 'select rev_user_text from page left join revision on page_id = rev_page where page_namespace in (0, 1, 118, 119) and page_title = "{0}" and rev_timestamp > {1} and rev_timestamp < {2} order by rev_user_text;'.format(package[0], start_date, end_date)
                for result in wptools.query('wiki', query_builder, None):
                    if result[0] is not None:
                        subject_editors.append(result[0].decode('utf-8'))
            # Convert the list to a dictionary with username as key and edit count as value
            subject_editors = dict(Counter(subject_editors))
            subject_editors_filtered = []
            for user in subject_editors.keys():
                if user not in blacklist:
                    if subject_editors[user] > 4:  # Keep editors with >= 5 counted edits
                        subject_editors_filtered.append(user)
            subject_editors = subject_editors_filtered  # And now assigned back.
            subject_editors.sort()
        else:
            subject_editors = []

        # Generate and Save Profile Page
        wp_editors_formatted = ""
        subject_editors_formatted = ""
        if len(wp_editors) > 0:
            for editor in wp_editors:
                wp_editors_formatted += "\n* [[User:{0}|{0}]] ([[User talk:{0}|talk]])".format(editor)
        else:
            wp_editors_formatted = ""
        # Omit the list entirely when it would be unmanageably long (>= 3200).
        if len(subject_editors) > 0 and len(subject_editors) < 3200:
            for editor in subject_editors:
                subject_editors_formatted += "\n* [[User:{0}|{0}]] ([[User talk:{0}|talk]])".format(editor)
        else:
            subject_editors_formatted = ""

        profilepage = "{{{{WikiProject description page | project = {0} | list_of_active_wikiproject_participants = {1} | list_of_active_subject_area_editors = {2}}}}}".format(project_normalized, wp_editors_formatted, subject_editors_formatted)
        page = pywikibot.Page(bot, rootpage + '/Description/' + project_normalized)
        # Checking to see if a change was made to cut down on API queries
        if profilepage != page.text:
            page.text = profilepage
            # FIX: `async` became a reserved word in Python 3.7; pywikibot
            # renamed the keyword argument to `asynchronous`.
            page.save('Updating', minor=False, asynchronous=True)

        # Construct directory entry
        directoryrow[project] = "{{{{WikiProject directory entry | project = {0} | number_of_articles = {1} | wp_editors = {2} | scope_editors = {3}}}}}\n".format(project_normalized, len(articles[project]), len(wp_editors), len(subject_editors))

    # Assign directory entry to relevant directory pages ("All entries" and relevant subdirectory pages)
    for entry in sorted(directoryrow.items(), key=operator.itemgetter(1)):  # Sorting into alphabetical order
        directories['All'] += entry[1]
    directories['All'] = "{{WikiProject directory top}}\n" + directories['All'] + "|}"

    wpcats = WikiProjectCategories()
    tree = wpcats.generate()
    index_primary = sorted(tree.keys())
    index_secondary = {}
    indextext = "'''[[{0}/All|All WikiProjects]]'''\n\n".format(rootpage)
    for firstlevel in tree.keys():
        directories[firstlevel] = "={0}=\n".format(firstlevel.replace('_', ' '))
        # For immediate subcats of WikiProjects_by_area
        directories[firstlevel] += self.listpull(wptools, projects, directoryrow, firstlevel)
        # For descendants of those immediate subcats.
        directories[firstlevel] += self.treeiterator(wptools, tree[firstlevel], projects, directoryrow, firstlevel)
        index_secondary[firstlevel] = sorted(tree[firstlevel].keys())

    # Updating the directory index
    for firstlevel in index_primary:
        firstlevel_normalized = firstlevel.replace('_', ' ')
        indextext += ";[[{0}/{1}|{1}]]".format(rootpage, firstlevel_normalized)
        if len(tree[firstlevel]) > 0:
            indextext += " : "
            for secondlevel in index_secondary[firstlevel]:
                indextext += "[[{0}/{1}#{2}|{2}]] – ".format(rootpage, firstlevel_normalized, secondlevel.replace('_', ' '))
            indextext = indextext[:-3]  # Truncates trailing " – " separator
        indextext += "\n\n"

    saveindex = pywikibot.Page(bot, 'Template:WikiProject directory index')
    saveindex.text = indextext
    saveindex.save('Updating', minor=False, asynchronous=True)

    # Generate directories and save!
    for directory in directories.keys():
        contents = directories[directory]
        page = pywikibot.Page(bot, rootpage + "/" + directory)
        # Checking to see if a change was made to cut down on API save queries
        if contents != page.text:
            oldcontents = page.text
            page.text = contents
            page.save('Updating', minor=False, asynchronous=True)

            # Cleanup of obsolete description pages and "Related WikiProjects" pages
            if directory == 'All':
                oldcontents = mwph.parse(oldcontents)
                oldcontents = oldcontents.filter_templates()
                oldprojectlist = []
                for t in oldcontents:
                    if t.name.strip() == "WikiProject directory entry":
                        oldprojectlist.append(str(t.get('project').value))
                for oldproject in oldprojectlist:
                    oldproject = oldproject.strip().replace(' ', '_')  # Normalizing
                    if oldproject not in projects:
                        deletethis = pywikibot.Page(bot, rootpage + '/Description/' + oldproject)
                        deletethis.text = "{{db-g6|rationale=A bot has automatically tagged this page as obsolete. This means that the WikiProject described on this page has been deleted or made into a redirect}}\n"
                        deletethis.save('Nominating page for deletion', minor=False, asynchronous=True)
                        deletethis = pywikibot.Page(bot, 'Wikipedia:Related WikiProjects/' + oldproject)
                        if deletethis.text != "":
                            deletethis.text = "{{db-g6|rationale=A bot has automatically tagged this page as obsolete. This means that the WikiProject described on this page has been deleted or made into a redirect}}\n"
                            deletethis.save('Nominating page for deletion', minor=False, asynchronous=True)