Code example #1 (score: 0)
File: deadend.py — Project: hairr/HairBot
def remove(title):
	"""Remove the {{Dead end}} maintenance tag from *title*, whether it
	stands alone or sits inside a {{Multiple issues}} wrapper, and add a
	dated {{Underlinked}} tag when the page still has few outgoing links.

	Fixes over the original:
	- ``template.name is 'X' or 'Y' or ...`` was always true: an ``or``
	  chain of non-empty string literals is truthy, and ``is`` must not be
	  used for string comparison.  Replaced with a normalized membership
	  test against the known template aliases.
	- bare ``except:`` clauses narrowed to ``except Exception``.
	- the page text is fetched once instead of twice; the unused
	  ``oldtext`` local is gone.
	- ``text.remove(itemplate, '')`` passed a spurious second argument to
	  mwparserfromhell's ``remove``; it now passes only the node.
	"""
	# Known aliases, compared case-insensitively after stripping whitespace.
	mi_names = {'multiple issues', 'article issues', 'articleissues',
	            'issues', 'mi', 'multiple', 'multipleissues'}
	de_names = {'dead end', 'dep', 'dead end page', 'dead-end', 'deadend'}

	page_text = mwhair.edit(title)  # fetch the wikitext once
	text = mw.parse(page_text)
	try:
		links = len(pagelinks(title))
	except Exception:  # page may have no outgoing links at all
		links = 0
	if links > 1:
		if allow_bots(page_text, 'HairBot'):
			for template in text.filter_templates():
				name = str(template.name).strip().lower()
				if name in mi_names:
					if template.has_param('dead end'):
						template.remove('dead end')
						if links <= 4:
							text = '{{subst:dated|Underlinked}}\n' + text
					else:
						try:
							# {{Multiple issues}} may carry the tags as its
							# first positional parameter.
							for itemplate in template.get(1).value.filter_templates():
								iname = str(itemplate.name).strip().lower()
								if iname in de_names:
									text.remove(itemplate)
									if links <= 4:
										text = '{{subst:dated|Underlinked}}\n' + text
						except Exception:  # no positional parameter 1 — nothing nested
							pass
				elif name in de_names:
					text.replace(template, '')
			raw_input('Press enter')  # manual confirmation before saving
			save(title, text)
Code example #2 (score: 0)
File: sectionlinks.py — Project: hairr/HairBot
def get_contents(title):
	"""Return the wikitext of *title*, cached in pages.db keyed by the
	page's current revision number.

	On a cache hit the stored contents are returned; on a miss the live
	text is fetched, memoized under this revision id, and returned.

	Fixes over the original: the bare ``except:`` around
	``fetchall()[0][0]`` swallowed *every* error (including real database
	failures) to signal a cache miss — replaced with an explicit
	``fetchone()`` check.
	"""
	con = lite.connect('pages.db')
	revid = mwhair.revnumber(title)
	with con:  # commits on success, rolls back on error
		cur = con.cursor()
		cur.execute("SELECT Contents FROM Pages WHERE Id = (?)", (revid,))
		row = cur.fetchone()
		if row is not None:
			return row[0]
		# Cache miss: fetch the live text and store it for this revision.
		text = mwhair.edit(title)
		cur.execute('INSERT INTO Pages VALUES(?,?)', (revid, text,))
		return text
Code example #3 (score: 0)
File: main.py — Project: hairr/Webcite-archive-bot
 def run(self):
     """
     aaannndddd begin
     """
     pages = self.get_pages()
     for page in pages:
         print "Viewing page: " + page
         urls, new_text = [], None
         try:
             text = mwhair.edit(page)
         except UnicodeEncodeError:
             print "UnicodeEncodeError: " + page
         except:
             print "Unknown Error for " + page
         time.sleep(1)
         links = self.get_links(text)
         if links:
             for link in links:
                 print "Found url: " + link
                 archived_link = self.archive(link)
                 if archived_link is None:
                     continue
                 urls.append((link,
                             archived_link))
                 time.sleep(31)  # At 30 seconds we'll be rejected
             for url in urls:
                 if new_text:
                     new_text = self.add_in_template(new_text,
                     url[0], url[1])
                 else:
                     new_text = self.add_in_template(text,
                     url[0], url[1])
                 if text != new_text:
                     print "Saving page: " + page
                     mwhair.save(page, text=new_text,
                     summary="Archiving urls in cite templates", minor=True)