Example No. 1
# (implied elsewhere in the module: import pywikibot, import mwparserfromhell,
#  and Category from pywikibot)
def __init__(self):
    self.site = pywikibot.Site("en", "wikipedia")
    # citation-error cleanup categories this task works through
    self.categories = [
        Category(self.site, "Category:Pages using citations with accessdate and no URL"),
        Category(self.site, "Category:Pages with archiveurl citation errors"),
        Category(self.site, "Category:Pages with citations having wikilinks embedded in URL titles"),
        Category(self.site, "Category:Pages with empty citations")
    ]
    # on-wiki control page for bot task 21
    self.doTaskPage = pywikibot.Page(self.site, "User:Hazard-Bot/DoTask/21")
    # every title (including redirects) that can invoke a citation template
    self.citationTemplates = self.getAllTitles("Template:Citation")
    citationTemplatesCategory = Category(self.site, "Category:Citation Style 1 templates")
    for page in citationTemplatesCategory.articles():
        # top-level template pages only (namespace 10): skip /doc, /sandbox
        if ("/" not in page.title()) and (page.namespace() == 10):
            self.citationTemplates.extend(self.getAllTitles(page.title()))
    self.subscription = self.getAllTitles("Template:Subscription required")
    self.lang = self.getAllTitles("Template:Lang")
    self.lang.extend(self.getAllTitles("Template:Rtl-lang"))
    self.loadLanguages("User:Hazard-Bot/Languages.css")
    # prebuilt {{citation needed}} tag, dated by substitution when saved
    self.citationNeededTemplate = mwparserfromhell.nodes.template.Template(
        "citation needed",
        params=[
            mwparserfromhell.nodes.extras.parameter.Parameter(
                "date",
                "{{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}"
            )
        ]
    )
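The getAllTitles helper is not shown in this snippet. From its use above it presumably maps a template to every title that can invoke it, i.e. the template itself plus all redirects pointing at it. A minimal sketch of that behavior, assuming a recent pywikibot core (only the method name comes from the snippet; the body is a guess):

def getAllTitles(self, title):
    # collect the template's own title plus all redirect titles,
    # without the "Template:" namespace prefix
    page = pywikibot.Page(self.site, title)
    titles = [page.title(with_ns=False)]
    for redirect in page.backlinks(filter_redirects=True):
        titles.append(redirect.title(with_ns=False))
    return titles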
Example No. 2
        if len(last) > 10:
            last.pop(0)
    return resorted


graticules = GraticuleDatabase()


def gratName(place):
    # place is a "lat lon" string; return the graticule's display name
    lat, lon = place.split(" ")
    return graticules.getLatLon(lat, lon)[0]


site = wikipedia.getSite()

meta = Category(site, "Category:Meetup by location")
locations = meta.subcategories()

data = []
try:
    data = load("meetupchart.data")
except:
    # no cached data yet; everything must be fetched from the wiki
    print "No meetup chart data is available. We'll have to fetch everything from the wiki. This will take a good while."

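# Note: load() is not defined in this snippet; given the usage above it is
# presumably a small read-the-cached-pickle-from-disk helper, e.g.:
#
#   def load(filename):
#       return pickle.load(open(filename, "rb"))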
# these are the ones we can't fix currently
addfails = [("John", "2008-06-07", "30 -84")]
skipfails = [
    ("NWoodruff", "2009-08-31", "50 11"),
    ("archchancellor", "2008-06-01", "37 -121"),
Example No. 3
def __init__(self):
    self.site = pywikibot.Site("en", "wikipedia")
    self.categories = [
        Category(self.site, "Category:Pages with citations having wikilinks embedded in URL titles"),
    ]
    self.doTaskPage = pywikibot.Page(self.site, "User:Hazard-Bot/DoTask/21")
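A hypothetical usage sketch, not part of the original snippet: once the categories list is built, a pywikibot task typically walks the mainspace members of each category like this (fix_page stands in for whatever repair the task performs):

for category in self.categories:
    for page in category.articles(namespaces=0):
        fix_page(page)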
Example No. 4
                cur.execute("delete from categories where title = '%s'" %
                            title)
                cur.execute("delete from participants where title = '%s'" %
                            title)
                cur.execute("delete from reports where title = '%s'" % title)
                conn.commit()

        try:
            text = exp.get()
        except:  # skip redirects: get() raises on redirect pages
            print "is redirect."
            continue
        if "[[Category:Retro meetup]]" in text:
            print "is retro."
            continue
        if "[[Category:Not reached - Did not attempt]]" in text:
            print "was not attempted."
            continue
        if "[[Category:Expedition planning]]" in text:
            print "is planning."
            continue

        cur.execute("insert into reports values ('%s', '%s')" %
                    (title.replace("'", "''"), text.replace("'", "''")))
        conn.commit()
        print "inserted", title


meta = Category(site, subcat)
recurseCategory(meta)
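Interpolating titles directly into SQL, as this example does, breaks on embedded quotes and invites injection; the snippet works around it with manual '' escaping. A safer sketch using parameter binding, assuming the driver is Python's sqlite3 (the example never shows which module created conn):

cur.execute("delete from reports where title = ?", (title,))
cur.execute("insert into reports values (?, ?)", (title, text))
conn.commit()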
Example No. 5
    last.append(best)
    resorted.append(best)
    places.remove(best)
    if len(last) > 10:
      last.pop(0)
  return resorted

graticules = GraticuleDatabase()

def gratName(place):
  lat, lon = place.split(" ")
  return graticules.getLatLon(lat, lon)[0]
    
site = wikipedia.getSite()

meta = Category(site, "Category:Meetup by location")
locations = meta.subcategories()

data = []
try:
  data = load("meetupchart.data")
except:
  print "No meetup chart data is available. We'll have to fetch everything from the wiki. This will take a good while."
  
# these are the ones we can't fix currently
addfails  = [("John", "2008-06-07", "30 -84")]
skipfails = [("NWoodruff", "2009-08-31", "50 11"), 
             ("archchancellor", "2008-06-01", "37 -121"),
             ("Tom Wuttke", "2008-06-16", "37 -122"),
Example No. 6
    if len(sys.argv) >= 2:
        if sys.argv[1] == "stats":
            while True:
                p = Corellations().print_stats()
                os.system('clear')
                print p
                time.sleep(10)
        else:
            while True:
                os.system('clear')
                Corellations().print_sel()
                time.sleep(10)

    else:
        site = wikipedia.getSite()
        cat = Category(site, u"Категория:Музыка")
        x = 0
        #while True:
        for p in cat.articles(recurse=3):
            print p
            p.purgeCache()
        #    p = site.randompage()
        #    if (p.namespace() == 0) and (not p.isDisambig()) and (not p.isRedirectPage()):
        #        Evaluate(p.title()).run()
        #        x += 1
        #        print x
        #        #if x == 10:
        #        #    time.sleep(10)
        #        #    x = 0
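A side note: this example uses the compat-era wikipedia module and purgeCache(), both of which are gone from current pywikibot. A rough modern equivalent of the purge loop, assuming pywikibot core:

import pywikibot

site = pywikibot.Site("ru", "wikipedia")
cat = pywikibot.Category(site, "Категория:Музыка")
for p in cat.articles(recurse=3):
    print(p)
    p.purge()  # ask MediaWiki to rebuild the page's cached rendering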