Example no. 1
0
def store_revisions(self, page_url):
  """
  Retrieve all the revisions of a given wikipedia page_url and store
  each one in the dataset, reporting progress after every write.

  parameters:
    - page_url: a wikipedia page URL
  """

  p = Page()

  d = Dataset( "%s:27017" % (mongodb_host) )

  title = url2title(page_url)
  lang = url2lang(page_url)

  p.fetch_from_api_title(title, lang=lang)

  revisions = p.get_all_editors()

  # hoisted: len() was being recomputed on every loop iteration
  total = len(revisions)

  # enumerate(..., 1) replaces the manual `i = 0; i += 1` counter
  for i, revision in enumerate(revisions, 1):

    # ex: en/crimea/revision/999999
    key = "%s/%s/revision/%s" % (lang,title,revision["revid"])

    # fetch the single revision identified by revid from the internet
    value = p.get_revisions(extra_params={ "rvstartid": revision["revid"], "rvlimit" : 1})

    # write it in the database handler
    d.write(key, value)

    # report progress (Celery-style update_state) -- assumes self is a
    # bound task instance; TODO confirm against the task decorator
    self.update_state( state='PROGRESS',
      meta= { 'current': i, 'total': total})
Example no. 2
0
def store_last_revisions(db_url):
  """
  Fetch every revision added to a wikipedia page since the newest one
  already stored under db_url, and write each new revision to the dataset.

  parameters:
    - db_url: dataset URL of the form "<lang>/<page>" (a trailing
      "/timeline" suffix is stripped if present)
  """
  dataset = Dataset( "%s:27017" % (mongodb_host) )

  page_url = db_url.replace("/timeline", "")
  (lang, page) = page_url.split("/")

  wiki_page = Page()
  wiki_page.fetch_from_api_title(page, lang=lang)

  # newest revision currently available from the API
  last_rev = wiki_page.get_revisions(extra_params={ "rvlimit" : 1 })

  print("last revisions: %s" % (page_url.encode("utf8")))

  # $slice: -1 keeps only the most recent stored revision of the document
  stored = list(dataset.find({ "url": db_url }, { "url" : 1, "dataset" : { "$slice": -1 } }))
  latest_stored = stored[0]["dataset"][0]

  # walk forward ("newer") from the last stored revision up to the
  # newest one known to the API
  extra_params = {
    "rvstartid": latest_stored["revid"],
    "rvendid": last_rev[0]["revid"],
    "rvdir": "newer"
  }

  print(extra_params)

  new_revs = wiki_page.get_revisions(extra_params=extra_params)

  print("%s new revisions since %s (%s)" % (len(new_revs), latest_stored["timestamp"], latest_stored["revid"]))
  print("%s  ---->  %s" % (latest_stored["timestamp"], last_rev[0]["timestamp"]))

  for rev in new_revs:
    revision_key = "%s/%s/revision/%s" % (lang, page, rev["revid"])
    dataset.write(revision_key, [ rev ])