Example #1
# requires: import time, plus the project's cache, db and control modules and the
# faultTable, triggerCacheSetting and hoursTillRecheck globals defined elsewhere.
def init():
    # Skip the maintenance pass if it already ran within the last `hoursTillRecheck` hours.
    now = int(time.time())
    timelimit = now - 60 * 60 * hoursTillRecheck
    lastSeen = cache.cache_get(triggerCacheSetting)
    if lastSeen is not None and int(lastSeen["value"]) > timelimit:
        return

    try:
        # Make sure the fault table exists and purge entries older than the time limit.
        dbcon = db.connect(control.providercacheFile)
        dbcur = dbcon.cursor()
        dbcur.executescript(
            "CREATE TABLE IF NOT EXISTS %s (ID INTEGER PRIMARY KEY AUTOINCREMENT, provider TEXT, tag TEXT, date INTEGER, info TEXT);"
            % faultTable)
        dbcur.execute("DELETE FROM %s WHERE date < ?;" % faultTable,
                      (timelimit,))
        try:
            # Older databases may predate the `info` column; add it if the probe fails.
            dbcur.execute("SELECT info FROM %s" % faultTable)
        except Exception:
            dbcur.executescript("ALTER TABLE %s ADD info TEXT" % faultTable)
        dbcon.commit()
        dbcur.close()
        del dbcur
        dbcon.close()
    except Exception:
        pass

    # Record when this maintenance last ran.
    cache.cache_insert(triggerCacheSetting, now)
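
The cache.cache_get / cache.cache_insert helpers are project-specific and not shown on this page. A minimal sketch of what a compatible sqlite-backed pair could look like, following the call sites in Example #1 (the file path, table name and dict-shaped return value are assumptions, not the project's actual implementation):

import sqlite3
import time

CACHE_FILE = "cache.db"  # hypothetical path; the real project stores its cache elsewhere

def cache_get(key):
    """Return the cached row for `key` as a dict, or None if missing (assumed contract)."""
    con = sqlite3.connect(CACHE_FILE)
    try:
        con.execute("CREATE TABLE IF NOT EXISTS cache (key TEXT PRIMARY KEY, value TEXT, date INTEGER)")
        row = con.execute("SELECT key, value, date FROM cache WHERE key = ?", (key,)).fetchone()
        if row is None:
            return None
        return {"key": row[0], "value": row[1], "date": row[2]}
    finally:
        con.close()

def cache_insert(key, value):
    """Insert or replace a cache entry, stamping it with the current time (assumed contract)."""
    con = sqlite3.connect(CACHE_FILE)
    try:
        con.execute("CREATE TABLE IF NOT EXISTS cache (key TEXT PRIMARY KEY, value TEXT, date INTEGER)")
        con.execute("REPLACE INTO cache (key, value, date) VALUES (?, ?, ?)",
                    (key, str(value), int(time.time())))
        con.commit()
    finally:
        con.close()

Note that the other examples below call cache_get with different signatures (a database filename plus a URL, or a bare URL), so this sketch only mirrors Example #1's usage.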
Example #2
# requires: from datetime import datetime; from lxml import html; import PyRSS2Gen;
# plus the project's cache_get(db_file, url) helper.
def application(environ, start_response):
    # Fetch the news page (through the sqlite-backed cache) and parse the HTML.
    url = 'https://rekrutacja.uni.lodz.pl/index.php?op=news'
    tree = html.fromstring(cache_get('cache.sqlite', url))

    rss = PyRSS2Gen.RSS2(
        title="Rekrutacja UŁ - aktualności",
        link="http://deetah.jogger.pl",
        description="Kanał zawiera aktualności ze strony rekrutacja.uni.lodz.pl",
    )

    for item in tree.xpath('//table'):
        # The second cell starts with a "YYYY-MM-DD HH:MM," timestamp; split it into date and time parts.
        date_raw = item[1].text_content()[:item[1].text_content().find(',')]
        day = tuple(int(a) for a in date_raw.split(' ')[0].split('-'))
        hour = tuple(int(a) for a in date_raw.split(' ')[1].split(':'))

        rss_title = item[0][0][0].text_content()
        rss_description = item[2][0][0].text_content()
        rss_pubDate = datetime(*(day + hour))

        rss.items.append(PyRSS2Gen.RSSItem(
            title=rss_title,
            description=rss_description,
            link=url,
            guid=PyRSS2Gen.Guid(rss_description),
            pubDate=rss_pubDate,
        ))

    start_response('200 OK', [('Content-type', 'application/rss+xml')])
    # WSGI applications should return an iterable of byte strings.
    return [rss.to_xml(encoding='utf-8')]
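
For quick local testing, a WSGI callable like the one above can be served with the standard-library wsgiref server; the module name `rss_app` below is a hypothetical placeholder for wherever the application function lives:

# Minimal local test harness for the WSGI app above (module name is an assumption).
from wsgiref.simple_server import make_server
from rss_app import application  # hypothetical module containing application()

if __name__ == "__main__":
    httpd = make_server("", 8000, application)
    # Serve the feed at http://localhost:8000/ until interrupted.
    httpd.serve_forever()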
Example #3
# requires: import zipfile; from colorama import Fore; plus the project's cache module.
# Python 2 print statements, as in the original project.
def get_textures(self, dir_, cache_):
    # Download the client jar for this version (via the cache) and extract its bundled assets.
    print Fore.GREEN + "Downloading and extracting assets from jar for", Fore.WHITE + self.ver
    url = "http://s3.amazonaws.com/Minecraft.Download/versions/{0}/{0}.jar".format(self.ver)
    f = zipfile.ZipFile(cache.cache_get(url))
    for i in f.namelist():
        if i.startswith("assets/"):
            # Extract each asset into both the target directory and the local cache directory.
            print Fore.LIGHTBLUE_EX + "\tExtracting", Fore.WHITE + i,
            f.extract(i, dir_)
            f.extract(i, cache_)
            print "\r" + Fore.LIGHTBLUE_EX + "\tExtracted", Fore.WHITE + i