# Example 1
  def get(self):
    """Handle a URL submission: validate the host, fetch the resource, import it.

    Reads the ``url`` query parameter, rejects local-network and
    IP-literal hostnames (SSRF guard), skips URLs already recorded in
    ``ImportedList``, then downloads the resource and imports its file
    entries.  Progress and failure notes accumulate in ``msgs`` for the
    template.  NOTE(review): no response is rendered when ``url`` is
    empty or the download fails — presumably a later render call exists
    outside this view; confirm.
    """
    url = self.request.get('url')
    msgs = []
    if url:
      parsed_url = None
      try:
        parsed_url = File.urlparse(url)
      except Exception:
        msgs.append(('bad', 'Could not parse "%s" into a valid url' % url))
      if parsed_url:
        msgs.append(('normal', 'Trying to download "%s"' % parsed_url.geturl()))
        try:
          # Guard against requests that could reach the local network.
          if parsed_url.hostname == 'localhost' or parsed_url.hostname.endswith('.lan'):
            raise Exception('Local network address requested.')
          if re.match(r'[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+', parsed_url.hostname) or parsed_url.hostname.count('['):
            raise Exception('IPv4/IPv6 literal addresses are not supported')
          if parsed_url.hostname.count('.in-addr.arpa') or parsed_url.hostname.count('.ipv6-literal.net'):
            # Message fixed to match the actual check: '.in-addr.arpa'
            # (was the typo '.in-addre.arpa').
            raise Exception('Found illegal ".in-addr.arpa" or ".ipv6-literal.net" in hostname')

          # Skip URLs already recorded in ImportedList (deduplication).
          url = parsed_url.geturl()
          done = ImportedList.get_by_key_name(url)
          if done:
            msgs.append(('normal', 'This download is still in our "already done" list, but thank you for reporting it.'))
            raise Exception('Already checked in the last week.')

          result = urlfetch.fetch(url)
          if result.status_code == 200:
            # Remember the URL so repeat submissions are skipped.
            ImportedList.get_or_insert(url)
            # Parse the content relative to the URL's directory.
            path = os.path.dirname(url)
            msgs.extend(insert_files_from(path, result.content))
            return self.render_to_response('add.html', {'url': parsed_url.geturl(), 'msgs': msgs})
          msgs.append(('bad', 'Download failed with status code %i' % result.status_code))
        except Exception as e:
          msgs.append(('bad', 'Download failed: %s' % str(e)))
# Example 2
  def get(self):
    """Cron handler: purge stale bookkeeping entities and expire old files.

    Deletes week-old ``ImportedList`` entries, trims ``Hit`` entries
    older than 30 days (only when more than 30 exist), and flags
    ``File`` rows untouched for 32 days as unavailable.
    """
    # Single timestamp so all cutoffs in this run are consistent.
    now = datetime.datetime.now()

    # Drop ImportedList entries older than a week so URLs can be re-imported.
    for entry in ImportedList.all().filter('ctime < ', now - datetime.timedelta(days=7)):
      entry.delete()

    # Trim Hit entries older than 30 days, but only when we have plenty.
    # TODO rewrite to make sure we always have a few redirects left
    if Hit.all().count() > 30:
      for hit in Hit.all().filter('ctime < ', now - datetime.timedelta(days=30)):
        hit.delete()

    # Mark files unavailable when their mtime is older than 32 days
    # (capped at 100 per run to stay within request limits).
    for f in File.all().filter('available = ', True).filter('mtime < ', now - datetime.timedelta(days=32)).fetch(100):
      f.available = False
      f.put()