Example #1
def fetch_url_using_pac(pac, url):
    try:
        proxy_string = pacparser.just_find_proxy(pac, url)
    except Exception:
        sys.stderr.write('could not determine proxy using Pacfile\n')
        return None
    proxylist = proxy_string.split(";")
    proxies = None  # Dictionary to be passed to urlopen method of urllib
    while proxylist:
        proxy = proxylist.pop(0).strip()
        if 'DIRECT' in proxy:
            proxies = {}
            break
        if proxy[0:5].upper() == 'PROXY':
            proxy = proxy[6:].strip()
            if isproxyalive(proxy):
                proxies = {'http': 'http://%s' % proxy}
                break
    try:
        sys.stderr.write('trying to fetch the page using proxy %s\n' % proxy)
        response = urllib.urlopen(url, proxies=proxies)
    except Exception as e:
        sys.stderr.write('could not fetch webpage %s using proxy %s\n' %
                         (url, proxies))
        sys.stderr.write(str(e) + '\n')
        return None
    return response
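Note: urllib.urlopen(url, proxies=...) as used above exists only in Python 2. Below is a rough Python 3 sketch of the same idea using urllib.request; it assumes a pacparser build with Python 3 support and omits the isproxyalive liveness check from the example above.

import sys
import urllib.request

import pacparser

def fetch_url_using_pac_py3(pac, url):
    # Ask the PAC file which proxy to use for this URL.
    try:
        proxy_string = pacparser.just_find_proxy(pac, url)
    except Exception:
        sys.stderr.write('could not determine proxy using Pacfile\n')
        return None
    proxies = None
    for candidate in proxy_string.split(';'):
        candidate = candidate.strip()
        if candidate.upper().startswith('DIRECT'):
            proxies = {}  # an empty mapping disables proxying entirely
            break
        if candidate.upper().startswith('PROXY'):
            proxies = {'http': 'http://%s' % candidate[6:].strip()}
            break
    if proxies is None:
        return None
    # Build an opener that uses exactly the proxies chosen above.
    opener = urllib.request.build_opener(urllib.request.ProxyHandler(proxies))
    try:
        return opener.open(url)
    except Exception as e:
        sys.stderr.write('could not fetch webpage %s using proxy %s\n' % (url, proxies))
        sys.stderr.write(str(e) + '\n')
        return None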
Example #2
def getDefinedUrlResults():
    print("getDefinedUrlResults ->")

    pacparser.init()
    ret = []

    # get pacparser response
    for url in defurls:
        # print(url)
        try:
            res = pacparser.just_find_proxy("latest.pac", url)
        except Exception:
            res = "pac file not found"
        ret.append({"url": url, "res": res})
    print("end for defurls <-")

    pacparser.cleanup()

    # print(ret)
    print("getDefinedUrlResults <-")
    return ret
Example #3
def find_proxy_for_url(self, url):
    """Ask the PAC file for the proxy to use for the given URL."""
    self.update_pacfile()
    proxies = pacparser.just_find_proxy(self.pacfile, url)
    if proxies:
        for proxy in proxies.split(";"):
            proxy = proxy.strip()
            if proxy[0:6].upper() == "DIRECT":
                return None
            if proxy[0:5].upper() == "PROXY":
                return proxy[6:].strip()
    sys.stderr.write("No proxy offered for %s\n" % (url,))
    return None
Example #4
def fetchurl(pac, url, headers):
  try:
    proxy_string = pacparser.just_find_proxy(pac, url)
  except Exception:
    sys.stderr.write('could not determine proxy using Pacfile\n')
    return None
  proxylist = proxy_string.split(";")
  proxies = None        # Dictionary to be passed to urlopen method of urllib
  while proxylist:
    proxy = proxylist.pop(0).strip()
    if 'DIRECT' in proxy:
      proxies = {}
      break
    if proxy[0:5].upper() == 'PROXY':
      proxy = proxy[6:].strip()
      if isproxyalive(proxy):
        proxies = {'http': 'http://%s' % proxy}
        break
  try:
    sys.stderr.write('trying to fetch the page using proxy %s\n' % proxy)
    # using urllib:
    # response = urllib.urlopen(url, proxies=proxies)

    # using urllib2:
    #srh = rTool.SmartRedirectHandler()
    # Note: ProxyHandler(None) falls back to the environment proxy settings.
    proxy = urllib2.ProxyHandler(proxies)
    handler = urllib2.HTTPHandler()
    handler.set_http_debuglevel(1)
    cookie = urllib2.HTTPCookieProcessor()  # created but not added to the opener below

    opener = urllib2.build_opener(handler, proxy)
    urllib2.install_opener(opener)

    request = urllib2.Request(url, None, headers)
    response = opener.open(request)
  except Exception as e:
    sys.stderr.write('could not fetch webpage %s using proxy %s\n' %
                     (url, proxies))
    sys.stderr.write(str(e) + '\n')
    return None
  return response
Example #5
def getproxyserver(pac, url):
  try:
    proxy_string = pacparser.just_find_proxy(pac, url)
  except Exception:
    sys.stderr.write('could not determine proxy using Pacfile\n')
    return None
  proxylist = proxy_string.split(";")

  # Choose the first usable proxy server
  while proxylist:
    proxy = proxylist.pop(0).strip()
    if 'DIRECT' in proxy:
      break
    if proxy[0:5].upper() == 'PROXY':
      proxy = proxy[6:].strip()
      if isproxyalive(proxy):
        break

  # Note: if no candidate passes isproxyalive, the last one examined is
  # still returned.
  return proxy
Example #6
def get_proxy_from_pac(pac, url):
  try:
    proxy_string = pacparser.just_find_proxy(pac, url)
  except Exception:
    sys.stderr.write('could not determine proxy using Pacfile\n')
    return None
  proxylist = proxy_string.split(";")
  proxies = None        # Dictionary to be passed to urlopen method of urllib
  while proxylist:
    proxy = proxylist.pop(0).strip()
    if 'DIRECT' in proxy:
      proxies = {}
      break
    if proxy[0:5].upper() == 'PROXY':
      proxy = proxy[6:].strip()
      if isproxyalive(proxy):
        proxies = {'http': 'http://%s' % proxy}
        break
  return proxies
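For context, a minimal usage sketch for the helper above (Python 2 only, since urllib.urlopen's proxies argument does not exist in Python 3; the PAC path and URL here are placeholders):

import urllib

proxies = get_proxy_from_pac('proxy.pac', 'http://www.example.com/')
if proxies is not None:
    # An empty dict means DIRECT; a populated dict routes the request
    # through the selected proxy.
    response = urllib.urlopen('http://www.example.com/', proxies=proxies)
    print(response.read())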
Example #7
def checkProxiesForUrl(self, pac, url):
    """get proxy for the given url"""
    try:
        proxy_string = pacparser.just_find_proxy(pac, url)
    except Exception:
        sys.stderr.write('could not determine proxy using Pacfile\n')
        return None
    proxylist = proxy_string.split(";")
    proxies = None  # Dictionary to be passed to urlopen method of urllib
    while proxylist:
        proxy = proxylist.pop(0).strip()
        if 'DIRECT' in proxy:
            proxies = {}
            return proxies
        if proxy[0:5].upper() == 'PROXY':
            proxy = proxy[6:].strip()
            if self.isProxyAlive(proxy):
                proxies = {'http': 'http://%s' % proxy}
                return proxies
    return None  # no usable proxy found
Example #8
#!/usr/bin/env python

import pacparser, sys

print sys.argv[1]

if len(sys.argv) >= 3:
    pacfile = sys.argv[2]
    print "use: ", pacfile
else:
    pacfile = "proxy.pac"

pacparser.init()
pacparser.parse_pac(pacfile)
proxy = pacparser.find_proxy(sys.argv[1])
print proxy
pacparser.cleanup()

# Or simply,
print pacparser.just_find_proxy(pacfile, sys.argv[1])
Example #9
#!/usr/bin/python2.5

import pacparser

pacparser.init()
pacparser.parse_pac("wpad.dat")
proxy = pacparser.find_proxy("http://www.manugarg.com")
print proxy
pacparser.cleanup()

# Or simply,
print pacparser.just_find_proxy("wpad.dat", "http://www2.manugarg.com")
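pacparser.just_find_proxy is a convenience wrapper that parses the PAC file on every call; when several URLs are checked against the same file it is usually cheaper to parse once and query repeatedly. A minimal sketch, reusing the file name and URLs from the example above:

import pacparser

pacparser.init()
pacparser.parse_pac("wpad.dat")          # parse the PAC file once
for url in ("http://www.manugarg.com", "http://www2.manugarg.com"):
    print(pacparser.find_proxy(url))     # query it as often as needed
pacparser.cleanup()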
Example #10
            print(stat)

        except Exception as e:
            # If the pac could not be downloaded, return the error page.
            error = "Pac file not found. Check your pac in `/app/yourpac.py` and correct access rights."
            return render_template('index.html', defpacs=defpacs, error=error)

    else:
        url = "http://example.com/"
        opt = "http://your-company/test.pac"

    # get pacparser response
    pacparser.init()

    try:
        res = pacparser.just_find_proxy("latest.pac", url)
    except Exception:
        res = "not found"
    finally:
        print("")
        print(res)
        print("")

    pacparser.cleanup()

    # get defined check urls
    defres = getDefinedUrlResults()

    return render_template('index.html',
                           url=url,
                           opt=opt,
                           res=res,
                           stat=stat,
                           defpacs=defpacs,
                           defres=defres)