def find_proxies(self, url):
    """Parse proxy URL for provided URL

    Parse PAC and return proxy URL for given URL.

    :param url: full URL including scheme (e.g. ``http://example.com``)
    :returns: dict mapping the URL's scheme to the first matching
        ``PROXY host:port`` entry; empty dict for DIRECT-only answers
    :raises ProxyConfigError: on a malformed URL or a PAC parsing error
    """
    # BUG FIX: str.split never raises ValueError, so the original
    # try/except was dead code and bad URLs slipped through.
    if '://' not in url:
        raise ProxyConfigError('Invalid URL: %s' % url)
    protocol = url.split('://')[0]
    try:
        pacparser.init()
        try:
            pacparser.parse_pac_string(str(self))
            proxies = pacparser.find_proxy(url)
        finally:
            # BUG FIX: always release the pacparser context, even when
            # parsing fails (was leaked on exception).
            pacparser.cleanup()
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate.
        raise ProxyConfigError('Error parsing PAC: %s' % self.pac_url)
    data = {}
    for v in [x.strip() for x in proxies.split(';')]:
        if v == 'DIRECT':
            continue
        if v[:6] == 'PROXY ':
            data[protocol] = v[6:]
    return data
def getDefinedUrlResults():
    """Resolve every URL in the global ``defurls`` list against latest.pac.

    :returns: list of ``{"url": ..., "res": ...}`` dicts where ``res`` is
        the pacparser answer or an error message string
    """
    print("getDefinedUrlResults ->")
    pacparser.init()
    ret = []
    # get pacparser response
    for url in defurls:
        try:
            res = pacparser.just_find_proxy("latest.pac", url)
        except Exception:
            # BUG FIX: narrowed the bare except (was swallowing
            # KeyboardInterrupt too) and removed a stray bare ``print``
            # expression in ``finally`` — a Python-2 leftover that was a
            # no-op under Python 3.
            res = "pac file not found"
        ret.append({"url": url, "res": res})
    print("end for defurls <-")
    pacparser.cleanup()
    print("getDefinedUrlResults <-")
    return ret
def set_proxy(self):
    """Install urllib openers from the user's proxy preferences.

    The "proxy" preference is used for all Pithos HTTP traffic; the
    "control_proxy" preference is used only for Pandora traffic and
    overrides "proxy".  If neither option is set, urllib2.build_opener
    uses urllib.getproxies() by default.
    """
    handlers = []
    global_proxy = self.preferences["proxy"]
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({"http": global_proxy, "https": global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    # Make the global proxy the process-wide default opener.
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.preferences["control_proxy"]
    control_proxy_pac = self.preferences["control_proxy_pac"]
    if not control_proxy and control_proxy_pac and pacparser_imported:
        # Resolve the control proxy from a PAC script.
        pacparser.init()
        pacparser.parse_pac_string(urllib.request.urlopen(control_proxy_pac).read())
        proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
        for proxy in proxies:
            match = re.search("PROXY (.*)", proxy)
            if match:
                control_proxy = match.group(1)
                break
    if control_proxy:
        # BUG FIX: the original only built a control opener for an
        # explicitly-set control_proxy; a proxy discovered via the PAC was
        # stored in ``control_proxy`` but never used, so the PAC result was
        # silently discarded.  Build the opener in both cases.
        control_opener = urllib.request.build_opener(
            urllib.request.ProxyHandler({"http": control_proxy, "https": control_proxy})
        )
    self.worker_run("set_url_opener", (control_opener,))
def get_proxy_with_pac(url):
    """Find a live HTTP proxy for *url* using the hk1.pac file.

    :param url: URL to find a proxy for
    :returns: ``{'http': 'host:port'}`` for the first reachable proxy,
        ``{}`` if the PAC answered DIRECT only or no proxy was alive,
        or ``None`` when the PAC lookup itself failed
    """
    proxy_str = None
    try:
        pacparser.init()
        try:
            pacparser.parse_pac('hk1.pac')
            proxy_str = pacparser.find_proxy(url)
        finally:
            # BUG FIX: release the pacparser context (was never cleaned up).
            pacparser.cleanup()
    except Exception:
        # Narrowed from a bare except.
        sys.stderr.write('could not find proxy for %s using this PAC file.\n' % url)
        return None
    # Example answer: 'PROXY hkce01.hk.ibm.com:80; PROXY 9.181.193.210:80; DIRECT'
    proxy_list = proxy_str.split(';')
    proxies = {}
    for proxy in proxy_list:
        proxy = proxy.strip()
        if 'DIRECT' == proxy:
            continue
        if proxy[0:5].upper() == 'PROXY':
            proxy = proxy[6:].strip()
            # Use the first proxy that answers.
            if is_proxy_alive(proxy):
                proxies['http'] = proxy
                break
    sys.stdout.write('get proxy %s for %s\n' % (proxies, url))
    return proxies
def getDefinedUrlResults():
    """Resolve every URL in the global ``defurls`` list against latest.pac.

    Returns a list of ``{"url": ..., "res": ...}`` dicts where ``res`` is
    the pacparser answer or an error-message string.
    """
    print("getDefinedUrlResults ->")
    pacparser.init()
    ret = []
    # get pacparser response
    for url in defurls:
        # print(url)
        try:
            res = pacparser.just_find_proxy("latest.pac", url)
        except:
            # NOTE(review): bare except also hides errors other than a
            # missing pac file — consider narrowing to Exception.
            res = "pac file not found"
        finally:
            # NOTE(review): a bare ``print`` is a no-op expression in
            # Python 3 — looks like a Python-2 leftover; confirm and remove.
            print
        # print(res)
        # print
        ret.append({ "url": url, "res": res })
    print("end for defurls <-")
    pacparser.cleanup()
    # print(ret)
    print("getDefinedUrlResults <-")
    return ret
def pac_result(request, nid):
    """Django view: assemble and syntax-check the effective PAC for a network.

    Builds the FindProxyForURL body by concatenating the PAC rules of the
    network's ancestor chain, validates it with pacparser, and renders an
    RRD access graph alongside recent request statistics.

    :param request: Django request object
    :param nid: primary key of the ``source`` network row
    """
    pac = "<code>function FindProxyForURL(url, host){\n"
    network = source.objects.get(id=nid)
    rulepac = rules.objects.get(ref_address_ip = network)
    # Last 20 recorded requests for this network, newest first.
    statistiques = stats.objects.filter(ref_address_ip = network).order_by('id').reverse()[:20]
    form = []
    form3 = Form_stats()
    # One read-only stats form per recorded request.
    for statis in statistiques:
        form.append(Form_stats(initial={'HTTP_ACCEPT' : statis.HTTP_ACCEPT ,
                                        'HTTP_ACCEPT_CHARSET' : statis.HTTP_ACCEPT_CHARSET ,
                                        'HTTP_ACCEPT_ENCODING' : statis.HTTP_ACCEPT_ENCODING ,
                                        'HTTP_ACCEPT_LANGUAGE' : statis.HTTP_ACCEPT_LANGUAGE ,
                                        'HTTP_CACHE_CONTROL' : statis.HTTP_CACHE_CONTROL ,
                                        'HTTP_CONNECTION' : statis.HTTP_CONNECTION ,
                                        'HTTP_COOKIE' : statis.HTTP_COOKIE ,
                                        'HTTP_HOST' : statis.HTTP_HOST ,
                                        'HTTP_USER_AGENT' : statis.HTTP_USER_AGENT ,
                                        'HTTP_VIA' : statis.HTTP_VIA ,
                                        'HTTP_X_FORWARDED_FOR' : statis.HTTP_X_FORWARDED_FOR ,
                                        'PATH_INFO' : statis.PATH_INFO ,
                                        'PATH_TRANSLATED' : statis.PATH_TRANSLATED ,
                                        'QUERY_STRING' : statis.QUERY_STRING ,
                                        'REMOTE_ADDR' : statis.REMOTE_ADDR ,
                                        'REMOTE_HOST' : statis.REMOTE_HOST ,
                                        'REMOTE_IDENT' : statis.REMOTE_IDENT ,
                                        'REMOTE_USER' : statis.REMOTE_USER ,
                                        'REQUEST_METHOD' : statis.REQUEST_METHOD}))
    try:
        # Concatenate the PAC rules of the ancestor chain (self included,
        # most specific first) into one FindProxyForURL body.
        tree = network.get_ancestors(include_self=True,ascending=True)
        for networks in tree:
            rulepac = rules.objects.get(ref_address_ip = networks)
            pac += rulepac.pac +"\n"
        pac += "}"
    except:
        # NOTE(review): bare except silently drops ancestors missing a rule.
        pass
    # Syntax-check the assembled PAC; only the pass/fail verdict is kept.
    pacparser.init()
    try:
        pacparser.parse_pac_string(str(pac))
        parse_global = "Pac syntax seems to be ok"
    except:
        parse_global = "Pac syntax error"
    pacparser.cleanup()
    fichier = settings.RRD_ROOT+network.address_ip+".rrd"
    png = settings.MEDIA_ROOT+"img/rrd/"+network.address_ip+".png"
    # Create the RRD database on first use (open() raising means it does
    # not exist yet).
    try:
        openfile = open(str(fichier), 'r')
        openfile.close()
    except:
        ret = rrdtool.create(str(fichier), "--step", "60", "--start", '0',
                             "DS:input:COUNTER:120:U:U",
                             "RRA:AVERAGE:0.5:1:120",
                             "RRA:AVERAGE:0.5:5:288",
                             "RRA:AVERAGE:0.5:30:336",
                             "RRA:AVERAGE:0.5:30:1488",
                             "RRA:MAX:0.5:1:120",
                             "RRA:MAX:0.5:5:288",
                             "RRA:MAX:0.5:30:336",
                             "RRA:MAX:0.5:30:1488")
    # Render the last two hours of access counts to a PNG for the template.
    ret = rrdtool.graph( str(png), "--start", "-2hour", "--vertical-label=Access/s",
                         "DEF:inoctets="+str(fichier)+":input:AVERAGE",
                         "AREA:inoctets#FF0000:In access", "COMMENT:\\n",
                         "GPRINT:inoctets:AVERAGE:Avg In access\: %6.2lf %S access",
                         "COMMENT: ",
                         "GPRINT:inoctets:MAX:Max In access\: %6.2lf %S access\\r")
    # locals() hands every computed name to the template.
    return render_to_response('pac_result.html', locals(),context_instance=RequestContext(request))
def test_pac(pacfile, testfile):
    """Test PAC file against test data using pacparser.

    :param pacfile: path of the PAC file to load
    :param testfile: CSV file with ``url``/``expected`` columns;
        lines starting with '#' are ignored
    :returns: 0 if all tests pass, 1 if any fail
    """
    # Stores the script's exit code
    testfailed = 0
    v_print(3, "Testing PAC file: {}".format(pacfile))
    # Initialise pacparser
    pacparser.init()
    pacparser.parse_pac(pacfile)
    with open(testfile, 'rt') as f:
        # Create csv reader, filtering out rows starting with '#'
        reader = csv.DictReader(filter(lambda row: row[0] != '#', f), delimiter=',')
        # Iterate over test data
        for row in reader:
            v_print(1, "\nread row: {} {}".format(row['url'], row['expected']))
            # BUG FIX: the original assigned ``testfailed = test_url(...)``,
            # so a later passing row erased an earlier failure.  Latch the
            # failure flag instead (still calling test_url for every row so
            # all results are reported).
            if test_url(row['url'], row['expected']):
                testfailed = 1
    # Cleanup pacparser
    pacparser.cleanup()
    if (testfailed):
        print("PAC file Test Failed", file=sys.stderr)
    else:
        v_print(3, "PAC file Test Passed")
    return testfailed
def __parse_pac(self): retry, downloaded_pac = download_url(self.pac_url, download_pac) # connection error, we are gonna retry # when get_proxies() gets called. if retry: self.fetched = False return # either download succeeded, or it is not a valid url, then falls back to # a file path. try: pacparser.init() if downloaded_pac is not None: g_log.debug("pac url is a url: {}".format(self.pac_url)) pacparser.parse_pac_string(downloaded_pac) else: g_log.debug("pac url is a local file: {}".format(self.pac_url)) pacparser.parse_pac_file(self.pac_url) except IOError: # neither a valid url or valid file path self.clean_up() raise else: self.fetched = True
def network_rule(request, nid):
    """Django view: create or edit the PAC rule attached to a network.

    POST: validate the submitted PAC fragment with pacparser (wrapped in a
    throwaway FindProxyForURL shell) and save it to the network's rule.
    GET: show the edit form plus a syntax check of the network's effective
    PAC (its rule concatenated with all ancestors' rules).

    :param request: Django request object
    :param nid: primary key of the ``source`` network row
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect("/logon/")
    if request.POST:
        c = {}
        c.update(csrf(request))
        form = Form_pac(request.POST)
        # Shell used only for the syntax check of the submitted fragment.
        requette_test = "function FindProxyForURL(url, host){if(1){}"
        if form.is_valid():
            network = source.objects.get(id=nid)
            # Flatten the script: strip CR/LF/tabs, collapse whitespace runs.
            p = re.compile('[\r\n\t]+')
            requette = p.sub( "", form.cleaned_data['script'])
            p = re.compile('[\s]')
            requette = p.sub( " ", requette)
            requette_test += requette +"}"
            pacparser.init()
            try:
                pacparser.parse_pac_string(str(requette_test))
            except:
                # Invalid PAC syntax: re-render the form with the error.
                parse = "Pac syntax error"
                return render_to_response('pac_new.html', locals(),context_instance=RequestContext(request))
            pacparser.cleanup()
            # Update the existing rule or create one for this network.
            try:
                rulepac = rules.objects.get(ref_address_ip = network)
                rulepac.pac = form.cleaned_data['script']
            except ObjectDoesNotExist:
                rulepac = rules(ref_address_ip = network, pac = form.cleaned_data['script'])
            rulepac.save()
        else:
            return render_to_response('pac_new.html', locals(),context_instance=RequestContext(request))
        return HttpResponseRedirect("/network/")
    else:
        pac = "function FindProxyForURL(url, host){\n"
        try:
            network = source.objects.get(id=nid)
            rulepac = rules.objects.get(ref_address_ip = network)
            form = Form_pac(initial={'script': rulepac.pac})
            action = "Edit"
            try:
                # Effective PAC = rules of the ancestor chain (self included).
                tree = network.get_ancestors(include_self=True,ascending=True)
                for networks in tree:
                    rulepac = rules.objects.get(ref_address_ip = networks)
                    pac += rulepac.pac +"\n"
                pac += "}"
            except:
                # NOTE(review): bare except silently drops ancestors
                # missing a rule.
                pass
            # Syntax-check the effective PAC for display only.
            pacparser.init()
            try:
                pacparser.parse_pac_string(str(pac))
                parse_global = "Pac syntax seems to be ok"
            except:
                parse_global = "Pac syntax error"
            pacparser.cleanup()
            pac2 = "<code>"
            pac2 += pac
            form2 = Form_result(initial={'script': pac})
        except ObjectDoesNotExist:
            # Network has no rule yet: present an empty "Add" form.
            form = Form_pac()
            action = "Add"
        return render_to_response('pac_new.html', locals(),context_instance=RequestContext(request))
def set_proxy(self):
    """Install urllib openers from the user's proxy preferences.

    The "proxy" preference is used for all Pithos HTTP traffic; the
    "control_proxy" preference is used only for Pandora traffic and
    overrides "proxy".  If neither option is set, urllib2.build_opener
    uses urllib.getproxies() by default.
    """
    handlers = []
    global_proxy = self.preferences['proxy']
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({'http': global_proxy, 'https': global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    # Make the global proxy the process-wide default opener.
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.preferences['control_proxy']
    control_proxy_pac = self.preferences['control_proxy_pac']
    if control_proxy:
        control_opener = urllib.request.build_opener(urllib.request.ProxyHandler({'http': control_proxy, 'https': control_proxy}))
    elif control_proxy_pac and pacparser_imported:
        # Resolve the control proxy from a PAC script.
        # NOTE(review): the PAC-derived control_proxy is stored but
        # control_opener is never rebuilt from it, so the PAC result
        # appears to be discarded; pacparser.cleanup() is also never
        # called.  Confirm intended.
        pacparser.init()
        pacparser.parse_pac_string(urllib.request.urlopen(control_proxy_pac).read())
        proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
        for proxy in proxies:
            match = re.search("PROXY (.*)", proxy)
            if match:
                control_proxy = match.group(1)
                break
    elif control_proxy_pac and not pacparser_imported:
        logging.warn("Disabled proxy auto-config support because python-pacparser module was not found.")
    self.worker_run('set_url_opener', (control_opener,))
def set_proxy(self):
    """Install urllib openers from the user's proxy preferences.

    The "proxy" preference is used for all Pithos HTTP traffic; the
    "control_proxy" preference is used only for Pandora traffic and
    overrides "proxy".  If neither option is set, urllib2.build_opener
    uses urllib.getproxies() by default.
    """
    handlers = []
    global_proxy = self.preferences['proxy']
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({'http': global_proxy, 'https': global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    # Make the global proxy the process-wide default opener.
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.preferences['control_proxy']
    control_proxy_pac = self.preferences['control_proxy_pac']
    if control_proxy:
        control_opener = urllib.request.build_opener(urllib.request.ProxyHandler({'http': control_proxy, 'https': control_proxy}))
    elif control_proxy_pac and pacparser_imported:
        # Resolve the control proxy from a PAC script.
        # NOTE(review): the PAC-derived control_proxy is stored but
        # control_opener is never rebuilt from it, so the PAC result
        # appears to be discarded; pacparser.cleanup() is also never
        # called.  Confirm intended.
        pacparser.init()
        pacparser.parse_pac_string(urllib.request.urlopen(control_proxy_pac).read())
        proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
        for proxy in proxies:
            match = re.search("PROXY (.*)", proxy)
            if match:
                control_proxy = match.group(1)
                break
    self.worker_run('set_url_opener', (control_opener,))
def main(args):
    """Start the WPAD proxy server (Twisted generator-style coroutine).

    Unless a proxy is forced via ``args.force_proxy``, performs initial
    WPAD discovery and registers for network-state change notifications,
    then binds the listening server.  Errors are logged, not raised.
    """
    try:
        pacparser.init()
        WPADProxyRequest.force_direct = 'DIRECT'  # direct, until we have a configuration
        if args.force_proxy:
            WPADProxyRequest.force_proxy = args.force_proxy
        else:
            # Initial WPAD discovery, then re-run it on network changes.
            yield updateWPAD()
            try:
                yield install_network_state_changed_callback(reactor, updateWPAD)
            except Exception as e:
                # It _may_ actually be preferable to just die if we can't register
                # this handler. However, the test scripts use a mocked version of
                # dbus (python-dbusmock) which doesn't support mocking signals. So
                # I'll just let this pass as a warning for that case.
                logger.warning(
                    "Issue registering for network state change notifications",
                    exc_info=True)
        force_proxy_message = ", sending all traffic through %s" % args.force_proxy if args.force_proxy else ""
        logger.info("Starting proxy server on %s:%s%s", args.bind, args.port, force_proxy_message)
        yield start_server(args.bind, args.port, reactor)
        logger.info("Successfully started.")
    except Exception as e:
        logger.error("Problem starting the server", exc_info=True)
def find(site, pacfile, myip=""):
    """Resolve the proxy string for *site* using *pacfile*.

    The PAC file is routed through the local cache before parsing; when
    *myip* is non-empty it overrides pacparser's notion of the client IP.
    """
    pacparser.init()
    cached_pac = cache_pacfile.cache(pacfile)
    if myip != "":
        pacparser.setmyip(myip)
    pacparser.parse_pac(cached_pac)
    return pacparser.find_proxy(site)
def validate(self):
    """Validate PAC with pacparser

    Validate resolved PAC configuration with pacparser library;
    a pacparser error propagates to the caller on invalid PAC.
    """
    pacparser.init()
    try:
        pacparser.parse_pac_string(str(self))
    finally:
        # BUG FIX: always release the pacparser context — previously an
        # invalid PAC raised out of parse_pac_string and cleanup() was
        # skipped, leaking the initialized context.
        pacparser.cleanup()
def main_test(filename, test_times):
    # Benchmark pacparser.find_proxy against *filename*, averaged over
    # *test_times* iterations.  (Python 2: print statement, xrange.)
    pacparser.init()
    pacparser.parse_pac(filename)
    beg_time = time.time()
    for i in xrange(test_times):
        # using the worst case
        ret_str = pacparser.find_proxy('http://www.coding.com', 'www.coding.com')
    end_time = time.time()
    # Report total seconds and per-call average in milliseconds.
    print "%s:\nTotal Time: %s s\nAvg. Time: %s ms\n\n" % (filename, end_time - beg_time, (end_time - beg_time) * 1000.0 / test_times),
    pacparser.cleanup()
def proxy_test(pac_string, myip, url):
    """Evaluate *pac_string* and return the proxy answer for *url*.

    :param pac_string: PAC script source text
    :param myip: client IP to report to the PAC, or falsy to use default
    :param url: URL to resolve; defaults to http://www.google.com if falsy
    :returns: pacparser proxy string (e.g. ``"PROXY host:port; DIRECT"``)
    """
    pp.init()
    if myip:
        pp.setmyip(myip)
    if not url:
        url = 'http://www.google.com'
    # BUG FIX: call the public wrapper instead of the private C extension
    # (pp._pacparser.parse_pac_string) — the wrapper handles argument
    # conversion and is the supported API.
    pp.parse_pac_string(pac_string)
    proxy = pp.find_proxy(url)
    # BUG FIX: release the pacparser context (was never cleaned up).
    pp.cleanup()
    return proxy
def set_proxy(self, *ignore, reconnect=True):
    """Install urllib openers from the user's proxy settings.

    The "proxy" setting is used for all Pithos HTTP traffic; the
    "control-proxy" setting is used only for Pandora traffic and overrides
    "proxy".  A "control-proxy-pac" URL is consulted (when pacparser is
    available) only if no explicit control proxy is set.  If neither
    option is set, urllib2.build_opener uses urllib.getproxies() by
    default.  Extra positional args (signal payloads) are ignored; when
    *reconnect* is true the worker reconnects to Pandora afterwards.
    """
    handlers = []
    global_proxy = self.settings['proxy']
    if global_proxy:
        handlers.append(
            urllib.request.ProxyHandler({
                'http': global_proxy,
                'https': global_proxy
            }))
    global_opener = pandora.Pandora.build_opener(*handlers)
    # Make the global proxy the process-wide default opener.
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.settings['control-proxy']
    control_proxy_pac = self.settings['control-proxy-pac']
    if not control_proxy and (control_proxy_pac and pacparser):
        # Resolve the control proxy from the PAC; failures are logged and
        # leave control_proxy unset (falls through to the global opener).
        pacparser.init()
        with urllib.request.urlopen(control_proxy_pac) as f:
            pacstring = f.read().decode('utf-8')
            try:
                pacparser.parse_pac_string(pacstring)
            except pacparser._pacparser.error:
                logging.warning('Failed to parse PAC.')
        try:
            proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
            for proxy in proxies:
                match = re.search("PROXY (.*)", proxy)
                if match:
                    control_proxy = match.group(1)
                    break
        except pacparser._pacparser.error:
            logging.warning('Failed to find proxy via PAC.')
        pacparser.cleanup()
    elif not control_proxy and (control_proxy_pac and not pacparser):
        logging.warning(
            "Disabled proxy auto-config support because python-pacparser module was not found."
        )
    if control_proxy:
        control_opener = pandora.Pandora.build_opener(
            urllib.request.ProxyHandler({
                'http': control_proxy,
                'https': control_proxy
            }))
    self.worker_run('set_url_opener', (control_opener, ), self.pandora_connect if reconnect else None)
def index(pacfile=None):
    """Flask route: resolve a proxy for the requested URL via a PAC file.

    Query parameters: ``myip`` (optional client IP override) and ``url``
    (defaults to http://www.google.com).

    :param pacfile: PAC file path from the route; falls back to 'test.pac'
    :returns: pacparser proxy string for the URL
    """
    myip = request.args.get('myip', '')
    url = request.args.get('url', 'http://www.google.com')
    pp.init()
    # BUG FIX: only override the client IP when one was actually supplied
    # (was unconditionally calling setmyip with '').
    if myip:
        pp.setmyip(myip)
    # BUG FIX: honour the pacfile route argument — it was accepted but
    # ignored in favour of the hardcoded 'test.pac'.
    pp.parse_pac(pacfile or 'test.pac')
    proxy = pp.find_proxy(url)
    # Release the pacparser context before returning the response.
    pp.cleanup()
    return proxy
def main_test(filename, test_times):
    # Benchmark pacparser.find_proxy against *filename*, averaged over
    # *test_times* iterations.  (Python 2: print statement, xrange.)
    pacparser.init()
    pacparser.parse_pac(filename)
    beg_time = time.time()
    for i in xrange(test_times):
        # using the worst case
        ret_str = pacparser.find_proxy(
            'http://www.coding.com', 'www.coding.com')
    end_time = time.time()
    # Report total seconds and per-call average in milliseconds.
    print "%s:\nTotal Time: %s s\nAvg. Time: %s ms\n\n" % (
        filename, end_time - beg_time, (end_time - beg_time) * 1000.0 / test_times),
    pacparser.cleanup()
def runtests(pacfile, testdata, tests_dir):
    """Run the pacparser test suite: evaluate *pacfile* for every case in
    *testdata* and compare against expected results.

    Each non-comment testdata line has the form ``<getopt params>|<expected>``
    where params are ``-u URL`` (required), ``-c IP`` (setmyip) and ``-e``
    (enable Microsoft extensions).

    :raises Exception: when the module can't be located/imported or any
        test result differs from the expected value
    """
    py_ver = '.'.join([str(x) for x in sys.version_info[0:2]])
    if sys.platform == 'win32':
        pacparser_module_path = os.path.join(
            tests_dir, '..', 'src', 'pymod',
            'pacparser-python%s' % sysconfig.get_config_vars('VERSION')[0])
        # BUG FIX: the check was inverted — it raised when the compiled
        # module WAS present.  Fail only when _pacparser.pyd is missing.
        if not os.path.exists(os.path.join(pacparser_module_path, '_pacparser.pyd')):
            raise Exception('Tests failed. Could not determine pacparser path.')
    else:
        try:
            pacparser_module_path = glob.glob(os.path.join(
                tests_dir, '..', 'src', 'pymod', 'build', 'lib*%s' % py_ver))[0]
        except Exception:
            raise Exception('Tests failed. Could not determine pacparser path.')
    if 'DEBUG' in os.environ:
        print('Pacparser module path: %s' % pacparser_module_path)
    sys.path.insert(0, pacparser_module_path)
    try:
        import pacparser
    except ImportError:
        raise Exception('Tests failed. Could not import pacparser.')
    if 'DEBUG' in os.environ:
        print('Imported pacparser module: %s' % sys.modules['pacparser'])
    # BUG FIX: close the test-data file (was left open).
    with open(testdata) as f:
        for line in f:
            comment = ''
            if '#' in line:
                comment = line.split('#', 1)[1]
                line = line.split('#', 1)[0].strip()
            if not line:
                continue
            # Skip internet-dependent cases when running offline.
            if ('NO_INTERNET' in os.environ and os.environ['NO_INTERNET'] and
                    'INTERNET_REQUIRED' in comment):
                continue
            if 'DEBUG' in os.environ:
                print(line)
            (params, expected_result) = line.strip().split('|')
            args = dict(getopt.getopt(params.split(), 'eu:c:')[0])
            if '-e' in args:
                pacparser.enable_microsoft_extensions()
            if '-c' in args:
                pacparser.setmyip(args['-c'])
            pacparser.init()
            pacparser.parse_pac_file(pacfile)
            result = pacparser.find_proxy(args['-u'])
            pacparser.cleanup()
            if result != expected_result:
                raise Exception('Tests failed. Got "%s", expected "%s"' %
                                (result, expected_result))
    print('All tests were successful.')
def runtests(pacfile, testdata, tests_dir):
    """Run the pacparser test suite: evaluate *pacfile* for every case in
    *testdata* and compare against expected results.

    Each non-comment testdata line has the form ``<getopt params>|<expected>``
    where params are ``-u URL`` (required), ``-c IP`` (setmyip) and ``-e``
    (enable Microsoft extensions).

    :raises Exception: when the module can't be located/imported or any
        test result differs from the expected value
    """
    py_ver = '.'.join([str(x) for x in sys.version_info[0:2]])
    if sys.platform == 'win32':
        pacparser_module_path = os.path.join(tests_dir, '..', 'src', 'pymod', 'dist')
        # BUG FIX: the check was inverted — it raised when the compiled
        # module WAS present.  Fail only when _pacparser.pyd is missing.
        if not os.path.exists(os.path.join(pacparser_module_path, '_pacparser.pyd')):
            raise Exception('Tests failed. Could not determine pacparser path.')
    else:
        try:
            pacparser_module_path = glob.glob(os.path.join(
                tests_dir, '..', 'src', 'pymod', 'build', 'lib*%s' % py_ver))[0]
        except Exception:
            raise Exception('Tests failed. Could not determine pacparser path.')
    if 'DEBUG' in os.environ:
        print('Pacparser module path: %s' % pacparser_module_path)
    sys.path.insert(0, pacparser_module_path)
    try:
        import pacparser
    except ImportError:
        raise Exception('Tests failed. Could not import pacparser.')
    if 'DEBUG' in os.environ:
        print('Imported pacparser module: %s' % sys.modules['pacparser'])
    # BUG FIX: close the test-data file (was left open).
    with open(testdata) as f:
        for line in f:
            comment = ''
            if '#' in line:
                comment = line.split('#', 1)[1]
                line = line.split('#', 1)[0].strip()
            if not line:
                continue
            # Skip internet-dependent cases when running offline.
            if ('NO_INTERNET' in os.environ and os.environ['NO_INTERNET'] and
                    'INTERNET_REQUIRED' in comment):
                continue
            if 'DEBUG' in os.environ:
                print(line)
            (params, expected_result) = line.strip().split('|')
            args = dict(getopt.getopt(params.split(), 'eu:c:')[0])
            if '-e' in args:
                pacparser.enable_microsoft_extensions()
            if '-c' in args:
                pacparser.setmyip(args['-c'])
            pacparser.init()
            pacparser.parse_pac_file(pacfile)
            result = pacparser.find_proxy(args['-u'])
            pacparser.cleanup()
            if result != expected_result:
                raise Exception('Tests failed. Got "%s", expected "%s"' %
                                (result, expected_result))
    print('All tests were successful.')
def main(args):
    """Start the WPAD proxy server (Twisted generator-style coroutine).

    Unless a proxy is forced via ``args.force_proxy``, performs initial
    WPAD discovery and re-runs it on SIGHUP, then binds the listening
    server.  Errors are logged, not raised.
    """
    try:
        pacparser.init()
        WPADProxyRequest.force_direct = 'DIRECT'  # direct, until we have a configuration
        if args.force_proxy:
            WPADProxyRequest.force_proxy = args.force_proxy
        else:
            # Initial WPAD discovery; SIGHUP triggers a re-discovery.
            yield updateWPAD()
            signal.signal(signal.SIGHUP, updateWPAD)
        force_proxy_message = ", sending all traffic through %s"%args.force_proxy if args.force_proxy else ""
        logger.info("Starting proxy server on port %s%s", args.port, force_proxy_message)
        yield start_server(args.port, reactor)
        logger.info("Successfully started.")
    except Exception as e:
        logger.error("Problem starting the server", exc_info=True)
def set_proxy(self, *ignore):
    """Install urllib openers from the user's proxy settings.

    The "proxy" setting is used for all Pithos HTTP traffic; the
    "control-proxy" setting is used only for Pandora traffic and overrides
    "proxy".  A "control-proxy-pac" URL is consulted (when pacparser is
    available) only if no explicit control proxy is set.  If neither
    option is set, urllib2.build_opener uses urllib.getproxies() by
    default.  Extra positional args (signal payloads) are ignored.
    """
    handlers = []
    global_proxy = self.settings.get_string('proxy')
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({'http': global_proxy, 'https': global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    # Make the global proxy the process-wide default opener.
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.settings.get_string('control-proxy')
    control_proxy_pac = self.settings.get_string('control-proxy-pac')
    if not control_proxy and (control_proxy_pac and pacparser):
        # Resolve the control proxy from the PAC; failures are logged and
        # leave control_proxy unset (falls through to the global opener).
        pacparser.init()
        with urllib.request.urlopen(control_proxy_pac) as f:
            pacstring = f.read().decode('utf-8')
            try:
                pacparser.parse_pac_string(pacstring)
            except pacparser._pacparser.error:
                logging.warning('Failed to parse PAC.')
        try:
            proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
            for proxy in proxies:
                match = re.search("PROXY (.*)", proxy)
                if match:
                    control_proxy = match.group(1)
                    break
        except pacparser._pacparser.error:
            logging.warning('Failed to find proxy via PAC.')
        pacparser.cleanup()
    elif not control_proxy and (control_proxy_pac and not pacparser):
        logging.warning("Disabled proxy auto-config support because python-pacparser module was not found.")
    if control_proxy:
        control_opener = urllib.request.build_opener(urllib.request.ProxyHandler({'http': control_proxy, 'https': control_proxy}))
    self.worker_run('set_url_opener', (control_opener,))
def __parse_pac(self):
    """Download (or locally load) and parse the PAC at ``self.pac_url``.

    Sets ``self.fetched`` to True on success and False when the download
    failed in a retryable way; cleans up and re-raises when the pac_url is
    neither a valid URL nor a readable file.
    """
    required_download, url = parse_file_uri(self.pac_url)
    try:
        if required_download:
            downloaded_pac = download_pac([url], timeout=30)
            if downloaded_pac:
                pacparser.init()
                pacparser.parse_pac_string(downloaded_pac)
            else:
                # retryable exceptions
                self.fetched = False
                return
        else:
            pacparser.init()
            pacparser.parse_pac_file(url)
    except:
        # neither a valid url or valid file path
        # NOTE(review): bare except — also catches KeyboardInterrupt;
        # consider narrowing, though it does re-raise after cleanup.
        g_log.error("Unsupported pac url {}".format(self.pac_url))
        g_log.debug("Cleaning up pacparser...")
        self.clean_up()
        raise
    else:
        self.fetched = True
def find_proxy(self, host):
    """Resolve proxy destinations for *host* via the stored PAC string.

    :param host: hostname to resolve (queried as ``http://<host>``)
    :returns: list of destinations in PAC order — an ``Address`` per
        ``PROXY host:port`` entry, ``None`` for ``DIRECT``
    """
    # The lock serialises access to pacparser's global context.
    with self.__lock:
        import pacparser
        pacparser.init()
        pacparser.parse_pac_string(self.__pac_string)
        results = pacparser.find_proxy('http://%s' % host, host)
        pacparser.cleanup()
    dests = []
    for result in results.split(';'):
        result = result.strip()
        if result.startswith('PROXY'):
            # BUG FIX: the original unpacked into ``host, port``, shadowing
            # the ``host`` parameter — the debug log below then reported the
            # last proxy's hostname instead of the queried host.
            proxy_host, proxy_port = result.split(' ')[1].split(':')
            dests.append(Address(proxy_host, proxy_port))
        elif result.startswith('DIRECT'):
            dests.append(None)
    getLogger().write('Proxy for "%s" -> %s' % (host, dests), Log.DEBUG)
    return dests
def test(request):
    """Django view: test which proxy a given source IP / destination gets.

    Finds the most specific configured network containing the submitted
    source IP, assembles that network's effective PAC from its ancestor
    chain, and evaluates it against the destination URL with pacparser.

    :param request: Django request object
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect("/logon/")
    if request.POST:
        c = {}
        c.update(csrf(request))
        pac = "function FindProxyForURL(url, host){\n"
        form = Form_test(request.POST)
        if form.is_valid():
            networks = source.objects.all()
            network =[]
            ipsource = form.cleaned_data['address_ip']
            destination = form.cleaned_data['destination']
            network_save = 0
            match2 =[]
            cidr2 =[]
            # Pick the matching network with the longest prefix (largest
            # mask).  The AttributeError path handles the first match,
            # when network_save is still the int sentinel 0.
            for network in networks:
                cidr = [str(network.address_ip) +"/"+ str(network.mask)]
                match = smallest_matching_cidr(ipsource,cidr)
                match2.append(match)
                cidr2.append(cidr)
                if match:
                    try:
                        if ((int(str(network.mask))) > (int(str(network_save.mask)))):
                            network_save = network
                            del match
                    except AttributeError:
                        network_save = network
                        del match
            try:
                # Effective PAC = rules of the ancestor chain (self
                # included), flattened onto one line for pacparser.
                tree = network_save.get_ancestors(include_self=True,ascending=True)
                for network_node in tree:
                    rulepac = rules.objects.get(ref_address_ip = network_node)
                    pac += rulepac.pac +"\n"
                pac += "}"
                p = re.compile('[\r\n\t]+')
                requette = p.sub( "", pac)
                p = re.compile('[\s]')
                requette = p.sub( " ", requette)
            except AttributeError:
                # No network matched (network_save is still 0): fall back
                # to the catch-all network for the IP family.
                if (IPNetwork(str(ipsource)).version == 6):
                    network_save = source.objects.get(address_ip='fe80::')
                else:
                    network_save = source.objects.get(address_ip='0.0.0.0')
                rulepac = rules.objects.get(ref_address_ip = network_save)
                pac += rulepac.pac +"\n"
                pac += "}"
                p = re.compile('[\r\n\t]+')
                requette = p.sub( "", pac)
                p = re.compile('[\s]')
                requette = p.sub( " ", requette)
            # Evaluate the assembled PAC for the destination URL.
            pacparser.init()
            parse = pacparser.parse_pac_string(str(requette))
            proxy = pacparser.find_proxy(str(destination))
            pacparser.cleanup()
            return render_to_response('test.html', locals(),context_instance=RequestContext(request))
        else:
            return render_to_response('test.html', {'form': form,},context_instance=RequestContext(request))
    else:
        form = Form_test()
        return render_to_response('test.html', {'form': form,},context_instance=RequestContext(request))
def get_pac_result(filename, url, host):
    """Evaluate the PAC file *filename* for (*url*, *host*).

    :param filename: path of the PAC file
    :param url: full URL to resolve
    :param host: hostname component of the URL
    :returns: pacparser proxy string (e.g. ``"PROXY host:port; DIRECT"``)
    """
    pacparser.init()
    try:
        pacparser.parse_pac(filename)
        ret_str = pacparser.find_proxy(url, host)
    finally:
        # BUG FIX: guarantee the context is released even when parsing or
        # evaluation raises (cleanup was skipped on exception).
        pacparser.cleanup()
    return ret_str
#!/usr/bin/python2.5 import pacparser pacparser.init() pacparser.parse_pac("wpad.dat") proxy = pacparser.find_proxy("http://www.manugarg.com") print proxy pacparser.cleanup() # Or simply, print pacparser.just_find_proxy("wpad.dat", "http://www2.manugarg.com")
#!/usr/bin/env python import pacparser,sys print sys.argv[1] if len(sys.argv) >= 3: pacfile=sys.argv[2] print "use: ", pacfile else: pacfile="proxy.pac" pacparser.init() pacparser.parse_pac(pacfile) proxy = pacparser.find_proxy(sys.argv[1]) print proxy pacparser.cleanup() # Or simply, print pacparser.just_find_proxy(pacfile, sys.argv[1])
def main(args):
    """Add a task (with optional note and date) to Todoist (Python 2).

    Reads the API key and optional proxy settings from an INI config
    (default ~/.todoist-cli); a proxy_pac entry, when reachable, is
    evaluated with pacparser to set HTTP(S)_PROXY, otherwise explicit
    http_proxy/https_proxy entries are used.
    """
    ''' Parse arguments into options dictionary '''
    parser = argparse.ArgumentParser(prog='todoist-add', description='Add task to Todoist')
    parser.add_argument("--config", action="store", help="Configuration file with API")
    parser.add_argument("--item", action="store", help="Item text", required=True)
    parser.add_argument("--note", action="store", help="Note text")
    parser.add_argument("--date", action="store", help="Date text - any format Todoist can parse")
    options = vars(parser.parse_args(sys.argv[1:]))
    ''' Load user configuration file for API key etal '''
    if options['config']:
        cf = options['config']
    else:
        cf = os.getenv('HOME') + '/.todoist-cli'
    config = ConfigParser.RawConfigParser()
    files_read = config.read(cf)
    if files_read == []:
        print "Unable to open configuration file " + cf + "- aborting"
        sys.exit(1)
    '''
    [Authentication]
    api=xxxx
    [Network]
    proxy_pac=zzzz
    http_proxy=xxxx
    https_proxy=yyyy
    '''
    api_key = config.get('Authentication', 'api')
    if api_key == '':
        print "Unable to read API value from " + cf + "- aborting"
        sys.exit(1)
    if config.has_section('Network'):
        if config.has_option('Network', 'proxy_pac'):
            proxy_pac = config.get('Network', 'proxy_pac')
            if proxy_pac != '':
                ''' Check if proxy pac exists - if so use it to set proxy '''
                try:
                    # 1-second timeout: don't stall when the PAC host is
                    # unreachable.
                    response = urllib2.urlopen(proxy_pac, None, 1)
                    pac_file_contents = response.read()
                    pacparser.init()
                    pacparser.parse_pac_string(pac_file_contents)
                    # Take the host:port of the first "PROXY host:port" entry.
                    https_proxy = pacparser.find_proxy('https://todoist.com', 'todoist.com').split(' ')[1]
                    pacparser.cleanup()
                    ''' Set the proxy environment '''
                    os.environ['HTTP_PROXY'] = https_proxy
                    os.environ['HTTPS_PROXY'] = https_proxy
                except IOError as e:
                    print e
        else:
            ''' Check if explicit proxy exists and use '''
            http_proxy = config.get('Network', 'http_proxy')
            if http_proxy != '':
                os.environ['HTTP_PROXY'] = http_proxy
            https_proxy = config.get('Network', 'https_proxy')
            if https_proxy != '':
                os.environ['HTTPS_PROXY'] = https_proxy
    ''' Use the user Todoist API key to connect '''
    api = todoist.TodoistAPI(api_key)
    if api:
        if options['date']:
            item = api.items.add(options['item'], 0, date_string=options['date'])
        else:
            item = api.items.add(options['item'], 0)
        if debug:
            print item
        if options['note']:
            note = api.notes.add(item['id'], options['note'])
            if debug:
                print note
        ''' Commit the transaction to Todoist '''
        result = api.commit()
        if debug:
            print "Result:"
            print result
            print "Settings:"
            print "API Key=" + api_key
            if proxy_pac:
                print "PAC=" + proxy_pac
            print "HTTP_PROXY=" + os.getenv('HTTP_PROXY', "Not set")
            print "HTTPS_PROXY=" + os.getenv('HTTPS_PROXY', "Not set")
    else:
        print "Unable to connect to Todoist with API key - aborting"
        print api
        sys.exit(1)