def set_proxy(self):
    """Install urllib openers for the configured proxies.

    The "proxy" preference applies to all Pithos HTTP traffic. The
    "control_proxy" / "control_proxy_pac" preferences apply only to Pandora
    traffic and override "proxy". If neither option is set,
    urllib.request.build_opener falls back to urllib.getproxies().
    """
    handlers = []
    global_proxy = self.preferences["proxy"]
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({"http": global_proxy, "https": global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    urllib.request.install_opener(global_opener)

    control_proxy = self.preferences["control_proxy"]
    control_proxy_pac = self.preferences["control_proxy_pac"]
    if not control_proxy and control_proxy_pac and pacparser_imported:
        # Resolve the Pandora proxy from the PAC script.
        pacparser.init()
        try:
            pacparser.parse_pac_string(urllib.request.urlopen(control_proxy_pac).read())
            proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
            for proxy in proxies:
                match = re.search("PROXY (.*)", proxy)
                if match:
                    control_proxy = match.group(1)
                    break
        finally:
            # BUGFIX: pacparser state was never released before.
            pacparser.cleanup()

    control_opener = global_opener
    if control_proxy:
        # BUGFIX: build the control opener AFTER PAC resolution so a proxy
        # discovered via the PAC file is actually used. Previously this ran
        # before the PAC branch, silently discarding the PAC result.
        control_opener = urllib.request.build_opener(
            urllib.request.ProxyHandler({"http": control_proxy, "https": control_proxy})
        )
    self.worker_run("set_url_opener", (control_opener,))
def network_rule(request, nid):
    """Create or edit the PAC rule attached to network *nid*.

    POST: validates the submitted PAC snippet by wrapping it in a dummy
    FindProxyForURL and feeding it to pacparser, then saves it as the
    network's rule. GET: shows the existing rule (plus the combined PAC of
    the network's ancestors) or an empty form.

    NOTE(review): `locals()` is passed to the template, so many variables
    here (parse, parse_global, action, pac2, form2) exist only for the
    template's benefit.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect("/logon/")
    if request.POST:
        c = {}
        c.update(csrf(request))
        form = Form_pac(request.POST)
        # Dummy wrapper so the snippet parses as a complete PAC function.
        requette_test = "function FindProxyForURL(url, host){if(1){}"
        if form.is_valid():
            network = source.objects.get(id=nid)
            # Collapse the snippet to a single line: strip CR/LF/tabs, then
            # normalise remaining whitespace to single spaces.
            p = re.compile('[\r\n\t]+')
            requette = p.sub("", form.cleaned_data['script'])
            p = re.compile('[\s]')
            requette = p.sub(" ", requette)
            requette_test += requette + "}"
            pacparser.init()
            try:
                # Syntax-check only; result is discarded.
                pacparser.parse_pac_string(str(requette_test))
            except:
                # pacparser raises on bad JS; report it via the template.
                # NOTE(review): pacparser.cleanup() is skipped on this path.
                parse = "Pac syntax error"
                return render_to_response('pac_new.html', locals(), context_instance=RequestContext(request))
            pacparser.cleanup()
            try:
                # Update the existing rule for this network...
                rulepac = rules.objects.get(ref_address_ip=network)
                rulepac.pac = form.cleaned_data['script']
            except ObjectDoesNotExist:
                # ...or create one if none exists yet.
                rulepac = rules(ref_address_ip=network, pac=form.cleaned_data['script'])
            rulepac.save()
        else:
            return render_to_response('pac_new.html', locals(), context_instance=RequestContext(request))
        return HttpResponseRedirect("/network/")
    else:
        pac = "function FindProxyForURL(url, host){\n"
        try:
            network = source.objects.get(id=nid)
            rulepac = rules.objects.get(ref_address_ip=network)
            form = Form_pac(initial={'script': rulepac.pac})
            action = "Edit"
            try:
                # Build the effective PAC by concatenating the rules of this
                # network and all its ancestors (closest first).
                tree = network.get_ancestors(include_self=True, ascending=True)
                for networks in tree:
                    rulepac = rules.objects.get(ref_address_ip=networks)
                    pac += rulepac.pac + "\n"
                pac += "}"
            except:
                # Best-effort: an ancestor without a rule leaves pac unclosed.
                pass
            pacparser.init()
            try:
                pacparser.parse_pac_string(str(pac))
                parse_global = "Pac syntax seems to be ok"
            except:
                parse_global = "Pac syntax error"
            pacparser.cleanup()
            pac2 = "<code>"
            pac2 += pac
            form2 = Form_result(initial={'script': pac})
        except ObjectDoesNotExist:
            # No rule yet for this network: present a blank creation form.
            form = Form_pac()
            action = "Add"
        return render_to_response('pac_new.html', locals(), context_instance=RequestContext(request))
def __parse_pac(self):
    """Fetch and parse the PAC referenced by ``self.pac_url``.

    Sets ``self.fetched`` to False (and returns) on a retryable connection
    error; to True on success. On an IOError the parser state is cleaned up
    and the error is re-raised.
    """
    retry, downloaded_pac = download_url(self.pac_url, download_pac)
    if retry:
        # Connection error; get_proxies() will trigger another attempt later.
        self.fetched = False
        return
    # The download either succeeded, or the value was not a valid url at
    # all — in which case we fall back to treating it as a file path.
    try:
        pacparser.init()
        if downloaded_pac is None:
            g_log.debug("pac url is a local file: {}".format(self.pac_url))
            pacparser.parse_pac_file(self.pac_url)
        else:
            g_log.debug("pac url is a url: {}".format(self.pac_url))
            pacparser.parse_pac_string(downloaded_pac)
    except IOError:
        # Neither a valid url nor a valid file path.
        self.clean_up()
        raise
    self.fetched = True
def set_proxy(self):
    """Install urllib openers for the configured proxies.

    The 'proxy' preference applies to all Pithos HTTP traffic. The
    'control_proxy' / 'control_proxy_pac' preferences apply only to Pandora
    traffic and override 'proxy'. If neither option is set,
    urllib.request.build_opener falls back to urllib.getproxies().
    """
    handlers = []
    global_proxy = self.preferences['proxy']
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({'http': global_proxy, 'https': global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    urllib.request.install_opener(global_opener)

    control_proxy = self.preferences['control_proxy']
    control_proxy_pac = self.preferences['control_proxy_pac']
    if not control_proxy and control_proxy_pac and pacparser_imported:
        # Resolve the Pandora proxy from the PAC script.
        pacparser.init()
        try:
            pacparser.parse_pac_string(urllib.request.urlopen(control_proxy_pac).read())
            proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
            for proxy in proxies:
                match = re.search("PROXY (.*)", proxy)
                if match:
                    control_proxy = match.group(1)
                    break
        finally:
            # BUGFIX: pacparser state was never released before.
            pacparser.cleanup()

    control_opener = global_opener
    if control_proxy:
        # BUGFIX: build the control opener AFTER PAC resolution so a proxy
        # discovered via the PAC file is actually used. Previously this ran
        # before the PAC branch, silently discarding the PAC result.
        control_opener = urllib.request.build_opener(urllib.request.ProxyHandler({'http': control_proxy, 'https': control_proxy}))
    self.worker_run('set_url_opener', (control_opener,))
def updateWPAD(signum=None, stackframe=None):
    """Refresh the WPAD proxy configuration.

    Tries each candidate WPAD URL in turn, parses the first one that
    downloads successfully into pacparser, and toggles
    WPADProxyRequest.force_direct accordingly. The (signum, stackframe)
    signature lets this double as a signal handler.

    NOTE(review): the `yield`s suggest this runs under a coroutine wrapper
    (e.g. Twisted inlineCallbacks) — confirm against the decorator at the
    call site, which is outside this view.
    """
    if args.force_proxy:
        return
    logger.info("Updating WPAD configuration...")
    wpad_urls = yield get_possible_configuration_locations()
    # use DIRECT temporarily; who knows what state the below gets pacparser in
    WPADProxyRequest.force_direct = 'DIRECT'
    for wpad_url in wpad_urls:
        logger.info("Trying %s...", wpad_url)
        try:
            agent = Agent(reactor)
            # TODO: need to ensure this doesn't go through any http_proxy, such as
            # ourselves :)
            response = yield agent.request(b'GET', wpad_url.encode('ascii'))
            body = yield readBody(response)
            logger.info("...found. Parsing configuration...")
            pacparser.parse_pac_string(body.decode('ascii'))
            logger.info("Updated configuration")
            # A PAC was loaded: stop forcing DIRECT and keep this config.
            WPADProxyRequest.force_direct = None
            break
        except Exception as e:
            # Best-effort: any failure just moves on to the next candidate.
            logger.info("...didn't work")
            pass
    else:
        # for/else: no candidate URL worked, keep forcing DIRECT.
        logger.info("None of the tried urls seem to have worked; falling back to direct")
        WPADProxyRequest.force_direct = 'DIRECT'
def find_proxies(self, url):
    """Parse proxy URL for provided URL.

    Parse the PAC (str(self)) and return a mapping of
    {protocol: "host:port"} for the given URL. DIRECT entries are skipped.

    Raises:
        ProxyConfigError: for an invalid URL or an unparseable PAC.
    """
    # BUGFIX: str.split never raises ValueError, so the original
    # try/except ValueError was dead code — validate the separator instead.
    if '://' not in url:
        raise ProxyConfigError('Invalid URL: %s' % url)
    protocol = url.split('://')[0]
    try:
        pacparser.init()
        pacparser.parse_pac_string(str(self))
        proxies = pacparser.find_proxy(url)
    except Exception:
        # BUGFIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit).
        raise ProxyConfigError('Error parsing PAC: %s' % self.pac_url)
    finally:
        # BUGFIX: always release pacparser state, even on failure.
        pacparser.cleanup()
    data = {}
    for v in [x.strip() for x in proxies.split(';')]:
        if v == 'DIRECT':
            continue
        if v[:6] == 'PROXY ':
            # Last PROXY entry wins for this protocol.
            data[protocol] = v[6:]
    return data
def pac_result(request, nid):
    """Show the effective PAC and access statistics for network *nid*.

    Builds the combined PAC from the network's ancestor rules, syntax-checks
    it with pacparser, ensures the network's RRD file exists, renders the
    access graph, and hands everything to the template via locals().
    """
    pac = "<code>function FindProxyForURL(url, host){\n"
    network = source.objects.get(id=nid)
    rulepac = rules.objects.get(ref_address_ip=network)
    # Last 20 recorded requests for this network, newest first.
    statistiques = stats.objects.filter(ref_address_ip=network).order_by('id').reverse()[:20]
    form = []
    form3 = Form_stats()
    for statis in statistiques:
        # One read-only form per stats row, pre-filled with the request headers.
        form.append(Form_stats(initial={'HTTP_ACCEPT': statis.HTTP_ACCEPT,
            'HTTP_ACCEPT_CHARSET': statis.HTTP_ACCEPT_CHARSET,
            'HTTP_ACCEPT_ENCODING': statis.HTTP_ACCEPT_ENCODING,
            'HTTP_ACCEPT_LANGUAGE': statis.HTTP_ACCEPT_LANGUAGE,
            'HTTP_CACHE_CONTROL': statis.HTTP_CACHE_CONTROL,
            'HTTP_CONNECTION': statis.HTTP_CONNECTION,
            'HTTP_COOKIE': statis.HTTP_COOKIE,
            'HTTP_HOST': statis.HTTP_HOST,
            'HTTP_USER_AGENT': statis.HTTP_USER_AGENT,
            'HTTP_VIA': statis.HTTP_VIA,
            'HTTP_X_FORWARDED_FOR': statis.HTTP_X_FORWARDED_FOR,
            'PATH_INFO': statis.PATH_INFO,
            'PATH_TRANSLATED': statis.PATH_TRANSLATED,
            'QUERY_STRING': statis.QUERY_STRING,
            'REMOTE_ADDR': statis.REMOTE_ADDR,
            'REMOTE_HOST': statis.REMOTE_HOST,
            'REMOTE_IDENT': statis.REMOTE_IDENT,
            'REMOTE_USER': statis.REMOTE_USER,
            'REQUEST_METHOD': statis.REQUEST_METHOD}))
    try:
        # Combined PAC: concatenate the rules of this network and all its
        # ancestors (closest first).
        tree = network.get_ancestors(include_self=True, ascending=True)
        for networks in tree:
            rulepac = rules.objects.get(ref_address_ip=networks)
            pac += rulepac.pac + "\n"
        pac += "}"
    except:
        # Best-effort: an ancestor without a rule leaves pac unclosed.
        pass
    pacparser.init()
    try:
        # Syntax check only; result string shown in the template.
        pacparser.parse_pac_string(str(pac))
        parse_global = "Pac syntax seems to be ok"
    except:
        parse_global = "Pac syntax error"
    pacparser.cleanup()
    # Per-network RRD database and output graph locations.
    fichier = settings.RRD_ROOT + network.address_ip + ".rrd"
    png = settings.MEDIA_ROOT + "img/rrd/" + network.address_ip + ".png"
    try:
        # Probe for the RRD file; create it on first use.
        openfile = open(str(fichier), 'r')
        openfile.close()
    except:
        ret = rrdtool.create(str(fichier), "--step", "60", "--start", '0',
            "DS:input:COUNTER:120:U:U",
            "RRA:AVERAGE:0.5:1:120",
            "RRA:AVERAGE:0.5:5:288",
            "RRA:AVERAGE:0.5:30:336",
            "RRA:AVERAGE:0.5:30:1488",
            "RRA:MAX:0.5:1:120",
            "RRA:MAX:0.5:5:288",
            "RRA:MAX:0.5:30:336",
            "RRA:MAX:0.5:30:1488")
    # Render the last two hours of access rates to the PNG.
    ret = rrdtool.graph(str(png), "--start", "-2hour", "--vertical-label=Access/s",
        "DEF:inoctets=" + str(fichier) + ":input:AVERAGE",
        "AREA:inoctets#FF0000:In access",
        "COMMENT:\\n",
        "GPRINT:inoctets:AVERAGE:Avg In access\: %6.2lf %S access",
        "COMMENT: ",
        "GPRINT:inoctets:MAX:Max In access\: %6.2lf %S access\\r")
    return render_to_response('pac_result.html', locals(), context_instance=RequestContext(request))
def set_proxy(self):
    """Install urllib openers for the configured proxies.

    The 'proxy' preference applies to all Pithos HTTP traffic. The
    'control_proxy' / 'control_proxy_pac' preferences apply only to Pandora
    traffic and override 'proxy'. If neither option is set,
    urllib.request.build_opener falls back to urllib.getproxies().
    """
    handlers = []
    global_proxy = self.preferences['proxy']
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({'http': global_proxy, 'https': global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    urllib.request.install_opener(global_opener)

    control_proxy = self.preferences['control_proxy']
    control_proxy_pac = self.preferences['control_proxy_pac']
    if not control_proxy and control_proxy_pac:
        if pacparser_imported:
            # Resolve the Pandora proxy from the PAC script.
            pacparser.init()
            try:
                pacparser.parse_pac_string(urllib.request.urlopen(control_proxy_pac).read())
                proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
                for proxy in proxies:
                    match = re.search("PROXY (.*)", proxy)
                    if match:
                        control_proxy = match.group(1)
                        break
            finally:
                # BUGFIX: pacparser state was never released before.
                pacparser.cleanup()
        else:
            # logging.warn is a deprecated alias; use logging.warning.
            logging.warning("Disabled proxy auto-config support because python-pacparser module was not found.")

    control_opener = global_opener
    if control_proxy:
        # BUGFIX: build the control opener AFTER PAC resolution so a proxy
        # discovered via the PAC file is actually used. Previously this ran
        # before the PAC branch, silently discarding the PAC result.
        control_opener = urllib.request.build_opener(urllib.request.ProxyHandler({'http': control_proxy, 'https': control_proxy}))
    self.worker_run('set_url_opener', (control_opener,))
def find_proxies(self, url):
    """Parse proxy URL for provided URL.

    Parse the PAC (str(self)) and return a mapping of
    {protocol: "host:port"} for the given URL. DIRECT entries are skipped.

    Raises:
        ProxyConfigError: for an invalid URL or an unparseable PAC.
    """
    # BUGFIX: str.split never raises ValueError, so the original
    # try/except ValueError was dead code — validate the separator instead.
    if '://' not in url:
        raise ProxyConfigError('Invalid URL: %s' % url)
    protocol = url.split('://')[0]
    try:
        pacparser.init()
        pacparser.parse_pac_string(str(self))
        proxies = pacparser.find_proxy(url)
    except Exception:
        # BUGFIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit).
        raise ProxyConfigError('Error parsing PAC: %s' % self.pac_url)
    finally:
        # BUGFIX: always release pacparser state, even on failure.
        pacparser.cleanup()
    data = {}
    for v in [x.strip() for x in proxies.split(';')]:
        if v == 'DIRECT':
            continue
        if v[:6] == 'PROXY ':
            # Last PROXY entry wins for this protocol.
            data[protocol] = v[6:]
    return data
def validate(self):
    """Validate PAC with pacparser.

    Validate the resolved PAC configuration (str(self)) with the pacparser
    library; parse errors propagate to the caller.
    """
    pacparser.init()
    try:
        pacparser.parse_pac_string(str(self))
    finally:
        # BUGFIX: release pacparser state even when parsing raises;
        # previously a parse error leaked the initialized context.
        pacparser.cleanup()
def validate(self):
    """Validate PAC with pacparser.

    Validate the resolved PAC configuration (str(self)) with the pacparser
    library; parse errors propagate to the caller.
    """
    pacparser.init()
    try:
        pacparser.parse_pac_string(str(self))
    finally:
        # BUGFIX: release pacparser state even when parsing raises;
        # previously a parse error leaked the initialized context.
        pacparser.cleanup()
def set_proxy(self, *ignore, reconnect=True):
    """Install urllib openers for the configured proxies.

    The 'proxy' setting applies to all Pithos HTTP traffic. The
    'control-proxy' / 'control-proxy-pac' settings apply only to Pandora
    traffic and override 'proxy'. If neither is set, build_opener falls back
    to urllib.getproxies(). *ignore* absorbs signal-handler style arguments.
    When *reconnect* is true the worker reconnects to Pandora afterwards.
    """
    handlers = []
    global_proxy = self.settings['proxy']
    if global_proxy:
        handlers.append(
            urllib.request.ProxyHandler({
                'http': global_proxy,
                'https': global_proxy
            }))
    global_opener = pandora.Pandora.build_opener(*handlers)
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.settings['control-proxy']
    control_proxy_pac = self.settings['control-proxy-pac']
    # An explicit control proxy wins; the PAC is only consulted when one is
    # not set and the pacparser module imported successfully.
    if not control_proxy and (control_proxy_pac and pacparser):
        pacparser.init()
        with urllib.request.urlopen(control_proxy_pac) as f:
            pacstring = f.read().decode('utf-8')
        try:
            pacparser.parse_pac_string(pacstring)
        except pacparser._pacparser.error:
            logging.warning('Failed to parse PAC.')
        try:
            # Take the first PROXY entry the PAC returns for pandora.com.
            proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
            for proxy in proxies:
                match = re.search("PROXY (.*)", proxy)
                if match:
                    control_proxy = match.group(1)
                    break
        except pacparser._pacparser.error:
            logging.warning('Failed to find proxy via PAC.')
        pacparser.cleanup()
    elif not control_proxy and (control_proxy_pac and not pacparser):
        logging.warning(
            "Disabled proxy auto-config support because python-pacparser module was not found."
        )
    if control_proxy:
        # Built after PAC resolution so a PAC-discovered proxy is used too.
        control_opener = pandora.Pandora.build_opener(
            urllib.request.ProxyHandler({
                'http': control_proxy,
                'https': control_proxy
            }))
    self.worker_run('set_url_opener', (control_opener, ), self.pandora_connect if reconnect else None)
def set_proxy(self, *ignore):
    """Install urllib openers for the configured proxies.

    The 'proxy' setting applies to all Pithos HTTP traffic. The
    'control-proxy' / 'control-proxy-pac' settings apply only to Pandora
    traffic and override 'proxy'. If neither is set, build_opener falls back
    to urllib.getproxies(). *ignore* absorbs GSettings signal arguments.
    """
    handlers = []
    global_proxy = self.settings.get_string('proxy')
    if global_proxy:
        handlers.append(urllib.request.ProxyHandler({'http': global_proxy, 'https': global_proxy}))
    global_opener = urllib.request.build_opener(*handlers)
    urllib.request.install_opener(global_opener)
    control_opener = global_opener
    control_proxy = self.settings.get_string('control-proxy')
    control_proxy_pac = self.settings.get_string('control-proxy-pac')
    # An explicit control proxy wins; the PAC is only consulted when one is
    # not set and the pacparser module imported successfully.
    if not control_proxy and (control_proxy_pac and pacparser):
        pacparser.init()
        with urllib.request.urlopen(control_proxy_pac) as f:
            pacstring = f.read().decode('utf-8')
        try:
            pacparser.parse_pac_string(pacstring)
        except pacparser._pacparser.error:
            logging.warning('Failed to parse PAC.')
        try:
            # Take the first PROXY entry the PAC returns for pandora.com.
            proxies = pacparser.find_proxy("http://pandora.com", "pandora.com").split(";")
            for proxy in proxies:
                match = re.search("PROXY (.*)", proxy)
                if match:
                    control_proxy = match.group(1)
                    break
        except pacparser._pacparser.error:
            logging.warning('Failed to find proxy via PAC.')
        pacparser.cleanup()
    elif not control_proxy and (control_proxy_pac and not pacparser):
        logging.warning("Disabled proxy auto-config support because python-pacparser module was not found.")
    if control_proxy:
        # Built after PAC resolution so a PAC-discovered proxy is used too.
        control_opener = urllib.request.build_opener(urllib.request.ProxyHandler({'http': control_proxy, 'https': control_proxy}))
    self.worker_run('set_url_opener', (control_opener,))
def find_proxy(self, host):
    """Return the destinations the PAC selects for *host*.

    Each entry is an Address(host, port) for a PROXY result, or None for a
    DIRECT result; other PAC result types are ignored.
    """
    # pacparser keeps process-global state, so serialize access.
    self.__lock.acquire()
    try:
        import pacparser
        pacparser.init()
        pacparser.parse_pac_string(self.__pac_string)
        results = pacparser.find_proxy('http://%s' % host, host)
        pacparser.cleanup()
    finally:
        self.__lock.release()
    dests = []
    for result in results.split(';'):
        result = result.strip()
        if result.startswith('PROXY'):
            # BUGFIX: don't rebind the `host` parameter here — the original
            # `host, port = ...` shadowed it, so the debug log below reported
            # the last proxy's host instead of the requested host.
            proxy_host, proxy_port = result.split(' ')[1].split(':')
            dests.append(Address(proxy_host, proxy_port))
        elif result.startswith('DIRECT'):
            dests.append(None)
    getLogger().write('Proxy for "%s" -> %s' % (host, dests), Log.DEBUG)
    return dests
def __parse_pac(self):
    """Fetch and parse the PAC referenced by ``self.pac_url``.

    Sets ``self.fetched`` to False (and returns) when the download failed in
    a retryable way; to True on success. On any parse/IO failure the parser
    state is cleaned up and the error re-raised.
    """
    required_download, url = parse_file_uri(self.pac_url)
    try:
        if required_download:
            downloaded_pac = download_pac([url], timeout=30)
            if downloaded_pac:
                pacparser.init()
                pacparser.parse_pac_string(downloaded_pac)
            else:
                # retryable exceptions
                self.fetched = False
                return
        else:
            pacparser.init()
            pacparser.parse_pac_file(url)
    except Exception:
        # BUGFIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer intercepted here.
        # neither a valid url or valid file path
        g_log.error("Unsupported pac url {}".format(self.pac_url))
        g_log.debug("Cleaning up pacparser...")
        self.clean_up()
        raise
    else:
        self.fetched = True
print("ERROR: File not found. Exiting...") parsePAC = False elif r.status_code == 404: print("ERROR: File not found. Exiting...") parsePAC = False else: print("Undefined Error. Exiting...") parsePAC = False if not parsePAC: sys.exit(1) #Attempt to parse retrieved PAC file try: pacparser.init() pacparser.parse_pac_string(r.content) except Exception as e: print("Unable to parse PAC file. Exiting...") print(e) sys.exit(1) else: verboseprint("PAC Parsing Successful.") finally: pacparser.cleanup() #Optionally write the output file if args.noout: #Function for writing the output file def writeOutputFile(filePath, contents): try: with open(filePath, "w") as f:
def test(request):
    """Simulate a PAC lookup for a given source IP and destination URL.

    POST: finds the most specific configured network containing the source
    IP, assembles the combined PAC from that network's ancestor rules,
    parses it and runs find_proxy on the destination. GET: shows the form.
    Results are passed to the template via locals().
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect("/logon/")
    if request.POST:
        c = {}
        c.update(csrf(request))
        pac = "function FindProxyForURL(url, host){\n"
        form = Form_test(request.POST)
        if form.is_valid():
            networks = source.objects.all()
            network = []
            ipsource = form.cleaned_data['address_ip']
            destination = form.cleaned_data['destination']
            # Sentinel: int 0 has no .mask, so the first match triggers the
            # AttributeError branch below and seeds network_save.
            network_save = 0
            match2 = []
            cidr2 = []
            for network in networks:
                cidr = [str(network.address_ip) + "/" + str(network.mask)]
                match = smallest_matching_cidr(ipsource, cidr)
                match2.append(match)
                cidr2.append(cidr)
                if match:
                    try:
                        # Keep the most specific (longest-mask) matching network.
                        if ((int(str(network.mask))) > (int(str(network_save.mask)))):
                            network_save = network
                        del match
                    except AttributeError:
                        # First match: network_save was still the 0 sentinel.
                        network_save = network
                        del match
            try:
                # Combined PAC from the matched network and its ancestors.
                tree = network_save.get_ancestors(include_self=True, ascending=True)
                for network_node in tree:
                    rulepac = rules.objects.get(ref_address_ip=network_node)
                    pac += rulepac.pac + "\n"
                pac += "}"
                # Collapse to one line: strip CR/LF/tabs, squeeze whitespace.
                p = re.compile('[\r\n\t]+')
                requette = p.sub("", pac)
                p = re.compile('[\s]')
                requette = p.sub(" ", requette)
            except AttributeError:
                # No network matched (network_save is still 0): fall back to
                # the catch-all network for the IP family.
                if (IPNetwork(str(ipsource)).version == 6):
                    network_save = source.objects.get(address_ip='fe80::')
                else:
                    network_save = source.objects.get(address_ip='0.0.0.0')
                rulepac = rules.objects.get(ref_address_ip=network_save)
                pac += rulepac.pac + "\n"
                pac += "}"
                p = re.compile('[\r\n\t]+')
                requette = p.sub("", pac)
                p = re.compile('[\s]')
                requette = p.sub(" ", requette)
            pacparser.init()
            parse = pacparser.parse_pac_string(str(requette))
            proxy = pacparser.find_proxy(str(destination))
            pacparser.cleanup()
            return render_to_response('test.html', locals(), context_instance=RequestContext(request))
        else:
            return render_to_response('test.html', {'form': form,}, context_instance=RequestContext(request))
    else:
        form = Form_test()
        return render_to_response('test.html', {'form': form,}, context_instance=RequestContext(request))
def main(args):
    # Python 2 script: print statements, urllib2, ConfigParser.
    ''' Parse arguments into options dictionary '''
    parser = argparse.ArgumentParser(prog='todoist-add', description='Add task to Todoist')
    parser.add_argument("--config", action="store", help="Configuration file with API")
    parser.add_argument("--item", action="store", help="Item text", required=True)
    parser.add_argument("--note", action="store", help="Note text")
    parser.add_argument("--date", action="store", help="Date text - any format Todoist can parse")
    options = vars(parser.parse_args(sys.argv[1:]))
    ''' Load user configuration file for API key etal '''
    if options['config']:
        cf = options['config']
    else:
        # Default config location in the user's home directory.
        cf = os.getenv('HOME') + '/.todoist-cli'
    config = ConfigParser.RawConfigParser()
    files_read = config.read(cf)
    if files_read == []:
        print "Unable to open configuration file " + cf + "- aborting"
        sys.exit(1)
    # Expected config file layout:
    '''
    [Authentication]
    api=xxxx
    [Network]
    proxy_pac=zzzz
    http_proxy=xxxx
    https_proxy=yyyy
    '''
    api_key = config.get('Authentication', 'api')
    if api_key == '':
        print "Unable to read API value from " + cf + "- aborting"
        sys.exit(1)
    if config.has_section('Network'):
        if config.has_option('Network', 'proxy_pac'):
            proxy_pac = config.get('Network', 'proxy_pac')
            if proxy_pac != '':
                ''' Check if proxy pac exists - if so use it to set proxy '''
                try:
                    # 1-second timeout on the PAC fetch.
                    response = urllib2.urlopen(proxy_pac, None, 1)
                    pac_file_contents = response.read()
                    pacparser.init()
                    pacparser.parse_pac_string(pac_file_contents)
                    # Take the host:port token of the first PAC result,
                    # e.g. "PROXY host:port".
                    https_proxy = pacparser.find_proxy('https://todoist.com', 'todoist.com').split(' ')[1]
                    pacparser.cleanup()
                    ''' Set the proxy environment '''
                    os.environ['HTTP_PROXY'] = https_proxy
                    os.environ['HTTPS_PROXY'] = https_proxy
                except IOError as e:
                    # Best-effort: report the fetch failure and continue direct.
                    print e
            else:
                ''' Check if explicit proxy exists and use '''
                http_proxy = config.get('Network', 'http_proxy')
                if http_proxy != '':
                    os.environ['HTTP_PROXY'] = http_proxy
                https_proxy = config.get('Network', 'https_proxy')
                if https_proxy != '':
                    os.environ['HTTPS_PROXY'] = https_proxy
    ''' Use the user Todoist API key to connect '''
    api = todoist.TodoistAPI(api_key)
    if api:
        if options['date']:
            item = api.items.add(options['item'], 0, date_string=options['date'])
        else:
            item = api.items.add(options['item'], 0)
        if debug:
            print item
        if options['note']:
            note = api.notes.add(item['id'], options['note'])
            if debug:
                print note
        ''' Commit the transaction to Todoist '''
        result = api.commit()
        if debug:
            print "Result:"
            print result
            print "Settings:"
            print "API Key=" + api_key
            # NOTE(review): proxy_pac is unbound here when the config has no
            # [Network]/proxy_pac entry — confirm intended only for debug runs.
            if proxy_pac:
                print "PAC=" + proxy_pac
            print "HTTP_PROXY=" + os.getenv('HTTP_PROXY', "Not set")
            print "HTTPS_PROXY=" + os.getenv('HTTPS_PROXY', "Not set")
    else:
        print "Unable to connect to Todoist with API key - aborting"
        print api
        sys.exit(1)