def test(request):
    """Render the PAC test form; on POST, build the PAC file for the
    submitted source IP and evaluate it against the destination URL
    with pacparser, showing which proxy would be selected.

    Requires an authenticated user; anonymous users are redirected to
    the logon page.  Results are exposed to the template via locals().
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect("/logon/")
    if request.POST:
        c = {}
        c.update(csrf(request))  # reaches the template through locals()
        pac = "function FindProxyForURL(url, host){\n"
        form = Form_test(request.POST)
        if form.is_valid():
            networks = source.objects.all()
            network = []
            ipsource = form.cleaned_data['address_ip']
            destination = form.cleaned_data['destination']
            network_save = 0  # sentinel: no matching network found yet
            match2 = []
            cidr2 = []
            # Longest-prefix match: keep the matching network with the
            # largest mask in network_save.
            for network in networks:
                cidr = [str(network.address_ip) + "/" + str(network.mask)]
                match = smallest_matching_cidr(ipsource, cidr)
                match2.append(match)
                cidr2.append(cidr)
                if match:
                    try:
                        if int(str(network.mask)) > int(str(network_save.mask)):
                            network_save = network
                            # NOTE: the template is rendered with locals(),
                            # so deleting `match` here changes the context;
                            # keep the del to preserve behavior.
                            del match
                    except AttributeError:
                        # network_save is still the int sentinel (first hit)
                        network_save = network
                        del match
            try:
                # Concatenate the rules of the matched network and all of
                # its ancestors into the PAC body.
                tree = network_save.get_ancestors(include_self=True,
                                                 ascending=True)
                for network_node in tree:
                    rulepac = rules.objects.get(ref_address_ip=network_node)
                    pac += rulepac.pac + "\n"
            except AttributeError:
                # No network matched (network_save is still 0): fall back
                # to the catch-all network for the IP version.
                if IPNetwork(str(ipsource)).version == 6:
                    network_save = source.objects.get(address_ip='fe80::')
                else:
                    network_save = source.objects.get(address_ip='0.0.0.0')
                rulepac = rules.objects.get(ref_address_ip=network_save)
                pac += rulepac.pac + "\n"
            pac += "}"
            # Collapse the PAC body onto a single line for pacparser.
            p = re.compile(r'[\r\n\t]+')
            requette = p.sub("", pac)
            p = re.compile(r'[\s]')
            requette = p.sub(" ", requette)
            pacparser.init()
            parse = pacparser.parse_pac_string(str(requette))
            proxy = pacparser.find_proxy(str(destination))
            pacparser.cleanup()
            return render_to_response('test.html', locals(),
                                      context_instance=RequestContext(request))
        else:
            return render_to_response('test.html', {'form': form},
                                      context_instance=RequestContext(request))
    else:
        form = Form_test()
        return render_to_response('test.html', {'form': form},
                                  context_instance=RequestContext(request))
def pac(request, ficpac):
    """Serve the proxy auto-config (PAC) file for the client's source IP.

    The client IP is the first hop of X-Forwarded-For when present,
    otherwise REMOTE_ADDR.  The ``source`` network matching that IP with
    the longest prefix wins; the rules of that network and its ancestors
    are concatenated into the FindProxyForURL body.  Side effects: the
    matched network's hit counter is incremented and its RRD time series
    is updated (best effort).

    ``ficpac`` is the requested file name from the URLconf; it is not
    used to select the PAC content.
    """
    networks = source.objects.all()
    try:
        ipsource = request.META['HTTP_X_FORWARDED_FOR'].split(",")[0]
    except KeyError:
        ipsource = request.META['REMOTE_ADDR']
    network_save = 0  # sentinel: no matching network found yet
    pac = "function FindProxyForURL(url, host){\n"
    # Longest-prefix match: keep the matching network with the largest mask.
    for network in networks:
        cidr = [str(network.address_ip) + "/" + str(network.mask)]
        match = smallest_matching_cidr(ipsource, cidr)
        if match:
            try:
                if int(str(network.mask)) > int(str(network_save.mask)):
                    network_save = network
            except AttributeError:
                # network_save is still the int sentinel (first hit)
                network_save = network
    try:
        # Concatenate the rules of the matched network and its ancestors.
        tree = network_save.get_ancestors(include_self=True, ascending=True)
        for network_node in tree:
            rulepac = rules.objects.get(ref_address_ip=network_node)
            pac += rulepac.pac + "\n"
    except AttributeError:
        # No network matched: fall back to the catch-all network for the
        # client's IP version.
        if IPNetwork(str(ipsource)).version == 6:
            network_save = source.objects.get(address_ip='fe80::')
        else:
            network_save = source.objects.get(address_ip='0.0.0.0')
        rulepac = rules.objects.get(ref_address_ip=network_save)
        pac += rulepac.pac + "\n"
    pac += "}"
    # Count the hit on the matched network.
    network_save.hits = int(network_save.hits) + 1
    network_save.save()
    # NOTE(review): the original collected ~20 request.META headers into
    # ref_* locals feeding a commented-out `stats(...)` insert; that dead
    # code was removed.  If stats logging is re-enabled, read the headers
    # with request.META.get(key, "None") (REMOTE_USER masked as "******").
    # Update the per-network RRD; create the database on first use.
    fichier = settings.RRD_ROOT + network_save.address_ip + ".rrd"
    try:
        open(str(fichier), 'r').close()
    except IOError:
        rrdtool.create(str(fichier), "--step", "60", "--start", '0',
                       "DS:input:COUNTER:120:U:U",
                       "RRA:AVERAGE:0.5:1:120",
                       "RRA:AVERAGE:0.5:5:288",
                       "RRA:AVERAGE:0.5:30:336",
                       "RRA:AVERAGE:0.5:30:1488",
                       "RRA:MAX:0.5:1:120",
                       "RRA:MAX:0.5:5:288",
                       "RRA:MAX:0.5:30:336",
                       "RRA:MAX:0.5:30:1488")
    try:
        # str() replaces the Python-2-only backtick repr of the original.
        rrdtool.update(str(fichier), 'N:' + str(int(network_save.hits)))
    except Exception:
        # Best effort: a failed RRD update must not break PAC delivery.
        pass
    return HttpResponse(pac, mimetype="application/x-ns-proxy-autoconfig")