def timeToFirstResponseThread(startTime):
    """Poll the benchmark suite's service endpoint until it answers HTTP 200,
    then log the elapsed time since *startTime* in milliseconds.

    Aborts via mx.abort() if no successful response arrives within ~60 s.
    NOTE(review): relies on module-level `bmSuite` and `mx` defined elsewhere.
    """
    # Local import: the original called the urllib module object itself
    # (`urllib().urlopen`), which raises TypeError on the first attempt.
    from urllib.request import urlopen

    protocolHost = bmSuite.serviceHost()
    servicePath = bmSuite.serviceEndpoint()
    # Normalize scheme and path separator so the pieces join into a valid URL.
    if not (protocolHost.startswith('http') or protocolHost.startswith('https')):
        protocolHost = "http://" + protocolHost
    if not (servicePath.startswith('/') or protocolHost.endswith('/')):
        servicePath = '/' + servicePath
    url = "{}:{}{}".format(protocolHost, bmSuite.servicePort(), servicePath)
    # 60 * 100 attempts with a 10 ms sleep after each failure -> ~60 s budget.
    for _ in range(60 * 100):
        try:
            req = urlopen(url)
            if req.getcode() == 200:
                finishTime = datetime.datetime.now()
                msToFirstResponse = (finishTime - startTime).total_seconds() * 1000
                bmSuite.timeToFirstResponseOutput = "First response received in {} ms".format(
                    msToFirstResponse)
                mx.log(bmSuite.timeToFirstResponseOutput)
                return
        except IOError:
            time.sleep(.01)
    mx.abort(
        "Failed measure time to first response. Service not reachable at " + url)
def get_logo():
    """Ensure the Petrolicious logo is available, fetching it if needed.

    Returns:
        True if the logo was already present or was fetched successfully,
        False on any download failure.

    NOTE(review): relies on a module-level `logoispresent` flag defined
    elsewhere; the "save locally" step is still a TODO (the response is
    fetched but never written to disk, matching the original behavior).
    """
    # Local import: the original called the urllib module object itself.
    from urllib.request import urlopen

    try:
        if logoispresent:
            return True
        logo = urlopen(
            'http://static.petrolicious.com/petrolicious/images/about_horizontallogo.png')
        # save logo locally
        return True
    except (IOError, OSError):  # was a bare except; narrow to download errors
        print('could not get logo')
        return False
def list_routes():
    """Print every URL rule registered on the Flask `application`, sorted,
    one line per rule: "<endpoint> <methods> <url>".

    NOTE(review): relies on module-level `application` and `url_for`.
    """
    from urllib.parse import unquote  # py3 home of urllib.unquote

    output = []
    for rule in application.url_map.iter_rules():
        # Fill each route argument with a visible "[arg]" placeholder.
        options = {arg: "[{0}]".format(arg) for arg in rule.arguments}
        methods = ','.join(rule.methods)
        url = url_for(rule.endpoint, **options)
        # Fix: the original computed `not urllib(...)` which both calls the
        # module object (TypeError) and would store a boolean; the standard
        # snippet unquotes the formatted line so placeholders stay readable.
        line = unquote("{:50s} {:20s} {}".format(rule.endpoint, methods, url))
        output.append(line)
    for line in sorted(output):
        print(line)
def measureTimeToFirstResponse(bmSuite):
    """Poll bmSuite's service endpoint until it returns HTTP 200 and record
    the time from the last subprocess start to that first response, in ms.

    Aborts via mx.abort() if no 200 response arrives within ~60 s.
    NOTE(review): relies on module-level `mx` defined elsewhere.
    """
    # Fix: the original did `lib = urllib()` — the module object is not
    # callable, so every poll raised TypeError before reaching the service.
    from urllib.request import urlopen

    protocolHost = bmSuite.serviceHost()
    servicePath = bmSuite.serviceEndpoint()
    # Normalize scheme and path separator so the pieces join into a valid URL.
    if not (protocolHost.startswith('http') or protocolHost.startswith('https')):
        protocolHost = "http://" + protocolHost
    if not (servicePath.startswith('/') or protocolHost.endswith('/')):
        servicePath = '/' + servicePath
    url = "{}:{}{}".format(protocolHost, bmSuite.servicePort(), servicePath)
    receivedNon200Responses = 0
    mx.log("Started time-to-first-response measurements: " + url)
    # 60 * 10000 attempts with a 0.1 ms sleep -> roughly 60 s of polling.
    for i in range(60 * 10000):
        time.sleep(.0001)
        if i > 0 and i % 10000 == 0:
            mx.log(
                "Sent {:d} requests so far but did not receive a response with code 200 yet."
                .format(i))
        try:
            res = urlopen(url)
            responseCode = res.getcode()
            if responseCode == 200:
                startTime = mx.get_last_subprocess_start_time()
                finishTime = datetime.datetime.now()
                msToFirstResponse = (finishTime - startTime).total_seconds() * 1000
                bmSuite.timeToFirstResponseOutput = "First response received in {} ms".format(
                    msToFirstResponse)
                mx.log(bmSuite.timeToFirstResponseOutput)
                return
            # Log at most 10 unexpected response codes to avoid flooding output.
            if receivedNon200Responses < 10:
                mx.log("Received a response but it had response code " +
                       str(responseCode) + " instead of 200")
            elif receivedNon200Responses == 10:
                mx.log(
                    "No more response codes will be printed (already printed 10 response codes)"
                )
            receivedNon200Responses += 1
        except IOError:
            pass
    mx.abort(
        "Failed measure time to first response. Service not reachable at " + url)
def get_ip_list(url):
    """Scrape proxy "ip:port" pairs from the <tr>/<td> table at *url* and
    return only those proxies that can successfully fetch *url* back.

    NOTE(review): as the original comment warned, a proxy that works during
    this probe may still fail later, and vice versa — the check is best-effort.
    """
    web_data = requests.get(url)
    print("web_data:", web_data)
    soup = BeautifulSoup(web_data.text, 'lxml')
    ips = soup.find_all('tr')
    ip_list = []
    # Row 0 is the table header; columns 1 and 2 hold the IP and the port.
    for i in range(1, len(ips)):
        tds = ips[i].find_all('td')
        ip_list.append(tds[1].text + ':' + tds[2].text)
    print("ip_list:", ip_list)
    # Probe each proxy and keep the working ones. Two fixes vs. the original:
    # 1) it mutated ip_list while iterating it, which silently skips entries;
    # 2) it called the urllib module object with a py2-only `proxies=` kwarg —
    #    use `requests` (already imported by this file) for the probe instead.
    usable = []
    for ip in ip_list:
        proxy_temp = {"https": "https://" + ip}
        try:
            requests.get(url, proxies=proxy_temp, timeout=5)
            usable.append(ip)
        except Exception:
            continue
    return usable
def getSession(external):
    """Create (or reuse) an analytics session for the current Flask request.

    A new session id is the MD5 of the minute-truncated timestamp plus the
    client IP. For new sessions the HTTP referer is classified (Organic /
    App / Twitter / None / Direct) and the row is persisted via
    create_session(); for existing sessions only `sessionID` is refreshed.

    NOTE(review): relies on module-level Flask `session`/`request` and the
    user* geo/agent globals populated elsewhere in this module.
    """
    global sessionID
    # Fix: the original called the urllib module object on the referer URL;
    # `.netloc` is read below, so `urlparse` is the intended call.
    from urllib.parse import urlparse

    # NOTE(review): this local deliberately shadows any imported `time` module.
    time = (datetime.now().replace(microsecond=0)).replace(second=0)
    if 'user' not in session:
        lines = (str(time) + userIP).encode('utf-8')
        session['user'] = hashlib.md5(lines).hexdigest()
        sessionID = session['user']
        if external:
            referer = None
        else:
            try:
                if request.headers['referer'] != 'android-app://nl.newapp.app':
                    temp = urlparse(str(request.headers['referer']))
                    # Take the second-level label of the referring host,
                    # e.g. "www.google.com" -> "google", "google.com" -> "google".
                    if len(temp.netloc.split(".")) > 2:
                        domain = temp.netloc.split(".")[1]
                    else:
                        domain = temp.netloc.split(".")[0]
                    referer = str(domain)[0].upper() + str(domain)[1:]
                    # Collapse search engines into a single "Organic" bucket.
                    if referer == 'Google' or referer == 'Bing' or referer == 'Yandex' or referer == 'Duckduckgo':
                        referer = 'Organic'
                    # Own domain / country-code labels carry no information.
                    if referer == 'nl' or referer == 'Newapp':
                        referer = None
                    if referer == 'T':
                        referer = 'Twitter'
                else:
                    referer = 'App'
            except Exception:  # best-effort: missing/odd referer header -> direct visit
                referer = 'Direct'
        data = [
            userIP, userContinent, userCountry, userCity, userOS, userBrowser,
            sessionID, time, bot, str(userLanguage).lower(), referer, iso_code
        ]
        create_session(data)
    else:
        sessionID = session['user']
import os
import urllib
from urllib.request import urlopen

import requests
import urllib3


def savepng(index):
    """Download 500 captcha images and save them as pngsource/<i+index>.png.

    NOTE(review): relies on a module-level `pngsource` directory name; the
    hard-coded path prefix 'pngsource/' is kept to match the original output.
    """
    if not os.path.exists(pngsource):
        os.mkdir(pngsource)
    for i in range(500):
        # Fix: the original called the urllib module object itself.
        u = urlopen('http://29.149.128.186:8850/invest/api/kaptcha/image')
        data = u.read()
        png_path = 'pngsource/%d.png' % (i + int(index))
        # Fix: the original opened `pngsource` (the directory path) instead of
        # `png_path`, clobbering one file 500 times and never saving per-image.
        with open(png_path, 'wb') as f:
            f.write(data)
import urllib
from urllib.parse import urlencode
from urllib.request import Request, urlopen

# Submit the registration form to NetTutor and read back the response page.
url = 'http://www.nettutor.com/nt/reg.cgi'
values = {'Access Code' : '111 111 1111',
          'Create a User ID' : 'x86',
          'Email' : '*****@*****.**',
          'Retype Email' : '*****@*****.**',
          'First name' : 'Pedro Lopes',
          'School/Organization' : 'aaa' }
print('macacovelho')
# Fixes vs. the original: it overwrote `url` with the urlencoded form,
# encoded an undefined name `data`, and called the urllib module object.
# Encode the form values into the POST body instead.
data = urlencode(values).encode('utf-8')  # some bytes
req = Request(url, data)
response = urlopen(req)
the_page = response.read()
def getHtml(url):
    """Fetch *url* and return the raw response body as bytes.

    Fixes vs. the original: it called the urllib module object itself
    (TypeError), and never closed the response — use urlopen as a context
    manager so the connection is always released.
    """
    from urllib.request import urlopen

    with urlopen(url) as webPage:
        return webPage.read()
import json
import urllib
from urllib.request import urlopen

# Fetch the current weather for Copenhagen ("hent vejret for Koebenhavn").
url = 'http://api.openweathermap.org/data/2.5/weather?q=Copenhagen,dk'
# NOTE(review): the original's only live statement was `print(urllib(dir))`,
# which calls the urllib module object and crashes; the commented-out lines
# showed the intended fetch-and-parse flow, restored here.
response = urlopen(url)
# Parse the JSON result ("parse JSON resultatet").
data = json.load(response)
print('Weather in Copenhagen:', data['weather'][0]['description'])
# NOTE(review): this span is garbled/masked — credentials and the opening of
# the params dict were redacted to "*****", leaving unbalanced quotes and two
# statements fused together. It appears to be a Kivy UrlRequest POST of a
# contact form (urlencoded body, JSON Accept header, `bug_posted` success
# callback) to a local API. Recover the original source before editing;
# kept byte-identical here because a safe reconstruction is not possible.
req = UrlRequest('http://*****:*****@gmail.com", "country": "Bangladesh", "phone": "8801727309106", "address": "Dhaka", "image": "/media/Images/Screenshot.png" }) headers = { 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'application/json' } req = UrlRequest('http://localhost:8000/api/v1/contact/', on_success=bug_posted, req_body=params, req_headers=headers)