def __call__(self, req):
    obj = None
    try:
        (version, account, container, obj) = \
            split_path(req.path_info, 4, 4, True)
    except ValueError:
        # not an object request
        pass
    if obj and req.method == 'PUT':
        # create a POST request with obj name as body, targeting the DAG trigger endpoint
        webhook = URL + ENDPOINT_PATH + "/dags/" + DAG_TO_TRIGGER + "/dag_runs"
        payload = {"conf": {"swift_id": obj,
                            "swift_container": container,
                            "swift_user": account,
                            "swift_version": version}}
        # urllib2 expects bytes for the request body, so serialise the payload
        webhook_req = urllib2.Request(
            webhook, data=json.dumps(payload).encode('utf-8'))
        with Timeout(20):
            try:
                urllib2.urlopen(webhook_req).read()
            except (Exception, Timeout):
                self.logger.exception('failed POST to webhook %s' % webhook)
            else:
                self.logger.info('successfully called webhook %s' % webhook)
    return self.app
def generate_request(self, verb, url, headers, post_data=None):
    try:
        req = request.Request(
            url,
            data=post_data.encode("utf-8") if post_data is not None else None,
            headers=headers,
            method=verb)
        if self.proxy:
            req.set_proxy(self.proxy_config, urllib.parse.urlparse(url).scheme)
            response = request.urlopen(req, timeout=60, context=self.create_ctx())
        else:
            response = request.urlopen(req, timeout=60, context=self.create_ctx())
        self.status_code.append(int(response.code))
    except error.HTTPError as e:
        self.status_code.append(int(e.code))
    except error.URLError as e:
        self.sns_logger(status_codes={}, exception=str(e.reason),
                        subject="Grizzly Error")
    except Exception:
        import traceback
        self.sns_logger(status_codes={}, exception=str(traceback.format_exc()),
                        subject="Grizzly Error")
        print('generic exception: ' + traceback.format_exc())
def scout_server_type(self, host):
    """
    Obtain Server header by calling OPTIONS.

    :param host: host to check
    :returns: Server type, status
    """
    try:
        url = "http://%s:%s/" % (host[0], host[1])
        req = urllib2.Request(url)
        req.get_method = lambda: 'OPTIONS'
        conn = urllib2.urlopen(req)
        header = conn.info().getheader('Server')
        server_header = header.split('/')
        content = server_header[0]
        status = 200
    except urllib2.HTTPError as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = err.code
    except (urllib2.URLError, socket.timeout) as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = -1
    return url, content, status
def _source(url, error_code=None):
    response = request.urlopen(url)
    if response.status != 200:
        if not error_code:
            return 0
        return error_code
    return response
def _get_proxy(kw):
    """Fetch proxy IPs from the given URLs and from the built-in scrapers."""
    urls = kw.pop("urls", [])
    tb_temp = get_tb_temp()
    if urls:
        for url in urls:
            try:
                res = request.urlopen(url).read().decode()
                for ip in res.split('\r\n'):
                    try:
                        ip, port = ip.split(':')
                        _type = 'http'
                        tb_temp.push((ip, port, _type))
                    except ValueError:
                        pass
                proxy_check_tmp(request.urlparse(url).netloc)
            except Exception:
                pass
    func = [get_xici_proxy, get_cn_proxy, get_kuaidaili_proxy]
    for f in func:
        try:
            res = f()
        except Exception:
            pass
def _not_async_source(url, error_code=None):
    response = request.urlopen(url)
    if response.status != 200:
        if not error_code:
            return 0
        return error_code
    return response.read().decode()
def fetch(url):
    stt = time.time()
    print("opening", url)
    body = request.urlopen(url).read()
    print("done with", url)
    print('-----> Time func:', time.time() - stt, 'sec')
    return url, body
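# A minimal concurrency sketch (an assumption, not part of the snippet above):
# drive fetch() through an eventlet GreenPool so the green urllib requests
# actually overlap instead of running one after another. The URL list is
# hypothetical.
import eventlet

urls = ['https://www.python.org/', 'https://eventlet.net/']
pool = eventlet.GreenPool(size=10)
for url, body in pool.imap(fetch, urls):
    print(url, len(body))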
def scout_host(self, base_url, recon_type):
    """
    Perform the actual HTTP request to obtain swift recon telemetry.

    :param base_url: the base url of the host you wish to check.
        str of the format 'http://127.0.0.1:6200/recon/'
    :param recon_type: the swift recon check to request.
    :returns: tuple of (recon url used, response body, and status)
    """
    url = base_url + recon_type
    try:
        body = urllib2.urlopen(url, timeout=self.timeout).read()
        content = json.loads(body)
        if self.verbose:
            print("-> %s: %s" % (url, content))
        status = 200
    except urllib2.HTTPError as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = err.code
    except (urllib2.URLError, socket.timeout) as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = -1
    return url, content, status
def scout_host(self, base_url, recon_type):
    """
    Perform the actual HTTP request to obtain swift recon telemetry.

    :param base_url: the base url of the host you wish to check.
        str of the format 'http://127.0.0.1:6200/recon/'
    :param recon_type: the swift recon check to request.
    :returns: tuple of (recon url used, response body, and status)
    """
    url = base_url + recon_type
    try:
        body = urllib2.urlopen(url, timeout=self.timeout).read()
        content = json.loads(body)
        if self.verbose:
            print("-> %s: %s" % (url, content))
        status = 200
    except urllib2.HTTPError as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = err.code
    except (urllib2.URLError, socket.timeout) as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = -1
    return url, content, status
def getResultInfo(studentID, termID):
    start = time.time()
    RESULTINFO_METHOD = 'ACTIONQUERYSTUDENTSCOREBYSTUDENTNO.APPPROCESS?mode=2'
    postData = parse.urlencode([('YearTermNO', termID),
                                ('EndYearTermNO', termID),
                                ('ByStudentNO', studentID)]).encode('utf-8')
    reqData = request.Request(loginer.BASE_URL + RESULTINFO_METHOD,
                              data=postData, headers={'Cookie': cookie})
    response = request.urlopen(reqData).read().decode('GBK')
    page = BeautifulSoup(response, 'html.parser')
    db = MySQLdb.connect(charset='utf8', host='localhost', user='******',
                         passwd='PASSWORD', db='sabrina')
    for row in page.find_all('table')[4].find_all('tr')[1:]:
        rowList = row.find_all('td')
        if rowList[0].string is None:
            break
        rowList = list(map(lambda x: str(x.string), rowList))
        dataList = []
        md5encoder = hashlib.md5()
        md5encoder.update((studentID + ''.join(rowList)).encode('utf-8'))
        dataList.append(md5encoder.hexdigest())
        dataList.append(studentID)
        dataList.append(str(termID))
        dataList += rowList[1:7] + rowList[-2:]
        if dataList[-1] == 'None':
            dataList[-1] = 'NULL'
        print(dataList)
        db.query('''INSERT INTO result_data
                    (hash_id, student_id, term, course_id, course_name,
                     course_type, school_hour, credit, exam_type, result, point)
                    VALUES ('%s', '%s', %s, '%s', '%s', '%s', %s, %s, '%s', '%s', %s)'''
                 % tuple(dataList))
    db.commit()
    db.close()
    finish = time.time()
    print(finish - start)
def external_assert_status_code_200(self, url):
    try:
        with urlopen(url) as resp:
            self.assertEqual(resp.getcode(), 200, url)
            return True
    except:
        print(url)
        raise
def fetch(self, url, header):
    # Fetch HTTP page with eventlet pool
    response = ''
    with Timeout(60, False):
        req = Request(url, None, header)
        response = urlopen(req).read()
        response = response.decode()
    return response
def get_auth(url, user, key, auth_version="1.0", **kwargs):
    if auth_version != "1.0":
        exit("ERROR: swiftclient missing, only auth v1.0 supported")
    req = urllib2.Request(url)
    req.add_header("X-Auth-User", user)
    req.add_header("X-Auth-Key", key)
    conn = urllib2.urlopen(req)
    headers = conn.info()
    return (headers.getheader("X-Storage-Url"),
            headers.getheader("X-Auth-Token"))
def __call__(self, env, start_response):
    req = Request(env)
    resp = req.get_response(self.app)
    self.logger.info("Serverless: available headers: {}".format(
        str(dict(req.headers))))
    try:
        if "X-Function-URL" in req.headers:
            version, account, container, obj = split_path(
                req.path_info, 4, 4, True)
            self.logger.info(
                "Serverless: version {}, account {}, container {}, object {}"
                .format(version, account, container, obj))
            if obj and is_success(resp.status_int) and req.method == 'PUT':
                webhook = req.headers.get("X-Function-URL")
                data = json.dumps({
                    "x-auth-token": req.headers.get("X-Auth-Token"),
                    "version": version,
                    "account": account,
                    "container": container,
                    "object": obj,
                    "project_id": req.headers.get("X-Project-Id"),
                })
                self.logger.info(
                    "Serverless: data to send to a function {}".format(
                        str(data)))
                data_as_bytes = data.encode('utf-8')
                webhook_req = urllib2.Request(webhook, data=data_as_bytes)
                webhook_req.add_header('Content-Type', 'application/json')
                webhook_req.add_header('Content-Length', len(data_as_bytes))
                self.logger.info(
                    "Serverless: data to send as bytes {}".format(
                        data_as_bytes))
                with Timeout(60):
                    try:
                        result = urllib2.urlopen(webhook_req).read()
                        self.logger.info(
                            "Serverless: function worked fine. Result {}"
                            .format(str(result)))
                    except (Exception, Timeout) as ex:
                        self.logger.error(
                            'Serverless: failed POST to webhook {}, '
                            'error {}'.format(webhook, str(ex)))
        else:
            self.logger.info("Serverless: skipping functions middleware "
                             "due to absence of function URL")
    except ValueError:
        # not an object request
        pass
    return self.app(env, start_response)
def get_auth(url, user, key, auth_version='1.0', **kwargs):
    if auth_version != '1.0':
        exit('ERROR: swiftclient missing, only auth v1.0 supported')
    req = urllib2.Request(url)
    req.add_header('X-Auth-User', user)
    req.add_header('X-Auth-Key', key)
    conn = urllib2.urlopen(req)
    headers = conn.info()
    return (headers.getheader('X-Storage-Url'),
            headers.getheader('X-Auth-Token'))
def check_cpstorage(urldata):
    url, data = urldata
    try:
        info = urlopen(url, timeout=1).read()
        if b'<friendlyName>Athena</friendlyName>' in info:
            data['services'] = ['service:thinkagile-storage']
            return data
    except Exception:
        pass
    return None
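# Hedged usage sketch for check_cpstorage(): probe several candidate SSDP
# description URLs concurrently with an eventlet GreenPool. The addresses
# below are placeholders, not taken from the original code.
import eventlet

candidates = [('http://192.0.2.10/desc.tmpl', {}),
              ('http://192.0.2.11/desc.tmpl', {})]
pool = eventlet.GreenPool()
for result in pool.imap(check_cpstorage, candidates):
    if result:
        print('found storage service:', result['services'])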
def get_auth(url, user, key, auth_version='1.0', **kwargs):
    if auth_version != '1.0':
        exit('ERROR: swiftclient missing, only auth v1.0 supported')
    req = urllib2.Request(url)
    req.add_header('X-Auth-User', user)
    req.add_header('X-Auth-Key', key)
    conn = urllib2.urlopen(req)
    headers = conn.info()
    return (
        headers.getheader('X-Storage-Url'),
        headers.getheader('X-Auth-Token'))
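# Hedged usage sketch for the get_auth() helpers above; the auth endpoint and
# the credentials are placeholder values, not taken from the original snippets.
storage_url, token = get_auth('http://127.0.0.1:8080/auth/v1.0',
                              'test:tester', 'testing')
req = urllib2.Request(storage_url + '?format=json')
req.add_header('X-Auth-Token', token)
print(urllib2.urlopen(req).read())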
def get_image_links(name, link):
    for char in replace_chars:
        name = name.replace(char, "")
    if os.path.isdir(download_directory + name):
        return
    if len(link) < 20:
        link = "http://www.imagefap.com/gallery.php?gid=" + link + "&view=2"
    os.mkdir(download_directory + name)
    image_links = [[]]
    images = BeautifulSoup(request.urlopen(link).read(),
                           'html.parser').find_all('img')
    image_count = 0
    image_index = 0
    batch_index = 0
    for image in images:
        if image.attrs.get('alt') and image.attrs.get('height'):
            if image_index >= max_threads:
                image_index = 0
                image_links.append([])
                batch_index += 1
            image_id = re.search(image_id_pattern,
                                 str(image.parent['href'])).group(1)
            image_links[batch_index].append(
                "http://imagefap.com/photo/{}".format(image_id))
            image_index += 1
            image_count += 1
    print("\nGallery: {}. Images: {}".format(name.strip(), image_count), end="")
    for batch in range(len(image_links)):
        download_links = []
        download_locations = []
        for page in pool.imap(fetch, image_links[batch]):
            image_link = re.search(image_link_pattern, str(page)).group(1)
            file_type = re.search(image_type_pattern, image_link).group(1)
            index = str(int(page.find(id="navigation")['data-idx']) + 1)
            download_links.append(image_link)
            download_locations.append("{}{}/{}{}".format(
                download_directory, name, index, file_type))
        for index in pool.imap(download, download_links, download_locations):
            print("\rGallery: {}\t\tDownloaded: {} of {} images".format(
                name.strip(), index, image_count), end="")
    print('\n')
def fetch(url, seen, pool):
    """Fetch a url, stick any found urls into the seen set, and dispatch
    any new ones to the pool."""
    print("fetching", url)
    data = ''
    with eventlet.Timeout(5, False):
        data = urlopen(url).read().decode()
    for url_match in url_regex.finditer(data):
        new_url = url_match.group(0)
        # only send requests to eventlet.net so as not to destroy the internet
        if new_url not in seen and 'eventlet.net' in new_url:
            seen.add(new_url)
            # while this seems stack-recursive, it's actually not:
            # spawned greenthreads start their own stacks
            pool.spawn_n(fetch, new_url, seen, pool)
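# A minimal driver sketch for the recursive fetch() above, assuming an
# eventlet GreenPool; 'start_url' is a hypothetical entry point.
import eventlet

def crawl(start_url):
    pool = eventlet.GreenPool()
    seen = set()
    seen.add(start_url)
    pool.spawn_n(fetch, start_url, seen, pool)
    pool.waitall()
    return seen

seen = crawl('http://eventlet.net')
print('visited %d urls' % len(seen))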
def _base_method(url, data=None, headers=None, origin_req_host=None,
                 unverifiable=False, method=None, decode='utf-8'):

    def _caller(stack_number):
        try:
            info = inspect.stack()[stack_number]
            caller_func = info[3]
            caller_inst = info[0].f_locals.get('self')
            if caller_inst:
                return str(caller_inst) + ' ' + caller_func
            else:
                return str(caller_func)
        except:
            return str('can not find caller')

    if headers is None:
        headers = {}
    req = request.Request(
        url=url, data=data, headers=headers,
        origin_req_host=origin_req_host, unverifiable=unverifiable,
        method=method)
    rval = None
    try:
        with hub.Timeout(_DEFAULT_TIME_OUT, True):
            response = request.urlopen(req)
            message = response.read()
            msg_dec = message.decode(decode)
            rval = json.loads(msg_dec)
    except hub.Timeout:
        LOG.error({'method': _caller(2), 'caller': _caller(3),
                   'url': url, 'timeout': str(_DEFAULT_TIME_OUT)})
    except error.HTTPError as e:
        LOG.error({'method': _caller(2), 'caller': _caller(3),
                   'url': url,
                   'error': {'code': e.code, 'msg': e.msg}})
    except:
        LOG.error({'method': _caller(2), 'caller': _caller(3),
                   'url': url, 'error': 'uncovered error'})
    return rval
def get_url_title(url, verbose=False):
    soup = None
    try:
        with Timeout(TIMEOUT, False):
            response = urlopen(url)
            if 'text/html' not in response.getheader('Content-Type'):
                warnings.warn("Url {} is not a text/html page".format(url))
                return ''
            soup = BeautifulSoup(response, "lxml")
    except Exception:
        if verbose:
            warnings.warn("Couldn't extract title from url {}".format(url))
        return ''
    if soup is None or soup.title is None or soup.title.string is None:
        return ''
    return soup.title.string
def get_cn_proxy():
    """Scrape the proxy IPs published by cn-proxy.com."""
    tb_temp = get_tb_temp()
    url = "http://cn-proxy.com/"
    html = request.urlopen(url).read()
    tree = etree.HTML(html)
    tb = tree.xpath('//tr')
    for item in tb:
        node = item.xpath('td')
        if node and node[0].text.find('.') > 0:
            ip = node[0].text
            port = node[1].text
            _type = 'http'
            tb_temp.push((ip, port, _type))
    proxy_check_tmp('cn-proxy.com')
def get_kuaidaili_proxy():
    """Scrape the proxy IPs published by kuaidaili.com."""
    tb_temp = get_tb_temp()
    for i in range(1, 11):
        url = "http://www.kuaidaili.com/proxylist/%s/" % i
        html = request.urlopen(url).read()
        tree = etree.HTML(html)
        tb = tree.xpath('//tr')
        for item in tb:
            node = item.xpath('td')
            if node and node[0].text.find('.') > 0:
                ip = node[0].text
                port = node[1].text
                _type = 'http'
                tb_temp.push((ip, port, _type))
    proxy_check_tmp('kuaidaili.com')
def RestAPIStatusCheck(self, RestAPIPort, IP):
    from eventlet.green.urllib import request
    try:
        import socket
        if IP == "0.0.0.0":
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect(('10.255.255.255', 1))
            IP = s.getsockname()[0]
        conn = request.urlopen(f'http://{IP}:{RestAPIPort}/Alive')
        if conn.code == 200:
            return "on"
        else:
            return "off"
    except:
        return "off"
def getMemberInfo(classID='1404052'):
    CLASSINFO_METHOD = 'ACTIONQUERYCLASSSTUDENT.APPPROCESS?mode=2&query=1'
    postData = parse.urlencode([('ClassNO', classID)]).encode('utf-8')
    reqData = request.Request(loginer.BASE_URL + CLASSINFO_METHOD,
                              data=postData, headers={'Cookie': cookie})
    response = request.urlopen(reqData).read().decode('GBK')
    page = BeautifulSoup(response, 'html.parser')
    studentList = []
    studentList.append(list(map(
        lambda x: str(x.string),
        page.find_all('table')[1].find_all('td', recursive=False)[1:4])))
    db = MySQLdb.connect(charset='utf8', host='localhost', user='******',
                         passwd='PASSWORD', db='sabrina')
    for row in page.find_all('table')[1].find_all('tr')[1:-1]:
        rowData = row.find_all('td', recursive=False)[1:4]
        rowList = list(map(lambda x: str(x.string), rowData))
        studentList.append(rowList)
        db.query('INSERT INTO student (id, name, sex) '
                 'VALUES (\'%s\', \'%s\', \'%s\')' % tuple(rowList))
    db.commit()
    db.close()
    return studentList
def sanity_check(self, client, computed_requests):
    '''
    This method checks that the sanity_check_url provides a 200 status
    code. If the sanity check fails, the application exits.
    '''
    req = request.Request(client, headers=self.computed_requests["headers"][0])
    response = request.urlopen(req, timeout=60, context=self.create_ctx())
    if response.code != 200:
        self.sns_logger(status_codes={}, exception=str(response.code),
                        subject="Grizzly Sanity Check Failed", url=client)
        raise RuntimeError("Sanity check failed with status %s" % response.code)
    else:
        self.sns_logger(status_codes={}, exception=str(response.code),
                        subject="Grizzly Sanity Check Passed", url=client)
        print('Sanity check passed: 200 OK')
        return True
from eventlet.green.urllib.request import urlopen

big_list_of_feeds = """
http://blog.eventlet.net/feed/
http://rss.slashdot.org/Slashdot/slashdot
http://feeds.boingboing.net/boingboing/iBag
http://feeds.feedburner.com/RockPaperShotgun
http://feeds.penny-arcade.com/pa-mainsite
http://achewood.com/rss.php
http://raysmuckles.blogspot.com/atom.xml
http://rbeef.blogspot.com/atom.xml
http://journeyintoreason.blogspot.com/atom.xml
http://orezscu.blogspot.com/atom.xml
http://feeds2.feedburner.com/AskMetafilter
http://feeds2.feedburner.com/Metafilter
http://stackoverflow.com/feeds
http://feeds.feedburner.com/codinghorror
http://www.tbray.org/ongoing/ongoing.atom
http://www.zeldman.com/feed/
http://ln.hixie.ch/rss/html
"""

url = 'http://localhost:9010/'
result = urlopen(url, big_list_of_feeds)
print(result.read())
def base_request(self, method, container=None, name=None, prefix=None,
                 headers=None, proxy=None, contents=None, full_listing=None,
                 logger=None, additional_info=None, timeout=None, marker=None):
    # Common request method
    trans_start = time()
    url = self.url

    if full_listing:
        info, body_data = self.base_request(
            method, container, name, prefix, headers, proxy,
            timeout=timeout, marker=marker)
        listing = body_data
        while listing:
            marker = listing[-1]['name']
            info, listing = self.base_request(
                method, container, name, prefix, headers, proxy,
                timeout=timeout, marker=marker)
            if listing:
                body_data.extend(listing)
        return [info, body_data]

    if headers is None:
        headers = {}

    if self.token:
        headers['X-Auth-Token'] = self.token

    if container:
        url = '%s/%s' % (url.rstrip('/'), quote(container))

    if name:
        url = '%s/%s' % (url.rstrip('/'), quote(name))
    else:
        params = ['format=json']
        if prefix:
            params.append('prefix=%s' % prefix)
        if marker:
            params.append('marker=%s' % quote(marker))
        url += '?' + '&'.join(params)

    req = urllib2.Request(url, headers=headers, data=contents)
    if proxy:
        proxy = urllib.parse.urlparse(proxy)
        req.set_proxy(proxy.netloc, proxy.scheme)
    req.get_method = lambda: method
    conn = urllib2.urlopen(req, timeout=timeout)
    body = conn.read()
    info = conn.info()
    try:
        body_data = json.loads(body)
    except ValueError:
        body_data = None
    trans_stop = time()
    if logger:
        sent_content_length = 0
        for n, v in headers.items():
            nl = n.lower()
            if nl == 'content-length':
                try:
                    sent_content_length = int(v)
                    break
                except ValueError:
                    pass
        logger.debug("-> " + " ".join(
            quote(str(x) if x else "-", ":/")
            for x in (
                strftime('%Y-%m-%dT%H:%M:%S', gmtime(trans_stop)),
                method,
                url,
                conn.getcode(),
                sent_content_length,
                info['content-length'],
                trans_start,
                trans_stop,
                trans_stop - trans_start,
                additional_info)))
    return [info, body_data]
def fetch(url):
    return urlopen(url).read()
def fetch(url):
    return BeautifulSoup(request.urlopen(url).read(), 'html.parser')
def _find_service(service, target):
    net4 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    net6 = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
    net6.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
    if target:
        addrs = socket.getaddrinfo(target, 1900, 0, socket.SOCK_DGRAM)
        for addr in addrs:
            host = addr[4][0]
            if addr[0] == socket.AF_INET:
                msg = smsg.format(host, service)
                if not isinstance(msg, bytes):
                    msg = msg.encode('utf8')
                net4.sendto(msg, addr[4])
            elif addr[0] == socket.AF_INET6:
                host = '[{0}]'.format(host)
                msg = smsg.format(host, service)
                if not isinstance(msg, bytes):
                    msg = msg.encode('utf8')
                net6.sendto(msg, addr[4])
    else:
        net4.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        for idx in util.list_interface_indexes():
            net6.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, idx)
            try:
                msg = smsg.format('[{0}]'.format(mcastv6addr), service)
                if not isinstance(msg, bytes):
                    msg = msg.encode('utf8')
                net6.sendto(msg, (mcastv6addr, 1900, 0, 0))
            except socket.error:
                # ignore interfaces without ipv6 multicast causing error
                pass
        for i4 in util.list_ips():
            if 'broadcast' not in i4:
                continue
            addr = i4['addr']
            bcast = i4['broadcast']
            net4.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF,
                            socket.inet_aton(addr))
            msg = smsg.format(mcastv4addr, service)
            if not isinstance(msg, bytes):
                msg = msg.encode('utf8')
            net4.sendto(msg, (mcastv4addr, 1900))
            msg = smsg.format(bcast, service)
            if not isinstance(msg, bytes):
                msg = msg.encode('utf8')
            net4.sendto(msg, (bcast, 1900))
    # SSDP by spec encourages responses to spread out over a 3 second interval,
    # hence we must be a bit more patient
    deadline = util.monotonic_time() + 4
    r, _, _ = select.select((net4, net6), (), (), 4)
    peerdata = {}
    while r:
        for s in r:
            (rsp, peer) = s.recvfrom(9000)
            neighutil.refresh_neigh()
            _parse_ssdp(peer, rsp, peerdata)
        timeout = deadline - util.monotonic_time()
        if timeout < 0:
            timeout = 0
        r, _, _ = select.select((net4, net6), (), (), timeout)
    for nid in peerdata:
        for url in peerdata[nid].get('urls', ()):
            if url.endswith('/desc.tmpl'):
                info = urlopen(url).read()
                if '<friendlyName>Athena</friendlyName>' in info:
                    peerdata[nid]['services'] = ['service:thinkagile-storage']
                    yield peerdata[nid]
def fetch(url):
    print("opening", url)
    body = urlopen(url).read()
    print("done with", url)
    return url, body
def fetch(url):
    return request.urlopen(url).read()
def fetch(text):
    content = request.urlopen(text).read()
    print("done : ", text)
    return text, content
def base_request(
    self,
    method,
    container=None,
    name=None,
    prefix=None,
    headers=None,
    proxy=None,
    contents=None,
    full_listing=None,
    logger=None,
    additional_info=None,
    timeout=None,
    marker=None,
):
    # Common request method
    trans_start = time()
    url = self.url

    if full_listing:
        info, body_data = self.base_request(
            method, container, name, prefix, headers, proxy,
            timeout=timeout, marker=marker)
        listing = body_data
        while listing:
            marker = listing[-1]["name"]
            info, listing = self.base_request(
                method, container, name, prefix, headers, proxy,
                timeout=timeout, marker=marker)
            if listing:
                body_data.extend(listing)
        return [info, body_data]

    if headers is None:
        headers = {}

    if self.token:
        headers["X-Auth-Token"] = self.token

    if container:
        url = "%s/%s" % (url.rstrip("/"), quote(container))

    if name:
        url = "%s/%s" % (url.rstrip("/"), quote(name))
    else:
        url += "?format=json"
        if prefix:
            url += "&prefix=%s" % prefix
        if marker:
            url += "&marker=%s" % quote(marker)

    req = urllib2.Request(url, headers=headers, data=contents)
    if proxy:
        proxy = urllib.parse.urlparse(proxy)
        req.set_proxy(proxy.netloc, proxy.scheme)
    req.get_method = lambda: method
    conn = urllib2.urlopen(req, timeout=timeout)
    body = conn.read()
    info = conn.info()
    try:
        body_data = json.loads(body)
    except ValueError:
        body_data = None
    trans_stop = time()
    if logger:
        sent_content_length = 0
        for n, v in headers.items():
            nl = n.lower()
            if nl == "content-length":
                try:
                    sent_content_length = int(v)
                    break
                except ValueError:
                    pass
        logger.debug(
            "-> " + " ".join(
                quote(str(x) if x else "-", ":/")
                for x in (
                    strftime("%Y-%m-%dT%H:%M:%S", gmtime(trans_stop)),
                    method,
                    url,
                    conn.getcode(),
                    sent_content_length,
                    info["content-length"],
                    trans_start,
                    trans_stop,
                    trans_stop - trans_start,
                    additional_info,
                )
            )
        )
    return [info, body_data]
def external_assert_status_code_200(self, url):
    with urlopen(url) as resp:
        self.assertEqual(resp.getcode(), 200)
        return True
def assert_status_code(self, url, code=200):
    with urlopen(url) as resp:
        self.assertEqual(resp.getcode(), code, url)
        return True
def get_and_assert_status_code(self, url, code=200):
    with urlopen(url) as resp:
        self.assertEqual(resp.getcode(), code, url)
        data = resp.read()
    return data
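# Hedged sketch of how the assertion helpers above might be used: it assumes
# they live on a hypothetical HTTPAssertionsMixin combined with
# unittest.TestCase, and the local server URL is a placeholder.
import unittest

class ExampleHTTPTest(HTTPAssertionsMixin, unittest.TestCase):
    base_url = 'http://127.0.0.1:8080'

    def test_root_returns_200(self):
        self.assert_status_code(self.base_url + '/', code=200)
        body = self.get_and_assert_status_code(self.base_url + '/')
        self.assertTrue(body)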