def handle_client(client_socket):
    """Log the client's request, answer with an ACK, and close the socket.

    Expects the peer to send a short ASCII payload whose last byte is a
    terminator (newline); that byte is stripped before logging.
    """
    request = client_socket.recv(1024)
    # BUG FIX: the original called request.decode("ASCII") and discarded the
    # result (a no-op statement); also use the idiomatic [:-1] slice instead
    # of request[:len(request) - 1].
    request = request[:-1]
    print ("[*] Received: \"%s\"" % request.decode("ASCII"))
    # send back a packet
    client_socket.send(b'ACK\n')
    print (client_socket.getpeername())
    client_socket.close()
def _transform_request(request):
    """Turn a raw request payload into a preprocessed (1, 224, 224, 3) tensor.

    The payload is either a direct image URL or a Slack-style form body
    containing a ``text=`` field with the URL; either way the image is
    downloaded and run through the Keras preprocessing pipeline.
    """
    path = unquote(request.decode('utf-8'))
    if path.startswith('http'):
        # Direct http example
        path = download_image(path)
    else:
        # Slack Label Example
        fields = path.split('&')
        print(fields)
        text_fields = [field for field in fields if field.startswith('text=')]
        if len(text_fields) > 0:
            # Strip the leading 'text=' prefix (5 chars) before downloading.
            path = download_image(text_fields[0][5:])
    print(path)
    loaded = image.load_img(path, target_size=(224, 224))
    tensor = image.img_to_array(loaded)
    tensor = np.expand_dims(tensor, axis=0)
    return preprocess_input(tensor)
def call(url):
    """Fetch *url* and return its body parsed as JSON.

    Single quotes in the payload are rewritten to double quotes before
    parsing (the upstream endpoint emits pseudo-JSON with single quotes).
    """
    req = urllib.request.Request(url)
    with contextlib.closing(urllib.request.urlopen(req)) as response:
        body = response.read()
    normalized = body.decode("utf-8").replace("'", '"')
    return json.loads(normalized)
def StockLastData(code):
    """Fetch the latest quote for Shanghai stock *code* from Sina.

    Returns the raw comma-separated quote fields with the leading name
    field dropped.  Per the hq.sinajs.cn layout the fields are: today's
    open, previous close, current price, high, low, bid/ask, volume
    (in shares), turnover (in yuan), the five bid and five ask levels
    (shares then price each), and finally the quote date and time.
    """
    import urllib.request
    url = 'http://hq.sinajs.cn/list=sh%s'
    payload = urllib.request.urlopen(url % (code)).read()
    return payload.decode('UTF-8', 'ignore').split(',')[1:]
def readF4M(bitrate, host, port):
    """Fetch the f4m manifest from the server and populate *bitrate*.

    On success, when *bitrate* is an empty list it is filled in place
    (ascending) with the bitrates advertised in the manifest.  On any
    failure the error is logged and *bitrate* is left untouched.
    """
    logging.info("reading f4M")
    request = b""
    try:
        res = urllib.request.urlopen(
            'http://{}:{}/vod/big_buck_bunny.f4m'
            .format(host, port)
        )
        if res.getcode() != 200:
            print(
                'f4m request failed, code {}'.format(res.getcode()),
                file=sys.stderr
            )
            logging.debug(
                'f4m request failed, code {}'.format(res.getcode())
            )
            return
        request = res.read()
    except Exception as ex:
        print('f4m request failed, exception: {}'.format(ex),
              file=sys.stderr)
        logging.debug('f4m request failed, exception: {}'.format(ex))
        return
    logging.info("reading f4m")
    logging.info(request.decode('utf-8'))
    if len(bitrate) == 0:
        rates = re.findall(b'bitrate="([0-9]+)"', request)
        logging.info("bitrates available: {}".format(rates))
        for rate in rates:
            bitrate.append(int(rate.decode('utf-8')))
            logging.info("bitrate append:{}".format(bitrate))
        # BUG FIX: the original did `bitrate = sorted(bitrate)`, which only
        # rebinds the local name — the caller's list stayed unsorted.  Sort
        # in place so the caller observes the sorted result.
        bitrate.sort()
    return
def parse(self, request):
    """Extract the HTTP method and URL from a raw request.

    *request* is the bytes of a request whose first line is
    ``METHOD URL ...``; returns the ``(method, url)`` pair.
    """
    tokens = request.decode('utf-8').split()
    return tokens[0], tokens[1]
def get_matches(word):
    """Query the Datamuse API for words that sound like *word*.

    Returns a list of matching words (possibly empty).
    """
    endpoint = "http://api.datamuse.com/words?sl={}".format(word)
    raw = urllib.request.urlopen(endpoint).read()
    matches = json.loads(raw.decode('UTF-8'))
    if not matches:
        return []
    return [entry["word"] for entry in matches]
def get_first_youtube_link(url):
    """Fetch *url* and return the first anchor href containing 'youtube'.

    Returns None when no such link exists; exits the process on any
    download error.
    """
    debug('Requesting post text ' + url + ' to look for youtube link... ')
    try:
        request = urllib.request.Request(url)
        request = urllib.request.urlopen(request).read()
    except Exception as e:
        error(e)
        sys.exit(1)
    # BUG FIX: this parsing used to sit in a `finally:` clause, so it also
    # ran after sys.exit(1) with `request` unbound, masking the intended
    # exit with a NameError.  Run it on the success path only.
    soup = BeautifulSoup(request.decode('utf-8'), "html.parser")
    for link in soup.find_all('a', href=True):
        if 'youtube' in str(link['href']):
            return str(link['href'])
def get_posts(url):
    """ Download Discourse last posts page in JSON format. """
    debug('Requesting url ' + url + ' ... ')
    try:
        request = urllib.request.Request(url)
        request = urllib.request.urlopen(request).read()
        content = json.loads(request.decode('utf-8'))
    except Exception as e:
        error(e)
        sys.exit(1)
    # BUG FIX: `return content` used to live in a `finally:` clause.  A
    # return from `finally` swallows the in-flight SystemExit, and on the
    # error path `content` was never bound, so callers got a NameError
    # instead of the intended exit.  Return on the success path only.
    return content
def get_enable_symboles_on_market(client, exchange_type):
    """Return the tradable symbol list for the given exchange.

    Binance symbols come from the client's product listing; KuCoin
    symbols are fetched from the public open-symbols endpoint.  Any other
    exchange type yields an empty list.
    """
    symbols = []
    if exchange_type == "Binance":
        for product in client.get_products()["data"]:
            symbols.append(product['symbol'])
    elif exchange_type == "KuCoin":
        raw = urllib.request.urlopen(
            'https://api.kucoin.com/v1/market/open/symbols').read()
        # The endpoint uses single quotes; normalize before JSON parsing.
        payload = json.loads(raw.decode('utf8').replace("'", '"'))
        for entry in payload["data"]:
            symbols.append(entry['coinType'])
    return symbols
def handle_request(request):
    """Route a raw HTTP request (bytes) to the matching handler.

    Only the ``/update/`` path is recognised; anything else yields a
    500-style error payload naming the unhandled URL.
    """
    decoded_request = request.decode("UTF-8")
    target = decoded_request.split(" ")[1]
    parsed = urllib.parse.urlparse(target)
    query = urllib.parse.parse_qs(parsed.query)
    if parsed.path == "/update/":
        return handle_update(query)
    return {
        "message": "Don't know how to handle request: " + target,
        "status": "500"
    }
def buildArchive(project):
    """Trigger a Crowdin export for the given project and report its status.

    Supported projects are "adblock" and "catblock"; anything else raises
    ValueError.  (Previously an unknown project fell through with `request`
    unbound and died with a NameError on the json.loads line.)
    """
    if project == "adblock":
        printBold("Building AdBlock language archive...")
        request = urllib.request.urlopen(AB_EXPORT_URL).read()
    elif project == "catblock":
        printBold("Building CatBlock language archive...")
        request = urllib.request.urlopen(CB_EXPORT_URL).read()
    else:
        raise ValueError("Unknown project: %s" % project)
    data = json.loads(request.decode("utf-8"))
    status = data["success"]["status"]
    if status == "built":
        print(" - Project has been built successfully")
    elif status == "skipped":
        print(" - Build has been skipped, previous is up-to-date or less than 30 minutes old")
def buildGetParams(self, request):
    """Parse the query-string parameters out of a raw GET request line.

    Accepts either str or bytes; returns a dict of decoded key/value
    pairs ('+' becomes space, percent-escapes are unquoted).  Fragments
    without exactly one '=' are skipped.
    """
    params = {}
    try:
        path = re.findall("^GET ([^\s]+)", request)
    except:
        # Bytes input: decode first, then match.
        path = re.findall("^GET ([^\s]+)", request.decode('utf-8'))
    if path:
        target = path[0]
        query_start = target.find("?")
        if query_start != -1:
            for fragment in target[query_start + 1:].split("&"):
                pieces = fragment.split("=")
                if len(pieces) == 2:
                    key, raw_value = pieces
                    raw_value = raw_value.replace("+", " ")
                    params[key] = urllib.parse.unquote(raw_value)
    return params
def get_single_post_text(url):
    """ Download Discourse specific post in JSON format. """
    debug('Requesting post text ' + url + ' ... ')
    try:
        request = urllib.request.Request(url)
        request = urllib.request.urlopen(request).read()
        content = json.loads(request.decode('utf-8'))
    except Exception as e:
        error(e)
        sys.exit(1)
    # BUG FIX: everything below used to live in a `finally:` clause, so it
    # also ran after sys.exit(1) with `content` unbound, masking the exit
    # with a NameError.  Run it on the success path only.
    post_text = clean_html(
        str(content['post_stream']['posts'][0]['cooked']))
    # removing \n on the text
    item = post_text.split('\n')
    item = " ".join(item)
    # TO-DO: removing multiple white spaces due the \n removal
    #
    # returning twitter style string
    return str(item[0:140] + ' (...)')
def give_stat(event_for_wait, event_for_set):
    # Serve miner statistics over TCP port 3333 (Claymore-style JSON-RPC).
    # Alternates with a peer thread via the two events; reads the globals
    # last_stat (latest stats snapshot) and Have_to_exit (shutdown flag),
    # plus pollPeriod / make_stat1 / make_stat2 defined elsewhere.
    global last_stat, Have_to_exit
    try:
        sock = socket.socket()
        sock.bind(("", 3333))
        sock.listen(1)
    except:
        # NOTE(review): on bind/listen failure this sets Have_to_exit to
        # False, which *enters* the serving loop below with an unusable
        # socket — looks inverted (True would skip the loop); confirm.
        Have_to_exit = False
    while not Have_to_exit:
        # Wait for our turn, then immediately re-arm the event.
        event_for_wait.wait()
        event_for_wait.clear()
        sock.settimeout(pollPeriod)
        data = None
        conn, addr = sock.accept()
        try:
            request = conn.recv(1000)
            if request:
                udata = request.decode("utf-8")
                packet = json.loads(udata)
                # Dispatch on the JSON-RPC method name.
                if packet["method"] == "miner_getstat1":
                    responce = make_stat1(last_stat)
                elif packet["method"] == "miner_getstat2":
                    responce = make_stat2(last_stat)
                elif packet["method"] in ("miner_restart", "miner_reboot"):
                    # Windows-style reboot.  NOTE(review): this branch never
                    # assigns `responce`, so the dumps/sendall below would
                    # raise NameError if the shutdown does not happen first.
                    os.system("shutdown /r /f /t 1")
                else:
                    # Unknown method: reply with an error-ish sentinel.
                    responce = {'{EQ': -1}
                buff = json.dumps(responce)
                conn.sendall(buff.encode('utf-8'))
        except socket.error as msg:
            # Best-effort: drop the connection on socket errors.
            pass
        finally:
            conn.close()
        # Hand control back to the peer thread.
        event_for_set.set()
# Fragment: this `elif` continues an `if os.name == ...` chain whose first
# branch (presumably the Windows case) is not visible in this chunk.
elif(os.name == "posix"):
    ip = ni.ifaddresses(ni.interfaces()[1])[ni.AF_INET][0]['addr'] #Get the machine's local IPv4 Address
    # Defaults to eth0 - ignoring
    # myLocalIP = socket.gethostbyname(socket.getfqdn())
# Pick a per-platform listening port.
if(os.name == "nt"):
    port = 9999
elif(os.name == "posix"):
    port = 9998
bindData = (ip, port)
#Get public facing IPv4 Address
url = "http://ip.42.pl/raw"
request = urllib.request.urlopen(url).read()
myPublicIP = request.decode("ASCII");
print("[!] Your public IP is: " + myPublicIP + ", and should be forwarded to local IP: %s on port: %d" % (bindData[0], bindData[1]))
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# allow listening despite TIME_WAIT sockets
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# bind to ip & port
server.bind(bindData)
server.listen(5)
print("[*] Listening on %s:%d" % (bindData[0], bindData[1]))
# this is our client handling thread
def handle_client(client_socket):
    # (function body continues beyond this chunk)
import socket, urllib.request # Config host = 'mydomain.ddns.net' user = '******' pasw = 'pass' ip = [(s.connect(('8.8.8.8', 80)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1] url = 'http://'+user+':'+pasw+'@dynupdate.no-ip.com/nic/update?hostname=' + host + '&myip=' + ip print('Starting DDNS %s @ %s...\n' % (host, ip)) # Process request = urllib.request.urlopen(url).read() response = str(request.decode('utf-8')) print('Status: ' + response) # Exception print('Status: Error!')
def get(self, request):
    """Serve one GET request for the cintruder web UI.

    Parses the request line, serves static assets, and for the /cmd_*
    control pages optionally builds a shell command line (runcmd) for the
    cintruder CLI.  Returns a dict with the command to run, HTTP status
    code, HTML body and content type.
    """
    cmd_options = ""
    runcmd = ""
    # Extract the request path; fall back to decoding when the request
    # arrives as bytes rather than str.
    try:
        res = re.findall("^GET ([^\s]+)", request)
    except:
        res = re.findall("^GET ([^\s]+)", request.decode('utf-8'))
    if res is None:
        return
    pGet = {}
    try:
        page = res[0]
    except:
        return
    # Split off the query string and parse it into pGet.
    paramStart = page.find("?")
    if paramStart != -1:
        page = page[:paramStart]
        pGet = self.buildGetParams(request)
    # Static assets: try reading as utf-8 text first, then fall back to
    # base64-encoding the raw bytes (images).  NOTE(review): the text-mode
    # file handle `f` is never closed.
    if page.startswith("/images/") or page.startswith("/js/") or page.startswith("/inputs/"):
        if os.path.exists("core/"+page[1:]):
            try:
                f=open("core/"+page[1:],'r',encoding="utf-8")
                data = f.read()
            except:
                try:
                    with open("core/"+page[1:],'rb') as img_f:
                        data = base64.b64encode(img_f.read()).decode('utf-8')
                except:
                    data = ""
            self.pages[page]=data
    if page == "/cmd_dict": # view dictionary info
        # Tally dictionary stats: top-level dirs are "words", files are
        # "symbols"; sum the on-disk size of everything.
        path, dirs, files = next(os.walk("dictionary/"))
        total_dirs = len(dirs)
        total_files = len(files)
        size = 0
        # NOTE(review): the loop variables `dirs`/`files` are rebound
        # inside the loop being iterated — works only because os.walk's
        # first result was unpacked, but fragile; confirm intent.
        for d in dirs:
            path, dirs, files = next(os.walk("dictionary/"+d))
            total_files = total_files + len(files)
            for f in files:
                size += os.path.getsize("dictionary/"+d+"/"+f)
        size = self.convert_size(size)
        last_update = time.ctime(os.path.getctime("dictionary/"))
        self.pages["/cmd_dict"] = "<table align='center' border='1' cellspacing='5' cellpadding='5'><tr><td><u>Creation Date:</u></td><td><u>Size:</u></td><td><u>Total Words:</u></td><td><u>Total Symbols:</u></td></tr><tr><td align='center'>"+str(last_update)+"</td><td align='center'>"+str(size)+"</td><td align='center'>"+str(total_dirs)+"</td><td align='center'>"+str(total_files)+"</td></tr></table>"
    if page == "/cmd_remove_ocr": # remove ocr image from previews
        if not pGet["symbol"]=="off":
            self.pages["/cmd_remove_ocr"] = "<div style='display:block' id='discarding' name='discarding'><pre>[Info] Discarding image from previews...</pre></div>"
            symbol = pGet["symbol"]
            try:
                os.remove("core/" + symbol)
            except:
                pass
    if page == "/cmd_move_ocr": # move ocr image from previews to dictionary
        if not pGet["symbol"]=="off":
            self.pages["/cmd_move_ocr"] = "<div style='display:block' id='adding' name='adding'><pre>[Info] Adding image from previews to dictionary...</pre></div>"
            symbol = pGet["symbol"]
            letter = pGet["letter"]
            o = "core/" + symbol
            d = "dictionary/" + letter
            try:
                if not os.path.exists(d):
                    os.makedirs(d)
                head, tail = os.path.split(symbol)
                final = d + "/" + tail
                copyfile(o, final) # copy file to letter on dictionary
                os.remove(o) # purge from previews
            except:
                pass
    # SECURITY NOTE(review): the /cmd_* handlers below interpolate
    # user-supplied GET parameters directly into shell command strings
    # (runcmd) — a shell-injection risk; parameters should be validated
    # or the commands built as argument lists.
    if page == "/cmd_list": # list mods
        self.pages["/cmd_list"] = "<pre>Waiting for a list of available modules...</pre>"
        runcmd = "(python -i cintruder --mods-list "+ "|tee /tmp/out) &"
    if page == "/cmd_list_update":
        # Poll the shared /tmp/out file for CLI output.
        if not os.path.exists('/tmp/out'):
            open('/tmp/out', 'w').close()
        with open('/tmp/out', 'r') as f:
            self.pages["/cmd_list_update"] = "<pre>"+f.read()+"<pre>"
    if page == "/cmd_track": # tracking
        self.pages["/cmd_track"] = "<pre>Waiting for tracking results...</pre>"
        if pGet["tor"]=="on":
            cmd_options = cmd_options + "--proxy 'http://localhost:8118' "
        if pGet["verbose"]=="on":
            cmd_options = cmd_options + "--verbose "
        runcmd = "(python -i cintruder --track '"+pGet["tracking_source"]+"' --track-num '"+pGet["tracking_num"]+"' " + cmd_options + "|tee /tmp/out) &"
    if page == "/cmd_track_update":
        if not os.path.exists('/tmp/out'):
            open('/tmp/out', 'w').close()
        with open('/tmp/out', 'r') as f:
            self.pages["/cmd_track_update"] = "<pre>"+f.read()+"<pre>"
    if page == "/cmd_tracklist": # list last tracks
        self.pages["/cmd_tracklist"] = "<pre>Waiting for a list of last tracks...</pre>"
        runcmd = "(python -i cintruder --tracked-list "+ "|tee /tmp/out) &"
    if page == "/cmd_tracklist_update":
        if not os.path.exists('/tmp/out'):
            open('/tmp/out', 'w').close()
        with open('/tmp/out', 'r') as f:
            self.pages["/cmd_tracklist_update"] = "<pre>"+f.read()+"<pre>"
    if page == "/cmd_train": # training
        self.pages["/cmd_train"] = "<pre>Waiting for training results...</pre>"
        if pGet["tor"]=="on":
            cmd_options = cmd_options + "--proxy 'http://localhost:8118' "
        if pGet["verbose"]=="on":
            cmd_options = cmd_options + "--verbose "
        if not pGet["colourID"]=="off":
            cmd_options = cmd_options + "--set-id='" + pGet["colourID"] + "' "
        if not pGet["module"]=="off":
            cmd_options = cmd_options + "--mod='" + pGet["module"] + "' "
        if pGet["source_file"]=="off": # from remote url source
            runcmd = "(python -i cintruder --train '"+pGet["train_url"]+"' " + cmd_options + "|tee /tmp/out) &"
        else: # from local source
            source_file = pGet["source_file"]
            runcmd = "(python -i cintruder --train '"+source_file+"' " + cmd_options + "|tee /tmp/out) &"
    if page == "/cmd_train_update":
        if not os.path.exists('/tmp/out'):
            open('/tmp/out', 'w').close()
        with open('/tmp/out', 'r') as f:
            self.pages["/cmd_train_update"] = "<pre>"+f.read()+"<pre>"
    if page == "/cmd_crack": # cracking
        self.pages["/cmd_crack"] = "<pre>Waiting for cracking (bruteforcing) results...</pre>"
        if pGet["tor"]=="on":
            cmd_options = cmd_options + "--proxy 'http://localhost:8118' "
        if pGet["verbose"]=="on":
            cmd_options = cmd_options + "--verbose "
        if not pGet["colourID"]=="off":
            cmd_options = cmd_options + "--set-id='" + pGet["colourID"] + "' "
        if not pGet["module"]=="off":
            cmd_options = cmd_options + "--mod='" + pGet["module"] + "' "
        if not pGet["xml"]=="off":
            cmd_options = cmd_options + "--xml='" + pGet["xml"] + "' "
        if pGet["source_file"]=="off": # from remote url source
            runcmd = "(python -i cintruder --crack '"+pGet["crack_url"]+"' " + cmd_options + "|tee /tmp/out) &"
        else: # from local source
            source_file = pGet["source_file"]
            runcmd = "(python -i cintruder --crack '"+source_file+"' " + cmd_options + "|tee /tmp/out) &"
    if page == "/cmd_crack_update":
        if not os.path.exists('/tmp/out'):
            open('/tmp/out', 'w').close()
        with open('/tmp/out', 'r') as f:
            self.pages["/cmd_crack_update"] = "<pre>"+f.read()+"<pre>"
    # Map the requested path's extension to a MIME type; default text/html.
    ctype = "text/html"
    if page.find(".js") != -1:
        ctype = "text/javascript"
    elif page.find(".txt") != -1:
        ctype = "text/plain"
    elif page.find(".ico") != -1:
        ctype = "image/x-icon"
    elif page.find(".png") != -1:
        ctype = "image/png"
    elif page.find(".jpeg") != -1:
        ctype = "image/jpeg"
    elif page.find(".jpg") != -1:
        ctype = "image/jpeg"
    elif page.find(".gif") != -1:
        ctype = "image/gif"
    if page in self.pages:
        return dict(run=runcmd, code="200 OK", html=self.pages[page], ctype=ctype)
    return dict(run=runcmd, code="404 Error", html="404 Error<br><br>Page not found...", ctype=ctype)
import socket, urllib.request # Config host = 'mydomain.ddns.net' user = '******' pasw = 'pass' ip = [(s.connect(('8.8.8.8', 80)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1] url = 'http://' + user + ':' + pasw + '@dynupdate.no-ip.com/nic/update?hostname=' + host + '&myip=' + ip print('Starting DDNS %s @ %s...\n' % (host, ip)) # Process request = urllib.request.urlopen(url).read() response = str(request.decode('utf-8')) print('Status: ' + response) # Exception print('Status: Error!')
#!/usr/bin/python3
"""Fetch the intranet status page and display the raw and utf8 body."""
import urllib.request

if __name__ == "__main__":
    with urllib.request.urlopen('https://intranet.hbtn.io/status') as response:
        body = response.read()
        body_utf8 = body.decode('utf8')
        print("Body response:")
        print("\t- type: {}".format(type(body)))
        print("\t- content: {}".format(body))
        print("\t- utf8 content: {}".format(body_utf8))
# Robot-control listener fragment: the socket `s` and the motor helpers
# (forwardDrive, forwardTurnRight, forwardTurnLeft, allStop) are defined
# outside this chunk, and the except/finally matching the `try:` below is
# also not visible here.
s.setblocking(1)
s.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
s.bind(('',81))
s.listen(10)
#t = (urllib.request.urlopen("http://tbau.000webhostapp.com/Photoshop Website/images/earth2.jpg").read())
try:
    c, addr = s.accept()
    print("Got connection from"+str(addr))
    while True:
        # Block for the next drive command from the client.
        request = c.recv(4096)
        if(request != None):
            # NOTE(review): recv() returns b"" (never None) on peer
            # disconnect, so this test is always true and a closed peer
            # busy-loops through allStop(); should probably break instead.
            #print ("Message: "+request.decode('utf-8'))
            # NOTE(review): binding the command to `str` shadows the
            # builtin str() used above — rename upstream.
            str=request.decode('utf-8')
            if(str != ""):
                print(str)
            # Dispatch the 4-character command to a motor action.
            if(str == "1000"):
                forwardDrive()
            elif(str == "1001"):
                forwardTurnRight()
            elif(str == "1010"):
                forwardTurnLeft()
            #if(str == "0100"):
            #    reverse()
            else:
                allStop()
            #request.decode is how we grab
    c.close()
# Fetch the JSONPlaceholder comments collection and show the first entry.
import json
import urllib.request, urllib.response, urllib.error

url = 'http://jsonplaceholder.typicode.com/comments'
request = urllib.request.urlopen(url).read()
print(request.decode())
# BUG FIX: json.load() expects a file-like object and raised on the str
# argument; json.loads() is the correct call for an in-memory string.
first = json.loads(request.decode())
print(first[0])
#!/usr/local/bin/python3
"""Send the command-line names to the CCSF nameserver CGI and print its reply."""
from sys import argv
import urllib.request
import urllib.parse

# Join the arguments on ':' — the /etc/passwd delimiter — so the joined
# value is unambiguous on the server side.
names = ':'.join(argv[1::])

# Build the query URL with the key-value pair.
url = 'http://hills.ccsf.edu/~pcliffor/cgi-bin/nameserver.py?name=' + names

# Ask the server and print the serverside output.
reply = urllib.request.urlopen(url).read()
print(reply.decode('utf-8'))
# Fragment: the `def is_anagram(str1, str2):` header for this return sits
# above this chunk; two strings are anagrams iff their character counts match.
    return Counter(str1) == Counter(str2)

is_anagram("abc", "bcd") # False
is_anagram("abc", "cba") # True

# check IP address
import urllib.request
import re

url = "http://checkip.dyndns.org"
request = urllib.request.urlopen(url).read()
# Pull the dotted-quad address out of the returned HTML.
the_ip = re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", request.decode('utf8'))
print("Your current IP Address is: {}".format(the_ip[0]))

#! This one-liner uses advanced slicing notation.
# data
some_values = [0, 1, 2, 3, 4, 5, 6]
# get sum over every second element
res = sum(some_values[::2])
print(res) #* 12

#! Read lines from a file and store them in a list
filename = 'the-zen-of-python.txt'
lines = [line.strip() for line in open(filename)]
def getApiLinkData(self, url):
    """Fetch *url* and return its JSON payload as a Python object.

    On an IncompleteRead the partial bytes are captured but the method
    falls through, implicitly returning None.
    """
    try:
        request = urllib.request.urlopen(url).read()
        return json.loads(request.decode('utf-8'))
    except http.client.IncompleteRead as e:
        # NOTE(review): the partial body is bound here but never parsed
        # or returned, so callers see None on truncation — looks like a
        # lost retry/parse step; confirm intent.
        request = e.partial
# Build the key/value payload for the CCSF message-server CGI from the
# command-line arguments: '-r' flips the read flag ('key2'), anything else
# is joined with spaces into the message text ('key').  ':' is the
# /etc/passwd delimiter, hence the space join is safe here.
nameDict = {'key': '', 'key2': 'false'}
if argv[1] == '-r':
    nameDict['key2'] = 'true'
else:
    nameDict['key'] = ' '.join(argv[1::])

# URL-encode the dictionary into query-string key-value pairs.
namePair = urllib.parse.urlencode(nameDict)

# Build the query URL and ask the server.
url = 'http://hills.ccsf.edu/~pcliffor/cgi-bin/messageserver.py?' + namePair

# Print the serverside output.
reply = urllib.request.urlopen(url).read()
print(reply.decode('utf-8'))
def __handleDataSock(self, data):
    """Open a forwarding data socket to the gateway for one client
    connection and serve HTTP requests arriving over it.

    *data* is the control-channel notification; its layout appears to be
    a 12-byte prefix, a 63-byte host auth token, then newline-separated
    fields (second line: client address, third: connection id) — confirm
    against the protocol spec.
    """
    REQ_MSG_DATASOCK = (
        "RAHOST DATASOCK\n"
        "-HID:{0}\n"
        "-AUTH:{1}\n"
        "-CID:{2}\n"
        "-SSLFWD\n"
        "-DONE\n"
    )
    # Auth token sits at a fixed offset/width in the control message.
    hostAuth = data[12:12+63].decode()
    buf = io.StringIO(data.decode())
    buf.readline()
    buf.readline() # clientAddr
    connectionId = buf.readline().rstrip()
    message = REQ_MSG_DATASOCK.format(self.hostId, hostAuth, connectionId)
    log.debug("Sending %s" % message)
    # create new connection
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_ssl = ssl.wrap_socket(sock)
    client_ssl.connect((self.gateway, 443))
    # send datasock request
    client_ssl.send(message.encode())
    while 1:
        # Read one request: keep receiving while chunks come back
        # full-sized (1024 bytes); a short chunk ends the read.
        chunks = []
        bytes_recv = 1024
        while bytes_recv == 1024:
            try:
                chunk = client_ssl.recv(1024)
            except socket.error as e:
                # Resets/refusals just end this read loop; anything else
                # propagates.
                if e.errno != socket.errno.ECONNRESET and e.errno != socket.errno.ECONNREFUSED:
                    raise
                break
            if chunk == b"":
                break
            chunks.append(chunk)
            bytes_recv = len(chunk)
        request = b"".join(chunks)
        if request == b"":
            # Peer closed without data: we're done with this data socket.
            client_ssl.close()
            return
        log.debug("Request: %s[...]" % request[:65])
        decoded = request.decode()
        firstPart = urllib.parse.unquote(decoded[:30])
        if firstPart.startswith("GET / HTTP") or firstPart.startswith("GET /default.html"):
            # Landing page: reuse the client's RASID cookie when present,
            # otherwise mint a fresh random session id.
            previousRaSid = re.findall('^Cookie:.*RASID=([A-Za-z0-9+/=]+)[;\n\r $]', decoded)
            if previousRaSid:
                raSid = previousRaSid[0]
            else:
                raSid = b64encode(os.urandom(40)).decode()
            response = self.config.welcomePage(raSid)
            log.debug("Sending %s" % response)
            try:
                client_ssl.send(response)
                client_ssl.close() # I think only the GET responses need to be closed right after the send
            except socket.error as e:
                log.info("Error during sending response: %d" % e.errno)
            client_ssl.close()
            return
        else:
            # Anything else is forwarded to the response builder.
            self.__createResponse(request, client_ssl)
            client_ssl.close()
            return
    return
# Discover this machine's public IP by scraping checkip.dyndns.org and
# poking at the returned HTML with BeautifulSoup in several ways.
import urllib.request
import re
from bs4 import BeautifulSoup

print("we will try to open this url, in order to get IP Address")
url = "http://checkip.dyndns.org"
print(url)
request = urllib.request.urlopen(url).read()
my_HTML = request.decode("utf8")
soup = BeautifulSoup(my_HTML, 'html.parser')
# The page's visible text is "Current IP Address: x.x.x.x".
print(soup.get_text())
body = soup.find('body')
print(f"Body is: {body}")
print(soup)
# Children of <body> — here just the text node(s) with the address.
the_contents_of_body_without_body_tags = body.findChildren()
print(f"Request is: {my_HTML}")
print("your IP Address is: ", the_contents_of_body_without_body_tags)
print("your IP Address is: ", body)
unwrap = soup.body.get_text()
print(f"unwrap variable: {unwrap}")