def http_request(self, request):
    data = request.get_data()
    if data is not None and type(data) != str:
        v_files = []
        v_vars = []
        try:
            for (key, value) in data.items():
                if type(value) == file:
                    v_files.append((key, value))
                else:
                    v_vars.append((key, value))
        except TypeError:
            systype, value, traceback = sys.exc_info()
            raise TypeError, "not a valid non-string sequence or mapping object", traceback

        if len(v_files) == 0:
            # doseq=True so list values are encoded element-wise
            data = encode(v_vars, doseq=True)
        else:
            boundary, data = self.multipart_encode(v_vars, v_files)
            contenttype = 'multipart/form-data; boundary=%s' % boundary
            if (request.has_header('Content-Type')
                    and request.get_header('Content-Type').find('multipart/form-data') != 0):
                print "Replacing %s with %s" % (request.get_header('content-type'), 'multipart/form-data')
            request.add_unredirected_header('Content-Type', contenttype)
        request.add_data(data)
    return request
def api_call(self, method, params, internal=False):
    request = None
    try:
        url_params = params
        # add token and department id
        url_params['token'] = self.token
        url_params['store_department_id'] = self.dep_id
        url_params = encode(url_params)
        request = "%s%s?%s" % (self.sailplay_domain, method, url_params)
        # the request string is a URL, so fetch it with urlopen rather than the builtin open
        data = urlopen(request).read().decode("utf-8")
        response_json = json.loads(data)
        if internal:
            return request, response_json
        if response_json[u'status'] == u'ok':
            logging.info("api_call: success: [%s] " % request)
            return response_json
        else:
            logging.error("api_call: Error: [%s] [%s: %s]" % (request, response_json[u'status'], response_json[u'message']))
            return False
    except Exception as e:
        logging.critical("api_call: Exception: [%s] [%s] " % (e, request))
        return False
def stopStartServices(server, portNum, adminUser, adminPass, stopStart, serviceList, token=None):
    '''
    Function to stop, start or delete a service.
    Requires Admin user/password, as well as server and port (necessary to construct
    token if one does not exist). stopStart = Stop|Start|Delete
    serviceList = List of services. A service must be in the <name>.<type> notation
    If a token exists, you can pass one in for use.
    '''
    # Get and set the token (gentoken also sets the module-level `port` and `http` globals used below)
    if token is None:
        token = gentoken(server, portNum, adminUser, adminPass)

    if serviceList == "all":
        serviceList = getServiceList(server, port, adminUser, adminPass, token)
        print(" \n")
    else:
        serviceList = [serviceList]

    # modify the service(s)
    for service in serviceList:
        op_service_url = "{}://{}:{}/arcgis/admin/services/{}/{}".format(http, server, port, service, stopStart)
        query = {'token': token, 'f': 'json'}
        status = urlopen(op_service_url, data=encode(query)).read()

        if 'success' in status:
            print("{} successfully performed on {}".format(stopStart, service))
        else:
            print("Failed to perform operation. Returned message from the server:")
            print(status)

    return
def setDefault(self, default):
    """ Sets the default image in the case the provided email does not have a gravatar
    can be a direct url or one of the included defaults:
        404, mm, identicon, monsterid, wavatar, retro, and blank
    """
    # percent-encode the value so a full URL can safely be passed as the d= parameter
    self._default = urllib.quote(default, safe='')
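# For context, a hedged sketch of how a percent-encoded default image typically
# ends up in a Gravatar request URL. The gravatar_url helper, its parameters and
# the example fallback URL are illustrative assumptions, not part of the class above.
import hashlib
import urllib

def gravatar_url(email, encoded_default, size=80):
    # Gravatar keys avatars on the MD5 hash of the trimmed, lower-cased email
    # address; the pre-encoded default image goes into the d= query parameter.
    email_hash = hashlib.md5(email.strip().lower()).hexdigest()
    return 'https://www.gravatar.com/avatar/%s?d=%s&s=%d' % (email_hash, encoded_default, size)

print gravatar_url('user@example.com', urllib.quote('http://example.com/fallback.png', safe=''))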
def wiglePrint(username, password, netid):
    browser = mechanize.Browser()
    browser.open('http://wigle.net')
    reqData = urllib.urlencode({'credential_0': username,
                                'credential_1': password})
    print browser.open('https://wigle.net/gps/gps/main/login', reqData).read()
    params = {}
    params['netid'] = netid
    reqParams = urllib.urlencode(params)
    respURL = 'http://wigle.net/gps/gps/main/confirmquery/'
    resp = browser.open(respURL, reqParams).read()
    mapLat = 'N/A'
    mapLon = 'N/A'
    print resp
    rLat = re.findall(r'maplat=.*\&', resp)
    if rLat:
        mapLat = rLat[0].split('&')[0].split('=')[1]
    rLon = re.findall(r'maplon=.*\&', resp)
    if rLon:
        mapLon = rLon[0].split('&')[0].split('=')[1]
    print '[-] Lat: ' + mapLat + ', Lon: ' + mapLon
def music(self, **kw):
    if 'type' in kw.keys():
        if kw['type'].lower() == 'mp3':
            size = 320
        else:
            size = 2000
    elif 'size' in kw.keys():
        size = kw['size']
    else:
        size = 320
    head = header
    head['cookie'] = _dict_str(self.cookie)
    data = encode({
        'action': 'getmusicurl',
        'site': self.site,
        'id': self.song,
        'type': size
    })
    # issue the request once and reuse the parsed JSON
    result = requests.post(api, data, headers=head).json()
    print result
    return {
        'type': 'flac' if size == 2000 else 'mp3',
        'url': _unicode_str(result[u'url'])
    }
def run(self, team_name, agent_name):
    # Find docker ip
    DOCKERIP = None
    for interface in netifaces.interfaces():
        if interface.startswith('docker'):
            DOCKERIP = netifaces.ifaddresses(interface)[2][0]['addr']
            break

    if DOCKERIP is None:
        print "Please check your docker interface."
        sys.exit(-1)

    settings = Settings().getSettings()
    GET_AGENT_URL = settings["urls"]["get_agent"]
    CODE_VALIDATION_URL = settings["urls"]["code_validation"]
    DJANGO_HOST = settings["settings"]["django_host"]
    DJANGO_PORT = settings["settings"]["django_port"]

    AGENT_ENDPOINT = "http://" + DOCKERIP + ":" + str(DJANGO_PORT) + GET_AGENT_URL + \
        encode(team_name) + "/" + encode(agent_name) + "/"

    docker = subprocess.Popen("docker run ubuntu/ciberonline "
                              "bash -c 'curl -s "
                              "%s"
                              " | tar -xz;"
                              " python tests.py'" % (AGENT_ENDPOINT, ),
                              shell=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    (stdout, stderr) = docker.communicate()

    if docker.returncode != 0:
        message = stderr
    else:
        message = "Passed tests with success"

    print "[TESTS] " + message

    url = "http://" + DJANGO_HOST + ":" + str(DJANGO_PORT) + CODE_VALIDATION_URL + encode(agent_name) + "/"
    data = {'team_name': team_name,
            'code_valid': docker.returncode == 0,
            'validation_result': message}
    r = requests.put(url, data=data)

    print "[TESTS]" + str(r.status_code)
    print "[TESTS]" + r.text
    print "[TESTS] Test finished successfully"
def login(self, password):
    'Log in'
    head = header
    head['cookie'] = _dict_str(self.cookie)
    data = encode({'action': 'login', 'pwd': password})
    redict = requests.post(api, data, headers=head).cookies.get_dict()
    return _dict_add(self.cookie, redict)
def seearchBookByTitle(self, title):
    url = "https://www.googleapis.com/books/v1/volumes?q=intitle:" + encode(str(title)) + "&key=AIzaSyB3zv7o9a9Dqk5DFH8L_0PRRhU00UVXypE"
    response = urlopen(url)
    data = response.read()
    parsedData = json.loads(data)
    # If there are no books with this title, None is returned
    if parsedData['totalItems'] == 0:
        return None
    else:
        return self.parseBook(parsedData)
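# The parseBook helper referenced above is not included in this collection. As a
# rough, hedged sketch, a minimal version might pull a few fields from the Google
# Books "volumes" response fetched above; the function name here is hypothetical
# and the field names follow the public Books API volume format.
def parse_book_sketch(parsedData):
    # Illustrative only -- not the original parseBook() implementation.
    volume = parsedData['items'][0]['volumeInfo']
    return {
        'title': volume.get('title'),
        'authors': volume.get('authors', []),
        'publishedDate': volume.get('publishedDate'),
    }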
def do_get(host, url, content_type='text/plain', accept='text/plain', categories=[], attributes=[]):
    """
    Do a HTTP GET operation on a given URL

    Handles request and response based on content_type and accept which are both
    'text/plain' by default. Returns a set of return code, categories, attributes,
    links and locations or the appropriate HTTP error.

    Keyword arguments:
    url -- The url
    content_type -- The content_type determining how the data is rendered in the request (default text/plain)
    accept -- The accept header determining how the data should be returned by the service (default text/plain)
    categories -- A list of categories (default empty)
    attributes -- A list of attributes (default empty)
    """
    conn = httplib.HTTPConnection(host)
    body = None
    headers = {'Content-Type': content_type, 'Accept': accept, 'User-Agent': USER_AGENT_STRING}

    # TODO: pack given information into appropriate rendering...

    # create connection, retrieve body & headers
    if body is None:
        conn.request('GET', url, body=body, headers=headers)
    else:
        conn.request('GET', url, body=urllib.urlencode(body), headers=headers)
    response = conn.getresponse()

    response_code = response.status
    response_body = response.read()
    response_headers = response.getheaders()
    conn.close()

    # Verifies that the OCCI version is in the response.
    for item in response_headers:
        if item[0] == 'server':
            if item[1].find(VERSION_STRING) == -1:
                raise AttributeError('Service did not expose the correct OCCI version number')

    # Verifies that the service responded with the correct content-type
    response_content_type = response.getheader('Content-Type')
    if response_content_type != accept:
        raise AttributeError('Client requested that service responds with Content-Type:', accept, ' Instead got: ', response_content_type)

    # TODO: unpack information from rendering...

    return response_code, [], [], [], []
def get(self):
    # build link for auth page
    request = encode({
        "client_id": client_id,
        "redirect_uri": redirect_uri,
        "response_type": "code",
        "approval_prompt": "force",
        "scope": "https://www.googleapis.com/auth/calendar",
    })
    url = "https://accounts.google.com/o/oauth2/auth?" + request
    self.response.out.write(template.render("templates/home.html", {"url": url}))
def get(self):
    code = self.request.get("code", False)
    error = self.request.get("error", False)
    if code:
        # do authentication
        request = encode({
            "code": code,
            "client_id": client_id,
            "client_secret": client_secret,
            "redirect_uri": redirect_uri,
            "grant_type": "authorization_code",
        })
        result = urlfetch.fetch(
            url="https://accounts.google.com/o/oauth2/token",
            payload=request,
            method=urlfetch.POST
        )
        data = json.loads(result.content)
        request = encode({"access_token": data["access_token"]})
        # from here everything is done over AJAX
        self.redirect("/landing?" + request)
def get_timetable(self):
    request = encode({
        "identifier": self.programme_code,
        "days": "1-5",
        "periods": "1-20",
        "weeks": "5-16;20-31",
        "objectclass": "programme+of+study",
        "style": "individual",
    })
    result = urlfetch.fetch(url="http://timetable.ucc.ie/showtimetable.asp", payload=request, method=urlfetch.POST)
    self.html = result.content
def call(self, api_call, **kwargs):
    api_map = endpointsMap[api_call]
    method = api_map['method']
    path = api_map['path']
    status = api_map['status']
    valid_params = api_map.get('valid_params', ())

    # Body can be passed from data or in args
    body = kwargs.pop('data', None) or self._data

    # Substitute mustache placeholders with data from keywords
    url = re.sub(
        r'\{\{(?P<m>[a-zA-Z_]+)\}\}',
        # Optional pagination parameters will default to blank
        lambda m: "%s" % kwargs.pop(m.group(1), ''),
        API_URL + path
    )

    # Validate remaining kwargs against valid_params, then append them
    # url encoded to the url variable.
    for kw in kwargs:
        if kw not in valid_params:
            raise TypeError("%s() got an unexpected keyword argument "
                            "'%s'" % (api_call, kw))
    if kwargs:
        url += '?' + encode(kwargs)

    self.createHttpConnection()

    if body:
        contentLength = len(body)
        self._headers['Content-Length'] = contentLength

    def apiCall():
        self._c.request(method, url, body, self._headers)
        self.resp = self._c.getresponse()
        self.responseStatus = self.resp.status
        self.responseReason = self.resp.reason
        self.responseBody = self.resp.read()
        # bush-league error handling: retry once on rate limiting,
        # otherwise just report the failure
        if self.resp.status == 429:
            print "429 - Too many requests. Sleeping for 2 seconds."
            time.sleep(2)
            apiCall()
        elif self.responseStatus not in (200, 201):
            print "Something went wrong. status:", self.resp.status
            print "Reason:", self.resp.reason

    apiCall()
    return self
def otpcheck(ip, username, password):
    try:
        h = httplib.HTTPSConnection('webfront01.guifibages.net')
        headers = {"Content-type": "application/x-www-form-urlencoded"}
        params = urllib.urlencode({'ip': ip, 'password': password})
        h.request("POST", "/api/user/%s/otp" % username, params, headers)
        res = h.getresponse()
        status = False
        if res.status == 200:
            status = True
        h.close()
        return status
    except Exception, error:
        log("otpcheck Error: %s" % error)
def showBookByTitle(self, title):
    """Searches for a book by its title.
    Returns a Book object if a result is found, or None if there is an error.
    The goodreads API returns a book even if an ISBN is passed."""
    url = "https://www.goodreads.com/book/title.xml?title=" + encode(str(title)) + "&key=" + self.key
    response = urlopen(url)
    data = response.read()
    # Checks if the book exists; if it does not, the API returns a "book not found" error
    if data != "<error>book not found</error>":
        root = ET.fromstring(data)
        return self.parseBook(root[1])
    else:
        return None
def hashgen():
    try:
        data_raw = raw_input("$ ").rstrip()
        data = {"phone": data_raw}
        arg_e = urllib.urlencode(data)
        pass_hash = urllib2.urlopen(url, arg_e).read()
        if "passhash" in pass_hash:
            var_pass = pass_hash[13:30]
            FO = open(file, "a+")
            FO.write(var_pass + "==>" + data_raw)
            FO.close()
    except:
        print "[*]Sth is Wrong !\n"
def lyric(self):
    head = header
    head['cookie'] = _dict_str(self.cookie)
    data = encode({
        'action': 'getmusiclrc',
        'site': self.site,
        'id': self.lrc
    })
    songlrc = _unicode_str(requests.post(api, data, headers=head).json()[u'data'])
    fout = open('%s - %s.lrc' % (self.artist, self.name), 'w+')
    # encode explicitly so unicode lyrics can be written on Python 2
    fout.write(songlrc.encode('utf-8'))
    fout.close()
def update_channel(self, opts):
    '''
    Parameters:
        opts (dict) - A dict of values to be passed via url params
    '''
    if 'key' not in opts:
        if not self.write_key:
            raise ValueError('A write key is required to update a channel')
        else:
            opts['key'] = self.write_key

    opts = urllib.urlencode(opts)
    url = '{}/update?{}'.format(self.API, opts)
    req = urllib2.Request(url)
    response = urllib2.urlopen(req)
    result = response.read()
    return result
def login(self):
    url_params = encode({
        'store_department_id': self.dep_id,
        'store_department_key': self.dep_key,
        'pin_code': self.pin_code
    })
    # the login endpoint is a URL, so fetch it with urlopen rather than the builtin open
    data = urlopen("%s/api/v2/login/?%s" % (self.sailplay_domain, url_params)).read().decode("utf-8")
    response_json = json.loads(data)
    if response_json[u'status'] == u'ok':
        logging.info("sp_api_connector: login: success: [%s]" % (response_json))
        self.token = response_json[u'token']
        return self.token
    else:
        logging.error("sp_api_connector: login: failure: [%s: %s] " % (response_json[u'status'], response_json[u'message']))
        return False
def gentoken(server, portNum, adminUser, adminPass, expiration=60):
    # Re-usable function to get a token required for Admin changes
    sslURL = "http://{}:{}/arcgis/admin/generateToken?f=json".format(server, portNum)
    redirectURL = urlopen(sslURL).geturl()
    sslSettings = json.loads(urlopen(redirectURL).read())

    global port
    port = portNum
    try:
        if sslSettings['ssl']['supportsSSL']:
            port = sslSettings['ssl']['sslPort']
            global http
            http = 'https'
    except:
        pass

    query_dict = {'username': adminUser,
                  'password': adminPass,
                  'expiration': str(expiration),
                  'client': 'requestip',
                  'f': 'json'}

    query_string = encode(query_dict)
    url = "{}://{}:{}/arcgis/admin/generateToken".format(http, server, port)

    try:
        token = json.loads(urlopen(url, query_string).read())
        if token is None or "token" not in token:
            print("Failed to get token, return message from server:")
            print(token)
            sys.exit()
        else:
            # Return the token to the function which called for it
            return token['token']
    except URLError as e:
        print("Could not connect to machine {} on port {}".format(server, port))
        print(e)
        sys.exit()
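# A minimal usage sketch tying gentoken and stopStartServices together. The module
# name arcgis_admin, the server details and the sample service name are assumptions
# for illustration, not values taken from the original scripts.
from arcgis_admin import gentoken, stopStartServices  # assumed module name

server = 'gisserver.example.com'   # assumed hostname
portNum = 6080                     # default ArcGIS Server HTTP port
adminUser = 'siteadmin'            # assumed credentials
adminPass = 'changeme'

token = gentoken(server, portNum, adminUser, adminPass)
stopStartServices(server, portNum, adminUser, adminPass,
                  'Stop', 'SampleWorldCities.MapServer', token=token)
stopStartServices(server, portNum, adminUser, adminPass,
                  'Start', 'SampleWorldCities.MapServer', token=token)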
def logInstall(self, host, ip, app, rev):
    """The default logger.  Do not override this.  Instead, use the
    install_log hook to pass a callable
    """
    # urllib2 needs an explicit scheme on the URL
    log_url = "http://compswww.office.pirc.co.uk/cgi-bin/admin/store_deployment.py"
    #headers = {'User-agent': 'python-httplib'}
    log_data = {'host': host,
                'ip': ip,
                'app': app,
                'rev': rev,
                'ts': str(datetime.datetime.now())}
    data = urllib.urlencode(log_data)
    try:
        req = urllib2.Request(log_url, data)
        response = urllib2.urlopen(req)
        if not response.msg == 'OK':
            print "There was a problem logging the installer with the server"
    except Exception:
        print "There was a problem logging the installer with the server"

    if getattr(self, 'install_log', None) and callable(self.install_log):
        try:
            self.install_log()
        except Exception:
            print "Failed to run install_log callable"
def search(self, key, site='qq'):
    'Search'
    sites = [
        'kw',  # Kuwo
        'wy',  # NetEase Cloud Music
        'qq',  # QQ Music
    ]
    if site not in sites:
        return
    head = header
    head['cookie'] = _dict_str(self.cookie)
    data = encode({
        'action': 'search',
        'site': site,
        'key': quote(key.decode('GBK').encode('utf-8'))
    })
    return [
        music(i, self.cookie, site)
        for i in requests.post(api, data, headers=head).json()[u'data']
    ]
import socket

host = '10.230.229.13'
port = 80

# read the payload once and hand it to each client that connects
sfile = open("ishell.exe", "rb")
payload = sfile.read()
sfile.close()

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((host, port))
sock.listen(5)

while 1:
    (client, addr) = sock.accept()
    print "[*] Connection received from %s." % (addr,)
    client.send(payload)
    client.close()
http = httplib2.Http()

if 0:
    url = 'http://192.168.0.103/toggle'
    response, content = http.request(url, 'GET')
    print response
    print content

if 0:
    url_json = 'http://192.168.0.103/jblink'
    data = {'times': '10', 'pause': '500'}
    headers = {'Content-Type': 'application/json; charset=UTF-8'}
    response, content = http.request(url_json, 'POST', headers=headers,
                                     body=json.dumps(data))
    print response
    print content

if 1:
    import urllib
    url_query = 'http://192.168.0.103/qblink'
    data = {'times': '10', 'pause': '500'}
    # the body is form/query encoded here, so advertise that instead of JSON
    headers = {'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'}
    response, content = http.request(url_query, 'POST', headers=headers,
                                     body=urllib.urlencode(data))
    print response
    print content
def main():
    # first: odl_ip --- second: device name
    base_url = "http://%s:8181/restconf/operational/" +\
               "network-topology:network-topology/topology/" +\
               "topology-netconf/node/%s/yang-ext:mount/Cisco-IOS-XR-pfi-im-cmd-oper:interfaces/"

    # Constants
    odl_ip = "localhost"
    input_router_ip = "198.18.1.30"
    interface_name = "GigabitEthernet0/0/0/4"
    threshold = 9000
    MAX_OCCURENCES = 2
    MAX_NORMAL = 8

    # Get device name from the source IP address
    found = False
    for (device_name, device_config) in settings.config['network_device'].items():
        address = device_config['address']
        if address == input_router_ip:
            source_name = device_name
            found = True
            break
    if not found:
        print("Input source IP %s could not be found among the list of devices" % (input_router_ip))
        exit(1)

    # Need to go deeper into the yang model to get the statistics
    statistics_url = "interface-xr/interface/%s/interface-statistics/full-interface-stats/"
    request_url = base_url % (odl_ip, device_name) + statistics_url % (encode(interface_name))

    # Start time of experiment
    start_time = int(time())

    first = True                  # If getting stats for the first time
    occurences = MAX_OCCURENCES   # Number of times threshold can be hit before route is set
    id = 0                        # id of input to send to elasticsearch
    previous_bytes = 0            # bytes from previous GET request
    current_bytes = 0             # bytes from current GET request
    route_set = False             # Whether the route is set on the controller
    normal_runs = 0               # Number of normal runs after the route is set

    while True:
        # Perform a get request to retrieve all of the interface statistics
        print("[GET] Full interface stats")
        response = requests.get(request_url,
                                auth=(base64.b64decode("YWRtaW4="), base64.b64decode("YWRtaW4=")))
        if response.status_code == 200:
            # Parse JSON text into Python object
            stats = json.loads(response.text)
            current_bytes = stats["full-interface-stats"]["bytes-sent"]
            if first:
                print("Bytes sent = %d" % (current_bytes))
                previous_bytes = current_bytes
                first = False
            else:
                difference = current_bytes - previous_bytes
                print("Bytes sent = %d --- Difference = %d" % (current_bytes, difference))
                if difference > threshold and occurences != 0:
                    print("THRESHOLD HIT -- %d more chances" % (occurences))
                    occurences -= 1
                elif difference > threshold and occurences == 0:
                    print("\nStopping connection on interface %s\n" % (interface_name))
                    subprocess.Popen("python insert_route.py", shell=True)
                    route_set = True              # route is set
                    normal_runs = 0               # start checking for normal runs
                    occurences = MAX_OCCURENCES   # reset the occurences
                elif route_set and normal_runs < MAX_NORMAL:
                    normal_runs += 1              # found one normal run
                elif route_set and normal_runs == MAX_NORMAL:
                    # Delete the route
                    print("\nResetting connection on interface %s\n" % (interface_name))
                    subprocess.Popen("python delete_route.py", shell=True)
                    route_set = False             # route is not set

                # write data to json file
                write_data(difference, start_time, id)
                # send data
                send_data()
                id += 1                           # increase the id for the data

            previous_bytes = current_bytes
        else:
            print("Expected status code 200 but received %d." % response.status_code)
        sleep(2)
'''
-----------------------------------------
Author:  stefan
Version: 2016-9-19
-----------------------------------------
'''
import urllib2
import urllib

# POST:
values = {"username": "******", "password": "******"}
values['username'] = "******"
values['password'] = "******"
data = urllib.urlencode(values)
url = "http://www.baidu.com"
request = urllib2.Request(url, data)
response = urllib2.urlopen(request)
html = response.read()
print html

# GET:
geturl = url + '?' + data
request = urllib2.Request(geturl)
response = urllib2.urlopen(request)
html = response.read()
print html

user_agent = ''
# HTTPError
import urllib2

url = 'http://www.xxxxx.com'
req = urllib2.Request(url)
try:
    response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
    print e.code
    print 'we can not fulfill the request \n'
except urllib2.URLError as e:
    print e.reason
    print 'we can not reach a server'
else:
    print('No problem')

# Data
import urllib
import urllib2

# specify the url we will fetch.
url = 'http://www.voidspace.com.uk/'
# create a dict to store the data.
dict_data = {'use_name': 'aibilim', 'password': '******', 'language': 'python'}
# encode the dict_data in order to pass it to the Request.
data_pass = urllib.urlencode(dict_data)
# next, we make a req.
req = urllib2.Request(url, data_pass)
# now we get the response
response = urllib2.urlopen(req)
the_page = response.read()
# print the page.
print the_page
#!/usr/bin/python
from BeautifulSoup import BeautifulSoup
import re
from urllib import urlencode as encode
from urllib2 import *

out = open("./data", 'w')
p = urlopen("http://www.plurk.com/t/Taiwan#hot")
#soup = BeautifulSoup(p.read())
content = p.read()
#print soup
out.write(content)

last_offset = re.search(r'fetchMore\("hot", ([0-9.]*)\)', content).group(1)
print last_offset

for i in range(1, 10):
    fetch = Request("http://www.plurk.com/PlurkTop/fetchPlurks")
    #fetch.add_header()
    fetch.add_data(encode({'sorting': 'hot',
                           'collection_id': 2,
                           'offset': last_offset}))
    res = urlopen(fetch).read()
    last_offset = re.search(r'fetchMore\("hot", ([0-9.]*)\)', res).group(1)
    print last_offset
    out.write("i=%d:12345678901234567890123456789012345678901234567890123456789012345678901234567890\n" % i)
    out.write(res)
    #soup = BeautifulSoup(res)
    #print soup
def run(php_query):
    # quote_plus() makes the query safe to embed in the DuckDuckGo URL
    os.system('open "https://duckduckgo.com/?q=!php%20{0}"'.format(urllib.quote_plus(php_query)))
import smtplib
import requests
import sqlalchemy
from urllib import urlencode as encode
from json import dumps

engine = sqlalchemy.create_engine('mysql://*****:*****@localhost/mysql')  # connect to server
engine.execute("use openx")
val_in_cache = engine.execute("select count(*) from ox_data_summary_ad_hourly_report")
for row in val_in_cache:
    val = row['count(*)']

if val > 1000:
    print "hello"
    '''server = smtplib.SMTP('smtp.gmail.com', 587)
    server.starttls()
    server.login("*****@*****.**", "helloworld")
    msg = "Reporting delay , please check ox_data_summary_ad_hourly_report"
    server.sendmail("*****@*****.**","*****@*****.**",msg)'''

_requests = "syncRequest"
_payload = {
    "text": "Testing",
    "channel": "#images",
    "username": "******",
    "icon_emoji": ":damei:"
}
payload = encode({'payload': dumps(_payload)})
headers = {'content-type': "application/x-www-form-urlencoded"}
response = requests.post(
    "https://hooks.slack.com/services/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",
    data=payload,
    headers=headers
)