def tshark(self, pcap, options=""): """tshark(pcap, options): DESCRIPTION: Run tshark on the specified pcap file with the specified options Return the text file name """ outfile = utilities.snip(pcap, 0, ".pcap") + ".tshark" self.local_shell.run("tshark -r " + pcap + " > " + outfile, 0) return outfile
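# Illustrative only: a minimal standalone sketch of the same operation using the
# standard-library subprocess module instead of a shell string, which avoids
# quoting problems when the pcap path contains spaces. This assumes the shell
# helper above behaves like a shell=True invocation; it is not part of the class.

import subprocess

def run_tshark(pcap):
    """Decode a pcap with tshark and return the text output file name."""
    outfile = pcap.rsplit(".pcap", 1)[0] + ".tshark"
    with open(outfile, "w") as fh:
        # Arguments are passed as a list, so no shell quoting is needed
        subprocess.check_call(["tshark", "-r", pcap], stdout=fh)
    return outfile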
def __init__(self, transfer_parent, dpr_client, transfer_filenames, options, transfer_id):
    super(transferThread, self).__init__()
    self.transfer_parent = transfer_parent
    self.dpr_client = dpr_client
    self.transfer_filenames = transfer_filenames
    self.options = options
    self.transfer_id = transfer_id
    self.data = {}
    self.stop_transfer = 0
    self.transfer_running = 1
    # Derive a short thread name from the default repr, e.g. "<Thread(Thread-1, ...)" -> "Thread-1"
    self.thread_name = utilities.snip(str(self), "(", ",")
    try:
        self.count = options['COUNT']
    except KeyError:
        self.count = 1
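# Hypothetical usage sketch (the names below are placeholders, not from the
# source): a transferThread is built with its parent transfer object, a
# dpr_client, the list of files to move, the options dictionary, and a numeric
# id, then started like any threading.Thread subclass.
#
#   thread = transferThread(parent_transfer, client,
#                           ["/srv/files/10MB.bin"],
#                           {'COUNT': 3, 'CONTINUOUS': 0, 'OUTFILE': '',
#                            'PROXY': 'DIRECT'},
#                           transfer_id=1)
#   thread.start()
#   ...
#   thread.stop_transfer = 1   # ask the run() loop to wind down
#   thread.join()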
def __init__(self):
    """__init__():
    This is the initialization constructor for reporting and initializes the meta data.
    """
    # File to write results to
    self.results_filename = utilities.snip(sys.argv[0], 0, ".py") + ".results.log"
    # Default the meta data
    self.result = "N/A"
    self.elapsed_time = "N/A"
    self.number_passed = 0
    self.number_failed = 0
    self.number_warning = 0
    self.number_skipped = 0
    self.total_number_of_tests = 0
    self.remark = " "
    self.result_file = ""
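# For illustration: assuming utilities.snip(s, 0, marker) returns everything in
# s up to (but not including) marker, the results filename can be derived
# equivalently with the standard library. This is a sketch of the assumed
# behavior, not the project's actual helper.

import os
import sys

def results_log_name():
    """Return '<script>.results.log' for the currently running script."""
    base = os.path.splitext(sys.argv[0])[0]  # strip the extension (the original snips at ".py")
    return base + ".results.log"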
def run(self):
    # Indicate that we have a transfer thread running
    self.transfer_running = 1

    # Set up the transfer command as part of this thread.
    # Sadly the upload sequence needs the proxy before the upload cmds.
    if 'UPLOAD' in self.options and self.options['PROXY'] != 'DIRECT':
        cmd = ""
    else:
        # pageload requires a different transfer strategy. Note: page file objects
        # for transfer MUST be loaded on a local server accessible by the test server.
        if 'PAGELOAD' in self.options:
            cmd = ("cd /home/qfreleng/testmget; export http_proxy=127.0.0.1:10080; "
                   "mget -r -p -nc -H --max-redirect 1 --num-threads "
                   + str(self.options['PAGELOAD']) + " --level 1 ")
        else:
            cmd = "curl"

    # Check if they want HTTP 1.0
    if 'http' in self.options and self.options['http'] == '1.0':
        cmd = cmd + " -0"

    # Check and verify curl options, if any
    if 'curlopt' in self.options:
        if const_curl_opts[self.options['curlopt']]:
            cmd = cmd + " " + const_curl_opts[self.options['curlopt']]

    outfile = self.options['OUTFILE']
    outfiles = []
    upld_cmd = ""
    dnldpath = ""
    if 'CURL_DNLD_DIR' in self.options:
        dnldpath = self.options['CURL_DNLD_DIR']
    i = 1
    j = 0
    for file in self.transfer_filenames:
        if outfile == "":
            # When in continuous transfer mode a new file of size n gets created every
            # time it runs, which means, depending on the length of the run, the HD
            # starts to fill up and can eventually max out. To protect against that,
            # use a more common name for reuse.
            if self.options['CONTINUOUS']:
                if not self.options['PROXY'] == "HTTP_PROXY":
                    # going direct
                    if 'UPLOAD' in self.options:
                        cmd += (" -F 'uploaded=@" + str(file) + "; filename="
                                + os.path.split(file)[1] + ".CONT."
                                + str(self.dpr_client.proxyPort) + "."
                                + self.thread_name + "." + str(i)
                                + ".tmp' -H \"Expect:\" http://"
                                + self.options['CSERVER'] + "/cgi-bin/upload.php")
                        log("UPLOAD")
                    else:
                        outfile = (dnldpath + os.path.split(file)[1] + ".CONT."
                                   + str(self.dpr_client.proxyPort) + "."
                                   + self.thread_name + "." + str(i) + ".tmp")
                else:
                    # DRM http proxy
                    if 'UPLOAD' in self.options:
                        cmd += (" -F 'uploaded=@" + str(file) + "; filename="
                                + os.path.split(file)[1] + ".CONT."
                                + str(self.dpr_client.currentClientConfig['nonDprProxyPort'])
                                + "." + self.thread_name + "." + str(i)
                                + ".tmp' -H \"Expect:\" http://"
                                + self.options['CSERVER'] + "/cgi-bin/upload.php")
                    else:
                        outfile = (dnldpath + os.path.split(file)[1] + "."
                                   + str(self.dpr_client.proxyPort) + "."
                                   + str(i) + ".tmp")
            else:
                if 'UPLOAD' in self.options:
                    # Add in the upload xfer directives and temp file name.. index needs to be i-1
                    cmd += (" -F 'uploaded=@" + str(file) + "; filename="
                            + os.path.split(file)[1] + "."
                            + str(self.dpr_client.proxyPort) + "."
                            + self.thread_name + "." + str(i)
                            + ".tmp' -H \"Expect:\" http://"
                            + self.options['CSERVER'] + "/cgi-bin/upload.php")
                    log("UPLOAD")
                else:
                    outfile = (dnldpath + os.path.split(file)[1] + "."
                               + self.thread_name + "." + str(i) + ".tmp")
        i += 1
        if 'UPLOAD' not in self.options:
            # pageload is file/obj based; cannot redirect output here
            if 'PAGELOAD' in self.options:
                cmd = cmd + " " + file
            else:
                cmd = cmd + " " + file + " -o " + outfile
            outfiles.append(outfile)
            outfile = ""

    log("CURL CMD : " + cmd)

    # NOTE_TO_SELF - Change to .launch and check for completion while watching
    # for the user calling stop thread
    while not self.stop_transfer:
        # Start the transfer
        if getOpt('VERBOSE_TRANSFER'):
            log('DEBUG', "Transfer " + self.thread_name + ":"
                + str(self.transfer_id) + " started via: " + cmd)
        output = self.test_client.shell.run(cmd, 1, 1, 1, 0)
        if getOpt('VERBOSE_TRANSFER'):
            log('DEBUG', "Transfer " + self.thread_name + ":"
                + str(self.transfer_id) + " ended \nLOCAL FILE: "
                + str(outfiles) + "\nCURL OUTPUT:\n" + output)

        # Parse the output for errors
        errmsg = utilities.snip(output, "curl: ")
        # Get each stat line
        lines = output.split("\r")
        # Get the values from the last line of output (this will have the best
        # average, but the intermediate ones are available)
        values = lines[-1].split()
        stats = {}
        stats["time_finished"] = strftime("%m/%d/%y %H:%M:%S", gmtime())
        if errmsg:
            stats["error"] = self.__class__.__name__ + "() " + errmsg + "\n"
            self.stop_transfer = 1
        else:
            try:
                stats["total_%"] = values[0]
                stats["total_bytes"] = values[1]
                stats["rcvd_%"] = values[2]
                stats["rcvd_bytes"] = values[3]
                stats["xfer_%"] = values[4]
                stats["xfer_bytes"] = values[5]
                stats["avg_dload"] = values[6]
                stats["avg_uload"] = values[7]
                stats["time_total"] = values[8]
                stats["time_spent"] = values[9]
                stats["time_left"] = values[10]
                stats["current_speed"] = values[11]
            except IndexError:
                if output.strip() != "":
                    errmsg = "Transfer encountered abnormal output: " + str(output)
                    if "ssh_exchange_identification" in output:
                        errmsg = errmsg + ("\nConsider modifying /etc/ssh/sshd_config "
                                           "on the client machine: MaxStartups 30 "
                                           "(remove the :10:20)")
                    log(errmsg)
                    stats["error"] = self.__class__.__name__ + "() " + errmsg + "\n"
                    self.stop_transfer = 1
            else:
                if 'UPLOAD' not in self.options:
                    # Process curl download stats. Throughput numbers like avg_dload
                    # come back like "12k"; make them graphable, like 12288.
                    stats["down_throughput"] = 0
                    if stats["avg_dload"] != "":
                        for unit in ['k', 'M', 'G', 'none']:
                            if unit in stats["avg_dload"]:
                                break
                        if unit == 'none':
                            stats["avg_dload"] = stats["avg_dload"] + "B"
                            unit = "B"
                        avg_dload = utilities.snip(stats["avg_dload"], 0, unit)
                        # There appears to be a bug in Python where sometimes a string
                        # like "13.8" passed to float() has unseen garbage. Maybe it's
                        # a fault of utilities.snip. Anyway, reassigning the variable
                        # seems to clean up the garbage.
                        if avg_dload == "":
                            avg_dload = 0
                        x = avg_dload
                        # 02/06/15 - DRM: the above does not always work. A
                        # multi-client force kill can leave garbage in avg_dload and
                        # float() aborts on string data; this overrides the ValueError.
                        try:
                            y = float(x)
                        except ValueError:
                            y = float(0)
                            if getOpt('VERBOSE_TRANSFER'):
                                log("FLOAT ERROR: converting: " + x + " to float")
                        multiplier = {"B": 1, "k": 1024, "M": 1024 * 1024,
                                      "G": 1024 * 1024 * 1024}
                        y = multiplier[unit] * y
                        down_throughput = float(int(y))
                        stats["down_throughput"] = str(down_throughput)
                else:
                    # Process curl upload stats. Throughput numbers like avg_uload
                    # come back like "12k"; make them graphable, like 12288.
                    stats["up_throughput"] = 0
                    if stats["avg_uload"] != "":
                        for unit in ['k', 'M', 'G', 'none']:
                            if unit in stats["avg_uload"]:
                                break
                        if unit == 'none':
                            stats["avg_uload"] = stats["avg_uload"] + "B"
                            unit = "B"
                        avg_uload = utilities.snip(stats["avg_uload"], 0, unit)
                        # Same float() garbage workaround as the download path above.
                        x = avg_uload
                        try:
                            y = float(x)
                        except ValueError:
                            y = float(0)
                            if getOpt('VERBOSE_TRANSFER'):
                                log("FLOAT ERROR: converting: " + x + " to float")
                        multiplier = {"B": 1, "k": 1024, "M": 1024 * 1024,
                                      "G": 1024 * 1024 * 1024}
                        y = multiplier[unit] * y
                        up_throughput = float(int(y))
                        stats["up_throughput"] = str(up_throughput)

        # Append the stats dictionary onto the parent's list of transfer statistics
        # print(" IN THREAD APPENDING STATS: " + str(stats))
        self.transfer_parent.stats.append(stats)

        # Quit if we got a curl error
        if not errmsg == "" and not self.stop_transfer:
            if getOpt('VERBOSE'):
                msg = ("Transfer: " + cmd + " encountered an error:\n\n"
                       + cmd + "\n\n" + output + "\n")
                msg = self.__class__.__name__ + "() " + msg
                log('ERROR', "")
                log('ERROR', "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
                log('ERROR', msg)
                log('ERROR', "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
                log('ERROR', "")
            break

        # They may have requested a single transfer, continuous, or multiple transfers
        if self.count > 0:
            self.count -= 1
        if not self.options['CONTINUOUS'] and self.count <= 0:
            self.stop_transfer = 1
        if 'DELAY_XFER_RESTART' in self.options:
            time.sleep(self.options['DELAY_XFER_RESTART'])

    # Indicate we are no longer running
    self.transfer_running = 0
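# Illustrative sketch of the unit handling above: curl's progress meter reports
# average rates in human-readable form ("999", "12.3k", "1.5M", "2G"), and the
# loop converts them to plain bytes-per-second so they graph cleanly. The helper
# below is a self-contained restatement of that arithmetic, not project code;
# the suffix set and 1024-based multipliers mirror the table used in run().

def rate_to_bytes(rate):
    """Convert a curl-style rate string such as '12.3k' to bytes/sec."""
    multiplier = {"B": 1, "k": 1024, "M": 1024 * 1024, "G": 1024 * 1024 * 1024}
    rate = rate.strip()
    # A bare number has an implicit "B" unit, as in the run() loop
    unit = rate[-1] if rate and rate[-1] in multiplier else "B"
    digits = rate.rstrip("BkMG")
    try:
        value = float(digits) if digits else 0.0
    except ValueError:  # garbage from a force-killed transfer parses as zero
        value = 0.0
    return float(int(multiplier[unit] * value))

# e.g. rate_to_bytes("12.3k") == 12595.0, rate_to_bytes("999") == 999.0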