def search_name(self):
    """Find the first word in the utterance matching the name pattern.

    On a match, stores the cleaned, title-cased word in ``self.name``,
    echoes it via ``print_`` and stops scanning; otherwise ``self.name``
    is left as the empty string.
    """
    self.name = ""
    for token in self.utterance.lower().split():
        if not re.findall(self.name_ptrn, token):
            continue
        self.name = clear(token).title()
        print_("\nName", self.name)
        return
def search_intents(self):
    """Match every known intent pattern against the utterance.

    Fills ``self.intents`` with the names of all patterns in
    ``self.intents_ptrn`` that match the lower-cased utterance and
    echoes the resulting list via ``print_``.
    """
    lowered = self.utterance.lower()
    self.intents = [
        name for name, pattern in self.intents_ptrn.items()
        if re.findall(pattern, lowered)
    ]
    print_("\nIntents", self.intents)
def search_products(self):
    """Match every known product pattern against the utterance.

    Fills ``self.products`` with the names of all patterns in
    ``self.product_ptrn`` that match the lower-cased utterance and
    echoes the resulting list via ``print_``.
    """
    lowered = self.utterance.lower()
    self.products = [
        name for name, pattern in self.product_ptrn.items()
        if re.findall(pattern, lowered)
    ]
    print_("\nProducts", self.products)
def main():
    """Entry point: parse the input file, run the assignment, export results.

    Usage: ``script <path-to-input-file>``. Exits with status 1 on bad
    arguments and raises ``FileNotFoundError`` when the input is missing.
    """
    if len(sys.argv) != 2:
        print("Usage: {} <path-to-input-file>".format(sys.argv[0]))
        # FIX: use sys.exit() — the bare exit() builtin is only provided by
        # the interactive `site` module and is not guaranteed in scripts.
        sys.exit(1)

    # Parse inputs
    input_f = sys.argv[1]
    # name of the current case
    fname = os.path.basename(input_f)
    print_sec("Running for case: {}".format(fname))
    if not os.path.isfile(input_f):
        raise FileNotFoundError(input_f)

    # Parse input - use serial form if it's there
    prob = parse_input(input_f)
    prob.kickstart()

    # Process
    print_ssec("computing...")
    out_ids, out_dists = assign(prob, prob.curr_positions.keys(),
                                {ride.id: ride for ride in prob.rides}, 0)
    print("out_ids: ", out_ids)
    print("out_dists: ", out_dists)
    # NOTE(review): "compuations" typo kept — it is a runtime string.
    print_("compuations done.")

    # Export results; output name is timestamped to avoid clobbering runs.
    outfile = "{}_{}".format(int(time.time()), fname)
    export_results(prob, outfile)
    print_("all done, exiting.")
def search_entities(self):
    """Match every known entity pattern against the utterance.

    Fills ``self.entities`` with the names of all patterns in
    ``self.entities_ptrn`` that match the lower-cased utterance and
    echoes the resulting list via ``print_``.
    """
    lowered = self.utterance.lower()
    self.entities = [
        name for name, pattern in self.entities_ptrn.items()
        if re.findall(pattern, lowered)
    ]
    print_("\nEntities", self.entities)
def main():
    """Entry point: parse the input file, run the (TODO) computation, export.

    Usage: ``script <path-to-input-file>``. Exits with status 1 on bad
    arguments and raises ``FileNotFoundError`` when the input is missing.
    """
    if len(sys.argv) != 2:
        print("Usage: {} <path-to-input-file>".format(sys.argv[0]))
        # FIX: use sys.exit() — the bare exit() builtin is only provided by
        # the interactive `site` module and is not guaranteed in scripts.
        sys.exit(1)

    # Parse inputs
    input_f = sys.argv[1]
    # name of the current case
    fname = os.path.basename(input_f)
    print_sec("Running for case: {}".format(fname))
    if not os.path.isfile(input_f):
        raise FileNotFoundError(input_f)

    # Parse input - use serial form if it's there
    prob = parse_input(input_f)

    # Process
    print_ssec("computing...")
    # TODO
    # NOTE(review): "compuations" typo kept — it is a runtime string.
    print_("compuations done.")

    # Export results; output name is timestamped to avoid clobbering runs.
    outfile = "{}_{}".format(int(time.time()), fname)
    export_results(prob, outfile)
    print_("all done, exiting.")
def speedtest_with_retry():
    """Run myspeedtest() up to SPEEDTEST_MAX_RETRIES times.

    Returns the first truthy result, or None when every attempt failed.
    Sleeps SPEEDTEST_RETRY_DELAY seconds between attempts (but not after
    the last one).
    """
    attempt = 0
    while attempt < SPEEDTEST_MAX_RETRIES:
        attempt += 1
        result = myspeedtest()
        if result:
            return result
        if attempt < SPEEDTEST_MAX_RETRIES:
            utils.print_(
                'netspeed_edge.py: Speed test was unsuccessful, sleeping %s seconds and then will try again...'
                % str(SPEEDTEST_RETRY_DELAY))
            time.sleep(SPEEDTEST_RETRY_DELAY)
    return None
def preparing_sonar(self, system):
    """Generate the sonar-scanner properties file for *system*.

    Builds a placeholder->value map from the instance configuration,
    applies it line-by-line to the template file and writes
    ``<system>.sonarsource.properties`` into the sonar folder.
    """
    utils.print_(
        ">> Preparando execucao do SonarQube no sistema {} ...".format(
            system))
    # First language registered for this system id.
    # NOTE(review): assumes at least one entry matches — IndexError otherwise;
    # set order makes the pick arbitrary if several languages match. Confirm.
    language = list({
        item["Language"]
        for item in self.systems_and_keys
        if system.upper() in item["ID"].upper()
    })[0]
    # Comma-separated, de-duplicated list of the system's modified files.
    files = ",".join(
        {file["File"] for file in self.files if file["ID"] == system})
    modules = utils.write_modules(self.modules.items(), self.files, system)
    # Placeholder -> replacement map applied to every template line below.
    replacements = {
        "{url}": self.sonar_server,
        "{login}": self.sonar_login,
        "{password}": self.sonar_password,
        "{repository}": self.base_repository,
        "{system}": system,
        "{branch}": self.git_repository.active_branch.name,
        "{sources}": "sonar.sources=" + files,
        "{files}": files,
        "{language}": language,
        "{modules}": modules
    }
    # When modules are present the global sonar.sources line is suppressed —
    # presumably the module entries carry their own sources; confirm against
    # the template.
    if replacements["{modules}"] != "":
        replacements.update({"{sources}": ""})
    lines = []
    with open(self.sonar_template) as infile:
        for line in infile:
            for src, target in replacements.items():
                line = line.replace(src, target)
            lines.append(line)
    with open(
            self.sonar_folder + "{}.sonarsource.properties".format(system),
            'w') as outfile:
        for line in lines:
            outfile.write(line)
    utils.ok_text(
        "Arquivo {}.sonarsource.properties criado com sucesso.".format(
            system))
def remove_configuration_file(self, system):
    """Delete the generated sonar-scanner properties file for *system*.

    On any failure prints an error and terminates via
    ``utils.system_exit_ok()`` so the commit is not blocked.
    """
    utils.print_(">> Removendo arquivo de configuracao ...")
    properties_path = (self.sonar_folder +
                       "{}.sonarsource.properties".format(system))
    try:
        utils.remove_file(properties_path)
        utils.ok_text(
            "Arquivo {}.sonarsource.properties removido com sucesso.".
            format(system))
    except Exception:
        utils.error_text(
            "Nao foi possivel remover o arquivo de configuracao do sistema {}"
            .format(system))
        utils.system_exit_ok()
def post_networkdata(jsonpayload, event_id, heart_beat=False):
    """Send network data (JSON) to mqtt, retrying on failure.

    Heart-beat messages get SEND_MAX_RETRIES attempts, normal messages 2.
    Returns True on the first successful send, False when all attempts
    failed.
    """
    max_attempts = SEND_MAX_RETRIES if heart_beat else 2
    for _attempt in range(1, max_attempts + 1):
        result = mqttpub.post_networkdata_single_wiotp(
            jsonpayload, event_id, heart_beat=heart_beat)
        if result == 1:
            # success
            return True
        if result == -1:
            # We were not registered — report and retry immediately.
            utils.print_(
                'netspeed_edge.py: Send to mqtt failed. Not registered.')
        else:
            # The send failed for some other reason; back off before retrying.
            time.sleep(SEND_RETRY_DELAY)
    return False
def commit_analyzer(self):
    """Run the SonarQube analysis over the staged commit.

    Prepares and runs sonar-scanner for every affected system, then either
    blocks or releases the commit depending on whether the scanner flagged
    critical issues. Every path ends in a ``utils.system_exit_*`` call.
    """
    utils.verify_branch_is_merging(self.git_command)
    if self.scan_status:
        utils.print_("\n")
        utils.print_(" ANALISE DE CODIGO PELO SONARQUBE INICIADO")
        utils.print_("=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=\n")
        start_time = time.time()
        self.find_modified_files()
        utils.verify_sonar_response(self.sonar_server)
        for system in self.systems:
            self.preparing_sonar(system)
            self.run_sonar(system)
        utils.remove_folder("{}.scannerwork".format(self.base_repository))
        utils.print_(
            ">> Analise de qualidade de codigo pelo SonarQube finalizada.")
        # Elapsed wall-clock time formatted as HH:MM:SS.ss.
        hours, rem = divmod(time.time() - start_time, 3600)
        minutes, seconds = divmod(rem, 60)
        utils.print_(
            ">> Tempo de execucao: {:0>2}:{:0>2}:{:05.2f}\n".format(
                int(hours), int(minutes), seconds))
        if self.scanner_error:
            # Critical issues were found by run_sonar — refuse the commit.
            utils.warning_text(
                "Existem problemas criticos de qualidade, verifique o relatorio no navegador. Commit recusado."
            )
            utils.system_exit_block_commit()
        else:
            utils.ok_text("Nenhum problema encontrado. Commit liberado.")
            utils.system_exit_ok()
    else:
        # Scanning disabled by configuration — let the commit through.
        utils.warning_text(
            ">> Analise de qualidade de codigo pelo SonarQube esta desativada. Commit liberado."
        )
        utils.system_exit_ok()
def parse_input(f_in: str):
    """Parse the input file, fill and return the `Problem` class instance.

    The file is expected to be whitespace-separated integers: a header line
    ``R C F N B T`` followed by N ride lines ``x1 y1 x2 y2 t_start t_end``.
    """
    print_ssec("parsing input...")

    # Read all contents - line by line, each line split into ints
    with open(f_in, 'r') as f:
        conts = [[int(s) for s in i.rstrip().split()] for i in f.readlines()]

    # Remove last line if empty.
    # FIX: each entry is a list of ints, so a trailing blank line parses to
    # [] — the old `conts[-1] == ''` comparison could never be true.
    if conts and not conts[-1]:
        conts = conts[:-1]

    prob = Problem()
    # Total values
    prob.R, prob.C, prob.F, prob.N, prob.B, prob.T = conts[0]

    # Get rest of items - N Rides, one per line; ride ids are 0-based.
    # FIX: dropped the dead `line += 1` — rebinding a `for` loop variable
    # has no effect on the range iteration.
    for line in range(1, prob.N + 1):
        c = conts[line]
        _id = line - 1
        start = Point(c[0], c[1])
        end = Point(c[2], c[3])
        t_start, t_end = c[4], c[5]
        prob.rides.append(Ride(start, end, t_start, t_end, _id))

    # Print stuff out
    print_ssec("Inputs: ")
    print("* R: {} rows".format(prob.R))
    print("* C: {} columns".format(prob.C))
    print("* F: {} vehicles/fleet".format(prob.F))
    print("* N: {} rides".format(prob.N))
    print("* B: {} bonus".format(prob.B))
    print("* T: {} simulation steps/turns".format(prob.T))
    print()
    print("rides:\n", prob.rides)
    print_("parsed input successfully")
    return prob
def search_city(self):
    """Look for a known city name in the utterance.

    First pass: every adjacent two-word pair (title-cased) is checked
    against the city pattern with a full match. Fallback pass: the single
    words collected along the way are checked individually. The first hit
    is stored in ``self.city``; it stays "" when nothing matches.
    """
    utterance = clear(self.utterance).split()
    self.city = ""
    tmp = []  # single words gathered during the pair scan, for the fallback
    while len(utterance) >= 2:
        word1 = utterance.pop(0).title()
        word2 = utterance[0].title()
        tmp.append(word1)
        word = word1 + ' ' + word2
        if re.fullmatch(self.city_ptrn, word):
            self.city = word
            print_("\nCity", self.city)
            return
    # The final remaining word was never appended by the loop above.
    if utterance:
        tmp.append(utterance[0].title())
    for word in tmp:
        if re.findall(self.city_ptrn, word):
            self.city = word
            print_("\nCity", self.city)
            return
def post_networkdata_single_wiotp(jsonpayload, event_id, heart_beat=False):
    """Tries once to send network data in json format to WIoTP via mqtt.

    Returns 1 if successful, 0 if not, -1 if failed because not registered.
    """
    try:
        retain = True
        qos = 2  # since speed data is sent so infrequently we can afford to make sure it gets there exactly once
        if debug_flag:
            utils.print_("mqtt_pub.py: Sending data to mqtt... "
                         "mqtt_topic=%s, mqtt_broker=%s, client_id=%s" %
                         (mqtt_topic, mqtt_broker, mqtt_client_id))
        # Publish to MQTT
        publish.single(topic=mqtt_topic,
                       payload=jsonpayload,
                       qos=qos,
                       hostname=mqtt_broker,
                       protocol=mqtt.MQTTv311,
                       client_id=mqtt_client_id,
                       port=mqtt_port,
                       #auth=mqtt_auth,
                       tls=mqtt_tls,
                       retain=retain)
        if debug_flag:
            utils.print_('mqtt_pub.py: Send to mqtt successful')
        return 1
    except Exception as e:
        # FIX: was a bare `except:` + sys.exc_info(), which also swallowed
        # SystemExit / KeyboardInterrupt. Catch Exception and bind directly.
        if ('not authori' in str(e).lower()
                or 'bad user name or password' in str(e).lower()):
            # The data send failed because we are not successfully registered
            return -1
        utils.print_('Send to mqtt failed: %s' % e)
        return 0
def parse_input(f_in: str):
    """Parse the input file, fill and return the `Problem` class instance.

    Template stub: currently reads and cleans the file contents but does
    not yet populate the returned ``Problem``.
    """
    print_ssec("parsing input...")

    # Read all contents - line by line
    # TODO - Change me if not dealing with ints!!!
    with open(f_in, 'r') as f:
        conts = [[int(s) for s in i.rstrip().split()] for i in f.readlines()]

    # Remove last line if empty.
    # FIX: entries are lists of ints, so a trailing blank line parses to
    # [] — the old `conts[-1] == ''` comparison could never be true.
    if conts and not conts[-1]:
        conts = conts[:-1]

    # Total values
    # Get rest of items
    # Print stuff out
    print_ssec("Inputs: ")

    print_("parsed input successfully")
    return Problem()
def run_sonar(self, system):
    """Run sonar-scanner for *system* and handle its report.

    Opens the HTML issues report in the browser and sets
    ``self.scanner_error`` when major/critical issues are reported;
    the generated configuration file is always removed afterwards.
    """
    utils.print_(
        ">> Executando SonarQube no sistema {} ...".format(system))
    try:
        command = self.sonar_scanner + " -D project.settings={}{}.sonarsource.properties".format(
            self.sonar_folder, system)
        # shell=True because the command is a single pre-built string;
        # stderr is folded into stdout so one scan of the text suffices.
        output = subprocess.run(command,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                shell=True,
                                encoding="utf-8")
        if "EXECUTION FAILURE" in output.stdout:
            utils.error_text(
                "Nao foi possivel executar o SonarQube no sistema {}".
                format(system))
            utils.system_exit_ok()
        # NOTE(review): substring match — "major"/"critical" anywhere in the
        # scanner output counts, which could false-positive on file names.
        if "major" in output.stdout or "critical" in output.stdout:
            webbrowser.open(
                self.sonar_folder +
                "issues-report/{}/issues-report-{}.html".format(
                    system, system),
                new=2)  # new=2: open in a new browser tab, if possible
            utils.ok_text("Relatorio disponibilizado no navegador.")
            self.scanner_error = True
        else:
            utils.ok_text("Analise concluida.")
    except Exception:
        utils.error_text(
            "Nao foi possivel executar o SonarQube no sistema {}".format(
                system))
        utils.system_exit_ok()
    self.remove_configuration_file(system)
def find_modified_files(self):
    """Collect the staged C# files, grouped by system.

    Fills ``self.files`` (one dict per modified .cs file) and
    ``self.systems`` (sorted system ids), printing a numbered summary.
    Exits early via ``utils.system_exit_ok()`` when nothing relevant
    changed or on any error.
    """
    utils.print_(">> Analisando arquivos C# no stage ...")
    try:
        modified_files = self.git_repository.head.commit.diff()
        if not modified_files:
            utils.ok_text("Nenhum arquivo alterado.")
            utils.system_exit_ok()
        for file in modified_files:
            _, file_extension = os.path.splitext(file.a_path)
            # Skip deletions; only C# sources are analysed.
            if file.change_type != "D" and file_extension.lower() == ".cs":
                dictionary = self.find_modified_systems(file)
                self.files.append(dictionary)
        if len(self.files) == 0:
            utils.ok_text("Nenhum arquivo alterado.")
            utils.system_exit_ok()
        self.systems = {file["ID"] for file in self.files}
        self.systems = sorted(self.systems)
        for system in self.systems:
            index = list(self.systems).index(system) + 1
            utils.print_("{}. Sistema: {}".format(index, system))
            files = {
                file["File"]
                for file in self.files if file["ID"] == system
            }
            files = sorted(files)
            for file in files:
                utils.print_(" - " + file)
            utils.print_("")
    except Exception:
        utils.error_text(
            "Nao foi possivel encontrar os arquivos modificados no stage.")
        utils.system_exit_ok()
def getConfig():
    """Download the speedtest.net configuration and return only the data we
    are interested in (client/times/download/upload attributes).

    Returns None on any retrieval or parse error (the error is printed).
    """
    # NOTE(review): scheme-less URL — build_request presumably prepends
    # http/https; confirm against its definition.
    request = build_request('://www.speedtest.net/speedtest-config.php')
    uh, e = catch_request(request)
    if e:
        utils.print_(
            'netspeed_edge.py: Error: could not retrieve speedtest.net configuration: %s'
            % e)
        return None
    configxml = []
    # Read the response in 10 KB chunks until EOF.
    while 1:
        configxml.append(uh.read(10240))
        if len(configxml[-1]) == 0:
            break
    if int(uh.code) != 200:
        utils.print_(
            'netspeed_edge.py: Error: got HTTP code %s when trying to retrieve speedtest.net configuration'
            % str(uh.code))
        return None
    uh.close()
    try:
        try:
            # ElementTree path (bytes join + .attrib access).
            root = ET.fromstring(''.encode().join(configxml))
            config = {
                'client': root.find('client').attrib,
                'times': root.find('times').attrib,
                'download': root.find('download').attrib,
                'upload': root.find('upload').attrib
            }
        except AttributeError:
            # Python3 branch
            # Fallback to minidom when the ET API above is unavailable.
            root = DOM.parseString(''.join(configxml))
            config = {
                'client': getAttributesByTagName(root, 'client'),
                'times': getAttributesByTagName(root, 'times'),
                'download': getAttributesByTagName(root, 'download'),
                'upload': getAttributesByTagName(root, 'upload')
            }
    except SyntaxError as e:
        utils.print_(
            'netspeed_edge.py: Failed to parse speedtest.net configuration: %s'
            % str(e))
        return None
    # Free the parsed document before returning the extracted dict.
    del root
    del configxml
    return config
def build_user_agent():
    """Return (and cache) a Mozilla/5.0 compatible User-Agent string."""
    global user_agent
    if user_agent:
        # Already built on a previous call — reuse the cached value.
        return user_agent
    ua_parts = (
        'Mozilla/5.0',
        '(%s; U; %s; en-us)' % (platform.system(),
                                platform.architecture()[0]),
        'Python/%s' % platform.python_version(),
        '(KHTML, like Gecko)',
        'speedtest-net-poc/%s' % __version__,
    )
    if debug_flag:
        utils.print_('netspeed_edge.py: Platform information:')
        utils.print_('netspeed_edge.py:', platform.platform())
        utils.print_('netspeed_edge.py: ', ua_parts)
    user_agent = ' '.join(ua_parts)
    return user_agent
def netpoc_init():
    """One-time initialisation: reset monthly counters, set the socket
    timeout, pre-build the user agent and log every active setting."""
    global last_date, target_server_criteria, run_interval
    global latitude, longitude, contract_id, contract_nonce, device_id, debug_flag
    global total_volume_MB_month, max_volume_MB_month
    global mqtt_broker, mqtt_tls, mqtt_port, mqtt_client_id
    #global wiotp_domain, wiotp_org_id, wiotp_device_auth_token, wiotp_device_type
    clear_monthly_data()
    # Global default timeout for all socket operations (seconds).
    socket.setdefaulttimeout(10)
    # Pre-cache the user agent string
    build_user_agent()
    last_date = datetime.datetime.now()
    # Log the settings we are running with
    utils.print_('netspeed_edge.py: Running with these settings:')
    utils.print_('netspeed_edge.py: Target network speed test server: %s' %
                 target_server_criteria)
    utils.print_('netspeed_edge.py: Run interval: %d' % run_interval)
    utils.print_('netspeed_edge.py: Monthly bandwidth cap: %d' %
                 max_volume_MB_month)
    utils.print_('netspeed_edge.py: Ping interval: %d' % PING_INTERVAL)
    utils.print_('netspeed_edge.py: Latitude: %s' % latitude)
    utils.print_('netspeed_edge.py: Longitude: %s' % longitude)
    utils.print_('netspeed_edge.py: Horizon agreement id: %s' % contract_id)
    utils.print_('netspeed_edge.py: Horizon hash: %s' % contract_nonce)
    utils.print_('netspeed_edge.py: Horizon device id: %s' % device_id)
    if 'HZN_EXCHANGE_URL' in os.environ:
        utils.print_('netspeed_edge.py: Horizon exchange URL: %s' %
                     os.environ['HZN_EXCHANGE_URL'])
    utils.print_('netspeed_edge.py: MQTT broker hostname: %s' % mqtt_broker)
    utils.print_('netspeed_edge.py: MQTT broker port: %s' % mqtt_port)
    utils.print_('netspeed_edge.py: MQTT broker PEM file: %s' % mqtt_ca_file)
    utils.print_('netspeed_edge.py: REG_MAX_RETRIES: %d' % REG_MAX_RETRIES)
    utils.print_('netspeed_edge.py: REG_RETRY_DELAY: %d' % REG_RETRY_DELAY)
    utils.print_('netspeed_edge.py: REG_SUCCESS_SLEEP: %d' %
                 REG_SUCCESS_SLEEP)
    utils.print_('netspeed_edge.py: SEND_MAX_RETRIES: %d' % SEND_MAX_RETRIES)
    utils.print_('netspeed_edge.py: SEND_RETRY_DELAY: %d' % SEND_RETRY_DELAY)
    utils.print_('netspeed_edge.py: SPEEDTEST_MAX_RETRIES: %d' %
                 SPEEDTEST_MAX_RETRIES)
    utils.print_('netspeed_edge.py: SPEEDTEST_RETRY_DELAY: %d' %
                 SPEEDTEST_RETRY_DELAY)
def speedtestscheduler():
    """Gets speed data and schedules itself to run again at the next interval.

    NOTE(review): indentation reconstructed from a collapsed source —
    confirm the branch nesting below against the upstream original.
    """
    global total_volume_MB_month
    global max_volume_exceeded, max_mbps_exceeded
    global last_date
    global policy_flag
    # Re-arm the timer first so the next run happens even if this one fails.
    threading.Timer(run_interval, speedtestscheduler).start()
    current_date = datetime.datetime.now()
    if debug_flag:
        utils.print_('\nnetspeed_edge.py: Current date: ',
                     current_date.strftime('%Y-%m-%d %H:%M:%S'))
        utils.print_('netspeed_edge.py: Last date: ',
                     last_date.strftime('%Y-%m-%d %H:%M:%S'))
    if not (policy_flag):
        # do not check policy, perform netspeed test
        testresults = speedtest_with_retry()
        if not testresults:
            utils.print_(
                'netspeed_edge.py: Error: speed test failed after maximum retries.'
            )
        return
    """ if new month, clear all cumulative values and policy exceeded flags """
    if (current_date.month != last_date.month):
        if (max_volume_exceeded | max_mbps_exceeded):
            if debug_flag:
                utils.print_('netspeed_edge.py: resuming network tests.')
        """ Clear monthly data and perform first network test of the month """
        clear_monthly_data()
        last_date = current_date
    else:
        """ Check policy exceeded flags """
        if (max_volume_exceeded | max_mbps_exceeded):
            return
    """ Perform netspeed test """
    testresults = speedtest_with_retry()
    if not testresults:
        utils.print_(
            'netspeed_edge.py: Error: speed test failed after maximum retries.'
        )
        return
    """ Check data usage """
    if (total_volume_MB_month > max_volume_MB_month):
        if debug_flag:
            utils.print_(
                'netspeed_edge.py: Send volume exceeded. Total upload %0.3f MB > %0.3f'
                'Suspending network tests...' %
                (total_volume_MB_month, max_volume_MB_month))
        max_volume_exceeded = 1
def pingstatus():
    """Gets ping latency info and schedules itself to run again at the next
    interval.

    Gathers uptime/load, memory, a single ping to www.ibm.com and disk
    usage via shell tools, then publishes the summary as JSON to mqtt
    and/or a file depending on the flags.

    NOTE(review): subprocess.check_output returns bytes on Python 3, while
    the regex and `in` checks below use str — this looks written for
    Python 2; confirm the target interpreter.
    """
    global contract_id, device_id, json_filename
    # Re-arm the timer first so the next run is scheduled regardless.
    threading.Timer(PING_INTERVAL, pingstatus).start()
    timestamp = datetime.datetime.now()
    # Uptime + load averages from `uptime`.
    uptime_raw = subprocess.check_output(["uptime"])
    m = re.search('(?<=up ).+', uptime_raw)
    uptime_m = m.group(0)
    uptime_db_col = uptime_m.strip()
    load_avg_list = uptime_db_col.split('average:')[-1].strip().split(',')
    uptime_dict = {
        'uptime': uptime_db_col.split(',')[0],
        'load_avg': [float(la.strip()) for la in load_avg_list]
    }
    # Memory summary from `free -mh` (second line = "Mem:" row).
    free_raw = subprocess.check_output(["free", "-mh"])
    free_raw_lines = free_raw.splitlines()
    free_db_col = ''
    if len(free_raw_lines) >= 1:
        mem_cols = free_raw_lines[1].split()
        if len(mem_cols) >= 6:
            free_db_col = 'total:' + str(mem_cols[1]) + ', free:' + str(
                mem_cols[2]) + ', shared:' + str(
                    mem_cols[3]) + ', buffers:' + str(
                        mem_cols[4]) + ', cached:' + str(mem_cols[5])
    # Single ping; extract the summary and min/avg/max line.
    ping_raw = subprocess.check_output(["ping", "-c", "1", "www.ibm.com"])
    ping_raw_lines = ping_raw.splitlines()
    ping_db_col = ''
    matching_lines = [
        line for line in ping_raw_lines if 'ping statistics ---' in line
    ]
    # make sure we got summary stats
    if len(matching_lines) > 0:
        ping_db_col = matching_lines[0].strip()
    matching_lines = [
        line for line in ping_raw_lines if 'round-trip min/avg/max' in line
    ]
    # get the stats
    ping_dict = {}
    if len(matching_lines) > 0:
        ping_db_col = ping_db_col + matching_lines[0].strip()
        # should be [min, avg, max]
        ping_list = matching_lines[0].split('=')[-1].strip().split('/')[-3:]
        if len(ping_list) > 0:
            ping_dict = {
                'min': float(ping_list[0].strip()),
                'avg': float(ping_list[1].strip()),
                'max': float(ping_list[2].strip(' ms'))
            }
    # Disk usage summary from `df -h` (header row dropped).
    df_raw = subprocess.check_output(["df", "-h"])
    df_raw_lines = df_raw.splitlines()
    df_raw_lines.pop(0)
    df_db_col = ''
    df_lines = ''
    for l in df_raw_lines:
        df_cols = l.split()
        if len(df_cols) >= 6:
            df_line = '[' + df_cols[0] + ' size:' + df_cols[
                1] + ', used:' + df_cols[2] + ', avail:' + df_cols[
                    3] + ', use%:' + df_cols[4] + ', mnt:' + df_cols[5] + ']'
            df_lines += df_line
    df_db_col = df_lines.strip()
    # Assemble the payload: 't' = unix timestamp, 'r' = the readings.
    netspeedping = {
        'uptime': uptime_dict,
        'ping_ms': ping_dict,
        'contract_id': contract_id,
        'device_id': device_id
    }
    networkdata = {
        't': calendar.timegm(timestamp.timetuple()),
        'r': netspeedping
    }
    jsonpayload = json.dumps(networkdata)
    if (mqtt_flag):
        post_networkdata(jsonpayload, event_id='netspeed-ping')
    if (file_flag):
        # NOTE(review): dumps the already-serialised string again, producing
        # a JSON-encoded string in the file rather than a JSON object.
        jsonfile = open(json_filename, 'w')
        json.dump(jsonpayload,
                  jsonfile,
                  sort_keys=True,
                  indent=4,
                  ensure_ascii=False)
        jsonfile.write('\n')
        jsonfile.close()
    if debug_flag:
        utils.print_('netspeed_edge.py: ', str(jsonpayload))
## Get edge device env var's for WIoTP publish # Check primary env vars first hzn_organization = utils.check_env_var( 'HZN_ORGANIZATION') # automatically passed in by Horizon #wiotp_device_auth_token = utils.check_env_var('WIOTP_DEVICE_AUTH_TOKEN') # note: this is no longer needed because we can now send msgs an an app to edge-connector unauthenticated, as long as we are local. hzn_device_id = utils.check_env_var( 'HZN_DEVICE_ID', utils.get_serial() ) # automatically passed in by Horizon. Wiotp automatically gives this a value of: g@mygwtype@mygw # When the Workload is deployed by WIoTP-Horizon; HZN_DEVICE_ID ~= 'g@mygwtype@mygw'. ids = hzn_device_id.split('@') if len(ids) == 3: class_id, device_type, device_id = ids # the class id is not actually used anymore else: utils.print_("Error: HZN_DEVICE_ID must have the format: g@mygwtype@mygw") #utils.print_("Workload config.py: Optional override environment variables:") #utils.print_("Workload config.py: WIOTP_CLASS_ID=" + utils.check_env_var('WIOTP_CLASS_ID', '', False)) #utils.print_("Workload config.py: WIOTP_DEVICE_TYPE=" + utils.check_env_var('WIOTP_DEVICE_TYPE', '', False)) #utils.print_("Workload config.py: WIOTP_DEVICE_ID=" + utils.check_env_var('WIOTP_DEVICE_ID', '', False)) utils.print_("Workload config.py: Derived variables:") #utils.print_("Workload config.py: CLASS_ID=" + class_id) utils.print_("Workload config.py: DEVICE_TYPE=" + device_type) utils.print_("Workload config.py: DEVICE_ID=" + device_id) ## Environment variables that can optionally be set, or default # set in the pattern deployment_overrides field if you need to override wiotp_domain = utils.check_env_var('WIOTP_DOMAIN', 'internetofthings.ibmcloud.com', False)
def myspeedtest():
    """ tests edge device's network speed using speedtest.net test

    Selects a target server according to ``target_server_criteria``
    (closest / fastest / random), measures download and upload speed,
    publishes the results and tracks monthly data usage. Returns the
    results dict, or None on failure (caller retries).

    NOTE(review): indentation reconstructed from a collapsed source —
    confirm branch nesting against the upstream original.
    """
    global total_volume_MB_month, debug_flag, mqtt_flag
    global latitude, longitude, contract_id, device_id, jsonfile, file_flag
    global netpoc_error
    if debug_flag:
        utils.print_(
            'netspeed_edge.py: Retrieving speedtest.net configuration...')
    try:
        config = getConfig()
        if not config:
            # When getConfig() hits an error, it prints it and returns None.
            # Returning None here will cause our caller to retry.
            return None
    except URLError:
        # getConfig() catches this, but leaving it here for safety
        if debug_flag:
            utils.print_(
                'netspeed_edge.py: Cannot retrieve speedtest configuration')
        return None
    if debug_flag:
        utils.print_(
            'netspeed_edge.py: Retrieving speedtest.net server list...')
    if (target_server_criteria == 'closest') | (target_server_criteria ==
                                                'fastest'):
        if debug_flag:
            utils.print_('netspeed_edge.py: Testing from %(isp)s (%(ip)s)...'
                         % config['client'])
        servers = closestServers(config['client'])  # get top 5 closest servers
        if (target_server_criteria == 'fastest'):
            if debug_flag:
                utils.print_(
                    'netspeed_edge.py: Selecting best server based on latency...'
                )
            # get server with lowest latency from 5 closest servers
            best = getBestServer(servers)
        else:
            if debug_flag:
                utils.print_(
                    'netspeed_edge.py: Selecting best server based on distance...'
                )
            # looks like this is using same criteria as fastest??
            best = getBestServer(servers)
    elif (target_server_criteria == 'random'):
        # select random server
        if debug_flag:
            utils.print_('netspeed_edge.py: Selecting random server ...')
        servers = closestServers(config['client'], True)  # get full list of servers
        serverrange = len(servers)
        targetserver = randint(0, serverrange - 1)
        serverid = servers[targetserver]['id']
        if debug_flag:
            utils.print_('netspeed_edge.py: server[%d] out of %d: %s %s \n' %
                         (targetserver, serverrange, serverid,
                          servers[targetserver]['name']))
            utils.print_('netspeed_edge.py: Testing from %(isp)s (%(ip)s)...'
                         % config['client'])
        try:
            best = getBestServer(
                filter(lambda x: x['id'] == serverid, servers))
        except IndexError as e:
            utils.print_('netspeed_edge.py: Invalid server ID: %s' % str(e))
            return None
    # NOTE(review): if target_server_criteria is none of the handled values,
    # `best` is unbound and the code below raises NameError — confirm the
    # criteria value is validated upstream.
    timestamp = datetime.datetime.now()  # get time of test
    if debug_flag:
        utils.print_((
            'netspeed_edge.py: Hosted by %(sponsor)s (%(name)s) [%(d)0.2f km]: '
            '%(latency)s ms' % best).encode('utf-8', 'ignore'))
    # sizes = [350, 500, 750, 1000, 1500, 2000, 2500, 3000, 3500, 4000]
    sizes = [500, 1000, 2000]
    # Two download URLs per image size, derived from the server's base URL.
    urls = []
    for size in sizes:
        for i in range(0, 2):
            urls.append('%s/random%sx%s.jpg' %
                        (os.path.dirname(best['url']), size, size))
    if debug_flag:
        utils.print_('netspeed_edge.py: Testing download speed', end='')
    download_metrics = downloadSpeed(urls, not (debug_flag))
    dlspeed = download_metrics['speed_Mbs']
    # Upload payloads: 25 uploads each of 0.25 MB and 0.5 MB.
    sizesizes = [int(.25 * 1000 * 1000), int(.5 * 1000 * 1000)]
    sizes = []
    for size in sizesizes:
        for i in range(0, 25):
            sizes.append(size)
    if debug_flag:
        utils.print_()
        utils.print_('netspeed_edge.py: Testing upload speed', end='')
    upload_metrics = uploadSpeed(best['url'], sizes, not (debug_flag))
    ulspeed = upload_metrics['speed_Mbs']
    """ if edge device lat and lon not available, use value computed by nettest """
    if (latitude == DEFAULT_LAT):
        latitude = config['client']['lat']
    if (longitude == DEFAULT_LON):
        longitude = config['client']['lon']
    # A single ping sample — min/max/avg are all the same measured latency.
    ping_dict = {
        "host": best['url'],
        "min": float(best['latency']),
        "max": float(best['latency']),
        "avg": float(best['latency'])
    }
    netspeedresults = {
        'upload_Mbps': round(ulspeed, 4),
        'download_Mbps': round(dlspeed, 4),
        'ping_ms': ping_dict,
        'distance_km': round(best['d'], 4),
        'targetserver':
        [best['sponsor'], best['name'], best['country'], best['url']],
        'latitude': float(latitude),
        'longitude': float(longitude),
        'device_id': device_id
    }
    networkdata = {
        't': calendar.timegm(timestamp.timetuple()),
        'r': netspeedresults
    }
    jsonpayload = json.dumps(networkdata)
    if (mqtt_flag):
        post_networkdata(jsonpayload, event_id='netspeed-speedtest')
    else:
        if debug_flag:
            utils.print_('netspeed_edge.py: ' + str(jsonpayload))
    if (file_flag):
        jsonfile = open('./netspeedresults.json', 'w')
        json.dump(networkdata,
                  jsonfile,
                  sort_keys=True,
                  indent=4,
                  ensure_ascii=False)
        jsonfile.write('\n')
        jsonfile.close()
    """ keep track of data usage per month """
    total_volume_MB_month += upload_metrics['data_MB']
    total_volume_MB_month += download_metrics['data_MB']
    if debug_flag:
        utils.print_('netspeed_edge.py: Total BW per month (MB): %0.3f MB \n'
                     'netspeed_edge.py: ' % (total_volume_MB_month))
    return netspeedresults
def export_results(p: Problem, f_out: str):
    """Write the computed results of problem *p* to file *f_out*.

    Currently a stub: it only logs the export; no file is written yet.
    """
    message = "Exporting results to {}".format(f_out)
    print_ssec(message)
    print_("Exported.")
def closestServers(client, all=False):
    """Determine the 5 closest speedtest.net servers based on geographic distance

    Tries each mirror URL until one yields a parsable server list. Returns
    the servers sorted by distance to *client* — the closest 5, or the
    whole sorted list when ``all`` is True. Exits the process when no
    mirror works.
    """
    global netpoc_error
    # NOTE(review): scheme-less URLs — build_request presumably prepends
    # http/https; confirm against its definition.
    urls = [
        '://www.speedtest.net/speedtest-servers-static.php',
        '://c.speedtest.net/speedtest-servers-static.php',
        '://www.speedtest.net/speedtest-servers.php',
        '://c.speedtest.net/speedtest-servers.php',
    ]
    errors = []
    servers = {}  # distance -> list of server attribute dicts
    for url in urls:
        try:
            request = build_request(url)
            uh, e = catch_request(request)
            if e:
                errors.append('%s' % e)
                raise SpeedtestCliServerListError
            # Read the response in 10 KB chunks until EOF.
            serversxml = []
            while 1:
                serversxml.append(uh.read(10240))
                if len(serversxml[-1]) == 0:
                    break
            if int(uh.code) != 200:
                uh.close()
                raise SpeedtestCliServerListError
            uh.close()
            try:
                try:
                    # ElementTree path.
                    root = ET.fromstring(''.encode().join(serversxml))
                    elements = root.getiterator('server')
                except AttributeError:
                    # Python3 branch
                    # Fallback to minidom when the ET API is unavailable.
                    root = DOM.parseString(''.join(serversxml))
                    elements = root.getElementsByTagName('server')
            except SyntaxError:
                raise SpeedtestCliServerListError
            for server in elements:
                # ET exposes .attrib; minidom exposes .attributes.
                try:
                    attrib = server.attrib
                except AttributeError:
                    attrib = dict(list(server.attributes.items()))
                d = distance(
                    [float(client['lat']), float(client['lon'])],
                    [float(attrib.get('lat')), float(attrib.get('lon'))])
                attrib['d'] = d
                if d not in servers:
                    servers[d] = [attrib]
                else:
                    servers[d].append(attrib)
            del root
            del serversxml
            del elements
        except SpeedtestCliServerListError:
            # This mirror failed — try the next URL.
            continue
        # We were able to fetch and parse the list of speedtest.net servers
        if servers:
            break
    if not servers:
        if debug_flag:
            utils.print_(
                'netspeed_edge.py: Failed to retrieve list of speedtest.net servers:\n\n %s'
                % '\n'.join(errors))
        netpoc_error = 'netx0002'  # cannot get list of closest servers
        sys.exit(1)
    closest = []
    for d in sorted(servers.keys()):
        # sort servers based on distance
        for s in servers[d]:
            closest.append(s)
            if len(closest) == 5 and not all:
                break
        else:
            continue
        # Inner break reached (5 collected) — stop the outer loop too.
        break
    del servers
    return closest
def uploadSpeed(url, sizes, quiet=False):
    """Function to launch FilePutter threads and calculate upload speeds

    Starts one FilePutter thread per payload size (bounded to 6 in flight
    by the queue), waits for all of them and returns a dict with
    ``speed_Mbs``, ``data_MB`` and ``time_s``.

    NOTE(review): Thread.isAlive()/isSet() were removed in Python 3.9 —
    this code appears to target an older interpreter; confirm.
    """
    upload_metrics = {
        'speed_Mbs': 0.0,  # Mbits / sec
        'data_MB': 0.0,  # MBytes
        'time_s': 0.0  # seconds
    }
    start = timeit.default_timer()

    def producer(q, sizes):
        # Spawn one uploader thread per payload; the bounded queue applies
        # backpressure so at most 6 are in flight.
        for size in sizes:
            thread = FilePutter(url, start, size)
            thread.start()
            q.put(thread, True)
            if not quiet and not shutdown_event.isSet():
                sys.stdout.write('.')
                sys.stdout.flush()

    finished = []

    def consumer(q, total_sizes):
        # Join each uploader and collect its byte count.
        while len(finished) < total_sizes:
            thread = q.get(True)
            while thread.isAlive():
                thread.join(timeout=0.1)
            finished.append(thread.result)
            del thread

    q = Queue(6)
    prod_thread = threading.Thread(target=producer, args=(q, sizes))
    cons_thread = threading.Thread(target=consumer, args=(q, len(sizes)))
    # Timer restarted here so setup cost is excluded from the measurement.
    start = timeit.default_timer()
    prod_thread.start()
    cons_thread.start()
    while prod_thread.isAlive():
        prod_thread.join(timeout=0.1)
    while cons_thread.isAlive():
        cons_thread.join(timeout=0.1)
    time_s = (timeit.default_timer() - start)  # total upload time in sec
    data_B = sum(finished)  # total upload data in bytes
    # netspeed edge modifications
    upload_speed = data_B / time_s  # upload speed in Bytes/sec
    upload_metrics = {
        'speed_Mbs': ((upload_speed * 8) / 1024 / 1024),  # speed in Mbits/sec
        'data_MB': (data_B / 1024 / 1024),  # data in MBytes
        'time_s': time_s  # time in seconds
    }
    if debug_flag:
        utils.print_(
            '\nnetspeed_edge.py: Data (Bytes): %d Time(sec): %0.3f Speed(Bytes/sec): %0.3f'
            % (data_B, time_s, upload_speed))
        utils.print_(
            'netspeed_edge.py: Upload Volume(MB): %(data_MB)0.3f Speed(Mbps): %(speed_Mbs)0.3f'
            % upload_metrics)
    return upload_metrics
def main():
    """Parse CLI options, initialise the service and start both schedulers.

    NOTE(review): reconstructed from a collapsed source; the "Could not
    open file" message literal was split mid-string — confirm its exact
    wording against the upstream original.
    """
    global shutdown_event, target_server_criteria, run_interval
    global send_policy_MB_month, receive_policy_MB_month
    global policy_flag, mqtt_flag, debug_flag, file_flag, json_filename
    global netpoc_error
    description = (
        'summit poc network test to measure network bandwidth for edge device \n '
        'based on speedtest-cli: https://github.com/sivel/speedtest-cli.\n'
        '---------------------------------------------------------------------\n'
        'ssh://https://github.com/open-horizon/examples ... netspeed...')
    parser = ArgParser(description=description)
    # Give optparse.OptionParser an `add_argument` method for
    # compatibility with argparse.ArgumentParser
    try:
        parser.add_argument = parser.add_option
    except AttributeError:
        pass
    parser.add_argument('--policy',
                        action='store_true',
                        help='apply service policy')
    parser.add_argument(
        '--target',
        default='closest',
        type=str,
        help=
        'override server criteria: closest, fastest, random. Default closest')
    parser.add_argument('--verbose',
                        action='store_true',
                        help='verbose output; send test results to std output')
    parser.add_argument('--file',
                        action='store_true',
                        help='write test results to json file')
    parser.add_argument('--mqtt',
                        action='store_true',
                        help='send test results to mqtt')
    options = parser.parse_args()
    # optparse returns an (options, args) tuple; argparse returns a namespace.
    if isinstance(options, tuple):
        args = options[0]
    else:
        args = options
    del options
    if (args.verbose):
        debug_flag = 1
    if (args.policy):
        policy_flag = 1
    if (args.mqtt):
        mqtt_flag = 1
    if args.target:
        target_server_criteria = args.target
    shutdown_event = threading.Event()
    signal.signal(signal.SIGINT, ctrl_c)
    netpoc_init()
    if args.file:
        """ values for testing purposes """
        file_flag = 1
        try:
            json_filename = './netspeedresults.json'
            # Ensure that we can open the json dump file
            jsonfile = open(json_filename, 'w')
            jsonfile.close()
        except IOError:
            # Fall back to stdout when the results file is not writable.
            utils.print_(
                'netspeed_edge.py: Could not open file... writing results to std output\n'
            )
            file_flag = 0
            debug_flag = 1
    try:
        # Every time these run, they schedule a timer to run themselves again at the next interval
        utils.print_("netspeed_edge.py: Netspeed running...")
        pingstatus()
        speedtestscheduler()
    except KeyboardInterrupt:
        if debug_flag:
            utils.print_('\nnetspeed_edge.py: Cancelling...')
        netpoc_error = 'netx0005'  # unexpected interrupt
def search_digit(self):
    """Extract every run of digits from the utterance.

    Stores the matches (as strings) in ``self.digit`` and echoes them,
    space-joined, via ``print_``.
    """
    # FIX: raw string for the regex ('\d' is an invalid escape and a
    # DeprecationWarning in modern Python) and drop the redundant
    # `self.digit = []` that was immediately overwritten.
    self.digit = re.findall(r'\d+', self.utterance)
    print_("\nDigit", " ".join(self.digit))
import subprocess
import urllib.request
import zipfile
import shutil
import os

import utils

# First-run bootstrap: download and unpack sonar-scanner, and make sure the
# folders the commit hook expects exist.
url_sonar_scanner = "https://sonarsource.bintray.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-3.0.3.778-windows.zip"
sonar_scanner_zip = "C:/Sonar/sonar-scanner.zip"

utils.print_(">> Configuring First Run ...")
directory = "C:/Sonar"
os.makedirs(directory, exist_ok=True)

if not os.path.exists("C:/Sonar/sonar-scanner"):
    if not os.path.exists(sonar_scanner_zip):
        utils.print_(">> Downloading Sonar Scanner ...")
        urllib.request.urlretrieve(url_sonar_scanner, sonar_scanner_zip)
    if os.path.exists(sonar_scanner_zip):
        # FIX: the context variable was named `zipfile`, shadowing the
        # imported `zipfile` module for the rest of the script.
        with zipfile.ZipFile(sonar_scanner_zip, "r") as scanner_archive:
            scanner_archive.extractall("C:/Sonar")
        os.remove(sonar_scanner_zip)
    if os.path.exists("C:/Sonar/sonar-scanner-3.0.3.778-windows"):
        # Normalise the versioned extraction folder to a stable name.
        os.rename("C:/Sonar/sonar-scanner-3.0.3.778-windows",
                  "C:/Sonar/sonar-scanner")

directory = "C:/Sonar/issues-report"
os.makedirs(directory, exist_ok=True)
file = "C:/Sonar/template/template.sonarsource.properties"