def browse_node_for_available_vessels(nodelocation):
    """
    Contact the node at nodelocation and return a list of vesseldicts for each
    available vessel on the node.

    Returns None (after printing the failure) if the node could not be
    contacted, so callers must check for a None result.
    """
    try:
        # Ask the node for information about the vessels on it.
        vesseldict_list = experimentlib.browse_node(nodelocation)

        # Keep only the vessels that are currently available.
        available_vesseldict_list = [vesseldict for vesseldict in vesseldict_list
                                     if is_vessel_available(vesseldict)]

        # Just so we can watch the progress, print some output.
        # We display the nodelocation rather than the nodeid because it's more
        # interesting to look at, even though nodes can change location and this
        # isn't a unique identifier of the node.
        print("On " + nodelocation + " found " +
              str(len(available_vesseldict_list)) + " available vessels")

        return available_vesseldict_list

    # FIX: "except ExcType, e" is legacy Python 2 syntax; "as" is valid on
    # Python 2.6+ and required on Python 3, with identical behavior.
    except experimentlib.NodeCommunicationError as e:
        print("Failure on " + nodelocation + ": " + str(e))
        # Make the previously-implicit None result explicit.
        return None
def poll_data():
    """
    <Purpose>
      This is used to collect and store node/vessel data.
    <Arguments>
      None.
    <Exceptions>
      NodeLocationListEmptyError
        Raised if we are unable to look up any node locations, because then
        there are no nodes to collect information from.
      ExperimentLibError
        Per-node experiment library errors are recorded in fail_dict
        (destined for 'fail_list.dat.gz') instead of being propagated.
    <Side Effects>
      Creates a new folder at the users path (specified above) with a
      timestamp.  Inside that folder will exist 2 gzip files, one called
      'node_list.dat.gz' and one called 'fail_list.dat.gz'; these are
      compressed python pickles of the objects used in this script.
    <Returns>
      None.
    """
    poll_identity = experimentlib.create_identity_from_key_files(
        PATH_TO_EXP_LIB + "stat_lib/perc20.publickey")

    # Creates a list of IPs that are associated with this identity.
    nodelocation_list = experimentlib.lookup_node_locations_by_identity(poll_identity)

    # The format of the timestamp is of the form:
    # YYYYMMDD_HHMM, with single digits zero padded (5 -> 05).
    curr_time = datetime.datetime.now()

    # Converts datetime object to a formatted string.
    folder_name = datetime.datetime.strftime(curr_time, "%Y%m%d_%H%M")
    month_folder = datetime.datetime.strftime(curr_time, "%Y_%m")

    # If a folder does not exist for the month of the poll, it is created.
    if not os.path.isdir(PATH_TO_EXP_LIB + "seattle_node_data/" + month_folder):
        os.mkdir(PATH_TO_EXP_LIB + "seattle_node_data/" + month_folder)

    # Create the folder for this poll data to be stored.
    data_storage_path = (PATH_TO_EXP_LIB + "seattle_node_data/" +
                         month_folder + "/" + folder_name)
    os.mkdir(data_storage_path)

    # Checks to make sure we have node locations to continue with the poll;
    # if not, a NodeLocationListEmptyError is raised.
    # BUG FIX: the original condition was "len(nodelocation_list) < 0", which
    # can never be true since len() is never negative, so the error promised
    # by the docstring was never raised.  Test for an empty list instead.
    if not nodelocation_list:
        raise NodeLocationListEmptyError("Node location list is empty, " +
                                         "poll_data stopped at: " + str(curr_time))

    # Creates initial gzip data files to be saved (appends if needed).
    # BUG FIX: 'ar' is not a valid mode string for gzip.open ('ab' is the
    # documented append-binary mode; 'ar' raises ValueError on Python 3 and
    # only works by accident on Python 2).
    nodes_file = gzip.open(data_storage_path + "/node_list.dat.gz", 'ab')
    fail_file = gzip.open(data_storage_path + "/fail_list.dat.gz", 'ab')

    # The initial data structures to be pickled.
    total_node_dicts = {}
    fail_dict = {}

    for node_ip in nodelocation_list:
        single_node_dict = {}
        single_node_dict['timestamp'] = curr_time
        time_before_browsenode = time.time()

        # If browse_node fails it will throw an ExperimentLibError which means
        # we are not able to communicate with the node, so it is added to the
        # list of failed nodes.
        try:
            list_of_vessel_dicts = experimentlib.browse_node(node_ip)
        # FIX: legacy "except ExcType, err" syntax replaced with "as".
        except experimentlib.SeattleExperimentError as err:
            time_atfail_browsenode = time.time()
            elapsed_time_fail = time_atfail_browsenode - time_before_browsenode
            fail_dict[str(node_ip)] = [err, elapsed_time_fail]
            time_before_browsenode = 0
            time_after_browsenode = 0
            continue

        time_after_browsenode = time.time()
        elapsed_time = time_after_browsenode - time_before_browsenode
        single_node_dict['responsetime'] = elapsed_time
        single_node_dict['nodelocation'] = node_ip
        # The first vessel's dict carries the node-wide 'version' and 'nodeid'.
        single_node_dict['version'] = list_of_vessel_dicts[0].get('version')
        unique_id = list_of_vessel_dicts[0].get('nodeid')
        single_node_dict['nodeid'] = unique_id
        single_node_dict['vessels'] = list_of_vessel_dicts
        total_node_dicts[str(unique_id)] = single_node_dict

    # NOTE(review): in the visible code, nodes_file and fail_file are opened
    # but never written to or closed -- presumably the pickling/close step
    # exists elsewhere or was lost; confirm against the full file.
def poll_data():
    """
    <Purpose>
      This is used to collect and store node/vessel data.
    <Arguments>
      None.
    <Exceptions>
      NodeLocationListEmptyError
        Raised if we are unable to look up any node locations, because then
        there are no nodes to collect information from.
      ExperimentLibError
        Per-node experiment library errors are recorded in fail_dict
        (destined for 'fail_list.dat.gz') instead of being propagated.
    <Side Effects>
      Creates a new folder at the users path (specified above) with a
      timestamp.  Inside that folder will exist 2 gzip files, one called
      'node_list.dat.gz' and one called 'fail_list.dat.gz'; these are
      compressed python pickles of the objects used in this script.
    <Returns>
      None.
    """
    # NOTE(review): this function is defined twice in this file; this later
    # definition shadows the earlier one at import time -- confirm which copy
    # is intended and remove the other.
    poll_identity = experimentlib.create_identity_from_key_files(
        PATH_TO_EXP_LIB + "stat_lib/perc20.publickey")

    # Creates a list of IPs that are associated with this identity.
    nodelocation_list = experimentlib.lookup_node_locations_by_identity(
        poll_identity)

    # The format of the timestamp is of the form:
    # YYYYMMDD_HHMM, with single digits zero padded (5 -> 05).
    curr_time = datetime.datetime.now()

    # Converts datetime object to a formatted string.
    folder_name = datetime.datetime.strftime(curr_time, "%Y%m%d_%H%M")
    month_folder = datetime.datetime.strftime(curr_time, "%Y_%m")

    # If a folder does not exist for the month of the poll, it is created.
    if not os.path.isdir(PATH_TO_EXP_LIB + "seattle_node_data/" + month_folder):
        os.mkdir(PATH_TO_EXP_LIB + "seattle_node_data/" + month_folder)

    # Create the folder for this poll data to be stored.
    data_storage_path = (PATH_TO_EXP_LIB + "seattle_node_data/" +
                         month_folder + "/" + folder_name)
    os.mkdir(data_storage_path)

    # Checks to make sure we have node locations to continue with the poll;
    # if not, a NodeLocationListEmptyError is raised.
    # BUG FIX: the original condition was "len(nodelocation_list) < 0", which
    # can never be true since len() is never negative, so the error promised
    # by the docstring was never raised.  Test for an empty list instead.
    if not nodelocation_list:
        raise NodeLocationListEmptyError("Node location list is empty, " +
                                         "poll_data stopped at: " + str(curr_time))

    # Creates initial gzip data files to be saved (appends if needed).
    # BUG FIX: 'ar' is not a valid mode string for gzip.open ('ab' is the
    # documented append-binary mode; 'ar' raises ValueError on Python 3 and
    # only works by accident on Python 2).
    nodes_file = gzip.open(data_storage_path + "/node_list.dat.gz", 'ab')
    fail_file = gzip.open(data_storage_path + "/fail_list.dat.gz", 'ab')

    # The initial data structures to be pickled.
    total_node_dicts = {}
    fail_dict = {}

    for node_ip in nodelocation_list:
        single_node_dict = {}
        single_node_dict['timestamp'] = curr_time
        time_before_browsenode = time.time()

        # If browse_node fails it will throw an ExperimentLibError which means
        # we are not able to communicate with the node, so it is added to the
        # list of failed nodes.
        try:
            list_of_vessel_dicts = experimentlib.browse_node(node_ip)
        # FIX: legacy "except ExcType, err" syntax replaced with "as".
        except experimentlib.SeattleExperimentError as err:
            time_atfail_browsenode = time.time()
            elapsed_time_fail = time_atfail_browsenode - time_before_browsenode
            fail_dict[str(node_ip)] = [err, elapsed_time_fail]
            time_before_browsenode = 0
            time_after_browsenode = 0
            continue

        time_after_browsenode = time.time()
        elapsed_time = time_after_browsenode - time_before_browsenode
        single_node_dict['responsetime'] = elapsed_time
        single_node_dict['nodelocation'] = node_ip
        # The first vessel's dict carries the node-wide 'version' and 'nodeid'.
        single_node_dict['version'] = list_of_vessel_dicts[0].get('version')
        unique_id = list_of_vessel_dicts[0].get('nodeid')
        single_node_dict['nodeid'] = unique_id
        single_node_dict['vessels'] = list_of_vessel_dicts
        total_node_dicts[str(unique_id)] = single_node_dict

    # NOTE(review): in the visible code, nodes_file and fail_file are opened
    # but never written to or closed -- presumably the pickling/close step
    # exists elsewhere or was lost; confirm against the full file.
def _md5hex(*parts):
    """
    Return the hex md5 digest of the given strings, fed to the hash in order.
    Used to shrink node ids and keys so the log file stays small and readable.
    """
    hashfunc = hashlib.md5()
    for part in parts:
        hashfunc.update(part)
    return hashfunc.hexdigest()


def browse_node(location):
    """
    <Purpose>
      Browses the node located at the given location and returns a condensed
      version of the vessel dict so it can be logged to file.
    <Arguments>
      location:
        The location of the node to browse, given as a string in the format
        used by Experiment Library.
    <Exceptions>
      Any Exception that is thrown by experimentlib.browse_node may be raised.
    <Side Effects>
      None.
    <Returns>
      A dictionary of node and lookup properties of the node located at
      location.  The dictionary contains the following values:
        'nodeid': the md5 hash of the node's id
        'location': the location of the node, almost certainly the passed
            location argument
        'version': the version of seattle running on the node
        'time': when the node information was received
        'latency': the time taken to retrieve the information
        'vesseldicts': a list of vessel dictionaries containing 'vesselname',
            'status', 'ownerkey', and 'userkeys' keys.  The 'ownerkey' and
            'userkeys' are hashed using md5 to conserve space and improve
            human readability.
    """
    # Time the lookup so we can record the node's response latency.
    starttime = time.time()
    vesseldictlist = experimentlib.browse_node(location)
    endtime = time.time()

    nodedict = {}
    # The first vessel's dict carries the node-wide 'nodeid' and 'version'.
    nodedict['nodeid'] = _md5hex(vesseldictlist[0]['nodeid'])
    nodedict['location'] = location
    nodedict['version'] = vesseldictlist[0]['version']
    nodedict['time'] = endtime
    nodedict['latency'] = endtime - starttime
    nodedict['vesseldicts'] = []

    for vesseldict in vesseldictlist:
        newvesseldict = {}
        newvesseldict['vesselname'] = vesseldict['vesselname']
        newvesseldict['status'] = vesseldict['status']

        # Hash all the keys to minimize the size of the log file.  The owner
        # key is hashed from its 'e' then 'n' components, matching the
        # original on-disk format.
        ownerkey = vesseldict['ownerkey']
        newvesseldict['ownerkey'] = _md5hex(str(ownerkey['e']), str(ownerkey['n']))
        newvesseldict['userkeys'] = [_md5hex(str(userkey))
                                     for userkey in vesseldict['userkeys']]

        nodedict['vesseldicts'].append(newvesseldict)

    return nodedict