def load_k2_data_into_spaces(k2_spaces, k2_data):
    """
    Load the k2 data into the k2_spaces, then return the list of spaces
    passed.

    k2_spaces -- list of space dicts, updated in place
    k2_data -- data previously retrieved from the K2 server

    Spaces whose K2 reference cannot be resolved have their labstats
    fields cleaned and are otherwise left untouched.
    """
    for space in k2_spaces:
        # get the k2 data by id if applicable, name if not
        if "k2_id" in space["extended_info"]:
            data = get_k2_data_by_id(k2_data,
                                     space["extended_info"]["k2_id"])
        elif "k2_name" in space["extended_info"]:
            data = get_k2_data_by_name(k2_data,
                                       space["extended_info"]["k2_name"])
        else:
            # Neither reference key is present. Previously `data` was left
            # unset here, which raised NameError on the first iteration or
            # silently reused the previous iteration's value.
            data = None

        if data is None:
            logger.warning("Data not found as referenced by spot! Cleaning"
                           " and continuing")
            # Clean this space's labstats fields. The original passed
            # `data`, which is always None on this branch; every other
            # call site passes a list of spaces.
            utils.clean_spaces_labstats([space])
            continue

        # get the total and available number of computers
        total = data["total"]
        available = total - data["count"]

        # update the spot
        space["extended_info"]["auto_labstats_total"] = total
        space["extended_info"]["auto_labstats_available"] = available

    # return the (mutated) list, as the docstring promises
    return k2_spaces
def load_labstats_data(spaces, labstats_data, page_dict):
    """
    Loads the data retrieved from the online labstats service into the
    spaces.

    spaces -- list of space dicts, updated in place
    labstats_data -- data returned by the online labstats service
    page_dict -- maps labstats page ids to space ids
    """
    for page_id, space_id in page_dict.iteritems():
        # look the space up by the id recorded in page_dict
        spot = utils.get_space_from_spaces(spaces, space_id)
        if spot is None:
            logger.warning("space " + str(space_id) + " missing from spaces!")
            continue

        info = spot["extended_info"]
        label = info["labstats_label"]

        # find the labstats entry that matches this space's label
        entry = get_labstat_entry_by_label(labstats_data, label)
        if entry is None:
            logger.warning(
                "Labstat entry not found for label %s and space #" +
                str(spot['id']), label)
            utils.clean_spaces_labstats([spot])
            continue

        # copy the new counts into extended_info; offline machines are
        # added on top of the available count
        info["auto_labstats_available"] = entry["Available"]
        info["auto_labstats_available"] += entry["Offline"]
        info["auto_labstats_total"] = entry["Total"]
def load_labstats_data(spaces, labstats_data, page_dict):
    """
    Loads the data retrieved from the online labstats service into the
    spaces.

    spaces -- list of space dicts, updated in place
    labstats_data -- data returned by the online labstats service
    page_dict -- maps labstats page ids to space ids
    """
    for page_id, space_id in page_dict.iteritems():
        # get the space by its id in page_dict
        space = utils.get_space_from_spaces(spaces, space_id)
        if space is None:
            # str() guards against space_id being an int, which would
            # raise TypeError on concatenation (the sibling copy of this
            # function already does this)
            logger.warning("space " + str(space_id) + " missing from spaces!")
            continue
        # retrieve the labstat info for this space
        space_labstat = get_labstat_entry_by_label(
            labstats_data, space["extended_info"]["labstats_label"])
        if space_labstat is None:
            # str() here too: space ids are not guaranteed to be strings
            logger.warning(
                "Labstat entry not found for label %s and space #" +
                str(space['id']),
                space["extended_info"]["labstats_label"])
            utils.clean_spaces_labstats([space])
            continue
        # load the dict into a variable for easy access
        extended_info = space["extended_info"]
        # load the new labstats info into the space's extended_info;
        # offline machines are counted on top of the available count
        extended_info["auto_labstats_available"] = space_labstat["Available"]
        extended_info["auto_labstats_available"] += space_labstat["Offline"]
        extended_info["auto_labstats_total"] = space_labstat["Total"]
def get_endpoint_data(k2_spaces):
    """
    This method retrieves the data from the K2 server and then loads it
    into the spaces, returning them to be updated.

    k2_spaces -- list of space dicts to update in place

    Raises Exception if the required K2_URL setting is missing.
    """
    if not hasattr(settings, 'K2_URL'):
        raise Exception("Required setting missing: K2_URL")

    k2_data = get_k2_data()

    # if the k2 server is not working, then clean the spaces and return
    if k2_data is None:
        utils.clean_spaces_labstats(k2_spaces)
        # `logger.errror` was a typo that raised AttributeError here
        # instead of logging the failure
        logger.error("K2 data retrieval failed!")
        return

    load_k2_data_into_spaces(k2_spaces, k2_data)
def get_endpoint_data(labstats_spaces):
    """
    Retrieves the data relevant to the spaces passed to this method and
    then loads it into the spaces provided, which are then returned.
    """
    customers = get_customers(labstats_spaces)

    for customer_id in customers:
        pages = customers[customer_id]
        for page_id in pages:
            response = get_online_labstats_data(customer_id, page_id)

            # without labstats data there is nothing to merge: wipe the
            # stale labstats info from every space and bail out
            if response is None:
                utils.clean_spaces_labstats(labstats_spaces)
                return

            load_labstats_data(labstats_spaces, response, pages[page_id])
def test_clean_labstats(self):
    """
    Verifies that utils.clean_spaces_labstats produces the expected
    cleaned output, so that on error we can strip outdated labstats info
    without corrupting the data.
    """
    raw_data = self.load_json_file('seattle_labstats.json')
    expected = self.load_json_file('seattle_labstats_cleaned.json')

    actual = utils.clean_spaces_labstats(raw_data)

    self.assertEqual(actual, expected)
def get_endpoint_data(labstats_spaces):
    """
    Takes in a list of spaces from the labstats_daemon and then retrieves
    their labstats information from the seattle labstats service, at
    which point the data is merged and returned.

    labstats_spaces -- list of space dicts to update in place
    """
    # Updates the num_machines_available extended_info field
    # for spaces that have corresponding labstats.
    try:
        groups = get_seattle_labstats_data()
    except SOAPTimeoutError:
        # exc_info=1 attaches the traceback; no need to bind the exception
        logger.warning("SOAPTimeoutError encountered, Seattle labstats"
                       " timed out", exc_info=1)
        return

    # if data retrieval failed, clean the spaces and return
    # (removed unused `upload_spaces = []` local)
    if groups is None:
        utils.clean_spaces_labstats(labstats_spaces)
        return

    load_labstats_data(labstats_spaces, groups)
def load_endpoint_data(self, endpoint):
    """
    This method handles the updating of an endpoint using the standard
    interface.

    endpoint -- object providing get_space_search_parameters,
    validate_space and get_endpoint_data.
    """
    try:
        url = endpoint.get_space_search_parameters()
        resp, content = self.client.request(url, 'GET')
        if resp.status == 401:
            logger.error("Labstats daemon has outdated OAuth credentials!")
            return
        spaces = json.loads(content)
    except ValueError:
        # the adjacent literals previously lacked a separating space and
        # logged "...passed fromspotseeker_server"
        logger.warning(
            "JSON Exception found! Malformed data passed from "
            "spotseeker_server", exc_info=1)
        return

    to_remove = []
    # validate spaces against utils.validate_space
    for space in spaces:
        if not utils.validate_space(space):
            to_remove.append(space)

    # remove noncompliant spaces
    for space in to_remove:
        spaces.remove(space)

    # get spaces that don't follow the endpoint standards
    to_clean = []
    for space in spaces:
        try:
            endpoint.validate_space(space)
        except Exception:
            logger.warning("Space invalid", exc_info=1)
            utils.clean_spaces_labstats([space])
            to_clean.append(space)

    # if our endpoint rejects spaces, then save them until after the update
    for space in to_clean:
        spaces.remove(space)

    # send the spaces to be modified to the endpoint
    endpoint.get_endpoint_data(spaces)

    # add the to_clean spaces back in
    for space in to_clean:
        spaces.append(space)

    # upload the space data to the server
    response = utils.upload_data(spaces)

    # log any failures
    if response is not None and response['failure_descs']:
        errors = {}
        for failure in response['failure_descs']:
            if isinstance(failure['freason'], list):
                errors.update({failure['flocation']: []})
                for reason in failure['freason']:
                    errors[failure['flocation']].append(reason)
            else:
                errors.update({failure['flocation']: failure['freason']})
        logger.warning("Errors putting to the server: %s", str(errors))
def load_endpoint_data(self, endpoint):
    """
    This method handles the updating of an endpoint using the standard
    interface.

    endpoint -- object providing get_space_search_parameters,
    validate_space and get_endpoint_data.
    """
    try:
        url = endpoint.get_space_search_parameters()
        resp, content = self.client.request(url, 'GET')
        if resp.status == 401:
            logger.error("Labstats daemon has outdated OAuth credentials!")
            return
        spaces = json.loads(content)
    except ValueError:
        # the adjacent literals previously lacked a separating space and
        # logged "...passed fromspotseeker_server"
        logger.warning("JSON Exception found! Malformed data passed from "
                       "spotseeker_server", exc_info=1)
        return

    to_remove = []
    # validate spaces against utils.validate_space
    for space in spaces:
        if not utils.validate_space(space):
            to_remove.append(space)

    # remove noncompliant spaces
    for space in to_remove:
        spaces.remove(space)

    # get spaces that don't follow the endpoint standards
    to_clean = []
    for space in spaces:
        try:
            endpoint.validate_space(space)
        except Exception:
            logger.warning("Space invalid", exc_info=1)
            utils.clean_spaces_labstats([space])
            to_clean.append(space)

    # if our endpoint rejects spaces, then save them until after the update
    for space in to_clean:
        spaces.remove(space)

    # send the spaces to be modified to the endpoint
    endpoint.get_endpoint_data(spaces)

    # add the to_clean spaces back in
    for space in to_clean:
        spaces.append(space)

    # upload the space data to the server
    response = utils.upload_data(spaces)

    # log any failures
    if response is not None and response['failure_descs']:
        errors = {}
        for failure in response['failure_descs']:
            if isinstance(failure['freason'], list):
                errors.update({failure['flocation']: []})
                for reason in failure['freason']:
                    errors[failure['flocation']].append(reason)
            else:
                errors.update({failure['flocation']: failure['freason']})
        logger.warning("Errors putting to the server: %s", str(errors))