def _remove_physical_machine(self, machine, timestamp):
    """
    Delete a physical machine's node from the landscape graph.

    :param machine: Machine name (node uuid).
    :param timestamp: Epoch timestamp of the deletion.
    """
    node = self.graph_db.get_node_by_uuid(machine)
    if not node:
        LOG.error("Machine : %s not in the landscape to delete!", machine)
        return
    self.graph_db.delete_node(node, timestamp)
    LOG.info("Machine : %s deleted from landscape", machine)
def get_subgraph(node_id):
    """
    Return the subgraph rooted at the given node id as an HTTP response.

    Query-string options: 'timestamp', 'timeframe', 'geo',
    'filter-these' and 'filter-nodes'.
    """
    LOG.info("Retrieving Subgraph with url %s", request.url)
    timestamp = request.args.get("timestamp")
    time_frame = request.args.get("timeframe", 0)
    want_geo = _bool(request.args.get("geo", False))
    # Filter arguments.
    keep_or_drop = _bool(request.args.get("filter-these", True))
    filter_list = request.args.get("filter-nodes", [])
    # Fetch the subgraph.
    # NOTE(review): 'timestmp' looks like a typo, but it is presumably the
    # graph_db API's actual keyword — confirm before renaming.
    subgraph = LANDSCAPE.graph_db.get_subgraph(node_id,
                                               timestmp=timestamp,
                                               timeframe=time_frame)
    if not subgraph:
        err_msg = "Node with ID '{}', not in the landscape.".format(node_id)
        LOG.error(err_msg)
        abort(400, err_msg)
    if filter_list:
        filter_list = ast.literal_eval(filter_list)
        subgraph = util_graph.filter_nodes(subgraph, filter_list, keep_or_drop)
    if want_geo:
        subgraph = Geo.extract_geo(subgraph)
    return Response(subgraph, mimetype=MIME)
def update_service_container_metrics(self, id, device_id, end_time):
    """
    Record the stop time of a service container metric in CIMI.

    Looks up the service-container-metric entry matching this container
    and device and PUTs an updated 'stop_time' onto it.

    :param id: Container id (shadows the builtin; kept for API compat).
    :param device_id: Device id without the 'device/' prefix.
    :param end_time: Stop time in nanoseconds since the epoch.
    :return: The requests response, or "" if no matching metric exists.
    """
    coll = self.get_collection('service-container-metric')
    coll = coll['serviceContainerMetrics']
    device_id = "device/{0}".format(device_id)
    # Nanoseconds -> seconds. True division (the old floor division
    # dropped the fraction, so the microsecond field was always zero).
    end_time = float(end_time) / 1000000000
    end_time = datetime.utcfromtimestamp(end_time)
    # BUG FIX: the old format appended the microsecond count again after
    # '%f' (which already expands to microseconds), duplicating it.
    json_end_time = end_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    scm_id = None
    for item in coll:
        dev_id = item['device_id']['href']
        cont_id = item['container_id']
        if dev_id == device_id and cont_id == id:
            scm_id = item['id']
            break
    if scm_id:
        url = self.cimi_url + '/' + scm_id
        data = {'stop_time': json_end_time}
        res = requests.put(url,
                           headers=CIMI_SEC_HEADERS,
                           verify=SSL_VERIFY,
                           json=data)
        if res.status_code != 200:
            LOG.error(res.json())
        return res
    else:
        return ""
def _check_conn_variables(user, password, auth_uri, project_name, project_id,
                          user_domain_name, keystone_ver):
    """
    Check that the environment variables have been found.

    Without connection variables to the openstack testbed it is impossible
    to build a landscape of any openstack components, so an exception is
    thrown.

    :param user: Username.
    :param password: Password.
    :param auth_uri: URI to the Openstack testbed.
    :param project_name: Tenant Name.
    :param project_id: Project ID.
    :param user_domain_name: User domain name.
    :param keystone_ver: Keystone version; '3' also requires the project
        id and user domain name.
    :raises ValueError: If any required variable is missing.
    """
    envs = [
        OS_USERNAME, OS_PASSWORD, OS_PROJECT_NAME, OS_PROJECT_ID,
        OS_AUTH_URL, OS_USER_DOMAIN_NAME
    ]
    msg = ""
    if keystone_ver == '3':
        # Keystone v3 needs the full set of six variables.
        if not all([user, password, auth_uri, project_name, project_id,
                    user_domain_name]):
            msg = "Environment variables {e[0]}, {e[1]}, {e[2]}, {e[3]}, {e[4]} " \
                  "and {e[5]} are required".format(e=envs)
    else:
        # Older keystone versions only need the first four.
        if not all([user, password, auth_uri, project_name]):
            msg = "Environment variables {e[0]}, {e[1]}, {e[2]} and {e[3]} are required".format(
                e=envs)
    if msg:
        LOG.error(msg)
        raise ValueError(msg)
def get_collection(self, collection, from_date=None, limit=None,
                   updates=False):
    """
    Fetch a CIMI collection, newest entries first.

    :param collection: Collection name (url path segment).
    :param from_date: If given, only entries after this date.
    :param limit: If given, cap on the number of entries returned.
    :param updates: Order/filter on 'updated' instead of 'created'.
    :return: Decoded JSON body, or an empty dict on failure.
    """
    field_name = "updated" if updates is True else "created"
    date_filter = ""
    if from_date:
        date_filter = '&$filter=' + field_name + '>"%s"' % from_date
    limit_filter = ""
    if limit:
        limit_filter = "&$last={}".format(limit)
    url = (self.cimi_url + '/' + collection + '?$orderby=' + field_name +
           ':desc' + date_filter + limit_filter)
    res = requests.get(url,
                       headers={'slipstream-authn-info': 'internal ADMIN'},
                       verify=SSL_VERIFY)
    if res.status_code != 200:
        LOG.error("Request failed: " + str(res.status_code))
        LOG.error("Response: " + str(res.json()))
        return dict()
    return res.json()
def subscribe_to_event(self, event, collector):
    """
    Register a collector for notification when a named event occurs.

    Unknown events are rejected with an error log; duplicate
    registrations are silently ignored.

    :param event: Event name.
    :param collector: Collector class.
    """
    if event not in self.events:
        LOG.error("Unknown event: %s. Not Registered.", event)
        return
    subscribers = self.events.get(event)
    if collector not in subscribers:
        subscribers.append(collector)
def listen_for_events(self):
    """
    Entry point for the child event listener.

    Opens a connection to the OpenStack message queue and runs the
    consumer loop; messaging errors are logged with a traceback.
    """
    msg = "Connecting to Openstack message queue at address: %s."
    with Connection(self.connection_string) as connection:
        event_consumer = OSMQueueConsumer(connection, self._queues(),
                                          self._cb_event)
        try:
            LOG.info(msg, self.connection_string)
            event_consumer.run()
        except exceptions.KombuError as err:
            LOG.error(err, exc_info=1)
def _ssh_client(host):
    """
    Build an SSH client connected to *host*.

    :param host: Hostname or address to connect to.
    :return: A connected paramiko.SSHClient, or None on failure.
    """
    try:
        client = paramiko.SSHClient()
        client.load_system_host_keys()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(host, timeout=SSH_TIMEOUT)
        return client
    except (ssh_exception.NoValidConnectionsError, socket.error,
            ssh_exception.AuthenticationException) as err:
        LOG.error("Could not add ephemeral disks for host: %s", host)
        LOG.error("SSH Error for host %s: %s", host, err)
        return None
def delete_files(self, device):
    """
    Remove the cached hwloc and cpuinfo files for a device.

    :param device: Device id whose files should be deleted.
    """
    hostname = None  # defined up front so the handler can always log it
    try:
        hostname = self.device_dict.get(device)
        if hostname:
            hwloc_path = os.path.join(paths.DATA_DIR,
                                      hostname + "_hwloc.xml")
            cpu_path = os.path.join(paths.DATA_DIR,
                                    hostname + "_cpuinfo.txt")
            os.remove(hwloc_path)
            os.remove(cpu_path)
    except Exception as ex:
        # BUG FIX: ex.message does not exist on Python 3 exceptions and
        # raised AttributeError inside this handler; format the exception
        # object itself instead.
        LOG.error(
            "Error deleting hwloc/cpuinfo for device: {} ({}), Error message:{}"
            .format(device, hostname, ex))
def __init__(self, conf_manager):
    """
    Read the CIMI url from config and prepare the client.

    :param conf_manager: Configuration manager used to read settings.
    """
    self.cnf = conf_manager
    cimi_url = self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                                     CONFIG_CIMI_URL)
    if cimi_url is None:
        LOG.error(
            "'CIMI_URL' has not been set in the 'general' section of the config file"
        )
        return
    # TODO: certificate authentication issues
    # BUG FIX: find('https') > 0 was False for urls *starting* with
    # 'https' (find returns 0), so warnings were never disabled for the
    # normal case; startswith expresses the real intent.
    if cimi_url.lower().startswith('https'):
        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    self.cimi_url = cimi_url
def add_service_container_metrics(self, id, device_id, start_time):
    """
    Create a new service-container-metric entry in CIMI.

    :param id: Container id (shadows the builtin; kept for API compat).
    :param device_id: Device id without the 'device/' prefix.
    :param start_time: Metric start time.
    :return: The requests response object.
    """
    payload = {
        'container_id': id,
        'device_id': {'href': 'device/' + device_id},
        'start_time': start_time,
    }
    response = requests.post(self.cimi_url + '/service-container-metric',
                             headers=CIMI_SEC_HEADERS,
                             verify=SSL_VERIFY,
                             json=payload)
    if response.status_code != 201:
        LOG.error(response.json())
    return response
def init_graph_db(self):
    """
    Import excel file, create two .csv files for the nodes and edges
    in the landscape. Create two servers for each network switch in the
    landscape. Generate hwloc and cpuinfo for each machine and add files
    to the Data Directory. Create a network_description.yaml file with
    connections to servers and network switches.
    """
    LOG.info("Deleting hwloc and cpuifo files")
    # Deleting hwloc and cpuinfo files is necessary for testing collector
    filelist = [
        file for file in os.listdir(paths.DATA_DIR)
        if file.endswith(".txt") or file.endswith('.xml')
        or file.endswith('.yaml')
    ]
    for file in filelist:
        os.remove(os.path.join(paths.DATA_DIR, file))
    # Both csv inputs must exist before anything is generated.
    if os.path.exists(os.path.join(paths.DATA_DIR, "nodes.csv")) and \
            os.path.exists(os.path.join(paths.DATA_DIR, "links.csv")):
        node_array = pyexcel.get_sheet(file_name=os.path.join(
            paths.DATA_DIR, "nodes.csv"), name_columns_by_row=0)
        attributes = list(node_array.colnames)
        link_array = pyexcel.get_sheet(file_name=os.path.join(
            paths.DATA_DIR, "links.csv"), name_columns_by_row=0)
        LOG.info("Creating hwloc, cpu_info and network description files")
        for node in node_array:
            # Row layout appears to be: [0]=id, [1]=layer, [2]=type,
            # [3]=attached-server count -- TODO confirm against nodes.csv.
            if node[1] == 'network' and node[2] == 'switch':
                connections = []
                node_id = node[0]
                links = self._search_links(link_array, node[0])
                # One hwloc/cpuinfo file pair per attached server; the
                # returned MAC ties the server to this switch.
                for element in range(1, node[3] + 1):
                    mac_address = self._create_hwloc_file(node_id, element)
                    self._create_cpuinfo_file(node_id, element)
                    connections.append(mac_address)
                connections.extend(links)
                self._add_network_switch(node, attributes, connections)
            else:
                # NOTE(review): logged once per non-switch row; looks
                # intentional but noisy -- confirm.
                LOG.error(
                    "Node.csv file does not contain network switch data")
    else:
        LOG.error("CSV Files not in data directory")
def get_node_by_uuid(node_id):
    """
    Return an HTTP response wrapping the graph for a single node uuid.

    Supports the optional 'geo' query-string flag.
    """
    LOG.info("Retrieving node by uuid, with url %s", request.url)
    want_geo = _bool(request.args.get("geo", False))
    graph = LANDSCAPE.graph_db.get_node_by_uuid_web(node_id)
    if not graph:
        err_msg = "Node with ID '{}', not in the landscape.".format(node_id)
        LOG.error(err_msg)
        abort(400, err_msg)
    if want_geo:
        graph = Geo.extract_geo(graph)
    return Response(graph, mimetype=MIME)
def get_events(self, from_date=None, event_type="DELETED", limit=None):
    """
    Fetch CIMI events of a given type, newest first.

    :param from_date: If given, only events created after this date.
    :param event_type: Event content state to filter on.
    :param limit: If given, cap on the number of events returned.
    :return: Decoded JSON body, or an empty dict on failure.
    """
    date_filter = ""
    if from_date:
        date_filter = '&$filter=created>"%s"' % from_date
    limit_filter = ""
    if limit:
        limit_filter = "&$last={}".format(limit)
    # BUG FIX: '&' was missing between the $orderby and $filter query
    # parameters (compare get_collection), producing a malformed query
    # string '...created:desc$filter=...'.
    url = self.cimi_url + '/event?$orderby=created:desc&$filter=content/state="' + \
        event_type + '"' + date_filter + limit_filter
    res = requests.get(url, headers=CIMI_SEC_HEADERS, verify=SSL_VERIFY)
    if res.status_code == 200:
        return res.json()
    LOG.error("Request failed: " + str(res.status_code))
    LOG.error("Response: " + str(res.json()))
    return dict()
def generate_files(self, device, dynamic=None):
    """
    Queries the hwloc and cpuinfo methods and writes them to a file.

    :param device: CIMI Device object containing hwloc and cpu_info
        methods.
    :param dynamic: CIMI device-dynamic object pertaining to the device
        object. (BUG FIX: was a mutable default '{}'; None behaves the
        same in every 'if dynamic:' check without the shared-state risk.)
    :return: (True, hostname) if files were saved, (False, None) on error.
        NOTE(review): the missing-hwloc branch still returns a bare False
        for backward compatibility with existing callers.
    """
    hostname = ""
    device_id = device['id']
    try:
        hwloc = device.get("hwloc")
        if hwloc is None:
            LOG.error("hwLoc data has not been set for this device: " +
                      device_id + ". No HwLoc file will be saved.")
            return False
        cpu_info = device.get("cpuinfo")
        if cpu_info is None:
            # No early return: hwloc can still be written without cpuinfo.
            LOG.error("CPU_info data has not been set for this device: " +
                      device_id + ". No CPU_info file will be saved.")
        if dynamic:
            hwloc, hostname = self._parse_hwloc(device, hwloc, dynamic)
            LOG.info("Dynamic data has been set for this device: " +
                     device_id)
        else:
            hwloc, hostname = self._parse_hwloc(device, hwloc)
            LOG.error("Dynamic data has not been set for this device: " +
                      device_id + ". No dynamic file will be saved.")
        self.device_dict[device_id] = hostname
        # save the dynamic info to file
        if dynamic:
            dynamic_path = os.path.join(paths.DATA_DIR,
                                        hostname + "_dynamic.add")
            self._write_to_file(dynamic_path, json.dumps(dynamic))
        # save the cpu info to file
        if cpu_info:
            cpu_path = os.path.join(paths.DATA_DIR,
                                    hostname + "_cpuinfo.txt")
            self._write_to_file(cpu_path, cpu_info)
        # save the hwloc to file
        hwloc_path = os.path.join(paths.DATA_DIR, hostname + "_hwloc.xml")
        self._write_to_file(hwloc_path, hwloc)
    except Exception as ex:
        # BUG FIX: ex.message does not exist on Python 3 exceptions.
        LOG.error(
            "General Error hwloc/cpuinfo for device: {} - Error message: {}"
            .format(device['id'], ex))
        return False, None
    return True, hostname
def _add_physical_machine(self, machine, timestamp):
    """
    Add a machine to the graph database using its hwloc and cpuinfo
    files.

    :param machine: Machine name.
    :param timestamp: Epoch timestamp.
    """
    if self.graph_db.get_node_by_uuid(machine):
        LOG.error(
            "Machine : %s exists in an inactive state in the landscape.",
            machine)
    hwloc = self._get_hwloc(machine)
    if hwloc is None:
        LOG.error("No hwloc details for machine: %s", machine)
        return
    LOG.info("HWLocCollector - Adding machine: %s", machine)
    graph = self._create_nxgraph_from_hwloc(hwloc, machine)
    cpu_info = self._get_cpu_info(machine)
    if cpu_info is not None:
        self._enrich_graph_cpuinfo(graph, cpu_info)
    else:
        LOG.error("No cpu info for machine: %s", machine)
    # Store the physical host in the graph database.
    self._add_coordinates(graph, machine)
    self._filter_nodes(graph)
    self.store_nxgraph_to_graph_db(graph, self.graph_db, timestamp)
def get_devices(self):
    """
    Fetch the device list from CIMI.

    :return: List of device dicts, or an empty dict on failure.
    """
    cimi_url = self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                                     CONFIG_CIMI_URL)
    if cimi_url is None:
        LOG.error(
            "'CIMI_URL' has not been set in the 'general' section of the config file"
        )
        return dict()
    # TODO: certificate authentication issues
    # BUG FIX: find('https') > 0 was False for urls *starting* with
    # 'https' (find returns 0), so warnings were never disabled for the
    # normal case; startswith expresses the real intent.
    if cimi_url.lower().startswith('https'):
        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    res = requests.get(cimi_url + '/device',
                       headers={'slipstream-authn-info': 'internal ADMIN'},
                       verify=SSL_VERIFY)
    if res.status_code == 200:
        LOG.info("CIMI Connection OK. Devices returned: " +
                 str(len(res.json()['devices'])))
        return res.json()['devices']
    LOG.error("Request failed: " + str(res.status_code))
    LOG.error("Response: " + str(res.text))
    return dict()
def init_graph_db(self):
    """
    Retrieve hwloc and cpu_info for each machine and add the files to
    the Data Directory, retrying CIMI with a growing back-off until a
    non-empty device list is returned.
    """
    LOG.info("Generating hwloc and cpu_info files")
    max_retry = int(
        self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                              CONFIG_CIMI_MAX_RETRY))
    wait_time = int(
        self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                              CONFIG_CIMI_WAIT_TIME))
    devices = dict()
    for attempt in range(max_retry):
        devices = self.get_devices()
        if bool(devices):
            LOG.info("Received non empty devices list from CIMI")
            break
        if attempt + 1 == max_retry:
            LOG.error(
                "Can't reach CIMI for maximum configured number of times. Exiting"
            )
            exit(1)
        time_to_sleep = wait_time * (attempt + 1)
        time.sleep(time_to_sleep)
        LOG.info(
            "Received empty devices list from CIMI. Sleeping for %i s"
            % time_to_sleep)
    # Pair each device with its device-dynamic record (may be None).
    dynamics_by_id = self.device_dynamic_dict()
    for device in devices:
        self.generate_files(device, dynamics_by_id.get(device['id']))
def _get_client(self):
    """
    Returns a Docker client accordingly to the configuration file.

    :return: Docker client (exits the process on connection failure).
    """
    # BUG FIX: the guard required len > 2 but indexed docker_conf[3],
    # raising IndexError for a 3-element configuration.
    if len(self.docker_conf) > 3 and \
            self.docker_conf[2] and self.docker_conf[3]:
        tls_config = docker.tls.TLSConfig(
            client_cert=(self.docker_conf[2], self.docker_conf[3])
        )
    else:
        tls_config = False
    try:
        client = docker.DockerClient(
            base_url=OSSwarmListener._get_connection_string(
                self.docker_conf),
            tls=tls_config
        )
        return client
    except requests.ConnectionError as e:
        LOG.error('Please check Configuration file or Service availability')
        # BUG FIX: e.message does not exist on Python 3 exceptions.
        LOG.error(str(e))
        exit()
def init_graph_db(self):
    """
    Retrieve hwloc and cpu_info for each machine and add the files to
    the Data Directory, then record the device list in the config.
    """
    LOG.info("Generating hwloc and cpu_info files")
    devices = list()
    # Look up this node's dataClay agent id from the config.
    agent_id = self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                                     CONFIG_VARIABLE_DC_AGENT)
    if agent_id is None:
        LOG.error(
            "'dataclay_agentid' has not been set in the 'general' section of the config file"
        )
        return
    agent = Agent.get_by_alias(agent_id)
    if self.generate_files(agent.device):
        devices.append(agent.device.device_id)
    # A leader agent also generates files for all of its children.
    if agent.is_leader:
        for child in agent.children:
            if self.generate_files(child.device):
                devices.append(child.device.device_id)
    # Persist the collected device ids to the config file.
    device_list = ','.join(str(x) for x in devices)
    LOG.info("DataClay device list: " + device_list)
    self.conf_manager.set_variable(CONFIG_SECTION_PHYSICAL,
                                   CONFIG_VARIABLE_MACHINES, device_list)
    # cleanup
    api.finish()
def get_devices(self):
    """
    Fetch the device list from CIMI.

    :return: List of device dicts, or None on failure.
    """
    try:
        cimi_url = self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                                         CONFIG_CIMI_URL)
        if cimi_url is None:
            LOG.error(
                "'CIMI_URL' has not been set in the 'general' section of the config file"
            )
            return
        res = requests.get(
            cimi_url + '/device',
            headers={'slipstream-authn-info': 'super ADMIN'},
            verify=False)
        if res.status_code == 200:
            return res.json()['devices']
        # BUG FIX: status_code is an int; concatenating it to a str
        # raised TypeError (the other CIMI helpers already str() it).
        LOG.error("Request failed: " + str(res.status_code))
        LOG.error("Response: " + str(res.json()))
        return None
    except Exception:
        # BUG FIX: was a bare 'except:', and the log format had no '%s'
        # placeholder so the exception type was never rendered.
        LOG.error('Exception: %s', sys.exc_info()[0])
        return None
def generate_files(self, device):
    """
    Queries the hwloc and cpuinfo methods and writes them to a file.

    :param device: model_mf2c.classes.Device used to query the hwloc and
        cpu_info methods.
    :return: True if files successfully saved, False if errors
        encountered.
    """
    try:
        hwloc_path = path.join(paths.DATA_DIR,
                               device.device_id + "_hwloc.xml")
        hwloc = device.get_hwloc()
        if hwloc is None:
            LOG.error("hwLoc data has not been set for this device: " +
                      device.device_id + ". No HwLoc file will be saved.")
            return False
        self._write_to_file(hwloc_path, hwloc)
        cpu_path = path.join(paths.DATA_DIR,
                             device.device_id + "_cpu_info.txt")
        cpu_info = device.get_CPU_info()
        if cpu_info is None:
            LOG.error("CPU_info data has not been set for this device: " +
                      device.device_id +
                      ". No CPU_info file will be saved.")
            return False
        self._write_to_file(cpu_path, cpu_info)
    except DataclayException as dc_ex:
        # BUG FIX: dc_ex.message does not exist on Python 3 exceptions.
        LOG.error("Error accessing hwloc/cpuinfo for device id: " +
                  device.device_id + str(dc_ex))
        return False
    except Exception:
        # BUG FIX: was a bare 'except:', and the log call passed
        # sys.exc_info()[0] with no '%s' placeholder, so it was dropped.
        LOG.error("General Error hwloc/cpuinfo for device id: %s - %s",
                  device.device_id, sys.exc_info()[0])
        return False
    return True
def get_device(self, device_id):
    """
    Fetch a single device record from CIMI.

    :param device_id: Device resource path (e.g. 'device/<uuid>').
    :return: Decoded JSON record, an empty dict on HTTP failure, or
        None when no CIMI url is configured.
    """
    cimi_url = self.cnf.get_variable(CONFIG_SECTION_GENERAL,
                                     CONFIG_CIMI_URL)
    if cimi_url is None:
        LOG.error(
            "'CIMI_URL' has not been set in the 'general' section of the config file"
        )
        return
    res = requests.get(cimi_url + '/' + device_id,
                       headers={'slipstream-authn-info': 'internal ADMIN'},
                       verify=False)
    if res.status_code != 200:
        LOG.error("Request failed: " + str(res.status_code))
        LOG.error("Response: " + str(res.json()))
        return dict()
    return res.json()
def generate_files(self, device):
    """
    Queries the hwloc and cpuinfo methods and writes them to a file.

    :param device: CIMI Device object (a dict) containing hwloc and
        cpu_info entries.
    :return: (True, hostname) on success, (False, None) on general
        error. NOTE(review): the missing hwloc/cpuinfo branches still
        return a bare False, kept for backward compatibility.
    """
    hostname = ""
    try:
        hwloc = device["hwloc"]
        if hwloc is None:
            # BUG FIX: the error paths used device.id, but device is a
            # dict -- that raised AttributeError while logging.
            LOG.error("hwLoc data has not been set for this device: " +
                      device["id"] + ". No HwLoc file will be saved.")
            return False
        cpu_info = device["cpuinfo"]
        if cpu_info is None:
            LOG.error("CPU_info data has not been set for this device: " +
                      device["id"] + ". No CPU_info file will be saved.")
            return False
        device_id = device["id"][
            7:]  # eg, device/737fe63b-2a34-44fe-9177-3aa6284ba2f5
        doc_root = Et.fromstring(hwloc)
        for child in doc_root:
            if child.tag == "object" and child.attrib["type"] == "Machine":
                # get hostname
                for info in child.iter("info"):
                    if info.attrib["name"] == "HostName":
                        hostname = info.attrib["value"]
                        break
                # add mf2c device id to hwloc file
                device_id_att = dict()
                device_id_att["name"] = MF2C_PATH_VALUE
                device_id_att["value"] = device_id
                Et.SubElement(child, "info", device_id_att)
                # add device's ip address to the hwloc file
                if device["ethernetAddress"]:
                    ipaddress = self._get_ipaddress(
                        device["ethernetAddress"])
                    ipaddress_att = dict()
                    ipaddress_att["name"] = "ipaddress"
                    ipaddress_att["value"] = ipaddress
                    Et.SubElement(child, "info", ipaddress_att)
                hwloc = Et.tostring(doc_root)
                break
        # save the cpu info to file
        cpu_path = path.join(paths.DATA_DIR, hostname + "_cpuinfo.txt")
        self._write_to_file(cpu_path, cpu_info)
        # save the hwloc to file
        hwloc_path = path.join(paths.DATA_DIR, hostname + "_hwloc.xml")
        self._write_to_file(hwloc_path, hwloc)
    except Exception:
        # BUG FIX: was a bare 'except:' using device.id (AttributeError on
        # a dict) and a log call with no '%s' placeholder.
        LOG.error("General Error hwloc/cpuinfo for device id: %s - %s",
                  device["id"], sys.exc_info()[0])
        return False, None
    return True, hostname