def request_data(api, bam_ip):
    """
    Request data from the Gateway.

    :param api: API path, relative to the Gateway URL
    :param bam_ip: IP of BAM -- IPv4
    :return: parsed JSON response, or None on failure
    """
    result = None
    try:
        user_name, password, gateway_url, _ = common.get_configuration()
        if not user_name or not password:
            return
        bam_url = "http://{}/Services/API?wsdl".format(bam_ip)
        login_session = login(user_name, password, gateway_url, bam_url)
        if not login_session:
            return
        url = "{}/{}".format(gateway_url, api)
        logger.info("Gateway_access-request_data - {}".format(url))
        ts_request = login_session.get(url)
        logout(login_session, gateway_url)
        if ts_request.status_code != 200:
            return
        result = json.loads(ts_request.text)
    except Exception as exception:
        logger.error("Gateway_access-request_data - {}".format(exception))
        logger.error(traceback.format_exc())
    return result
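# NOTE (illustrative sketch): a typical call to request_data, mirroring the way
# init_server_cached_list_api() uses it further down. The API path matches the
# one used in this module; the BAM IP is a hypothetical placeholder value.
def _example_request_data_usage():
    bam_ip = "192.0.2.10"  # hypothetical BAM IPv4 address
    result = request_data('gateway_nfv_plugin/init_server_cached_list', bam_ip)
    # request_data returns the parsed JSON body, or None if the login,
    # the HTTP call, or JSON decoding failed.
    return result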
def main():
    # Clean
    logger.info("Clean memcached before init")
    memcached_host, memcached_port = get_memcached_config()
    mem_nfv = MemcachedNFV(memcached_host, memcached_port)
    mem_nfv.clean_memcached()
    mem_nfv.disconnect()

    # Init server list
    init_server_cached_list_api()

    # Scheduler for get statistic
    data_config = read_config_json_file(NFV_CONFIG_PATH)
    interval = int(data_config['interval'])
    executors = {
        'default': {'type': 'threadpool', 'max_workers': 20},
        'processpool': ProcessPoolExecutor(max_workers=5)
    }
    job_defaults = {'coalesce': False, 'max_instances': 3}
    scheduler = BlockingScheduler()
    scheduler.configure(executors=executors, job_defaults=job_defaults)
    scheduler.add_job(scheduler_get_statistic_job, 'interval', seconds=interval)
    scheduler.start()
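# NOTE (assumption): main() above relies on APScheduler for the recurring job.
# The imports below are a sketch of what this module is assumed to pull in for
# the scheduler pieces; the project-local helpers (get_memcached_config,
# MemcachedNFV, read_config_json_file, NFV_CONFIG_PATH, ...) are assumed to be
# imported from the plugin's own modules and are not shown here.
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.executors.pool import ProcessPoolExecutor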
def get_server_statistic(server, call_k1=True):
    """
    Collect SNMP statistics for a single server.

    :param server: Json object describing the server
    :param call_k1: bool -- forward the result to the K1 API when True
    :return: template statistic object -- Json
    """
    result_object = None
    try:
        if server['server_type'] in (ServerType.BAM, ServerType.VM_HOST):
            memory_usage, cpu_usage = server_bam_statistic(
                server['address'], server['snmp_config_data'])
            result_object = make_template_statistic_object(
                server['address'], server['server_type'],
                server['server_name'], memory_usage, cpu_usage)
        elif server['server_type'] == ServerType.BDDS:
            memory_usage, cpu_usage, dns_oper_state, queries = \
                server_bdds_statistic(server['address'],
                                      server['snmp_config_data'])
            result_object = make_template_statistic_object(
                server['address'], server['server_type'],
                server['server_name'], memory_usage, cpu_usage, queries,
                server['udf'], dns_oper_state)
    except Exception as ex:
        logger.error("{}: {}-{}".format(server['server_name'], type(ex), ex))
        logger.error(traceback.format_exc())
    logger.info(f'get_server_statistic: {result_object}')
    if call_k1:
        k1.call_k1_api(result_object, 5)
    return result_object
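# NOTE (illustrative sketch): get_server_statistic() expects the dict shape
# that collect_statistics() builds below; the concrete values here are
# hypothetical placeholders.
_example_bdds_server = {
    'server_type': ServerType.BDDS,
    'server_name': 'bdds-01',               # hypothetical server name
    'address': '192.0.2.20',                # hypothetical IPv4 address
    'snmp_config_data': {},                 # entry from the SNMP config file
    'udf': 'vm_id:bdds-01,site:lab'         # comma-separated "key:value" pairs
}
# get_server_statistic(_example_bdds_server, call_k1=False)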
def init_server_cached_list_api():
    """[Call the Gateway API to init the BAM and BDDS server list in memcached]
    """
    data_config = read_config_json_file(NFV_CONFIG_PATH)
    logger.info("Statistics collection-init_server_cached_list_api")
    result = gateway_access.request_data(
        'gateway_nfv_plugin/init_server_cached_list',
        data_config['bam'][0]['ip'])
    logger.info(
        "Statistics collection-init_server_cached_list_api - {}".format(
            result))
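# NOTE (assumption): sketch of the parts of the JSON file at NFV_CONFIG_PATH
# read by main() and init_server_cached_list_api(). The key names come from
# the code above; the values are hypothetical placeholders, and the real file
# holds additional sections (e.g. the 'k1_api' block used by call_k1_api).
_example_nfv_config_core = {
    "interval": 60,                  # seconds between statistics jobs
    "bam": [{"ip": "192.0.2.10"}]    # first BAM entry is used to init the list
}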
def scheduler_get_statistic_job():
    """
    Scheduled job: collect statistics for all servers.
    """
    # Init memcached connection
    memcached_host, memcached_port = get_memcached_config()
    mem_nfv = MemcachedNFV(memcached_host, memcached_port)
    statistics = collect_statistics(mem_nfv)
    # Close
    mem_nfv.disconnect()
    logger.info('Get statistic: %s' % statistics)
def call_k1_api(result_object, timeout=1):
    """[Call K1 API]

    :param result_object: statistic object returned by get_server_statistic
    :param timeout: request timeout in seconds, default 1
    :return:
    """
    if not result_object:
        logger.debug("Result_object is none")
        return
    payload = prepare_payload_for_k1(result_object)
    logger.info(f'Payload of K1: {payload}')
    if is_kpi_none(payload):
        logger.info('KPIs are none. Do not call api k1')
        return
    data_config = read_config_json_file(NFV_CONFIG_PATH)
    headers = {
        'Content-Type': 'application/json;charset=UTF-8',
    }
    try:
        host = data_config['k1_api']['address']
        port = data_config['k1_api']['port']
        uri = data_config['k1_api']['uri']
        response = requests.post(f"http://{host}:{port}{uri}",
                                 headers=headers,
                                 data=json.dumps(payload),
                                 timeout=timeout)
        result_call = {
            'content': response.content.decode('utf-8'),
            'status_code': response.status_code
        }
        logger.info(f'Result call api k1: {result_call}')
    except KeyError as key_error:
        logger.error("Cannot get {} in config file {}".format(
            key_error, NFV_CONFIG_PATH))
        logger.debug(traceback.format_exc())
    except requests.RequestException:
        logger.error("Cannot request api to {}".format(
            data_config['k1_api']['address']))
        logger.error("Payload of the failed request: {}".format(payload))
        logger.debug(traceback.format_exc())
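# NOTE (assumption): sketch of the 'k1_api' section of the NFV config that
# call_k1_api() reads. The key names ('address', 'port', 'uri') come from the
# code above; the values are hypothetical placeholders.
_example_k1_api_config = {
    "k1_api": {
        "address": "192.0.2.30",
        "port": 8080,
        "uri": "/kpi"
    }
}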
def collect_statistics(mem_nfv):
    """
    :param mem_nfv: MemcachedNFV connection
    :return: Array of statistic results
    """
    # Get server list from memcached
    list_bdds, list_bam, list_vmhosts = mem_nfv.get_list_servers()
    logger.debug("List_bdds: {}\nList_bam: {}\nList_vmhost: {}".format(
        list_bdds, list_bam, list_vmhosts))
    snmp_config = read_config_json_file(SNMP_CONFIG_PATH)
    logger.debug("Snmp config: {}".format(snmp_config))
    list_servers = []

    logger.info(f'List BDDS size: {len(list_bdds)}')
    logger.info('Begin loop through list_bdds')
    for bdds in list_bdds:
        logger.debug(f'BDDS: {bdds}')
        bdds_config_name = get_snmp_server_config_name(snmp_config, bdds.name)
        logger.debug(f'BDDS config name {bdds_config_name}')
        try:
            list_servers.append({
                'server_type': ServerType.BDDS,
                'server_name': bdds.name,
                'address': bdds.ipv4_address,
                'snmp_config_data': snmp_config[bdds_config_name],
                'udf': bdds.udf
            })
        except KeyError as exception:
            logger.error(f'Exception KeyError {exception}')
            logger.error(traceback.format_exc())
            continue

    logger.info(f'List BAM size: {len(list_bam)}')
    logger.info('Begin loop through list_bam')
    for bam in list_bam:
        try:
            logger.debug(f'BAM: {bam}')
            logger.debug(f'bam_name: {bam.name}')
            bam_config_name = get_snmp_server_config_name(
                snmp_config, bam.name)
            logger.debug(f'BAM config name {bam_config_name}')
            try:
                logger.info(f'Begin append BAM, server list: {list_servers}')
                list_servers.append({
                    'server_type': ServerType.BAM,
                    'server_name': bam.name,
                    'address': bam.ipv4_address,
                    'snmp_config_data': snmp_config[bam_config_name]
                })
                logger.info(f'End append BAM, server list: {list_servers}')
            except KeyError as exception:
                logger.error(f'Exception KeyError {exception}')
                logger.error(traceback.format_exc())
                continue
        except Exception as exception:
            logger.info(f'Cannot get bam.ipv4_address: {exception}')

    logger.info(f'List VM_HOST size: {len(list_vmhosts)}')
    logger.info('Begin loop through list_vmhosts')
    for vm_host in list_vmhosts:
        try:
            logger.debug(f'VM_HOST: {vm_host}')
            logger.debug(f'vm_name: {vm_host.name}')
            vm_host_config_name = get_snmp_server_config_name(
                snmp_config, vm_host.name)
            logger.debug(f'VM_HOST config name {vm_host_config_name}')
            try:
                logger.info(
                    f'Begin append VM_HOST, server list: {list_servers}')
                list_servers.append({
                    'server_type': ServerType.VM_HOST,
                    'server_name': vm_host.name,
                    'address': vm_host.ipv4_address,
                    'snmp_config_data': snmp_config[vm_host_config_name]
                })
                logger.info(
                    f'End append VM_HOST, server list: {list_servers}')
            except KeyError as exception:
                logger.error(f'Exception KeyError {exception}')
                logger.error(traceback.format_exc())
                continue
        except Exception as exception:
            logger.info(f'Cannot get vm_host.ipv4_address: {exception}')

    logger.info(f'Begin get statistic with list server {list_servers}')
    result = []
    # Collect statistics for all servers in parallel
    with PoolExecutor(max_workers=10) as executor:
        for result_object in executor.map(get_server_statistic, list_servers):
            result.append(result_object)
    return result
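# NOTE (assumption): collect_statistics() above fans the per-server SNMP
# collection out through PoolExecutor, which is assumed to be a thread-pool
# alias defined with the module's other imports, for example:
from concurrent.futures import ThreadPoolExecutor as PoolExecutor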
def prepare_payload_for_k1(result_statictis):
    """[Prepare payload for K1]

    :param result_statictis: statistic object returned by get_server_statistic
    :return: [payload] -- Json object
    """
    vmname = result_statictis['server_name']
    if result_statictis['server_type'] in (ServerType.BAM,
                                           ServerType.VM_HOST):
        kpi_load = [{
            "kpi_name": "cpu_load",
            "kpi_value": min(100, round(result_statictis['cpu_usage']))
            if result_statictis['cpu_usage'] is not None else None
        }, {
            "kpi_name": "mem_load",
            "kpi_value": round(result_statictis['memory_usage'])
            if result_statictis['memory_usage'] else None
        }]
    elif result_statictis['server_type'] == ServerType.BDDS:
        kpi_load = [{
            "kpi_name": "cpu_load",
            "kpi_value": min(100, round(result_statictis['cpu_usage']))
            if result_statictis['cpu_usage'] is not None else None
        }, {
            "kpi_name": "mem_load",
            "kpi_value": round(result_statictis['memory_usage'])
            if result_statictis['memory_usage'] else None
        }, {
            "kpi_name": "dns_queries",
            "kpi_value": round(result_statictis['queries'])
            if result_statictis['queries'] is not None else None
        }]
    udf_object = {}
    try:
        # UDF data is expected as a comma-separated list of "key:value" pairs
        for udf_string in result_statictis['udf'].split(','):
            logger.info(udf_string)
            try:
                udf_string_array = udf_string.split(':')
                udf_object.update({udf_string_array[0]: udf_string_array[1]})
            except IndexError:
                logger.info('UDF string has incorrect format')
                continue
    except Exception:
        logger.info('Result statistic does not have udf')
    app_status = result_statictis.get('app_status', 'ready')
    payload = {
        # "vm_id": " ",
        "vm_type": result_statictis['server_type'],
        "vm_name": vmname,
        "app_status": app_status,
        "kpi_load": kpi_load
    }
    payload.update(udf_object)
    logger.info(f'Payload {payload}')
    return payload
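# NOTE (illustrative sketch): example of the payload prepare_payload_for_k1()
# builds for a BDDS statistic object; all concrete values are hypothetical.
def _example_prepare_payload():
    result_statictis = {
        'server_type': ServerType.BDDS,
        'server_name': 'bdds-01',            # hypothetical
        'cpu_usage': 12.4,
        'memory_usage': 37.8,
        'queries': 150,
        'udf': 'vm_id:bdds-01,site:lab'      # parsed into extra payload keys
    }
    payload = prepare_payload_for_k1(result_statictis)
    # payload -> {'vm_type': ServerType.BDDS, 'vm_name': 'bdds-01',
    #             'app_status': 'ready',
    #             'kpi_load': [{'kpi_name': 'cpu_load', 'kpi_value': 12},
    #                          {'kpi_name': 'mem_load', 'kpi_value': 38},
    #                          {'kpi_name': 'dns_queries', 'kpi_value': 150}],
    #             'vm_id': 'bdds-01', 'site': 'lab'}
    return payload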