def test_set_server_vmhost_successful(self, mock_server_type):
    """Smoke-test set_server() with a VMHost-typed server entry."""
    vm_server = {
        'name': mock.Mock(),
        'ipv4_address': mock.Mock(),
    }
    vm_type = "VMHost"
    mock_server_type.VMHost.return_value = vm_type
    # Replace the class-level memcached client so no real connection is made.
    MemcachedNFV.client = mock.Mock()
    cache = MemcachedNFV(mock.Mock(), mock.Mock())
    cache.set_server(vm_server, vm_type)
def test_set_server_bam_exception(self, mock_server_type):
    """Exercise set_server() with a BAM-typed server (exception path)."""
    bam_server = {
        'name': mock.Mock(),
        'ipv4_address': mock.Mock(),
    }
    bam_type = "BAM"
    mock_server_type.BAM.return_value = bam_type
    # Replace the class-level memcached client so no real connection is made.
    MemcachedNFV.client = mock.Mock()
    cache = MemcachedNFV(mock.Mock(), mock.Mock())
    cache.set_server(bam_server, bam_type)
def test_set_server_bam_successful(self, mock_log, mock_server_type):
    """Verify set_server() logs the expected memcache key for a BAM server."""
    bam_server = {
        'name': mock.Mock(),
        'ipv4_address': mock.Mock(),
    }
    bam_type = "BAM"
    mock_server_type.BAM.return_value = bam_type
    # Replace the class-level memcached client so no real connection is made.
    MemcachedNFV.client = mock.Mock()
    cache = MemcachedNFV(mock.Mock(), mock.Mock())
    cache.set_server(bam_server, bam_type)
    expected_key = "{}|{}".format(mock_server_type.BAM,
                                  bam_server['ipv4_address'])
    mock_log.assert_called_with('Added {} to memcache'.format(expected_key))
def init_server_cached_list():
    """[init_server_cached_list: call api get servers in bam with CONFIGURATION_NAME]

    Returns:
        [json] -- [
            Success: return {"Status": "SUCCESS"}
            Fail: return {"Status": "FAIL"}]
    """
    mem_nfv = None
    try:
        data_config = read_config_json_file(NFV_CONFIG_PATH)
        memcached_host = data_config['memcached_host']
        memcached_port = int(data_config['memcached_port'])
        configuration_name = data_config['bam_config_name']
        g.user.logger.debug(
            'Init_server_cached_list - configuration_name: {}'.format(
                configuration_name))
        configuration_id = gateway_nfv_management.get_configuration_id(
            configuration_name)
        g.user.logger.info(
            'Get list server of configure_id {}'.format(configuration_id))
        list_servers = gateway_nfv_management.get_list_servers(
            configuration_id)
        g.user.logger.info(
            'Init_server_cached_list - Number of get list server: {}'.format(
                len(list_servers)))
        # Init memcached
        mem_nfv = MemcachedNFV(memcached_host, memcached_port)
        # Set bam server info to memcached server
        bam_ip = data_config['bam'][0]['ip']
        bam_name = data_config['bam'][0]['name']
        mem_nfv.set_server({
            'name': bam_name,
            'ipv4_address': bam_ip
        }, ServerType.BAM)
        # Set bdds server info to memcached server
        list_udf_name = [udf['name'] for udf in data_config['udfs_for_server']]
        for server in list_servers:
            mem_nfv.set_server(server, ServerType.BDDS, bam_ip, list_udf_name)
        # Set VM_HOST server info to memcached server
        vm_host_ip = data_config["vm_host_ip"]
        vm_name = data_config['vm_host_name']
        mem_nfv.set_server({
            'name': vm_name,
            'ipv4_address': vm_host_ip
        }, ServerType.VM_HOST)
    except Exception as exception:
        g.user.logger.error('Init_server_cached_list - {}'.format(exception))
        g.user.logger.error(traceback.format_exc())
        return jsonify({"Status": "FAIL"})
    finally:
        # Fix: the original only disconnected on the success path, leaking the
        # memcached connection whenever any call above raised after the
        # MemcachedNFV handle was created. Always release it here.
        if mem_nfv is not None:
            mem_nfv.disconnect()
    return jsonify({"Status": "SUCCESS"})
def scale_out(data):
    """
    :param data: request in json
    for example:
    {
        "server_name": "bdd240s",
        "mgnt_server_ip": "192.168.88.240",
        "service_server_ipv4": "192.168.89.240",
        "service_server_ipv6": fdac::12,
        "service_server_netmask": 24,
        "service_server_v6_prefix": 64,
        "metadata": "can_scale_in=True"
    }
    :return: successful message
    """
    try:
        g.user.logger.debug("Scale out request data: {}".format(data))
        data_config = read_config_json_file(NFV_CONFIG_PATH)
        g.user.logger.debug("NFV config data: {}".format(data_config))
        config_name = data_config['bam_config_name']
        config_id = get_configuration_id(config_name)
        if not config_id:
            return jsonify({
                "status": "Failed",
                "message": "Configuration id not found!"
            }), 404
        g.user.logger.info('Starting check available server')
        server_ip = data['mgnt_server_ip']
        server_ipv6 = None
        # Probe the target host over SSH before attempting to register it.
        avail_server = is_check_available_server(
            server_ip, data_config['server_ssh_username'],
            data_config['server_ssh_password'])
        if not avail_server:
            return jsonify({
                "status": "Failed",
                "message": "No available server ip!"
            }), 404
        g.user.logger.info('Starting add server')
        # Build the pipe-delimited BAM server properties string incrementally.
        server_properties = f"password={process_password.decrypt_password(data_config['server_deployment_password'])}|connected=true|upgrade=False"
        # Each of the three optional fields below is best-effort: a missing
        # key is logged and skipped rather than failing the whole request.
        try:
            if data['metadata']:
                server_properties = f"{server_properties}|{data['metadata']}"
        except KeyError as exception:
            g.user.logger.error(str(exception))
            g.user.logger.error(traceback.format_exc())
        try:
            # Optional service IPv4: netmask must be a valid CIDR prefix (1-32).
            if (int(data['service_server_netmask']) <= 32) and (int(data['service_server_netmask']) > 0) and data['service_server_ipv4']:
                server_properties = f"{server_properties}|servicesIPv4Address={data['service_server_ipv4']}|servicesIPv4Netmask={cidr_to_netmask(data['service_server_netmask'])}"
        except KeyError as exception:
            g.user.logger.error(str(exception))
            g.user.logger.error(traceback.format_exc())
        try:
            # Optional service IPv6; server_ipv6 is also reused later for anycast.
            if data['service_server_v6_prefix'] and data['service_server_ipv6']:
                server_ipv6 = data['service_server_ipv6'] + \
                    '/' + data['service_server_v6_prefix']
                server_properties = f"{server_properties}|servicesIPv6Address={data['service_server_ipv6']}|servicesIPv6Subnet={data['service_server_v6_prefix']}"
        except KeyError as exception:
            g.user.logger.error(str(exception))
            g.user.logger.error(traceback.format_exc())
        # Register the server in BAM, with or without a capability profile.
        if data_config['server_cap_profile']:
            g.user.logger.debug(
                f"Add server name {data['server_name']} server_ip {server_ip} profile {data_config['server_cap_profile']} properties {server_properties}"
            )
            server_id = add_server(server_ip=server_ip,
                                   server_name=data['server_name'],
                                   config_id=config_id,
                                   profile=data_config['server_cap_profile'],
                                   properties=server_properties)
        else:
            g.user.logger.debug(
                f"Add server name {data['server_name']} server_ip {server_ip} properties {server_properties}"
            )
            server_id = add_server(server_ip=server_ip,
                                   server_name=data['server_name'],
                                   config_id=config_id,
                                   properties=server_properties)
        g.user.logger.info('Starting create deployment roles')
        deploy_role = False
        if data_config['dns_view_names']:
            # Request payload may override the configured deployment role type.
            role_type = data.get("deploy_role",
                                 data_config['server_deploy_role'])
            for view_name in data_config['dns_view_names']:
                g.user.logger.debug(
                    f"Create deployment role for server {data['server_name']} view_name {view_name} role_type {role_type}"
                )
                role_id = create_deployment_roles(
                    server_name=data['server_name'],
                    server_id=server_id,
                    config_id=config_id,
                    view_name=view_name,
                    role_type=role_type)
                if not role_id:
                    g.user.logger.info(
                        'Cannot create deployment role for view name: %s' %
                        view_name)
                    continue
                # At least one view succeeded -> the scale-out can proceed.
                deploy_role = True
        else:
            g.user.logger.info('dns_view_names not found!')
        if not deploy_role:
            g.user.logger.info('Cannot create any deployment roles!')
            return jsonify({
                "status": "Failed",
                "message": "Create deployment role failed"
            }), 500
        g.user.logger.info('Starting add raw option')
        g.user.logger.info(
            'Starting deploy DNS configuration for server name: %s' %
            data['server_name'])
        # NOTE(review): a failed deploy is only logged; the flow still waits
        # for deployment below — confirm this is intentional.
        deploy_server = deploy_server_config(server_id)
        if not deploy_server:
            g.user.logger.info('Deploy DNS configuration is failed!')
        g.user.logger.info(
            'Starting wait for DNS deployment for server name: %s' %
            data['server_name'])
        deploy_status = wait_for_deployment(server_id)
        g.user.logger.info(
            'Deployment status for server name %s, deploy status id %s' %
            (data['server_name'], deploy_status))
        if 'anycast_config' in data_config:
            g.user.logger.info('Starting configure any cast')
            configure_anycast(server_ip, server_ipv6,
                              data_config['server_ssh_username'],
                              data_config['server_ssh_password'],
                              data_config['anycast_config'])
        g.user.logger.info('Adding server to cache list')
        # Add BDDS to memcached
        bam_ip = data_config['bam'][0]['ip']
        memcached_host = data_config['memcached_host']
        memcached_port = int(data_config['memcached_port'])
        # NOTE(review): this MemcachedNFV handle is never disconnect()ed on
        # either path — looks like a connection leak; confirm against
        # init_server_cached_list which does call disconnect().
        mem_nfv = MemcachedNFV(memcached_host, memcached_port)
        # UDF names are the left-hand sides of the "k=v|k=v" metadata string.
        # NOTE(review): unlike the guarded access above, a missing 'metadata'
        # key here raises KeyError and fails the whole request via the outer
        # handler — confirm 'metadata' is mandatory at this point.
        list_udf_name = [
            value.split('=')[0].strip()
            for value in data['metadata'].split('|')
        ]
        mem_nfv.set_server(
            {
                'id': server_id,
                'name': data['server_name'],
                'type': ServerType.BDDS,
                'properties': f"defaultInterfaceAddress={server_ip}|{server_properties}"
            },
            ServerType.BDDS, bam_ip, list_udf_name)
        g.user.logger.info(
            f"SUCCESS: Add server to memcached with info 'id': {server_id}, 'name': {data['server_name']}, 'type': {ServerType.BDDS}, 'bam_ip': {bam_ip}"
        )
        g.user.logger.debug(f"'properties': {server_properties}")
        # Remove used candidate address on memcache
        # NOTE(review): service_server_ipv4/ipv6 are accessed unguarded here,
        # although treated as optional earlier — verify callers always send them.
        mem_nfv.clean_network(data["mgnt_server_ip"])
        mem_nfv.clean_network(data["service_server_ipv4"])
        mem_nfv.clean_network(data["service_server_ipv6"])
        # Add addresses to used list on memcache
        used_ipv4_memcache = mem_nfv.get_network("used_ipv4")
        used_ipv6_memcache = mem_nfv.get_network("used_ipv6")
        used_ipv4_memcache.append(data["mgnt_server_ip"])
        if data["service_server_ipv4"]:
            used_ipv4_memcache.append(data["service_server_ipv4"])
        if data["service_server_ipv6"]:
            used_ipv6_memcache.append(data["service_server_ipv6"])
        ipv6_str = ",".join(used_ipv6_memcache)
        mem_nfv.set_network("used_ipv6", ipv6_str)
        ipv4_str = ",".join(used_ipv4_memcache)
        mem_nfv.set_network("used_ipv4", ipv4_str)
    except Exception as exception:
        # Top-level boundary: log and translate any failure into a 500 response.
        g.user.logger.error(str(exception))
        g.user.logger.error(
            f"Failed: Haven't add server to mem cached {exception}")
        g.user.logger.error(traceback.format_exc())
        return jsonify({
            "status": "Failed",
            "message": "Scale out failed",
            "error": str(exception)
        }), 500
    return jsonify({
        "status": "Successful",
        "message": "Scale out successfully",
        "error": ""
    }), 200