def setUp(self):
    """Create clean 'existing' and 'new' config copies of the base SSPL config.

    Loads the packaged LR2 base config, strips the CHANGED/OBSOLETE keys,
    and saves two identical working copies under the tmp directory for the
    merge tests to operate on.
    """
    base_config = "low-level/files/opt/seagate/sspl/conf/sspl.conf.LR2.yaml"
    base_config = os.path.dirname(os.path.abspath(__file__)).replace(
        "unittests/framework/base", base_config)
    base_config_url = f"yaml://{base_config}"
    self.tmp_dir = "/opt/seagate/cortx/sspl/tmp"
    # BUG FIX: tmp_dir is already absolute; the original prefixed an extra
    # "/" and produced double-slash paths ("//opt/...").
    self.existing_config = f"{self.tmp_dir}/existing.conf"
    self.new_config = f"{self.tmp_dir}/new.conf"
    self.merged_config = f"{self.tmp_dir}/merged.conf"
    os.makedirs(self.tmp_dir, exist_ok=True)
    # Create/truncate empty files so Conf.load() can open them.
    with open(self.existing_config, "w"):
        pass
    with open(self.new_config, "w"):
        pass
    self.existing_config_url = f"yaml://{self.existing_config}"
    self.new_config_url = f"yaml://{self.new_config}"
    self.merged_config_url = f"yaml://{self.merged_config}"
    Conf.load("base", base_config_url)
    Conf.load("existing", self.existing_config_url)
    Conf.load("new", self.new_config_url)
    # Delete below keys to get clean config
    Conf.delete("base", "CHANGED")
    Conf.delete("base", "OBSOLETE")
    # Create existing and new config file from base file
    Conf.copy("base", "existing")
    Conf.copy("base", "new")
    Conf.save("existing")
    Conf.save("new")
def validate(self):
    """Check for required packages are installed."""
    fqdn = socket.getfqdn()
    pkg_validator = PkgV()
    # Third-party python package required by the test framework
    # (exact version enforced).
    pkg_validator.validate_pip3_pkgs(host=fqdn,
                                     pkgs={"Flask": "1.1.1"},
                                     skip_version_check=False)
    # RPM dependency; any installed version is acceptable.
    pkg_validator.validate_rpm_pkgs(host=fqdn,
                                    pkgs={"cortx-sspl-test": None},
                                    skip_version_check=True)
    # Load global, sspl and test configs
    Conf.load(SSPL_CONFIG_INDEX, sspl_config_path)
    Conf.load(SSPL_TEST_CONFIG_INDEX, sspl_test_config_path)
    # Take copy of supplied config passed to sspl_test and load it
    with open(self.sspl_test_gc_copy_file, "w") as f:
        f.write("")
    self.sspl_test_gc_copy_url = "yaml://%s" % self.sspl_test_gc_copy_file
    Conf.load(SSPL_TEST_GLOBAL_CONFIG, self.sspl_test_gc_copy_url)
    Conf.load("global_config", self.sspl_test_gc_url)
    Conf.copy("global_config", SSPL_TEST_GLOBAL_CONFIG)
    # Validate input configs
    machine_id = Utility.get_machine_id()
    self.node_type = Conf.get(
        SSPL_TEST_GLOBAL_CONFIG, "server_node>%s>type" % machine_id)
    enclosure_id = Conf.get(
        SSPL_TEST_GLOBAL_CONFIG,
        "server_node>%s>storage>enclosure_id" % machine_id)
    self.enclosure_type = Conf.get(
        SSPL_TEST_GLOBAL_CONFIG, "storage_enclosure>%s>type" % enclosure_id)
def merge_config(self, source_index: str, keys_to_include: list = None):
    """
    Replace keys listed in *keys_to_include* in the default index with the
    values from *source_index*, in place.  With no key list, every key is
    replaced.
    """
    # TODO recurse flag will be deprecated in future.
    # Do changes in all places wherever its applicable
    # refer upgrade_config() in merge.py
    Conf.copy(source_index, self.default_index, keys_to_include, recurse=True)
def test_conf_store_a_backup_and_save_a_copy(self):
    """Test by creating a backup file and copying then saving it back."""
    conf_file = 'json:/tmp/file1.json'
    load_config('csm_local', conf_file)
    backup_url = f"{conf_file}.bak"
    Conf.load('backup', backup_url)
    Conf.copy('csm_local', 'backup')
    Conf.save('backup')
    backup_keys = Conf.get_keys('backup', key_index=True)
    # The backup index must expose exactly the keys of the source index.
    expected_list = Conf.get_keys('csm_local')
    self.assertListEqual(expected_list, backup_keys)
def export_database_conf(cls):
    """Export database configuration to the Python dict."""
    export_index = 'exportdbconf'
    # Seed an in-memory KV store and mirror the DB config into it.
    Conf.load(export_index, 'dict:{"k":"v"}', fail_reload=False)
    Conf.copy(cls._db_index, export_index)
    db_config = {
        'databases': Conf.get(export_index, 'databases'),
        'models': Conf.get(export_index, 'models'),
    }
    # The consul port may be stored as a string; normalise it to int.
    consul_cfg = db_config['databases']["consul_db"]["config"]
    consul_cfg["port"] = int(consul_cfg["port"])
    return db_config
def dump_global_config(self):
    """Dump provisioner global config and load it."""
    url_spec = urlparse(self.global_config_copy_url)
    path = url_spec.path
    store_loc = os.path.dirname(path)
    # exist_ok=True avoids the check-then-create race present in the
    # original os.path.exists() guard.
    os.makedirs(store_loc, exist_ok=True)
    # Create/truncate the destination file so Conf.load() can open it.
    with open(path, "w") as f:
        f.write("")
    # Make copy of global config
    Conf.load(GLOBAL_CONFIG_INDEX, self.global_config_copy_url)
    Conf.copy(self.prvsnr_global_config, GLOBAL_CONFIG_INDEX)
    Conf.save(GLOBAL_CONFIG_INDEX)
def copy_input_config(self, stage=None):
    """Dump input config in required format"""
    # Copy input config in another index
    url_spec = urlparse(global_config_path)
    path = url_spec.path
    store_loc = os.path.dirname(path)
    # exist_ok=True avoids the check-then-create race present in the
    # original os.path.exists() guard.
    os.makedirs(store_loc, exist_ok=True)
    # Recreate the file when missing, or always during post_install.
    if not os.path.exists(path) or stage == "post_install":
        with open(path, "w") as f:
            f.write("")
    Conf.load(GLOBAL_CONFIG_INDEX, global_config_path)
    Conf.copy(PRVSNR_CONFIG_INDEX, GLOBAL_CONFIG_INDEX)
    Conf.save(GLOBAL_CONFIG_INDEX)
def test_conf_store_backup_and_save_a_copy(self):
    """Test by creating a backup file and copying then saving it back."""
    conf_file = 'json:/tmp/file1.json'
    load_config('csm_local', conf_file)
    Conf.load('backup', f"{conf_file}.bak")
    Conf.copy('csm_local', 'backup')
    Conf.save('backup')
    # Expected list should match the result_data list output
    expected_list = [
        'bridge>name',
        'bridge>username',
        'bridge>manufacturer',
        'bridge>model',
        'bridge>pin',
        'bridge>port',
        'bridge>lte_type[0]',
        'bridge>lte_type[1]',
    ]
    self.assertListEqual(expected_list, Conf.get_keys('backup'))
def process(self):
    """Run the SSPL QA test plan and restore the original configuration.

    Side effects: backs up /etc/sspl_tests.conf, temporarily points the
    global-config copy URL at the test config, copies the RabbitMQ
    password into the test config, runs run_qa_test.sh, then restores the
    saved settings/files and restarts sspl-ll.service.

    Raises:
        SetupError: if the QA test shell script exits non-zero.
    """
    self.plan = self.args.plan[0]
    self.avoid_rmq = self.args.avoid_rmq
    # Take back up of sspl test config
    sspl_test_backup = '/etc/sspl_tests.conf.back'
    shutil.copyfile(sspl_test_file_path, sspl_test_backup)
    # Add global config in sspl_test config and revert the changes once test completes.
    # Global config path in sspl_tests.conf will be referred by sspl_tests later
    global_config_copy_url = Conf.get(
        SSPL_CONFIG_INDEX, "SYSTEM_INFORMATION>global_config_copy_url")
    Conf.copy(GLOBAL_CONFIG_INDEX, SSPL_TEST_CONFIG_INDEX)
    Conf.set(SSPL_CONFIG_INDEX, "SYSTEM_INFORMATION>global_config_copy_url",
             sspl_test_config_path)
    Conf.save(SSPL_CONFIG_INDEX)
    # Enable & disable sensors based on environment
    update_sensor_info(SSPL_TEST_CONFIG_INDEX)
    # Get rabbitmq values from sspl.conf and update sspl_tests.conf
    rmq_passwd = Conf.get(SSPL_CONFIG_INDEX,
                          "RABBITMQEGRESSPROCESSOR>password")
    Conf.set(SSPL_TEST_CONFIG_INDEX, "RABBITMQEGRESSPROCESSOR>password",
             rmq_passwd)
    Conf.save(SSPL_TEST_CONFIG_INDEX)
    # TODO: Convert shell script to python
    # from cortx.sspl.sspl_test.run_qa_test import RunQATest
    # RunQATest(self.plan, self.avoid_rmq).run()
    CMD = "%s/run_qa_test.sh %s %s" % (TEST_DIR, self.plan, self.avoid_rmq)
    output, error, returncode = SimpleProcess(CMD).run(
        realtime_output=True)
    # Restore the original path/file & service, then throw exception
    # if execution is failed.
    Conf.set(SSPL_CONFIG_INDEX, "SYSTEM_INFORMATION>global_config_copy_url",
             global_config_copy_url)
    Conf.save(SSPL_CONFIG_INDEX)
    shutil.copyfile(sspl_test_backup, sspl_test_file_path)
    Service('dbus').process('restart', 'sspl-ll.service')
    if returncode != 0:
        raise SetupError(returncode, "%s - ERROR: %s - CMD %s",
                         self.name, error, CMD)
def import_database_conf(cls, db_conf: str):
    """Import database configuration from the external ConfStore."""
    # Pull consul credentials from the cluster configuration.
    consul_login = Conf.get(cls._cluster_index,
                            'cortx>external>consul>admin')
    consul_secret = Conf.get(cls._cluster_index,
                             'cortx>external>consul>secret')
    import_index = 'dbconfsrc'
    Conf.load(import_index, db_conf)
    Conf.copy(import_index, cls._db_index)
    # Point consul_db at this cluster's consul endpoint and credentials.
    overrides = (
        ('databases>consul_db>config>hosts[0]', cls._consul_host),
        ('databases>consul_db>config>port', cls._consul_port),
        ('databases>consul_db>config>login', consul_login),
        ('databases>consul_db>config>password', consul_secret),
    )
    for key, value in overrides:
        Conf.set(cls._db_index, key, value)
    Conf.save(cls._db_index)
def cluster_bootstrap(cortx_conf_url: str, force_override: bool = False):
    """
    Description: Configures Cluster Components

    1. Compares current installed version with New version
    2. Invoke Mini Provisioners of cluster components deploy/upgrade
       based on version compatibility

    Paramaters: [IN] CORTX Config URL
    """
    Conf.load(CortxProvisioner._conf_index, cortx_conf_url)
    Conf.load(CortxProvisioner._tmp_index,
              CortxProvisioner._tmp_cortx_conf_url)
    tmp_conf_keys = Conf.get_keys(CortxProvisioner._tmp_index)
    node_id = Conf.machine_id
    # Version recorded on this node from a previous provisioning run,
    # None on a fresh node.
    installed_version = Conf.get(
        CortxProvisioner._conf_index,
        f'node>{node_id}>provisioning>version')
    release_version = CortxProvisioner.cortx_release.get_release_version()
    if installed_version is None:
        # Fresh node: seed gconf from the tmp copy and deploy.
        Conf.copy(CortxProvisioner._tmp_index, CortxProvisioner._conf_index,
                  tmp_conf_keys)
        Conf.save(CortxProvisioner._conf_index)
        CortxProvisioner._apply_consul_config(CortxProvisioner._conf_index)
        CortxProvisioner.cluster_deploy(cortx_conf_url, force_override)
    else:
        # TODO: add a case where release_version > installed_version but is not compatible.
        # version_check contract (as used here): 1 = newer release,
        # 0 = same version, -1 = downgrade.
        ret_code = CortxProvisioner.cortx_release.version_check(
            release_version, installed_version)
        if ret_code == 1:
            # Newer compatible release: diff configs and upgrade.
            CortxProvisioner._prepare_diff(CortxProvisioner._conf_index,
                                           CortxProvisioner._tmp_index,
                                           CortxProvisioner._changeset_index)
            CortxProvisioner.cluster_upgrade(cortx_conf_url, force_override)
            # TODO: update_conf needs to be removed once gconf moves to consul.
            # Gconf update after upgrade should not be handled here if gconf is in consul.
            CortxProvisioner._update_conf(CortxProvisioner._conf_index,
                                          CortxProvisioner._tmp_index)
        # TODO: This will be removed once downgrade is also supported.
        elif ret_code == -1:
            raise CortxProvisionerError(errno.EINVAL,
                                        'Downgrade is Not Supported')
        elif ret_code == 0:
            # Same version: re-apply config and (re)deploy.
            Conf.copy(CortxProvisioner._tmp_index,
                      CortxProvisioner._conf_index, tmp_conf_keys)
            Conf.save(CortxProvisioner._conf_index)
            CortxProvisioner._apply_consul_config(
                CortxProvisioner._conf_index)
            CortxProvisioner.cluster_deploy(cortx_conf_url, force_override)
        else:
            raise CortxProvisionerError(
                errno.EINVAL,
                'Internal error. Could not determine version. Invalid image.')
def _apply_consul_config(_conf_idx: str):
    """Mirror gconf from *_conf_idx* into the consul-backed config store.

    Scans cortx>external>consul endpoints, stops at the first http(s) one
    (otherwise the last validated endpoint is used), loads the consul KV
    store at that endpoint and copies every key from _conf_idx into it.
    The resulting consul URL is persisted to const.CONSUL_CONF_URL.

    Raises:
        CortxProvisionerError: on a missing/invalid endpoint count, an
            empty endpoint, an unsupported scheme, or a ConfStore error.
    """
    try:
        num_endpoints = int(
            Conf.get(_conf_idx, 'cortx>external>consul>num_endpoints'))
        # BUG FIX: the original only rejected == 0; a negative count would
        # skip the loop entirely and leave consul_endpoint unbound
        # (NameError below).
        if num_endpoints < 1:
            raise CortxProvisionerError(
                errno.EINVAL,
                f"Invalid value for num_endpoints '{num_endpoints}'")
        for idx in range(0, num_endpoints):
            consul_endpoint = Conf.get(
                _conf_idx, f'cortx>external>consul>endpoints[{idx}]')
            if not consul_endpoint:
                raise CortxProvisionerError(
                    errno.EINVAL, "Consul Endpoint can't be empty.")
            if urlparse(consul_endpoint).scheme not in ['http', 'https', 'tcp']:
                raise CortxProvisionerError(
                    errno.EINVAL,
                    f"Invalid Consul Endpoint {consul_endpoint}")
            # Prefer the first http(s) endpoint.
            if 'http' in consul_endpoint:
                break
    except ConfError as e:
        raise CortxProvisionerError(
            errno.EINVAL, f"Unable to get consul endpoint detail , Error:{e}")
    gconf_consul_url = consul_endpoint.replace('http', 'consul') + '/conf'
    Conf.load(CortxProvisioner._cortx_gconf_consul_index, gconf_consul_url)
    Conf.copy(_conf_idx, CortxProvisioner._cortx_gconf_consul_index,
              Conf.get_keys(_conf_idx))
    Conf.save(CortxProvisioner._cortx_gconf_consul_index)
    # TODO: place the below code at a proper location when this function is removed.
    with open(const.CONSUL_CONF_URL, 'w') as f:
        f.write(gconf_consul_url)
def test_conf_store_ini(self):
    """INI backend: set/get/delete, persistence, copy and case sensitivity."""
    # basic set / set
    Conf.load('ini_1', 'ini:///tmp/file1.ini')
    Conf.set('ini_1', 'A>A1', '1')
    Conf.save('ini_1')
    # multi level key not supported by INI
    with self.assertRaises(KvError):
        Conf.set('ini_1', 'A>A1>A2', '1')
    # Check if operations reflected in file
    Conf.load('ini_2', 'ini:///tmp/file1.ini')
    with self.assertRaises(KvError):
        Conf.get('ini_2', 'A>A1>A2')
    self.assertEqual(Conf.get('ini_2', 'A>A1'), '1')
    Conf.delete('ini_2', 'A>A1')
    self.assertEqual(Conf.get('ini_2', 'A>A1'), None)
    self.assertEqual(Conf.get('ini_2', 'FOO>BAR'), None)
    # Confirm delete only reflected in memory
    Conf.load('ini_3', 'ini:///tmp/file1.ini')
    # BUG FIX: the original asserted on 'ini_2', whose key was just
    # deleted in memory above; the unsaved delete must be verified via the
    # freshly loaded 'ini_3' index, which still sees the file's value.
    self.assertEqual(Conf.get('ini_3', 'A>A1'), '1')
    # Test copy function
    Conf.load('ini_4', 'ini:///tmp/file2.ini')
    Conf.copy('ini_3', 'ini_4')
    self.assertEqual(Conf.get('ini_4', 'A>A1'), '1')
    with self.assertRaises(KvError):
        Conf.get('ini_4', 'A>A1>A2')
    Conf.save('ini_4')
    # Test key case sensitivity
    Conf.set('ini_3', 'A>A1', '1')
    Conf.set('ini_3', 'a>a1', '2')
    self.assertNotEqual(Conf.get('ini_3', 'A>A1'),
                        Conf.get('ini_3', 'A>a1'))
def copy(args):
    """Copy One or more Keys to the target config url"""
    # A single ';'-separated positional argument carries the key list;
    # with no argument every key is copied (None).
    key_list = args.args[0].split(';') if len(args.args) >= 1 else None
    target_index = 'target'
    ConfCli.load(args.target_url, target_index)
    Conf.copy(ConfCli._index, target_index, key_list)
def validate(self):
    """Check for required packages are installed."""
    # RPM dependency
    rpm_deps = {"cortx-sspl-test": None}
    # python 3rd party package dependency
    pip3_packages_dep = {"Flask": "1.1.1", "coverage": "5.5"}
    # coverage is only required when code-coverage collection is enabled.
    if not self.coverage_enabled:
        pip3_packages_dep.pop("coverage")
    # Validate pip3 python pkg with required version.
    for pkg, version in pip3_packages_dep.items():
        installed_pkg = None
        uninstalled_pkg = False
        try:
            # Look for exactly pkg==version in the current environment.
            pkg_req = Requirement.parse(f"{pkg}=={version}")
            installed_pkg = working_set.find(pkg_req)
        except VersionConflict:
            # A different version is installed; remove it so the required
            # one can be installed below.
            cmd = f'pip3 uninstall -y {pkg}'
            _, err, ret = SimpleProcess(cmd).run()
            if ret:
                raise TestException(
                    "Failed to uninstall the pip3 pkg: %s(v%s), "
                    "due to an Error: %s" % (pkg, version, err))
            uninstalled_pkg = True
        except Exception as err:
            raise TestException("Failed at verification of pip3 pkg: %s, "
                                "due to an Error: %s" % (pkg, err))
        # Install when the package was absent or just uninstalled.
        if not installed_pkg or uninstalled_pkg:
            cmd = f'pip3 install {pkg}=={version}'
            _, err, ret = SimpleProcess(cmd).run()
            if ret:
                raise TestException(
                    "Failed to install the pip3 pkg: %s(v%s), "
                    "due to an Error: %s" % (pkg, version, err))
        logger.info(f"Ensured Package Dependency: {pkg}(v{version}).")
    # Validate rpm dependencies
    pkg_validator = PkgV()
    pkg_validator.validate_rpm_pkgs(host=socket.getfqdn(),
                                    pkgs=rpm_deps,
                                    skip_version_check=True)
    # Load global, sspl and test configs
    Conf.load(SSPL_CONFIG_INDEX, sspl_config_path)
    Conf.load(SSPL_TEST_CONFIG_INDEX, sspl_test_config_path)
    # Take copy of supplied config passed to sspl_test and load it
    with open(self.sspl_test_gc_copy_file, "w") as f:
        f.write("")
    self.sspl_test_gc_copy_url = "yaml://%s" % self.sspl_test_gc_copy_file
    Conf.load(SSPL_TEST_GLOBAL_CONFIG, self.sspl_test_gc_copy_url)
    Conf.load("global_config", self.sspl_test_gc_url)
    Conf.copy("global_config", SSPL_TEST_GLOBAL_CONFIG)
    # Validate input configs
    machine_id = Utility.get_machine_id()
    self.node_type = Conf.get(SSPL_TEST_GLOBAL_CONFIG,
                              "server_node>%s>type" % machine_id)
    enclosure_id = Conf.get(
        SSPL_TEST_GLOBAL_CONFIG,
        "server_node>%s>storage>enclosure_id" % machine_id)
    self.enclosure_type = Conf.get(
        SSPL_TEST_GLOBAL_CONFIG,
        "storage_enclosure>%s>type" % enclosure_id)
def merge_config(self, source_index: str, keys_to_include: list = None):
    """
    Replace keys listed in *keys_to_include* in the default index with the
    values taken from *source_index*, in place.  When no key list is
    given, every key is replaced.
    """
    Conf.copy(source_index, self.default_index, keys_to_include)
def process(self):
    """Run the SSPL QA test plan with a dummy monitored systemd service.

    Side effects: backs up /etc/sspl_tests.conf, installs/enables/starts
    dummy_service.service, registers it under SERVICEMONITOR and lowers
    the inactive-time threshold, runs run_qa_test.sh, then restores every
    changed setting/file and restarts sspl-ll.service.

    Raises:
        SetupError: if the QA test shell script exits non-zero.
    """
    self.plan = self.args.plan[0]
    self.avoid_rmq = self.args.avoid_rmq
    # Take back up of sspl test config
    sspl_test_backup = '/etc/sspl_tests.conf.back'
    shutil.copyfile(sspl_test_file_path, sspl_test_backup)
    # Add global config in sspl_test config and revert the changes once test completes.
    # Global config path in sspl_tests.conf will be referred by sspl_tests later
    global_config_copy_url = Conf.get(
        SSPL_CONFIG_INDEX, "SYSTEM_INFORMATION>global_config_copy_url")
    Conf.copy(GLOBAL_CONFIG_INDEX, SSPL_TEST_CONFIG_INDEX)
    Conf.set(SSPL_CONFIG_INDEX, "SYSTEM_INFORMATION>global_config_copy_url",
             sspl_test_config_path)
    Conf.save(SSPL_CONFIG_INDEX)
    # Enable & disable sensors based on environment
    update_sensor_info(SSPL_TEST_CONFIG_INDEX)
    # Get rabbitmq values from sspl.conf and update sspl_tests.conf
    rmq_passwd = Conf.get(SSPL_CONFIG_INDEX,
                          "RABBITMQEGRESSPROCESSOR>password")
    Conf.set(SSPL_TEST_CONFIG_INDEX, "RABBITMQEGRESSPROCESSOR>password",
             rmq_passwd)
    Conf.save(SSPL_TEST_CONFIG_INDEX)
    # TODO: Move lines 90-116 & 125-127 to RunQATest class
    # Create dummy service and add service name in /etc/sspl.conf
    service_name = "dummy_service.service"
    service_file_path_src = f"{TEST_DIR}/alerts/os/dummy_service_files/dummy_service.service"
    service_executable_code_src = f"{TEST_DIR}/alerts/os/dummy_service_files/dummy_service.py"
    service_file_path_des = "/etc/systemd/system"
    service_executable_code_des = "/var/cortx/sspl/test"
    os.makedirs(service_executable_code_des, 0o777, exist_ok=True)
    shutil.copy(service_executable_code_src,
                f'{service_executable_code_des}/dummy_service.py')
    # Make service file executable.
    cmd = f"chmod +x {service_executable_code_des}/dummy_service.py"
    _, error, returncode = SimpleProcess(cmd).run()
    if returncode != 0:
        # Best-effort: warn and continue; the dummy service itself will fail.
        print("%s error occurred while executing cmd: %s" % (error, cmd))
        print("failed to assign execute permission for dummy_service.py."\
            " dummy_service will fail.")
    # Copy service file to /etc/systemd/system/ path.
    shutil.copyfile(service_file_path_src,
                    f'{service_file_path_des}/dummy_service.service')
    cmd = "systemctl daemon-reload"
    _, error, returncode = SimpleProcess(cmd).run()
    if returncode != 0:
        print(f"failed to execute '{cmd}', systemctl will be unable"\
            f" to manage the dummy_service.service \nError: {error}")
    self.dbus_service.enable(service_name)
    self.dbus_service.start(service_name)
    # Register the dummy service with the service monitor.
    service_list = Conf.get(SSPL_CONFIG_INDEX,
                            "SERVICEMONITOR>monitored_services")
    service_list.append(service_name)
    Conf.set(SSPL_CONFIG_INDEX, "SERVICEMONITOR>monitored_services",
             service_list)
    # Lower the inactive threshold so test failures surface quickly;
    # the original value is restored after the test run.
    threshold_inactive_time_original = Conf.get(
        SSPL_CONFIG_INDEX, "SERVICEMONITOR>threshold_inactive_time")
    threshold_inactive_time_new = 30
    Conf.set(SSPL_CONFIG_INDEX, "SERVICEMONITOR>threshold_inactive_time",
             threshold_inactive_time_new)
    Conf.save(SSPL_CONFIG_INDEX)
    # TODO: Convert shell script to python
    # from cortx.sspl.sspl_test.run_qa_test import RunQATest
    # RunQATest(self.plan, self.avoid_rmq).run()
    CMD = "%s/run_qa_test.sh %s %s" % (TEST_DIR, self.plan, self.avoid_rmq)
    output, error, returncode = SimpleProcess(CMD).run(
        realtime_output=True)
    # Restore the original path/file & service, then throw exception
    # if execution is failed.
    service_list.remove(service_name)
    Conf.set(SSPL_CONFIG_INDEX, "SERVICEMONITOR>monitored_services",
             service_list)
    Conf.set(SSPL_CONFIG_INDEX, "SERVICEMONITOR>threshold_inactive_time",
             threshold_inactive_time_original)
    Conf.set(SSPL_CONFIG_INDEX, "SYSTEM_INFORMATION>global_config_copy_url",
             global_config_copy_url)
    Conf.save(SSPL_CONFIG_INDEX)
    shutil.copyfile(sspl_test_backup, sspl_test_file_path)
    self.dbus_service.restart('sspl-ll.service')
    if returncode != 0:
        raise SetupError(returncode, "%s - ERROR: %s - CMD %s",
                         self.name, error, CMD)
def merge_config(self, configFile: str, oldSampleFile: str,
                 newSampleFile: str, unsafeAttributesFile: str,
                 filetype: str):
    """Core logic for updating config files during upgrade using conf store.

    Following is algorithm from merge:
    Iterate over all parameters sample.new file
    for every parameter, check
    - if it is marked as 'unsafe' in attributes file, skip
    - if it marked as 'safe' in the attributes file
        - diff the value in config and sample.old
            - if it is changed, skip
            - if it is not changed, we will overwrite the value in cfg
              file from sample.new
        - if it does not exist in cfg file add the value from sample.new
          file to cfg file
    - All the arrays in yaml are always overwritten

    filetype is the ConfStore URL scheme prefix (e.g. "yaml://") that is
    prepended to each file path.

    Raises:
        Exception: if configFile does not exist.
    """
    # If config file is not present then abort merging.
    if not os.path.isfile(configFile):
        Log.error("config file %s does not exist" % configFile)
        raise Exception("ERROR: config file %s does not exist" % configFile)
    Log.info("config file %s upgrade started." % configFile)
    # old sample file
    conf_old_sample = filetype + oldSampleFile
    conf_old_sample_index = "conf_old_sample_index"
    Conf.load(conf_old_sample_index, conf_old_sample)
    # new sample file
    conf_new_sample = filetype + newSampleFile
    conf_new_sample_index = "conf_new_sample_index"
    Conf.load(conf_new_sample_index, conf_new_sample)
    conf_new_sample_keys = Conf.get_keys(conf_new_sample_index)
    # unsafe attribute file
    conf_unsafe_file = filetype + unsafeAttributesFile
    conf_unsafe_file_index = "conf_unsafe_file_index"
    Conf.load(conf_unsafe_file_index, conf_unsafe_file)
    conf_unsafe_file_keys = Conf.get_keys(conf_unsafe_file_index)
    # active config file
    conf_file = filetype + configFile
    conf_file_index = "conf_file_index"
    Conf.load(conf_file_index, conf_file)
    conf_file_keys = Conf.get_keys(conf_file_index)
    # logic to determine which keys to merge.
    keys_to_overwrite = []
    for key in conf_new_sample_keys:
        # If key is marked for unsafe then do not modify/overwrite.
        if key in conf_unsafe_file_keys:
            continue
        # if key not present active config file then add it
        if key not in conf_file_keys:
            keys_to_overwrite.append(key)
        # if key is not unsafe and value is not changed by user then overwrite it.
        elif Conf.get(conf_file_index, key) == \
                Conf.get(conf_old_sample_index, key):
            keys_to_overwrite.append(key)
        # if user has changed the value of the key then skip it.
        else:
            continue
    Conf.copy(conf_new_sample_index, conf_file_index, keys_to_overwrite)
    Conf.save(conf_file_index)
    Log.info("config file %s upgrade completed" % configFile)