def test_elasticsearch(self):
    """
    This test only deploy the elasticsearch module, connect to it with and
    without credentials and teardown the environment

    Args:
        es (fixture) : fixture that deploy / teardown the elasticsearch

    """
    results_dir = get_full_test_logs_path(cname=self)
    log.info(f"Logs file path name is : {results_dir}")

    log.info("The ElasticSearch deployment test started.")
    if not self.es.get_health():
        # Not healthy yet - give the server one more grace period and
        # re-check before giving up.
        log.warning("The Status of the elasticsearch is Not OK")
        log.info("Waiting another 30 sec.")
        time.sleep(30)
        if self.es.get_health():
            log.info("The Status of the elasticsearch is OK")
        else:
            log.error("The Status of the elasticsearch is Not OK ! Exiting.")
    else:
        log.info("The Status of the elasticsearch is OK")

    # Final readiness gate: fail the test if the server never became healthy.
    assert self.es.get_health(), "The Elasticsearch module is not ready !"
    log.info("\nThe Elastic-Search server information :\n")
    log.info(f"The Elasticsearch IP is {self.es.get_ip()}")
    log.info(f"The Elasticsearch port is {self.es.get_port()}")
    log.info(f"The Password to connect is {self.es.get_password()}")

    log.info(f"Test UUDI is : {self.smallfile_run(self.es)}")

    assert self.es.dumping_all_data(results_dir), "Can not Retrieve the test data"
    assert run_command(
        f"ls {results_dir}/FullResults.tgz"
    ), "Results file did not retrieve from pod"

    # Try to push the dumped results to the development ES server.
    dev_host = defaults.ELASTICSEARCH_DEV_IP
    dev_port = defaults.ELASTICSEARCE_PORT
    try:
        main_es = Elasticsearch(
            [
                {
                    "host": dev_host,
                    "port": dev_port,
                    "url": f"http://{dev_host}:{dev_port}",
                }
            ]
        )
    except esexp.ConnectionError:
        log.warning("Cannot connect to ES server in the LocalServer")
        main_es = None
    assert elasticsearch_load(
        main_es, results_dir
    ), "Can not load data into Main ES server"
def elasticsearch_load(connection, target_path):
    """
    Load all data from target_path/results into an elasticsearch (es) server.

    Args:
        connection (dict): a dictionary with the main elasticsearch server
            information {host: the server ip address, port: the port to connect}
        target_path (str): the path where data was dumped into

    Returns:
        bool: True if loading data succeed, False otherwise

    """
    all_files = run_command(f"ls {target_path}/results/", out_format="list")
    if "Error in command" in all_files:
        log.error("There is No data to load into ES server")
        return False

    log.info(f"Loading data from {target_path} to ES server at {connection}")
    try:
        log.info(f"Creating new ES connection to {connection}")
        main_es = Elasticsearch([connection])
        log.debug(f"Connection info : {main_es}")
    except esexp.ConnectionError:
        log.error(f"Can not connect to the main ES server on {connection}")
        return False

    for ind in all_files:
        if ".data." not in ind:  # load only data files and not mapping info
            continue
        file_name = f"{target_path}/results/{ind}"
        ind_name = ind.split(".")[0]
        log.info(f"Loading the {ind} data into the ES server")
        with open(file_name) as json_file:
            # Iterate the file directly instead of a manual readline() loop.
            for line in json_file:
                # A blank line is truthy ("\n") and would crash json.loads(),
                # so skip it explicitly.
                if not line.strip():
                    continue
                try:
                    full_data = json.loads(line)
                except json.JSONDecodeError as err:
                    # One corrupted document must not abort the whole load.
                    log.error(f"Can not parse line in {file_name} : {err}")
                    continue
                log.debug(f"Loading {full_data} into the ES")
                main_es.index(index=ind_name, doc_type="_doc", body=full_data)
    return True
def cleanup(self):
    """
    Do cleanup in the benchmark-operator namespace.
    delete the benchmark, an make sure no PVC's an no PV's are left.
    """
    log.info("Deleting FIO benchmark")
    self.benchmark_obj.delete()
    # Give the operator time to actually tear the workload down.
    time.sleep(180)

    # Getting all PVCs created in the test (if left).
    NL = "\\n"  # NewLine character
    command = ["oc", "get", "pvc", "-n"]
    command.append(benchmark_operator.BMO_NAME)
    command.append("-o")
    command.append("template")
    command.append("--template")
    command.append("'{{range .items}}{{.metadata.name}}{{\"" + NL + "\"}}{{end}}'")
    pvcs_list = run_command(command, out_format="list")
    log.info(f"list of all PVCs :{pvcs_list}")
    for pvc in pvcs_list:
        pvc = pvc.replace("'", "")
        run_command(f"oc -n {benchmark_operator.BMO_NAME} delete pvc {pvc}")

    # Getting all PVs created in the test (if left).
    # Reuse the command list: swap the resource type, replace the
    # go-template (index 8 is the template argument) and drop the
    # namespace flag since PVs are cluster-scoped.
    command[2] = "pv"
    command[8] = (
        "'{{range .items}}{{.metadata.name}} {{.spec.claimRef.namespace}}{{\""
        + NL
        + "\"}}{{end}}'"
    )
    command.remove("-n")
    command.remove(benchmark_operator.BMO_NAME)
    pvs_list = run_command(command, out_format="list")
    log.info(f"list of all PVs :{pvs_list}")
    for line in pvs_list:
        try:
            pv, ns = line.split(" ")
            pv = pv.replace("'", "")
            if ns == benchmark_operator.BMO_NAME:
                log.info(f"Going to delete {pv}")
                run_command(f"oc delete pv {pv}")
        except Exception as err:
            # Best-effort cleanup: a malformed line (e.g. a PV without a
            # claimRef namespace) must not abort the teardown, but it
            # should not be swallowed silently either.
            log.debug(f"Skipping PV line {line!r} : {err}")
def dumping_all_data(self, target_path):
    """
    Dump All data from the internal ES server to .tgz file.

    Args:
        target_path (str): the path where the results file will be copy into

    Return:
        bool: True if the dump operation succeed and return the results data
            to the host otherwise False

    """
    log.info("dumping data from ES server to .tgz file")
    rsh_cmd = (
        f"rsh {self.dump_pod} /elasticsearch-dump/esdumper.py "
        f"--ip {self.get_ip()} --port {self.get_port()}"
    )
    dump_out = self.ocp.exec_oc_cmd(rsh_cmd, out_yaml_format=False, timeout=1200)
    if "ES dump is done." not in dump_out:
        log.error("There is no data in the Elasticsearch server")
        return False

    # The dumper prints the generated archive name as the last word.
    src_file = dump_out.split()[-1]
    log.info(f"Copy {src_file} from the client pod")
    cp_command = f"cp {self.dump_pod}:{src_file} {target_path}/FullResults.tgz"
    cp_out = self.ocp.exec_oc_cmd(cp_command, timeout=120)
    log.info(f"The output from the POD is {cp_out}")

    log.info("Extracting the FullResults.tgz file")
    untar_out = run_command(
        f"tar zxvf {target_path}/FullResults.tgz", cwd=target_path
    )
    log.debug(f"The untar results is {untar_out}")
    if "Error in command" in untar_out:
        log.warning("Can not untar the dumped file")
        return False
    return True
def cleanup(self):
    """
    Do cleanup in the ripsaw namespace: delete the benchmark and make sure
    no PVC's and no PV's are left behind.
    """
    log.info("Deleting FIO benchmark")
    self.fio_cr_obj.delete()
    # Give the operator time to actually tear the workload down.
    time.sleep(180)

    # Getting all PVCs created in the test (if left).
    NL = "\\n"  # NewLine character
    command = ["oc", "get", "pvc", "-n"]
    command.append(constants.RIPSAW_NAMESPACE)
    command.append("-o")
    command.append("template")
    command.append("--template")
    command.append("'{{range .items}}{{.metadata.name}}{{\"" + NL + "\"}}{{end}}'")
    pvcs_list = run_command(command, out_format="list")
    log.info(f"list of all PVCs :{pvcs_list}")
    for pvc in pvcs_list:
        pvc = pvc.replace("'", "")
        run_command(f"oc -n {constants.RIPSAW_NAMESPACE} delete pvc {pvc}")

    # Getting all PVs created in the test (if left).
    # Reuse the command list: swap the resource type, replace the
    # go-template (index 8 is the template argument) and drop the
    # namespace flag since PVs are cluster-scoped.
    command[2] = "pv"
    command[8] = (
        "'{{range .items}}{{.metadata.name}} {{.spec.claimRef.namespace}}{{\""
        + NL
        + "\"}}{{end}}'"
    )
    command.remove("-n")
    command.remove(constants.RIPSAW_NAMESPACE)
    pvs_list = run_command(command, out_format="list")
    log.info(f"list of all PVs :{pvs_list}")
    for line in pvs_list:
        try:
            pv, ns = line.split(" ")
        except ValueError:
            # A PV without a claimRef namespace does not split into two
            # fields - skip it instead of crashing the teardown.
            log.debug(f"Can not parse PV line : {line!r}")
            continue
        pv = pv.replace("'", "")
        if ns == constants.RIPSAW_NAMESPACE:
            log.info(f"Going to delete {pv}")
            run_command(f"oc delete pv {pv}")
def elasticsearch_load(connection, target_path):
    """
    Load all data from target_path/results into an elasticsearch (es) server.

    Args:
        connection (obj): an elasticsearch connection object
        target_path (str): the path where data was dumped into

    Returns:
        bool: True if loading data succeed, False otherwise

    """

    # define a function that will load a text file
    def get_data_from_text_file(json_file):
        """
        This function will return a list of docs stored in a text file

        Args:
            json_file (str): the file name to look for docs in

        Returns:
            list : list of documents as json dicts

        """
        # Use a context manager so the file handle is closed deterministically
        # (the previous open() inside a comprehension leaked the handle).
        with open(str(json_file), encoding="utf8", errors="ignore") as fd:
            docs = [line.strip() for line in fd]
        log.info(f"String docs length: {len(docs)}")
        doc_list = []
        for num, doc in enumerate(docs):
            try:
                doc_list.append(json.loads(doc))
            except json.decoder.JSONDecodeError as err:
                # Log the bad document and keep going - one corrupted line
                # should not abort the whole load.
                log.error(
                    f"ERROR for num: {num} -- JSONDecodeError: {err} for doc: {doc}"
                )
        log.info(f"Dict docs length: {len(doc_list)}")
        return doc_list

    all_files = run_command(f"ls {target_path}/results/", out_format="list")
    if "Error in command" in all_files:
        log.error("There is No data to load into ES server")
        return False
    if connection is None:
        log.warning("There is no elasticsearch server to load data into")
        return False

    log.info(f"The ES connection is {connection}")
    for ind in all_files:
        if ".data." in ind:  # load only data files and not mapping info
            file_name = f"{target_path}/results/{ind}"
            ind_name = ind.split(".")[0]
            log.info(f"Loading the {ind} data into the ES server")
            docs_list = get_data_from_text_file(file_name)
            try:
                log.info(
                    "Attempting to index the list of docs using helpers.bulk()"
                )
                resp = helpers.bulk(connection, docs_list, index=ind_name)
                log.info(f"helpers.bulk() RESPONSE: {resp}")
            except Exception as err:
                # Bulk indexing is best-effort per index; report and continue.
                log.error(f"Elasticsearch helpers.bulk() ERROR:{err}")
    return True
def test_elasticsearch(self):
    """
    This test do the following operations:

        * deploy the elasticsearch module
        * connect to it
        * run a simple SmallFile benchmark (to verify usability)
        * dump the results to a file
        * push the results from the file to the Dev. ES.
        * teardown the environment

    """
    log.info("Test with 'Dummy' Storageclass")
    try:
        self.es = ElasticSearch(sc="dummy")
    except ElasticSearchNotDeployed:
        log.info("Raised as expected !")

    log.info("Test with 'Real' Storageclass")
    try:
        self.es = ElasticSearch()
    except ElasticSearchNotDeployed as ex:
        log.error("Raise as expected !")
        raise ex

    results_dir = get_full_test_logs_path(cname=self)
    log.info(f"Logs file path name is : {results_dir}")

    log.info("The ElasticSearch deployment test started.")
    if not self.es.get_health():
        # Not healthy yet - give the server one more grace period and
        # re-check before giving up.
        log.warning("The Status of the elasticsearch is Not OK")
        log.info("Waiting another 30 sec.")
        time.sleep(30)
        if self.es.get_health():
            log.info("The Status of the elasticsearch is OK")
        else:
            log.error("The Status of the elasticsearch is Not OK ! Exiting.")
    else:
        log.info("The Status of the elasticsearch is OK")

    # Final readiness gate: fail the test if the server never became healthy.
    assert self.es.get_health(), "The Elasticsearch module is not ready !"
    log.info("\nThe Elastic-Search server information :\n")
    log.info(f"The Elasticsearch IP is {self.es.get_ip()}")
    log.info(f"The Elasticsearch port is {self.es.get_port()}")
    log.info(f"The Password to connect is {self.es.get_password()}")

    log.info(f"Test UUDI is : {self.smallfile_run(self.es)}")

    assert self.es.dumping_all_data(results_dir), "Can not Retrieve the test data"
    assert run_command(
        f"ls {results_dir}/FullResults.tgz"
    ), "Results file did not retrieve from pod"

    # Try to use the development ES server for testing the elasticsearch_load
    # function to push data into ES server
    dev_host = defaults.ELASTICSEARCH_DEV_IP
    dev_port = defaults.ELASTICSEARCE_PORT
    try:
        main_es = Elasticsearch(
            [
                {
                    "host": dev_host,
                    "port": dev_port,
                    "url": f"http://{dev_host}:{dev_port}",
                }
            ]
        )
    except esexp.ConnectionError:
        log.warning("Cannot connect to ES server in the LocalServer")
        main_es = None

    assert elasticsearch_load(
        main_es, results_dir
    ), "Can not load data into Main ES server"