def __validate_results(self, result_file_paths, sent_messages):
    def read_msg_from_json(msg):
        obj = json.loads(msg)
        return {
            "message": obj["message"],
            "source": obj["source"],
            "count": obj["count"],
        }

    received_messages = []
    for path in result_file_paths:
        logging.info(f"Reading file {path}")
        with open(path, "r") as f:
            lines = f.readlines()
            logging.debug("First lines from file:")
            logging.debug("".join(lines[:5]))
            received_messages.extend(
                [read_msg_from_json(line) for line in lines])

    logging.info("Validating results")
    return validate_all(
        (len(received_messages) == len(sent_messages),
         f"Expected to see {len(sent_messages)} messages in out file, but found {len(received_messages)}"),
        (sorted(sent_messages, key=lambda k: k["count"]) ==
         sorted(received_messages, key=lambda k: k["count"]),
         "Sent messages differ from results"))
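# The snippets in this file rely on a project-level `validate_all` helper that
# is not shown here. Below is a minimal sketch of one plausible implementation,
# assuming it accepts (condition, error_message) tuples and returns an object
# with a `passed` flag and the collected error messages; the real helper may
# differ in name and shape.
from typing import List, NamedTuple, Tuple


class TestResultSketch(NamedTuple):
    passed: bool
    errors: List[str]


def validate_all_sketch(*checks: Tuple[bool, str]) -> TestResultSketch:
    # Collect the message of every check whose condition evaluated to False.
    errors = [message for ok, message in checks if not ok]
    return TestResultSketch(passed=not errors, errors=errors)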
def run(self, scoped) -> Tuple[TestResult, Any]:
    env = os.environ.copy()
    if scoped:
        env["LD_PRELOAD"] = config.scope_path
    logging.debug(f"Command is {self.path}. Environment {env}")
    completed_proc = subprocess.run([self.path],
                                    env=env,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    universal_newlines=True,
                                    timeout=config.timeout)
    result = validate_all((
        completed_proc.returncode == 0,
        f"Syscall test exited with return code {completed_proc.returncode}"
    ))
    if not result.passed:
        logging.warning(completed_proc.stderr)
    data = {
        "return_code": completed_proc.returncode,
        "stdout": completed_proc.stdout,
        "stderr": completed_proc.stderr
    }
    return result, data
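# Standalone illustration of the LD_PRELOAD pattern used above: copy the
# current environment, point LD_PRELOAD at the shared library, and run the
# target binary under it. The paths in the usage example are placeholders,
# not values taken from the test configuration.
import os
import subprocess


def run_with_preload(binary_path: str, preload_lib: str, timeout: int = 60):
    env = os.environ.copy()
    env["LD_PRELOAD"] = preload_lib  # the dynamic loader injects this library first
    return subprocess.run([binary_path],
                          env=env,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True,
                          timeout=timeout)


# Example (hypothetical paths):
# proc = run_with_preload("/usr/bin/true", "/usr/lib/libscope.so")
# print(proc.returncode)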
def do_run(self, scoped) -> Tuple[TestResult, Any]:
    logging.info(
        f"Connecting to Splunk. User {config.username}, Password {config.password}"
    )
    service = client.connect(username=config.username,
                             password=config.password)
    service.namespace['owner'] = 'Nobody'

    collection_name = f"coll_{random_string(4)}"
    self.__create_collection(collection_name, service)
    collection = service.kvstore[collection_name]

    records_num = 100
    records = [
        json.dumps({"val": random_string(5)}) for _ in range(records_num)
    ]

    logging.info(f"Inserting {len(records)} records in kvstore")
    for record in records:
        collection.data.insert(record)

    found_records = collection.data.query()
    logging.info(f"Found {len(found_records)} records")

    return validate_all((
        len(found_records) == records_num,
        f"Expected to have {records_num} records in kvstore, but found {len(found_records)}"
    )), None
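# `__create_collection` is not shown above. A minimal sketch of what it might
# do with the Splunk Python SDK (splunklib), assuming `service` is the object
# returned by `client.connect`; the real helper may pass extra options or
# handle errors differently.
def create_collection_sketch(collection_name, service):
    # KVStoreCollections.create registers a new KV store collection in the
    # current app/owner namespace, after which it is reachable via
    # service.kvstore[collection_name].
    service.kvstore.create(collection_name)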
def validate(self, test_data: Any, scope_messages: List[str]) -> TestResult:
    return validate_all(
        (any("net.tx" in msg for msg in scope_messages if "#proc:nginx" in msg),
         "No 'net.tx' metrics were collected"),
        (any("net.rx" in msg for msg in scope_messages if "#proc:nginx" in msg),
         "No 'net.rx' metrics were collected"))
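# Illustration of the filtering above with hypothetical StatsD-style metric
# lines tagged with a process name; the real messages emitted by the scoped
# process may be formatted differently.
sample_scope_messages = [
    "net.tx:1024|c|#proc:nginx,host:web1",
    "net.rx:2048|c|#proc:nginx,host:web1",
    "fs.open:3|c|#proc:curl,host:web1",
]
# Only messages tagged '#proc:nginx' are considered for each metric name.
assert any("net.tx" in m for m in sample_scope_messages if "#proc:nginx" in m)
assert any("net.rx" in m for m in sample_scope_messages if "#proc:nginx" in m)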
def do_run(self, scoped) -> Tuple[TestResult, Any]:
    logging.info(f"Connecting to Elasticsearch at {config.host}")
    es = self.__connect_to_es()
    logging.info(f"Elastic info: {es.info()}")

    documents_num = 100000
    logging.info(f"Sending {documents_num} documents to Elastic.")
    index = f"test_{random_string(4)}"
    es.indices.create(
        index=index,
        body={
            "mappings": {
                "properties": {
                    "bucket": {"type": "integer"},
                    "body": {"type": "keyword"},
                }
            }
        }
    )
    helpers.bulk(es, self.documents_generator(documents_num), index=index)

    logging.info("Refreshing elastic index")
    es.indices.refresh(index=index)

    docs_in_index = es.count(index=index)["count"]

    limit = 2
    expected_results = int(documents_num / 10)
    sr = es.search(
        index=index,
        body={
            "query": {"term": {"bucket": 2}},
            "sort": [{"body": {"order": "desc"}}],
            "size": limit,
        }
    )

    return validate_all(
        (docs_in_index == documents_num,
         f"Expected to have {documents_num} docs in index, but found {docs_in_index}"),
        (sr['hits']['total']['value'] == expected_results,
         f"Expected to have {expected_results} search results, but found {sr['hits']['total']['value']}"),
        (len(sr['hits']['hits']) == limit,
         f"Expected to have {limit} hits returned, but found {len(sr['hits']['hits'])}"),
    ), sr
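# `documents_generator` is not shown above. Since the test expects
# documents_num / 10 hits for the term query on bucket == 2, a plausible
# sketch spreads documents evenly over ten buckets; the real generator may
# differ.
import random
import string


def documents_generator_sketch(documents_num):
    for i in range(documents_num):
        yield {
            "bucket": i % 10,  # ten evenly sized buckets: bucket == 2 matches documents_num / 10 docs
            "body": "".join(random.choices(string.ascii_lowercase, k=8)),  # keyword field used for sorting
        }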
def append(self, line):
    try:
        split_line = validation.validate_all(line, self.taxi)
    except validation.Wrongstr:
        print('The string "', line,
              '" was not imported because it is not a string data type')
        return
    except validation.Wrongtype:
        print('The string "', line,
              '" was not imported because a wrong taxi type was entered')
        return
    except validation.Wrongstarttime:
        print('The string "', line,
              '" was not imported because the time format is incorrect')
        return
    except ValueError:
        print('The string "', line, '" was not imported')
        return
    self.taxi.append(takciinfo(*split_line))
def validate_ab(benchmark_results):
    return validate_all(
        (benchmark_results.failed_requests == 0,
         f"Failed requests detected: {benchmark_results.failed_requests}"),
        (benchmark_results.write_errors == 0,
         f"Write errors detected: {benchmark_results.write_errors}"))
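# Example usage with a hypothetical result object; the real parser that
# produces `benchmark_results` from the ApacheBench output is not shown here.
from collections import namedtuple

AbResultsSketch = namedtuple("AbResultsSketch",
                             ["failed_requests", "write_errors"])

# result = validate_ab(AbResultsSketch(failed_requests=0, write_errors=0))
# assert result.passed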